1
2 #include "tpl_internal.h"
3
4 #include <string.h>
5 #include <fcntl.h>
6 #include <unistd.h>
7 #include <sys/eventfd.h>
8
9 #include <tbm_bufmgr.h>
10 #include <tbm_surface.h>
11 #include <tbm_surface_internal.h>
12 #include <tbm_surface_queue.h>
13
14 #include <wayland-client.h>
15 #include <wayland-tbm-server.h>
16 #include <wayland-tbm-client.h>
17 #include <wayland-egl-backend.h>
18
19 #include <tdm_client.h>
20
21 #include "wayland-egl-tizen/wayland-egl-tizen.h"
22 #include "wayland-egl-tizen/wayland-egl-tizen-priv.h"
23
24 #ifndef TIZEN_FEATURE_ENABLE
25 #define TIZEN_FEATURE_ENABLE 1
26 #endif
27
28 #if TIZEN_FEATURE_ENABLE
29 #include <tizen-surface-client-protocol.h>
30 #include <presentation-time-client-protocol.h>
31 #include <linux-explicit-synchronization-unstable-v1-client-protocol.h>
32 #endif
33
34 #include "tpl_utils_gthread.h"
35
36 static int wl_egl_buffer_key;
37 #define KEY_WL_EGL_BUFFER (unsigned long)(&wl_egl_buffer_key)
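/* Hedged usage sketch (assumption: as in other tpl backends, this key is used
 * with libtbm's per-surface user-data API to attach a tpl_wl_egl_buffer_t to
 * its tbm_surface_h; the exact call sites appear later in this file):
 *
 *     tbm_surface_internal_add_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
 *                                        (tbm_data_free)__cb_wl_egl_buffer_free);
 *     tbm_surface_internal_set_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
 *                                        (void *)wl_egl_buffer);
 *     ...
 *     tbm_surface_internal_get_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
 *                                        (void **)&wl_egl_buffer);
 */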
38
39 /* In wayland, the application and the compositor each create their own drawing buffers. A size of more than 2 is recommended. */
40 #define BUFFER_ARRAY_SIZE 9
41
42 typedef struct _tpl_wl_egl_display tpl_wl_egl_display_t;
43 typedef struct _tpl_wl_egl_surface tpl_wl_egl_surface_t;
44 typedef struct _tpl_wl_egl_buffer  tpl_wl_egl_buffer_t;
45 typedef struct _surface_vblank     tpl_surface_vblank_t;
46
47 #define wl_egl_display(ptr) *wl_egl_display = (tpl_wl_egl_display_t *)ptr;
48 #define wl_egl_surface(ptr) *wl_egl_surface = (tpl_wl_egl_surface_t *)ptr;
49 #define wl_egl_buffer(ptr) *wl_egl_buffer = (tpl_wl_egl_buffer_t *)ptr;
50 #define tizen_private(ptr) *tizen_private = (struct tizen_private *)ptr;
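/* These macros are used as declarators. For example (see the gsource callbacks
 * below),
 *     tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
 * expands to
 *     tpl_wl_egl_display_t *wl_egl_display =
 *             (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
 */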
51
52 struct _tpl_wl_egl_display {
53         tpl_gsource                  *disp_source;
54         tpl_gthread                  *thread;
55         tpl_gmutex                    wl_event_mutex;
56
57         struct wl_display            *wl_display;
58         struct wl_event_queue        *ev_queue;
59         struct wayland_tbm_client    *wl_tbm_client;
60         int                           last_error; /* errno of the last wl_display error */
61
62         tpl_bool_t                    wl_initialized;
63
64         tpl_bool_t                    use_wait_vblank;
65         tpl_bool_t                    use_explicit_sync;
66         tpl_bool_t                    use_tss;
67         tpl_bool_t                    prepared;
68         /* To make sure that tpl_gsource has been successfully finalized. */
69         tpl_bool_t                    gsource_finalized;
70         tpl_gmutex                    disp_mutex;
71         tpl_gcond                     disp_cond;
72         struct {
73                 tdm_client               *tdm_client;
74                 tpl_gsource              *tdm_source;
75                 int                       tdm_display_fd;
76                 tpl_bool_t                tdm_initialized;
77                 tpl_list_t               *surface_vblanks;
78
79                 /* To make sure that tpl_gsource has been successfully finalized. */
80                 tpl_bool_t                gsource_finalized;
81                 tpl_gmutex                tdm_mutex;
82                 tpl_gcond                 tdm_cond;
83         } tdm;
84
85 #if TIZEN_FEATURE_ENABLE
86         struct tizen_surface_shm     *tss; /* used for surface buffer_flush */
87         struct wp_presentation       *presentation; /* for presentation feedback */
88         struct zwp_linux_explicit_synchronization_v1 *explicit_sync; /* for explicit fence sync */
89 #endif
90 };
91
92 typedef enum surf_message {
93         NONE_MESSAGE = 0,
94         INIT_SURFACE,
95         ACQUIRABLE,
96 } surf_message;
97
98 struct _tpl_wl_egl_surface {
99         tpl_gsource                  *surf_source;
100
101         tbm_surface_queue_h           tbm_queue;
102         int                           num_buffers;
103
104         struct wl_egl_window         *wl_egl_window;
105         struct wl_surface            *wl_surface;
106
107 #if TIZEN_FEATURE_ENABLE
108         struct zwp_linux_surface_synchronization_v1 *surface_sync; /* for explicit fence sync */
109         struct tizen_surface_shm_flusher *tss_flusher; /* used for surface buffer_flush */
110 #endif
111
112         tpl_surface_vblank_t         *vblank;
113
114         /* surface information */
115         int                           render_done_cnt;
116         unsigned int                  serial;
117
118         int                           width;
119         int                           height;
120         int                           format;
121         int                           latest_transform;
122         int                           rotation;
123         int                           post_interval;
124
125         tpl_wl_egl_display_t         *wl_egl_display;
126         tpl_surface_t                *tpl_surface;
127
128         /* wl_egl_buffer list for buffer tracing */
129         tpl_list_t                   *buffers;
130         int                           buffer_cnt; /* the number of using wl_egl_buffers */
131         tpl_gmutex                    buffers_mutex;
132         tbm_surface_h                 last_enq_buffer;
133
134         tpl_list_t                   *presentation_feedbacks; /* for tracing presentation feedbacks */
135
136         struct {
137                 tpl_gmutex                mutex;
138                 int                       fd;
139         } commit_sync;
140
141         struct {
142                 tpl_gmutex                mutex;
143                 int                       fd;
144         } presentation_sync;
145
146         tpl_gmutex                    surf_mutex;
147         tpl_gcond                     surf_cond;
148
149         surf_message                  sent_message;
150
151         /* for waiting draw done */
152         tpl_bool_t                    use_render_done_fence;
153         tpl_bool_t                    is_activated;
154         tpl_bool_t                    reset; /* TRUE if the queue was reset externally */
155         tpl_bool_t                    need_to_enqueue;
156         tpl_bool_t                    prerotation_capability;
157         tpl_bool_t                    vblank_done;
158         tpl_bool_t                    vblank_enable;
159         tpl_bool_t                    set_serial_is_used;
160         tpl_bool_t                    initialized_in_thread;
161         tpl_bool_t                    frontbuffer_activated;
162
163         /* To make sure that tpl_gsource has been successfully finalized. */
164         tpl_bool_t                    gsource_finalized;
165 };
166
167 struct _surface_vblank {
168         tdm_client_vblank            *tdm_vblank;
169         tpl_wl_egl_surface_t         *wl_egl_surface;
170         tpl_list_t                   *waiting_buffers; /* for FIFO/FIFO_RELAXED modes */
171         tpl_gmutex                    mutex;
172 };
173
174 typedef enum buffer_status {
175         RELEASED = 0,             // 0
176         DEQUEUED,                 // 1
177         ENQUEUED,                 // 2
178         ACQUIRED,                 // 3
179         WAITING_SIGNALED,         // 4
180         WAITING_VBLANK,           // 5
181         COMMITTED,                // 6
182 } buffer_status_t;
183
184 static const char *status_to_string[7] = {
185         "RELEASED",                 // 0
186         "DEQUEUED",                 // 1
187         "ENQUEUED",                 // 2
188         "ACQUIRED",                 // 3
189         "WAITING_SIGNALED",         // 4
190         "WAITING_VBLANK",           // 5
191         "COMMITTED",                // 6
192 };
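/* Sketch of the expected buffer life-cycle, in enum order (assumption based on
 * the state names; the transitions are driven by the dequeue/enqueue/acquire/
 * commit paths implemented below):
 *     RELEASED -> DEQUEUED -> ENQUEUED -> ACQUIRED
 *              -> WAITING_SIGNALED -> WAITING_VBLANK -> COMMITTED -> RELEASED
 * status_to_string[wl_egl_buffer->status] is used when tracing, e.g.
 *     TPL_INFO("[BUFFER]", "bo(%d) status(%s)", wl_egl_buffer->bo_name,
 *              status_to_string[wl_egl_buffer->status]);
 */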
193
194 struct _tpl_wl_egl_buffer {
195         tbm_surface_h                 tbm_surface;
196         int                           bo_name;
197
198         struct wl_proxy              *wl_buffer;
199         int                           dx, dy; /* position to attach to wl_surface */
200         int                           width, height; /* size to attach to wl_surface */
201
202         buffer_status_t               status; /* for tracing buffer status */
203
204         /* for damage region */
205         int                           num_rects;
206         int                          *rects;
207
208         /* for wayland_tbm_client_set_buffer_transform */
209         int                           w_transform;
210         tpl_bool_t                    w_rotated;
211
212         /* for wl_surface_set_buffer_transform */
213         int                           transform;
214
215         /* for wayland_tbm_client_set_buffer_serial */
216         unsigned int                  serial;
217
218         /* for checking need_to_commit (frontbuffer mode) */
219         tpl_bool_t                    need_to_commit;
220
221         /* for checking draw done */
222         tpl_bool_t                    draw_done;
223
224 #if TIZEN_FEATURE_ENABLE
225         /* to get release event via zwp_linux_buffer_release_v1 */
226         struct zwp_linux_buffer_release_v1 *buffer_release;
227 #endif
228         /* Each buffer owns its release_fence_fd until it passes
229          * ownership of the fd to EGL. */
230         int32_t                       release_fence_fd;
231
232         /* Each buffer owns its acquire_fence_fd.
233          * If zwp_linux_buffer_release_v1 is used, ownership of this fd
234          * is passed to the display server.
235          * Otherwise it is used as a fence to wait for render done
236          * on the tpl thread. */
237         int32_t                       acquire_fence_fd;
238
239         /* Fd used to signal when wl_surface_commit is called with this buffer */
240         int32_t                       commit_sync_fd;
241
242         /* Fd used to signal when the presentation feedback
243          * is received from the display server */
244         int32_t                       presentation_sync_fd;
245
246         tpl_gsource                  *waiting_source;
247
248         tpl_gmutex                    mutex;
249         tpl_gcond                     cond;
250
251         tpl_wl_egl_surface_t         *wl_egl_surface;
252 };
253
254 #if TIZEN_FEATURE_ENABLE
255 struct pst_feedback {
256         /* to get presentation feedback from display server */
257         struct wp_presentation_feedback *presentation_feedback;
258
259         int32_t                          pst_sync_fd;
260
261         int                              bo_name;
262         tpl_wl_egl_surface_t            *wl_egl_surface;
263
264 };
265 #endif
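/* Hedged sketch of how a pst_feedback is expected to be requested. These are
 * the standard presentation-time protocol calls; the listener name
 * pst_feedback_listener is an assumption for illustration:
 *
 *     struct pst_feedback *pst = calloc(1, sizeof(struct pst_feedback));
 *     pst->wl_egl_surface = wl_egl_surface;
 *     pst->presentation_feedback =
 *             wp_presentation_feedback(wl_egl_display->presentation,
 *                                      wl_egl_surface->wl_surface);
 *     wp_presentation_feedback_add_listener(pst->presentation_feedback,
 *                                           &pst_feedback_listener, pst);
 */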
266
267 static const struct wl_buffer_listener wl_buffer_release_listener;
268
269 static int
270 _get_tbm_surface_bo_name(tbm_surface_h tbm_surface);
271 static void
272 _print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface);
273 static tpl_bool_t
274 _check_buffer_validate(tpl_wl_egl_surface_t *wl_egl_surface, tbm_surface_h tbm_surface);
275 static void
276 __cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer);
277 static tpl_wl_egl_buffer_t *
278 _get_wl_egl_buffer(tbm_surface_h tbm_surface);
279 static int
280 _write_to_eventfd(int eventfd, uint64_t value);
281 static int
282 send_signal(int fd, const char *type);
283 static void
284 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface);
285 static tpl_result_t
286 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface);
287 static void
288 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
289                                                   tpl_wl_egl_buffer_t *wl_egl_buffer);
290 static void
291 __cb_surface_vblank_free(void *data);
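/* The commit_sync/presentation_sync fds above are signaled through
 * _write_to_eventfd()/send_signal(). A minimal sketch of the mechanism,
 * assuming the fds are created with eventfd(2) as suggested by the
 * <sys/eventfd.h> include:
 *
 *     int fd = eventfd(0, EFD_CLOEXEC);     // created once per surface
 *     ...
 *     uint64_t value = 1;
 *     if (write(fd, &value, sizeof(value)) != sizeof(value))
 *             TPL_ERR("failed to send signal to fd(%d)", fd);
 */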
292
293 static struct tizen_private *
294 tizen_private_create()
295 {
296         struct tizen_private *private = calloc(1, sizeof(struct tizen_private));
297         if (private) {
298                 private->magic = WL_EGL_TIZEN_MAGIC;
299                 private->rotation = 0;
300                 private->frontbuffer_mode = 0;
301                 private->transform = 0;
302                 private->window_transform = 0;
303                 private->serial = 0;
304
305                 private->data = NULL;
306                 private->rotate_callback = NULL;
307                 private->get_rotation_capability = NULL;
308                 private->set_window_serial_callback = NULL;
309                 private->set_frontbuffer_callback = NULL;
310                 private->create_commit_sync_fd = NULL;
311                 private->create_presentation_sync_fd = NULL;
312                 private->merge_sync_fds = NULL;
313         }
314
315         return private;
316 }
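/* Hedged usage sketch (assumption: this mirrors how the surface init path
 * later in this file wires the private data into the native window; the field
 * names come from wayland-egl-tizen-priv.h and are referenced in the callbacks
 * below):
 *
 *     struct tizen_private *private = tizen_private_create();
 *     wl_egl_window->driver_private          = (void *)private;
 *     wl_egl_window->destroy_window_callback = __cb_destroy_callback;
 *     wl_egl_window->resize_callback         = __cb_resize_callback;
 *     private->data                    = (void *)wl_egl_surface;
 *     private->rotate_callback         = __cb_rotate_callback;
 *     private->get_rotation_capability = __cb_get_rotation_capability;
 */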
317
318 static tpl_bool_t
319 _check_native_handle_is_wl_display(tpl_handle_t display)
320 {
321         struct wl_interface *wl_egl_native_dpy = *(void **) display;
322
323         if (!wl_egl_native_dpy) {
324                 TPL_ERR("Invalid parameter. native_display(%p)", wl_egl_native_dpy);
325                 return TPL_FALSE;
326         }
327
328         /* MAGIC CHECK: A native display handle is a wl_display if its dereferenced first value
329            is the address of the wl_display_interface structure. */
330         if (wl_egl_native_dpy == &wl_display_interface)
331                 return TPL_TRUE;
332
333         if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
334                                 strlen(wl_display_interface.name)) == 0) {
335                 return TPL_TRUE;
336         }
337
338         return TPL_FALSE;
339 }
340
341 static tpl_bool_t
342 __thread_func_tdm_dispatch(tpl_gsource *gsource, uint64_t message)
343 {
344         tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
345         tdm_error                   tdm_err = TDM_ERROR_NONE;
346
347         TPL_IGNORE(message);
348
349         if (!wl_egl_display) {
350                 TPL_ERR("Failed to get wl_egl_display from gsource(%p)", gsource);
351                 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
352                 return TPL_FALSE;
353         }
354
355         tdm_err = tdm_client_handle_events(wl_egl_display->tdm.tdm_client);
356
357         /* If an error occurs in tdm_client_handle_events, it cannot be recovered.
358          * When tdm_source is no longer available due to an unexpected situation,
359          * wl_egl_thread must remove it from the thread and destroy it.
360          * In that case, tdm_vblank can no longer be used for surfaces and displays
361          * that used this tdm_source. */
362         if (tdm_err != TDM_ERROR_NONE) {
363                 TPL_ERR("Error occured in tdm_client_handle_events. tdm_err(%d)",
364                                 tdm_err);
365                 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
366
367                 tpl_gsource_destroy(gsource, TPL_FALSE);
368
369                 wl_egl_display->tdm.tdm_source = NULL;
370
371                 return TPL_FALSE;
372         }
373
374         return TPL_TRUE;
375 }
376
377 static void
378 __thread_func_tdm_finalize(tpl_gsource *gsource)
379 {
380         tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
381
382         tpl_gmutex_lock(&wl_egl_display->tdm.tdm_mutex);
383
384         TPL_INFO("[TDM_CLIENT_FINI]",
385                          "wl_egl_display(%p) tdm_client(%p) tdm_display_fd(%d)",
386                          wl_egl_display, wl_egl_display->tdm.tdm_client,
387                          wl_egl_display->tdm.tdm_display_fd);
388
389         if (wl_egl_display->tdm.tdm_client) {
390
391                 if (wl_egl_display->tdm.surface_vblanks) {
392                         __tpl_list_free(wl_egl_display->tdm.surface_vblanks,
393                                     __cb_surface_vblank_free);
394                         wl_egl_display->tdm.surface_vblanks = NULL;
395                 }
396
397                 tdm_client_destroy(wl_egl_display->tdm.tdm_client);
398                 wl_egl_display->tdm.tdm_client = NULL;
399                 wl_egl_display->tdm.tdm_display_fd = -1;
400                 wl_egl_display->tdm.tdm_source = NULL;
401         }
402
403         wl_egl_display->use_wait_vblank = TPL_FALSE;
404         wl_egl_display->tdm.tdm_initialized = TPL_FALSE;
405         wl_egl_display->tdm.gsource_finalized = TPL_TRUE;
406
407         tpl_gcond_signal(&wl_egl_display->tdm.tdm_cond);
408         tpl_gmutex_unlock(&wl_egl_display->tdm.tdm_mutex);
409 }
410
411 static tpl_gsource_functions tdm_funcs = {
412         .prepare  = NULL,
413         .check    = NULL,
414         .dispatch = __thread_func_tdm_dispatch,
415         .finalize = __thread_func_tdm_finalize,
416 };
417
418 tpl_result_t
419 _thread_tdm_init(tpl_wl_egl_display_t *wl_egl_display)
420 {
421         tdm_client       *tdm_client = NULL;
422         int               tdm_display_fd = -1;
423         tdm_error         tdm_err = TDM_ERROR_NONE;
424
425         tdm_client = tdm_client_create(&tdm_err);
426         if (!tdm_client || tdm_err != TDM_ERROR_NONE) {
427                 TPL_ERR("TDM_ERROR:%d Failed to create tdm_client\n", tdm_err);
428                 return TPL_ERROR_INVALID_OPERATION;
429         }
430
431         tdm_err = tdm_client_get_fd(tdm_client, &tdm_display_fd);
432         if (tdm_display_fd < 0 || tdm_err != TDM_ERROR_NONE) {
433                 TPL_ERR("TDM_ERROR:%d Failed to get tdm_client fd\n", tdm_err);
434                 tdm_client_destroy(tdm_client);
435                 return TPL_ERROR_INVALID_OPERATION;
436         }
437
438         wl_egl_display->tdm.tdm_display_fd  = tdm_display_fd;
439         wl_egl_display->tdm.tdm_client      = tdm_client;
440         wl_egl_display->tdm.tdm_source      = NULL;
441         wl_egl_display->tdm.tdm_initialized = TPL_TRUE;
442         wl_egl_display->tdm.surface_vblanks = __tpl_list_alloc();
443
444         TPL_INFO("[TDM_CLIENT_INIT]",
445                          "wl_egl_display(%p) tdm_client(%p) tdm_display_fd(%d)",
446                          wl_egl_display, tdm_client, tdm_display_fd);
447
448         return TPL_ERROR_NONE;
449 }
450
451 #define IMPL_TIZEN_SURFACE_SHM_VERSION 2
452
453
454 static void
455 __cb_wl_registry_global_callback(void *data, struct wl_registry *wl_registry,
456                                                           uint32_t name, const char *interface,
457                                                           uint32_t version)
458 {
459 #if TIZEN_FEATURE_ENABLE
460         tpl_wl_egl_display_t wl_egl_display(data);
461
462         if (!strcmp(interface, "tizen_surface_shm")) {
463                 wl_egl_display->tss =
464                         wl_registry_bind(wl_registry,
465                                                          name,
466                                                          &tizen_surface_shm_interface,
467                                                          ((version < IMPL_TIZEN_SURFACE_SHM_VERSION) ?
468                                                          version : IMPL_TIZEN_SURFACE_SHM_VERSION));
469                 wl_egl_display->use_tss = TPL_TRUE;
470         } else if (!strcmp(interface, wp_presentation_interface.name)) {
471                 wl_egl_display->presentation =
472                                         wl_registry_bind(wl_registry,
473                                                                          name, &wp_presentation_interface, 1);
474                 TPL_LOG_D("[REGISTRY_BIND]",
475                                   "wl_egl_display(%p) bind wp_presentation_interface",
476                                   wl_egl_display);
477         } else if (strcmp(interface, "zwp_linux_explicit_synchronization_v1") == 0) {
478                 char *env = tpl_getenv("TPL_EFS");
479                 if (env && !atoi(env)) {
480                         wl_egl_display->use_explicit_sync = TPL_FALSE;
481                 } else {
482                         wl_egl_display->explicit_sync =
483                                         wl_registry_bind(wl_registry, name,
484                                                                          &zwp_linux_explicit_synchronization_v1_interface, 1);
485                         wl_egl_display->use_explicit_sync = TPL_TRUE;
486                         TPL_LOG_D("[REGISTRY_BIND]",
487                                           "wl_egl_display(%p) bind zwp_linux_explicit_synchronization_v1_interface",
488                                           wl_egl_display);
489                 }
490         }
491 #endif
492 }
493
494 static void
495 __cb_wl_registry_global_remove_callback(void *data,
496                                                                          struct wl_registry *wl_registry,
497                                                                          uint32_t name)
498 {
499 }
500
501 static const struct wl_registry_listener registry_listener = {
502         __cb_wl_registry_global_callback,
503         __cb_wl_registry_global_remove_callback
504 };
505
506 static void
507 _wl_display_print_err(tpl_wl_egl_display_t *wl_egl_display,
508                                           const char *func_name)
509 {
510         int dpy_err;
511         char buf[1024];
512         strerror_r(errno, buf, sizeof(buf));
513
514         if (wl_egl_display->last_error == errno)
515                 return;
516
517         TPL_ERR("falied to %s. error:%d(%s)", func_name, errno, buf);
518
519         dpy_err = wl_display_get_error(wl_egl_display->wl_display);
520         if (dpy_err == EPROTO) {
521                 const struct wl_interface *err_interface;
522                 uint32_t err_proxy_id, err_code;
523                 err_code = wl_display_get_protocol_error(wl_egl_display->wl_display,
524                                                                                                  &err_interface,
525                                                                                                  &err_proxy_id);
526                 TPL_ERR("[Protocol Error] interface: %s, error_code: %d, proxy_id: %d",
527                                 (err_interface ? err_interface->name : "UNKNOWN"),
528                                 err_code, err_proxy_id);
529         }
530
531         wl_egl_display->last_error = errno;
532 }
533
534 tpl_result_t
535 _thread_wl_display_init(tpl_wl_egl_display_t *wl_egl_display)
536 {
537         struct wl_registry *registry                = NULL;
538         struct wl_event_queue *queue                = NULL;
539         struct wl_display *display_wrapper          = NULL;
540         struct wl_proxy *wl_tbm                     = NULL;
541         struct wayland_tbm_client *wl_tbm_client    = NULL;
542         int ret;
543         tpl_result_t result = TPL_ERROR_NONE;
544
545         queue = wl_display_create_queue(wl_egl_display->wl_display);
546         if (!queue) {
547                 TPL_ERR("Failed to create wl_queue wl_display(%p)",
548                                 wl_egl_display->wl_display);
549                 result = TPL_ERROR_INVALID_OPERATION;
550                 goto fini;
551         }
552
553         wl_egl_display->ev_queue = wl_display_create_queue(wl_egl_display->wl_display);
554         if (!wl_egl_display->ev_queue) {
555                 TPL_ERR("Failed to create wl_queue wl_display(%p)",
556                                 wl_egl_display->wl_display);
557                 result = TPL_ERROR_INVALID_OPERATION;
558                 goto fini;
559         }
560
561         display_wrapper = wl_proxy_create_wrapper(wl_egl_display->wl_display);
562         if (!display_wrapper) {
563                 TPL_ERR("Failed to create a proxy wrapper of wl_display(%p)",
564                                 wl_egl_display->wl_display);
565                 result = TPL_ERROR_INVALID_OPERATION;
566                 goto fini;
567         }
568
569         wl_proxy_set_queue((struct wl_proxy *)display_wrapper, queue);
570
571         registry = wl_display_get_registry(display_wrapper);
572         if (!registry) {
573                 TPL_ERR("Failed to create wl_registry");
574                 result = TPL_ERROR_INVALID_OPERATION;
575                 goto fini;
576         }
577
578         wl_proxy_wrapper_destroy(display_wrapper);
579         display_wrapper = NULL;
580
581         wl_tbm_client = wayland_tbm_client_init(wl_egl_display->wl_display);
582         if (!wl_tbm_client) {
583                 TPL_ERR("Failed to initialize wl_tbm_client.");
584                 result = TPL_ERROR_INVALID_CONNECTION;
585                 goto fini;
586         }
587
588         wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(wl_tbm_client);
589         if (!wl_tbm) {
590                 TPL_ERR("Failed to get wl_tbm from wl_tbm_client(%p)", wl_tbm_client);
591                 result = TPL_ERROR_INVALID_CONNECTION;
592                 goto fini;
593         }
594
595         wl_proxy_set_queue(wl_tbm, wl_egl_display->ev_queue);
596         wl_egl_display->wl_tbm_client = wl_tbm_client;
597
598         if (wl_registry_add_listener(registry, &registry_listener,
599                                                                  wl_egl_display)) {
600                 TPL_ERR("Failed to wl_registry_add_listener");
601                 result = TPL_ERROR_INVALID_OPERATION;
602                 goto fini;
603         }
604
605         ret = wl_display_roundtrip_queue(wl_egl_display->wl_display, queue);
606         if (ret == -1) {
607                 _wl_display_print_err(wl_egl_display, "roundtrip_queue");
608                 result = TPL_ERROR_INVALID_OPERATION;
609                 goto fini;
610         }
611
612 #if TIZEN_FEATURE_ENABLE
613         /* set tizen_surface_shm's queue as client's private queue */
614         if (wl_egl_display->tss) {
615                 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->tss,
616                                                    wl_egl_display->ev_queue);
617                 TPL_LOG_T("WL_EGL", "tizen_surface_shm(%p) init.", wl_egl_display->tss);
618         }
619
620         if (wl_egl_display->presentation) {
621                 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->presentation,
622                                                    wl_egl_display->ev_queue);
623                 TPL_LOG_T("WL_EGL", "wp_presentation(%p) init.",
624                                   wl_egl_display->presentation);
625         }
626
627         if (wl_egl_display->explicit_sync) {
628                 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->explicit_sync,
629                                                    wl_egl_display->ev_queue);
630                 TPL_LOG_T("WL_EGL", "zwp_linux_explicit_synchronization_v1(%p) init.",
631                                   wl_egl_display->explicit_sync);
632         }
633 #endif
634         wl_egl_display->wl_initialized = TPL_TRUE;
635
636         TPL_INFO("[WAYLAND_INIT]",
637                          "wl_egl_display(%p) wl_display(%p) wl_tbm_client(%p) event_queue(%p)",
638                          wl_egl_display, wl_egl_display->wl_display,
639                          wl_egl_display->wl_tbm_client, wl_egl_display->ev_queue);
640 #if TIZEN_FEATURE_ENABLE
641         TPL_INFO("[WAYLAND_INIT]",
642                          "tizen_surface_shm(%p) wp_presentation(%p) explicit_sync(%p)",
643                          wl_egl_display->tss, wl_egl_display->presentation,
644                          wl_egl_display->explicit_sync);
645 #endif
646 fini:
647         if (display_wrapper)
648                 wl_proxy_wrapper_destroy(display_wrapper);
649         if (registry)
650                 wl_registry_destroy(registry);
651         if (queue)
652                 wl_event_queue_destroy(queue);
653
654         return result;
655 }
656
657 void
658 _thread_wl_display_fini(tpl_wl_egl_display_t *wl_egl_display)
659 {
660         /* If wl_egl_display is in prepared state, cancel it */
661         if (wl_egl_display->prepared) {
662                 wl_display_cancel_read(wl_egl_display->wl_display);
663                 wl_egl_display->prepared = TPL_FALSE;
664         }
665
666         if (wl_display_roundtrip_queue(wl_egl_display->wl_display,
667                                                                    wl_egl_display->ev_queue) == -1) {
668                 _wl_display_print_err(wl_egl_display, "roundtrip_queue");
669         }
670
671 #if TIZEN_FEATURE_ENABLE
672         if (wl_egl_display->tss) {
673                 TPL_INFO("[TIZEN_SURFACE_SHM_DESTROY]",
674                                  "wl_egl_display(%p) tizen_surface_shm(%p) fini.",
675                                  wl_egl_display, wl_egl_display->tss);
676                 tizen_surface_shm_destroy(wl_egl_display->tss);
677                 wl_egl_display->tss = NULL;
678         }
679
680         if (wl_egl_display->presentation) {
681                 TPL_INFO("[WP_PRESENTATION_DESTROY]",
682                                  "wl_egl_display(%p) wp_presentation(%p) fini.",
683                                  wl_egl_display, wl_egl_display->presentation);
684                 wp_presentation_destroy(wl_egl_display->presentation);
685                 wl_egl_display->presentation = NULL;
686         }
687
688         if (wl_egl_display->explicit_sync) {
689                 TPL_INFO("[EXPLICIT_SYNC_DESTROY]",
690                                  "wl_egl_display(%p) zwp_linux_explicit_synchronization_v1(%p) fini.",
691                                  wl_egl_display, wl_egl_display->explicit_sync);
692                 zwp_linux_explicit_synchronization_v1_destroy(wl_egl_display->explicit_sync);
693                 wl_egl_display->explicit_sync = NULL;
694         }
695 #endif
696         if (wl_egl_display->wl_tbm_client) {
697                 struct wl_proxy *wl_tbm = NULL;
698
699                 wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(
700                                                                                 wl_egl_display->wl_tbm_client);
701                 if (wl_tbm) {
702                         wl_proxy_set_queue(wl_tbm, NULL);
703                 }
704
705                 TPL_INFO("[WL_TBM_DEINIT]",
706                                  "wl_egl_display(%p) wl_tbm_client(%p)",
707                                  wl_egl_display, wl_egl_display->wl_tbm_client);
708                 wayland_tbm_client_deinit(wl_egl_display->wl_tbm_client);
709                 wl_egl_display->wl_tbm_client = NULL;
710         }
711
712         wl_event_queue_destroy(wl_egl_display->ev_queue);
713
714         wl_egl_display->ev_queue = NULL;
715         wl_egl_display->wl_initialized = TPL_FALSE;
716
717         TPL_INFO("[DISPLAY_FINI]", "wl_egl_display(%p) wl_display(%p)",
718                          wl_egl_display, wl_egl_display->wl_display);
719 }
720
721 static void*
722 _thread_init(void *data)
723 {
724         tpl_wl_egl_display_t wl_egl_display(data);
725
726         if (_thread_wl_display_init(wl_egl_display) != TPL_ERROR_NONE) {
727                 TPL_ERR("Failed to initialize wl_egl_display(%p) with wl_display(%p)",
728                                 wl_egl_display, wl_egl_display->wl_display);
729         }
730
731         if (wl_egl_display->use_wait_vblank &&
732                 _thread_tdm_init(wl_egl_display) != TPL_ERROR_NONE) {
733                 TPL_WARN("Failed to initialize tdm-client. TPL_WAIT_VLANK:DISABLED");
734         }
735
736         return wl_egl_display;
737 }
738
739 static tpl_bool_t
740 __thread_func_disp_prepare(tpl_gsource *gsource)
741 {
742         tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
743
744         /* If this wl_egl_display is already prepared,
745          * do nothing in this function. */
746         if (wl_egl_display->prepared)
747                 return TPL_FALSE;
748
749         /* If there is a last_error, there is no need to poll,
750          * so skip directly to dispatch.
751          * prepare -> dispatch */
752         if (wl_egl_display->last_error)
753                 return TPL_TRUE;
754
755         while (wl_display_prepare_read_queue(wl_egl_display->wl_display,
756                                                                                  wl_egl_display->ev_queue) != 0) {
757                 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
758                                                                                           wl_egl_display->ev_queue) == -1) {
759                         _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
760                 }
761         }
762
763         wl_egl_display->prepared = TPL_TRUE;
764
765         wl_display_flush(wl_egl_display->wl_display);
766
767         return TPL_FALSE;
768 }
769
770 static tpl_bool_t
771 __thread_func_disp_check(tpl_gsource *gsource)
772 {
773         tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
774         tpl_bool_t ret = TPL_FALSE;
775
776         if (!wl_egl_display->prepared)
777                 return ret;
778
779         /* If prepared but last_error is set,
780          * cancel_read is executed and FALSE is returned.
781          * This leads to G_SOURCE_REMOVE: disp_prepare is called again,
782          * disp_check is skipped, and disp_dispatch returns FALSE.
783          * check -> prepare -> dispatch -> G_SOURCE_REMOVE */
784         if (wl_egl_display->prepared && wl_egl_display->last_error) {
785                 wl_display_cancel_read(wl_egl_display->wl_display);
786                 return ret;
787         }
788
789         if (tpl_gsource_check_io_condition(gsource)) {
790                 if (wl_display_read_events(wl_egl_display->wl_display) == -1)
791                         _wl_display_print_err(wl_egl_display, "read_event");
792                 ret = TPL_TRUE;
793         } else {
794                 wl_display_cancel_read(wl_egl_display->wl_display);
795                 ret = TPL_FALSE;
796         }
797
798         wl_egl_display->prepared = TPL_FALSE;
799
800         return ret;
801 }
802
803 static tpl_bool_t
804 __thread_func_disp_dispatch(tpl_gsource *gsource, uint64_t message)
805 {
806         tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
807
808         TPL_IGNORE(message);
809
810         /* If there is a last_error, SOURCE_REMOVE should be returned
811          * to remove the gsource from the main loop,
812          * because wl_egl_display is no longer valid once last_error has been set. */
813         if (wl_egl_display->last_error) {
814                 return TPL_FALSE;
815         }
816
817         tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
818         if (tpl_gsource_check_io_condition(gsource)) {
819                 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
820                                                                                           wl_egl_display->ev_queue) == -1) {
821                         _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
822                 }
823         }
824
825         wl_display_flush(wl_egl_display->wl_display);
826         tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
827
828         return TPL_TRUE;
829 }
830
831 static void
832 __thread_func_disp_finalize(tpl_gsource *gsource)
833 {
834         tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
835
836         tpl_gmutex_lock(&wl_egl_display->disp_mutex);
837         TPL_LOG_D("[D_FINALIZE]", "wl_egl_display(%p) tpl_gsource(%p)",
838                           wl_egl_display, gsource);
839
840         if (wl_egl_display->wl_initialized)
841                 _thread_wl_display_fini(wl_egl_display);
842
843         wl_egl_display->gsource_finalized = TPL_TRUE;
844
845         tpl_gcond_signal(&wl_egl_display->disp_cond);
846         tpl_gmutex_unlock(&wl_egl_display->disp_mutex);
847
848         return;
849 }
850
851
852 static tpl_gsource_functions disp_funcs = {
853         .prepare  = __thread_func_disp_prepare,
854         .check    = __thread_func_disp_check,
855         .dispatch = __thread_func_disp_dispatch,
856         .finalize = __thread_func_disp_finalize,
857 };
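/* The three callbacks above implement the standard libwayland pattern for
 * reading a private event queue from a polling thread; a simplified sketch
 * (the tpl_gsource provides the poll step between check and dispatch):
 *
 *     while (wl_display_prepare_read_queue(display, queue) != 0)
 *             wl_display_dispatch_queue_pending(display, queue);   // prepare
 *     wl_display_flush(display);
 *     // ... poll(display_fd) ...
 *     if (readable)
 *             wl_display_read_events(display);                     // check
 *     else
 *             wl_display_cancel_read(display);
 *     wl_display_dispatch_queue_pending(display, queue);           // dispatch
 */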
858
859 static tpl_result_t
860 __tpl_wl_egl_display_init(tpl_display_t *display)
861 {
862         tpl_wl_egl_display_t *wl_egl_display    = NULL;
863
864         TPL_ASSERT(display);
865
866         /* Do not allow default display in wayland. */
867         if (!display->native_handle) {
868                 TPL_ERR("Invalid native handle for display.");
869                 return TPL_ERROR_INVALID_PARAMETER;
870         }
871
872         if (!_check_native_handle_is_wl_display(display->native_handle)) {
873                 TPL_ERR("native_handle(%p) is not wl_display", display->native_handle);
874                 return TPL_ERROR_INVALID_PARAMETER;
875         }
876
877         wl_egl_display = calloc(1, sizeof(tpl_wl_egl_display_t));
878         if (!wl_egl_display) {
879                 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_display_t.");
880                 return TPL_ERROR_OUT_OF_MEMORY;
881         }
882
883         display->backend.data             = wl_egl_display;
884         display->bufmgr_fd                = -1;
885
886         wl_egl_display->tdm.tdm_initialized   = TPL_FALSE;
887         wl_egl_display->tdm.tdm_client        = NULL;
888         wl_egl_display->tdm.tdm_display_fd    = -1;
889         wl_egl_display->tdm.tdm_source        = NULL;
890
891         wl_egl_display->wl_initialized    = TPL_FALSE;
892
893         wl_egl_display->ev_queue          = NULL;
894         wl_egl_display->wl_display        = (struct wl_display *)display->native_handle;
895         wl_egl_display->last_error        = 0;
896         wl_egl_display->use_tss           = TPL_FALSE;
897         wl_egl_display->use_explicit_sync = TPL_FALSE;   // default disabled
898         wl_egl_display->prepared          = TPL_FALSE;
899         wl_egl_display->gsource_finalized = TPL_FALSE;
900
901 #if TIZEN_FEATURE_ENABLE
902         /* Wayland Interfaces */
903         wl_egl_display->tss               = NULL;
904         wl_egl_display->presentation      = NULL;
905         wl_egl_display->explicit_sync     = NULL;
906 #endif
907         wl_egl_display->wl_tbm_client     = NULL;
908
909         wl_egl_display->use_wait_vblank   = TPL_TRUE;   // default enabled
910         {
911                 char *env = tpl_getenv("TPL_WAIT_VBLANK");
912                 if (env && !atoi(env)) {
913                         wl_egl_display->use_wait_vblank = TPL_FALSE;
914                 }
915         }
916
917         tpl_gmutex_init(&wl_egl_display->wl_event_mutex);
918
919         tpl_gmutex_init(&wl_egl_display->disp_mutex);
920         tpl_gcond_init(&wl_egl_display->disp_cond);
921
922         /* Create gthread */
923         wl_egl_display->thread = tpl_gthread_create("wl_egl_thread",
924                                                                                                 (tpl_gthread_func)_thread_init,
925                                                                                                 (void *)wl_egl_display);
926         if (!wl_egl_display->thread) {
927                 TPL_ERR("Failed to create wl_egl_thread");
928                 goto free_display;
929         }
930
931         wl_egl_display->disp_source = tpl_gsource_create(wl_egl_display->thread,
932                                                                                                          (void *)wl_egl_display,
933                                                                                                          wl_display_get_fd(wl_egl_display->wl_display),
934                                                                                                          FD_TYPE_SOCKET,
935                                                                                                          &disp_funcs, SOURCE_TYPE_NORMAL);
936         if (!wl_egl_display->disp_source) {
937                 TPL_ERR("Failed to add native_display(%p) to thread(%p)",
938                                 display->native_handle,
939                                 wl_egl_display->thread);
940                 goto free_display;
941         }
942
943         if (wl_egl_display->use_wait_vblank &&
944                 wl_egl_display->tdm.tdm_initialized) {
945                 tpl_gmutex_init(&wl_egl_display->tdm.tdm_mutex);
946                 tpl_gcond_init(&wl_egl_display->tdm.tdm_cond);
947                 wl_egl_display->tdm.tdm_source = tpl_gsource_create(wl_egl_display->thread,
948                                                                                                                 (void *)wl_egl_display,
949                                                                                                                 wl_egl_display->tdm.tdm_display_fd,
950                                                                                                                 FD_TYPE_SOCKET,
951                                                                                                                 &tdm_funcs, SOURCE_TYPE_NORMAL);
952                 wl_egl_display->tdm.gsource_finalized = TPL_FALSE;
953                 if (!wl_egl_display->tdm.tdm_source) {
954                         TPL_ERR("Failed to create tdm_gsource\n");
955                         goto free_display;
956                 }
957         }
958
959         wl_egl_display->use_wait_vblank = (wl_egl_display->tdm.tdm_initialized &&
960                                                                            (wl_egl_display->tdm.tdm_source != NULL));
961
962         TPL_INFO("[DISPLAY_INIT]",
963                          "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
964                          wl_egl_display,
965                          wl_egl_display->thread,
966                          wl_egl_display->wl_display);
967
968         TPL_INFO("[DISPLAY_INIT]",
969                          "USE_WAIT_VBLANK(%s) TIZEN_SURFACE_SHM(%s) USE_EXPLICIT_SYNC(%s)",
970                          wl_egl_display->use_wait_vblank ? "TRUE" : "FALSE",
971                          wl_egl_display->use_tss ? "TRUE" : "FALSE",
972                          wl_egl_display->use_explicit_sync ? "TRUE" : "FALSE");
973
974         return TPL_ERROR_NONE;
975
976 free_display:
977         if (wl_egl_display->tdm.tdm_source) {
978                 tpl_gmutex_lock(&wl_egl_display->tdm.tdm_mutex);
979                 // Send destroy message to thread
980                 tpl_gsource_destroy(wl_egl_display->tdm.tdm_source, TPL_TRUE);
981                 while (!wl_egl_display->tdm.gsource_finalized) {
982                         tpl_gcond_wait(&wl_egl_display->tdm.tdm_cond, &wl_egl_display->tdm.tdm_mutex);
983                 }
984                 tpl_gmutex_unlock(&wl_egl_display->tdm.tdm_mutex);
985         }
986
987         if (wl_egl_display->disp_source) {
988                 tpl_gmutex_lock(&wl_egl_display->disp_mutex);
989                 // Send destroy message to thread
990                 tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
991                 while (!wl_egl_display->gsource_finalized) {
992                         tpl_gcond_wait(&wl_egl_display->disp_cond, &wl_egl_display->disp_mutex);
993                 }
994                 tpl_gmutex_unlock(&wl_egl_display->disp_mutex);
995         }
996
997         if (wl_egl_display->thread) {
998                 tpl_gthread_destroy(wl_egl_display->thread);
999         }
1000
1001         tpl_gcond_clear(&wl_egl_display->tdm.tdm_cond);
1002         tpl_gmutex_clear(&wl_egl_display->tdm.tdm_mutex);
1003         tpl_gcond_clear(&wl_egl_display->disp_cond);
1004         tpl_gmutex_clear(&wl_egl_display->disp_mutex);
1005
1006         wl_egl_display->thread = NULL;
1007         free(wl_egl_display);
1008
1009         display->backend.data = NULL;
1010         return TPL_ERROR_INVALID_OPERATION;
1011 }
1012
1013 static void
1014 __tpl_wl_egl_display_fini(tpl_display_t *display)
1015 {
1016         tpl_wl_egl_display_t wl_egl_display(display->backend.data);
1017         if (wl_egl_display) {
1018                 TPL_INFO("[DISPLAY_FINI]",
1019                                   "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
1020                                   wl_egl_display,
1021                                   wl_egl_display->thread,
1022                                   wl_egl_display->wl_display);
1023
1024                 if (wl_egl_display->tdm.tdm_source && wl_egl_display->tdm.tdm_initialized) {
1025                         /* This is a protection against unexpected situations in which
1026                          * g_cond_wait cannot work normally.
1027                          * When calling tpl_gsource_destroy() with destroy_in_thread set to TPL_TRUE,
1028                          * the caller should call tpl_gcond_wait() in a loop that checks the
1029                          * finalized flag. */
1030                         tpl_gmutex_lock(&wl_egl_display->tdm.tdm_mutex);
1031                         // Send destroy message to thread
1032                         tpl_gsource_destroy(wl_egl_display->tdm.tdm_source, TPL_TRUE);
1033                         while (!wl_egl_display->tdm.gsource_finalized) {
1034                                 tpl_gcond_wait(&wl_egl_display->tdm.tdm_cond, &wl_egl_display->tdm.tdm_mutex);
1035                         }
1036                         wl_egl_display->tdm.tdm_source = NULL;
1037                         tpl_gmutex_unlock(&wl_egl_display->tdm.tdm_mutex);
1038                 }
1039
1040                 if (wl_egl_display->disp_source) {
1041                         tpl_gmutex_lock(&wl_egl_display->disp_mutex);
1042                         // Send destroy message to thread
1043                         tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
1044                         /* This is a protection against unexpected situations in which
1045                          * g_cond_wait cannot work normally.
1046                          * When calling tpl_gsource_destroy() with destroy_in_thread set to TPL_TRUE,
1047                          * the caller should call tpl_gcond_wait() in a loop that checks the
1048                          * finalized flag. */
1049                         while (!wl_egl_display->gsource_finalized) {
1050                                 tpl_gcond_wait(&wl_egl_display->disp_cond, &wl_egl_display->disp_mutex);
1051                         }
1052                         wl_egl_display->disp_source = NULL;
1053                         tpl_gmutex_unlock(&wl_egl_display->disp_mutex);
1054                 }
1055
1056                 if (wl_egl_display->thread) {
1057                         tpl_gthread_destroy(wl_egl_display->thread);
1058                         wl_egl_display->thread = NULL;
1059                 }
1060
1061                 tpl_gcond_clear(&wl_egl_display->tdm.tdm_cond);
1062                 tpl_gmutex_clear(&wl_egl_display->tdm.tdm_mutex);
1063                 tpl_gcond_clear(&wl_egl_display->disp_cond);
1064                 tpl_gmutex_clear(&wl_egl_display->disp_mutex);
1065
1066                 tpl_gmutex_clear(&wl_egl_display->wl_event_mutex);
1067
1068                 free(wl_egl_display);
1069         }
1070
1071         display->backend.data = NULL;
1072 }
1073
1074 static tpl_result_t
1075 __tpl_wl_egl_display_query_config(tpl_display_t *display,
1076                                                                   tpl_surface_type_t surface_type,
1077                                                                   int red_size, int green_size,
1078                                                                   int blue_size, int alpha_size,
1079                                                                   int color_depth, int *native_visual_id,
1080                                                                   tpl_bool_t *is_slow)
1081 {
1082         TPL_ASSERT(display);
1083
1084         if (surface_type == TPL_SURFACE_TYPE_WINDOW && red_size == 8 &&
1085                         green_size == 8 && blue_size == 8 &&
1086                         (color_depth == 32 || color_depth == 24)) {
1087
1088                 if (alpha_size == 8) {
1089                         if (native_visual_id) *native_visual_id = TBM_FORMAT_ARGB8888;
1090                         if (is_slow) *is_slow = TPL_FALSE;
1091                         return TPL_ERROR_NONE;
1092                 }
1093                 if (alpha_size == 0) {
1094                         if (native_visual_id) *native_visual_id = TBM_FORMAT_XRGB8888;
1095                         if (is_slow) *is_slow = TPL_FALSE;
1096                         return TPL_ERROR_NONE;
1097                 }
1098         }
1099
1100         return TPL_ERROR_INVALID_PARAMETER;
1101 }
1102
1103 static tpl_result_t
1104 __tpl_wl_egl_display_filter_config(tpl_display_t *display, int *visual_id,
1105                                                                    int alpha_size)
1106 {
1107         TPL_IGNORE(display);
1108         TPL_IGNORE(visual_id);
1109         TPL_IGNORE(alpha_size);
1110         return TPL_ERROR_NONE;
1111 }
1112
1113 static tpl_result_t
1114 __tpl_wl_egl_display_get_window_info(tpl_display_t *display,
1115                                                                          tpl_handle_t window, int *width,
1116                                                                          int *height, tbm_format *format,
1117                                                                          int depth, int a_size)
1118 {
1119         tpl_result_t ret = TPL_ERROR_NONE;
1120         struct wl_egl_window *wl_egl_window = (struct wl_egl_window *)window;
1121
1122         if (!wl_egl_window) {
1123                 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", window);
1124                 return TPL_ERROR_INVALID_PARAMETER;
1125         }
1126
1127         if (width) *width = wl_egl_window->width;
1128         if (height) *height = wl_egl_window->height;
1129         if (format) {
1130                 struct tizen_private tizen_private(wl_egl_window->driver_private);
1131                 if (tizen_private && tizen_private->data) {
1132                         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1133                         *format = wl_egl_surface->format;
1134                 } else {
1135                         if (a_size == 8)
1136                                 *format = TBM_FORMAT_ARGB8888;
1137                         else
1138                                 *format = TBM_FORMAT_XRGB8888;
1139                 }
1140         }
1141
1142         return ret;
1143 }
1144
1145 static tpl_result_t
1146 __tpl_wl_egl_display_get_pixmap_info(tpl_display_t *display,
1147                                                                          tpl_handle_t pixmap, int *width,
1148                                                                          int *height, tbm_format *format)
1149 {
1150         tbm_surface_h   tbm_surface = NULL;
1151
1152         if (!pixmap) {
1153                 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", pixmap);
1154                 return TPL_ERROR_INVALID_PARAMETER;
1155         }
1156
1157         tbm_surface = wayland_tbm_server_get_surface(NULL,
1158                                                                                                  (struct wl_resource *)pixmap);
1159         if (!tbm_surface) {
1160                 TPL_ERR("Failed to get tbm_surface from wayland_tbm.");
1161                 return TPL_ERROR_INVALID_PARAMETER;
1162         }
1163
1164         if (width) *width = tbm_surface_get_width(tbm_surface);
1165         if (height) *height = tbm_surface_get_height(tbm_surface);
1166         if (format) *format = tbm_surface_get_format(tbm_surface);
1167
1168         return TPL_ERROR_NONE;
1169 }
1170
1171 static tbm_surface_h
1172 __tpl_wl_egl_display_get_buffer_from_native_pixmap(tpl_handle_t pixmap)
1173 {
1174         tbm_surface_h tbm_surface = NULL;
1175
1176         TPL_ASSERT(pixmap);
1177
1178         tbm_surface = wayland_tbm_server_get_surface(NULL,
1179                                                                                                  (struct wl_resource *)pixmap);
1180         if (!tbm_surface) {
1181                 TPL_ERR("Failed to get tbm_surface_h from wayland_tbm.");
1182                 return NULL;
1183         }
1184
1185         return tbm_surface;
1186 }
1187
1188 tpl_bool_t
1189 __tpl_display_choose_backend_wl_egl_thread(tpl_handle_t native_dpy)
1190 {
1191         struct wl_interface *wl_egl_native_dpy = *(void **) native_dpy;
1192
1193         TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_native_dpy, TPL_FALSE);
1194
1195         /* MAGIC CHECK: A native display handle is a wl_display if its dereferenced first value
1196            is the address of the wl_display_interface structure. */
1197         if (wl_egl_native_dpy == &wl_display_interface)
1198                 return TPL_TRUE;
1199
1200         if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
1201                                 strlen(wl_display_interface.name)) == 0) {
1202                 return TPL_TRUE;
1203         }
1204
1205         return TPL_FALSE;
1206 }
1207
1208 /* -- BEGIN -- wl_egl_window callback functions */
1209 static void
1210 __cb_destroy_callback(void *private)
1211 {
1212         struct tizen_private tizen_private(private);
1213
1214         if (!tizen_private) {
1215                 TPL_LOG_D("[WL_EGL_WINDOW_DESTROY_CALLBACK]", "Already destroyed surface");
1216                 return;
1217         }
1218
1219         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1220         if (wl_egl_surface) {
1221                 TPL_WARN("[DESTROY_CB][!!!ABNORMAL BEHAVIOR!!!] wl_egl_window(%p) is destroyed.",
1222                                  wl_egl_surface->wl_egl_window);
1223                 TPL_WARN("[DESTROY_CB] native window should be destroyed after eglDestroySurface.");
1224
1225                 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1226                 wl_egl_surface->wl_egl_window->destroy_window_callback = NULL;
1227                 wl_egl_surface->wl_egl_window->resize_callback = NULL;
1228                 wl_egl_surface->wl_egl_window->driver_private = NULL;
1229                 wl_egl_surface->wl_egl_window = NULL;
1230                 wl_egl_surface->wl_surface = NULL;
1231
1232                 tizen_private->set_window_serial_callback = NULL;
1233                 tizen_private->rotate_callback = NULL;
1234                 tizen_private->get_rotation_capability = NULL;
1235                 tizen_private->set_frontbuffer_callback = NULL;
1236                 tizen_private->create_commit_sync_fd = NULL;
1237                 tizen_private->create_presentation_sync_fd = NULL;
1238                 tizen_private->data = NULL;
1239
1240                 free(tizen_private);
1241                 tizen_private = NULL;
1242                 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1243         }
1244 }
1245
1246 static void
1247 __cb_resize_callback(struct wl_egl_window *wl_egl_window, void *private)
1248 {
1249         TPL_ASSERT(private);
1250
1251         struct tizen_private tizen_private(private);
1252         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1253         int cur_w, cur_h, req_w, req_h, format;
1254
1255         if (!wl_egl_surface) {
1256                 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1257                                 wl_egl_window);
1258                 return;
1259         }
1260
1261         format = wl_egl_surface->format;
1262         cur_w = wl_egl_surface->width;
1263         cur_h = wl_egl_surface->height;
1264         req_w = wl_egl_window->width;
1265         req_h = wl_egl_window->height;
1266
1267         TPL_INFO("[WINDOW_RESIZE]",
1268                          "wl_egl_surface(%p) wl_egl_window(%p) (%dx%d) -> (%dx%d)",
1269                          wl_egl_surface, wl_egl_window, cur_w, cur_h, req_w, req_h);
1270
1271         if (tbm_surface_queue_reset(wl_egl_surface->tbm_queue, req_w, req_h, format)
1272                         != TBM_SURFACE_QUEUE_ERROR_NONE) {
1273                 TPL_ERR("Failed to reset tbm_surface_queue(%p)", wl_egl_surface->tbm_queue);
1274                 return;
1275         }
1276 }
1277 /* -- END -- wl_egl_window callback functions */
1278
1279 /* -- BEGIN -- wl_egl_window tizen private callback functions */
1280
1281 /* There is no use case for the prerotation callback below */
1282 static void
1283 __cb_rotate_callback(struct wl_egl_window *wl_egl_window, void *private)
1284 {
1285         TPL_ASSERT(private);
1286
1287         struct tizen_private tizen_private(private);
1288         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1289         int rotation = tizen_private->rotation;
1290
1291         if (!wl_egl_surface) {
1292                 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1293                                 wl_egl_window);
1294                 return;
1295         }
1296
1297         TPL_INFO("[WINDOW_ROTATE]",
1298                          "wl_egl_surface(%p) wl_egl_window(%p) (%d) -> (%d)",
1299                          wl_egl_surface, wl_egl_window,
1300                          wl_egl_surface->rotation, rotation);
1301
1302         wl_egl_surface->rotation = rotation;
1303 }
1304
1305 /* There is no use case for the prerotation callback below */
1306 static int
1307 __cb_get_rotation_capability(struct wl_egl_window *wl_egl_window,
1308                                                          void *private)
1309 {
1310         TPL_ASSERT(private);
1311
1312         int rotation_capability              = WL_EGL_WINDOW_TIZEN_CAPABILITY_NONE;
1313         struct tizen_private tizen_private(private);
1314         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1315
1316         if (!wl_egl_surface) {
1317                 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1318                                 wl_egl_window);
1319                 return rotation_capability;
1320         }
1321
1322         if (wl_egl_surface->prerotation_capability == TPL_TRUE)
1323                 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_SUPPORTED;
1324         else
1325                 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_UNSUPPORTED;
1326
1327
1328         return rotation_capability;
1329 }
1330
1331 static void
1332 __cb_set_window_serial_callback(struct wl_egl_window *wl_egl_window,
1333                                                                 void *private, unsigned int serial)
1334 {
1335         TPL_ASSERT(private);
1336
1337         struct tizen_private tizen_private(private);
1338         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1339
1340         if (!wl_egl_surface) {
1341                 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1342                                 wl_egl_window);
1343                 return;
1344         }
1345
1346         wl_egl_surface->set_serial_is_used = TPL_TRUE;
1347         wl_egl_surface->serial = serial;
1348 }
1349
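/* The commit sync fd is an eventfd that is created lazily on the first
 * request and kept in wl_egl_surface->commit_sync.fd. Every call returns a
 * dup() of it, so each caller owns an independent fd; the backing eventfd is
 * presumably signaled once the corresponding wl_surface commit has been
 * done. */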
1350 static int
1351 __cb_create_commit_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1352 {
1353         TPL_ASSERT(private);
1354         TPL_ASSERT(wl_egl_window);
1355
1356         int commit_sync_fd = -1;
1357
1358         struct tizen_private tizen_private(private);
1359         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1360
1361         if (!wl_egl_surface) {
1362                 TPL_ERR("Invalid parameter. wl_egl_surface(%p) is NULL", wl_egl_surface);
1363                 return -1;
1364         }
1365
1366         tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
1367
1368         if (wl_egl_surface->commit_sync.fd != -1) {
1369                 commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1370                 TRACE_MARK("[ONLY_DUP] commit_sync_fd(%d) dup(%d)",
1371                                    wl_egl_surface->commit_sync.fd, commit_sync_fd);
1372                 TPL_LOG_D("[COMMIT_SYNC][DUP]", "wl_egl_surface(%p) commit_sync_fd(%d) dup(%d)",
1373                                   wl_egl_surface, wl_egl_surface->commit_sync.fd, commit_sync_fd);
1374                 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1375                 return commit_sync_fd;
1376         }
1377
1378         wl_egl_surface->commit_sync.fd = eventfd(0, EFD_CLOEXEC);
1379         if (wl_egl_surface->commit_sync.fd == -1) {
1380                 TPL_ERR("Failed to create commit_sync_fd. wl_egl_surface(%p)",
1381                                 wl_egl_surface);
1382                 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1383                 return -1;
1384         }
1385
1386         commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1387
1388         TRACE_MARK("[CREATE] commit_sync_fd(%d) dup(%d)",
1389                            wl_egl_surface->commit_sync.fd, commit_sync_fd);
1390         TPL_LOG_D("[COMMIT_SYNC][CREATE]", "wl_egl_surface(%p) commit_sync_fd(%d)",
1391                           wl_egl_surface, commit_sync_fd);
1392
1393         tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1394
1395         return commit_sync_fd;
1396 }
1397
1398 static void
1399 __cb_client_window_set_frontbuffer_mode(struct wl_egl_window *wl_egl_window,
1400                                                                                 void *private, int set)
1401 {
1402         TPL_ASSERT(private);
1403         TPL_ASSERT(wl_egl_window);
1404         struct tizen_private tizen_private(private);
1405         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1406         TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1407
1408         tpl_surface_t *surface = wl_egl_surface->tpl_surface;
1409         TPL_CHECK_ON_NULL_RETURN(surface);
1410
1411         tpl_bool_t is_frontbuffer_mode = set ? TPL_TRUE : TPL_FALSE;
1412
1413         TPL_OBJECT_LOCK(surface);
1414         if (is_frontbuffer_mode == surface->is_frontbuffer_mode) {
1415                 TPL_OBJECT_UNLOCK(surface);
1416                 return;
1417         }
1418
1419         TPL_INFO("[FRONTBUFFER_MODE]",
1420                          "[%s] wl_egl_surface(%p) wl_egl_window(%p)",
1421                          is_frontbuffer_mode ? "ON" : "OFF",
1422                          wl_egl_surface, wl_egl_window);
1423
1424         surface->is_frontbuffer_mode = is_frontbuffer_mode;
1425
1426         TPL_OBJECT_UNLOCK(surface);
1427 }
1428
1429 #if TIZEN_FEATURE_ENABLE
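/* Same pattern as the commit sync fd above: one eventfd per surface,
 * dup()ed for every caller. It is presumably signaled when presentation
 * feedback for the committed buffer arrives from the compositor. */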
1430 static int
1431 __cb_create_presentation_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1432 {
1433         TPL_ASSERT(private);
1434         TPL_ASSERT(wl_egl_window);
1435
1436         int presentation_sync_fd = -1;
1437
1438         struct tizen_private tizen_private(private);
1439         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1440
1441         if (!wl_egl_surface) {
1442                 TPL_ERR("Invalid parameter. wl_egl_surface is NULL");
1443                 return -1;
1444         }
1445
1446         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
1447         if (wl_egl_surface->presentation_sync.fd != -1) {
1448                 presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1449                 TRACE_MARK("[ONLY_DUP] presentation_sync_fd(%d) dup(%d)",
1450                                    wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1451                 TPL_LOG_D("[PRESENTATION_SYNC][DUP]", "wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1452                                   wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1453                 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1454                 return presentation_sync_fd;
1455         }
1456
1457         wl_egl_surface->presentation_sync.fd = eventfd(0, EFD_CLOEXEC);
1458         if (wl_egl_surface->presentation_sync.fd == -1) {
1459                 TPL_ERR("Failed to create presentation_sync_fd. wl_egl_surface(%p)",
1460                                 wl_egl_surface);
1461                 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1462                 return -1;
1463         }
1464
1465         presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1466         TRACE_MARK("[CREATE] presentation_sync_fd(%d) dup(%d)",
1467                            wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1468         TPL_LOG_D("[PRESENTATION_SYNC][CREATE]", "wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1469                           wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1470
1471         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1472
1473         return presentation_sync_fd;
1474 }
1475 /* -- END -- wl_egl_window tizen private callback functions */
1476
1477 /* -- BEGIN -- tizen_surface_shm_flusher_listener */
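/* These callbacks are driven by the compositor through the tizen_surface_shm
 * protocol: "flush" requests flushing all buffers of the tbm_surface_queue,
 * while "free_flush" presumably flushes only the currently free buffers. */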
1478 static void __cb_tss_flusher_flush_callback(void *data,
1479                 struct tizen_surface_shm_flusher *tss_flusher)
1480 {
1481         tpl_wl_egl_surface_t wl_egl_surface(data);
1482         tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1483
1484         TPL_INFO("[BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1485                          wl_egl_surface, wl_egl_surface->tbm_queue);
1486
1487         tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue);
1488         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1489                 TPL_ERR("Failed to flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
1490                 return;
1491         }
1492 }
1493
1494 static void __cb_tss_flusher_free_flush_callback(void *data,
1495                 struct tizen_surface_shm_flusher *tss_flusher)
1496 {
1497         tpl_wl_egl_surface_t wl_egl_surface(data);
1498         tbm_surface_queue_error_e tsq_err    = TBM_SURFACE_QUEUE_ERROR_NONE;
1499
1500         TPL_INFO("[FREE_BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1501                          wl_egl_surface, wl_egl_surface->tbm_queue);
1502
1503         tsq_err = tbm_surface_queue_free_flush(wl_egl_surface->tbm_queue);
1504         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1505                 TPL_ERR("Failed to free flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
1506                 return;
1507         }
1508 }
1509
1510 static const struct tizen_surface_shm_flusher_listener
1511 tss_flusher_listener = {
1512         __cb_tss_flusher_flush_callback,
1513         __cb_tss_flusher_free_flush_callback
1514 };
1515 /* -- END -- tizen_surface_shm_flusher_listener */
1516 #endif
1517
1518 /* -- BEGIN -- tbm_surface_queue callback functions */
1519 static void
1520 __cb_tbm_queue_reset_callback(tbm_surface_queue_h tbm_queue,
1521                                                                           void *data)
1522 {
1523         tpl_wl_egl_display_t *wl_egl_display = NULL;
1524         tpl_surface_t *surface = NULL;
1525         tpl_bool_t is_activated = TPL_FALSE;
1526         int width, height;
1527
1528         tpl_wl_egl_surface_t wl_egl_surface(data);
1529         TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1530
1531         wl_egl_display = wl_egl_surface->wl_egl_display;
1532         TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
1533
1534         surface = wl_egl_surface->tpl_surface;
1535         TPL_CHECK_ON_NULL_RETURN(surface);
1536
1537         /* When the queue is resized, set the reset flag to TPL_TRUE so that the
1538          * changed window size is reflected in the next frame. */
1539         width = tbm_surface_queue_get_width(tbm_queue);
1540         height = tbm_surface_queue_get_height(tbm_queue);
1541         if (surface->width != width || surface->height != height) {
1542                 TPL_INFO("[QUEUE_RESIZE]",
1543                                  "wl_egl_surface(%p) tbm_queue(%p) (%dx%d) -> (%dx%d)",
1544                                  wl_egl_surface, tbm_queue,
1545                                  surface->width, surface->height, width, height);
1546         }
1547
1548         /* When queue_reset_callback is called and is_activated differs from its
1549          * previous state, set the reset flag to TPL_TRUE so that a new buffer with
1550          * the changed state (ACTIVATED/DEACTIVATED) is used for the next frame. */
1551         is_activated = wayland_tbm_client_queue_check_activate(wl_egl_display->wl_tbm_client,
1552                                                                                                                    wl_egl_surface->tbm_queue);
1553         if (wl_egl_surface->is_activated != is_activated) {
1554                 if (is_activated) {
1555                         TPL_INFO("[ACTIVATED]",
1556                                           "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1557                                           wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
1558                 } else {
1559                         TPL_INFO("[DEACTIVATED]",
1560                                          "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1561                                          wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
1562                 }
1563         }
1564
1565         wl_egl_surface->reset = TPL_TRUE;
1566
1567         if (surface->reset_cb)
1568                 surface->reset_cb(surface->reset_data);
1569 }
1570
1571 static void
1572 __cb_tbm_queue_acquirable_callback(tbm_surface_queue_h tbm_queue,
1573                                                                    void *data)
1574 {
1575         TPL_IGNORE(tbm_queue);
1576
1577         tpl_wl_egl_surface_t wl_egl_surface(data);
1578         TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1579
1580         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1581         if (wl_egl_surface->sent_message == NONE_MESSAGE) {
1582                 wl_egl_surface->sent_message = ACQUIRABLE;
1583                 tpl_gsource_send_message(wl_egl_surface->surf_source,
1584                                                          wl_egl_surface->sent_message);
1585         }
1586         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1587 }
1588 /* -- END -- tbm_surface_queue callback functions */
1589
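/* Runs on the wl-egl thread when the surface gsource is finalized. It
 * releases the per-surface protocol objects (presentation feedbacks,
 * surface_sync, tss_flusher), destroys the tbm_queue and removes the
 * surface's vblank entry from the display's surface_vblanks list. */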
1590 static void
1591 _thread_wl_egl_surface_fini(tpl_wl_egl_surface_t *wl_egl_surface)
1592 {
1593         tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1594
1595         TPL_INFO("[SURFACE_FINI]",
1596                           "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
1597                           wl_egl_surface, wl_egl_surface->wl_egl_window,
1598                           wl_egl_surface->wl_surface);
1599 #if TIZEN_FEATURE_ENABLE
1600         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
1601
1602         if (wl_egl_display->presentation && wl_egl_surface->presentation_feedbacks) {
1603                 while (!__tpl_list_is_empty(wl_egl_surface->presentation_feedbacks)) {
1604                         struct pst_feedback *pst_feedback =
1605                                 (struct pst_feedback *)__tpl_list_pop_front(
1606                                                 wl_egl_surface->presentation_feedbacks, NULL);
1607                         if (pst_feedback) {
1608                                 send_signal(pst_feedback->pst_sync_fd, "PST_FEEDBACK");
1609                                 pst_feedback->pst_sync_fd = -1;
1610
1611                                 wp_presentation_feedback_destroy(pst_feedback->presentation_feedback);
1612                                 pst_feedback->presentation_feedback = NULL;
1613
1614                                 free(pst_feedback);
1615                         }
1616                 }
1617
1618                 __tpl_list_free(wl_egl_surface->presentation_feedbacks, NULL);
1619                 wl_egl_surface->presentation_feedbacks = NULL;
1620         }
1621
1622         send_signal(wl_egl_surface->presentation_sync.fd, "PST_SYNC");
1623         wl_egl_surface->presentation_sync.fd = -1;
1624
1625         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1626
1627         if (wl_egl_surface->surface_sync) {
1628                 TPL_INFO("[SURFACE_SYNC_DESTROY]",
1629                                  "wl_egl_surface(%p) surface_sync(%p)",
1630                                   wl_egl_surface, wl_egl_surface->surface_sync);
1631                 zwp_linux_surface_synchronization_v1_destroy(wl_egl_surface->surface_sync);
1632                 wl_egl_surface->surface_sync = NULL;
1633         }
1634
1635         if (wl_egl_surface->tss_flusher) {
1636                 TPL_INFO("[FLUSHER_DESTROY]",
1637                                   "wl_egl_surface(%p) tss_flusher(%p)",
1638                                   wl_egl_surface, wl_egl_surface->tss_flusher);
1639                 tizen_surface_shm_flusher_destroy(wl_egl_surface->tss_flusher);
1640                 wl_egl_surface->tss_flusher = NULL;
1641         }
1642 #endif
1643
1644         if (wl_egl_surface->tbm_queue) {
1645                 TPL_INFO("[TBM_QUEUE_DESTROY]",
1646                                  "wl_egl_surface(%p) tbm_queue(%p)",
1647                                  wl_egl_surface, wl_egl_surface->tbm_queue);
1648                 tbm_surface_queue_destroy(wl_egl_surface->tbm_queue);
1649                 wl_egl_surface->tbm_queue = NULL;
1650         }
1651
1652         if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
1653                 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
1654                 __tpl_list_free(wl_egl_surface->vblank->waiting_buffers, NULL);
1655                 wl_egl_surface->vblank->waiting_buffers = NULL;
1656                 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
1657         }
1658
1659         if (wl_egl_surface->vblank) {
1660                 __tpl_list_remove_data(wl_egl_display->tdm.surface_vblanks,
1661                                                            (void *)wl_egl_surface->vblank,
1662                                                            TPL_FIRST,
1663                                                            __cb_surface_vblank_free);
1664                 wl_egl_surface->vblank = NULL;
1665         }
1666 }
1667
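/* Message handler of the per-surface gsource. INIT_SURFACE performs the
 * thread-side initialization and signals the caller blocked in
 * __tpl_wl_egl_surface_init(); ACQUIRABLE lets the thread acquire the
 * buffers that became ready in the tbm_surface_queue
 * (see _thread_surface_queue_acquire()). */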
1668 static tpl_bool_t
1669 __thread_func_surf_dispatch(tpl_gsource *gsource, uint64_t message)
1670 {
1671         tpl_wl_egl_surface_t wl_egl_surface(tpl_gsource_get_data(gsource));
1672
1673         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1674         if (message == INIT_SURFACE) { /* Initialize surface */
1675                 TPL_LOG_D("[MSG_RECEIVED]", "wl_egl_surface(%p) initialize message received!",
1676                                   wl_egl_surface);
1677                 _thread_wl_egl_surface_init(wl_egl_surface);
1678                 wl_egl_surface->initialized_in_thread = TPL_TRUE;
1679                 tpl_gcond_signal(&wl_egl_surface->surf_cond);
1680         } else if (message == ACQUIRABLE) { /* Acquirable */
1681                 TPL_LOG_D("[MSG_RECEIVED]", "wl_egl_surface(%p) acquirable message received!",
1682                                   wl_egl_surface);
1683                 _thread_surface_queue_acquire(wl_egl_surface);
1684         }
1685
1686         wl_egl_surface->sent_message = NONE_MESSAGE;
1687
1688         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1689
1690         return TPL_TRUE;
1691 }
1692
1693 static void
1694 __thread_func_surf_finalize(tpl_gsource *gsource)
1695 {
1696         tpl_wl_egl_surface_t wl_egl_surface(tpl_gsource_get_data(gsource));
1697         TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1698
1699         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1700         TPL_LOG_D("[S_FINALIZE]", "wl_egl_surface(%p) tpl_gsource(%p)",
1701                           wl_egl_surface, gsource);
1702
1703         _thread_wl_egl_surface_fini(wl_egl_surface);
1704
1705         wl_egl_surface->gsource_finalized = TPL_TRUE;
1706
1707         tpl_gcond_signal(&wl_egl_surface->surf_cond);
1708         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1709 }
1710
1711 static tpl_gsource_functions surf_funcs = {
1712         .prepare = NULL,
1713         .check = NULL,
1714         .dispatch = __thread_func_surf_dispatch,
1715         .finalize = __thread_func_surf_finalize,
1716 };
1717
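/* Creates the backend surface data, hooks the rotation/serial/sync-fd
 * callbacks into the native wl_egl_window via tizen_private, then sends
 * INIT_SURFACE to the wl-egl thread and waits on surf_cond until the thread
 * has created the tbm_queue and the optional vblank/flusher/explicit-sync
 * objects. */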
1718 static tpl_result_t
1719 __tpl_wl_egl_surface_init(tpl_surface_t *surface)
1720 {
1721         tpl_wl_egl_display_t wl_egl_display(surface->display->backend.data);
1722         tpl_wl_egl_surface_t *wl_egl_surface    = NULL;
1723         tpl_gsource *surf_source                = NULL;
1724
1725         struct wl_egl_window *wl_egl_window =
1726                 (struct wl_egl_window *)surface->native_handle;
1727
1728         TPL_ASSERT(surface->type == TPL_SURFACE_TYPE_WINDOW);
1729         TPL_ASSERT(surface->native_handle);
1730         TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_display, TPL_ERROR_INVALID_PARAMETER);
1731
1732         wl_egl_surface = calloc(1, sizeof(tpl_wl_egl_surface_t));
1733         if (!wl_egl_surface) {
1734                 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_surface_t.");
1735                 return TPL_ERROR_OUT_OF_MEMORY;
1736         }
1737
1738         surf_source = tpl_gsource_create(wl_egl_display->thread, (void *)wl_egl_surface,
1739                                                                          -1, FD_TYPE_NONE, &surf_funcs, SOURCE_TYPE_NORMAL);
1740         if (!surf_source) {
1741                 TPL_ERR("Failed to create surf_source with wl_egl_surface(%p)",
1742                                 wl_egl_surface);
1743                 goto surf_source_create_fail;
1744         }
1745
1746         surface->backend.data = (void *)wl_egl_surface;
1747         surface->width        = wl_egl_window->width;
1748         surface->height       = wl_egl_window->height;
1749         surface->rotation     = 0;
1750
1751         wl_egl_surface->tpl_surface            = surface;
1752         wl_egl_surface->width                  = wl_egl_window->width;
1753         wl_egl_surface->height                 = wl_egl_window->height;
1754         wl_egl_surface->format                 = surface->format;
1755         wl_egl_surface->num_buffers            = surface->num_buffers;
1756
1757         wl_egl_surface->surf_source            = surf_source;
1758         wl_egl_surface->wl_egl_window          = wl_egl_window;
1759         wl_egl_surface->wl_surface             = wl_egl_window->surface;
1760
1761         wl_egl_surface->wl_egl_display         = wl_egl_display;
1762
1763         wl_egl_surface->reset                  = TPL_FALSE;
1764         wl_egl_surface->is_activated           = TPL_FALSE;
1765         wl_egl_surface->need_to_enqueue        = TPL_TRUE;
1766         wl_egl_surface->prerotation_capability = TPL_FALSE;
1767         wl_egl_surface->vblank_done            = TPL_TRUE;
1768         wl_egl_surface->use_render_done_fence  = TPL_FALSE;
1769         wl_egl_surface->set_serial_is_used     = TPL_FALSE;
1770         wl_egl_surface->gsource_finalized      = TPL_FALSE;
1771         wl_egl_surface->initialized_in_thread  = TPL_FALSE;
1772         wl_egl_surface->frontbuffer_activated  = TPL_FALSE;
1773
1774         wl_egl_surface->latest_transform       = -1;
1775         wl_egl_surface->render_done_cnt        = 0;
1776         wl_egl_surface->serial                 = 0;
1777
1778         wl_egl_surface->vblank                 = NULL;
1779 #if TIZEN_FEATURE_ENABLE
1780         wl_egl_surface->tss_flusher            = NULL;
1781         wl_egl_surface->surface_sync           = NULL;
1782 #endif
1783
1784         wl_egl_surface->post_interval          = surface->post_interval;
1785
1786         wl_egl_surface->vblank_enable          = TPL_FALSE;
1787
1788         wl_egl_surface->commit_sync.fd         = -1;
1789         wl_egl_surface->presentation_sync.fd   = -1;
1790
1791         wl_egl_surface->sent_message           = NONE_MESSAGE;
1792         wl_egl_surface->last_enq_buffer        = NULL;
1793
1794         wl_egl_surface->buffers = __tpl_list_alloc();
1795
1796         {
1797                 struct tizen_private *tizen_private = NULL;
1798
1799                 if (wl_egl_window->driver_private)
1800                         tizen_private = (struct tizen_private *)wl_egl_window->driver_private;
1801                 else {
1802                         tizen_private = tizen_private_create();
1803                         wl_egl_window->driver_private = (void *)tizen_private;
1804                 }
1805
1806                 if (tizen_private) {
1807                         tizen_private->data = (void *)wl_egl_surface;
1808                         tizen_private->rotate_callback = (void *)__cb_rotate_callback;
1809                         tizen_private->get_rotation_capability = (void *)
1810                                 __cb_get_rotation_capability;
1811                         tizen_private->set_window_serial_callback = (void *)
1812                                 __cb_set_window_serial_callback;
1813                         tizen_private->create_commit_sync_fd = (void *)__cb_create_commit_sync_fd;
1814                         tizen_private->set_frontbuffer_callback = (void *)__cb_client_window_set_frontbuffer_mode;
1815 #if TIZEN_FEATURE_ENABLE
1816                         tizen_private->create_presentation_sync_fd = (void *)__cb_create_presentation_sync_fd;
1817 #else
1818                         tizen_private->create_presentation_sync_fd = NULL;
1819 #endif
1820
1821                         wl_egl_window->destroy_window_callback = (void *)__cb_destroy_callback;
1822                         wl_egl_window->resize_callback = (void *)__cb_resize_callback;
1823                 }
1824         }
1825
1826         tpl_gmutex_init(&wl_egl_surface->commit_sync.mutex);
1827         tpl_gmutex_init(&wl_egl_surface->presentation_sync.mutex);
1828
1829         tpl_gmutex_init(&wl_egl_surface->buffers_mutex);
1830
1831         tpl_gmutex_init(&wl_egl_surface->surf_mutex);
1832         tpl_gcond_init(&wl_egl_surface->surf_cond);
1833
1834         /* Initialize in thread */
1835         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1836         wl_egl_surface->sent_message = INIT_SURFACE;
1837         tpl_gsource_send_message(wl_egl_surface->surf_source,
1838                                                          wl_egl_surface->sent_message);
1839         while (!wl_egl_surface->initialized_in_thread)
1840                 tpl_gcond_wait(&wl_egl_surface->surf_cond, &wl_egl_surface->surf_mutex);
1841         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1842
1843         TPL_ASSERT(wl_egl_surface->tbm_queue);
1844
1845         TPL_INFO("[SURFACE_INIT]",
1846                           "tpl_surface(%p) wl_egl_surface(%p) gsource(%p)",
1847                           surface, wl_egl_surface, wl_egl_surface->surf_source);
1848
1849         return TPL_ERROR_NONE;
1850
1851 surf_source_create_fail:
1852         free(wl_egl_surface);
1853         surface->backend.data = NULL;
1854         return TPL_ERROR_INVALID_OPERATION;
1855 }
1856
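/* Creates the wayland-tbm surface queue, using the tiled variant when the
 * bufmgr reports TBM_BUFMGR_CAPABILITY_TILED_MEMORY. GUARANTEE_CYCLE mode is
 * set (presumably so that buffers are cycled in a fixed order), and the
 * reset/acquirable callbacks defined above are registered. */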
1857 static tbm_surface_queue_h
1858 _thread_create_tbm_queue(tpl_wl_egl_surface_t *wl_egl_surface,
1859                                                  struct wayland_tbm_client *wl_tbm_client,
1860                                                  int num_buffers)
1861 {
1862         tbm_surface_queue_h tbm_queue = NULL;
1863         tbm_bufmgr bufmgr             = NULL;
1864         unsigned int capability;
1865
1866         struct wl_surface *wl_surface = wl_egl_surface->wl_surface;
1867         int width = wl_egl_surface->width;
1868         int height = wl_egl_surface->height;
1869         int format = wl_egl_surface->format;
1870
1871         if (!wl_tbm_client || !wl_surface) {
1872                 TPL_ERR("Invalid parameters. wl_tbm_client(%p) wl_surface(%p)",
1873                                 wl_tbm_client, wl_surface);
1874                 return NULL;
1875         }
1876
1877         bufmgr = tbm_bufmgr_init(-1);
1878         capability = tbm_bufmgr_get_capability(bufmgr);
1879         tbm_bufmgr_deinit(bufmgr);
1880
1881         if (capability & TBM_BUFMGR_CAPABILITY_TILED_MEMORY) {
1882                 tbm_queue = wayland_tbm_client_create_surface_queue_tiled(
1883                                                 wl_tbm_client,
1884                                                 wl_surface,
1885                                                 num_buffers,
1886                                                 width,
1887                                                 height,
1888                                                 format);
1889         } else {
1890                 tbm_queue = wayland_tbm_client_create_surface_queue(
1891                                                 wl_tbm_client,
1892                                                 wl_surface,
1893                                                 num_buffers,
1894                                                 width,
1895                                                 height,
1896                                                 format);
1897         }
1898
1899         if (!tbm_queue) {
1900                 TPL_ERR("Failed to create tbm_queue. wl_tbm_client(%p)",
1901                                 wl_tbm_client);
1902                 return NULL;
1903         }
1904
1905         if (tbm_surface_queue_set_modes(
1906                         tbm_queue, TBM_SURFACE_QUEUE_MODE_GUARANTEE_CYCLE) !=
1907                                 TBM_SURFACE_QUEUE_ERROR_NONE) {
1908                 TPL_ERR("Failed to set queue mode to tbm_surface_queue(%p)",
1909                                 tbm_queue);
1910                 tbm_surface_queue_destroy(tbm_queue);
1911                 return NULL;
1912         }
1913
1914         if (tbm_surface_queue_add_reset_cb(
1915                         tbm_queue,
1916                         __cb_tbm_queue_reset_callback,
1917                         (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1918                 TPL_ERR("Failed to register reset callback to tbm_surface_queue(%p)",
1919                                 tbm_queue);
1920                 tbm_surface_queue_destroy(tbm_queue);
1921                 return NULL;
1922         }
1923
1924         if (tbm_surface_queue_add_acquirable_cb(
1925                         tbm_queue,
1926                         __cb_tbm_queue_acquirable_callback,
1927                         (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1928                 TPL_ERR("Failed to register acquirable callback to tbm_surface_queue(%p)",
1929                                 tbm_queue);
1930                 tbm_surface_queue_destroy(tbm_queue);
1931                 return NULL;
1932         }
1933
1934         return tbm_queue;
1935 }
1936
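/* Creates a vblank object on the "primary" tdm output. Fake vblank is
 * enabled (presumably so a software event is still delivered when the
 * output is off) and sync is disabled, so vblank events arrive
 * asynchronously through the tdm fd handled on the thread. */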
1937 static tdm_client_vblank*
1938 _thread_create_tdm_client_vblank(tdm_client *tdm_client)
1939 {
1940         tdm_client_vblank *tdm_vblank = NULL;
1941         tdm_client_output *tdm_output = NULL;
1942         tdm_error tdm_err = TDM_ERROR_NONE;
1943
1944         if (!tdm_client) {
1945                 TPL_ERR("Invalid parameter. tdm_client(%p)", tdm_client);
1946                 return NULL;
1947         }
1948
1949         tdm_output = tdm_client_get_output(tdm_client, "primary", &tdm_err);
1950         if (!tdm_output || tdm_err != TDM_ERROR_NONE) {
1951                 TPL_ERR("Failed to get tdm_client_output. tdm_err(%d)", tdm_err);
1952                 return NULL;
1953         }
1954
1955         tdm_vblank = tdm_client_output_create_vblank(tdm_output, &tdm_err);
1956         if (!tdm_vblank || tdm_err != TDM_ERROR_NONE) {
1957                 TPL_ERR("Failed to create tdm_vblank. tdm_err(%d)", tdm_err);
1958                 return NULL;
1959         }
1960
1961         tdm_err = tdm_client_handle_pending_events(tdm_client);
1962         if (tdm_err != TDM_ERROR_NONE) {
1963                 TPL_ERR("Failed to handle pending events. tdm_err(%d)", tdm_err);
1964         }
1965
1966         tdm_client_vblank_set_enable_fake(tdm_vblank, 1);
1967         tdm_client_vblank_set_sync(tdm_vblank, 0);
1968
1969         return tdm_vblank;
1970 }
1971
1972 static void
1973 __cb_surface_vblank_free(void *data)
1974 {
1975         TPL_CHECK_ON_NULL_RETURN(data);
1976
1977         tpl_surface_vblank_t *vblank = (tpl_surface_vblank_t *)data;
1978         tpl_wl_egl_surface_t *wl_egl_surface = vblank->wl_egl_surface;
1979
1980         TPL_INFO("[VBLANK_DESTROY]",
1981                          "wl_egl_surface(%p) surface_vblank(%p) tdm_vblank(%p)",
1982                          wl_egl_surface, vblank,
1983                          vblank->tdm_vblank);
1984
1985         tdm_client_vblank_destroy(vblank->tdm_vblank);
1986         vblank->tdm_vblank = NULL;
1987         vblank->wl_egl_surface = NULL;
1988         tpl_gmutex_clear(&vblank->mutex);
1989
1990         free(vblank);
1991
1992         wl_egl_surface->vblank = NULL;
1993 }
1994
1995 static void
1996 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface)
1997 {
1998         tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1999         tpl_surface_vblank_t *vblank         = NULL;
2000
2001         wl_egl_surface->tbm_queue = _thread_create_tbm_queue(
2002                                                                         wl_egl_surface,
2003                                                                         wl_egl_display->wl_tbm_client,
2004                                                                         wl_egl_surface->num_buffers);
2005         if (!wl_egl_surface->tbm_queue) {
2006                 TPL_ERR("Failed to create tbm_queue. wl_egl_surface(%p) wl_tbm_client(%p)",
2007                                 wl_egl_surface, wl_egl_display->wl_tbm_client);
2008                 return;
2009         }
2010
2011         TPL_INFO("[QUEUE_CREATION][1/2]",
2012                          "wl_egl_surface(%p) wl_surface(%p) wl_tbm_client(%p)",
2013                          wl_egl_surface, wl_egl_surface->wl_surface,
2014                          wl_egl_display->wl_tbm_client);
2015         TPL_INFO("[QUEUE_CREATION][2/2]",
2016                          "wl_egl_surface(%p) tbm_queue(%p) size(%d x %d) X %d format(%d)",
2017                          wl_egl_surface,
2018                          wl_egl_surface->tbm_queue,
2019                          wl_egl_surface->width,
2020                          wl_egl_surface->height,
2021                          wl_egl_surface->num_buffers,
2022                          wl_egl_surface->format);
2023
2024         if (wl_egl_display->use_wait_vblank) {
2025                 vblank = (tpl_surface_vblank_t *)calloc(1, sizeof(tpl_surface_vblank_t));
2026                 if (vblank) {
2027                         vblank->tdm_vblank = _thread_create_tdm_client_vblank(
2028                                                                         wl_egl_display->tdm.tdm_client);
2029                         if (!vblank->tdm_vblank) {
2030                                 TPL_ERR("Failed to create tdm_vblank from tdm_client(%p)",
2031                                                 wl_egl_display->tdm.tdm_client);
2032                                 free(vblank);
2033                                 vblank = NULL;
2034                         } else {
2035                                 vblank->waiting_buffers = __tpl_list_alloc();
2036                                 if (!vblank->waiting_buffers) {
2037                                         tdm_client_vblank_destroy(vblank->tdm_vblank);
2038                                         free(vblank);
2039                                         vblank = NULL;
2040                                 } else {
2041                                         vblank->wl_egl_surface = wl_egl_surface;
2042                                         tpl_gmutex_init(&vblank->mutex);
2043
2044                                         __tpl_list_push_back(wl_egl_display->tdm.surface_vblanks,
2045                                                                                 (void *)vblank);
2046
2047                                         TPL_INFO("[VBLANK_INIT]",
2048                                                         "wl_egl_surface(%p) tdm_client(%p) tdm_vblank(%p)",
2049                                                         wl_egl_surface, wl_egl_display->tdm.tdm_client,
2050                                                         vblank->tdm_vblank);
2051                                 }
2052                         }
2053                 }
2054         }
2055
2056         wl_egl_surface->vblank = vblank;
2057         wl_egl_surface->vblank_enable = (vblank != NULL &&
2058                                                                         wl_egl_surface->post_interval > 0);
2059
2060 #if TIZEN_FEATURE_ENABLE
2061         if (wl_egl_display->tss) {
2062                 wl_egl_surface->tss_flusher =
2063                         tizen_surface_shm_get_flusher(wl_egl_display->tss,
2064                                                                                   wl_egl_surface->wl_surface);
2065         }
2066
2067         if (wl_egl_surface->tss_flusher) {
2068                 tizen_surface_shm_flusher_add_listener(wl_egl_surface->tss_flusher,
2069                                                                                            &tss_flusher_listener,
2070                                                                                            wl_egl_surface);
2071                 TPL_INFO("[FLUSHER_INIT]",
2072                                  "wl_egl_surface(%p) tss_flusher(%p)",
2073                                  wl_egl_surface, wl_egl_surface->tss_flusher);
2074         }
2075
2076         if (wl_egl_display->explicit_sync && wl_egl_display->use_explicit_sync) {
2077                 wl_egl_surface->surface_sync =
2078                         zwp_linux_explicit_synchronization_v1_get_synchronization(
2079                                         wl_egl_display->explicit_sync, wl_egl_surface->wl_surface);
2080                 if (wl_egl_surface->surface_sync) {
2081                         TPL_INFO("[EXPLICIT_SYNC_INIT]",
2082                                          "wl_egl_surface(%p) surface_sync(%p)",
2083                                          wl_egl_surface, wl_egl_surface->surface_sync);
2084                 } else {
2085                         TPL_WARN("Failed to create surface_sync. | wl_egl_surface(%p)",
2086                                          wl_egl_surface);
2087                         wl_egl_display->use_explicit_sync = TPL_FALSE;
2088                 }
2089         }
2090 #endif
2091         wl_egl_surface->presentation_feedbacks = __tpl_list_alloc();
2092 }
2093
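/* Called from __tpl_wl_egl_surface_fini() before destroying the gsource.
 * The wl-egl thread is paused while the surface's buffer list is drained:
 * already enqueued buffers are waited on (500ms at a time, giving up on the
 * first timeout) until they reach COMMITTED, acquired-but-unreleased buffers
 * are released back to the tbm_queue, and still DEQUEUED buffers get
 * cancel_dequeue. */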
2094 static void
2095 _tpl_wl_egl_surface_buffer_clear(tpl_wl_egl_surface_t *wl_egl_surface)
2096 {
2097         tbm_surface_queue_error_e tsq_err       = TBM_SURFACE_QUEUE_ERROR_NONE;
2098         tpl_wl_egl_display_t *wl_egl_display    = wl_egl_surface->wl_egl_display;
2099         tpl_bool_t need_to_release              = TPL_FALSE;
2100         tpl_bool_t need_to_cancel               = TPL_FALSE;
2101         buffer_status_t status                  = RELEASED;
2102         int buffer_cnt                          = 0;
2103         int idx                                 = 0;
2104
2105         tpl_gthread_pause_in_idle(wl_egl_display->thread);
2106
2107         buffer_cnt = __tpl_list_get_count(wl_egl_surface->buffers);
2108
2109         while (!__tpl_list_is_empty(wl_egl_surface->buffers)) {
2110                 tpl_wl_egl_buffer_t wl_egl_buffer(__tpl_list_pop_front(wl_egl_surface->buffers,
2111                                                                                                                            NULL));
2112
2113                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2114
2115                 status = wl_egl_buffer->status;
2116
2117                 TPL_INFO("[BUFFER_CLEAR]",
2118                                  "[%d/%d] wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p) status(%s)",
2119                                  ++idx, buffer_cnt, wl_egl_surface, wl_egl_buffer,
2120                                  wl_egl_buffer->tbm_surface,
2121                                  status_to_string[status]);
2122
2123                 if (status >= ENQUEUED) {
2124                         tpl_result_t wait_result = TPL_ERROR_NONE;
2125
2126                         while (status < COMMITTED && wait_result != TPL_ERROR_TIME_OUT) {
2127                                 tpl_gthread_continue(wl_egl_display->thread);
2128                                 wait_result = tpl_gcond_timed_wait(&wl_egl_buffer->cond,
2129                                                                                                    &wl_egl_buffer->mutex,
2130                                                                                                    500); /* 500ms */
2131                                 tpl_gthread_pause_in_idle(wl_egl_display->thread);
2132                                 status = wl_egl_buffer->status; /* update status */
2133
2134                                 if (wait_result == TPL_ERROR_TIME_OUT) {
2135                                         TPL_WARN("timeout occurred while waiting to be signaled. wl_egl_buffer(%p) status(%s)",
2136                                                          wl_egl_buffer, status_to_string[status]);
2137                                 }
2138                         }
2139                 }
2140
2141                 /* ACQUIRED, WAITING_SIGNALED, WAITING_VBLANK, COMMITTED */
2142                 /* It has been acquired but has not yet been released, so this
2143                  * buffer must be released. */
2144                 need_to_release = (status >= ACQUIRED && status <= COMMITTED);
2145
2146                 /* After dequeue, the buffer has not been enqueued yet,
2147                  * so cancel_dequeue must be performed. */
2148                 need_to_cancel = (status == DEQUEUED);
2149
2150                 if (need_to_release) {
2151                         tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2152                                                                                                 wl_egl_buffer->tbm_surface);
2153                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2154                                 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
2155                                                 wl_egl_buffer->tbm_surface, tsq_err);
2156                 }
2157
2158                 if (need_to_cancel) {
2159                         tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
2160                                                                                                            wl_egl_buffer->tbm_surface);
2161                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2162                                 TPL_ERR("Failed to cancel dequeue. tbm_surface(%p) tsq_err(%d)",
2163                                                 wl_egl_buffer->tbm_surface, tsq_err);
2164                 }
2165
2166                 wl_egl_buffer->status = RELEASED;
2167
2168                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2169
2170                 if (need_to_release || need_to_cancel || status == ENQUEUED)
2171                         tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
2172         }
2173
2174         tpl_gthread_continue(wl_egl_display->thread);
2175 }
2176
2177 static void
2178 __tpl_wl_egl_surface_fini(tpl_surface_t *surface)
2179 {
2180         tpl_wl_egl_display_t *wl_egl_display = NULL;
2181
2182         TPL_ASSERT(surface);
2183         TPL_ASSERT(surface->display);
2184
2185         TPL_CHECK_ON_FALSE_RETURN(surface->type == TPL_SURFACE_TYPE_WINDOW);
2186
2187         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2188         TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
2189
2190         wl_egl_display = wl_egl_surface->wl_egl_display;
2191         TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
2192
2193         TPL_INFO("[SURFACE_FINI][BEGIN]",
2194                          "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
2195                          wl_egl_surface,
2196                          wl_egl_surface->wl_surface, wl_egl_surface->tbm_queue);
2197
2198         _tpl_wl_egl_surface_buffer_clear(wl_egl_surface);
2199
2200         if (wl_egl_surface->surf_source) {
2201                 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2202                 // Send destroy message to the thread
2203                 tpl_gsource_destroy(wl_egl_surface->surf_source, TPL_TRUE);
2204                 /* This is a protection against problems that arise in unexpected situations
2205                  * where g_cond_wait cannot work normally.
2206                  * When calling tpl_gsource_destroy() with destroy_in_thread set to TPL_TRUE,
2207                  * the caller should call tpl_gcond_wait() in a loop that checks the
2208                  * finalized flag. */
2209                 while (!wl_egl_surface->gsource_finalized) {
2210                         tpl_gcond_wait(&wl_egl_surface->surf_cond, &wl_egl_surface->surf_mutex);
2211                 }
2212                 wl_egl_surface->surf_source = NULL;
2213                 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2214         }
2215
2216         if (wl_egl_surface->wl_egl_window) {
2217                 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2218                 struct tizen_private tizen_private(wl_egl_window->driver_private);
2219                 TPL_INFO("[WL_EGL_WINDOW_FINI]",
2220                                  "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
2221                                  wl_egl_surface, wl_egl_window,
2222                                  wl_egl_surface->wl_surface);
2223
2224                 if (tizen_private) {
2225                         tizen_private->set_window_serial_callback = NULL;
2226                         tizen_private->rotate_callback = NULL;
2227                         tizen_private->get_rotation_capability = NULL;
2228                         tizen_private->create_presentation_sync_fd = NULL;
2229                         tizen_private->create_commit_sync_fd = NULL;
2230                         tizen_private->set_frontbuffer_callback = NULL;
2231                         tizen_private->merge_sync_fds = NULL;
2232                         tizen_private->data = NULL;
2233                         free(tizen_private);
2234
2235                         wl_egl_window->driver_private = NULL;
2236                 }
2237
2238                 wl_egl_window->destroy_window_callback = NULL;
2239                 wl_egl_window->resize_callback = NULL;
2240
2241                 wl_egl_surface->wl_egl_window = NULL;
2242         }
2243
2244         wl_egl_surface->last_enq_buffer = NULL;
2245
2246         wl_egl_surface->wl_surface = NULL;
2247         wl_egl_surface->wl_egl_display = NULL;
2248         wl_egl_surface->tpl_surface = NULL;
2249
2250         tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
2251         __tpl_list_free(wl_egl_surface->buffers, NULL);
2252         wl_egl_surface->buffers = NULL;
2253         tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2254         tpl_gmutex_clear(&wl_egl_surface->buffers_mutex);
2255
2256         tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
2257         tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
2258         tpl_gmutex_clear(&wl_egl_surface->commit_sync.mutex);
2259
2260         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2261         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
2262         tpl_gmutex_clear(&wl_egl_surface->presentation_sync.mutex);
2263
2264         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2265         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2266         tpl_gmutex_clear(&wl_egl_surface->surf_mutex);
2267         tpl_gcond_clear(&wl_egl_surface->surf_cond);
2268
2269         TPL_INFO("[SURFACE_FINI][END]", "wl_egl_surface(%p)", wl_egl_surface);
2270
2271         free(wl_egl_surface);
2272         surface->backend.data = NULL;
2273 }
2274
2275 static tpl_result_t
2276 __tpl_wl_egl_surface_set_rotation_capability(tpl_surface_t *surface,
2277                                                                                          tpl_bool_t set)
2278 {
2279         TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
2280
2281         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2282
2283         TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
2284
2285         TPL_INFO("[SET_PREROTATION_CAPABILITY]",
2286                          "wl_egl_surface(%p) prerotation capability set to [%s]",
2287                          wl_egl_surface, (set ? "TRUE" : "FALSE"));
2288
2289         wl_egl_surface->prerotation_capability = set;
2290         return TPL_ERROR_NONE;
2291 }
2292
2293 static tpl_result_t
2294 __tpl_wl_egl_surface_set_post_interval(tpl_surface_t *surface,
2295                                                                            int post_interval)
2296 {
2297         TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
2298
2299         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2300
2301         TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
2302
2303         TPL_INFO("[SET_POST_INTERVAL]",
2304                          "wl_egl_surface(%p) post_interval(%d -> %d)",
2305                          wl_egl_surface, wl_egl_surface->post_interval, post_interval);
2306
2307         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2308         wl_egl_surface->post_interval = post_interval;
2309         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2310
2311         return TPL_ERROR_NONE;
2312 }
2313
2314 static tpl_bool_t
2315 __tpl_wl_egl_surface_validate(tpl_surface_t *surface)
2316 {
2317         tpl_bool_t retval = TPL_TRUE;
2318
2319         TPL_ASSERT(surface);
2320         TPL_ASSERT(surface->backend.data);
2321
2322         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2323
2324         retval = !(wl_egl_surface->reset);
2325
2326         return retval;
2327 }
2328
2329 static void
2330 __tpl_wl_egl_surface_get_size(tpl_surface_t *surface, int *width, int *height)
2331 {
2332         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2333
2334         if (width)
2335                 *width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2336         if (height)
2337                 *height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
2338 }
2339
2340 static tpl_bool_t
2341 __tpl_wl_egl_surface_fence_sync_is_available(tpl_surface_t *surface)
2342 {
2343         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2344
2345         return !wl_egl_surface->frontbuffer_activated;
2346 }
2347
2348 #define CAN_DEQUEUE_TIMEOUT_MS 10000
2349
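/* Last-resort recovery, presumably used when waiting for a dequeueable
 * buffer exceeds CAN_DEQUEUE_TIMEOUT_MS: drop the buffers still waiting for
 * vblank, flush the whole tbm_surface_queue and release every buffer that
 * was acquired but not yet released. */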
2350 tpl_result_t
2351 _tbm_queue_force_flush(tpl_wl_egl_surface_t *wl_egl_surface)
2352 {
2353         tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2354
2355         _print_buffer_lists(wl_egl_surface);
2356
2357         if (wl_egl_surface->vblank) {
2358                 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
2359
2360                 if (wl_egl_surface->vblank->waiting_buffers)
2361                         __tpl_list_fini(wl_egl_surface->vblank->waiting_buffers, NULL);
2362
2363                 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
2364         }
2365
2366         if ((tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue))
2367                 != TBM_SURFACE_QUEUE_ERROR_NONE) {
2368                 TPL_ERR("Failed to flush tbm_surface_queue(%p) tsq_err(%d)",
2369                                 wl_egl_surface->tbm_queue, tsq_err);
2370                 return TPL_ERROR_INVALID_OPERATION;
2371         }
2372
2373         while (!__tpl_list_is_empty(wl_egl_surface->buffers)) {
2374                 tpl_bool_t need_to_release = TPL_FALSE;
2375                 tpl_wl_egl_buffer_t wl_egl_buffer(
2376                         __tpl_list_pop_front(wl_egl_surface->buffers, NULL));
2377                 need_to_release = (wl_egl_buffer->status >= ACQUIRED) &&
2378                                                         (wl_egl_buffer->status <= COMMITTED);
2379
2380                 if (need_to_release) {
2381                         tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2382                                                                                                 wl_egl_buffer->tbm_surface);
2383                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2384                                 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
2385                                                 wl_egl_buffer->tbm_surface, tsq_err);
2386                         tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
2387                 }
2388         }
2389
2390         TPL_INFO("[FORCE_FLUSH]",
2391                          "wl_egl_surface(%p) tbm_queue(%p)",
2392                          wl_egl_surface, wl_egl_surface->tbm_queue);
2393
2394         _print_buffer_lists(wl_egl_surface);
2395
2396         return TPL_ERROR_NONE;
2397 }
2398
2399 static void
2400 _wl_egl_buffer_init(tpl_wl_egl_buffer_t *wl_egl_buffer,
2401                                         tpl_wl_egl_surface_t *wl_egl_surface)
2402 {
2403         struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2404         struct tizen_private tizen_private(wl_egl_window->driver_private);
2405
2406         TPL_ASSERT(tizen_private);
2407
2408         wl_egl_buffer->draw_done                = TPL_FALSE;
2409         wl_egl_buffer->need_to_commit           = TPL_TRUE;
2410 #if TIZEN_FEATURE_ENABLE
2411         wl_egl_buffer->buffer_release           = NULL;
2412 #endif
2413         wl_egl_buffer->transform                = tizen_private->transform;
2414
2415         if (wl_egl_buffer->w_transform != tizen_private->window_transform) {
2416                 wl_egl_buffer->w_transform          = tizen_private->window_transform;
2417                 wl_egl_buffer->w_rotated            = TPL_TRUE;
2418         }
2419
2420         if (wl_egl_surface->set_serial_is_used) {
2421                 wl_egl_buffer->serial               = wl_egl_surface->serial;
2422         } else {
2423                 wl_egl_buffer->serial               = ++tizen_private->serial;
2424         }
2425
2426         if (wl_egl_buffer->rects) {
2427                 free(wl_egl_buffer->rects);
2428                 wl_egl_buffer->rects                = NULL;
2429                 wl_egl_buffer->num_rects            = 0;
2430         }
2431 }
2432
2433 static tpl_wl_egl_buffer_t *
2434 _get_wl_egl_buffer(tbm_surface_h tbm_surface)
2435 {
2436         tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2437         tbm_surface_internal_get_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2438                                                                            (void **)&wl_egl_buffer);
2439         return wl_egl_buffer;
2440 }
2441
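/* A tpl_wl_egl_buffer_t is attached to each tbm_surface as user data under
 * KEY_WL_EGL_BUFFER, so the same backend buffer is found again on later
 * dequeues. It is created once per tbm_surface, appended to the surface's
 * buffer list, and re-initialized by _wl_egl_buffer_init() on every
 * dequeue. */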
2442 static tpl_wl_egl_buffer_t *
2443 _wl_egl_buffer_create(tpl_wl_egl_surface_t *wl_egl_surface,
2444                                           tbm_surface_h tbm_surface)
2445 {
2446         tpl_wl_egl_buffer_t  *wl_egl_buffer  = NULL;
2447         struct wl_egl_window *wl_egl_window  = wl_egl_surface->wl_egl_window;
2448
2449         wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2450
2451         if (!wl_egl_buffer) {
2452                 wl_egl_buffer = calloc(1, sizeof(tpl_wl_egl_buffer_t));
2453                 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_buffer, NULL);
2454
2455                 tbm_surface_internal_add_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2456                                                                                    (tbm_data_free)__cb_wl_egl_buffer_free);
2457                 tbm_surface_internal_set_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2458                                                                                    wl_egl_buffer);
2459
2460                 wl_egl_buffer->wl_buffer                = NULL;
2461                 wl_egl_buffer->tbm_surface              = tbm_surface;
2462                 wl_egl_buffer->bo_name                  = _get_tbm_surface_bo_name(tbm_surface);
2463                 wl_egl_buffer->wl_egl_surface           = wl_egl_surface;
2464
2465                 wl_egl_buffer->status                   = RELEASED;
2466
2467                 wl_egl_buffer->acquire_fence_fd         = -1;
2468                 wl_egl_buffer->commit_sync_fd           = -1;
2469                 wl_egl_buffer->presentation_sync_fd     = -1;
2470                 wl_egl_buffer->release_fence_fd         = -1;
2471
2472                 wl_egl_buffer->dx                       = wl_egl_window->dx;
2473                 wl_egl_buffer->dy                       = wl_egl_window->dy;
2474                 wl_egl_buffer->width                    = tbm_surface_get_width(tbm_surface);
2475                 wl_egl_buffer->height                   = tbm_surface_get_height(tbm_surface);
2476
2477                 wl_egl_buffer->w_transform              = -1;
2478
2479                 tpl_gmutex_init(&wl_egl_buffer->mutex);
2480                 tpl_gcond_init(&wl_egl_buffer->cond);
2481
2482                 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
2483                 __tpl_list_push_back(wl_egl_surface->buffers, (void *)wl_egl_buffer);
2484                 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2485
2486                 TPL_INFO("[WL_EGL_BUFFER_CREATE]",
2487                                  "wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2488                                  wl_egl_surface, wl_egl_buffer, tbm_surface,
2489                                  wl_egl_buffer->bo_name);
2490         }
2491
2492         _wl_egl_buffer_init(wl_egl_buffer, wl_egl_surface);
2493
2494         return wl_egl_buffer;
2495 }
2496
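/* Backend entry point for buffer dequeue (EGL side).
 * - If the tbm_queue was reset, wait (with a 200ms timeout) for the
 *   previously enqueued buffer to be committed.
 * - Wait until the tbm_queue becomes dequeueable; on CAN_DEQUEUE timeout the
 *   queue is force-flushed while the worker thread is paused.
 * - In frontbuffer mode the already attached surface->frontbuffer may be
 *   returned directly without dequeuing a new tbm_surface.
 * - Otherwise a tbm_surface is dequeued, wrapped in a wl_egl_buffer and
 *   returned to the caller; *release_fence is handed out when explicit sync
 *   is in use, otherwise it is set to -1. */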
2497 static tbm_surface_h
2498 __tpl_wl_egl_surface_dequeue_buffer(tpl_surface_t *surface, uint64_t timeout_ns,
2499                                                                         int32_t *release_fence)
2500 {
2501         TPL_ASSERT(surface->backend.data);
2502         TPL_ASSERT(surface->display);
2503         TPL_ASSERT(surface->display->backend.data);
2504
2505         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2506         tpl_wl_egl_display_t wl_egl_display(surface->display->backend.data);
2507         tpl_wl_egl_buffer_t *wl_egl_buffer   = NULL;
2508
2509         tbm_surface_queue_error_e tsq_err    = TBM_SURFACE_QUEUE_ERROR_NONE;
2510         int             bo_name              = 0;
2511         tbm_surface_h   tbm_surface          = NULL;
2512
2513         TPL_OBJECT_UNLOCK(surface);
2514         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2515         if (wl_egl_surface->reset == TPL_TRUE) {
2516                 if (_check_buffer_validate(wl_egl_surface, wl_egl_surface->last_enq_buffer) &&
2517                         tbm_surface_internal_is_valid(wl_egl_surface->last_enq_buffer)) {
2518                         tbm_surface_h last_enq_buffer = wl_egl_surface->last_enq_buffer;
2519                         tpl_wl_egl_buffer_t *enqueued_buffer =
2520                                 _get_wl_egl_buffer(last_enq_buffer);
2521
2522                         if (enqueued_buffer) {
2523                                 tbm_surface_internal_ref(last_enq_buffer);
2524                                 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2525                                 tpl_gmutex_lock(&enqueued_buffer->mutex);
2526                                 while (enqueued_buffer->status >= ENQUEUED &&
2527                                            enqueued_buffer->status < COMMITTED) {
2528                                         tpl_result_t wait_result;
2529                                         TPL_INFO("[DEQ_AFTER_RESET]",
2530                                                          "wl_egl_surface(%p) waiting for previous wl_egl_buffer(%p) commit",
2531                                                          wl_egl_surface, enqueued_buffer);
2532
2533                                         wait_result = tpl_gcond_timed_wait(&enqueued_buffer->cond,
2534                                                                                                           &enqueued_buffer->mutex,
2535                                                                                                           200); /* 200ms */
2536                                         if (wait_result == TPL_ERROR_TIME_OUT) {
2537                                                 TPL_WARN("timeout occurred while waiting for commit. wl_egl_buffer(%p)",
2538                                                                  enqueued_buffer);
2539                                                 break;
2540                                         }
2541                                 }
2542                                 tpl_gmutex_unlock(&enqueued_buffer->mutex);
2543                                 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2544                                 tbm_surface_internal_unref(last_enq_buffer);
2545                         }
2546                 }
2547
2548                 wl_egl_surface->last_enq_buffer = NULL;
2549         }
2550         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2551
2552         tsq_err = tbm_surface_queue_can_dequeue_wait_timeout(
2553                                 wl_egl_surface->tbm_queue, CAN_DEQUEUE_TIMEOUT_MS);
2554         TPL_OBJECT_LOCK(surface);
2555
2556
2557         if (tsq_err == TBM_SURFACE_QUEUE_ERROR_TIMEOUT) {
2558                 TPL_WARN("[CAN_DEQUEUE_TIMEOUT] queue(%p) will be reset. surface(%p)",
2559                                  wl_egl_surface->tbm_queue, surface);
2560
2561                 tpl_gthread_pause_in_idle(wl_egl_display->thread);
2562                 /* Locking wl_event_mutex is a fallback in case
2563                  * tpl_gthread_pause_in_idle() fails.
2564                  * If tpl_gthread_pause_in_idle() succeeds,
2565                  * the additional lock on wl_event_mutex has no effect. */
2566                 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
2567                 if (_tbm_queue_force_flush(wl_egl_surface) != TPL_ERROR_NONE) {
2568                         TPL_ERR("Failed to reset tbm_queue after timeout. tbm_queue(%p) surface(%p)",
2569                                         wl_egl_surface->tbm_queue, surface);
2570                         tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2571                         tpl_gthread_continue(wl_egl_display->thread);
2572                         return NULL;
2573                 } else {
2574                         tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2575                 }
2576
2577                 wl_egl_surface->vblank_done = TPL_TRUE;
2578
2579                 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2580                 tpl_gthread_continue(wl_egl_display->thread);
2581         }
2582
2583         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2584                 TPL_ERR("Failed to query can_dequeue. tbm_queue(%p) surface(%p)",
2585                                 wl_egl_surface->tbm_queue, surface);
2586                 return NULL;
2587         }
2588
2589         /* Once the queue reports it can dequeue, lock wl_event_mutex to prevent
2590          * other events from being processed in wayland_egl_thread
2591          * during the dequeue procedure below. */
2592         tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
2593
2594         surface->width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2595         surface->height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
2596         wl_egl_surface->width = surface->width;
2597         wl_egl_surface->height = surface->height;
2598
2599
2600         /* If surface->frontbuffer is not NULL, frontbuffer rendering mode is
2601          * maintained only while the surface satisfies all of the conditions below.
2602          *  1. surface->is_frontbuffer_mode == TPL_TRUE
2603          *   - It may be changed to true or false by calling
2604          *         tpl_surface_set_frontbuffer_mode (will be deprecated)
2605          *      or
2606          *         wl_egl_window_tizen_set_frontbuffer_mode (recommended)
2607          *  2. is_activated == TPL_TRUE
2608          *   - Indicates whether direct display is possible.
2609          *  3. wl_egl_surface->reset == TPL_FALSE
2610          *   - No tbm_queue reset should have occurred due to a window resize.
2611          * If any of the conditions above is not met, frontbuffer rendering is
2612          * stopped and surface->frontbuffer becomes NULL.
2613          */
2614         if (surface->frontbuffer) {
2615                 if (!surface->is_frontbuffer_mode ||
2616                         !wl_egl_surface->is_activated ||
2617                         wl_egl_surface->reset) {
2618                         surface->frontbuffer = NULL;
2619                         wl_egl_surface->need_to_enqueue = TPL_TRUE;
2620                         wl_egl_surface->frontbuffer_activated = TPL_FALSE;
2621                         TPL_INFO("[FRONTBUFFER_RENDERING_STOP]",
2622                                          "wl_egl_surface(%p) wl_egl_window(%p)",
2623                                          wl_egl_surface, wl_egl_surface->wl_egl_window);
2624                 } else {
2625                         bo_name = _get_tbm_surface_bo_name(surface->frontbuffer);
2626                         TPL_LOG_T("WL_EGL",
2627                                           "[DEQ][F] surface->frontbuffer(%p) BO_NAME(%d)",
2628                                           surface->frontbuffer, bo_name);
2629                         TRACE_ASYNC_BEGIN((intptr_t)surface->frontbuffer,
2630                                                           "[DEQ]~[ENQ] BO_NAME:%d",
2631                                                           bo_name);
2632                         wl_egl_surface->frontbuffer_activated = TPL_TRUE;
2633                         tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2634                         return surface->frontbuffer;
2635                 }
2636         }
2637
2638         tsq_err = tbm_surface_queue_dequeue(wl_egl_surface->tbm_queue,
2639                                                                                 &tbm_surface);
2640         if (!tbm_surface) {
2641                 TPL_ERR("Failed to dequeue from tbm_queue(%p) wl_egl_surface(%p) | tsq_err = %d",
2642                                 wl_egl_surface->tbm_queue, wl_egl_surface, tsq_err);
2643                 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2644                 return NULL;
2645         }
2646
2647         tbm_surface_internal_ref(tbm_surface);
2648
2649         wl_egl_buffer = _wl_egl_buffer_create(wl_egl_surface, tbm_surface);
2650         TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer, "Failed to create/get wl_egl_buffer.");
2651
2652         tpl_gmutex_lock(&wl_egl_buffer->mutex);
2653         wl_egl_buffer->status = DEQUEUED;
2654
2655         /* If wl_egl_buffer->release_fence_fd is -1,
2656          * the tbm_surface can be used immediately.
2657          * If not, the user (EGL) has to wait until the fence is signaled. */
2658         if (release_fence) {
2659 #if TIZEN_FEATURE_ENABLE
2660                 if (wl_egl_display->use_explicit_sync) {
2661                         *release_fence = wl_egl_buffer->release_fence_fd;
2662                         TPL_LOG_D("[EXPLICIT_FENCE]", "wl_egl_surface(%p) wl_egl_buffer(%p) release_fence_fd(%d)",
2663                                           wl_egl_surface, wl_egl_buffer, *release_fence);
2664
2665                         wl_egl_buffer->release_fence_fd = -1;
2666                 } else
2667 #endif
2668                 {
2669                         *release_fence = -1;
2670                 }
2671         }
2672
2673         if (surface->is_frontbuffer_mode && wl_egl_surface->is_activated) {
2674                 if (surface->frontbuffer == NULL) {
2675                         TPL_INFO("[FRONTBUFFER_RENDERING_START]",
2676                                          "wl_egl_surface(%p) wl_egl_window(%p) bo(%d)",
2677                                          wl_egl_surface, wl_egl_surface->wl_egl_window,
2678                                          _get_tbm_surface_bo_name(tbm_surface));
2679                 }
2680                 surface->frontbuffer = tbm_surface;
2681         }
2682
2683         wl_egl_surface->reset = TPL_FALSE;
2684
2685         TRACE_MARK("[DEQ][NEW]BO_NAME:%d", wl_egl_buffer->bo_name);
2686         TRACE_ASYNC_BEGIN((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d",
2687                                           wl_egl_buffer->bo_name);
2688         TPL_LOG_T("WL_EGL", "[DEQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2689                           wl_egl_buffer, tbm_surface, wl_egl_buffer->bo_name,
2690                           release_fence ? *release_fence : -1);
2691
2692         tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2693         tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2694
2695         return tbm_surface;
2696 }
2697
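/* Cancel a previously dequeued tbm_surface without enqueuing it.
 * The matching wl_egl_buffer (if any) returns to RELEASED state and the
 * dequeue is cancelled on the tbm_queue. */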
2698 static tpl_result_t
2699 __tpl_wl_egl_surface_cancel_buffer(tpl_surface_t *surface,
2700                                                                    tbm_surface_h tbm_surface)
2701 {
2702         TPL_ASSERT(surface);
2703         TPL_ASSERT(surface->backend.data);
2704
2705         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2706         tpl_wl_egl_buffer_t *wl_egl_buffer      = NULL;
2707         tbm_surface_queue_error_e tsq_err       = TBM_SURFACE_QUEUE_ERROR_NONE;
2708
2709         if (!tbm_surface_internal_is_valid(tbm_surface)) {
2710                 TPL_ERR("Invalid buffer. tbm_surface(%p)", tbm_surface);
2711                 return TPL_ERROR_INVALID_PARAMETER;
2712         }
2713
2714         wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2715         if (wl_egl_buffer) {
2716                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2717                 wl_egl_buffer->status = RELEASED;
2718                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2719         }
2720
2721         tbm_surface_internal_unref(tbm_surface);
2722
2723         tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
2724                                                                                            tbm_surface);
2725         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2726                 TPL_ERR("Failed to release tbm_surface(%p) surface(%p)",
2727                                 tbm_surface, surface);
2728                 return TPL_ERROR_INVALID_OPERATION;
2729         }
2730
2731         TPL_INFO("[CANCEL_BUFFER]", "wl_egl_surface(%p) tbm_surface(%p) bo(%d)",
2732                           wl_egl_surface, tbm_surface, _get_tbm_surface_bo_name(tbm_surface));
2733
2734         return TPL_ERROR_NONE;
2735 }
2736
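/* Backend entry point for buffer enqueue (EGL side).
 * Stores the damage rects and the acquire fence in the wl_egl_buffer, takes
 * over any pending presentation/commit sync fds from the surface, marks the
 * buffer ENQUEUED and pushes it into the tbm_queue so the worker thread can
 * acquire and commit it. In frontbuffer mode, enqueueing the buffer that is
 * already displayed as surface->frontbuffer is skipped. */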
2737 static tpl_result_t
2738 __tpl_wl_egl_surface_enqueue_buffer(tpl_surface_t *surface,
2739                 tbm_surface_h tbm_surface,
2740                 int num_rects, const int *rects, int32_t acquire_fence)
2741 {
2742         TPL_ASSERT(surface);
2743         TPL_ASSERT(surface->display);
2744         TPL_ASSERT(surface->backend.data);
2745         TPL_ASSERT(tbm_surface);
2746         TPL_OBJECT_CHECK_RETURN(surface, TPL_ERROR_INVALID_PARAMETER);
2747
2748         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2749         tpl_wl_egl_buffer_t *wl_egl_buffer      = NULL;
2750         tbm_surface_queue_error_e tsq_err       = TBM_SURFACE_QUEUE_ERROR_NONE;
2751         int bo_name                             = -1;
2752
2753         if (!tbm_surface_internal_is_valid(tbm_surface)) {
2754                 TPL_ERR("Failed to enqueue tbm_surface(%p) Invalid value.",
2755                                 tbm_surface);
2756                 TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2757                 return TPL_ERROR_INVALID_PARAMETER;
2758         }
2759
2760         wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2761         if (!wl_egl_buffer) {
2762                 TPL_ERR("Failed to get wl_egl_buffer from tbm_surface(%p)", tbm_surface);
2763                 return TPL_ERROR_INVALID_PARAMETER;
2764         }
2765
2766         bo_name = _get_tbm_surface_bo_name(tbm_surface);
2767
2768         TRACE_MARK("[ENQ] BO_NAME:%d", bo_name);
2769
2770         tpl_gmutex_lock(&wl_egl_buffer->mutex);
2771
2772         /* If damage region information was received, save it to wl_egl_buffer */
2773         if (num_rects && rects) {
2774                 if (wl_egl_buffer->rects != NULL) {
2775                         free(wl_egl_buffer->rects);
2776                         wl_egl_buffer->rects = NULL;
2777                         wl_egl_buffer->num_rects = 0;
2778                 }
2779
2780                 wl_egl_buffer->rects = (int *)calloc(1, (sizeof(int) * 4 * num_rects));
2781
2782                 if (!wl_egl_buffer->rects) {
2783                         TPL_ERR("Failed to allocate memory for damage rects info.");
2784                         tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2785                         return TPL_ERROR_OUT_OF_MEMORY;
2786                 }
2787                 wl_egl_buffer->num_rects = num_rects;
2788
2789                 memcpy((char *)wl_egl_buffer->rects, (char *)rects, sizeof(int) * 4 * num_rects);
2790         }
2791
2792         if (!wl_egl_surface->need_to_enqueue ||
2793                 !wl_egl_buffer->need_to_commit) {
2794
2795                 if (acquire_fence != -1) {
2796                         close(acquire_fence);
2797                         acquire_fence = -1;
2798                 }
2799                 TPL_LOG_T("FRONTBUFFER_MODE", "[ENQ_SKIP] tbm_surface(%p) bo(%d) need not to enqueue",
2800                                   tbm_surface, _get_tbm_surface_bo_name(tbm_surface));
2801                 TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2802                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2803                 return TPL_ERROR_NONE;
2804         }
2805
2806         /* In frontbuffer mode, tbm_surface_queue_enqueue, acquire, and commit
2807          * are skipped when the tbm_surface the client wants to enqueue is the
2808          * same as the already set surface->frontbuffer.
2809          */
2810         if (surface->is_frontbuffer_mode) {
2811                 /* The first buffer to be activated in frontbuffer mode must be
2812                  * committed. Subsequent frames do not need to be committed because
2813                  * the buffer is already displayed.
2814                  */
2815                 if (surface->frontbuffer == tbm_surface)
2816                         wl_egl_surface->need_to_enqueue = TPL_FALSE;
2817         }
2818
2819         if (wl_egl_buffer->acquire_fence_fd != -1)
2820                 close(wl_egl_buffer->acquire_fence_fd);
2821
2822         wl_egl_buffer->acquire_fence_fd = acquire_fence;
2823
2824         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2825         if (wl_egl_surface->presentation_sync.fd != -1) {
2826                 wl_egl_buffer->presentation_sync_fd  = wl_egl_surface->presentation_sync.fd;
2827                 wl_egl_surface->presentation_sync.fd = -1;
2828         }
2829         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
2830
2831         tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
2832         if (wl_egl_surface->commit_sync.fd != -1) {
2833                 wl_egl_buffer->commit_sync_fd  = wl_egl_surface->commit_sync.fd;
2834                 wl_egl_surface->commit_sync.fd = -1;
2835                 TRACE_ASYNC_BEGIN(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
2836                                                   _get_tbm_surface_bo_name(tbm_surface));
2837         }
2838         tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
2839
2840         wl_egl_buffer->status = ENQUEUED;
2841         TPL_LOG_T("WL_EGL",
2842                           "[ENQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2843                           wl_egl_buffer, tbm_surface, bo_name, acquire_fence);
2844
2845         tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2846
2847         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2848         wl_egl_surface->last_enq_buffer = tbm_surface;
2849         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2850
2851         tsq_err = tbm_surface_queue_enqueue(wl_egl_surface->tbm_queue,
2852                                                                                 tbm_surface);
2853         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2854                 tbm_surface_internal_unref(tbm_surface);
2855                 TPL_ERR("Failed to enqueue tbm_surface(%p). wl_egl_surface(%p) tsq_err=%d",
2856                                 tbm_surface, wl_egl_surface, tsq_err);
2857                 TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2858                 return TPL_ERROR_INVALID_OPERATION;
2859         }
2860
2861         tbm_surface_internal_unref(tbm_surface);
2862
2863         TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2864
2865         return TPL_ERROR_NONE;
2866 }
2867
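/* Dispatched in the worker thread when the acquire fence of a buffer in
 * WAITING_SIGNALED state is signaled. The buffer is then either committed
 * immediately or, while vblank throttling is active, appended to
 * vblank->waiting_buffers until the next vblank event. */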
2868 static tpl_bool_t
2869 __thread_func_waiting_source_dispatch(tpl_gsource *gsource, uint64_t message)
2870 {
2871         tpl_wl_egl_buffer_t wl_egl_buffer(tpl_gsource_get_data(gsource));
2872         TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_buffer, TPL_FALSE);
2873
2874         tpl_wl_egl_surface_t *wl_egl_surface    = wl_egl_buffer->wl_egl_surface;
2875         TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_FALSE);
2876
2877         tbm_surface_h tbm_surface               = wl_egl_buffer->tbm_surface;
2878         TPL_CHECK_ON_NULL_RETURN_VAL(tbm_surface, TPL_FALSE);
2879         TPL_CHECK_ON_FALSE_RETURN_VAL(tbm_surface_internal_is_valid(tbm_surface), TPL_FALSE);
2880
2881         wl_egl_surface->render_done_cnt++;
2882
2883         TRACE_ASYNC_END(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2884                                         wl_egl_buffer->acquire_fence_fd);
2885
2886         TPL_LOG_D("[RENDER DONE]", "wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p)",
2887                           wl_egl_surface, wl_egl_buffer, tbm_surface);
2888
2889         tpl_gmutex_lock(&wl_egl_buffer->mutex);
2890         wl_egl_buffer->status = WAITING_VBLANK;
2891
2892         TPL_LOG_D("[FINALIZE]", "wl_egl_surface(%p) wl_egl_buffer(%p) wait_source(%p) fence_fd(%d)",
2893                           wl_egl_surface, wl_egl_buffer, wl_egl_buffer->waiting_source,
2894                           wl_egl_buffer->acquire_fence_fd);
2895
2896         wl_egl_buffer->acquire_fence_fd = -1;
2897         wl_egl_buffer->waiting_source = NULL;
2898
2899         tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2900
2901         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2902
2903         if (!wl_egl_surface->vblank_enable || wl_egl_surface->vblank_done)
2904                 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2905         else {
2906                 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
2907                 __tpl_list_push_back(wl_egl_surface->vblank->waiting_buffers,
2908                                                          wl_egl_buffer);
2909                 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
2910         }
2911
2912         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2913
2914         return TPL_FALSE;
2915 }
2916
2917 static void
2918 __thread_func_waiting_source_finalize(tpl_gsource *gsource)
2919 {
2920         TPL_IGNORE(gsource);
2921 }
2922
2923 static tpl_gsource_functions buffer_funcs = {
2924         .prepare = NULL,
2925         .check = NULL,
2926         .dispatch = __thread_func_waiting_source_dispatch,
2927         .finalize = __thread_func_waiting_source_finalize,
2928 };
2929
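/* Runs in the worker thread. Acquires every buffer currently available in
 * the tbm_queue and decides how it gets committed:
 *  - no acquire fence, or explicit sync in use: commit now, or append to
 *    vblank->waiting_buffers while vblank throttling is active.
 *  - acquire fence without explicit sync: create a disposable gsource that
 *    waits on the fence fd and commits later from
 *    __thread_func_waiting_source_dispatch(). */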
2930 static tpl_result_t
2931 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface)
2932 {
2933         tbm_surface_h tbm_surface            = NULL;
2934         tbm_surface_queue_error_e tsq_err    = TBM_SURFACE_QUEUE_ERROR_NONE;
2935         tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
2936         tpl_wl_egl_buffer_t *wl_egl_buffer   = NULL;
2937         tpl_bool_t ready_to_commit           = TPL_FALSE;
2938
2939         while (tbm_surface_queue_can_acquire(wl_egl_surface->tbm_queue, 0)) {
2940                 tsq_err = tbm_surface_queue_acquire(wl_egl_surface->tbm_queue,
2941                                                                                         &tbm_surface);
2942                 if (!tbm_surface || tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2943                         TPL_ERR("Failed to acquire from tbm_queue(%p)",
2944                                         wl_egl_surface->tbm_queue);
2945                         return TPL_ERROR_INVALID_OPERATION;
2946                 }
2947
2948                 tbm_surface_internal_ref(tbm_surface);
2949
2950                 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2951                 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
2952                                                                            "wl_egl_buffer should not be NULL");
2953
2954                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2955
2956                 wl_egl_buffer->status = ACQUIRED;
2957
2958                 TPL_LOG_T("WL_EGL", "[ACQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2959                                   wl_egl_buffer, tbm_surface,
2960                                   _get_tbm_surface_bo_name(tbm_surface));
2961
2962                 if (wl_egl_buffer->acquire_fence_fd != -1) {
2963 #if TIZEN_FEATURE_ENABLE
2964                         if (wl_egl_display->use_explicit_sync)
2965                                 ready_to_commit = TPL_TRUE;
2966                         else
2967 #endif
2968                         {
2969                                 if (wl_egl_buffer->waiting_source) {
2970                                         tpl_gsource_destroy(wl_egl_buffer->waiting_source, TPL_FALSE);
2971                                         wl_egl_buffer->waiting_source = NULL;
2972                                 }
2973
2974                                 wl_egl_buffer->waiting_source =
2975                                         tpl_gsource_create(wl_egl_display->thread, wl_egl_buffer,
2976                                                                            wl_egl_buffer->acquire_fence_fd,
2977                                                                            FD_TYPE_FENCE, &buffer_funcs,
2978                                                                            SOURCE_TYPE_DISPOSABLE);
2979                                 wl_egl_buffer->status = WAITING_SIGNALED;
2980
2981                                 TRACE_ASYNC_BEGIN(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2982                                                                   wl_egl_buffer->acquire_fence_fd);
2983
2984                                 ready_to_commit = TPL_FALSE;
2985                         }
2986                 } else {
2987                         ready_to_commit = TPL_TRUE;
2988                 }
2989
2990                 if (ready_to_commit) {
2991                         if (!wl_egl_surface->vblank_enable || wl_egl_surface->vblank_done)
2992                                 ready_to_commit = TPL_TRUE;
2993                         else {
2994                                 wl_egl_buffer->status = WAITING_VBLANK;
2995                                 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
2996                                 __tpl_list_push_back(wl_egl_surface->vblank->waiting_buffers, wl_egl_buffer);
2997                                 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
2998                                 ready_to_commit = TPL_FALSE;
2999                         }
3000                 }
3001
3002                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3003
3004                 if (ready_to_commit)
3005                         _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
3006         }
3007
3008         return TPL_ERROR_NONE;
3009 }
3010
3011 /* -- BEGIN -- tdm_client vblank callback function */
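/* Called by tdm_client when the requested vblank arrives (or times out).
 * Marks vblank as done and commits buffers queued in
 * vblank->waiting_buffers: one buffer per vblank in the normal case, or all
 * of them when a tdm error (e.g. TIMEOUT) occurred or post_interval dropped
 * to 0. */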
3012 static void
3013 __cb_tdm_client_vblank(tdm_client_vblank *vblank, tdm_error error,
3014                                            unsigned int sequence, unsigned int tv_sec,
3015                                            unsigned int tv_usec, void *user_data)
3016 {
3017         tpl_wl_egl_surface_t wl_egl_surface(user_data);
3018
3019         TRACE_ASYNC_END((intptr_t)wl_egl_surface, "WAIT_VBLANK");
3020         TPL_LOG_D("[VBLANK_DONE]", "wl_egl_surface(%p)", wl_egl_surface);
3021
3022         if (error == TDM_ERROR_TIMEOUT)
3023                 TPL_WARN("[TDM_ERROR_TIMEOUT] vblank wait timed out. Continuing anyway. wl_egl_surface(%p)",
3024                                  wl_egl_surface);
3025
3026         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
3027         wl_egl_surface->vblank_done = TPL_TRUE;
3028
3029         if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
3030                 tpl_bool_t is_empty = TPL_TRUE;
3031                 do {
3032                         tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
3033                         tpl_wl_egl_buffer_t wl_egl_buffer(
3034                                 __tpl_list_pop_front( wl_egl_surface->vblank->waiting_buffers, NULL));
3035                         is_empty = __tpl_list_is_empty(wl_egl_surface->vblank->waiting_buffers);
3036                         tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
3037
3038                         if (!wl_egl_buffer) break;
3039
3040                         _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
3041
3042                         /* If a tdm error such as TIMEOUT occurred,
3043                          * flush all vblank-waiting buffers of this wl_egl_surface.
3044                          * Otherwise, only one wl_egl_buffer is committed per vblank event.
3045                          */
3046                         if (error == TDM_ERROR_NONE && wl_egl_surface->post_interval > 0)
3047                                 break;
3048                 } while (!is_empty);
3049
3050                 wl_egl_surface->vblank_enable = (wl_egl_surface->post_interval > 0);
3051         }
3052         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
3053 }
3054 /* -- END -- tdm_client vblank callback function */
3055
3056 #if TIZEN_FEATURE_ENABLE
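/* zwp_linux_buffer_release_v1 listener callbacks (explicit sync path).
 * The compositor either hands back a release fence fd
 * (__cb_buffer_fenced_release) or releases the buffer without a fence
 * (__cb_buffer_immediate_release). In both cases a COMMITTED buffer returns
 * to RELEASED state and is released back to the tbm_queue. */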
3057 static void
3058 __cb_buffer_fenced_release(void *data,
3059                                 struct zwp_linux_buffer_release_v1 *release, int32_t fence)
3060 {
3061         tpl_wl_egl_buffer_t wl_egl_buffer(data);
3062         tbm_surface_h tbm_surface           = NULL;
3063
3064         TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
3065
3066         tbm_surface = wl_egl_buffer->tbm_surface;
3067
3068         if (tbm_surface_internal_is_valid(tbm_surface)) {
3069                 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
3070
3071                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3072                 if (wl_egl_buffer->status == COMMITTED) {
3073                         tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3074
3075                         zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
3076                         wl_egl_buffer->buffer_release = NULL;
3077
3078                         wl_egl_buffer->release_fence_fd = fence;
3079                         wl_egl_buffer->status = RELEASED;
3080
3081                         TRACE_MARK("[FENCED_RELEASE] BO(%d) fence(%d)",
3082                                            _get_tbm_surface_bo_name(tbm_surface),
3083                                            fence);
3084                         TRACE_ASYNC_END((intptr_t)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3085                                                         _get_tbm_surface_bo_name(tbm_surface));
3086
3087                         TPL_LOG_T("WL_EGL",
3088                                           "[FENCED_RELEASE] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
3089                                           wl_egl_buffer, tbm_surface,
3090                                           _get_tbm_surface_bo_name(tbm_surface),
3091                                           fence);
3092
3093                         tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3094                                                                                                 tbm_surface);
3095                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3096                                 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
3097                 }
3098
3099                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3100
3101                 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
3102                         tbm_surface_internal_unref(tbm_surface);
3103
3104         } else {
3105                 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
3106         }
3107 }
3108
3109 static void
3110 __cb_buffer_immediate_release(void *data,
3111                                                           struct zwp_linux_buffer_release_v1 *release)
3112 {
3113         tpl_wl_egl_buffer_t wl_egl_buffer(data);
3114         tbm_surface_h tbm_surface           = NULL;
3115
3116         TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
3117
3118         tbm_surface = wl_egl_buffer->tbm_surface;
3119
3120         if (tbm_surface_internal_is_valid(tbm_surface)) {
3121                 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
3122
3123                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3124                 if (wl_egl_buffer->status == COMMITTED) {
3125                         tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3126
3127                         zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
3128                         wl_egl_buffer->buffer_release = NULL;
3129
3130                         wl_egl_buffer->release_fence_fd = -1;
3131                         wl_egl_buffer->status = RELEASED;
3132
3133                         TRACE_MARK("[IMMEDIATE_RELEASE] BO(%d)",
3134                                            _get_tbm_surface_bo_name(tbm_surface));
3135                         TRACE_ASYNC_END((intptr_t)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3136                                                         _get_tbm_surface_bo_name(tbm_surface));
3137
3138                         TPL_LOG_T("WL_EGL",
3139                                           "[IMMEDIATE_RELEASE] wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
3140                                           wl_egl_buffer, tbm_surface,
3141                                           _get_tbm_surface_bo_name(tbm_surface));
3142
3143                         tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3144                                                                                                 tbm_surface);
3145                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3146                                 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
3147                 }
3148
3149                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3150
3151                 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
3152                         tbm_surface_internal_unref(tbm_surface);
3153
3154         } else {
3155                 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
3156         }
3157 }
3158
3159 static const struct zwp_linux_buffer_release_v1_listener zwp_release_listner = {
3160         __cb_buffer_fenced_release,
3161         __cb_buffer_immediate_release,
3162 };
3163 #endif
3164
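/* wl_buffer.release handler used when explicit sync is not in use.
 * Releases a COMMITTED buffer back to the tbm_queue and drops the
 * tbm_surface reference taken at acquire time. */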
3165 static void
3166 __cb_wl_buffer_release(void *data, struct wl_proxy *wl_buffer)
3167 {
3168         tpl_wl_egl_buffer_t wl_egl_buffer(data);
3169         tbm_surface_h tbm_surface = NULL;
3170
3171         TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
3172
3173         tbm_surface = wl_egl_buffer->tbm_surface;
3174
3175         if (tbm_surface_internal_is_valid(tbm_surface)) {
3176                 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3177                 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
3178
3179                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3180
3181                 if (wl_egl_buffer->status == COMMITTED) {
3182
3183                         tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3184                                                                                                 tbm_surface);
3185                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3186                                 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
3187
3188                         wl_egl_buffer->status = RELEASED;
3189
3190                         TRACE_MARK("[RELEASE] BO(%d)", _get_tbm_surface_bo_name(tbm_surface));
3191                         TRACE_ASYNC_END((intptr_t)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3192                                                         _get_tbm_surface_bo_name(tbm_surface));
3193
3194                         TPL_LOG_T("WL_EGL", "[REL] wl_buffer(%p) tbm_surface(%p) bo(%d)",
3195                                           wl_egl_buffer->wl_buffer, tbm_surface,
3196                                           _get_tbm_surface_bo_name(tbm_surface));
3197                 }
3198
3199                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3200
3201                 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
3202                         tbm_surface_internal_unref(tbm_surface);
3203         } else {
3204                 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
3205         }
3206 }
3207
3208 static const struct wl_buffer_listener wl_buffer_release_listener = {
3209         (void *)__cb_wl_buffer_release,
3210 };
3211 #if TIZEN_FEATURE_ENABLE
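/* wp_presentation feedback listener callbacks.
 * Both the presented and the discarded path signal (and close) the
 * per-frame presentation sync fd, destroy the feedback proxy and free the
 * pst_feedback entry. */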
3212 static void
3213 __cb_presentation_feedback_sync_output(void *data,
3214                         struct wp_presentation_feedback *presentation_feedback,
3215                         struct wl_output *output)
3216 {
3217         TPL_IGNORE(data);
3218         TPL_IGNORE(presentation_feedback);
3219         TPL_IGNORE(output);
3220         /* Nothing to do */
3221 }
3222
3223 static void
3224 __cb_presentation_feedback_presented(void *data,
3225                         struct wp_presentation_feedback *presentation_feedback,
3226                         uint32_t tv_sec_hi,
3227                         uint32_t tv_sec_lo,
3228                         uint32_t tv_nsec,
3229                         uint32_t refresh_nsec,
3230                         uint32_t seq_hi,
3231                         uint32_t seq_lo,
3232                         uint32_t flags)
3233 {
3234         TPL_IGNORE(tv_sec_hi);
3235         TPL_IGNORE(tv_sec_lo);
3236         TPL_IGNORE(tv_nsec);
3237         TPL_IGNORE(refresh_nsec);
3238         TPL_IGNORE(seq_hi);
3239         TPL_IGNORE(seq_lo);
3240         TPL_IGNORE(flags);
3241
3242         struct pst_feedback *pst_feedback       = (struct pst_feedback *)data;
3243         tpl_wl_egl_surface_t *wl_egl_surface    = pst_feedback->wl_egl_surface;
3244
3245         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3246
3247         TPL_LOG_D("[PRESENTED]", "pst_feedback(%p) presentation_feedback(%p) bo(%d)",
3248                           pst_feedback, presentation_feedback, pst_feedback->bo_name);
3249
3250         if (pst_feedback->pst_sync_fd != -1) {
3251                 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
3252                                                 "[PRESENTATION_SYNC] bo(%d)",
3253                                                 pst_feedback->bo_name);
3254                 send_signal(pst_feedback->pst_sync_fd, "PST_FEEDBACK");
3255                 pst_feedback->pst_sync_fd = -1;
3256         }
3257
3258         wp_presentation_feedback_destroy(presentation_feedback);
3259
3260         pst_feedback->presentation_feedback = NULL;
3261         pst_feedback->wl_egl_surface        = NULL;
3262         pst_feedback->bo_name               = 0;
3263
3264         __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
3265                                                    TPL_FIRST, NULL);
3266
3267         free(pst_feedback);
3268
3269         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
3270 }
3271
3272 static void
3273 __cb_presentation_feedback_discarded(void *data,
3274                         struct wp_presentation_feedback *presentation_feedback)
3275 {
3276         struct pst_feedback *pst_feedback       = (struct pst_feedback *)data;
3277         tpl_wl_egl_surface_t *wl_egl_surface    = pst_feedback->wl_egl_surface;
3278
3279         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3280
3281         TPL_LOG_D("[DISCARDED]", "pst_feedback(%p) presentation_feedback(%p) bo(%d)",
3282                           pst_feedback, presentation_feedback, pst_feedback->bo_name);
3283
3284         if (pst_feedback->pst_sync_fd != -1) {
3285                 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
3286                                                 "[PRESENTATION_SYNC] bo(%d)",
3287                                                 pst_feedback->bo_name);
3288                 send_signal(pst_feedback->pst_sync_fd, "PST_FEEDBACK");
3289                 pst_feedback->pst_sync_fd = -1;
3290         }
3291
3292         wp_presentation_feedback_destroy(presentation_feedback);
3293
3294         pst_feedback->presentation_feedback = NULL;
3295         pst_feedback->wl_egl_surface        = NULL;
3296         pst_feedback->bo_name               = 0;
3297
3298         __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
3299                                                    TPL_FIRST, NULL);
3300
3301         free(pst_feedback);
3302
3303         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
3304 }
3305
3306 static const struct wp_presentation_feedback_listener feedback_listener = {
3307         __cb_presentation_feedback_sync_output, /* sync_output feedback */
3308         __cb_presentation_feedback_presented,
3309         __cb_presentation_feedback_discarded
3310 };
3311 #endif
3312
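/* Request a vblank event from tdm_client using the surface's post_interval.
 * On success vblank_done is cleared; __cb_tdm_client_vblank() sets it again
 * when the event arrives. */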
3313 static tpl_result_t
3314 _thread_surface_vblank_wait(tpl_wl_egl_surface_t *wl_egl_surface)
3315 {
3316         tdm_error tdm_err                       = TDM_ERROR_NONE;
3317         tpl_surface_vblank_t *vblank            = wl_egl_surface->vblank;
3318
3319         tdm_err = tdm_client_vblank_wait(vblank->tdm_vblank,
3320                         wl_egl_surface->post_interval,
3321                         __cb_tdm_client_vblank,
3322                         (void *)wl_egl_surface);
3323
3324         if (tdm_err == TDM_ERROR_NONE) {
3325                 wl_egl_surface->vblank_done = TPL_FALSE;
3326                 TRACE_ASYNC_BEGIN((intptr_t)wl_egl_surface, "WAIT_VBLANK");
3327         } else {
3328                 TPL_ERR("Failed to tdm_client_vblank_wait. tdm_err(%d)", tdm_err);
3329                 return TPL_ERROR_INVALID_OPERATION;
3330         }
3331
3332         return TPL_ERROR_NONE;
3333 }
3334
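/* Runs in the worker thread. Attaches the given buffer to the wl_surface
 * and commits it:
 *  - creates the wl_buffer via wayland-tbm on first use,
 *  - sets up presentation feedback and buffer/window transforms if needed,
 *  - posts damage in surface or buffer coordinates depending on the
 *    wl_surface version,
 *  - hands the acquire fence to the compositor when explicit sync is used,
 *  - commits, marks the buffer COMMITTED, requests the next vblank when
 *    post_interval > 0, and signals the commit sync fd. */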
3335 static void
3336 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
3337                                                   tpl_wl_egl_buffer_t *wl_egl_buffer)
3338 {
3339         tpl_wl_egl_display_t *wl_egl_display    = wl_egl_surface->wl_egl_display;
3340         struct wl_surface *wl_surface           = wl_egl_surface->wl_surface;
3341         struct wl_egl_window *wl_egl_window     = wl_egl_surface->wl_egl_window;
3342         uint32_t version;
3343
3344         TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
3345                                                                    "wl_egl_buffer should not be NULL");
3346
3347         if (wl_egl_buffer->wl_buffer == NULL) {
3348                 wl_egl_buffer->wl_buffer =
3349                         (struct wl_proxy *)wayland_tbm_client_create_buffer(
3350                                                 wl_egl_display->wl_tbm_client,
3351                                                 wl_egl_buffer->tbm_surface);
3352
3353                 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer->wl_buffer != NULL,
3354                                                                            "[FATAL] Failed to create wl_buffer");
3355
3356                 TPL_INFO("[WL_BUFFER_CREATE]",
3357                                  "wl_egl_surface(%p) wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p)",
3358                                  wl_egl_surface, wl_egl_buffer, wl_egl_buffer->wl_buffer,
3359                                  wl_egl_buffer->tbm_surface);
3360
3361 #if TIZEN_FEATURE_ENABLE
3362                 if (!wl_egl_display->use_explicit_sync ||
3363                         wl_egl_buffer->acquire_fence_fd == -1)
3364 #endif
3365                 {
3366                         wl_buffer_add_listener((struct wl_buffer *)wl_egl_buffer->wl_buffer,
3367                                                                    &wl_buffer_release_listener,
3368                                                                    wl_egl_buffer);
3369                 }
3370         }
3371
3372         version = wl_proxy_get_version((struct wl_proxy *)wl_surface);
3373
3374 #if TIZEN_FEATURE_ENABLE
3375         /* create presentation feedback and add listener */
3376         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3377         if (wl_egl_display->presentation && wl_egl_buffer->presentation_sync_fd != -1) {
3378
3379                 struct pst_feedback *pst_feedback = NULL;
3380                 pst_feedback = (struct pst_feedback *) calloc(1, sizeof(struct pst_feedback));
3381                 if (pst_feedback) {
3382                         pst_feedback->presentation_feedback =
3383                                 wp_presentation_feedback(wl_egl_display->presentation,
3384                                                                                  wl_surface);
3385
3386                         pst_feedback->wl_egl_surface        = wl_egl_surface;
3387                         pst_feedback->bo_name               = wl_egl_buffer->bo_name;
3388
3389                         pst_feedback->pst_sync_fd           = wl_egl_buffer->presentation_sync_fd;
3390                         wl_egl_buffer->presentation_sync_fd = -1;
3391
3392                         wp_presentation_feedback_add_listener(pst_feedback->presentation_feedback,
3393                                                                                                   &feedback_listener, pst_feedback);
3394                         __tpl_list_push_back(wl_egl_surface->presentation_feedbacks, pst_feedback);
3395                         TRACE_ASYNC_BEGIN(pst_feedback->pst_sync_fd,
3396                                                           "[PRESENTATION_SYNC] bo(%d)",
3397                                                           pst_feedback->bo_name);
3398                 } else {
3399                         TPL_ERR("Failed to create presentation feedback. wl_egl_buffer(%p)",
3400                                         wl_egl_buffer);
3401                         send_signal(wl_egl_buffer->presentation_sync_fd, "PST_SYNC");
3402                         wl_egl_buffer->presentation_sync_fd = -1;
3403                 }
3404         }
3405         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
3406 #endif
3407
3408         if (wl_egl_buffer->w_rotated == TPL_TRUE) {
3409                 if (version > 1) {
3410                         wayland_tbm_client_set_buffer_transform(
3411                                         wl_egl_display->wl_tbm_client,
3412                                         (void *)wl_egl_buffer->wl_buffer,
3413                                         wl_egl_buffer->w_transform);
3414                         TPL_INFO("[W_TRANSFORM]",
3415                                          "wl_egl_surface(%p) wl_egl_buffer(%p) w_transform(%d)",
3416                                          wl_egl_surface, wl_egl_buffer, wl_egl_buffer->w_transform);
3417                 }
3418                 wl_egl_buffer->w_rotated = TPL_FALSE;
3419         }
3420
3421         if (wl_egl_surface->latest_transform != wl_egl_buffer->transform) {
3422                 if (version > 1) {
3423                         wl_surface_set_buffer_transform(wl_surface, wl_egl_buffer->transform);
3424                         TPL_INFO("[TRANSFORM]",
3425                                          "wl_egl_surface(%p) wl_egl_buffer(%p) transform(%d -> %d)",
3426                                          wl_egl_surface, wl_egl_buffer,
3427                                          wl_egl_surface->latest_transform, wl_egl_buffer->transform);
3428                 }
3429                 wl_egl_surface->latest_transform = wl_egl_buffer->transform;
3430         }
3431
3432         if (wl_egl_window) {
3433                 wl_egl_window->attached_width = wl_egl_buffer->width;
3434                 wl_egl_window->attached_height = wl_egl_buffer->height;
3435         }
3436
3437         wl_surface_attach(wl_surface, (void *)wl_egl_buffer->wl_buffer,
3438                                           wl_egl_buffer->dx, wl_egl_buffer->dy);
3439
3440         if (wl_egl_buffer->num_rects < 1 || wl_egl_buffer->rects == NULL) {
3441                 if (version < 4) {
3442                         wl_surface_damage(wl_surface,
3443                                                           wl_egl_buffer->dx, wl_egl_buffer->dy,
3444                                                           wl_egl_buffer->width, wl_egl_buffer->height);
3445                 } else {
3446                         wl_surface_damage_buffer(wl_surface,
3447                                                                          0, 0,
3448                                                                          wl_egl_buffer->width, wl_egl_buffer->height);
3449                 }
3450         } else {
3451                 int i;
3452                 for (i = 0; i < wl_egl_buffer->num_rects; i++) {
3453                         int inverted_y =
3454                                 wl_egl_buffer->height - (wl_egl_buffer->rects[i * 4 + 1] +
3455                                                 wl_egl_buffer->rects[i * 4 + 3]);
3456                         if (version < 4) {
3457                                 wl_surface_damage(wl_surface,
3458                                                                   wl_egl_buffer->rects[i * 4 + 0],
3459                                                                   inverted_y,
3460                                                                   wl_egl_buffer->rects[i * 4 + 2],
3461                                                                   wl_egl_buffer->rects[i * 4 + 3]);
3462                         } else {
3463                                 wl_surface_damage_buffer(wl_surface,
3464                                                                                  wl_egl_buffer->rects[i * 4 + 0],
3465                                                                                  inverted_y,
3466                                                                                  wl_egl_buffer->rects[i * 4 + 2],
3467                                                                                  wl_egl_buffer->rects[i * 4 + 3]);
3468                         }
3469                 }
3470         }
3471
3472         wayland_tbm_client_set_buffer_serial(wl_egl_display->wl_tbm_client,
3473                                                 (void *)wl_egl_buffer->wl_buffer,
3474                                                 wl_egl_buffer->serial);
3475 #if TIZEN_FEATURE_ENABLE
3476         if (wl_egl_display->use_explicit_sync &&
3477                 wl_egl_buffer->acquire_fence_fd != -1) {
3478
3479                 zwp_linux_surface_synchronization_v1_set_acquire_fence(wl_egl_surface->surface_sync,
3480                                                                                                                            wl_egl_buffer->acquire_fence_fd);
3481                 TPL_LOG_D("[SET_ACQUIRE_FENCE][1/2]", "wl_egl_surface(%p) tbm_surface(%p) acquire_fence(%d)",
3482                                   wl_egl_surface, wl_egl_buffer->tbm_surface, wl_egl_buffer->acquire_fence_fd);
3483                 close(wl_egl_buffer->acquire_fence_fd);
3484                 wl_egl_buffer->acquire_fence_fd = -1;
3485
3486                 wl_egl_buffer->buffer_release =
3487                         zwp_linux_surface_synchronization_v1_get_release(wl_egl_surface->surface_sync);
3488                 if (!wl_egl_buffer->buffer_release) {
3489                         TPL_ERR("Failed to get buffer_release. wl_egl_surface(%p)", wl_egl_surface);
3490                 } else {
3491                         zwp_linux_buffer_release_v1_add_listener(
3492                                 wl_egl_buffer->buffer_release, &zwp_release_listner, wl_egl_buffer);
3493                         TPL_LOG_D("[SET_ACQUIRE_FENCE][2/2]", "add explicit_sync_release_listener.");
3494                 }
3495         }
3496 #endif
3497
3498         wl_surface_commit(wl_surface);
3499
3500         wl_display_flush(wl_egl_display->wl_display);
3501
3502         TRACE_ASYNC_BEGIN((intptr_t)wl_egl_buffer->tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3503                                           wl_egl_buffer->bo_name);
3504
3505         tpl_gmutex_lock(&wl_egl_buffer->mutex);
3506
3507         wl_egl_buffer->need_to_commit   = TPL_FALSE;
3508         wl_egl_buffer->status           = COMMITTED;
3509         if (wl_egl_surface->last_enq_buffer == wl_egl_buffer->tbm_surface)
3510                 wl_egl_surface->last_enq_buffer = NULL;
3511
3512         tpl_gcond_signal(&wl_egl_buffer->cond);
3513
3514         tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3515
3516         TPL_LOG_T("WL_EGL",
3517                           "[COMMIT] wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p) bo(%d)",
3518                           wl_egl_buffer, wl_egl_buffer->wl_buffer, wl_egl_buffer->tbm_surface,
3519                           wl_egl_buffer->bo_name);
3520
3521         if (wl_egl_surface->post_interval > 0 && wl_egl_surface->vblank != NULL) {
3522                 wl_egl_surface->vblank_enable = TPL_TRUE;
3523                 if (_thread_surface_vblank_wait(wl_egl_surface) != TPL_ERROR_NONE)
3524                         TPL_ERR("Failed to request vblank wait.");
3525         }
3526
3527         tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
3528
3529         if (wl_egl_buffer->commit_sync_fd != -1) {
3530                 TRACE_ASYNC_END(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
3531                                                 wl_egl_buffer->bo_name);
3532                 TPL_LOG_D("[COMMIT_SYNC][SEND]", "wl_egl_surface(%p) commit_sync_fd(%d)",
3533                                   wl_egl_surface, wl_egl_buffer->commit_sync_fd);
3534                 send_signal(wl_egl_buffer->commit_sync_fd, "COMMIT_SYNC");
3535                 wl_egl_buffer->commit_sync_fd = -1;
3536         }
3537
3538         tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
3539 }
3540
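/* Helpers for signaling sync fds (eventfds) handed out to the client.
 * send_signal() writes the value 1 to the fd and then closes this side of
 * it; the peer holding the other reference to the eventfd is woken by the
 * write.
 *
 * Illustrative sketch of a consumer (an assumption about the client side,
 * not something implemented in this file):
 *
 *     uint64_t v;
 *     struct pollfd pfd = { .fd = sync_fd, .events = POLLIN };
 *     if (poll(&pfd, 1, -1) > 0 && read(sync_fd, &v, sizeof(v)) > 0)
 *             close(sync_fd);  // commit / presentation has been signaled
 */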
3541 static int
3542 _write_to_eventfd(int eventfd, uint64_t value)
3543 {
3544         int ret;
3545
3546         ret = write(eventfd, &value, sizeof(uint64_t));
3547         if (ret == -1) {
3548                 TPL_ERR("failed to write to fd(%d)", eventfd);
3549                 return ret;
3550         }
3551
3552         return ret;
3553 }
3554
3555 static int send_signal(int fd, const char *type)
3556 {
3557         int ret = 0;
3558         if (fd < 0) return ret;
3559
3560         ret = _write_to_eventfd(fd, 1);
3561         if (ret < 0)
3562                 TPL_ERR("Failed to send %s signal to fd(%d)", type, fd);
3563
3564         close(fd);
3565
3566         return ret;
3567 }
3568
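/* Backend registration entry points. The libtpl-egl frontend calls these to
 * populate the display/surface backend function tables with the
 * wl_egl_thread implementations above. */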
3569 void
3570 __tpl_display_init_backend_wl_egl_thread(tpl_display_backend_t *backend)
3571 {
3572         TPL_ASSERT(backend);
3573
3574         backend->type = TPL_BACKEND_WAYLAND_THREAD;
3575         backend->data = NULL;
3576
3577         backend->init = __tpl_wl_egl_display_init;
3578         backend->fini = __tpl_wl_egl_display_fini;
3579         backend->query_config = __tpl_wl_egl_display_query_config;
3580         backend->filter_config = __tpl_wl_egl_display_filter_config;
3581         backend->get_window_info = __tpl_wl_egl_display_get_window_info;
3582         backend->get_pixmap_info = __tpl_wl_egl_display_get_pixmap_info;
3583         backend->get_buffer_from_native_pixmap =
3584                 __tpl_wl_egl_display_get_buffer_from_native_pixmap;
3585 }
3586
void
__tpl_surface_init_backend_wl_egl_thread(tpl_surface_backend_t *backend)
{
        TPL_ASSERT(backend);

        backend->type = TPL_BACKEND_WAYLAND_THREAD;
        backend->data = NULL;

        backend->init = __tpl_wl_egl_surface_init;
        backend->fini = __tpl_wl_egl_surface_fini;
        backend->validate = __tpl_wl_egl_surface_validate;
        backend->cancel_dequeued_buffer =
                __tpl_wl_egl_surface_cancel_buffer;
        backend->dequeue_buffer = __tpl_wl_egl_surface_dequeue_buffer;
        backend->enqueue_buffer = __tpl_wl_egl_surface_enqueue_buffer;
        backend->set_rotation_capability =
                __tpl_wl_egl_surface_set_rotation_capability;
        backend->set_post_interval =
                __tpl_wl_egl_surface_set_post_interval;
        backend->get_size =
                __tpl_wl_egl_surface_get_size;
        backend->fence_sync_is_available =
                __tpl_wl_egl_surface_fence_sync_is_available;
}

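/*
 * Final teardown of a wl_egl_buffer:
 *  - unlinks it from the surface's buffers list and, if present, from
 *    the vblank waiting_buffers list,
 *  - destroys the wl_buffer proxy through wayland-tbm and flushes the
 *    display,
 *  - releases explicit-sync resources (buffer_release, release fence),
 *  - signals any still-pending commit/presentation sync fds so waiters
 *    are not left blocked,
 *  - frees the damage rects and the wl_egl_buffer itself.
 */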
static void
__cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer)
{
        tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
        tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;

        TPL_INFO("[BUFFER_FREE]", "wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
                         wl_egl_surface, wl_egl_buffer, wl_egl_buffer->tbm_surface, wl_egl_buffer->bo_name);

        tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
        if (wl_egl_surface->buffers) {
                __tpl_list_remove_data(wl_egl_surface->buffers, (void *)wl_egl_buffer,
                                                           TPL_FIRST, NULL);
        }
        tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);

        if (wl_egl_surface->vblank) {
                tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
                if (wl_egl_surface->vblank->waiting_buffers)
                        __tpl_list_remove_data(wl_egl_surface->vblank->waiting_buffers, (void *)wl_egl_buffer,
                                                                   TPL_FIRST, NULL);
                tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
        }

        tpl_gmutex_lock(&wl_egl_buffer->mutex);

        if (wl_egl_display) {
                if (wl_egl_display->wl_tbm_client && wl_egl_buffer->wl_buffer) {
                        wayland_tbm_client_destroy_buffer(wl_egl_display->wl_tbm_client,
                                                                                          (void *)wl_egl_buffer->wl_buffer);
                        wl_egl_buffer->wl_buffer = NULL;
                }

                wl_display_flush(wl_egl_display->wl_display);
        }

#if TIZEN_FEATURE_ENABLE
        if (wl_egl_buffer->buffer_release) {
                zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
                wl_egl_buffer->buffer_release = NULL;
        }

        if (wl_egl_buffer->release_fence_fd != -1) {
                close(wl_egl_buffer->release_fence_fd);
                wl_egl_buffer->release_fence_fd = -1;
        }
#endif

        if (wl_egl_buffer->waiting_source) {
                tpl_gsource_destroy(wl_egl_buffer->waiting_source, TPL_FALSE);
                wl_egl_buffer->waiting_source = NULL;
        }

        send_signal(wl_egl_buffer->commit_sync_fd, "COMMIT_SYNC");
        wl_egl_buffer->commit_sync_fd = -1;

        send_signal(wl_egl_buffer->presentation_sync_fd, "PST_SYNC");
        wl_egl_buffer->presentation_sync_fd = -1;

        if (wl_egl_buffer->rects) {
                free(wl_egl_buffer->rects);
                wl_egl_buffer->rects = NULL;
                wl_egl_buffer->num_rects = 0;
        }

        wl_egl_buffer->wl_egl_surface = NULL;
        wl_egl_buffer->tbm_surface = NULL;
        wl_egl_buffer->bo_name = -1;
        wl_egl_buffer->status = RELEASED;

        tpl_gmutex_unlock(&wl_egl_buffer->mutex);
        tpl_gmutex_clear(&wl_egl_buffer->mutex);
        tpl_gcond_clear(&wl_egl_buffer->cond);
        free(wl_egl_buffer);
}

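/* Returns the exported name of the tbm_surface's first bo; used as a
 * stable identifier in logs and traces. */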
static int
_get_tbm_surface_bo_name(tbm_surface_h tbm_surface)
{
        return tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0));
}

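/* Debug helper: logs every wl_egl_buffer currently tracked by the
 * surface together with its bo name and lifecycle status. */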
static void
_print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface)
{
        tpl_list_node_t *node = NULL;
        int buffer_cnt = 0;
        int idx = 0;

        tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
        buffer_cnt = __tpl_list_get_count(wl_egl_surface->buffers);

        node = __tpl_list_get_front_node(wl_egl_surface->buffers);
        do {
                if (!node) break;
                tpl_wl_egl_buffer_t wl_egl_buffer(__tpl_list_node_get_data(node));
                TPL_INFO("[BUFFERS_INFO]",
                                 "[%d/%d] wl_egl_surface(%p), wl_egl_buffer(%p) tbm_surface(%p) bo(%d) | status(%s)",
                                 ++idx, buffer_cnt, wl_egl_surface, wl_egl_buffer,
                                 wl_egl_buffer->tbm_surface, wl_egl_buffer->bo_name,
                                 status_to_string[wl_egl_buffer->status]);
        } while ((node = __tpl_list_node_next(node)));
        tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
}

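/* Checks whether tbm_surface is owned by wl_egl_surface (i.e. present
 * in its buffers list). Logs an error and returns TPL_FALSE if not. */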
static tpl_bool_t
_check_buffer_validate(tpl_wl_egl_surface_t *wl_egl_surface, tbm_surface_h tbm_surface)
{
        tpl_list_node_t *node = NULL;
        tpl_bool_t ret = TPL_FALSE;

        /* silent return */
        if (!wl_egl_surface || !tbm_surface)
                return ret;

        tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
        node = __tpl_list_get_front_node(wl_egl_surface->buffers);
        do {
                if (!node) break;
                tpl_wl_egl_buffer_t wl_egl_buffer(__tpl_list_node_get_data(node));
                if (wl_egl_buffer->tbm_surface == tbm_surface) {
                        ret = TPL_TRUE;
                        break;
                }
        } while ((node = __tpl_list_node_next(node)));

        if (ret == TPL_FALSE) {
                TPL_ERR("tbm_surface(%p) is not owned by wl_egl_surface(%p)",
                                tbm_surface, wl_egl_surface);
        }

        tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);

        return ret;
}