src/tpl_wl_egl_thread.c (platform/core/uifw/libtpl-egl.git, commit a00116ac32623c6cf50c19f25779194b86227a56)
1
2 #include "tpl_internal.h"
3
4 #include <string.h>
5 #include <fcntl.h>
6 #include <unistd.h>
7 #include <sys/eventfd.h>
8
9 #include <tbm_bufmgr.h>
10 #include <tbm_surface.h>
11 #include <tbm_surface_internal.h>
12 #include <tbm_surface_queue.h>
13
14 #include <wayland-client.h>
15 #include <wayland-tbm-server.h>
16 #include <wayland-tbm-client.h>
17 #include <wayland-egl-backend.h>
18
19 #include <tdm_client.h>
20
21 #include "wayland-egl-tizen/wayland-egl-tizen.h"
22 #include "wayland-egl-tizen/wayland-egl-tizen-priv.h"
23
24 #ifndef TIZEN_FEATURE_ENABLE
25 #define TIZEN_FEATURE_ENABLE 1
26 #endif
27
28 #if TIZEN_FEATURE_ENABLE
29 #include <tizen-surface-client-protocol.h>
30 #include <presentation-time-client-protocol.h>
31 #include <linux-explicit-synchronization-unstable-v1-client-protocol.h>
32 #endif
33
34 #include "tpl_utils_gthread.h"
35
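/* The address of wl_egl_buffer_key is used as the user-data key under which
 * a tpl_wl_egl_buffer_t is attached to its tbm_surface
 * (retrieved via _get_wl_egl_buffer()). */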
36 static int wl_egl_buffer_key;
37 #define KEY_WL_EGL_BUFFER (unsigned long)(&wl_egl_buffer_key)
38
39 /* In wayland, the application and the compositor each create their own drawing buffers. A size greater than 2 is recommended. */
40 #define BUFFER_ARRAY_SIZE 9
41
42 typedef struct _tpl_wl_egl_display tpl_wl_egl_display_t;
43 typedef struct _tpl_wl_egl_surface tpl_wl_egl_surface_t;
44 typedef struct _tpl_wl_egl_buffer  tpl_wl_egl_buffer_t;
45 typedef struct _surface_vblank     tpl_surface_vblank_t;
46
47 struct _tpl_wl_egl_display {
48         tpl_gsource                  *disp_source;
49         tpl_gthread                  *thread;
50         tpl_gmutex                    wl_event_mutex;
51
52         struct wl_display            *wl_display;
53         struct wl_event_queue        *ev_queue;
54         struct wayland_tbm_client    *wl_tbm_client;
55         int                           last_error; /* errno of the last wl_display error */
56
57         tpl_bool_t                    wl_initialized;
58
59         tpl_bool_t                    use_wait_vblank;
60         tpl_bool_t                    use_explicit_sync;
61         tpl_bool_t                    use_tss;
62         tpl_bool_t                    prepared;
63         /* To make sure that tpl_gsource has been successfully finalized. */
64         tpl_bool_t                    gsource_finalized;
65         tpl_gmutex                    disp_mutex;
66         tpl_gcond                     disp_cond;
67         struct {
68                 tdm_client               *tdm_client;
69                 tpl_gsource              *tdm_source;
70                 int                       tdm_display_fd;
71                 tpl_bool_t                tdm_initialized;
72                 tpl_list_t               *surface_vblanks;
73
74                 /* To make sure that tpl_gsource has been successfully finalized. */
75                 tpl_bool_t                gsource_finalized;
76                 tpl_gmutex                tdm_mutex;
77                 tpl_gcond                 tdm_cond;
78         } tdm;
79
80 #if TIZEN_FEATURE_ENABLE
81         struct tizen_surface_shm     *tss; /* used for surface buffer_flush */
82         struct wp_presentation       *presentation; /* for presentation feedback */
83         struct zwp_linux_explicit_synchronization_v1 *explicit_sync; /* for explicit fence sync */
84 #endif
85 };
86
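/* Messages delivered to the surface gsource running on wl_egl_thread via
 * wl_egl_surface->sent_message: INIT_SURFACE requests surface initialization
 * on the thread (_thread_wl_egl_surface_init), and ACQUIRABLE indicates the
 * tbm_surface_queue has a buffer ready to be acquired
 * (_thread_surface_queue_acquire). */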
87 typedef enum surf_message {
88         NONE_MESSAGE = 0,
89         INIT_SURFACE,
90         ACQUIRABLE,
91 } surf_message;
92
93 struct _tpl_wl_egl_surface {
94         tpl_gsource                  *surf_source;
95
96         tbm_surface_queue_h           tbm_queue;
97         int                           num_buffers;
98
99         struct wl_egl_window         *wl_egl_window;
100         struct wl_surface            *wl_surface;
101
102 #if TIZEN_FEATURE_ENABLE
103         struct zwp_linux_surface_synchronization_v1 *surface_sync; /* for explicit fence sync */
104         struct tizen_surface_shm_flusher *tss_flusher; /* used for surface buffer_flush */
105 #endif
106
107         tpl_surface_vblank_t         *vblank;
108
109         /* surface information */
110         int                           render_done_cnt;
111         unsigned int                  serial;
112
113         int                           width;
114         int                           height;
115         int                           format;
116         int                           latest_transform;
117         int                           rotation;
118         int                           post_interval;
119
120         tpl_wl_egl_display_t         *wl_egl_display;
121         tpl_surface_t                *tpl_surface;
122
123         /* wl_egl_buffer array for buffer tracing */
124         tpl_wl_egl_buffer_t          *buffers[BUFFER_ARRAY_SIZE];
125         int                           buffer_cnt; /* the number of using wl_egl_buffers */
126         tpl_gmutex                    buffers_mutex;
127         tbm_surface_h                 last_enq_buffer;
128
129         tpl_list_t                   *presentation_feedbacks; /* for tracing presentation feedbacks */
130
131         struct {
132                 tpl_gmutex                mutex;
133                 int                       fd;
134         } commit_sync;
135
136         struct {
137                 tpl_gmutex                mutex;
138                 int                       fd;
139         } presentation_sync;
140
141         tpl_gmutex                    surf_mutex;
142         tpl_gcond                     surf_cond;
143
144         surf_message                  sent_message;
145
146         /* for waiting draw done */
147         tpl_bool_t                    use_render_done_fence;
148         tpl_bool_t                    is_activated;
149         tpl_bool_t                    reset; /* TRUE if the queue was reset externally */
150         tpl_bool_t                    need_to_enqueue;
151         tpl_bool_t                    prerotation_capability;
152         tpl_bool_t                    vblank_done;
153         tpl_bool_t                    set_serial_is_used;
154         tpl_bool_t                    initialized_in_thread;
155
156         /* To make sure that tpl_gsource has been successfully finalized. */
157         tpl_bool_t                    gsource_finalized;
158 };
159
160 struct _surface_vblank {
161         tdm_client_vblank            *tdm_vblank;
162         tpl_wl_egl_surface_t         *wl_egl_surface;
163         tpl_list_t                   *waiting_buffers; /* for FIFO/FIFO_RELAXED modes */
164         tpl_gmutex                    mutex;
165 };
166
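/* Buffer lifecycle, in ascending order of the enum values: a buffer is
 * DEQUEUED from the tbm_surface_queue, ENQUEUED after rendering, ACQUIRED by
 * wl_egl_thread, waits for its render-done fence (WAITING_SIGNALED) and for
 * tdm vblank (WAITING_VBLANK), is COMMITTED to the wl_surface, and returns to
 * RELEASED when the compositor releases it. */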
167 typedef enum buffer_status {
168         RELEASED = 0,             // 0
169         DEQUEUED,                 // 1
170         ENQUEUED,                 // 2
171         ACQUIRED,                 // 3
172         WAITING_SIGNALED,         // 4
173         WAITING_VBLANK,           // 5
174         COMMITTED,                // 6
175 } buffer_status_t;
176
177 static const char *status_to_string[7] = {
178         "RELEASED",                 // 0
179         "DEQUEUED",                 // 1
180         "ENQUEUED",                 // 2
181         "ACQUIRED",                 // 3
182         "WAITING_SIGNALED",         // 4
183         "WAITING_VBLANK",           // 5
184         "COMMITTED",                // 6
185 };
186
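/* Per-tbm_surface bookkeeping. One tpl_wl_egl_buffer_t is attached to each
 * tbm_surface (via KEY_WL_EGL_BUFFER) and is also referenced from the owning
 * wl_egl_surface's buffers[] array for status tracing. */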
187 struct _tpl_wl_egl_buffer {
188         tbm_surface_h                 tbm_surface;
189         int                           bo_name;
190
191         struct wl_proxy              *wl_buffer;
192         int                           dx, dy; /* position to attach to wl_surface */
193         int                           width, height; /* size to attach to wl_surface */
194
195         buffer_status_t               status; /* for tracing buffer status */
196         int                           idx; /* position index in buffers array of wl_egl_surface */
197
198         /* for damage region */
199         int                           num_rects;
200         int                          *rects;
201
202         /* for wayland_tbm_client_set_buffer_transform */
203         int                           w_transform;
204         tpl_bool_t                    w_rotated;
205
206         /* for wl_surface_set_buffer_transform */
207         int                           transform;
208
209         /* for wayland_tbm_client_set_buffer_serial */
210         unsigned int                  serial;
211
212         /* for checking need_to_commit (frontbuffer mode) */
213         tpl_bool_t                    need_to_commit;
214
215         /* for checking draw done */
216         tpl_bool_t                    draw_done;
217
218 #if TIZEN_FEATURE_ENABLE
219         /* to get release event via zwp_linux_buffer_release_v1 */
220         struct zwp_linux_buffer_release_v1 *buffer_release;
221 #endif
222         /* Each buffer owns its release_fence_fd until it passes
223          * ownership of it to EGL. */
224         int32_t                       release_fence_fd;
225
226         /* Each buffer owns its acquire_fence_fd.
227          * If zwp_linux_buffer_release_v1 is used, ownership of this fd
228          * is passed to the display server.
229          * Otherwise it is used as a fence to wait for render done
230          * on the tpl thread. */
231         int32_t                       acquire_fence_fd;
232
233         /* Fd used to signal when wl_surface_commit is called with this buffer */
234         int32_t                       commit_sync_fd;
235
236         /* Fd used to signal when the presentation feedback
237          * is received from the display server */
238         int32_t                       presentation_sync_fd;
239
240         tpl_gsource                  *waiting_source;
241
242         tpl_gmutex                    mutex;
243         tpl_gcond                     cond;
244
245         tpl_wl_egl_surface_t         *wl_egl_surface;
246 };
247
248 #if TIZEN_FEATURE_ENABLE
249 struct pst_feedback {
250         /* to get presentation feedback from display server */
251         struct wp_presentation_feedback *presentation_feedback;
252
253         int32_t                          pst_sync_fd;
254
255         int                              bo_name;
256         tpl_wl_egl_surface_t            *wl_egl_surface;
257
258 };
259 #endif
260
261 static const struct wl_buffer_listener wl_buffer_release_listener;
262
263 static int
264 _get_tbm_surface_bo_name(tbm_surface_h tbm_surface);
265 static void
266 _print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface);
267 static tpl_bool_t
268 _check_buffer_validate(tpl_wl_egl_surface_t *wl_egl_surface, tbm_surface_h tbm_surface);
269 static void
270 __cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer);
271 static tpl_wl_egl_buffer_t *
272 _get_wl_egl_buffer(tbm_surface_h tbm_surface);
273 static int
274 _write_to_eventfd(int eventfd);
275 static void
276 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface);
277 static tpl_result_t
278 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface);
279 static void
280 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
281                                                   tpl_wl_egl_buffer_t *wl_egl_buffer);
282 static void
283 __cb_surface_vblank_free(void *data);
284
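/* Allocates the zero-initialized tizen_private block attached to a
 * wl_egl_window (driver_private); the callback slots are initialized to NULL
 * here and filled in elsewhere (see the __cb_* callbacks below). */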
285 static struct tizen_private *
286 tizen_private_create()
287 {
288         struct tizen_private *private = NULL;
289         private = (struct tizen_private *)calloc(1, sizeof(struct tizen_private));
290         if (private) {
291                 private->magic = WL_EGL_TIZEN_MAGIC;
292                 private->rotation = 0;
293                 private->frontbuffer_mode = 0;
294                 private->transform = 0;
295                 private->window_transform = 0;
296                 private->serial = 0;
297
298                 private->data = NULL;
299                 private->rotate_callback = NULL;
300                 private->get_rotation_capability = NULL;
301                 private->set_window_serial_callback = NULL;
302                 private->set_frontbuffer_callback = NULL;
303                 private->create_commit_sync_fd = NULL;
304                 private->create_presentation_sync_fd = NULL;
305                 private->merge_sync_fds = NULL;
306         }
307
308         return private;
309 }
310
311 static tpl_bool_t
312 _check_native_handle_is_wl_display(tpl_handle_t display)
313 {
314         struct wl_interface *wl_egl_native_dpy = *(void **) display;
315
316         if (!wl_egl_native_dpy) {
317                 TPL_ERR("Invalid parameter. native_display(%p)", wl_egl_native_dpy);
318                 return TPL_FALSE;
319         }
320
321         /* MAGIC CHECK: A native display handle is a wl_display if the dereferenced first value
322            is the address of the wl_display_interface structure. */
323         if (wl_egl_native_dpy == &wl_display_interface)
324                 return TPL_TRUE;
325
326         if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
327                                 strlen(wl_display_interface.name)) == 0) {
328                 return TPL_TRUE;
329         }
330
331         return TPL_FALSE;
332 }
333
334 static tpl_bool_t
335 __thread_func_tdm_dispatch(tpl_gsource *gsource, uint64_t message)
336 {
337         tpl_wl_egl_display_t       *wl_egl_display = NULL;
338         tdm_error                   tdm_err = TDM_ERROR_NONE;
339
340         TPL_IGNORE(message);
341
342         wl_egl_display = (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
343         if (!wl_egl_display) {
344                 TPL_ERR("Failed to get wl_egl_display from gsource(%p)", gsource);
345                 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
346                 return TPL_FALSE;
347         }
348
349         tdm_err = tdm_client_handle_events(wl_egl_display->tdm.tdm_client);
350
351         /* If an error occurs in tdm_client_handle_events, it cannot be recovered.
352          * When tdm_source is no longer available due to an unexpected situation,
353          * wl_egl_thread must remove it from the thread and destroy it.
354          * In that case, tdm_vblank can no longer be used for surfaces and displays
355          * that used this tdm_source. */
356         if (tdm_err != TDM_ERROR_NONE) {
357                 TPL_ERR("Error occurred in tdm_client_handle_events. tdm_err(%d)",
358                                 tdm_err);
359                 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
360
361                 tpl_gsource_destroy(gsource, TPL_FALSE);
362
363                 wl_egl_display->tdm.tdm_source = NULL;
364
365                 return TPL_FALSE;
366         }
367
368         return TPL_TRUE;
369 }
370
371 static void
372 __thread_func_tdm_finalize(tpl_gsource *gsource)
373 {
374         tpl_wl_egl_display_t *wl_egl_display = NULL;
375
376         wl_egl_display = (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
377
378         tpl_gmutex_lock(&wl_egl_display->tdm.tdm_mutex);
379
380         TPL_INFO("[TDM_CLIENT_FINI]",
381                          "wl_egl_display(%p) tdm_client(%p) tdm_display_fd(%d)",
382                          wl_egl_display, wl_egl_display->tdm.tdm_client,
383                          wl_egl_display->tdm.tdm_display_fd);
384
385         if (wl_egl_display->tdm.tdm_client) {
386
387                 if (wl_egl_display->tdm.surface_vblanks) {
388                         __tpl_list_free(wl_egl_display->tdm.surface_vblanks,
389                                     __cb_surface_vblank_free);
390                         wl_egl_display->tdm.surface_vblanks = NULL;
391                 }
392
393                 tdm_client_destroy(wl_egl_display->tdm.tdm_client);
394                 wl_egl_display->tdm.tdm_client = NULL;
395                 wl_egl_display->tdm.tdm_display_fd = -1;
396                 wl_egl_display->tdm.tdm_source = NULL;
397         }
398
399         wl_egl_display->use_wait_vblank = TPL_FALSE;
400         wl_egl_display->tdm.tdm_initialized = TPL_FALSE;
401         wl_egl_display->tdm.gsource_finalized = TPL_TRUE;
402
403         tpl_gcond_signal(&wl_egl_display->tdm.tdm_cond);
404         tpl_gmutex_unlock(&wl_egl_display->tdm.tdm_mutex);
405 }
406
407 static tpl_gsource_functions tdm_funcs = {
408         .prepare  = NULL,
409         .check    = NULL,
410         .dispatch = __thread_func_tdm_dispatch,
411         .finalize = __thread_func_tdm_finalize,
412 };
413
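/* Creates the tdm_client and fetches its fd so it can be polled from
 * wl_egl_thread; called from _thread_init() when use_wait_vblank is enabled. */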
414 tpl_result_t
415 _thread_tdm_init(tpl_wl_egl_display_t *wl_egl_display)
416 {
417         tdm_client       *tdm_client = NULL;
418         int               tdm_display_fd = -1;
419         tdm_error         tdm_err = TDM_ERROR_NONE;
420
421         tdm_client = tdm_client_create(&tdm_err);
422         if (!tdm_client || tdm_err != TDM_ERROR_NONE) {
423                 TPL_ERR("TDM_ERROR:%d Failed to create tdm_client\n", tdm_err);
424                 return TPL_ERROR_INVALID_OPERATION;
425         }
426
427         tdm_err = tdm_client_get_fd(tdm_client, &tdm_display_fd);
428         if (tdm_display_fd < 0 || tdm_err != TDM_ERROR_NONE) {
429                 TPL_ERR("TDM_ERROR:%d Failed to get tdm_client fd\n", tdm_err);
430                 tdm_client_destroy(tdm_client);
431                 return TPL_ERROR_INVALID_OPERATION;
432         }
433
434         wl_egl_display->tdm.tdm_display_fd  = tdm_display_fd;
435         wl_egl_display->tdm.tdm_client      = tdm_client;
436         wl_egl_display->tdm.tdm_source      = NULL;
437         wl_egl_display->tdm.tdm_initialized = TPL_TRUE;
438         wl_egl_display->tdm.surface_vblanks = __tpl_list_alloc();
439
440         TPL_INFO("[TDM_CLIENT_INIT]",
441                          "wl_egl_display(%p) tdm_client(%p) tdm_display_fd(%d)",
442                          wl_egl_display, tdm_client, tdm_display_fd);
443
444         return TPL_ERROR_NONE;
445 }
446
447 #define IMPL_TIZEN_SURFACE_SHM_VERSION 2
448
449
450 static void
451 __cb_wl_registry_global_callback(void *data, struct wl_registry *wl_registry,
452                                                           uint32_t name, const char *interface,
453                                                           uint32_t version)
454 {
455 #if TIZEN_FEATURE_ENABLE
456         tpl_wl_egl_display_t *wl_egl_display = (tpl_wl_egl_display_t *)data;
457
458         if (!strcmp(interface, "tizen_surface_shm")) {
459                 wl_egl_display->tss =
460                         wl_registry_bind(wl_registry,
461                                                          name,
462                                                          &tizen_surface_shm_interface,
463                                                          ((version < IMPL_TIZEN_SURFACE_SHM_VERSION) ?
464                                                          version : IMPL_TIZEN_SURFACE_SHM_VERSION));
465                 wl_egl_display->use_tss = TPL_TRUE;
466         } else if (!strcmp(interface, wp_presentation_interface.name)) {
467                 wl_egl_display->presentation =
468                                         wl_registry_bind(wl_registry,
469                                                                          name, &wp_presentation_interface, 1);
470                 TPL_DEBUG("bind wp_presentation_interface");
471         } else if (strcmp(interface, "zwp_linux_explicit_synchronization_v1") == 0) {
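                /* TPL_EFS=0 in the environment disables explicit fence sync
                 * even if the compositor advertises
                 * zwp_linux_explicit_synchronization_v1. */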
472                 char *env = tpl_getenv("TPL_EFS");
473                 if (env && !atoi(env)) {
474                         wl_egl_display->use_explicit_sync = TPL_FALSE;
475                 } else {
476                         wl_egl_display->explicit_sync =
477                                         wl_registry_bind(wl_registry, name,
478                                                                          &zwp_linux_explicit_synchronization_v1_interface, 1);
479                         wl_egl_display->use_explicit_sync = TPL_TRUE;
480                         TPL_DEBUG("bind zwp_linux_explicit_synchronization_v1_interface");
481                 }
482         }
483 #endif
484 }
485
486 static void
487 __cb_wl_registry_global_remove_callback(void *data,
488                                                                          struct wl_registry *wl_registry,
489                                                                          uint32_t name)
490 {
491 }
492
493 static const struct wl_registry_listener registry_listener = {
494         __cb_wl_registry_global_callback,
495         __cb_wl_registry_global_remove_callback
496 };
497
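/* Logs a wl_display failure once per errno value; for EPROTO it additionally
 * queries and prints the wayland protocol error (interface, code, proxy id). */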
498 static void
499 _wl_display_print_err(tpl_wl_egl_display_t *wl_egl_display,
500                                           const char *func_name)
501 {
502         int dpy_err;
503         char buf[1024];
504         strerror_r(errno, buf, sizeof(buf));
505
506         if (wl_egl_display->last_error == errno)
507                 return;
508
509         TPL_ERR("failed to %s. error:%d(%s)", func_name, errno, buf);
510
511         dpy_err = wl_display_get_error(wl_egl_display->wl_display);
512         if (dpy_err == EPROTO) {
513                 const struct wl_interface *err_interface;
514                 uint32_t err_proxy_id, err_code;
515                 err_code = wl_display_get_protocol_error(wl_egl_display->wl_display,
516                                                                                                  &err_interface,
517                                                                                                  &err_proxy_id);
518                 TPL_ERR("[Protocol Error] interface: %s, error_code: %d, proxy_id: %d",
519                                 err_interface->name, err_code, err_proxy_id);
520         }
521
522         wl_egl_display->last_error = errno;
523 }
524
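/* Runs on wl_egl_thread: creates a temporary queue for registry binding plus
 * the persistent ev_queue, initializes wayland_tbm_client, binds the optional
 * tizen_surface_shm / wp_presentation / explicit-sync globals, and moves the
 * bound protocol objects onto ev_queue. */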
525 tpl_result_t
526 _thread_wl_display_init(tpl_wl_egl_display_t *wl_egl_display)
527 {
528         struct wl_registry *registry                = NULL;
529         struct wl_event_queue *queue                = NULL;
530         struct wl_display *display_wrapper          = NULL;
531         struct wl_proxy *wl_tbm                     = NULL;
532         struct wayland_tbm_client *wl_tbm_client    = NULL;
533         int ret;
534         tpl_result_t result = TPL_ERROR_NONE;
535
536         queue = wl_display_create_queue(wl_egl_display->wl_display);
537         if (!queue) {
538                 TPL_ERR("Failed to create wl_queue wl_display(%p)",
539                                 wl_egl_display->wl_display);
540                 result = TPL_ERROR_INVALID_OPERATION;
541                 goto fini;
542         }
543
544         wl_egl_display->ev_queue = wl_display_create_queue(wl_egl_display->wl_display);
545         if (!wl_egl_display->ev_queue) {
546                 TPL_ERR("Failed to create wl_queue wl_display(%p)",
547                                 wl_egl_display->wl_display);
548                 result = TPL_ERROR_INVALID_OPERATION;
549                 goto fini;
550         }
551
552         display_wrapper = wl_proxy_create_wrapper(wl_egl_display->wl_display);
553         if (!display_wrapper) {
554                 TPL_ERR("Failed to create a proxy wrapper of wl_display(%p)",
555                                 wl_egl_display->wl_display);
556                 result = TPL_ERROR_INVALID_OPERATION;
557                 goto fini;
558         }
559
560         wl_proxy_set_queue((struct wl_proxy *)display_wrapper, queue);
561
562         registry = wl_display_get_registry(display_wrapper);
563         if (!registry) {
564                 TPL_ERR("Failed to create wl_registry");
565                 result = TPL_ERROR_INVALID_OPERATION;
566                 goto fini;
567         }
568
569         wl_proxy_wrapper_destroy(display_wrapper);
570         display_wrapper = NULL;
571
572         wl_tbm_client = wayland_tbm_client_init(wl_egl_display->wl_display);
573         if (!wl_tbm_client) {
574                 TPL_ERR("Failed to initialize wl_tbm_client.");
575                 result = TPL_ERROR_INVALID_CONNECTION;
576                 goto fini;
577         }
578
579         wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(wl_tbm_client);
580         if (!wl_tbm) {
581                 TPL_ERR("Failed to get wl_tbm from wl_tbm_client(%p)", wl_tbm_client);
582                 result = TPL_ERROR_INVALID_CONNECTION;
583                 goto fini;
584         }
585
586         wl_proxy_set_queue(wl_tbm, wl_egl_display->ev_queue);
587         wl_egl_display->wl_tbm_client = wl_tbm_client;
588
589         if (wl_registry_add_listener(registry, &registry_listener,
590                                                                  wl_egl_display)) {
591                 TPL_ERR("Failed to wl_registry_add_listener");
592                 result = TPL_ERROR_INVALID_OPERATION;
593                 goto fini;
594         }
595
596         ret = wl_display_roundtrip_queue(wl_egl_display->wl_display, queue);
597         if (ret == -1) {
598                 _wl_display_print_err(wl_egl_display, "roundtrip_queue");
599                 result = TPL_ERROR_INVALID_OPERATION;
600                 goto fini;
601         }
602
603 #if TIZEN_FEATURE_ENABLE
604         /* set tizen_surface_shm's queue as client's private queue */
605         if (wl_egl_display->tss) {
606                 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->tss,
607                                                    wl_egl_display->ev_queue);
608                 TPL_LOG_T("WL_EGL", "tizen_surface_shm(%p) init.", wl_egl_display->tss);
609         }
610
611         if (wl_egl_display->presentation) {
612                 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->presentation,
613                                                    wl_egl_display->ev_queue);
614                 TPL_LOG_T("WL_EGL", "wp_presentation(%p) init.",
615                                   wl_egl_display->presentation);
616         }
617
618         if (wl_egl_display->explicit_sync) {
619                 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->explicit_sync,
620                                                    wl_egl_display->ev_queue);
621                 TPL_LOG_T("WL_EGL", "zwp_linux_explicit_synchronization_v1(%p) init.",
622                                   wl_egl_display->explicit_sync);
623         }
624 #endif
625         wl_egl_display->wl_initialized = TPL_TRUE;
626
627         TPL_INFO("[WAYLAND_INIT]",
628                          "wl_egl_display(%p) wl_display(%p) wl_tbm_client(%p) event_queue(%p)",
629                          wl_egl_display, wl_egl_display->wl_display,
630                          wl_egl_display->wl_tbm_client, wl_egl_display->ev_queue);
631 #if TIZEN_FEATURE_ENABLE
632         TPL_INFO("[WAYLAND_INIT]",
633                          "tizen_surface_shm(%p) wp_presentation(%p) explicit_sync(%p)",
634                          wl_egl_display->tss, wl_egl_display->presentation,
635                          wl_egl_display->explicit_sync);
636 #endif
637 fini:
638         if (display_wrapper)
639                 wl_proxy_wrapper_destroy(display_wrapper);
640         if (registry)
641                 wl_registry_destroy(registry);
642         if (queue)
643                 wl_event_queue_destroy(queue);
644
645         return result;
646 }
647
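/* Counterpart of _thread_wl_display_init(): performs a final roundtrip on
 * ev_queue, destroys the bound protocol objects, deinitializes wl_tbm_client,
 * and drops ev_queue. */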
648 void
649 _thread_wl_display_fini(tpl_wl_egl_display_t *wl_egl_display)
650 {
651         /* If wl_egl_display is in prepared state, cancel it */
652         if (wl_egl_display->prepared) {
653                 wl_display_cancel_read(wl_egl_display->wl_display);
654                 wl_egl_display->prepared = TPL_FALSE;
655         }
656
657         if (wl_display_roundtrip_queue(wl_egl_display->wl_display,
658                                                                    wl_egl_display->ev_queue) == -1) {
659                 _wl_display_print_err(wl_egl_display, "roundtrip_queue");
660         }
661
662 #if TIZEN_FEATURE_ENABLE
663         if (wl_egl_display->tss) {
664                 TPL_INFO("[TIZEN_SURFACE_SHM_DESTROY]",
665                                  "wl_egl_display(%p) tizen_surface_shm(%p) fini.",
666                                  wl_egl_display, wl_egl_display->tss);
667                 tizen_surface_shm_destroy(wl_egl_display->tss);
668                 wl_egl_display->tss = NULL;
669         }
670
671         if (wl_egl_display->presentation) {
672                 TPL_INFO("[WP_PRESENTATION_DESTROY]",
673                                  "wl_egl_display(%p) wp_presentation(%p) fini.",
674                                  wl_egl_display, wl_egl_display->presentation);
675                 wp_presentation_destroy(wl_egl_display->presentation);
676                 wl_egl_display->presentation = NULL;
677         }
678
679         if (wl_egl_display->explicit_sync) {
680                 TPL_INFO("[EXPLICIT_SYNC_DESTROY]",
681                                  "wl_egl_display(%p) zwp_linux_explicit_synchronization_v1(%p) fini.",
682                                  wl_egl_display, wl_egl_display->explicit_sync);
683                 zwp_linux_explicit_synchronization_v1_destroy(wl_egl_display->explicit_sync);
684                 wl_egl_display->explicit_sync = NULL;
685         }
686 #endif
687         if (wl_egl_display->wl_tbm_client) {
688                 struct wl_proxy *wl_tbm = NULL;
689
690                 wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(
691                                                                                 wl_egl_display->wl_tbm_client);
692                 if (wl_tbm) {
693                         wl_proxy_set_queue(wl_tbm, NULL);
694                 }
695
696                 TPL_INFO("[WL_TBM_DEINIT]",
697                                  "wl_egl_display(%p) wl_tbm_client(%p)",
698                                  wl_egl_display, wl_egl_display->wl_tbm_client);
699                 wayland_tbm_client_deinit(wl_egl_display->wl_tbm_client);
700                 wl_egl_display->wl_tbm_client = NULL;
701         }
702
703         wl_event_queue_destroy(wl_egl_display->ev_queue);
704
705         wl_egl_display->ev_queue = NULL;
706         wl_egl_display->wl_initialized = TPL_FALSE;
707
708         TPL_INFO("[DISPLAY_FINI]", "wl_egl_display(%p) wl_display(%p)",
709                          wl_egl_display, wl_egl_display->wl_display);
710 }
711
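/* Thread entry hook passed to tpl_gthread_create(); performs the wayland and
 * (optionally) tdm initialization in the context of wl_egl_thread. */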
712 static void*
713 _thread_init(void *data)
714 {
715         tpl_wl_egl_display_t *wl_egl_display = (tpl_wl_egl_display_t *)data;
716
717         if (_thread_wl_display_init(wl_egl_display) != TPL_ERROR_NONE) {
718                 TPL_ERR("Failed to initialize wl_egl_display(%p) with wl_display(%p)",
719                                 wl_egl_display, wl_egl_display->wl_display);
720         }
721
722         if (wl_egl_display->use_wait_vblank &&
723                 _thread_tdm_init(wl_egl_display) != TPL_ERROR_NONE) {
724                 TPL_WARN("Failed to initialize tdm-client. TPL_WAIT_VBLANK:DISABLED");
725         }
726
727         return wl_egl_display;
728 }
729
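/* The disp_* prepare/check/dispatch callbacks below implement the standard
 * multi-threaded wayland event-reading pattern on ev_queue:
 * prepare_read_queue -> poll the display fd -> read_events (or cancel_read)
 * -> dispatch_queue_pending, with wl_display_flush() after each pass. */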
730 static tpl_bool_t
731 __thread_func_disp_prepare(tpl_gsource *gsource)
732 {
733         tpl_wl_egl_display_t *wl_egl_display =
734                 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
735
736         /* If this wl_egl_display is already prepared,
737          * do nothing in this function. */
738         if (wl_egl_display->prepared)
739                 return TPL_FALSE;
740
741         /* If there is a last_error, there is no need to poll,
742          * so skip directly to dispatch.
743          * prepare -> dispatch */
744         if (wl_egl_display->last_error)
745                 return TPL_TRUE;
746
747         while (wl_display_prepare_read_queue(wl_egl_display->wl_display,
748                                                                                  wl_egl_display->ev_queue) != 0) {
749                 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
750                                                                                           wl_egl_display->ev_queue) == -1) {
751                         _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
752                 }
753         }
754
755         wl_egl_display->prepared = TPL_TRUE;
756
757         wl_display_flush(wl_egl_display->wl_display);
758
759         return TPL_FALSE;
760 }
761
762 static tpl_bool_t
763 __thread_func_disp_check(tpl_gsource *gsource)
764 {
765         tpl_wl_egl_display_t *wl_egl_display =
766                 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
767         tpl_bool_t ret = TPL_FALSE;
768
769         if (!wl_egl_display->prepared)
770                 return ret;
771
772         /* If prepared but last_error is set, cancel_read is executed
773          * and FALSE is returned. This leads to G_SOURCE_REMOVE:
774          * disp_prepare runs again, skips straight past disp_check because
775          * of last_error, and disp_dispatch then removes the source.
776          * check -> prepare -> dispatch -> G_SOURCE_REMOVE */
777         if (wl_egl_display->prepared && wl_egl_display->last_error) {
778                 wl_display_cancel_read(wl_egl_display->wl_display);
779                 return ret;
780         }
781
782         if (tpl_gsource_check_io_condition(gsource)) {
783                 if (wl_display_read_events(wl_egl_display->wl_display) == -1)
784                         _wl_display_print_err(wl_egl_display, "read_event");
785                 ret = TPL_TRUE;
786         } else {
787                 wl_display_cancel_read(wl_egl_display->wl_display);
788                 ret = TPL_FALSE;
789         }
790
791         wl_egl_display->prepared = TPL_FALSE;
792
793         return ret;
794 }
795
796 static tpl_bool_t
797 __thread_func_disp_dispatch(tpl_gsource *gsource, uint64_t message)
798 {
799         tpl_wl_egl_display_t *wl_egl_display =
800                 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
801
802         TPL_IGNORE(message);
803
804         /* If there is a last_error, SOURCE_REMOVE should be returned
805          * to remove the gsource from the main loop,
806          * because wl_egl_display is no longer valid once last_error is set. */
807         if (wl_egl_display->last_error) {
808                 return TPL_FALSE;
809         }
810
811         tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
812         if (tpl_gsource_check_io_condition(gsource)) {
813                 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
814                                                                                           wl_egl_display->ev_queue) == -1) {
815                         _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
816                 }
817         }
818
819         wl_display_flush(wl_egl_display->wl_display);
820         tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
821
822         return TPL_TRUE;
823 }
824
825 static void
826 __thread_func_disp_finalize(tpl_gsource *gsource)
827 {
828         tpl_wl_egl_display_t *wl_egl_display =
829                 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
830
831         tpl_gmutex_lock(&wl_egl_display->disp_mutex);
832         TPL_DEBUG("[FINALIZE] wl_egl_display(%p) tpl_gsource(%p)",
833                           wl_egl_display, gsource);
834
835         if (wl_egl_display->wl_initialized)
836                 _thread_wl_display_fini(wl_egl_display);
837
838         wl_egl_display->gsource_finalized = TPL_TRUE;
839
840         tpl_gcond_signal(&wl_egl_display->disp_cond);
841         tpl_gmutex_unlock(&wl_egl_display->disp_mutex);
842
843         return;
844 }
845
846
847 static tpl_gsource_functions disp_funcs = {
848         .prepare  = __thread_func_disp_prepare,
849         .check    = __thread_func_disp_check,
850         .dispatch = __thread_func_disp_dispatch,
851         .finalize = __thread_func_disp_finalize,
852 };
853
854 static tpl_result_t
855 __tpl_wl_egl_display_init(tpl_display_t *display)
856 {
857         tpl_wl_egl_display_t *wl_egl_display    = NULL;
858
859         TPL_ASSERT(display);
860
861         /* Do not allow default display in wayland. */
862         if (!display->native_handle) {
863                 TPL_ERR("Invalid native handle for display.");
864                 return TPL_ERROR_INVALID_PARAMETER;
865         }
866
867         if (!_check_native_handle_is_wl_display(display->native_handle)) {
868                 TPL_ERR("native_handle(%p) is not wl_display", display->native_handle);
869                 return TPL_ERROR_INVALID_PARAMETER;
870         }
871
872         wl_egl_display = (tpl_wl_egl_display_t *) calloc(1,
873                                                   sizeof(tpl_wl_egl_display_t));
874         if (!wl_egl_display) {
875                 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_display_t.");
876                 return TPL_ERROR_OUT_OF_MEMORY;
877         }
878
879         display->backend.data             = wl_egl_display;
880         display->bufmgr_fd                = -1;
881
882         wl_egl_display->tdm.tdm_initialized   = TPL_FALSE;
883         wl_egl_display->tdm.tdm_client        = NULL;
884         wl_egl_display->tdm.tdm_display_fd    = -1;
885         wl_egl_display->tdm.tdm_source        = NULL;
886
887         wl_egl_display->wl_initialized    = TPL_FALSE;
888
889         wl_egl_display->ev_queue          = NULL;
890         wl_egl_display->wl_display        = (struct wl_display *)display->native_handle;
891         wl_egl_display->last_error        = 0;
892         wl_egl_display->use_tss           = TPL_FALSE;
893         wl_egl_display->use_explicit_sync = TPL_FALSE;   // default disabled
894         wl_egl_display->prepared          = TPL_FALSE;
895         wl_egl_display->gsource_finalized = TPL_FALSE;
896
897 #if TIZEN_FEATURE_ENABLE
898         /* Wayland Interfaces */
899         wl_egl_display->tss               = NULL;
900         wl_egl_display->presentation      = NULL;
901         wl_egl_display->explicit_sync     = NULL;
902 #endif
903         wl_egl_display->wl_tbm_client     = NULL;
904
905         wl_egl_display->use_wait_vblank   = TPL_TRUE;   // default enabled
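        /* Setting TPL_WAIT_VBLANK=0 in the environment disables tdm vblank
         * waiting. */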
906         {
907                 char *env = tpl_getenv("TPL_WAIT_VBLANK");
908                 if (env && !atoi(env)) {
909                         wl_egl_display->use_wait_vblank = TPL_FALSE;
910                 }
911         }
912
913         tpl_gmutex_init(&wl_egl_display->wl_event_mutex);
914
915         tpl_gmutex_init(&wl_egl_display->disp_mutex);
916         tpl_gcond_init(&wl_egl_display->disp_cond);
917
918         /* Create gthread */
919         wl_egl_display->thread = tpl_gthread_create("wl_egl_thread",
920                                                                                                 (tpl_gthread_func)_thread_init,
921                                                                                                 (void *)wl_egl_display);
922         if (!wl_egl_display->thread) {
923                 TPL_ERR("Failed to create wl_egl_thread");
924                 goto free_display;
925         }
926
927         wl_egl_display->disp_source = tpl_gsource_create(wl_egl_display->thread,
928                                                                                                          (void *)wl_egl_display,
929                                                                                                          wl_display_get_fd(wl_egl_display->wl_display),
930                                                                                                          &disp_funcs, SOURCE_TYPE_NORMAL);
931         if (!wl_egl_display->disp_source) {
932                 TPL_ERR("Failed to add native_display(%p) to thread(%p)",
933                                 display->native_handle,
934                                 wl_egl_display->thread);
935                 goto free_display;
936         }
937
938         if (wl_egl_display->use_wait_vblank &&
939                 wl_egl_display->tdm.tdm_initialized) {
940                 tpl_gmutex_init(&wl_egl_display->tdm.tdm_mutex);
941                 tpl_gcond_init(&wl_egl_display->tdm.tdm_cond);
942                 wl_egl_display->tdm.tdm_source = tpl_gsource_create(wl_egl_display->thread,
943                                                                                                                 (void *)wl_egl_display,
944                                                                                                                 wl_egl_display->tdm.tdm_display_fd,
945                                                                                                                 &tdm_funcs, SOURCE_TYPE_NORMAL);
946                 wl_egl_display->tdm.gsource_finalized = TPL_FALSE;
947                 if (!wl_egl_display->tdm.tdm_source) {
948                         TPL_ERR("Failed to create tdm_gsource\n");
949                         goto free_display;
950                 }
951         }
952
953         wl_egl_display->use_wait_vblank = (wl_egl_display->tdm.tdm_initialized &&
954                                                                            (wl_egl_display->tdm.tdm_source != NULL));
955
956         TPL_INFO("[DISPLAY_INIT]",
957                          "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
958                          wl_egl_display,
959                          wl_egl_display->thread,
960                          wl_egl_display->wl_display);
961
962         TPL_INFO("[DISPLAY_INIT]",
963                          "USE_WAIT_VBLANK(%s) TIZEN_SURFACE_SHM(%s) USE_EXPLICIT_SYNC(%s)",
964                          wl_egl_display->use_wait_vblank ? "TRUE" : "FALSE",
965                          wl_egl_display->use_tss ? "TRUE" : "FALSE",
966                          wl_egl_display->use_explicit_sync ? "TRUE" : "FALSE");
967
968         return TPL_ERROR_NONE;
969
970 free_display:
971         if (wl_egl_display->tdm.tdm_source) {
972                 tpl_gmutex_lock(&wl_egl_display->tdm.tdm_mutex);
973                 // Send destroy message to thread
974                 tpl_gsource_destroy(wl_egl_display->tdm.tdm_source, TPL_TRUE);
975                 while (!wl_egl_display->tdm.gsource_finalized) {
976                         tpl_gcond_wait(&wl_egl_display->tdm.tdm_cond, &wl_egl_display->tdm.tdm_mutex);
977                 }
978                 tpl_gmutex_unlock(&wl_egl_display->tdm.tdm_mutex);
979         }
980
981         if (wl_egl_display->disp_source) {
982                 tpl_gmutex_lock(&wl_egl_display->disp_mutex);
983                 // Send destroy message to thread
984                 tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
985                 while (!wl_egl_display->gsource_finalized) {
986                         tpl_gcond_wait(&wl_egl_display->disp_cond, &wl_egl_display->disp_mutex);
987                 }
988                 tpl_gmutex_unlock(&wl_egl_display->disp_mutex);
989         }
990
991         if (wl_egl_display->thread) {
992                 tpl_gthread_destroy(wl_egl_display->thread);
993         }
994
995         tpl_gcond_clear(&wl_egl_display->tdm.tdm_cond);
996         tpl_gmutex_clear(&wl_egl_display->tdm.tdm_mutex);
997         tpl_gcond_clear(&wl_egl_display->disp_cond);
998         tpl_gmutex_clear(&wl_egl_display->disp_mutex);
999
1000         wl_egl_display->thread = NULL;
1001         free(wl_egl_display);
1002
1003         display->backend.data = NULL;
1004         return TPL_ERROR_INVALID_OPERATION;
1005 }
1006
1007 static void
1008 __tpl_wl_egl_display_fini(tpl_display_t *display)
1009 {
1010         tpl_wl_egl_display_t *wl_egl_display;
1011
1012         TPL_ASSERT(display);
1013
1014         wl_egl_display = (tpl_wl_egl_display_t *)display->backend.data;
1015         if (wl_egl_display) {
1016                 TPL_INFO("[DISPLAY_FINI]",
1017                                   "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
1018                                   wl_egl_display,
1019                                   wl_egl_display->thread,
1020                                   wl_egl_display->wl_display);
1021
1022                 if (wl_egl_display->tdm.tdm_source && wl_egl_display->tdm.tdm_initialized) {
1023                         /* This is a protection against unexpected situations in which
1024                          * g_cond_wait cannot work normally.
1025                          * When calling tpl_gsource_destroy() with destroy_in_thread set to TPL_TRUE,
1026                          * the caller should wait with tpl_gcond_wait() in a loop, checking
1027                          * the finalized flag. */
1028                         tpl_gmutex_lock(&wl_egl_display->tdm.tdm_mutex);
1029                         // Send destroy message to thread
1030                         tpl_gsource_destroy(wl_egl_display->tdm.tdm_source, TPL_TRUE);
1031                         while (!wl_egl_display->tdm.gsource_finalized) {
1032                                 tpl_gcond_wait(&wl_egl_display->tdm.tdm_cond, &wl_egl_display->tdm.tdm_mutex);
1033                         }
1034                         wl_egl_display->tdm.tdm_source = NULL;
1035                         tpl_gmutex_unlock(&wl_egl_display->tdm.tdm_mutex);
1036                 }
1037
1038                 if (wl_egl_display->disp_source) {
1039                         tpl_gmutex_lock(&wl_egl_display->disp_mutex);
1040                         // Send destroy message to thread
1041                         tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
1042                         /* This is a protection against unexpected situations in which
1043                          * g_cond_wait cannot work normally.
1044                          * When calling tpl_gsource_destroy() with destroy_in_thread set to TPL_TRUE,
1045                          * the caller should wait with tpl_gcond_wait() in a loop, checking
1046                          * the finalized flag. */
1047                         while (!wl_egl_display->gsource_finalized) {
1048                                 tpl_gcond_wait(&wl_egl_display->disp_cond, &wl_egl_display->disp_mutex);
1049                         }
1050                         wl_egl_display->disp_source = NULL;
1051                         tpl_gmutex_unlock(&wl_egl_display->disp_mutex);
1052                 }
1053
1054                 if (wl_egl_display->thread) {
1055                         tpl_gthread_destroy(wl_egl_display->thread);
1056                         wl_egl_display->thread = NULL;
1057                 }
1058
1059                 tpl_gcond_clear(&wl_egl_display->tdm.tdm_cond);
1060                 tpl_gmutex_clear(&wl_egl_display->tdm.tdm_mutex);
1061                 tpl_gcond_clear(&wl_egl_display->disp_cond);
1062                 tpl_gmutex_clear(&wl_egl_display->disp_mutex);
1063
1064                 tpl_gmutex_clear(&wl_egl_display->wl_event_mutex);
1065
1066                 free(wl_egl_display);
1067         }
1068
1069         display->backend.data = NULL;
1070 }
1071
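/* Only 8-bit RGB window configs are supported: alpha_size 8 maps to
 * TBM_FORMAT_ARGB8888 and alpha_size 0 to TBM_FORMAT_XRGB8888. */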
1072 static tpl_result_t
1073 __tpl_wl_egl_display_query_config(tpl_display_t *display,
1074                                                                   tpl_surface_type_t surface_type,
1075                                                                   int red_size, int green_size,
1076                                                                   int blue_size, int alpha_size,
1077                                                                   int color_depth, int *native_visual_id,
1078                                                                   tpl_bool_t *is_slow)
1079 {
1080         TPL_ASSERT(display);
1081
1082         if (surface_type == TPL_SURFACE_TYPE_WINDOW && red_size == 8 &&
1083                         green_size == 8 && blue_size == 8 &&
1084                         (color_depth == 32 || color_depth == 24)) {
1085
1086                 if (alpha_size == 8) {
1087                         if (native_visual_id) *native_visual_id = TBM_FORMAT_ARGB8888;
1088                         if (is_slow) *is_slow = TPL_FALSE;
1089                         return TPL_ERROR_NONE;
1090                 }
1091                 if (alpha_size == 0) {
1092                         if (native_visual_id) *native_visual_id = TBM_FORMAT_XRGB8888;
1093                         if (is_slow) *is_slow = TPL_FALSE;
1094                         return TPL_ERROR_NONE;
1095                 }
1096         }
1097
1098         return TPL_ERROR_INVALID_PARAMETER;
1099 }
1100
1101 static tpl_result_t
1102 __tpl_wl_egl_display_filter_config(tpl_display_t *display, int *visual_id,
1103                                                                    int alpha_size)
1104 {
1105         TPL_IGNORE(display);
1106         TPL_IGNORE(visual_id);
1107         TPL_IGNORE(alpha_size);
1108         return TPL_ERROR_NONE;
1109 }
1110
1111 static tpl_result_t
1112 __tpl_wl_egl_display_get_window_info(tpl_display_t *display,
1113                                                                          tpl_handle_t window, int *width,
1114                                                                          int *height, tbm_format *format,
1115                                                                          int depth, int a_size)
1116 {
1117         tpl_result_t ret = TPL_ERROR_NONE;
1118         struct wl_egl_window *wl_egl_window = (struct wl_egl_window *)window;
1119
1120         TPL_ASSERT(display);
1121         TPL_ASSERT(window);
1122
1123         if (!wl_egl_window) {
1124                 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", window);
1125                 return TPL_ERROR_INVALID_PARAMETER;
1126         }
1127
1128         if (width) *width = wl_egl_window->width;
1129         if (height) *height = wl_egl_window->height;
1130         if (format) {
1131                 struct tizen_private *tizen_private =
1132                                 (struct tizen_private *)wl_egl_window->driver_private;
1133                 if (tizen_private && tizen_private->data) {
1134                         tpl_wl_egl_surface_t *wl_egl_surface =
1135                                 (tpl_wl_egl_surface_t *)tizen_private->data;
1136                         *format = wl_egl_surface->format;
1137                 } else {
1138                         if (a_size == 8)
1139                                 *format = TBM_FORMAT_ARGB8888;
1140                         else
1141                                 *format = TBM_FORMAT_XRGB8888;
1142                 }
1143         }
1144
1145         return ret;
1146 }
1147
1148 static tpl_result_t
1149 __tpl_wl_egl_display_get_pixmap_info(tpl_display_t *display,
1150                                                                          tpl_handle_t pixmap, int *width,
1151                                                                          int *height, tbm_format *format)
1152 {
1153         tbm_surface_h   tbm_surface = NULL;
1154
1155         if (!pixmap) {
1156                 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", pixmap);
1157                 return TPL_ERROR_INVALID_PARAMETER;
1158         }
1159
1160         tbm_surface = wayland_tbm_server_get_surface(NULL,
1161                                                                                                  (struct wl_resource *)pixmap);
1162         if (!tbm_surface) {
1163                 TPL_ERR("Failed to get tbm_surface from wayland_tbm.");
1164                 return TPL_ERROR_INVALID_PARAMETER;
1165         }
1166
1167         if (width) *width = tbm_surface_get_width(tbm_surface);
1168         if (height) *height = tbm_surface_get_height(tbm_surface);
1169         if (format) *format = tbm_surface_get_format(tbm_surface);
1170
1171         return TPL_ERROR_NONE;
1172 }
1173
1174 static tbm_surface_h
1175 __tpl_wl_egl_display_get_buffer_from_native_pixmap(tpl_handle_t pixmap)
1176 {
1177         tbm_surface_h tbm_surface = NULL;
1178
1179         TPL_ASSERT(pixmap);
1180
1181         tbm_surface = wayland_tbm_server_get_surface(NULL,
1182                                                                                                  (struct wl_resource *)pixmap);
1183         if (!tbm_surface) {
1184                 TPL_ERR("Failed to get tbm_surface_h from wayland_tbm.");
1185                 return NULL;
1186         }
1187
1188         return tbm_surface;
1189 }
1190
1191 tpl_bool_t
1192 __tpl_display_choose_backend_wl_egl_thread(tpl_handle_t native_dpy)
1193 {
1194         struct wl_interface *wl_egl_native_dpy = *(void **) native_dpy;
1195
1196         TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_native_dpy, TPL_FALSE);
1197
1198         /* MAGIC CHECK: A native display handle is a wl_display if the dereferenced first value
1199            is the address of the wl_display_interface structure. */
1200         if (wl_egl_native_dpy == &wl_display_interface)
1201                 return TPL_TRUE;
1202
1203         if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
1204                                 strlen(wl_display_interface.name)) == 0) {
1205                 return TPL_TRUE;
1206         }
1207
1208         return TPL_FALSE;
1209 }
1210
1211 /* -- BEGIN -- wl_egl_window callback functions */
1212 static void
1213 __cb_destroy_callback(void *private)
1214 {
1215         struct tizen_private *tizen_private  = (struct tizen_private *)private;
1216         tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1217
1218         if (!tizen_private) {
1219                 TPL_LOG_B("WL_EGL", "[DESTROY_CB] Already destroyed surface");
1220                 return;
1221         }
1222
1223         wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1224         if (wl_egl_surface) {
1225                 TPL_WARN("[DESTROY_CB][!!!ABNORMAL BEHAVIOR!!!] wl_egl_window(%p) is destroyed.",
1226                                  wl_egl_surface->wl_egl_window);
1227                 TPL_WARN("[DESTROY_CB] native window should be destroyed after eglDestroySurface.");
1228
1229                 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1230                 wl_egl_surface->wl_egl_window->destroy_window_callback = NULL;
1231                 wl_egl_surface->wl_egl_window->resize_callback = NULL;
1232                 wl_egl_surface->wl_egl_window->driver_private = NULL;
1233                 wl_egl_surface->wl_egl_window = NULL;
1234                 wl_egl_surface->wl_surface = NULL;
1235
1236                 tizen_private->set_window_serial_callback = NULL;
1237                 tizen_private->rotate_callback = NULL;
1238                 tizen_private->get_rotation_capability = NULL;
1239                 tizen_private->set_frontbuffer_callback = NULL;
1240                 tizen_private->create_commit_sync_fd = NULL;
1241                 tizen_private->create_presentation_sync_fd = NULL;
1242                 tizen_private->data = NULL;
1243
1244                 free(tizen_private);
1245                 tizen_private = NULL;
1246                 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1247         }
1248 }
1249
1250 static void
1251 __cb_resize_callback(struct wl_egl_window *wl_egl_window, void *private)
1252 {
1253         TPL_ASSERT(private);
1254         TPL_ASSERT(wl_egl_window);
1255
1256         struct tizen_private *tizen_private  = (struct tizen_private *)private;
1257         tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1258         int cur_w, cur_h, req_w, req_h, format;
1259
1260         if (!wl_egl_surface) {
1261                 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1262                                 wl_egl_window);
1263                 return;
1264         }
1265
1266         format = wl_egl_surface->format;
1267         cur_w = wl_egl_surface->width;
1268         cur_h = wl_egl_surface->height;
1269         req_w = wl_egl_window->width;
1270         req_h = wl_egl_window->height;
1271
1272         TPL_INFO("[WINDOW_RESIZE]",
1273                          "wl_egl_surface(%p) wl_egl_window(%p) (%dx%d) -> (%dx%d)",
1274                          wl_egl_surface, wl_egl_window, cur_w, cur_h, req_w, req_h);
1275
1276         if (tbm_surface_queue_reset(wl_egl_surface->tbm_queue, req_w, req_h, format)
1277                         != TBM_SURFACE_QUEUE_ERROR_NONE) {
1278                 TPL_ERR("Failed to reset tbm_surface_queue(%p)", wl_egl_surface->tbm_queue);
1279                 return;
1280         }
1281 }
1282 /* -- END -- wl_egl_window callback functions */
1283
1284 /* -- BEGIN -- wl_egl_window tizen private callback functions */
1285
1286 /* There is no use case for the prerotation callback below. */
1287 static void
1288 __cb_rotate_callback(struct wl_egl_window *wl_egl_window, void *private)
1289 {
1290         TPL_ASSERT(private);
1291         TPL_ASSERT(wl_egl_window);
1292
1293         struct tizen_private *tizen_private  = (struct tizen_private *)private;
1294         tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1295         int rotation = tizen_private->rotation;
1296
1297         if (!wl_egl_surface) {
1298                 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1299                                 wl_egl_window);
1300                 return;
1301         }
1302
1303         TPL_INFO("[WINDOW_ROTATE]",
1304                          "wl_egl_surface(%p) wl_egl_window(%p) (%d) -> (%d)",
1305                          wl_egl_surface, wl_egl_window,
1306                          wl_egl_surface->rotation, rotation);
1307
1308         wl_egl_surface->rotation = rotation;
1309 }
1310
1311 /* There is no use case for the prerotation callback below. */
1312 static int
1313 __cb_get_rotation_capability(struct wl_egl_window *wl_egl_window,
1314                                                          void *private)
1315 {
1316         TPL_ASSERT(private);
1317         TPL_ASSERT(wl_egl_window);
1318
1319         int rotation_capability              = WL_EGL_WINDOW_TIZEN_CAPABILITY_NONE;
1320         struct tizen_private *tizen_private  = (struct tizen_private *)private;
1321         tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1322
1323         if (!wl_egl_surface) {
1324                 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1325                                 wl_egl_window);
1326                 return rotation_capability;
1327         }
1328
1329         if (wl_egl_surface->prerotation_capability == TPL_TRUE)
1330                 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_SUPPORTED;
1331         else
1332                 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_UNSUPPORTED;
1333
1335         return rotation_capability;
1336 }
1337
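/* Stores the serial passed from the application so that it is used for the
 * buffers committed afterwards instead of the internally generated serial. */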
1338 static void
1339 __cb_set_window_serial_callback(struct wl_egl_window *wl_egl_window,
1340                                                                 void *private, unsigned int serial)
1341 {
1342         TPL_ASSERT(private);
1343         TPL_ASSERT(wl_egl_window);
1344
1345         struct tizen_private *tizen_private  = (struct tizen_private *)private;
1346         tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1347
1348         if (!wl_egl_surface) {
1349                 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1350                                 wl_egl_window);
1351                 return;
1352         }
1353
1354         wl_egl_surface->set_serial_is_used = TPL_TRUE;
1355         wl_egl_surface->serial = serial;
1356 }
1357
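/* Creates (on first use) an eventfd used for commit synchronization and
 * returns a dup()ed fd; the caller owns and must close the returned fd.
 * Illustrative usage (not part of this file): a waiter would typically
 * poll() the returned fd and read() the 8-byte eventfd counter once it
 * becomes readable. */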
1358 static int
1359 __cb_create_commit_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1360 {
1361         TPL_ASSERT(private);
1362         TPL_ASSERT(wl_egl_window);
1363
1364         int commit_sync_fd = -1;
1365
1366         struct tizen_private *tizen_private  = (struct tizen_private *)private;
1367         tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1368
1369         if (!wl_egl_surface) {
1370                 TPL_ERR("Invalid parameter. wl_egl_surface(%p) is NULL", wl_egl_surface);
1371                 return -1;
1372         }
1373
1374         tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
1375
1376         if (wl_egl_surface->commit_sync.fd != -1) {
1377                 commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1378                 TRACE_MARK("[ONLY_DUP] commit_sync_fd(%d) dup(%d)",
1379                                    wl_egl_surface->commit_sync.fd, commit_sync_fd);
1380                 TPL_DEBUG("[DUP_COMMIT_SYNC] wl_egl_surface(%p) commit_sync_fd(%d) dup(%d)",
1381                                   wl_egl_surface, wl_egl_surface->commit_sync.fd, commit_sync_fd);
1382                 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1383                 return commit_sync_fd;
1384         }
1385
1386         wl_egl_surface->commit_sync.fd = eventfd(0, EFD_CLOEXEC);
1387         if (wl_egl_surface->commit_sync.fd == -1) {
1388                 TPL_ERR("Failed to create commit_sync_fd. wl_egl_surface(%p)",
1389                                 wl_egl_surface);
1390                 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1391                 return -1;
1392         }
1393
1394         commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1395
1396         TRACE_MARK("[CREATE] commit_sync_fd(%d) dup(%d)",
1397                            wl_egl_surface->commit_sync.fd, commit_sync_fd);
1398         TPL_DEBUG("[CREATE_COMMIT_SYNC] wl_egl_surface(%p) commit_sync_fd(%d)",
1399                           wl_egl_surface, commit_sync_fd);
1400
1401         tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1402
1403         return commit_sync_fd;
1404 }
1405
1406 #if TIZEN_FEATURE_ENABLE
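/* Same pattern as __cb_create_commit_sync_fd, but for presentation feedback:
 * creates (on first use) an eventfd and returns a dup()ed fd owned by the
 * caller. */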
1407 static int
1408 __cb_create_presentation_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1409 {
1410         TPL_ASSERT(private);
1411         TPL_ASSERT(wl_egl_window);
1412
1413         int presentation_sync_fd = -1;
1414
1415         struct tizen_private *tizen_private  = (struct tizen_private *)private;
1416         tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1417
1418         if (!wl_egl_surface) {
1419                 TPL_ERR("Invalid parameter. wl_egl_surface is NULL");
1420                 return -1;
1421         }
1422
1423         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
1424         if (wl_egl_surface->presentation_sync.fd != -1) {
1425                 presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1426                 TRACE_MARK("[ONLY_DUP] presentation_sync_fd(%d) dup(%d)",
1427                                    wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1428                 TPL_DEBUG("[DUP_PRESENTATION_SYNC] wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1429                                   wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1430                 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1431                 return presentation_sync_fd;
1432         }
1433
1434         wl_egl_surface->presentation_sync.fd = eventfd(0, EFD_CLOEXEC);
1435         if (wl_egl_surface->presentation_sync.fd == -1) {
1436                 TPL_ERR("Failed to create presentation_sync_fd. wl_egl_surface(%p)",
1437                                 wl_egl_surface);
1438                 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1439                 return -1;
1440         }
1441
1442         presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1443         TRACE_MARK("[CREATE] presentation_sync_fd(%d) dup(%d)",
1444                            wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1445         TPL_DEBUG("[CREATE_PRESENTATION_SYNC] wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1446                           wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1447
1448         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1449
1450         return presentation_sync_fd;
1451 }
1452 /* -- END -- wl_egl_window tizen private callback functions */
1453
1454 /* -- BEGIN -- tizen_surface_shm_flusher_listener */
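/* The compositor requests the client to flush its buffers; all buffers of
 * the tbm_surface_queue are flushed. The free_flush variant below flushes
 * only the free (unused) buffers. */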
1455 static void __cb_tss_flusher_flush_callback(void *data,
1456                 struct tizen_surface_shm_flusher *tss_flusher)
1457 {
1458         tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1459         tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1460
1461         TPL_INFO("[BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1462                          wl_egl_surface, wl_egl_surface->tbm_queue);
1463
1464         _print_buffer_lists(wl_egl_surface);
1465
1466         tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue);
1467         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1468                 TPL_ERR("Failed to flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
1469                 return;
1470         }
1471 }
1472
1473 static void __cb_tss_flusher_free_flush_callback(void *data,
1474                 struct tizen_surface_shm_flusher *tss_flusher)
1475 {
1476         tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1477         tbm_surface_queue_error_e tsq_err    = TBM_SURFACE_QUEUE_ERROR_NONE;
1478
1479         TPL_INFO("[FREE_BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1480                          wl_egl_surface, wl_egl_surface->tbm_queue);
1481
1482         _print_buffer_lists(wl_egl_surface);
1483
1484         tsq_err = tbm_surface_queue_free_flush(wl_egl_surface->tbm_queue);
1485         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1486                 TPL_ERR("Failed to free flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
1487                 return;
1488         }
1489 }
1490
1491 static const struct tizen_surface_shm_flusher_listener
1492 tss_flusher_listener = {
1493         __cb_tss_flusher_flush_callback,
1494         __cb_tss_flusher_free_flush_callback
1495 };
1496 /* -- END -- tizen_surface_shm_flusher_listener */
1497 #endif
1498
1499 /* -- BEGIN -- tbm_surface_queue callback functions */
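/* Called when the tbm_surface_queue is reset (e.g. resized or its activation
 * state changed by the server). Marks the surface with reset = TPL_TRUE and
 * notifies the owner through surface->reset_cb. */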
1500 static void
1501 __cb_tbm_queue_reset_callback(tbm_surface_queue_h tbm_queue,
1502                                                                           void *data)
1503 {
1504         tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1505         tpl_wl_egl_display_t *wl_egl_display = NULL;
1506         tpl_surface_t *surface = NULL;
1507         tpl_bool_t is_activated = TPL_FALSE;
1508         int width, height;
1509
1510         wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1511         TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1512
1513         wl_egl_display = wl_egl_surface->wl_egl_display;
1514         TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
1515
1516         surface = wl_egl_surface->tpl_surface;
1517         TPL_CHECK_ON_NULL_RETURN(surface);
1518
1519         /* When the queue is resized, change the reset flag to TPL_TRUE to reflect
1520          * the changed window size at the next frame. */
1521         width = tbm_surface_queue_get_width(tbm_queue);
1522         height = tbm_surface_queue_get_height(tbm_queue);
1523         if (surface->width != width || surface->height != height) {
1524                 TPL_INFO("[QUEUE_RESIZE]",
1525                                  "wl_egl_surface(%p) tbm_queue(%p) (%dx%d) -> (%dx%d)",
1526                                  wl_egl_surface, tbm_queue,
1527                                  surface->width, surface->height, width, height);
1528         }
1529
1530         /* When queue_reset_callback is called and is_activated differs from its
1531          * previous state, change the reset flag to TPL_TRUE so that a new buffer
1532          * with the changed state (ACTIVATED/DEACTIVATED) is used at the next frame. */
1533         is_activated = wayland_tbm_client_queue_check_activate(wl_egl_display->wl_tbm_client,
1534                                                                                                                    wl_egl_surface->tbm_queue);
1535         if (wl_egl_surface->is_activated != is_activated) {
1536                 if (is_activated) {
1537                         TPL_INFO("[ACTIVATED]",
1538                                           "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1539                                           wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
1540                 } else {
1541                         TPL_LOG_T("[DEACTIVATED]",
1542                                           " wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1543                                           wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
1544                 }
1545         }
1546
1547         wl_egl_surface->reset = TPL_TRUE;
1548
1549         if (surface->reset_cb)
1550                 surface->reset_cb(surface->reset_data);
1551 }
1552
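/* Called when an enqueued buffer becomes acquirable. Sends an ACQUIRABLE
 * message to the surface gsource so that acquire and commit are performed
 * on the wl_egl thread. */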
1553 static void
1554 __cb_tbm_queue_acquirable_callback(tbm_surface_queue_h tbm_queue,
1555                                                                    void *data)
1556 {
1557         TPL_IGNORE(tbm_queue);
1558
1559         tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1560         TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1561
1562         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1563         if (wl_egl_surface->sent_message == NONE_MESSAGE) {
1564                 wl_egl_surface->sent_message = ACQUIRABLE;
1565                 tpl_gsource_send_message(wl_egl_surface->surf_source,
1566                                                          wl_egl_surface->sent_message);
1567         }
1568         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1569 }
1570 /* -- END -- tbm_surface_queue callback functions */
1571
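/* Runs on the wl_egl thread when the surface gsource is finalized.
 * Drains pending presentation feedbacks, closes the presentation sync fd,
 * destroys surface_sync, tss_flusher and the tbm_queue, and removes this
 * surface's vblank entry from the display's surface_vblanks list. */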
1572 static void
1573 _thread_wl_egl_surface_fini(tpl_wl_egl_surface_t *wl_egl_surface)
1574 {
1575         tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1576
1577         TPL_INFO("[SURFACE_FINI]",
1578                           "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
1579                           wl_egl_surface, wl_egl_surface->wl_egl_window,
1580                           wl_egl_surface->wl_surface);
1581 #if TIZEN_FEATURE_ENABLE
1582         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
1583
1584         if (wl_egl_display->presentation && wl_egl_surface->presentation_feedbacks) {
1585                 while (!__tpl_list_is_empty(wl_egl_surface->presentation_feedbacks)) {
1586                         struct pst_feedback *pst_feedback =
1587                                 (struct pst_feedback *)__tpl_list_pop_front(
1588                                                 wl_egl_surface->presentation_feedbacks, NULL);
1589                         if (pst_feedback) {
1590                                 _write_to_eventfd(pst_feedback->pst_sync_fd);
1591                                 close(pst_feedback->pst_sync_fd);
1592                                 pst_feedback->pst_sync_fd = -1;
1593
1594                                 wp_presentation_feedback_destroy(pst_feedback->presentation_feedback);
1595                                 pst_feedback->presentation_feedback = NULL;
1596
1597                                 free(pst_feedback);
1598                         }
1599                 }
1600
1601                 __tpl_list_free(wl_egl_surface->presentation_feedbacks, NULL);
1602                 wl_egl_surface->presentation_feedbacks = NULL;
1603         }
1604
1605         if (wl_egl_surface->presentation_sync.fd != -1) {
1606                 _write_to_eventfd(wl_egl_surface->presentation_sync.fd);
1607                 close(wl_egl_surface->presentation_sync.fd);
1608                 wl_egl_surface->presentation_sync.fd = -1;
1609         }
1610
1611         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1612
1613         if (wl_egl_surface->surface_sync) {
1614                 TPL_INFO("[SURFACE_SYNC_DESTROY]",
1615                                  "wl_egl_surface(%p) surface_sync(%p)",
1616                                   wl_egl_surface, wl_egl_surface->surface_sync);
1617                 zwp_linux_surface_synchronization_v1_destroy(wl_egl_surface->surface_sync);
1618                 wl_egl_surface->surface_sync = NULL;
1619         }
1620
1621         if (wl_egl_surface->tss_flusher) {
1622                 TPL_INFO("[FLUSHER_DESTROY]",
1623                                   "wl_egl_surface(%p) tss_flusher(%p)",
1624                                   wl_egl_surface, wl_egl_surface->tss_flusher);
1625                 tizen_surface_shm_flusher_destroy(wl_egl_surface->tss_flusher);
1626                 wl_egl_surface->tss_flusher = NULL;
1627         }
1628 #endif
1629
1630         if (wl_egl_surface->tbm_queue) {
1631                 TPL_INFO("[TBM_QUEUE_DESTROY]",
1632                                  "wl_egl_surface(%p) tbm_queue(%p)",
1633                                  wl_egl_surface, wl_egl_surface->tbm_queue);
1634                 tbm_surface_queue_destroy(wl_egl_surface->tbm_queue);
1635                 wl_egl_surface->tbm_queue = NULL;
1636         }
1637
1638         if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
1639                 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
1640                 __tpl_list_free(wl_egl_surface->vblank->waiting_buffers, NULL);
1641                 wl_egl_surface->vblank->waiting_buffers = NULL;
1642                 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
1643         }
1644
1645         if (wl_egl_surface->vblank) {
1646                 __tpl_list_remove_data(wl_egl_display->tdm.surface_vblanks,
1647                                                            (void *)wl_egl_surface->vblank,
1648                                                            TPL_FIRST,
1649                                                            __cb_surface_vblank_free);
1650                 wl_egl_surface->vblank = NULL;
1651         }
1652 }
1653
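/* Message handler of the surface gsource, executed on the wl_egl thread.
 * INIT_SURFACE performs the in-thread initialization and signals the waiting
 * caller; ACQUIRABLE acquires buffers from the tbm_queue. */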
1654 static tpl_bool_t
1655 __thread_func_surf_dispatch(tpl_gsource *gsource, uint64_t message)
1656 {
1657         tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1658
1659         wl_egl_surface = (tpl_wl_egl_surface_t *)tpl_gsource_get_data(gsource);
1660
1661         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1662         if (message == INIT_SURFACE) { /* Initialize surface */
1663                 TPL_DEBUG("wl_egl_surface(%p) initialize message received!",
1664                                   wl_egl_surface);
1665                 _thread_wl_egl_surface_init(wl_egl_surface);
1666                 wl_egl_surface->initialized_in_thread = TPL_TRUE;
1667                 tpl_gcond_signal(&wl_egl_surface->surf_cond);
1668         } else if (message == ACQUIRABLE) { /* Acquirable */
1669                 TPL_DEBUG("wl_egl_surface(%p) acquirable message received!",
1670                                   wl_egl_surface);
1671                 _thread_surface_queue_acquire(wl_egl_surface);
1672         }
1673
1674         wl_egl_surface->sent_message = NONE_MESSAGE;
1675
1676         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1677
1678         return TPL_TRUE;
1679 }
1680
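/* Finalizer of the surface gsource. Tears down the surface resources on the
 * wl_egl thread and signals the thread waiting on surf_cond after setting
 * gsource_finalized to TPL_TRUE. */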
1681 static void
1682 __thread_func_surf_finalize(tpl_gsource *gsource)
1683 {
1684         tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1685
1686         wl_egl_surface = (tpl_wl_egl_surface_t *)tpl_gsource_get_data(gsource);
1687         TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1688
1689         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1690         TPL_DEBUG("[FINALIZE] wl_egl_surface(%p) tpl_gsource(%p)",
1691                           wl_egl_surface, gsource);
1692
1693         _thread_wl_egl_surface_fini(wl_egl_surface);
1694
1695         wl_egl_surface->gsource_finalized = TPL_TRUE;
1696
1697         tpl_gcond_signal(&wl_egl_surface->surf_cond);
1698         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1699 }
1700
1701 static tpl_gsource_functions surf_funcs = {
1702         .prepare = NULL,
1703         .check = NULL,
1704         .dispatch = __thread_func_surf_dispatch,
1705         .finalize = __thread_func_surf_finalize,
1706 };
1707
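/* Backend entry point for surface creation. Allocates a tpl_wl_egl_surface_t,
 * creates its gsource on the wl_egl thread, installs the wl_egl_window and
 * tizen_private callbacks, then sends INIT_SURFACE and blocks until the
 * in-thread initialization has finished. */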
1708 static tpl_result_t
1709 __tpl_wl_egl_surface_init(tpl_surface_t *surface)
1710 {
1711         tpl_wl_egl_display_t *wl_egl_display    = NULL;
1712         tpl_wl_egl_surface_t *wl_egl_surface    = NULL;
1713         tpl_gsource *surf_source                = NULL;
1714
1715         TPL_ASSERT(surface);
1716         TPL_ASSERT(surface->display);
1717         TPL_ASSERT(surface->type == TPL_SURFACE_TYPE_WINDOW);
1718         TPL_ASSERT(surface->native_handle);
1719
1720         struct wl_egl_window *wl_egl_window =
1721                 (struct wl_egl_window *)surface->native_handle;
1722
1723         wl_egl_display =
1724                 (tpl_wl_egl_display_t *)surface->display->backend.data;
1725         if (!wl_egl_display) {
1726                 TPL_ERR("Invalid parameter. wl_egl_display(%p)",
1727                                 wl_egl_display);
1728                 return TPL_ERROR_INVALID_PARAMETER;
1729         }
1730
1731         wl_egl_surface = (tpl_wl_egl_surface_t *) calloc(1,
1732                                                   sizeof(tpl_wl_egl_surface_t));
1733         if (!wl_egl_surface) {
1734                 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_surface_t.");
1735                 return TPL_ERROR_OUT_OF_MEMORY;
1736         }
1737
1738         surf_source = tpl_gsource_create(wl_egl_display->thread, (void *)wl_egl_surface,
1739                                                                          -1, &surf_funcs, SOURCE_TYPE_NORMAL);
1740         if (!surf_source) {
1741                 TPL_ERR("Failed to create surf_source with wl_egl_surface(%p)",
1742                                 wl_egl_surface);
1743                 goto surf_source_create_fail;
1744         }
1745
1746         surface->backend.data = (void *)wl_egl_surface;
1747         surface->width        = wl_egl_window->width;
1748         surface->height       = wl_egl_window->height;
1749         surface->rotation     = 0;
1750
1751         wl_egl_surface->tpl_surface            = surface;
1752         wl_egl_surface->width                  = wl_egl_window->width;
1753         wl_egl_surface->height                 = wl_egl_window->height;
1754         wl_egl_surface->format                 = surface->format;
1755         wl_egl_surface->num_buffers            = surface->num_buffers;
1756
1757         wl_egl_surface->surf_source            = surf_source;
1758         wl_egl_surface->wl_egl_window          = wl_egl_window;
1759         wl_egl_surface->wl_surface             = wl_egl_window->surface;
1760
1761         wl_egl_surface->wl_egl_display         = wl_egl_display;
1762
1763         wl_egl_surface->reset                  = TPL_FALSE;
1764         wl_egl_surface->is_activated           = TPL_FALSE;
1765         wl_egl_surface->need_to_enqueue        = TPL_TRUE;
1766         wl_egl_surface->prerotation_capability = TPL_FALSE;
1767         wl_egl_surface->vblank_done            = TPL_TRUE;
1768         wl_egl_surface->use_render_done_fence  = TPL_FALSE;
1769         wl_egl_surface->set_serial_is_used     = TPL_FALSE;
1770         wl_egl_surface->gsource_finalized      = TPL_FALSE;
1771         wl_egl_surface->initialized_in_thread  = TPL_FALSE;
1772
1773         wl_egl_surface->latest_transform       = -1;
1774         wl_egl_surface->render_done_cnt        = 0;
1775         wl_egl_surface->serial                 = 0;
1776
1777         wl_egl_surface->vblank                 = NULL;
1778 #if TIZEN_FEATURE_ENABLE
1779         wl_egl_surface->tss_flusher            = NULL;
1780         wl_egl_surface->surface_sync           = NULL;
1781 #endif
1782
1783         wl_egl_surface->post_interval          = surface->post_interval;
1784
1785         wl_egl_surface->commit_sync.fd         = -1;
1786         wl_egl_surface->presentation_sync.fd   = -1;
1787
1788         wl_egl_surface->sent_message           = NONE_MESSAGE;
1789
1790         {
1791                 int i = 0;
1792                 for (i = 0; i < BUFFER_ARRAY_SIZE; i++)
1793                         wl_egl_surface->buffers[i]     = NULL;
1794                 wl_egl_surface->buffer_cnt         = 0;
1795         }
1796
1797         wl_egl_surface->last_enq_buffer        = NULL;
1798
1799         {
1800                 struct tizen_private *tizen_private = NULL;
1801
1802                 if (wl_egl_window->driver_private)
1803                         tizen_private = (struct tizen_private *)wl_egl_window->driver_private;
1804                 else {
1805                         tizen_private = tizen_private_create();
1806                         wl_egl_window->driver_private = (void *)tizen_private;
1807                 }
1808
1809                 if (tizen_private) {
1810                         tizen_private->data = (void *)wl_egl_surface;
1811                         tizen_private->rotate_callback = (void *)__cb_rotate_callback;
1812                         tizen_private->get_rotation_capability = (void *)
1813                                 __cb_get_rotation_capability;
1814                         tizen_private->set_window_serial_callback = (void *)
1815                                 __cb_set_window_serial_callback;
1816                         tizen_private->create_commit_sync_fd = (void *)__cb_create_commit_sync_fd;
1817 #if TIZEN_FEATURE_ENABLE
1818                         tizen_private->create_presentation_sync_fd = (void *)__cb_create_presentation_sync_fd;
1819 #else
1820                         tizen_private->create_presentation_sync_fd = NULL;
1821 #endif
1822
1823                         wl_egl_window->destroy_window_callback = (void *)__cb_destroy_callback;
1824                         wl_egl_window->resize_callback = (void *)__cb_resize_callback;
1825                 }
1826         }
1827
1828         tpl_gmutex_init(&wl_egl_surface->commit_sync.mutex);
1829         tpl_gmutex_init(&wl_egl_surface->presentation_sync.mutex);
1830
1831         tpl_gmutex_init(&wl_egl_surface->buffers_mutex);
1832
1833         tpl_gmutex_init(&wl_egl_surface->surf_mutex);
1834         tpl_gcond_init(&wl_egl_surface->surf_cond);
1835
1836         /* Initialize in thread */
1837         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1838         wl_egl_surface->sent_message = INIT_SURFACE;
1839         tpl_gsource_send_message(wl_egl_surface->surf_source,
1840                                                          wl_egl_surface->sent_message);
1841         while (!wl_egl_surface->initialized_in_thread)
1842                 tpl_gcond_wait(&wl_egl_surface->surf_cond, &wl_egl_surface->surf_mutex);
1843         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1844
1845         TPL_ASSERT(wl_egl_surface->tbm_queue);
1846
1847         TPL_INFO("[SURFACE_INIT]",
1848                           "tpl_surface(%p) wl_egl_surface(%p) gsource(%p)",
1849                           surface, wl_egl_surface, wl_egl_surface->surf_source);
1850
1851         return TPL_ERROR_NONE;
1852
1853 surf_source_create_fail:
1854         free(wl_egl_surface);
1855         surface->backend.data = NULL;
1856         return TPL_ERROR_INVALID_OPERATION;
1857 }
1858
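/* Creates the wayland-tbm client surface queue. A tiled queue is used when
 * the buffer manager reports TBM_BUFMGR_CAPABILITY_TILED_MEMORY. The queue
 * is set to GUARANTEE_CYCLE mode and the reset/acquirable callbacks are
 * registered on it. */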
1859 static tbm_surface_queue_h
1860 _thread_create_tbm_queue(tpl_wl_egl_surface_t *wl_egl_surface,
1861                                                  struct wayland_tbm_client *wl_tbm_client,
1862                                                  int num_buffers)
1863 {
1864         tbm_surface_queue_h tbm_queue = NULL;
1865         tbm_bufmgr bufmgr             = NULL;
1866         unsigned int capability;
1867
1868         struct wl_surface *wl_surface = wl_egl_surface->wl_surface;
1869         int width = wl_egl_surface->width;
1870         int height = wl_egl_surface->height;
1871         int format = wl_egl_surface->format;
1872
1873         if (!wl_tbm_client || !wl_surface) {
1874                 TPL_ERR("Invalid parameters. wl_tbm_client(%p) wl_surface(%p)",
1875                                 wl_tbm_client, wl_surface);
1876                 return NULL;
1877         }
1878
1879         bufmgr = tbm_bufmgr_init(-1);
1880         capability = tbm_bufmgr_get_capability(bufmgr);
1881         tbm_bufmgr_deinit(bufmgr);
1882
1883         if (capability & TBM_BUFMGR_CAPABILITY_TILED_MEMORY) {
1884                 tbm_queue = wayland_tbm_client_create_surface_queue_tiled(
1885                                                 wl_tbm_client,
1886                                                 wl_surface,
1887                                                 num_buffers,
1888                                                 width,
1889                                                 height,
1890                                                 format);
1891         } else {
1892                 tbm_queue = wayland_tbm_client_create_surface_queue(
1893                                                 wl_tbm_client,
1894                                                 wl_surface,
1895                                                 num_buffers,
1896                                                 width,
1897                                                 height,
1898                                                 format);
1899         }
1900
1901         if (!tbm_queue) {
1902                 TPL_ERR("Failed to create tbm_queue. wl_tbm_client(%p)",
1903                                 wl_tbm_client);
1904                 return NULL;
1905         }
1906
1907         if (tbm_surface_queue_set_modes(
1908                         tbm_queue, TBM_SURFACE_QUEUE_MODE_GUARANTEE_CYCLE) !=
1909                                 TBM_SURFACE_QUEUE_ERROR_NONE) {
1910                 TPL_ERR("Failed to set queue mode to tbm_surface_queue(%p)",
1911                                 tbm_queue);
1912                 tbm_surface_queue_destroy(tbm_queue);
1913                 return NULL;
1914         }
1915
1916         if (tbm_surface_queue_add_reset_cb(
1917                         tbm_queue,
1918                         __cb_tbm_queue_reset_callback,
1919                         (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1920                 TPL_ERR("Failed to register reset callback to tbm_surface_queue(%p)",
1921                                 tbm_queue);
1922                 tbm_surface_queue_destroy(tbm_queue);
1923                 return NULL;
1924         }
1925
1926         if (tbm_surface_queue_add_acquirable_cb(
1927                         tbm_queue,
1928                         __cb_tbm_queue_acquirable_callback,
1929                         (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1930                 TPL_ERR("Failed to register acquirable callback to tbm_surface_queue(%p)",
1931                                 tbm_queue);
1932                 tbm_surface_queue_destroy(tbm_queue);
1933                 return NULL;
1934         }
1935
1936         return tbm_queue;
1937 }
1938
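/* Creates a tdm_client_vblank object on the "primary" output. Fake vblank
 * is enabled (events can be generated even without a real vblank) and sync
 * mode is disabled so that vblank waits are asynchronous. */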
1939 static tdm_client_vblank*
1940 _thread_create_tdm_client_vblank(tdm_client *tdm_client)
1941 {
1942         tdm_client_vblank *tdm_vblank = NULL;
1943         tdm_client_output *tdm_output = NULL;
1944         tdm_error tdm_err = TDM_ERROR_NONE;
1945
1946         if (!tdm_client) {
1947                 TPL_ERR("Invalid parameter. tdm_client(%p)", tdm_client);
1948                 return NULL;
1949         }
1950
1951         tdm_output = tdm_client_get_output(tdm_client, "primary", &tdm_err);
1952         if (!tdm_output || tdm_err != TDM_ERROR_NONE) {
1953                 TPL_ERR("Failed to get tdm_client_output. tdm_err(%d)", tdm_err);
1954                 return NULL;
1955         }
1956
1957         tdm_vblank = tdm_client_output_create_vblank(tdm_output, &tdm_err);
1958         if (!tdm_vblank || tdm_err != TDM_ERROR_NONE) {
1959                 TPL_ERR("Failed to create tdm_vblank. tdm_err(%d)", tdm_err);
1960                 return NULL;
1961         }
1962
1963         tdm_client_vblank_set_enable_fake(tdm_vblank, 1);
1964         tdm_client_vblank_set_sync(tdm_vblank, 0);
1965
1966         return tdm_vblank;
1967 }
1968
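/* Free callback for entries of the display's surface_vblanks list.
 * Destroys the tdm_vblank object, clears the mutex and frees the
 * tpl_surface_vblank_t, detaching it from its wl_egl_surface. */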
1969 static void
1970 __cb_surface_vblank_free(void *data)
1971 {
1972         TPL_CHECK_ON_NULL_RETURN(data);
1973
1974         tpl_surface_vblank_t *vblank = (tpl_surface_vblank_t *)data;
1975         tpl_wl_egl_surface_t *wl_egl_surface = vblank->wl_egl_surface;
1976
1977         TPL_INFO("[VBLANK_DESTROY]",
1978                          "wl_egl_surface(%p) surface_vblank(%p) tdm_vblank(%p)",
1979                          wl_egl_surface, vblank,
1980                          vblank->tdm_vblank);
1981
1982         tdm_client_vblank_destroy(vblank->tdm_vblank);
1983         vblank->tdm_vblank = NULL;
1984         vblank->wl_egl_surface = NULL;
1985         tpl_gmutex_clear(&vblank->mutex);
1986
1987         free(vblank);
1988
1989         wl_egl_surface->vblank = NULL;
1990 }
1991
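/* In-thread part of surface initialization. Creates the tbm_queue and,
 * depending on the display capabilities, the per-surface vblank object,
 * the tizen_surface_shm flusher, the explicit sync object and the
 * presentation feedback list. */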
1992 static void
1993 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface)
1994 {
1995         tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1996         tpl_surface_vblank_t *vblank         = NULL;
1997
1998         wl_egl_surface->tbm_queue = _thread_create_tbm_queue(
1999                                                                         wl_egl_surface,
2000                                                                         wl_egl_display->wl_tbm_client,
2001                                                                         wl_egl_surface->num_buffers);
2002         if (!wl_egl_surface->tbm_queue) {
2003                 TPL_ERR("Failed to create tbm_queue. wl_egl_surface(%p) wl_tbm_client(%p)",
2004                                 wl_egl_surface, wl_egl_display->wl_tbm_client);
2005                 return;
2006         }
2007
2008         TPL_INFO("[QUEUE_CREATION]",
2009                          "wl_egl_surface(%p) wl_surface(%p) wl_tbm_client(%p)",
2010                          wl_egl_surface, wl_egl_surface->wl_surface,
2011                          wl_egl_display->wl_tbm_client);
2012         TPL_INFO("[QUEUE_CREATION]",
2013                          "tbm_queue(%p) size(%d x %d) X %d format(%d)",
2014                          wl_egl_surface->tbm_queue,
2015                          wl_egl_surface->width,
2016                          wl_egl_surface->height,
2017                          wl_egl_surface->num_buffers,
2018                          wl_egl_surface->format);
2019
2020         if (wl_egl_display->use_wait_vblank) {
2021                 vblank = (tpl_surface_vblank_t *)calloc(1, sizeof(tpl_surface_vblank_t));
2022                 if (vblank) {
2023                         vblank->tdm_vblank = _thread_create_tdm_client_vblank(
2024                                                                         wl_egl_display->tdm.tdm_client);
2025                         if (!vblank->tdm_vblank) {
2026                                 TPL_ERR("Failed to create tdm_vblank from tdm_client(%p)",
2027                                                 wl_egl_display->tdm.tdm_client);
2028                                 free(vblank);
2029                                 vblank = NULL;
2030                         } else {
2031                                 vblank->waiting_buffers = __tpl_list_alloc();
2032                                 vblank->wl_egl_surface = wl_egl_surface;
2033                                 tpl_gmutex_init(&vblank->mutex);
2034
2035                                 __tpl_list_push_back(wl_egl_display->tdm.surface_vblanks,
2036                                                                          (void *)vblank);
2037
2038                                 TPL_INFO("[VBLANK_INIT]",
2039                                                  "wl_egl_surface(%p) tdm_client(%p) tdm_vblank(%p)",
2040                                                  wl_egl_surface, wl_egl_display->tdm.tdm_client,
2041                                                  vblank->tdm_vblank);
2042                         }
2043                 }
2044         }
2045
2046         wl_egl_surface->vblank = vblank;
2047 #if TIZEN_FEATURE_ENABLE
2048         if (wl_egl_display->tss) {
2049                 wl_egl_surface->tss_flusher =
2050                         tizen_surface_shm_get_flusher(wl_egl_display->tss,
2051                                                                                   wl_egl_surface->wl_surface);
2052         }
2053
2054         if (wl_egl_surface->tss_flusher) {
2055                 tizen_surface_shm_flusher_add_listener(wl_egl_surface->tss_flusher,
2056                                                                                            &tss_flusher_listener,
2057                                                                                            wl_egl_surface);
2058                 TPL_INFO("[FLUSHER_INIT]",
2059                                  "wl_egl_surface(%p) tss_flusher(%p)",
2060                                  wl_egl_surface, wl_egl_surface->tss_flusher);
2061         }
2062
2063         if (wl_egl_display->explicit_sync && wl_egl_display->use_explicit_sync) {
2064                 wl_egl_surface->surface_sync =
2065                         zwp_linux_explicit_synchronization_v1_get_synchronization(
2066                                         wl_egl_display->explicit_sync, wl_egl_surface->wl_surface);
2067                 if (wl_egl_surface->surface_sync) {
2068                         TPL_INFO("[EXPLICIT_SYNC_INIT]",
2069                                          "wl_egl_surface(%p) surface_sync(%p)",
2070                                          wl_egl_surface, wl_egl_surface->surface_sync);
2071                 } else {
2072                         TPL_WARN("Failed to create surface_sync. | wl_egl_surface(%p)",
2073                                          wl_egl_surface);
2074                         wl_egl_display->use_explicit_sync = TPL_FALSE;
2075                 }
2076         }
2077 #endif
2078         wl_egl_surface->presentation_feedbacks = __tpl_list_alloc();
2079 }
2080
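/* Releases every buffer still tracked in wl_egl_surface->buffers[].
 * The wl_egl thread is paused while the array is examined; for buffers that
 * are already enqueued, the thread is resumed and each buffer's cond is
 * waited on (500ms at most) until it reaches COMMITTED. Acquired buffers are
 * then released and dequeued buffers are cancelled. */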
2081 static void
2082 _tpl_wl_egl_surface_buffer_clear(tpl_wl_egl_surface_t *wl_egl_surface)
2083 {
2084         tbm_surface_queue_error_e tsq_err       = TBM_SURFACE_QUEUE_ERROR_NONE;
2085         tpl_wl_egl_display_t *wl_egl_display    = wl_egl_surface->wl_egl_display;
2086         tpl_wl_egl_buffer_t *wl_egl_buffer      = NULL;
2087         tpl_bool_t need_to_release              = TPL_FALSE;
2088         tpl_bool_t need_to_cancel               = TPL_FALSE;
2089         buffer_status_t status                  = RELEASED;
2090         int idx                                 = 0;
2091
2092         tpl_gthread_pause_in_idle(wl_egl_display->thread);
2093
2094         TPL_INFO("[BUFFER_CLEAR]", "BEGIN | wl_egl_surface(%p)", wl_egl_surface);
2095
2096         while (wl_egl_surface->buffer_cnt) {
2097                 wl_egl_buffer = wl_egl_surface->buffers[idx];
2098
2099                 if (wl_egl_buffer) {
2100                         wl_egl_surface->buffers[idx] = NULL;
2101                         wl_egl_surface->buffer_cnt--;
2102                 } else {
2103                         idx++;
2104                         continue;
2105                 }
2106
2107                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2108
2109                 status = wl_egl_buffer->status;
2110
2111                 TPL_INFO("[BUFFER]", "idx(%d) | wl_egl_buffer(%p) tbm_surface(%p) status(%s)",
2112                                  idx, wl_egl_buffer,
2113                                  wl_egl_buffer->tbm_surface,
2114                                  status_to_string[status]);
2115
2116                 if (status >= ENQUEUED) {
2117                         tpl_result_t wait_result = TPL_ERROR_NONE;
2118
2119                         while (status < COMMITTED && wait_result != TPL_ERROR_TIME_OUT) {
2120                                 tpl_gthread_continue(wl_egl_display->thread);
2121                                 wait_result = tpl_gcond_timed_wait(&wl_egl_buffer->cond,
2122                                                                                                    &wl_egl_buffer->mutex,
2123                                                                                                    500); /* 500ms */
2124                                 tpl_gthread_pause_in_idle(wl_egl_display->thread);
2125                                 status = wl_egl_buffer->status; /* update status */
2126
2127                                 if (wait_result == TPL_ERROR_TIME_OUT) {
2128                                         TPL_WARN("timeout occurred while waiting to be signaled. wl_egl_buffer(%p) status(%s)",
2129                                                          wl_egl_buffer, status_to_string[status]);
2130                                 }
2131                         }
2132                 }
2133
2134                 /* ACQUIRED, WAITING_SIGNALED, WAITING_VBLANK, COMMITTED */
2135                 /* It has been acquired but has not yet been released, so this
2136                  * buffer must be released. */
2137                 need_to_release = (status >= ACQUIRED && status <= COMMITTED);
2138
2139                 /* After dequeue, it has not been enqueued yet
2140                  * so cancel_dequeue must be performed. */
2141                 need_to_cancel = (status == DEQUEUED);
2142
2143                 if (need_to_release) {
2144                         tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2145                                                                                                 wl_egl_buffer->tbm_surface);
2146                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2147                                 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
2148                                                 wl_egl_buffer->tbm_surface, tsq_err);
2149                 }
2150
2151                 if (need_to_cancel) {
2152                         tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
2153                                                                                                            wl_egl_buffer->tbm_surface);
2154                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2155                                 TPL_ERR("Failed to cancel dequeue. tbm_surface(%p) tsq_err(%d)",
2156                                                 wl_egl_buffer->tbm_surface, tsq_err);
2157                 }
2158
2159                 wl_egl_buffer->status = RELEASED;
2160
2161                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2162
2163                 if (need_to_release || need_to_cancel)
2164                         tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
2165
2166                 idx++;
2167         }
2168         TPL_INFO("[BUFFER_CLEAR]", "END | wl_egl_surface(%p)", wl_egl_surface);
2169
2170         tpl_gthread_continue(wl_egl_display->thread);
2171 }
2172
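/* Backend entry point for surface destruction. Clears all buffers, destroys
 * the surface gsource (waiting until it has been finalized in the wl_egl
 * thread), detaches the wl_egl_window callbacks and frees the
 * wl_egl_surface. */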
2173 static void
2174 __tpl_wl_egl_surface_fini(tpl_surface_t *surface)
2175 {
2176         tpl_wl_egl_surface_t *wl_egl_surface = NULL;
2177         tpl_wl_egl_display_t *wl_egl_display = NULL;
2178
2179         TPL_ASSERT(surface);
2180         TPL_ASSERT(surface->display);
2181
2182         TPL_CHECK_ON_FALSE_RETURN(surface->type == TPL_SURFACE_TYPE_WINDOW);
2183
2184         wl_egl_surface = (tpl_wl_egl_surface_t *) surface->backend.data;
2185         TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
2186
2187         wl_egl_display = wl_egl_surface->wl_egl_display;
2188         TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
2189
2190         TPL_INFO("[SURFACE_FINI][BEGIN]",
2191                          "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
2192                          wl_egl_surface,
2193                          wl_egl_surface->wl_surface, wl_egl_surface->tbm_queue);
2194
2195         tpl_gthread_wait_idle(wl_egl_display->thread);
2196
2197         _tpl_wl_egl_surface_buffer_clear(wl_egl_surface);
2198
2199         if (wl_egl_surface->surf_source) {
2200                 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2201                 /* Send a destroy message to the thread. */
2202                 tpl_gsource_destroy(wl_egl_surface->surf_source, TPL_TRUE);
2203                 /* This is a protection against unexpected situations in which
2204                  * g_cond_wait cannot work normally.
2205                  * When calling tpl_gsource_destroy() with destroy_in_thread set to
2206                  * TPL_TRUE, the caller should call tpl_gcond_wait() in a loop that
2207                  * checks the finalized flag. */
2208                 while (!wl_egl_surface->gsource_finalized) {
2209                         tpl_gcond_wait(&wl_egl_surface->surf_cond, &wl_egl_surface->surf_mutex);
2210                 }
2211                 wl_egl_surface->surf_source = NULL;
2212                 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2213         }
2214
2215         _print_buffer_lists(wl_egl_surface);
2216
2217         if (wl_egl_surface->wl_egl_window) {
2218                 struct tizen_private *tizen_private = NULL;
2219                 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2220                 TPL_INFO("[WL_EGL_WINDOW_FINI]",
2221                                  "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
2222                                  wl_egl_surface, wl_egl_window,
2223                                  wl_egl_surface->wl_surface);
2224                 tizen_private = (struct tizen_private *)wl_egl_window->driver_private;
2225                 if (tizen_private) {
2226                         tizen_private->set_window_serial_callback = NULL;
2227                         tizen_private->rotate_callback = NULL;
2228                         tizen_private->get_rotation_capability = NULL;
2229                         tizen_private->create_presentation_sync_fd = NULL;
2230                         tizen_private->create_commit_sync_fd = NULL;
2231                         tizen_private->set_frontbuffer_callback = NULL;
2232                         tizen_private->merge_sync_fds = NULL;
2233                         tizen_private->data = NULL;
2234                         free(tizen_private);
2235
2236                         wl_egl_window->driver_private = NULL;
2237                 }
2238
2239                 wl_egl_window->destroy_window_callback = NULL;
2240                 wl_egl_window->resize_callback = NULL;
2241
2242                 wl_egl_surface->wl_egl_window = NULL;
2243         }
2244
2245         wl_egl_surface->last_enq_buffer = NULL;
2246
2247         wl_egl_surface->wl_surface = NULL;
2248         wl_egl_surface->wl_egl_display = NULL;
2249         wl_egl_surface->tpl_surface = NULL;
2250
2251         tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
2252         tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
2253         tpl_gmutex_clear(&wl_egl_surface->commit_sync.mutex);
2254
2255         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2256         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
2257         tpl_gmutex_clear(&wl_egl_surface->presentation_sync.mutex);
2258
2259         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2260         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2261         tpl_gmutex_clear(&wl_egl_surface->surf_mutex);
2262         tpl_gcond_clear(&wl_egl_surface->surf_cond);
2263
2264         TPL_INFO("[SURFACE_FINI][END]", "wl_egl_surface(%p)", wl_egl_surface);
2265
2266         free(wl_egl_surface);
2267         surface->backend.data = NULL;
2268 }
2269
2270 static tpl_result_t
2271 __tpl_wl_egl_surface_set_rotation_capability(tpl_surface_t *surface,
2272                                                                                          tpl_bool_t set)
2273 {
2274         tpl_wl_egl_surface_t *wl_egl_surface = NULL;
2275
2276         TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
2277
2278         wl_egl_surface = (tpl_wl_egl_surface_t *)surface->backend.data;
2279
2280         TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
2281
2282         TPL_INFO("[SET_PREROTATION_CAPABILITY]",
2283                          "wl_egl_surface(%p) prerotation capability set to [%s]",
2284                          wl_egl_surface, (set ? "TRUE" : "FALSE"));
2285
2286         wl_egl_surface->prerotation_capability = set;
2287         return TPL_ERROR_NONE;
2288 }
2289
2290 static tpl_result_t
2291 __tpl_wl_egl_surface_set_post_interval(tpl_surface_t *surface,
2292                                                                            int post_interval)
2293 {
2294         tpl_wl_egl_surface_t *wl_egl_surface = NULL;
2295
2296         TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
2297
2298         wl_egl_surface = (tpl_wl_egl_surface_t *)surface->backend.data;
2299
2300         TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
2301
2302         TPL_INFO("[SET_POST_INTERVAL]",
2303                          "wl_egl_surface(%p) post_interval(%d -> %d)",
2304                          wl_egl_surface, wl_egl_surface->post_interval, post_interval);
2305
2306         wl_egl_surface->post_interval = post_interval;
2307
2308         return TPL_ERROR_NONE;
2309 }
2310
2311 static tpl_bool_t
2312 __tpl_wl_egl_surface_validate(tpl_surface_t *surface)
2313 {
2314         tpl_bool_t retval = TPL_TRUE;
2315
2316         TPL_ASSERT(surface);
2317         TPL_ASSERT(surface->backend.data);
2318
2319         tpl_wl_egl_surface_t *wl_egl_surface =
2320                 (tpl_wl_egl_surface_t *)surface->backend.data;
2321
2322         retval = !(wl_egl_surface->reset);
2323
2324         return retval;
2325 }
2326
2327 static void
2328 __tpl_wl_egl_surface_get_size(tpl_surface_t *surface, int *width, int *height)
2329 {
2330         tpl_wl_egl_surface_t *wl_egl_surface =
2331                 (tpl_wl_egl_surface_t *)surface->backend.data;
2332
2333         if (width)
2334                 *width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2335         if (height)
2336                 *height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
2337 }
2338
2339 #define CAN_DEQUEUE_TIMEOUT_MS 10000
2340
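/* Forcibly flushes the tbm_surface_queue as a recovery path. Buffers whose
 * status is past ENQUEUED but not yet released are released back to the
 * queue and unreferenced. The wl_egl thread is kept paused while the queue
 * and the buffer array are modified. */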
2341 tpl_result_t
2342 _tbm_queue_force_flush(tpl_wl_egl_surface_t *wl_egl_surface)
2343 {
2344         tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
2345         tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2346
2347         tpl_gthread_pause_in_idle(wl_egl_display->thread);
2348
2349         _print_buffer_lists(wl_egl_surface);
2350
2351         if ((tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue))
2352                 != TBM_SURFACE_QUEUE_ERROR_NONE) {
2353                 TPL_ERR("Failed to flush tbm_surface_queue(%p) tsq_err(%d)",
2354                                 wl_egl_surface->tbm_queue, tsq_err);
2355                 tpl_gthread_continue(wl_egl_display->thread);
2356                 return TPL_ERROR_INVALID_OPERATION;
2357         }
2358
2359         {
2360                 int i;
2361                 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2362                 for (i = 0; i < BUFFER_ARRAY_SIZE; i++) {
2363                         buffer_status_t status;
2364                         wl_egl_buffer = wl_egl_surface->buffers[i];
2365                         if (wl_egl_buffer) {
2366                                 status = wl_egl_buffer->status;
2367                         } else {
2368                                 continue;
2369                         }
2370
2371                         if (status > ENQUEUED && status <= COMMITTED) {
2372                                 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2373                                                                                                         wl_egl_buffer->tbm_surface);
2374                                 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2375                                         TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
2376                                                         wl_egl_buffer->tbm_surface, tsq_err);
2377                                 tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
2378                         }
2379                 }
2380         }
2381
2382         TPL_INFO("[FORCE_FLUSH]",
2383                          "wl_egl_surface(%p) tbm_queue(%p)",
2384                          wl_egl_surface, wl_egl_surface->tbm_queue);
2385
2386         _print_buffer_lists(wl_egl_surface);
2387
2388         tpl_gthread_continue(wl_egl_display->thread);
2389
2390         return TPL_ERROR_NONE;
2391 }
2392
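/* Re-initializes the per-frame state of a (possibly reused) wl_egl_buffer:
 * draw/commit flags, transform, serial and damage rects. The serial comes
 * from the surface when set_window_serial was used, otherwise from the
 * incremented tizen_private serial. */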
2393 static void
2394 _wl_egl_buffer_init(tpl_wl_egl_buffer_t *wl_egl_buffer,
2395                                         tpl_wl_egl_surface_t *wl_egl_surface)
2396 {
2397         struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2398         struct tizen_private *tizen_private =
2399                 (struct tizen_private *)wl_egl_window->driver_private;
2400
2401         TPL_ASSERT(tizen_private);
2402
2403         wl_egl_buffer->draw_done                = TPL_FALSE;
2404         wl_egl_buffer->need_to_commit           = TPL_TRUE;
2405 #if TIZEN_FEATURE_ENABLE
2406         wl_egl_buffer->buffer_release           = NULL;
2407 #endif
2408         wl_egl_buffer->transform                = tizen_private->transform;
2409
2410         if (wl_egl_buffer->w_transform != tizen_private->window_transform) {
2411                 wl_egl_buffer->w_transform          = tizen_private->window_transform;
2412                 wl_egl_buffer->w_rotated            = TPL_TRUE;
2413         }
2414
2415         if (wl_egl_surface->set_serial_is_used) {
2416                 wl_egl_buffer->serial               = wl_egl_surface->serial;
2417         } else {
2418                 wl_egl_buffer->serial               = ++tizen_private->serial;
2419         }
2420
2421         if (wl_egl_buffer->rects) {
2422                 free(wl_egl_buffer->rects);
2423                 wl_egl_buffer->rects                = NULL;
2424                 wl_egl_buffer->num_rects            = 0;
2425         }
2426 }
2427
2428 static tpl_wl_egl_buffer_t *
2429 _get_wl_egl_buffer(tbm_surface_h tbm_surface)
2430 {
2431         tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2432         tbm_surface_internal_get_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2433                                                                            (void **)&wl_egl_buffer);
2434         return wl_egl_buffer;
2435 }
2436
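/* Returns the wl_egl_buffer attached to the given tbm_surface, creating and
 * registering a new one on first use. New buffers are stored in the first
 * free slot of wl_egl_surface->buffers[]; if the array is full, the frontmost
 * slot is evicted. Per-frame state is refreshed on every call. */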
2437 static tpl_wl_egl_buffer_t *
2438 _wl_egl_buffer_create(tpl_wl_egl_surface_t *wl_egl_surface,
2439                                           tbm_surface_h tbm_surface)
2440 {
2441         tpl_wl_egl_buffer_t  *wl_egl_buffer  = NULL;
2442         struct wl_egl_window *wl_egl_window  = wl_egl_surface->wl_egl_window;
2443
2444         wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2445
2446         if (!wl_egl_buffer) {
2447                 wl_egl_buffer = (tpl_wl_egl_buffer_t *)calloc(1, sizeof(tpl_wl_egl_buffer_t));
2448                 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_buffer, NULL);
2449
2450                 tbm_surface_internal_add_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2451                                                                                    (tbm_data_free)__cb_wl_egl_buffer_free);
2452                 tbm_surface_internal_set_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2453                                                                                    wl_egl_buffer);
2454
2455                 wl_egl_buffer->wl_buffer                = NULL;
2456                 wl_egl_buffer->tbm_surface              = tbm_surface;
2457                 wl_egl_buffer->bo_name                  = _get_tbm_surface_bo_name(tbm_surface);
2458                 wl_egl_buffer->wl_egl_surface           = wl_egl_surface;
2459
2460                 wl_egl_buffer->status                   = RELEASED;
2461
2462                 wl_egl_buffer->acquire_fence_fd         = -1;
2463                 wl_egl_buffer->commit_sync_fd           = -1;
2464                 wl_egl_buffer->presentation_sync_fd     = -1;
2465                 wl_egl_buffer->release_fence_fd         = -1;
2466
2467                 wl_egl_buffer->dx                       = wl_egl_window->dx;
2468                 wl_egl_buffer->dy                       = wl_egl_window->dy;
2469                 wl_egl_buffer->width                    = tbm_surface_get_width(tbm_surface);
2470                 wl_egl_buffer->height                   = tbm_surface_get_height(tbm_surface);
2471
2472                 wl_egl_buffer->w_transform              = -1;
2473
2474                 tpl_gmutex_init(&wl_egl_buffer->mutex);
2475                 tpl_gcond_init(&wl_egl_buffer->cond);
2476
2477                 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
2478                 {
2479                         int i;
2480                         for (i = 0; i < BUFFER_ARRAY_SIZE; i++)
2481                                 if (wl_egl_surface->buffers[i] == NULL) break;
2482
2483                         /* If this path is reached, buffers are not being freed
2484                          * and it may indicate a critical buffer leak. */
2485                         if (i == BUFFER_ARRAY_SIZE) {
2486                                 tpl_wl_egl_buffer_t *evicted_buffer = NULL;
2487                                 int evicted_idx = 0; /* evict the frontmost buffer */
2488
2489                                 evicted_buffer = wl_egl_surface->buffers[evicted_idx];
2490
2491                                 TPL_WARN("wl_egl_surface(%p) buffers array is full. evict one.",
2492                                                  wl_egl_surface);
2493                                 TPL_WARN("evicted buffer (%p) tbm_surface(%p) status(%s)",
2494                                                  evicted_buffer, evicted_buffer->tbm_surface,
2495                                                  status_to_string[evicted_buffer->status]);
2496
2497                                 /* [TODO] Consider whether there is a better
2498                                  * way to handle this eviction. */
2499                                 wl_egl_surface->buffer_cnt--;
2500                                 wl_egl_surface->buffers[evicted_idx]      = NULL;
2501
2502                                 i = evicted_idx;
2503                         }
2504
2505                         wl_egl_surface->buffer_cnt++;
2506                         wl_egl_surface->buffers[i]          = wl_egl_buffer;
2507                         wl_egl_buffer->idx                  = i;
2508                 }
2509                 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2510
2511                 TPL_INFO("[WL_EGL_BUFFER_CREATE]",
2512                                  "wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2513                                  wl_egl_surface, wl_egl_buffer, tbm_surface,
2514                                  wl_egl_buffer->bo_name);
2515         }
2516
2517         _wl_egl_buffer_init(wl_egl_buffer, wl_egl_surface);
2518
2519         return wl_egl_buffer;
2520 }
2521
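/* Backend entry point for dequeue.
 * - If the queue was reset, wait (up to 200ms) until the previously enqueued
 *   buffer has been committed.
 * - Wait until the tbm_queue becomes dequeueable (CAN_DEQUEUE_TIMEOUT_MS),
 *   force-flushing the queue on timeout.
 * - Query the activate state, handle frontbuffer mode, then dequeue a
 *   tbm_surface and return it along with its release fence
 *   (explicit sync only). */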
2522 static tbm_surface_h
2523 __tpl_wl_egl_surface_dequeue_buffer(tpl_surface_t *surface, uint64_t timeout_ns,
2524                                                                         int32_t *release_fence)
2525 {
2526         TPL_ASSERT(surface);
2527         TPL_ASSERT(surface->backend.data);
2528         TPL_ASSERT(surface->display);
2529         TPL_ASSERT(surface->display->backend.data);
2530         TPL_OBJECT_CHECK_RETURN(surface, NULL);
2531
2532         tpl_wl_egl_surface_t *wl_egl_surface =
2533                 (tpl_wl_egl_surface_t *)surface->backend.data;
2534         tpl_wl_egl_display_t *wl_egl_display =
2535                 (tpl_wl_egl_display_t *)surface->display->backend.data;
2536         tpl_wl_egl_buffer_t *wl_egl_buffer   = NULL;
2537
2538         tbm_surface_queue_error_e tsq_err    = TBM_SURFACE_QUEUE_ERROR_NONE;
2539         tpl_bool_t      is_activated         = TPL_FALSE;
2540         int             bo_name              = 0;
2541         tbm_surface_h   tbm_surface          = NULL;
2542
2543         TPL_OBJECT_UNLOCK(surface);
2544         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2545         if (wl_egl_surface->reset == TPL_TRUE) {
2546                 if (_check_buffer_validate(wl_egl_surface, wl_egl_surface->last_enq_buffer) &&
2547                         tbm_surface_internal_is_valid(wl_egl_surface->last_enq_buffer)) {
2548                         tbm_surface_h last_enq_buffer = wl_egl_surface->last_enq_buffer;
2549                         tpl_wl_egl_buffer_t *enqueued_buffer =
2550                                 _get_wl_egl_buffer(last_enq_buffer);
2551
2552                         if (enqueued_buffer) {
2553                                 tbm_surface_internal_ref(last_enq_buffer);
2554                                 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2555                                 tpl_gmutex_lock(&enqueued_buffer->mutex);
2556                                 while (enqueued_buffer->status >= ENQUEUED &&
2557                                            enqueued_buffer->status < COMMITTED) {
2558                                         tpl_result_t wait_result;
2559                                         TPL_INFO("[DEQ_AFTER_RESET]",
2560                                                          "waiting for previous wl_egl_buffer(%p) commit",
2561                                                          enqueued_buffer);
2562
2563                                         wait_result = tpl_gcond_timed_wait(&enqueued_buffer->cond,
2564                                                                                                           &enqueued_buffer->mutex,
2565                                                                                                           200); /* 200ms */
2566                                         if (wait_result == TPL_ERROR_TIME_OUT) {
2567                                                 TPL_WARN("timeout occurred waiting for signal. wl_egl_buffer(%p)",
2568                                                                  enqueued_buffer);
2569                                                 break;
2570                                         }
2571                                 }
2572                                 tpl_gmutex_unlock(&enqueued_buffer->mutex);
2573                                 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2574                                 tbm_surface_internal_unref(last_enq_buffer);
2575                         }
2576                 }
2577
2578                 wl_egl_surface->last_enq_buffer = NULL;
2579         }
2580         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2581
2582         tsq_err = tbm_surface_queue_can_dequeue_wait_timeout(
2583                                 wl_egl_surface->tbm_queue, CAN_DEQUEUE_TIMEOUT_MS);
2584         TPL_OBJECT_LOCK(surface);
2585
2586
2587         if (tsq_err == TBM_SURFACE_QUEUE_ERROR_TIMEOUT) {
2588                 TPL_WARN("[CAN_DEQUEUE_TIMEOUT] queue(%p) will be reset. surface(%p)",
2589                                  wl_egl_surface->tbm_queue, surface);
2590                 if (_tbm_queue_force_flush(wl_egl_surface) != TPL_ERROR_NONE) {
2591                         TPL_ERR("Failed to reset tbm_queue after timeout. tbm_queue(%p) surface(%p)",
2592                                         wl_egl_surface->tbm_queue, surface);
2593                         return NULL;
2594                 } else {
2595                         tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2596                 }
2597         }
2598
2599         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2600                 TPL_ERR("Failed to query can_dequeue. tbm_queue(%p) surface(%p)",
2601                                 wl_egl_surface->tbm_queue, surface);
2602                 return NULL;
2603         }
2604
2605         /* Once the queue is dequeueable, lock wl_event_mutex to prevent other
2606          * events from being processed in wayland_egl_thread
2607          * during the dequeue procedure below. */
2608         tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
2609
2610         /* A wayland client can check its state (ACTIVATED or DEACTIVATED) with
2611          * wayland_tbm_client_queue_check_activate() below.
2612          * This function has to be called before tbm_surface_queue_dequeue()
2613          * in order to know in which state the next buffer will be dequeued.
2614          *
2615          * ACTIVATED means non-composite mode. The client gets buffers which
2616          * can be displayed directly (without compositing).
2617          * DEACTIVATED means composite mode. The client's buffer will be displayed
2618          * by the compositor (E20) with compositing.
2619          */
2620         is_activated = wayland_tbm_client_queue_check_activate(
2621                                                 wl_egl_display->wl_tbm_client,
2622                                                 wl_egl_surface->tbm_queue);
2623
2624         wl_egl_surface->is_activated = is_activated;
2625
2626         surface->width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2627         surface->height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
2628         wl_egl_surface->width = surface->width;
2629         wl_egl_surface->height = surface->height;
2630
2631         if (surface->is_frontbuffer_mode && surface->frontbuffer != NULL) {
2632                 /* In frontbuffer mode, if surface->frontbuffer is already set,
2633                  * return it as long as the surface is still activated;
2634                  * otherwise reset surface->frontbuffer to NULL and dequeue
2635                  * a new buffer. */
2636                 if (is_activated && !wl_egl_surface->reset) {
2637                         bo_name = _get_tbm_surface_bo_name(surface->frontbuffer);
2638
2639                         TPL_LOG_T("WL_EGL",
2640                                           "[DEQ][F] surface->frontbuffer(%p) BO_NAME(%d)",
2641                                           surface->frontbuffer, bo_name);
2642                         TRACE_ASYNC_BEGIN((intptr_t)surface->frontbuffer,
2643                                                           "[DEQ]~[ENQ] BO_NAME:%d",
2644                                                           bo_name);
2645                         tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2646                         return surface->frontbuffer;
2647                 } else {
2648                         surface->frontbuffer = NULL;
2649                         wl_egl_surface->need_to_enqueue = TPL_TRUE;
2650                 }
2651         } else {
2652                 surface->frontbuffer = NULL;
2653         }
2654
2655         tsq_err = tbm_surface_queue_dequeue(wl_egl_surface->tbm_queue,
2656                                                                                 &tbm_surface);
2657         if (!tbm_surface) {
2658                 TPL_ERR("Failed to dequeue from tbm_queue(%p) wl_egl_surface(%p)| tsq_err = %d",
2659                                 wl_egl_surface->tbm_queue, wl_egl_surface, tsq_err);
2660                 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2661                 return NULL;
2662         }
2663
2664         tbm_surface_internal_ref(tbm_surface);
2665
2666         wl_egl_buffer = _wl_egl_buffer_create(wl_egl_surface, tbm_surface);
2667         TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer, "Failed to create/get wl_egl_buffer.");
2668
2669         tpl_gmutex_lock(&wl_egl_buffer->mutex);
2670         wl_egl_buffer->status = DEQUEUED;
2671
2672         /* If wl_egl_buffer->release_fence_fd is -1,
2673          * the tbm_surface can be used immediately.
2674          * If not, the user (EGL) has to wait until it is signaled. */
2675         if (release_fence) {
2676 #if TIZEN_FEATURE_ENABLE
2677                 if (wl_egl_display->use_explicit_sync) {
2678                         *release_fence = wl_egl_buffer->release_fence_fd;
2679                         TPL_DEBUG("wl_egl_surface(%p) wl_egl_buffer(%p) release_fence_fd(%d)",
2680                                           wl_egl_surface, wl_egl_buffer, *release_fence);
2681
2682                         wl_egl_buffer->release_fence_fd = -1;
2683                 } else
2684 #endif
2685                 {
2686                         *release_fence = -1;
2687                 }
2688         }
2689
2690         if (surface->is_frontbuffer_mode && is_activated)
2691                 surface->frontbuffer = tbm_surface;
2692
2693         wl_egl_surface->reset = TPL_FALSE;
2694
2695         TRACE_MARK("[DEQ][NEW]BO_NAME:%d", wl_egl_buffer->bo_name);
2696         TRACE_ASYNC_BEGIN((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d",
2697                                           wl_egl_buffer->bo_name);
2698         TPL_LOG_T("WL_EGL", "[DEQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2699                           wl_egl_buffer, tbm_surface, wl_egl_buffer->bo_name,
2700                           release_fence ? *release_fence : -1);
2701
2702         tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2703         tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2704
2705         return tbm_surface;
2706 }
2707
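/* Return a dequeued but unused tbm_surface to the queue.
 * Marks the wl_egl_buffer as RELEASED, drops the reference taken at dequeue
 * and calls tbm_surface_queue_cancel_dequeue(). */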
2708 static tpl_result_t
2709 __tpl_wl_egl_surface_cancel_buffer(tpl_surface_t *surface,
2710                                                                    tbm_surface_h tbm_surface)
2711 {
2712         TPL_ASSERT(surface);
2713         TPL_ASSERT(surface->backend.data);
2714
2715         tpl_wl_egl_surface_t *wl_egl_surface    =
2716                 (tpl_wl_egl_surface_t *)surface->backend.data;
2717         tpl_wl_egl_buffer_t *wl_egl_buffer      = NULL;
2718         tbm_surface_queue_error_e tsq_err       = TBM_SURFACE_QUEUE_ERROR_NONE;
2719
2720         if (!tbm_surface_internal_is_valid(tbm_surface)) {
2721                 TPL_ERR("Invalid buffer. tbm_surface(%p)", tbm_surface);
2722                 return TPL_ERROR_INVALID_PARAMETER;
2723         }
2724
2725         wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2726         if (wl_egl_buffer) {
2727                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2728                 wl_egl_buffer->status = RELEASED;
2729                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2730         }
2731
2732         tbm_surface_internal_unref(tbm_surface);
2733
2734         tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
2735                                                                                            tbm_surface);
2736         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2737                 TPL_ERR("Failed to cancel dequeue of tbm_surface(%p) surface(%p)",
2738                                 tbm_surface, surface);
2739                 return TPL_ERROR_INVALID_OPERATION;
2740         }
2741
2742         TPL_INFO("[CANCEL_BUFFER]", "wl_egl_surface(%p) tbm_surface(%p) bo(%d)",
2743                           wl_egl_surface, tbm_surface, _get_tbm_surface_bo_name(tbm_surface));
2744
2745         return TPL_ERROR_NONE;
2746 }
2747
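/* Backend entry point for enqueue (eglSwapBuffers).
 * Stores the damage rects and acquire fence in the wl_egl_buffer, takes over
 * any pending presentation/commit sync fds, marks the buffer ENQUEUED and
 * pushes it into the tbm_queue. The actual wl_surface commit happens later
 * in the wl_egl thread after the buffer is acquired. */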
2748 static tpl_result_t
2749 __tpl_wl_egl_surface_enqueue_buffer(tpl_surface_t *surface,
2750                 tbm_surface_h tbm_surface,
2751                 int num_rects, const int *rects, int32_t acquire_fence)
2752 {
2753         TPL_ASSERT(surface);
2754         TPL_ASSERT(surface->display);
2755         TPL_ASSERT(surface->backend.data);
2756         TPL_ASSERT(tbm_surface);
2757         TPL_OBJECT_CHECK_RETURN(surface, TPL_ERROR_INVALID_PARAMETER);
2758
2759         tpl_wl_egl_surface_t *wl_egl_surface    =
2760                 (tpl_wl_egl_surface_t *) surface->backend.data;
2761         tpl_wl_egl_buffer_t *wl_egl_buffer      = NULL;
2762         tbm_surface_queue_error_e tsq_err       = TBM_SURFACE_QUEUE_ERROR_NONE;
2763         int bo_name                             = -1;
2764
2765         if (!tbm_surface_internal_is_valid(tbm_surface)) {
2766                 TPL_ERR("Failed to enqueue tbm_surface(%p) Invalid value.",
2767                                 tbm_surface);
2768                 TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2769                 return TPL_ERROR_INVALID_PARAMETER;
2770         }
2771
2772         wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2773         if (!wl_egl_buffer) {
2774                 TPL_ERR("Failed to get wl_egl_buffer from tbm_surface(%p)", tbm_surface);
2775                 return TPL_ERROR_INVALID_PARAMETER;
2776         }
2777
2778         bo_name = _get_tbm_surface_bo_name(tbm_surface);
2779
2780         TRACE_MARK("[ENQ] BO_NAME:%d", bo_name);
2781
2782         tpl_gmutex_lock(&wl_egl_buffer->mutex);
2783
2784         /* If damage region information was received, save it to wl_egl_buffer */
2785         if (num_rects && rects) {
2786                 if (wl_egl_buffer->rects != NULL) {
2787                         free(wl_egl_buffer->rects);
2788                         wl_egl_buffer->rects = NULL;
2789                         wl_egl_buffer->num_rects = 0;
2790                 }
2791
2792                 wl_egl_buffer->rects = (int *)calloc(1, (sizeof(int) * 4 * num_rects));
2793                 if (!wl_egl_buffer->rects) {
2794                         TPL_ERR("Failed to allocate memory for damage rects info.");
2795                         tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2796                         return TPL_ERROR_OUT_OF_MEMORY;
2797                 }
2798
2799                 wl_egl_buffer->num_rects = num_rects;
2800
2801                 memcpy((char *)wl_egl_buffer->rects, (char *)rects, sizeof(int) * 4 * num_rects);
2802         }
2803
2804         if (!wl_egl_surface->need_to_enqueue ||
2805                 !wl_egl_buffer->need_to_commit) {
2806                 TPL_WARN("[ENQ_SKIP][Frontbuffer:%s] tbm_surface(%p) does not need to be enqueued",
2807                                  ((surface->frontbuffer == tbm_surface) ? "ON" : "OFF"), tbm_surface);
2808                 TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2809                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2810                 return TPL_ERROR_NONE;
2811         }
2812
2813         /* In frontbuffer mode, skip tbm_surface_queue_enqueue, acquire, and
2814          * commit if the tbm_surface the client wants to enqueue is the same
2815          * as the already-set surface->frontbuffer.
2816          */
2817         if (surface->is_frontbuffer_mode) {
2818                 /* The first buffer to be activated in frontbuffer mode must be
2819                  * committed. Subsequent frames do not need to be committed because
2820                  * the buffer is already displayed.
2821                  */
2822                 if (surface->frontbuffer == tbm_surface)
2823                         wl_egl_surface->need_to_enqueue = TPL_FALSE;
2824
2825                 if (acquire_fence != -1) {
2826                         close(acquire_fence);
2827                         acquire_fence = -1;
2828                 }
2829         }
2830
2831         if (wl_egl_buffer->acquire_fence_fd != -1)
2832                 close(wl_egl_buffer->acquire_fence_fd);
2833
2834         wl_egl_buffer->acquire_fence_fd = acquire_fence;
2835
2836         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2837         if (wl_egl_surface->presentation_sync.fd != -1) {
2838                 wl_egl_buffer->presentation_sync_fd  = wl_egl_surface->presentation_sync.fd;
2839                 wl_egl_surface->presentation_sync.fd = -1;
2840         }
2841         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
2842
2843         tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
2844         if (wl_egl_surface->commit_sync.fd != -1) {
2845                 wl_egl_buffer->commit_sync_fd  = wl_egl_surface->commit_sync.fd;
2846                 wl_egl_surface->commit_sync.fd = -1;
2847                 TRACE_ASYNC_BEGIN(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
2848                                                   _get_tbm_surface_bo_name(tbm_surface));
2849         }
2850         tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
2851
2852         wl_egl_buffer->status = ENQUEUED;
2853         TPL_LOG_T("WL_EGL",
2854                           "[ENQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2855                           wl_egl_buffer, tbm_surface, bo_name, acquire_fence);
2856
2857         tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2858
2859         tsq_err = tbm_surface_queue_enqueue(wl_egl_surface->tbm_queue,
2860                                                                                 tbm_surface);
2861         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2862                 tbm_surface_internal_unref(tbm_surface);
2863                 TPL_ERR("Failed to enqueue tbm_surface(%p). wl_egl_surface(%p) tsq_err=%d",
2864                                 tbm_surface, wl_egl_surface, tsq_err);
2865                 TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2866                 return TPL_ERROR_INVALID_OPERATION;
2867         }
2868
2869         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2870         wl_egl_surface->last_enq_buffer = tbm_surface;
2871         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2872
2873         tbm_surface_internal_unref(tbm_surface);
2874
2875         TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2876
2877         return TPL_ERROR_NONE;
2878 }
2879
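/* Dispatched in the wl_egl thread when the buffer's acquire fence fd becomes
 * readable (rendering finished). Closes the fence, then either commits the
 * buffer immediately or queues it until the next vblank. */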
2880 static tpl_bool_t
2881 __thread_func_waiting_source_dispatch(tpl_gsource *gsource, uint64_t message)
2882 {
2883         tpl_wl_egl_buffer_t *wl_egl_buffer      =
2884                 (tpl_wl_egl_buffer_t *)tpl_gsource_get_data(gsource);
2885         tpl_wl_egl_surface_t *wl_egl_surface    = wl_egl_buffer->wl_egl_surface;
2886         tbm_surface_h tbm_surface               = wl_egl_buffer->tbm_surface;
2887
2888         wl_egl_surface->render_done_cnt++;
2889
2890         TRACE_ASYNC_END(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2891                                         wl_egl_buffer->acquire_fence_fd);
2892
2893         TPL_DEBUG("[RENDER DONE] wl_egl_buffer(%p) tbm_surface(%p)",
2894                           wl_egl_buffer, tbm_surface);
2895
2896         tpl_gmutex_lock(&wl_egl_buffer->mutex);
2897         wl_egl_buffer->status = WAITING_VBLANK;
2898
2899         TPL_DEBUG("[FINALIZE] wl_egl_buffer(%p) wait_source(%p) fence_fd(%d)",
2900                           wl_egl_buffer, wl_egl_buffer->waiting_source,
2901                           wl_egl_buffer->acquire_fence_fd);
2902
2903         close(wl_egl_buffer->acquire_fence_fd);
2904         wl_egl_buffer->acquire_fence_fd = -1;
2905         wl_egl_buffer->waiting_source = NULL;
2906
2907         tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2908
2909         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2910
2911         if (wl_egl_surface->vblank == NULL || wl_egl_surface->vblank_done)
2912                 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2913         else {
2914                 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
2915                 __tpl_list_push_back(wl_egl_surface->vblank->waiting_buffers,
2916                                                          wl_egl_buffer);
2917                 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
2918         }
2919
2920         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2921
2922         return TPL_FALSE;
2923 }
2924
2925 static void
2926 __thread_func_waiting_source_finalize(tpl_gsource *gsource)
2927 {
2928         TPL_IGNORE(gsource);
2929 }
2930
2931 static tpl_gsource_functions buffer_funcs = {
2932         .prepare = NULL,
2933         .check = NULL,
2934         .dispatch = __thread_func_waiting_source_dispatch,
2935         .finalize = __thread_func_waiting_source_finalize,
2936 };
2937
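/* Acquire all acquirable buffers from the tbm_queue (called in the wl_egl
 * thread). Each buffer is either committed right away, deferred until its
 * acquire fence signals (via a disposable gsource), or queued until the
 * next vblank. */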
2938 static tpl_result_t
2939 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface)
2940 {
2941         tbm_surface_h tbm_surface            = NULL;
2942         tbm_surface_queue_error_e tsq_err    = TBM_SURFACE_QUEUE_ERROR_NONE;
2943         tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
2944         tpl_wl_egl_buffer_t *wl_egl_buffer   = NULL;
2945         tpl_bool_t ready_to_commit           = TPL_FALSE;
2946
2947         while (tbm_surface_queue_can_acquire(wl_egl_surface->tbm_queue, 0)) {
2948                 tsq_err = tbm_surface_queue_acquire(wl_egl_surface->tbm_queue,
2949                                                                                         &tbm_surface);
2950                 if (!tbm_surface || tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2951                         TPL_ERR("Failed to acquire from tbm_queue(%p)",
2952                                         wl_egl_surface->tbm_queue);
2953                         return TPL_ERROR_INVALID_OPERATION;
2954                 }
2955
2956                 tbm_surface_internal_ref(tbm_surface);
2957
2958                 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2959                 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
2960                                                                            "wl_egl_buffer should not be NULL");
2961
2962                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2963
2964                 wl_egl_buffer->status = ACQUIRED;
2965
2966                 TPL_LOG_T("WL_EGL", "[ACQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2967                                   wl_egl_buffer, tbm_surface,
2968                                   _get_tbm_surface_bo_name(tbm_surface));
2969
2970                 if (wl_egl_buffer->acquire_fence_fd != -1) {
2971 #if TIZEN_FEATURE_ENABLE
2972                         if (wl_egl_display->use_explicit_sync)
2973                                 ready_to_commit = TPL_TRUE;
2974                         else
2975 #endif
2976                         {
2977                                 if (wl_egl_buffer->waiting_source) {
2978                                         tpl_gsource_destroy(wl_egl_buffer->waiting_source, TPL_FALSE);
2979                                         wl_egl_buffer->waiting_source = NULL;
2980                                 }
2981
2982                                 wl_egl_buffer->waiting_source =
2983                                         tpl_gsource_create(wl_egl_display->thread, wl_egl_buffer,
2984                                                                            wl_egl_buffer->acquire_fence_fd, &buffer_funcs,
2985                                                                            SOURCE_TYPE_DISPOSABLE);
2986                                 wl_egl_buffer->status = WAITING_SIGNALED;
2987
2988                                 TRACE_ASYNC_BEGIN(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2989                                                                   wl_egl_buffer->acquire_fence_fd);
2990
2991                                 ready_to_commit = TPL_FALSE;
2992                         }
2993                 } else {
2994                         ready_to_commit = TPL_TRUE;
2995                 }
2996
2997                 if (ready_to_commit) {
2998                         if (wl_egl_surface->vblank == NULL || wl_egl_surface->vblank_done)
2999                                 ready_to_commit = TPL_TRUE;
3000                         else {
3001                                 wl_egl_buffer->status = WAITING_VBLANK;
3002                                 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
3003                                 __tpl_list_push_back(wl_egl_surface->vblank->waiting_buffers, wl_egl_buffer);
3004                                 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
3005                                 ready_to_commit = TPL_FALSE;
3006                         }
3007                 }
3008
3009                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3010
3011                 if (ready_to_commit)
3012                         _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
3013         }
3014
3015         return TPL_ERROR_NONE;
3016 }
3017
3018 /* -- BEGIN -- tdm_client vblank callback function */
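/* Called in the wl_egl thread when the requested vblank arrives. Commits one
 * waiting buffer per vblank, or flushes the whole waiting list if the wait
 * ended with a tdm error such as TDM_ERROR_TIMEOUT. */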
3019 static void
3020 __cb_tdm_client_vblank(tdm_client_vblank *vblank, tdm_error error,
3021                                            unsigned int sequence, unsigned int tv_sec,
3022                                            unsigned int tv_usec, void *user_data)
3023 {
3024         tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)user_data;
3025         tpl_wl_egl_buffer_t *wl_egl_buffer   = NULL;
3026
3027         TRACE_ASYNC_END((intptr_t)wl_egl_surface, "WAIT_VBLANK");
3028         TPL_DEBUG("[VBLANK] wl_egl_surface(%p)", wl_egl_surface);
3029
3030         if (error == TDM_ERROR_TIMEOUT)
3031                 TPL_WARN("[TDM_ERROR_TIMEOUT] It will keep going. wl_egl_surface(%p)",
3032                                  wl_egl_surface);
3033
3034         wl_egl_surface->vblank_done = TPL_TRUE;
3035
3036         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
3037         if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
3038                 tpl_bool_t is_empty = TPL_TRUE;
3039                 do {
3040                         tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
3041                         wl_egl_buffer = (tpl_wl_egl_buffer_t *)__tpl_list_pop_front(
3042                                                                 wl_egl_surface->vblank->waiting_buffers,
3043                                                                 NULL);
3044                         is_empty = __tpl_list_is_empty(wl_egl_surface->vblank->waiting_buffers);
3045                         tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
3046
3047                         if (!wl_egl_buffer) break;
3048
3049                         _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
3050
3051                         /* If a tdm error such as TIMEOUT occurred,
3052                          * flush all vblank waiting buffers of this wl_egl_surface.
3053                          * Otherwise, only one wl_egl_buffer is committed per vblank event.
3054                          */
3055                         if (error == TDM_ERROR_NONE) break;
3056                 } while (!is_empty);
3057         }
3058         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
3059 }
3060 /* -- END -- tdm_client vblank callback function */
3061
3062 #if TIZEN_FEATURE_ENABLE
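/* zwp_linux_buffer_release_v1 'fenced_release' event: the compositor is done
 * with the buffer and provides a release fence fd. Store the fence, mark the
 * buffer RELEASED and give the tbm_surface back to the tbm_queue. */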
3063 static void
3064 __cb_buffer_fenced_release(void *data,
3065                                 struct zwp_linux_buffer_release_v1 *release, int32_t fence)
3066 {
3067         tpl_wl_egl_buffer_t *wl_egl_buffer  = (tpl_wl_egl_buffer_t *)data;
3068         tbm_surface_h tbm_surface           = NULL;
3069
3070         TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
3071
3072         tbm_surface = wl_egl_buffer->tbm_surface;
3073
3074         if (tbm_surface_internal_is_valid(tbm_surface)) {
3075                 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
3076
3077                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3078                 if (wl_egl_buffer->status == COMMITTED) {
3079                         tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3080
3081                         zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
3082                         wl_egl_buffer->buffer_release = NULL;
3083
3084                         wl_egl_buffer->release_fence_fd = fence;
3085                         wl_egl_buffer->status = RELEASED;
3086
3087                         TRACE_MARK("[FENCED_RELEASE] BO(%d) fence(%d)",
3088                                            _get_tbm_surface_bo_name(tbm_surface),
3089                                            fence);
3090                         TRACE_ASYNC_END((intptr_t)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3091                                                         _get_tbm_surface_bo_name(tbm_surface));
3092
3093                         TPL_LOG_T("WL_EGL",
3094                                           "[FENCED_RELEASE] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
3095                                           wl_egl_buffer, tbm_surface,
3096                                           _get_tbm_surface_bo_name(tbm_surface),
3097                                           fence);
3098
3099                         tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3100                                                                                                 tbm_surface);
3101                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3102                                 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
3103                 }
3104
3105                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3106
3107                 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
3108                         tbm_surface_internal_unref(tbm_surface);
3109
3110         } else {
3111                 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
3112         }
3113 }
3114
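/* zwp_linux_buffer_release_v1 'immediate_release' event: same as the fenced
 * release above, but the buffer can be reused right away (no fence). */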
3115 static void
3116 __cb_buffer_immediate_release(void *data,
3117                                                           struct zwp_linux_buffer_release_v1 *release)
3118 {
3119         tpl_wl_egl_buffer_t *wl_egl_buffer  = (tpl_wl_egl_buffer_t *)data;
3120         tbm_surface_h tbm_surface           = NULL;
3121
3122         TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
3123
3124         tbm_surface = wl_egl_buffer->tbm_surface;
3125
3126         if (tbm_surface_internal_is_valid(tbm_surface)) {
3127                 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
3128
3129                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3130                 if (wl_egl_buffer->status == COMMITTED) {
3131                         tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3132
3133                         zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
3134                         wl_egl_buffer->buffer_release = NULL;
3135
3136                         wl_egl_buffer->release_fence_fd = -1;
3137                         wl_egl_buffer->status = RELEASED;
3138
3139                         TRACE_MARK("[IMMEDIATE_RELEASE] BO(%d)",
3140                                            _get_tbm_surface_bo_name(tbm_surface));
3141                         TRACE_ASYNC_END((intptr_t)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3142                                                         _get_tbm_surface_bo_name(tbm_surface));
3143
3144                         TPL_LOG_T("WL_EGL",
3145                                           "[IMMEDIATE_RELEASE] wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
3146                                           wl_egl_buffer, tbm_surface,
3147                                           _get_tbm_surface_bo_name(tbm_surface));
3148
3149                         tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3150                                                                                                 tbm_surface);
3151                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3152                                 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
3153                 }
3154
3155                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3156
3157                 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
3158                         tbm_surface_internal_unref(tbm_surface);
3159
3160         } else {
3161                 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
3162         }
3163 }
3164
3165 static const struct zwp_linux_buffer_release_v1_listener zwp_release_listner = {
3166         __cb_buffer_fenced_release,
3167         __cb_buffer_immediate_release,
3168 };
3169 #endif
3170
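/* wl_buffer 'release' event handler, used when explicit sync is not in use
 * (or no acquire fence was attached). Releases the tbm_surface back to the
 * queue and marks the buffer RELEASED. */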
3171 static void
3172 __cb_wl_buffer_release(void *data, struct wl_proxy *wl_buffer)
3173 {
3174         tpl_wl_egl_buffer_t *wl_egl_buffer = (tpl_wl_egl_buffer_t *)data;
3175         tbm_surface_h tbm_surface = NULL;
3176
3177         TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer)
3178
3179         tbm_surface = wl_egl_buffer->tbm_surface;
3180
3181         if (tbm_surface_internal_is_valid(tbm_surface)) {
3182                 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3183                 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
3184
3185                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3186
3187                 if (wl_egl_buffer->status == COMMITTED) {
3188
3189                         tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3190                                                                                                 tbm_surface);
3191                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3192                                 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
3193
3194                         wl_egl_buffer->status = RELEASED;
3195
3196                         TRACE_MARK("[RELEASE] BO(%d)", _get_tbm_surface_bo_name(tbm_surface));
3197                         TRACE_ASYNC_END((intptr_t)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3198                                                         _get_tbm_surface_bo_name(tbm_surface));
3199
3200                         TPL_LOG_T("WL_EGL", "[REL] wl_buffer(%p) tbm_surface(%p) bo(%d)",
3201                                           wl_egl_buffer->wl_buffer, tbm_surface,
3202                                           _get_tbm_surface_bo_name(tbm_surface));
3203                 }
3204
3205                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3206
3207                 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
3208                         tbm_surface_internal_unref(tbm_surface);
3209         } else {
3210                 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
3211         }
3212 }
3213
3214 static const struct wl_buffer_listener wl_buffer_release_listener = {
3215         (void *)__cb_wl_buffer_release,
3216 };
3217 #if TIZEN_FEATURE_ENABLE
3218 static void
3219 __cb_presentation_feedback_sync_output(void *data,
3220                         struct wp_presentation_feedback *presentation_feedback,
3221                         struct wl_output *output)
3222 {
3223         TPL_IGNORE(data);
3224         TPL_IGNORE(presentation_feedback);
3225         TPL_IGNORE(output);
3226         /* Nothing to do */
3227 }
3228
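/* wp_presentation 'presented' feedback: signal the pending presentation sync
 * eventfd, then destroy the feedback object and its bookkeeping entry. */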
3229 static void
3230 __cb_presentation_feedback_presented(void *data,
3231                         struct wp_presentation_feedback *presentation_feedback,
3232                         uint32_t tv_sec_hi,
3233                         uint32_t tv_sec_lo,
3234                         uint32_t tv_nsec,
3235                         uint32_t refresh_nsec,
3236                         uint32_t seq_hi,
3237                         uint32_t seq_lo,
3238                         uint32_t flags)
3239 {
3240         TPL_IGNORE(tv_sec_hi);
3241         TPL_IGNORE(tv_sec_lo);
3242         TPL_IGNORE(tv_nsec);
3243         TPL_IGNORE(refresh_nsec);
3244         TPL_IGNORE(seq_hi);
3245         TPL_IGNORE(seq_lo);
3246         TPL_IGNORE(flags);
3247
3248         struct pst_feedback *pst_feedback       = (struct pst_feedback *)data;
3249         tpl_wl_egl_surface_t *wl_egl_surface    = pst_feedback->wl_egl_surface;
3250
3251         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3252
3253         TPL_DEBUG("[FEEDBACK][PRESENTED] pst_feedback(%p) presentation_feedback(%p) bo(%d)",
3254                           pst_feedback, presentation_feedback, pst_feedback->bo_name);
3255
3256         if (pst_feedback->pst_sync_fd != -1) {
3257                 int ret = _write_to_eventfd(pst_feedback->pst_sync_fd);
3258                 if (ret == -1) {
3259                         TPL_ERR("Failed to send presentation_sync signal to fd(%d)",
3260                                         pst_feedback->pst_sync_fd);
3261                 }
3262
3263                 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
3264                                                 "[PRESENTATION_SYNC] bo(%d)",
3265                                                 pst_feedback->bo_name);
3266
3267                 close(pst_feedback->pst_sync_fd);
3268                 pst_feedback->pst_sync_fd = -1;
3269         }
3270
3271         wp_presentation_feedback_destroy(presentation_feedback);
3272
3273         pst_feedback->presentation_feedback = NULL;
3274         pst_feedback->wl_egl_surface        = NULL;
3275         pst_feedback->bo_name               = 0;
3276
3277         __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
3278                                                    TPL_FIRST, NULL);
3279
3280         free(pst_feedback);
3281
3282         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
3283 }
3284
3285 static void
3286 __cb_presentation_feedback_discarded(void *data,
3287                         struct wp_presentation_feedback *presentation_feedback)
3288 {
3289         struct pst_feedback *pst_feedback       = (struct pst_feedback *)data;
3290         tpl_wl_egl_surface_t *wl_egl_surface    = pst_feedback->wl_egl_surface;
3291
3292         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3293
3294         TPL_DEBUG("[FEEDBACK][DISCARDED] pst_feedback(%p) presentation_feedback(%p) bo(%d)",
3295                           pst_feedback, presentation_feedback, pst_feedback->bo_name);
3296
3297         if (pst_feedback->pst_sync_fd != -1) {
3298                 int ret = _write_to_eventfd(pst_feedback->pst_sync_fd);
3299                 if (ret == -1) {
3300                         TPL_ERR("Failed to send presentation_sync signal to fd(%d)",
3301                                         pst_feedback->pst_sync_fd);
3302                 }
3303
3304                 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
3305                                                 "[PRESENTATION_SYNC] bo(%d)",
3306                                                 pst_feedback->bo_name);
3307
3308                 close(pst_feedback->pst_sync_fd);
3309                 pst_feedback->pst_sync_fd = -1;
3310         }
3311
3312         wp_presentation_feedback_destroy(presentation_feedback);
3313
3314         pst_feedback->presentation_feedback = NULL;
3315         pst_feedback->wl_egl_surface        = NULL;
3316         pst_feedback->bo_name               = 0;
3317
3318         __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
3319                                                    TPL_FIRST, NULL);
3320
3321         free(pst_feedback);
3322
3323         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
3324 }
3325
3326 static const struct wp_presentation_feedback_listener feedback_listener = {
3327         __cb_presentation_feedback_sync_output, /* sync_output feedback */
3328         __cb_presentation_feedback_presented,
3329         __cb_presentation_feedback_discarded
3330 };
3331 #endif
3332
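/* Request a vblank event from tdm_client for this surface. On success,
 * vblank_done is cleared until __cb_tdm_client_vblank() runs. */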
3333 static tpl_result_t
3334 _thread_surface_vblank_wait(tpl_wl_egl_surface_t *wl_egl_surface)
3335 {
3336         tdm_error tdm_err                       = TDM_ERROR_NONE;
3337         tpl_surface_vblank_t *vblank            = wl_egl_surface->vblank;
3338
3339         tdm_err = tdm_client_vblank_wait(vblank->tdm_vblank,
3340                         wl_egl_surface->post_interval,
3341                         __cb_tdm_client_vblank,
3342                         (void *)wl_egl_surface);
3343
3344         if (tdm_err == TDM_ERROR_NONE) {
3345                 wl_egl_surface->vblank_done = TPL_FALSE;
3346                 TRACE_ASYNC_BEGIN((intptr_t)wl_egl_surface, "WAIT_VBLANK");
3347         } else {
3348                 TPL_ERR("Failed to tdm_client_vblank_wait. tdm_err(%d)", tdm_err);
3349                 return TPL_ERROR_INVALID_OPERATION;
3350         }
3351
3352         return TPL_ERROR_NONE;
3353 }
3354
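/* Final stage of a frame: create the wl_buffer on first use, set transform and
 * buffer serial, attach, post damage, set the acquire fence / release listener
 * (explicit sync), commit the wl_surface and flush the display. Afterwards the
 * buffer is marked COMMITTED and any pending commit_sync_fd is signaled. */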
3355 static void
3356 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
3357                                                   tpl_wl_egl_buffer_t *wl_egl_buffer)
3358 {
3359         tpl_wl_egl_display_t *wl_egl_display    = wl_egl_surface->wl_egl_display;
3360         struct wl_surface *wl_surface           = wl_egl_surface->wl_surface;
3361         struct wl_egl_window *wl_egl_window     = wl_egl_surface->wl_egl_window;
3362         uint32_t version;
3363
3364         TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
3365                                                                    "wl_egl_buffer should not be NULL");
3366
3367         if (wl_egl_buffer->wl_buffer == NULL) {
3368                 wl_egl_buffer->wl_buffer =
3369                         (struct wl_proxy *)wayland_tbm_client_create_buffer(
3370                                                 wl_egl_display->wl_tbm_client,
3371                                                 wl_egl_buffer->tbm_surface);
3372
3373                 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer->wl_buffer != NULL,
3374                                                                            "[FATAL] Failed to create wl_buffer");
3375
3376                 TPL_INFO("[WL_BUFFER_CREATE]",
3377                                  "wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p)",
3378                                  wl_egl_buffer, wl_egl_buffer->wl_buffer,
3379                                  wl_egl_buffer->tbm_surface);
3380
3381 #if TIZEN_FEATURE_ENABLE
3382                 if (!wl_egl_display->use_explicit_sync ||
3383                         wl_egl_buffer->acquire_fence_fd == -1)
3384 #endif
3385                 {
3386                         wl_buffer_add_listener((struct wl_buffer *)wl_egl_buffer->wl_buffer,
3387                                                                    &wl_buffer_release_listener,
3388                                                                    wl_egl_buffer);
3389                 }
3390         }
3391
3392         version = wl_proxy_get_version((struct wl_proxy *)wl_surface);
3393
3394 #if TIZEN_FEATURE_ENABLE
3395         /* create presentation feedback and add listener */
3396         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3397         if (wl_egl_display->presentation && wl_egl_buffer->presentation_sync_fd != -1) {
3398
3399                 struct pst_feedback *pst_feedback = NULL;
3400                 pst_feedback = (struct pst_feedback *) calloc(1, sizeof(struct pst_feedback));
3401                 if (pst_feedback) {
3402                         pst_feedback->presentation_feedback =
3403                                 wp_presentation_feedback(wl_egl_display->presentation,
3404                                                                                  wl_surface);
3405
3406                         pst_feedback->wl_egl_surface        = wl_egl_surface;
3407                         pst_feedback->bo_name               = wl_egl_buffer->bo_name;
3408
3409                         pst_feedback->pst_sync_fd           = wl_egl_buffer->presentation_sync_fd;
3410                         wl_egl_buffer->presentation_sync_fd = -1;
3411
3412                         wp_presentation_feedback_add_listener(pst_feedback->presentation_feedback,
3413                                                                                                   &feedback_listener, pst_feedback);
3414                         __tpl_list_push_back(wl_egl_surface->presentation_feedbacks, pst_feedback);
3415                         TRACE_ASYNC_BEGIN(pst_feedback->pst_sync_fd,
3416                                                           "[PRESENTATION_SYNC] bo(%d)",
3417                                                           pst_feedback->bo_name);
3418                 } else {
3419                         TPL_ERR("Failed to create presentation feedback. wl_egl_buffer(%p)",
3420                                         wl_egl_buffer);
3421                         _write_to_eventfd(wl_egl_buffer->presentation_sync_fd);
3422                         close(wl_egl_buffer->presentation_sync_fd);
3423                         wl_egl_buffer->presentation_sync_fd = -1;
3424                 }
3425         }
3426         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
3427 #endif
3428
3429         if (wl_egl_buffer->w_rotated == TPL_TRUE) {
3430                 if (version > 1) {
3431                         wayland_tbm_client_set_buffer_transform(
3432                                         wl_egl_display->wl_tbm_client,
3433                                         (void *)wl_egl_buffer->wl_buffer,
3434                                         wl_egl_buffer->w_transform);
3435                         TPL_INFO("[W_TRANSFORM]",
3436                                          "wl_egl_surface(%p) wl_egl_buffer(%p) w_transform(%d)",
3437                                          wl_egl_surface, wl_egl_buffer, wl_egl_buffer->w_transform);
3438                 }
3439                 wl_egl_buffer->w_rotated = TPL_FALSE;
3440         }
3441
3442         if (wl_egl_surface->latest_transform != wl_egl_buffer->transform) {
3443                 if (version > 1) {
3444                         wl_surface_set_buffer_transform(wl_surface, wl_egl_buffer->transform);
3445                         TPL_INFO("[TRANSFORM]",
3446                                          "wl_egl_surface(%p) wl_egl_buffer(%p) transform(%d -> %d)",
3447                                          wl_egl_surface, wl_egl_buffer,
3448                                          wl_egl_surface->latest_transform, wl_egl_buffer->transform);
3449                 }
3450                 wl_egl_surface->latest_transform = wl_egl_buffer->transform;
3451         }
3452
3453         if (wl_egl_window) {
3454                 wl_egl_window->attached_width = wl_egl_buffer->width;
3455                 wl_egl_window->attached_height = wl_egl_buffer->height;
3456         }
3457
3458         wl_surface_attach(wl_surface, (void *)wl_egl_buffer->wl_buffer,
3459                                           wl_egl_buffer->dx, wl_egl_buffer->dy);
3460
3461         if (wl_egl_buffer->num_rects < 1 || wl_egl_buffer->rects == NULL) {
3462                 if (version < 4) {
3463                         wl_surface_damage(wl_surface,
3464                                                           wl_egl_buffer->dx, wl_egl_buffer->dy,
3465                                                           wl_egl_buffer->width, wl_egl_buffer->height);
3466                 } else {
3467                         wl_surface_damage_buffer(wl_surface,
3468                                                                          0, 0,
3469                                                                          wl_egl_buffer->width, wl_egl_buffer->height);
3470                 }
3471         } else {
3472                 int i;
3473                 for (i = 0; i < wl_egl_buffer->num_rects; i++) {
3474                         int inverted_y =
3475                                 wl_egl_buffer->height - (wl_egl_buffer->rects[i * 4 + 1] +
3476                                                 wl_egl_buffer->rects[i * 4 + 3]);
3477                         if (version < 4) {
3478                                 wl_surface_damage(wl_surface,
3479                                                                   wl_egl_buffer->rects[i * 4 + 0],
3480                                                                   inverted_y,
3481                                                                   wl_egl_buffer->rects[i * 4 + 2],
3482                                                                   wl_egl_buffer->rects[i * 4 + 3]);
3483                         } else {
3484                                 wl_surface_damage_buffer(wl_surface,
3485                                                                                  wl_egl_buffer->rects[i * 4 + 0],
3486                                                                                  inverted_y,
3487                                                                                  wl_egl_buffer->rects[i * 4 + 2],
3488                                                                                  wl_egl_buffer->rects[i * 4 + 3]);
3489                         }
3490                 }
3491         }
3492
3493         wayland_tbm_client_set_buffer_serial(wl_egl_display->wl_tbm_client,
3494                                                 (void *)wl_egl_buffer->wl_buffer,
3495                                                 wl_egl_buffer->serial);
3496 #if TIZEN_FEATURE_ENABLE
3497         if (wl_egl_display->use_explicit_sync &&
3498                 wl_egl_buffer->acquire_fence_fd != -1) {
3499
3500                 zwp_linux_surface_synchronization_v1_set_acquire_fence(wl_egl_surface->surface_sync,
3501                                                                                                                            wl_egl_buffer->acquire_fence_fd);
3502                 TPL_DEBUG("[SET_ACQUIRE_FENCE] wl_egl_surface(%p) tbm_surface(%p) acquire_fence(%d)",
3503                                   wl_egl_surface, wl_egl_buffer->tbm_surface, wl_egl_buffer->acquire_fence_fd);
3504                 close(wl_egl_buffer->acquire_fence_fd);
3505                 wl_egl_buffer->acquire_fence_fd = -1;
3506
3507                 wl_egl_buffer->buffer_release =
3508                         zwp_linux_surface_synchronization_v1_get_release(wl_egl_surface->surface_sync);
3509                 if (!wl_egl_buffer->buffer_release) {
3510                         TPL_ERR("Failed to get buffer_release. wl_egl_surface(%p)", wl_egl_surface);
3511                 } else {
3512                         zwp_linux_buffer_release_v1_add_listener(
3513                                 wl_egl_buffer->buffer_release, &zwp_release_listner, wl_egl_buffer);
3514                         TPL_DEBUG("add explicit_sync_release_listener.");
3515                 }
3516         }
3517 #endif
3518
3519         wl_surface_commit(wl_surface);
3520
3521         wl_display_flush(wl_egl_display->wl_display);
3522
3523         TRACE_ASYNC_BEGIN((intptr_t)wl_egl_buffer->tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3524                                           wl_egl_buffer->bo_name);
3525
3526         tpl_gmutex_lock(&wl_egl_buffer->mutex);
3527
3528         wl_egl_buffer->need_to_commit   = TPL_FALSE;
3529         wl_egl_buffer->status           = COMMITTED;
3530         if (wl_egl_surface->last_enq_buffer == wl_egl_buffer->tbm_surface)
3531                 wl_egl_surface->last_enq_buffer = NULL;
3532
3533         tpl_gcond_signal(&wl_egl_buffer->cond);
3534
3535         tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3536
3537         TPL_LOG_T("WL_EGL",
3538                           "[COMMIT] wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p) bo(%d)",
3539                           wl_egl_buffer, wl_egl_buffer->wl_buffer, wl_egl_buffer->tbm_surface,
3540                           wl_egl_buffer->bo_name);
3541
3542         if (wl_egl_surface->vblank != NULL &&
3543                 _thread_surface_vblank_wait(wl_egl_surface) != TPL_ERROR_NONE)
3544                 TPL_ERR("Failed to set wait vblank.");
3545
3546         tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
3547
3548         if (wl_egl_buffer->commit_sync_fd != -1) {
3549                 int ret = _write_to_eventfd(wl_egl_buffer->commit_sync_fd);
3550                 if (ret == -1) {
3551                         TPL_ERR("Failed to send commit_sync signal to fd(%d)", wl_egl_buffer->commit_sync_fd);
3552                 }
3553
3554                 TRACE_ASYNC_END(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
3555                                                 wl_egl_buffer->bo_name);
3556                 TPL_DEBUG("[COMMIT_SYNC][SEND] wl_egl_surface(%p) commit_sync_fd(%d)",
3557                                   wl_egl_surface, wl_egl_buffer->commit_sync_fd);
3558
3559                 close(wl_egl_buffer->commit_sync_fd);
3560                 wl_egl_buffer->commit_sync_fd = -1;
3561         }
3562
3563         tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
3564 }
3565
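/* Write a single counter increment to an eventfd so that a waiter on the
 * commit/presentation sync fd wakes up. Returns the result of write(), or -1
 * for an invalid fd.
 *
 * A consumer of such a sync fd (outside this file, sketch only) would
 * typically read the 8-byte counter, e.g.:
 *     uint64_t count;
 *     if (read(sync_fd, &count, sizeof(count)) == sizeof(count))
 *             close(sync_fd);
 */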
3566 static int
3567 _write_to_eventfd(int eventfd)
3568 {
3569         uint64_t value = 1;
3570         int ret;
3571
3572         if (eventfd == -1) {
3573                 TPL_ERR("Invalid fd(-1)");
3574                 return -1;
3575         }
3576
3577         ret = write(eventfd, &value, sizeof(uint64_t));
3578         if (ret == -1) {
3579                 TPL_ERR("Failed to write to fd(%d)", eventfd);
3580                 return ret;
3581         }
3582
3583         return ret;
3584 }
3585
void
__tpl_display_init_backend_wl_egl_thread(tpl_display_backend_t *backend)
{
        TPL_ASSERT(backend);

        backend->type = TPL_BACKEND_WAYLAND_THREAD;
        backend->data = NULL;

        backend->init = __tpl_wl_egl_display_init;
        backend->fini = __tpl_wl_egl_display_fini;
        backend->query_config = __tpl_wl_egl_display_query_config;
        backend->filter_config = __tpl_wl_egl_display_filter_config;
        backend->get_window_info = __tpl_wl_egl_display_get_window_info;
        backend->get_pixmap_info = __tpl_wl_egl_display_get_pixmap_info;
        backend->get_buffer_from_native_pixmap =
                __tpl_wl_egl_display_get_buffer_from_native_pixmap;
}

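/* Register the wl_egl_thread surface backend by filling in the
 * surface backend function table (init/fini, validate, dequeue/enqueue,
 * rotation capability, post interval and size queries). */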
void
__tpl_surface_init_backend_wl_egl_thread(tpl_surface_backend_t *backend)
{
        TPL_ASSERT(backend);

        backend->type = TPL_BACKEND_WAYLAND_THREAD;
        backend->data = NULL;

        backend->init = __tpl_wl_egl_surface_init;
        backend->fini = __tpl_wl_egl_surface_fini;
        backend->validate = __tpl_wl_egl_surface_validate;
        backend->cancel_dequeued_buffer =
                __tpl_wl_egl_surface_cancel_buffer;
        backend->dequeue_buffer = __tpl_wl_egl_surface_dequeue_buffer;
        backend->enqueue_buffer = __tpl_wl_egl_surface_enqueue_buffer;
        backend->set_rotation_capability =
                __tpl_wl_egl_surface_set_rotation_capability;
        backend->set_post_interval =
                __tpl_wl_egl_surface_set_post_interval;
        backend->get_size =
                __tpl_wl_egl_surface_get_size;
}

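/* Final cleanup callback for a wl_egl_buffer.
 * Detaches the buffer from the surface's buffer array and vblank waiting list,
 * destroys its wl_buffer, signals and closes any pending sync fds,
 * releases per-buffer resources and finally frees the wl_egl_buffer itself. */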
static void
__cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer)
{
        tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
        tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;

        TPL_INFO("[BUFFER_FREE]", "wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p)",
                 wl_egl_buffer, wl_egl_buffer->wl_buffer, wl_egl_buffer->tbm_surface);

        tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
        if (wl_egl_buffer->idx >= 0 && wl_egl_surface->buffers[wl_egl_buffer->idx]) {
                wl_egl_surface->buffers[wl_egl_buffer->idx] = NULL;
                wl_egl_surface->buffer_cnt--;

                wl_egl_buffer->idx = -1;
        }
        tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);

        if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
                tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
                __tpl_list_remove_data(wl_egl_surface->vblank->waiting_buffers,
                                       (void *)wl_egl_buffer,
                                       TPL_FIRST,
                                       NULL);
                tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
        }

        if (wl_egl_display) {
                if (wl_egl_buffer->wl_buffer) {
                        wayland_tbm_client_destroy_buffer(wl_egl_display->wl_tbm_client,
                                                          (void *)wl_egl_buffer->wl_buffer);
                        wl_egl_buffer->wl_buffer = NULL;
                }

                wl_display_flush(wl_egl_display->wl_display);
        }

        tpl_gmutex_lock(&wl_egl_buffer->mutex);
#if TIZEN_FEATURE_ENABLE
        if (wl_egl_buffer->buffer_release) {
                zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
                wl_egl_buffer->buffer_release = NULL;
        }

        if (wl_egl_buffer->release_fence_fd != -1) {
                close(wl_egl_buffer->release_fence_fd);
                wl_egl_buffer->release_fence_fd = -1;
        }
#endif

        if (wl_egl_buffer->waiting_source) {
                tpl_gsource_destroy(wl_egl_buffer->waiting_source, TPL_FALSE);
                wl_egl_buffer->waiting_source = NULL;
        }

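        /* Signal any still-pending commit_sync / presentation_sync eventfds
         * before closing them so that threads waiting on these fds are not
         * left blocked after the buffer is freed. */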
        if (wl_egl_buffer->commit_sync_fd != -1) {
                int ret = _write_to_eventfd(wl_egl_buffer->commit_sync_fd);
                if (ret == -1)
                        TPL_ERR("Failed to send commit_sync signal to fd(%d)",
                                wl_egl_buffer->commit_sync_fd);
                close(wl_egl_buffer->commit_sync_fd);
                wl_egl_buffer->commit_sync_fd = -1;
        }

        if (wl_egl_buffer->presentation_sync_fd != -1) {
                int ret = _write_to_eventfd(wl_egl_buffer->presentation_sync_fd);
                if (ret == -1)
                        TPL_ERR("Failed to send presentation_sync signal to fd(%d)",
                                wl_egl_buffer->presentation_sync_fd);
                close(wl_egl_buffer->presentation_sync_fd);
                wl_egl_buffer->presentation_sync_fd = -1;
        }

        if (wl_egl_buffer->rects) {
                free(wl_egl_buffer->rects);
                wl_egl_buffer->rects = NULL;
                wl_egl_buffer->num_rects = 0;
        }

        wl_egl_buffer->tbm_surface = NULL;
        wl_egl_buffer->bo_name = -1;
        wl_egl_buffer->status = RELEASED;

        tpl_gmutex_unlock(&wl_egl_buffer->mutex);
        tpl_gmutex_clear(&wl_egl_buffer->mutex);
        tpl_gcond_clear(&wl_egl_buffer->cond);
        free(wl_egl_buffer);
}

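/* Return the exported name of the first bo of the given tbm_surface,
 * used as a human-readable buffer identifier in logs and traces. */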
static int
_get_tbm_surface_bo_name(tbm_surface_h tbm_surface)
{
        return tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0));
}

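/* Dump the surface's buffer array (index, pointers, bo name and status)
 * to the log, for debugging buffer lifecycle issues. */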
static void
_print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface)
{
        int idx = 0;

        tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
        TPL_INFO("[BUFFERS_INFO]", "wl_egl_surface(%p) buffer_cnt(%d)",
                 wl_egl_surface, wl_egl_surface->buffer_cnt);
        for (idx = 0; idx < BUFFER_ARRAY_SIZE; idx++) {
                tpl_wl_egl_buffer_t *wl_egl_buffer = wl_egl_surface->buffers[idx];
                if (wl_egl_buffer) {
                        TPL_INFO("[INFO]",
                                 "INDEX[%d] | wl_egl_buffer(%p) tbm_surface(%p) bo(%d) | status(%s)",
                                 idx, wl_egl_buffer, wl_egl_buffer->tbm_surface,
                                 wl_egl_buffer->bo_name,
                                 status_to_string[wl_egl_buffer->status]);
                }
        }
        tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
}

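/* Check whether the given tbm_surface is currently tracked in the
 * wl_egl_surface's buffer array. Returns TPL_TRUE if found; otherwise
 * logs an error and returns TPL_FALSE. NULL arguments return TPL_FALSE
 * silently. */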
static tpl_bool_t
_check_buffer_validate(tpl_wl_egl_surface_t *wl_egl_surface, tbm_surface_h tbm_surface)
{
        int idx = 0;
        tpl_bool_t ret = TPL_FALSE;

        /* silent return */
        if (!wl_egl_surface || !tbm_surface)
                return ret;

        tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
        for (idx = 0; idx < BUFFER_ARRAY_SIZE; idx++) {
                tpl_wl_egl_buffer_t *wl_egl_buffer = wl_egl_surface->buffers[idx];
                if (wl_egl_buffer && wl_egl_buffer->tbm_surface == tbm_surface) {
                        ret = TPL_TRUE;
                        break;
                }
        }

        if (ret == TPL_FALSE) {
                TPL_ERR("tbm_surface(%p) is not owned by wl_egl_surface(%p)",
                        tbm_surface, wl_egl_surface);
        }
        tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);

        return ret;
}