1 #define inline __inline__
5 #include "tpl_internal.h"
11 #include <tbm_bufmgr.h>
12 #include <tbm_surface.h>
13 #include <tbm_surface_internal.h>
14 #include <tbm_surface_queue.h>
16 #include "tpl_wayland_egl_thread.h"
18 /* In wayland, the application and the compositor each create their own drawing buffers. The recommended queue size is more than 2. */
19 #define CLIENT_QUEUE_SIZE 3
/* Forward typedefs for the backend-private per-display and per-surface
 * state structs defined below. */
21 typedef struct _tpl_wayland_egl_display tpl_wayland_egl_display_t;
22 typedef struct _tpl_wayland_egl_surface tpl_wayland_egl_surface_t;
/* Backend-private state for a wayland-egl display.
 * Owns the dedicated worker thread and the display handle registered
 * with it; both are created in __tpl_wl_egl_display_init() and torn
 * down in __tpl_wl_egl_display_fini(). */
24 struct _tpl_wayland_egl_display {
/* Event/worker thread created via twe_thread_create(). */
25 twe_thread *wl_egl_thread;
/* Handle returned by twe_display_add(); removed with twe_display_del(). */
26 twe_display_h twe_display;
/* Backend-private state for a wayland-egl window surface.
 * Populated by __tpl_wl_egl_surface_init() and cleared by
 * __tpl_wl_egl_surface_fini(). */
29 struct _tpl_wayland_egl_surface {
/* Handle returned by twe_surface_add(); owns the tbm_queue below. */
31 twe_surface_h twe_surface;
/* Buffer queue obtained from twe_surface_get_tbm_queue(); not owned
 * separately — it is reset to NULL together with twe_surface. */
32 tbm_surface_queue_h tbm_queue;
/* Last observed activate state (non-composite mode when TRUE). */
33 tpl_bool_t is_activated;
34 tpl_bool_t reset; /* TRUE if the queue was reset externally */
/* When FALSE (frontbuffer mode), enqueue_buffer skips the actual
 * tbm_surface_queue_enqueue for the already-displayed frontbuffer. */
35 tpl_bool_t need_to_enqueue;
/* Backend display init: allocates the per-display backend state,
 * spawns the wayland-egl worker thread and registers the native
 * wl_display with it.
 * Returns TPL_ERROR_NONE on success; on failure returns
 * TPL_ERROR_INVALID_PARAMETER / TPL_ERROR_OUT_OF_MEMORY /
 * TPL_ERROR_INVALID_OPERATION and leaves display->backend.data NULL. */
39 __tpl_wl_egl_display_init(tpl_display_t *display)
41 tpl_wayland_egl_display_t *wayland_egl_display = NULL;
45 /* Do not allow default display in wayland. */
46 if (!display->native_handle) {
47 TPL_ERR("Invalid native handle for display.");
48 return TPL_ERROR_INVALID_PARAMETER;
/* calloc so both handles start out NULL — the error path below relies
 * on that to decide what to unwind. */
51 wayland_egl_display = (tpl_wayland_egl_display_t *) calloc(1,
52 sizeof(tpl_wayland_egl_display_t));
53 if (!wayland_egl_display) {
54 TPL_ERR("Failed to allocate memory for new tpl_wayland_egl_display_t.");
55 return TPL_ERROR_OUT_OF_MEMORY;
58 display->backend.data = wayland_egl_display;
/* This backend does not use a bufmgr fd directly. */
59 display->bufmgr_fd = -1;
/* Only a genuine wl_display native handle is accepted. */
61 if (twe_check_native_handle_is_wl_display(display->native_handle)) {
62 wayland_egl_display->wl_egl_thread = twe_thread_create();
63 if (!wayland_egl_display->wl_egl_thread) {
64 TPL_ERR("Failed to create twe_thread.");
68 wayland_egl_display->twe_display =
69 twe_display_add(wayland_egl_display->wl_egl_thread,
70 display->native_handle,
71 display->backend.type);
72 if (!wayland_egl_display->twe_display) {
73 TPL_ERR("Failed to add native_display(%p) to thread(%p)",
74 display->native_handle,
75 wayland_egl_display->wl_egl_thread);
80 TPL_ERR("Invalid native handle for display.");
85 "[INIT DISPLAY] wayland_egl_display(%p) twe_thread(%p) twe_display(%p)",
87 wayland_egl_display->wl_egl_thread,
88 wayland_egl_display->twe_display);
90 return TPL_ERROR_NONE;
/* Error-unwind path (presumably reached via goto labels elided from
 * this view): tear down whatever was created, in reverse order. */
93 if (wayland_egl_display->twe_display)
94 twe_display_del(wayland_egl_display->twe_display);
95 if (wayland_egl_display->wl_egl_thread)
96 twe_thread_destroy(wayland_egl_display->wl_egl_thread);
97 wayland_egl_display->wl_egl_thread = NULL;
98 wayland_egl_display->twe_display = NULL;
100 free(wayland_egl_display);
101 display->backend.data = NULL;
102 return TPL_ERROR_INVALID_OPERATION;
/* Backend display fini: mirror of __tpl_wl_egl_display_init().
 * Deletes the twe_display from the worker thread, destroys the thread,
 * frees the backend state and clears display->backend.data.
 * Safe to call when backend.data is already NULL. */
106 __tpl_wl_egl_display_fini(tpl_display_t *display)
108 tpl_wayland_egl_display_t *wayland_egl_display;
112 wayland_egl_display = (tpl_wayland_egl_display_t *)display->backend.data;
113 if (wayland_egl_display) {
116 "[FINI] wayland_egl_display(%p) twe_thread(%p) twe_display(%p)",
118 wayland_egl_display->wl_egl_thread,
119 wayland_egl_display->twe_display);
121 if (wayland_egl_display->twe_display) {
122 tpl_result_t ret = TPL_ERROR_NONE;
123 ret = twe_display_del(wayland_egl_display->twe_display);
/* Deletion failure is logged but fini continues — the thread is
 * still destroyed and the memory freed. */
124 if (ret != TPL_ERROR_NONE)
125 TPL_ERR("Failed to delete twe_display(%p) from twe_thread(%p)",
126 wayland_egl_display->twe_display,
127 wayland_egl_display->wl_egl_thread);
128 wayland_egl_display->twe_display = NULL;
131 if (wayland_egl_display->wl_egl_thread) {
132 twe_thread_destroy(wayland_egl_display->wl_egl_thread);
133 wayland_egl_display->wl_egl_thread = NULL;
136 free(wayland_egl_display);
139 display->backend.data = NULL;
/* Reports whether a requested EGL-style config is supported.
 * Only window surfaces with 8/8/8 RGB at 24- or 32-bit depth are
 * accepted; alpha 8 maps to ARGB8888 and alpha 0 to XRGB8888.
 * Output pointers (native_visual_id, is_slow) are optional. */
143 __tpl_wl_egl_display_query_config(tpl_display_t *display,
144 tpl_surface_type_t surface_type,
145 int red_size, int green_size,
146 int blue_size, int alpha_size,
147 int color_depth, int *native_visual_id,
152 if (surface_type == TPL_SURFACE_TYPE_WINDOW && red_size == 8 &&
153 green_size == 8 && blue_size == 8 &&
154 (color_depth == 32 || color_depth == 24)) {
156 if (alpha_size == 8) {
157 if (native_visual_id) *native_visual_id = TBM_FORMAT_ARGB8888;
158 if (is_slow) *is_slow = TPL_FALSE;
159 return TPL_ERROR_NONE;
161 if (alpha_size == 0) {
162 if (native_visual_id) *native_visual_id = TBM_FORMAT_XRGB8888;
163 if (is_slow) *is_slow = TPL_FALSE;
164 return TPL_ERROR_NONE;
/* Anything else is not a supported config. */
168 return TPL_ERROR_INVALID_PARAMETER;
/* Config filter hook: this backend performs no filtering, so the
 * arguments are deliberately ignored and success is returned. */
172 __tpl_wl_egl_display_filter_config(tpl_display_t *display, int *visual_id,
176 TPL_IGNORE(visual_id);
177 TPL_IGNORE(alpha_size);
178 return TPL_ERROR_NONE;
/* Queries size/format info of a native window by delegating to
 * twe_get_native_window_info(); width/height/format are out-params.
 * NOTE(review): the `depth` parameter is not forwarded — presumably
 * intentional, as only a_size is needed to pick the format. */
182 __tpl_wl_egl_display_get_window_info(tpl_display_t *display,
183 tpl_handle_t window, int *width,
184 int *height, tbm_format *format,
185 int depth, int a_size)
187 tpl_result_t ret = TPL_ERROR_NONE;
192 if ((ret = twe_get_native_window_info(window, width, height, format, a_size))
194 TPL_ERR("Failed to get size info of native_window(%p)", window);
/* Queries size/format of a native pixmap via its backing tbm_surface.
 * Returns TPL_ERROR_INVALID_OPERATION when no tbm_surface can be
 * resolved from the pixmap; out-params are optional. */
201 __tpl_wl_egl_display_get_pixmap_info(tpl_display_t *display,
202 tpl_handle_t pixmap, int *width,
203 int *height, tbm_format *format)
205 tbm_surface_h tbm_surface = NULL;
207 tbm_surface = twe_get_native_buffer_from_pixmap(pixmap);
209 TPL_ERR("Failed to get tbm_surface_h from native pixmap.");
210 return TPL_ERROR_INVALID_OPERATION;
213 if (width) *width = tbm_surface_get_width(tbm_surface);
214 if (height) *height = tbm_surface_get_height(tbm_surface);
215 if (format) *format = tbm_surface_get_format(tbm_surface);
217 return TPL_ERROR_NONE;
/* Resolves the tbm_surface backing a native pixmap.
 * Thin wrapper over twe_get_native_buffer_from_pixmap(); logs on
 * failure (NULL result handling elided from this view). */
221 __tpl_wl_egl_display_get_buffer_from_native_pixmap(tpl_handle_t pixmap)
223 tbm_surface_h tbm_surface = NULL;
227 tbm_surface = twe_get_native_buffer_from_pixmap(pixmap);
229 TPL_ERR("Failed to get tbm_surface_h from wayland_tbm.");
/* tbm_surface_queue reset callback, registered in surface_init with
 * the tpl_surface_t as `data`. Marks the surface as reset (so the
 * next dequeue picks up a new size / activate state) and forwards the
 * event to the application's reset_cb if one is installed. */
237 __cb_tbm_surface_queue_reset_callback(tbm_surface_queue_h surface_queue,
240 tpl_surface_t *surface = NULL;
241 tpl_wayland_egl_surface_t *wayland_egl_surface = NULL;
242 tpl_bool_t is_activated = TPL_FALSE;
245 surface = (tpl_surface_t *)data;
246 TPL_CHECK_ON_NULL_RETURN(surface);
248 wayland_egl_surface = (tpl_wayland_egl_surface_t *)surface->backend.data;
249 TPL_CHECK_ON_NULL_RETURN(wayland_egl_surface);
251 /* When the queue is resized, change the reset flag to TPL_TRUE to reflect
252 * the changed window size at the next frame. */
253 width = tbm_surface_queue_get_width(surface_queue);
254 height = tbm_surface_queue_get_height(surface_queue);
255 if (surface->width != width || surface->height != height) {
257 "[QUEUE_RESIZE_CB] wayland_egl_surface(%p) tbm_queue(%p) (%dx%d)",
258 wayland_egl_surface, surface_queue, width, height);
261 /* When queue_reset_callback is called, if is_activated is different from
262 * its previous state change the reset flag to TPL_TRUE to get a new buffer
263 * with the changed state(ACTIVATED/DEACTIVATED) at the next frame. */
264 is_activated = twe_surface_check_activated(wayland_egl_surface->twe_surface);
265 if (wayland_egl_surface->is_activated != is_activated) {
268 "[ACTIVATED_CB] wayland_egl_surface(%p) tbm_queue(%p)",
269 wayland_egl_surface, surface_queue);
272 "[DEACTIVATED_CB] wayland_egl_surface(%p) tbm_queue(%p)",
273 wayland_egl_surface, surface_queue);
/* Flag is consumed by dequeue_buffer / validate. */
277 wayland_egl_surface->reset = TPL_TRUE;
279 if (surface->reset_cb)
280 surface->reset_cb(surface->reset_data);
/* Rotate callback registered via twe_surface_set_rotate_callback():
 * re-reads the window rotation from the twe layer and caches it on the
 * tpl_surface. `data` is the tpl_surface_t passed at registration. */
283 void __cb_window_rotate_callback(void *data)
285 tpl_surface_t *surface = (tpl_surface_t *)data;
286 tpl_wayland_egl_surface_t *wayland_egl_surface = NULL;
290 TPL_ERR("Inavlid parameter. surface is NULL.");
294 wayland_egl_surface = (tpl_wayland_egl_surface_t *)surface->backend.data;
295 if (!wayland_egl_surface) {
296 TPL_ERR("Invalid parameter. surface->backend.data is NULL");
300 rotation = twe_surface_get_rotation(wayland_egl_surface->twe_surface);
302 surface->rotation = rotation;
/* Backend surface init for window surfaces.
 * Allocates the per-surface state, registers the native window with
 * the wayland-egl worker thread (twe_surface_add), grabs its
 * tbm_surface_queue, hooks the queue-reset and rotate callbacks, and
 * seeds surface width/height/rotation from the queue.
 * On failure unwinds via the goto labels at the bottom and returns
 * TPL_ERROR_OUT_OF_MEMORY or TPL_ERROR_INVALID_OPERATION. */
306 __tpl_wl_egl_surface_init(tpl_surface_t *surface)
308 tpl_wayland_egl_display_t *wayland_egl_display = NULL;
309 tpl_wayland_egl_surface_t *wayland_egl_surface = NULL;
310 tbm_surface_queue_h tbm_queue = NULL;
311 twe_surface_h twe_surface = NULL;
312 tpl_result_t ret = TPL_ERROR_NONE;
315 TPL_ASSERT(surface->display);
316 TPL_ASSERT(surface->type == TPL_SURFACE_TYPE_WINDOW);
317 TPL_ASSERT(surface->native_handle);
319 wayland_egl_display =
320 (tpl_wayland_egl_display_t *)surface->display->backend.data;
321 if (!wayland_egl_display) {
322 TPL_ERR("Invalid parameter. wayland_egl_display(%p)",
323 wayland_egl_display);
324 return TPL_ERROR_INVALID_PARAMETER;
327 wayland_egl_surface = (tpl_wayland_egl_surface_t *) calloc(1,
328 sizeof(tpl_wayland_egl_surface_t));
329 if (!wayland_egl_surface) {
330 TPL_ERR("Failed to allocate memory for new tpl_wayland_egl_surface_t.");
331 return TPL_ERROR_OUT_OF_MEMORY;
334 surface->backend.data = (void *)wayland_egl_surface;
/* Base object init provides the lock used by TPL_OBJECT_LOCK later. */
336 if (__tpl_object_init(&wayland_egl_surface->base,
338 NULL) != TPL_ERROR_NONE) {
339 TPL_ERR("Failed to initialize backend surface's base object!");
340 goto object_init_fail;
343 twe_surface = twe_surface_add(wayland_egl_display->wl_egl_thread,
344 wayland_egl_display->twe_display,
345 surface->native_handle,
346 surface->format, surface->num_buffers);
348 TPL_ERR("Failed to add native_window(%p) to thread(%p)",
349 surface->native_handle, wayland_egl_display->wl_egl_thread);
350 goto create_twe_surface_fail;
/* The queue is owned by twe_surface; we only keep a reference. */
353 tbm_queue = twe_surface_get_tbm_queue(twe_surface);
355 TPL_ERR("Failed to get tbm_queue from twe_surface(%p)", twe_surface);
356 goto queue_create_fail;
359 /* Set reset_callback to tbm_queue */
360 if (tbm_surface_queue_add_reset_cb(tbm_queue,
361 __cb_tbm_surface_queue_reset_callback,
363 TPL_ERR("TBM surface queue add reset cb failed!");
364 goto add_reset_cb_fail;
367 wayland_egl_surface->reset = TPL_FALSE;
368 wayland_egl_surface->twe_surface = twe_surface;
369 wayland_egl_surface->tbm_queue = tbm_queue;
370 wayland_egl_surface->is_activated = TPL_FALSE;
371 wayland_egl_surface->need_to_enqueue = TPL_TRUE;
373 surface->width = tbm_surface_queue_get_width(tbm_queue);
374 surface->height = tbm_surface_queue_get_height(tbm_queue);
375 surface->rotation = twe_surface_get_rotation(twe_surface);
377 ret = twe_surface_set_rotate_callback(twe_surface, (void *)surface,
378 (tpl_surface_cb_func_t)__cb_window_rotate_callback);
/* Rotate callback is best-effort: failure is only a warning. */
379 if (ret != TPL_ERROR_NONE) {
380 TPL_WARN("Failed to register rotate callback.");
384 "[INIT1/2]tpl_surface(%p) tpl_wayland_egl_surface(%p) twe_surface(%p)",
385 surface, wayland_egl_surface, twe_surface);
387 "[INIT2/2]size(%dx%d)rot(%d)|tbm_queue(%p)|native_window(%p)",
388 surface->width, surface->height, surface->rotation,
389 tbm_queue, surface->native_handle);
391 return TPL_ERROR_NONE;
/* Error-unwind labels: later failures fall through to earlier cleanup. */
395 twe_surface_del(twe_surface);
396 create_twe_surface_fail:
398 free(wayland_egl_surface);
399 surface->backend.data = NULL;
400 return TPL_ERROR_INVALID_OPERATION;
/* Backend surface fini: deletes the twe_surface from the worker
 * thread (which also invalidates its tbm_queue), finalizes the base
 * object and frees the per-surface state. Holds the surface object
 * lock across the twe_surface teardown. */
404 __tpl_wl_egl_surface_fini(tpl_surface_t *surface)
406 tpl_wayland_egl_surface_t *wayland_egl_surface = NULL;
407 tpl_wayland_egl_display_t *wayland_egl_display = NULL;
410 TPL_ASSERT(surface->display);
412 wayland_egl_surface = (tpl_wayland_egl_surface_t *) surface->backend.data;
413 TPL_CHECK_ON_NULL_RETURN(wayland_egl_surface);
415 TPL_OBJECT_LOCK(wayland_egl_surface);
417 wayland_egl_display = (tpl_wayland_egl_display_t *)
418 surface->display->backend.data;
420 if (wayland_egl_display == NULL) {
421 TPL_ERR("check failed: wayland_egl_display == NULL");
422 TPL_OBJECT_UNLOCK(wayland_egl_surface);
426 if (surface->type == TPL_SURFACE_TYPE_WINDOW) {
428 "[FINI] wayland_egl_surface(%p) native_window(%p) twe_surface(%p)",
429 wayland_egl_surface, surface->native_handle,
430 wayland_egl_surface->twe_surface);
432 if (twe_surface_del(wayland_egl_surface->twe_surface)
434 TPL_ERR("Failed to delete twe_surface(%p) from thread(%p)",
435 wayland_egl_surface->twe_surface,
436 wayland_egl_display->wl_egl_thread);
/* tbm_queue was owned by twe_surface, so both are cleared together. */
439 wayland_egl_surface->twe_surface = NULL;
440 wayland_egl_surface->tbm_queue = NULL;
443 TPL_OBJECT_UNLOCK(wayland_egl_surface);
444 __tpl_object_fini(&wayland_egl_surface->base);
445 free(wayland_egl_surface);
446 surface->backend.data = NULL;
/* Forwards the client's rotation-capability setting to the twe layer.
 * Validates surface, backend data and twe_surface before delegating;
 * returns TPL_ERROR_INVALID_PARAMETER on any missing handle. */
450 __tpl_wl_egl_surface_set_rotation_capability(tpl_surface_t *surface,
453 tpl_wayland_egl_surface_t *wayland_egl_surface = NULL;
456 TPL_ERR("Invalid parameter. tpl_surface(%p)", surface);
457 return TPL_ERROR_INVALID_PARAMETER;
460 wayland_egl_surface = (tpl_wayland_egl_surface_t *)surface->backend.data;
461 if (!wayland_egl_surface) {
462 TPL_ERR("Invalid parameter. wayland_egl_surface(%p)",
463 wayland_egl_surface);
464 return TPL_ERROR_INVALID_PARAMETER;
467 if (!wayland_egl_surface->twe_surface) {
468 TPL_ERR("Invalid parameter. wayland_egl_surface(%p) twe_surface(%p)",
469 wayland_egl_surface, wayland_egl_surface->twe_surface);
470 return TPL_ERROR_INVALID_PARAMETER;
473 twe_surface_set_rotation_capablity(wayland_egl_surface->twe_surface,
476 return TPL_ERROR_NONE;
/* Forwards the client's post (swap) interval to the twe layer.
 * Same validation pattern as set_rotation_capability: surface,
 * backend data and twe_surface must all be non-NULL. */
480 __tpl_wl_egl_surface_set_post_interval(tpl_surface_t *surface,
483 tpl_wayland_egl_surface_t *wayland_egl_surface = NULL;
486 TPL_ERR("Invalid parameter. tpl_surface(%p)", surface);
487 return TPL_ERROR_INVALID_PARAMETER;
490 wayland_egl_surface = (tpl_wayland_egl_surface_t *)surface->backend.data;
491 if (!wayland_egl_surface) {
492 TPL_ERR("Invalid parameter. wayland_egl_surface(%p)",
493 wayland_egl_surface);
494 return TPL_ERROR_INVALID_PARAMETER;
497 if (!wayland_egl_surface->twe_surface) {
498 TPL_ERR("Invalid parameter. wayland_egl_surface(%p) twe_surface(%p)",
499 wayland_egl_surface, wayland_egl_surface->twe_surface);
500 return TPL_ERROR_INVALID_PARAMETER;
503 twe_surface_set_post_interval(wayland_egl_surface->twe_surface,
506 return TPL_ERROR_NONE;
/* Enqueues a rendered tbm_surface back into the surface's queue so the
 * worker thread can acquire and commit it to the compositor.
 * - Optional damage rects are attached to the buffer (best-effort).
 * - In frontbuffer mode, re-enqueues of the already-displayed
 *   frontbuffer are skipped (need_to_enqueue gating).
 * - An optional sync_fence (-1 = none) is attached before enqueue.
 * Returns TPL_ERROR_NONE, _INVALID_PARAMETER or _INVALID_OPERATION.
 * NOTE(review): TRACE_* macros cast tbm_surface to (int) — pointer
 * truncation on 64-bit; presumably only used as a trace cookie. */
510 __tpl_wl_egl_surface_enqueue_buffer(tpl_surface_t *surface,
511 tbm_surface_h tbm_surface,
512 int num_rects, const int *rects, tbm_fd sync_fence)
515 TPL_ASSERT(surface->display);
516 TPL_ASSERT(tbm_surface);
517 TPL_OBJECT_CHECK_RETURN(surface, TPL_ERROR_INVALID_PARAMETER);
519 tpl_wayland_egl_surface_t *wayland_egl_surface =
520 (tpl_wayland_egl_surface_t *) surface->backend.data;
521 tbm_surface_queue_error_e tsq_err;
522 tpl_result_t ret = TPL_ERROR_NONE;
525 TPL_OBJECT_LOCK(wayland_egl_surface);
/* bo name is used purely for tracing/log correlation. */
527 bo_name = tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0));
529 if (!wayland_egl_surface) {
530 TPL_ERR("Invalid parameter. wayland_egl_surface(%p)",
531 wayland_egl_surface);
532 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
533 TPL_OBJECT_UNLOCK(wayland_egl_surface);
534 return TPL_ERROR_INVALID_PARAMETER;
537 if (!tbm_surface_internal_is_valid(tbm_surface)) {
538 TPL_ERR("Failed to enqueue tbm_surface(%p) Invalid value.",
540 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
541 TPL_OBJECT_UNLOCK(wayland_egl_surface);
542 return TPL_ERROR_INVALID_PARAMETER;
545 TRACE_MARK("[ENQ] BO_NAME:%d", bo_name);
548 "[ENQ] wayland_egl_surface(%p) tbm_surface(%p) bo(%d) fence(%d)",
549 wayland_egl_surface, tbm_surface, bo_name, sync_fence);
551 /* If there are received region information,
552 * save it to buf_info in tbm_surface user_data using below API. */
553 if (num_rects && rects) {
554 ret = twe_surface_set_damage_region(tbm_surface, num_rects, rects);
/* Damage-region failure is non-fatal; enqueue continues. */
555 if (ret != TPL_ERROR_NONE) {
556 TPL_WARN("Failed to set damage region. num_rects(%d) rects(%p)",
/* Skip the actual enqueue when frontbuffer mode already committed this
 * buffer and no new commit is needed. */
561 if (!wayland_egl_surface->need_to_enqueue ||
562 !twe_surface_check_commit_needed(wayland_egl_surface->twe_surface,
565 "[ENQ_SKIP][Frontbuffer:%s] tbm_surface(%p) need not to enqueue",
566 ((surface->frontbuffer == tbm_surface) ? "ON" : "OFF"), tbm_surface);
567 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
568 TPL_OBJECT_UNLOCK(wayland_egl_surface);
569 return TPL_ERROR_NONE;
572 /* In frontbuffer mode, will skip tbm_surface_queue_enqueue, acquire, and
573 * commit if surface->frontbuffer that is already set and the tbm_surface
574 * client want to enqueue are the same.
576 if (surface->is_frontbuffer_mode) {
577 /* The first buffer to be activated in frontbuffer mode must be
578 * committed. Subsequence frames do not need to be committed because
579 * the buffer is already displayed.
581 if (surface->frontbuffer == tbm_surface)
582 wayland_egl_surface->need_to_enqueue = TPL_FALSE;
584 if (sync_fence != -1) {
590 if (sync_fence != -1) {
591 ret = twe_surface_set_sync_fd(wayland_egl_surface->twe_surface,
592 tbm_surface, sync_fence);
/* A failed sync-fd attach is logged but does not abort the enqueue. */
593 if (ret != TPL_ERROR_NONE) {
594 TPL_WARN("Failed to set sync fd (%d). But it will continue.",
599 tsq_err = tbm_surface_queue_enqueue(wayland_egl_surface->tbm_queue,
601 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE) {
603 * If tbm_surface_queue has not been reset, tbm_surface_queue_enqueue
604 * will return ERROR_NONE. Otherwise, queue has been reset
605 * this tbm_surface may have only one ref_count. So we need to
606 * unreference this tbm_surface after getting ERROR_NONE result from
607 * tbm_surface_queue_enqueue in order to prevent destruction.
609 tbm_surface_internal_unref(tbm_surface);
611 TPL_ERR("Failed to enqueue tbm_surface(%p). tsq_err=%d",
612 tbm_surface, tsq_err);
613 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
614 TPL_OBJECT_UNLOCK(wayland_egl_surface);
615 return TPL_ERROR_INVALID_OPERATION;
618 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
619 TPL_OBJECT_UNLOCK(wayland_egl_surface);
621 return TPL_ERROR_NONE;
/* Returns TPL_FALSE when the queue has been externally reset (resize /
 * activate change), signalling the caller that its current buffers are
 * stale; TPL_TRUE otherwise. */
625 __tpl_wl_egl_surface_validate(tpl_surface_t *surface)
627 tpl_bool_t retval = TPL_TRUE;
630 TPL_ASSERT(surface->backend.data);
632 tpl_wayland_egl_surface_t *wayland_egl_surface =
633 (tpl_wayland_egl_surface_t *)surface->backend.data;
635 retval = !(wayland_egl_surface->reset);
/* Returns a previously dequeued (but unused) buffer to the queue.
 * Drops the internal ref taken at dequeue time, then calls
 * tbm_surface_queue_cancel_dequeue(). Returns _INVALID_PARAMETER for
 * bad handles and _INVALID_OPERATION when the cancel itself fails. */
641 __tpl_wl_egl_surface_cancel_dequeued_buffer(tpl_surface_t *surface,
642 tbm_surface_h tbm_surface)
644 tpl_wayland_egl_surface_t *wayland_egl_surface = NULL;
645 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
647 wayland_egl_surface = (tpl_wayland_egl_surface_t *)surface->backend.data;
648 if (!wayland_egl_surface) {
649 TPL_ERR("Invalid backend surface. surface(%p) wayland_egl_surface(%p)",
650 surface, wayland_egl_surface);
651 return TPL_ERROR_INVALID_PARAMETER;
654 if (!tbm_surface_internal_is_valid(tbm_surface)) {
655 TPL_ERR("Invalid buffer. tbm_surface(%p)", tbm_surface);
656 return TPL_ERROR_INVALID_PARAMETER;
/* Balances the tbm_surface_internal_ref() done in dequeue_buffer. */
659 tbm_surface_internal_unref(tbm_surface);
661 tsq_err = tbm_surface_queue_cancel_dequeue(wayland_egl_surface->tbm_queue,
663 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
664 TPL_ERR("Failed to release tbm_surface(%p)", tbm_surface);
665 return TPL_ERROR_INVALID_OPERATION;
668 TPL_LOG_T("WL_EGL", "[CANCEL BUFFER] tpl_surface(%p) tbm_surface(%p)",
669 surface, tbm_surface);
671 return TPL_ERROR_NONE;
674 #define CAN_DEQUEUE_TIMEOUT_MS 10000
/* Dequeues the next renderable tbm_surface for the client.
 * Flow: wait (up to CAN_DEQUEUE_TIMEOUT_MS) until the queue can
 * dequeue, lock the twe display against concurrent thread events,
 * refresh the activate state and surface size, honour frontbuffer
 * mode (reuse surface->frontbuffer while still activated and not
 * reset), then dequeue, take an internal ref (released at enqueue or
 * cancel), and return the buffer. Returns NULL on timeout/error
 * (exact NULL-return lines elided from this view).
 * NOTE(review): TRACE_ASYNC_* casts pointers to (int) — truncation on
 * 64-bit, presumably tolerated for trace cookies only. */
677 __tpl_wl_egl_surface_dequeue_buffer(tpl_surface_t *surface, uint64_t timeout_ns,
681 TPL_ASSERT(surface->backend.data);
682 TPL_ASSERT(surface->display);
683 TPL_ASSERT(surface->display->backend.data);
684 TPL_OBJECT_CHECK_RETURN(surface, NULL);
686 tbm_surface_h tbm_surface = NULL;
687 tpl_wayland_egl_surface_t *wayland_egl_surface =
688 (tpl_wayland_egl_surface_t *)surface->backend.data;
689 tpl_wayland_egl_display_t *wayland_egl_display =
690 (tpl_wayland_egl_display_t *)surface->display->backend.data;
691 tbm_surface_queue_error_e tsq_err = 0;
692 int is_activated = 0;
694 tpl_result_t lock_ret = TPL_FALSE;
/* Drop the surface lock while blocking so other threads can make
 * progress (e.g. release buffers) and re-take it afterwards. */
699 TPL_OBJECT_UNLOCK(surface);
700 tsq_err = tbm_surface_queue_can_dequeue_wait_timeout(
701 wayland_egl_surface->tbm_queue, CAN_DEQUEUE_TIMEOUT_MS);
702 TPL_OBJECT_LOCK(surface);
704 /* After the can dequeue state, call twe_display_lock to prevent other
705 * events from being processed in wayland_egl_thread
706 * during below dequeue procedure. */
707 lock_ret = twe_display_lock(wayland_egl_display->twe_display);
709 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_TIMEOUT) {
710 TPL_ERR("[CAN_DEQUEUE_TIMEOUT] queue(%p) will be reset",
711 wayland_egl_surface->tbm_queue);
/* Timeout recovery: force-flush the queue, then treat it as OK. */
712 if (twe_surface_queue_force_flush(wayland_egl_surface->twe_surface)
714 TPL_ERR("Failed to timeout reset. tbm_queue(%p)", wayland_egl_surface->tbm_queue);
715 if (lock_ret == TPL_ERROR_NONE)
716 twe_display_unlock(wayland_egl_display->twe_display);
719 tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
723 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
724 TPL_ERR("Failed to query can_dequeue. tbm_queue(%p)", wayland_egl_surface->tbm_queue);
725 if (lock_ret == TPL_ERROR_NONE)
726 twe_display_unlock(wayland_egl_display->twe_display);
730 /* wayland client can check their states (ACTIVATED or DEACTIVATED) with
731 * below function [wayland_tbm_client_queue_check_activate()].
732 * This function has to be called before tbm_surface_queue_dequeue()
733 * in order to know what state the buffer will be dequeued next.
735 * ACTIVATED state means non-composite mode. Client can get buffers which
736 can be displayed directly(without compositing).
737 * DEACTIVATED state means composite mode. Client's buffer will be displayed
738 by compositor(E20) with compositing.
740 is_activated = twe_surface_check_activated(wayland_egl_surface->twe_surface);
741 wayland_egl_surface->is_activated = is_activated;
743 surface->width = tbm_surface_queue_get_width(wayland_egl_surface->tbm_queue);
744 surface->height = tbm_surface_queue_get_height(wayland_egl_surface->tbm_queue);
746 if (surface->is_frontbuffer_mode && surface->frontbuffer != NULL) {
747 /* If surface->frontbuffer is already set in frontbuffer mode,
748 * it will return that frontbuffer if it is still activated,
749 * otherwise dequeue the new buffer after initializing
750 * surface->frontbuffer to NULL. */
751 if (is_activated && !wayland_egl_surface->reset) {
753 "[DEQ][F] surface->frontbuffer(%p) BO_NAME(%d)",
754 surface->frontbuffer,
755 tbm_bo_export(tbm_surface_internal_get_bo(
756 surface->frontbuffer, 0)));
757 TRACE_ASYNC_BEGIN((int)surface->frontbuffer,
758 "[DEQ]~[ENQ] BO_NAME:%d",
759 tbm_bo_export(tbm_surface_internal_get_bo(
760 surface->frontbuffer, 0)));
761 if (lock_ret == TPL_ERROR_NONE)
762 twe_display_unlock(wayland_egl_display->twe_display);
763 return surface->frontbuffer;
/* Frontbuffer no longer reusable: fall through to a fresh dequeue. */
765 surface->frontbuffer = NULL;
766 wayland_egl_surface->need_to_enqueue = TPL_TRUE;
769 surface->frontbuffer = NULL;
772 tsq_err = tbm_surface_queue_dequeue(wayland_egl_surface->tbm_queue,
775 TPL_ERR("Failed to get tbm_surface from tbm_surface_queue | tsq_err = %d",
777 if (lock_ret == TPL_ERROR_NONE)
778 twe_display_unlock(wayland_egl_display->twe_display);
/* Hold an extra ref so a queue reset cannot destroy the buffer while
 * the client is rendering; dropped in enqueue/cancel. */
782 tbm_surface_internal_ref(tbm_surface);
783 bo_name = tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0));
785 if (surface->is_frontbuffer_mode && is_activated)
786 surface->frontbuffer = tbm_surface;
788 wayland_egl_surface->reset = TPL_FALSE;
790 TRACE_MARK("[DEQ][NEW]BO_NAME:%d", bo_name);
791 TRACE_ASYNC_BEGIN((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
792 TPL_LOG_T("WL_EGL", "[DEQ][N] tbm_surface(%p) bo(%d)",
793 tbm_surface, bo_name);
795 if (lock_ret == TPL_ERROR_NONE)
796 twe_display_unlock(wayland_egl_display->twe_display);
/* Backend selector: TRUE when the native display handle is a genuine
 * wl_display (delegated to the twe layer), FALSE for NULL.
 * (Return statements for the positive/negative cases are elided from
 * this view.) */
802 __tpl_display_choose_backend_wl_egl_thread(tpl_handle_t native_dpy)
804 if (!native_dpy) return TPL_FALSE;
806 if (twe_check_native_handle_is_wl_display(native_dpy))
/* Wires the display backend vtable to this file's implementations.
 * Called once when the WAYLAND_THREAD backend is selected. */
813 __tpl_display_init_backend_wl_egl_thread(tpl_display_backend_t *backend)
817 backend->type = TPL_BACKEND_WAYLAND_THREAD;
818 backend->data = NULL;
820 backend->init = __tpl_wl_egl_display_init;
821 backend->fini = __tpl_wl_egl_display_fini;
822 backend->query_config = __tpl_wl_egl_display_query_config;
823 backend->filter_config = __tpl_wl_egl_display_filter_config;
824 backend->get_window_info = __tpl_wl_egl_display_get_window_info;
825 backend->get_pixmap_info = __tpl_wl_egl_display_get_pixmap_info;
826 backend->get_buffer_from_native_pixmap =
827 __tpl_wl_egl_display_get_buffer_from_native_pixmap;
/* Wires the surface backend vtable to this file's implementations.
 * Counterpart of __tpl_display_init_backend_wl_egl_thread(). */
831 __tpl_surface_init_backend_wl_egl_thread(tpl_surface_backend_t *backend)
835 backend->type = TPL_BACKEND_WAYLAND_THREAD;
836 backend->data = NULL;
838 backend->init = __tpl_wl_egl_surface_init;
839 backend->fini = __tpl_wl_egl_surface_fini;
840 backend->validate = __tpl_wl_egl_surface_validate;
841 backend->cancel_dequeued_buffer =
842 __tpl_wl_egl_surface_cancel_dequeued_buffer;
843 backend->dequeue_buffer = __tpl_wl_egl_surface_dequeue_buffer;
844 backend->enqueue_buffer = __tpl_wl_egl_surface_enqueue_buffer;
845 backend->set_rotation_capability =
846 __tpl_wl_egl_surface_set_rotation_capability;
847 backend->set_post_interval =
848 __tpl_wl_egl_surface_set_post_interval;