/**************************************************************************

Copyright 2015 Samsung Electronics co., Ltd. All Rights Reserved.

Contact: Eunchul Kim <chulspro.kim@samsung.com>,
         JinYoung Jeon <jy0.jeon@samsung.com>,
         Taeheon Kim <th908.kim@samsung.com>,
         YoungJun Cho <yj44.cho@samsung.com>,
         SooChan Lim <sc1.lim@samsung.com>,
         Boram Park <sc1.lim@samsung.com>

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sub license, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice (including the
next paragraph) shall be included in all copies or substantial portions
of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

**************************************************************************/
41 #include "tdm_backend.h"
42 #include "tdm_private.h"
/* Common entry guards for the public API functions in this file.  Each
 * variant validates its handle argument, declares the matching private
 * pointer(s) plus a default "ret" of TDM_ERROR_NONE, and resolves the
 * owning tdm_private_display so the caller can take private_display->lock.
 */

/* Display-level entry; returns TDM_ERROR_INVALID_PARAMETER on NULL dpy. */
#define DISPLAY_FUNC_ENTRY() \
    tdm_private_display *private_display; \
    tdm_error ret = TDM_ERROR_NONE; /* default TDM_ERROR_NONE */\
    TDM_RETURN_VAL_IF_FAIL(dpy != NULL, TDM_ERROR_INVALID_PARAMETER); \
    private_display = (tdm_private_display*)dpy;

/* Display-level entry for pointer-returning functions; returns NULL and
 * reports TDM_ERROR_INVALID_PARAMETER through *error on NULL dpy. */
#define DISPLAY_FUNC_ENTRY_ERROR() \
    tdm_private_display *private_display; \
    tdm_error ret = TDM_ERROR_NONE; /* default TDM_ERROR_NONE */\
    TDM_RETURN_VAL_IF_FAIL_WITH_ERROR(dpy != NULL, TDM_ERROR_INVALID_PARAMETER, NULL); \
    private_display = (tdm_private_display*)dpy;

/* Output-level entry; also resolves the owning display. */
#define OUTPUT_FUNC_ENTRY() \
    tdm_private_display *private_display; \
    tdm_private_output *private_output; \
    tdm_error ret = TDM_ERROR_NONE; /* default TDM_ERROR_NONE */\
    TDM_RETURN_VAL_IF_FAIL(output != NULL, TDM_ERROR_INVALID_PARAMETER); \
    private_output = (tdm_private_output*)output; \
    private_display = private_output->private_display

/* Output-level entry for pointer-returning functions. */
#define OUTPUT_FUNC_ENTRY_ERROR() \
    tdm_private_display *private_display; \
    tdm_private_output *private_output; \
    tdm_error ret = TDM_ERROR_NONE; /* default TDM_ERROR_NONE */\
    TDM_RETURN_VAL_IF_FAIL_WITH_ERROR(output != NULL, TDM_ERROR_INVALID_PARAMETER, NULL); \
    private_output = (tdm_private_output*)output; \
    private_display = private_output->private_display

/* Layer-level entry; resolves layer -> output -> display. */
#define LAYER_FUNC_ENTRY() \
    tdm_private_display *private_display; \
    tdm_private_output *private_output; \
    tdm_private_layer *private_layer; \
    tdm_error ret = TDM_ERROR_NONE; /* default TDM_ERROR_NONE */\
    TDM_RETURN_VAL_IF_FAIL(layer != NULL, TDM_ERROR_INVALID_PARAMETER); \
    private_layer = (tdm_private_layer*)layer; \
    private_output = private_layer->private_output; \
    private_display = private_output->private_display

/* Layer-level entry for pointer-returning functions. */
#define LAYER_FUNC_ENTRY_ERROR() \
    tdm_private_display *private_display; \
    tdm_private_output *private_output; \
    tdm_private_layer *private_layer; \
    tdm_error ret = TDM_ERROR_NONE; /* default TDM_ERROR_NONE */\
    TDM_RETURN_VAL_IF_FAIL_WITH_ERROR(layer != NULL, TDM_ERROR_INVALID_PARAMETER, NULL); \
    private_layer = (tdm_private_layer*)layer; \
    private_output = private_layer->private_output; \
    private_display = private_output->private_display

/* Layer-level entry for void functions (tbm queue callbacks). */
#define LAYER_FUNC_ENTRY_VOID_RETURN() \
    tdm_private_display *private_display; \
    tdm_private_output *private_output; \
    tdm_private_layer *private_layer; \
    tdm_error ret = TDM_ERROR_NONE; /* default TDM_ERROR_NONE */\
    TDM_RETURN_IF_FAIL(layer != NULL); \
    private_layer = (tdm_private_layer*)layer; \
    private_output = private_layer->private_output; \
    private_display = private_output->private_display
105 tdm_display_get_capabilities(tdm_display *dpy,
106 tdm_display_capability *capabilities)
108 DISPLAY_FUNC_ENTRY();
110 TDM_RETURN_VAL_IF_FAIL(capabilities != NULL, TDM_ERROR_INVALID_PARAMETER);
112 pthread_mutex_lock(&private_display->lock);
114 *capabilities = private_display->capabilities;
116 pthread_mutex_unlock(&private_display->lock);
122 tdm_display_get_pp_capabilities(tdm_display *dpy,
123 tdm_pp_capability *capabilities)
125 DISPLAY_FUNC_ENTRY();
127 TDM_RETURN_VAL_IF_FAIL(capabilities != NULL, TDM_ERROR_INVALID_PARAMETER);
129 pthread_mutex_lock(&private_display->lock);
131 if (!(private_display->capabilities & TDM_DISPLAY_CAPABILITY_PP)) {
132 TDM_ERR("no pp capability");
133 pthread_mutex_unlock(&private_display->lock);
134 return TDM_ERROR_NO_CAPABILITY;
137 *capabilities = private_display->caps_pp.capabilities;
139 pthread_mutex_unlock(&private_display->lock);
145 tdm_display_get_pp_available_formats(tdm_display *dpy,
146 const tbm_format **formats, int *count)
148 DISPLAY_FUNC_ENTRY();
150 TDM_RETURN_VAL_IF_FAIL(formats != NULL, TDM_ERROR_INVALID_PARAMETER);
151 TDM_RETURN_VAL_IF_FAIL(count != NULL, TDM_ERROR_INVALID_PARAMETER);
153 pthread_mutex_lock(&private_display->lock);
155 if (!(private_display->capabilities & TDM_DISPLAY_CAPABILITY_PP)) {
156 TDM_ERR("no pp capability");
157 pthread_mutex_unlock(&private_display->lock);
158 return TDM_ERROR_NO_CAPABILITY;
161 *formats = (const tbm_format *)private_display->caps_pp.formats;
162 *count = private_display->caps_pp.format_count;
164 pthread_mutex_unlock(&private_display->lock);
170 tdm_display_get_pp_available_size(tdm_display *dpy, int *min_w, int *min_h,
171 int *max_w, int *max_h, int *preferred_align)
173 DISPLAY_FUNC_ENTRY();
175 pthread_mutex_lock(&private_display->lock);
177 if (!(private_display->capabilities & TDM_DISPLAY_CAPABILITY_PP)) {
178 TDM_ERR("no pp capability");
179 pthread_mutex_unlock(&private_display->lock);
180 return TDM_ERROR_NO_CAPABILITY;
184 *min_w = private_display->caps_pp.min_w;
186 *min_h = private_display->caps_pp.min_h;
188 *max_w = private_display->caps_pp.max_w;
190 *max_h = private_display->caps_pp.max_h;
192 *preferred_align = private_display->caps_pp.preferred_align;
194 pthread_mutex_unlock(&private_display->lock);
200 tdm_display_get_capture_capabilities(tdm_display *dpy,
201 tdm_capture_capability *capabilities)
203 DISPLAY_FUNC_ENTRY();
205 TDM_RETURN_VAL_IF_FAIL(capabilities != NULL, TDM_ERROR_INVALID_PARAMETER);
207 pthread_mutex_lock(&private_display->lock);
209 if (!(private_display->capabilities & TDM_DISPLAY_CAPABILITY_CAPTURE)) {
210 TDM_ERR("no capture capability");
211 pthread_mutex_unlock(&private_display->lock);
212 return TDM_ERROR_NO_CAPABILITY;
215 *capabilities = private_display->caps_capture.capabilities;
217 pthread_mutex_unlock(&private_display->lock);
223 tdm_display_get_catpure_available_formats(tdm_display *dpy,
224 const tbm_format **formats, int *count)
226 DISPLAY_FUNC_ENTRY();
228 TDM_RETURN_VAL_IF_FAIL(formats != NULL, TDM_ERROR_INVALID_PARAMETER);
229 TDM_RETURN_VAL_IF_FAIL(count != NULL, TDM_ERROR_INVALID_PARAMETER);
231 pthread_mutex_lock(&private_display->lock);
233 if (!(private_display->capabilities & TDM_DISPLAY_CAPABILITY_CAPTURE)) {
234 TDM_ERR("no capture capability");
235 pthread_mutex_unlock(&private_display->lock);
236 return TDM_ERROR_NO_CAPABILITY;
239 *formats = (const tbm_format *)private_display->caps_capture.formats;
240 *count = private_display->caps_capture.format_count;
242 pthread_mutex_unlock(&private_display->lock);
248 tdm_display_get_output_count(tdm_display *dpy, int *count)
250 tdm_private_output *private_output = NULL;
252 DISPLAY_FUNC_ENTRY();
254 TDM_RETURN_VAL_IF_FAIL(count != NULL, TDM_ERROR_INVALID_PARAMETER);
256 pthread_mutex_lock(&private_display->lock);
259 LIST_FOR_EACH_ENTRY(private_output, &private_display->output_list, link)
263 pthread_mutex_unlock(&private_display->lock);
264 return TDM_ERROR_NONE;
267 pthread_mutex_unlock(&private_display->lock);
274 tdm_display_get_output(tdm_display *dpy, int index, tdm_error *error)
276 tdm_private_output *private_output = NULL;
279 DISPLAY_FUNC_ENTRY_ERROR();
281 pthread_mutex_lock(&private_display->lock);
284 *error = TDM_ERROR_NONE;
287 LIST_FOR_EACH_ENTRY(private_output, &private_display->output_list, link) {
289 pthread_mutex_unlock(&private_display->lock);
290 return private_output;
295 pthread_mutex_unlock(&private_display->lock);
301 tdm_display_get_fd(tdm_display *dpy, int *fd)
303 tdm_func_display *func_display;
304 DISPLAY_FUNC_ENTRY();
306 TDM_RETURN_VAL_IF_FAIL(fd != NULL, TDM_ERROR_INVALID_PARAMETER);
308 pthread_mutex_lock(&private_display->lock);
310 func_display = &private_display->func_display;
312 if (!func_display->display_get_fd) {
313 pthread_mutex_unlock(&private_display->lock);
314 return TDM_ERROR_NONE;
317 ret = func_display->display_get_fd(private_display->bdata, fd);
319 pthread_mutex_unlock(&private_display->lock);
325 tdm_display_handle_events(tdm_display *dpy)
327 tdm_func_display *func_display;
328 DISPLAY_FUNC_ENTRY();
330 pthread_mutex_lock(&private_display->lock);
332 func_display = &private_display->func_display;
334 if (!func_display->display_handle_events) {
335 pthread_mutex_unlock(&private_display->lock);
336 return TDM_ERROR_NONE;
339 ret = func_display->display_handle_events(private_display->bdata);
341 pthread_mutex_unlock(&private_display->lock);
347 tdm_display_create_pp(tdm_display *dpy, tdm_error *error)
351 DISPLAY_FUNC_ENTRY_ERROR();
353 pthread_mutex_lock(&private_display->lock);
355 pp = (tdm_pp *)tdm_pp_create_internal(private_display, error);
357 pthread_mutex_unlock(&private_display->lock);
363 tdm_output_get_model_info(tdm_output *output, const char **maker,
364 const char **model, const char **name)
368 pthread_mutex_lock(&private_display->lock);
371 *maker = private_output->caps.maker;
373 *model = private_output->caps.model;
375 *name = private_output->caps.name;
377 pthread_mutex_unlock(&private_display->lock);
383 tdm_output_get_conn_status(tdm_output *output, tdm_output_conn_status *status)
387 TDM_RETURN_VAL_IF_FAIL(status != NULL, TDM_ERROR_INVALID_PARAMETER);
389 pthread_mutex_lock(&private_display->lock);
391 *status = private_output->caps.status;
393 pthread_mutex_unlock(&private_display->lock);
399 tdm_output_get_output_type(tdm_output *output, tdm_output_type *type)
403 TDM_RETURN_VAL_IF_FAIL(type != NULL, TDM_ERROR_INVALID_PARAMETER);
405 pthread_mutex_lock(&private_display->lock);
407 *type = private_output->caps.type;
409 pthread_mutex_unlock(&private_display->lock);
415 tdm_output_get_layer_count(tdm_output *output, int *count)
417 tdm_private_layer *private_layer = NULL;
421 TDM_RETURN_VAL_IF_FAIL(count != NULL, TDM_ERROR_INVALID_PARAMETER);
423 pthread_mutex_lock(&private_display->lock);
426 LIST_FOR_EACH_ENTRY(private_layer, &private_output->layer_list, link)
429 pthread_mutex_unlock(&private_display->lock);
430 return TDM_ERROR_NONE;
433 pthread_mutex_unlock(&private_display->lock);
440 tdm_output_get_layer(tdm_output *output, int index, tdm_error *error)
442 tdm_private_layer *private_layer = NULL;
445 OUTPUT_FUNC_ENTRY_ERROR();
447 pthread_mutex_lock(&private_display->lock);
450 *error = TDM_ERROR_NONE;
452 LIST_FOR_EACH_ENTRY(private_layer, &private_output->layer_list, link) {
454 pthread_mutex_unlock(&private_display->lock);
455 return private_layer;
460 pthread_mutex_unlock(&private_display->lock);
466 tdm_output_get_available_properties(tdm_output *output, const tdm_prop **props,
471 TDM_RETURN_VAL_IF_FAIL(props != NULL, TDM_ERROR_INVALID_PARAMETER);
472 TDM_RETURN_VAL_IF_FAIL(count != NULL, TDM_ERROR_INVALID_PARAMETER);
474 pthread_mutex_lock(&private_display->lock);
476 *props = (const tdm_prop *)private_output->caps.props;
477 *count = private_output->caps.prop_count;
479 pthread_mutex_unlock(&private_display->lock);
485 tdm_output_get_available_modes(tdm_output *output,
486 const tdm_output_mode **modes, int *count)
490 TDM_RETURN_VAL_IF_FAIL(modes != NULL, TDM_ERROR_INVALID_PARAMETER);
491 TDM_RETURN_VAL_IF_FAIL(count != NULL, TDM_ERROR_INVALID_PARAMETER);
493 pthread_mutex_lock(&private_display->lock);
495 *modes = (const tdm_output_mode *)private_output->caps.modes;
496 *count = private_output->caps.mode_count;
498 pthread_mutex_unlock(&private_display->lock);
504 tdm_output_get_available_size(tdm_output *output, int *min_w, int *min_h,
505 int *max_w, int *max_h, int *preferred_align)
509 pthread_mutex_lock(&private_display->lock);
512 *min_w = private_output->caps.min_w;
514 *min_h = private_output->caps.min_h;
516 *max_w = private_output->caps.max_w;
518 *max_h = private_output->caps.max_h;
520 *preferred_align = private_output->caps.preferred_align;
522 pthread_mutex_unlock(&private_display->lock);
528 tdm_output_get_physical_size(tdm_output *output, unsigned int *mmWidth,
529 unsigned int *mmHeight)
533 pthread_mutex_lock(&private_display->lock);
536 *mmWidth = private_output->caps.mmWidth;
538 *mmHeight = private_output->caps.mmHeight;
540 pthread_mutex_unlock(&private_display->lock);
546 tdm_output_get_subpixel(tdm_output *output, unsigned int *subpixel)
549 TDM_RETURN_VAL_IF_FAIL(subpixel != NULL, TDM_ERROR_INVALID_PARAMETER);
551 pthread_mutex_lock(&private_display->lock);
553 *subpixel = private_output->caps.subpixel;
555 pthread_mutex_unlock(&private_display->lock);
561 tdm_output_get_pipe(tdm_output *output, unsigned int *pipe)
564 TDM_RETURN_VAL_IF_FAIL(pipe != NULL, TDM_ERROR_INVALID_PARAMETER);
566 pthread_mutex_lock(&private_display->lock);
568 *pipe = private_output->pipe;
570 pthread_mutex_unlock(&private_display->lock);
577 tdm_output_set_property(tdm_output *output, unsigned int id, tdm_value value)
579 tdm_func_output *func_output;
582 pthread_mutex_lock(&private_display->lock);
584 func_output = &private_display->func_output;
586 if (!func_output->output_set_property) {
587 pthread_mutex_unlock(&private_display->lock);
588 return TDM_ERROR_NONE;
591 ret = func_output->output_set_property(private_output->output_backend, id,
594 pthread_mutex_unlock(&private_display->lock);
600 tdm_output_get_property(tdm_output *output, unsigned int id, tdm_value *value)
602 tdm_func_output *func_output;
605 TDM_RETURN_VAL_IF_FAIL(value != NULL, TDM_ERROR_INVALID_PARAMETER);
607 pthread_mutex_lock(&private_display->lock);
609 func_output = &private_display->func_output;
611 if (!func_output->output_get_property) {
612 pthread_mutex_unlock(&private_display->lock);
613 return TDM_ERROR_NONE;
616 ret = func_output->output_get_property(private_output->output_backend, id,
619 pthread_mutex_unlock(&private_display->lock);
625 _tdm_output_cb_vblank(tdm_output *output_backend, unsigned int sequence,
626 unsigned int tv_sec, unsigned int tv_usec, void *user_data)
628 tdm_private_vblank_handler *vblank_handler = user_data;
629 tdm_private_display *private_display;
631 TDM_RETURN_IF_FAIL(vblank_handler);
633 private_display = vblank_handler->private_output->private_display;
635 if (vblank_handler->func) {
636 pthread_mutex_unlock(&private_display->lock);
637 vblank_handler->func(vblank_handler->private_output, sequence,
638 tv_sec, tv_usec, vblank_handler->user_data);
639 pthread_mutex_lock(&private_display->lock);
642 LIST_DEL(&vblank_handler->link);
643 free(vblank_handler);
647 _tdm_output_cb_commit(tdm_output *output_backend, unsigned int sequence,
648 unsigned int tv_sec, unsigned int tv_usec, void *user_data)
650 tdm_private_commit_handler *commit_handler = user_data;
651 tdm_private_display *private_display;
652 tdm_private_output *private_output;
653 tdm_private_layer *private_layer = NULL;
655 TDM_RETURN_IF_FAIL(commit_handler);
657 private_output = commit_handler->private_output;
658 private_display = private_output->private_display;
660 LIST_FOR_EACH_ENTRY(private_layer, &private_output->layer_list, link) {
661 if (!private_layer->waiting_buffer)
664 if (private_layer->showing_buffer) {
665 pthread_mutex_unlock(&private_display->lock);
666 tdm_buffer_unref_backend(private_layer->showing_buffer);
667 pthread_mutex_lock(&private_display->lock);
669 if (private_layer->buffer_queue) {
670 pthread_mutex_unlock(&private_display->lock);
671 tbm_surface_queue_release(private_layer->buffer_queue,
672 private_layer->showing_buffer);
673 pthread_mutex_lock(&private_display->lock);
677 private_layer->showing_buffer = private_layer->waiting_buffer;
678 private_layer->waiting_buffer = NULL;
680 if (tdm_debug_buffer)
681 TDM_INFO("layer(%p) waiting_buffer(%p) showing_buffer(%p)",
682 private_layer, private_layer->waiting_buffer,
683 private_layer->showing_buffer);
686 if (commit_handler->func) {
687 pthread_mutex_unlock(&private_display->lock);
688 commit_handler->func(private_output, sequence,
689 tv_sec, tv_usec, commit_handler->user_data);
690 pthread_mutex_lock(&private_display->lock);
693 LIST_DEL(&commit_handler->link);
694 free(commit_handler);
698 tdm_output_wait_vblank(tdm_output *output, int interval, int sync,
699 tdm_output_vblank_handler func, void *user_data)
701 tdm_func_output *func_output;
702 tdm_private_vblank_handler *vblank_handler;
705 pthread_mutex_lock(&private_display->lock);
707 func_output = &private_display->func_output;
709 if (!func_output->output_wait_vblank) {
710 pthread_mutex_unlock(&private_display->lock);
711 return TDM_ERROR_NONE;
714 vblank_handler = calloc(1, sizeof(tdm_private_vblank_handler));
715 if (!vblank_handler) {
716 TDM_ERR("failed: alloc memory");
717 pthread_mutex_unlock(&private_display->lock);
718 return TDM_ERROR_OUT_OF_MEMORY;
721 LIST_ADD(&vblank_handler->link, &private_output->vblank_handler_list);
722 vblank_handler->private_output = private_output;
723 vblank_handler->func = func;
724 vblank_handler->user_data = user_data;
726 ret = func_output->output_wait_vblank(private_output->output_backend, interval,
727 sync, vblank_handler);
728 if (ret != TDM_ERROR_NONE) {
729 pthread_mutex_unlock(&private_display->lock);
733 if (!private_output->regist_vblank_cb) {
734 private_output->regist_vblank_cb = 1;
735 ret = func_output->output_set_vblank_handler(private_output->output_backend,
736 _tdm_output_cb_vblank);
739 pthread_mutex_unlock(&private_display->lock);
745 _tdm_output_commit(tdm_output *output, int sync, tdm_output_commit_handler func,
748 tdm_func_output *func_output;
749 tdm_private_commit_handler *commit_handler;
752 func_output = &private_display->func_output;
754 if (!func_output->output_commit) {
755 return TDM_ERROR_NONE;
758 commit_handler = calloc(1, sizeof(tdm_private_commit_handler));
759 if (!commit_handler) {
760 TDM_ERR("failed: alloc memory");
761 return TDM_ERROR_OUT_OF_MEMORY;
764 LIST_ADD(&commit_handler->link, &private_output->commit_handler_list);
765 commit_handler->private_output = private_output;
766 commit_handler->func = func;
767 commit_handler->user_data = user_data;
769 ret = func_output->output_commit(private_output->output_backend, sync,
771 TDM_RETURN_VAL_IF_FAIL(ret == TDM_ERROR_NONE, ret);
773 if (!private_output->regist_commit_cb) {
774 private_output->regist_commit_cb = 1;
775 ret = func_output->output_set_commit_handler(private_output->output_backend,
776 _tdm_output_cb_commit);
783 tdm_output_commit(tdm_output *output, int sync, tdm_output_commit_handler func,
788 pthread_mutex_lock(&private_display->lock);
790 ret = _tdm_output_commit(output, sync, func, user_data);
792 pthread_mutex_unlock(&private_display->lock);
798 tdm_output_set_mode(tdm_output *output, const tdm_output_mode *mode)
800 tdm_func_output *func_output;
803 TDM_RETURN_VAL_IF_FAIL(mode != NULL, TDM_ERROR_INVALID_PARAMETER);
805 pthread_mutex_lock(&private_display->lock);
807 func_output = &private_display->func_output;
809 if (!func_output->output_set_mode) {
810 pthread_mutex_unlock(&private_display->lock);
811 return TDM_ERROR_NONE;
814 ret = func_output->output_set_mode(private_output->output_backend, mode);
816 pthread_mutex_unlock(&private_display->lock);
822 tdm_output_get_mode(tdm_output *output, const tdm_output_mode **mode)
824 tdm_func_output *func_output;
827 TDM_RETURN_VAL_IF_FAIL(mode != NULL, TDM_ERROR_INVALID_PARAMETER);
829 pthread_mutex_lock(&private_display->lock);
831 func_output = &private_display->func_output;
833 if (!func_output->output_get_mode) {
834 pthread_mutex_unlock(&private_display->lock);
835 return TDM_ERROR_NONE;
838 ret = func_output->output_get_mode(private_output->output_backend, mode);
840 pthread_mutex_unlock(&private_display->lock);
846 tdm_output_set_dpms(tdm_output *output, tdm_output_dpms dpms_value)
848 tdm_func_output *func_output;
851 if (dpms_value < TDM_OUTPUT_DPMS_ON)
852 dpms_value = TDM_OUTPUT_DPMS_ON;
853 else if (dpms_value > TDM_OUTPUT_DPMS_OFF)
854 dpms_value = TDM_OUTPUT_DPMS_OFF;
856 pthread_mutex_lock(&private_display->lock);
858 func_output = &private_display->func_output;
860 if (!func_output->output_set_dpms) {
861 pthread_mutex_unlock(&private_display->lock);
862 return TDM_ERROR_NONE;
865 ret = func_output->output_set_dpms(private_output->output_backend, dpms_value);
867 pthread_mutex_unlock(&private_display->lock);
873 tdm_output_get_dpms(tdm_output *output, tdm_output_dpms *dpms_value)
875 tdm_func_output *func_output;
878 TDM_RETURN_VAL_IF_FAIL(dpms_value != NULL, TDM_ERROR_INVALID_PARAMETER);
880 pthread_mutex_lock(&private_display->lock);
882 func_output = &private_display->func_output;
884 if (!func_output->output_get_dpms) {
885 pthread_mutex_unlock(&private_display->lock);
886 return TDM_ERROR_NONE;
889 ret = func_output->output_get_dpms(private_output->output_backend, dpms_value);
891 pthread_mutex_unlock(&private_display->lock);
897 tdm_output_create_capture(tdm_output *output, tdm_error *error)
899 tdm_capture *capture = NULL;
901 OUTPUT_FUNC_ENTRY_ERROR();
903 pthread_mutex_lock(&private_display->lock);
905 capture = (tdm_capture *)tdm_capture_create_output_internal(private_output,
908 pthread_mutex_unlock(&private_display->lock);
914 tdm_layer_get_capabilities(tdm_layer *layer, tdm_layer_capability *capabilities)
918 TDM_RETURN_VAL_IF_FAIL(capabilities != NULL, TDM_ERROR_INVALID_PARAMETER);
920 pthread_mutex_lock(&private_display->lock);
922 *capabilities = private_layer->caps.capabilities;
924 pthread_mutex_unlock(&private_display->lock);
930 tdm_layer_get_available_formats(tdm_layer *layer, const tbm_format **formats,
935 TDM_RETURN_VAL_IF_FAIL(formats != NULL, TDM_ERROR_INVALID_PARAMETER);
936 TDM_RETURN_VAL_IF_FAIL(count != NULL, TDM_ERROR_INVALID_PARAMETER);
938 pthread_mutex_lock(&private_display->lock);
940 *formats = (const tbm_format *)private_layer->caps.formats;
941 *count = private_layer->caps.format_count;
943 pthread_mutex_unlock(&private_display->lock);
949 tdm_layer_get_available_properties(tdm_layer *layer, const tdm_prop **props,
954 TDM_RETURN_VAL_IF_FAIL(props != NULL, TDM_ERROR_INVALID_PARAMETER);
955 TDM_RETURN_VAL_IF_FAIL(count != NULL, TDM_ERROR_INVALID_PARAMETER);
957 pthread_mutex_lock(&private_display->lock);
959 *props = (const tdm_prop *)private_layer->caps.props;
960 *count = private_layer->caps.prop_count;
962 pthread_mutex_unlock(&private_display->lock);
968 tdm_layer_get_zpos(tdm_layer *layer, unsigned int *zpos)
972 TDM_RETURN_VAL_IF_FAIL(zpos != NULL, TDM_ERROR_INVALID_PARAMETER);
974 pthread_mutex_lock(&private_display->lock);
976 *zpos = private_layer->caps.zpos;
978 pthread_mutex_unlock(&private_display->lock);
984 tdm_layer_set_property(tdm_layer *layer, unsigned int id, tdm_value value)
986 tdm_func_layer *func_layer;
989 pthread_mutex_lock(&private_display->lock);
991 func_layer = &private_display->func_layer;
993 if (!func_layer->layer_set_property) {
994 pthread_mutex_unlock(&private_display->lock);
995 return TDM_ERROR_NONE;
998 ret = func_layer->layer_set_property(private_layer->layer_backend, id, value);
1000 pthread_mutex_unlock(&private_display->lock);
1006 tdm_layer_get_property(tdm_layer *layer, unsigned int id, tdm_value *value)
1008 tdm_func_layer *func_layer;
1011 TDM_RETURN_VAL_IF_FAIL(value != NULL, TDM_ERROR_INVALID_PARAMETER);
1013 pthread_mutex_lock(&private_display->lock);
1015 func_layer = &private_display->func_layer;
1017 if (!func_layer->layer_get_property) {
1018 pthread_mutex_unlock(&private_display->lock);
1019 return TDM_ERROR_NONE;
1022 ret = func_layer->layer_get_property(private_layer->layer_backend, id, value);
1024 pthread_mutex_unlock(&private_display->lock);
1030 tdm_layer_set_info(tdm_layer *layer, tdm_info_layer *info)
1032 tdm_func_layer *func_layer;
1035 TDM_RETURN_VAL_IF_FAIL(info != NULL, TDM_ERROR_INVALID_PARAMETER);
1037 pthread_mutex_lock(&private_display->lock);
1039 func_layer = &private_display->func_layer;
1041 private_layer->usable = 0;
1043 if (!func_layer->layer_set_info) {
1044 pthread_mutex_unlock(&private_display->lock);
1045 return TDM_ERROR_NONE;
1048 TDM_INFO("layer(%p) info: src(%dx%d %d,%d %dx%d %c%c%c%c) dst(%d,%d %dx%d) trans(%d)",
1049 private_layer, info->src_config.size.h, info->src_config.size.v,
1050 info->src_config.pos.x, info->src_config.pos.y,
1051 info->src_config.pos.w, info->src_config.pos.h,
1052 FOURCC_STR(info->src_config.format),
1053 info->dst_pos.x, info->dst_pos.y,
1054 info->dst_pos.w, info->dst_pos.h,
1057 ret = func_layer->layer_set_info(private_layer->layer_backend, info);
1058 TDM_WARNING_IF_FAIL(ret == TDM_ERROR_NONE);
1060 pthread_mutex_unlock(&private_display->lock);
1066 tdm_layer_get_info(tdm_layer *layer, tdm_info_layer *info)
1068 tdm_func_layer *func_layer;
1071 TDM_RETURN_VAL_IF_FAIL(info != NULL, TDM_ERROR_INVALID_PARAMETER);
1073 pthread_mutex_lock(&private_display->lock);
1075 func_layer = &private_display->func_layer;
1077 if (!func_layer->layer_get_info) {
1078 pthread_mutex_unlock(&private_display->lock);
1079 return TDM_ERROR_NONE;
1082 ret = func_layer->layer_get_info(private_layer->layer_backend, info);
1084 pthread_mutex_unlock(&private_display->lock);
1090 tdm_layer_set_buffer(tdm_layer *layer, tbm_surface_h buffer)
1092 tdm_func_layer *func_layer;
1096 TDM_RETURN_VAL_IF_FAIL(buffer != NULL, TDM_ERROR_INVALID_PARAMETER);
1098 pthread_mutex_lock(&private_display->lock);
1100 func_layer = &private_display->func_layer;
1102 private_layer->usable = 0;
1104 if (!func_layer->layer_set_buffer) {
1105 pthread_mutex_unlock(&private_display->lock);
1106 return TDM_ERROR_NONE;
1109 ret = func_layer->layer_set_buffer(private_layer->layer_backend, buffer);
1110 TDM_WARNING_IF_FAIL(ret == TDM_ERROR_NONE);
1112 if (ret == TDM_ERROR_NONE) {
1113 /* FIXME: should save to pending_buffer first. And after committing
1114 * successfully, need to move to waiting_buffer.
1116 if (private_layer->waiting_buffer) {
1117 pthread_mutex_unlock(&private_display->lock);
1118 tdm_buffer_unref_backend(private_layer->waiting_buffer);
1119 pthread_mutex_lock(&private_display->lock);
1122 private_layer->waiting_buffer = tdm_buffer_ref_backend(buffer);
1123 if (tdm_debug_buffer)
1124 TDM_INFO("layer(%p) waiting_buffer(%p)",
1125 private_layer, private_layer->waiting_buffer);
1128 pthread_mutex_unlock(&private_display->lock);
1134 tdm_layer_unset_buffer(tdm_layer *layer)
1136 tdm_func_layer *func_layer;
1139 pthread_mutex_lock(&private_display->lock);
1141 func_layer = &private_display->func_layer;
1143 if (private_layer->waiting_buffer) {
1144 pthread_mutex_unlock(&private_display->lock);
1145 tdm_buffer_unref_backend(private_layer->waiting_buffer);
1146 pthread_mutex_lock(&private_display->lock);
1147 private_layer->waiting_buffer = NULL;
1149 if (tdm_debug_buffer)
1150 TDM_INFO("layer(%p) waiting_buffer(%p)",
1151 private_layer, private_layer->waiting_buffer);
1154 if (private_layer->showing_buffer) {
1155 pthread_mutex_unlock(&private_display->lock);
1156 tdm_buffer_unref_backend(private_layer->showing_buffer);
1157 pthread_mutex_lock(&private_display->lock);
1158 private_layer->showing_buffer = NULL;
1160 if (tdm_debug_buffer)
1161 TDM_INFO("layer(%p) showing_buffer(%p)",
1162 private_layer, private_layer->showing_buffer);
1165 private_layer->usable = 1;
1167 if (!func_layer->layer_unset_buffer) {
1168 pthread_mutex_unlock(&private_display->lock);
1169 return TDM_ERROR_NONE;
1172 ret = func_layer->layer_unset_buffer(private_layer->layer_backend);
1173 TDM_WARNING_IF_FAIL(ret == TDM_ERROR_NONE);
1175 pthread_mutex_unlock(&private_display->lock);
1181 _tbm_layer_queue_acquirable_cb(tbm_surface_queue_h surface_queue, void *data)
1183 TDM_RETURN_IF_FAIL(data != NULL);
1184 tdm_layer *layer = data;
1185 tdm_func_layer *func_layer;
1186 tbm_surface_h surface = NULL;
1187 LAYER_FUNC_ENTRY_VOID_RETURN();
1189 pthread_mutex_lock(&private_display->lock);
1191 func_layer = &private_display->func_layer;
1192 if (!func_layer->layer_set_buffer) {
1193 pthread_mutex_unlock(&private_display->lock);
1197 if (TBM_SURFACE_QUEUE_ERROR_NONE != tbm_surface_queue_acquire(
1198 private_layer->buffer_queue, &surface) ||
1200 TDM_ERR("layer(%p) tbm_surface_queue_acquire() failed surface:%p",
1201 private_layer, surface);
1202 pthread_mutex_unlock(&private_display->lock);
1206 ret = func_layer->layer_set_buffer(private_layer->layer_backend, surface);
1207 TDM_WARNING_IF_FAIL(ret == TDM_ERROR_NONE);
1209 if (ret == TDM_ERROR_NONE) {
1210 if (private_layer->waiting_buffer) {
1211 pthread_mutex_unlock(&private_display->lock);
1212 tdm_buffer_unref_backend(private_layer->waiting_buffer);
1213 tbm_surface_queue_release(private_layer->buffer_queue,
1214 private_layer->waiting_buffer);
1215 pthread_mutex_lock(&private_display->lock);
1218 private_layer->waiting_buffer = tdm_buffer_ref_backend(surface);
1220 if (tdm_debug_buffer)
1221 TDM_INFO("layer(%p) waiting_buffer(%p)",
1222 private_layer, private_layer->waiting_buffer);
1224 ret = _tdm_output_commit(private_layer->private_output, 0, NULL, NULL);
1225 if (ret != TDM_ERROR_NONE)
1226 TDM_ERR("layer(%p) _tdm_output_commit() is fail", private_layer);
1229 pthread_mutex_unlock(&private_display->lock);
1233 _tbm_layer_queue_destroy_cb(tbm_surface_queue_h surface_queue, void *data)
1235 TDM_RETURN_IF_FAIL(data != NULL);
1236 tdm_layer *layer = data;
1237 LAYER_FUNC_ENTRY_VOID_RETURN();
1238 TDM_RETURN_IF_FAIL(ret == TDM_ERROR_NONE);
1240 pthread_mutex_lock(&private_display->lock);
1242 if (private_layer->waiting_buffer) {
1243 pthread_mutex_unlock(&private_display->lock);
1244 tdm_buffer_unref_backend(private_layer->waiting_buffer);
1245 tbm_surface_queue_release(private_layer->buffer_queue,
1246 private_layer->waiting_buffer);
1247 pthread_mutex_lock(&private_display->lock);
1250 private_layer->buffer_queue = NULL;
1252 pthread_mutex_unlock(&private_display->lock);
1256 tdm_layer_set_buffer_queue(tdm_layer *layer, tbm_surface_queue_h buffer_queue)
1258 tdm_func_layer *func_layer;
1261 TDM_RETURN_VAL_IF_FAIL(buffer_queue != NULL, TDM_ERROR_INVALID_PARAMETER);
1263 pthread_mutex_lock(&private_display->lock);
1265 func_layer = &private_display->func_layer;
1267 private_layer->usable = 0;
1269 if (!func_layer->layer_set_buffer) {
1270 pthread_mutex_unlock(&private_display->lock);
1271 return TDM_ERROR_NONE;
1274 if (buffer_queue == private_layer->buffer_queue) {
1275 pthread_mutex_unlock(&private_display->lock);
1276 return TDM_ERROR_NONE;
1279 if (private_layer->waiting_buffer) {
1280 pthread_mutex_unlock(&private_display->lock);
1281 tdm_buffer_unref_backend(private_layer->waiting_buffer);
1282 tbm_surface_queue_release(private_layer->buffer_queue,
1283 private_layer->waiting_buffer);
1284 private_layer->waiting_buffer = NULL;
1285 pthread_mutex_lock(&private_display->lock);
1287 if (tdm_debug_buffer)
1288 TDM_INFO("layer(%p) waiting_buffer(%p)",
1289 private_layer, private_layer->waiting_buffer);
1292 private_layer->buffer_queue = buffer_queue;
1293 tbm_surface_queue_add_acquirable_cb(private_layer->buffer_queue,
1294 _tbm_layer_queue_acquirable_cb,
1296 tbm_surface_queue_add_destroy_cb(private_layer->buffer_queue,
1297 _tbm_layer_queue_destroy_cb,
1299 pthread_mutex_unlock(&private_display->lock);
1305 tdm_layer_unset_buffer_queue(tdm_layer *layer)
1307 tdm_func_layer *func_layer;
1310 pthread_mutex_lock(&private_display->lock);
1312 func_layer = &private_display->func_layer;
1314 if (private_layer->waiting_buffer) {
1315 pthread_mutex_unlock(&private_display->lock);
1316 tdm_buffer_unref_backend(private_layer->waiting_buffer);
1317 tbm_surface_queue_release(private_layer->buffer_queue,
1318 private_layer->waiting_buffer);
1319 private_layer->waiting_buffer = NULL;
1320 pthread_mutex_lock(&private_display->lock);
1322 if (tdm_debug_buffer)
1323 TDM_INFO("layer(%p) waiting_buffer(%p)",
1324 private_layer, private_layer->waiting_buffer);
1327 if (private_layer->showing_buffer) {
1328 pthread_mutex_unlock(&private_display->lock);
1329 tdm_buffer_unref_backend(private_layer->showing_buffer);
1330 tbm_surface_queue_release(private_layer->buffer_queue,
1331 private_layer->showing_buffer);
1332 pthread_mutex_lock(&private_display->lock);
1333 private_layer->showing_buffer = NULL;
1335 if (tdm_debug_buffer)
1336 TDM_INFO("layer(%p) showing_buffer(%p)",
1337 private_layer, private_layer->showing_buffer);
1340 tbm_surface_queue_remove_acquirable_cb(private_layer->buffer_queue, _tbm_layer_queue_acquirable_cb, layer);
1341 tbm_surface_queue_remove_destroy_cb(private_layer->buffer_queue, _tbm_layer_queue_destroy_cb, layer);
1342 private_layer->buffer_queue = NULL;
1343 private_layer->usable = 1;
1345 if (!func_layer->layer_unset_buffer) {
1346 pthread_mutex_unlock(&private_display->lock);
1347 return TDM_ERROR_NONE;
1350 ret = func_layer->layer_unset_buffer(private_layer->layer_backend);
1352 pthread_mutex_unlock(&private_display->lock);
1358 tdm_layer_is_usable(tdm_layer *layer, unsigned int *usable)
1362 TDM_RETURN_VAL_IF_FAIL(usable != NULL, TDM_ERROR_INVALID_PARAMETER);
1364 pthread_mutex_lock(&private_display->lock);
1366 *usable = private_layer->usable;
1368 pthread_mutex_unlock(&private_display->lock);
1374 tdm_layer_set_video_pos(tdm_layer *layer, int zpos)
1376 tdm_func_layer *func_layer;
1379 pthread_mutex_lock(&private_display->lock);
1381 func_layer = &private_display->func_layer;
1383 if (!(private_layer->caps.capabilities & TDM_LAYER_CAPABILITY_VIDEO)) {
1384 TDM_ERR("layer(%p) is not video layer", private_layer);
1385 pthread_mutex_unlock(&private_display->lock);
1386 return TDM_ERROR_INVALID_PARAMETER;
1389 if (!func_layer->layer_set_video_pos) {
1390 pthread_mutex_unlock(&private_display->lock);
1391 return TDM_ERROR_NONE;
1394 ret = func_layer->layer_set_video_pos(private_layer->layer_backend, zpos);
1396 pthread_mutex_unlock(&private_display->lock);
1401 EXTERN tdm_capture *
1402 tdm_layer_create_capture(tdm_layer *layer, tdm_error *error)
1404 tdm_capture *capture = NULL;
1406 LAYER_FUNC_ENTRY_ERROR();
1408 pthread_mutex_lock(&private_display->lock);
1410 capture = (tdm_capture *)tdm_capture_create_layer_internal(private_layer,
1413 pthread_mutex_unlock(&private_display->lock);