1 /**************************************************************************
5 Copyright 2012 Samsung Electronics co., Ltd. All Rights Reserved.
7 Contact: SooChan Lim <sc1.lim@samsung.com>, Sangjin Lee <lsj119@samsung.com>
9 Permission is hereby granted, free of charge, to any person obtaining a
10 copy of this software and associated documentation files (the
11 "Software"), to deal in the Software without restriction, including
12 without limitation the rights to use, copy, modify, merge, publish,
13 distribute, sub license, and/or sell copies of the Software, and to
14 permit persons to whom the Software is furnished to do so, subject to
15 the following conditions:
17 The above copyright notice and this permission notice (including the
18 next paragraph) shall be included in all copies or substantial portions
21 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
22 OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
23 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
24 IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
25 ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
26 TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
27 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
29 **************************************************************************/
43 #include <sys/ioctl.h>
44 #include <sys/types.h>
51 #include <drm/sprd_drm.h>
53 #include <hal-common.h>
54 #include <hal-tbm-types.h>
55 #include <hal-tbm-interface.h>
57 //#define USE_CONTIG_ONLY
62 #include "tbm_bufmgr_tgl.h"
64 #include "tbm_backend_log.h"
66 #define TBM_COLOR_FORMAT_COUNT 4
68 #define SPRD_DRM_NAME "sprd"
70 #define STRERR_BUFSIZE 128
72 #define SIZE_ALIGN(value, base) (((value) + ((base) - 1)) & ~((base) - 1))
74 #define TBM_SURFACE_ALIGNMENT_PLANE (64)
75 #define TBM_SURFACE_ALIGNMENT_PITCH_RGB (128)
76 #define TBM_SURFACE_ALIGNMENT_PITCH_YUV (16)
80 unsigned int fence_supported;
84 #define DMA_BUF_ACCESS_READ 0x1
85 #define DMA_BUF_ACCESS_WRITE 0x2
86 #define DMA_BUF_ACCESS_DMA 0x4
87 #define DMA_BUF_ACCESS_MAX 0x8
89 #define DMA_FENCE_LIST_MAX 5
91 struct dma_buf_fence {
96 #define DMABUF_IOCTL_BASE 'F'
97 #define DMABUF_IOWR(nr, type) _IOWR(DMABUF_IOCTL_BASE, nr, type)
99 #define DMABUF_IOCTL_GET_INFO DMABUF_IOWR(0x00, struct dma_buf_info)
100 #define DMABUF_IOCTL_GET_FENCE DMABUF_IOWR(0x01, struct dma_buf_fence)
101 #define DMABUF_IOCTL_PUT_FENCE DMABUF_IOWR(0x02, struct dma_buf_fence)
104 #define GLOBAL_KEY ((unsigned int)(-1))
106 #define TBM_SPRD_CACHE_INV 0x01 /**< cache invalidate */
107 #define TBM_SPRD_CACHE_CLN 0x02 /**< cache clean */
108 #define TBM_SPRD_CACHE_ALL 0x10 /**< cache all */
109 #define TBM_SPRD_CACHE_FLUSH (TBM_SPRD_CACHE_INV|TBM_SPRD_CACHE_CLN) /**< cache flush */
110 #define TBM_SPRD_CACHE_FLUSH_ALL (TBM_SPRD_CACHE_FLUSH|TBM_SPRD_CACHE_ALL) /**< cache flush all */
114 DEVICE_CA, /* cache aware device */
115 DEVICE_CO /* cache oblivious device */
118 typedef union _tbm_bo_cache_state tbm_bo_cache_state;
120 union _tbm_bo_cache_state {
123 unsigned int cntFlush:16; /*Flush all index for sync */
124 unsigned int isCached:1;
125 unsigned int isDirtied:2;
129 typedef struct _tbm_sprd_bufmgr tbm_sprd_bufmgr;
130 typedef struct _tbm_sprd_bo tbm_sprd_bo;
132 /* tbm buffor object for sprd */
133 struct _tbm_sprd_bo {
136 unsigned int name; /* FLINK ID */
138 unsigned int gem; /* GEM Handle */
140 unsigned int dmabuf; /* fd for dmabuf */
142 void *pBase; /* virtual address */
146 unsigned int flags_sprd;
147 unsigned int flags_tbm;
149 pthread_mutex_t mutex;
150 struct dma_buf_fence dma_fence[DMA_FENCE_LIST_MAX];
154 tbm_bo_cache_state cache_state;
155 unsigned int map_cnt;
157 tbm_sprd_bufmgr *bufmgr_data;
160 /* tbm bufmgr private for sprd */
161 struct _tbm_sprd_bufmgr {
170 char *STR_DEVICE[] = {
186 uint32_t tbm_sprd_color_format_list[TBM_COLOR_FORMAT_COUNT] = {
187 HAL_TBM_FORMAT_ARGB8888,
188 HAL_TBM_FORMAT_XRGB8888,
190 HAL_TBM_FORMAT_YUV420
195 _tgl_get_version(int fd)
197 struct tgl_ver_data data;
199 char buf[STRERR_BUFSIZE];
201 err = ioctl(fd, TGL_IOCTL_GET_VERSION, &data);
203 TBM_BACKEND_ERR("error(%s) %s:%d\n",
204 strerror_r(errno, buf, STRERR_BUFSIZE));
208 TBM_BACKEND_DBG("tgl version is (%u, %u).\n", data.major, data.minor);
214 _tgl_init(int fd, unsigned int key)
216 struct tgl_reg_data data;
218 char buf[STRERR_BUFSIZE];
221 data.timeout_ms = 1000;
223 err = ioctl(fd, TGL_IOCTL_REGISTER, &data);
225 TBM_BACKEND_ERR("error(%s) key:%d\n",
226 strerror_r(errno, buf, STRERR_BUFSIZE), key);
234 _tgl_destroy(int fd, unsigned int key)
236 struct tgl_reg_data data;
238 char buf[STRERR_BUFSIZE];
241 err = ioctl(fd, TGL_IOCTL_UNREGISTER, &data);
243 TBM_BACKEND_ERR("error(%s) key:%d\n",
244 strerror_r(errno, buf, STRERR_BUFSIZE), key);
252 _tgl_lock(int fd, unsigned int key, int opt)
254 struct tgl_lock_data data;
255 enum tgl_type_data tgl_type;
257 char buf[STRERR_BUFSIZE];
260 case TBM_OPTION_READ:
261 tgl_type = TGL_TYPE_READ;
263 case TBM_OPTION_WRITE:
264 tgl_type = TGL_TYPE_WRITE;
267 tgl_type = TGL_TYPE_NONE;
272 data.type = tgl_type;
274 err = ioctl(fd, TGL_IOCTL_LOCK, &data);
276 TBM_BACKEND_ERR("error(%s) key:%d opt:%d\n",
277 strerror_r(errno, buf, STRERR_BUFSIZE), key, opt);
285 _tgl_unlock(int fd, unsigned int key)
287 struct tgl_lock_data data;
289 char buf[STRERR_BUFSIZE];
292 data.type = TGL_TYPE_NONE;
294 err = ioctl(fd, TGL_IOCTL_UNLOCK, &data);
296 TBM_BACKEND_ERR("error(%s) key:%d\n",
297 strerror_r(errno, buf, STRERR_BUFSIZE), key);
305 _tgl_set_data(int fd, unsigned int key, unsigned int val)
307 struct tgl_usr_data data;
309 char buf[STRERR_BUFSIZE];
314 err = ioctl(fd, TGL_IOCTL_SET_DATA, &data);
316 TBM_BACKEND_ERR("error(%s) key:%d\n",
317 strerror_r(errno, buf, STRERR_BUFSIZE), key);
324 static inline unsigned int
325 _tgl_get_data(int fd, unsigned int key, unsigned int *locked)
327 struct tgl_usr_data data = { 0, };
329 char buf[STRERR_BUFSIZE];
333 err = ioctl(fd, TGL_IOCTL_GET_DATA, &data);
335 TBM_BACKEND_ERR("error(%s) key:%d\n",
336 strerror_r(errno, buf, STRERR_BUFSIZE), key);
341 *locked = (unsigned int)data.status;
/* Open the sprd DRM device node and return its fd.
 * Fast path: drmOpen() by driver name. Fallback: enumerate udev "drm"
 * subsystem "card[0-9]*" nodes and pick the one whose parent platform
 * device is "sprd-drm", then open its devnode with O_RDWR | O_CLOEXEC.
 * NOTE(review): this view is missing several lines (declarations of
 * fd/udev/stat buffer, branch braces, loop break, returns) — only the
 * visible statements are annotated. */
_tbm_sprd_open_drm(void)
	struct udev_device *drm_device = NULL;
	struct udev_list_entry *entry = NULL;
	struct udev_enumerate *e;
	const char *filepath;

	/* fast path: open by DRM driver name */
	fd = drmOpen(SPRD_DRM_NAME, NULL);
		TBM_BACKEND_DBG("warning fail to open drm. search drm-device by udev\n");
		TBM_BACKEND_ERR("udev_new() failed.\n");

	/* enumerate every drm card* device known to udev */
	e = udev_enumerate_new(udev);
	udev_enumerate_add_match_subsystem(e, "drm");
	udev_enumerate_add_match_sysname(e, "card[0-9]*");
	udev_enumerate_scan_devices(e);

	udev_list_entry_foreach(entry, udev_enumerate_get_list_entry(e)) {
		struct udev_device *device, *device_parent;

		device = udev_device_new_from_syspath(udev_enumerate_get_udev(e),
			udev_list_entry_get_name(entry));
		device_parent = udev_device_get_parent(device);
		/* Not need unref device_parent. device_parent and device have same refcnt */
		/* match: this card node belongs to the sprd-drm platform device */
		if (strcmp(udev_device_get_sysname(device_parent), "sprd-drm") == 0) {
			TBM_BACKEND_DBG("Found render device: '%s' (%s)\n",
				udev_device_get_syspath(drm_device),
				udev_device_get_sysname(device_parent));
		/* drop the per-iteration device reference */
		udev_device_unref(device);
	udev_enumerate_unref(e);

	/* Get device file path. */
	filepath = udev_device_get_devnode(drm_device);
		/* NOTE(review): no NULL-check on drm_device is visible before this
		 * call — if the loop found no match this dereferences NULL; verify. */
		TBM_BACKEND_ERR("udev_device_get_devnode() failed.\n");
		udev_device_unref(drm_device);

	udev_device_unref(drm_device);
	/* NOTE(review): drm_device is unreffed above while filepath (owned by the
	 * udev device) is still used below — confirm lifetime against libudev docs. */

	/* Open DRM device file and check validity. */
	fd = open(filepath, O_RDWR | O_CLOEXEC);
		/* NOTE(review): "%s" has no matching argument in the two logs below. */
		TBM_BACKEND_ERR("open(%s, O_RDWR | O_CLOEXEC) failed.\n");
		TBM_BACKEND_ERR("fstat() failed %s.\n");
/* Flush/invalidate CPU caches for @bo_sprd via the sprd DRM cache ioctl.
 * @flags is a TBM_SPRD_CACHE_* bitmask; a NULL bo requests a full
 * cache_flush_all. No-op when dma-fence is in use (kernel handles it)
 * or when TBM_SRPD_CACHE_FLUSH is not enabled (see TODO below).
 * NOTE(review): the parameter is declared "tbm_sprd_bo bo_sprd" (by
 * value) yet dereferenced with "->" below — likely should be a pointer;
 * verify against the header. Several branch lines are missing from this
 * view. */
_sprd_bo_cache_flush(tbm_sprd_bufmgr *bufmgr_data, tbm_sprd_bo bo_sprd, int flags)
	TBM_BACKEND_RETURN_VAL_IF_FAIL(bufmgr_data != NULL, 0);

	/* cache flush is managed by kernel side when using dma-fence. */
	if (bufmgr_data->use_dma_fence)

// TODO: The tm1 kernel does not support ioctl for cache flush right now.
// The drm in tm1 kernel has to support cache_flush to turn on this feature(TBM_SRPD_CACHE_FLUSH).
#if TBM_SRPD_CACHE_FLUSH
	struct drm_sprd_gem_cache_op cache_op = {0, };

	/* if bo_sprd is null, do cache_flush_all */
		cache_op.usr_addr = (uint64_t)((uint32_t)bo_sprd->pBase);
		cache_op.size = bo_sprd->size;
		/* NULL bo: force a whole-cache flush with no user range */
		flags = TBM_SPRD_CACHE_FLUSH_ALL;
		cache_op.usr_addr = 0;

	/* translate TBM cache flags into SPRD DRM cache-op flags */
	if (flags & TBM_SPRD_CACHE_INV) {
		if (flags & TBM_SPRD_CACHE_ALL)
			cache_op.flags |= SPRD_DRM_CACHE_INV_ALL;
			cache_op.flags |= SPRD_DRM_CACHE_INV_RANGE;

	if (flags & TBM_SPRD_CACHE_CLN) {
		if (flags & TBM_SPRD_CACHE_ALL)
			cache_op.flags |= SPRD_DRM_CACHE_CLN_ALL;
			cache_op.flags |= SPRD_DRM_CACHE_CLN_RANGE;

	if (flags & TBM_SPRD_CACHE_ALL)
		cache_op.flags |= SPRD_DRM_ALL_CACHES_CORES;

	ret = drmCommandWriteRead(bufmgr_data->fd, DRM_SPRD_GEM_CACHE_OP, &cache_op,
		TBM_BACKEND_ERR("error fail to flush the cache.\n");
485 _bo_init_cache_state(tbm_sprd_bufmgr *bufmgr_data, tbm_sprd_bo *bo_data, int import)
488 TBM_BACKEND_RETURN_VAL_IF_FAIL(bo_data != NULL, 0);
489 TBM_BACKEND_RETURN_VAL_IF_FAIL(bufmgr_data != NULL, 0);
491 if (bufmgr_data->use_dma_fence)
494 _tgl_init(bufmgr_data->tgl_fd, bo_data->name);
496 tbm_bo_cache_state cache_state;
499 cache_state.data.isDirtied = DEVICE_NONE;
500 cache_state.data.isCached = 0;
501 cache_state.data.cntFlush = 0;
503 _tgl_set_data(bufmgr_data->tgl_fd, bo_data->name, cache_state.val);
/* Update a BO's shared cache state on map and decide whether a CPU cache
 * flush/invalidate is needed, based on which side (CPU = cache-aware,
 * other devices = cache-oblivious) last dirtied the buffer and on the
 * global flush counter stored under GLOBAL_KEY.
 * No-op when dma-fence is in use or the BO is non-cachable.
 * NOTE(review): "opt == HAL_TBM_DEVICE_CPU" compares the access option
 * against a *device* enum — this looks like it should test "device";
 * confirm against upstream. The declaration of need_flush and several
 * braces/else lines are missing from this view. */
_bo_set_cache_state(tbm_sprd_bufmgr *bufmgr_data, tbm_sprd_bo *bo_data, int device, int opt)
	TBM_BACKEND_RETURN_VAL_IF_FAIL(bo_data != NULL, 0);
	TBM_BACKEND_RETURN_VAL_IF_FAIL(bufmgr_data != NULL, 0);
	unsigned short cntFlush = 0;

	if (bufmgr_data->use_dma_fence)

	if (bo_data->flags_sprd & SPRD_BO_NONCACHABLE)

	/* get cache state of a bo_data */
	bo_data->cache_state.val = _tgl_get_data(bufmgr_data->tgl_fd, bo_data->name, NULL);

	/* get global cache flush count */
	cntFlush = (unsigned short)_tgl_get_data(bufmgr_data->tgl_fd, GLOBAL_KEY, NULL);

	if (opt == HAL_TBM_DEVICE_CPU) {
		/* a cache-oblivious device dirtied it while CPU caches held data:
		 * invalidate before the CPU reads */
		if (bo_data->cache_state.data.isDirtied == DEVICE_CO &&
		    bo_data->cache_state.data.isCached)
			need_flush = TBM_SPRD_CACHE_INV;
		bo_data->cache_state.data.isCached = 1;
		if (opt & TBM_OPTION_WRITE)
			bo_data->cache_state.data.isDirtied = DEVICE_CA;
			if (bo_data->cache_state.data.isDirtied != DEVICE_CA)
				bo_data->cache_state.data.isDirtied = DEVICE_NONE;
		/* CPU dirtied it and no flush happened since: clean all before a
		 * cache-oblivious device reads */
		if (bo_data->cache_state.data.isDirtied == DEVICE_CA &&
		    bo_data->cache_state.data.isCached &&
		    bo_data->cache_state.data.cntFlush == cntFlush)
			need_flush = TBM_SPRD_CACHE_CLN | TBM_SPRD_CACHE_ALL;
		if (opt & TBM_OPTION_WRITE)
			bo_data->cache_state.data.isDirtied = DEVICE_CO;
			if (bo_data->cache_state.data.isDirtied != DEVICE_CO)
				bo_data->cache_state.data.isDirtied = DEVICE_NONE;

	/* a whole-cache flush invalidates everyone's state: bump the counter */
	if (need_flush & TBM_SPRD_CACHE_ALL)
		_tgl_set_data(bufmgr_data->tgl_fd, GLOBAL_KEY, (unsigned int)(++cntFlush));

	/* call cache flush */
	_sprd_bo_cache_flush(bufmgr_data, bo_data, need_flush);

	TBM_BACKEND_DBG("\tcache(%d,%d)....flush:0x%x, cntFlush(%d)\n",
		bo_data->cache_state.data.isCached,
		bo_data->cache_state.data.isDirtied,
577 _bo_save_cache_state(tbm_sprd_bufmgr *bufmgr_data, tbm_sprd_bo *bo_data)
580 TBM_BACKEND_RETURN_VAL_IF_FAIL(bo_data != NULL, 0);
581 TBM_BACKEND_RETURN_VAL_IF_FAIL(bufmgr_data != NULL, 0);
583 if (bufmgr_data->use_dma_fence)
586 unsigned short cntFlush = 0;
588 /* get global cache flush count */
589 cntFlush = (unsigned short)_tgl_get_data(bufmgr_data->tgl_fd, GLOBAL_KEY, NULL);
591 /* save global cache flush count */
592 bo_data->cache_state.data.cntFlush = cntFlush;
593 _tgl_set_data(bufmgr_data->tgl_fd, bo_data->name, bo_data->cache_state.val);
600 _bo_destroy_cache_state(tbm_sprd_bufmgr *bufmgr_data, tbm_sprd_bo *bo_data)
603 TBM_BACKEND_RETURN_IF_FAIL(bo_data != NULL);
604 TBM_BACKEND_RETURN_IF_FAIL(bufmgr_data != NULL);
606 if (bufmgr_data->use_dma_fence)
609 _tgl_destroy(bufmgr_data->tgl_fd, bo_data->name);
/* Open the TGL device and register the GLOBAL_KEY slot used for the
 * global cache-flush counter. Tries tgl_devfile first, then the
 * alternate tgl_devfile1 node. No-op when dma-fence is in use.
 * NOTE(review): the error-log argument after "global_lock:%s" and the
 * return statements are missing from this view. */
_bufmgr_init_cache_state(tbm_sprd_bufmgr *bufmgr_data)
	TBM_BACKEND_RETURN_VAL_IF_FAIL(bufmgr_data != NULL, 0);

	if (bufmgr_data->use_dma_fence)

	/* open tgl fd for saving cache flush data */
	bufmgr_data->tgl_fd = open(tgl_devfile, O_RDWR);

	if (bufmgr_data->tgl_fd < 0) {
		/* fall back to the alternate device node */
		bufmgr_data->tgl_fd = open(tgl_devfile1, O_RDWR);
		if (bufmgr_data->tgl_fd < 0) {
			TBM_BACKEND_ERR("fail to open global_lock:%s\n",

	/* a usable driver must report its version */
	if (!_tgl_get_version(bufmgr_data->tgl_fd)) {
		TBM_BACKEND_ERR("fail to get tgl_version. tgl init failed.\n");
		close(bufmgr_data->tgl_fd);

	if (!_tgl_init(bufmgr_data->tgl_fd, GLOBAL_KEY)) {
		TBM_BACKEND_ERR("fail to initialize the tgl\n");
		close(bufmgr_data->tgl_fd);
651 _bufmgr_deinit_cache_state(tbm_sprd_bufmgr *bufmgr_data)
654 TBM_BACKEND_RETURN_IF_FAIL(bufmgr_data != NULL);
656 if (bufmgr_data->use_dma_fence)
659 if (bufmgr_data->tgl_fd >= 0)
660 close(bufmgr_data->tgl_fd);
664 #ifndef USE_CONTIG_ONLY
666 _get_sprd_flag_from_tbm(unsigned int ftbm)
668 unsigned int flags = 0;
671 * HAL_TBM_BO_DEFAULT => ION_HEAP_ID_MASK_SYSTEM
672 * HAL_TBM_BO_SCANOUT => ION_HEAP_ID_MASK_MM
673 * TBM_BO_VENDOR => ION_HEAP_ID_MASK_OVERLAY
674 * To be updated appropriately once DRM-GEM supports different heap id masks.
677 if (ftbm & HAL_TBM_BO_SCANOUT)
678 flags = SPRD_BO_CONTIG;
680 flags = SPRD_BO_NONCONTIG | SPRD_BO_DEV_SYSTEM;
682 if (ftbm & HAL_TBM_BO_WC)
684 else if (ftbm & HAL_TBM_BO_NONCACHABLE)
685 flags |= SPRD_BO_NONCACHABLE;
691 _get_tbm_flag_from_sprd(unsigned int fsprd)
693 unsigned int flags = 0;
695 if (fsprd & SPRD_BO_NONCONTIG)
696 flags |= HAL_TBM_BO_DEFAULT;
698 flags |= HAL_TBM_BO_SCANOUT;
700 if (fsprd & SPRD_BO_WC)
701 flags |= HAL_TBM_BO_WC;
702 else if (fsprd & SPRD_BO_CACHABLE)
703 flags |= HAL_TBM_BO_DEFAULT;
705 flags |= HAL_TBM_BO_NONCACHABLE;
712 _get_name(int fd, unsigned int gem)
714 struct drm_gem_flink arg = {0,};
717 if (drmIoctl(fd, DRM_IOCTL_GEM_FLINK, &arg)) {
718 TBM_BACKEND_ERR("fail to DRM_IOCTL_GEM_FLINK gem:%d", gem);
722 return (unsigned int)arg.name;
725 static hal_tbm_bo_handle
726 _sprd_bo_handle(tbm_sprd_bo *bo_data, int device)
728 hal_tbm_bo_handle bo_handle;
730 memset(&bo_handle, 0x0, sizeof(uint64_t));
733 case HAL_TBM_DEVICE_DEFAULT:
734 case HAL_TBM_DEVICE_2D:
735 bo_handle.u32 = (uint32_t)bo_data->gem;
737 case HAL_TBM_DEVICE_CPU:
738 if (!bo_data->pBase) {
739 struct drm_sprd_gem_mmap arg = {0,};
741 arg.handle = bo_data->gem;
742 arg.size = bo_data->size;
743 if (drmCommandWriteRead(bo_data->fd, DRM_SPRD_GEM_MMAP, &arg, sizeof(arg))) {
744 TBM_BACKEND_ERR("error Cannot usrptr gem=%d\n", bo_data->gem);
745 return (hal_tbm_bo_handle) NULL;
747 bo_data->pBase = (void *)((uint32_t)arg.mapped);
750 bo_handle.ptr = (void *)bo_data->pBase;
752 case HAL_TBM_DEVICE_3D:
754 if (!bo_data->dmabuf) {
755 struct drm_prime_handle arg = {0, };
756 arg.handle = bo_data->gem;
757 if (drmIoctl(bo_data->fd, DRM_IOCTL_PRIME_HANDLE_TO_FD, &arg)) {
758 TBM_BACKEND_ERR("error Cannot dmabuf=%d\n", bo_data->gem);
759 return (hal_tbm_bo_handle) NULL;
761 bo_data->dmabuf = arg.fd;
764 bo_handle.u32 = (uint32_t)bo_data->dmabuf;
769 case HAL_TBM_DEVICE_MM:
771 //TODO : Add ioctl for GSP MAP once available.
772 TBM_BACKEND_DBG("%s In case HAL_TBM_DEVICE_MM: \n", __FUNCTION_);
774 if (!bo_data->dmabuf) {
775 struct drm_prime_handle arg = {0, };
777 arg.handle = bo_data->gem;
778 if (drmIoctl(bo_data->fd, DRM_IOCTL_PRIME_HANDLE_TO_FD, &arg)) {
779 TBM_BACKEND_ERR("error Cannot dmabuf=%d\n", bo_data->gem);
780 return (hal_tbm_bo_handle) NULL;
782 bo_data->dmabuf = arg.fd;
785 bo_handle.u32 = (uint32_t)bo_data->dmabuf;
789 TBM_BACKEND_ERR("Not supported device:%d\n", device);
790 bo_handle.ptr = (void *) NULL;
797 static hal_tbm_bufmgr_capability
798 tbm_sprd_bufmgr_get_capabilities(hal_tbm_bufmgr *bufmgr_data, hal_tbm_error *error)
800 hal_tbm_bufmgr_capability capabilities = HAL_TBM_BUFMGR_CAPABILITY_NONE;
802 capabilities = HAL_TBM_BUFMGR_CAPABILITY_SHARE_KEY | HAL_TBM_BUFMGR_CAPABILITY_SHARE_FD;
805 *error = HAL_TBM_ERROR_NONE;
811 tbm_sprd_bufmgr_get_supported_formats(hal_tbm_bufmgr *bufmgr,
812 uint32_t **formats, uint32_t *num)
814 tbm_sprd_bufmgr *bufmgr_data = (tbm_sprd_bufmgr *)bufmgr;
815 uint32_t *color_formats;
817 TBM_BACKEND_RETURN_VAL_IF_FAIL(bufmgr_data != NULL, HAL_TBM_ERROR_INVALID_PARAMETER);
819 color_formats = (uint32_t *)calloc(1, sizeof(uint32_t) * TBM_COLOR_FORMAT_COUNT);
820 if (color_formats == NULL)
821 return HAL_TBM_ERROR_OUT_OF_MEMORY;
823 memcpy(color_formats, tbm_sprd_color_format_list, sizeof(uint32_t)*TBM_COLOR_FORMAT_COUNT);
825 *formats = color_formats;
826 *num = TBM_COLOR_FORMAT_COUNT;
828 TBM_BACKEND_DBG("supported format count = %d\n", *num);
830 return HAL_TBM_ERROR_NONE;
/* Compute per-plane layout (size/offset/pitch/bo index) for @format at
 * @width x @height. RGB pitches align to TBM_SURFACE_ALIGNMENT_PITCH_RGB
 * (128), YUV pitches to TBM_SURFACE_ALIGNMENT_PITCH_YUV (16), and plane
 * sizes to TBM_SURFACE_ALIGNMENT_PLANE (64).
 * NOTE(review): this view of the function is heavily truncated — the
 * "switch (format) {" line, the bpp assignments, the per-plane
 * _offset/_bo_idx assignments, the writebacks to *size/*offset/*pitch/
 * *bo_idx, and the break statements are not visible. Only the visible
 * pitch/size arithmetic is annotated. */
tbm_sprd_bufmgr_get_plane_data(hal_tbm_bufmgr *bufmgr,
		hal_tbm_format format, int plane_idx, int width,
		int height, uint32_t *size, uint32_t *offset,
		uint32_t *pitch, int *bo_idx)
	tbm_sprd_bufmgr *bufmgr_data = (tbm_sprd_bufmgr *)bufmgr;
	int _align_height = 0;

	TBM_BACKEND_RETURN_VAL_IF_FAIL(bufmgr_data != NULL, HAL_TBM_ERROR_INVALID_PARAMETER);

	/* 16-bpp single-plane RGB formats */
	case HAL_TBM_FORMAT_XRGB4444:
	case HAL_TBM_FORMAT_XBGR4444:
	case HAL_TBM_FORMAT_RGBX4444:
	case HAL_TBM_FORMAT_BGRX4444:
	case HAL_TBM_FORMAT_ARGB4444:
	case HAL_TBM_FORMAT_ABGR4444:
	case HAL_TBM_FORMAT_RGBA4444:
	case HAL_TBM_FORMAT_BGRA4444:
	case HAL_TBM_FORMAT_XRGB1555:
	case HAL_TBM_FORMAT_XBGR1555:
	case HAL_TBM_FORMAT_RGBX5551:
	case HAL_TBM_FORMAT_BGRX5551:
	case HAL_TBM_FORMAT_ARGB1555:
	case HAL_TBM_FORMAT_ABGR1555:
	case HAL_TBM_FORMAT_RGBA5551:
	case HAL_TBM_FORMAT_BGRA5551:
	case HAL_TBM_FORMAT_RGB565:
		/* pitch in bytes = width * bpp / 8, RGB-aligned */
		_pitch = SIZE_ALIGN((width * bpp) >> 3, TBM_SURFACE_ALIGNMENT_PITCH_RGB);
		_size = SIZE_ALIGN(_pitch * height, TBM_SURFACE_ALIGNMENT_PLANE);

	/* 24-bpp single-plane RGB formats */
	case HAL_TBM_FORMAT_RGB888:
	case HAL_TBM_FORMAT_BGR888:
		_pitch = SIZE_ALIGN((width * bpp) >> 3, TBM_SURFACE_ALIGNMENT_PITCH_RGB);
		_size = SIZE_ALIGN(_pitch * height, TBM_SURFACE_ALIGNMENT_PLANE);

	/* 32-bpp single-plane RGB formats */
	case HAL_TBM_FORMAT_XRGB8888:
	case HAL_TBM_FORMAT_XBGR8888:
	case HAL_TBM_FORMAT_RGBX8888:
	case HAL_TBM_FORMAT_BGRX8888:
	case HAL_TBM_FORMAT_ARGB8888:
	case HAL_TBM_FORMAT_ABGR8888:
	case HAL_TBM_FORMAT_RGBA8888:
	case HAL_TBM_FORMAT_BGRA8888:
		_pitch = SIZE_ALIGN((width * bpp) >> 3, TBM_SURFACE_ALIGNMENT_PITCH_RGB);
		_size = SIZE_ALIGN(_pitch * height, TBM_SURFACE_ALIGNMENT_PLANE);

	/* packed single-plane YUV formats */
	case HAL_TBM_FORMAT_YUYV:
	case HAL_TBM_FORMAT_YVYU:
	case HAL_TBM_FORMAT_UYVY:
	case HAL_TBM_FORMAT_VYUY:
	case HAL_TBM_FORMAT_AYUV:
		_pitch = SIZE_ALIGN((width * bpp) >> 3, TBM_SURFACE_ALIGNMENT_PITCH_YUV);
		_size = SIZE_ALIGN(_pitch * height, TBM_SURFACE_ALIGNMENT_PLANE);

	/*
	 * index 0 = Y plane, [7:0] Y
	 * index 1 = Cr:Cb plane, [15:0] Cr:Cb little endian
	 * index 1 = Cb:Cr plane, [15:0] Cb:Cr little endian
	 */
	case HAL_TBM_FORMAT_NV12:
	case HAL_TBM_FORMAT_NV21:
		// if (plane_idx == 0)
		/* Y plane: full-resolution */
		_pitch = SIZE_ALIGN(width , TBM_SURFACE_ALIGNMENT_PITCH_YUV);
		_align_height = SIZE_ALIGN(height, TBM_SURFACE_ALIGNMENT_PITCH_YUV);
		_size = SIZE_ALIGN(_pitch * _align_height, TBM_SURFACE_ALIGNMENT_PLANE);
		// else if (plane_idx == 1)
		/* interleaved chroma plane: half height, full pitch */
		_pitch = SIZE_ALIGN(width , TBM_SURFACE_ALIGNMENT_PITCH_YUV);
		_align_height = SIZE_ALIGN(height / 2, TBM_SURFACE_ALIGNMENT_PITCH_YUV);
		_size = SIZE_ALIGN(_pitch * _align_height, TBM_SURFACE_ALIGNMENT_PLANE);

	case HAL_TBM_FORMAT_NV16:
	case HAL_TBM_FORMAT_NV61:
		/* 4:2:2 semi-planar: chroma plane keeps full height */
		_pitch = SIZE_ALIGN(width, TBM_SURFACE_ALIGNMENT_PITCH_YUV);
		_size = SIZE_ALIGN(_pitch * height, TBM_SURFACE_ALIGNMENT_PLANE);
		//else if( plane_idx ==1 )
		_pitch = SIZE_ALIGN(width, TBM_SURFACE_ALIGNMENT_PITCH_YUV);
		_size = SIZE_ALIGN(_pitch * height, TBM_SURFACE_ALIGNMENT_PLANE);

	/*
	 * index 0: Y plane, [7:0] Y
	 * index 1: Cb plane, [7:0] Cb
	 * index 2: Cr plane, [7:0] Cr
	 * index 1: Cr plane, [7:0] Cr
	 * index 2: Cb plane, [7:0] Cb
	 *
	 * NATIVE_BUFFER_FORMAT_YV12
	 * NATIVE_BUFFER_FORMAT_I420
	 */
	case HAL_TBM_FORMAT_YUV410:
	case HAL_TBM_FORMAT_YVU410:
	case HAL_TBM_FORMAT_YUV411:
	case HAL_TBM_FORMAT_YVU411:
	case HAL_TBM_FORMAT_YUV420:
	case HAL_TBM_FORMAT_YVU420:
		/* Y plane */
		_pitch = SIZE_ALIGN(width, TBM_SURFACE_ALIGNMENT_PITCH_YUV);
		_align_height = SIZE_ALIGN(height, TBM_SURFACE_ALIGNMENT_PITCH_YUV);
		_size = SIZE_ALIGN(_pitch * _align_height, TBM_SURFACE_ALIGNMENT_PLANE);
		//else if( plane_idx == 1 )
		/* first chroma plane: half width, half height */
		_pitch = SIZE_ALIGN(width / 2, TBM_SURFACE_ALIGNMENT_PITCH_YUV / 2);
		_align_height = SIZE_ALIGN(height / 2, TBM_SURFACE_ALIGNMENT_PITCH_YUV);
		_size = SIZE_ALIGN(_pitch * _align_height, TBM_SURFACE_ALIGNMENT_PLANE);
		//else if (plane_idx == 2 )
		/* second chroma plane: same layout as the first */
		_pitch = SIZE_ALIGN(width / 2, TBM_SURFACE_ALIGNMENT_PITCH_YUV / 2);
		_align_height = SIZE_ALIGN(height / 2, TBM_SURFACE_ALIGNMENT_PITCH_YUV);
		_size = SIZE_ALIGN(_pitch * _align_height, TBM_SURFACE_ALIGNMENT_PLANE);

	case HAL_TBM_FORMAT_YUV422:
	case HAL_TBM_FORMAT_YVU422:
		//if(plane_idx == 0)
		/* Y plane */
		_pitch = SIZE_ALIGN(width, TBM_SURFACE_ALIGNMENT_PITCH_YUV);
		_size = SIZE_ALIGN(_pitch * height, TBM_SURFACE_ALIGNMENT_PLANE);
		//else if( plane_idx == 1 )
		/* chroma planes: half width, full height (4:2:2) */
		_pitch = SIZE_ALIGN(width / 2, TBM_SURFACE_ALIGNMENT_PITCH_YUV / 2);
		_size = SIZE_ALIGN(_pitch * (height), TBM_SURFACE_ALIGNMENT_PLANE);
		//else if (plane_idx == 2 )
		_pitch = SIZE_ALIGN(width / 2, TBM_SURFACE_ALIGNMENT_PITCH_YUV / 2);
		_size = SIZE_ALIGN(_pitch * (height), TBM_SURFACE_ALIGNMENT_PLANE);

	case HAL_TBM_FORMAT_YUV444:
	case HAL_TBM_FORMAT_YVU444:
		//if(plane_idx == 0)
		/* all three planes are full resolution (4:4:4) */
		_pitch = SIZE_ALIGN(width, TBM_SURFACE_ALIGNMENT_PITCH_YUV);
		_size = SIZE_ALIGN(_pitch * height, TBM_SURFACE_ALIGNMENT_PLANE);
		//else if( plane_idx == 1 )
		_pitch = SIZE_ALIGN(width, TBM_SURFACE_ALIGNMENT_PITCH_YUV);
		_size = SIZE_ALIGN(_pitch * height, TBM_SURFACE_ALIGNMENT_PLANE);
		//else if (plane_idx == 2 )
		_pitch = SIZE_ALIGN(width, TBM_SURFACE_ALIGNMENT_PITCH_YUV);
		_size = SIZE_ALIGN(_pitch * height, TBM_SURFACE_ALIGNMENT_PLANE);

	return HAL_TBM_ERROR_NONE;
1086 tbm_sprd_bufmgr_alloc_bo(hal_tbm_bufmgr *bufmgr, unsigned int size,
1087 hal_tbm_bo_memory_type flags, hal_tbm_error *error)
1089 tbm_sprd_bufmgr *bufmgr_data = (tbm_sprd_bufmgr *)bufmgr;
1090 tbm_sprd_bo *bo_data;
1091 unsigned int sprd_flags;
1093 if (bufmgr_data == NULL) {
1094 TBM_BACKEND_ERR("bufmgr_data is null\n");
1096 *error = HAL_TBM_ERROR_INVALID_PARAMETER;
1100 bo_data = calloc(1, sizeof(struct _tbm_sprd_bo));
1102 TBM_BACKEND_ERR("error fail to allocate the bo_data\n");
1104 *error = HAL_TBM_ERROR_OUT_OF_MEMORY;
1107 bo_data->bufmgr_data = bufmgr_data;
1109 #ifdef USE_CONTIG_ONLY
1110 flags = HAL_TBM_BO_SCANOUT;
1111 sprd_flags = SPRD_BO_CONTIG;
1113 sprd_flags = _get_sprd_flag_from_tbm(flags);
1114 if ((flags & HAL_TBM_BO_SCANOUT) && (size <= 4 * 1024))
1115 sprd_flags |= SPRD_BO_NONCONTIG;
1116 #endif // USE_CONTIG_ONLY
1118 struct drm_sprd_gem_create arg = {0, };
1120 arg.size = (uint64_t)size;
1121 arg.flags = sprd_flags;
1122 if (drmCommandWriteRead(bufmgr_data->fd, DRM_SPRD_GEM_CREATE, &arg,
1124 TBM_BACKEND_ERR("error Cannot create bo_data(flag:%x, size:%d)\n",
1125 arg.flags, (unsigned int)arg.size);
1128 *error = HAL_TBM_ERROR_INVALID_OPERATION;
1132 bo_data->fd = bufmgr_data->fd;
1133 bo_data->gem = arg.handle;
1134 bo_data->size = size;
1135 bo_data->flags_tbm = flags;
1136 bo_data->flags_sprd = sprd_flags;
1137 bo_data->name = _get_name(bo_data->fd, bo_data->gem);
1139 if (!_bo_init_cache_state(bufmgr_data, bo_data, 0)) {
1140 TBM_BACKEND_ERR("error fail init cache state(%d)\n", bo_data->name);
1143 *error = HAL_TBM_ERROR_INVALID_OPERATION;
1147 pthread_mutex_init(&bo_data->mutex, NULL);
1149 if (bufmgr_data->use_dma_fence && !bo_data->dmabuf) {
1150 struct drm_prime_handle arg = {0, };
1152 arg.handle = bo_data->gem;
1153 if (drmIoctl(bo_data->fd, DRM_IOCTL_PRIME_HANDLE_TO_FD, &arg)) {
1154 TBM_BACKEND_ERR("error Cannot dmabuf=%d\n", bo_data->gem);
1157 *error = HAL_TBM_ERROR_INVALID_OPERATION;
1160 bo_data->dmabuf = arg.fd;
1163 /* add bo_data to hash */
1164 if (drmHashInsert(bufmgr_data->hashBos, bo_data->name, (void *)bo_data) < 0)
1165 TBM_BACKEND_ERR("Cannot insert bo_data to Hash(%d)\n", bo_data->name);
1167 TBM_BACKEND_DBG("%s size:%d, gem:%d(%d), flags:%d(%d)\n",
1168 __FUNCTION__, bo_data->size,
1169 bo_data->gem, bo_data->name,
1173 *error = HAL_TBM_ERROR_NONE;
1175 return (hal_tbm_bo *)bo_data;
1179 tbm_sprd_bufmgr_import_fd(hal_tbm_bufmgr *bufmgr, hal_tbm_fd key, hal_tbm_error *error)
1181 tbm_sprd_bufmgr *bufmgr_data = (tbm_sprd_bufmgr *)bufmgr;
1182 tbm_sprd_bo *bo_data;
1186 char buf[STRERR_BUFSIZE];
1188 if (bufmgr_data == NULL) {
1189 TBM_BACKEND_ERR("bufmgr_data is null\n");
1191 *error = HAL_TBM_ERROR_INVALID_PARAMETER;
1195 /*getting handle from fd*/
1196 struct drm_prime_handle arg = {0, };
1199 if (drmIoctl(bufmgr_data->fd, DRM_IOCTL_PRIME_FD_TO_HANDLE, &arg)) {
1200 TBM_BACKEND_ERR("Cannot get gem handle from fd:%d (%s)\n",
1201 arg.fd, strerror_r(errno, buf, STRERR_BUFSIZE));
1203 *error = HAL_TBM_ERROR_INVALID_OPERATION;
1208 name = _get_name(bufmgr_data->fd, gem);
1210 TBM_BACKEND_ERR("Cannot get name from gem:%d, fd:%d (%s)\n",
1211 gem, key, strerror_r(errno, buf, STRERR_BUFSIZE));
1213 *error = HAL_TBM_ERROR_INVALID_OPERATION;
1217 ret = drmHashLookup(bufmgr_data->hashBos, name, (void **)&bo_data);
1219 if (gem == bo_data->gem) {
1221 *error = HAL_TBM_ERROR_NONE;
1226 /* Determine size of bo_data. The fd-to-handle ioctl really should
1227 * return the size, but it doesn't. If we have kernel 3.12 or
1228 * later, we can lseek on the prime fd to get the size. Older
1229 * kernels will just fail, in which case we fall back to the
1230 * provided (estimated or guess size).
1233 unsigned int real_size;
1234 struct drm_sprd_gem_info info = {0, };
1236 real_size = lseek(key, 0, SEEK_END);
1239 if (drmCommandWriteRead(bufmgr_data->fd,
1242 sizeof(struct drm_sprd_gem_info))) {
1243 TBM_BACKEND_ERR("Cannot get gem info from gem:%d, fd:%d (%s)\n",
1244 gem, key, strerror_r(errno, buf, STRERR_BUFSIZE));
1246 *error = HAL_TBM_ERROR_INVALID_OPERATION;
1250 if (real_size == -1)
1251 real_size = info.size;
1253 bo_data = calloc(1, sizeof(struct _tbm_sprd_bo));
1255 TBM_BACKEND_ERR("error bo_data:%p fail to allocate the bo_data\n", bo_data);
1257 *error = HAL_TBM_ERROR_OUT_OF_MEMORY;
1260 bo_data->bufmgr_data = bufmgr_data;
1262 bo_data->fd = bufmgr_data->fd;
1264 bo_data->size = real_size;
1265 bo_data->flags_sprd = info.flags;
1266 bo_data->flags_tbm = _get_tbm_flag_from_sprd(bo_data->flags_sprd);
1267 bo_data->name = name;
1269 if (!_bo_init_cache_state(bufmgr_data, bo_data, 1)) {
1270 TBM_BACKEND_ERR("error fail init cache state(%d)\n", bo_data->name);
1272 *error = HAL_TBM_ERROR_INVALID_OPERATION;
1273 goto fail_init_cache;
1276 /* add bo_data to hash */
1277 if (drmHashInsert(bufmgr_data->hashBos, bo_data->name, (void *)bo_data) < 0)
1278 TBM_BACKEND_ERR("bo_data:%p Cannot insert bo_data to Hash(%d) from gem:%d, fd:%d\n",
1279 bo_data, bo_data->name, gem, key);
1281 TBM_BACKEND_DBG("bo_data:%p, gem:%d(%d), fd:%d, key_fd:%d, flags:%d(%d), size:%d\n",
1283 bo_data->gem, bo_data->name,
1286 bo_data->flags_tbm, bo_data->flags_sprd,
1290 *error = HAL_TBM_ERROR_NONE;
1292 return (hal_tbm_bo *)bo_data;
1300 tbm_sprd_bufmgr_import_key(hal_tbm_bufmgr *bufmgr, hal_tbm_key key, hal_tbm_error *error)
1302 tbm_sprd_bufmgr *bufmgr_data = (tbm_sprd_bufmgr *)bufmgr;
1303 tbm_sprd_bo *bo_data;
1306 if (bufmgr_data == NULL) {
1307 TBM_BACKEND_ERR("bufmgr_data is null\n");
1309 *error = HAL_TBM_ERROR_INVALID_PARAMETER;
1313 ret = drmHashLookup(bufmgr_data->hashBos, key, (void **)&bo_data);
1316 *error = HAL_TBM_ERROR_NONE;
1317 return (hal_tbm_bo *)bo_data;
1320 struct drm_gem_open arg = {0, };
1321 struct drm_sprd_gem_info info = {0, };
1324 if (drmIoctl(bufmgr_data->fd, DRM_IOCTL_GEM_OPEN, &arg)) {
1325 TBM_BACKEND_ERR("error Cannot open gem name=%d\n", key);
1327 *error = HAL_TBM_ERROR_INVALID_OPERATION;
1331 info.handle = arg.handle;
1332 if (drmCommandWriteRead(bufmgr_data->fd,
1335 sizeof(struct drm_sprd_gem_info))) {
1336 TBM_BACKEND_ERR("error Cannot get gem info=%d\n", key);
1338 *error = HAL_TBM_ERROR_INVALID_OPERATION;
1342 bo_data = calloc(1, sizeof(struct _tbm_sprd_bo));
1344 TBM_BACKEND_ERR("error fail to allocate the bo_data\n");
1346 *error = HAL_TBM_ERROR_OUT_OF_MEMORY;
1349 bo_data->bufmgr_data = bufmgr_data;
1351 bo_data->fd = bufmgr_data->fd;
1352 bo_data->gem = arg.handle;
1353 bo_data->size = arg.size;
1354 bo_data->flags_sprd = info.flags;
1355 bo_data->name = key;
1356 #ifdef USE_CONTIG_ONLY
1357 bo_data->flags_sprd = SPRD_BO_CONTIG;
1358 bo_data->flags_tbm |= HAL_TBM_BO_SCANOUT;
1360 bo_data->flags_tbm = _get_tbm_flag_from_sprd(bo_data->flags_sprd);
1363 if (!_bo_init_cache_state(bufmgr_data, bo_data, 1)) {
1364 TBM_BACKEND_ERR("error fail init cache state(%d)\n", bo_data->name);
1366 *error = HAL_TBM_ERROR_INVALID_OPERATION;
1367 goto fail_init_cache;
1370 if (!bo_data->dmabuf) {
1371 struct drm_prime_handle arg = {0, };
1373 arg.handle = bo_data->gem;
1374 if (drmIoctl(bo_data->fd, DRM_IOCTL_PRIME_HANDLE_TO_FD, &arg)) {
1375 TBM_BACKEND_ERR("error Cannot dmabuf=%d\n", bo_data->gem);
1377 *error = HAL_TBM_ERROR_INVALID_OPERATION;
1378 goto fail_prime_handle_to_fd;
1380 bo_data->dmabuf = arg.fd;
1383 /* add bo_data to hash */
1384 if (drmHashInsert(bufmgr_data->hashBos, bo_data->name, (void *)bo_data) < 0)
1385 TBM_BACKEND_ERR("Cannot insert bo_data to Hash(%d)\n", bo_data->name);
1387 TBM_BACKEND_DBG(" bo_data:%p, gem:%d(%d), fd:%d, flags:%d(%d), size:%d\n",
1389 bo_data->gem, bo_data->name,
1391 bo_data->flags_tbm, bo_data->flags_sprd,
1395 *error = HAL_TBM_ERROR_NONE;
1397 return (hal_tbm_bo *)bo_data;
1399 fail_prime_handle_to_fd:
1400 _bo_destroy_cache_state(bufmgr_data, bo_data);
1406 struct drm_gem_close gem_close_arg = {arg.handle, 0};
1407 drmIoctl(bufmgr_data->fd, DRM_IOCTL_GEM_CLOSE, &gem_close_arg);
1413 tbm_sprd_bo_free(hal_tbm_bo *bo)
1415 tbm_sprd_bo *bo_data = (tbm_sprd_bo *)bo;
1417 tbm_sprd_bufmgr *bufmgr_data;
1418 char buf[STRERR_BUFSIZE];
1424 bufmgr_data = bo_data->bufmgr_data;
1428 TBM_BACKEND_DBG(" bo_data:%p, gem:%d(%d), fd:%d, size:%d\n",
1430 bo_data->gem, bo_data->name,
1434 if (bo_data->pBase) {
1435 if (munmap(bo_data->pBase, bo_data->size) == -1) {
1436 TBM_BACKEND_ERR("bo_data:%p fail to munmap(%s)\n",
1437 bo_data, strerror_r(errno, buf, STRERR_BUFSIZE));
1442 if (bo_data->dmabuf) {
1443 close(bo_data->dmabuf);
1444 bo_data->dmabuf = 0;
1447 /* delete bo from hash */
1448 ret = drmHashLookup(bufmgr_data->hashBos, bo_data->name,
1451 drmHashDelete(bufmgr_data->hashBos, bo_data->name);
1453 TBM_BACKEND_ERR("Cannot find bo_data to Hash(%d), ret=%d\n", bo_data->name, ret);
1455 if (temp != bo_data)
1456 TBM_BACKEND_ERR("hashBos probably has several BOs with same name!!!\n");
1458 _bo_destroy_cache_state(bufmgr_data, bo_data);
1460 /* Free gem handle */
1461 struct drm_gem_close arg = {0, };
1463 memset(&arg, 0, sizeof(arg));
1464 arg.handle = bo_data->gem;
1465 if (drmIoctl(bo_data->fd, DRM_IOCTL_GEM_CLOSE, &arg))
1466 TBM_BACKEND_ERR("bo_data:%p fail to gem close.(%s)\n",
1467 bo_data, strerror_r(errno, buf, STRERR_BUFSIZE));
1472 tbm_sprd_bo_get_size(hal_tbm_bo *bo, hal_tbm_error *error)
1474 tbm_sprd_bo *bo_data = (tbm_sprd_bo *)bo;
1478 *error = HAL_TBM_ERROR_INVALID_PARAMETER;
1483 *error = HAL_TBM_ERROR_NONE;
1485 return bo_data->size;
/* Return the TBM memory-type flags the bo was allocated with
 * (bo_data->flags_tbm). On an invalid bo, sets *error to
 * HAL_TBM_ERROR_INVALID_PARAMETER and returns HAL_TBM_BO_DEFAULT. */
1488 static hal_tbm_bo_memory_type
1489 tbm_sprd_bo_get_memory_type(hal_tbm_bo *bo, hal_tbm_error *error)
1491 tbm_sprd_bo *bo_data = (tbm_sprd_bo *)bo;
1495 *error = HAL_TBM_ERROR_INVALID_PARAMETER;
1496 return HAL_TBM_BO_DEFAULT;
1500 *error = HAL_TBM_ERROR_NONE;
1502 return bo_data->flags_tbm;
/* Return a device-specific handle for the bo (CPU pointer, GEM id,
 * dmabuf fd, ... depending on 'device') without changing the map
 * count. The actual translation is done by _sprd_bo_handle().
 * Errors: INVALID_PARAMETER for a bad bo or a bo with no GEM handle,
 * INVALID_OPERATION when _sprd_bo_handle() yields a NULL handle. */
1505 static hal_tbm_bo_handle
1506 tbm_sprd_bo_get_handle(hal_tbm_bo *bo, hal_tbm_bo_device_type device, hal_tbm_error *error)
1508 tbm_sprd_bo *bo_data = (tbm_sprd_bo *)bo;
1509 hal_tbm_bo_handle bo_handle;
1513 *error = HAL_TBM_ERROR_INVALID_PARAMETER;
1514 return (hal_tbm_bo_handle) NULL;
/* A zero GEM handle means the bo was never backed by a GEM object. */
1517 if (!bo_data->gem) {
1518 TBM_BACKEND_ERR("Cannot map gem=%d\n", bo_data->gem);
1520 *error = HAL_TBM_ERROR_INVALID_PARAMETER;
1521 return (hal_tbm_bo_handle) NULL;
1524 TBM_BACKEND_DBG("bo_data:%p, gem:%d(%d), fd:%d, flags:%d(%d), size:%d, %s\n",
1526 bo_data->gem, bo_data->name,
1528 bo_data->flags_tbm, bo_data->flags_sprd,
1530 STR_DEVICE[device]);
1532 /*Get mapped bo_handle*/
1533 bo_handle = _sprd_bo_handle(bo_data, device);
1534 if (bo_handle.ptr == NULL) {
1535 TBM_BACKEND_ERR("Cannot get handle: gem:%d, device:%d\n",
1536 bo_data->gem, device);
1538 *error = HAL_TBM_ERROR_INVALID_OPERATION;
1539 return (hal_tbm_bo_handle) NULL;
1543 *error = HAL_TBM_ERROR_NONE;
/* Map the bo for the given device/access option and return the handle.
 * On the first mapping (map_cnt == 0) the cache state is set via
 * _bo_set_cache_state() so CPU/device coherency can be maintained;
 * the map-count increment itself is not visible in this listing.
 * Errors: INVALID_PARAMETER for a bad bo/bufmgr or missing GEM handle,
 * INVALID_OPERATION when _sprd_bo_handle() fails. */
1548 static hal_tbm_bo_handle
1549 tbm_sprd_bo_map(hal_tbm_bo *bo, hal_tbm_bo_device_type device,
1550 hal_tbm_bo_access_option opt, hal_tbm_error *error)
1552 tbm_sprd_bo *bo_data = (tbm_sprd_bo *)bo;
1553 hal_tbm_bo_handle bo_handle;
1554 tbm_sprd_bufmgr *bufmgr_data;
1558 *error = HAL_TBM_ERROR_INVALID_PARAMETER;
1559 return (hal_tbm_bo_handle) NULL;
1562 bufmgr_data = bo_data->bufmgr_data;
1565 *error = HAL_TBM_ERROR_INVALID_PARAMETER;
1566 return (hal_tbm_bo_handle) NULL;
/* A zero GEM handle cannot be mapped. */
1569 if (!bo_data->gem) {
1570 TBM_BACKEND_ERR("Cannot map gem=%d\n", bo_data->gem);
1572 *error = HAL_TBM_ERROR_INVALID_PARAMETER;
1573 return (hal_tbm_bo_handle) NULL;
1576 TBM_BACKEND_DBG(" bo_data:%p, gem:%d(%d), fd:%d, %s, %s\n",
1578 bo_data->gem, bo_data->name,
1583 /*Get mapped bo_handle*/
1584 bo_handle = _sprd_bo_handle(bo_data, device);
1585 if (bo_handle.ptr == NULL) {
1586 TBM_BACKEND_ERR("Cannot get handle: gem:%d, device:%d, opt:%d\n",
1587 bo_data->gem, device, opt);
1589 *error = HAL_TBM_ERROR_INVALID_OPERATION;
1590 return (hal_tbm_bo_handle) NULL;
/* First map of this bo: record the cache state for this device/opt. */
1593 if (bo_data->map_cnt == 0)
1594 _bo_set_cache_state(bufmgr_data, bo_data, device, opt);
1599 *error = HAL_TBM_ERROR_NONE;
/* Unmap the bo. When the map count drops to zero the cache state is
 * saved via _bo_save_cache_state() (the decrement itself is among the
 * lines not visible in this listing). Returns HAL_TBM_ERROR_NONE, or
 * HAL_TBM_ERROR_INVALID_PARAMETER on an invalid bo/bufmgr. */
1604 static hal_tbm_error
1605 tbm_sprd_bo_unmap(hal_tbm_bo *bo)
1607 tbm_sprd_bo *bo_data = (tbm_sprd_bo *)bo;
1608 tbm_sprd_bufmgr *bufmgr_data;
1611 return HAL_TBM_ERROR_INVALID_PARAMETER;
1613 bufmgr_data = bo_data->bufmgr_data;
1615 return HAL_TBM_ERROR_INVALID_PARAMETER;
1618 return HAL_TBM_ERROR_INVALID_PARAMETER;
/* Last unmap: persist the cache state for the next mapping. */
1622 if (bo_data->map_cnt == 0)
1623 _bo_save_cache_state(bufmgr_data, bo_data);
1625 TBM_BACKEND_DBG(" bo_data:%p, gem:%d(%d), fd:%d\n",
1627 bo_data->gem, bo_data->name,
1630 return HAL_TBM_ERROR_NONE;
/* Export the bo as a dmabuf fd via DRM_IOCTL_PRIME_HANDLE_TO_FD so it
 * can be shared across processes. Ownership of the returned fd passes
 * to the caller. Errors: INVALID_PARAMETER for a bad bo,
 * INVALID_OPERATION when the PRIME ioctl fails (the ioctl's negative
 * return value is propagated as the fd in that case). */
1634 tbm_sprd_bo_export_fd(hal_tbm_bo *bo, hal_tbm_error *error)
1636 tbm_sprd_bo *bo_data = (tbm_sprd_bo *)bo;
1637 struct drm_prime_handle arg = {0, };
1639 char buf[STRERR_BUFSIZE];
1643 *error = HAL_TBM_ERROR_INVALID_PARAMETER;
/* Convert the GEM handle into a shareable dmabuf fd. */
1647 arg.handle = bo_data->gem;
1648 ret = drmIoctl(bo_data->fd, DRM_IOCTL_PRIME_HANDLE_TO_FD, &arg);
1650 TBM_BACKEND_ERR("bo_data:%p Cannot dmabuf=%d (%s)\n",
1651 bo_data, bo_data->gem, strerror_r(errno, buf, STRERR_BUFSIZE));
1653 *error = HAL_TBM_ERROR_INVALID_OPERATION;
1654 return (hal_tbm_fd) ret;
1657 TBM_BACKEND_DBG("bo_data:%p, gem:%d(%d), fd:%d, key_fd:%d, flags:%d(%d), size:%d\n",
1659 bo_data->gem, bo_data->name,
1662 bo_data->flags_tbm, bo_data->flags_sprd,
1666 *error = HAL_TBM_ERROR_NONE;
1668 return (hal_tbm_fd)arg.fd;
/* Export the bo as a global key (GEM flink name). The name is fetched
 * lazily via _get_name() on first export and cached in bo_data->name.
 * Errors: INVALID_PARAMETER for a bad bo or when the flink name
 * cannot be obtained. */
1672 tbm_sprd_bo_export_key(hal_tbm_bo *bo, hal_tbm_error *error)
1674 tbm_sprd_bo *bo_data = (tbm_sprd_bo *)bo;
1678 *error = HAL_TBM_ERROR_INVALID_PARAMETER;
/* Lazily resolve and cache the flink name; 0 means "not named yet". */
1682 if (!bo_data->name) {
1683 bo_data->name = _get_name(bo_data->fd, bo_data->gem);
1684 if (!bo_data->name) {
1685 TBM_BACKEND_ERR("error Cannot get name\n");
1687 *error = HAL_TBM_ERROR_INVALID_PARAMETER;
1692 TBM_BACKEND_DBG(" bo_data:%p, gem:%d(%d), fd:%d, flags:%d(%d), size:%d\n",
1694 bo_data->gem, bo_data->name,
1696 bo_data->flags_tbm, bo_data->flags_sprd,
1700 *error = HAL_TBM_ERROR_NONE;
1702 return (hal_tbm_key)bo_data->name;
/* Callback registered in backend_data->drm_info.auth_drm_fd_func (see
 * hal_backend_tbm_sprd_init): invoked by the HAL core when this
 * non-master backend is handed an authenticated drm fd. Stores the fd
 * in the bufmgr so later ioctls use it. */
1705 static hal_tbm_error
1706 _tbm_sprd_authenticated_drm_fd_handler(hal_tbm_fd auth_fd, void *user_data)
1708 tbm_sprd_bufmgr *bufmgr_data = (tbm_sprd_bufmgr *) user_data;
1710 TBM_BACKEND_RETURN_VAL_IF_FAIL(bufmgr_data != NULL, HAL_TBM_ERROR_INVALID_PARAMETER);
1712 bufmgr_data->fd = auth_fd;
1713 TBM_BACKEND_INFO("Get the authenticated drm_fd(%d)!\n", bufmgr_data->fd);
1715 return HAL_TBM_ERROR_NONE;
/* Backend teardown, mirror of hal_backend_tbm_sprd_init(): frees the
 * function tables, drains and destroys the bo name hash, releases the
 * bufmgr cache state, closes the drm fd, and frees the bufmgr.
 * Returns HAL_TBM_ERROR_NONE, or -1 via the RETURN_VAL_IF_FAIL guards
 * on a NULL backend/bufmgr. */
1719 hal_backend_tbm_sprd_exit(void *data)
1721 hal_tbm_backend_data *backend_data = (hal_tbm_backend_data *)data;
1722 tbm_sprd_bufmgr *bufmgr_data;
1726 TBM_BACKEND_RETURN_VAL_IF_FAIL(backend_data != NULL, -1);
1728 bufmgr_data = (tbm_sprd_bufmgr *)backend_data->bufmgr;
1729 TBM_BACKEND_RETURN_VAL_IF_FAIL(bufmgr_data != NULL, -1);
1731 if (backend_data->bufmgr_funcs)
1732 free(backend_data->bufmgr_funcs);
1733 if (backend_data->bo_funcs)
1734 free(backend_data->bo_funcs);
/* Drop every remaining hash entry before destroying the table. */
1736 if (bufmgr_data->hashBos) {
1737 while (drmHashFirst(bufmgr_data->hashBos, &key, &value) > 0) {
1739 drmHashDelete(bufmgr_data->hashBos, key);
1742 drmHashDestroy(bufmgr_data->hashBos);
1743 bufmgr_data->hashBos = NULL;
1746 _bufmgr_deinit_cache_state(bufmgr_data);
1748 close(bufmgr_data->fd);
1750 free(backend_data->bufmgr);
1753 return HAL_TBM_ERROR_NONE;
/* Backend entry point: allocates the hal_tbm_backend_data and the
 * sprd bufmgr, opens the drm device, resolves master vs. authenticated
 * fd handling, probes dmabuf-fence support, initializes cache state
 * and the bo name hash, and registers the bufmgr/bo function tables.
 * Cleanup on failure is goto-label based, unwinding in reverse order.
 * NOTE(review): several lines (guards, gotos, return values) are not
 * visible in this listing; comments describe the visible statements. */
1757 hal_backend_tbm_sprd_init(void **data)
1759 hal_tbm_backend_data *backend_data = NULL;
1760 hal_tbm_bufmgr_funcs *bufmgr_funcs = NULL;
1761 hal_tbm_bo_funcs *bo_funcs = NULL;
1762 tbm_sprd_bufmgr *bufmgr_data = NULL;
1766 /* allocate a hal_tbm_backend_data */
1767 backend_data = calloc(1, sizeof(struct _hal_tbm_backend_data));
1768 if (!backend_data) {
1769 TBM_BACKEND_ERR("fail to alloc backend_data!\n");
1773 *data = backend_data;
1775 /* allocate a hal_tbm_bufmgr */
1776 bufmgr_data = calloc(1, sizeof(struct _tbm_sprd_bufmgr));
1778 TBM_BACKEND_ERR("fail to alloc bufmgr_data!\n");
1779 goto fail_alloc_bufmgr_data;
1781 backend_data->bufmgr = (hal_tbm_bufmgr *)bufmgr_data;
/* Open the sprd drm device node. */
1784 drm_fd = _tbm_sprd_open_drm();
1786 TBM_BACKEND_ERR("fail to open drm!\n");
1790 // set true when backend has a drm_device.
1791 backend_data->has_drm_device = 1;
1793 // check if drm_fd is master_drm_fd.
1794 if (drmIsMaster(drm_fd)) {
1795 // drm_fd is a master_drm_fd.
1796 backend_data->drm_info.drm_fd = drm_fd;
1797 backend_data->drm_info.is_master = 1;
1799 bufmgr_data->fd = drm_fd;
1800 TBM_BACKEND_INFO("Get the master drm_fd(%d)!\n", bufmgr_data->fd);
1802 // drm_fd is not a master_drm_fd.
1803 // request authenticated fd
1805 backend_data->drm_info.drm_fd = -1;
1806 backend_data->drm_info.is_master = 0;
/* The HAL core will call this back with an authenticated fd later. */
1807 backend_data->drm_info.auth_drm_fd_func = _tbm_sprd_authenticated_drm_fd_handler;
1808 backend_data->drm_info.user_data = bufmgr_data;
1810 TBM_BACKEND_INFO("A backend requests an authenticated drm_fd.\n");
1813 //Check if the tbm manager supports dma fence or not.
1814 fp = open("/sys/module/dmabuf_sync/parameters/enabled", O_RDONLY);
1817 int length = read(fp, buf, 1);
1819 if (length == 1 && buf[0] == '1')
1820 bufmgr_data->use_dma_fence = 1;
1825 TBM_BACKEND_DBG("DMABUF FENCE is %s\n",
1826 bufmgr_data->use_dma_fence ? "supported!" : "NOT supported!");
1828 if (!_bufmgr_init_cache_state(bufmgr_data)) {
1829 TBM_BACKEND_ERR("fail to init bufmgr cache state\n");
1830 goto fail_init_cache_state;
1833 /*Create Hash Table*/
1834 bufmgr_data->hashBos = drmHashCreate();
1836 /* alloc and register bufmgr_funcs */
1837 bufmgr_funcs = calloc(1, sizeof(struct _hal_tbm_bufmgr_funcs));
1838 if (!bufmgr_funcs) {
1839 TBM_BACKEND_ERR("fail to alloc bufmgr_funcs!\n");
1840 goto fail_alloc_bufmgr_funcs;
1842 backend_data->bufmgr_funcs = bufmgr_funcs;
1844 bufmgr_funcs->bufmgr_get_capabilities = tbm_sprd_bufmgr_get_capabilities;
1845 bufmgr_funcs->bufmgr_get_supported_formats = tbm_sprd_bufmgr_get_supported_formats;
1846 bufmgr_funcs->bufmgr_get_plane_data = tbm_sprd_bufmgr_get_plane_data;
1847 bufmgr_funcs->bufmgr_alloc_bo = tbm_sprd_bufmgr_alloc_bo;
1848 bufmgr_funcs->bufmgr_alloc_bo_with_format = NULL;
1849 bufmgr_funcs->bufmgr_import_fd = tbm_sprd_bufmgr_import_fd;
1850 bufmgr_funcs->bufmgr_import_key = tbm_sprd_bufmgr_import_key;
1852 /* alloc and register bo_funcs */
1853 bo_funcs = calloc(1, sizeof(struct _hal_tbm_bo_funcs));
1855 TBM_BACKEND_ERR("fail to alloc bo_funcs!\n");
1856 goto fail_alloc_bo_funcs;
1858 backend_data->bo_funcs = bo_funcs;
1860 bo_funcs->bo_free = tbm_sprd_bo_free;
1861 bo_funcs->bo_get_size = tbm_sprd_bo_get_size;
1862 bo_funcs->bo_get_memory_types = tbm_sprd_bo_get_memory_type;
1863 bo_funcs->bo_get_handle = tbm_sprd_bo_get_handle;
1864 bo_funcs->bo_map = tbm_sprd_bo_map;
1865 bo_funcs->bo_unmap = tbm_sprd_bo_unmap;
1866 bo_funcs->bo_lock = NULL;;
1867 bo_funcs->bo_unlock = NULL;
1868 bo_funcs->bo_export_fd = tbm_sprd_bo_export_fd;
1869 bo_funcs->bo_export_key = tbm_sprd_bo_export_key;
1871 return HAL_TBM_ERROR_NONE;
/* Failure unwind: release in reverse order of acquisition. */
1873 fail_alloc_bo_funcs:
1875 fail_alloc_bufmgr_funcs:
1876 _bufmgr_deinit_cache_state(bufmgr_data);
1877 if (bufmgr_data->hashBos)
1878 drmHashDestroy(bufmgr_data->hashBos);
1879 fail_init_cache_state:
1880 close(bufmgr_data->fd);
1883 fail_alloc_bufmgr_data:
1891 hal_backend hal_backend_tbm_data = {
1894 HAL_ABI_VERSION_TIZEN_6_5,
1895 hal_backend_tbm_sprd_init,
1896 hal_backend_tbm_sprd_exit