From: Tae-Young Chung
Date: Thu, 4 Jul 2019 10:26:17 +0000 (+0900)
Subject: Initial codes
X-Git-Tag: submit/tizen/20190704.120336^0
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=e5343720d0f958224fa9191b5704edaaee1b7a6b;p=platform%2Fcore%2Fmultimedia%2Finference-engine-interface.git

Initial codes

1. Provides inference-engine-interface-common for general-purpose inference
2. Provides inference-engine-interface-vision for vision-specific inference

Signed-off-by: Tae-Young Chung
---

diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 0000000..7ca6ea7
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,141 @@
+
+CMAKE_MINIMUM_REQUIRED(VERSION 2.6)
+SET(fw_name "inference-engine-interface")
+SET(fw_name_vision ${fw_name}-vision)
+SET(fw_name_common ${fw_name}-common)
+
+PROJECT(${fw_name_vision})
+
+SET(CMAKE_INSTALL_PREFIX /usr)
+SET(PREFIX ${CMAKE_INSTALL_PREFIX})
+
+SET(INC_DIR "${PROJECT_SOURCE_DIR}/include")
+
+SET(dependents "dlog")
+SET(pc_dependents "capi-base-common")
+INCLUDE(FindPkgConfig)
+
+pkg_check_modules(${fw_name_vision} REQUIRED ${dependents})
+FOREACH(flag ${${fw_name_vision}_CFLAGS})
+    SET(EXTRA_CFLAGS "${EXTRA_CFLAGS} ${flag}")
+    SET(EXTRA_CXXFLAGS "${EXTRA_CXXFLAGS} ${flag}")
+ENDFOREACH(flag)
+
+#OpenCV
+FIND_PACKAGE(OpenCV REQUIRED core)
+if(NOT OpenCV_FOUND)
+    MESSAGE(SEND_ERROR "OpenCV NOT FOUND")
+    RETURN()
+else()
+    INCLUDE_DIRECTORIES(${OpenCV_INCLUDE_DIRS})
+endif()
+
+SET(CMAKE_C_FLAGS "-I./include -I./include/headers ${CMAKE_C_FLAGS} ${EXTRA_CFLAGS} -fPIC -Wall -w")
+SET(CMAKE_C_FLAGS_DEBUG "-O0 -g")
+
+SET(CMAKE_CXX_FLAGS "-I./include -I./include/headers ${CMAKE_CXX_FLAGS} ${EXTRA_CXXFLAGS} -fPIC")
+SET(CMAKE_CXX_FLAGS_DEBUG "-O0 -g -w")
+
+ADD_DEFINITIONS("-DPREFIX=\"${CMAKE_INSTALL_PREFIX}\"")
+ADD_DEFINITIONS("-DTIZEN_DEBUG")
+
+SET(CMAKE_EXE_LINKER_FLAGS "-Wl,--as-needed -Wl,--rpath=${LIB_INSTALL_DIR}")
+
+#common
+# Keep the common and vision source lists separate: aux_source_directory()
+# appends to an existing list, so reusing one SOURCES variable would leak
+# the common sources into the vision library.
+aux_source_directory(common SOURCES_COMMON)
+ADD_LIBRARY(${fw_name_common} SHARED ${SOURCES_COMMON})
+
+TARGET_LINK_LIBRARIES(${fw_name_common} dlog)
+
+
+SET_TARGET_PROPERTIES(${fw_name_common}
+     PROPERTIES
+     VERSION ${FULLVER}
+     SOVERSION ${MAJORVER}
+     CLEAN_DIRECT_OUTPUT 1
+)
+
+INSTALL(TARGETS ${fw_name_common} DESTINATION ${LIB_INSTALL_DIR})
+INSTALL(
+        DIRECTORY ${INC_DIR}/ DESTINATION include/media
+        FILES_MATCHING
+        PATTERN "*_private.h" EXCLUDE
+        PATTERN "*.h"
+        )
+
+SET(PC_NAME ${fw_name_common})
+SET(PC_REQUIRED ${pc_dependents})
+SET(PC_LDFLAGS -l${fw_name_common})
+SET(PC_CFLAGS -I\${includedir}/media)
+
+CONFIGURE_FILE(
+    ${fw_name_common}.pc.in
+    ${CMAKE_CURRENT_SOURCE_DIR}/${fw_name_common}.pc
+    @ONLY
+)
+INSTALL(FILES ${CMAKE_CURRENT_SOURCE_DIR}/${fw_name_common}.pc DESTINATION ${LIB_INSTALL_DIR}/pkgconfig)
+
+#vision
+aux_source_directory(vision SOURCES_VISION)
+ADD_LIBRARY(${fw_name_vision} SHARED ${SOURCES_VISION})
+
+TARGET_LINK_LIBRARIES(${fw_name_vision} ${OpenCV_LIBS} dlog)
+
+
+SET_TARGET_PROPERTIES(${fw_name_vision}
+     PROPERTIES
+     VERSION ${FULLVER}
+     SOVERSION ${MAJORVER}
+     CLEAN_DIRECT_OUTPUT 1
+)
+
+INSTALL(TARGETS ${fw_name_vision} DESTINATION ${LIB_INSTALL_DIR})
+INSTALL(
+        DIRECTORY ${INC_DIR}/ DESTINATION include/media
+        FILES_MATCHING
+        PATTERN "*_private.h" EXCLUDE
+        PATTERN "*.h"
+        )
+
+SET(PC_NAME ${fw_name_vision})
+SET(PC_REQUIRED ${pc_dependents})
+SET(PC_LDFLAGS -l${fw_name_vision})
+SET(PC_CFLAGS -I\${includedir}/media)
+
+CONFIGURE_FILE(
+    ${fw_name_vision}.pc.in
+    ${CMAKE_CURRENT_SOURCE_DIR}/${fw_name_vision}.pc
+    @ONLY
+)
+INSTALL(FILES ${CMAKE_CURRENT_SOURCE_DIR}/${fw_name_vision}.pc
+        DESTINATION ${LIB_INSTALL_DIR}/pkgconfig)
+
+IF(UNIX)
+
+ADD_CUSTOM_TARGET (distclean @echo cleaning for source distribution)
+ADD_CUSTOM_COMMAND(
+        DEPENDS clean
+        COMMENT "distribution clean"
+        COMMAND find
+        ARGS    .
+        -not -name config.cmake -and \(
+        -name tester.c -or
+        -name Testing -or
+        -name CMakeFiles -or
+        -name cmake.depends -or
+        -name cmake.check_depends -or
+        -name CMakeCache.txt -or
+        -name cmake.check_cache -or
+        -name *.cmake -or
+        -name Makefile -or
+        -name core -or
+        -name core.* -or
+        -name gmon.out -or
+        -name install_manifest.txt -or
+        -name *.pc -or
+        -name *~ \)
+        | grep -v TC | xargs rm -rf
+        TARGET  distclean
+        VERBATIM
+)
+
+ENDIF(UNIX)
\ No newline at end of file
diff --git a/LICENSE.APLv2 b/LICENSE.APLv2
new file mode 100644
index 0000000..bbe9d02
--- /dev/null
+++ b/LICENSE.APLv2
@@ -0,0 +1,206 @@
+Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner.
+      For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear.
+          The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..3eecd82
--- /dev/null
+++ b/README.md
@@ -0,0 +1,2 @@
+# Inference Engine Interface
+This is a common interface to various inference engines such as TensorFlow, Caffe, and OpenCV.
\ No newline at end of file
diff --git a/common/inference_engine_common_impl.cpp b/common/inference_engine_common_impl.cpp
new file mode 100644
index 0000000..24afee2
--- /dev/null
+++ b/common/inference_engine_common_impl.cpp
@@ -0,0 +1,173 @@
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inference_engine_common_impl.h>
+#include <inference_engine_error.h>
+
+#include <fstream>
+#include <iostream>
+#include <string>
+#include <vector>
+#include <dlfcn.h>
+
+extern "C" {
+
+#include <dlog.h>
+
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
+
+#define LOG_TAG "INFERENCE_ENGINE_COMMON"
+}
+
+namespace InferenceEngineInterface {
+namespace Common {
+
+// Backend plug-in library per inference_backend_type_e. The slots must line
+// up with the enum values (OPENCV, CAFFE, TF, TFLite) because mBackend is
+// used as a direct index in Init(); an OpenCV slot is kept even though the
+// common interface has no OpenCV backend yet.
+const char* engineLibs[] = {
+    "libinference-engine-opencv.so",
+    "libinference-engine-caffe.so",
+    "libinference-engine-tf.so",
+    "libinference-engine-tflite.so"};
+
+InferenceEngineCommon::InferenceEngineCommon(inference_backend_type_e backend) :
+    mBackend(backend)
+{
+    LOGE("ENTER");
+    LOGE("LEAVE");
+}
+
+InferenceEngineCommon::~InferenceEngineCommon()
+{
+    LOGW("ENTER");
+    destroy_t *engineDestroy = (destroy_t*)dlsym(handle, "EngineCommonDestroy");
+    // dlsym() returns NULL when the symbol is missing; skip the call rather
+    // than jump through a null pointer.
+    if (engineDestroy != NULL && engine != NULL)
+        engineDestroy(engine);
+    dlclose(handle);
+
+    LOGW("LEAVE");
+}
+
+int InferenceEngineCommon::Init(std::string configFile,
+    std::string weightFile, std::string userFile)
+{
+    LOGW("ENTER");
+    char *error = NULL;
+    handle = dlopen(engineLibs[mBackend], RTLD_LAZY);
+    if (!handle) {
+        LOGE("Fail to dlopen %s", engineLibs[mBackend]);
+        LOGE("Error: %s\n", dlerror());
+        return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+    }
+
+    init_t* EngineInit = (init_t *)dlsym(handle, "EngineCommonInit");
+    if ((error = dlerror()) != NULL) {
+        LOGE("Error: %s\n", error);
+        dlclose(handle);
+        return INFERENCE_ENGINE_ERROR_INTERNAL;
+    }
+
+    engine = EngineInit(configFile, weightFile, userFile);
+    if (engine == NULL) {
+        LOGE("Fail to EngineInit");
+        dlclose(handle);
+        return INFERENCE_ENGINE_ERROR_INTERNAL;
+    }
+
+    LOGW("LEAVE");
+    return INFERENCE_ENGINE_ERROR_NONE;
+}
+
+void InferenceEngineCommon::Deinit()
+{
+    ;
+}
+
+int InferenceEngineCommon::SetInputTensorParam()
+{
+    return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+}
+
+int InferenceEngineCommon::SetInputTensorParamNode(std::string node)
+{
+    int ret = engine->SetInputTensorParamNode(node);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to SetInputTensorParamNode");
+
+    return ret;
+}
+
+int InferenceEngineCommon::SetOutputTensorParam()
+{
+    return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+}
+
+int InferenceEngineCommon::SetOutputTensorParamNode(std::string node)
+{
+    int ret = engine->SetOutPutTensorParamNodes(node);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to SetOutputTensorParamNodes");
+
+    return ret;
+}
+
+int InferenceEngineCommon::SetTargetDevice(inference_target_type_e type)
+{
+    int ret = engine->SetTargetDevice(type);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to SetTargetDevice");
+
+    return ret;
+}
+
+int InferenceEngineCommon::Load()
+{
+    int ret = engine->Load();
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to load InferenceEngineCommon");
+
+    ret = engine->CreateInputLayerPassage();
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to load CreateInputLayerPassage");
+
+    ret = engine->PrepareInputLayerPassage();
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to load PrepareInputLayerPassage");
+
+    return ret;
+}
+
+int InferenceEngineCommon::Run(std::vector<float> tensor)
+{
+    int ret = engine->Run(tensor);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to run InferenceEngineCommon");
+
+    return ret;
+}
+
+int InferenceEngineCommon::GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float>& results)
+{
+    int ret = engine->GetInferenceResult(dimInfo, results);
+
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to GetInferenceResult");
+
+    return ret;
+}
+} /* Common */
+} /* InferenceEngineInterface */
diff --git a/include/inference_engine_common.h b/include/inference_engine_common.h
new file mode 100644
index 0000000..6dc02ff
--- /dev/null
+++ b/include/inference_engine_common.h
@@ -0,0 +1,64 @@
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __INFERENCE_ENGINE_COMMON_H__
+#define __INFERENCE_ENGINE_COMMON_H__
+
+#include <vector>
+#include <string>
+
+#include "inference_engine_type.h"
+
+namespace InferenceEngineInterface {
+namespace Common {
+
+class IInferenceEngineCommon {
+public:
+
+    virtual ~IInferenceEngineCommon() {};
+
+    // InputTensor
+    virtual int SetInputTensorParam() = 0;
+
+    virtual int SetInputTensorParamNode(std::string node) = 0;
+
+    // OutputTensor
+    virtual int SetOutputTensorParam() = 0;
+
+    virtual int SetOutPutTensorParamNodes(std::string node) = 0;
+
+    virtual int SetTargetDevice(inference_target_type_e type) = 0;
+
+    // Load and Run
+    virtual int Load() = 0;
+
+    virtual int CreateInputLayerPassage() = 0;
+
+    virtual int PrepareInputLayerPassage() = 0;
+
+    virtual int Run(std::vector<float> tensor) = 0;
+
+    virtual int GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float>& results) = 0;
+};
+
+// Factory entry points each backend library must export under the names
+// "EngineCommonInit" / "EngineCommonDestroy"; they are resolved via dlsym().
+typedef void destroy_t(IInferenceEngineCommon*);
+typedef IInferenceEngineCommon* init_t(std::string configFile, std::string weightFile, std::string userFile);
+} /* Common */
+} /* InferenceEngineInterface */
+
+#endif /* __INFERENCE_ENGINE_COMMON_H__ */
diff --git a/include/inference_engine_common_impl.h b/include/inference_engine_common_impl.h
new file mode 100644
index 0000000..f797103
--- /dev/null
+++ b/include/inference_engine_common_impl.h
@@ -0,0 +1,76 @@
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __INFERENCE_ENGINE_COMMON_IMPL_H__
+#define __INFERENCE_ENGINE_COMMON_IMPL_H__
+
+#include <vector>
+#include <string>
+
+#include <inference_engine_common.h>
+#include <inference_engine_type.h>
+
+namespace InferenceEngineInterface {
+namespace Common {
+
+class InferenceEngineCommon {
+public:
+    InferenceEngineCommon(inference_backend_type_e backend);
+
+    ~InferenceEngineCommon();
+
+    int Init(std::string configFile,
+             std::string weightFile, std::string userFile);
+
+    void Deinit();
+
+    // InputTensor
+    int SetInputTensorParam();
+
+    int SetInputTensorParamNode(std::string node);
+
+    // OutputTensor
+    int SetOutputTensorParam();
+
+    int SetOutputTensorParamNode(std::string node);
+
+    int SetTargetDevice(inference_target_type_e type);
+
+    // Load and Run
+    int Load();
+
+    int CreateInputLayerPassage();
+
+    int PrepareInputLayerPassage();
+
+    int Run(std::vector<float> tensor);
+
+    int GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float>& results);
+
+private:
+    void *handle;
+    IInferenceEngineCommon *engine;
+    inference_backend_type_e mBackend;
+    std::vector<std::string> mUserListName;
+
+};
+
+} /* Common */
+} /* InferenceEngineInterface */
+
+#endif /* __INFERENCE_ENGINE_COMMON_IMPL_H__ */
\ No newline at end of file
diff --git a/include/inference_engine_error.h b/include/inference_engine_error.h
new file mode 100644
index 0000000..e9dbc44
--- /dev/null
+++ b/include/inference_engine_error.h
@@ -0,0 +1,65 @@
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __INFERENCE_ENGINE_ERROR_H__
+#define __INFERENCE_ENGINE_ERROR_H__
+
+#include <tizen.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/**
+ * @file inference_engine_error.h
+ * @brief This file contains error types required by
+ *        the inference engine.
+*/
+
+typedef enum {
+    INFERENCE_ENGINE_ERROR_NONE
+            = TIZEN_ERROR_NONE,                /**< Successful */
+    INFERENCE_ENGINE_ERROR_NOT_SUPPORTED
+            = TIZEN_ERROR_NOT_SUPPORTED,       /**< Not supported */
+    INFERENCE_ENGINE_ERROR_MSG_TOO_LONG
+            = TIZEN_ERROR_MSG_TOO_LONG,        /**< Message too long */
+    INFERENCE_ENGINE_ERROR_NO_DATA
+            = TIZEN_ERROR_NO_DATA,             /**< No data */
+    INFERENCE_ENGINE_ERROR_KEY_NOT_AVAILABLE
+            = TIZEN_ERROR_KEY_NOT_AVAILABLE,   /**< Key not available */
+    INFERENCE_ENGINE_ERROR_OUT_OF_MEMORY
+            = TIZEN_ERROR_OUT_OF_MEMORY,       /**< Out of memory */
+    INFERENCE_ENGINE_ERROR_INVALID_PARAMETER
+            = TIZEN_ERROR_INVALID_PARAMETER,   /**< Invalid parameter */
+    INFERENCE_ENGINE_ERROR_INVALID_OPERATION
+            = TIZEN_ERROR_INVALID_OPERATION,   /**< Invalid operation */
+    INFERENCE_ENGINE_ERROR_PERMISSION_DENIED
+            = TIZEN_ERROR_NOT_PERMITTED,       /**< Not permitted */
+    INFERENCE_ENGINE_ERROR_NOT_SUPPORTED_FORMAT
+            = TIZEN_ERROR_MEDIA_VISION | 0x01, /**< Not supported format */
+    INFERENCE_ENGINE_ERROR_INTERNAL
+            = TIZEN_ERROR_MEDIA_VISION | 0x02, /**< Internal error */
+    INFERENCE_ENGINE_ERROR_INVALID_DATA
+            = TIZEN_ERROR_MEDIA_VISION | 0x03, /**< Invalid data */
+    INFERENCE_ENGINE_ERROR_INVALID_PATH
+            = TIZEN_ERROR_MEDIA_VISION | 0x04, /**< Invalid path */
+} inference_engine_error_e;
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /* __INFERENCE_ENGINE_ERROR_H__ */
\ No newline at end of file
diff --git a/include/inference_engine_type.h b/include/inference_engine_type.h
new file mode 100644
index 0000000..10ba9e7
--- /dev/null
+++ b/include/inference_engine_type.h
@@ -0,0 +1,104 @@
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __INFERENCE_ENGINE_TYPE_H__
+#define __INFERENCE_ENGINE_TYPE_H__
+
+#include <vector>
+#include <string>
+#include <opencv2/core.hpp>
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/**
+ * @file inference_engine_type.h
+ * @brief This file contains enumerations and handle definitions required by
+ *        the inference engine API.
+ */
+
+/**
+ * @brief Enumeration for inference backend.
+ *
+ * @since_tizen 5.5
+ *
+ * @see mv_inference_prepare()
+ */
+typedef enum {
+    INFERENCE_BACKEND_NONE = -1,
+    INFERENCE_BACKEND_OPENCV,    /**< OpenCV */
+    INFERENCE_BACKEND_CAFFE,     /**< Caffe */
+    INFERENCE_BACKEND_TF,        /**< TensorFlow */
+    INFERENCE_BACKEND_TFLite,    /**< TensorFlow-Lite */
+    INFERENCE_BACKEND_MAX
+} inference_backend_type_e;
+
+/**
+ * @brief Enumeration for inference target.
+ *
+ * @since_tizen 5.5
+ *
+ */
+typedef enum {
+    INFERENCE_TARGET_NONE = -1,
+    INFERENCE_TARGET_CPU,    /**< CPU */
+    INFERENCE_TARGET_GPU,    /**< GPU */
+    INFERENCE_TARGET_MAX
+} inference_target_type_e;
+
+typedef enum {
+    INFERENCE_INPUT_GENERAL = 0,
+    INFERENCE_INPUT_IMAGE,
+    INFERENCE_INPUT_MAX
+} inference_input_type_e;
+
+typedef struct _ImageClassificationResults {
+    int number_of_classes;
+    std::vector<int> indices;
+    std::vector<std::string> names;
+    std::vector<float> confidences;
+} ImageClassificationResults;    /**< structure ImageClassificationResults */
+
+typedef struct _ObjectDetectionResults {
+    int number_of_objects;
+    std::vector<int> indices;
+    std::vector<std::string> names;
+    std::vector<float> confidences;
+    std::vector<cv::Rect> locations;
+} ObjectDetectionResults;    /**< structure ObjectDetectionResults */
+
+typedef struct _FaceDetectionResults {
+    int number_of_faces;
+    std::vector<float> confidences;
+    std::vector<cv::Rect> locations;
+} FaceDetectionResults;    /**< structure FaceDetectionResults */
+
+typedef struct _FacialLandMarkDetectionResults {
+    int number_of_landmarks;
+    std::vector<cv::Point> locations;
+} FacialLandMarkDetectionResults;    /**< structure FacialLandMarkDetectionResults */
+
+typedef struct _InferenceResults {
+    int dimInfoSize;
+    std::vector<std::vector<int>> dimInfo;
+    std::vector<float> data;
+} InferenceResults;    /**< structure InferenceResults */
+
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /* __INFERENCE_ENGINE_TYPE_H__ */
\ No newline at end of file
diff --git a/include/inference_engine_vision.h b/include/inference_engine_vision.h
new file mode 100644
index 0000000..a4cb8c3
--- /dev/null
+++ b/include/inference_engine_vision.h
@@ -0,0 +1,88 @@
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __INFERENCE_ENGINE_VISION_H__
+#define __INFERENCE_ENGINE_VISION_H__
+
+#include <vector>
+#include <string>
+
+#include "inference_engine_type.h"
+#include <opencv2/core.hpp>
+
+namespace InferenceEngineInterface {
+namespace Vision {
+
+class IInferenceEngineVision {
+public:
+
+    virtual ~IInferenceEngineVision() {};
+
+    // InputTensor
+    virtual int SetInputTensorParam() = 0;
+
+    virtual int SetInputTensorParamInput(int width, int height, int dim, int ch) = 0;
+
+    virtual int SetInputTensorParamNorm(double deviation, double mean) = 0;
+
+    virtual int SetInputTensorParamNode(std::string node) = 0;
+
+    // OutputTensor
+    virtual int SetOutputTensorParam() = 0;
+
+    virtual int SetOutputTensorParamThresHold(double threshold) = 0;
+
+    virtual int SetOutputTensorParamNumbers(int number) = 0;
+
+    virtual int SetOutputTensorParamType(int type) = 0;
+
+    virtual int SetOutPutTensorParamNodes(std::string node) = 0;
+
+    virtual int SetTargetDevice(inference_target_type_e type) = 0;
+
+    // Load and Run
+    virtual int Load() = 0;
+
+    virtual int CreateInputLayerPassage() = 0;
+
+    virtual int PrepareInputLayerPassage(inference_input_type_e type) = 0;
+
+    virtual int Run(cv::Mat tensor) = 0;
+
+    virtual int Run(std::vector<float> tensor) = 0;
+
+    virtual int GetInferenceResult(ImageClassificationResults& results) = 0;
+
+    virtual int GetInferenceResult(ObjectDetectionResults& results) = 0;
+
+    virtual int GetInferenceResult(FaceDetectionResults& results) = 0;
+
+    virtual int GetInferenceResult(FacialLandMarkDetectionResults& results) = 0;
+
+    virtual int GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float>& results) = 0;
+
+    virtual int GetNumberOfOutputs() = 0;
+
+    virtual void SetUserListName(std::string userlist) = 0;
+};
+
+// Factory entry points each vision backend library must export under the
+// names "EngineVisionInit" / "EngineVisionDestroy"; resolved via dlsym().
+typedef void destroy_t(IInferenceEngineVision*);
+typedef IInferenceEngineVision* init_t(std::string configFile, std::string weightFile, std::string userFile);
+} /* Vision */
+} /* InferenceEngineInterface */
+
+#endif /* __INFERENCE_ENGINE_VISION_H__ */
diff --git a/include/inference_engine_vision_impl.h b/include/inference_engine_vision_impl.h
new file mode 100644
index 0000000..b50fbfb
--- /dev/null
+++ b/include/inference_engine_vision_impl.h
@@ -0,0 +1,90 @@
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __INFERENCE_ENGINE_VISION_IMPL_H__
+#define __INFERENCE_ENGINE_VISION_IMPL_H__
+
+#include <vector>
+#include <string>
+
+#include <opencv2/core.hpp>
+#include <inference_engine_vision.h>
+#include <inference_engine_type.h>
+
+
+namespace InferenceEngineInterface {
+namespace Vision {
+
+class InferenceEngineVision {
+public:
+    InferenceEngineVision(inference_backend_type_e backend);
+
+    ~InferenceEngineVision();
+
+    int Init(std::string configFile,
+             std::string weightFile, std::string userFile);
+
+    void Deinit();
+
+    // Input Tensor parameters
+    int SetInputTensorParamInput(int width, int height, int dim, int ch);
+
+    int SetInputTensorParamNorm(double deviation, double mean);
+
+    int SetInputTensorParamNode(std::string node);
+
+    // Output Tensor parameters
+    int SetOutputTensorParamThresHold(double threshold);
+
+    int SetOutputTensorParamNumbers(int number);
+
+    int SetOutputTensorParamType(int type);
+
+    int SetOutPutTensorParamNodes(std::string node);
+
+    // Set target device
+    int SetTargetDevice(inference_target_type_e device);
+
+    int Load();
+
+    int Run(cv::Mat tensor);
+
+    int GetInferenceResult(ImageClassificationResults& results);
+
+    int GetInferenceResult(ObjectDetectionResults& results);
+
+    int GetInferenceResult(FaceDetectionResults& results);
+
+    int GetInferenceResult(FacialLandMarkDetectionResults& results);
+
+    int GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float>& results);
+
+    int GetNumberOfOutputs();
+
+    void SetUserListName(std::string userlist);
+
+private:
+    void *handle;
+    IInferenceEngineVision *engine;
+    inference_backend_type_e mBackend;
+    std::vector<std::string> mUserListName;
+
+};
+
+} /* Vision */
+} /* InferenceEngineInterface */
+
+#endif /* __INFERENCE_ENGINE_VISION_IMPL_H__ */
\ No newline at end of file
diff --git a/inference-engine-interface-common.manifest b/inference-engine-interface-common.manifest
new file mode 100644
index 0000000..86dbb26
--- /dev/null
+++ b/inference-engine-interface-common.manifest
@@ -0,0 +1,5 @@
+<manifest>
+    <request>
+        <domain name="_"/>
+    </request>
+</manifest>
diff --git a/inference-engine-interface-common.pc.in b/inference-engine-interface-common.pc.in
new file mode 100644
index 0000000..e7cd18f
--- /dev/null
+++ b/inference-engine-interface-common.pc.in
@@ -0,0 +1,14 @@
+
+# Package Information for pkg-config
+
+prefix=@PREFIX@
+exec_prefix=/usr
+libdir=@LIB_INSTALL_DIR@
+includedir=/usr/include/media
+
+Name: @PC_NAME@
+Description: @PACKAGE_DESCRIPTION@
+Version: @VERSION@
+Requires: @PC_REQUIRED@
+Libs: -L${libdir} @PC_LDFLAGS@
+Cflags: -I${includedir} -I/usr/include
diff --git a/inference-engine-interface-vision.manifest b/inference-engine-interface-vision.manifest
new file mode 100644
index 0000000..86dbb26
--- /dev/null
+++ b/inference-engine-interface-vision.manifest
@@ -0,0 +1,5 @@
+<manifest>
+    <request>
+        <domain name="_"/>
+    </request>
+</manifest>
diff --git a/inference-engine-interface-vision.pc.in b/inference-engine-interface-vision.pc.in
new file mode 100644
index 0000000..e7cd18f
--- /dev/null
+++ b/inference-engine-interface-vision.pc.in
@@ -0,0 +1,14 @@
+
+# Package Information for pkg-config
+
+prefix=@PREFIX@
+exec_prefix=/usr
+libdir=@LIB_INSTALL_DIR@
+includedir=/usr/include/media
+
+Name: @PC_NAME@
+Description: @PACKAGE_DESCRIPTION@
+Version: @VERSION@
+Requires: @PC_REQUIRED@
+Libs: -L${libdir} @PC_LDFLAGS@
+Cflags: -I${includedir} -I/usr/include
diff --git a/packaging/inference-engine-interface.spec b/packaging/inference-engine-interface.spec
new file mode 100644
index 0000000..b3d6ac3
--- /dev/null
+++ b/packaging/inference-engine-interface.spec
@@ -0,0 +1,101 @@
+Name:        inference-engine-interface
+Summary:     Interface of inference engines
+Version:     0.0.1
+Release:     1
+Group:       Multimedia/Framework
+License:     Apache-2.0
+Source0:     %{name}-%{version}.tar.gz
+BuildRequires: cmake
+BuildRequires: pkgconfig(dlog)
+BuildRequires: pkgconfig(libtzplatform-config)
+BuildRequires: pkgconfig(capi-base-common)
+BuildRequires: pkgconfig(opencv) >= 3.4.1
+BuildRequires: pkgconfig(python)
+
+%description
+Interface of inference engines
+
+%package devel
+Summary:     Interface of inference engines
+Group:       Multimedia/Framework
+Requires:    %{name} = %{version}-%{release}
+
+%description devel
+Interface of inference engines (Dev)
+
+
+%package common
+Summary:     Common interface of inference engines
+Group:       Multimedia/Framework
+
+%description common
+Common interface of inference engines
+
+%package common-devel
+Summary:     Common interface of inference engines
+Group:       Multimedia/Framework
+Requires:    inference-engine-interface-common
+
+%description common-devel
+Common interface of inference engines (Dev)
+
+%package vision
+Summary:     Vision interface of inference engines
+Group:       Multimedia/Framework
+
+%description vision
+Vision interface of inference engines
+
+%package vision-devel
+Summary:     Vision interface of inference engines
+Group:       Multimedia/Framework
+Requires:    inference-engine-interface-vision
+
+%description vision-devel
+Vision interface of inference engines (Dev)
+
+%prep
+%setup -q
+
+%build
+%if 0%{?sec_build_binary_debug_enable}
+export CFLAGS="$CFLAGS -DTIZEN_DEBUG_ENABLE"
+export CXXFLAGS="$CXXFLAGS -DTIZEN_DEBUG_ENABLE"
+export FFLAGS="$FFLAGS -DTIZEN_DEBUG_ENABLE"
+%endif
+
+export CFLAGS+=" -DPATH_LIBDIR=\\\"%{_libdir}\\\""
+export CXXFLAGS+=" -DPATH_LIBDIR=\\\"%{_libdir}\\\""
+
+MAJORVER=`echo %{version} | awk 'BEGIN {FS="."}{print $1}'`
+%cmake . -DFULLVER=%{version} -DMAJORVER=${MAJORVER} -DTZ_SYS_BIN=%TZ_SYS_BIN
+
+make %{?jobs:-j%jobs}
+
+%install
+rm -rf %{buildroot}
+
+%make_install
+
+%post -p /sbin/ldconfig
+%postun -p /sbin/ldconfig
+
+%files common
+%manifest inference-engine-interface-common.manifest
+%license LICENSE.APLv2
+%{_libdir}/libinference-engine-interface-common.so.*
+
+%files common-devel
+%{_includedir}/media/*.h
+%{_libdir}/pkgconfig/*common.pc
+%{_libdir}/lib*-common.so
+
+%files vision
+%manifest inference-engine-interface-vision.manifest
+%license LICENSE.APLv2
+%{_libdir}/libinference-engine-interface-vision.so.*
+
+%files vision-devel
+%{_includedir}/media/*.h
+%{_libdir}/pkgconfig/*vision.pc
+%{_libdir}/lib*-vision.so
\ No newline at end of file
diff --git a/vision/inference_engine_vision_impl.cpp b/vision/inference_engine_vision_impl.cpp
new file mode 100644
index 0000000..9487e7a
--- /dev/null
+++ b/vision/inference_engine_vision_impl.cpp
@@ -0,0 +1,250 @@
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inference_engine_vision_impl.h>
+#include <inference_engine_error.h>
+
+#include <fstream>
+#include <iostream>
+#include <string>
+#include <vector>
+#include <dlfcn.h>
+
+extern "C" {
+
+#include <dlog.h>
+
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
+
+#define LOG_TAG "INFERENCE_ENGINE_VISION"
+}
+
+namespace InferenceEngineInterface {
+namespace Vision {
+
+// Backend plug-in library per inference_backend_type_e; indices match the
+// enum values (OPENCV, CAFFE, TF, TFLite).
+const char* engineLibs[] = {
+    "libinference-engine-opencv.so",
+    "libinference-engine-caffe.so",
+    "libinference-engine-tf.so",
+    "libinference-engine-tflite.so"};
+
+InferenceEngineVision::InferenceEngineVision(inference_backend_type_e backend) :
+    mBackend(backend)
+{
+    LOGE("ENTER");
+    LOGE("LEAVE");
+}
+
+InferenceEngineVision::~InferenceEngineVision()
+{
+    LOGW("ENTER");
+    destroy_t *engineDestroy = (destroy_t*)dlsym(handle, "EngineVisionDestroy");
+    // dlsym() returns NULL when the symbol is missing; skip the call rather
+    // than jump through a null pointer.
+    if (engineDestroy != NULL && engine != NULL)
+        engineDestroy(engine);
+    dlclose(handle);
+
+    LOGW("LEAVE");
+}
+
+int InferenceEngineVision::Init(std::string configFile,
+    std::string weightFile, std::string userFile)
+{
+    LOGW("ENTER");
+    char *error = NULL;
+    handle = dlopen(engineLibs[mBackend], RTLD_LAZY);
+    if (!handle) {
+        LOGE("Fail to dlopen %s", engineLibs[mBackend]);
+        LOGE("Error: %s\n", dlerror());
+        return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+    }
+
+    init_t* EngineInit = (init_t *)dlsym(handle, "EngineVisionInit");
+    if ((error = dlerror()) != NULL) {
+        LOGE("Error: %s\n", error);
+        dlclose(handle);
+        return INFERENCE_ENGINE_ERROR_INTERNAL;
+    }
+
+    engine = EngineInit(configFile, weightFile, userFile);
+    if (engine == NULL) {
+        LOGE("Fail to EngineInit");
+        dlclose(handle);
+        return INFERENCE_ENGINE_ERROR_INTERNAL;
+    }
+
+    LOGW("LEAVE");
+    return INFERENCE_ENGINE_ERROR_NONE;
+}
+
+void InferenceEngineVision::Deinit()
+{
+    ;
+}
+
+int InferenceEngineVision::SetInputTensorParamInput(int width, int height, int dim, int ch)
+{
+    int ret = engine->SetInputTensorParamInput(width, height, dim, ch);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to SetInputTensorParamInput");
+
+    return ret;
+}
+
+int InferenceEngineVision::SetInputTensorParamNorm(double deviation, double mean)
+{
+    int ret = engine->SetInputTensorParamNorm(deviation, mean);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to SetInputTensorParamNorm");
+
+    return ret;
+}
+
+int InferenceEngineVision::SetInputTensorParamNode(std::string node)
+{
+    int ret = engine->SetInputTensorParamNode(node);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to SetInputTensorParamNode");
+
+    return ret;
+}
+
+int InferenceEngineVision::SetOutputTensorParamThresHold(double threshold)
+{
+    int ret = engine->SetOutputTensorParamThresHold(threshold);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to SetOutputTensorParamThresHold");
+
+    return ret;
+}
+
+int InferenceEngineVision::SetOutputTensorParamNumbers(int numbers)
+{
+    int ret = engine->SetOutputTensorParamNumbers(numbers);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to SetOutputTensorParamNumbers");
+
+    return ret;
+}
+
+int InferenceEngineVision::SetOutputTensorParamType(int type)
+{
+    int ret = engine->SetOutputTensorParamType(type);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to SetOutputTensorParamType");
+
+    return ret;
+}
+
+int InferenceEngineVision::SetTargetDevice(inference_target_type_e type)
+{
+    int ret = engine->SetTargetDevice(type);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to SetTargetDevice");
+
+    return ret;
+}
+
+int InferenceEngineVision::Load()
+{
+    int ret = engine->Load();
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to load InferenceEngineVision");
+
+    ret = engine->CreateInputLayerPassage();
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to load CreateInputLayerPassage");
+
+    ret = engine->PrepareInputLayerPassage(INFERENCE_INPUT_IMAGE);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to load PrepareInputLayerPassage");
+
+    return ret;
+}
+
+int InferenceEngineVision::Run(cv::Mat tensor)
+{
+    int ret = engine->Run(tensor);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to run InferenceEngineVision");
+
+    return ret;
+}
+
+int InferenceEngineVision::GetInferenceResult(ImageClassificationResults& results)
+{
+    int ret = engine->GetInferenceResult(results);
+
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to GetClassificationResults");
+
+    return ret;
+}
+
+int InferenceEngineVision::GetInferenceResult(ObjectDetectionResults& results)
+{
+    int ret = engine->GetInferenceResult(results);
+
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to GetObjectDetectionResults");
+
+    return ret;
+}
+
+int InferenceEngineVision::GetInferenceResult(FaceDetectionResults& results)
+{
+    int ret = engine->GetInferenceResult(results);
+
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to GetFaceDetectionResults");
+
+    return ret;
+}
+
+int InferenceEngineVision::GetInferenceResult(FacialLandMarkDetectionResults& results)
+{
+    int ret = engine->GetInferenceResult(results);
+
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to GetFacialLandMarkDetectionResults");
+
+    return ret;
+}
+
+int InferenceEngineVision::GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float>& results)
+{
+    int ret = engine->GetInferenceResult(dimInfo, results);
+
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to GetInferenceResult");
+
+    return ret;
+}
+
+int InferenceEngineVision::GetNumberOfOutputs()
+{
+    return engine->GetNumberOfOutputs();
+}
+
+void InferenceEngineVision::SetUserListName(std::string userlist)
+{
+    ;
+}
+
+} /* Vision */
+} /* InferenceEngineInterface */
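
The runtime contract that Init() relies on is worth spelling out for reviewers: the wrapper
dlopen()s the library selected by inference_backend_type_e, then resolves the factory symbols
"EngineCommonInit" and "EngineCommonDestroy" with dlsym(), typed as the init_t/destroy_t
typedefs from inference_engine_common.h. The sketch below shows the minimum a backend plug-in
would need to export to be loadable through the common interface; the MyBackend class, its
stub bodies, and the library name libinference-engine-mybackend.so are hypothetical
illustrations (not part of this patch), and a Tizen toolchain providing tizen.h is assumed.

// backend_plugin_sketch.cpp - hypothetical backend plug-in, built as a shared
// object, e.g. libinference-engine-mybackend.so.
#include <string>
#include <vector>

#include <inference_engine_common.h>
#include <inference_engine_error.h>

using namespace InferenceEngineInterface::Common;

// Every pure virtual of IInferenceEngineCommon is stubbed out so the
// factory contract stays visible; a real backend would do actual work here.
class MyBackend : public IInferenceEngineCommon {
public:
    int SetInputTensorParam() override { return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED; }
    int SetInputTensorParamNode(std::string node) override { return INFERENCE_ENGINE_ERROR_NONE; }
    int SetOutputTensorParam() override { return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED; }
    int SetOutPutTensorParamNodes(std::string node) override { return INFERENCE_ENGINE_ERROR_NONE; }
    int SetTargetDevice(inference_target_type_e type) override { return INFERENCE_ENGINE_ERROR_NONE; }
    int Load() override { return INFERENCE_ENGINE_ERROR_NONE; }
    int CreateInputLayerPassage() override { return INFERENCE_ENGINE_ERROR_NONE; }
    int PrepareInputLayerPassage() override { return INFERENCE_ENGINE_ERROR_NONE; }
    int Run(std::vector<float> tensor) override { return INFERENCE_ENGINE_ERROR_NONE; }
    int GetInferenceResult(std::vector<std::vector<int>>& dimInfo,
                           std::vector<float>& results) override
    { return INFERENCE_ENGINE_ERROR_NONE; }
};

extern "C" {
// Init() resolves exactly these two symbol names with dlsym(), so they must
// have C linkage and match the init_t/destroy_t signatures.
IInferenceEngineCommon* EngineCommonInit(std::string configFile,
                                         std::string weightFile,
                                         std::string userFile)
{
    return new MyBackend();
}

void EngineCommonDestroy(IInferenceEngineCommon* engine)
{
    delete engine;
}
}

The vision interface follows the same pattern with the "EngineVisionInit" /
"EngineVisionDestroy" symbol names and the IInferenceEngineVision interface.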
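On the client side, a consumer builds against the installed package via pkg-config
(inference-engine-interface-vision.pc) and drives the InferenceEngineVision wrapper directly.
A minimal usage sketch follows, assuming a TFLite backend package is installed; the model and
label file paths, the input tensor parameters, and the normalization values are placeholders,
not values prescribed by this patch.

// client_sketch.cpp - hypothetical caller of the vision wrapper.
// Build flags come from the installed .pc file, e.g.:
//   g++ client_sketch.cpp $(pkg-config --cflags --libs inference-engine-interface-vision)
#include <iostream>
#include <opencv2/core.hpp>

#include <inference_engine_vision_impl.h>
#include <inference_engine_error.h>

using namespace InferenceEngineInterface::Vision;

int main()
{
    InferenceEngineVision engine(INFERENCE_BACKEND_TFLite);

    // Placeholder paths; a real caller supplies its own model files.
    if (engine.Init("model.cfg", "model.weights", "labels.txt") !=
            INFERENCE_ENGINE_ERROR_NONE)
        return 1;

    engine.SetInputTensorParamInput(224, 224, 1, 3); // width, height, dim, channels
    engine.SetInputTensorParamNorm(127.5, 127.5);    // deviation, mean
    engine.SetTargetDevice(INFERENCE_TARGET_CPU);

    if (engine.Load() != INFERENCE_ENGINE_ERROR_NONE)
        return 1;

    cv::Mat frame(224, 224, CV_8UC3);                // stand-in for a captured image
    engine.Run(frame);

    ImageClassificationResults results;
    if (engine.GetInferenceResult(results) == INFERENCE_ENGINE_ERROR_NONE)
        std::cout << "classes: " << results.number_of_classes << std::endl;

    return 0;
}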