--- /dev/null
+
+CMAKE_MINIMUM_REQUIRED(VERSION 2.6)
+SET(fw_name "inference-engine-interface")
+SET(fw_name_vision ${fw_name}-vision)
+SET(fw_name_common ${fw_name}-common)
+
+PROJECT(${fw_name_vision})
+
+SET(CMAKE_INSTALL_PREFIX /usr)
+SET(PREFIX ${CMAKE_INSTALL_PREFIX})
+
+SET(INC_DIR "${PROJECT_SOURCE_DIR}/include")
+
+SET(dependents "dlog")
+SET(pc_dependents "capi-base-common")
+INCLUDE(FindPkgConfig)
+
+pkg_check_modules(${fw_name_vision} REQUIRED ${dependents})
+FOREACH(flag ${${fw_name_vision}_CFLAGS})
+ SET(EXTRA_CFLAGS "${EXTRA_CFLAGS} ${flag}")
+ SET(EXTRA_CXXFLAGS "${EXTRA_CXXFLAGS} ${flag}")
+ENDFOREACH(flag)
+
+#OpenCV
+FIND_PACKAGE(OpenCV REQUIRED core)
+if(NOT OpenCV_FOUND)
+ MESSAGE(SEND_ERROR "OpenCV NOT FOUND")
+ RETURN()
+else()
+ INCLUDE_DIRECTORIES(${OpenCV_INCLUDE_DIRS})
+endif()
+
+SET(CMAKE_C_FLAGS "-I./include -I./include/headers ${CMAKE_C_FLAGS} ${EXTRA_CFLAGS} -fPIC -Wall -w")
+SET(CMAKE_C_FLAGS_DEBUG "-O0 -g")
+
+SET(CMAKE_CXX_FLAGS "-I./include -I./include/headers ${CMAKE_CXX_FLAGS} ${EXTRA_CXXFLAGS} -fPIC")
+# Fixed: "--w" is not a valid gcc option; mirror the C debug flags.
+SET(CMAKE_CXX_FLAGS_DEBUG "-O0 -g")
+
+ADD_DEFINITIONS("-DPREFIX=\"${CMAKE_INSTALL_PREFIX}\"")
+ADD_DEFINITIONS("-DTIZEN_DEBUG")
+
+SET(CMAKE_EXE_LINKER_FLAGS "-Wl,--as-needed -Wl,--rpath=${LIB_INSTALL_DIR}")
+
+#common
+# NOTE: aux_source_directory APPENDS to its output variable. The common and
+# vision targets must therefore use distinct variables; reusing SOURCES made
+# the vision library also compile every file from common/.
+aux_source_directory(common SOURCES_COMMON)
+ADD_LIBRARY(${fw_name_common} SHARED ${SOURCES_COMMON})
+
+TARGET_LINK_LIBRARIES(${fw_name_common} dlog)
+
+
+SET_TARGET_PROPERTIES(${fw_name_common}
+ PROPERTIES
+ VERSION ${FULLVER}
+ SOVERSION ${MAJORVER}
+ CLEAN_DIRECT_OUTPUT 1
+)
+
+INSTALL(TARGETS ${fw_name_common} DESTINATION ${LIB_INSTALL_DIR})
+INSTALL(
+ DIRECTORY ${INC_DIR}/ DESTINATION include/media
+ FILES_MATCHING
+ PATTERN "*_private.h" EXCLUDE
+ PATTERN "*.h"
+ )
+
+# Fixed: was ${fw_namefw_name_common_vision} (typo / undefined variable),
+# which left the Name: field of the generated common .pc file empty.
+SET(PC_NAME ${fw_name_common})
+SET(PC_REQUIRED ${pc_dependents})
+SET(PC_LDFLAGS -l${fw_name_common})
+SET(PC_CFLAGS -I\${includedir}/media)
+
+CONFIGURE_FILE(
+ ${fw_name_common}.pc.in
+ ${CMAKE_CURRENT_SOURCE_DIR}/${fw_name_common}.pc
+ @ONLY
+)
+INSTALL(FILES ${CMAKE_CURRENT_SOURCE_DIR}/${fw_name_common}.pc DESTINATION ${LIB_INSTALL_DIR}/pkgconfig)
+
+#vision
+aux_source_directory(vision SOURCES_VISION)
+ADD_LIBRARY(${fw_name_vision} SHARED ${SOURCES_VISION})
+
+TARGET_LINK_LIBRARIES(${fw_name_vision} ${OpenCV_LIBS} dlog)
+
+
+SET_TARGET_PROPERTIES(${fw_name_vision}
+ PROPERTIES
+ VERSION ${FULLVER}
+ SOVERSION ${MAJORVER}
+ CLEAN_DIRECT_OUTPUT 1
+)
+
+INSTALL(TARGETS ${fw_name_vision} DESTINATION ${LIB_INSTALL_DIR})
+INSTALL(
+ DIRECTORY ${INC_DIR}/ DESTINATION include/media
+ FILES_MATCHING
+ PATTERN "*_private.h" EXCLUDE
+ PATTERN "*.h"
+ )
+
+SET(PC_NAME ${fw_name_vision})
+SET(PC_REQUIRED ${pc_dependents})
+SET(PC_LDFLAGS -l${fw_name_vision})
+SET(PC_CFLAGS -I\${includedir}/media)
+
+CONFIGURE_FILE(
+ ${fw_name_vision}.pc.in
+ ${CMAKE_CURRENT_SOURCE_DIR}/${fw_name_vision}.pc
+ @ONLY
+)
+INSTALL(FILES ${CMAKE_CURRENT_SOURCE_DIR}/${fw_name_vision}.pc DESTINATION ${LIB_INSTALL_DIR}/pkgconfig)
+
+IF(UNIX)
+
+ADD_CUSTOM_TARGET (distclean @echo cleaning for source distribution)
+ADD_CUSTOM_COMMAND(
+ DEPENDS clean
+ COMMENT "distribution clean"
+ COMMAND find
+ ARGS .
+ -not -name config.cmake -and \(
+ -name tester.c -or
+ -name Testing -or
+ -name CMakeFiles -or
+ -name cmake.depends -or
+ -name cmake.check_depends -or
+ -name CMakeCache.txt -or
+ -name cmake.check_cache -or
+ -name *.cmake -or
+ -name Makefile -or
+ -name core -or
+ -name core.* -or
+ -name gmon.out -or
+ -name install_manifest.txt -or
+ -name *.pc -or
+ -name *~ \)
+ | grep -v TC | xargs rm -rf
+ TARGET distclean
+ VERBATIM
+)
+
+ENDIF(UNIX)
\ No newline at end of file
--- /dev/null
+Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.\r
+\r
+ Apache License\r
+ Version 2.0, January 2004\r
+ http://www.apache.org/licenses/\r
+\r
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\r
+\r
+ 1. Definitions.\r
+\r
+ "License" shall mean the terms and conditions for use, reproduction,\r
+ and distribution as defined by Sections 1 through 9 of this document.\r
+\r
+ "Licensor" shall mean the copyright owner or entity authorized by\r
+ the copyright owner that is granting the License.\r
+\r
+ "Legal Entity" shall mean the union of the acting entity and all\r
+ other entities that control, are controlled by, or are under common\r
+ control with that entity. For the purposes of this definition,\r
+ "control" means (i) the power, direct or indirect, to cause the\r
+ direction or management of such entity, whether by contract or\r
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the\r
+ outstanding shares, or (iii) beneficial ownership of such entity.\r
+\r
+ "You" (or "Your") shall mean an individual or Legal Entity\r
+ exercising permissions granted by this License.\r
+\r
+ "Source" form shall mean the preferred form for making modifications,\r
+ including but not limited to software source code, documentation\r
+ source, and configuration files.\r
+\r
+ "Object" form shall mean any form resulting from mechanical\r
+ transformation or translation of a Source form, including but\r
+ not limited to compiled object code, generated documentation,\r
+ and conversions to other media types.\r
+\r
+ "Work" shall mean the work of authorship, whether in Source or\r
+ Object form, made available under the License, as indicated by a\r
+ copyright notice that is included in or attached to the work\r
+ (an example is provided in the Appendix below).\r
+\r
+ "Derivative Works" shall mean any work, whether in Source or Object\r
+ form, that is based on (or derived from) the Work and for which the\r
+ editorial revisions, annotations, elaborations, or other modifications\r
+ represent, as a whole, an original work of authorship. For the purposes\r
+ of this License, Derivative Works shall not include works that remain\r
+ separable from, or merely link (or bind by name) to the interfaces of,\r
+ the Work and Derivative Works thereof.\r
+\r
+ "Contribution" shall mean any work of authorship, including\r
+ the original version of the Work and any modifications or additions\r
+ to that Work or Derivative Works thereof, that is intentionally\r
+ submitted to Licensor for inclusion in the Work by the copyright owner\r
+ or by an individual or Legal Entity authorized to submit on behalf of\r
+ the copyright owner. For the purposes of this definition, "submitted"\r
+ means any form of electronic, verbal, or written communication sent\r
+ to the Licensor or its representatives, including but not limited to\r
+ communication on electronic mailing lists, source code control systems,\r
+ and issue tracking systems that are managed by, or on behalf of, the\r
+ Licensor for the purpose of discussing and improving the Work, but\r
+ excluding communication that is conspicuously marked or otherwise\r
+ designated in writing by the copyright owner as "Not a Contribution."\r
+\r
+ "Contributor" shall mean Licensor and any individual or Legal Entity\r
+ on behalf of whom a Contribution has been received by Licensor and\r
+ subsequently incorporated within the Work.\r
+\r
+ 2. Grant of Copyright License. Subject to the terms and conditions of\r
+ this License, each Contributor hereby grants to You a perpetual,\r
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable\r
+ copyright license to reproduce, prepare Derivative Works of,\r
+ publicly display, publicly perform, sublicense, and distribute the\r
+ Work and such Derivative Works in Source or Object form.\r
+\r
+ 3. Grant of Patent License. Subject to the terms and conditions of\r
+ this License, each Contributor hereby grants to You a perpetual,\r
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable\r
+ (except as stated in this section) patent license to make, have made,\r
+ use, offer to sell, sell, import, and otherwise transfer the Work,\r
+ where such license applies only to those patent claims licensable\r
+ by such Contributor that are necessarily infringed by their\r
+ Contribution(s) alone or by combination of their Contribution(s)\r
+ with the Work to which such Contribution(s) was submitted. If You\r
+ institute patent litigation against any entity (including a\r
+ cross-claim or counterclaim in a lawsuit) alleging that the Work\r
+ or a Contribution incorporated within the Work constitutes direct\r
+ or contributory patent infringement, then any patent licenses\r
+ granted to You under this License for that Work shall terminate\r
+ as of the date such litigation is filed.\r
+\r
+ 4. Redistribution. You may reproduce and distribute copies of the\r
+ Work or Derivative Works thereof in any medium, with or without\r
+ modifications, and in Source or Object form, provided that You\r
+ meet the following conditions:\r
+\r
+ (a) You must give any other recipients of the Work or\r
+ Derivative Works a copy of this License; and\r
+\r
+ (b) You must cause any modified files to carry prominent notices\r
+ stating that You changed the files; and\r
+\r
+ (c) You must retain, in the Source form of any Derivative Works\r
+ that You distribute, all copyright, patent, trademark, and\r
+ attribution notices from the Source form of the Work,\r
+ excluding those notices that do not pertain to any part of\r
+ the Derivative Works; and\r
+\r
+ (d) If the Work includes a "NOTICE" text file as part of its\r
+ distribution, then any Derivative Works that You distribute must\r
+ include a readable copy of the attribution notices contained\r
+ within such NOTICE file, excluding those notices that do not\r
+ pertain to any part of the Derivative Works, in at least one\r
+ of the following places: within a NOTICE text file distributed\r
+ as part of the Derivative Works; within the Source form or\r
+ documentation, if provided along with the Derivative Works; or,\r
+ within a display generated by the Derivative Works, if and\r
+ wherever such third-party notices normally appear. The contents\r
+ of the NOTICE file are for informational purposes only and\r
+ do not modify the License. You may add Your own attribution\r
+ notices within Derivative Works that You distribute, alongside\r
+ or as an addendum to the NOTICE text from the Work, provided\r
+ that such additional attribution notices cannot be construed\r
+ as modifying the License.\r
+\r
+ You may add Your own copyright statement to Your modifications and\r
+ may provide additional or different license terms and conditions\r
+ for use, reproduction, or distribution of Your modifications, or\r
+ for any such Derivative Works as a whole, provided Your use,\r
+ reproduction, and distribution of the Work otherwise complies with\r
+ the conditions stated in this License.\r
+\r
+ 5. Submission of Contributions. Unless You explicitly state otherwise,\r
+ any Contribution intentionally submitted for inclusion in the Work\r
+ by You to the Licensor shall be under the terms and conditions of\r
+ this License, without any additional terms or conditions.\r
+ Notwithstanding the above, nothing herein shall supersede or modify\r
+ the terms of any separate license agreement you may have executed\r
+ with Licensor regarding such Contributions.\r
+\r
+ 6. Trademarks. This License does not grant permission to use the trade\r
+ names, trademarks, service marks, or product names of the Licensor,\r
+ except as required for reasonable and customary use in describing the\r
+ origin of the Work and reproducing the content of the NOTICE file.\r
+\r
+ 7. Disclaimer of Warranty. Unless required by applicable law or\r
+ agreed to in writing, Licensor provides the Work (and each\r
+ Contributor provides its Contributions) on an "AS IS" BASIS,\r
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\r
+ implied, including, without limitation, any warranties or conditions\r
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\r
+ PARTICULAR PURPOSE. You are solely responsible for determining the\r
+ appropriateness of using or redistributing the Work and assume any\r
+ risks associated with Your exercise of permissions under this License.\r
+\r
+ 8. Limitation of Liability. In no event and under no legal theory,\r
+ whether in tort (including negligence), contract, or otherwise,\r
+ unless required by applicable law (such as deliberate and grossly\r
+ negligent acts) or agreed to in writing, shall any Contributor be\r
+ liable to You for damages, including any direct, indirect, special,\r
+ incidental, or consequential damages of any character arising as a\r
+ result of this License or out of the use or inability to use the\r
+ Work (including but not limited to damages for loss of goodwill,\r
+ work stoppage, computer failure or malfunction, or any and all\r
+ other commercial damages or losses), even if such Contributor\r
+ has been advised of the possibility of such damages.\r
+\r
+ 9. Accepting Warranty or Additional Liability. While redistributing\r
+ the Work or Derivative Works thereof, You may choose to offer,\r
+ and charge a fee for, acceptance of support, warranty, indemnity,\r
+ or other liability obligations and/or rights consistent with this\r
+ License. However, in accepting such obligations, You may act only\r
+ on Your own behalf and on Your sole responsibility, not on behalf\r
+ of any other Contributor, and only if You agree to indemnify,\r
+ defend, and hold each Contributor harmless for any liability\r
+ incurred by, or claims asserted against, such Contributor by reason\r
+ of your accepting any such warranty or additional liability.\r
+\r
+ END OF TERMS AND CONDITIONS\r
+\r
+ APPENDIX: How to apply the Apache License to your work.\r
+\r
+ To apply the Apache License to your work, attach the following\r
+ boilerplate notice, with the fields enclosed by brackets "[]"\r
+ replaced with your own identifying information. (Don't include\r
+ the brackets!) The text should be enclosed in the appropriate\r
+ comment syntax for the file format. We also recommend that a\r
+ file or class name and description of purpose be included on the\r
+ same "printed page" as the copyright notice for easier\r
+ identification within third-party archives.\r
+\r
+ Copyright [yyyy] [name of copyright owner]\r
+\r
+ Licensed under the Apache License, Version 2.0 (the "License");\r
+ you may not use this file except in compliance with the License.\r
+ You may obtain a copy of the License at\r
+\r
+ http://www.apache.org/licenses/LICENSE-2.0\r
+\r
+ Unless required by applicable law or agreed to in writing, software\r
+ distributed under the License is distributed on an "AS IS" BASIS,\r
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ See the License for the specific language governing permissions and\r
+ limitations under the License.\r
+\r
+\r
+\r
--- /dev/null
+# Inference Engine Interface
+This is a common interface for various inference engines such as TensorFlow, Caffe, OpenCV, and so on.
\ No newline at end of file
--- /dev/null
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inference_engine_error.h>
+#include <inference_engine_common_impl.h>
+
+#include <fstream>
+#include <iostream>
+#include <unistd.h>
+#include <time.h>
+#include <dlfcn.h>
+
+extern "C" {
+
+#include <dlog.h>
+
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
+
+#define LOG_TAG "INFERENCE_ENGINE_COMMON"
+}
+
+namespace InferenceEngineInterface {
+namespace Common {
+
+// Backend shared objects, indexed by inference_backend_type_e.
+// NOTE(review): the table starts at "caffe" while the enum starts at
+// INFERENCE_BACKEND_OPENCV (= 0), and there is no entry for
+// INFERENCE_BACKEND_TFLite (= 3) — the mapping looks shifted; confirm
+// against the backend packages before relying on it.
+const char* engineLibs[] = {
+ "libinference-engine-caffe.so",
+ "libinference-engine-tf.so",
+ "libinference-engine-tflite.so"};
+
+/**
+ * @brief Constructor: records the requested backend and clears the dlopen
+ *        state so the destructor can tell whether Init() ever succeeded.
+ *        (handle/engine were previously left uninitialized.)
+ */
+InferenceEngineCommon::InferenceEngineCommon(inference_backend_type_e backend) :
+    handle(NULL),
+    engine(NULL),
+    mBackend(backend)
+{
+    LOGE("ENTER");
+    LOGE("LEAVE");
+}
+
+/**
+ * @brief Destructor: destroys the backend object through the library's
+ *        exported EngineCommonDestroy symbol, then unloads the library.
+ *        Fixed: dlsym()/dlclose() previously ran unconditionally on a
+ *        possibly-uninitialized handle, and engineDestroy was called
+ *        without a NULL check.
+ */
+InferenceEngineCommon::~InferenceEngineCommon()
+{
+    LOGW("ENTER");
+    if (handle != NULL) {
+        destroy_t *engineDestroy = (destroy_t*)dlsym(handle, "EngineCommonDestroy");
+        if (engineDestroy != NULL && engine != NULL)
+            engineDestroy(engine);
+        else
+            LOGE("Fail to find EngineCommonDestroy symbol");
+        dlclose(handle);
+        handle = NULL;
+    }
+    LOGW("LEAVE");
+}
+
+/**
+ * @brief Loads the backend shared library for mBackend, resolves its
+ *        "EngineCommonInit" factory symbol, and creates the engine object.
+ * @return INFERENCE_ENGINE_ERROR_NONE on success,
+ *         INFERENCE_ENGINE_ERROR_NOT_SUPPORTED when the backend is unknown
+ *         or the library cannot be opened,
+ *         INFERENCE_ENGINE_ERROR_INTERNAL on symbol/factory failure.
+ */
+int InferenceEngineCommon::Init(std::string configFile,
+    std::string weightFile, std::string userFile)
+{
+    LOGW("ENTER");
+
+    // Fixed: engineLibs has fewer entries than inference_backend_type_e has
+    // values (e.g. INFERENCE_BACKEND_TFLite == 3), so an unchecked index
+    // could read past the end of the array.
+    const int numLibs = (int)(sizeof(engineLibs) / sizeof(engineLibs[0]));
+    if (mBackend < 0 || mBackend >= numLibs) {
+        LOGE("Unsupported backend type: %d", (int)mBackend);
+        return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+    }
+
+    char *error = NULL;
+    handle = dlopen(engineLibs[mBackend], RTLD_LAZY);
+    if (!handle) {
+        LOGE("Fail to dlopen %s", engineLibs[mBackend]);
+        LOGE("Error: %s\n", dlerror());
+        return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+    }
+
+    // Clear any stale error state so the dlerror() check below reports only
+    // failures from this dlsym() call (per POSIX dlsym usage).
+    dlerror();
+    init_t* EngineInit = (init_t *)dlsym(handle, "EngineCommonInit");
+    if ((error = dlerror()) != NULL) {
+        LOGE("Error: %s\n", error);
+        dlclose(handle);
+        handle = NULL;
+        return INFERENCE_ENGINE_ERROR_INTERNAL;
+    }
+
+    engine = EngineInit(configFile, weightFile, userFile);
+    if (engine == NULL) {
+        LOGE("Fail to EngineInit");
+        dlclose(handle);
+        handle = NULL;
+        return INFERENCE_ENGINE_ERROR_INTERNAL;
+    }
+
+    LOGW("LEAVE");
+    return INFERENCE_ENGINE_ERROR_NONE;
+}
+
+/**
+ * @brief Intentionally a no-op: backend teardown happens in the destructor.
+ */
+void InferenceEngineCommon::Deinit()
+{
+    // Nothing to release here.
+}
+
+// Common-level input tensor configuration is not implemented; inputs are
+// configured per-node via SetInputTensorParamNode() instead.
+int InferenceEngineCommon::SetInputTensorParam()
+{
+ return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+}
+
+// Forwards the input node name to the loaded backend.
+// NOTE(review): assumes Init() succeeded; "engine" is not NULL-checked.
+int InferenceEngineCommon::SetInputTensorParamNode(std::string node)
+{
+ int ret = engine->SetInputTensorParamNode(node);
+ if (ret != INFERENCE_ENGINE_ERROR_NONE)
+ LOGE("Fail to SetInputTensorParamNode");
+
+ return ret;
+}
+
+// Common-level output tensor configuration is not implemented; outputs are
+// configured per-node via SetOutputTensorParamNode() instead.
+int InferenceEngineCommon::SetOutputTensorParam()
+{
+ return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+}
+
+// Forwards the output node name to the loaded backend.
+// The interface method is spelled "SetOutPutTensorParamNodes" (sic) in
+// IInferenceEngineCommon; this wrapper keeps the conventional spelling.
+// NOTE(review): assumes Init() succeeded; "engine" is not NULL-checked.
+int InferenceEngineCommon::SetOutputTensorParamNode(std::string node)
+{
+ int ret = engine->SetOutPutTensorParamNodes(node);
+ if (ret != INFERENCE_ENGINE_ERROR_NONE)
+ LOGE("Fail to SetOutputTensorParamNodes");
+
+ return ret;
+}
+
+// Selects the execution device (CPU/GPU) by delegating to the backend.
+// NOTE(review): assumes Init() succeeded; "engine" is not NULL-checked.
+int InferenceEngineCommon::SetTargetDevice(inference_target_type_e type)
+{
+ int ret = engine->SetTargetDevice(type);
+ if (ret != INFERENCE_ENGINE_ERROR_NONE)
+ LOGE("Fail to SetTargetDevice");
+
+ return ret;
+}
+
+/**
+ * @brief Runs the backend's three-step load pipeline: Load(),
+ *        CreateInputLayerPassage(), PrepareInputLayerPassage().
+ *        Fixed: stop at the first failure — previously a later successful
+ *        step overwrote an earlier error code, so Load() could report
+ *        success even when engine->Load() had failed.
+ * @return INFERENCE_ENGINE_ERROR_NONE on success, or the first failing
+ *         step's error code.
+ */
+int InferenceEngineCommon::Load()
+{
+    int ret = engine->Load();
+    if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+        LOGE("Fail to load InferenceEngineVision");
+        return ret;
+    }
+
+    ret = engine->CreateInputLayerPassage();
+    if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+        LOGE("Fail to load CreateInputLayerPassage");
+        return ret;
+    }
+
+    ret = engine->PrepareInputLayerPassage();
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to load PrepareInputLayerPassage");
+
+    return ret;
+}
+
+// Runs inference on a flat float tensor by delegating to the backend.
+// NOTE(review): the vector is passed by value (copied per call); a const
+// reference would avoid the copy but changes the interface signature.
+int InferenceEngineCommon::Run(std::vector<float> tensor)
+{
+ int ret = engine->Run(tensor);
+ if (ret != INFERENCE_ENGINE_ERROR_NONE)
+ LOGE("Fail to run InferenceEngineVision");
+
+ return ret;
+}
+
+// Fetches raw inference output from the backend.
+// dimInfo receives per-output dimension vectors; results receives raw float
+// buffers. NOTE(review): buffer ownership/lifetime is defined by the
+// backend, not visible here — confirm before freeing or caching pointers.
+int InferenceEngineCommon::GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float*>& results)
+{
+ int ret = engine->GetInferenceResult(dimInfo, results);
+
+ if (ret != INFERENCE_ENGINE_ERROR_NONE)
+ LOGE("Fail to GetInferenceResult");
+
+ return ret;
+}
+} /* Common */
+} /* InferenceEngineInterface */
--- /dev/null
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __INFERENCE_ENGINE_COMMON_H__
+#define __INFERENCE_ENGINE_COMMON_H__
+
+#include <vector>
+#include <string>
+
+#include "inference_engine_type.h"
+
+namespace InferenceEngineInterface {
+namespace Common {
+
+/**
+ * @brief Abstract interface a common (non-vision) inference backend
+ *        implements. Backend libraries are loaded at runtime and expose
+ *        "EngineCommonInit"/"EngineCommonDestroy" factory symbols matching
+ *        the typedefs declared below.
+ */
+class IInferenceEngineCommon {
+public:
+
+ virtual ~IInferenceEngineCommon() {};
+
+ // InputTensor
+ virtual int SetInputTensorParam() = 0;
+
+ virtual int SetInputTensorParamNode(std::string node) = 0;
+
+
+ // OutputTensor
+ virtual int SetOutputTensorParam() = 0;
+
+ // NOTE(review): the "OutPut...Nodes" spelling is part of the contract with
+ // existing backend implementations; renaming it would break them.
+ virtual int SetOutPutTensorParamNodes(std::string node) = 0;
+
+ virtual int SetTargetDevice(inference_target_type_e type) = 0;
+
+ // Load and Run
+ virtual int Load() = 0;
+
+ virtual int CreateInputLayerPassage() = 0;
+
+ virtual int PrepareInputLayerPassage() = 0;
+
+
+ virtual int Run(std::vector<float> tensor) = 0;
+
+ virtual int GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float*>& results) = 0;
+};
+
+// Factory entry points each backend shared object exports; resolved with
+// dlsym() by InferenceEngineCommon.
+typedef void destroy_t(IInferenceEngineCommon*);
+typedef IInferenceEngineCommon* init_t(std::string configFile, std::string weightFile, std::string userFile);
+} /* Common */
+} /* InferenceEngineInterface */
+
+#endif /* __INFERENCE_ENGINE_COMMON_H__ */
--- /dev/null
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __INFERENCE_ENGINE_COMMON_IMPL_H__
+#define __INFERENCE_ENGINE_COMMON_IMPL_H__
+
+#include <vector>
+#include <string>
+
+#include <inference_engine_common.h>
+#include <inference_engine_type.h>
+
+namespace InferenceEngineInterface {
+namespace Common {
+
+/**
+ * @brief Thin wrapper that dlopen()s a backend library in Init() and
+ *        forwards each call to the IInferenceEngineCommon object created by
+ *        the backend's factory function.
+ */
+class InferenceEngineCommon {
+public:
+ InferenceEngineCommon(inference_backend_type_e backend);
+
+ ~InferenceEngineCommon();
+
+ // Loads the backend .so and creates the engine object; must succeed
+ // before any of the forwarding methods below are called.
+ int Init(std::string configFile,
+ std::string weightFile, std::string UserFile);
+
+ void Deinit();
+
+ // InputTensor
+ int SetInputTensorParam();
+
+ int SetInputTensorParamNode(std::string node);
+
+
+ // OutputTensor
+ int SetOutputTensorParam();
+
+ int SetOutputTensorParamNode(std::string node);
+
+ int SetTargetDevice(inference_target_type_e type);
+
+ // Load and Run
+ int Load();
+
+ int CreateInputLayerPassage();
+
+ int PrepareInputLayerPassage();
+
+
+ int Run(std::vector<float> tensor);
+
+ int GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float*>& results);
+
+private:
+ void *handle; // dlopen() handle of the backend library
+ IInferenceEngineCommon *engine; // backend object from EngineCommonInit
+ inference_backend_type_e mBackend; // selects which backend library to load
+ std::vector<std::string> mUserListName; // NOTE(review): not referenced in the visible implementation
+
+};
+
+} /* Common */
+} /* InferenceEngineInterface */
+
+#endif /* __INFERENCE_ENGINE_COMMON_IMPL_H__ */
\ No newline at end of file
--- /dev/null
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __INFERENCE_ENGINE_ERROR_H__
+#define __INFERENCE_ENGINE_ERROR_H__
+
+#include <tizen.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/**
+ * @file inference_engine_error.h
+ * @brief This file contains error type required by
+ * inference engine
+*/
+
+/**
+ * @brief Error codes returned across the inference engine interface.
+ *        Generic codes alias the corresponding Tizen errors; engine-specific
+ *        codes are offset from TIZEN_ERROR_MEDIA_VISION.
+ */
+typedef enum {
+ INFERENCE_ENGINE_ERROR_NONE
+ = TIZEN_ERROR_NONE, /**< Successful */
+ INFERENCE_ENGINE_ERROR_NOT_SUPPORTED
+ = TIZEN_ERROR_NOT_SUPPORTED, /**< Not supported */
+ INFERENCE_ENGINE_ERROR_MSG_TOO_LONG
+ = TIZEN_ERROR_MSG_TOO_LONG, /**< Message too long */
+ INFERENCE_ENGINE_ERROR_NO_DATA
+ = TIZEN_ERROR_NO_DATA, /**< No data */
+ INFERENCE_ENGINE_ERROR_KEY_NOT_AVAILABLE
+ = TIZEN_ERROR_KEY_NOT_AVAILABLE, /**< Key not available */
+ INFERENCE_ENGINE_ERROR_OUT_OF_MEMORY
+ = TIZEN_ERROR_OUT_OF_MEMORY, /**< Out of memory */
+ INFERENCE_ENGINE_ERROR_INVALID_PARAMETER
+ = TIZEN_ERROR_INVALID_PARAMETER, /**< Invalid parameter */
+ INFERENCE_ENGINE_ERROR_INVALID_OPERATION
+ = TIZEN_ERROR_INVALID_OPERATION, /**< Invalid operation */
+ INFERENCE_ENGINE_ERROR_PERMISSION_DENIED
+ = TIZEN_ERROR_NOT_PERMITTED, /**< Not permitted */
+ INFERENCE_ENGINE_ERROR_NOT_SUPPORTED_FORMAT
+ = TIZEN_ERROR_MEDIA_VISION | 0x01, /**< Not supported format */
+ INFERENCE_ENGINE_ERROR_INTERNAL
+ = TIZEN_ERROR_MEDIA_VISION | 0x02, /**< Internal error */
+ INFERENCE_ENGINE_ERROR_INVALID_DATA
+ = TIZEN_ERROR_MEDIA_VISION | 0x03, /**< Invalid data */
+ INFERENCE_ENGINE_ERROR_INVALID_PATH
+ = TIZEN_ERROR_MEDIA_VISION | 0x04, /**< Invalid path*/
+} inference_engine_error_e;
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /* __INFERENCE_ENGINE_ERROR_H__ */
\ No newline at end of file
--- /dev/null
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __INFERENCE_ENGINE_TYPE_H__
+#define __INFERENCE_ENGINE_TYPE_H__
+
+#include <opencv2/core.hpp>
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/**
+ * @file inference_engine_type.h
+ * @brief This file contains enumerations and handles definition required by
+ * inference engine API.
+ */
+
+/**
+ * @brief Enumeration for inference backend.
+ *
+ * @since_tizen 5.5
+ *
+ * @see mv_inference_prepare()
+ */
+typedef enum {
+ INFERENCE_BACKEND_NONE = -1,
+ INFERENCE_BACKEND_OPENCV, /**< OpenCV */
+ INFERENCE_BACKEND_CAFFE, /**< Caffe */
+ INFERENCE_BACKEND_TF, /**< TensorFlow */
+ INFERENCE_BACKEND_TFLite, /**< TensorFlow-Lite */
+ INFERENCE_BACKEND_MAX
+} inference_backend_type_e;
+
+/**
+ * @brief Enumeration for inference target.
+ *
+ * @since_tizen 5.5
+ *
+ */
+typedef enum {
+ INFERENCE_TARGET_NONE = -1,
+ INFERENCE_TARGET_CPU, /**< CPU */
+ INFERENCE_TARGET_GPU, /**< GPU*/
+ INFERENCE_TARGET_MAX
+} inference_target_type_e;
+
+/**
+ * @brief Enumeration for the kind of input fed to the engine: a raw tensor
+ *        (general) or an image (vision path).
+ */
+typedef enum {
+ INFERENCE_INPUT_GENERAL = 0,
+ INFERENCE_INPUT_IMAGE,
+ INFERENCE_INPUT_MAX
+} inference_input_type_e;
+
+// NOTE(review): the struct tag "_ImageClassficationResults" is missing an
+// "i" (Classfication); the typedef name below is spelled correctly, and the
+// tag cannot be renamed without touching any code that uses it.
+typedef struct _ImageClassficationResults {
+ int number_of_classes;
+ std::vector<int> indices;
+ std::vector<std::string> names;
+ std::vector<float> confidences;
+} ImageClassificationResults; /**< structure ImageClassificationResults */
+
+typedef struct _ObjectDetectionResults {
+ int number_of_objects;
+ std::vector<int> indices;
+ std::vector<std::string> names;
+ std::vector<float> confidences;
+ std::vector<cv::Rect> locations;
+} ObjectDetectionResults; /**< structure ObjectDetectionResults */
+
+typedef struct _FaceDetectionResults {
+ int number_of_faces;
+ std::vector<float> confidences;
+ std::vector<cv::Rect> locations;
+} FaceDetectionResults; /**< structure FaceDetectionResults */
+
+typedef struct _FacialLandMarkDetectionResults {
+ int number_of_landmarks;
+ std::vector<cv::Point> locations;
+} FacialLandMarkDetectionResults; /**< structure FacialLandMarkDetectionResults */
+
+typedef struct _InferenceResults{
+ int dimInfoSize;
+ std::vector<std::vector<int>> dimInfo;
+ std::vector<float*> data;
+} InferenceResults; /**< structure InferenceResults */
+
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /* __INFERENCE_ENGINE_TYPE_H__ */
\ No newline at end of file
--- /dev/null
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __INFERENCE_ENGINE_VISION_H__
+#define __INFERENCE_ENGINE_VISION_H__
+
+#include <vector>
+#include <string>
+
+#include "inference_engine_type.h"
+#include <opencv2/core.hpp>
+
+namespace InferenceEngineInterface {
+namespace Vision {
+
+/**
+ * @brief Abstract interface a vision inference backend implements. Backend
+ *        libraries are loaded at runtime and expose factory symbols
+ *        matching the typedefs declared below.
+ *        Fixed: a stray "Vision" token between PrepareInputLayerPassage()
+ *        and Run() made this header fail to compile.
+ */
+class IInferenceEngineVision {
+public:
+
+    virtual ~IInferenceEngineVision() {};
+
+    // InputTensor
+    virtual int SetInputTensorParam() = 0;
+
+    virtual int SetInputTensorParamInput(int width, int height, int dim, int ch) = 0;
+
+    virtual int SetInputTensorParamNorm(double deviation, double mean) = 0;
+
+    virtual int SetInputTensorParamNode(std::string node) = 0;
+
+
+    // OutputTensor
+    virtual int SetOutputTensorParam() = 0;
+
+    virtual int SetOutputTensorParamThresHold(double threshold) = 0;
+
+    virtual int SetOutputTensorParamNumbers(int number) = 0;
+
+    virtual int SetOutputTensorParamType(int type) = 0;
+
+    virtual int SetOutPutTensorParamNodes(std::string node) = 0;
+
+    virtual int SetTargetDevice(inference_target_type_e type) = 0;
+
+    // Load and Run
+    virtual int Load() = 0;
+
+    virtual int CreateInputLayerPassage() = 0;
+
+    virtual int PrepareInputLayerPassage(inference_input_type_e type) = 0;
+
+    // Run on an image (vision path) or on a raw float tensor.
+    virtual int Run(cv::Mat tensor) = 0;
+
+    virtual int Run(std::vector<float> tensor) = 0;
+
+    virtual int GetInferenceResult(ImageClassificationResults& results) = 0;
+
+    virtual int GetInferenceResult(ObjectDetectionResults& results) = 0;
+
+    virtual int GetInferenceResult(FaceDetectionResults& results) = 0;
+
+    virtual int GetInferenceResult(FacialLandMarkDetectionResults& results) = 0;
+
+    virtual int GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float*>& results) = 0;
+
+    virtual int GetNumberOfOutputs() = 0;
+
+    virtual void SetUserListName(std::string userlist) = 0;
+};
+
+// Factory entry points each vision backend shared object exports.
+typedef void destroy_t(IInferenceEngineVision*);
+typedef IInferenceEngineVision* init_t(std::string configFile, std::string weightFile, std::string userFile);
+} /* Vision */
+} /* InferenceEngineInterface */
+
+#endif /* __INFERENCE_ENGINE_VISION_H__ */
--- /dev/null
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __INFERENCE_ENGINE_VISION_IMPL_H__
+#define __INFERENCE_ENGINE_VISION_IMPL_H__
+
+#include <vector>
+#include <string>
+
+#include <inference_engine_vision.h>
+#include <inference_engine_type.h>
+#include <opencv2/core.hpp>
+
+
+namespace InferenceEngineInterface {
+namespace Vision {
+
+/**
+ * @brief Thin wrapper that loads a vision backend library and forwards each
+ *        call to the IInferenceEngineVision object the backend creates.
+ */
+class InferenceEngineVision {
+public:
+ InferenceEngineVision(inference_backend_type_e backend);
+
+ ~InferenceEngineVision();
+
+ // Loads the backend library and creates the engine object; must succeed
+ // before any of the forwarding methods below are called.
+ int Init(std::string configFile,
+ std::string weightFile, std::string UserFile);
+
+ void Deinit();
+
+ // Input Tensor parameters
+ int SetInputTensorParamInput(int width, int height, int dim, int ch);
+
+ int SetInputTensorParamNorm(double deviation, double mean);
+
+ int SetInputTensorParamNode(std::string node);
+
+ // Output Tensor parameters
+ int SetOutputTensorParamThresHold(double threshold);
+
+ int SetOutputTensorParamNumbers(int number);
+
+ int SetOutputTensorParamType(int type);
+
+ // NOTE(review): spelling matches IInferenceEngineVision's
+ // SetOutPutTensorParamNodes (sic); kept for interface compatibility.
+ int SetOutPutTensorParamNodes(std::string node);
+
+ // Set target device
+ int SetTargetDevice(inference_target_type_e device);
+
+ int Load();
+
+ int Run(cv::Mat tensor);
+
+ int GetInferenceResult(ImageClassificationResults& results);
+
+ int GetInferenceResult(ObjectDetectionResults& results);
+
+ int GetInferenceResult(FaceDetectionResults& results);
+
+ int GetInferenceResult(FacialLandMarkDetectionResults& results);
+
+ int GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float*>& results);
+
+ int GetNumberOfOutputs();
+
+ void SetUserListName(std::string userlist);
+
+private:
+ void *handle; // dlopen() handle of the backend library
+ IInferenceEngineVision *engine; // backend object created at Init()
+ inference_backend_type_e mBackend; // selects which backend library to load
+ std::vector<std::string> mUserListName; // user-supplied label list names
+
+};
+
+} /* Vision */
+} /* InferenceEngineInterface */
+
+#endif /* __INFERENCE_ENGINE_VISION_IMPL_H__ */
\ No newline at end of file
--- /dev/null
+<manifest>
+ <request>
+ <domain name="_" />
+ </request>
+</manifest>
--- /dev/null
+
+# Package Information for pkg-config
+
+prefix=@PREFIX@
+exec_prefix=${prefix}
+libdir=@LIB_INSTALL_DIR@
+includedir=${prefix}/include/media
+
+Name: @PC_NAME@
+Description: @PACKAGE_DESCRIPTION@
+Version: @VERSION@
+Requires: @PC_REQUIRED@
+Libs: -L${libdir} @PC_LDFLAGS@
+Cflags: -I${includedir} -I/usr/include
--- /dev/null
+<manifest>
+ <request>
+ <domain name="_" />
+ </request>
+</manifest>
--- /dev/null
+
+# Package Information for pkg-config
+
+prefix=@PREFIX@
+exec_prefix=${prefix}
+libdir=@LIB_INSTALL_DIR@
+includedir=${prefix}/include/media
+
+Name: @PC_NAME@
+Description: @PACKAGE_DESCRIPTION@
+Version: @VERSION@
+Requires: @PC_REQUIRED@
+Libs: -L${libdir} @PC_LDFLAGS@
+Cflags: -I${includedir} -I/usr/include
--- /dev/null
+Name: inference-engine-interface
+Summary: Interface of inference engines
+Version: 0.0.1
+Release: 1
+Group: Multimedia/Framework
+License: Apache-2.0
+Source0: %{name}-%{version}.tar.gz
+BuildRequires: cmake
+BuildRequires: pkgconfig(dlog)
+BuildRequires: pkgconfig(libtzplatform-config)
+BuildRequires: pkgconfig(capi-base-common)
+BuildRequires: pkgconfig(opencv) >= 3.4.1
+BuildRequires: pkgconfig(python)
+
+%description
+Interface of inference engines
+
+%package devel
+Summary: Interface of inference engines
+Group: Multimedia/Framework
+Requires: %{name} = %{version}-%{release}
+
+%description devel
+Interface of inference engines (Dev)
+
+
+%package common
+Summary: Common interface of inference engines
+Group: Multimedia/Framework
+
+%description common
+Common interface of inference engines
+
+%package common-devel
+Summary: Common interface of inference engines
+Group: Multimedia/Framework
+Requires: inference-engine-interface-common
+
+%description common-devel
+Common interface of inference engines (Dev)
+
+%package vision
+Summary: Vision interface of inference engines
+Group: Multimedia/Framework
+
+%description vision
+Vision interface of inference engines
+
+%package vision-devel
+Summary: Vision interface of inference engines
+Group: Multimedia/Framework
+Requires: inference-engine-interface-vision
+
+%description vision-devel
+Vision interface of inference engines (Dev)
+
+%prep
+%setup -q
+
+%build
+%if 0%{?sec_build_binary_debug_enable}
+export CFLAGS="$CFLAGS -DTIZEN_DEBUG_ENABLE"
+export CXXFLAGS="$CXXFLAGS -DTIZEN_DEBUG_ENABLE"
+export FFLAGS="$FFLAGS -DTIZEN_DEBUG_ENABLE"
+%endif
+
+export CFLAGS+=" -DPATH_LIBDIR=\\\"%{_libdir}\\\""
+export CXXFLAGS+=" -DPATH_LIBDIR=\\\"%{_libdir}\\\""
+
+MAJORVER=`echo %{version} | awk 'BEGIN {FS="."}{print $1}'`
+%cmake . -DFULLVER=%{version} -DMAJORVER=${MAJORVER} -DTZ_SYS_BIN=%TZ_SYS_BIN
+
+make %{?jobs:-j%jobs}
+
+%install
+rm -rf %{buildroot}
+
+%make_install
+
+%post -p /sbin/ldconfig
+%postun -p /sbin/ldconfig
+
+%files common
+%manifest inference-engine-interface-common.manifest
+%license LICENSE.APLv2
+%{_libdir}/libinference-engine-interface-common.so.*
+
+%files common-devel
+%{_includedir}/media/*.h
+%{_libdir}/pkgconfig/*common.pc
+%{_libdir}/lib*-common.so
+
+%files vision
+%manifest inference-engine-interface-vision.manifest
+%license LICENSE.APLv2
+%{_libdir}/libinference-engine-interface-vision.so.*
+
+%files vision-devel
+%{_includedir}/media/*.h
+%{_libdir}/pkgconfig/*vision.pc
+%{_libdir}/lib*-vision.so
\ No newline at end of file
--- /dev/null
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inference_engine_error.h>
+#include <inference_engine_vision_impl.h>
+
+
+#include <fstream>
+#include <iostream>
+#include <unistd.h>
+#include <time.h>
+#include <dlfcn.h>
+
+extern "C" {
+
+#include <dlog.h>
+
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
+
+#define LOG_TAG "INFERENCE_ENGINE_VISION"
+}
+
+namespace InferenceEngineInterface {
+namespace Vision {
+
+// Backend plugin library names, indexed by inference_backend_type_e:
+// the constructor stores the enum value and Init() uses it as an index here,
+// so this order must stay in sync with the enum declaration.
+const char* engineLibs[] = {
+ "libinference-engine-opencv.so",
+ "libinference-engine-caffe.so",
+ "libinference-engine-tf.so",
+ "libinference-engine-tflite.so"};
+
+// Stores the backend selection; the plugin itself is loaded later in Init().
+// handle/engine are zero-initialized so the destructor can safely detect
+// that Init() was never run (they were previously left indeterminate).
+InferenceEngineVision::InferenceEngineVision(inference_backend_type_e backend) :
+    handle(NULL),
+    engine(NULL),
+    mBackend(backend)
+{
+    LOGE("ENTER");
+    LOGE("LEAVE");
+}
+
+// Destroys the backend engine through the plugin's exported destroy symbol,
+// then unloads the plugin. Guards added for the previously unchecked cases:
+// Init() never ran / failed (handle or engine NULL) and dlsym() failure.
+// NOTE(review): assumes handle/engine start out NULL when Init() was not run —
+// confirm the constructor zero-initializes them.
+InferenceEngineVision::~InferenceEngineVision()
+{
+    LOGW("ENTER");
+    if (handle != NULL) {
+        destroy_t *engineDestroy = (destroy_t*)dlsym(handle, "EngineVisionDestroy");
+        if (engineDestroy != NULL && engine != NULL)
+            engineDestroy(engine);
+        dlclose(handle);
+        handle = NULL;
+        engine = NULL;
+    }
+    LOGW("LEAVE");
+}
+
+// Loads the backend plugin chosen at construction, resolves its
+// "EngineVisionInit" entry point, and creates the engine from the given
+// model/weight/user files.
+// Returns INFERENCE_ENGINE_ERROR_NONE on success,
+// INFERENCE_ENGINE_ERROR_NOT_SUPPORTED when the plugin cannot be opened,
+// INFERENCE_ENGINE_ERROR_INTERNAL when symbol lookup or engine creation fails.
+int InferenceEngineVision::Init(std::string configFile,
+    std::string weightFile, std::string userFile)
+{
+    LOGW("ENTER");
+    char *error = NULL;
+    handle = dlopen(engineLibs[mBackend], RTLD_LAZY);
+    if (!handle) {
+        LOGE("Fail to dlopen %s", engineLibs[mBackend]);
+        LOGE("Error: %s\n", dlerror());
+        return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+    }
+
+    // Clear any stale error state so the post-dlsym dlerror() check is valid.
+    dlerror();
+    init_t* EngineInit = (init_t *)dlsym(handle, "EngineVisionInit");
+    if ((error = dlerror()) != NULL) {
+        LOGE("Error: %s\n", error);
+        dlclose(handle);
+        handle = NULL; // keep the destructor from touching a closed handle
+        return INFERENCE_ENGINE_ERROR_INTERNAL;
+    }
+
+    engine = EngineInit(configFile, weightFile, userFile);
+    if (engine == NULL) {
+        LOGE("Fail to EngineInit");
+        dlclose(handle);
+        handle = NULL; // keep the destructor from touching a closed handle
+        return INFERENCE_ENGINE_ERROR_INTERNAL;
+    }
+
+    LOGW("LEAVE");
+    return INFERENCE_ENGINE_ERROR_NONE;
+}
+
+// Intentional no-op: teardown of the engine and plugin handle is performed
+// in the destructor, so there is currently nothing to unwind here.
+void InferenceEngineVision::Deinit()
+{
+ ;
+}
+
+// Forward the input tensor geometry (width/height/dimension/channels)
+// to the backend engine; returns the backend's status code.
+int InferenceEngineVision::SetInputTensorParamInput(int width, int height, int dim, int ch)
+{
+    const int status = engine->SetInputTensorParamInput(width, height, dim, ch);
+    if (status != INFERENCE_ENGINE_ERROR_NONE) {
+        LOGE("Fail to SetInputTensorParamInput");
+    }
+    return status;
+}
+
+// Forward the normalization parameters (deviation, mean) to the backend
+// engine; returns the backend's status code.
+int InferenceEngineVision::SetInputTensorParamNorm(double deviation, double mean)
+{
+    const int result = engine->SetInputTensorParamNorm(deviation, mean);
+    if (result != INFERENCE_ENGINE_ERROR_NONE) {
+        LOGE("Fail to SetInputTensorParamNorm");
+    }
+    return result;
+}
+
+// Hand the input node name over to the backend engine; returns its status.
+int InferenceEngineVision::SetInputTensorParamNode(std::string node)
+{
+    int err = engine->SetInputTensorParamNode(node);
+    if (err == INFERENCE_ENGINE_ERROR_NONE)
+        return err;
+
+    LOGE("Fail to SetInputTensorParamNode");
+    return err;
+}
+
+// Pass the detection threshold through to the backend engine.
+int InferenceEngineVision::SetOutputTensorParamThresHold(double threshold)
+{
+    const int status = engine->SetOutputTensorParamThresHold(threshold);
+    if (status != INFERENCE_ENGINE_ERROR_NONE) {
+        LOGE("Fail to SetOutputTensorParamThresHold");
+    }
+    return status;
+}
+
+// Pass the requested number of outputs through to the backend engine.
+// Fix: the failure log previously said "SetOuputTensorParamNumbers" (typo).
+int InferenceEngineVision::SetOutputTensorParamNumbers(int numbers)
+{
+    int ret = engine->SetOutputTensorParamNumbers(numbers);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to SetOutputTensorParamNumbers");
+
+    return ret;
+}
+
+// Propagate the requested output tensor type to the backend engine.
+int InferenceEngineVision::SetOutputTensorParamType(int type)
+{
+    const int rc = engine->SetOutputTensorParamType(type);
+    if (rc != INFERENCE_ENGINE_ERROR_NONE) {
+        LOGE("Fail to SetOutputTensorParamType");
+    }
+    return rc;
+}
+
+// Select the execution device (e.g. CPU/GPU) on the backend engine.
+int InferenceEngineVision::SetTargetDevice(inference_target_type_e type)
+{
+    const int result = engine->SetTargetDevice(type);
+    if (result != INFERENCE_ENGINE_ERROR_NONE) {
+        LOGE("Fail to SetTargetDevice");
+    }
+    return result;
+}
+
+// Loads the model, then creates and prepares the input layer passage for
+// image input. Fix: each step now aborts on failure — previously `ret` was
+// overwritten by the later calls, so an early failure could be masked by a
+// later success and the sequence ran on a half-initialized engine.
+int InferenceEngineVision::Load()
+{
+    int ret = engine->Load();
+    if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+        LOGE("Fail to load InferenceEngineVision");
+        return ret;
+    }
+
+    ret = engine->CreateInputLayerPassage();
+    if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+        LOGE("Fail to load CreateInputLayerPassage");
+        return ret;
+    }
+
+    ret = engine->PrepareInputLayerPassage(INFERENCE_INPUT_IMAGE);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to load PrepareInputLayerPassage");
+
+    return ret;
+}
+
+// Execute inference on the backend engine with the given image matrix.
+int InferenceEngineVision::Run(cv::Mat tensor)
+{
+    const int status = engine->Run(tensor);
+    if (status != INFERENCE_ENGINE_ERROR_NONE) {
+        LOGE("Fail to run InferenceEngineVision");
+    }
+    return status;
+}
+
+// Fetch image classification results from the backend engine into `results`.
+// Fix: failure log said "GetClassficationResults" (typo); the stale
+// "// NULL CHECK?" marker is dropped — `results` is a reference and the
+// return code already signals failure.
+int InferenceEngineVision::GetInferenceResult(ImageClassificationResults& results)
+{
+    int ret = engine->GetInferenceResult(results);
+
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to GetImageClassificationResults");
+
+    return ret;
+}
+
+// Fetch object detection results from the backend engine into `results`.
+int InferenceEngineVision::GetInferenceResult(ObjectDetectionResults& results)
+{
+    const int status = engine->GetInferenceResult(results);
+    if (status != INFERENCE_ENGINE_ERROR_NONE) {
+        LOGE("Fail to GetObjectDetectionResults");
+    }
+    return status;
+}
+
+// Fetch face detection results from the backend engine into `results`.
+int InferenceEngineVision::GetInferenceResult(FaceDetectionResults& results)
+{
+    const int rc = engine->GetInferenceResult(results);
+    if (rc != INFERENCE_ENGINE_ERROR_NONE) {
+        LOGE("Fail to GetFaceDetectionResults");
+    }
+    return rc;
+}
+
+// Fetch facial landmark detection results from the backend engine.
+int InferenceEngineVision::GetInferenceResult(FacialLandMarkDetectionResults& results)
+{
+    int err = engine->GetInferenceResult(results);
+    if (err == INFERENCE_ENGINE_ERROR_NONE)
+        return err;
+
+    LOGE("Fail to GetFacialLandMarkDetectionResults");
+    return err;
+}
+
+// Fetch raw output tensors: dimension info per output plus pointers to the
+// backend-owned result buffers.
+int InferenceEngineVision::GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float*>& results)
+{
+    const int status = engine->GetInferenceResult(dimInfo, results);
+    if (status != INFERENCE_ENGINE_ERROR_NONE) {
+        LOGE("Fail to GetInferenceResult");
+    }
+    return status;
+}
+
+// Report how many output tensors the backend engine produces.
+int InferenceEngineVision::GetNumberOfOutputs()
+{
+    const int outputCount = engine->GetNumberOfOutputs();
+    return outputCount;
+}
+
+// Stub: the `userlist` argument is currently ignored and the declared
+// mUserListName member is never populated in visible code.
+// NOTE(review): presumably meant to store the user label list — confirm
+// intended behavior before callers rely on it.
+void InferenceEngineVision::SetUserListName(std::string userlist)
+{
+ ;
+}
+
+} /* Vision */
+} /* InferenceEngineInterface */