Remove InferenceEngineVision-related code
authorInki Dae <inki.dae@samsung.com>
Wed, 12 Feb 2020 03:03:57 +0000 (12:03 +0900)
committerInki Dae <inki.dae@samsung.com>
Tue, 14 Apr 2020 00:42:53 +0000 (09:42 +0900)
Change-Id: Iad41e0c01f776a787c95bca3252b9c7e0c04fd1a
Signed-off-by: Inki Dae <inki.dae@samsung.com>
CMakeLists.txt
include/inference_engine_vision_impl.h [deleted file]
inference-engine-interface-vision.manifest [deleted file]
inference-engine-interface-vision.pc.in [deleted file]
packaging/inference-engine-interface.spec
vision/inference_engine_vision_impl.cpp [deleted file]

index 2476ab769eacfb3b5e68d83e3cd23464282012c7..5c26a49e6a2aa96f9c6abf02f8ed4fbeef9d1a9c 100644 (file)
@@ -1,10 +1,9 @@
 
 CMAKE_MINIMUM_REQUIRED(VERSION 2.6)
 SET(fw_name "inference-engine-interface")
-SET(fw_name_vision ${fw_name}-vision)
 SET(fw_name_common ${fw_name}-common)
 
-PROJECT(${fw_name_vision})
+PROJECT(${fw_name_common})
 
 SET(CMAKE_INSTALL_PREFIX /usr)
 SET(PREFIX ${CMAKE_INSTALL_PREFIX})
@@ -15,21 +14,12 @@ SET(dependents "dlog iniparser")
 SET(pc_dependents "capi-base-common")
 INCLUDE(FindPkgConfig)
 
-pkg_check_modules(${fw_name_vision} REQUIRED ${dependents})
-FOREACH(flag ${${fw_name_vision}_CFLAGS})
+pkg_check_modules(${fw_name_common} REQUIRED ${dependents})
+FOREACH(flag ${${fw_name_common}_CFLAGS})
     SET(EXTRA_CFLAGS "${EXTRA_CFLAGS} ${flag}")
     SET(EXTRA_CXXFLAGS "${EXTRA_CXXFLAGS} ${flag}")
 ENDFOREACH(flag)
 
-#OpenCV
-FIND_PACKAGE(OpenCV REQUIRED core)
-if(NOT OpenCV_FOUND)
-       MESSAGE(SEND_ERROR "OpenCV NOT FOUND")
-       RETURN()
-else()
-       INCLUDE_DIRECTORIES(${OpenCV_INCLUDE_DIRS})
-endif()
-
 SET(CMAKE_C_FLAGS "-I./include -I./include/headers ${CMAKE_C_FLAGS} ${EXTRA_CFLAGS} -fPIC -Wall -w")
 SET(CMAKE_C_FLAGS_DEBUG "-O0 -g")
 
@@ -75,40 +65,6 @@ CONFIGURE_FILE(
 )
 INSTALL(FILES ${CMAKE_CURRENT_SOURCE_DIR}/${fw_name_common}.pc DESTINATION ${LIB_INSTALL_DIR}/pkgconfig)
 
-#vision
-aux_source_directory(vision SOURCES)
-ADD_LIBRARY(${fw_name_vision} SHARED ${SOURCES})
-
-TARGET_LINK_LIBRARIES(${fw_name_vision} ${OpenCV_LIBS} dlog stdc++fs)
-
-
-SET_TARGET_PROPERTIES(${fw_name_vision}
-     PROPERTIES
-     VERSION ${FULLVER}
-     SOVERSION ${MAJORVER}
-     CLEAN_DIRECT_OUTPUT 1
-)
-
-INSTALL(TARGETS ${fw_name_vision} DESTINATION ${LIB_INSTALL_DIR})
-INSTALL(
-        DIRECTORY ${INC_DIR}/ DESTINATION include/media
-        FILES_MATCHING
-        PATTERN "*_private.h" EXCLUDE
-        PATTERN "*.h"
-        )
-
-SET(PC_NAME ${fw_name_vision})
-SET(PC_REQUIRED ${pc_dependents})
-SET(PC_LDFLAGS -l${fw_name_vision})
-SET(PC_CFLAGS -I\${includedir}/media)
-
-CONFIGURE_FILE(
-    ${fw_name_vision}.pc.in
-    ${CMAKE_CURRENT_SOURCE_DIR}/${fw_name_vision}.pc
-    @ONLY
-)
-INSTALL(FILES ${CMAKE_CURRENT_SOURCE_DIR}/${fw_name_vision}.pc DESTINATION ${LIB_INSTALL_DIR}/pkgconfig)
-
 IF(UNIX)
 
 ADD_CUSTOM_TARGET (distclean @echo cleaning for source distribution)
diff --git a/include/inference_engine_vision_impl.h b/include/inference_engine_vision_impl.h
deleted file mode 100755 (executable)
index c8d0e40..0000000
+++ /dev/null
@@ -1,145 +0,0 @@
-/**
- * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __INFERENCE_ENGINE_VISION_IMPL_H__
-#define __INFERENCE_ENGINE_VISION_IMPL_H__
-
-#include <vector>
-#include <string>
-#include "inference_engine_common_impl.h"
-#include "inference_engine_common.h"
-#include "inference_engine_type.h"
-
-using namespace InferenceEngineInterface::Common;
-
-namespace InferenceEngineInterface {
-namespace Vision {
-
-class InferenceEngineVision {
-public:
-    InferenceEngineVision(inference_engine_config *config);
-    InferenceEngineVision(std::string backend);
-
-    int BindBackend(inference_engine_config *config);
-
-    void UnbindBackend(void);
-
-    ~InferenceEngineVision();
-
-    /**
-     * @brief Set input node name. Deprecated.
-     *
-     * @since_tizen 5.5
-     */
-    int SetInputTensorParamNode(std::string node);
-
-    /**
-     * @brief Set output nodes' names. Deprecated.
-     *
-     * @since_tizen 5.5
-     */
-    int SetOutputTensorParamNodes(std::vector<std::string> nodes);
-
-    int SetInputDataBuffer(tensor_t data);
-
-    /**
-     * @brief Get an input layer property information from a given backend engine.
-     *
-     * @since_tizen 6.0
-     */
-    int GetInputTensorProperty(inference_engine_layer_property *property);
-
-    /**
-     * @brief Get an output layer property information from a given backend engine.
-     *
-     * @since_tizen 6.0
-     */
-    int GetOutputTensorProperty(inference_engine_layer_property *property);
-
-    void *GetInputDataPtr();
-
-    int GetInputLayerAttrType();
-
-    /**
-     * @brief Set an input layer property information to a given backend engine.
-     *
-     * @since_tizen 6.0
-     */
-    int SetInputTensorProperty(inference_engine_layer_property &property);
-
-    /**
-     * @brief Set an output layer property information to a given backend engine.
-     *
-     * @since_tizen 6.0
-     */
-    int SetOutputTensorProperty(inference_engine_layer_property &property);
-
-    /**
-     * @brief Set target devices.
-     * @details See #inference_target_type_e
-     *
-     * @since_tizen 5.5
-     */
-    int SetTargetDevices(int types);
-
-    /**
-     * @brief Load model data with user-given model information.
-     *
-     * @since_tizen 6.0
-     */
-    int Load(std::vector<std::string> model_paths, inference_model_format_e model_format);
-
-    /**
-     * @brief Get capacity from a given backend engine.
-     *
-     * @since_tizen 6.0
-     */
-    int GetBackendCapacity(inference_engine_capacity *capacity);
-
-    /**
-     * @brief Run an inference with a tensor. Deprecated.
-     *
-     * @since_tizen 5.5
-     */
-    int Run();
-
-    /**
-     * @brief Run an inference with user-given input and output buffers.
-     *
-     * @since_tizen 6.0
-     */
-    int Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
-            std::vector<inference_engine_tensor_buffer> &output_buffers);
-
-    /**
-     * @brief Get inference results from a backend engine.
-     *
-     * @since_tizen 6.0
-     */
-    int GetInferenceResult(tensor_t& results);
-
-    int SetLibraryPath(std::string path);
-private:
-    std::string mBackendLibName;
-
-    InferenceEngineCommon *mCommonEngine; /**< InferenceEngineCommon is used to
-                                               do typical process */
-};
-
-} /* Vision */
-} /* InferenceEngineInterface */
-
-#endif /* __INFERENCE_ENGINE_VISION_IMPL_H__ */
diff --git a/inference-engine-interface-vision.manifest b/inference-engine-interface-vision.manifest
deleted file mode 100644 (file)
index 86dbb26..0000000
+++ /dev/null
@@ -1,5 +0,0 @@
-<manifest>
-    <request>
-        <domain name="_" />
-    </request>
-</manifest>
diff --git a/inference-engine-interface-vision.pc.in b/inference-engine-interface-vision.pc.in
deleted file mode 100644 (file)
index e7cd18f..0000000
+++ /dev/null
@@ -1,14 +0,0 @@
-
-# Package Information for pkg-config
-
-prefix=@PREFIX@
-exec_prefix=/usr
-libdir=@LIB_INSTALL_DIR@
-includedir=/usr/include/media
-
-Name: @PC_NAME@
-Description: @PACKAGE_DESCRIPTION@
-Version: @VERSION@
-Requires: @PC_REQUIRED@
-Libs: -L${libdir} @PC_LDFLAGS@
-Cflags: -I${includedir} -I/usr/include
index 4bfd8d27ce97396c6cef28d82a378c64947b55b9..17e9757c48441f173fecd35145dd61842ab52360 100644 (file)
@@ -40,21 +40,6 @@ Requires:   inference-engine-interface-common
 %description common-devel
 Common interface of inference engines (Dev)
 
-%package vision
-Summary:    Vision interface of inference engines
-Group:      Multimedia/Framework
-
-%description vision
-Vision interface of inference engines
-
-%package vision-devel
-Summary:    Vision interface of inference engines
-Group:      Multimedia/Framework
-Requires:   inference-engine-interface-vision
-
-%description vision-devel
-Vision interface of inference enginese (Dev)
-
 %prep
 %setup -q
 
@@ -90,13 +75,3 @@ rm -rf %{buildroot}
 %{_includedir}/media/*.h
 %{_libdir}/pkgconfig/*common.pc
 %{_libdir}/lib*-common.so
-
-%files vision
-%manifest inference-engine-interface-vision.manifest
-%license LICENSE.APLv2
-%{_libdir}/libinference-engine-interface-vision.so.*
-
-%files vision-devel
-%{_includedir}/media/*.h
-%{_libdir}/pkgconfig/*vision.pc
-%{_libdir}/lib*-vision.so
diff --git a/vision/inference_engine_vision_impl.cpp b/vision/inference_engine_vision_impl.cpp
deleted file mode 100755 (executable)
index 503df22..0000000
+++ /dev/null
@@ -1,245 +0,0 @@
-/**
- * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "inference_engine_error.h"
-#include "inference_engine_vision_impl.h"
-
-
-#include <fstream>
-#include <iostream>
-#include <unistd.h>
-#include <time.h>
-#include <dlfcn.h>
-#include <queue>
-
-extern "C" {
-
-#include <dlog.h>
-
-#ifdef LOG_TAG
-#undef LOG_TAG
-#endif
-
-#define LOG_TAG "INFERENCE_ENGINE_VISION"
-}
-
-namespace InferenceEngineInterface {
-namespace Vision {
-InferenceEngineVision::InferenceEngineVision(inference_engine_config *config) :
-    mCommonEngine(nullptr)
-{
-    LOGE("ENTER");
-
-    mCommonEngine = new InferenceEngineCommon(config);
-
-    LOGE("LEAVE");
-}
-
-InferenceEngineVision::InferenceEngineVision(std::string backend) :
-    mCommonEngine(nullptr)
-{
-    LOGE("ENTER");
-
-    mCommonEngine = new InferenceEngineCommon(backend);
-
-    LOGE("LEAVE");
-}
-
-InferenceEngineVision::~InferenceEngineVision()
-{
-    LOGW("ENTER");
-
-    if (mCommonEngine != nullptr) {
-        delete mCommonEngine;
-        mCommonEngine = nullptr;
-    }
-
-    LOGW("LEAVE");
-}
-
-int InferenceEngineVision::BindBackend(inference_engine_config *config)
-{
-    LOGI("ENTER");
-    int ret = mCommonEngine->BindBackend(config);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE)
-        LOGE("Fail to bind backend library.");
-    LOGI("LEAVE");
-
-    return ret;
-}
-
-void InferenceEngineVision::UnbindBackend(void)
-{
-    LOGI("ENTER");
-    mCommonEngine->UnbindBackend();
-    LOGI("LEAVE");
-}
-
-int InferenceEngineVision::SetInputTensorParamNode(std::string node)
-{
-    LOGE("ENTER");
-    int ret = mCommonEngine->SetInputTensorParamNode(node);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE)
-        LOGE("Fail to SetInputTensorParamNode");
-    LOGE("LEAVE");
-    return ret;
-}
-
-int InferenceEngineVision::GetInputTensorProperty(inference_engine_layer_property *property)
-{
-    LOGE("ENTER");
-    int ret = mCommonEngine->GetInputTensorProperty(property);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE)
-        LOGE("Fail to GetInputTensorProperty");
-    LOGE("LEAVE");
-    return ret;
-}
-
-void *InferenceEngineVision::GetInputDataPtr()
-{
-    return mCommonEngine->GetInputDataPtr();
-}
-
-int InferenceEngineVision::GetOutputTensorProperty(inference_engine_layer_property *property)
-{
-    LOGE("ENTER");
-    int ret = mCommonEngine->GetOutputTensorProperty(property);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE)
-        LOGE("Fail to GetOutputTensorProperty");
-    LOGE("LEAVE");
-    return ret;
-}
-
-int InferenceEngineVision::SetInputDataBuffer(tensor_t data)
-{
-    return mCommonEngine->SetInputDataBuffer(data);
-}
-
-int InferenceEngineVision::SetInputTensorProperty(inference_engine_layer_property &property)
-{
-    LOGE("ENTER");
-    int ret = mCommonEngine->SetInputTensorProperty(property);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE)
-        LOGE("Fail to SetInputTensorProperty");
-    LOGE("LEAVE");
-    return ret;
-}
-
-int InferenceEngineVision::SetOutputTensorProperty(inference_engine_layer_property &property)
-{
-    LOGE("ENTER");
-    int ret = mCommonEngine->SetOutputTensorProperty(property);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE)
-        LOGE("Fail to SetOutputTensorProperty");
-    LOGE("LEAVE");
-    return ret;
-}
-
-int InferenceEngineVision::SetTargetDevices(int types)
-{
-    int ret = mCommonEngine->SetTargetDevices(types);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE)
-        LOGE("Fail to SetTargetDevice");
-
-    return ret;
-}
-
-int InferenceEngineVision::SetOutputTensorParamNodes(std::vector<std::string> nodes)
-{
-    LOGI("ENTER");
-    int ret = mCommonEngine->SetOutputTensorParamNodes(nodes);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE)
-        LOGE("Fail to SetOutputTensorParamNodes");
-    LOGI("LEAVE");
-    return ret;
-}
-
-int InferenceEngineVision::Load(std::vector<std::string> model_paths, inference_model_format_e model_format)
-{
-    LOGI("ENTER");
-
-    // Load model files.
-    int ret = mCommonEngine->Load(model_paths, model_format);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-        LOGE("Fail to load InferenceEngineVision");
-        return ret;
-    }
-
-    LOGI("LEAVE");
-
-    return ret;
-}
-
-int InferenceEngineVision::GetInputLayerAttrType()
-{
-    return mCommonEngine->GetInputLayerAttrType();
-}
-
-int InferenceEngineVision::GetBackendCapacity(inference_engine_capacity *capacity)
-{
-    LOGI("ENTER");
-
-    int ret = mCommonEngine->GetBackendCapacity(capacity);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-        LOGE("Fail to get backend capacity");
-        return ret;
-    }
-
-    LOGI("LEAVE");
-
-    return ret;
-}
-
-int InferenceEngineVision::Run()
-{
-    LOGI("ENTER");
-
-    int ret = mCommonEngine->Run();
-    if (ret != INFERENCE_ENGINE_ERROR_NONE)
-        LOGE("Fail to run InferenceEngineVision");
-
-    LOGI("LEAVE");
-    return ret;
-}
-
-int InferenceEngineVision::Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
-                               std::vector<inference_engine_tensor_buffer> &output_buffers)
-{
-    LOGI("ENTER");
-    int ret = mCommonEngine->Run(input_buffers, output_buffers);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE)
-        LOGE("Fail to run InferenceEngineVision");
-
-    LOGI("LEAVE");
-    return ret;
-}
-
-int InferenceEngineVision::GetInferenceResult(tensor_t &results)
-{
-    return mCommonEngine->GetInferenceResult(results);
-}
-
-int InferenceEngineVision::SetLibraryPath(std::string path)
-{
-    int ret = 0;
-    ret = mCommonEngine->SetLibraryPath(path);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE)
-        LOGE("Fail to run SetLibraryPath");
-    LOGI("LEAVE");
-    return ret;
-}
-} /* Vision */
-} /* InferenceEngineInterface */