Consider tensor filter plugin type for nnstreamer
branch: sandbox/inki.dae/nnstreamer
author     Inki Dae <inki.dae@samsung.com>
           Wed, 27 May 2020 07:16:58 +0000 (16:16 +0900)
committer  Inki Dae <inki.dae@samsung.com>
           Wed, 27 May 2020 07:16:58 +0000 (16:16 +0900)
Change-Id: I4b80d07d0282f7472e221e23669c168a0537555c
Signed-off-by: Inki Dae <inki.dae@samsung.com>
include/inference_engine_common.h
include/inference_engine_common_impl.h
include/inference_engine_type.h
src/inference_engine_common_impl.cpp
test/src/inference_engine_profiler.cpp
test/src/inference_engine_tc.cpp

index 7501744cd035ddb56cfb462bca720abe99447ad0..be6b037e08e1085335281d61b1eb72a9ad19b1c8 100755 (executable)
@@ -30,6 +30,16 @@ public:
 
     virtual ~IInferenceEngineCommon() {};
 
+    /**
+     * @brief Set a tensor filter plugin type.
+     * @details See #inference_backend_type_e
+     *          This callback passes the tensor filter plugin type (NNFW or VIVANTE) to the tensor filter plugin of NNStreamer.
+     *
+     * @since_tizen 6.0 (Optional)
+     * @param[in] type One of the plugin types enumerated in #inference_backend_type_e.
+     */
+    virtual int SetPluginType(const int type = 0) { return type; }
+
     /**
      * @brief Set target devices.
      * @details See #inference_target_type_e
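For reference, here is a minimal self-contained sketch of how an NNStreamer-based backend could honor the new hook. The NNStreamerBackend class, its member names, and the SKETCH_* enum stand-ins are illustrative assumptions, not code from this commit:

#include <iostream>

// Simplified stand-ins for values defined in inference_engine_type.h; illustrative only.
enum { SKETCH_BACKEND_VIVANTE = 3, SKETCH_BACKEND_NNFW = 4 };
enum { SKETCH_ERROR_NONE = 0, SKETCH_ERROR_INVALID_PARAMETER = -1 };

// Hypothetical backend: remembers which tensor filter plugin the caller asked for.
class NNStreamerBackend {
public:
    int SetPluginType(const int type)
    {
        // Only plugin types that map onto an NNStreamer tensor filter are accepted.
        if (type != SKETCH_BACKEND_NNFW && type != SKETCH_BACKEND_VIVANTE)
            return SKETCH_ERROR_INVALID_PARAMETER;

        mPluginType = type; // later selects the NNFW or Vivante tensor filter element
        return SKETCH_ERROR_NONE;
    }

private:
    int mPluginType = 0;
};

int main()
{
    NNStreamerBackend backend;
    std::cout << backend.SetPluginType(SKETCH_BACKEND_NNFW) << std::endl; // prints 0
    return 0;
}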
index 08ccf52aeb151dc86fd9403b241ffb2d54e0b0f5..793423686cd63d98ea7d52c31b7d998a81840231 100755 (executable)
@@ -229,7 +229,7 @@ public:
        int DumpProfileToFile(const std::string filename = "dump.txt");
 
 private:
-       int InitBackendEngine(const std::string &backend_path);
+       int InitBackendEngine(const std::string &backend_path, int backend_type);
        int CheckTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers);
        int CheckLayerProperty(inference_engine_layer_property &property);
 
index 8468cc7c64bcb2b3e3265e13058983c17607f26b..33c656e264cacc40cf44769269038f344c225990 100644 (file)
@@ -39,6 +39,7 @@ typedef enum {
     INFERENCE_BACKEND_TFLITE,    /**< TensorFlow-Lite */
     INFERENCE_BACKEND_ARMNN,     /**< ARMNN */
     INFERENCE_BACKEND_VIVANTE,   /** < Vivante */
+    INFERENCE_BACKEND_NNFW,      /** < NNFW */
     INFERENCE_BACKEND_MAX        /**< Backend MAX */
 } inference_backend_type_e;
 
@@ -134,7 +135,8 @@ typedef struct _tensor_t {
  * @since_tizen 6.0
  */
 typedef struct _inference_engine_config {
-    std::string backend_name; /**< a backend name which could be one among supported backends(tflite, opencv, armnn, dldt) */
+    std::string backend_name; /**< a backend name, which should be one of the supported backends (tflite, opencv, armnn, dldt, nnstreamer) */
+    int backend_type; /**< a tensor filter plugin type passed to NNStreamer when the backend is NNStreamer */
     int target_devices; /**< which device or devices to be targeted for inference. (Please, refer to inference_target_type_e) */
     // TODO.
 } inference_engine_config;
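A usage sketch of the extended struct follows; the mirror struct only keeps the example standalone, and the numeric values are placeholders for the real inference_backend_type_e and inference_target_type_e entries:

#include <string>

// Mirrors inference_engine_config above, redeclared only to keep this sketch self-contained.
struct config_sketch {
    std::string backend_name;
    int backend_type;
    int target_devices;
};

int main()
{
    // Ask the NNStreamer backend to drive the NNFW tensor filter plugin.
    config_sketch config = {
        .backend_name = "nnstreamer",
        .backend_type = 4,  // placeholder for INFERENCE_BACKEND_NNFW
        .target_devices = 1 // placeholder for a CPU target bit
    };

    return config.backend_type == 4 ? 0 : 1;
}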
index 61a662698d81fb95037e1bfef8b97ae7d051b53d..ada716f6fcd38f6e0bca4801397a5cdacaba0762 100755 (executable)
@@ -167,7 +167,7 @@ int InferenceEngineCommon::DumpProfileToFile(const std::string filename)
        return INFERENCE_ENGINE_ERROR_NONE;
 }
 
-int InferenceEngineCommon::InitBackendEngine(const std::string &backend_path)
+int InferenceEngineCommon::InitBackendEngine(const std::string &backend_path, int backend_type)
 {
        LOGI("lib: %s", backend_path.c_str());
        mBackendModule = dlopen(backend_path.c_str(), RTLD_NOW);
@@ -196,6 +196,15 @@ int InferenceEngineCommon::InitBackendEngine(const std::string &backend_path)
                return INFERENCE_ENGINE_ERROR_INTERNAL;
        }
 
+	// If the backend is NNStreamer, pass the requested tensor filter plugin type to it.
+       if (backend_type == INFERENCE_BACKEND_NNFW || backend_type == INFERENCE_BACKEND_VIVANTE) {
+               int ret = mBackendHandle->SetPluginType(backend_type);
+               if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+			LOGE("Failed to set a tensor filter plugin type.");
+                       return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+               }
+       }
+
        return INFERENCE_ENGINE_ERROR_NONE;
 }
 
@@ -220,7 +229,7 @@ int InferenceEngineCommon::BindBackend(inference_engine_config *config)
 
     std::string backendLibName = "libinference-engine-" + config->backend_name + ".so";
 
-       int ret = InitBackendEngine(backendLibName);
+       int ret = InitBackendEngine(backendLibName, config->backend_type);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
                return ret;
        }
@@ -257,12 +266,13 @@ int InferenceEngineCommon::BindBackend(int backend_type)
                [INFERENCE_BACKEND_OPENCV] = "opencv",
                [INFERENCE_BACKEND_TFLITE] = "tflite",
                [INFERENCE_BACKEND_ARMNN] = "armnn",
-               [INFERENCE_BACKEND_VIVANTE] = "nnstreamer"
+               [INFERENCE_BACKEND_VIVANTE] = "nnstreamer",
+               [INFERENCE_BACKEND_NNFW] = "nnstreamer"
        };
 
     std::string backendLibName = "libinference-engine-" + backendNameTable[backend_type] + ".so";
 
-       int ret = InitBackendEngine(backendLibName);
+       int ret = InitBackendEngine(backendLibName, backend_type);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
                return ret;
        }
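In other words, both INFERENCE_BACKEND_VIVANTE and INFERENCE_BACKEND_NNFW now resolve to the same libinference-engine-nnstreamer.so, and the concrete backend type is forwarded through SetPluginType() inside InitBackendEngine() so the NNStreamer backend knows which tensor filter plugin to drive. A standalone sketch of the name resolution (the table strings are copied from the diff above; the helper name and placeholder enum are made up):

#include <iostream>
#include <string>

// Placeholder indices for the real inference_backend_type_e values.
enum { SKETCH_OPENCV = 0, SKETCH_TFLITE, SKETCH_ARMNN, SKETCH_VIVANTE, SKETCH_NNFW };

// Mirrors the backendNameTable lookup above: two backend types share one library.
static std::string BackendLibName(int backend_type)
{
    static const char *table[] = { "opencv", "tflite", "armnn", "nnstreamer", "nnstreamer" };
    return "libinference-engine-" + std::string(table[backend_type]) + ".so";
}

int main()
{
    std::cout << BackendLibName(SKETCH_VIVANTE) << std::endl; // libinference-engine-nnstreamer.so
    std::cout << BackendLibName(SKETCH_NNFW) << std::endl;    // libinference-engine-nnstreamer.so
    return 0;
}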
index d46ea1bbe3cfa252ebfb348108077216562041a9..949d729e7979b498ac43d2428bf2dc15630b57d0 100644 (file)
@@ -78,6 +78,7 @@ TEST_P(InferenceEngineTfliteTest, Inference)
        std::cout << test_name << " inference test : backend = " << backend_name << ", target device = " << Target_Formats[target_devices] << std::endl;
        inference_engine_config config = {
                .backend_name = backend_name,
+               .backend_type = 0,
                .target_devices = target_devices
        };
 
@@ -253,6 +254,7 @@ TEST_P(InferenceEngineCaffeTest, Inference)
        std::cout << test_name << " inference test : backend = " << backend_name << ", target device = " << Target_Formats[target_devices] << std::endl;
        inference_engine_config config = {
                .backend_name = backend_name,
+               .backend_type = 0,
                .target_devices = target_devices
        };
 
@@ -430,6 +432,7 @@ TEST_P(InferenceEngineDldtTest, Inference)
        std::cout << test_name << " inference test : backend = " << backend_name << ", target device = " << Target_Formats[target_devices] << std::endl;
        inference_engine_config config = {
                .backend_name = backend_name,
+               .backend_type = 0,
                .target_devices = target_devices
        };
 
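The test updates above and below have to account for the new middle member: with positional aggregate initialization, leaving it out would silently shift target_devices into backend_type. A minimal illustration, with the struct redeclared only to keep the snippet standalone:

#include <string>

struct config_sketch {            // mirrors inference_engine_config
    std::string backend_name;
    int backend_type;             // the new field sits between the two existing ones
    int target_devices;
};

int main()
{
    std::string backend_name = "armnn";
    int target_devices = 1;

    // As in inference_engine_tc.cpp below, the middle field must be supplied
    // explicitly; { backend_name, target_devices } would still compile but would
    // assign target_devices to backend_type instead.
    config_sketch config = { backend_name, 0, target_devices };

    return config.target_devices == target_devices ? 0 : 1;
}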
index 2738c5cadb5339b457476f11ba78a5618d1d824a..d7a37af7ae7c4b02af8b5c3b2c1410553411fa76 100644 (file)
@@ -52,13 +52,13 @@ class InferenceEngineTestCase_G7 : public testing::TestWithParam<ParamType_One_I
 class InferenceEngineTestCase_G8 : public testing::TestWithParam<ParamType_One_Int> { };
 
 static auto InferenceEngineInit_One_Param = [](InferenceEngineCommon *engine, std::string &backend_name) -> int {
-       inference_engine_config config = { backend_name, 0 };
+       inference_engine_config config = { backend_name, 0, 0 };
 
        return engine->BindBackend(&config);
 };
 
 static auto InferenceEngineInit_Two_Params = [](InferenceEngineCommon *engine, std::string &backend_name, int &target_devices) -> int {
-       inference_engine_config config = { backend_name, target_devices };
+       inference_engine_config config = { backend_name, 0, target_devices };
 
        int ret = engine->BindBackend(&config);
        if (ret != INFERENCE_ENGINE_ERROR_NONE)