From: Inki Dae
Date: Wed, 27 May 2020 07:16:58 +0000 (+0900)
Subject: Consider tensor filter plugin type for nnstreamer
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=01b0c14057ecdda25f84ac33d439c6bf0229d90b;p=platform%2Fcore%2Fmultimedia%2Finference-engine-interface.git

Consider tensor filter plugin type for nnstreamer

Change-Id: I4b80d07d0282f7472e221e23669c168a0537555c
Signed-off-by: Inki Dae
---

diff --git a/include/inference_engine_common.h b/include/inference_engine_common.h
index 7501744..be6b037 100755
--- a/include/inference_engine_common.h
+++ b/include/inference_engine_common.h
@@ -30,6 +30,16 @@ public:
 
 	virtual ~IInferenceEngineCommon() {};
 
+	/**
+	 * @brief Set a tensor filter plugin type.
+	 * @details See #inference_backend_type_e
+	 *          This callback passes a tensor filter plugin type (NNFW or VIVANTE) to the tensor filter plugin for NNStreamer.
+	 *
+	 * @since_tizen 6.0 (Optional)
+	 * @param[in] type A plugin type, which should be one of the values enumerated in inference_backend_type_e.
+	 */
+	virtual int SetPluginType(const int type = 0) { return type; }
+
 	/**
 	 * @brief Set target devices.
 	 * @details See #inference_target_type_e

diff --git a/include/inference_engine_common_impl.h b/include/inference_engine_common_impl.h
index 08ccf52..7934236 100755
--- a/include/inference_engine_common_impl.h
+++ b/include/inference_engine_common_impl.h
@@ -229,7 +229,7 @@ public:
 	int DumpProfileToFile(const std::string filename = "dump.txt");
 
 private:
-	int InitBackendEngine(const std::string &backend_path);
+	int InitBackendEngine(const std::string &backend_path, int backend_type);
 	int CheckTensorBuffers(std::vector &buffers);
 	int CheckLayerProperty(inference_engine_layer_property &property);

diff --git a/include/inference_engine_type.h b/include/inference_engine_type.h
index 8468cc7..33c656e 100644
--- a/include/inference_engine_type.h
+++ b/include/inference_engine_type.h
@@ -39,6 +39,7 @@ typedef enum {
 	INFERENCE_BACKEND_TFLITE,  /**< TensorFlow-Lite */
 	INFERENCE_BACKEND_ARMNN,   /**< ARMNN */
 	INFERENCE_BACKEND_VIVANTE, /** < Vivante */
+	INFERENCE_BACKEND_NNFW,    /** < NNFW */
 	INFERENCE_BACKEND_MAX      /**< Backend MAX */
 } inference_backend_type_e;
 
@@ -134,7 +135,8 @@ typedef struct _tensor_t {
  * @since_tizen 6.0
  */
 typedef struct _inference_engine_config {
-	std::string backend_name; /**< a backend name which could be one among supported backends(tflite, opencv, armnn, dldt) */
+	std::string backend_name; /**< a backend name, one of the supported backends (tflite, opencv, armnn, dldt, nnstreamer) */
+	int backend_type; /**< a tensor filter plugin type for NNStreamer if the backend is NNStreamer. */
 	int target_devices; /**< which device or devices to be targeted for inference. (Please, refer to inference_target_type_e) */
 	// TODO.
 } inference_engine_config;

diff --git a/src/inference_engine_common_impl.cpp b/src/inference_engine_common_impl.cpp
index 61a6626..ada716f 100755
--- a/src/inference_engine_common_impl.cpp
+++ b/src/inference_engine_common_impl.cpp
@@ -167,7 +167,7 @@ int InferenceEngineCommon::DumpProfileToFile(const std::string filename)
 	return INFERENCE_ENGINE_ERROR_NONE;
 }
 
-int InferenceEngineCommon::InitBackendEngine(const std::string &backend_path)
+int InferenceEngineCommon::InitBackendEngine(const std::string &backend_path, int backend_type)
 {
 	LOGI("lib: %s", backend_path.c_str());
 	mBackendModule = dlopen(backend_path.c_str(), RTLD_NOW);
@@ -196,6 +196,15 @@ int InferenceEngineCommon::InitBackendEngine(const std::string &backend_path)
 		return INFERENCE_ENGINE_ERROR_INTERNAL;
 	}
 
+	// If a backend is nnstreamer then set a tensor filter plugin type.
+	if (backend_type == INFERENCE_BACKEND_NNFW || backend_type == INFERENCE_BACKEND_VIVANTE) {
+		int ret = mBackendHandle->SetPluginType(backend_type);
+		if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+			LOGE("Failed to set a tensor filter plugin.");
+			return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+		}
+	}
+
 	return INFERENCE_ENGINE_ERROR_NONE;
 }
 
@@ -220,7 +229,7 @@ int InferenceEngineCommon::BindBackend(inference_engine_config *config)
 	std::string backendLibName = "libinference-engine-" + config->backend_name + ".so";
 
-	int ret = InitBackendEngine(backendLibName);
+	int ret = InitBackendEngine(backendLibName, config->backend_type);
 	if (ret != INFERENCE_ENGINE_ERROR_NONE) {
 		return ret;
 	}
@@ -257,12 +266,13 @@ int InferenceEngineCommon::BindBackend(int backend_type)
 		[INFERENCE_BACKEND_OPENCV] = "opencv",
 		[INFERENCE_BACKEND_TFLITE] = "tflite",
 		[INFERENCE_BACKEND_ARMNN] = "armnn",
-		[INFERENCE_BACKEND_VIVANTE] = "nnstreamer"
+		[INFERENCE_BACKEND_VIVANTE] = "nnstreamer",
+		[INFERENCE_BACKEND_NNFW] = "nnstreamer"
 	};
 
 	std::string backendLibName = "libinference-engine-" + backendNameTable[backend_type] + ".so";
 
-	int ret = InitBackendEngine(backendLibName);
+	int ret = InitBackendEngine(backendLibName, backend_type);
 	if (ret != INFERENCE_ENGINE_ERROR_NONE) {
 		return ret;
 	}

diff --git a/test/src/inference_engine_profiler.cpp b/test/src/inference_engine_profiler.cpp
index d46ea1b..949d729 100644
--- a/test/src/inference_engine_profiler.cpp
+++ b/test/src/inference_engine_profiler.cpp
@@ -78,6 +78,7 @@ TEST_P(InferenceEngineTfliteTest, Inference)
 	std::cout << test_name << " inference test : backend = " << backend_name << ", target device = " << Target_Formats[target_devices] << std::endl;
 
 	inference_engine_config config = { .backend_name = backend_name,
+					   .backend_type = 0,
 					   .target_devices = target_devices };
 
@@ -253,6 +254,7 @@ TEST_P(InferenceEngineCaffeTest, Inference)
 	std::cout << test_name << " inference test : backend = " << backend_name << ", target device = " << Target_Formats[target_devices] << std::endl;
 
 	inference_engine_config config = { .backend_name = backend_name,
+					   .backend_type = 0,
 					   .target_devices = target_devices };
 
@@ -430,6 +432,7 @@ TEST_P(InferenceEngineDldtTest, Inference)
 	std::cout << test_name << " inference test : backend = " << backend_name << ", target device = " << Target_Formats[target_devices] << std::endl;
 
 	inference_engine_config config = { .backend_name = backend_name,
+					   .backend_type = 0,
 					   .target_devices = target_devices };
 
diff --git a/test/src/inference_engine_tc.cpp b/test/src/inference_engine_tc.cpp
index 2738c5c..d7a37af 100644
--- a/test/src/inference_engine_tc.cpp
+++ b/test/src/inference_engine_tc.cpp
@@ -52,13 +52,13 @@ class InferenceEngineTestCase_G7 : public testing::TestWithParam { };
 
 static auto InferenceEngineInit_One_Param = [](InferenceEngineCommon *engine, std::string &backend_name) -> int {
-	inference_engine_config config = { backend_name, 0 };
+	inference_engine_config config = { backend_name, 0, 0 };
 
 	return engine->BindBackend(&config);
 };
 
 static auto InferenceEngineInit_Two_Params = [](InferenceEngineCommon *engine, std::string &backend_name, int &target_devices) -> int {
-	inference_engine_config config = { backend_name, target_devices };
+	inference_engine_config config = { backend_name, 0, target_devices };
 
 	int ret = engine->BindBackend(&config);
 	if (ret != INFERENCE_ENGINE_ERROR_NONE)
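
Note (not part of the patch): below is a minimal caller-side sketch of how the new backend_type field could be used to select the NNFW tensor filter plugin through the nnstreamer backend. Only BindBackend(&config) and the config layout are confirmed by this change; the function name BindNnfwViaNnstreamer, the namespace InferenceEngineInterface::Common, the INFERENCE_TARGET_CPU value, and the UnbindBackend() call are assumptions for illustration and do not appear in the diff above.

/* Hypothetical usage sketch, under the assumptions stated above. */
#include "inference_engine_common_impl.h"
#include "inference_engine_type.h"

using namespace InferenceEngineInterface::Common; /* assumed namespace */

int BindNnfwViaNnstreamer(void)
{
	InferenceEngineCommon engine;

	/* backend_name selects libinference-engine-nnstreamer.so;
	 * backend_type tells that backend which tensor filter plugin to use,
	 * as introduced by this patch. */
	inference_engine_config config = {
		.backend_name = "nnstreamer",
		.backend_type = INFERENCE_BACKEND_NNFW,
		.target_devices = INFERENCE_TARGET_CPU /* assumed inference_target_type_e value */
	};

	int ret = engine.BindBackend(&config);
	if (ret != INFERENCE_ENGINE_ERROR_NONE)
		return ret;

	/* ... load a model and run inference here ... */

	engine.UnbindBackend(); /* assumed counterpart of BindBackend() */

	return INFERENCE_ENGINE_ERROR_NONE;
}

With a zero backend_type, InitBackendEngine() skips SetPluginType(), so existing callers such as the test cases above keep their current behavior.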