Consider tensor filter plugin type for NNFW
author Inki Dae <inki.dae@samsung.com>
Mon, 22 Jun 2020 02:35:56 +0000 (11:35 +0900)
committer Inki Dae <inki.dae@samsung.com>
Mon, 22 Jun 2020 02:35:56 +0000 (11:35 +0900)
This patch adds a new backend type for NNFW support via NNStreamer.

MediaVision will support Vivante NPU and NNFW (the in-house NN Runtime)
through ML Single API of NNStreamer.

To support this, the patch also adds a new member, backend_type, to
inference_engine_config; it is used to tell the ML Single API of
NNStreamer which tensor filter — Vivante NPU or NNFW — should be used.

Change-Id: I532a44b16155dda0ab1ebc68dfc208645944995c
Signed-off-by: Inki Dae <inki.dae@samsung.com>
include/mv_inference_type.h
mv_inference/inference/src/Inference.cpp

index c014e29b05f56415b90c33f1d89b70ecf7380d33..46fb65b7381f0ea4de08385a0c0d6c6cff69df8b 100644 (file)
@@ -45,6 +45,7 @@ typedef enum {
     MV_INFERENCE_BACKEND_TFLITE,    /**< TensorFlow-Lite */
     MV_INFERENCE_BACKEND_ARMNN,     /**< ARMNN (Since 6.0) */
     MV_INFERENCE_BACKEND_VIVANTE,   /**< Vivante (Since 6.0) */
+       MV_INFERENCE_BACKEND_NNFW,      /**< NNFW (Since 6.0) */
     MV_INFERENCE_BACKEND_MAX        /**< Backend MAX */
 } mv_inference_backend_type_e;
 
index 7c7b52301c2a136c0c61641cc85a9e22bcdf2bf5..8540915a1bf46ba7d23725809a25a5e06fadaed5 100755 (executable)
@@ -84,6 +84,7 @@ Inference::Inference() :
        mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_TFLITE, std::make_pair("tflite", false)));
        mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_ARMNN, std::make_pair("armnn", false)));
     mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_VIVANTE, std::make_pair("nnstreamer", false)));
+       mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_NNFW, std::make_pair("nnstreamer", false)));
 
        CheckSupportedInferenceBackend();
 
@@ -737,6 +738,7 @@ int Inference::Bind(void)
 
        inference_engine_config config = {
                .backend_name = backendName,
+               .backend_type = mConfig.mBackedType,
                // As a default, Target device is CPU. If user defined desired device type in json file
                // then the device type will be set by Load callback.
                .target_devices = mConfig.mTargetTypes,