Consider tensor filter plugin type for NNFW
author: Inki Dae <inki.dae@samsung.com>
Wed, 27 May 2020 07:19:23 +0000 (16:19 +0900)
committer: Inki Dae <inki.dae@samsung.com>
Thu, 25 Jun 2020 23:58:22 +0000 (08:58 +0900)
This patch adds a new backend type for NNFW support via NNStreamer.

MediaVision will support Vivante NPU and NNFW (in-house NN Runtime)
through ML Single API of NNStreamer.

For this, this patch also adds a new member, backend_type, in
inference_engine_config, which is used to let ML Single API of
NNStreamer know which tensor filter - Vivante NPU or NNFW - should be used.

Change-Id: I5d67fcc36d8999513c05590cccb290d07539ab33
Signed-off-by: Inki Dae <inki.dae@samsung.com>
include/mv_inference_type.h
mv_inference/inference/src/Inference.cpp

index c014e29..68bd570 100644 (file)
@@ -45,6 +45,7 @@ typedef enum {
     MV_INFERENCE_BACKEND_TFLITE,    /**< TensorFlow-Lite */
     MV_INFERENCE_BACKEND_ARMNN,     /**< ARMNN (Since 6.0) */
     MV_INFERENCE_BACKEND_VIVANTE,   /**< Vivante (Since 6.0) */
+    MV_INFERENCE_BACKEND_NNFW,      /**< NNFW (Since 6.0) */
     MV_INFERENCE_BACKEND_MAX        /**< Backend MAX */
 } mv_inference_backend_type_e;
 
index e330de2..68cae92 100755 (executable)
@@ -84,6 +84,7 @@ Inference::Inference() :
        mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_TFLITE, std::make_pair("tflite", false)));
        mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_ARMNN, std::make_pair("armnn", false)));
        mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_VIVANTE, std::make_pair("nnstreamer", false)));
+       mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_NNFW, std::make_pair("nnstreamer", false)));
 
        CheckSupportedInferenceBackend();
 
@@ -726,6 +727,7 @@ int Inference::Bind(void)
 
        inference_engine_config config = {
                .backend_name = backendName,
+               .backend_type = mConfig.mBackedType,
                // As a default, Target device is CPU. If user defined desired device type in json file
                // then the device type will be set by Load callback.
                .target_devices = mConfig.mTargetTypes,