Add nnstreamer backend type
author: Inki Dae <inki.dae@samsung.com>
Thu, 21 May 2020 05:54:45 +0000 (14:54 +0900)
committer: Inki Dae <inki.dae@samsung.com>
Thu, 21 May 2020 07:34:41 +0000 (16:34 +0900)
Change-Id: I3aaae1c071a5b17a69c6b9a09d36d14f809090fe
Signed-off-by: Inki Dae <inki.dae@samsung.com>
include/inference_engine_type.h
src/inference_engine_common_impl.cpp

index ef7e74f5462a4adab76cf9edf0c24e557261e0fb..8468cc7c64bcb2b3e3265e13058983c17607f26b 100644 (file)
@@ -38,6 +38,7 @@ typedef enum {
     INFERENCE_BACKEND_OPENCV,    /**< OpenCV */
     INFERENCE_BACKEND_TFLITE,    /**< TensorFlow-Lite */
     INFERENCE_BACKEND_ARMNN,     /**< ARMNN */
+    INFERENCE_BACKEND_VIVANTE,   /**< Vivante */
     INFERENCE_BACKEND_MAX        /**< Backend MAX */
 } inference_backend_type_e;
 
@@ -70,6 +71,7 @@ typedef enum {
     INFERENCE_MODEL_DARKNET,         /**< Darknet. *.cfg config file is needed. */
     INFERENCE_MODEL_DLDT,            /**< DLDT. *.xml config file is needed. */
     INFERENCE_MODEL_ONNX,            /**< ONNX */
+    INFERENCE_MODEL_VIVANTE,         /**< Vivante. A model-specific *.so library and an *.nb model file are needed. */
     INFERENCE_MODEL_MAX
 } inference_model_format_e;
 
index f8b902d76d0401cf92bed0f1ec3b05b2233bcd5b..61a662698d81fb95037e1bfef8b97ae7d051b53d 100755 (executable)
@@ -256,7 +256,8 @@ int InferenceEngineCommon::BindBackend(int backend_type)
        std::string backendNameTable[INFERENCE_BACKEND_MAX] = {
                [INFERENCE_BACKEND_OPENCV] = "opencv",
                [INFERENCE_BACKEND_TFLITE] = "tflite",
-               [INFERENCE_BACKEND_ARMNN] = "armnn"
+               [INFERENCE_BACKEND_ARMNN] = "armnn",
+               [INFERENCE_BACKEND_VIVANTE] = "nnstreamer"
        };
 
     std::string backendLibName = "libinference-engine-" + backendNameTable[backend_type] + ".so";