mv_inference: Change in-house runtime backend name
author: Inki Dae <inki.dae@samsung.com>
Mon, 22 Jun 2020 02:49:53 +0000 (11:49 +0900)
committer: Inki Dae <inki.dae@samsung.com>
Mon, 22 Jun 2020 02:49:53 +0000 (11:49 +0900)
The official name of NNFW is ONE (On-device Neural Engine),
so use it instead of NNFW.

Change-Id: Iefbab8ed475be0037fb3458842093d59fad418d8
Signed-off-by: Inki Dae <inki.dae@samsung.com>
include/mv_inference_type.h
mv_inference/inference/src/Inference.cpp
mv_inference/inference/src/mv_inference_open.cpp
test/testsuites/inference/inference_test_suite.c

index 434fabaddfed9f02245907ad33a86aff612d072b..ff3cc27abb6e34361680fba2cf846cf6a321aa60 100644 (file)
@@ -45,7 +45,7 @@ typedef enum {
     MV_INFERENCE_BACKEND_TFLITE,    /**< TensorFlow-Lite */
     MV_INFERENCE_BACKEND_ARMNN,     /**< ARMNN (Since 6.0) */
     MV_INFERENCE_BACKEND_MLAPI,     /**< Vivante (Since 6.0) */
-       MV_INFERENCE_BACKEND_NNFW,      /**< NNFW (Since 6.0) */
+       MV_INFERENCE_BACKEND_ONE,       /**< On-device Neural Engine (Since 6.0) */
     MV_INFERENCE_BACKEND_MAX        /**< Backend MAX */
 } mv_inference_backend_type_e;
 
index 7235bddb6fb0858ef6d301ca03d0cc344f150fe1..f7c928a4e35174c05b8fc929eb1f5f07debf8b93 100755 (executable)
@@ -84,7 +84,7 @@ Inference::Inference() :
        mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_TFLITE, std::make_pair("tflite", false)));
        mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_ARMNN, std::make_pair("armnn", false)));
     mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_MLAPI, std::make_pair("mlapi", false)));
-       mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_NNFW, std::make_pair("mlapi", false)));
+       mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_ONE, std::make_pair("mlapi", false)));
 
        CheckSupportedInferenceBackend();
 
index c23119285dbca2d2aff652756c0ede254241b9c2..08261e05ab2342ec6651b8120621ea0f102972da 100755 (executable)
@@ -187,7 +187,7 @@ int mv_inference_configure_model_open(mv_inference_h infer, mv_engine_config_h e
                  backendType < MV_INFERENCE_BACKEND_MAX) &&
                  (backendType != MV_INFERENCE_BACKEND_TFLITE) &&
                  (backendType != MV_INFERENCE_BACKEND_ARMNN) &&
-                 (backendType != MV_INFERENCE_BACKEND_NNFW)) {
+                 (backendType != MV_INFERENCE_BACKEND_ONE)) {
                if ( access(modelConfigFilePath, F_OK)) {
                        LOGE("modelConfigFilePath in [%s] ", modelConfigFilePath);
                        ret = MEDIA_VISION_ERROR_INVALID_PATH;
index a4ade66231b65d71fabb3b421905c1e541aa266a..e081e014a3895c0836f9f95f611e0b96ddc47806 100644 (file)
@@ -975,7 +975,7 @@ int perform_vivante_inceptionv3_config(mv_engine_config_h *engine_cfg)
     return err;
 }
 
-int perform_nnfw_mobilenetv1_quant_config(mv_engine_config_h *engine_cfg)
+int perform_one_mobilenetv1_quant_config(mv_engine_config_h *engine_cfg)
 {
        int err = MEDIA_VISION_ERROR_NONE;
 
@@ -1014,7 +1014,7 @@ int perform_nnfw_mobilenetv1_quant_config(mv_engine_config_h *engine_cfg)
                        handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.6);
 
        mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE,
-                                                                          MV_INFERENCE_BACKEND_NNFW);
+                                                                          MV_INFERENCE_BACKEND_ONE);
 
        mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE,
                                                                           MV_INFERENCE_TARGET_CPU);
@@ -1132,7 +1132,7 @@ int perform_image_classification()
                              "OpenCV(cpu + Squeezenet)",
                              "ARMNN(cpu + Mobilenet)",
                                                         "Vivante(NPU + Inceptionv3)",
-                                                        "NNFW(cpu + Mobilenet_Q)",
+                                                        "ONE(cpu + Mobilenet_Q)",
                              "Prepare",
                              "Run",
                              "Back"};
@@ -1210,14 +1210,14 @@ int perform_image_classification()
             break;
                case 6:
                {
-                       // perform NNFW
+                       // perform ONE(On-device Neural Engine)
                        if (engine_cfg) {
                                int err2 = mv_destroy_engine_config(engine_cfg);
                                if (err2 != MEDIA_VISION_ERROR_NONE)
                                        printf("Fail to destroy engine_cfg [err:%i]\n", err2);
                        }
 
-                       err = perform_nnfw_mobilenetv1_quant_config(&engine_cfg);
+                       err = perform_one_mobilenetv1_quant_config(&engine_cfg);
                }
                        break;
         case 7: