add Hailo NPU support
author: Inki Dae <inki.dae@samsung.com>
Tue, 14 Jan 2025 05:59:41 +0000 (14:59 +0900)
committer: Inki Dae <inki.dae@samsung.com>
Tue, 14 Jan 2025 05:59:41 +0000 (14:59 +0900)
Change-Id: I5591dd0251753b10cc19d06e85de880cf5796193
Signed-off-by: Inki Dae <inki.dae@samsung.com>
include/inference_engine_private_type.h
include/inference_engine_type.h
src/inference_engine_common_impl.cpp

index dde57ddaeb9376c30669232cba91b4f76d96d6e9..0f3eaf18c8dbb68c46378c2b6aaf1f4fb98b32c4 100644 (file)
@@ -21,6 +21,8 @@ typedef enum {
        INFERENCE_BACKEND_NPU_NONE = -1,
        INFERENCE_BACKEND_NPU_VIVANTE,  /**< Vivante NPU. */
        INFERENCE_BACKEND_NPU_TRIV2,    /**< TRIV2 NPU. */
+       INFERENCE_BACKEND_NPU_HAILO8L,  /**< Hailo-8L NPU. */
+       INFERENCE_BACKEND_NPU_HAILO8,   /**< Hailo-8 NPU. */
        INFERENCE_BACKEND_NPU_MAX
 } inference_backend_npu_type_e;
 
index 72f6d10b7f54aaa21444f60db6f097d27f5a9d9d..11016a746a4afd9cfab70ada00107bf495d2dd17 100644 (file)
@@ -47,6 +47,7 @@ extern "C"
                INFERENCE_BACKEND_ONE, /** < On-device Neural Engine. */
                INFERENCE_BACKEND_NNTRAINER, /** < NNTrainer. */
                INFERENCE_BACKEND_SNPE, /** < SNPE. */
+               INFERENCE_BACKEND_HAILORT, /** < Hailo NPU. */
                INFERENCE_BACKEND_MAX /**< Backend MAX */
        } inference_backend_type_e;
 
@@ -82,6 +83,7 @@ extern "C"
                INFERENCE_MODEL_VIVANTE, /**< Vivante. model specific so library and nb model files are needed. */
                INFERENCE_MODEL_NNTRAINER, /**< NNTrainer. only *.ini file is used. */
                INFERENCE_MODEL_SNPE, /**< SNPE. only *.dlc file is used. */
+               INFERENCE_MODEL_HAILORT, /**< Hailo NPU. only *.hef file is used. */
                INFERENCE_MODEL_MAX
        } inference_model_format_e;
 
index b3ba4eb7c154df44ddff3fe9983575895e9846be..628438a75ddb8c75a7adec7cd0bcb0888b084005 100644 (file)
@@ -65,7 +65,9 @@ namespace Common
        std::map<std::string, inference_backend_npu_type_e> sNpuBackend =
        {
                { "VIVANTE", INFERENCE_BACKEND_NPU_VIVANTE },
-               { "TRIV2", INFERENCE_BACKEND_NPU_TRIV2}
+               { "TRIV2", INFERENCE_BACKEND_NPU_TRIV2},
+               { "HAILO8L", INFERENCE_BACKEND_NPU_HAILO8L},
+               { "HAILO8", INFERENCE_BACKEND_NPU_HAILO8}
        };
 
        int sApiFwForTFLITE = -1, sApiFwForARMNN = -1, sApiFwForOPENCV = -1;
@@ -97,7 +99,6 @@ namespace Common
        int InferenceEngineCommon::UseMLAPI(const int backend_type, const int device_type)
        {
                if (backend_type == INFERENCE_BACKEND_MLAPI ||
-                               device_type == INFERENCE_TARGET_CUSTOM ||
                                backend_type == INFERENCE_BACKEND_ONE ||
                                backend_type == INFERENCE_BACKEND_NNTRAINER ||
                                backend_type == INFERENCE_BACKEND_SNPE ||
@@ -147,7 +148,6 @@ namespace Common
        int InferenceEngineCommon::LoadConfigFile(std::string ini_file_path)
        {
                int ret = INFERENCE_ENGINE_ERROR_NONE;
-               std::string strNpuBackend = "", strApiFwName = "";
 
                if (ini_file_path.empty())
                        ini_file_path = BACKEND_PATH_INI_FILENAME;
@@ -340,7 +340,7 @@ namespace Common
 
                // If NPU type is declared in ini file then pass the type to
                // a given inference engine backend.
-               if (backend_type == INFERENCE_BACKEND_MLAPI &&
+               if ((backend_type == INFERENCE_BACKEND_MLAPI || backend_type == INFERENCE_BACKEND_HAILORT) &&
                                device_type == INFERENCE_TARGET_CUSTOM && sBackendForNpu > 0)
                        backend_type = sBackendForNpu;
 
@@ -388,6 +388,8 @@ namespace Common
                                config->backend_type = -1;
                }
 
+               LOGD("given backend name = %s", config->backend_name.c_str());
+
                // If backend_type of config is -1 then update it according to backend_name.
                if (config->backend_type == -1) {
                        std::map<std::string,int> BackendTable;
@@ -399,6 +401,7 @@ namespace Common
                        BackendTable.insert(std::make_pair("one",INFERENCE_BACKEND_ONE));
                        BackendTable.insert(std::make_pair("nntrainer", INFERENCE_BACKEND_NNTRAINER));
                        BackendTable.insert(std::make_pair("snpe", INFERENCE_BACKEND_SNPE));
+                       BackendTable.insert(std::make_pair("hailort", INFERENCE_BACKEND_HAILORT));
 
                        config->backend_type = BackendTable[config->backend_name];
                } else {
@@ -411,6 +414,7 @@ namespace Common
                        BackendTable.insert(std::make_pair(INFERENCE_BACKEND_ONE, "one"));
                        BackendTable.insert(std::make_pair(INFERENCE_BACKEND_NNTRAINER, "nntrainer"));
                        BackendTable.insert(std::make_pair(INFERENCE_BACKEND_SNPE, "snpe"));
+                       BackendTable.insert(std::make_pair(INFERENCE_BACKEND_HAILORT, "hailort"));
 
                        config->backend_name = BackendTable[config->backend_type];
                }