From 5edfc0dc724cc90e89c147c218f8969f6194bdd1 Mon Sep 17 00:00:00 2001
From: Inki Dae
Date: Tue, 14 Jan 2025 14:59:41 +0900
Subject: [PATCH] add Hailo NPU support

Add Hailo NPU support to the inference engine interface. Register the
Hailo-8 and Hailo-8L NPU types, introduce the HAILORT backend type and
model format (*.hef file), map the "hailort" backend name to
INFERENCE_BACKEND_HAILORT, and route the HailoRT backend with the
CUSTOM device type to the NPU backend declared in the ini file.

Change-Id: I5591dd0251753b10cc19d06e85de880cf5796193
Signed-off-by: Inki Dae
---
 include/inference_engine_private_type.h |  2 ++
 include/inference_engine_type.h         |  2 ++
 src/inference_engine_common_impl.cpp    | 12 ++++++++----
 3 files changed, 12 insertions(+), 4 deletions(-)

diff --git a/include/inference_engine_private_type.h b/include/inference_engine_private_type.h
index dde57dd..0f3eaf1 100644
--- a/include/inference_engine_private_type.h
+++ b/include/inference_engine_private_type.h
@@ -21,6 +21,8 @@ typedef enum {
 	INFERENCE_BACKEND_NPU_NONE = -1,
 	INFERENCE_BACKEND_NPU_VIVANTE, /**< Vivante NPU. */
 	INFERENCE_BACKEND_NPU_TRIV2, /**< TRIV2 NPU. */
+	INFERENCE_BACKEND_NPU_HAILO8L, /**< Hailo-8L NPU. */
+	INFERENCE_BACKEND_NPU_HAILO8, /**< Hailo-8 NPU. */
 	INFERENCE_BACKEND_NPU_MAX
 } inference_backend_npu_type_e;
 
diff --git a/include/inference_engine_type.h b/include/inference_engine_type.h
index 72f6d10..11016a7 100644
--- a/include/inference_engine_type.h
+++ b/include/inference_engine_type.h
@@ -47,6 +47,7 @@ extern "C"
 		INFERENCE_BACKEND_ONE, /** < On-device Neural Engine. */
 		INFERENCE_BACKEND_NNTRAINER, /** < NNTrainer. */
 		INFERENCE_BACKEND_SNPE, /** < SNPE. */
+		INFERENCE_BACKEND_HAILORT, /** < Hailo NPU. */
 		INFERENCE_BACKEND_MAX /**< Backend MAX */
 	} inference_backend_type_e;
 
@@ -82,6 +83,7 @@ extern "C"
 		INFERENCE_MODEL_VIVANTE, /**< Vivante. model specific so library and nb model files are needed. */
 		INFERENCE_MODEL_NNTRAINER, /**< NNTrainer. only *.ini file is used. */
 		INFERENCE_MODEL_SNPE, /**< SNPE. only *.dlc file is used. */
+		INFERENCE_MODEL_HAILORT, /**< Hailo NPU. only *.hef file is used. */
 		INFERENCE_MODEL_MAX
 	} inference_model_format_e;
 
diff --git a/src/inference_engine_common_impl.cpp b/src/inference_engine_common_impl.cpp
index b3ba4eb..628438a 100644
--- a/src/inference_engine_common_impl.cpp
+++ b/src/inference_engine_common_impl.cpp
@@ -65,7 +65,9 @@ namespace Common
 
 	std::map<std::string, int> sNpuBackend = {
 		{ "VIVANTE", INFERENCE_BACKEND_NPU_VIVANTE },
-		{ "TRIV2", INFERENCE_BACKEND_NPU_TRIV2}
+		{ "TRIV2", INFERENCE_BACKEND_NPU_TRIV2},
+		{ "HAILO8L", INFERENCE_BACKEND_NPU_HAILO8L},
+		{ "HAILO8", INFERENCE_BACKEND_NPU_HAILO8}
 	};
 
 	int sApiFwForTFLITE = -1, sApiFwForARMNN = -1, sApiFwForOPENCV = -1;
@@ -97,7 +99,6 @@ namespace Common
 	int InferenceEngineCommon::UseMLAPI(const int backend_type, const int device_type)
 	{
 		if (backend_type == INFERENCE_BACKEND_MLAPI ||
-			device_type == INFERENCE_TARGET_CUSTOM ||
 			backend_type == INFERENCE_BACKEND_ONE ||
 			backend_type == INFERENCE_BACKEND_NNTRAINER ||
 			backend_type == INFERENCE_BACKEND_SNPE ||
@@ -147,7 +148,6 @@ namespace Common
 	int InferenceEngineCommon::LoadConfigFile(std::string ini_file_path)
 	{
 		int ret = INFERENCE_ENGINE_ERROR_NONE;
-		std::string strNpuBackend = "", strApiFwName = "";
 
 		if (ini_file_path.empty())
 			ini_file_path = BACKEND_PATH_INI_FILENAME;
@@ -340,7 +340,7 @@ namespace Common
 
 		// If NPU type is declared in ini file then pass the type to
 		// a given inference engine backend.
-		if (backend_type == INFERENCE_BACKEND_MLAPI &&
+		if ((backend_type == INFERENCE_BACKEND_MLAPI || backend_type == INFERENCE_BACKEND_HAILORT) &&
 			device_type == INFERENCE_TARGET_CUSTOM && sBackendForNpu > 0)
 			backend_type = sBackendForNpu;
 
@@ -388,6 +388,8 @@ namespace Common
 			config->backend_type = -1;
 		}
 
+		LOGD("given backend name = %s", config->backend_name.c_str());
+
 		// If backend_type of config is -1 then update it according to backend_name.
 		if (config->backend_type == -1) {
 			std::map<std::string, int> BackendTable;
@@ -399,6 +401,7 @@ namespace Common
 			BackendTable.insert(std::make_pair("one",INFERENCE_BACKEND_ONE));
 			BackendTable.insert(std::make_pair("nntrainer", INFERENCE_BACKEND_NNTRAINER));
 			BackendTable.insert(std::make_pair("snpe", INFERENCE_BACKEND_SNPE));
+			BackendTable.insert(std::make_pair("hailort", INFERENCE_BACKEND_HAILORT));
 
 			config->backend_type = BackendTable[config->backend_name];
 		} else {
@@ -411,6 +414,7 @@ namespace Common
 			BackendTable.insert(std::make_pair(INFERENCE_BACKEND_ONE, "one"));
 			BackendTable.insert(std::make_pair(INFERENCE_BACKEND_NNTRAINER, "nntrainer"));
 			BackendTable.insert(std::make_pair(INFERENCE_BACKEND_SNPE, "snpe"));
+			BackendTable.insert(std::make_pair(INFERENCE_BACKEND_HAILORT, "hailort"));
 
 			config->backend_name = BackendTable[config->backend_type];
 		}
-- 
2.34.1
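
Note for reviewers (not part of the patch): below is a minimal, standalone C++ sketch of the backend-resolution behaviour this patch introduces, useful for sanity-checking the new condition. It only mirrors the enum entries, the "hailort" table mapping, and the MLAPI/HAILORT-plus-CUSTOM override shown in the diff; the ResolveBackend() helper, the enum numeric values, and the INFERENCE_TARGET_CUSTOM constant are illustrative assumptions, not the library's actual definitions.

// Standalone sketch only -- not library source. Enum entries mirror the
// headers touched by this patch; numeric values, INFERENCE_TARGET_CUSTOM
// and ResolveBackend() are assumptions made for illustration.
#include <iostream>
#include <map>
#include <string>

enum inference_backend_type_e {
	INFERENCE_BACKEND_MLAPI = 0,
	INFERENCE_BACKEND_ONE,
	INFERENCE_BACKEND_NNTRAINER,
	INFERENCE_BACKEND_SNPE,
	INFERENCE_BACKEND_HAILORT       /* backend type added by this patch */
};

enum inference_backend_npu_type_e {
	INFERENCE_BACKEND_NPU_NONE = -1,
	INFERENCE_BACKEND_NPU_VIVANTE,
	INFERENCE_BACKEND_NPU_TRIV2,
	INFERENCE_BACKEND_NPU_HAILO8L,  /* NPU types added by this patch */
	INFERENCE_BACKEND_NPU_HAILO8
};

static const int INFERENCE_TARGET_CUSTOM = 1 << 3;  /* placeholder value */

// Mimics the ini handling extended by the patch: resolve a backend name to
// a backend id, then let an NPU type declared in the ini file override it
// when the MLAPI or HailoRT backend is used with the CUSTOM target.
static int ResolveBackend(const std::string &backend_name, int device_type,
                          const std::string &npu_type_from_ini)
{
	static const std::map<std::string, int> backend_table = {
		{ "mlapi", INFERENCE_BACKEND_MLAPI },
		{ "one", INFERENCE_BACKEND_ONE },
		{ "nntrainer", INFERENCE_BACKEND_NNTRAINER },
		{ "snpe", INFERENCE_BACKEND_SNPE },
		{ "hailort", INFERENCE_BACKEND_HAILORT }   /* mapping added by patch */
	};
	static const std::map<std::string, int> npu_table = {
		{ "VIVANTE", INFERENCE_BACKEND_NPU_VIVANTE },
		{ "TRIV2", INFERENCE_BACKEND_NPU_TRIV2 },
		{ "HAILO8L", INFERENCE_BACKEND_NPU_HAILO8L },
		{ "HAILO8", INFERENCE_BACKEND_NPU_HAILO8 }
	};

	int backend_type = backend_table.at(backend_name);
	auto npu = npu_table.find(npu_type_from_ini);
	int npu_backend = (npu != npu_table.end()) ? npu->second
						   : INFERENCE_BACKEND_NPU_NONE;

	// Same shape as the condition updated by this patch: MLAPI or HAILORT
	// backend + CUSTOM device + NPU type declared in the ini file.
	if ((backend_type == INFERENCE_BACKEND_MLAPI ||
	     backend_type == INFERENCE_BACKEND_HAILORT) &&
	    device_type == INFERENCE_TARGET_CUSTOM && npu_backend > 0)
		backend_type = npu_backend;

	return backend_type;
}

int main(void)
{
	// "hailort" + CUSTOM target + HAILO8 declared in the ini file
	// resolves to INFERENCE_BACKEND_NPU_HAILO8 (3 with the values above).
	std::cout << ResolveBackend("hailort", INFERENCE_TARGET_CUSTOM, "HAILO8")
		  << std::endl;
	return 0;
}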