INFERENCE_BACKEND_ONE, /**< On-device Neural Engine. */
INFERENCE_BACKEND_NNTRAINER, /**< NNTrainer. */
INFERENCE_BACKEND_SNPE, /**< SNPE. */
+ INFERENCE_BACKEND_HAILORT, /**< Hailo NPU. */
INFERENCE_BACKEND_MAX /**< Backend MAX */
} inference_backend_type_e;
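A minimal usage sketch (not part of this patch) of how the new backend type might be requested through the existing inference_engine_config / BindBackend() path; the field and function names follow the current inference-engine-interface headers, but the exact call sequence is an assumption.

// Hedged sketch: ask for the Hailo backend explicitly by type.
// Tensor setup, model loading and error handling are omitted.
inference_engine_config config;
config.backend_name = "hailort";                  // optional when backend_type is set
config.backend_type = INFERENCE_BACKEND_HAILORT;
config.target_devices = INFERENCE_TARGET_CUSTOM;  // NPU assumed to be reached via the CUSTOM target

InferenceEngineCommon engine;
if (engine.BindBackend(&config) != INFERENCE_ENGINE_ERROR_NONE)
    LOGE("Failed to bind the hailort backend.");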
INFERENCE_MODEL_VIVANTE, /**< Vivante. model specific so library and nb model files are needed. */
INFERENCE_MODEL_NNTRAINER, /**< NNTrainer. only *.ini file is used. */
INFERENCE_MODEL_SNPE, /**< SNPE. only *.dlc file is used. */
+ INFERENCE_MODEL_HAILORT, /**< Hailo NPU. only *.hef file is used. */
INFERENCE_MODEL_MAX
} inference_model_format_e;
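Continuing that sketch, a hedged example of handing a compiled *.hef network to the common Load() call; the path is a placeholder and Load() is assumed to forward the new format value to the bound backend unchanged.

// Hedged sketch: load a Hailo-compiled model file.
std::vector<std::string> model_paths = { "model.hef" };  // placeholder path
if (engine.Load(model_paths, INFERENCE_MODEL_HAILORT) != INFERENCE_ENGINE_ERROR_NONE)
    LOGE("Failed to load the .hef model.");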
std::map<std::string, inference_backend_npu_type_e> sNpuBackend =
{
{ "VIVANTE", INFERENCE_BACKEND_NPU_VIVANTE },
- { "TRIV2", INFERENCE_BACKEND_NPU_TRIV2}
+ { "TRIV2", INFERENCE_BACKEND_NPU_TRIV2},
+ { "HAILO8L", INFERENCE_BACKEND_NPU_HAILO8L},
+ { "HAILO8", INFERENCE_BACKEND_NPU_HAILO8}
};
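The map above is keyed by the NPU type string declared in the backend ini file. A hedged sketch of the lookup using find(), so an unknown string does not silently insert a new entry; the variable name and the example string are illustrative only.

// Hedged sketch: resolve a declared NPU type string to its enum value.
std::string npu_type_str = "HAILO8";              // e.g. read from the ini file
auto npu = sNpuBackend.find(npu_type_str);
if (npu != sNpuBackend.end())
    LOGD("Declared NPU backend type = %d", npu->second);
else
    LOGE("Unknown NPU backend type: %s", npu_type_str.c_str());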
int sApiFwForTFLITE = -1, sApiFwForARMNN = -1, sApiFwForOPENCV = -1;
int InferenceEngineCommon::UseMLAPI(const int backend_type, const int device_type)
{
if (backend_type == INFERENCE_BACKEND_MLAPI ||
- device_type == INFERENCE_TARGET_CUSTOM ||
backend_type == INFERENCE_BACKEND_ONE ||
backend_type == INFERENCE_BACKEND_NNTRAINER ||
backend_type == INFERENCE_BACKEND_SNPE ||
int InferenceEngineCommon::LoadConfigFile(std::string ini_file_path)
{
int ret = INFERENCE_ENGINE_ERROR_NONE;
- std::string strNpuBackend = "", strApiFwName = "";
if (ini_file_path.empty())
ini_file_path = BACKEND_PATH_INI_FILENAME;
// If NPU type is declared in ini file then pass the type to
// a given inference engine backend.
- if (backend_type == INFERENCE_BACKEND_MLAPI &&
+ if ((backend_type == INFERENCE_BACKEND_MLAPI || backend_type == INFERENCE_BACKEND_HAILORT) &&
device_type == INFERENCE_TARGET_CUSTOM && sBackendForNpu > 0)
backend_type = sBackendForNpu;
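Read in isolation, the override above behaves as in the following hedged restatement; ResolveNpuOverride() is a hypothetical helper name, not code from this patch.

// Hypothetical helper mirroring the condition above: a generic MLAPI or
// HAILORT request on the CUSTOM target is redirected to the NPU backend
// declared in the ini file (sBackendForNpu), if one was declared.
static int ResolveNpuOverride(int backend_type, int device_type, int backend_for_npu)
{
    if ((backend_type == INFERENCE_BACKEND_MLAPI ||
         backend_type == INFERENCE_BACKEND_HAILORT) &&
        device_type == INFERENCE_TARGET_CUSTOM && backend_for_npu > 0)
        return backend_for_npu;

    return backend_type;  // no declared NPU backend; keep the requested one
}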
config->backend_type = -1;
}
+ LOGD("given backend name = %s", config->backend_name.c_str());
+
// If backend_type of config is -1 then update it according to backend_name.
if (config->backend_type == -1) {
std::map<std::string,int> BackendTable;
BackendTable.insert(std::make_pair("one",INFERENCE_BACKEND_ONE));
BackendTable.insert(std::make_pair("nntrainer", INFERENCE_BACKEND_NNTRAINER));
BackendTable.insert(std::make_pair("snpe", INFERENCE_BACKEND_SNPE));
+ BackendTable.insert(std::make_pair("hailort", INFERENCE_BACKEND_HAILORT));
config->backend_type = BackendTable[config->backend_name];
} else {
BackendTable.insert(std::make_pair(INFERENCE_BACKEND_ONE, "one"));
BackendTable.insert(std::make_pair(INFERENCE_BACKEND_NNTRAINER, "nntrainer"));
BackendTable.insert(std::make_pair(INFERENCE_BACKEND_SNPE, "snpe"));
+ BackendTable.insert(std::make_pair(INFERENCE_BACKEND_HAILORT, "hailort"));
config->backend_name = BackendTable[config->backend_type];
}
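A short hedged illustration of the two-way mapping above: callers may fill in either the backend name or the backend type and the surrounding code derives the other; the snippet only shows the intended inputs and outcomes and is not part of the patch.

// Hedged illustration: name-only config, backend_type left unset (-1),
// is resolved to INFERENCE_BACKEND_HAILORT by the first branch above.
inference_engine_config by_name;
by_name.backend_name = "hailort";
by_name.backend_type = -1;
by_name.target_devices = INFERENCE_TARGET_CUSTOM;

// Type-only config: backend_name is derived as "hailort" by the else branch.
inference_engine_config by_type;
by_type.backend_type = INFERENCE_BACKEND_HAILORT;
by_type.target_devices = INFERENCE_TARGET_CUSTOM;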