INFERENCE_BACKEND_ARMNN, /**< ARMNN */
INFERENCE_BACKEND_MLAPI, /**< ML Single API of NNStreamer. */
INFERENCE_BACKEND_ONE, /**< On-device Neural Engine. */
+ INFERENCE_BACKEND_NNTRAINER, /**< NNTrainer. */
INFERENCE_BACKEND_MAX /**< Backend MAX */
} inference_backend_type_e;
Name: inference-engine-interface
Summary: Interface of inference engines
-Version: 0.0.3
-Release: 14
+Version: 0.1.0
+Release: 0
Group: Multimedia/Framework
License: Apache-2.0
Source0: %{name}-%{version}.tar.gz
if (backend_type == INFERENCE_BACKEND_MLAPI ||
device_type == INFERENCE_TARGET_CUSTOM ||
backend_type == INFERENCE_BACKEND_ONE ||
+ backend_type == INFERENCE_BACKEND_NNTRAINER ||
(backend_type == INFERENCE_BACKEND_TFLITE && sApiFwForTFLITE == INFERENCE_BACKEND_MLAPI) ||
(backend_type == INFERENCE_BACKEND_ARMNN && sApiFwForARMNN == INFERENCE_BACKEND_MLAPI))
return 1;
BackendTable.insert(std::make_pair("opencv",INFERENCE_BACKEND_OPENCV));
BackendTable.insert(std::make_pair("mlapi",INFERENCE_BACKEND_MLAPI));
BackendTable.insert(std::make_pair("one",INFERENCE_BACKEND_ONE));
+ BackendTable.insert(std::make_pair("nntrainer", INFERENCE_BACKEND_NNTRAINER));
config->backend_type = BackendTable[config->backend_name];
}