LOGI("SNPE tensor filter will be used.");
return std::make_tuple(ML_NNFW_TYPE_SNPE, ML_NNFW_HW_ANY);
+ case INFERENCE_BACKEND_NNTRAINER:
+ LOGI("NNTRAINER tensor filter will be used.");
+ return std::make_tuple(ML_NNFW_TYPE_NNTR_INF, ML_NNFW_HW_ANY);
+
default:
LOGE("Invalid plugin type.");
- throw std::invalid_argument("invalid tensor type.");
+ throw std::invalid_argument("invalid plugin type.");
case INFERENCE_BACKEND_TFLITE:
/* fall through */
case INFERENCE_BACKEND_SNPE:
+ /* fall through */
+ case INFERENCE_BACKEND_NNTRAINER:
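+ /* NNTRAINER also loads a single model file, so it reuses the path check. */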
if (!IsFileReadable(model_paths[0]))
throw std::runtime_error("invalid path");
return model_paths[0];
int err = ml_single_open_full(&mSingle, model_str.c_str(), in_info, out_info,
nnfw_type, nnfw_hw, GetCustomProp());
if (err != ML_ERROR_NONE) {
- LOGE("Failed to request ml_single_open(%d).", err);
+ LOGE("Failed to request ml_single_open_full(%d).", err);
return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
}
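
For reference, the (nnfw_type, nnfw_hw) tuple returned by the first hunk is exactly what the ml_single_open_full() call above consumes. The sketch below shows that flow for the new backend in isolation; OpenNntrainerModel() and the nnstreamer-single.h include path are illustrative assumptions, not part of this patch.

#include <nnstreamer-single.h> /* single-shot ML API; include path assumed */
#include <string>

/* Hypothetical helper: opens an NNTrainer model the way the patched
 * backend does once INFERENCE_BACKEND_NNTRAINER is selected. */
static int OpenNntrainerModel(ml_single_h *single,
                              const std::string &model_path,
                              ml_tensors_info_h in_info,
                              ml_tensors_info_h out_info)
{
        /* With this patch, INFERENCE_BACKEND_NNTRAINER maps to the
         * (ML_NNFW_TYPE_NNTR_INF, ML_NNFW_HW_ANY) pair. */
        ml_nnfw_type_e nnfw_type = ML_NNFW_TYPE_NNTR_INF;
        ml_nnfw_hw_e nnfw_hw = ML_NNFW_HW_ANY;

        /* NULL custom options here; the patched backend passes
         * GetCustomProp() in this position instead. */
        return ml_single_open_full(single, model_path.c_str(), in_info,
                                   out_info, nnfw_type, nnfw_hw, NULL);
}

ML_NNFW_HW_ANY leaves device selection to ml-api, matching how the existing TFLITE and SNPE cases are handled above.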