}
}
+ /// @brief Returns the custom-property string handed to ml_single_open_full().
+ /// @return "RUNTIME:CPU"/"RUNTIME:GPU"/"RUNTIME:DSP" for the SNPE backend,
+ ///         "" for every other backend (no custom property needed).
+ const char *InferenceMLAPI::GetCustomProp()
+ {
+ if (mPluginType != INFERENCE_BACKEND_SNPE)
+ return "";
+
+ // Map the configured target device to an SNPE runtime hint.
+ // NOTE(review): any target other than CPU/GPU falls through to DSP —
+ // confirm this is intended for targets such as INFERENCE_TARGET_CUSTOM.
+ return mTargetDevice == INFERENCE_TARGET_CPU ? "RUNTIME:CPU" :
+ mTargetDevice == INFERENCE_TARGET_GPU ? "RUNTIME:GPU" : "RUNTIME:DSP";
+ }
+
// Loads the given model files in the requested format via the ML API.
// NOTE(review): the function body is elided in this diff hunk — `ret` is
// computed by code not visible here; only the trailing return survives
// in this view.
int InferenceMLAPI::Load(std::vector<std::string> model_paths,
inference_model_format_e model_format)
{
return ret;
}
- int err = ml_single_open(&mSingle, model_str.c_str(), in_info, out_info,
- nnfw_type, nnfw_hw);
+ // Switch to ml_single_open_full() so a backend-specific custom property
+ // (e.g. the SNPE "RUNTIME:*" hint from GetCustomProp()) can be forwarded.
+ int err = ml_single_open_full(&mSingle, model_str.c_str(), in_info, out_info,
+ nnfw_type, nnfw_hw, GetCustomProp());
if (err != ML_ERROR_NONE) {
- LOGE("Failed to request ml_single_open(%d).", err);
+ // Keep the error message in sync with the API actually called.
+ LOGE("Failed to request ml_single_open_full(%d).", err);
return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
bool IsFileReadable(const std::string& path);
std::tuple<ml_nnfw_type_e, ml_nnfw_hw_e> GetNNFWInfo();
std::string GetModelPath(const std::vector<std::string>& model_paths);
+ // Backend-specific custom-property string for ml_single_open_full();
+ // empty for non-SNPE backends.
+ const char *GetCustomProp();
int mPluginType;
int mTargetDevice;