return false;
}
+// Returns true when @backend_type lies strictly between the
+// MV_INFERENCE_BACKEND_NONE and MV_INFERENCE_BACKEND_MAX sentinels,
+// i.e. it names a concrete inference backend; false otherwise.
+// NOTE(review): assumes the backend enum is contiguous with NONE/MAX
+// as exclusive bounds — confirm against mv_inference_backend_type_e.
+static bool IsValidBackendType(const int backend_type)
+{
+ if (backend_type > MV_INFERENCE_BACKEND_NONE &&
+ backend_type < MV_INFERENCE_BACKEND_MAX)
+ return true;
+
+ return false;
+}
+
+// Returns true when a model config file path must be provided:
+// only for the MLAPI backend with the CUSTOM target-device flag set.
+// @target_device_type is treated as a bitmask (tested with bitwise &);
+// precedence is fine here since == binds tighter than &, which binds
+// tighter than &&.
+static bool IsConfigFilePathRequired(const int target_device_type, const int backend_type)
+{
+ // In case of MV_INFERENCE_TARGET_DEVICE_CUSTOM via MLAPI backend, config file path is required.
+ if (backend_type == MV_INFERENCE_BACKEND_MLAPI &&
+ target_device_type & MV_INFERENCE_TARGET_DEVICE_CUSTOM)
+ return true;
+
+ return false;
+}
+
int mv_inference_configure_model_open(mv_inference_h infer,
mv_engine_config_h engine_config)
{
goto release_model_meta_file_path;
}
+ if (!IsValidBackendType(backendType)) {
+ LOGE("Invalid backend type(%d).", backendType);
+ ret = MEDIA_VISION_ERROR_INVALID_PARAMETER;
+ goto release_model_meta_file_path;
+ }
+
if (access(modelWeightFilePath, F_OK)) {
LOGE("weightFilePath in [%s] ", modelWeightFilePath);
ret = MEDIA_VISION_ERROR_INVALID_PATH;
goto release_model_meta_file_path;
}
- if ((backendType > MV_INFERENCE_BACKEND_NONE &&
- backendType < MV_INFERENCE_BACKEND_MAX) &&
- (backendType != MV_INFERENCE_BACKEND_TFLITE) &&
- (backendType != MV_INFERENCE_BACKEND_ARMNN) &&
- (backendType == MV_INFERENCE_BACKEND_MLAPI &&
- (pInfer->GetTargetType() & MV_INFERENCE_TARGET_DEVICE_CUSTOM)) &&
- (backendType != MV_INFERENCE_BACKEND_ONE)) {
+ if (IsConfigFilePathRequired(pInfer->GetTargetType(), backendType)) {
if (access(modelConfigFilePath, F_OK)) {
LOGE("modelConfigFilePath in [%s] ", modelConfigFilePath);
ret = MEDIA_VISION_ERROR_INVALID_PATH;