#define MV_INFERENCE_MODEL_USER_FILE_PATH "MV_INFERENCE_MODEL_USER_FILE_PATH"
/**
+ * @brief Defines #MV_INFERENCE_MODEL_META_FILE_PATH to set inference
+ * model's metadata file attribute of the engine configuration.
+ * @details The file includes inference model's metadata such as input and output
+ * node names, input tensor's width and height,
+ * mean and standard deviation values for pre-processing.
+ *
+ * @since_tizen 6.5
+ * @see mv_engine_config_set_string_attribute()
+ * @see mv_engine_config_get_string_attribute()
+ */
+#define MV_INFERENCE_MODEL_META_FILE_PATH "MV_INFERENCE_MODEL_META_FILE_PATH"
+
+/**
+ * @deprecated Deprecated since 6.5. Use #MV_INFERENCE_MODEL_META_FILE_PATH instead.
* @brief Defines #MV_INFERENCE_MODEL_MEAN_VALUE to set inference
* model's mean attribute of the engine configuration.
*
#define MV_INFERENCE_MODEL_MEAN_VALUE "MV_INFERENCE_MODEL_MEAN_VALUE"
/**
+ * @deprecated Deprecated since 6.5. Use #MV_INFERENCE_MODEL_META_FILE_PATH instead.
* @brief Defines #MV_INFERENCE_MODEL_STD_VALUE to set an input image's
* standard deviation attribute of the engine configuration.
*
#define MV_INFERENCE_TARGET_DEVICE_TYPE "MV_INFERENCE_TARGET_DEVICE_TYPE"
/**
+ * @deprecated Deprecated since 6.5. Use #MV_INFERENCE_MODEL_META_FILE_PATH instead.
* @brief Defines #MV_INFERENCE_INPUT_TENSOR_WIDTH to set the width
* of input tensor.
*
#define MV_INFERENCE_INPUT_TENSOR_WIDTH "MV_INFERENCE_INPUT_TENSOR_WIDTH"
/**
+ * @deprecated Deprecated since 6.5. Use #MV_INFERENCE_MODEL_META_FILE_PATH instead.
* @brief Defines #MV_INFERENCE_INPUT_TENSOR_HEIGHT to set the height
* of input tensor.
*
#define MV_INFERENCE_INPUT_TENSOR_HEIGHT "MV_INFERENCE_INPUT_TENSOR_HEIGHT"
/**
+ * @deprecated Deprecated since 6.5. Use #MV_INFERENCE_MODEL_META_FILE_PATH instead.
* @brief Defines #MV_INFERENCE_INPUT_TENSOR_CHANNELS to set the channels,
* for example 3 in case of RGB colorspace, of input tensor.
*
#define MV_INFERENCE_INPUT_TENSOR_CHANNELS "MV_INFERENCE_INPUT_TENSOR_CHANNELS"
/**
+ * @deprecated Deprecated since 6.5. Use #MV_INFERENCE_MODEL_META_FILE_PATH instead.
* @brief Defines #MV_INFERENCE_INPUT_DATA_TYPE to set data type of input tensor.
* @details Data type of input tensor can be changed according to a given weight file.
 * Switches between Float32 and UInt8:\n
#define MV_INFERENCE_INPUT_DATA_TYPE "MV_INFERENCE_INPUT_DATA_TYPE"
/**
+ * @deprecated Deprecated since 6.5. Use #MV_INFERENCE_MODEL_META_FILE_PATH instead.
* @brief Defines #MV_INFERENCE_INPUT_NODE_NAME to set the input node name.
*
* @since_tizen 5.5
#define MV_INFERENCE_INPUT_NODE_NAME "MV_INFERENCE_INPUT_NODE_NAME"
/**
+ * @deprecated Deprecated since 6.5. Use #MV_INFERENCE_MODEL_META_FILE_PATH instead.
* @brief Defines #MV_INFERENCE_OUTPUT_NODE_NAMES to set the output node names.
*
* @since_tizen 5.5
#define MV_INFERENCE_OUTPUT_NODE_NAMES "MV_INFERENCE_OUTPUT_NODE_NAMES"
/**
+ * @deprecated Deprecated since 6.5. Use #MV_INFERENCE_MODEL_META_FILE_PATH instead.
* @brief Defines #MV_INFERENCE_OUTPUT_MAX_NUMBER
* to set the maximum number of output attributes
* of the engine configuration.
#define MV_INFERENCE_OUTPUT_MAX_NUMBER "MV_INFERENCE_OUTPUT_MAX_NUMBER"
/**
+ * @deprecated Deprecated since 6.5. Use #MV_INFERENCE_MODEL_META_FILE_PATH instead.
* @brief Defines #MV_INFERENCE_CONFIDENCE_THRESHOLD
* to set the threshold value for the confidence of inference results.
* @details Default value is 0.6 and its range is between 0.0 and 1.0.
return MEDIA_VISION_ERROR_NONE;
}
/**
 * @brief Checks whether @a fileName carries a "json" file extension.
 *
 * @details A name without any '.' has no extension and is never treated as
 *          json. (The previous npos + 1 arithmetic wrapped to 0 for such
 *          names, so a file named exactly "json" was falsely accepted.)
 *
 * @param fileName File name or path to inspect.
 * @return true if the extension after the last '.' is exactly "json",
 *         false otherwise.
 */
bool IsJsonFile(const std::string& fileName)
{
	// find_last_of returns npos when the name contains no '.' at all,
	// i.e. the file has no extension.
	const std::string::size_type dotPos = fileName.find_last_of('.');
	if (dotPos == std::string::npos)
		return false;

	// Compare everything after the last dot against "json".
	return fileName.compare(dotPos + 1, std::string::npos, "json") == 0;
}
+
int mv_inference_configure_model_open(mv_inference_h infer,
mv_engine_config_h engine_config)
{
char *modelConfigFilePath = NULL;
char *modelWeightFilePath = NULL;
char *modelUserFilePath = NULL;
+ char *modelMetaFilePath = NULL;
double modelMeanValue = 0.0;
int backendType = 0;
size_t userFileLength = 0;
- // TODO: a temporal variable, later, it should be removed.
- std::string metaFilePath;
-
ret = mv_engine_config_get_string_attribute(
engine_config, MV_INFERENCE_MODEL_CONFIGURATION_FILE_PATH,
&modelConfigFilePath);
goto _ERROR_;
}
+ ret = mv_engine_config_get_string_attribute(
+ engine_config, MV_INFERENCE_MODEL_META_FILE_PATH,
+ &modelMetaFilePath);
+ if (ret != MEDIA_VISION_ERROR_NONE) {
+ LOGE("Fail to get model meta file path");
+ goto _ERROR_;
+ }
+
+ if (!IsJsonFile(std::string(modelMetaFilePath))) {
+ ret = MEDIA_VISION_ERROR_INVALID_PATH;
+ LOGE("Model meta file should be json");
+ goto _ERROR_;
+ }
+
ret = mv_engine_config_get_double_attribute(
engine_config, MV_INFERENCE_MODEL_MEAN_VALUE, &modelMeanValue);
if (ret != MEDIA_VISION_ERROR_NONE) {
pInfer->ConfigureModelFiles(std::string(modelConfigFilePath),
std::string(modelWeightFilePath),
std::string(modelUserFilePath));
- /* FIXME
- * temporal code lines to get a metafile, which has the same name
- * with modelsWeightFilePath except the extension.
- * Later, it should get a metafilename and the below lines should be
- * removed.
- */
- metaFilePath = std::string(modelWeightFilePath).substr(0,
- std::string(modelWeightFilePath).find_last_of('.')) + ".json";
- LOGI("metaFilePath: %s", metaFilePath.c_str());
- pInfer->ParseMetadata(metaFilePath);
+
+ ret = pInfer->ParseMetadata(std::string(modelMetaFilePath));
+ if (ret != MEDIA_VISION_ERROR_NONE) {
+ LOGE("Fail to ParseMetadata");
+ }
_ERROR_:
if (modelConfigFilePath)
if (modelUserFilePath)
free(modelUserFilePath);
+ if (modelMetaFilePath)
+ free(modelMetaFilePath);
+
LOGI("LEAVE");
return ret;