* @brief Defines #MV_INFERENCE_TARGET_TYPE to set the type used
* for device running attribute of the engine configuration.
* @details Switches between CPU, GPU, or Custom:\n
- * #MV_INFERENCE_TARGET_CPU,\n
- * #MV_INFERENCE_TARGET_GPU,\n
- * #MV_INFERENCE_TARGET_CUSTOM.\n
- * The default type is CPU.
+ * #MV_INFERENCE_TARGET_CPU(Deprecated),\n
+ * #MV_INFERENCE_TARGET_GPU(Deprecated),\n
+ * #MV_INFERENCE_TARGET_CUSTOM(Deprecated).\n
+ * #MV_INFERENCE_CPU,\n
+ * #MV_INFERENCE_GPU,\n
+ * #MV_INFERENCE_CUSTOM.\n
+ *
+ * The default type is CPU. Since Tizen 6.0 the MV_INFERENCE_TARGET_ types are
+ * deprecated; use the corresponding MV_INFERENCE_ types instead.
*
* @since_tizen 5.5
* @see mv_engine_config_set_int_attribute()
/**
* @brief Enumeration for inference target.
*
- * @since_tizen 5.5
+ * @since_tizen 5.5
+ * @deprecated Deprecated since 6.0. Use #mv_inference_target_device_e instead.
*
*/
typedef enum {
- MV_INFERENCE_TARGET_NONE = 0, /**< None */
- MV_INFERENCE_TARGET_CPU = 1 << 0, /**< CPU */
- MV_INFERENCE_TARGET_GPU = 1 << 1, /**< GPU*/
- MV_INFERENCE_TARGET_CUSTOM = 1 << 2, /**< CUSTOM*/
- MV_INFERENCE_TARGET_MAX = 1 << 3 /**< Target MAX */
+ MV_INFERENCE_TARGET_NONE = -1, /**< None */
+ MV_INFERENCE_TARGET_CPU, /**< CPU */
+ MV_INFERENCE_TARGET_GPU, /**< GPU*/
+ MV_INFERENCE_TARGET_CUSTOM, /**< CUSTOM*/
+ MV_INFERENCE_TARGET_MAX /**< Target MAX */
} mv_inference_target_type_e;
+/**
+ * @brief Enumeration for inference target devices.
+ *
+ * @since_tizen 6.0
+ *
+ */
+typedef enum {
+ MV_INFERENCE_NONE = 0, /**< None */
+ MV_INFERENCE_CPU = 1 << 0, /**< CPU */
+ MV_INFERENCE_GPU = 1 << 1, /**< GPU*/
+ MV_INFERENCE_CUSTOM = 1 << 2, /**< CUSTOM*/
+ MV_INFERENCE_MAX = 1 << 3 /**< Target MAX */
+} mv_inference_target_device_e;
+
/**
* @brief Enumeration for input data type.
*
int ConfigureBackendType(const mv_inference_backend_type_e backendType);
/**
- * @brief Configure inference target types such as CPU, GPU or NPU. (one more types can be combined)
+ * @brief Configure an inference target device type such as CPU, GPU or NPU. (only one type can be set)
+ * @details Internally, a given device type will be converted to a new type.
+ *          This API is just used for backward compatibility.
+ *
+ * @since_tizen 6.0
+ * @deprecated Deprecated since 6.0. Use ConfigureTargetDevices() instead.
+ */
+ int ConfigureTargetTypes(const int targetType);
+
+ /**
+ * @brief Configure inference target devices such as CPU, GPU or NPU. (one more types can be combined)
*
* @since_tizen 6.0
*/
- int ConfigureTargetTypes(const int targetTypes);
+ int ConfigureTargetDevices(const int targetDevices);
/**
* @brief Configure the maximum number of inference results
{
int target_types = INFERENCE_TARGET_NONE;
- if (given_types & MV_INFERENCE_TARGET_CPU)
+ if (given_types & MV_INFERENCE_CPU)
target_types |= INFERENCE_TARGET_CPU;
- if (given_types & MV_INFERENCE_TARGET_GPU)
+ if (given_types & MV_INFERENCE_GPU)
target_types |= INFERENCE_TARGET_GPU;
- if (given_types & INFERENCE_TARGET_CUSTOM)
+ if (given_types & MV_INFERENCE_CUSTOM)
target_types |= INFERENCE_TARGET_CUSTOM;
return target_types;
return MEDIA_VISION_ERROR_NONE;
}
-int Inference::ConfigureTargetTypes(const int targetTypes)
+int Inference::ConfigureTargetTypes(const int targetType)
{
// Check if given target types are valid or not.
- if (MV_INFERENCE_TARGET_NONE >= targetTypes || MV_INFERENCE_TARGET_MAX <= targetTypes) {
+ if (MV_INFERENCE_TARGET_NONE >= targetType || MV_INFERENCE_TARGET_MAX <= targetType) {
LOGE("Invalid target device.");
return MEDIA_VISION_ERROR_INVALID_PARAMETER;
}
- LOGI("target devices : %d", targetTypes);
+ unsigned int new_type = MV_INFERENCE_NONE;
- mConfig.mTargetTypes = targetTypes;
+ // Convert old type to new one.
+ switch (targetType) {
+ case MV_INFERENCE_TARGET_CPU:
+ new_type = MV_INFERENCE_CPU;
+ break;
+ case MV_INFERENCE_TARGET_GPU:
+ new_type = MV_INFERENCE_GPU;
+ break;
+ case MV_INFERENCE_TARGET_CUSTOM:
+ new_type = MV_INFERENCE_CUSTOM;
+ break;
+ }
+
+ LOGI("target devices : %d", new_type);
+
+ mConfig.mTargetTypes = new_type;
+
+ return MEDIA_VISION_ERROR_NONE;
+}
+
+int Inference::ConfigureTargetDevices(const int targetDevices)
+{
+ // Check if given target types are valid or not.
+ if (MV_INFERENCE_NONE >= targetDevices || MV_INFERENCE_MAX <= targetDevices) {
+ LOGE("Invalid target device.");
+ return MEDIA_VISION_ERROR_INVALID_PARAMETER;
+ }
+
+ LOGI("target devices : %d", targetDevices);
+
+ mConfig.mTargetTypes = targetDevices;
return MEDIA_VISION_ERROR_NONE;
}
LOGI("threshold %.4f", mThreshold);
// Check if backend supports a given target device/devices or not.
- if (mConfig.mTargetTypes & MV_INFERENCE_TARGET_CPU) {
+ if (mConfig.mTargetTypes & MV_INFERENCE_CPU) {
if (!(mBackendCapacity.supported_accel_devices & INFERENCE_TARGET_CPU)) {
LOGE("Backend doesn't support CPU device as an accelerator.");
return MEDIA_VISION_ERROR_INVALID_PARAMETER;
}
}
- if (mConfig.mTargetTypes & MV_INFERENCE_TARGET_GPU) {
+ if (mConfig.mTargetTypes & MV_INFERENCE_GPU) {
if (!(mBackendCapacity.supported_accel_devices & INFERENCE_TARGET_GPU)) {
- LOGE("Backend doesn't support CPU device as an accelerator.");
+ LOGE("Backend doesn't support GPU device as an accelerator.");
return MEDIA_VISION_ERROR_INVALID_PARAMETER;
}
}
- if (mConfig.mTargetTypes & MV_INFERENCE_TARGET_CUSTOM) {
+ if (mConfig.mTargetTypes & MV_INFERENCE_CUSTOM) {
if (!(mBackendCapacity.supported_accel_devices & INFERENCE_TARGET_CUSTOM)) {
- LOGE("Backend doesn't support CPU device as an accelerator.");
+ LOGE("Backend doesn't support custom device as an accelerator.");
return MEDIA_VISION_ERROR_INVALID_PARAMETER;
using namespace mediavision::inference;
+/* Treat the configuration as new-style (Tizen 6.0 API) when the
+ * MV_INFERENCE_INPUT_DATA_TYPE attribute can be read from it. */
+static bool is_new_mv_inference_engine(mv_engine_config_h engine_config)
+{
+ int dataType = 0;
+
+ int ret = mv_engine_config_get_int_attribute(engine_config,
+ MV_INFERENCE_INPUT_DATA_TYPE,
+ &dataType);
+ if (ret != MEDIA_VISION_ERROR_NONE) {
+ return false;
+ }
+
+ return true;
+}
+
mv_engine_config_h mv_inference_get_engine_config(mv_inference_h infer)
{
Inference *pInfer = static_cast<Inference *>(infer);
goto _ERROR_;
}
- ret = pInfer->ConfigureTargetTypes(targetTypes);
- if (ret != MEDIA_VISION_ERROR_NONE) {
- LOGE("Tried to configure invalid target types.");
- goto _ERROR_;
+ // Distinguish configurations written for the new inference API from old ones.
+ // Tizen 6.0 introduced mv_inference_target_device_e, whose enumeration values
+ // differ from mv_inference_target_type_e so that multiple target devices can
+ // be combined. A value given through the old enumeration is converted to the
+ // new one inside ConfigureTargetTypes().
+ if (is_new_mv_inference_engine(engine_config) == false) {
+ ret = pInfer->ConfigureTargetTypes(targetTypes);
+ if (ret != MEDIA_VISION_ERROR_NONE) {
+ LOGE("Tried to configure invalid target types.");
+ goto _ERROR_;
+ }
+ } else {
+ ret = pInfer->ConfigureTargetDevices(targetTypes);
+ if (ret != MEDIA_VISION_ERROR_NONE) {
+ LOGE("Tried to configure invalid target types.");
+ goto _ERROR_;
+ }
}
pInfer->ConfigureModelFiles(std::string(modelConfigFilePath),