using namespace mediavision::inference;
-static int check_mv_inference_engine_version(mv_engine_config_h engine_config, bool *is_new_version)
-{
- int oldType = 0, newType = 0;
-
- int ret = mv_engine_config_get_int_attribute(engine_config, MV_INFERENCE_TARGET_TYPE, &oldType);
- if (ret != MEDIA_VISION_ERROR_NONE)
- oldType = -1;
-
- ret = mv_engine_config_get_int_attribute(engine_config, MV_INFERENCE_TARGET_DEVICE_TYPE, &newType);
- if (ret != MEDIA_VISION_ERROR_NONE)
- newType = -1;
-
- // At least one of two target device types of
- // media-vision-config.json file should have CPU device.
- if (oldType == -1 && newType == -1)
- return MEDIA_VISION_ERROR_INVALID_PARAMETER;
-
- // If values of both types are changed then return an error.
- // only one of two types should be used.
- if (oldType != MV_INFERENCE_TARGET_CPU && newType != MV_INFERENCE_TARGET_DEVICE_CPU) {
- LOGE("Please use only one of below two device types.");
- LOGE("MV_INFERENCE_TARGET_TYPE(deprecated) or MV_INFERENCE_TARGET_DEVICE_TYPE(recommended).");
- return MEDIA_VISION_ERROR_INVALID_PARAMETER;
- }
-
- LOGI("oldType = %d, newType = %d", oldType, newType);
-
- // If default value of only old type is changed then use old type.
- // Otherwise, use new type in following cases,
- // - all default values of two types aren't changed.
- // (oldType == MV_INFERENCE_TARGET_CPU && newType == MV_INFERENCE_TARGET_DEVICE_CPU)
- // - default value of only new type is changed.
- // (oldType == MV_INFERENCE_TARGET_CPU && (newType != -1 && newType != MV_INFERENCE_TARGET_DEVICE_CPU))
- if ((oldType != -1 && oldType != MV_INFERENCE_TARGET_CPU) && newType == MV_INFERENCE_TARGET_DEVICE_CPU)
- *is_new_version = false;
- else
- *is_new_version = true;
-
- return MEDIA_VISION_ERROR_NONE;
-}
-
mv_engine_config_h mv_inference_get_engine_config(mv_inference_h infer)
{
Inference *pInfer = static_cast<Inference *>(infer);
@@ ... @@
 		goto out_of_function;
}
- bool is_new_version;
-
- // Check if new inference engine framework or old one.
- // new inference engine framework has different mv_inference_target_type_e enumeration values
- // to support multiple inference target devices. So in case of old version,
- // enumeration value given by user should be converted to new value, which
- // will be done at ConfigureTargetTypes callback internally.
- // Ps. this function will be dropped with deprecated code version-after-next of Tizen.
- ret = check_mv_inference_engine_version(engine_config, &is_new_version);
- if (ret != MEDIA_VISION_ERROR_NONE)
- goto out_of_function;
-
- // Convert old type to new one and then use it if is_new_version is false
- if (pInfer->ConfigureTargetTypes(targetTypes, is_new_version) != MEDIA_VISION_ERROR_NONE) {
+ // Set target device type.
+ if (pInfer->ConfigureTargetDevices(targetTypes) != MEDIA_VISION_ERROR_NONE) {
LOGE("Tried to configure invalid target types.");
goto out_of_function;
}
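Note: the compatibility shim removed above existed because the deprecated MV_INFERENCE_TARGET_TYPE attribute and the newer MV_INFERENCE_TARGET_DEVICE_TYPE attribute use different enumerations; the newer one is flag-style so that several devices can be requested at once, which is why old values had to be converted before being handed to the backend. For context, a sketch of the two enumerations as they appear in mv_inference_type.h (values reproduced from memory; treat them as an assumption and verify against the header):

/* Deprecated single-device enumeration (values assumed, for illustration only). */
typedef enum {
	MV_INFERENCE_TARGET_NONE = -1,
	MV_INFERENCE_TARGET_CPU,    /* 0 */
	MV_INFERENCE_TARGET_GPU,    /* 1 */
	MV_INFERENCE_TARGET_CUSTOM, /* 2 */
	MV_INFERENCE_TARGET_MAX
} mv_inference_target_type_e;

/* Newer flag-style enumeration; device bits can be OR-ed together
 * (values assumed, for illustration only). */
typedef enum {
	MV_INFERENCE_TARGET_DEVICE_NONE = 0,
	MV_INFERENCE_TARGET_DEVICE_CPU = 1 << 0,
	MV_INFERENCE_TARGET_DEVICE_GPU = 1 << 1,
	MV_INFERENCE_TARGET_DEVICE_CUSTOM = 1 << 2,
	MV_INFERENCE_TARGET_DEVICE_MAX = 1 << 3
} mv_inference_target_device_e;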
@@ ... @@
 {
RET_IF_FAIL(mv_engine_config_set_string_attribute(handle, MV_INFERENCE_MODEL_WEIGHT_FILE_PATH, tf_weight));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_TFLITE));
- RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+ RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+ MV_INFERENCE_TARGET_DEVICE_CPU));
if (meta_file != NULL)
RET_IF_FAIL(mv_engine_config_set_string_attribute(handle, MV_INFERENCE_MODEL_META_FILE_PATH, meta_file));
return MEDIA_VISION_ERROR_NONE;
printf("Invalid type! Try again.\n");
}
- err = mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_TARGET_TYPE,
- (mv_inference_target_type_e) targetType);
+ err = mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_TARGET_DEVICE_TYPE,
+ (mv_inference_target_device_e) targetType);
if (err != MEDIA_VISION_ERROR_NONE) {
printf("Fail to set target type: %d\n", targetType);
}
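For reference, a minimal caller-side sketch of selecting the CPU device through the non-deprecated attribute, matching the pattern used throughout this patch. It assumes the standard mv_create_engine_config() entry point from mv_common.h; request_cpu_inference() is a hypothetical helper name and error handling is trimmed:

#include <mv_common.h>
#include <mv_inference.h>

/* Sketch only: create an engine config and request CPU inference via the
 * MV_INFERENCE_TARGET_DEVICE_TYPE attribute. */
static int request_cpu_inference(mv_engine_config_h *out_cfg)
{
	int err = mv_create_engine_config(out_cfg);
	if (err != MEDIA_VISION_ERROR_NONE)
		return err;

	return mv_engine_config_set_int_attribute(*out_cfg, MV_INFERENCE_TARGET_DEVICE_TYPE,
						   MV_INFERENCE_TARGET_DEVICE_CPU);
}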
@@ ... @@
 	RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 127.0));
RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.6));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_TFLITE));
- RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+ RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+ MV_INFERENCE_TARGET_DEVICE_CPU));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 224));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 224));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ ... @@
 	RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 127.0));
RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.6));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_ARMNN));
- RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+ RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+ MV_INFERENCE_TARGET_DEVICE_CPU));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 224));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 224));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ ... @@
 	RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 1.0));
RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.6));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_ONE));
- RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+ RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+ MV_INFERENCE_TARGET_DEVICE_CPU));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 224));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 224));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ ... @@
 	RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 1.0));
RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_OPENCV));
- RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+ RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+ MV_INFERENCE_TARGET_DEVICE_CPU));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 227));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 227));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ ... @@
 	RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 127.5));
RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_TFLITE));
- RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+ RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+ MV_INFERENCE_TARGET_DEVICE_CPU));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 300));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 300));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ ... @@
 	RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 127.5));
RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_OPENCV));
- RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+ RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+ MV_INFERENCE_TARGET_DEVICE_CPU));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 300));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 300));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ ... @@
 	RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 127.5));
RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_ARMNN));
- RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+ RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+ MV_INFERENCE_TARGET_DEVICE_CPU));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 300));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 300));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ ... @@
 	RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 1.0));
RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_OPENCV));
- RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+ RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+ MV_INFERENCE_TARGET_DEVICE_CPU));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 300));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 300));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ ... @@
 	RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 127.5));
RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_ARMNN));
- RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+ RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+ MV_INFERENCE_TARGET_DEVICE_CPU));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 300));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 300));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ ... @@
 	RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_MEAN_VALUE, 127.5));
RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 127.5));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_OPENCV));
- RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+ RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+ MV_INFERENCE_TARGET_DEVICE_CPU));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 128));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 128));
RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));