mv_machine_learning: drop deprecated enum type
author Inki Dae <inki.dae@samsung.com>
Mon, 16 Jan 2023 07:46:39 +0000 (16:46 +0900)
committer Kwanghoon Son <k.son@samsung.com>
Mon, 13 Feb 2023 02:31:48 +0000 (11:31 +0900)
[Issue type] code cleanup

Dropped the deprecated target type enum and the code that handled it.

mv_inference_target_type_e is an old type that indicated the inference
target device, and mv_inference_target_device_e is used instead. Drop
the old type to reduce code complexity; it no longer needs to be
considered.
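
For application code the migration is a one-line change of attribute
key and enum value, as the test suite updates below show. A minimal
sketch (error handling elided):

        /* Before (deprecated since 6.0, removed by this patch): */
        mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE,
                                           MV_INFERENCE_TARGET_CPU);

        /* After: */
        mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
                                           MV_INFERENCE_TARGET_DEVICE_CPU);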

Change-Id: I68b59a334e274af740201c09b8e7f80ce524510a
Signed-off-by: Inki Dae <inki.dae@samsung.com>
include/mv_inference_type.h
mv_machine_learning/inference/include/Inference.h
mv_machine_learning/inference/src/Inference.cpp
mv_machine_learning/inference/src/mv_inference_open.cpp
test/testsuites/machine_learning/inference/inference_test_suite.c
test/testsuites/machine_learning/inference/test_face_landmark_detection.cpp

index f7312ed..c8deca2 100644 (file)
@@ -71,21 +71,6 @@ typedef enum {
 } mv_inference_backend_type_e;
 
 /**
- * @deprecated Deprecated since 6.0. Use #mv_inference_target_device_e instead.
- * @brief Enumeration for inference target.
- *
- * @since_tizen 5.5
- *
- */
-typedef enum {
-       MV_INFERENCE_TARGET_NONE = -1, /**< None */
-       MV_INFERENCE_TARGET_CPU, /**< CPU */
-       MV_INFERENCE_TARGET_GPU, /**< GPU*/
-       MV_INFERENCE_TARGET_CUSTOM, /**< CUSTOM*/
-       MV_INFERENCE_TARGET_MAX /**< Target MAX */
-} mv_inference_target_type_e;
-
-/**
  * @brief Enumeration for inference target.
  *
  * @since_tizen 6.0
index 52d4dee..cfcbb68 100644 (file)
@@ -177,15 +177,6 @@ public:
        int configureOutputMetaInfo(mediavision::machine_learning::MetaMap &outputMetaInfo);
 
        /**
-                * @brief   Configure a inference target device type such as CPU, GPU or NPU. (only one type can be set)
-                * @details Internally, a given device type will be converted to new type.
-                *                      This API is just used for backward compatibility.
-                *
-                * @since_tizen 6.0 (Deprecated)
-                */
-       int ConfigureTargetTypes(int targetType, bool isNewVersion);
-
-       /**
                 * @brief   Configure inference target devices such as CPU, GPU or NPU. (one or more types can be combined)
                 *
                 * @since_tizen 6.0
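
ConfigureTargetDevices() takes a device mask rather than a single
enumerator, so several devices can be requested at once. A sketch,
assuming the mv_inference_target_device_e values are bit flags that can
be OR-ed (the range check against MV_INFERENCE_TARGET_DEVICE_MAX in the
next hunk is consistent with that):

        /* Request CPU and GPU acceleration together (assumed flag semantics). */
        int devices = MV_INFERENCE_TARGET_DEVICE_CPU | MV_INFERENCE_TARGET_DEVICE_GPU;
        mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
                                           devices);
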
index 0681849..558dbb0 100644 (file)
@@ -535,43 +535,6 @@ int Inference::CheckBackendType(const mv_inference_backend_type_e backendType)
        return MEDIA_VISION_ERROR_NONE;
 }
 
-int Inference::ConfigureTargetTypes(int targetType, bool isNewVersion)
-{
-       if (isNewVersion) {
-               if (MV_INFERENCE_TARGET_DEVICE_NONE >= targetType || MV_INFERENCE_TARGET_DEVICE_MAX <= targetType) {
-                       LOGE("Invalid target device.");
-                       return MEDIA_VISION_ERROR_INVALID_PARAMETER;
-               }
-       } else {
-               if (MV_INFERENCE_TARGET_NONE >= targetType || MV_INFERENCE_TARGET_MAX <= targetType) {
-                       LOGE("Invalid target device.");
-                       return MEDIA_VISION_ERROR_INVALID_PARAMETER;
-               }
-
-               LOGI("Before converting target types : %d", targetType);
-
-               // Convert old type to new one.
-               switch (targetType) {
-               case MV_INFERENCE_TARGET_CPU:
-                       targetType = MV_INFERENCE_TARGET_DEVICE_CPU;
-                       break;
-               case MV_INFERENCE_TARGET_GPU:
-
-                       targetType = MV_INFERENCE_TARGET_DEVICE_GPU;
-                       break;
-               case MV_INFERENCE_TARGET_CUSTOM:
-                       targetType = MV_INFERENCE_TARGET_DEVICE_CUSTOM;
-                       break;
-               }
-
-               LOGI("After converting target types : %d", targetType);
-       }
-
-       mConfig.mTargetTypes = targetType;
-
-       return MEDIA_VISION_ERROR_NONE;
-}
-
 int Inference::ConfigureTargetDevices(const int targetDevices)
 {
        // Check if given target types are valid or not.
@@ -582,11 +545,6 @@ int Inference::ConfigureTargetDevices(const int targetDevices)
 
        LOGI("target devices : %d", targetDevices);
 
-       if (!(mBackendCapacity.supported_accel_devices & targetDevices)) {
-               LOGE("Backend doesn't support a given device acceleration.");
-               return MEDIA_VISION_ERROR_NOT_SUPPORTED;
-       }
-
        mConfig.mTargetTypes = targetDevices;
 
        return MEDIA_VISION_ERROR_NONE;
index 9a8ec6a..734237c 100644 (file)
 
 using namespace mediavision::inference;
 
-static int check_mv_inference_engine_version(mv_engine_config_h engine_config, bool *is_new_version)
-{
-       int oldType = 0, newType = 0;
-
-       int ret = mv_engine_config_get_int_attribute(engine_config, MV_INFERENCE_TARGET_TYPE, &oldType);
-       if (ret != MEDIA_VISION_ERROR_NONE)
-               oldType = -1;
-
-       ret = mv_engine_config_get_int_attribute(engine_config, MV_INFERENCE_TARGET_DEVICE_TYPE, &newType);
-       if (ret != MEDIA_VISION_ERROR_NONE)
-               newType = -1;
-
-       // At least one of two target device types of
-       // media-vision-config.json file should have CPU device.
-       if (oldType == -1 && newType == -1)
-               return MEDIA_VISION_ERROR_INVALID_PARAMETER;
-
-       // If values of both types are changed then return an error.
-       // only one of two types should be used.
-       if (oldType != MV_INFERENCE_TARGET_CPU && newType != MV_INFERENCE_TARGET_DEVICE_CPU) {
-               LOGE("Please use only one of below two device types.");
-               LOGE("MV_INFERENCE_TARGET_TYPE(deprecated) or MV_INFERENCE_TARGET_DEVICE_TYPE(recommended).");
-               return MEDIA_VISION_ERROR_INVALID_PARAMETER;
-       }
-
-       LOGI("oldType = %d, newType = %d", oldType, newType);
-
-       // If default value of only old type is changed then use old type.
-       // Otherwise, use new type in following cases,
-       // - all default values of two types aren't changed.
-       //   (oldType == MV_INFERENCE_TARGET_CPU && newType == MV_INFERENCE_TARGET_DEVICE_CPU)
-       // - default value of only new type is changed.
-       //   (oldType == MV_INFERENCE_TARGET_CPU && (newType != -1 && newType != MV_INFERENCE_TARGET_DEVICE_CPU))
-       if ((oldType != -1 && oldType != MV_INFERENCE_TARGET_CPU) && newType == MV_INFERENCE_TARGET_DEVICE_CPU)
-               *is_new_version = false;
-       else
-               *is_new_version = true;
-
-       return MEDIA_VISION_ERROR_NONE;
-}
-
 mv_engine_config_h mv_inference_get_engine_config(mv_inference_h infer)
 {
        Inference *pInfer = static_cast<Inference *>(infer);
@@ -329,20 +288,8 @@ int mv_inference_configure_engine_open(mv_inference_h infer, mv_engine_config_h
                goto out_of_function;
        }
 
-       bool is_new_version;
-
-       // Check if new inference engine framework or old one.
-       // new inference engine framework has different mv_inference_target_type_e enumeration values
-       // to support multiple inference target devices. So in case of old version,
-       // enumeration value given by user should be converted to new value, which
-       // will be done at ConfigureTargetTypes callback internally.
-       // Ps. this function will be dropped with deprecated code version-after-next of Tizen.
-       ret = check_mv_inference_engine_version(engine_config, &is_new_version);
-       if (ret != MEDIA_VISION_ERROR_NONE)
-               goto out_of_function;
-
-       // Convert old type to new one and then use it if is_new_version is false
-       if (pInfer->ConfigureTargetTypes(targetTypes, is_new_version) != MEDIA_VISION_ERROR_NONE) {
+       // Set target device type.
+       if (pInfer->ConfigureTargetDevices(targetTypes) != MEDIA_VISION_ERROR_NONE) {
                LOGE("Tried to configure invalid target types.");
                goto out_of_function;
        }
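
With the version check gone, configuration flows straight from the
public API into ConfigureTargetDevices(). A caller-side sketch of the
path this hunk simplifies (handle names are hypothetical, error
handling elided):

        mv_inference_h infer;
        mv_engine_config_h cfg;

        mv_inference_create(&infer);
        mv_create_engine_config(&cfg);
        mv_engine_config_set_int_attribute(cfg, MV_INFERENCE_TARGET_DEVICE_TYPE,
                                           MV_INFERENCE_TARGET_DEVICE_CPU);
        mv_inference_configure(infer, cfg); /* reaches ConfigureTargetDevices() */
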
index 8220ee7..51af616 100644 (file)
@@ -245,7 +245,8 @@ int engine_config_hosted_tflite_cpu(mv_engine_config_h handle, const char *tf_we
 {
        RET_IF_FAIL(mv_engine_config_set_string_attribute(handle, MV_INFERENCE_MODEL_WEIGHT_FILE_PATH, tf_weight));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_TFLITE));
-       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+                                                                                                  MV_INFERENCE_TARGET_DEVICE_CPU));
        if (meta_file != NULL)
                RET_IF_FAIL(mv_engine_config_set_string_attribute(handle, MV_INFERENCE_MODEL_META_FILE_PATH, meta_file));
        return MEDIA_VISION_ERROR_NONE;
@@ -619,8 +620,8 @@ int perform_configure_set_target(mv_engine_config_h engine_cfg)
                printf("Invalid type! Try again.\n");
        }
 
-       err = mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_TARGET_TYPE,
-                                                                                        (mv_inference_target_type_e) targetType);
+       err = mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_TARGET_DEVICE_TYPE,
+                                                                                        (mv_inference_target_device_e) targetType);
        if (err != MEDIA_VISION_ERROR_NONE) {
                printf("Fail to set target type: %d\n", targetType);
        }
@@ -785,7 +786,8 @@ int perform_tflite_mobilenetv1_config(mv_engine_config_h handle)
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 127.0));
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.6));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_TFLITE));
-       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+                                                                                                  MV_INFERENCE_TARGET_DEVICE_CPU));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 224));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 224));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ -807,7 +809,8 @@ int perform_armnn_mobilenetv1_config(mv_engine_config_h handle)
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 127.0));
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.6));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_ARMNN));
-       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+                                                                                                  MV_INFERENCE_TARGET_DEVICE_CPU));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 224));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 224));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ -829,7 +832,8 @@ int perform_one_mobilenetv1_quant_config(mv_engine_config_h handle)
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 1.0));
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.6));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_ONE));
-       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+                                                                                                  MV_INFERENCE_TARGET_DEVICE_CPU));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 224));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 224));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ -881,7 +885,8 @@ int perform_opencv_caffe_squeezenet_config(mv_engine_config_h handle)
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 1.0));
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_OPENCV));
-       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+                                                                                                  MV_INFERENCE_TARGET_DEVICE_CPU));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 227));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 227));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ -1012,7 +1017,8 @@ int perform_tflite_mobilenetv1ssd_config(mv_engine_config_h handle)
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 127.5));
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_TFLITE));
-       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+                                                                                                  MV_INFERENCE_TARGET_DEVICE_CPU));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 300));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 300));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ -1037,7 +1043,8 @@ int perform_opencv_mobilenetv1ssd_config(mv_engine_config_h handle)
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 127.5));
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_OPENCV));
-       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+                                                                                                  MV_INFERENCE_TARGET_DEVICE_CPU));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 300));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 300));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ -1060,7 +1067,8 @@ int perform_armnn_mobilenetv1ssd_config(mv_engine_config_h handle)
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 127.5));
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_ARMNN));
-       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+                                                                                                  MV_INFERENCE_TARGET_DEVICE_CPU));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 300));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 300));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ -1165,7 +1173,8 @@ int perform_opencv_resnet10ssd_face(mv_engine_config_h handle)
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 1.0));
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_OPENCV));
-       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+                                                                                                  MV_INFERENCE_TARGET_DEVICE_CPU));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 300));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 300));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ -1187,7 +1196,8 @@ int perform_armnn_mobilenetv1ssd_face(mv_engine_config_h handle)
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 127.5));
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_ARMNN));
-       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+                                                                                                  MV_INFERENCE_TARGET_DEVICE_CPU));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 300));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 300));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
@@ -1277,7 +1287,8 @@ int perform_opencv_cnncascade(mv_engine_config_h handle)
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_MEAN_VALUE, 127.5));
        RET_IF_FAIL(mv_engine_config_set_double_attribute(handle, MV_INFERENCE_MODEL_STD_VALUE, 127.5));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_OPENCV));
-       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU));
+       RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_TARGET_DEVICE_TYPE,
+                                                                                                  MV_INFERENCE_TARGET_DEVICE_CPU));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_WIDTH, 128));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 128));
        RET_IF_FAIL(mv_engine_config_set_int_attribute(handle, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3));
index 321456a..3eb1d37 100644 (file)
@@ -74,7 +74,8 @@ TEST_P(TestFaceLandmarkDetectionOpenCV, CAFFE_CNNCASCADE)
                          MEDIA_VISION_ERROR_NONE);
        ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_BACKEND_TYPE, MV_INFERENCE_BACKEND_OPENCV),
                          MEDIA_VISION_ERROR_NONE);
-       ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_TARGET_TYPE, MV_INFERENCE_TARGET_CPU),
+       ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_TARGET_DEVICE_TYPE,
+                                                                                                MV_INFERENCE_TARGET_DEVICE_CPU),
                          MEDIA_VISION_ERROR_NONE);
        ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_WIDTH, 128),
                          MEDIA_VISION_ERROR_NONE);