mv_inference: keep backward compatibility
author    Inki Dae <inki.dae@samsung.com>
Fri, 17 Apr 2020 04:16:17 +0000 (13:16 +0900)
committer Inki Dae <inki.dae@samsung.com>
Fri, 17 Apr 2020 07:06:59 +0000 (16:06 +0900)
This patch keeps backward compatibility with Tizen 5.5
by adding a new mv_inference_target_device_e enumeration
instead of modifying the existing one.

Change-Id: I40df1dacc6b9a74ba438d0767d84ff1f25133584
Signed-off-by: Inki Dae <inki.dae@samsung.com>
include/mv_inference.h
include/mv_inference_type.h
mv_inference/inference/include/Inference.h
mv_inference/inference/src/Inference.cpp
mv_inference/inference/src/mv_inference_open.cpp

index d72f87b83bd631b3e16b5ecda25b72cd888d31f5..f4bcb36f715a1a457d09735173a3edda98682fcc 100644 (file)
@@ -108,10 +108,15 @@ extern "C" {
  * @brief Defines #MV_INFERENCE_TARGET_TYPE to set the type used
  *        for device running attribute of the engine configuration.
  * @details Switches between CPU, GPU, or Custom:\n
- *          #MV_INFERENCE_TARGET_CPU,\n
- *          #MV_INFERENCE_TARGET_GPU,\n
- *          #MV_INFERENCE_TARGET_CUSTOM.\n
- *          The default type is CPU.
+ *          #MV_INFERENCE_TARGET_CPU (Deprecated),\n
+ *          #MV_INFERENCE_TARGET_GPU (Deprecated),\n
+ *          #MV_INFERENCE_TARGET_CUSTOM (Deprecated),\n
+ *          #MV_INFERENCE_CPU,\n
+ *          #MV_INFERENCE_GPU,\n
+ *          #MV_INFERENCE_CUSTOM.\n
+ *
+ *          The default type is CPU. The MV_INFERENCE_TARGET_ values are
+ *          deprecated since Tizen 6.0; do not use them in new code.
  *
  * @since_tizen 5.5
  * @see mv_engine_config_set_int_attribute()
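
For illustration, a minimal sketch of selecting a device through this attribute with one of the new values (mv_create_engine_config() is the standard media vision helper; error handling trimmed):

	mv_engine_config_h cfg = NULL;

	if (mv_create_engine_config(&cfg) != MEDIA_VISION_ERROR_NONE)
		return;

	/* Select the GPU using the Tizen 6.0 enumeration. */
	mv_engine_config_set_int_attribute(cfg, MV_INFERENCE_TARGET_TYPE,
	                                   MV_INFERENCE_GPU);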
index 58e2faa4359749cc1589e3dd9c5edfaf5e50f5ea..6192334c7956316c37c691a9f1647eb3715de342 100644 (file)
@@ -50,17 +50,31 @@ typedef enum {
 /**
  * @brief Enumeration for inference target.
  *
- * @since_tizen 5.5
+ * @since_tizen 5.5 (Deprecated since 6.0; use #mv_inference_target_device_e instead)
  *
  */
 typedef enum {
-    MV_INFERENCE_TARGET_NONE   = 0, /**< None */
-    MV_INFERENCE_TARGET_CPU    = 1 << 0,    /**< CPU */
-    MV_INFERENCE_TARGET_GPU    = 1 << 1,    /**< GPU*/
-    MV_INFERENCE_TARGET_CUSTOM = 1 << 2,    /**< CUSTOM*/
-    MV_INFERENCE_TARGET_MAX    = 1 << 3     /**< Target MAX */
+    MV_INFERENCE_TARGET_NONE = -1,  /**< None */
+    MV_INFERENCE_TARGET_CPU,        /**< CPU */
+    MV_INFERENCE_TARGET_GPU,        /**< GPU */
+    MV_INFERENCE_TARGET_CUSTOM,     /**< CUSTOM */
+    MV_INFERENCE_TARGET_MAX         /**< Target MAX */
 } mv_inference_target_type_e;
 
+/**
+ * @brief Enumeration for inference target devices.
+ *
+ * @since_tizen 6.0
+ *
+ */
+typedef enum {
+    MV_INFERENCE_NONE   = 0,       /**< None */
+    MV_INFERENCE_CPU    = 1 << 0,  /**< CPU */
+    MV_INFERENCE_GPU    = 1 << 1,  /**< GPU */
+    MV_INFERENCE_CUSTOM = 1 << 2,  /**< CUSTOM */
+    MV_INFERENCE_MAX    = 1 << 3   /**< Target MAX */
+} mv_inference_target_device_e;
+
 /**
  * @brief Enumeration for input data type.
  *
index 9cd580d1c1d69a200a9cbb4dc96502c5e6167ec9..16e84b005b58c3887951a09e1f15d63fd4e2fcc4 100755 (executable)
@@ -169,11 +169,20 @@ public:
        int ConfigureBackendType(const mv_inference_backend_type_e backendType);
 
        /**
-        * @brief   Configure inference target types such as CPU, GPU or NPU. (one more types can be combined)
+        * @brief   Configure an inference target device type such as CPU, GPU or NPU. (only one type can be set)
+        * @details Internally, the given legacy type is converted to the new type.
+        *          This function exists only for backward compatibility.
+        *
+        * @since_tizen 6.0 (Deprecated)
+        */
+       int ConfigureTargetTypes(const int targetType);
+
+       /**
+        * @brief   Configure inference target devices such as CPU, GPU or NPU. (one or more types can be combined)
         *
         * @since_tizen 6.0
         */
-       int ConfigureTargetTypes(const int targetTypes);
+       int ConfigureTargetDevices(const int targetDevices);
 
        /**
         * @brief   Configure the maximum number of inference results
index f207d7c14ef359f0f1977803327842695a699869..730e7530b9c92965b604d572b1c1035834925372 100755 (executable)
@@ -199,11 +199,11 @@ int Inference::ConvertTargetTypes(int given_types)
 {
        int target_types = INFERENCE_TARGET_NONE;
 
-       if (given_types & MV_INFERENCE_TARGET_CPU)
+       if (given_types & MV_INFERENCE_CPU)
                target_types |= INFERENCE_TARGET_CPU;
-       if (given_types & MV_INFERENCE_TARGET_GPU)
+       if (given_types & MV_INFERENCE_GPU)
                target_types |= INFERENCE_TARGET_GPU;
-       if (given_types & INFERENCE_TARGET_CUSTOM)
+       if (given_types & MV_INFERENCE_CUSTOM)
                target_types |= INFERENCE_TARGET_CUSTOM;
 
        return target_types;
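
A quick worked example of the mapping above, assuming the enumeration values from mv_inference_type.h:

	/* given_types = MV_INFERENCE_CPU | MV_INFERENCE_GPU = 0x1 | 0x2 = 0x3
	 *   0x3 & MV_INFERENCE_CPU    -> sets INFERENCE_TARGET_CPU
	 *   0x3 & MV_INFERENCE_GPU    -> sets INFERENCE_TARGET_GPU
	 *   0x3 & MV_INFERENCE_CUSTOM -> zero, so the CUSTOM bit stays clear
	 * result: INFERENCE_TARGET_CPU | INFERENCE_TARGET_GPU
	 */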
@@ -423,17 +423,47 @@ int Inference::ConfigureBackendType(const mv_inference_backend_type_e backendTyp
        return MEDIA_VISION_ERROR_NONE;
 }
 
-int Inference::ConfigureTargetTypes(const int targetTypes)
+int Inference::ConfigureTargetTypes(const int targetType)
 {
        // Check if given target types are valid or not.
-       if (MV_INFERENCE_TARGET_NONE >= targetTypes || MV_INFERENCE_TARGET_MAX <= targetTypes) {
+       if (MV_INFERENCE_TARGET_NONE >= targetType || MV_INFERENCE_TARGET_MAX <= targetType) {
                LOGE("Invalid target device.");
                return MEDIA_VISION_ERROR_INVALID_PARAMETER;
        }
 
-       LOGI("target devices : %d", targetTypes);
+       unsigned int new_type = MV_INFERENCE_NONE;
 
-       mConfig.mTargetTypes = targetTypes;
+       // Convert old type to new one.
+       switch (targetType) {
+       case MV_INFERENCE_TARGET_CPU:
+               new_type = MV_INFERENCE_CPU;
+               break;
+       case MV_INFERENCE_TARGET_GPU:
+               new_type = MV_INFERENCE_GPU;
+               break;
+       case MV_INFERENCE_TARGET_CUSTOM:
+               new_type = MV_INFERENCE_CUSTOM;
+               break;
+       }
+
+       LOGI("target device : %u", new_type);
+
+       mConfig.mTargetTypes = new_type;
+
+       return MEDIA_VISION_ERROR_NONE;
+}
+
+int Inference::ConfigureTargetDevices(const int targetDevices)
+{
+       // Check if the given target devices are valid or not.
+       if (MV_INFERENCE_NONE >= targetDevices || MV_INFERENCE_MAX <= targetDevices) {
+               LOGE("Invalid target device.");
+               return MEDIA_VISION_ERROR_INVALID_PARAMETER;
+       }
+
+       LOGI("target devices : %d", targetDevices);
+
+       mConfig.mTargetTypes = targetDevices;
 
        return MEDIA_VISION_ERROR_NONE;
 }
@@ -740,21 +770,21 @@ int Inference::Prepare(void)
        LOGI("threshold %.4f", mThreshold);
 
        // Check if backend supports a given target device/devices or not.
-       if (mConfig.mTargetTypes & MV_INFERENCE_TARGET_CPU) {
+       if (mConfig.mTargetTypes & MV_INFERENCE_CPU) {
                if (!(mBackendCapacity.supported_accel_devices & INFERENCE_TARGET_CPU)) {
                        LOGE("Backend doesn't support CPU device as an accelerator.");
                        return MEDIA_VISION_ERROR_INVALID_PARAMETER;
                }
        }
 
-       if (mConfig.mTargetTypes & MV_INFERENCE_TARGET_GPU) {
+       if (mConfig.mTargetTypes & MV_INFERENCE_GPU) {
                if (!(mBackendCapacity.supported_accel_devices & INFERENCE_TARGET_GPU)) {
                        LOGE("Backend doesn't support CPU device as an accelerator.");
                        return MEDIA_VISION_ERROR_INVALID_PARAMETER;
                }
        }
 
-       if (mConfig.mTargetTypes & MV_INFERENCE_TARGET_CUSTOM) {
+       if (mConfig.mTargetTypes & MV_INFERENCE_CUSTOM) {
                if (!(mBackendCapacity.supported_accel_devices & INFERENCE_TARGET_CUSTOM)) {
                        LOGE("Backend doesn't support CPU device as an accelerator.");
                        return MEDIA_VISION_ERROR_INVALID_PARAMETER;
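
The three checks above are plain mask intersections; a hedged sketch with made-up capacity values:

	/* Suppose the backend reported CPU and GPU support only. */
	unsigned int supported = INFERENCE_TARGET_CPU | INFERENCE_TARGET_GPU;

	/* Then a request containing MV_INFERENCE_CUSTOM fails its check,
	 * because (supported & INFERENCE_TARGET_CUSTOM) == 0. */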
index 77be31435d698fb705ec2b1a4ee679cb0ef16900..68e891b23c145aef1a097d5e63c4c6c34b8d03e1 100755 (executable)
 
 using namespace mediavision::inference;
 
+// A config that sets MV_INFERENCE_INPUT_DATA_TYPE is treated as one written
+// against the new (Tizen 6.0) API, because old configurations never set it.
+static bool is_new_mv_inference_engine(mv_engine_config_h engine_config)
+{
+       int dataType = 0;
+
+       int ret = mv_engine_config_get_int_attribute(engine_config,
+                                                                                       MV_INFERENCE_INPUT_DATA_TYPE,
+                                                                                       &dataType);
+
+       return ret == MEDIA_VISION_ERROR_NONE;
+}
+
 mv_engine_config_h mv_inference_get_engine_config(mv_inference_h infer)
 {
        Inference *pInfer = static_cast<Inference *>(infer);
@@ -151,10 +165,23 @@ int mv_inference_configure_model_open(mv_inference_h infer, mv_engine_config_h e
                goto _ERROR_;
        }
 
-       ret = pInfer->ConfigureTargetTypes(targetTypes);
-       if (ret != MEDIA_VISION_ERROR_NONE) {
-               LOGE("Tried to configure invalid target types.");
-               goto _ERROR_;
+       // Check whether the engine config was written against the new API or
+       // the old one. The new API uses the mv_inference_target_device_e
+       // enumeration, whose values differ from the old mv_inference_target_type_e
+       // values, so a legacy value given by the user has to be converted;
+       // that conversion is done inside ConfigureTargetTypes().
+       if (!is_new_mv_inference_engine(engine_config)) {
+               ret = pInfer->ConfigureTargetTypes(targetTypes);
+               if (ret != MEDIA_VISION_ERROR_NONE) {
+                       LOGE("Tried to configure invalid target types.");
+                       goto _ERROR_;
+               }
+       } else {
+               ret = pInfer->ConfigureTargetDevices(targetTypes);
+               if (ret != MEDIA_VISION_ERROR_NONE) {
+                       LOGE("Tried to configure invalid target types.");
+                       goto _ERROR_;
+               }
        }
 
        pInfer->ConfigureModelFiles(std::string(modelConfigFilePath),
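
Taken together, the open layer gives old and new applications different paths. A hedged end-to-end sketch (attribute keys as used in this patch; MV_INFERENCE_DATA_FLOAT32 is assumed to be one of the input data type values; error handling omitted):

	mv_engine_config_h cfg = NULL;
	mv_create_engine_config(&cfg);

	/* Legacy (Tizen 5.5 style) config: MV_INFERENCE_INPUT_DATA_TYPE is never
	 * set, so mv_inference_configure_model_open() routes the target value
	 * through ConfigureTargetTypes(), which converts it to the new enum. */
	mv_engine_config_set_int_attribute(cfg, MV_INFERENCE_TARGET_TYPE,
	                                   MV_INFERENCE_TARGET_GPU);

	/* New (Tizen 6.0 style) config: setting the input data type marks the
	 * config as new, so the target value is taken as a device bitmask and
	 * handled by ConfigureTargetDevices(). */
	mv_engine_config_set_int_attribute(cfg, MV_INFERENCE_INPUT_DATA_TYPE,
	                                   MV_INFERENCE_DATA_FLOAT32);
	mv_engine_config_set_int_attribute(cfg, MV_INFERENCE_TARGET_TYPE,
	                                   MV_INFERENCE_CPU | MV_INFERENCE_GPU);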