mv_inference: clean up ABI header files 70/231270/2
authorInki Dae <inki.dae@samsung.com>
Tue, 21 Apr 2020 00:45:03 +0000 (09:45 +0900)
committerInki Dae <inki.dae@samsung.com>
Wed, 22 Apr 2020 03:09:48 +0000 (12:09 +0900)
This patch cleans up the ABI header files,
mv_inference.h and mv_inference_type.h according to
ACR(ACR-1545) review comments below,
- Add a description to each constant of mv_inference_data_type_e
- Change constant names of mv_inference_target_device_e.
- Clean up some descriptions.
- Revive @since_tizen 5.5 tag
- Fixed description of #define MV_INFERENCE_TARGET_TYPE

Change-Id: Icd893710a04a550205c64a99f355ebf48825a584
Signed-off-by: Inki Dae <inki.dae@samsung.com>
include/mv_inference.h
include/mv_inference_type.h
mv_inference/inference/src/Inference.cpp

index f4bcb36f715a1a457d09735173a3edda98682fcc..e7ac0cc2c7017c6a5ed0cb77cd60feaa775cb6d3 100644 (file)
@@ -108,12 +108,12 @@ extern "C" {
  * @brief Defines #MV_INFERENCE_TARGET_TYPE to set the type used
  *        for device running attribute of the engine configuration.
  * @details Switches between CPU, GPU, or Custom:\n
- *          #MV_INFERENCE_TARGET_CPU(Deprecated),\n
- *          #MV_INFERENCE_TARGET_GPU(Deprecated),\n
- *          #MV_INFERENCE_TARGET_CUSTOM(Deprecated).\n
- *          #MV_INFERENCE_CPU,\n
- *          #MV_INFERENCE_GPU,\n
- *          #MV_INFERENCE_CUSTOM.\n
+ *          #MV_INFERENCE_TARGET_CPU (Deprecated),\n
+ *          #MV_INFERENCE_TARGET_GPU (Deprecated),\n
+ *          #MV_INFERENCE_TARGET_CUSTOM (Deprecated).\n
+ *          #MV_INFERENCE_TARGET_DEVICE_CPU,\n
+ *          #MV_INFERENCE_TARGET_DEVICE_GPU,\n
+ *          #MV_INFERENCE_TARGET_DEVICE_CUSTOM.\n
  *
  *          The default type is CPU. Please do not use deprecated types since Tizen 6.0.
  *                     Old ones have been deprecated.
index 0db4fb5d0489f3b43c8077c468f76b492a77d181..d51e0c1f517f0e40b219dc0561129f3b1ae23d61 100644 (file)
@@ -51,6 +51,8 @@ typedef enum {
  * @deprecated Deprecated since tizen 6.0. Use #mv_inference_target_device_e instead.
  * @brief Enumeration for inference target.
  *
+ * @since_tizen 5.5
+ *
  */
 typedef enum {
     MV_INFERENCE_TARGET_NONE = -1,     /**< None */
@@ -67,11 +69,11 @@ typedef enum {
  *
  */
 typedef enum {
-    MV_INFERENCE_NONE  = 0,            /**< None */
-    MV_INFERENCE_CPU   = 1 << 0,       /**< CPU */
-    MV_INFERENCE_GPU   = 1 << 1,       /**< GPU*/
-    MV_INFERENCE_CUSTOM        = 1 << 2,       /**< CUSTOM*/
-    MV_INFERENCE_MAX   = 1 << 3        /**< Target MAX */
+    MV_INFERENCE_TARGET_DEVICE_NONE = 0,               /**< None */
+    MV_INFERENCE_TARGET_DEVICE_CPU = 1 << 0,   /**< CPU */
+    MV_INFERENCE_TARGET_DEVICE_GPU = 1 << 1,   /**< GPU */
+    MV_INFERENCE_TARGET_DEVICE_CUSTOM = 1 << 2,        /**< CUSTOM */
+    MV_INFERENCE_TARGET_DEVICE_MAX = 1 << 3            /**< Target MAX */
 } mv_inference_target_device_e;
 
 /**
@@ -81,8 +83,8 @@ typedef enum {
  *
  */
 typedef enum {
-       MV_INFERENCE_DATA_FLOAT32 = 0,
-       MV_INFERENCE_DATA_UINT8
+       MV_INFERENCE_DATA_FLOAT32 = 0,  /**< Data type of a given pre-trained model is float. */
+       MV_INFERENCE_DATA_UINT8                 /**< Data type of a given pre-trained model is unsigned char. */
 } mv_inference_data_type_e;
 
 /**
index 08c9e22022dcc4c25d8fb7c38c5ac18cdc89e129..d75607db84ab72790b51e94e6761be62e84b8b1e 100755 (executable)
@@ -199,11 +199,11 @@ int Inference::ConvertTargetTypes(int given_types)
 {
        int target_types = INFERENCE_TARGET_NONE;
 
-       if (given_types & MV_INFERENCE_CPU)
+       if (given_types & MV_INFERENCE_TARGET_DEVICE_CPU)
                target_types |= INFERENCE_TARGET_CPU;
-       if (given_types & MV_INFERENCE_GPU)
+       if (given_types & MV_INFERENCE_TARGET_DEVICE_GPU)
                target_types |= INFERENCE_TARGET_GPU;
-       if (given_types & MV_INFERENCE_CUSTOM)
+       if (given_types & MV_INFERENCE_TARGET_DEVICE_CUSTOM)
                target_types |= INFERENCE_TARGET_CUSTOM;
 
        return target_types;
@@ -431,18 +431,18 @@ int Inference::ConfigureTargetTypes(const int targetType)
                return MEDIA_VISION_ERROR_INVALID_PARAMETER;
        }
 
-       unsigned int new_type = MV_INFERENCE_NONE;
+       unsigned int new_type = MV_INFERENCE_TARGET_DEVICE_NONE;
 
        // Convert old type to new one.
        switch (targetType) {
        case MV_INFERENCE_TARGET_CPU:
-               new_type = MV_INFERENCE_CPU;
+               new_type = MV_INFERENCE_TARGET_DEVICE_CPU;
                break;
        case MV_INFERENCE_TARGET_GPU:
-               new_type = MV_INFERENCE_GPU;
+               new_type = MV_INFERENCE_TARGET_DEVICE_GPU;
                break;
        case MV_INFERENCE_TARGET_CUSTOM:
-               new_type = MV_INFERENCE_CUSTOM;
+               new_type = MV_INFERENCE_TARGET_DEVICE_CUSTOM;
                break;
        }
 
@@ -456,7 +456,7 @@ int Inference::ConfigureTargetTypes(const int targetType)
 int Inference::ConfigureTargetDevices(const int targetDevices)
 {
        // Check if given target types are valid or not.
-       if (MV_INFERENCE_NONE >= targetDevices || MV_INFERENCE_MAX <= targetDevices) {
+       if (MV_INFERENCE_TARGET_DEVICE_NONE >= targetDevices || MV_INFERENCE_TARGET_DEVICE_MAX <= targetDevices) {
                LOGE("Invalid target device.");
                return MEDIA_VISION_ERROR_INVALID_PARAMETER;
        }
@@ -770,21 +770,21 @@ int Inference::Prepare(void)
        LOGI("threshold %.4f", mThreshold);
 
        // Check if backend supports a given target device/devices or not.
-       if (mConfig.mTargetTypes & MV_INFERENCE_CPU) {
+       if (mConfig.mTargetTypes & MV_INFERENCE_TARGET_DEVICE_CPU) {
                if (!(mBackendCapacity.supported_accel_devices & INFERENCE_TARGET_CPU)) {
                        LOGE("Backend doesn't support CPU device as an accelerator.");
                        return MEDIA_VISION_ERROR_INVALID_PARAMETER;
                }
        }
 
-       if (mConfig.mTargetTypes & MV_INFERENCE_GPU) {
+       if (mConfig.mTargetTypes & MV_INFERENCE_TARGET_DEVICE_GPU) {
                if (!(mBackendCapacity.supported_accel_devices & INFERENCE_TARGET_GPU)) {
                        LOGE("Backend doesn't support CPU device as an accelerator.");
                        return MEDIA_VISION_ERROR_INVALID_PARAMETER;
                }
        }
 
-       if (mConfig.mTargetTypes & MV_INFERENCE_CUSTOM) {
+       if (mConfig.mTargetTypes & MV_INFERENCE_TARGET_DEVICE_CUSTOM) {
                if (!(mBackendCapacity.supported_accel_devices & INFERENCE_TARGET_CUSTOM)) {
                        LOGE("Backend doesn't support CPU device as an accelerator.");
                        return MEDIA_VISION_ERROR_INVALID_PARAMETER;