[MediaVision] Add and deprecate enumerations (#1580)
authorTae-Young Chung <taeyoung1484@gmail.com>
Mon, 11 May 2020 05:37:55 +0000 (14:37 +0900)
committerGitHub <noreply@github.com>
Mon, 11 May 2020 05:37:55 +0000 (14:37 +0900)
* [MediaVision] Add new enumerations

Signed-off-by: Tae-Young Chung <ty83.chung@samsung.com>
* [MediaVision] Use Obsolete tag

Signed-off-by: Tae-Young Chung <ty83.chung@samsung.com>
* [MediaVision] add _keyTargetDevice

To support compatibility with Tizen 5.5,
the native API introduces the new definition "MV_INFERENCE_TARGET_DEVICE".
This commit applies those changes.

Signed-off-by: Tae-Young Chung <ty83.chung@samsung.com>
* [MediaVision] Clarify the description of the obsolete enum and attribute

Signed-off-by: Tae-Young Chung <ty83.chung@samsung.com>
src/Tizen.Multimedia.Vision/MediaVision/InferenceModelConfiguration.cs [changed mode: 0644->0755]
src/Tizen.Multimedia.Vision/MediaVision/InferenceType.cs
src/Tizen.Multimedia.Vision/Tizen.Multimedia.Vision.sln

old mode 100644 (file)
new mode 100755 (executable)
index 2b53959..f0401a5
@@ -46,9 +46,11 @@ namespace Tizen.Multimedia.Vision
         private const string _keyModelStdValue = "MV_INFERENCE_MODEL_STD_VALUE";
         private const string _keyBackendType = "MV_INFERENCE_BACKEND_TYPE";
         private const string _keyTargetType = "MV_INFERENCE_TARGET_TYPE";
+        private const string _keyTargetDevice = "MV_INFERENCE_TARGET_DEVICE";
         private const string _keyInputTensorWidth = "MV_INFERENCE_INPUT_TENSOR_WIDTH";
         private const string _keyInputTensorHeight = "MV_INFERENCE_INPUT_TENSOR_HEIGHT";
         private const string _keyInputTensorChannels = "MV_INFERENCE_INPUT_TENSOR_CHANNELS";
+        private const string _keyDataType = "MV_INFERENCE_INPUT_DATA_TYPE";
         private const string _keyInputNodeName = "MV_INFERENCE_INPUT_NODE_NAME";
         private const string _keyOutputNodeNames = "MV_INFERENCE_OUTPUT_NODE_NAMES";
         private const string _keyOutputMaxNumber = "MV_INFERENCE_OUTPUT_MAX_NUMBER";
@@ -57,6 +59,7 @@ namespace Tizen.Multimedia.Vision
         // The following strings are fixed in native and will not be changed.
         private const string _backendTypeOpenCV = "opencv";
         private const string _backendTypeTFLite = "tflite";
+        private const string _backendTypeArmNN = "armnn";
 
         /// <summary>
         /// Initializes a new instance of the <see cref="InferenceModelConfiguration"/> class.
@@ -151,6 +154,9 @@ namespace Tizen.Multimedia.Vision
                         case _backendTypeTFLite:
                             supportedBackend.Add(InferenceBackendType.TFLite);
                             break;
+                        case _backendTypeArmNN:
+                            supportedBackend.Add(InferenceBackendType.ArmNN);
+                            break;
                     }
                 }
 
@@ -318,6 +324,7 @@ namespace Tizen.Multimedia.Vision
         /// </remarks>
         /// <exception cref="ArgumentException"><paramref name="value"/> is not valid.</exception>
         /// <since_tizen> 6 </since_tizen>
+        [Obsolete("Deprecated since API8; Will be removed in API10. Please use Device instead.")]
         public InferenceTargetType Target
         {
             get
@@ -333,6 +340,30 @@ namespace Tizen.Multimedia.Vision
         }
 
         /// <summary>
+        /// Gets or sets the inference model's device.
+        /// </summary>
+        /// <remarks>
+        /// The default device is <see cref="InferenceTargetDevice.CPU"/>.<br/>
+        /// If a device doesn't support <see cref="InferenceTargetDevice.GPU"/> or <see cref="InferenceTargetDevice.Custom"/>,
+        /// <see cref="InferenceTargetDevice.CPU"/> will be used internally, regardless of the user's choice.
+        /// </remarks>
+        /// <exception cref="ArgumentException"><paramref name="value"/> is not valid.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public InferenceTargetDevice Device
+        {
+            get
+            {
+                return (InferenceTargetDevice)GetInt(_keyTargetDevice);
+            }
+            set
+            {
+                ValidationUtil.ValidateEnum(typeof(InferenceTargetDevice), value, nameof(Device));
+
+                Set(_keyTargetDevice, (int)value);
+            }
+        }
+
+        /// <summary>
         /// Gets or sets the size of inference model's tensor.
         /// </summary>
         /// <remarks>
@@ -403,6 +434,29 @@ namespace Tizen.Multimedia.Vision
         }
 
         /// <summary>
+        /// Gets or sets the inference model's input data type.
+        /// </summary>
+        /// <remarks>
+        /// For example, for a model data supporting float32 this value should be set to <see cref="InferenceDataType.Float32"/>.<br/>
+        /// <see cref="InferenceDataType.Float32"/> will be used internally if the user doesn't set the value.
+        /// </remarks>
+        /// <exception cref="ArgumentException"><paramref name="value"/> is not valid.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public InferenceDataType DataType
+        {
+            get
+            {
+                return (InferenceDataType)GetInt(_keyDataType);
+            }
+            set
+            {
+                ValidationUtil.ValidateEnum(typeof(InferenceDataType), value, nameof(DataType));
+
+                Set(_keyDataType, (int)value);
+            }
+        }
+
+        /// <summary>
         /// Gets or sets the name of an input node
         /// </summary>
         /// <exception cref="ArgumentNullException"><paramref name="value"/> is null.</exception>
index d2519f7..4c4568a 100755 (executable)
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+using System;
+
 namespace Tizen.Multimedia.Vision
 {
     /// <summary>
@@ -30,13 +32,19 @@ namespace Tizen.Multimedia.Vision
         /// <summary>
         /// Tensor Flow Lite backend type
         /// </summary>
-        TFLite
+        TFLite,
+
+        /// <summary>
+        /// ArmNN backend type
+        /// </summary>
+        ArmNN
     }
 
     /// <summary>
     /// Specifies the type of target. It's used for running inference backend.
     /// </summary>
     /// <since_tizen> 6 </since_tizen>
+    [Obsolete("Deprecated since API8; Will be removed in API10. Please use InferenceTargetDevice instead.")]
     public enum InferenceTargetType
     {
         /// <summary>
@@ -54,4 +62,43 @@ namespace Tizen.Multimedia.Vision
         /// </summary>
         Custom
     }
+
+    /// <summary>
+    /// Specifies the target device. It's used for running the inference backend.
+    /// </summary>
+    /// <since_tizen> 8 </since_tizen>
+    public enum InferenceTargetDevice
+    {
+        /// <summary>
+        /// CPU device
+        /// </summary>
+        CPU = 1 << 0,
+
+        /// <summary>
+        /// GPU device
+        /// </summary>
+        GPU = 1 << 1,
+
+        /// <summary>
+        /// Custom device
+        /// </summary>
+        Custom = 1 << 2
+    }
+
+    /// <summary>
+    /// Specifies the data type.
+    /// </summary>
+    /// <since_tizen> 8 </since_tizen>
+    public enum InferenceDataType
+    {
+        /// <summary>
+        /// Float 32 bit
+        /// </summary>
+        Float32,
+
+        /// <summary>
+        /// Unsigned Integer 8 bit
+        /// </summary>
+        UInt8
+    }
 }
index f0a5351..0f5ace6 100755 (executable)
@@ -1,6 +1,6 @@
 Microsoft Visual Studio Solution File, Format Version 12.00
-# Visual Studio 15
-VisualStudioVersion = 15.0.26730.12
+# Visual Studio Version 16
+VisualStudioVersion = 16.0.29926.136
 MinimumVisualStudioVersion = 10.0.40219.1
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.Multimedia.Vision", "Tizen.Multimedia.Vision.csproj", "{2BF51BFD-D43A-45D8-BAD6-E41B12B775A5}"
 EndProject
@@ -20,6 +20,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.Log", "..\Tizen.Log\T
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.System.Information", "..\Tizen.System.Information\Tizen.System.Information.csproj", "{5F4C6E94-6FC5-4267-B4D6-5565E5979B78}"
 EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.Applications.ComponentBased", "..\Tizen.Applications.ComponentBased\Tizen.Applications.ComponentBased.csproj", "{018A0C4C-8E6B-4C55-A440-EFF0D9403A19}"
+EndProject
 Global
        GlobalSection(SolutionConfigurationPlatforms) = preSolution
                Debug|Any CPU = Debug|Any CPU
@@ -62,6 +64,10 @@ Global
                {5F4C6E94-6FC5-4267-B4D6-5565E5979B78}.Debug|Any CPU.Build.0 = Debug|Any CPU
                {5F4C6E94-6FC5-4267-B4D6-5565E5979B78}.Release|Any CPU.ActiveCfg = Release|Any CPU
                {5F4C6E94-6FC5-4267-B4D6-5565E5979B78}.Release|Any CPU.Build.0 = Release|Any CPU
+               {018A0C4C-8E6B-4C55-A440-EFF0D9403A19}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+               {018A0C4C-8E6B-4C55-A440-EFF0D9403A19}.Debug|Any CPU.Build.0 = Debug|Any CPU
+               {018A0C4C-8E6B-4C55-A440-EFF0D9403A19}.Release|Any CPU.ActiveCfg = Release|Any CPU
+               {018A0C4C-8E6B-4C55-A440-EFF0D9403A19}.Release|Any CPU.Build.0 = Release|Any CPU
        EndGlobalSection
        GlobalSection(SolutionProperties) = preSolution
                HideSolutionNode = FALSE