private const string _keyModelStdValue = "MV_INFERENCE_MODEL_STD_VALUE";
private const string _keyBackendType = "MV_INFERENCE_BACKEND_TYPE";
private const string _keyTargetType = "MV_INFERENCE_TARGET_TYPE";
+ private const string _keyTargetDevice = "MV_INFERENCE_TARGET_DEVICE";
private const string _keyInputTensorWidth = "MV_INFERENCE_INPUT_TENSOR_WIDTH";
private const string _keyInputTensorHeight = "MV_INFERENCE_INPUT_TENSOR_HEIGHT";
private const string _keyInputTensorChannels = "MV_INFERENCE_INPUT_TENSOR_CHANNELS";
+ private const string _keyDataType = "MV_INFERENCE_INPUT_DATA_TYPE";
private const string _keyInputNodeName = "MV_INFERENCE_INPUT_NODE_NAME";
private const string _keyOutputNodeNames = "MV_INFERENCE_OUTPUT_NODE_NAMES";
private const string _keyOutputMaxNumber = "MV_INFERENCE_OUTPUT_MAX_NUMBER";
// The following strings are fixed in native and will not be changed.
private const string _backendTypeOpenCV = "opencv";
private const string _backendTypeTFLite = "tflite";
+ private const string _backendTypeArmNN = "armnn";
/// <summary>
/// Initializes a new instance of the <see cref="InferenceModelConfiguration"/> class.
case _backendTypeTFLite:
supportedBackend.Add(InferenceBackendType.TFLite);
break;
+ case _backendTypeArmNN:
+ supportedBackend.Add(InferenceBackendType.ArmNN);
+ break;
}
}
/// </remarks>
/// <exception cref="ArgumentException"><paramref name="value"/> is not valid.</exception>
/// <since_tizen> 6 </since_tizen>
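+ /// <example>
+ /// <code>
+ /// // Migration sketch (illustrative only): prefer the new Device property over Target.
+ /// // config.Target = InferenceTargetType.GPU;  // deprecated since API8
+ /// config.Device = InferenceTargetDevice.GPU;
+ /// </code>
+ /// </example>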
+ [Obsolete("Deprecated since API8; Will be removed in API10. Please use Device instead.")]
public InferenceTargetType Target
{
get
}
/// <summary>
+ /// Gets or sets the inference model's device.
+ /// </summary>
+ /// <remarks>
+ /// The default device is <see cref="InferenceTargetDevice.CPU"/>.<br/>
+ /// If the device does not support <see cref="InferenceTargetDevice.GPU"/> or <see cref="InferenceTargetDevice.Custom"/>,
+ /// <see cref="InferenceTargetDevice.CPU"/> will be used internally, regardless of the user's choice.
+ /// </remarks>
+ /// <exception cref="ArgumentException"><paramref name="value"/> is not valid.</exception>
+ /// <since_tizen> 8 </since_tizen>
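+ /// <example>
+ /// <code>
+ /// // A minimal sketch (illustrative only; assumes a default-constructed configuration):
+ /// var config = new InferenceModelConfiguration();
+ ///
+ /// // Request the GPU; CPU is used internally if the device does not support it.
+ /// config.Device = InferenceTargetDevice.GPU;
+ /// </code>
+ /// </example>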
+ public InferenceTargetDevice Device
+ {
+ get
+ {
+ return (InferenceTargetDevice)GetInt(_keyTargetDevice);
+ }
+ set
+ {
+ ValidationUtil.ValidateEnum(typeof(InferenceTargetDevice), value, nameof(Device));
+
+ Set(_keyTargetDevice, (int)value);
+ }
+ }
+
+ /// <summary>
/// Gets or sets the size of the inference model's tensor.
/// </summary>
/// <remarks>
}
/// <summary>
+ /// Gets or sets the inference model's input data type.
+ /// </summary>
+ /// <remarks>
+ /// For example, for a model that takes float32 input data, this value should be set to <see cref="InferenceDataType.Float32"/>.<br/>
+ /// <see cref="InferenceDataType.Float32"/> will be used internally unless the user sets this value.
+ /// </remarks>
+ /// <exception cref="ArgumentException"><paramref name="value"/> is not valid.</exception>
+ /// <since_tizen> 8 </since_tizen>
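+ /// <example>
+ /// <code>
+ /// // A minimal sketch (illustrative only; 'config' is an InferenceModelConfiguration instance):
+ /// config.DataType = InferenceDataType.UInt8; // e.g. for a model with quantized uint8 input
+ /// </code>
+ /// </example>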
+ public InferenceDataType DataType
+ {
+ get
+ {
+ return (InferenceDataType)GetInt(_keyDataType);
+ }
+ set
+ {
+ ValidationUtil.ValidateEnum(typeof(InferenceDataType), value, nameof(DataType));
+
+ Set(_keyDataType, (int)value);
+ }
+ }
+
+ /// <summary>
/// Gets or sets the name of an input node.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="value"/> is null.</exception>
* limitations under the License.
*/
+using System;
+
namespace Tizen.Multimedia.Vision
{
/// <summary>
/// <summary>
/// TensorFlow Lite backend type
/// </summary>
- TFLite
+ TFLite,
+
+ /// <summary>
+ /// ArmNN backend type
+ /// </summary>
+ ArmNN
}
/// <summary>
/// Specifies the type of target. It is used for running the inference backend.
/// </summary>
/// <since_tizen> 6 </since_tizen>
+ [Obsolete("Deprecated since API8; Will be removed in API10. Please use InferenceTargetDevice instead.")]
public enum InferenceTargetType
{
/// <summary>
/// </summary>
Custom
}
+
+ /// <summary>
+ /// Specifies the target device. It is used for running the inference backend.
+ /// </summary>
+ /// <since_tizen> 8 </since_tizen>
+ public enum InferenceTargetDevice
+ {
+ /// <summary>
+ /// CPU device
+ /// </summary>
+ CPU = 1 << 0,
+
+ /// <summary>
+ /// GPU device
+ /// </summary>
+ GPU = 1 << 1,
+
+ /// <summary>
+ /// Custom device
+ /// </summary>
+ Custom = 1 << 2
+ }
+
+ /// <summary>
+ /// Specifies the data type of the inference model's input tensor.
+ /// </summary>
+ /// <since_tizen> 8 </since_tizen>
+ public enum InferenceDataType
+ {
+ /// <summary>
+ /// Float 32 bit
+ /// </summary>
+ Float32,
+
+ /// <summary>
+ /// Unsigned Integer 8 bit
+ /// </summary>
+ UInt8
+ }
}
Microsoft Visual Studio Solution File, Format Version 12.00
-# Visual Studio 15
-VisualStudioVersion = 15.0.26730.12
+# Visual Studio Version 16
+VisualStudioVersion = 16.0.29926.136
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.Multimedia.Vision", "Tizen.Multimedia.Vision.csproj", "{2BF51BFD-D43A-45D8-BAD6-E41B12B775A5}"
EndProject
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.System.Information", "..\Tizen.System.Information\Tizen.System.Information.csproj", "{5F4C6E94-6FC5-4267-B4D6-5565E5979B78}"
EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.Applications.ComponentBased", "..\Tizen.Applications.ComponentBased\Tizen.Applications.ComponentBased.csproj", "{018A0C4C-8E6B-4C55-A440-EFF0D9403A19}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
{5F4C6E94-6FC5-4267-B4D6-5565E5979B78}.Debug|Any CPU.Build.0 = Debug|Any CPU
{5F4C6E94-6FC5-4267-B4D6-5565E5979B78}.Release|Any CPU.ActiveCfg = Release|Any CPU
{5F4C6E94-6FC5-4267-B4D6-5565E5979B78}.Release|Any CPU.Build.0 = Release|Any CPU
+ {018A0C4C-8E6B-4C55-A440-EFF0D9403A19}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {018A0C4C-8E6B-4C55-A440-EFF0D9403A19}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {018A0C4C-8E6B-4C55-A440-EFF0D9403A19}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {018A0C4C-8E6B-4C55-A440-EFF0D9403A19}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE