<ItemGroup>
<ProjectReference Include="..\Tizen.Log\Tizen.Log.csproj" />
<ProjectReference Include="..\Tizen\Tizen.csproj" />
+ <ProjectReference Include="..\Tizen.System.Information\Tizen.System.Information.csproj" />
</ItemGroup>
</Project>
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.MachineLearning.Inference", "Tizen.MachineLearning.Inference.csproj", "{AC675801-2A5D-4346-BFD3-3A9809EB9767}"
ProjectSection(ProjectDependencies) = postProject
{5BC75930-86EF-4A1B-BC26-BC8109773F9A} = {5BC75930-86EF-4A1B-BC26-BC8109773F9A}
+ {70F72761-B9AD-4F64-BD0B-096B71E36016} = {70F72761-B9AD-4F64-BD0B-096B71E36016}
{12E4988C-94E5-45BD-89FF-011970716A18} = {12E4988C-94E5-45BD-89FF-011970716A18}
EndProjectSection
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.Log", "..\Tizen.Log\Tizen.Log.csproj", "{12E4988C-94E5-45BD-89FF-011970716A18}"
EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.System.Information", "..\Tizen.System.Information\Tizen.System.Information.csproj", "{70F72761-B9AD-4F64-BD0B-096B71E36016}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
{12E4988C-94E5-45BD-89FF-011970716A18}.Release|x64.Build.0 = Release|Any CPU
{12E4988C-94E5-45BD-89FF-011970716A18}.Release|x86.ActiveCfg = Release|Any CPU
{12E4988C-94E5-45BD-89FF-011970716A18}.Release|x86.Build.0 = Release|Any CPU
+ {70F72761-B9AD-4F64-BD0B-096B71E36016}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {70F72761-B9AD-4F64-BD0B-096B71E36016}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {70F72761-B9AD-4F64-BD0B-096B71E36016}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {70F72761-B9AD-4F64-BD0B-096B71E36016}.Debug|x64.Build.0 = Debug|Any CPU
+ {70F72761-B9AD-4F64-BD0B-096B71E36016}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {70F72761-B9AD-4F64-BD0B-096B71E36016}.Debug|x86.Build.0 = Debug|Any CPU
+ {70F72761-B9AD-4F64-BD0B-096B71E36016}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {70F72761-B9AD-4F64-BD0B-096B71E36016}.Release|Any CPU.Build.0 = Release|Any CPU
+ {70F72761-B9AD-4F64-BD0B-096B71E36016}.Release|x64.ActiveCfg = Release|Any CPU
+ {70F72761-B9AD-4F64-BD0B-096B71E36016}.Release|x64.Build.0 = Release|Any CPU
+ {70F72761-B9AD-4F64-BD0B-096B71E36016}.Release|x86.ActiveCfg = Release|Any CPU
+ {70F72761-B9AD-4F64-BD0B-096B71E36016}.Release|x86.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
using System;
using System.IO;
+using Tizen.System;
namespace Tizen.MachineLearning.Inference
{
    internal static class NNStreamer
    {
internal const string TAG = "ML.Inference";
+ internal const string FeatureKey = "http://tizen.org/feature/machine_learning.inference";
+
+ private static int _alreadyChecked = -1; /* -1: not checked yet, 0: not supported, 1: supported */
+
internal static void CheckException(NNStreamerError error, string msg)
{
if (error != NNStreamerError.None)
throw NNStreamerExceptionFactory.CreateException(error, msg);
}
+
+ internal static void CheckNNStreamerSupport()
+ {
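+ /* Return immediately if a previous check already succeeded. */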
+ if (_alreadyChecked == 1)
+ return;
+
+ string msg = "Machine Learning Inference Feature is not supported.";
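+ /* A previous check already failed; throw again without re-querying the platform. */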
+ if (_alreadyChecked == 0)
+ {
+ Log.Error(NNStreamer.TAG, msg);
+ throw NNStreamerExceptionFactory.CreateException(NNStreamerError.NotSupported, msg);
+ }
+
+ /* Check the platform feature key. */
+ bool isSupported = false;
+ bool featureChecked = Information.TryGetValue<bool>(FeatureKey, out isSupported);
+ if (!featureChecked || !isSupported)
+ {
+ _alreadyChecked = 0;
+
+ Log.Error(NNStreamer.TAG, msg);
+ throw NNStreamerExceptionFactory.CreateException(NNStreamerError.NotSupported, msg);
+ }
+
+ /* Check that the required native libraries (.so files) can be loaded. */
+ try
+ {
+ Interop.Util.CheckNNFWAvailability(NNFWType.TensorflowLite, HWType.CPU, out isSupported);
+ }
+ catch (DllNotFoundException)
+ {
+ _alreadyChecked = 0;
+ Log.Error(NNStreamer.TAG, msg);
+ throw NNStreamerExceptionFactory.CreateException(NNStreamerError.NotSupported, msg);
+ }
+
+ _alreadyChecked = 1;
+ }
}
internal class NNStreamerExceptionFactory
/// <since_tizen> 6 </since_tizen>
public SingleShot(string modelAbsPath, TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo)
{
+ NNStreamer.CheckNNStreamerSupport();
+
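+ /* Both tensor descriptions are required before creating the native single-shot handle. */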
+ if (inTensorsInfo == null || outTensorsInfo == null)
+ {
+ string msg = "TensorsInfo is null";
+ throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
+ }
+
CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo);
}
public TensorsData Invoke(TensorsData inTensorsData)
{
TensorsData out_data;
- IntPtr out_ptr;
+ IntPtr out_ptr = IntPtr.Zero;
NNStreamerError ret = NNStreamerError.None;
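+ /* Reject a null input before calling into the native inference engine. */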
+ if (inTensorsData == null)
+ {
+ string msg = "TensorsData is null";
+ throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
+ }
+
ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.Handle, out out_ptr);
NNStreamer.CheckException(ret, "failed to invoke the single inference engine");
*/
using System;
+using System.IO;
namespace Tizen.MachineLearning.Inference
{
public int Count
{
get {
+ NNStreamer.CheckNNStreamerSupport();
+
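+ /* Return the cached count if it was already fetched from the native handle. */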
if (_count != Tensor.InvalidCount)
return _count;
NNStreamerError ret = NNStreamerError.None;
- ret = Interop.Util.GetTensorsCount(_handle, out int count);
+ int count = 0;
+
+ ret = Interop.Util.GetTensorsCount(_handle, out count);
NNStreamer.CheckException(ret, "unable to get the count of TensorsData");
_count = count;
{
NNStreamerError ret = NNStreamerError.None;
+ NNStreamer.CheckNNStreamerSupport();
+
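+ /* The native setter cannot accept a null buffer. */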
+ if (buffer == null)
+ {
+ string msg = "buffer is null";
+ throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
+ }
+
ret = Interop.Util.SetTensorData(_handle, index, buffer, buffer.Length);
NNStreamer.CheckException(ret, "unable to set the buffer of TensorsData: " + index.ToString());
}
/// <since_tizen> 6 </since_tizen>
public byte[] GetTensorData(int index)
{
- byte[] retBuffer;
+ byte[] retBuffer = null;
IntPtr raw_data;
int size;
-
NNStreamerError ret = NNStreamerError.None;
+
+ NNStreamer.CheckNNStreamerSupport();
+
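+ /* Fetch the pointer to the native tensor buffer and its size. */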
ret = Interop.Util.GetTensorData(_handle, index, out raw_data, out size);
NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + index.ToString());
retBuffer = Interop.Util.IntPtrToByteArray(raw_data, size);
+
return retBuffer;
}
// release unmanaged objects
if (_handle != IntPtr.Zero)
{
- NNStreamerError ret = NNStreamerError.None;
- ret = Interop.Util.DestroyTensorsData(_handle);
+ NNStreamerError ret = Interop.Util.DestroyTensorsData(_handle);
if (ret != NNStreamerError.None)
{
Log.Error(NNStreamer.TAG, "failed to destroy TensorsData object");
/// <summary>
/// Creates a TensorsInfo instance.
/// </summary>
+ /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+ /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public TensorsInfo()
{
+ NNStreamer.CheckNNStreamerSupport();
+
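+ /* Only the managed list is allocated here; the native handle is created lazily in GetTensorsInfoHandle(). */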
Log.Info(NNStreamer.TAG, "TensorsInfo is created");
_infoList = new List<TensorInfo>();
}
/// <since_tizen> 6 </since_tizen>
public void AddTensorInfo(TensorType type, int[] dimension)
{
+ NNStreamer.CheckNNStreamerSupport();
+
AddTensorInfo(null, type, dimension);
}
/// <since_tizen> 6 </since_tizen>
public void AddTensorInfo(string name, TensorType type, int[] dimension)
{
+ NNStreamer.CheckNNStreamerSupport();
+
int idx = _infoList.Count;
if (idx >= Tensor.SizeLimit) {
throw new IndexOutOfRangeException("Max size of the tensors is " + Tensor.SizeLimit);
{
NNStreamerError ret = NNStreamerError.None;
- /* Set the number of tensors */
ret = Interop.Util.SetTensorsCount(_handle, _infoList.Count);
NNStreamer.CheckException(ret, "unable to set the number of tensors");
- /* Set the type and dimension of Tensor */
ret = Interop.Util.SetTensorType(_handle, idx, type);
NNStreamer.CheckException(ret, "failed to set the TensorsInfo type");
/// <since_tizen> 6 </since_tizen>
public void SetTensorName(int idx, string name)
{
+ NNStreamer.CheckNNStreamerSupport();
+
CheckIndexBoundary(idx);
_infoList[idx].Name = name;
/// <param name="idx">The index of the tensor.</param>
/// <returns>The tensor name.</returns>
/// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of tensors.</exception>
+ /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public string GetTensorName(int idx)
{
+ NNStreamer.CheckNNStreamerSupport();
+
CheckIndexBoundary(idx);
return _infoList[idx].Name;
}
/// <since_tizen> 6 </since_tizen>
public void SetTensorType(int idx, TensorType type)
{
+ NNStreamer.CheckNNStreamerSupport();
+
CheckIndexBoundary(idx);
_infoList[idx].Type = type;
/// <returns>The tensor type.</returns>
/// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of tensors.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+ /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public TensorType GetTensorType(int idx)
{
+ NNStreamer.CheckNNStreamerSupport();
+
CheckIndexBoundary(idx);
return _infoList[idx].Type;
}
/// <since_tizen> 6 </since_tizen>
public void SetDimension(int idx, int[] dimension)
{
+ NNStreamer.CheckNNStreamerSupport();
+
CheckIndexBoundary(idx);
_infoList[idx].SetDimension(dimension);
/// <returns>The tensor dimension.</returns>
/// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of tensors.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+ /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public int[] GetDimension(int idx)
{
+ NNStreamer.CheckNNStreamerSupport();
+
CheckIndexBoundary(idx);
return _infoList[idx].Dimension;
}
/// <since_tizen> 6 </since_tizen>
public TensorsData GetTensorsData()
{
- IntPtr tensorsData_h;
+ IntPtr tensorsData_h = IntPtr.Zero;
TensorsData retTensorData;
NNStreamerError ret = NNStreamerError.None;
+ NNStreamer.CheckNNStreamerSupport();
+
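+ /* Create the native info handle lazily if it does not exist yet. */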
if (_handle == IntPtr.Zero)
{
Log.Info(NNStreamer.TAG, "_handle is IntPtr.Zero; GetTensorsInfoHandle() is called");
ret = Interop.Util.CreateTensorsData(_handle, out tensorsData_h);
NNStreamer.CheckException(ret, "unable to create the tensorsData object");
- Log.Info(NNStreamer.TAG, "success to CreateTensorsData()\n");
retTensorData = TensorsData.CreateFromNativeHandle(tensorsData_h);
internal IntPtr GetTensorsInfoHandle()
{
NNStreamerError ret = NNStreamerError.None;
- IntPtr ret_handle;
+ IntPtr ret_handle = IntPtr.Zero;
int idx;
/* Already created */
// release unmanaged objects
if (_handle != IntPtr.Zero)
{
- NNStreamerError ret = NNStreamerError.None;
- ret = Interop.Util.DestroyTensorsInfo(_handle);
+ NNStreamerError ret = Interop.Util.DestroyTensorsInfo(_handle);
+
if (ret != NNStreamerError.None)
{
Log.Error(NNStreamer.TAG, "failed to destroy TensorsInfo object");