[MachineLearning.Inference] NotSupportedException support (#1122)
authorSangjung Woo <sangjung.woo@samsung.com>
Wed, 13 Nov 2019 05:51:36 +0000 (14:51 +0900)
committerWonYoung Choi <wy80.choi@samsung.com>
Wed, 13 Nov 2019 05:51:36 +0000 (14:51 +0900)
If the machine_learning.inference feature is false or does not exist, or
NNStreamer is not installed on the target device, a NotSupportedException
is thrown when calling the C# API of MachineLearning Inference.

Signed-off-by: Sangjung Woo <sangjung.woo@samsung.com>
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference.csproj
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference.sln
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/Commons.cs
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/SingleShot.cs
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsData.cs
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsInfo.cs

index ccca1d1..969ce76 100755 (executable)
@@ -7,6 +7,7 @@
   <ItemGroup>
     <ProjectReference Include="..\Tizen.Log\Tizen.Log.csproj" />
     <ProjectReference Include="..\Tizen\Tizen.csproj" />
+    <ProjectReference Include="..\Tizen.System.Information\Tizen.System.Information.csproj" />
   </ItemGroup>
 
 </Project>
index 02b0ae5..f25b7ec 100755 (executable)
@@ -6,6 +6,7 @@ MinimumVisualStudioVersion = 15.0.26124.0
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.MachineLearning.Inference", "Tizen.MachineLearning.Inference.csproj", "{AC675801-2A5D-4346-BFD3-3A9809EB9767}"
        ProjectSection(ProjectDependencies) = postProject
                {5BC75930-86EF-4A1B-BC26-BC8109773F9A} = {5BC75930-86EF-4A1B-BC26-BC8109773F9A}
+               {70F72761-B9AD-4F64-BD0B-096B71E36016} = {70F72761-B9AD-4F64-BD0B-096B71E36016}
                {12E4988C-94E5-45BD-89FF-011970716A18} = {12E4988C-94E5-45BD-89FF-011970716A18}
        EndProjectSection
 EndProject
@@ -13,6 +14,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen", "..\Tizen\Tizen.csp
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.Log", "..\Tizen.Log\Tizen.Log.csproj", "{12E4988C-94E5-45BD-89FF-011970716A18}"
 EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.System.Information", "..\Tizen.System.Information\Tizen.System.Information.csproj", "{70F72761-B9AD-4F64-BD0B-096B71E36016}"
+EndProject
 Global
        GlobalSection(SolutionConfigurationPlatforms) = preSolution
                Debug|Any CPU = Debug|Any CPU
@@ -59,6 +62,18 @@ Global
                {12E4988C-94E5-45BD-89FF-011970716A18}.Release|x64.Build.0 = Release|Any CPU
                {12E4988C-94E5-45BD-89FF-011970716A18}.Release|x86.ActiveCfg = Release|Any CPU
                {12E4988C-94E5-45BD-89FF-011970716A18}.Release|x86.Build.0 = Release|Any CPU
+               {70F72761-B9AD-4F64-BD0B-096B71E36016}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+               {70F72761-B9AD-4F64-BD0B-096B71E36016}.Debug|Any CPU.Build.0 = Debug|Any CPU
+               {70F72761-B9AD-4F64-BD0B-096B71E36016}.Debug|x64.ActiveCfg = Debug|Any CPU
+               {70F72761-B9AD-4F64-BD0B-096B71E36016}.Debug|x64.Build.0 = Debug|Any CPU
+               {70F72761-B9AD-4F64-BD0B-096B71E36016}.Debug|x86.ActiveCfg = Debug|Any CPU
+               {70F72761-B9AD-4F64-BD0B-096B71E36016}.Debug|x86.Build.0 = Debug|Any CPU
+               {70F72761-B9AD-4F64-BD0B-096B71E36016}.Release|Any CPU.ActiveCfg = Release|Any CPU
+               {70F72761-B9AD-4F64-BD0B-096B71E36016}.Release|Any CPU.Build.0 = Release|Any CPU
+               {70F72761-B9AD-4F64-BD0B-096B71E36016}.Release|x64.ActiveCfg = Release|Any CPU
+               {70F72761-B9AD-4F64-BD0B-096B71E36016}.Release|x64.Build.0 = Release|Any CPU
+               {70F72761-B9AD-4F64-BD0B-096B71E36016}.Release|x86.ActiveCfg = Release|Any CPU
+               {70F72761-B9AD-4F64-BD0B-096B71E36016}.Release|x86.Build.0 = Release|Any CPU
        EndGlobalSection
        GlobalSection(SolutionProperties) = preSolution
                HideSolutionNode = FALSE
index d74aad1..25308f8 100755 (executable)
@@ -16,6 +16,7 @@
 
 using System;
 using System.IO;
+using Tizen.System;
 
 namespace Tizen.MachineLearning.Inference
 {
@@ -163,6 +164,10 @@ namespace Tizen.MachineLearning.Inference
     {
         internal const string TAG = "ML.Inference";
 
+        internal const string FeatureKey = "http://tizen.org/feature/machine_learning.inference";
+
+        private static int _alreadyChecked = -1;    /* -1: not yet, 0: Not Support, 1: Support */
+
         internal static void CheckException(NNStreamerError error, string msg)
         {
             if (error != NNStreamerError.None)
@@ -171,6 +176,44 @@ namespace Tizen.MachineLearning.Inference
                 throw NNStreamerExceptionFactory.CreateException(error, msg);
             }
         }
+
+        internal static void CheckNNStreamerSupport()
+        {
+            if (_alreadyChecked == 1)
+                return;
+
+            string msg = "Machine Learning Inference Feature is not supported.";
+            if (_alreadyChecked == 0)
+            {
+                Log.Error(NNStreamer.TAG, msg);
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.NotSupported, msg);
+            }
+
+            /* Feature Key check */
+            bool isSupported = false;
+            bool error = Information.TryGetValue<bool>(FeatureKey, out isSupported);
+            if (!error || !isSupported)
+            {
+                _alreadyChecked = 0;
+
+                Log.Error(NNStreamer.TAG, msg);
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.NotSupported, msg);
+            }
+
+            /* Check required so files */
+            try
+            {
+                Interop.Util.CheckNNFWAvailability(NNFWType.TensorflowLite, HWType.CPU, out isSupported);
+            }
+            catch (DllNotFoundException)
+            {
+                _alreadyChecked = 0;
+                Log.Error(NNStreamer.TAG, msg);
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.NotSupported, msg);
+            }
+
+            _alreadyChecked = 1;
+        }
     }
 
     internal class NNStreamerExceptionFactory
index e3c2d48..b6fc121 100755 (executable)
@@ -43,6 +43,14 @@ namespace Tizen.MachineLearning.Inference
         /// <since_tizen> 6 </since_tizen>
         public SingleShot(string modelAbsPath, TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo)
         {
+            NNStreamer.CheckNNStreamerSupport();
+
+            if (inTensorsInfo == null || outTensorsInfo == null)
+            {
+                string msg = "TensorsInfo is null";
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
+            }
+
             CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo);
         }
 
@@ -79,9 +87,15 @@ namespace Tizen.MachineLearning.Inference
         public TensorsData Invoke(TensorsData inTensorsData)
         {
             TensorsData out_data;
-            IntPtr out_ptr;
+            IntPtr out_ptr = IntPtr.Zero;
             NNStreamerError ret = NNStreamerError.None;
 
+            if (inTensorsData == null)
+            {
+                string msg = "TensorsData is null";
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
+            }
+
             ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.Handle, out out_ptr);
             NNStreamer.CheckException(ret, "fail to invoke the single inference engine");
 
index 3ba452e..ddabbb9 100755 (executable)
@@ -15,6 +15,7 @@
 */
 
 using System;
+using System.IO;
 
 namespace Tizen.MachineLearning.Inference
 {
@@ -63,11 +64,15 @@ namespace Tizen.MachineLearning.Inference
         public int Count
         {
             get {
+                NNStreamer.CheckNNStreamerSupport();
+
                 if (_count != Tensor.InvalidCount)
                     return _count;
 
                 NNStreamerError ret = NNStreamerError.None;
-                ret = Interop.Util.GetTensorsCount(_handle, out int count);
+                int count = 0;
+
+                ret = Interop.Util.GetTensorsCount(_handle, out count);
                 NNStreamer.CheckException(ret, "unable to get the count of TensorsData");
 
                 _count = count;
@@ -88,6 +93,14 @@ namespace Tizen.MachineLearning.Inference
         {
             NNStreamerError ret = NNStreamerError.None;
 
+            NNStreamer.CheckNNStreamerSupport();
+
+            if (buffer == null)
+            {
+                string msg = "buffer is null";
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
+            }
+
             ret = Interop.Util.SetTensorData(_handle, index, buffer, buffer.Length);
             NNStreamer.CheckException(ret, "unable to set the buffer of TensorsData: " + index.ToString());
         }
@@ -103,15 +116,18 @@ namespace Tizen.MachineLearning.Inference
         /// <since_tizen> 6 </since_tizen>
         public byte[] GetTensorData(int index)
         {
-            byte[] retBuffer;
+            byte[] retBuffer = null;
             IntPtr raw_data;
             int size;
-
             NNStreamerError ret = NNStreamerError.None;
+
+            NNStreamer.CheckNNStreamerSupport();
+
             ret = Interop.Util.GetTensorData(_handle, index, out raw_data, out size);
             NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + index.ToString());
 
             retBuffer = Interop.Util.IntPtrToByteArray(raw_data, size);
+
             return retBuffer;
         }
 
@@ -142,8 +158,7 @@ namespace Tizen.MachineLearning.Inference
             // release unmanaged objects
             if (_handle != IntPtr.Zero)
             {
-                NNStreamerError ret = NNStreamerError.None;
-                ret = Interop.Util.DestroyTensorsData(_handle);
+                NNStreamerError ret = Interop.Util.DestroyTensorsData(_handle);
                 if (ret != NNStreamerError.None)
                 {
                     Log.Error(NNStreamer.TAG, "failed to destroy TensorsData object");
index 4986b57..53c718f 100755 (executable)
@@ -39,9 +39,13 @@ namespace Tizen.MachineLearning.Inference
         /// <summary>
         /// Creates a TensorsInfo instance.
         /// </summary>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
         /// <since_tizen> 6 </since_tizen>
         public TensorsInfo()
         {
+            NNStreamer.CheckNNStreamerSupport();
+
             Log.Info(NNStreamer.TAG, "TensorsInfo is created");
             _infoList = new List<TensorInfo>();
         }
@@ -67,6 +71,8 @@ namespace Tizen.MachineLearning.Inference
         /// <since_tizen> 6 </since_tizen>
         public void AddTensorInfo(TensorType type, int[] dimension)
         {
+            NNStreamer.CheckNNStreamerSupport();
+
             AddTensorInfo(null, type, dimension);
         }
 
@@ -83,6 +89,8 @@ namespace Tizen.MachineLearning.Inference
         /// <since_tizen> 6 </since_tizen>
         public void AddTensorInfo(string name, TensorType type, int[] dimension)
         {
+            NNStreamer.CheckNNStreamerSupport();
+
             int idx = _infoList.Count;
             if (idx >= Tensor.SizeLimit) {
                 throw new IndexOutOfRangeException("Max size of the tensors is " + Tensor.SizeLimit);
@@ -93,11 +101,9 @@ namespace Tizen.MachineLearning.Inference
             {
                 NNStreamerError ret = NNStreamerError.None;
 
-                /* Set the number of tensors */
                 ret = Interop.Util.SetTensorsCount(_handle, _infoList.Count);
                 NNStreamer.CheckException(ret, "unable to set the number of tensors");
 
-                /* Set the type and dimension of Tensor */
                 ret = Interop.Util.SetTensorType(_handle, idx, type);
                 NNStreamer.CheckException(ret, "fail to set TensorsInfo type");
 
@@ -118,6 +124,8 @@ namespace Tizen.MachineLearning.Inference
         /// <since_tizen> 6 </since_tizen>
         public void SetTensorName(int idx, string name)
         {
+            NNStreamer.CheckNNStreamerSupport();
+
             CheckIndexBoundary(idx);
             _infoList[idx].Name = name;
 
@@ -135,9 +143,12 @@ namespace Tizen.MachineLearning.Inference
         /// <param name="idx">The index of the tensor.</param>
         /// <returns>The tensor name.</returns>
         /// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
         /// <since_tizen> 6 </since_tizen>
         public string GetTensorName(int idx)
         {
+            NNStreamer.CheckNNStreamerSupport();
+
             CheckIndexBoundary(idx);
             return _infoList[idx].Name;
         }
@@ -154,6 +165,8 @@ namespace Tizen.MachineLearning.Inference
         /// <since_tizen> 6 </since_tizen>
         public void SetTensorType(int idx, TensorType type)
         {
+            NNStreamer.CheckNNStreamerSupport();
+
             CheckIndexBoundary(idx);
             _infoList[idx].Type = type;
 
@@ -172,9 +185,12 @@ namespace Tizen.MachineLearning.Inference
         /// <returns>The tensor type</returns>
         /// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
         /// <since_tizen> 6 </since_tizen>
         public TensorType GetTensorType(int idx)
         {
+            NNStreamer.CheckNNStreamerSupport();
+
             CheckIndexBoundary(idx);
             return _infoList[idx].Type;
         }
@@ -191,6 +207,8 @@ namespace Tizen.MachineLearning.Inference
         /// <since_tizen> 6 </since_tizen>
         public void SetDimension(int idx, int[] dimension)
         {
+            NNStreamer.CheckNNStreamerSupport();
+
             CheckIndexBoundary(idx);
             _infoList[idx].SetDimension(dimension);
 
@@ -209,9 +227,12 @@ namespace Tizen.MachineLearning.Inference
         /// <returns>The tensor dimension.</returns>
         /// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
         /// <since_tizen> 6 </since_tizen>
         public int[] GetDimension(int idx)
         {
+            NNStreamer.CheckNNStreamerSupport();
+
             CheckIndexBoundary(idx);
             return _infoList[idx].Dimension;
         }
@@ -226,10 +247,12 @@ namespace Tizen.MachineLearning.Inference
         /// <since_tizen> 6 </since_tizen>
         public TensorsData GetTensorsData()
         {
-            IntPtr tensorsData_h;
+            IntPtr tensorsData_h = IntPtr.Zero;
             TensorsData retTensorData;
             NNStreamerError ret = NNStreamerError.None;
 
+            NNStreamer.CheckNNStreamerSupport();
+
             if (_handle == IntPtr.Zero)
             {
                 Log.Info(NNStreamer.TAG, "_handle is IntPtr.Zero\n" + "  GetTensorsInfoHandle() is called");
@@ -238,7 +261,6 @@ namespace Tizen.MachineLearning.Inference
 
             ret = Interop.Util.CreateTensorsData(_handle, out tensorsData_h);
             NNStreamer.CheckException(ret, "unable to create the tensorsData object");
-            Log.Info(NNStreamer.TAG, "success to CreateTensorsData()\n");
 
             retTensorData = TensorsData.CreateFromNativeHandle(tensorsData_h);
 
@@ -248,7 +270,7 @@ namespace Tizen.MachineLearning.Inference
         internal IntPtr GetTensorsInfoHandle()
         {
             NNStreamerError ret = NNStreamerError.None;
-            IntPtr ret_handle;
+            IntPtr ret_handle = IntPtr.Zero;
             int idx;
 
             /* Already created */
@@ -314,8 +336,8 @@ namespace Tizen.MachineLearning.Inference
             // release unmanaged objects
             if (_handle != IntPtr.Zero)
             {
-                NNStreamerError ret = NNStreamerError.None;
-                ret = Interop.Util.DestroyTensorsInfo(_handle);
+                NNStreamerError ret = Interop.Util.DestroyTensorsInfo(_handle);
+
                 if (ret != NNStreamerError.None)
                 {
                     Log.Error(NNStreamer.TAG, "failed to destroy TensorsInfo object");