[MachineLearning.Inference] Update SingleShot and its related classes (#1154)
author Sangjung Woo <sangjung.woo@samsung.com>
Wed, 12 Feb 2020 07:49:22 +0000 (16:49 +0900)
committer GitHub <noreply@github.com>
Wed, 12 Feb 2020 07:49:22 +0000 (16:49 +0900)
SingleShot class
- Supports various neural network frameworks such as TensorFlow,
  TensorFlowLite, Custom Filter or NNFW (default: Any)
- Supports various HW resources such as CPU, GPU or NPU (default: Any)
- Loads a neural network model without input/output TensorsInfo
  (dynamic mode, or when the model file itself provides the
  input/output information)
- Adds the `Input` property for the input tensors information
- Adds the `Output` property for the output tensors information
- Adds SetTimeout() to set the maximum amount of time to wait for an
  output (see the usage sketch below)
- Removes `IOException` since it does not occur
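
A minimal usage sketch of the updated API (the model path, tensor names
and dimensions below are illustrative only; the enum members follow the
framework and HW lists above):

    using Tizen.MachineLearning.Inference;

    // Input/output information for a hypothetical image classifier.
    var inInfo = new TensorsInfo();
    inInfo.AddTensorInfo("input", TensorType.UInt8, new int[] { 3, 224, 224, 1 });

    var outInfo = new TensorsInfo();
    outInfo.AddTensorInfo("output", TensorType.UInt8, new int[] { 1001, 1, 1, 1 });

    // Choose the framework and HW explicitly; NNFWType.Any and HWType.Any
    // remain the defaults, and isDynamicMode defaults to false.
    var single = new SingleShot("/usr/share/model.tflite", inInfo, outInfo,
                                NNFWType.TensorFlowLite, HWType.CPU, false);

    single.SetTimeout(1000);    // wait at most 1000 ms for an output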

TensorsData class
- Adds the `TensorsInfo` property to fetch the tensors information
- Adds the static Allocate() method to create a TensorsData instance
  (both sketched below)
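
A sketch of the new members, continuing the example above:

    // Allocate buffers sized according to the model's input information.
    TensorsData inData = TensorsData.Allocate(single.Input);
    inData.SetTensorData(0, new byte[single.Input.GetTensorSize(0)]);

    TensorsData outData = single.Invoke(inData);
    byte[] result = outData.GetTensorData(0);
    TensorsInfo resultInfo = outData.TensorsInfo;  // tensors information of the result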

TensorsInfo class
- Adds GetTensorSize() to get the byte size of the tensor data (see the
  example below)
- Removes `IndexOutOfRangeException` in SetTensorType(), GetTensorType()
  and GetDimension() since it does not occur
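
GetTensorSize() returns the element size multiplied by the product of
all dimensions; for instance, with the input information above:

    // UInt8 is 1 byte, so 3 * 224 * 224 * 1 gives 150528 bytes.
    int size = inInfo.GetTensorSize(0);    // 150528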

Signed-off-by: Sangjung Woo <sangjung.woo@samsung.com>
src/Tizen.MachineLearning.Inference/Interop/Interop.Nnstreamer.cs
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/Commons.cs
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/SingleShot.cs
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsData.cs
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsInfo.cs

diff --git a/src/Tizen.MachineLearning.Inference/Interop/Interop.Nnstreamer.cs b/src/Tizen.MachineLearning.Inference/Interop/Interop.Nnstreamer.cs
index 883e1ab..c18d98d 100755 (executable)
@@ -39,13 +39,25 @@ internal static partial class Interop
         [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_single_invoke", CallingConvention = CallingConvention.Cdecl)]
         internal static extern NNStreamerError InvokeSingle(IntPtr single_handle, IntPtr input_data, out IntPtr output_data);
 
+        /* int ml_single_invoke_dynamic (ml_single_h single, const ml_tensors_data_h input, const ml_tensors_info_h in_info, ml_tensors_data_h * output, ml_tensors_info_h * out_info) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_single_invoke_dynamic", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError InvokeSingleDynamic(IntPtr single_handle, IntPtr input_data, IntPtr input_info, out IntPtr output_data, out IntPtr output_info);
+
         /* int ml_single_get_input_info (ml_single_h single, ml_tensors_info_h *info) */
-        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_single_invoke", CallingConvention = CallingConvention.Cdecl)]
-        internal static extern NNStreamerError GetInputTensorsInfoFromSingle(IntPtr single_handle, out IntPtr input_info);
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_single_get_input_info", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError GetInputTensorsInfo(IntPtr single_handle, out IntPtr input_info);
 
         /* int ml_single_get_output_info (ml_single_h single, ml_tensors_info_h *info) */
         [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_single_get_output_info", CallingConvention = CallingConvention.Cdecl)]
-        internal static extern NNStreamerError GetOutputTensorsInfoFromSingle(IntPtr single_handle, out IntPtr output_info);
+        internal static extern NNStreamerError GetOutputTensorsInfo(IntPtr single_handle, out IntPtr output_info);
+
+        /* int ml_single_set_input_info (ml_single_h single, const ml_tensors_info_h info) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_single_set_input_info", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError SetInputInfo(IntPtr single_handle, IntPtr in_handle);
+
+        /* int ml_single_set_timeout (ml_single_h single, unsigned int timeout) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_single_set_timeout", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError SetTimeout(IntPtr single_handle, int time_ms);
     }
 
     internal static partial class Util
@@ -58,14 +70,6 @@ internal static partial class Interop
         [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_destroy", CallingConvention = CallingConvention.Cdecl)]
         internal static extern NNStreamerError DestroyTensorsInfo(IntPtr info);
 
-        /* int ml_tensors_info_validate (const ml_tensors_info_h info, bool *valid) */
-        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_validate", CallingConvention = CallingConvention.Cdecl)]
-        internal static extern NNStreamerError ValidateTensorsInfo(IntPtr info, out bool valid);
-
-        /* int ml_tensors_info_clone (ml_tensors_info_h dest, const ml_tensors_info_h src) */
-        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_clone", CallingConvention = CallingConvention.Cdecl)]
-        internal static extern NNStreamerError CloneTensorsInfo(out IntPtr dest_info, IntPtr src_info);
-
         /* int ml_tensors_info_set_count (ml_tensors_info_h info, unsigned int count) */
         [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_set_count", CallingConvention = CallingConvention.Cdecl)]
         internal static extern NNStreamerError SetTensorsCount(IntPtr info, int count);
@@ -96,11 +100,7 @@ internal static partial class Interop
 
         /* int ml_tensors_info_get_tensor_dimension (ml_tensors_info_h info, unsigned int index, ml_tensor_dimension dimension) */
         [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_get_tensor_dimension", CallingConvention = CallingConvention.Cdecl)]
-        internal static extern NNStreamerError GetTensorDimension(IntPtr info, int index, out int[] dimension);
-
-        /* size_t ml_tensors_info_get_size (const ml_tensors_info_h info) */
-        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_get_size", CallingConvention = CallingConvention.Cdecl)]
-        internal static extern int GetTensorsSize(IntPtr info);
+        internal static extern NNStreamerError GetTensorDimension(IntPtr info, int index, [In, Out] uint[] dimension);
 
         /* int ml_tensors_data_create (const ml_tensors_info_h info, ml_tensors_data_h *data) */
         [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_data_create", CallingConvention = CallingConvention.Cdecl)]
@@ -122,10 +122,6 @@ internal static partial class Interop
         [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_check_nnfw_availability", CallingConvention = CallingConvention.Cdecl)]
         internal static extern NNStreamerError CheckNNFWAvailability(NNFWType nnfw, HWType hw, out bool available);
 
-        /* ml_tensors_data_get_tensor_count (ml_tensors_data_h data, unsigned int *num_tensors) */
-        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_data_get_tensor_count", CallingConvention = CallingConvention.Cdecl)]
-        internal static extern NNStreamerError GetTensorsCount(IntPtr data, out uint count);
-
         internal static byte[] IntPtrToByteArray(IntPtr unmanagedByteArray, int size)
         {
             byte[] retByte = new byte[size];
diff --git a/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/Commons.cs b/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/Commons.cs
index 25308f8..3977b75 100755 (executable)
@@ -79,6 +79,8 @@ namespace Tizen.MachineLearning.Inference
         Unknown = Tizen.Internals.Errors.ErrorCode.Unknown,
         TimedOut = Tizen.Internals.Errors.ErrorCode.TimedOut,
         NotSupported = Tizen.Internals.Errors.ErrorCode.NotSupported,
+        QuotaExceeded = Tizen.Internals.Errors.ErrorCode.QuotaExceeded,
+        InvalidOperation = Tizen.Internals.Errors.ErrorCode.InvalidOperation,
     }
 
     /// <summary>
@@ -241,8 +243,12 @@ namespace Tizen.MachineLearning.Inference
                     exp = new TimeoutException(msg);
                     break;
 
+                case NNStreamerError.QuotaExceeded:
+                    exp = new IndexOutOfRangeException(msg);
+                    break;
+
                 default:
-                    exp = new InvalidOperationException(msg);
+                    exp = new NotSupportedException(msg);
                     break;
             }
             return exp;
diff --git a/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/SingleShot.cs b/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/SingleShot.cs
index b6fc121..7ddfb17 100755 (executable)
@@ -15,9 +15,6 @@
 */
 
 using System;
-using System.Collections.Generic;
-using System.Text;
-using System.IO;
 
 namespace Tizen.MachineLearning.Inference
 {
@@ -28,8 +25,12 @@ namespace Tizen.MachineLearning.Inference
     public class SingleShot : IDisposable
     {
         private IntPtr _handle = IntPtr.Zero;
+        private bool _dynamicMode = false;
         private bool _disposed = false;
 
+        private TensorsInfo _inInfo = null;
+        private TensorsInfo _outInfo = null;
+
         /// <summary>
         /// Loads the neural network model and configures runtime environment
         /// </summary>
@@ -38,7 +39,6 @@ namespace Tizen.MachineLearning.Inference
         /// <param name="outTensorsInfo">Output TensorsInfo object for inference result</param>
         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
-        /// <exception cref="IOException">Thrown when constructing the pipeline is failed.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
         /// <since_tizen> 6 </since_tizen>
         public SingleShot(string modelAbsPath, TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo)
@@ -46,12 +46,137 @@ namespace Tizen.MachineLearning.Inference
             NNStreamer.CheckNNStreamerSupport();
 
             if (inTensorsInfo == null || outTensorsInfo == null)
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
+
+            CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo, NNFWType.Any, HWType.Any, false);
+        }
+
+        /// <summary>
+        /// Loads the neural network model and configures runtime environment with Neural Network Framework and HW information
+        /// </summary>
+        /// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
+        /// <param name="inTensorsInfo">Input TensorsInfo object</param>
+        /// <param name="outTensorsInfo">Output TensorsInfo object for inference result</param>
+        /// <param name="fwType">Types of Neural Network Framework</param>
+        /// <param name="hwType">Types of hardware resources to be used for NNFWs</param>
+        /// <param name="isDynamicMode">Support Dynamic Mode</param>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public SingleShot(string modelAbsPath,
+            TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo, NNFWType fwType, HWType hwType, bool isDynamicMode)
+        {
+            NNStreamer.CheckNNStreamerSupport();
+
+            if (inTensorsInfo == null || outTensorsInfo == null)
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
+
+            CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo, fwType, hwType, isDynamicMode);
+        }
+
+        /// <summary>
+        /// Loads the neural network model and configures runtime environment without TensorsInfo
+        /// </summary>
+        /// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
+        /// <param name="fwType">Types of Neural Network Framework (Default:NNFWType.Any)</param>
+        /// <param name="hwType">Types of hardware resources to be used for NNFWs (Default: HWType.Any)</param>
+        /// <param name="isDynamicMode">Support Dynamic Mode (Default: false)</param>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public SingleShot(string modelAbsPath, NNFWType fwType = NNFWType.Any, HWType hwType = HWType.Any, bool isDynamicMode = false)
+        {
+            NNStreamer.CheckNNStreamerSupport();
+
+            CreateSingleShot(modelAbsPath, null, null, fwType, hwType, isDynamicMode);
+        }
+
+        /// <summary>
+        /// The information (tensor dimension, type, name and so on) of required input data for the given model.
+        /// </summary>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public TensorsInfo Input
+        {
+            get
             {
-                string msg = "TensorsInfo is null";
-                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
+                NNStreamer.CheckNNStreamerSupport();
+
+                IntPtr inHandle;
+                NNStreamerError ret = NNStreamerError.None;
+
+                if (_inInfo != null)
+                    return _inInfo;
+
+                ret = Interop.SingleShot.GetInputTensorsInfo(_handle, out inHandle);
+                NNStreamer.CheckException(ret, "fail to get Input TensorsInfo handle");
+
+                TensorsInfo retInfo = TensorsInfo.ConvertTensorsInfoFromHandle(inHandle);
+
+                _inInfo = retInfo;
+                return retInfo;
             }
+            set
+            {
+                NNStreamer.CheckNNStreamerSupport();
+                NNStreamerError ret = NNStreamerError.None;
+
+                if (value == null)
+                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
+
+                ret = Interop.SingleShot.SetInputInfo(_handle, value.GetTensorsInfoHandle());
+                NNStreamer.CheckException(ret, "fail to set Input TensorsInfo");
 
-            CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo);
+                _inInfo = value;
+            }
+        }
+
+        /// <summary>
+        /// The information (tensor dimension, type, name and so on) of output data for the given model.
+        /// </summary>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public TensorsInfo Output
+        {
+            get
+            {
+                NNStreamer.CheckNNStreamerSupport();
+
+                IntPtr outHandle;
+                NNStreamerError ret = NNStreamerError.None;
+
+                if (_outInfo != null)
+                    return _outInfo;
+
+                ret = Interop.SingleShot.GetOutputTensorsInfo(_handle, out outHandle);
+                NNStreamer.CheckException(ret, "fail to get Output TensorsInfo handle");
+
+                TensorsInfo retInfo = TensorsInfo.ConvertTensorsInfoFromHandle(outHandle);
+
+                _outInfo = retInfo;
+                return retInfo;
+            }
+        }
+
+        /// <summary>
+        /// Sets the maximum amount of time to wait for an output, in milliseconds.
+        /// </summary>
+        /// <param name="ms">The time to wait for an output (milliseconds)</param>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public void SetTimeout(int ms)
+        {
+            NNStreamer.CheckNNStreamerSupport();
+            NNStreamerError ret = NNStreamerError.None;
+
+            if (ms <= 0)
+                ret = NNStreamerError.InvalidParameter;
+            NNStreamer.CheckException(ret, "timeout: " + ms.ToString());
+
+            ret = Interop.SingleShot.SetTimeout(_handle, ms);
+            NNStreamer.CheckException(ret, "fail to set the timeout!");
         }
 
         /// <summary>
@@ -80,44 +205,88 @@ namespace Tizen.MachineLearning.Inference
         /// <returns>TensorsData instance which contains the inferred result.</returns>
         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
-        /// <exception cref="IOException">Thrown when failed to push an input data into source element.</exception>
         /// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
         /// <since_tizen> 6 </since_tizen>
         public TensorsData Invoke(TensorsData inTensorsData)
         {
-            TensorsData out_data;
-            IntPtr out_ptr = IntPtr.Zero;
+            TensorsData out_data = null;
+            IntPtr outDataPtr = IntPtr.Zero;
             NNStreamerError ret = NNStreamerError.None;
 
+            NNStreamer.CheckNNStreamerSupport();
+
             if (inTensorsData == null)
             {
                 string msg = "TensorsData is null";
                 throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
             }
 
-            ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.Handle, out out_ptr);
-            NNStreamer.CheckException(ret, "fail to invoke the single inference engine");
+            if (_dynamicMode)
+            {
+                TensorsInfo inInfo = inTensorsData.TensorsInfo;
+                if (inInfo == null)
+                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
+
+                /* Apply all data */
+                inTensorsData.PrepareInvoke();
+
+                IntPtr outInfoPtr = IntPtr.Zero;
+                ret = Interop.SingleShot.InvokeSingleDynamic(_handle, inTensorsData.GetHandle(), inInfo.GetTensorsInfoHandle(), out outDataPtr, out outInfoPtr);
+                NNStreamer.CheckException(ret, "fail to invoke the single dynamic inference");
+
+                out_data = TensorsData.CreateFromNativeHandle(outDataPtr, outInfoPtr, true);
+            }
+            else
+            {
+                TensorsInfo data_inInfo = inTensorsData.TensorsInfo;
+
+                if (!data_inInfo.Equals(_inInfo))
+                {
+                    string msg = "The TensorsInfo of Input TensorsData is different from that of SingleShot object";
+                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
+                }
+
+                /* Apply all data */
+                inTensorsData.PrepareInvoke();
 
-            out_data = TensorsData.CreateFromNativeHandle(out_ptr);
+                ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.GetHandle(), out outDataPtr);
+                NNStreamer.CheckException(ret, "fail to invoke the single inference");
+
+                out_data = TensorsData.CreateFromNativeHandle(outDataPtr, data_inInfo.GetTensorsInfoHandle(), true);
+            }
             return out_data;
         }
 
-        private void CreateSingleShot(string modelAbsPath, TensorsInfo inTensorInfo, TensorsInfo outTensorInfo)
+        private void CreateSingleShot(string modelAbsPath,
+            TensorsInfo inTensorInfo, TensorsInfo outTensorInfo,
+            NNFWType FWType, HWType HWType, bool IsDynamicMode)
         {
             NNStreamerError ret = NNStreamerError.None;
-            IntPtr input_info;
-            IntPtr output_info;
+            IntPtr input_info = IntPtr.Zero;
+            IntPtr output_info = IntPtr.Zero;
 
             /* Check model path */
             if (string.IsNullOrEmpty(modelAbsPath))
                 ret = NNStreamerError.InvalidParameter;
             NNStreamer.CheckException(ret, "model path is invalid: " + modelAbsPath);
 
-            input_info = inTensorInfo.GetTensorsInfoHandle();
-            output_info = outTensorInfo.GetTensorsInfoHandle();
+            /* Set Dynamic Mode */
+            _dynamicMode = IsDynamicMode;
+
+            if (inTensorInfo != null)
+            {
+                input_info = inTensorInfo.GetTensorsInfoHandle();
+                _inInfo = inTensorInfo;
+            }
+
+            if (outTensorInfo != null)
+            {
+                output_info = outTensorInfo.GetTensorsInfoHandle();
+                _outInfo = outTensorInfo;
+            }
 
-            ret = Interop.SingleShot.OpenSingle(out _handle, modelAbsPath, input_info, output_info, NNFWType.Any, HWType.Any);
+            ret = Interop.SingleShot.OpenSingle(out _handle, modelAbsPath, input_info, output_info, FWType, HWType);
             NNStreamer.CheckException(ret, "fail to open the single inference engine");
         }
 
diff --git a/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsData.cs b/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsData.cs
index ddabbb9..7ab6542 100755 (executable)
@@ -15,7 +15,7 @@
 */
 
 using System;
-using System.IO;
+using System.Collections;
 
 namespace Tizen.MachineLearning.Inference
 {
@@ -27,16 +27,57 @@ namespace Tizen.MachineLearning.Inference
     {
         private IntPtr _handle = IntPtr.Zero;
         private bool _disposed = false;
-        private int _count = Tensor.InvalidCount;
+        private TensorsInfo _tensorsInfo = null;
+        private ArrayList _dataList = null;
 
         /// <summary>
-        /// Creates a TensorsInfo instance with handle which is given by TensorsInfo.
+        /// Creates a TensorsData instance with the handle given by TensorsInfo.
         /// </summary>
         /// <param name="handle">The handle of tensors data.</param>
+        /// <param name="info">The handle of tensors info. (Default: null)</param>
+        /// <param name="isFetch">The boolean value for fetching the data (Default: false)</param>
         /// <since_tizen> 6 </since_tizen>
-        private TensorsData(IntPtr handle)
+        private TensorsData(IntPtr handle, TensorsInfo info, bool isFetch)
         {
+            NNStreamer.CheckNNStreamerSupport();
+            NNStreamerError ret = NNStreamerError.None;
+
+            /* Set internal object */
             _handle = handle;
+            _tensorsInfo = info;
+
+            /* Set count */
+            int count = 0;
+            ret = Interop.Util.GetTensorsCount(_handle, out count);
+            NNStreamer.CheckException(ret, "unable to get the count of TensorsData");
+
+            _dataList = new ArrayList(count);
+
+            if (isFetch)
+            {
+                for (int i = 0; i < count; ++i)
+                {
+                    IntPtr raw_data;
+                    byte[] bufData = null;
+                    int size;
+
+                    ret = Interop.Util.GetTensorData(_handle, i, out raw_data, out size);
+                    NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + i.ToString());
+
+                    bufData = Interop.Util.IntPtrToByteArray(raw_data, size);
+                    _dataList.Add(bufData);
+                }
+            }
+            else
+            {
+                for (int i = 0; i < count; ++i)
+                {
+                    int size = info.GetTensorSize(i);
+                    byte[] bufData = new byte[size];
+
+                    _dataList.Add(bufData);
+                }
+            }
         }
 
         /// <summary>
@@ -48,36 +89,54 @@ namespace Tizen.MachineLearning.Inference
             Dispose(false);
         }
 
-        internal static TensorsData CreateFromNativeHandle(IntPtr handle)
+        /// <summary>
+        /// Gets the number of tensors in the TensorsData class.
+        /// </summary>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public int Count
         {
-            TensorsData retTensorsData = new TensorsData(handle);
+            get {
+                NNStreamer.CheckNNStreamerSupport();
 
-            return retTensorsData;
+                return _dataList.Count;
+            }
         }
 
         /// <summary>
-        /// Gets the number of Tensor in TensorsData class
+        /// Gets the tensors information.
         /// </summary>
+        /// <returns>The TensorsInfo instance</returns>
         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
-        /// <since_tizen> 6 </since_tizen>
-        public int Count
+        /// <since_tizen> 8 </since_tizen>
+        public TensorsInfo TensorsInfo
         {
             get {
                 NNStreamer.CheckNNStreamerSupport();
 
-                if (_count != Tensor.InvalidCount)
-                    return _count;
+                return _tensorsInfo;
+            }
+        }
 
-                NNStreamerError ret = NNStreamerError.None;
-                int count = 0;
+        /// <summary>
+        /// Allocates a new TensorsData instance with the given tensors information.
+        /// </summary>
+        /// <param name="info">TensorsInfo object which has Tensor information</param>
+        /// <returns>The TensorsData instance</returns>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public static TensorsData Allocate(TensorsInfo info)
+        {
+            NNStreamer.CheckNNStreamerSupport();
 
-                ret = Interop.Util.GetTensorsCount(_handle, out count);
-                NNStreamer.CheckException(ret, "unable to get the count of TensorsData");
+            if (info == null)
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
 
-                _count = count;
-                return _count;
-            }
+            TensorsData retData = info.GetTensorsData();
+            return retData;
         }
 
         /// <summary>
@@ -86,23 +145,17 @@ namespace Tizen.MachineLearning.Inference
         /// <param name="index">The index of the tensor.</param>
         /// <param name="buffer">Raw tensor data to be set.</param>
         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
-        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <exception cref="ArgumentException">Thrown when the data is not valid.</exception>
         /// <since_tizen> 6 </since_tizen>
         public void SetTensorData(int index, byte[] buffer)
         {
-            NNStreamerError ret = NNStreamerError.None;
-
             NNStreamer.CheckNNStreamerSupport();
 
-            if (buffer == null)
-            {
-                string msg = "buffer is null";
-                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
-            }
+            CheckIndex(index);
+            CheckDataBuffer(index, buffer);
 
-            ret = Interop.Util.SetTensorData(_handle, index, buffer, buffer.Length);
-            NNStreamer.CheckException(ret, "unable to set the buffer of TensorsData: " + index.ToString());
+            _dataList[index] = buffer;
         }
 
         /// <summary>
@@ -116,19 +169,11 @@ namespace Tizen.MachineLearning.Inference
         /// <since_tizen> 6 </since_tizen>
         public byte[] GetTensorData(int index)
         {
-            byte[] retBuffer = null;
-            IntPtr raw_data;
-            int size;
-            NNStreamerError ret = NNStreamerError.None;
-
             NNStreamer.CheckNNStreamerSupport();
 
-            ret = Interop.Util.GetTensorData(_handle, index, out raw_data, out size);
-            NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + index.ToString());
+            CheckIndex(index);
 
-            retBuffer = Interop.Util.IntPtrToByteArray(raw_data, size);
-
-            return retBuffer;
+            return (byte[])_dataList[index];
         }
 
         /// <summary>
@@ -168,9 +213,79 @@ namespace Tizen.MachineLearning.Inference
             _disposed = true;
         }
 
-        internal IntPtr Handle
+        internal IntPtr GetHandle()
+        {
+            return _handle;
+        }
+
+        internal void PrepareInvoke()
+        {
+            NNStreamerError ret = NNStreamerError.None;
+            int count = _dataList.Count;
+
+            for (int i = 0; i < count; ++i)
+            {
+                byte[] data = (byte[])_dataList[i];
+                ret = Interop.Util.SetTensorData(_handle, i, data, data.Length);
+                NNStreamer.CheckException(ret, "unable to set the buffer of TensorsData: " + i.ToString());
+            }
+        }
+
+        internal static TensorsData CreateFromNativeHandle(IntPtr dataHandle, IntPtr infoHandle, bool isFetch)
+        {
+            TensorsData retTensorsData = null;
+
+            if (infoHandle == IntPtr.Zero)
+            {
+                retTensorsData = new TensorsData(dataHandle, null, isFetch);
+            }
+            else
+            {
+                TensorsInfo info = TensorsInfo.ConvertTensorsInfoFromHandle(infoHandle);
+                retTensorsData = new TensorsData(dataHandle, info, isFetch);
+            }
+
+            return retTensorsData;
+        }
+
+        private void CheckIndex(int index)
         {
-            get { return _handle; }
+            if (index < 0 || index >= _dataList.Count)
+            {
+                string msg = "Invalid index [" + index + "] of the tensors";
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
+            }
+        }
+
+        private void CheckDataBuffer(int index, byte[] data)
+        {
+            if (data == null)
+            {
+                string msg = "data is not valid";
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
+            }
+
+            if (index >= Tensor.SizeLimit)
+            {
+                string msg = "Max size of the tensors is " + Tensor.SizeLimit;
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.QuotaExceeded, msg);
+            }
+
+            if (_tensorsInfo != null)
+            {
+                if (index >= _tensorsInfo.Count)
+                {
+                    string msg = "Current information has " + _tensorsInfo.Count + " tensors";
+                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.QuotaExceeded, msg);
+                }
+
+                int size = _tensorsInfo.GetTensorSize(index);
+                if (data.Length != size)
+                {
+                    string msg = "Invalid buffer size, required size is " + size.ToString();
+                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
+                }
+            }
         }
     }
 }
diff --git a/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsInfo.cs b/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsInfo.cs
index 53c718f..d37067d 100755 (executable)
@@ -15,8 +15,8 @@
 */
 
 using System;
+using System.Linq;
 using System.Collections.Generic;
-using Log = Tizen.Log;
 
 namespace Tizen.MachineLearning.Inference
 {
@@ -24,7 +24,7 @@ namespace Tizen.MachineLearning.Inference
     /// The TensorsInfo class manages each Tensor information such as Name, Type and Dimension.
     /// </summary>
     /// <since_tizen> 6 </since_tizen>
-    public class TensorsInfo : IDisposable
+    public class TensorsInfo : IDisposable, IEquatable<TensorsInfo>
     {
         private List<TensorInfo> _infoList;
         private IntPtr _handle = IntPtr.Zero;
@@ -118,7 +118,6 @@ namespace Tizen.MachineLearning.Inference
         /// <param name="idx">The index of the tensor to be updated.</param>
         /// <param name="name">The tensor name to be set.</param>
         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
-        /// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
         /// <since_tizen> 6 </since_tizen>
@@ -138,19 +137,21 @@ namespace Tizen.MachineLearning.Inference
         }
 
         /// <summary>
-        /// Gets the tensor name with given index.
+        /// Calculates the byte size of tensor data.
         /// </summary>
-        /// <param name="idx">The index of the tensor.</param>
-        /// <returns>The tensor name.</returns>
-        /// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
+        /// <param name="idx">The index of the tensor information in the list</param>
+        /// <returns>The byte size of the tensor</returns>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
-        /// <since_tizen> 6 </since_tizen>
-        public string GetTensorName(int idx)
+        /// <since_tizen> 8 </since_tizen>
+        public int GetTensorSize(int idx)
         {
             NNStreamer.CheckNNStreamerSupport();
-
             CheckIndexBoundary(idx);
-            return _infoList[idx].Name;
+
+            return _infoList[idx].Size;
+
         }
 
         /// <summary>
@@ -159,7 +160,6 @@ namespace Tizen.MachineLearning.Inference
         /// <param name="idx">The index of the tensor to be updated.</param>
         /// <param name="type">The tensor type to be set.</param>
         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
-        /// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
         /// <since_tizen> 6 </since_tizen>
@@ -183,7 +183,7 @@ namespace Tizen.MachineLearning.Inference
         /// </summary>
         /// <param name="idx">The index of the tensor.</param>
         /// <returns>The tensor type</returns>
-        /// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
         /// <since_tizen> 6 </since_tizen>
@@ -201,7 +201,6 @@ namespace Tizen.MachineLearning.Inference
         /// <param name="idx">The index of the tensor to be updated.</param>
         /// <param name="dimension">The tensor dimension to be set.</param>
         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
-        /// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
         /// <since_tizen> 6 </since_tizen>
@@ -225,7 +224,7 @@ namespace Tizen.MachineLearning.Inference
         /// </summary>
         /// <param name="idx">The index of the tensor.</param>
         /// <returns>The tensor dimension.</returns>
-        /// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
         /// <since_tizen> 6 </since_tizen>
@@ -262,11 +261,132 @@ namespace Tizen.MachineLearning.Inference
             ret = Interop.Util.CreateTensorsData(_handle, out tensorsData_h);
             NNStreamer.CheckException(ret, "unable to create the tensorsData object");
 
-            retTensorData = TensorsData.CreateFromNativeHandle(tensorsData_h);
+            retTensorData = TensorsData.CreateFromNativeHandle(tensorsData_h, _handle, false);
 
             return retTensorData;
         }
 
+        /// <summary>
+        /// Gets the tensor name with given index.
+        /// </summary>
+        /// <param name="idx">The index of the tensor.</param>
+        /// <returns>The tensor name</returns>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public string GetTensorName(int idx)
+        {
+            NNStreamer.CheckNNStreamerSupport();
+
+            CheckIndexBoundary(idx);
+            return _infoList[idx].Name;
+        }
+
+        /// <summary>
+        /// Gets the hash code of this TensorsInfo object
+        /// </summary>
+        /// <returns>The hash code</returns>
+        /// <since_tizen> 8 </since_tizen>
+        public override int GetHashCode()
+        {
+            unchecked
+            {
+                int hash = 19;
+                foreach (var info in _infoList)
+                {
+                    hash = hash * 31 + info.GetHashCode();
+                }
+                return hash;
+            }
+        }
+
+        /// <summary>
+        /// Compares this TensorsInfo object with the given object to check whether their contents are the same.
+        /// </summary>
+        /// <param name="obj">Object to compare</param>
+        /// <returns>True if the given object is the same object or its contents are the same</returns>
+        /// <since_tizen> 8 </since_tizen>
+        public override bool Equals(object obj)
+        {
+            if (obj == null)
+                return false;
+
+            TensorsInfo cInfo = obj as TensorsInfo;
+            return this.Equals(cInfo);
+        }
+
+        /// <summary>
+        /// Compares this TensorsInfo object with the given TensorsInfo instance to check whether their contents are the same.
+        /// </summary>
+        /// <param name="other">TensorsInfo instance to compare</param>
+        /// <returns>True if the given object is the same object or its contents are the same</returns>
+        /// <since_tizen> 8 </since_tizen>
+        public bool Equals(TensorsInfo other)
+        {
+            if (other == null)
+                return false;
+
+            if (this.Count != other.Count)
+                return false;
+
+            for (int i = 0; i < this.Count; ++i)
+            {
+                // Name
+                if (string.Compare(this.GetTensorName(i), other.GetTensorName(i)) != 0)
+                    return false;
+
+                // Type
+                if (this.GetTensorType(i) != other.GetTensorType(i))
+                    return false;
+
+                // Dimension
+                if (!this.GetDimension(i).SequenceEqual(other.GetDimension(i)))
+                    return false;
+            }
+            return true;
+        }
+
+        /// <summary>
+        /// Makes a TensorsInfo object from a native handle
+        /// </summary>
+        /// <param name="handle">Handle of TensorsInfo object</param>
+        /// <returns>TensorsInfo object</returns>
+        internal static TensorsInfo ConvertTensorsInfoFromHandle(IntPtr handle)
+        {
+            TensorsInfo retInfo = null;
+            NNStreamerError ret = NNStreamerError.None;
+
+            int count;
+            ret = Interop.Util.GetTensorsCount(handle, out count);
+            NNStreamer.CheckException(ret, "Fail to get Tensors' count");
+
+            retInfo = new TensorsInfo();
+
+            for (int i = 0; i < count; ++i)
+            {
+                string name;
+                TensorType type;
+                uint[] dim = new uint[Tensor.RankLimit];
+
+                ret = Interop.Util.GetTensorName(handle, i, out name);
+                NNStreamer.CheckException(ret, "Fail to get Tensor's name");
+
+                ret = Interop.Util.GetTensorType(handle, i, out type);
+                NNStreamer.CheckException(ret, "Fail to get Tensor's type");
+
+                ret = Interop.Util.GetTensorDimension(handle, i, dim);
+                NNStreamer.CheckException(ret, "Fail to get Tensor's dimension");
+
+                retInfo.AddTensorInfo(name, type, (int[])(object)dim);
+            }
+            return retInfo;
+        }
+
+        /// <summary>
+        /// Returns the TensorsInfo handle
+        /// </summary>
+        /// <returns>IntPtr TensorsInfo handle</returns>
         internal IntPtr GetTensorsInfoHandle()
         {
             NNStreamerError ret = NNStreamerError.None;
@@ -348,8 +468,10 @@ namespace Tizen.MachineLearning.Inference
         
         private void CheckIndexBoundary(int idx)
         {
-            if (idx < 0 || idx >= _infoList.Count) {
-                throw new IndexOutOfRangeException("Invalid index [" + idx + "] of the tensors");
+            if (idx < 0 || idx >= _infoList.Count)
+            {
+                string msg = "Invalid index [" + idx + "] of the tensors";
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
             }
         }
 
@@ -379,8 +501,52 @@ namespace Tizen.MachineLearning.Inference
                 }
                 Dimension = (int[])dimension.Clone();
             }
+            private int GetSize()
+            {
+                int size = 0;
+
+                switch (Type) {
+                    case TensorType.Int32:
+                    case TensorType.UInt32:
+                    case TensorType.Float32:
+                        size = 4;
+                        break;
+
+                    case TensorType.Int16:
+                    case TensorType.UInt16:
+                        size = 2;
+                        break;
+
+                    case TensorType.Int8:
+                    case TensorType.UInt8:
+                        size = 1;
+                        break;
+
+                    case TensorType.Float64:
+                    case TensorType.Int64:
+                    case TensorType.UInt64:
+                        size = 8;
+                        break;
+
+                    default:
+                        /* Unknown Type */
+                        break;
+                }
+                for (int i = 0; i < Tensor.RankLimit; ++i)
+                {
+                    size *= Dimension[i];
+                }
+                return size;
+            }
+
+            public int Size
+            {
+                get {
+                    return GetSize();
+                }
+            }
 
-            public string Name { get; set; } = null;
+            public string Name { get; set; } = string.Empty;
 
             public TensorType Type { get; set; } = TensorType.Int32;