[NoACR][MachineLearning.Inference] Fix bug using wrong handle
[platform/core/csapi/tizenfx.git] / src / Tizen.MachineLearning.Inference / Tizen.MachineLearning.Inference / TensorsData.cs
index 3ba452e..8440155 100755 (executable)
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+* Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the License);
 * you may not use this file except in compliance with the License.
@@ -15,6 +15,7 @@
 */
 
 using System;
+using System.Collections;
 
 namespace Tizen.MachineLearning.Inference
 {
@@ -26,16 +27,62 @@ namespace Tizen.MachineLearning.Inference
     {
         private IntPtr _handle = IntPtr.Zero;
         private bool _disposed = false;
-        private int _count = Tensor.InvalidCount;
+        private TensorsInfo _tensorsInfo = null;
+        private ArrayList _dataList = null;
 
         /// <summary>
-        /// Creates a TensorsInfo instance with handle which is given by TensorsInfo.
+        /// Creates a TensorsData instance with handle which is given by TensorsInfo.
         /// </summary>
         /// <param name="handle">The handle of tensors data.</param>
+        /// <param name="info">The handle of tensors info.</param>
+        /// <param name="isFetch">The boolean value for fetching the data (Default: false)</param>
+        /// <param name="hasOwnership">The boolean value for automatic disposal (Default: true)</param>
         /// <since_tizen> 6 </since_tizen>
-        private TensorsData(IntPtr handle)
+        private TensorsData(IntPtr handle, TensorsInfo info, bool isFetch = false, bool hasOwnership = true)
         {
+            NNStreamer.CheckNNStreamerSupport();
+            NNStreamerError ret = NNStreamerError.None;
+
+            /* Set internal object */
             _handle = handle;
+            /* Because developers can change the TensorsInfo object, it should be stored as a deep-copied instance. */
+            _tensorsInfo = info.Clone();
+
+            /* Set count */
+            int count = 0;
+            ret = Interop.Util.GetTensorsCount(_tensorsInfo.GetTensorsInfoHandle(), out count);
+            NNStreamer.CheckException(ret, "unable to get the count of TensorsData");
+
+            _dataList = new ArrayList(count);
+
+            if (isFetch)
+            {
+                for (int i = 0; i < count; ++i)
+                {
+                    IntPtr raw_data;
+                    byte[] bufData = null;
+                    int size;
+
+                    ret = Interop.Util.GetTensorData(_handle, i, out raw_data, out size);
+                    NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + i.ToString());
+
+                    bufData = Interop.Util.IntPtrToByteArray(raw_data, size);
+                    _dataList.Add(bufData);
+                }
+            }
+            else
+            {
+                for (int i = 0; i < count; ++i)
+                {
+                    int size = info.GetTensorSize(i);
+                    byte[] bufData = new byte[size];
+
+                    _dataList.Add(bufData);
+                }
+            }
+
+            /* If it created as DataReceivedEventArgs, do not dispose. */
+            _disposed = !hasOwnership;
         }
 
         /// <summary>
@@ -47,13 +94,6 @@ namespace Tizen.MachineLearning.Inference
             Dispose(false);
         }
 
-        internal static TensorsData CreateFromNativeHandle(IntPtr handle)
-        {
-            TensorsData retTensorsData = new TensorsData(handle);
-
-            return retTensorsData;
-        }
-
         /// <summary>
         /// Gets the number of Tensor in TensorsData class
         /// </summary>
@@ -63,33 +103,64 @@ namespace Tizen.MachineLearning.Inference
         public int Count
         {
             get {
-                if (_count != Tensor.InvalidCount)
-                    return _count;
+                NNStreamer.CheckNNStreamerSupport();
 
-                NNStreamerError ret = NNStreamerError.None;
-                ret = Interop.Util.GetTensorsCount(_handle, out int count);
-                NNStreamer.CheckException(ret, "unable to get the count of TensorsData");
+                return _dataList.Count;
+            }
+        }
+
+        /// <summary>
+        /// Gets the tensors information.
+        /// </summary>
+        /// <returns>The TensorsInfo instance</returns>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public TensorsInfo TensorsInfo
+        {
+            get {
+                NNStreamer.CheckNNStreamerSupport();
 
-                _count = count;
-                return _count;
+                return _tensorsInfo;
             }
         }
 
         /// <summary>
+        /// Allocates a new TensorsData instance with the given tensors information.
+        /// </summary>
+        /// <param name="info">TensorsInfo object which has Tensor information</param>
+        /// <returns>The TensorsData instance</returns>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public static TensorsData Allocate(TensorsInfo info)
+        {
+            NNStreamer.CheckNNStreamerSupport();
+
+            if (info == null)
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
+
+            TensorsData retData = info.GetTensorsData();
+            return retData;
+        }
+
+        /// <summary>
         /// Sets a tensor data to given index.
         /// </summary>
         /// <param name="index">The index of the tensor.</param>
         /// <param name="buffer">Raw tensor data to be set.</param>
         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
-        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <exception cref="ArgumentException">Thrown when the data is not valid.</exception>
         /// <since_tizen> 6 </since_tizen>
         public void SetTensorData(int index, byte[] buffer)
         {
-            NNStreamerError ret = NNStreamerError.None;
+            NNStreamer.CheckNNStreamerSupport();
+
+            CheckIndex(index);
+            CheckDataBuffer(index, buffer);
 
-            ret = Interop.Util.SetTensorData(_handle, index, buffer, buffer.Length);
-            NNStreamer.CheckException(ret, "unable to set the buffer of TensorsData: " + index.ToString());
+            _dataList[index] = buffer;
         }
 
         /// <summary>
@@ -103,16 +174,11 @@ namespace Tizen.MachineLearning.Inference
         /// <since_tizen> 6 </since_tizen>
         public byte[] GetTensorData(int index)
         {
-            byte[] retBuffer;
-            IntPtr raw_data;
-            int size;
+            NNStreamer.CheckNNStreamerSupport();
 
-            NNStreamerError ret = NNStreamerError.None;
-            ret = Interop.Util.GetTensorData(_handle, index, out raw_data, out size);
-            NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + index.ToString());
+            CheckIndex(index);
 
-            retBuffer = Interop.Util.IntPtrToByteArray(raw_data, size);
-            return retBuffer;
+            return (byte[])_dataList[index];
         }
 
         /// <summary>
@@ -137,13 +203,14 @@ namespace Tizen.MachineLearning.Inference
             if (disposing)
             {
                 // release managed object
+                _tensorsInfo.Dispose();
+                _tensorsInfo = null;
             }
 
             // release unmanaged objects
             if (_handle != IntPtr.Zero)
             {
-                NNStreamerError ret = NNStreamerError.None;
-                ret = Interop.Util.DestroyTensorsData(_handle);
+                NNStreamerError ret = Interop.Util.DestroyTensorsData(_handle);
                 if (ret != NNStreamerError.None)
                 {
                     Log.Error(NNStreamer.TAG, "failed to destroy TensorsData object");
@@ -153,9 +220,74 @@ namespace Tizen.MachineLearning.Inference
             _disposed = true;
         }
 
-        internal IntPtr Handle
+        internal IntPtr GetHandle()
+        {
+            return _handle;
+        }
+
+        internal void PrepareInvoke()
+        {
+            NNStreamerError ret = NNStreamerError.None;
+            int count = _dataList.Count;
+
+            for (int i = 0; i < count; ++i)
+            {
+                byte[] data = (byte[])_dataList[i];
+                ret = Interop.Util.SetTensorData(_handle, i, data, data.Length);
+                NNStreamer.CheckException(ret, "unable to set the buffer of TensorsData: " + i.ToString());
+            }
+        }
+
+        internal static TensorsData CreateFromNativeHandle(IntPtr dataHandle, IntPtr infoHandle, bool isFetch = false, bool hasOwnership = true)
         {
-            get { return _handle; }
+            TensorsInfo info = null;
+
+            if (infoHandle != IntPtr.Zero)
+            {
+                info = TensorsInfo.ConvertTensorsInfoFromHandle(infoHandle);
+            }
+
+            return new TensorsData(dataHandle, info, isFetch, hasOwnership);
+        }
+
+        private void CheckIndex(int index)
+        {
+            if (index < 0 || index >= _dataList.Count)
+            {
+                string msg = "Invalid index [" + index + "] of the tensors";
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
+            }
+        }
+
+        private void CheckDataBuffer(int index, byte[] data)
+        {
+            if (data == null)
+            {
+                string msg = "data is not valid";
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
+            }
+
+            if (index >= Tensor.SizeLimit)
+            {
+                string msg = "Max size of the tensors is " + Tensor.SizeLimit;
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.QuotaExceeded, msg);
+            }
+
+            if (_tensorsInfo != null)
+            {
+                if (index >= _tensorsInfo.Count)
+                {
+                    string msg = "Current information has " + _tensorsInfo.Count + " tensors";
+                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.QuotaExceeded, msg);
+                }
+
+                int size = _tensorsInfo.GetTensorSize(index);
+                if (data.Length != size)
+                {
+                    string msg = "Invalid buffer size, required size is " + size.ToString();
+                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
+                }
+            }
         }
     }
 }