[MachineLearning.Inference] Update SingleShot and its related classes (#1154)
[platform/core/csapi/tizenfx.git] / src / Tizen.MachineLearning.Inference / Tizen.MachineLearning.Inference / TensorsData.cs
/*
 * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
using System;
using System.Collections;
using System.Collections.Generic;
19
namespace Tizen.MachineLearning.Inference
{
    /// <summary>
    /// The TensorsData class sets and gets the buffer data for each Tensor.
    /// </summary>
    /// <since_tizen> 6 </since_tizen>
    public class TensorsData : IDisposable
    {
        private IntPtr _handle = IntPtr.Zero;
        private bool _disposed = false;
        private TensorsInfo _tensorsInfo = null;

        /* Managed copies of each tensor buffer, indexed like the native tensors. */
        private List<byte[]> _dataList = null;

        /// <summary>
        /// Creates a TensorsData instance with handle which is given by TensorsInfo.
        /// </summary>
        /// <param name="handle">The handle of tensors data.</param>
        /// <param name="info">The tensors information. May be null only when isFetch is true.</param>
        /// <param name="isFetch">If true, the current native buffers are copied out; otherwise empty buffers sized from info are allocated.</param>
        /// <since_tizen> 6 </since_tizen>
        private TensorsData(IntPtr handle, TensorsInfo info, bool isFetch)
        {
            NNStreamer.CheckNNStreamerSupport();

            /* Set internal objects */
            _handle = handle;
            _tensorsInfo = info;

            /* Query the number of tensors held by the native handle */
            int count;
            NNStreamerError ret = Interop.Util.GetTensorsCount(_handle, out count);
            NNStreamer.CheckException(ret, "unable to get the count of TensorsData");

            _dataList = new List<byte[]>(count);

            if (isFetch)
            {
                /* Copy each native tensor buffer into a managed byte array */
                for (int i = 0; i < count; ++i)
                {
                    IntPtr rawData;
                    int size;

                    ret = Interop.Util.GetTensorData(_handle, i, out rawData, out size);
                    NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + i.ToString());

                    _dataList.Add(Interop.Util.IntPtrToByteArray(rawData, size));
                }
            }
            else
            {
                /* Bug fix: a null info previously crashed with NullReferenceException below */
                if (info == null)
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");

                /* Allocate empty managed buffers sized from the tensors information */
                for (int i = 0; i < count; ++i)
                {
                    _dataList.Add(new byte[info.GetTensorSize(i)]);
                }
            }
        }

        /// <summary>
        /// Destructor of the TensorsData instance
        /// </summary>
        /// <since_tizen> 6 </since_tizen>
        ~TensorsData()
        {
            Dispose(false);
        }

        /// <summary>
        /// Gets the number of Tensor in TensorsData class
        /// </summary>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public int Count
        {
            get {
                NNStreamer.CheckNNStreamerSupport();

                return _dataList.Count;
            }
        }

        /// <summary>
        /// Gets the tensors information.
        /// </summary>
        /// <returns>The TensorsInfo instance</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 8 </since_tizen>
        public TensorsInfo TensorsInfo
        {
            get {
                NNStreamer.CheckNNStreamerSupport();

                return _tensorsInfo;
            }
        }

        /// <summary>
        /// Allocates a new TensorsData instance with the given tensors information.
        /// </summary>
        /// <param name="info">TensorsInfo object which has Tensor information</param>
        /// <returns>The allocated TensorsData instance</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 8 </since_tizen>
        public static TensorsData Allocate(TensorsInfo info)
        {
            NNStreamer.CheckNNStreamerSupport();

            if (info == null)
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");

            return info.GetTensorsData();
        }

        /// <summary>
        /// Sets a tensor data to given index.
        /// </summary>
        /// <param name="index">The index of the tensor.</param>
        /// <param name="buffer">Raw tensor data to be set.</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <exception cref="ArgumentException">Thrown when the data is not valid.</exception>
        /// <since_tizen> 6 </since_tizen>
        public void SetTensorData(int index, byte[] buffer)
        {
            NNStreamer.CheckNNStreamerSupport();

            CheckIndex(index);
            CheckDataBuffer(index, buffer);

            _dataList[index] = buffer;
        }

        /// <summary>
        /// Gets a tensor data to given index.
        /// </summary>
        /// <param name="index">The index of the tensor.</param>
        /// <returns>Raw tensor data</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public byte[] GetTensorData(int index)
        {
            NNStreamer.CheckNNStreamerSupport();

            CheckIndex(index);

            return _dataList[index];
        }

        /// <summary>
        /// Releases any unmanaged resources used by this object.
        /// </summary>
        /// <since_tizen> 6 </since_tizen>
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        /// <summary>
        /// Releases any unmanaged resources used by this object. Can also dispose any other disposable objects.
        /// </summary>
        /// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
        protected virtual void Dispose(bool disposing)
        {
            if (_disposed)
                return;

            if (disposing)
            {
                // release managed object
            }

            // release unmanaged objects: destroy the native tensors-data handle
            if (_handle != IntPtr.Zero)
            {
                NNStreamerError ret = Interop.Util.DestroyTensorsData(_handle);
                if (ret != NNStreamerError.None)
                {
                    /* Do not throw from Dispose/finalizer; log and continue */
                    Log.Error(NNStreamer.TAG, "failed to destroy TensorsData object");
                }
                _handle = IntPtr.Zero;
            }
            _disposed = true;
        }

        /* Returns the native tensors-data handle owned by this instance. */
        internal IntPtr GetHandle()
        {
            return _handle;
        }

        /* Pushes every managed buffer into the native handle before invoking the model. */
        internal void PrepareInvoke()
        {
            int count = _dataList.Count;

            for (int i = 0; i < count; ++i)
            {
                byte[] data = _dataList[i];
                NNStreamerError ret = Interop.Util.SetTensorData(_handle, i, data, data.Length);
                NNStreamer.CheckException(ret, "unable to set the buffer of TensorsData: " + i.ToString());
            }
        }

        /* Wraps a native tensors-data handle; infoHandle may be IntPtr.Zero when no info is available. */
        internal static TensorsData CreateFromNativeHandle(IntPtr dataHandle, IntPtr infoHandle, bool isFetch)
        {
            TensorsInfo info = null;

            if (infoHandle != IntPtr.Zero)
            {
                info = TensorsInfo.ConvertTensorsInfoFromHandle(infoHandle);
            }

            return new TensorsData(dataHandle, info, isFetch);
        }

        /* Validates that index is within the range of stored tensor buffers. */
        private void CheckIndex(int index)
        {
            if (index < 0 || index >= _dataList.Count)
            {
                string msg = "Invalid index [" + index + "] of the tensors";
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
            }
        }

        /* Validates a user-supplied buffer against the tensors information, if available. */
        private void CheckDataBuffer(int index, byte[] data)
        {
            if (data == null)
            {
                string msg = "data is not valid";
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
            }

            if (index >= Tensor.SizeLimit)
            {
                string msg = "Max size of the tensors is " + Tensor.SizeLimit;
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.QuotaExceeded, msg);
            }

            if (_tensorsInfo != null)
            {
                if (index >= _tensorsInfo.Count)
                {
                    string msg = "Current information has " + _tensorsInfo.Count + " tensors";
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.QuotaExceeded, msg);
                }

                /* The buffer must match the exact byte size declared for this tensor */
                int size = _tensorsInfo.GetTensorSize(index);
                if (data.Length != size)
                {
                    string msg = "Invalid buffer size, required size is " + size.ToString();
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
                }
            }
        }
    }
}