[MachineLearning.Inference] Pipeline class to execute neural network stream (#1404)
[platform/core/csapi/tizenfx.git] / src / Tizen.MachineLearning.Inference / Tizen.MachineLearning.Inference / TensorsData.cs
1 /*
2 * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved.
3 *
4 * Licensed under the Apache License, Version 2.0 (the License);
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an AS IS BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
using System;
using System.Collections;
using System.Collections.Generic;
19
20 namespace Tizen.MachineLearning.Inference
21 {
22     /// <summary>
23     /// The TensorsData class sets and gets the buffer data for each Tensor.
24     /// </summary>
25     /// <since_tizen> 6 </since_tizen>
26     public class TensorsData : IDisposable
27     {
28         private IntPtr _handle = IntPtr.Zero;
29         private bool _disposed = false;
30         private TensorsInfo _tensorsInfo = null;
31         private ArrayList _dataList = null;
32
33         /// <summary>
34         /// Creates a TensorsData instance with handle which is given by TensorsInfo.
35         /// </summary>
36         /// <param name="handle">The handle of tensors data.</param>
37         /// <param name="info">The handle of tensors info. (Default: null)</param>
38         /// <param name="isFetch">The boolean value for fetching the data (Default: false)</param>
39         /// <param name="hasOwnership">The boolean value for automatic disposal (Default: true)</param>
40         /// <since_tizen> 6 </since_tizen>
41         private TensorsData(IntPtr handle, TensorsInfo info, bool isFetch = false, bool hasOwnership = true)
42         {
43             NNStreamer.CheckNNStreamerSupport();
44             NNStreamerError ret = NNStreamerError.None;
45
46             /* Set internal object */
47             _handle = handle;
48             _tensorsInfo = info;
49
50             /* Set count */
51             int count = 0;
52             ret = Interop.Util.GetTensorsCount(_handle, out count);
53             NNStreamer.CheckException(ret, "unable to get the count of TensorsData");
54
55             _dataList = new ArrayList(count);
56
57             if (isFetch)
58             {
59                 for (int i = 0; i < count; ++i)
60                 {
61                     IntPtr raw_data;
62                     byte[] bufData = null;
63                     int size;
64
65                     ret = Interop.Util.GetTensorData(_handle, i, out raw_data, out size);
66                     NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + i.ToString());
67
68                     bufData = Interop.Util.IntPtrToByteArray(raw_data, size);
69                     _dataList.Add(bufData);
70                 }
71             }
72             else
73             {
74                 for (int i = 0; i < count; ++i)
75                 {
76                     int size = info.GetTensorSize(i);
77                     byte[] bufData = new byte[size];
78
79                     _dataList.Add(bufData);
80                 }
81             }
82
83             /* If it created as DataReceivedEventArgs, do not dispose. */
84             _disposed = !hasOwnership;
85         }
86
87         /// <summary>
88         /// Destructor of the TensorsData instance
89         /// </summary>
90         /// <since_tizen> 6 </since_tizen>
91         ~TensorsData()
92         {
93             Dispose(false);
94         }
95
96         /// <summary>
97         /// Gets the number of Tensor in TensorsData class
98         /// </summary>
99         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
100         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
101         /// <since_tizen> 6 </since_tizen>
102         public int Count
103         {
104             get {
105                 NNStreamer.CheckNNStreamerSupport();
106
107                 return _dataList.Count;
108             }
109         }
110
111         /// <summary>
112         /// Gets the tensors information.
113         /// </summary>
114         /// <returns>The TensorsInfo instance</returns>
115         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
116         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
117         /// <since_tizen> 8 </since_tizen>
118         public TensorsInfo TensorsInfo
119         {
120             get {
121                 NNStreamer.CheckNNStreamerSupport();
122
123                 return _tensorsInfo;
124             }
125         }
126
127         /// <summary>
128         /// Allocates a new TensorsData instance with the given tensors information.
129         /// </summary>
130         /// <param name="info">TensorsInfo object which has Tensor information</param>
131         /// <returns>The TensorsInfo instance</returns>
132         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
133         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
134         /// <since_tizen> 8 </since_tizen>
135         public static TensorsData Allocate(TensorsInfo info)
136         {
137             NNStreamer.CheckNNStreamerSupport();
138
139             if (info == null)
140                 throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
141
142             TensorsData retData = info.GetTensorsData();
143             return retData;
144         }
145
146         /// <summary>
147         /// Sets a tensor data to given index.
148         /// </summary>
149         /// <param name="index">The index of the tensor.</param>
150         /// <param name="buffer">Raw tensor data to be set.</param>
151         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
152         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
153         /// <exception cref="ArgumentException">Thrown when the data is not valid.</exception>
154         /// <since_tizen> 6 </since_tizen>
155         public void SetTensorData(int index, byte[] buffer)
156         {
157             NNStreamer.CheckNNStreamerSupport();
158
159             CheckIndex(index);
160             CheckDataBuffer(index, buffer);
161
162             _dataList[index] = buffer;
163         }
164
165         /// <summary>
166         /// Gets a tensor data to given index.
167         /// </summary>
168         /// <param name="index">The index of the tensor.</param>
169         /// <returns>Raw tensor data</returns>
170         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
171         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
172         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
173         /// <since_tizen> 6 </since_tizen>
174         public byte[] GetTensorData(int index)
175         {
176             NNStreamer.CheckNNStreamerSupport();
177
178             CheckIndex(index);
179
180             return (byte[])_dataList[index];
181         }
182
183         /// <summary>
184         /// Releases any unmanaged resources used by this object.
185         /// </summary>
186         /// <since_tizen> 6 </since_tizen>
187         public void Dispose()
188         {
189             Dispose(true);
190             GC.SuppressFinalize(this);
191         }
192
193         /// <summary>
194         /// Releases any unmanaged resources used by this object. Can also dispose any other disposable objects.
195         /// </summary>
196         /// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
197         protected virtual void Dispose(bool disposing)
198         {
199             if (_disposed)
200                 return;
201
202             if (disposing)
203             {
204                 // release managed object
205             }
206
207             // release unmanaged objects
208             if (_handle != IntPtr.Zero)
209             {
210                 NNStreamerError ret = Interop.Util.DestroyTensorsData(_handle);
211                 if (ret != NNStreamerError.None)
212                 {
213                     Log.Error(NNStreamer.TAG, "failed to destroy TensorsData object");
214                 }
215                 _handle = IntPtr.Zero;
216             }
217             _disposed = true;
218         }
219
220         internal IntPtr GetHandle()
221         {
222             return _handle;
223         }
224
225         internal void PrepareInvoke()
226         {
227             NNStreamerError ret = NNStreamerError.None;
228             int count = _dataList.Count;
229
230             for (int i = 0; i < count; ++i)
231             {
232                 byte[] data = (byte[])_dataList[i];
233                 ret = Interop.Util.SetTensorData(_handle, i, data, data.Length);
234                 NNStreamer.CheckException(ret, "unable to set the buffer of TensorsData: " + i.ToString());
235             }
236         }
237
238         internal static TensorsData CreateFromNativeHandle(IntPtr dataHandle, IntPtr infoHandle, bool isFetch = false, bool hasOwnership = true)
239         {
240             TensorsInfo info = null;
241
242             if (infoHandle != IntPtr.Zero)
243             {
244                 info = TensorsInfo.ConvertTensorsInfoFromHandle(infoHandle);
245             }
246
247             return new TensorsData(dataHandle, info, isFetch, hasOwnership);
248         }
249
250         private void CheckIndex(int index)
251         {
252             if (index < 0 || index >= _dataList.Count)
253             {
254                 string msg = "Invalid index [" + index + "] of the tensors";
255                 throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
256             }
257         }
258
259         private void CheckDataBuffer(int index, byte[] data)
260         {
261             if (data == null)
262             {
263                 string msg = "data is not valid";
264                 throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
265             }
266
267             if (index >= Tensor.SizeLimit)
268             {
269                 string msg = "Max size of the tensors is " + Tensor.SizeLimit;
270                 throw NNStreamerExceptionFactory.CreateException(NNStreamerError.QuotaExceeded, msg);
271             }
272
273             if (_tensorsInfo != null)
274             {
275                 if (index >= _tensorsInfo.Count)
276                 {
277                     string msg = "Current information has " + _tensorsInfo.Count + " tensors";
278                     throw NNStreamerExceptionFactory.CreateException(NNStreamerError.QuotaExceeded, msg);
279                 }
280
281                 int size = _tensorsInfo.GetTensorSize(index);
282                 if (data.Length != size)
283                 {
284                     string msg = "Invalid buffer size, required size is " + size.ToString();
285                     throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
286                 }
287             }
288         }
289     }
290 }