/*
 * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
using System;
using System.Collections;
namespace Tizen.MachineLearning.Inference
{
    /// <summary>
    /// The TensorsData class sets and gets the buffer data for each Tensor.
    /// </summary>
    /// <since_tizen> 6 </since_tizen>
    public class TensorsData : IDisposable
    {
        // Native tensors-data handle; IntPtr.Zero until assigned, reset on dispose.
        private IntPtr _handle = IntPtr.Zero;
        // True once disposed (or when created without ownership of the handle).
        private bool _disposed = false;
        // Deep-copied tensors information describing each tensor's layout/size.
        private TensorsInfo _tensorsInfo = null;
        // Per-tensor managed buffers (byte[] entries), one per tensor.
        private ArrayList _dataList = null;
34 /// Creates a TensorsData instance with handle which is given by TensorsInfo.
36 /// <param name="handle">The handle of tensors data.</param>
37 /// <param name="info">The handle of tensors info.</param>
38 /// <param name="isFetch">The boolean value for fetching the data (Default: false)</param>
39 /// <param name="hasOwnership">The boolean value for automatic disposal (Default: true)</param>
40 /// <since_tizen> 6 </since_tizen>
41 private TensorsData(IntPtr handle, TensorsInfo info, bool isFetch = false, bool hasOwnership = true)
43 NNStreamer.CheckNNStreamerSupport();
44 NNStreamerError ret = NNStreamerError.None;
46 /* Set internal object */
48 /* Because developers can change the TensorsInfo object, it should be stored as a deep-copied instance. */
49 _tensorsInfo = info.Clone();
53 ret = Interop.Util.GetTensorsCount(_handle, out count);
54 NNStreamer.CheckException(ret, "unable to get the count of TensorsData");
56 _dataList = new ArrayList(count);
60 for (int i = 0; i < count; ++i)
63 byte[] bufData = null;
66 ret = Interop.Util.GetTensorData(_handle, i, out raw_data, out size);
67 NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + i.ToString());
69 bufData = Interop.Util.IntPtrToByteArray(raw_data, size);
70 _dataList.Add(bufData);
75 for (int i = 0; i < count; ++i)
77 int size = info.GetTensorSize(i);
78 byte[] bufData = new byte[size];
80 _dataList.Add(bufData);
84 /* If it created as DataReceivedEventArgs, do not dispose. */
85 _disposed = !hasOwnership;
89 /// Destructor of the TensorsData instance
91 /// <since_tizen> 6 </since_tizen>
98 /// Gets the number of Tensor in TensorsData class
100 /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
101 /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
102 /// <since_tizen> 6 </since_tizen>
106 NNStreamer.CheckNNStreamerSupport();
108 return _dataList.Count;
113 /// Gets the tensors information.
115 /// <returns>The TensorsInfo instance</returns>
116 /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
117 /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
118 /// <since_tizen> 8 </since_tizen>
119 public TensorsInfo TensorsInfo
122 NNStreamer.CheckNNStreamerSupport();
129 /// Allocates a new TensorsData instance with the given tensors information.
131 /// <param name="info">TensorsInfo object which has Tensor information</param>
132 /// <returns>The TensorsInfo instance</returns>
133 /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
134 /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
135 /// <since_tizen> 8 </since_tizen>
136 public static TensorsData Allocate(TensorsInfo info)
138 NNStreamer.CheckNNStreamerSupport();
141 throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
143 TensorsData retData = info.GetTensorsData();
148 /// Sets a tensor data to given index.
150 /// <param name="index">The index of the tensor.</param>
151 /// <param name="buffer">Raw tensor data to be set.</param>
152 /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
153 /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
154 /// <exception cref="ArgumentException">Thrown when the data is not valid.</exception>
155 /// <since_tizen> 6 </since_tizen>
156 public void SetTensorData(int index, byte[] buffer)
158 NNStreamer.CheckNNStreamerSupport();
161 CheckDataBuffer(index, buffer);
163 _dataList[index] = buffer;
167 /// Gets a tensor data to given index.
169 /// <param name="index">The index of the tensor.</param>
170 /// <returns>Raw tensor data</returns>
171 /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
172 /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
173 /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
174 /// <since_tizen> 6 </since_tizen>
175 public byte[] GetTensorData(int index)
177 NNStreamer.CheckNNStreamerSupport();
181 return (byte[])_dataList[index];
185 /// Releases any unmanaged resources used by this object.
187 /// <since_tizen> 6 </since_tizen>
188 public void Dispose()
191 GC.SuppressFinalize(this);
195 /// Releases any unmanaged resources used by this object. Can also dispose any other disposable objects.
197 /// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
198 protected virtual void Dispose(bool disposing)
205 // release managed object
206 _tensorsInfo.Dispose();
210 // release unmanaged objects
211 if (_handle != IntPtr.Zero)
213 NNStreamerError ret = Interop.Util.DestroyTensorsData(_handle);
214 if (ret != NNStreamerError.None)
216 Log.Error(NNStreamer.TAG, "failed to destroy TensorsData object");
218 _handle = IntPtr.Zero;
223 internal IntPtr GetHandle()
228 internal void PrepareInvoke()
230 NNStreamerError ret = NNStreamerError.None;
231 int count = _dataList.Count;
233 for (int i = 0; i < count; ++i)
235 byte[] data = (byte[])_dataList[i];
236 ret = Interop.Util.SetTensorData(_handle, i, data, data.Length);
237 NNStreamer.CheckException(ret, "unable to set the buffer of TensorsData: " + i.ToString());
241 internal static TensorsData CreateFromNativeHandle(IntPtr dataHandle, IntPtr infoHandle, bool isFetch = false, bool hasOwnership = true)
243 TensorsInfo info = null;
245 if (infoHandle != IntPtr.Zero)
247 info = TensorsInfo.ConvertTensorsInfoFromHandle(infoHandle);
250 return new TensorsData(dataHandle, info, isFetch, hasOwnership);
253 private void CheckIndex(int index)
255 if (index < 0 || index >= _dataList.Count)
257 string msg = "Invalid index [" + index + "] of the tensors";
258 throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
262 private void CheckDataBuffer(int index, byte[] data)
266 string msg = "data is not valid";
267 throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
270 if (index >= Tensor.SizeLimit)
272 string msg = "Max size of the tensors is " + Tensor.SizeLimit;
273 throw NNStreamerExceptionFactory.CreateException(NNStreamerError.QuotaExceeded, msg);
276 if (_tensorsInfo != null)
278 if (index >= _tensorsInfo.Count)
280 string msg = "Current information has " + _tensorsInfo.Count + " tensors";
281 throw NNStreamerExceptionFactory.CreateException(NNStreamerError.QuotaExceeded, msg);
284 int size = _tensorsInfo.GetTensorSize(index);
285 if (data.Length != size)
287 string msg = "Invalid buffer size, required size is " + size.ToString();
288 throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);