[MachineLearning.Inference] Change internal rank limit and default value
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsInfo.cs (platform/core/csapi/tizenfx.git)
/*
 * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the License);
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Linq;
using System.Collections.Generic;

namespace Tizen.MachineLearning.Inference
{
    /// <summary>
    /// The TensorsInfo class manages the information of each tensor, such as its name, type, and dimension.
    /// </summary>
    /// <since_tizen> 6 </since_tizen>
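    /// <example>
    /// A minimal usage sketch (for illustration only; the tensor type and dimension values
    /// below are arbitrary assumptions, not requirements of the API):
    /// <code>
    /// TensorsInfo info = new TensorsInfo();
    /// info.AddTensorInfo(TensorType.UInt8, new int[] { 3, 224, 224, 1 });
    /// TensorsData data = info.GetTensorsData();
    /// </code>
    /// </example>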
    public class TensorsInfo : IDisposable, IEquatable<TensorsInfo>
    {
        private List<TensorInfo> _infoList;
        private IntPtr _handle = IntPtr.Zero;
        private bool _disposed = false;

        /// <summary>
        /// Gets the number of tensor information entries that have been added.
        /// </summary>
        /// <since_tizen> 6 </since_tizen>
        public int Count => _infoList.Count;

        /// <summary>
        /// Creates a TensorsInfo instance.
        /// </summary>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public TensorsInfo()
        {
            NNStreamer.CheckNNStreamerSupport();

            Log.Info(NNStreamer.TAG, "TensorsInfo is created");
            _infoList = new List<TensorInfo>();
        }

        /// <summary>
        /// Destroys the TensorsInfo resource.
        /// </summary>
        /// <since_tizen> 6 </since_tizen>
        ~TensorsInfo()
        {
            Dispose(false);
        }

        /// <summary>
        /// Adds the information of a tensor to the TensorsInfo instance. Note that a TensorsInfo instance supports up to 16 tensors.
        /// </summary>
        /// <param name="type">Data element type of the tensor.</param>
        /// <param name="dimension">Dimension of the tensor. Note that only dimensions up to the fourth rank are supported.</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="IndexOutOfRangeException">Thrown when the number of tensors already exceeds the size limit (i.e., Tensor.SizeLimit).</exception>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
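        /// <example>
        /// A short sketch of adding an unnamed tensor (the type and dimension values are
        /// illustrative assumptions only):
        /// <code>
        /// var info = new TensorsInfo();
        /// info.AddTensorInfo(TensorType.Float32, new int[] { 10 });
        /// // info.Count is now 1
        /// </code>
        /// </example>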
        public void AddTensorInfo(TensorType type, int[] dimension)
        {
            NNStreamer.CheckNNStreamerSupport();

            AddTensorInfo(null, type, dimension);
        }

        /// <summary>
        /// Adds the information of a tensor to the TensorsInfo instance. Note that a TensorsInfo instance supports up to 16 tensors.
        /// </summary>
        /// <param name="name">Name of the tensor.</param>
        /// <param name="type">Data element type of the tensor.</param>
        /// <param name="dimension">Dimension of the tensor. Note that only dimensions up to the fourth rank are supported.</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="IndexOutOfRangeException">Thrown when the number of tensors already exceeds the size limit (i.e., Tensor.SizeLimit).</exception>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
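        /// <example>
        /// A sketch of adding a named tensor (the name and dimension below are placeholders):
        /// <code>
        /// var info = new TensorsInfo();
        /// info.AddTensorInfo("input", TensorType.UInt8, new int[] { 3, 224, 224, 1 });
        /// </code>
        /// </example>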
        public void AddTensorInfo(string name, TensorType type, int[] dimension)
        {
            NNStreamer.CheckNNStreamerSupport();

            int idx = _infoList.Count;
            if (idx >= Tensor.SizeLimit) {
                throw new IndexOutOfRangeException("Max size of the tensors is " + Tensor.SizeLimit);
            }

            int[] dim = ConvertDimension(dimension);
            _infoList.Add(new TensorInfo(name, type, dim));

            if (_handle != IntPtr.Zero)
            {
                NNStreamerError ret = NNStreamerError.None;

                ret = Interop.Util.SetTensorsCount(_handle, _infoList.Count);
                NNStreamer.CheckException(ret, "Failed to set the number of tensors");

                UpdateInfoHandle(_handle, idx, name, type, dim);
            }
        }

        /// <summary>
        /// Sets the name of the tensor at the given index.
        /// </summary>
        /// <param name="idx">The index of the tensor to be updated.</param>
        /// <param name="name">The tensor name to be set.</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public void SetTensorName(int idx, string name)
        {
            NNStreamer.CheckNNStreamerSupport();

            CheckIndexBoundary(idx);
            _infoList[idx].Name = name;

            if (_handle != IntPtr.Zero)
            {
                NNStreamerError ret = NNStreamerError.None;
                ret = Interop.Util.SetTensorName(_handle, idx, name);
                NNStreamer.CheckException(ret, "unable to set the name of tensor: " + idx.ToString());
            }
        }

        /// <summary>
        /// Gets the name of the tensor at the given index.
        /// </summary>
        /// <param name="idx">The index of the tensor.</param>
        /// <returns>The tensor name.</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
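        /// <example>
        /// Setting and reading back a tensor name (values below are illustrative only):
        /// <code>
        /// var info = new TensorsInfo();
        /// info.AddTensorInfo(TensorType.Int32, new int[] { 4 });
        /// info.SetTensorName(0, "label");
        /// string name = info.GetTensorName(0);  // "label"
        /// </code>
        /// </example>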
        public string GetTensorName(int idx)
        {
            NNStreamer.CheckNNStreamerSupport();

            CheckIndexBoundary(idx);
            return _infoList[idx].Name;
        }

        /// <summary>
        /// Sets the data type of the tensor at the given index.
        /// </summary>
        /// <param name="idx">The index of the tensor to be updated.</param>
        /// <param name="type">The tensor type to be set.</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public void SetTensorType(int idx, TensorType type)
        {
            NNStreamer.CheckNNStreamerSupport();

            CheckIndexBoundary(idx);
            _infoList[idx].Type = type;

            if (_handle != IntPtr.Zero)
            {
                NNStreamerError ret = NNStreamerError.None;
                ret = Interop.Util.SetTensorType(_handle, idx, type);
                NNStreamer.CheckException(ret, "unable to set the type of tensor: " + idx.ToString());
            }
        }

        /// <summary>
        /// Gets the data type of the tensor at the given index.
        /// </summary>
        /// <param name="idx">The index of the tensor.</param>
        /// <returns>The tensor type.</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
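        /// <example>
        /// Changing and reading back a tensor type (values below are illustrative only):
        /// <code>
        /// var info = new TensorsInfo();
        /// info.AddTensorInfo(TensorType.UInt8, new int[] { 2, 2 });
        /// info.SetTensorType(0, TensorType.Float32);
        /// TensorType type = info.GetTensorType(0);  // TensorType.Float32
        /// </code>
        /// </example>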
        public TensorType GetTensorType(int idx)
        {
            NNStreamer.CheckNNStreamerSupport();

            CheckIndexBoundary(idx);
            return _infoList[idx].Type;
        }

        /// <summary>
        /// Sets the dimension of the tensor at the given index.
        /// </summary>
        /// <param name="idx">The index of the tensor to be updated.</param>
        /// <param name="dimension">The tensor dimension to be set.</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public void SetDimension(int idx, int[] dimension)
        {
            NNStreamer.CheckNNStreamerSupport();

            CheckIndexBoundary(idx);

            int[] dim = ConvertDimension(dimension);

            _infoList[idx].SetDimension(dim);

            if (_handle != IntPtr.Zero)
            {
                NNStreamerError ret = NNStreamerError.None;
                ret = Interop.Util.SetTensorDimension(_handle, idx, dim);
                NNStreamer.CheckException(ret, "unable to set the dimension of tensor: " + idx.ToString());
            }
        }

        /// <summary>
        /// Gets the dimension of the tensor at the given index.
        /// </summary>
        /// <param name="idx">The index of the tensor.</param>
        /// <returns>The tensor dimension.</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
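        /// <example>
        /// Updating and reading back a dimension. The returned array is padded with zeros up
        /// to the internal rank limit (values below are illustrative only):
        /// <code>
        /// var info = new TensorsInfo();
        /// info.AddTensorInfo(TensorType.UInt8, new int[] { 3, 224, 224, 1 });
        /// info.SetDimension(0, new int[] { 1, 28, 28 });
        /// int[] dim = info.GetDimension(0);  // { 1, 28, 28, 0, ... } padded to the rank limit
        /// </code>
        /// </example>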
        public int[] GetDimension(int idx)
        {
            NNStreamer.CheckNNStreamerSupport();

            CheckIndexBoundary(idx);
            return _infoList[idx].Dimension;
        }

        /// <summary>
        /// Creates a TensorsData instance based on the information of this TensorsInfo.
        /// </summary>
        /// <returns>The TensorsData instance.</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed because the information of the TensorsInfo is invalid.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
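        /// <example>
        /// A sketch of allocating a TensorsData buffer that matches this information (the
        /// dimension values are illustrative assumptions):
        /// <code>
        /// var info = new TensorsInfo();
        /// info.AddTensorInfo(TensorType.UInt8, new int[] { 3, 224, 224, 1 });
        /// TensorsData data = info.GetTensorsData();
        /// </code>
        /// </example>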
        public TensorsData GetTensorsData()
        {
            IntPtr tensorsData_h = IntPtr.Zero;
            TensorsData retTensorData;
            NNStreamerError ret = NNStreamerError.None;

            NNStreamer.CheckNNStreamerSupport();

            if (_handle == IntPtr.Zero)
            {
                Log.Info(NNStreamer.TAG, "_handle is IntPtr.Zero\n" + "  GetTensorsInfoHandle() is called");
                GetTensorsInfoHandle();
            }

            ret = Interop.Util.CreateTensorsData(_handle, out tensorsData_h);
            NNStreamer.CheckException(ret, "Failed to create the TensorsData object");

            retTensorData = TensorsData.CreateFromNativeHandle(tensorsData_h, _handle, false);

            return retTensorData;
        }

        /// <summary>
        /// Calculates the byte size of the tensor data.
        /// </summary>
        /// <param name="idx">The index of the tensor information in the list.</param>
        /// <returns>The byte size of the tensor.</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 8 </since_tizen>
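        /// <example>
        /// The byte size is the element size multiplied by each dimension value; for example,
        /// a Float32 tensor of dimension 3:224:224:1 occupies 4 * 3 * 224 * 224 * 1 = 602112
        /// bytes (the values below are illustrative only):
        /// <code>
        /// var info = new TensorsInfo();
        /// info.AddTensorInfo(TensorType.Float32, new int[] { 3, 224, 224, 1 });
        /// int size = info.GetTensorSize(0);  // 602112
        /// </code>
        /// </example>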
        public int GetTensorSize(int idx)
        {
            NNStreamer.CheckNNStreamerSupport();

            CheckIndexBoundary(idx);
            return _infoList[idx].Size;
        }

        /// <summary>
        /// Gets the hash code of this TensorsInfo object.
        /// </summary>
        /// <returns>The hash code.</returns>
        /// <since_tizen> 8 </since_tizen>
        public override int GetHashCode()
        {
            unchecked
            {
                int hash = 19;
                foreach (var info in _infoList)
                {
                    hash = hash * 31 + info.GetHashCode();
                }
                return hash;
            }
        }

        /// <summary>
        /// Compares this TensorsInfo with the given object and checks whether their contents are the same.
        /// </summary>
        /// <param name="obj">Object to compare.</param>
        /// <returns>True if the given object is the same instance or its contents are the same.</returns>
        /// <since_tizen> 8 </since_tizen>
        public override bool Equals(object obj)
        {
            if (obj == null)
                return false;

            TensorsInfo cInfo = obj as TensorsInfo;
            return this.Equals(cInfo);
        }

        /// <summary>
        /// Compares this TensorsInfo with the given TensorsInfo and checks whether their contents are the same.
        /// </summary>
        /// <param name="other">TensorsInfo instance to compare.</param>
        /// <returns>True if the given instance is the same instance or its contents are the same.</returns>
        /// <since_tizen> 8 </since_tizen>
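        /// <example>
        /// Two TensorsInfo instances compare equal when their tensor count, types, and
        /// dimensions all match; tensor names are not considered. For example:
        /// <code>
        /// var a = new TensorsInfo();
        /// a.AddTensorInfo(TensorType.UInt8, new int[] { 2, 2 });
        /// var b = new TensorsInfo();
        /// b.AddTensorInfo(TensorType.UInt8, new int[] { 2, 2 });
        /// bool same = a.Equals(b);  // true
        /// </code>
        /// </example>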
        public bool Equals(TensorsInfo other)
        {
            if (other == null)
                return false;

            if (this.Count != other.Count)
                return false;

            for (int i = 0; i < this.Count; ++i)
            {
                // Type
                if (this.GetTensorType(i) != other.GetTensorType(i))
                    return false;

                // Dimension
                if (!this.GetDimension(i).SequenceEqual(other.GetDimension(i)))
                    return false;
            }
            return true;
        }

        /// <summary>
        /// Creates a new TensorsInfo object cloned from the current tensors information.
        /// </summary>
        /// <returns>Hard-copied TensorsInfo object.</returns>
        /// <since_tizen> 9 </since_tizen>
        internal TensorsInfo Clone()
        {
            TensorsInfo retInfo = new TensorsInfo();

            foreach (TensorInfo t in _infoList) {
                retInfo.AddTensorInfo(t.Name, t.Type, t.Dimension);
            }

            return retInfo;
        }

        /// <summary>
        /// Creates a TensorsInfo object from a native handle.
        /// </summary>
        /// <param name="handle">Handle of the TensorsInfo object.</param>
        /// <returns>TensorsInfo object.</returns>
        internal static TensorsInfo ConvertTensorsInfoFromHandle(IntPtr handle)
        {
            TensorsInfo retInfo = null;
            NNStreamerError ret = NNStreamerError.None;

            int count;
            ret = Interop.Util.GetTensorsCount(handle, out count);
            NNStreamer.CheckException(ret, "Fail to get Tensors' count");

            retInfo = new TensorsInfo();

            for (int i = 0; i < count; ++i)
            {
                string name;
                TensorType type;
                uint[] dim = new uint[Tensor.RankLimit];

                ret = Interop.Util.GetTensorName(handle, i, out name);
                NNStreamer.CheckException(ret, "Fail to get Tensor's name");

                ret = Interop.Util.GetTensorType(handle, i, out type);
                NNStreamer.CheckException(ret, "Fail to get Tensor's type");

                ret = Interop.Util.GetTensorDimension(handle, i, dim);
                NNStreamer.CheckException(ret, "Fail to get Tensor's dimension");

                // Note: the CLR permits reinterpreting uint[] as int[] through an object cast.
                retInfo.AddTensorInfo(name, type, (int[])(object)dim);
            }
            return retInfo;
        }

        /// <summary>
        /// Returns the native TensorsInfo handle.
        /// </summary>
        /// <returns>IntPtr TensorsInfo handle.</returns>
        internal IntPtr GetTensorsInfoHandle()
        {
            NNStreamerError ret = NNStreamerError.None;
            IntPtr ret_handle = IntPtr.Zero;
            int idx;

            /* Already created */
            if (_handle != IntPtr.Zero)
                return _handle;

            /* Check required parameters */
            int num = _infoList.Count;
            if (num <= 0 || num > Tensor.SizeLimit)
                ret = NNStreamerError.InvalidParameter;
            NNStreamer.CheckException(ret, "number of Tensor in TensorsInfo is invalid: " + _infoList.Count);

            /* Create TensorsInfo object */
            ret = Interop.Util.CreateTensorsInfoExtended(out ret_handle);
            NNStreamer.CheckException(ret, "fail to create TensorsInfo object");

            /* Set the number of tensors */
            ret = Interop.Util.SetTensorsCount(ret_handle, _infoList.Count);
            NNStreamer.CheckException(ret, "unable to set the number of tensors");

            /* Set each Tensor info */
            idx = 0;
            foreach (TensorInfo t in _infoList)
            {
                UpdateInfoHandle(ret_handle, idx, t.Name, t.Type, t.Dimension);
                idx += 1;
            }

            _handle = ret_handle;
            return ret_handle;
        }

        /// <summary>
        /// Releases any unmanaged resources used by this object.
        /// </summary>
        /// <since_tizen> 6 </since_tizen>
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        /// <summary>
        /// Releases any unmanaged resources used by this object. Can also dispose any other disposable objects.
        /// </summary>
        /// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
        protected virtual void Dispose(bool disposing)
        {
            if (_disposed)
                return;

            if (disposing)
            {
                // release managed objects
                _infoList.Clear();
            }

            // release unmanaged objects
            if (_handle != IntPtr.Zero)
            {
                NNStreamerError ret = Interop.Util.DestroyTensorsInfo(_handle);

                if (ret != NNStreamerError.None)
                {
                    Log.Error(NNStreamer.TAG, "failed to destroy TensorsInfo object");
                }
            }
            _disposed = true;
        }

        private static int[] ConvertDimension(int[] dimension)
        {
            if (dimension == null) {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The dimension is null, it should be a valid array.");
            }

            if (dimension.Length > Tensor.RankLimit) {
                throw new IndexOutOfRangeException("Max rank limit is " + Tensor.RankLimit);
            }

            int[] dim = new int[Tensor.RankLimit];
            int i;

            // Copy the given dimension and pad the remaining ranks with 0.
            for (i = 0 ; i < dimension.Length ; i++) {
                dim[i] = dimension[i];
            }
            for (; i < Tensor.RankLimit ; i++) {
                dim[i] = 0;
            }

            return dim;
        }

        private void UpdateInfoHandle(IntPtr handle, int idx, string name, TensorType type, int[] dimension)
        {
            if (handle != IntPtr.Zero)
            {
                NNStreamerError ret = NNStreamerError.None;

                ret = Interop.Util.SetTensorName(handle, idx, name);
                NNStreamer.CheckException(ret, "Failed to set the name of tensor at index " + idx.ToString());

                ret = Interop.Util.SetTensorType(handle, idx, type);
                NNStreamer.CheckException(ret, "Failed to set the type of tensor at index " + idx.ToString());

                ret = Interop.Util.SetTensorDimension(handle, idx, dimension);
                NNStreamer.CheckException(ret, "Failed to set the dimension of tensor at index " + idx.ToString());
            }
        }

        private void CheckIndexBoundary(int idx)
        {
            if (idx < 0 || idx >= _infoList.Count)
            {
                string msg = "Invalid index [" + idx + "] of the tensors";
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
            }
        }

        private class TensorInfo
        {
            public TensorInfo(TensorType type, int[] dimension)
            {
                Type = type;
                SetDimension(dimension);
            }

            public TensorInfo(string name, TensorType type, int[] dimension)
            {
                Name = name;
                Type = type;
                SetDimension(dimension);
            }

            public void SetDimension(int[] dimension)
            {
                if (dimension == null) {
                    throw new ArgumentException("The dimension is null, it should be a valid array.");
                }

                if (dimension.Length != Tensor.RankLimit) {
                    throw new ArgumentException("The length of the dimension should be " + Tensor.RankLimit);
                }
                Dimension = (int[])dimension.Clone();
            }

            private int GetSize()
            {
                int size = 0;

                // Element size in bytes, determined by the tensor type.
                switch (Type) {
                    case TensorType.Int32:
                    case TensorType.UInt32:
                    case TensorType.Float32:
                        size = 4;
                        break;

                    case TensorType.Int16:
                    case TensorType.UInt16:
                        size = 2;
                        break;

                    case TensorType.Int8:
                    case TensorType.UInt8:
                        size = 1;
                        break;

                    case TensorType.Float64:
                    case TensorType.Int64:
                    case TensorType.UInt64:
                        size = 8;
                        break;

                    default:
                        /* Unknown Type */
                        break;
                }
                // Multiply by each dimension value until an unused (zero) rank is found.
                for (int i = 0; i < Tensor.RankLimit; ++i)
                {
                    if (Dimension[i] == 0)
                        break;
                    size *= Dimension[i];
                }
                return size;
            }

            public int Size
            {
                get {
                    return GetSize();
                }
            }

            public string Name { get; set; } = string.Empty;

            public TensorType Type { get; set; } = TensorType.Int32;

            public int[] Dimension { get; private set; } = new int[Tensor.RankLimit];
        }
    }
}