fb81b3c0a3d7568be667b5d8f7b85637e8eff00b
[platform/core/csapi/tizenfx.git] / src / Tizen.MachineLearning.Inference / Tizen.MachineLearning.Inference / Commons.cs
1 /*
2 * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved.
3 *
4 * Licensed under the Apache License, Version 2.0 (the License);
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an AS IS BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 using System;
18 using System.IO;
19 using Tizen.System;
20
21 namespace Tizen.MachineLearning.Inference
22 {
23     /// <summary>
24     /// Possible data element types of Tensor in NNStreamer.
25     /// </summary>
26     /// <since_tizen> 6 </since_tizen>
27     public enum TensorType
28     {
29 #pragma warning disable CA1720 // Identifier contains type name
30         /// <summary>
31         /// Integer 32bit
32         /// </summary>
33         Int32 = 0,
34         /// <summary>
35         /// Unsigned integer 32bit
36         /// </summary>
37         UInt32,
38         /// <summary>
39         /// Integer 16bit
40         /// </summary>
41         Int16,
42         /// <summary>
43         /// Unsigned integer 16bit
44         /// </summary>
45         UInt16,
46         /// <summary>
47         /// Integer 8bit
48         /// </summary>
49         Int8,
50         /// <summary>
51         /// Unsigned integer 8bit
52         /// </summary>
53         UInt8,
54         /// <summary>
55         /// Float 64bit
56         /// </summary>
57         Float64,
58         /// <summary>
59         /// Float 32bit
60         /// </summary>
61         Float32,
62         /// <summary>
63         /// Integer 64bit
64         /// </summary>
65         Int64,
66         /// <summary>
67         /// Unsigned integer 64bit
68         /// </summary>
69         UInt64,
70 #pragma warning restore CA1720 // Identifier contains type name
71     }
72
    /// <summary>
    /// Error codes reported by the native NNStreamer (machine-learning) API,
    /// aliased onto the shared Tizen error codes. NNStreamerExceptionFactory
    /// translates these into managed exception types.
    /// </summary>
    internal enum NNStreamerError
    {
        None = Tizen.Internals.Errors.ErrorCode.None,
        InvalidParameter = Tizen.Internals.Errors.ErrorCode.InvalidParameter,
        StreamsPipe = Tizen.Internals.Errors.ErrorCode.StreamsPipe,
        TryAgain = Tizen.Internals.Errors.ErrorCode.TryAgain,
        Unknown = Tizen.Internals.Errors.ErrorCode.Unknown,
        TimedOut = Tizen.Internals.Errors.ErrorCode.TimedOut,
        NotSupported = Tizen.Internals.Errors.ErrorCode.NotSupported,
        PermissionDenied = Tizen.Internals.Errors.ErrorCode.PermissionDenied,
        OutOfMemory = Tizen.Internals.Errors.ErrorCode.OutOfMemory,
        QuotaExceeded = Tizen.Internals.Errors.ErrorCode.QuotaExceeded,
        InvalidOperation = Tizen.Internals.Errors.ErrorCode.InvalidOperation,
    }
87
    /// <summary>
    /// Selector type of a switch node in an NNStreamer pipeline.
    /// NOTE(review): semantics inferred from member names; values presumably
    /// mirror the native pipeline-switch enum — confirm before reordering.
    /// </summary>
    internal enum SwitchType
    {
        OutputSelector = 0,
        InputSelector = 1,
    }
93
    /// <summary>
    /// Ownership policy for data buffers handed to a pipeline.
    /// NOTE(review): presumably AutoFree means the pipeline releases the buffer
    /// and NotFreed leaves ownership with the caller — confirm against the
    /// native buffer-policy enum before relying on this.
    /// </summary>
    internal enum PipelineBufferPolicy
    {
        AutoFree = 0,
        NotFreed = 1,
    }
99
    /* NOTE(review): values start at 1; the native pipeline-state enum presumably
     * reserves 0 for an 'unknown' state — confirm against the native definition
     * before adding or reordering members. */
    /// <summary>
    /// States of NNStreamer pipeline.
    /// </summary>
    /// <since_tizen> 8 </since_tizen>
    public enum PipelineState
    {
        /// <summary>
        /// Initial state of the pipeline.
        /// </summary>
        Null = 1,
        /// <summary>
        /// The pipeline is ready to go to PAUSED.
        /// </summary>
        Ready = 2,
        /// <summary>
        /// The pipeline is stopped, ready to accept and process data.
        /// </summary>
        Paused = 3,
        /// <summary>
        /// The pipeline is started and the data is flowing.
        /// </summary>
        Playing = 4,
    }
123
    /// <summary>
    /// Types of Neural Network Framework.
    /// </summary>
    /// <since_tizen> 6 </since_tizen>
    public enum NNFWType
    {
        /// <summary>
        /// NNFW is not specified (Try to determine the NNFW with file extension).
        /// </summary>
        Any = 0,
        /// <summary>
        /// Custom filter (Independent shared object).
        /// </summary>
        CustomFilter = 1,
        /// <summary>
        /// Tensorflow-lite (.tflite).
        /// </summary>
        TensorflowLite = 2,
        /// <summary>
        /// Tensorflow (.pb).
        /// </summary>
        Tensorflow = 3,
        /// <summary>
        /// Neural Network Inference framework, which is developed by SR (Samsung Research).
        /// </summary>
        NNFW = 4,
        /// <summary>
        /// Intel Movidius Neural Compute SDK (libmvnc).
        /// </summary>
        /// <since_tizen> 8 </since_tizen>
        MVNC = 5,
        /// <summary>
        /// Intel OpenVINO.
        /// </summary>
        /// <since_tizen> 8 </since_tizen>
        OpenVINO = 6,
        /// <summary>
        /// VeriSilicon's Vivante.
        /// </summary>
        /// <since_tizen> 8 </since_tizen>
        Vivante = 7,
        /// <summary>
        /// Google Coral Edge TPU (USB).
        /// </summary>
        /// <since_tizen> 8 </since_tizen>
        EdgeTPU = 8,
        /// <summary>
        /// Arm Neural Network framework (support for caffe and tensorflow-lite).
        /// </summary>
        /// <since_tizen> 8 </since_tizen>
        ArmNN = 9,
        /// <summary>
        /// Qualcomm SNPE (Snapdragon Neural Processing Engine) (.dlc)
        /// </summary>
        /// <since_tizen> 9 </since_tizen>
        SNPE = 10,
        /// <summary>
        /// PyTorch (.pt)
        /// </summary>
        /// <since_tizen> 9 </since_tizen>
        PyTorch = 11,
        /// <summary>
        /// Inference supported from NNTrainer, SR On-device Training Framework
        /// </summary>
        /// <since_tizen> 9 </since_tizen>
        NNTrainerInferenceFW = 12,
        /// <summary>
        /// Inference framework for Samsung Tizen TV
        /// </summary>
        /// <since_tizen> 9 </since_tizen>
        VDAIFW = 13,
        /// <summary>
        /// TriXEngine accesses TRIV/TRIA NPU low-level drivers directly (.tvn). You may need to use high-level drivers wrapping this low-level driver in some devices: e.g., AIFW
        /// </summary>
        /// <since_tizen> 9 </since_tizen>
        TriXEngine = 14,
    }
201
    /// <summary>
    /// Types of hardware resources to be used for NNFWs. Note that if the specified affinity is not supported by the driver or hardware, it is ignored.
    /// </summary>
    /// <since_tizen> 6 </since_tizen>
    public enum HWType
    {
        /// <summary>
        /// Hardware resource is not specified.
        /// </summary>
        Any = 0,
        /// <summary>
        /// Try to schedule and optimize if possible.
        /// </summary>
        Auto = 1,
        /// <summary>
        /// Any CPU if possible.
        /// </summary>
        CPU = 0x1000,
        /// <summary>
        /// SIMD in CPU if possible.
        /// </summary>
        /// <since_tizen> 8 </since_tizen>
        CPUSIMD = 0x1100,
        /// <summary>
        /// NEON in CPU.
        /// </summary>
        /// <since_tizen> 8 </since_tizen>
        CPUNeon = CPUSIMD,
        /// <summary>
        /// Any GPU if possible.
        /// </summary>
        GPU = 0x2000,
        /// <summary>
        /// Any NPU if possible.
        /// </summary>
        NPU = 0x3000,
        /// <summary>
        /// Intel Movidius Stick.
        /// </summary>
        /// <since_tizen> 8 </since_tizen>
        NPUMovidius = 0x3001,
        /// <summary>
        /// Google Coral Edge TPU (USB).
        /// </summary>
        /// <since_tizen> 8 </since_tizen>
        NPUEdgeTPU = 0x3002,
        /// <summary>
        /// VeriSilicon's Vivante.
        /// </summary>
        /// <since_tizen> 8 </since_tizen>
        NPUVivante = 0x3003,
        /// <summary>
        /// Samsung S.LSI NPU
        /// </summary>
        /// <since_tizen> 9 </since_tizen>
        NPUSLSI = 0x3004,
        /// <summary>
        /// Any SR (Samsung Research) made NPU.
        /// </summary>
        /// <since_tizen> 8 </since_tizen>
        NPUSR = 0x13000,
    }
264
    /// <summary>
    /// Constant limits and sentinel values shared by the Tensor-related classes.
    /// </summary>
    internal static class Tensor
    {
        /// <summary>
        /// The maximum rank that NNStreamer supports with Tizen APIs.
        /// </summary>
        internal const int RankLimit = 4;

        /// <summary>
        /// The maximum number of other/tensor instances that other/tensors may have.
        /// </summary>
        internal const int SizeLimit = 16;

        /// <summary>
        /// Unknown Type of Tensor information. It is internally used for error check.
        /// This is one past the last valid TensorType value (Int32..UInt64 are 0..9).
        /// </summary>
        internal const int UnknownType = 10;

        /// <summary>
        /// Invalid count of TensorsData. It is internally used for error check.
        /// </summary>
        internal const int InvalidCount = -1;
    }
287
288     internal static class NNStreamer
289     {
290         internal const string TAG = "ML.Inference";
291
292         internal const string FeatureKey = "http://tizen.org/feature/machine_learning.inference";
293
294         private static int _alreadyChecked = -1;    /* -1: not yet, 0: Not Support, 1: Support */
295
296         internal static void CheckException(NNStreamerError error, string msg)
297         {
298             if (error != NNStreamerError.None)
299             {
300                 Log.Error(NNStreamer.TAG, msg + ": " + error.ToString());
301                 throw NNStreamerExceptionFactory.CreateException(error, msg);
302             }
303         }
304
305         internal static void CheckNNStreamerSupport()
306         {
307             if (_alreadyChecked == 1)
308                 return;
309
310             string msg = "Machine Learning Inference Feature is not supported.";
311             if (_alreadyChecked == 0)
312             {
313                 Log.Error(NNStreamer.TAG, msg);
314                 throw NNStreamerExceptionFactory.CreateException(NNStreamerError.NotSupported, msg);
315             }
316
317             /* Feature Key check */
318             bool isSupported = false;
319             bool error = Information.TryGetValue<bool>(FeatureKey, out isSupported);
320             if (!error || !isSupported)
321             {
322                 _alreadyChecked = 0;
323
324                 Log.Error(NNStreamer.TAG, msg);
325                 throw NNStreamerExceptionFactory.CreateException(NNStreamerError.NotSupported, msg);
326             }
327
328             /* Check required so files */
329             try
330             {
331                 Interop.Util.CheckNNFWAvailability(NNFWType.TensorflowLite, HWType.CPU, out isSupported);
332             }
333             catch (DllNotFoundException)
334             {
335                 _alreadyChecked = 0;
336                 Log.Error(NNStreamer.TAG, msg);
337                 throw NNStreamerExceptionFactory.CreateException(NNStreamerError.NotSupported, msg);
338             }
339
340             _alreadyChecked = 1;
341         }
342     }
343
344     internal class NNStreamerExceptionFactory
345     {
346         internal static Exception CreateException(NNStreamerError err, string msg)
347         {
348             Exception exp;
349
350             switch (err)
351             {
352                 case NNStreamerError.InvalidParameter:
353                     exp = new ArgumentException(msg);
354                     break;
355
356                 case NNStreamerError.NotSupported:
357                     exp = new NotSupportedException(msg);
358                     break;
359
360                 case NNStreamerError.PermissionDenied:
361                     exp = new UnauthorizedAccessException(msg);
362                     break;
363
364                 case NNStreamerError.StreamsPipe:
365                 case NNStreamerError.TryAgain:
366                     exp = new InvalidOperationException(msg);
367                     break;
368
369                 case NNStreamerError.TimedOut:
370                     exp = new TimeoutException(msg);
371                     break;
372
373                 case NNStreamerError.QuotaExceeded:
374                     exp = new IndexOutOfRangeException(msg);
375                     break;
376
377                 default:
378                     exp = new InvalidOperationException(msg);
379                     break;
380             }
381             return exp;
382         }
383     }
384 }