/*
 * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the License);
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
19 namespace Tizen.MachineLearning.Inference
22 /// The SingleShot class loads a Machine Learning model and make inferences from input data.
24 /// <since_tizen> 6 </since_tizen>
25 public class SingleShot : IDisposable
27 private IntPtr _handle = IntPtr.Zero;
28 private bool _dynamicMode = false;
29 private bool _disposed = false;
31 private TensorsInfo _inInfo = null;
32 private TensorsInfo _outInfo = null;
/// <summary>
/// Loads the neural network model and configures the runtime environment.
/// </summary>
/// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
/// <param name="inTensorsInfo">Input TensorsInfo object</param>
/// <param name="outTensorsInfo">Output TensorsInfo object for inference result</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public SingleShot(string modelAbsPath, TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo)
{
    NNStreamer.CheckNNStreamerSupport();

    if (inTensorsInfo == null || outTensorsInfo == null)
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");

    // Delegate to the shared open routine: any framework, any hardware, static (non-dynamic) mode.
    CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo, NNFWType.Any, HWType.Any, false);
}
/// <summary>
/// Loads the neural network model and configures the runtime environment with
/// Neural Network Framework and hardware information.
/// </summary>
/// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
/// <param name="inTensorsInfo">Input TensorsInfo object</param>
/// <param name="outTensorsInfo">Output TensorsInfo object for inference result</param>
/// <param name="fwType">Types of Neural Network Framework</param>
/// <param name="hwType">Types of hardware resources to be used for NNFWs</param>
/// <param name="isDynamicMode">Support Dynamic Mode</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 8 </since_tizen>
public SingleShot(string modelAbsPath,
    TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo, NNFWType fwType, HWType hwType, bool isDynamicMode)
{
    NNStreamer.CheckNNStreamerSupport();

    if (inTensorsInfo == null || outTensorsInfo == null)
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");

    // All constructor overloads funnel into the same native open call.
    CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo, fwType, hwType, isDynamicMode);
}
/// <summary>
/// Loads the neural network model and configures the runtime environment without TensorsInfo.
/// The framework infers the tensor layout from the model itself.
/// </summary>
/// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
/// <param name="fwType">Types of Neural Network Framework (Default: NNFWType.Any)</param>
/// <param name="hwType">Types of hardware resources to be used for NNFWs (Default: HWType.Any)</param>
/// <param name="isDynamicMode">Support Dynamic Mode (Default: false)</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 8 </since_tizen>
public SingleShot(string modelAbsPath, NNFWType fwType = NNFWType.Any, HWType hwType = HWType.Any, bool isDynamicMode = false)
{
    NNStreamer.CheckNNStreamerSupport();

    // Null TensorsInfo handles tell the native layer to derive them from the model.
    CreateSingleShot(modelAbsPath, null, null, fwType, hwType, isDynamicMode);
}
/// <summary>
/// The information (tensor dimension, type, name and so on) of required input data for the given model.
/// </summary>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public TensorsInfo Input
{
    get
    {
        NNStreamer.CheckNNStreamerSupport();

        // NOTE(review): local handle declaration reconstructed — the extraction dropped it; confirm against upstream.
        IntPtr inHandle = IntPtr.Zero;
        NNStreamerError ret = NNStreamerError.None;

        ret = Interop.SingleShot.GetInputTensorsInfo(_handle, out inHandle);
        NNStreamer.CheckException(ret, "fail to get Input TensorsInfo handle");

        TensorsInfo retInfo = TensorsInfo.ConvertTensorsInfoFromHandle(inHandle);
        return retInfo;
    }
    set
    {
        NNStreamer.CheckNNStreamerSupport();
        NNStreamerError ret = NNStreamerError.None;

        // Guard reconstructed: the visible throw implies a null-value check preceded it.
        if (value == null)
            throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");

        ret = Interop.SingleShot.SetInputInfo(_handle, value.GetTensorsInfoHandle());
        NNStreamer.CheckException(ret, "fail to set Input TensorsInfo");

        // Keep the cached input info in sync so Invoke()'s equality check matches the new layout.
        _inInfo = value;
    }
}
/// <summary>
/// The information (tensor dimension, type, name and so on) of output data for the given model.
/// </summary>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 8 </since_tizen>
public TensorsInfo Output
{
    get
    {
        NNStreamer.CheckNNStreamerSupport();

        IntPtr outHandle = IntPtr.Zero;
        NNStreamerError ret = NNStreamerError.None;

        // Return the cached info when available; avoids a redundant interop round-trip.
        if (_outInfo != null)
            return _outInfo;

        ret = Interop.SingleShot.GetOutputTensorsInfo(_handle, out outHandle);
        NNStreamer.CheckException(ret, "fail to get Output TensorsInfo handle");

        TensorsInfo retInfo = TensorsInfo.ConvertTensorsInfoFromHandle(outHandle);
        return retInfo;
    }
}
/// <summary>
/// Sets the maximum amount of time to wait for an output, in milliseconds.
/// </summary>
/// <param name="ms">The time to wait for an output (milliseconds)</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public void SetTimeout(int ms)
{
    NNStreamer.CheckNNStreamerSupport();
    NNStreamerError ret = NNStreamerError.None;

    // Guard reconstructed from the "Invalid timeout" message: a non-positive
    // timeout is rejected before crossing the interop boundary — TODO confirm bound.
    if (ms <= 0)
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Invalid timeout: " + ms.ToString());

    ret = Interop.SingleShot.SetTimeout(_handle, ms);
    NNStreamer.CheckException(ret, "fail to set the timeout!");
}
/// <summary>
/// Sets the property value for the given model.
/// <para>A model/framework may support changing the model information, such as tensor dimension and data layout, after opening the model.</para>
/// <para>If you try to change an unavailable property, or the model does not allow changing the information, this will raise an exception.</para>
/// <para>For the details about the properties, see 'tensor_filter' plugin definition in <a href="https://github.com/nnstreamer/nnstreamer">NNStreamer</a>.</para>
/// </summary>
/// <param name="name">The property name</param>
/// <param name="value">The property value</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported, or given property is not available.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public void SetValue(string name, string value)
{
    NNStreamerError ret = NNStreamerError.None;

    NNStreamer.CheckNNStreamerSupport();

    /* Check the argument */
    if (string.IsNullOrEmpty(name))
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The property name is invalid");

    if (string.IsNullOrEmpty(value))
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The property value is invalid");

    ret = Interop.SingleShot.SetValue(_handle, name, value);
    if (ret != NNStreamerError.None)
    {
        // Distinguish "unknown property" from "bad value" in the raised message.
        // (Fixed duplicated word "to to" in both messages.)
        if (ret == NNStreamerError.NotSupported)
            NNStreamer.CheckException(ret, "Failed to set the property, the property name is not available.");
        else
            NNStreamer.CheckException(ret, "Failed to set the property, the property value is invalid.");
    }
}
/// <summary>
/// Gets the property value for the given model.
/// </summary>
/// <param name="name">The property name</param>
/// <returns>The property value</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported, or given property is not available.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public string GetValue(string name)
{
    NNStreamerError ret = NNStreamerError.None;
    IntPtr val = IntPtr.Zero;

    NNStreamer.CheckNNStreamerSupport();

    /* Check the argument */
    if (string.IsNullOrEmpty(name))
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The property name is invalid");

    ret = Interop.SingleShot.GetValue(_handle, name, out val);
    if (ret != NNStreamerError.None)
    {
        // Distinguish "unknown property" from "bad value" in the raised message.
        // (Fixed duplicated word "to to" in both messages.)
        if (ret == NNStreamerError.NotSupported)
            NNStreamer.CheckException(ret, "Failed to get the property, the property name is not available.");
        else
            NNStreamer.CheckException(ret, "Failed to get the property, the property value is invalid.");
    }

    // Marshal the native string handle into a managed string.
    return Interop.Util.IntPtrToString(val);
}
/// <summary>
/// Destructor of the SingleShot instance.
/// </summary>
/// <since_tizen> 6 </since_tizen>
/// <summary>
/// Releases any unmanaged resources used by this object.
/// </summary>
/// <since_tizen> 6 </since_tizen>
public void Dispose()
{
    // Standard dispose pattern: release resources via the virtual overload,
    // then suppress finalization since cleanup is already done.
    // NOTE(review): "Dispose(true);" reconstructed — the extraction dropped it; confirm against upstream.
    Dispose(true);
    GC.SuppressFinalize(this);
}
/// <summary>
/// Invokes the model with the given input data.
/// </summary>
/// <param name="inTensorsData">The input data to be inferred.</param>
/// <returns>TensorsData instance which contains the inferred result.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public TensorsData Invoke(TensorsData inTensorsData)
{
    TensorsData out_data = null;
    IntPtr outDataPtr = IntPtr.Zero;
    NNStreamerError ret = NNStreamerError.None;

    NNStreamer.CheckNNStreamerSupport();

    if (inTensorsData == null)
    {
        string msg = "TensorsData is null";
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
    }

    // Branch structure reconstructed around the two visible interop calls:
    // dynamic mode passes the per-call input info so the model can reshape.
    if (_dynamicMode)
    {
        TensorsInfo inInfo = inTensorsData.TensorsInfo;
        if (inInfo == null)
            throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");

        /* Apply information to the native layer before the call. */
        inTensorsData.PrepareInvoke();

        IntPtr outInfoPtr = IntPtr.Zero;
        ret = Interop.SingleShot.InvokeSingleDynamic(_handle, inTensorsData.GetHandle(), inInfo.GetTensorsInfoHandle(), out outDataPtr, out outInfoPtr);
        NNStreamer.CheckException(ret, "fail to invoke the single dynamic inference");

        out_data = TensorsData.CreateFromNativeHandle(outDataPtr, outInfoPtr, true);
    }
    else
    {
        TensorsInfo data_inInfo = inTensorsData.TensorsInfo;

        // In static mode the input layout must match what the model was opened with.
        if (!data_inInfo.Equals(_inInfo))
        {
            string msg = "The TensorsInfo of Input TensorsData is different from that of SingleShot object";
            throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
        }

        /* Apply information to the native layer before the call. */
        inTensorsData.PrepareInvoke();

        ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.GetHandle(), out outDataPtr);
        NNStreamer.CheckException(ret, "fail to invoke the single inference");

        out_data = TensorsData.CreateFromNativeHandle(outDataPtr, data_inInfo.GetTensorsInfoHandle(), true);
    }

    return out_data;
}
/// <summary>
/// Opens the native single-shot inference handle; shared by every public constructor.
/// </summary>
/// <param name="modelAbsPath">Absolute path to the model file; must be non-empty.</param>
/// <param name="inTensorInfo">Input tensor layout, or null to let the model decide.</param>
/// <param name="outTensorInfo">Output tensor layout, or null to let the model decide.</param>
/// <param name="FWType">Neural network framework to use.</param>
/// <param name="HWType">Hardware resource to run on.</param>
/// <param name="IsDynamicMode">Whether input shapes may change between invocations.</param>
private void CreateSingleShot(string modelAbsPath,
    TensorsInfo inTensorInfo, TensorsInfo outTensorInfo,
    NNFWType FWType, HWType HWType, bool IsDynamicMode)
{
    NNStreamerError ret = NNStreamerError.None;
    IntPtr input_info = IntPtr.Zero;
    IntPtr output_info = IntPtr.Zero;

    /* Check model path; CheckException only throws when ret is not None. */
    if (string.IsNullOrEmpty(modelAbsPath))
        ret = NNStreamerError.InvalidParameter;
    NNStreamer.CheckException(ret, "model path is invalid: " + modelAbsPath);

    /* Set Dynamic Mode */
    _dynamicMode = IsDynamicMode;

    if (inTensorInfo != null)
    {
        input_info = inTensorInfo.GetTensorsInfoHandle();
        _inInfo = inTensorInfo;
    }

    if (outTensorInfo != null)
    {
        output_info = outTensorInfo.GetTensorsInfoHandle();
        _outInfo = outTensorInfo;
    }

    ret = Interop.SingleShot.OpenSingle(out _handle, modelAbsPath, input_info, output_info, FWType, HWType);
    NNStreamer.CheckException(ret, "fail to open the single inference engine");
}
/// <summary>
/// Releases any unmanaged resources used by this object. Can also dispose any other disposable objects.
/// </summary>
/// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
365 protected virtual void Dispose(bool disposing)
372 // release managed object
375 // release unmanaged objects
376 if (_handle != IntPtr.Zero)
378 NNStreamerError ret = NNStreamerError.None;
379 ret = Interop.SingleShot.CloseSingle(_handle);
380 if (ret != NNStreamerError.None)
382 Log.Error(NNStreamer.TAG, "failed to close inference engine");
384 _handle = IntPtr.Zero;