2 * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
4 * Licensed under the Apache License, Version 2.0 (the License);
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an AS IS BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
18 using System.Collections.Generic;
22 namespace Tizen.MachineLearning.Inference
/// The SingleShot class loads a Machine Learning model and makes inferences from input data.
27 /// <since_tizen> 6 </since_tizen>
28 public class SingleShot : IDisposable
30 private IntPtr _handle = IntPtr.Zero;
31 private bool _disposed = false;
34 /// Loads the neural network model and configures runtime environment
36 /// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
37 /// <param name="inTensorsInfo">Input TensorsInfo object</param>
38 /// <param name="outTensorsInfo">Output TensorsInfo object for inference result</param>
39 /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
40 /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
41 /// <exception cref="IOException">Thrown when constructing the pipeline is failed.</exception>
42 /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
43 /// <since_tizen> 6 </since_tizen>
public SingleShot(string modelAbsPath, TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo)
{
    NNStreamer.CheckNNStreamerSupport();

    // Both tensor descriptions are mandatory; reject a missing one before
    // touching the native single-shot API.
    bool missingInfo = (inTensorsInfo == null) || (outTensorsInfo == null);
    if (missingInfo)
    {
        throw NNStreamerExceptionFactory.CreateException(
            NNStreamerError.InvalidParameter, "TensorsInfo is null");
    }

    CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo);
}
/// Destructor of the SingleShot instance.
60 /// <since_tizen> 6 </since_tizen>
/// Releases any unmanaged resources used by this object.
/// <since_tizen> 6 </since_tizen>
// Resources are released synchronously, so the finalizer no longer needs to run.
// NOTE(review): the Dispose(true) call presumably precedes this in the stripped
// lines of this chunk — confirm against the full file.
GC.SuppressFinalize(this);
/// <summary>
/// Invokes the model with the given input data.
/// </summary>
/// <param name="inTensorsData">The input data to be inferred.</param>
/// <returns>TensorsData instance which contains the inferred result.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="IOException">Thrown when failed to push an input data into source element.</exception>
/// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public TensorsData Invoke(TensorsData inTensorsData)
{
    // A null input cannot be handed to the native layer.
    if (inTensorsData == null)
    {
        throw NNStreamerExceptionFactory.CreateException(
            NNStreamerError.InvalidParameter, "TensorsData is null");
    }

    // Run the inference; the native call hands back a newly created output handle.
    IntPtr outputHandle = IntPtr.Zero;
    NNStreamerError ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.Handle, out outputHandle);
    NNStreamer.CheckException(ret, "fail to invoke the single inference engine");

    // Wrap the native output handle in a managed TensorsData for the caller.
    return TensorsData.CreateFromNativeHandle(outputHandle);
}
// Opens the native single-shot inference engine for the given model and
// tensor descriptions, storing the resulting handle in _handle.
private void CreateSingleShot(string modelAbsPath, TensorsInfo inTensorInfo, TensorsInfo outTensorInfo)
{
    /* Check model path; CheckException is presumably a no-op for
     * NNStreamerError.None, mirroring the original unconditional call. */
    NNStreamerError ret = string.IsNullOrEmpty(modelAbsPath)
        ? NNStreamerError.InvalidParameter
        : NNStreamerError.None;
    NNStreamer.CheckException(ret, "model path is invalid: " + modelAbsPath);

    IntPtr inHandle = inTensorInfo.GetTensorsInfoHandle();
    IntPtr outHandle = outTensorInfo.GetTensorsInfoHandle();

    ret = Interop.SingleShot.OpenSingle(out _handle, modelAbsPath, inHandle, outHandle, NNFWType.Any, HWType.Any);
    NNStreamer.CheckException(ret, "fail to open the single inference engine");
}
/// Releases any unmanaged resources used by this object. Can also dispose any other disposable objects.
/// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
// NOTE(review): the _disposed guard, braces, and trailing `_disposed = true;`
// of this method appear to lie outside the visible chunk — confirm against the full file.
protected virtual void Dispose(bool disposing)
// release managed object
// release unmanaged objects
// Only close the native engine if it was actually opened.
if (_handle != IntPtr.Zero)
NNStreamerError ret = NNStreamerError.None;
ret = Interop.SingleShot.CloseSingle(_handle);
// Dispose must not throw, so a failed native close is only logged.
if (ret != NNStreamerError.None)
Log.Error(NNStreamer.TAG, "failed to close inference engine");
// Clear the handle so repeated Dispose calls are safe.
_handle = IntPtr.Zero;