[MachineLearning.Inference] Pipeline class to execute neural network stream (#1404)
author jaeyun-jung <39614140+jaeyun-jung@users.noreply.github.com>
Mon, 30 Mar 2020 08:35:01 +0000 (17:35 +0900)
committer GitHub <noreply@github.com>
Mon, 30 Mar 2020 08:35:01 +0000 (17:35 +0900)
* [MachineLearning.Inference] sync to C-API change

1. Add enum for NNFWType and HWType with C-API update.
2. Add enum values newly added in C-API (out-of-memory and permission-denied).
3. Code clean, remove duplicated code and typo correction.

Signed-off-by: Jaeyun <jy1210.jung@samsung.com>
* [MachineLearning.Inference] Pipeline class to execute neural network stream

This patch newly provides interfaces to create and execute stream pipeline with neural network.
Pipeline and its related classes support the following functionalities:
- Create a stream pipeline with NNStreamer plugins and GStreamer plugins.
- Interfaces to start/stop/destroy the pipeline.
- Interfaces to get the state of the pipeline and register callback method.
- Interfaces to push data to the pipeline from the application.
- Interfaces to pull data from the pipeline to the application.
- Interfaces to control the stream of the pipeline using switches and valves.

Signed-off-by: Jaeyun <jy1210.jung@samsung.com>
src/Tizen.MachineLearning.Inference/Interop/Interop.Nnstreamer.cs
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/Commons.cs
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/DataReceivedEventArgs.cs [new file with mode: 0644]
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/Pipeline.cs [new file with mode: 0644]
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/SingleShot.cs
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/StateChangedEventArgs.cs [new file with mode: 0644]
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsData.cs
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsInfo.cs

index d82ab6c..c92f8ce 100755 (executable)
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+* Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the License);
 * you may not use this file except in compliance with the License.
@@ -25,6 +25,81 @@ internal static partial class Interop
         public const string Nnstreamer = "libcapi-nnstreamer.so.0";
     }
 
+    internal static partial class Pipeline
+    {
+        /* typedef void (*ml_pipeline_state_cb) (ml_pipeline_state_e state, void *user_data); */
+        [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+        internal delegate void StateChangedCallback(PipelineState state, IntPtr user_data);
+
+        /* typedef void (*ml_pipeline_sink_cb) (const ml_tensors_data_h data, const ml_tensors_info_h info, void *user_data); */
+        [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+        internal delegate void NewDataCallback(IntPtr data, IntPtr info, IntPtr user_data);
+
+        /* int ml_pipeline_construct (const char *pipeline_description, ml_pipeline_state_cb cb, void *user_data, ml_pipeline_h *pipe); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_construct", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError Construct(string pipeline_description, StateChangedCallback callback, IntPtr user_data, out IntPtr pipeline_handle);
+
+        /* int ml_pipeline_destroy (ml_pipeline_h pipe); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_destroy", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError Destroy(IntPtr pipeline_handle);
+
+        /* int ml_pipeline_get_state (ml_pipeline_h pipe, ml_pipeline_state_e *state) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_get_state", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError GetState(IntPtr pipeline_handle, out int state);
+
+        /* int ml_pipeline_start (ml_pipeline_h pipe); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_start", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError Start(IntPtr pipeline_handle);
+
+        /* int ml_pipeline_stop (ml_pipeline_h pipe); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_stop", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError Stop(IntPtr pipeline_handle);
+
+        /* int ml_pipeline_sink_register (ml_pipeline_h pipe, const char *sink_name, ml_pipeline_sink_cb cb, void *user_data, ml_pipeline_sink_h *sink_handle); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_sink_register", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError RegisterSinkCallback(IntPtr pipeline_handle, string sink_name, NewDataCallback callback, IntPtr user_data, out IntPtr sink_handle);
+
+        /* int ml_pipeline_sink_unregister (ml_pipeline_sink_h sink_handle); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_sink_unregister", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError UnregisterSinkCallback(IntPtr sink_handle);
+
+        /* int ml_pipeline_src_get_handle (ml_pipeline_h pipe, const char *src_name, ml_pipeline_src_h *src_handle); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_src_get_handle", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError GetSrcHandle(IntPtr pipeline_handle, string src_name, out IntPtr src_handle);
+
+        /* int ml_pipeline_src_release_handle (ml_pipeline_src_h src_handle); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_src_release_handle", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError ReleaseSrcHandle(IntPtr src_handle);
+
+        /* int ml_pipeline_src_input_data (ml_pipeline_src_h src_handle, ml_tensors_data_h data, ml_pipeline_buf_policy_e policy); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_src_input_data", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError InputSrcData(IntPtr src_handle, IntPtr data_handle, PipelineBufferPolicy policy);
+
+        /* int ml_pipeline_valve_get_handle (ml_pipeline_h pipe, const char *valve_name, ml_pipeline_valve_h *valve_handle); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_valve_get_handle", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError GetValveHandle(IntPtr pipeline_handle, string valve_name, out IntPtr valve_handle);
+
+        /* int ml_pipeline_valve_release_handle (ml_pipeline_valve_h valve_handle); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_valve_release_handle", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError ReleaseValveHandle(IntPtr valve_handle);
+
+        /* int ml_pipeline_valve_set_open (ml_pipeline_valve_h valve_handle, bool open); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_valve_set_open", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError OpenValve(IntPtr valve_handle, bool open);
+
+        /* int ml_pipeline_switch_get_handle (ml_pipeline_h pipe, const char *switch_name, ml_pipeline_switch_e *switch_type, ml_pipeline_switch_h *switch_handle); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_switch_get_handle", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError GetSwitchHandle(IntPtr pipeline_handle, string switch_name, out SwitchType switch_type, out IntPtr switch_handle);
+
+        /* int ml_pipeline_switch_release_handle (ml_pipeline_switch_h switch_handle); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_switch_release_handle", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError ReleaseSwitchHandle(IntPtr switch_handle);
+
+        /* int ml_pipeline_switch_select (ml_pipeline_switch_h switch_handle, const char *pad_name); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_pipeline_switch_select", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError SelectSwitchPad(IntPtr switch_handle, string pad_name);
+    }
+
     internal static partial class SingleShot
     {
         /* int ml_single_open (ml_single_h *single, const char *model, const ml_tensors_info_h input_info, const ml_tensors_info_h output_info, ml_nnfw_type_e nnfw, ml_nnfw_hw_e hw) */
@@ -73,7 +148,7 @@ internal static partial class Interop
         /* int ml_tensors_info_create (ml_tensors_info_h *info) */
         [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_create", CallingConvention = CallingConvention.Cdecl)]
         internal static extern NNStreamerError CreateTensorsInfo(out IntPtr info);
-            
+
         /* int ml_tensors_info_destroy (ml_tensors_info_h info) */
         [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_destroy", CallingConvention = CallingConvention.Cdecl)]
         internal static extern NNStreamerError DestroyTensorsInfo(IntPtr info);
index 3977b75..bef8110 100755 (executable)
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+* Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the License);
 * you may not use this file except in compliance with the License.
@@ -79,10 +79,48 @@ namespace Tizen.MachineLearning.Inference
         Unknown = Tizen.Internals.Errors.ErrorCode.Unknown,
         TimedOut = Tizen.Internals.Errors.ErrorCode.TimedOut,
         NotSupported = Tizen.Internals.Errors.ErrorCode.NotSupported,
+        PermissionDenied = Tizen.Internals.Errors.ErrorCode.PermissionDenied,
+        OutOfMemory = Tizen.Internals.Errors.ErrorCode.OutOfMemory,
         QuotaExceeded = Tizen.Internals.Errors.ErrorCode.QuotaExceeded,
         InvalidOperation = Tizen.Internals.Errors.ErrorCode.InvalidOperation,
     }
 
+    internal enum SwitchType
+    {
+        OutputSelector = 0,
+        InputSelector = 1,
+    }
+
+    internal enum PipelineBufferPolicy
+    {
+        AutoFree = 0,
+        NotFreed = 1,
+    }
+
+    /// <summary>
+    /// States of NNStreamer pipeline.
+    /// </summary>
+    /// <since_tizen> 8 </since_tizen>
+    public enum PipelineState
+    {
+        /// <summary>
+        /// Initial state of the pipeline.
+        /// </summary>
+        Null = 1,
+        /// <summary>
+        /// The pipeline is ready to go to PAUSED.
+        /// </summary>
+        Ready = 2,
+        /// <summary>
+        /// The pipeline is stopped, ready to accept and process data.
+        /// </summary>
+        Paused = 3,
+        /// <summary>
+        /// The pipeline is started and the data is flowing.
+        /// </summary>
+        Playing = 4,
+    }
+
     /// <summary>
     /// Types of Neural Network Framework.
     /// </summary>
@@ -106,9 +144,34 @@ namespace Tizen.MachineLearning.Inference
         /// </summary>
         Tensorflow,
         /// <summary>
-        /// Neural Network Inference framework, which is developed by SR
+        /// Neural Network Inference framework, which is developed by SR (Samsung Research).
         /// </summary>
         NNFW,
+        /// <summary>
+        /// Intel Movidius Neural Compute SDK (libmvnc).
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        MVNC,
+        /// <summary>
+        /// Intel OpenVINO.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        OpenVINO,
+        /// <summary>
+        /// VeriSilicon's Vivante.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        Vivante,
+        /// <summary>
+        /// Google Coral Edge TPU (USB).
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        EdgeTPU,
+        /// <summary>
+        /// Arm Neural Network framework (support for caffe and tensorflow-lite).
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        ArmNN,
     }
 
     /// <summary>
@@ -126,17 +189,42 @@ namespace Tizen.MachineLearning.Inference
         /// </summary>
         Auto = 1,
         /// <summary>
-        /// Any CPU  if possible.
+        /// Any CPU if possible.
         /// </summary>
         CPU = 0x1000,
         /// <summary>
-        /// Any GPU  if possible.
+        /// NEON in CPU.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        CPUNeon = 0x1100,
+        /// <summary>
+        /// Any GPU if possible.
         /// </summary>
         GPU = 0x2000,
         /// <summary>
         /// Any NPU if possible.
         /// </summary>
         NPU = 0x3000,
+        /// <summary>
+        /// Intel Movidius Stick.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        NPUMovidius = 0x3001,
+        /// <summary>
+        /// Google Coral Edge TPU (USB).
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        NPUEdgeTPU = 0x3002,
+        /// <summary>
+        /// VeriSilicon's Vivante.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        NPUVivante = 0x3003,
+        /// <summary>
+        /// Any SR (Samsung Research) made NPU.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        NPUSR = 0x13000,
     }
 
     internal static class Tensor
@@ -223,7 +311,7 @@ namespace Tizen.MachineLearning.Inference
         internal static Exception CreateException(NNStreamerError err, string msg)
         {
             Exception exp;
-            
+
             switch (err)
             {
                 case NNStreamerError.InvalidParameter:
@@ -234,9 +322,13 @@ namespace Tizen.MachineLearning.Inference
                     exp = new NotSupportedException(msg);
                     break;
 
+                case NNStreamerError.PermissionDenied:
+                    exp = new UnauthorizedAccessException(msg);
+                    break;
+
                 case NNStreamerError.StreamsPipe:
                 case NNStreamerError.TryAgain:
-                    exp = new IOException(msg);
+                    exp = new InvalidOperationException(msg);
                     break;
 
                 case NNStreamerError.TimedOut:
@@ -248,7 +340,7 @@ namespace Tizen.MachineLearning.Inference
                     break;
 
                 default:
-                    exp = new NotSupportedException(msg);
+                    exp = new InvalidOperationException(msg);
                     break;
             }
             return exp;
diff --git a/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/DataReceivedEventArgs.cs b/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/DataReceivedEventArgs.cs
new file mode 100644 (file)
index 0000000..0005bcf
--- /dev/null
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2020 Samsung Electronics Co., Ltd. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the License);
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+namespace Tizen.MachineLearning.Inference
+{
+    /// <summary>
+    /// An extended EventArgs class that contains newly received tensors data.
+    /// </summary>
+    /// <since_tizen> 8 </since_tizen>
+    public class DataReceivedEventArgs : EventArgs
+    {
+        internal DataReceivedEventArgs(TensorsData data)
+        {
+            Data = data;
+        }
+
+        /// <summary>
+        /// Newly received data from the sink node.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        public TensorsData Data { get; }
+    }
+}
diff --git a/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/Pipeline.cs b/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/Pipeline.cs
new file mode 100644 (file)
index 0000000..12211d2
--- /dev/null
@@ -0,0 +1,607 @@
+/*
+ * Copyright (c) 2020 Samsung Electronics Co., Ltd. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the License);
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+
+namespace Tizen.MachineLearning.Inference
+{
+    /// <summary>
+    /// The Pipeline class provides interfaces to create and execute stream pipelines with neural networks.
+    /// </summary>
+    /// <since_tizen> 8 </since_tizen>
+    public class Pipeline : IDisposable
+    {
+        private IntPtr _handle = IntPtr.Zero;
+        private bool _disposed = false;
+        private IDictionary<string, NodeInfo> _nodeList;
+        private Interop.Pipeline.StateChangedCallback _stateChangedCallback;
+
+        /// <summary>
+        /// Creates a new Pipeline instance with the given pipeline description
+        /// </summary>
+        /// <remarks>http://tizen.org/privilege/mediastorage is needed if pipeline description is relevant to media storage.</remarks>
+        /// <remarks>http://tizen.org/privilege/externalstorage is needed if pipeline description is relevant to external storage.</remarks>
+        /// <remarks>http://tizen.org/privilege/camera is needed if pipeline description accesses the camera device.</remarks>
+        /// <remarks>http://tizen.org/privilege/recorder is needed if pipeline description accesses the recorder device.</remarks>
+        /// <param name="description">The pipeline description. Refer to GStreamer manual or NNStreamer documentation for examples and the grammar.</param>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="UnauthorizedAccessException">Thrown when the application does not have the required privilege.</exception>
+        /// <exception cref="InvalidOperationException">Thrown when the method failed due to the wrong pipeline description or internal error.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public Pipeline(string description)
+        {
+            NNStreamer.CheckNNStreamerSupport();
+
+            if (string.IsNullOrEmpty(description))
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The pipeline description is invalid");
+
+            _stateChangedCallback = (state, _) =>
+            {
+                StateChanged?.Invoke(this, new StateChangedEventArgs(state));
+            };
+
+            NNStreamerError ret = Interop.Pipeline.Construct(description, _stateChangedCallback, IntPtr.Zero, out _handle);
+            NNStreamer.CheckException(ret, "Failed to create Pipeline instance");
+
+            /* Init node list */
+            _nodeList = new Dictionary<string, NodeInfo>();
+        }
+
+        /// <summary>
+        /// Destructor of the Pipeline instance.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        ~Pipeline()
+        {
+            Dispose(false);
+        }
+
+        /// <summary>
+        /// Internal method to get the native handle of pipeline.
+        /// </summary>
+        /// <returns>The native handle</returns>
+        /// <since_tizen> 8 </since_tizen>
+        internal IntPtr GetHandle()
+        {
+            return _handle;
+        }
+
+        /// <summary>
+        /// The state of pipeline.
+        /// </summary>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <exception cref="InvalidOperationException">Thrown when failed to get the pipeline state.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public PipelineState State
+        {
+            get
+            {
+                NNStreamer.CheckNNStreamerSupport();
+
+                int state = 0;
+                NNStreamerError ret = NNStreamerError.None;
+
+                /* Check native handle is valid */
+                if (_handle != IntPtr.Zero)
+                {
+                    ret = Interop.Pipeline.GetState(_handle, out state);
+                    if (ret == NNStreamerError.None && state == 0)
+                        ret = NNStreamerError.InvalidOperation;
+                }
+                else
+                {
+                    ret = NNStreamerError.InvalidOperation;
+                }
+
+                NNStreamer.CheckException(ret, "Failed to get the pipeline state because of internal error");
+                return (PipelineState) state;
+            }
+        }
+
+        /// <summary>
+        /// Event to be invoked when the pipeline state is changed.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        public event EventHandler<StateChangedEventArgs> StateChanged;
+
+        /// <summary>
+        /// Starts the pipeline, asynchronously. (The state would be changed to PipelineState.Playing)
+        /// </summary>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <exception cref="InvalidOperationException">Thrown when failed to start the pipeline.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public void Start()
+        {
+            NNStreamer.CheckNNStreamerSupport();
+
+            NNStreamerError ret = Interop.Pipeline.Start(_handle);
+            NNStreamer.CheckException(ret, "Failed to start the pipeline because of internal error");
+        }
+
+        /// <summary>
+        /// Stops the pipeline, asynchronously. (The state would be changed to PipelineState.Paused)
+        /// </summary>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <exception cref="InvalidOperationException">Thrown when failed to stop the pipeline.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public void Stop()
+        {
+            NNStreamer.CheckNNStreamerSupport();
+
+            NNStreamerError ret = Interop.Pipeline.Stop(_handle);
+            NNStreamer.CheckException(ret, "Failed to stop the pipeline because of internal error");
+        }
+
+        /// <summary>
+        /// Releases any unmanaged resources used by this object.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        public void Dispose()
+        {
+            Dispose(true);
+            GC.SuppressFinalize(this);
+        }
+
+        /// <summary>
+        /// Releases any unmanaged resources used by this object including opened handle.
+        /// </summary>
+        /// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
+        /// <since_tizen> 8 </since_tizen>
+        protected virtual void Dispose(bool disposing)
+        {
+            if (_disposed)
+                return;
+
+            if (disposing)
+            {
+                // release managed object
+            }
+
+            // release unmanaged objects
+            if (_handle != IntPtr.Zero)
+            {
+                /* Note that, when destroying the pipeline, all node handles are released internally. */
+                foreach (NodeInfo node in _nodeList.Values)
+                    node.Valid = false;
+
+                _nodeList.Clear();
+
+                NNStreamerError ret = Interop.Pipeline.Destroy(_handle);
+                if (ret != NNStreamerError.None)
+                    Log.Error(NNStreamer.TAG, "Failed to destroy the pipeline handle");
+
+                _handle = IntPtr.Zero;
+            }
+
+            _disposed = true;
+        }
+
+        /// <summary>
+        /// Gets the sink node instance with given node name.
+        /// </summary>
+        /// <param name="name">The name of sink node</param>
+        /// <returns>The sink node instance</returns>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public SinkNode GetSink(string name)
+        {
+            NNStreamer.CheckNNStreamerSupport();
+
+            /* Check the argument */
+            if (string.IsNullOrEmpty(name))
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Node name is invalid");
+
+            SinkNode node;
+
+            if (_nodeList.ContainsKey(name))
+            {
+                if (_nodeList[name].Type != NodeType.Sink)
+                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, name + " is not a sink node");
+
+                node = (SinkNode) _nodeList[name];
+            }
+            else
+            {
+                node = new SinkNode(name, this);
+                _nodeList.Add(name, node);
+            }
+
+            return node;
+        }
+
+        /// <summary>
+        /// Gets the source node instance with given node name.
+        /// </summary>
+        /// <param name="name">The name of source node</param>
+        /// <returns>The source node instance</returns>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public SourceNode GetSource(string name)
+        {
+            NNStreamer.CheckNNStreamerSupport();
+
+            /* Check the parameter */
+            if (string.IsNullOrEmpty(name))
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Node name is invalid");
+
+            SourceNode node;
+
+            if (_nodeList.ContainsKey(name))
+            {
+                if (_nodeList[name].Type != NodeType.Source)
+                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, name + " is not a source node");
+
+                node = (SourceNode) _nodeList[name];
+            }
+            else
+            {
+                node = new SourceNode(name, this);
+                _nodeList.Add(name, node);
+            }
+
+            return node;
+        }
+
+        /// <summary>
+        /// Gets the valve node instance with given node name.
+        /// </summary>
+        /// <param name="name">The name of valve node</param>
+        /// <returns>The valve node instance</returns>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public ValveNode GetValve(string name)
+        {
+            NNStreamer.CheckNNStreamerSupport();
+
+            /* Check the parameter */
+            if (string.IsNullOrEmpty(name))
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Node name is invalid");
+
+            ValveNode node;
+
+            if (_nodeList.ContainsKey(name))
+            {
+                if (_nodeList[name].Type != NodeType.Valve)
+                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, name + " is not a valve node");
+
+                node = (ValveNode) _nodeList[name];
+            }
+            else
+            {
+                node = new ValveNode(name, this);
+                _nodeList.Add(name, node);
+            }
+
+            return node;
+        }
+
+        /// <summary>
+        /// Gets the switch node instance with given node name.
+        /// </summary>
+        /// <param name="name">The name of switch node.</param>
+        /// <returns>The switch node instance</returns>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <since_tizen> 8 </since_tizen>
+        public SwitchNode GetSwitch(string name)
+        {
+            NNStreamer.CheckNNStreamerSupport();
+
+            /* Check the parameter */
+            if (string.IsNullOrEmpty(name))
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Node name is invalid");
+
+            SwitchNode node;
+
+            if (_nodeList.ContainsKey(name))
+            {
+                if (_nodeList[name].Type != NodeType.Switch)
+                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, name + " is not a switch node");
+
+                node = (SwitchNode) _nodeList[name];
+            }
+            else
+            {
+                node = new SwitchNode(name, this);
+                _nodeList.Add(name, node);
+            }
+
+            return node;
+        }
+
+        /// <summary>
+        /// SwitchNode class to handle the switch node.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        public class SwitchNode : NodeInfo
+        {
+            private SwitchType _type;
+
+            internal SwitchNode(string name, Pipeline pipe) : base(NodeType.Switch, name, pipe)
+            {
+                IntPtr handle = IntPtr.Zero;
+
+                NNStreamerError ret = Interop.Pipeline.GetSwitchHandle(pipe.GetHandle(), name, out _type, out handle);
+                NNStreamer.CheckException(ret, "Failed to get the switch node handle: " + name);
+
+                Handle = handle;
+            }
+
+            /// <summary>
+            /// Selects input/output pad.
+            /// </summary>
+            /// <param name="padName">The pad name to be activated.</param>
+            /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+            /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+            /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+            /// <exception cref="InvalidOperationException">Thrown when the node is invalid.</exception>
+            /// <since_tizen> 8 </since_tizen>
+            public void Select(string padName)
+            {
+                if (string.IsNullOrEmpty(padName))
+                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Pad name is invalid");
+
+                if (!Valid)
+                    NNStreamer.CheckException(NNStreamerError.InvalidOperation, "Current node is invalid: " + Name);
+
+                NNStreamerError ret = Interop.Pipeline.SelectSwitchPad(Handle, padName);
+                NNStreamer.CheckException(ret, "Failed to select pad: " + padName);
+            }
+        }
+
+        /// <summary>
+        /// ValveNode class to handle the valve node.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        public class ValveNode : NodeInfo
+        {
+            internal ValveNode(string name, Pipeline pipe) : base(NodeType.Valve, name, pipe)
+            {
+                IntPtr handle = IntPtr.Zero;
+
+                NNStreamerError ret = Interop.Pipeline.GetValveHandle(pipe.GetHandle(), name, out handle);
+                NNStreamer.CheckException(ret, "Failed to get the valve node handle: " + name);
+
+                Handle = handle;
+            }
+
+            /// <summary>
+            /// Controls the valve. Set the flag true to open (let the flow pass), false to close (stop the flow).
+            /// </summary>
+            /// <param name="open">The flag to control the flow</param>
+            /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+            /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+            /// <exception cref="InvalidOperationException">Thrown when the node is invalid.</exception>
+            /// <since_tizen> 8 </since_tizen>
+            public void Control(bool open)
+            {
+                if (!Valid)
+                    NNStreamer.CheckException(NNStreamerError.InvalidOperation, "Current node is invalid: " + Name);
+
+                NNStreamerError ret = Interop.Pipeline.OpenValve(Handle, open);
+                NNStreamer.CheckException(ret, "Failed to set valve status: " + Name);
+            }
+        }
+
+        /// <summary>
+        /// SourceNode class to handle the source node.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        public class SourceNode : NodeInfo
+        {
+            internal SourceNode(string name, Pipeline pipe) : base(NodeType.Source, name, pipe)
+            {
+                IntPtr handle = IntPtr.Zero;
+
+                NNStreamerError ret = Interop.Pipeline.GetSrcHandle(pipe.GetHandle(), name, out handle);
+                NNStreamer.CheckException(ret, "Failed to get the source node handle: " + name);
+
+                Handle = handle;
+            }
+
+            /// <summary>
+            /// Inputs tensor data to source node.
+            /// </summary>
+            /// <param name="data">The tensors data</param>
+            /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+            /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+            /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+            /// <exception cref="InvalidOperationException">Thrown when the node is invalid, or failed to input tensors data.</exception>
+            /// <since_tizen> 8 </since_tizen>
+            public void Input(TensorsData data)
+            {
+                if (data == null)
+                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given data is invalid");
+
+                if (!Valid)
+                    NNStreamer.CheckException(NNStreamerError.InvalidOperation, "Current node is invalid: " + Name);
+
+                data.PrepareInvoke();
+
+                NNStreamerError ret = Interop.Pipeline.InputSrcData(Handle, data.GetHandle(), PipelineBufferPolicy.NotFreed);
+                NNStreamer.CheckException(ret, "Failed to input tensors data to source node: " + Name);
+            }
+        }
+
+        /// <summary>
+        /// SinkNode class to handle the sink node.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        public class SinkNode : NodeInfo
+        {
+            private event EventHandler<DataReceivedEventArgs> _dataReceived;
+            private Interop.Pipeline.NewDataCallback _dataCallback;
+
+            internal SinkNode(string name, Pipeline pipe) : base(NodeType.Sink, name, pipe)
+            {
+                _dataCallback = (data_handle, Info_handle, _) =>
+                {
+                    if (Valid)
+                    {
+                        TensorsData data = TensorsData.CreateFromNativeHandle(data_handle, Info_handle, true, false);
+                        _dataReceived?.Invoke(this, new DataReceivedEventArgs(data));
+                    }
+                };
+
+                Register();
+            }
+
+            /// <summary>
+            /// Event to be invoked when the sink node receives new data.
+            /// </summary>
+            /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+            /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+            /// <exception cref="InvalidOperationException">Thrown when the node is invalid.</exception>
+            /// <since_tizen> 8 </since_tizen>
+            public event EventHandler<DataReceivedEventArgs> DataReceived
+            {
+                add
+                {
+                    if (value == null)
+                        return;
+
+                    if (!Valid)
+                        NNStreamer.CheckException(NNStreamerError.InvalidOperation, "Current node is invalid: " + Name);
+
+                    Register();
+                    _dataReceived += value;
+                }
+
+                remove
+                {
+                    if (value == null)
+                        return;
+
+                    if (!Valid)
+                        NNStreamer.CheckException(NNStreamerError.InvalidOperation, "Current node is invalid: " + Name);
+
+                    _dataReceived -= value;
+
+                    if (_dataReceived == null)
+                        Unregister();
+                }
+            }
+
+            private void Register()
+            {
+                if (Handle == IntPtr.Zero)
+                {
+                    IntPtr handle = IntPtr.Zero;
+
+                    /* Register new data callback to sink node */
+                    NNStreamerError ret = Interop.Pipeline.RegisterSinkCallback(Pipe.GetHandle(), Name, _dataCallback, IntPtr.Zero, out handle);
+                    NNStreamer.CheckException(ret, "Failed to register sink node callback: " + Name);
+
+                    Handle = handle;
+                }
+            }
+
+            private void Unregister()
+            {
+                if (Handle != IntPtr.Zero)
+                {
+                    /* Unregister the data callback from sink node */
+                    NNStreamerError ret = Interop.Pipeline.UnregisterSinkCallback(Handle);
+                    NNStreamer.CheckException(ret, "Failed to unregister sink node callback: " + Name);
+
+                    Handle = IntPtr.Zero;
+                }
+            }
+        }
+
+        /// <summary>
+        /// Node type of NNStreamer pipeline.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        public enum NodeType
+        {
+            /// <summary>
+            /// The source node.
+            /// </summary>
+            Source = 0,
+            /// <summary>
+            /// The sink node.
+            /// </summary>
+            Sink = 1,
+            /// <summary>
+            /// The valve node.
+            /// </summary>
+            Valve = 2,
+            /// <summary>
+            /// The switch node.
+            /// </summary>
+            Switch = 3,
+        }
+
+        /// <summary>
+        /// NodeInfo class for node information in pipeline.
+        /// Note that a node depends on the pipeline. If the pipeline is closed, all the node information becomes invalid.
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        public abstract class NodeInfo
+        {
+            /// <summary>
+            /// Creates a new NodeInfo instance with the given node information.
+            /// </summary>
+            /// <param name="type">The node type.</param>
+            /// <param name="name">The node name.</param>
+            /// <param name="pipe">The Pipeline instance the node included.</param>
+            /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+            /// <since_tizen> 8 </since_tizen>
+            protected internal NodeInfo(NodeType type, string name, Pipeline pipe)
+            {
+                Pipe = pipe;
+                Name = name;
+                Type = type;
+                Handle = IntPtr.Zero;
+                Valid = true;
+            }
+
+            internal Pipeline Pipe { get; set; }
+            internal IntPtr Handle { get; set; }
+
+            /// <summary>
+            /// The node type.
+            /// </summary>
+            /// <since_tizen> 8 </since_tizen>
+            public NodeType Type { get; internal set; }
+
+            /// <summary>
+            /// The node name.
+            /// </summary>
+            /// <since_tizen> 8 </since_tizen>
+            public string Name { get; internal set; }
+
+            /// <summary>
+            /// The flag to indicate valid state.
+            /// </summary>
+            /// <since_tizen> 8 </since_tizen>
+            public bool Valid { get; internal set; }
+        }
+    }
+}
index a90ee95..695edf6 100755 (executable)
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+* Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the License);
 * you may not use this file except in compliance with the License.
@@ -60,6 +60,7 @@ namespace Tizen.MachineLearning.Inference
         /// <param name="fwType">Types of Neural Network Framework</param>
         /// <param name="hwType">Types of hardware resources to be used for NNFWs</param>
         /// <param name="isDynamicMode">Support Dynamic Mode</param>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
         /// <since_tizen> 8 </since_tizen>
@@ -81,6 +82,7 @@ namespace Tizen.MachineLearning.Inference
         /// <param name="fwType">Types of Neural Network Framework (Default:NNFWType.Any)</param>
         /// <param name="hwType">Types of hardware resources to be used for NNFWs (Default: HWType.Any)</param>
         /// <param name="isDynamicMode">Support Dynamic Mode (Default: false)</param>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
         /// <since_tizen> 8 </since_tizen>
@@ -163,8 +165,9 @@ namespace Tizen.MachineLearning.Inference
         /// Sets the maximum amount of time to wait for an output, in milliseconds.
         /// </summary>
         /// <param name="ms">The time to wait for an output (milliseconds)</param>
-        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
         /// <since_tizen> 8 </since_tizen>
         public void SetTimeout(int ms)
         {
@@ -172,8 +175,7 @@ namespace Tizen.MachineLearning.Inference
             NNStreamerError ret = NNStreamerError.None;
 
             if (ms <= 0)
-                ret = NNStreamerError.InvalidParameter;
-            NNStreamer.CheckException(ret, "timeout: " + ms.ToString());
+                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Invalid timeout: " + ms.ToString());
 
             ret = Interop.SingleShot.SetTimeout(_handle, ms);
             NNStreamer.CheckException(ret, "fail to set the timeout!");
diff --git a/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/StateChangedEventArgs.cs b/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/StateChangedEventArgs.cs
new file mode 100644 (file)
index 0000000..b0cf7c9
--- /dev/null
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2020 Samsung Electronics Co., Ltd. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the License);
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+namespace Tizen.MachineLearning.Inference
+{
+    /// <summary>
+    /// An extended EventArgs class that contains the changed state of the pipeline.
+    /// </summary>
+    /// <since_tizen> 8 </since_tizen>
+    public class StateChangedEventArgs : EventArgs
+    {
+        internal StateChangedEventArgs(PipelineState state)
+        {
+            State = state;
+        }
+
+        /// <summary>
+        /// The changed state of the pipeline
+        /// </summary>
+        /// <since_tizen> 8 </since_tizen>
+        public PipelineState State { get; }
+    }
+}
index 7ab6542..ca6a6fc 100755 (executable)
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+* Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the License);
 * you may not use this file except in compliance with the License.
@@ -36,8 +36,9 @@ namespace Tizen.MachineLearning.Inference
         /// <param name="handle">The handle of tensors data.</param>
         /// <param name="info">The handle of tensors info. (Default: null)</param>
         /// <param name="isFetch">The boolean value for fetching the data (Default: false)</param>
+        /// <param name="hasOwnership">The boolean value for automatic disposal (Default: true)</param>
         /// <since_tizen> 6 </since_tizen>
-        private TensorsData(IntPtr handle, TensorsInfo info, bool isFetch)
+        private TensorsData(IntPtr handle, TensorsInfo info, bool isFetch = false, bool hasOwnership = true)
         {
             NNStreamer.CheckNNStreamerSupport();
             NNStreamerError ret = NNStreamerError.None;
@@ -78,6 +79,9 @@ namespace Tizen.MachineLearning.Inference
                     _dataList.Add(bufData);
                 }
             }
+
+            /* If it was created as DataReceivedEventArgs, do not dispose it here. */
+            _disposed = !hasOwnership;
         }
 
         /// <summary>
@@ -231,21 +235,16 @@ namespace Tizen.MachineLearning.Inference
             }
         }
 
-        internal static TensorsData CreateFromNativeHandle(IntPtr dataHandle, IntPtr infoHandle, bool isFetch)
+        internal static TensorsData CreateFromNativeHandle(IntPtr dataHandle, IntPtr infoHandle, bool isFetch = false, bool hasOwnership = true)
         {
-            TensorsData retTensorsData = null;
+            TensorsInfo info = null;
 
-            if (infoHandle == IntPtr.Zero)
-            {
-                retTensorsData = new TensorsData(dataHandle, null, isFetch);
-            }
-            else
+            if (infoHandle != IntPtr.Zero)
             {
-                TensorsInfo info = TensorsInfo.ConvertTensorsInfoFromHandle(infoHandle);
-                retTensorsData = new TensorsData(dataHandle, info, isFetch);
+                info = TensorsInfo.ConvertTensorsInfoFromHandle(infoHandle);
             }
 
-            return retTensorsData;
+            return new TensorsData(dataHandle, info, isFetch, hasOwnership);
         }
 
         private void CheckIndex(int index)
index d37067d..d9f1d7c 100755 (executable)
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+* Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the License);
 * you may not use this file except in compliance with the License.
@@ -65,7 +65,7 @@ namespace Tizen.MachineLearning.Inference
         /// <param name="type">Data element type of Tensor.</param>
         /// <param name="dimension">Dimension of Tensor. Note that we support up to 4th ranks.</param>
         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
-        /// <exception cref="IndexOutOfRangeException">Thrown when the number of Tensor already exceeds the size limits (i.e. Tensor.SlzeLimit)</exception>
+        /// <exception cref="IndexOutOfRangeException">Thrown when the number of Tensor already exceeds the size limits (i.e. Tensor.SizeLimit)</exception>
         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
         /// <since_tizen> 6 </since_tizen>
@@ -83,7 +83,7 @@ namespace Tizen.MachineLearning.Inference
         /// <param name="type">Data element type of Tensor.</param>
         /// <param name="dimension">Dimension of Tensor. Note that we support up to 4th ranks.</param>
         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
-        /// <exception cref="IndexOutOfRangeException">Thrown when the number of Tensor already exceeds the size limits (i.e. Tensor.SlzeLimit)</exception>
+        /// <exception cref="IndexOutOfRangeException">Thrown when the number of Tensor already exceeds the size limits (i.e. Tensor.SizeLimit)</exception>
         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
         /// <since_tizen> 6 </since_tizen>
@@ -102,13 +102,9 @@ namespace Tizen.MachineLearning.Inference
                 NNStreamerError ret = NNStreamerError.None;
 
                 ret = Interop.Util.SetTensorsCount(_handle, _infoList.Count);
-                NNStreamer.CheckException(ret, "unable to set the number of tensors");
+                NNStreamer.CheckException(ret, "Failed to set the number of tensors");
 
-                ret = Interop.Util.SetTensorType(_handle, idx, type);
-                NNStreamer.CheckException(ret, "fail to set TensorsInfo type");
-
-                ret = Interop.Util.SetTensorDimension(_handle, idx, dimension);
-                NNStreamer.CheckException(ret, "fail to set TensorsInfo dimension");
+                UpdateInfoHandle(_handle, idx, name, type, dimension);
             }
         }
 
@@ -137,21 +133,20 @@ namespace Tizen.MachineLearning.Inference
         }
 
         /// <summary>
-        /// Calculates the byte size of tensor data.
+        /// Gets the tensor name with given index.
         /// </summary>
-        /// <param name="idx">The index of the tensor information in the list</param>
-        /// <returns>The byte size of tensor</returns>
+        /// <param name="idx">The index of the tensor.</param>
+        /// <returns>The tensor name</returns>
         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
-        /// <since_tizen> 8 </since_tizen>
-        public int GetTensorSize(int idx)
+        /// <since_tizen> 6 </since_tizen>
+        public string GetTensorName(int idx)
         {
             NNStreamer.CheckNNStreamerSupport();
-            CheckIndexBoundary(idx);
-
-            return _infoList[idx].Size;
 
+            CheckIndexBoundary(idx);
+            return _infoList[idx].Name;
         }
 
         /// <summary>
@@ -259,7 +254,7 @@ namespace Tizen.MachineLearning.Inference
             }
 
             ret = Interop.Util.CreateTensorsData(_handle, out tensorsData_h);
-            NNStreamer.CheckException(ret, "unable to create the tensorsData object");
+            NNStreamer.CheckException(ret, "Failed to create the TensorsData object");
 
             retTensorData = TensorsData.CreateFromNativeHandle(tensorsData_h, _handle, false);
 
@@ -267,20 +262,20 @@ namespace Tizen.MachineLearning.Inference
         }
 
         /// <summary>
-        /// Gets the tensor name with given index.
+        /// Calculates the byte size of tensor data.
         /// </summary>
-        /// <param name="idx">The index of the tensor.</param>
-        /// <returns>The tensor name</returns>
+        /// <param name="idx">The index of the tensor information in the list</param>
+        /// <returns>The byte size of tensor</returns>
         /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
         /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
         /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
-        /// <since_tizen> 6 </since_tizen>
-        public string GetTensorName(int idx)
+        /// <since_tizen> 8 </since_tizen>
+        public int GetTensorSize(int idx)
         {
             NNStreamer.CheckNNStreamerSupport();
 
             CheckIndexBoundary(idx);
-            return _infoList[idx].Name;
+            return _infoList[idx].Size;
         }
 
         /// <summary>
@@ -415,12 +410,7 @@ namespace Tizen.MachineLearning.Inference
             idx = 0;
             foreach (TensorInfo t in _infoList)
             {
-                ret = Interop.Util.SetTensorType(ret_handle, idx, t.Type);
-                NNStreamer.CheckException(ret, "fail to set the type of tensor" + idx.ToString());
-
-                ret = Interop.Util.SetTensorDimension(ret_handle, idx, t.Dimension);
-                NNStreamer.CheckException(ret, "fail to set the dimension of tensor: " + idx.ToString());
-
+                UpdateInfoHandle(ret_handle, idx, t.Name, t.Type, t.Dimension);
                 idx += 1;
             }
 
@@ -465,7 +455,24 @@ namespace Tizen.MachineLearning.Inference
             }
             _disposed = true;
         }
-        
+
+        private void UpdateInfoHandle(IntPtr handle, int idx, string name, TensorType type, int[] dimension)
+        {
+            if (handle != IntPtr.Zero)
+            {
+                NNStreamerError ret = NNStreamerError.None;
+
+                ret = Interop.Util.SetTensorName(handle, idx, name);
+                NNStreamer.CheckException(ret, "Failed to set the name of tensor at index " + idx.ToString());
+
+                ret = Interop.Util.SetTensorType(handle, idx, type);
+                NNStreamer.CheckException(ret, "Failed to set the type of tensor at index " + idx.ToString());
+
+                ret = Interop.Util.SetTensorDimension(handle, idx, dimension);
+                NNStreamer.CheckException(ret, "Failed to set the dimension of tensor at index " + idx.ToString());
+            }
+        }
+
         private void CheckIndexBoundary(int idx)
         {
             if (idx < 0 || idx >= _infoList.Count)
@@ -501,6 +508,7 @@ namespace Tizen.MachineLearning.Inference
                 }
                 Dimension = (int[])dimension.Clone();
             }
+
             private int GetSize()
             {
                 int size = 0;