[MachineLearning.Inference] Add Inference APIs for Machine Learning (#940)
authorSangjung Woo <again4you@gmail.com>
Mon, 16 Sep 2019 07:13:50 +0000 (16:13 +0900)
committerSeungkeun Lee <sngn.lee@samsung.com>
Mon, 16 Sep 2019 07:13:50 +0000 (16:13 +0900)
* [MachineLearning] Add TensorsInfo and TensorsData class

This patch adds the TensorsInfo and TensorsData classes for the C# API.

Signed-off-by: Sangjung Woo <sangjung.woo@samsung.com>
* [MachineLearning] Add SingleShot class for inference

This patch adds the SingleShot class for the C# API.

Signed-off-by: Sangjung Woo <sangjung.woo@samsung.com>
* [MachineLearning] Add testcase for MachineLearning API

This patch adds the test application for the MachineLearning API, which
is based on NNStreamer.

Signed-off-by: Sangjung Woo <sangjung.woo@samsung.com>
* [MachineLearning] Update the feature key and exception in public method

This patch adds the related feature key and exceptions to the public
methods. Only the XML comments change; the code itself is untouched.

Signed-off-by: Sangjung Woo <sangjung.woo@samsung.com>
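
For reference, a minimal end-to-end sketch of the added API. This is illustrative
only: modelPath and imageBuffer are assumed application-side inputs, and the
tensor shapes follow the bundled mobilenet_v1_1.0_224_quant.tflite test model.

    using Tizen.MachineLearning.Inference;

    // Describe the model input: one uint8 tensor of dimension {3, 224, 224, 1}.
    var inInfo = new TensorsInfo();
    inInfo.AddTensorInfo(TensorType.UInt8, new int[4] { 3, 224, 224, 1 });

    // Describe the model output: 1001 uint8 class scores.
    var outInfo = new TensorsInfo();
    outInfo.AddTensorInfo(TensorType.UInt8, new int[4] { 1001, 1, 1, 1 });

    // Load the model and run one inference.
    using (var single = new SingleShot(modelPath, inInfo, outInfo))
    {
        TensorsData input = inInfo.GetTensorsData();
        input.SetTensorData(0, imageBuffer);   // raw byte[] of length 3 * 224 * 224

        TensorsData output = single.Invoke(input);
        byte[] scores = output.GetTensorData(0);
    }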
17 files changed:
packaging/PlatformFileList.txt
src/Tizen.MachineLearning.Inference/Interop/Interop.Nnstreamer.cs [new file with mode: 0755]
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference.csproj [new file with mode: 0755]
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference.sln [new file with mode: 0755]
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/Commons.cs [new file with mode: 0755]
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/SingleShot.cs [new file with mode: 0755]
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsData.cs [new file with mode: 0755]
src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsInfo.cs [new file with mode: 0755]
test/Tizen.MachineLearning.Inference.Test/App.cs [new file with mode: 0755]
test/Tizen.MachineLearning.Inference.Test/Program.cs [new file with mode: 0755]
test/Tizen.MachineLearning.Inference.Test/SingleTest.cs [new file with mode: 0755]
test/Tizen.MachineLearning.Inference.Test/TensorsInfoTest.cs [new file with mode: 0755]
test/Tizen.MachineLearning.Inference.Test/Tizen.MachineLearning.Inference.Test.csproj [new file with mode: 0755]
test/Tizen.MachineLearning.Inference.Test/Tizen.MachineLearning.Inference.Test.sln [new file with mode: 0755]
test/Tizen.MachineLearning.Inference.Test/res/models/mobilenet_v1_1.0_224_quant.tflite [new file with mode: 0755]
test/Tizen.MachineLearning.Inference.Test/shared/res/Tizen.MachineLearning.Inference.Test.png [new file with mode: 0755]
test/Tizen.MachineLearning.Inference.Test/tizen-manifest.xml [new file with mode: 0755]

index ce2153c..4052c5b 100755 (executable)
@@ -39,6 +39,7 @@ Tizen.dll                                          #common #mobile #mobile-emul
 Tizen.Location.dll                                 #mobile #mobile-emul #tv #wearable
 Tizen.Location.Geofence.dll                        #mobile #mobile-emul
 Tizen.Log.dll                                      #common #mobile #mobile-emul #tv #wearable
+Tizen.MachineLearning.Inference.dll                #mobile #mobile-emul #tv #wearable
 Tizen.Maps.dll                                     #mobile #mobile-emul #tv #wearable
 Tizen.Messaging.dll                                #mobile #mobile-emul #wearable
 Tizen.Messaging.Push.dll                           #common #mobile #mobile-emul #tv #wearable
diff --git a/src/Tizen.MachineLearning.Inference/Interop/Interop.Nnstreamer.cs b/src/Tizen.MachineLearning.Inference/Interop/Interop.Nnstreamer.cs
new file mode 100755 (executable)
index 0000000..883e1ab
--- /dev/null
@@ -0,0 +1,136 @@
+/*
+* Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+*
+* Licensed under the Apache License, Version 2.0 (the License);
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an AS IS BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System;
+using System.Runtime.InteropServices;
+using Tizen.MachineLearning.Inference;
+
+internal static partial class Interop
+{
+    internal static partial class Libraries
+    {
+        public const string Nnstreamer = "libcapi-nnstreamer.so.0";
+    }
+
+    internal static partial class SingleShot
+    {
+        /* int ml_single_open (ml_single_h *single, const char *model, const ml_tensors_info_h input_info, const ml_tensors_info_h output_info, ml_nnfw_type_e nnfw, ml_nnfw_hw_e hw) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_single_open", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError OpenSingle(out IntPtr single_handle, string model_path, IntPtr input_info, IntPtr output_info, NNFWType nn_type, HWType hw_type);
+
+        /* int ml_single_close (ml_single_h single) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_single_close", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError CloseSingle(IntPtr single_handle);
+
+        /* int ml_single_invoke (ml_single_h single, const ml_tensors_data_h input, ml_tensors_data_h *output) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_single_invoke", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError InvokeSingle(IntPtr single_handle, IntPtr input_data, out IntPtr output_data);
+
+        /* int ml_single_get_input_info (ml_single_h single, ml_tensors_info_h *info) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_single_invoke", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError GetInputTensorsInfoFromSingle(IntPtr single_handle, out IntPtr input_info);
+
+        /* int ml_single_get_output_info (ml_single_h single, ml_tensors_info_h *info) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_single_get_output_info", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError GetOutputTensorsInfoFromSingle(IntPtr single_handle, out IntPtr output_info);
+    }
+
+    internal static partial class Util
+    {
+        /* int ml_tensors_info_create (ml_tensors_info_h *info) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_create", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError CreateTensorsInfo(out IntPtr info);
+            
+        /* int ml_tensors_info_destroy (ml_tensors_info_h info) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_destroy", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError DestroyTensorsInfo(IntPtr info);
+
+        /* int ml_tensors_info_validate (const ml_tensors_info_h info, bool *valid) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_validate", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError ValidateTensorsInfo(IntPtr info, out bool valid);
+
+        /* int ml_tensors_info_clone (ml_tensors_info_h dest, const ml_tensors_info_h src) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_clone", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError CloneTensorsInfo(out IntPtr dest_info, IntPtr src_info);
+
+        /* int ml_tensors_info_set_count (ml_tensors_info_h info, unsigned int count) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_set_count", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError SetTensorsCount(IntPtr info, int count);
+
+        /* int ml_tensors_info_get_count (ml_tensors_info_h info, unsigned int *count) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_get_count", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError GetTensorsCount(IntPtr info, out int count);
+
+        /* int ml_tensors_info_set_tensor_name (ml_tensors_info_h info, unsigned int index, const char *name) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_set_tensor_name", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError SetTensorName(IntPtr info, int index, string name);
+
+        /* int ml_tensors_info_get_tensor_name (ml_tensors_info_h info, unsigned int index, char **name) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_get_tensor_name", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError GetTensorName(IntPtr info, int index, out string name);
+
+        /* int ml_tensors_info_set_tensor_type (ml_tensors_info_h info, unsigned int index, const ml_tensor_type_e type) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_set_tensor_type", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError SetTensorType(IntPtr info, int index, TensorType type);
+
+        /* int ml_tensors_info_get_tensor_type (ml_tensors_info_h info, unsigned int index, ml_tensor_type_e *type) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_get_tensor_type", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError GetTensorType(IntPtr info, int index, out TensorType type);
+
+        /* int ml_tensors_info_set_tensor_dimension (ml_tensors_info_h info, unsigned int index, const ml_tensor_dimension dimension) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_set_tensor_dimension", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError SetTensorDimension(IntPtr info, int index, int[] dimension);
+
+        /* int ml_tensors_info_get_tensor_dimension (ml_tensors_info_h info, unsigned int index, ml_tensor_dimension dimension) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_get_tensor_dimension", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError GetTensorDimension(IntPtr info, int index, out int[] dimension);
+
+        /* size_t ml_tensors_info_get_size (const ml_tensors_info_h info) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_info_get_size", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern int GetTensorsSize(IntPtr info);
+
+        /* int ml_tensors_data_create (const ml_tensors_info_h info, ml_tensors_data_h *data) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_data_create", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError CreateTensorsData(IntPtr info, out IntPtr data);
+
+        /* int ml_tensors_data_destroy (ml_tensors_data_h data) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_data_destroy", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError DestroyTensorsData(IntPtr data);
+
+        /* int ml_tensors_data_get_tensor_data (ml_tensors_data_h data, unsigned int index, void **raw_data, size_t *data_size) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_data_get_tensor_data", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError GetTensorData(IntPtr data, int index, out IntPtr raw_data, out int data_size);
+
+        /* int ml_tensors_data_set_tensor_data (ml_tensors_data_h data, unsigned int index, const void *raw_data, const size_t data_size) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_data_set_tensor_data", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError SetTensorData(IntPtr data, int index, byte[] raw_data, int data_size);
+
+        /* int ml_check_nnfw_availability (ml_nnfw_type_e nnfw, ml_nnfw_hw_e hw, bool *available); */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_check_nnfw_availability", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError CheckNNFWAvailability(NNFWType nnfw, HWType hw, out bool available);
+
+        /* ml_tensors_data_get_tensor_count (ml_tensors_data_h data, unsigned int *num_tensors) */
+        [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_tensors_data_get_tensor_count", CallingConvention = CallingConvention.Cdecl)]
+        internal static extern NNStreamerError GetTensorsCount(IntPtr data, out uint count);
+
+        internal static byte[] IntPtrToByteArray(IntPtr unmanagedByteArray, int size)
+        {
+            byte[] retByte = new byte[size];
+            Marshal.Copy(unmanagedByteArray, retByte, 0, size);
+            return retByte;
+        }
+    }
+}
diff --git a/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference.csproj b/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference.csproj
new file mode 100755 (executable)
index 0000000..ccca1d1
--- /dev/null
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFramework>netstandard2.0</TargetFramework>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <ProjectReference Include="..\Tizen.Log\Tizen.Log.csproj" />
+    <ProjectReference Include="..\Tizen\Tizen.csproj" />
+  </ItemGroup>
+
+</Project>
diff --git a/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference.sln b/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference.sln
new file mode 100755 (executable)
index 0000000..02b0ae5
--- /dev/null
@@ -0,0 +1,69 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio 15
+VisualStudioVersion = 15.0.26124.0
+MinimumVisualStudioVersion = 15.0.26124.0
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.MachineLearning.Inference", "Tizen.MachineLearning.Inference.csproj", "{AC675801-2A5D-4346-BFD3-3A9809EB9767}"
+       ProjectSection(ProjectDependencies) = postProject
+               {5BC75930-86EF-4A1B-BC26-BC8109773F9A} = {5BC75930-86EF-4A1B-BC26-BC8109773F9A}
+               {12E4988C-94E5-45BD-89FF-011970716A18} = {12E4988C-94E5-45BD-89FF-011970716A18}
+       EndProjectSection
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen", "..\Tizen\Tizen.csproj", "{5BC75930-86EF-4A1B-BC26-BC8109773F9A}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.Log", "..\Tizen.Log\Tizen.Log.csproj", "{12E4988C-94E5-45BD-89FF-011970716A18}"
+EndProject
+Global
+       GlobalSection(SolutionConfigurationPlatforms) = preSolution
+               Debug|Any CPU = Debug|Any CPU
+               Debug|x64 = Debug|x64
+               Debug|x86 = Debug|x86
+               Release|Any CPU = Release|Any CPU
+               Release|x64 = Release|x64
+               Release|x86 = Release|x86
+       EndGlobalSection
+       GlobalSection(ProjectConfigurationPlatforms) = postSolution
+               {AC675801-2A5D-4346-BFD3-3A9809EB9767}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+               {AC675801-2A5D-4346-BFD3-3A9809EB9767}.Debug|Any CPU.Build.0 = Debug|Any CPU
+               {AC675801-2A5D-4346-BFD3-3A9809EB9767}.Debug|x64.ActiveCfg = Debug|Any CPU
+               {AC675801-2A5D-4346-BFD3-3A9809EB9767}.Debug|x64.Build.0 = Debug|Any CPU
+               {AC675801-2A5D-4346-BFD3-3A9809EB9767}.Debug|x86.ActiveCfg = Debug|Any CPU
+               {AC675801-2A5D-4346-BFD3-3A9809EB9767}.Debug|x86.Build.0 = Debug|Any CPU
+               {AC675801-2A5D-4346-BFD3-3A9809EB9767}.Release|Any CPU.ActiveCfg = Release|Any CPU
+               {AC675801-2A5D-4346-BFD3-3A9809EB9767}.Release|Any CPU.Build.0 = Release|Any CPU
+               {AC675801-2A5D-4346-BFD3-3A9809EB9767}.Release|x64.ActiveCfg = Release|Any CPU
+               {AC675801-2A5D-4346-BFD3-3A9809EB9767}.Release|x64.Build.0 = Release|Any CPU
+               {AC675801-2A5D-4346-BFD3-3A9809EB9767}.Release|x86.ActiveCfg = Release|Any CPU
+               {AC675801-2A5D-4346-BFD3-3A9809EB9767}.Release|x86.Build.0 = Release|Any CPU
+               {5BC75930-86EF-4A1B-BC26-BC8109773F9A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+               {5BC75930-86EF-4A1B-BC26-BC8109773F9A}.Debug|Any CPU.Build.0 = Debug|Any CPU
+               {5BC75930-86EF-4A1B-BC26-BC8109773F9A}.Debug|x64.ActiveCfg = Debug|Any CPU
+               {5BC75930-86EF-4A1B-BC26-BC8109773F9A}.Debug|x64.Build.0 = Debug|Any CPU
+               {5BC75930-86EF-4A1B-BC26-BC8109773F9A}.Debug|x86.ActiveCfg = Debug|Any CPU
+               {5BC75930-86EF-4A1B-BC26-BC8109773F9A}.Debug|x86.Build.0 = Debug|Any CPU
+               {5BC75930-86EF-4A1B-BC26-BC8109773F9A}.Release|Any CPU.ActiveCfg = Release|Any CPU
+               {5BC75930-86EF-4A1B-BC26-BC8109773F9A}.Release|Any CPU.Build.0 = Release|Any CPU
+               {5BC75930-86EF-4A1B-BC26-BC8109773F9A}.Release|x64.ActiveCfg = Release|Any CPU
+               {5BC75930-86EF-4A1B-BC26-BC8109773F9A}.Release|x64.Build.0 = Release|Any CPU
+               {5BC75930-86EF-4A1B-BC26-BC8109773F9A}.Release|x86.ActiveCfg = Release|Any CPU
+               {5BC75930-86EF-4A1B-BC26-BC8109773F9A}.Release|x86.Build.0 = Release|Any CPU
+               {12E4988C-94E5-45BD-89FF-011970716A18}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+               {12E4988C-94E5-45BD-89FF-011970716A18}.Debug|Any CPU.Build.0 = Debug|Any CPU
+               {12E4988C-94E5-45BD-89FF-011970716A18}.Debug|x64.ActiveCfg = Debug|Any CPU
+               {12E4988C-94E5-45BD-89FF-011970716A18}.Debug|x64.Build.0 = Debug|Any CPU
+               {12E4988C-94E5-45BD-89FF-011970716A18}.Debug|x86.ActiveCfg = Debug|Any CPU
+               {12E4988C-94E5-45BD-89FF-011970716A18}.Debug|x86.Build.0 = Debug|Any CPU
+               {12E4988C-94E5-45BD-89FF-011970716A18}.Release|Any CPU.ActiveCfg = Release|Any CPU
+               {12E4988C-94E5-45BD-89FF-011970716A18}.Release|Any CPU.Build.0 = Release|Any CPU
+               {12E4988C-94E5-45BD-89FF-011970716A18}.Release|x64.ActiveCfg = Release|Any CPU
+               {12E4988C-94E5-45BD-89FF-011970716A18}.Release|x64.Build.0 = Release|Any CPU
+               {12E4988C-94E5-45BD-89FF-011970716A18}.Release|x86.ActiveCfg = Release|Any CPU
+               {12E4988C-94E5-45BD-89FF-011970716A18}.Release|x86.Build.0 = Release|Any CPU
+       EndGlobalSection
+       GlobalSection(SolutionProperties) = preSolution
+               HideSolutionNode = FALSE
+       EndGlobalSection
+       GlobalSection(ExtensibilityGlobals) = postSolution
+               SolutionGuid = {A42C750C-B824-4DB3-A2E6-7C877C1EB6D2}
+       EndGlobalSection
+EndGlobal
diff --git a/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/Commons.cs b/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/Commons.cs
new file mode 100755 (executable)
index 0000000..d74aad1
--- /dev/null
@@ -0,0 +1,208 @@
+/*
+* Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+*
+* Licensed under the Apache License, Version 2.0 (the License);
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an AS IS BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System;
+using System.IO;
+
+namespace Tizen.MachineLearning.Inference
+{
+    /// <summary>
+    /// Possible data element types of Tensor in NNStreamer.
+    /// </summary>
+    /// <since_tizen> 6 </since_tizen>
+    public enum TensorType
+    {
+#pragma warning disable CA1720 // Identifier contains type name
+        /// <summary>
+        /// Integer 32bit
+        /// </summary>
+        Int32 = 0,
+        /// <summary>
+        /// Unsigned integer 32bit
+        /// </summary>
+        UInt32,
+        /// <summary>
+        /// Integer 16bit
+        /// </summary>
+        Int16,
+        /// <summary>
+        /// Unsigned integer 16bit
+        /// </summary>
+        UInt16,
+        /// <summary>
+        /// Integer 8bit
+        /// </summary>
+        Int8,
+        /// <summary>
+        /// Unsigned integer 8bit
+        /// </summary>
+        UInt8,
+        /// <summary>
+        /// Float 64bit
+        /// </summary>
+        Float64,
+        /// <summary>
+        /// Float 32bit
+        /// </summary>
+        Float32,
+        /// <summary>
+        /// Integer 64bit
+        /// </summary>
+        Int64,
+        /// <summary>
+        /// Unsigned integer 64bit
+        /// </summary>
+        UInt64,
+#pragma warning restore CA1720 // Identifier contains type name
+    }
+
+    internal enum NNStreamerError
+    {
+        None = Tizen.Internals.Errors.ErrorCode.None,
+        InvalidParameter = Tizen.Internals.Errors.ErrorCode.InvalidParameter,
+        StreamsPipe = Tizen.Internals.Errors.ErrorCode.StreamsPipe,
+        TryAgain = Tizen.Internals.Errors.ErrorCode.TryAgain,
+        Unknown = Tizen.Internals.Errors.ErrorCode.Unknown,
+        TimedOut = Tizen.Internals.Errors.ErrorCode.TimedOut,
+        NotSupported = Tizen.Internals.Errors.ErrorCode.NotSupported,
+    }
+
+    /// <summary>
+    /// Types of Neural Network Framework.
+    /// </summary>
+    /// <since_tizen> 6 </since_tizen>
+    public enum NNFWType
+    {
+        /// <summary>
+        /// NNFW is not specified (the framework is determined from the model file extension).
+        /// </summary>
+        Any = 0,
+        /// <summary>
+        /// Custom filter (Independent shared object).
+        /// </summary>
+        CustomFilter,
+        /// <summary>
+        /// Tensorflow-lite (.tflite).
+        /// </summary>
+        TensorflowLite,
+        /// <summary>
+        /// Tensorflow (.pb).
+        /// </summary>
+        Tensorflow,
+        /// <summary>
+        /// Neural Network Inference framework (NNFW), developed by Samsung Research (SR).
+        /// </summary>
+        NNFW,
+    }
+
+    /// <summary>
+    /// Types of hardware resources to be used for NNFWs. Note that if the specified hardware affinity is not supported by the driver or hardware, it is ignored.
+    /// </summary>
+    /// <since_tizen> 6 </since_tizen>
+    public enum HWType
+    {
+        /// <summary>
+        /// Hardware resource is not specified.
+        /// </summary>
+        Any = 0,
+        /// <summary>
+        /// Try to schedule and optimize if possible.
+        /// </summary>
+        Auto = 1,
+        /// <summary>
+        /// Any CPU, if possible.
+        /// </summary>
+        CPU = 0x1000,
+        /// <summary>
+        /// Any GPU, if possible.
+        /// </summary>
+        GPU = 0x2000,
+        /// <summary>
+        /// Any NPU, if possible.
+        /// </summary>
+        NPU = 0x3000,
+    }
+
+    internal static class Tensor
+    {
+        /// <summary>
+        /// The maximum rank that NNStreamer supports with Tizen APIs.
+        /// </summary>
+        internal const int RankLimit = 4;
+
+        /// <summary>
+        /// The maximum number of other/tensor instances that other/tensors may have.
+        /// </summary>
+        internal const int SizeLimit = 16;
+
+        /// <summary>
+        /// Unknown type of Tensor information, used internally for error checking.
+        /// </summary>
+        internal const int UnknownType = 10;
+
+        /// <summary>
+        /// Invalid count of TensorsData, used internally for error checking.
+        /// </summary>
+        internal const int InvalidCount = -1;
+    }
+
+    internal static class NNStreamer
+    {
+        internal const string TAG = "ML.Inference";
+
+        internal static void CheckException(NNStreamerError error, string msg)
+        {
+            if (error != NNStreamerError.None)
+            {
+                Log.Error(NNStreamer.TAG, msg + ": " + error.ToString());
+                throw NNStreamerExceptionFactory.CreateException(error, msg);
+            }
+        }
+    }
+
+    internal class NNStreamerExceptionFactory
+    {
+        internal static Exception CreateException(NNStreamerError err, string msg)
+        {
+            Exception exp;
+            
+            switch (err)
+            {
+                case NNStreamerError.InvalidParameter:
+                    exp = new ArgumentException(msg);
+                    break;
+
+                case NNStreamerError.NotSupported:
+                    exp = new NotSupportedException(msg);
+                    break;
+
+                case NNStreamerError.StreamsPipe:
+                case NNStreamerError.TryAgain:
+                    exp = new IOException(msg);
+                    break;
+
+                case NNStreamerError.TimedOut:
+                    exp = new TimeoutException(msg);
+                    break;
+
+                default:
+                    exp = new InvalidOperationException(msg);
+                    break;
+            }
+            return exp;
+        }
+    }
+}
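
Because NNStreamerExceptionFactory maps native error codes to standard exception
types, callers can branch on those types directly. A hedged sketch (inInfo,
outInfo, and modelPath as in the usage example above):

    try
    {
        var single = new SingleShot(modelPath, inInfo, outInfo);
    }
    catch (NotSupportedException)
    {
        // the machine_learning.inference feature is unavailable on this device
    }
    catch (ArgumentException)
    {
        // invalid model path or tensor information
    }
    catch (System.IO.IOException)
    {
        // constructing the underlying pipeline failed
    }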
diff --git a/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/SingleShot.cs b/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/SingleShot.cs
new file mode 100755 (executable)
index 0000000..e3c2d48
--- /dev/null
@@ -0,0 +1,138 @@
+/*
+* Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+*
+* Licensed under the Apache License, Version 2.0 (the License);
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an AS IS BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.IO;
+
+namespace Tizen.MachineLearning.Inference
+{
+    /// <summary>
+    /// The SingleShot class loads a Machine Learning model and makes inferences from input data.
+    /// </summary>
+    /// <since_tizen> 6 </since_tizen>
+    public class SingleShot : IDisposable
+    {
+        private IntPtr _handle = IntPtr.Zero;
+        private bool _disposed = false;
+
+        /// <summary>
+        /// Loads the neural network model and configures the runtime environment.
+        /// </summary>
+        /// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
+        /// <param name="inTensorsInfo">Input TensorsInfo object</param>
+        /// <param name="outTensorsInfo">Output TensorsInfo object for inference result</param>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="IOException">Thrown when constructing the pipeline is failed.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public SingleShot(string modelAbsPath, TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo)
+        {
+            CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo);
+        }
+
+        /// <summary>
+        /// Destructor of the SingleShot instance.
+        /// </summary>
+        /// <since_tizen> 6 </since_tizen>
+        ~SingleShot()
+        {
+            Dispose(false);
+        }
+
+        /// <summary>
+        /// Releases any unmanaged resources used by this object.
+        /// </summary>
+        /// <since_tizen> 6 </since_tizen>
+        public void Dispose()
+        {
+            Dispose(true);
+            GC.SuppressFinalize(this);
+        }
+
+        /// <summary>
+        /// Invokes the model with the given input data.
+        /// </summary>
+        /// <param name="inTensorsData">The input data to be inferred.</param>
+        /// <returns>TensorsData instance which contains the inferred result.</returns>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="IOException">Thrown when failed to push an input data into source element.</exception>
+        /// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public TensorsData Invoke(TensorsData inTensorsData)
+        {
+            TensorsData out_data;
+            IntPtr out_ptr;
+            NNStreamerError ret = NNStreamerError.None;
+
+            ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.Handle, out out_ptr);
+            NNStreamer.CheckException(ret, "fail to invoke the single inference engine");
+
+            out_data = TensorsData.CreateFromNativeHandle(out_ptr);
+            return out_data;
+        }
+
+        private void CreateSingleShot(string modelAbsPath, TensorsInfo inTensorInfo, TensorsInfo outTensorInfo)
+        {
+            NNStreamerError ret = NNStreamerError.None;
+            IntPtr input_info;
+            IntPtr output_info;
+
+            /* Check model path */
+            if (string.IsNullOrEmpty(modelAbsPath))
+                ret = NNStreamerError.InvalidParameter;
+            NNStreamer.CheckException(ret, "model path is invalid: " + modelAbsPath);
+
+            input_info = inTensorInfo.GetTensorsInfoHandle();
+            output_info = outTensorInfo.GetTensorsInfoHandle();
+
+            ret = Interop.SingleShot.OpenSingle(out _handle, modelAbsPath, input_info, output_info, NNFWType.Any, HWType.Any);
+            NNStreamer.CheckException(ret, "fail to open the single inference engine");
+        }
+
+        /// <summary>
+        /// Releases any unmanaged resources used by this object. Can also dispose any other disposable objects.
+        /// </summary>
+        /// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
+        protected virtual void Dispose(bool disposing)
+        {
+            if (_disposed)
+                return;
+
+            if (disposing)
+            {
+                // release managed object
+            }
+
+            // release unmanaged objects
+            if (_handle != IntPtr.Zero)
+            {
+                NNStreamerError ret = NNStreamerError.None;
+                ret = Interop.SingleShot.CloseSingle(_handle);
+                if (ret != NNStreamerError.None)
+                {
+                    Log.Error(NNStreamer.TAG, "failed to close inference engine");
+                }
+                _handle = IntPtr.Zero;
+            }
+            _disposed = true;
+        }
+    }
+}
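
Note that Invoke() wraps a freshly allocated native handle in a new TensorsData,
so both the input and the output buffers are disposable. A sketch of the
deterministic cleanup pattern, assuming single and inInfo from the examples above:

    using (TensorsData input = inInfo.GetTensorsData())
    {
        input.SetTensorData(0, imageBuffer);
        using (TensorsData output = single.Invoke(input))
        {
            byte[] result = output.GetTensorData(0);
        }   // output's native buffer is destroyed here
    }       // input's native buffer is destroyed here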
diff --git a/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsData.cs b/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsData.cs
new file mode 100755 (executable)
index 0000000..3ba452e
--- /dev/null
@@ -0,0 +1,161 @@
+/*
+* Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+*
+* Licensed under the Apache License, Version 2.0 (the License);
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an AS IS BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System;
+
+namespace Tizen.MachineLearning.Inference
+{
+    /// <summary>
+    /// The TensorsData class sets and gets the buffer data for each Tensor.
+    /// </summary>
+    /// <since_tizen> 6 </since_tizen>
+    public class TensorsData : IDisposable
+    {
+        private IntPtr _handle = IntPtr.Zero;
+        private bool _disposed = false;
+        private int _count = Tensor.InvalidCount;
+
+        /// <summary>
+        /// Creates a TensorsData instance with the native handle given by TensorsInfo.
+        /// </summary>
+        /// <param name="handle">The handle of tensors data.</param>
+        /// <since_tizen> 6 </since_tizen>
+        private TensorsData(IntPtr handle)
+        {
+            _handle = handle;
+        }
+
+        /// <summary>
+        /// Destructor of the TensorsData instance.
+        /// </summary>
+        /// <since_tizen> 6 </since_tizen>
+        ~TensorsData()
+        {
+            Dispose(false);
+        }
+
+        internal static TensorsData CreateFromNativeHandle(IntPtr handle)
+        {
+            TensorsData retTensorsData = new TensorsData(handle);
+
+            return retTensorsData;
+        }
+
+        /// <summary>
+        /// Gets the number of tensors in the TensorsData instance.
+        /// </summary>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public int Count
+        {
+            get {
+                if (_count != Tensor.InvalidCount)
+                    return _count;
+
+                NNStreamerError ret = NNStreamerError.None;
+                ret = Interop.Util.GetTensorsCount(_handle, out int count);
+                NNStreamer.CheckException(ret, "unable to get the count of TensorsData");
+
+                _count = count;
+                return _count;
+            }
+        }
+
+        /// <summary>
+        /// Sets the raw tensor data at the given index.
+        /// </summary>
+        /// <param name="index">The index of the tensor.</param>
+        /// <param name="buffer">Raw tensor data to be set.</param>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public void SetTensorData(int index, byte[] buffer)
+        {
+            NNStreamerError ret = NNStreamerError.None;
+
+            ret = Interop.Util.SetTensorData(_handle, index, buffer, buffer.Length);
+            NNStreamer.CheckException(ret, "unable to set the buffer of TensorsData: " + index.ToString());
+        }
+
+        /// <summary>
+        /// Gets the raw tensor data at the given index.
+        /// </summary>
+        /// <param name="index">The index of the tensor.</param>
+        /// <returns>Raw tensor data</returns>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public byte[] GetTensorData(int index)
+        {
+            byte[] retBuffer;
+            IntPtr raw_data;
+            int size;
+
+            NNStreamerError ret = NNStreamerError.None;
+            ret = Interop.Util.GetTensorData(_handle, index, out raw_data, out size);
+            NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + index.ToString());
+
+            retBuffer = Interop.Util.IntPtrToByteArray(raw_data, size);
+            return retBuffer;
+        }
+
+        /// <summary>
+        /// Releases any unmanaged resources used by this object.
+        /// </summary>
+        /// <since_tizen> 6 </since_tizen>
+        public void Dispose()
+        {
+            Dispose(true);
+            GC.SuppressFinalize(this);
+        }
+
+        /// <summary>
+        /// Releases any unmanaged resources used by this object. Can also dispose any other disposable objects.
+        /// </summary>
+        /// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
+        protected virtual void Dispose(bool disposing)
+        {
+            if (_disposed)
+                return;
+
+            if (disposing)
+            {
+                // release managed object
+            }
+
+            // release unmanaged objects
+            if (_handle != IntPtr.Zero)
+            {
+                NNStreamerError ret = NNStreamerError.None;
+                ret = Interop.Util.DestroyTensorsData(_handle);
+                if (ret != NNStreamerError.None)
+                {
+                    Log.Error(NNStreamer.TAG, "failed to destroy TensorsData object");
+                }
+                _handle = IntPtr.Zero;
+            }
+            _disposed = true;
+        }
+
+        internal IntPtr Handle
+        {
+            get { return _handle; }
+        }
+    }
+}
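
SetTensorData() and GetTensorData() exchange raw byte arrays, so typed payloads
need an explicit conversion. A minimal sketch for a Float32 tensor (tensorsData
and the array sizes are assumed for illustration):

    // float[] -> byte[] before SetTensorData()
    float[] features = new float[3 * 224 * 224];
    byte[] raw = new byte[features.Length * sizeof(float)];
    Buffer.BlockCopy(features, 0, raw, 0, raw.Length);
    tensorsData.SetTensorData(0, raw);

    // byte[] -> float[] after GetTensorData()
    byte[] outRaw = tensorsData.GetTensorData(0);
    float[] outValues = new float[outRaw.Length / sizeof(float)];
    Buffer.BlockCopy(outRaw, 0, outValues, 0, outRaw.Length);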
diff --git a/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsInfo.cs b/src/Tizen.MachineLearning.Inference/Tizen.MachineLearning.Inference/TensorsInfo.cs
new file mode 100755 (executable)
index 0000000..4986b57
--- /dev/null
@@ -0,0 +1,368 @@
+/*
+* Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+*
+* Licensed under the Apache License, Version 2.0 (the License);
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an AS IS BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System;
+using System.Collections.Generic;
+using Log = Tizen.Log;
+
+namespace Tizen.MachineLearning.Inference
+{
+    /// <summary>
+    /// The TensorsInfo class manages the information of each tensor, such as its name, type, and dimension.
+    /// </summary>
+    /// <since_tizen> 6 </since_tizen>
+    public class TensorsInfo : IDisposable
+    {
+        private List<TensorInfo> _infoList;
+        private IntPtr _handle = IntPtr.Zero;
+        private bool _disposed = false;
+
+        /// <summary>
+        /// Gets the number of tensor information entries that have been added.
+        /// </summary>
+        /// <since_tizen> 6 </since_tizen>
+        public int Count => _infoList.Count;
+
+        /// <summary>
+        /// Creates a TensorsInfo instance.
+        /// </summary>
+        /// <since_tizen> 6 </since_tizen>
+        public TensorsInfo()
+        {
+            Log.Info(NNStreamer.TAG, "TensorsInfo is created");
+            _infoList = new List<TensorInfo>();
+        }
+
+        /// <summary>
+        /// Destroys the TensorsInfo resource.
+        /// </summary>
+        /// <since_tizen> 6 </since_tizen>
+        ~TensorsInfo()
+        {
+            Dispose(false);
+        }
+
+        /// <summary>
+        /// Adds a tensor information entry to the TensorsInfo instance. Note that up to 16 tensors are supported in TensorsInfo.
+        /// </summary>
+        /// <param name="type">Data element type of the tensor.</param>
+        /// <param name="dimension">Dimension of the tensor. Note that ranks up to 4 are supported.</param>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="IndexOutOfRangeException">Thrown when the number of tensors already exceeds the size limit (i.e., Tensor.SizeLimit).</exception>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public void AddTensorInfo(TensorType type, int[] dimension)
+        {
+            AddTensorInfo(null, type, dimension);
+        }
+
+        /// <summary>
+        /// Adds a tensor information entry to the TensorsInfo instance. Note that up to 16 tensors are supported in TensorsInfo.
+        /// </summary>
+        /// <param name="name">Name of the tensor.</param>
+        /// <param name="type">Data element type of the tensor.</param>
+        /// <param name="dimension">Dimension of the tensor. Note that ranks up to 4 are supported.</param>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="IndexOutOfRangeException">Thrown when the number of tensors already exceeds the size limit (i.e., Tensor.SizeLimit).</exception>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public void AddTensorInfo(string name, TensorType type, int[] dimension)
+        {
+            int idx = _infoList.Count;
+            if (idx >= Tensor.SizeLimit) {
+                throw new IndexOutOfRangeException("Max size of the tensors is " + Tensor.SizeLimit);
+            }
+            _infoList.Add(new TensorInfo(name, type, dimension));
+
+            if (_handle != IntPtr.Zero)
+            {
+                NNStreamerError ret = NNStreamerError.None;
+
+                /* Set the number of tensors */
+                ret = Interop.Util.SetTensorsCount(_handle, _infoList.Count);
+                NNStreamer.CheckException(ret, "unable to set the number of tensors");
+
+                /* Set the type and dimension of Tensor */
+                ret = Interop.Util.SetTensorType(_handle, idx, type);
+                NNStreamer.CheckException(ret, "fail to set TensorsInfo type");
+
+                ret = Interop.Util.SetTensorDimension(_handle, idx, dimension);
+                NNStreamer.CheckException(ret, "fail to set TensorsInfo dimension");
+            }
+        }
+
+        /// <summary>
+        /// Sets the tensor name with given index.
+        /// </summary>
+        /// <param name="idx">The index of the tensor to be updated.</param>
+        /// <param name="name">The tensor name to be set.</param>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public void SetTensorName(int idx, string name)
+        {
+            CheckIndexBoundary(idx);
+            _infoList[idx].Name = name;
+
+            if (_handle != IntPtr.Zero)
+            {
+                NNStreamerError ret = NNStreamerError.None;
+                ret = Interop.Util.SetTensorName(_handle, idx, name);
+                NNStreamer.CheckException(ret, "unable to set the name of tensor: " + idx.ToString());
+            }
+        }
+
+        /// <summary>
+        /// Gets the tensor name with given index.
+        /// </summary>
+        /// <param name="idx">The index of the tensor.</param>
+        /// <returns>The tensor name.</returns>
+        /// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public string GetTensorName(int idx)
+        {
+            CheckIndexBoundary(idx);
+            return _infoList[idx].Name;
+        }
+
+        /// <summary>
+        /// Sets the tensor type with given index and its type.
+        /// </summary>
+        /// <param name="idx">The index of the tensor to be updated.</param>
+        /// <param name="type">The tensor type to be set.</param>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public void SetTensorType(int idx, TensorType type)
+        {
+            CheckIndexBoundary(idx);
+            _infoList[idx].Type = type;
+
+            if (_handle != IntPtr.Zero)
+            {
+                NNStreamerError ret = NNStreamerError.None;
+                ret = Interop.Util.SetTensorType(_handle, idx, type);
+                NNStreamer.CheckException(ret, "unable to set the type of tensor: " + idx.ToString());
+            }
+        }
+
+        /// <summary>
+        /// Gets the tensor type with given index.
+        /// </summary>
+        /// <param name="idx">The index of the tensor.</param>
+        /// <returns>The tensor type</returns>
+        /// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public TensorType GetTensorType(int idx)
+        {
+            CheckIndexBoundary(idx);
+            return _infoList[idx].Type;
+        }
+
+        /// <summary>
+        /// Sets the tensor dimension with given index and dimension.
+        /// </summary>
+        /// <param name="idx">The index of the tensor to be updated.</param>
+        /// <param name="dimension">The tensor dimension to be set.</param>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public void SetDimension(int idx, int[] dimension)
+        {
+            CheckIndexBoundary(idx);
+            _infoList[idx].SetDimension(dimension);
+
+            if (_handle != IntPtr.Zero)
+            {
+                NNStreamerError ret = NNStreamerError.None;
+                ret = Interop.Util.SetTensorDimension(_handle, idx, dimension);
+                NNStreamer.CheckException(ret, "unable to set the dimension of tensor: " + idx.ToString());
+            }
+        }
+
+        /// <summary>
+        /// Gets the tensor dimension with given index.
+        /// </summary>
+        /// <param name="idx">The index of the tensor.</param>
+        /// <returns>The tensor dimension.</returns>
+        /// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public int[] GetDimension(int idx)
+        {
+            CheckIndexBoundary(idx);
+            return _infoList[idx].Dimension;
+        }
+
+        /// <summary>
+        /// Creates a TensorsData instance based on the information in this TensorsInfo.
+        /// </summary>
+        /// <returns>TensorsData instance</returns>
+        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
+        /// <exception cref="ArgumentException">Thrown when the method failed due to TensorsInfo's information is invalid.</exception>
+        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
+        /// <since_tizen> 6 </since_tizen>
+        public TensorsData GetTensorsData()
+        {
+            IntPtr tensorsData_h;
+            TensorsData retTensorData;
+            NNStreamerError ret = NNStreamerError.None;
+
+            if (_handle == IntPtr.Zero)
+            {
+                Log.Info(NNStreamer.TAG, "_handle is IntPtr.Zero\n" + "  GetTensorsInfoHandle() is called");
+                GetTensorsInfoHandle();
+            }
+
+            ret = Interop.Util.CreateTensorsData(_handle, out tensorsData_h);
+            NNStreamer.CheckException(ret, "unable to create the tensorsData object");
+            Log.Info(NNStreamer.TAG, "success to CreateTensorsData()\n");
+
+            retTensorData = TensorsData.CreateFromNativeHandle(tensorsData_h);
+
+            return retTensorData;
+        }
+
+        internal IntPtr GetTensorsInfoHandle()
+        {
+            NNStreamerError ret = NNStreamerError.None;
+            IntPtr ret_handle;
+            int idx;
+
+            /* Already created */
+            if (_handle != IntPtr.Zero)
+                return _handle;
+
+            /* Check required parameters */
+            int num = _infoList.Count;
+            if (num <= 0 || num > Tensor.SizeLimit)
+                ret = NNStreamerError.InvalidParameter;
+            NNStreamer.CheckException(ret, "number of Tensor in TensorsInfo is invalid: " + _infoList.Count);
+
+            /* Create TensorsInfo object */
+            ret = Interop.Util.CreateTensorsInfo(out ret_handle);
+            NNStreamer.CheckException(ret, "fail to create TensorsInfo object");
+
+            /* Set the number of tensors */
+            ret = Interop.Util.SetTensorsCount(ret_handle, _infoList.Count);
+            NNStreamer.CheckException(ret, "unable to set the number of tensors");
+
+            /* Set each Tensor info */
+            idx = 0;
+            foreach (TensorInfo t in _infoList)
+            {
+                ret = Interop.Util.SetTensorType(ret_handle, idx, t.Type);
+                NNStreamer.CheckException(ret, "fail to set the type of tensor" + idx.ToString());
+
+                ret = Interop.Util.SetTensorDimension(ret_handle, idx, t.Dimension);
+                NNStreamer.CheckException(ret, "fail to set the dimension of tensor: " + idx.ToString());
+
+                idx += 1;
+            }
+
+            _handle = ret_handle;
+            return ret_handle;
+        }
+
+        /// <summary>
+        /// Releases any unmanaged resources used by this object.
+        /// </summary>
+        /// <since_tizen> 6 </since_tizen>
+        public void Dispose()
+        {
+            Dispose(true);
+            GC.SuppressFinalize(this);
+        }
+
+        /// <summary>
+        /// Releases any unmanaged resources used by this object. Can also dispose any other disposable objects.
+        /// </summary>
+        /// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
+        protected virtual void Dispose(bool disposing)
+        {
+            if (_disposed)
+                return;
+
+            if (disposing)
+            {
+                // release managed objects
+                _infoList.Clear();
+            }
+
+            // release unmanaged objects
+            if (_handle != IntPtr.Zero)
+            {
+                NNStreamerError ret = NNStreamerError.None;
+                ret = Interop.Util.DestroyTensorsInfo(_handle);
+                if (ret != NNStreamerError.None)
+                {
+                    Log.Error(NNStreamer.TAG, "failed to destroy TensorsInfo object");
+                }
+                _handle = IntPtr.Zero;
+            }
+            _disposed = true;
+        }
+        
+        private void CheckIndexBoundary(int idx)
+        {
+            if (idx < 0 || idx >= _infoList.Count) {
+                throw new IndexOutOfRangeException("Invalid index [" + idx + "] of the tensors");
+            }
+        }
+
+        private class TensorInfo
+        {
+            public TensorInfo(TensorType type, int[] dimension)
+            {
+                Type = type;
+                SetDimension(dimension);
+            }
+
+            public TensorInfo(string name, TensorType type, int[] dimension)
+            {
+                Name = name;
+                Type = type;
+                SetDimension(dimension);
+            }
+
+            public void SetDimension(int[] dimension)
+            {
+                if (dimension == null) {
+                    throw new ArgumentException("Max size of the tensor rank is" + Tensor.RankLimit);
+                }
+
+                if (dimension.Length > Tensor.RankLimit) {
+                    throw new ArgumentException("Max size of the tensor rank is" + Tensor.RankLimit);
+                }
+                Dimension = (int[])dimension.Clone();
+            }
+
+            public string Name { get; set; } = null;
+
+            public TensorType Type { get; set; } = TensorType.Int32;
+
+            public int[] Dimension { get; private set; } = new int[Tensor.RankLimit];
+        }
+    }
+}
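
The native tensors-info handle is created lazily by GetTensorsInfoHandle(); once
it exists, the setter methods keep it in sync with the managed list. A hedged
sketch of declaring an entry and adjusting it afterwards:

    var info = new TensorsInfo();
    info.AddTensorInfo(TensorType.Int32, new int[4] { 10, 1, 1, 1 });

    // Entries can be renamed and reshaped after they are added; if the native
    // handle has already been created, the updates are pushed to it as well.
    info.SetTensorName(0, "scores");
    info.SetTensorType(0, TensorType.Float32);
    info.SetDimension(0, new int[4] { 1001, 1, 1, 1 });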
diff --git a/test/Tizen.MachineLearning.Inference.Test/App.cs b/test/Tizen.MachineLearning.Inference.Test/App.cs
new file mode 100755 (executable)
index 0000000..3f4e518
--- /dev/null
@@ -0,0 +1,132 @@
+/*
+ *  Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License
+ */
+
+using System;
+using Log = Tizen.Log;
+using Xamarin.Forms;
+using Tizen.MachineLearning.Inference.Test;
+
+namespace XamarinForTizen.Tizen
+{
+    public class App : Application
+    {
+        Button btnPipeline;
+        Button btnSingle;
+        Button btnTensorsInfo;
+        Label lblResult;
+        
+        
+        public App()
+        {
+            btnPipeline = new Button
+            {
+                Text = "Pipeline Test",
+                HorizontalOptions = LayoutOptions.FillAndExpand,
+                VerticalOptions = LayoutOptions.StartAndExpand,
+            };
+            btnPipeline.Clicked += OnBtnPipelineClicked;
+
+            btnSingle = new Button
+            {
+                Text = "Single Test",
+                HorizontalOptions = LayoutOptions.FillAndExpand,
+                VerticalOptions = LayoutOptions.StartAndExpand,
+            };
+            btnSingle.Clicked += OnBtnSingleClicked;
+
+            btnTensorsInfo = new Button
+            {
+                Text = "TensorsInfo Test",
+                HorizontalOptions = LayoutOptions.FillAndExpand,
+                VerticalOptions = LayoutOptions.StartAndExpand,
+            };
+            btnTensorsInfo.Clicked += OnBtnTensorsInfoClicked;
+
+            lblResult = new Label
+            {
+                Text = "",
+                HorizontalOptions = LayoutOptions.FillAndExpand,
+            };
+            // The root page of your application
+            MainPage = new ContentPage
+            {
+                Content = new StackLayout
+                {
+                    VerticalOptions = LayoutOptions.Start,
+                    Children = {
+                        btnPipeline,
+                        btnSingle,
+                        btnTensorsInfo,
+                        lblResult,
+                    }
+                }
+            };
+        }
+
+        protected override void OnStart()
+        {
+            // Handle when your app starts
+        }
+
+        protected override void OnSleep()
+        {
+            // Handle when your app sleeps
+        }
+
+        protected override void OnResume()
+        {
+            // Handle when your app resumes
+        }
+
+        private void OnBtnPipelineClicked(object s, EventArgs e)
+        {
+            string retMsg = "";
+            retMsg += "Pipeline Test Started\n\n";
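+            // NOTE: Pipeline API tests are not included in this patch, so this
+            // handler only reports the start and completion markers.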
+
+            retMsg += "\nPipeline Test Done";
+
+            lblResult.Text = retMsg;
+        }
+
+        private void OnBtnSingleClicked(object s, EventArgs e)
+        {
+            string msg = "Single Test Started\n";
+
+            msg += "  * BasicSingleTest_Success00: ";
+            msg += SingleShotTest.BasicSingleTest_Success00() ? "OK\n" : "Failed\n";
+
+            msg += "Single Test is Done\n";
+
+            lblResult.Text = msg;
+        }
+
+        private void OnBtnTensorsInfoClicked(object s, EventArgs e)
+        {
+            string msg = "TensorsInfo Test Started\n";
+
+            msg += "  * BasicTensorTest_Success00: ";
+            msg += TensorsInfoTest.BasicTensorTest_Success00() ? "OK\n" : "Failed\n";
+
+            msg += "  * BasicTensorTest_Success01: ";
+            msg += TensorsInfoTest.BasicTensorTest_Success01() ? "OK\n" : "Failed\n";
+
+            msg += "  * BasicTensorTest_Success02: ";
+            msg += TensorsInfoTest.BasicTensorTest_Success02() ? "OK\n" : "Failed\n";
+
+            lblResult.Text = msg;
+        }
+    }
+}
diff --git a/test/Tizen.MachineLearning.Inference.Test/Program.cs b/test/Tizen.MachineLearning.Inference.Test/Program.cs
new file mode 100755 (executable)
index 0000000..f8d0fb7
--- /dev/null
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License
+ */
+
+using System;
+
+namespace XamarinForTizen.Tizen
+{
+    class Program : global::Xamarin.Forms.Platform.Tizen.FormsApplication
+    {
+        protected override void OnCreate()
+        {
+            base.OnCreate();
+
+            LoadApplication(new App());
+        }
+
+        static void Main(string[] args)
+        {
+            var app = new Program();
+            global::Xamarin.Forms.Platform.Tizen.Forms.Init(app);
+            app.Run(args);
+        }
+    }
+}
diff --git a/test/Tizen.MachineLearning.Inference.Test/SingleTest.cs b/test/Tizen.MachineLearning.Inference.Test/SingleTest.cs
new file mode 100755 (executable)
index 0000000..78c6b8b
--- /dev/null
@@ -0,0 +1,57 @@
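+/*
+ *  Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License
+ */
+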
+using System;
+using System.IO;
+using System.Text;
+using System.Threading.Tasks;
+using Tizen.MachineLearning.Inference;
+
+namespace Tizen.MachineLearning.Inference.Test
+{
+    class SingleShotTest
+    {
+        const string TAG = "ML.Inference.Test";
+        private static string ResourcePath = Tizen.Applications.Application.Current.DirectoryInfo.Resource;
+
+        public static bool BasicSingleTest_Success00()
+        {
+            byte[] in_buffer = new byte[3 * 224 * 224 * 1];
+            byte[] out_buffer;
+            string model_path = ResourcePath + "models/mobilenet_v1_1.0_224_quant.tflite";
+
+            TensorsInfo in_info;
+            TensorsInfo out_info;
+            TensorsData in_data;
+            TensorsData out_data;
+
+            /* Set input & output TensorsInfo */
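+            /* Note: NNStreamer orders a tensor dimension as
+             * [channel, width, height, batch], so one 224x224 RGB frame
+             * becomes { 3, 224, 224, 1 }. */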
+            in_info = new TensorsInfo();
+            in_info.AddTensorInfo(TensorType.UInt8, new int[4] { 3, 224, 224, 1 });
+
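+            /* The quantized MobileNet v1 model outputs 1001 class scores
+             * (1000 ImageNet labels plus a background class). */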
+            out_info = new TensorsInfo();
+            out_info.AddTensorInfo(TensorType.UInt8, new int[4] { 1001, 1, 1, 1 });
+
+            /* Create single inference engine */
+            SingleShot single = new SingleShot(model_path, in_info, out_info);
+
+            /* Set input data */
+            in_data = in_info.GetTensorsData();
+            in_data.SetTensorData(0, in_buffer);
+
+            /* Single shot invoke */
+            out_data = single.Invoke(in_data);
+
+            /* Get output data from TensorsData */
+            out_buffer = out_data.GetTensorData(0);
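+
+            /* Illustrative sanity check: the quantized model emits one byte
+             * per class, so 1001 bytes are expected. */
+            if (out_buffer.Length != 1001)
+                return false;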
+
+            /* Release Single inference instance */
+            single.Dispose();
+
+            /* clean up */
+            in_data.Dispose();
+            out_data.Dispose();
+            in_info.Dispose();
+            out_info.Dispose();
+
+            return true;
+        }
+    }
+}
diff --git a/test/Tizen.MachineLearning.Inference.Test/TensorsInfoTest.cs b/test/Tizen.MachineLearning.Inference.Test/TensorsInfoTest.cs
new file mode 100755 (executable)
index 0000000..460e658
--- /dev/null
@@ -0,0 +1,144 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.IO;
+using Tizen.MachineLearning.Inference;
+
+namespace Tizen.MachineLearning.Inference.Test
+{
+    public static class TensorsInfoTest
+    {
+        const string TAG = "Nnstreamer";
+
+        public static bool BasicTensorTest_Success00()
+        {
+            int[] in_dim = new int[4] { 3, 224, 224, 1 };
+
+            TensorsInfo tensorsInfo = new TensorsInfo();
+            tensorsInfo.AddTensorInfo(TensorType.UInt8, in_dim);
+
+            /* Check */
+            if (tensorsInfo.GetTensorType(0) != TensorType.UInt8)
+                return false;
+
+            int[] in_res = tensorsInfo.GetDimension(0);
+            for (int i = 0; i < 4; ++i)
+            {
+                if (in_dim[i] != in_res[i])
+                    return false;
+            }
+            return true;
+        }
+
+        public static bool BasicTensorTest_Success01()
+        {
+            TensorsInfo tensorsInfo;
+            TensorsData tensorsData;
+            int[] in_dim = new int[4] { 10, 1, 1, 1 };
+            byte[] buffer_in = new byte[] { 17, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
+            byte[] buffer_out;
+
+            tensorsInfo = new TensorsInfo();
+            tensorsInfo.AddTensorInfo(TensorType.UInt8, in_dim);
+            Log.Info(TAG, "Current Count: " + tensorsInfo.Count);
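+            /* Illustrative sanity check: one AddTensorInfo() call should
+             * yield a single tensor. */
+            if (tensorsInfo.Count != 1)
+                return false;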
+
+            tensorsData = tensorsInfo.GetTensorsData();
+            tensorsData.SetTensorData(0, buffer_in);
+
+            buffer_out = tensorsData.GetTensorData(0);
+
+            if (buffer_in.Length != buffer_out.Length)
+            {
+                Log.Error(TAG, "The input and output buffer sizes are different");
+                return false;
+            }
+
+            for (int i = 0; i < buffer_in.Length; ++i)
+            {
+                if (buffer_in[i] != buffer_out[i])
+                {
+                    Log.Error(TAG, "The value of the " + i.ToString() + "-th element is different");
+                    return false;
+                }
+            }
+
+            return true;
+        }
+
+        public static bool BasicTensorTest_Success02()
+        {
+            TensorsInfo tensorsInfo;
+            TensorsData tensorsData;
+            int[] in_dim = new int[4] { 10, 1, 1, 1 };
+            byte[] buffer_in = new byte[] { 17, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
+            byte[] buffer_out;
+
+            tensorsInfo = new TensorsInfo();
+            tensorsInfo.AddTensorInfo(TensorType.UInt8, in_dim);
+
+            tensorsData = tensorsInfo.GetTensorsData();
+            tensorsData.SetTensorData(0, buffer_in);
+            buffer_out = tensorsData.GetTensorData(0);
+
+            if (buffer_in.Length != buffer_out.Length)
+            {
+                Log.Error(TAG, "The input and output buffer sizes are different");
+                return false;
+            }
+
+            for (int i = 0; i < buffer_in.Length; ++i)
+            {
+                if (buffer_in[i] != buffer_out[i])
+                {
+                    Log.Error(TAG, "The value of the " + i.ToString() + "-th element is different");
+                    return false;
+                }
+            }
+            tensorsData.Dispose();
+
+            /* Add new tensor */
+            int[] in2_dim = new int[4] { 5, 1, 1, 1 };
+            byte[] buffer_in2 = new byte[] { 10, 20, 30, 40, 50 };
+            byte[] buffer_out2;
+
+            tensorsInfo.AddTensorInfo(TensorType.UInt8, in2_dim);
+
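+            /* A fresh TensorsData instance is fetched so that it reflects the
+             * new two-tensor layout. */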
+            tensorsData = tensorsInfo.GetTensorsData();
+            tensorsData.SetTensorData(0, buffer_in);
+            buffer_out = tensorsData.GetTensorData(0);
+            tensorsData.SetTensorData(1, buffer_in2);
+            buffer_out2 = tensorsData.GetTensorData(1);
+
+            if (buffer_in2.Length != buffer_out2.Length)
+            {
+                Log.Error(TAG, "The input and output buffer sizes are different");
+                return false;
+            }
+
+            for (int i = 0; i < buffer_in2.Length; ++i)
+            {
+                if (buffer_in2[i] != buffer_out2[i])
+                {
+                    Log.Error(TAG, "The value of the " + i.ToString() + "-th element is different");
+                    return false;
+                }
+            }
+
+            return true;
+        }
+    }
+}
diff --git a/test/Tizen.MachineLearning.Inference.Test/Tizen.MachineLearning.Inference.Test.csproj b/test/Tizen.MachineLearning.Inference.Test/Tizen.MachineLearning.Inference.Test.csproj
new file mode 100755 (executable)
index 0000000..85e0af0
--- /dev/null
@@ -0,0 +1,29 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <!-- Property Group for Tizen60 Project -->
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>tizen60</TargetFramework>
+  </PropertyGroup>
+
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+    <DebugType>portable</DebugType>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+    <DebugType>None</DebugType>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <Folder Include="lib\" />
+    <Folder Include="res\" />
+  </ItemGroup>
+
+  <!-- Include Nuget Package for Tizen Project building -->
+  <ItemGroup>
+    <PackageReference Include="Tizen.NET" Version="6.0.0.14863" />
+    <PackageReference Include="Tizen.NET.Sdk" Version="1.0.3" />
+    <PackageReference Include="Xamarin.Forms" Version="4.0.0.482894" />
+  </ItemGroup>
+
+</Project>
diff --git a/test/Tizen.MachineLearning.Inference.Test/Tizen.MachineLearning.Inference.Test.sln b/test/Tizen.MachineLearning.Inference.Test/Tizen.MachineLearning.Inference.Test.sln
new file mode 100755 (executable)
index 0000000..3df090c
--- /dev/null
@@ -0,0 +1,25 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio 15
+VisualStudioVersion = 15.0.28307.645
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tizen.MachineLearning.Inference.Test", "Tizen.MachineLearning.Inference.Test.csproj", "{55443533-832E-49A8-B8E4-1A4C07A97F87}"
+EndProject
+Global
+       GlobalSection(SolutionConfigurationPlatforms) = preSolution
+               Debug|Any CPU = Debug|Any CPU
+               Release|Any CPU = Release|Any CPU
+       EndGlobalSection
+       GlobalSection(ProjectConfigurationPlatforms) = postSolution
+               {55443533-832E-49A8-B8E4-1A4C07A97F87}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+               {55443533-832E-49A8-B8E4-1A4C07A97F87}.Debug|Any CPU.Build.0 = Debug|Any CPU
+               {55443533-832E-49A8-B8E4-1A4C07A97F87}.Release|Any CPU.ActiveCfg = Release|Any CPU
+               {55443533-832E-49A8-B8E4-1A4C07A97F87}.Release|Any CPU.Build.0 = Release|Any CPU
+       EndGlobalSection
+       GlobalSection(SolutionProperties) = preSolution
+               HideSolutionNode = FALSE
+       EndGlobalSection
+       GlobalSection(ExtensibilityGlobals) = postSolution
+               SolutionGuid = {2EFA7F43-50B6-4153-8FE6-290DD0A33E80}
+       EndGlobalSection
+EndGlobal
diff --git a/test/Tizen.MachineLearning.Inference.Test/res/models/mobilenet_v1_1.0_224_quant.tflite b/test/Tizen.MachineLearning.Inference.Test/res/models/mobilenet_v1_1.0_224_quant.tflite
new file mode 100755 (executable)
index 0000000..9a81d7c
Binary files /dev/null and b/test/Tizen.MachineLearning.Inference.Test/res/models/mobilenet_v1_1.0_224_quant.tflite differ
diff --git a/test/Tizen.MachineLearning.Inference.Test/shared/res/Tizen.MachineLearning.Inference.Test.png b/test/Tizen.MachineLearning.Inference.Test/shared/res/Tizen.MachineLearning.Inference.Test.png
new file mode 100755 (executable)
index 0000000..9f3cb98
Binary files /dev/null and b/test/Tizen.MachineLearning.Inference.Test/shared/res/Tizen.MachineLearning.Inference.Test.png differ
diff --git a/test/Tizen.MachineLearning.Inference.Test/tizen-manifest.xml b/test/Tizen.MachineLearning.Inference.Test/tizen-manifest.xml
new file mode 100755 (executable)
index 0000000..468c117
--- /dev/null
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest package="org.tizen.machine.inference.Test" version="1.0.0" api-version="5" xmlns="http://tizen.org/ns/packages">
+    <profile name="mobile" />
+    <ui-application appid="org.tizen.machine.inference.Test" exec="Tizen.MachineLearning.Inference.Test.dll" multiple="false" nodisplay="false" taskmanage="true" type="dotnet" launch_mode="single">
+        <label>Tizen.MachineLearning.Inference.Test.dll</label>
+        <icon>Tizen.MachineLearning.Inference.Test.png</icon>
+        <metadata key="http://tizen.org/metadata/prefer_dotnet_aot" value="true" />
+        <splash-screens />
+    </ui-application>
+    <shortcut-list />
+    <provides-appdefined-privileges />
+</manifest>