[MachineLearning.Train] Add initial Layer class
author Hyunil <hyunil46.park@samsung.com>
Fri, 27 May 2022 03:06:45 +0000 (12:06 +0900)
committer Jijoong Moon <jijoong.moon@samsung.com>
Tue, 23 Aug 2022 05:50:26 +0000 (14:50 +0900)
- Create Layer.cs and Interop.Layer.cs for Layer class
- Add Layer class with a Layer(NNTrainerLayerType type) constructor to Layer.cs
- Call Interop.Layer.Destroy() from Dispose()
- Add NNTrainerLayerType to Commons.cs
- Add ml_train_layer_create() to interop
- Add ml_train_layer_destroy() to interop
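
A minimal usage sketch of the new class (illustrative only; LayerSample and
Run() are hypothetical names, while Layer, NNTrainerLayerType, and Dispose()
are the APIs added by this commit):

    using Tizen.MachineLearning.Train;

    internal static class LayerSample
    {
        internal static void Run()
        {
            // The constructor calls ml_train_layer_create(); FC is just an
            // arbitrary example value from the NNTrainerLayerType enum.
            using (var layer = new Layer(NNTrainerLayerType.FC))
            {
                // Layer configuration APIs are not part of this commit.
            } // Dispose() releases the handle via ml_train_layer_destroy().
        }
    }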

Signed-off-by: Hyunil <hyunil46.park@samsung.com>
src/Tizen.MachineLearning.Train/Interop/Interop.Layer.cs [new file with mode: 0644]
src/Tizen.MachineLearning.Train/Tizen.MachineLearning.Train/Commons.cs
src/Tizen.MachineLearning.Train/Tizen.MachineLearning.Train/Layer.cs [new file with mode: 0644]

diff --git a/src/Tizen.MachineLearning.Train/Interop/Interop.Layer.cs b/src/Tizen.MachineLearning.Train/Interop/Interop.Layer.cs
new file mode 100644 (file)
index 0000000..20395f8
--- /dev/null
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2022 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Runtime.InteropServices;
+using Tizen.MachineLearning.Train;
+
+internal static partial class Interop
+{
+    internal static partial class Layer
+    {
+        /* int ml_train_layer_create(ml_train_layer_h *layer, ml_train_layer_type_e type) */
+        [DllImport(Libraries.Nntrainer, EntryPoint = "ml_train_layer_create")]
+        public static extern NNTrainerError Create(out IntPtr layerHandle, NNTrainerLayerType type);
+
+        /* int ml_train_layer_destroy(ml_train_layer_h layer) */
+        [DllImport(Libraries.Nntrainer, EntryPoint = "ml_train_layer_destroy")]
+        public static extern NNTrainerError Destroy(IntPtr layerHandle);
+    }
+}
\ No newline at end of file
diff --git a/src/Tizen.MachineLearning.Train/Tizen.MachineLearning.Train/Commons.cs b/src/Tizen.MachineLearning.Train/Tizen.MachineLearning.Train/Commons.cs
index a50a9ef..ad203d2 100644 (file)
@@ -75,6 +75,138 @@ namespace Tizen.MachineLearning.Train
         IniWithBin = 2
     }
 
+    /// <summary>
+    /// Enumeration for the neural network layer type of NNTrainer.
+    /// </summary>
+    /// <since_tizen> 10 </since_tizen>
+    public enum NNTrainerLayerType
+    {
+        /// <summary>
+        /// Input Layer
+        /// </summary>
+        Input = 0,
+        /// <summary>
+        /// Fully Connected Layer
+        /// </summary>
+        FC = 1,
+        /// <summary>
+        /// Batch Normalization Layer
+        /// </summary>
+        BN = 2,
+        /// <summary>
+        /// Convolution 2D Layer
+        /// </summary>
+        Conv2D = 3,
+        /// <summary>
+        /// Pooling 2D Layer
+        /// </summary>
+        Pooling2D = 4,
+        /// <summary>
+        /// Flatten Layer
+        /// </summary>
+        Flatten = 5,
+        /// <summary>
+        /// Activation Layer
+        /// </summary>
+        Activation = 6,
+        /// <summary>
+        /// Addition Layer
+        /// </summary>
+        Addition = 7,
+        /// <summary>
+        /// Concat Layer
+        /// </summary>
+        Concat = 8,
+        /// <summary>
+        /// MultiOut Layer
+        /// </summary>
+        MultiOut = 9,
+        /// <summary>
+        /// Embedding Layer
+        /// </summary>
+        Embedding = 10,
+        /// <summary>
+        /// RNN Layer
+        /// </summary>
+        RNN = 11,
+        /// <summary>
+        /// LSTM Layer
+        /// </summary>
+        LSTM = 12,
+        /// <summary>
+        /// Split Layer
+        /// </summary>
+        Split = 13,
+        /// <summary>
+        /// GRU Layer
+        /// </summary>
+        GRU = 14,
+        /// <summary>
+        /// Permute Layer
+        /// </summary>
+        Permute = 15,
+        /// <summary>
+        /// Dropout Layer
+        /// </summary>
+        Dropout = 16,
+        /// <summary>
+        /// Backbone using NNStreamer
+        /// </summary>
+        BackboneNNStreamer = 17,
+        /// <summary>
+        /// Centroid KNN Layer
+        /// </summary>
+        CentroidKNN = 18,
+        /// <summary>
+        /// Convolution 1D Layer
+        /// </summary>
+        Conv1D = 19,
+        /// <summary>
+        /// LSTM Cell Layer 
+        /// </summary>
+        LSTMCell = 20,
+        /// <summary>
+        /// GRU Cell Layer 
+        /// </summary>
+        GRUCell = 21,
+        /// <summary>
+        /// RNN Cell Layer 
+        /// </summary>
+        RNNCell = 22,
+        /// <summary>
+        /// ZoneoutLSTM Cell Layer 
+        /// </summary>
+        ZoneoutLSTMCell = 23,
+        /// <summary>
+        /// Preprocess flip Layer 
+        /// </summary>
+        PreprocessFlip = 300,
+        /// <summary>
+        /// Preprocess translate Layer 
+        /// </summary>
+        PreprocessTranslate = 301,
+        /// <summary>
+        /// Preprocess L2Normalization Layer 
+        /// </summary>
+        PreprocessL2Norm = 302,
+        /// <summary>
+        /// Mean Squared Error Loss Layer
+        /// </summary>
+        LossMSE = 500,
+        /// <summary>
+        /// Cross Entropy with Sigmoid Loss Layer
+        /// </summary>
+        LossCrossEntropySigmoid = 501,
+        /// <summary>
+        /// Cross Entropy with Softmax Loss Layer
+        /// </summary>
+        LossCrossEntropySoftmax = 502,
+        /// <summary>
+        /// Unknown
+        /// </summary>
+        Unknown = 999
+    }
+
     internal static class NNTrainer
     {
  
diff --git a/src/Tizen.MachineLearning.Train/Tizen.MachineLearning.Train/Layer.cs b/src/Tizen.MachineLearning.Train/Tizen.MachineLearning.Train/Layer.cs
new file mode 100644 (file)
index 0000000..7734749
--- /dev/null
@@ -0,0 +1,93 @@
+/*
+* Copyright (c) 2022 Samsung Electronics Co., Ltd. All Rights Reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+using static Interop;
+using System;
+using System.IO;
+
+namespace Tizen.MachineLearning.Train
+{
+    /// <summary>
+    /// Creates a neural network layer.
+    /// </summary>
+    /// <remarks>
+    /// Use this class to create a neural network layer.
+    /// If the layer is not added to a model, it must be released by calling Dispose().
+    /// If it is added to a model, the layer is available until the model is released.
+    /// </remarks>
+    /// <since_tizen> 10 </since_tizen>
+    public class Layer : IDisposable
+    {
+        private IntPtr handle = IntPtr.Zero;
+        private bool disposed = false;
+
+        /// <summary>
+        /// Creates a neural network layer.
+        /// </summary>
+        /// <param name="type">The nntrainer layer type.</param>
+        /// <since_tizen> 10 </since_tizen>
+        public Layer(NNTrainerLayerType type)
+        {
+            NNTrainerError ret = Interop.Layer.Create(out handle, type);
+            NNTrainer.CheckException(ret, "Failed to create model instance");
+        }
+        /// <summary>
+        /// Frees the neural network layer.
+        /// </summary>
+        /// <since_tizen> 10 </since_tizen>
+        /// <remarks>
+        /// Destroys the neural network layer. Destruction fails if the layer is owned by a model.
+        /// </remarks>
+        ~Layer()
+        {
+            Dispose(false);
+        }
+
+        /// <summary>
+        /// Releases any unmanaged resources used by this object.
+        /// </summary>
+        /// <since_tizen> 10 </since_tizen>
+        public void Dispose()
+        {
+            Dispose(true);
+            GC.SuppressFinalize(this);
+        }
+
+        /// <summary>
+        /// Releases any unmanaged resources used by this object including opened handle.
+        /// </summary>
+        /// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
+        /// <since_tizen> 10 </since_tizen>
+        protected virtual void Dispose(bool disposing)
+        {
+            if (disposed)
+                return;
+            if (disposing)
+            {
+                // release managed object
+            }
+            // release unmanaged object
+            if (handle != IntPtr.Zero)
+            {
+                // Destroy the neural network layer.
+                NNTrainerError ret = Interop.Layer.Destroy(handle);
+                NNTrainer.CheckException(ret, "Failed to destroy layer instance");
+
+                handle = IntPtr.Zero;
+            }
+            disposed = true;
+        }
+    } 
+}
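
A short sketch of the dispose semantics implemented above (LayerLifetimeCheck and
Run() are hypothetical names; the behaviour follows from the disposed flag and the
GC.SuppressFinalize() call in this diff):

    using Tizen.MachineLearning.Train;

    internal static class LayerLifetimeCheck
    {
        internal static void Run()
        {
            var layer = new Layer(NNTrainerLayerType.Input);

            // The first call destroys the native handle through
            // ml_train_layer_destroy() and suppresses the finalizer.
            layer.Dispose();

            // The second call is a no-op because the disposed flag is already set.
            layer.Dispose();
        }
    }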