[Layer] Add constant derivative layer
authorJihoon Lee <jhoon.it.lee@samsung.com>
Tue, 5 Oct 2021 06:52:47 +0000 (15:52 +0900)
committerJijoong Moon <jijoong.moon@samsung.com>
Thu, 7 Oct 2021 10:20:47 +0000 (19:20 +0900)
This patch adds a constant derivative loss layer. This layer will be used to
simulate a backward operation without any loss.

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
api/ccapi/include/layer.h
jni/Android.mk
nntrainer/app_context.cpp
nntrainer/layers/loss/constant_derivative_loss_layer.cpp [new file with mode: 0644]
nntrainer/layers/loss/constant_derivative_loss_layer.h [new file with mode: 0644]
nntrainer/layers/loss/meson.build

index b3e1723..bed8910 100644 (file)
@@ -71,6 +71,8 @@ enum LayerType {
                                        type */
   LAYER_LOSS_CROSS_ENTROPY_SOFTMAX, /**< Cross Entropy with Softmax Loss Layer
                                        type */
+  LAYER_LOSS_CONSTANT_DERIVATIVE,   /**< Synthetic loss layer to feed constant
+                                       derivative */
   LAYER_UNKNOWN = ML_TRAIN_LAYER_TYPE_UNKNOWN /**< Unknown */
 };
 
index e4e2f94..8ffb1e7 100644 (file)
@@ -154,6 +154,7 @@ NNTRAINER_SRCS := $(NNTRAINER_ROOT)/nntrainer/models/neuralnet.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/loss/mse_loss_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/loss/cross_entropy_sigmoid_loss_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/loss/cross_entropy_softmax_loss_layer.cpp \
+                  $(NNTRAINER_ROOT)/nntrainer/layers/loss/constant_derivative_loss_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/conv2d_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/pooling2d_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/activation_layer.cpp \
index 29d9c92..960a923 100644 (file)
@@ -34,6 +34,7 @@
 #include <bn_layer.h>
 #include <centroid_knn.h>
 #include <concat_layer.h>
+#include <constant_derivative_loss_layer.h>
 #include <conv2d_layer.h>
 #include <cross_entropy_sigmoid_loss_layer.h>
 #include <cross_entropy_softmax_loss_layer.h>
@@ -277,6 +278,9 @@ static void add_default_object(AppContext &ac) {
   ac.registerFactory(nntrainer::createLayer<CrossEntropySoftmaxLossLayer>,
                      CrossEntropySoftmaxLossLayer::type,
                      LayerType::LAYER_LOSS_CROSS_ENTROPY_SOFTMAX);
+  ac.registerFactory(nntrainer::createLayer<ConstantDerivativeLossLayer>,
+                     ConstantDerivativeLossLayer::type,
+                     LayerType::LAYER_LOSS_CONSTANT_DERIVATIVE);
 
   ac.registerFactory(nntrainer::createLayer<TimeDistLayer>, TimeDistLayer::type,
                      LayerType::LAYER_TIME_DIST);
diff --git a/nntrainer/layers/loss/constant_derivative_loss_layer.cpp b/nntrainer/layers/loss/constant_derivative_loss_layer.cpp
new file mode 100644 (file)
index 0000000..ab8bf0b
--- /dev/null
@@ -0,0 +1,53 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2021 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file constant_derivative_loss_layer.cpp
+ * @date 05 Oct 2021
+ * @brief This file contains the constant derivative loss implementation
+ * @note This is special type of loss to feed an arbitrary derivative value to
+ * the last layer.
+ * @see        https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug No known bugs except for NYI items
+ */
+
+#include <constant_derivative_loss_layer.h>
+
+namespace nntrainer {
+
+/// this layer handles exactly one input and one output tensor
+static constexpr int SINGLE_INOUT_IDX = 0;
+/// constant value used for the synthetic loss / derivative signal
+/// @todo make this property
+static constexpr float value = 1.0f;
+
+ConstantDerivativeLossLayer::ConstantDerivativeLossLayer() : LossLayer() {}
+ConstantDerivativeLossLayer::~ConstantDerivativeLossLayer() = default;
+
+/**
+ * @brief forward pass: pass the input through to the output unchanged; when
+ *        a label is available, report the constant `value` as the loss (the
+ *        loss here is synthetic — the label is not compared against)
+ */
+void ConstantDerivativeLossLayer::forwarding(RunLayerContext &context,
+                                             bool training) {
+  Tensor &hidden_ = context.getOutput(SINGLE_INOUT_IDX);
+  Tensor &y = context.getInput(SINGLE_INOUT_IDX);
+
+  // fill the output
+  hidden_.fill(y);
+
+  if (context.isLabelAvailable(SINGLE_INOUT_IDX)) {
+    // scalar tensor holding the constant loss to report
+    Tensor l(1);
+    l.setValue(value);
+    // update the loss value
+    LossLayer::updateLoss(context, l);
+  }
+}
+
+/**
+ * @brief set layer properties; this layer defines no properties of its own
+ *        yet, so everything is delegated to LossLayer
+ */
+void ConstantDerivativeLossLayer::setProperty(
+  const std::vector<std::string> &values) {
+  /// update set value
+  LossLayer::setProperty(values);
+}
+
+/**
+ * @brief backward pass: ignore any incoming error signal and emit the
+ *        constant `value` as the derivative toward the previous layer
+ */
+void ConstantDerivativeLossLayer::calcDerivative(RunLayerContext &context) {
+  Tensor &ret_derivative = context.getOutgoingDerivative(SINGLE_INOUT_IDX);
+  // use the shared `value` constant instead of a duplicated 1.0f literal so
+  // that turning `value` into a property updates the derivative path too
+  ret_derivative.setValue(value);
+}
+
+} // namespace nntrainer
diff --git a/nntrainer/layers/loss/constant_derivative_loss_layer.h b/nntrainer/layers/loss/constant_derivative_loss_layer.h
new file mode 100644 (file)
index 0000000..37fde05
--- /dev/null
@@ -0,0 +1,66 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2021 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file constant_derivative_loss_layer.h
+ * @date 05 Oct 2021
+ * @brief This file declares the constant derivative loss layer
+ * @note This is special type of loss to feed an arbitrary derivative value to
+ * the last layer.
+ * @see        https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug No known bugs except for NYI items
+ */
+#ifndef __CONSTANT_DERIVATIVE_LOSS_LAYER_H__
+#define __CONSTANT_DERIVATIVE_LOSS_LAYER_H__
+#ifdef __cplusplus
+
+#include <loss_layer.h>
+
+namespace nntrainer {
+
+/**
+ * @class   ConstantDerivativeLossLayer
+ * @brief   Constant Derivative Loss Layer
+ */
+class ConstantDerivativeLossLayer final : public LossLayer {
+public:
+  /**
+   * @brief     Constructor of Constant Derivative Loss Layer
+   */
+  ConstantDerivativeLossLayer();
+
+  /**
+   * @brief     Destructor of Constant Derivative Loss Layer
+   */
+  ~ConstantDerivativeLossLayer();
+
+  /**
+   * @copydoc Layer::setProperty(const std::vector<std::string> &values)
+   */
+  void setProperty(const std::vector<std::string> &values) override;
+
+  /**
+   * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
+   */
+  void forwarding(RunLayerContext &context, bool training) override;
+
+  /**
+   * @copydoc Layer::calcDerivative(RunLayerContext &context)
+   */
+  void calcDerivative(RunLayerContext &context) override;
+
+  /**
+   * @copydoc Layer::getType()
+   */
+  const std::string getType() const override {
+    return ConstantDerivativeLossLayer::type;
+  };
+
+  /** registration key used when this layer is registered with a factory */
+  inline static const std::string type = "constant_derivative";
+};
+} // namespace nntrainer
+
+#endif /* __cplusplus */
+
+#endif // __CONSTANT_DERIVATIVE_LOSS_LAYER_H__
index 4c1e231..2370b61 100644 (file)
@@ -2,7 +2,8 @@ loss_layer_sources = [
   'loss_layer.cpp',
   'mse_loss_layer.cpp',
   'cross_entropy_sigmoid_loss_layer.cpp',
-  'cross_entropy_softmax_loss_layer.cpp'
+  'cross_entropy_softmax_loss_layer.cpp',
+  'constant_derivative_loss_layer.cpp'
 ]
 
 loss_layer_headers = []