type */
LAYER_LOSS_CROSS_ENTROPY_SOFTMAX, /**< Cross Entropy with Softmax Loss Layer
type */
+ LAYER_LOSS_CONSTANT_DERIVATIVE, /**< Synthetic loss layer to feed constant
+ derivative */
LAYER_UNKNOWN = ML_TRAIN_LAYER_TYPE_UNKNOWN /**< Unknown */
};
$(NNTRAINER_ROOT)/nntrainer/layers/loss/mse_loss_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/loss/cross_entropy_sigmoid_loss_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/loss/cross_entropy_softmax_loss_layer.cpp \
+ $(NNTRAINER_ROOT)/nntrainer/layers/loss/constant_derivative_loss_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/conv2d_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/pooling2d_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/activation_layer.cpp \
#include <bn_layer.h>
#include <centroid_knn.h>
#include <concat_layer.h>
+#include <constant_derivative_loss_layer.h>
#include <conv2d_layer.h>
#include <cross_entropy_sigmoid_loss_layer.h>
#include <cross_entropy_softmax_loss_layer.h>
ac.registerFactory(nntrainer::createLayer<CrossEntropySoftmaxLossLayer>,
CrossEntropySoftmaxLossLayer::type,
LayerType::LAYER_LOSS_CROSS_ENTROPY_SOFTMAX);
+ ac.registerFactory(nntrainer::createLayer<ConstantDerivativeLossLayer>,
+ ConstantDerivativeLossLayer::type,
+ LayerType::LAYER_LOSS_CONSTANT_DERIVATIVE);
ac.registerFactory(nntrainer::createLayer<TimeDistLayer>, TimeDistLayer::type,
LayerType::LAYER_TIME_DIST);
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2021 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file constant_derivative_loss_layer.cpp
+ * @date 05 Oct 2021
+ * @brief This file contains constant derivative loss implementation
+ * @note This is special type of loss to feed an arbitrary derivative value to
+ * the last layer.
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug No known bugs except for NYI items
+ */
+
+#include <constant_derivative_loss_layer.h>
+
+namespace nntrainer {
+
+static constexpr int SINGLE_INOUT_IDX = 0;
+/// @todo make this property
+static constexpr float value = 1.0f;
+
+/// Default-construct through the LossLayer base; this layer holds no state of
+/// its own, so the destructor can be defaulted.
+ConstantDerivativeLossLayer::ConstantDerivativeLossLayer() : LossLayer() {}
+ConstantDerivativeLossLayer::~ConstantDerivativeLossLayer() = default;
+
+/**
+ * @brief Forward pass: copy the input straight through to the output and,
+ * when a label is present, report the constant @c value as the loss.
+ * @param context run context providing input/output/label tensors
+ * @param training unused here; pass-through behaves the same in both modes
+ */
+void ConstantDerivativeLossLayer::forwarding(RunLayerContext &context,
+                                             bool training) {
+  Tensor &hidden_ = context.getOutput(SINGLE_INOUT_IDX);
+  Tensor &y = context.getInput(SINGLE_INOUT_IDX);
+
+  // fill the output
+  hidden_.fill(y);
+
+  if (context.isLabelAvailable(SINGLE_INOUT_IDX)) {
+    // Loss is synthetic: a 1-element tensor holding the constant `value`,
+    // independent of input/label contents.
+    Tensor l(1);
+    l.setValue(value);
+    // update the loss value
+    LossLayer::updateLoss(context, l);
+  }
+}
+
+/**
+ * @brief Set layer properties; no layer-specific keys yet, everything is
+ * delegated to the LossLayer base (see the @todo on `value` above).
+ * @param values vector of "key=value" property strings
+ */
+void ConstantDerivativeLossLayer::setProperty(
+  const std::vector<std::string> &values) {
+  /// update set value
+  LossLayer::setProperty(values);
+}
+
+/**
+ * @brief Back-propagate the constant derivative to the previous layer.
+ *
+ * Uses the file-local @c value constant so the reported loss (forwarding)
+ * and the emitted derivative stay consistent; previously 1.0f was
+ * hard-coded here, which would silently diverge from the loss the moment
+ * @c value is changed or promoted to a property.
+ * @param context run context providing the outgoing derivative tensor
+ */
+void ConstantDerivativeLossLayer::calcDerivative(RunLayerContext &context) {
+  Tensor &ret_derivative = context.getOutgoingDerivative(SINGLE_INOUT_IDX);
+  ret_derivative.setValue(value);
+}
+
+} // namespace nntrainer
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2021 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file constant_derivative_loss_layer.h
+ * @date 05 Oct 2021
+ * @brief This file contains constant derivative loss implementation
+ * @note This is special type of loss to feed an arbitrary derivative value to
+ * the last layer.
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug No known bugs except for NYI items
+ */
+#ifndef __CONSTANT_DERIVATIVE_LOSS_LAYER_H__
+#define __CONSTANT_DERIVATIVE_LOSS_LAYER_H__
+#ifdef __cplusplus
+
+#include <loss_layer.h>
+
+namespace nntrainer {
+
+/**
+ * @class ConstantDerivativeLossLayer
+ * @brief Constant Loss Layer
+ */
+class ConstantDerivativeLossLayer final : public LossLayer {
+public:
+  /**
+   * @brief Constructor of Constant Derivative Loss Layer
+   */
+  ConstantDerivativeLossLayer();
+
+  /**
+   * @brief Destructor of Constant Derivative Loss Layer
+   */
+  ~ConstantDerivativeLossLayer();
+
+  /**
+   * @copydoc Layer::setProperty(const std::vector<std::string> &values)
+   */
+  void setProperty(const std::vector<std::string> &values) override;
+
+  /**
+   * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
+   */
+  void forwarding(RunLayerContext &context, bool training) override;
+
+  /**
+   * @copydoc Layer::calcDerivative(RunLayerContext &context)
+   */
+  void calcDerivative(RunLayerContext &context) override;
+
+  /**
+   * @copydoc Layer::getType()
+   */
+  const std::string getType() const override {
+    return ConstantDerivativeLossLayer::type;
+  };
+
+  /// Registry key used by the layer factory (see app context registration).
+  inline static const std::string type = "constant_derivative";
+};
+} // namespace nntrainer
+
+#endif /* __cplusplus */
+
+#endif // __CONSTANT_DERIVATIVE_LOSS_LAYER_H__
'loss_layer.cpp',
'mse_loss_layer.cpp',
'cross_entropy_sigmoid_loss_layer.cpp',
- 'cross_entropy_softmax_loss_layer.cpp'
+ 'cross_entropy_softmax_loss_layer.cpp',
+ 'constant_derivative_loss_layer.cpp'
]
loss_layer_headers = []