From c09bbf73ae234e9c3bd989c59d3fb7af220679b7 Mon Sep 17 00:00:00 2001
From: Jihoon Lee <jhoon.it.lee@samsung.com>
Date: Mon, 29 Nov 2021 12:47:20 +0900
Subject: [PATCH] [KLD loss] kld loss scaffolding

This patch add kld loss scaffolding

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
---
 jni/Android.mk                           |  1 +
 nntrainer/layers/loss/kld_loss_layer.cpp | 28 ++++++++++++++
 nntrainer/layers/loss/kld_loss_layer.h   | 66 ++++++++++++++++++++++++++++++++
 nntrainer/layers/loss/meson.build        |  1 +
 4 files changed, 96 insertions(+)
 create mode 100644 nntrainer/layers/loss/kld_loss_layer.cpp
 create mode 100644 nntrainer/layers/loss/kld_loss_layer.h

diff --git a/jni/Android.mk b/jni/Android.mk
index 43a5328..3272a28 100644
--- a/jni/Android.mk
+++ b/jni/Android.mk
@@ -152,6 +152,7 @@ NNTRAINER_SRCS := $(NNTRAINER_ROOT)/nntrainer/models/neuralnet.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/bn_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/loss/loss_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/loss/mse_loss_layer.cpp \
+                  $(NNTRAINER_ROOT)/nntrainer/layers/loss/kld_loss_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/loss/cross_entropy_sigmoid_loss_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/loss/cross_entropy_softmax_loss_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/loss/constant_derivative_loss_layer.cpp \
diff --git a/nntrainer/layers/loss/kld_loss_layer.cpp b/nntrainer/layers/loss/kld_loss_layer.cpp
new file mode 100644
index 0000000..20910c2
--- /dev/null
+++ b/nntrainer/layers/loss/kld_loss_layer.cpp
@@ -0,0 +1,28 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2021 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file kld_loss_layer.cpp
+ * @date 25 November 2021
+ * @brief KLD (Kullback-Leibler Divergence) loss implementation
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug No known bugs except for NYI items
+ *
+ */
+#include <kld_loss_layer.h>
+#include <layer_context.h>
+#include <string>
+#include <vector>
+
+namespace nntrainer {
+KLDLossLayer::KLDLossLayer() {}
+
+KLDLossLayer::~KLDLossLayer() {}
+
+void KLDLossLayer::setProperty(const std::vector<std::string> &values) {}
+
+void KLDLossLayer::forwarding(RunLayerContext &context, bool training) {}
+
+void KLDLossLayer::calcDerivative(RunLayerContext &context) {}
+} // namespace nntrainer
diff --git a/nntrainer/layers/loss/kld_loss_layer.h b/nntrainer/layers/loss/kld_loss_layer.h
new file mode 100644
index 0000000..30291d7
--- /dev/null
+++ b/nntrainer/layers/loss/kld_loss_layer.h
@@ -0,0 +1,66 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2021 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file kld_loss_layer.h
+ * @date 25 November 2021
+ * @brief KLD (Kullback-Leibler Divergence) loss implementation
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug No known bugs except for NYI items
+ *
+ */
+#ifndef __KLD_LOSS_LAYER_H__
+#define __KLD_LOSS_LAYER_H__
+
+#ifdef __cplusplus
+
+#include <loss_layer.h>
+#include <string>
+#include <vector>
+
+namespace nntrainer {
+
+/**
+ * @class KLD (Kullback-Leibler Divergence) Loss layer
+ * @brief kld loss layer
+ */
+class KLDLossLayer final : public LossLayer {
+public:
+  /**
+   * @brief Constructor of KLD Loss Layer
+   */
+  KLDLossLayer();
+
+  /**
+   * @brief Destructor of KLD Loss Layer
+   */
+  ~KLDLossLayer();
+
+  /**
+   * @copydoc Layer::setProperty(const std::vector<std::string> &values)
+   */
+  void setProperty(const std::vector<std::string> &values) override;
+
+  /**
+   * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
+   */
+  void forwarding(RunLayerContext &context, bool training) override;
+
+  /**
+   * @copydoc Layer::calcDerivative(RunLayerContext &context)
+   */
+  void calcDerivative(RunLayerContext &context) override;
+
+  /**
+   * @copydoc Layer::getType()
+   */
+  const std::string getType() const override { return KLDLossLayer::type; }
+
+  inline static const std::string type = "kld";
+};
+} // namespace nntrainer
+
+#endif /* __cplusplus */
+
+#endif // __KLD_LOSS_LAYER_H__
diff --git a/nntrainer/layers/loss/meson.build
b/nntrainer/layers/loss/meson.build
index 2370b61..9fccd02 100644
--- a/nntrainer/layers/loss/meson.build
+++ b/nntrainer/layers/loss/meson.build
@@ -1,5 +1,6 @@
 loss_layer_sources = [
   'loss_layer.cpp',
+  'kld_loss_layer.cpp',
   'mse_loss_layer.cpp',
   'cross_entropy_sigmoid_loss_layer.cpp',
   'cross_entropy_softmax_loss_layer.cpp',
-- 
2.7.4