From 1c72bbfa09fbd3cfbe32947903e4773ebf46adee Mon Sep 17 00:00:00 2001
From: Jihoon Lee
Date: Tue, 26 Jan 2021 13:04:36 +0900
Subject: [PATCH] [Fix] Reflect changes to upstream/main

Merging some big PRs introduced inconsistencies that caused a build
break. This patch fixes the issue.

**Changes proposed in this PR:**
- Use manager.initializeTensors() in the unittest
- Add the training argument to the forwarding() signature

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee
---
 nntrainer/layers/preprocess_flip_layer.cpp      |  9 ++++++++-
 nntrainer/layers/preprocess_flip_layer.h        | 13 ++++++++-----
 nntrainer/layers/preprocess_translate_layer.cpp | 11 +++++++++--
 nntrainer/layers/preprocess_translate_layer.h   | 12 +++++++-----
 test/unittest/unittest_nntrainer_layers.cpp     |  6 ++----
 5 files changed, 34 insertions(+), 17 deletions(-)

diff --git a/nntrainer/layers/preprocess_flip_layer.cpp b/nntrainer/layers/preprocess_flip_layer.cpp
index bd1781a..110e3c6 100644
--- a/nntrainer/layers/preprocess_flip_layer.cpp
+++ b/nntrainer/layers/preprocess_flip_layer.cpp
@@ -59,7 +59,14 @@ void PreprocessFlipLayer::setProperty(const PropertyType type,
   }
 }
 
-void PreprocessFlipLayer::forwarding() {
+void PreprocessFlipLayer::forwarding(bool training) {
+  if (!training) {
+    for (unsigned int idx = 0; idx < input_dim.size(); idx++) {
+      net_hidden[idx]->getVariableRef() = net_input[idx]->getVariableRef();
+    }
+
+    return;
+  }
   using std::swap;
 
   bool fliph, flipw;
diff --git a/nntrainer/layers/preprocess_flip_layer.h b/nntrainer/layers/preprocess_flip_layer.h
index a438a64..9636f8e 100644
--- a/nntrainer/layers/preprocess_flip_layer.h
+++ b/nntrainer/layers/preprocess_flip_layer.h
@@ -64,17 +64,17 @@ public:
   /**
    * @copydoc Layer::forwarding()
    */
-  void forwarding();
+  void forwarding(bool training = true) override;
 
   /**
    * @copydoc Layer::calcDerivative()
    */
-  void calcDerivative();
+  void calcDerivative() override;
 
   /**
    * @copydoc Layer::setTrainable(bool train)
   */
-  void setTrainable(bool train);
+  void setTrainable(bool train) override;
 
   using Layer::setProperty;
 
@@ -82,12 +82,15 @@ public:
   /**
    * @copydoc Layer::setProperty(const PropertyType type, const std::string
    * &value)
   */
-  void setProperty(const PropertyType type, const std::string &value = "");
+  void setProperty(const PropertyType type,
+                   const std::string &value = "") override;
 
   /**
    * @copydoc Layer::getType()
   */
-  const std::string getType() const { return PreprocessFlipLayer::type; };
+  const std::string getType() const override {
+    return PreprocessFlipLayer::type;
+  }
 
   static const std::string type;
diff --git a/nntrainer/layers/preprocess_translate_layer.cpp b/nntrainer/layers/preprocess_translate_layer.cpp
index 456872d..95770fc 100644
--- a/nntrainer/layers/preprocess_translate_layer.cpp
+++ b/nntrainer/layers/preprocess_translate_layer.cpp
@@ -77,9 +77,16 @@ void PreprocessTranslateLayer::setProperty(const PropertyType type,
   }
 }
 
-void PreprocessTranslateLayer::forwarding() {
-  for (unsigned int idx = 0; idx < input_dim.size(); idx++) {
+void PreprocessTranslateLayer::forwarding(bool training) {
+  if (!training) {
+    for (unsigned int idx = 0; idx < input_dim.size(); idx++) {
+      net_hidden[idx]->getVariableRef() = net_input[idx]->getVariableRef();
+    }
+    return;
+  }
+
+  for (unsigned int idx = 0; idx < input_dim.size(); idx++) {
     Tensor &hidden_ = net_hidden[idx]->getVariableRef();
     Tensor &input_ = net_input[idx]->getVariableRef();
 
diff --git a/nntrainer/layers/preprocess_translate_layer.h b/nntrainer/layers/preprocess_translate_layer.h
index 278a187..4c4ff74 100644
--- a/nntrainer/layers/preprocess_translate_layer.h
+++ b/nntrainer/layers/preprocess_translate_layer.h
@@ -64,27 +64,29 @@ public:
    * @retval #ML_ERROR_NONE Successful.
    * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
   */
-  int initialize(Manager &manager);
+  int initialize(Manager &manager) override;
 
   /**
    * @copydoc Layer::forwarding()
   */
-  void forwarding();
+  void forwarding(bool training = true) override;
 
   /**
    * @copydoc Layer::calcDerivative()
   */
-  void calcDerivative();
+  void calcDerivative() override;
 
   /**
    * @copydoc Layer::setTrainable(bool train)
   */
-  void setTrainable(bool train);
+  void setTrainable(bool train) override;
 
   /**
    * @copydoc Layer::getType()
   */
-  const std::string getType() const { return PreprocessTranslateLayer::type; };
+  const std::string getType() const override {
+    return PreprocessTranslateLayer::type;
+  }
 
   using Layer::setProperty;
diff --git a/test/unittest/unittest_nntrainer_layers.cpp b/test/unittest/unittest_nntrainer_layers.cpp
index 7e80068..811b120 100644
--- a/test/unittest/unittest_nntrainer_layers.cpp
+++ b/test/unittest/unittest_nntrainer_layers.cpp
@@ -520,8 +520,7 @@ TEST_F(nntrainer_PreprocessTranslateLayer, forwarding_01_p) {
   layer.setProperty({"random_translate=0.0"});
 
   layer.initialize(manager);
-  manager.initialize();
-  manager.initializeInOuts(true);
+  manager.initializeTensors(true);
 
   nntrainer::Tensor in(nntrainer::TensorDim({2, 3, 32, 32}));
   nntrainer::Tensor out_trans;
@@ -542,8 +541,7 @@ TEST_F(nntrainer_PreprocessTranslateLayer, forwarding_02_p) {
   layer.setProperty({"random_translate=0.1"});
 
   layer.initialize(manager);
-  manager.initialize();
-  manager.initializeInOuts(true);
+  manager.initializeTensors(true);
 
   nntrainer::Tensor in(nntrainer::TensorDim({1, 3, 32, 32}));
 
--
2.7.4
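Note for reviewers (not part of the patch): below is a minimal sketch of how the new `forwarding(bool training)` signature is exercised, following the unittest flow in the diff above. Only the calls that appear in the diff are taken from the source; the include paths, the `sketch` helper, and the `Manager` wiring are assumptions for illustration.

```cpp
// Sketch only: mirrors the unittest flow in the patch. Include paths and
// Manager setup are assumptions, not verified against the real tree.
#include <manager.h>
#include <preprocess_translate_layer.h>

void sketch(nntrainer::Manager &manager) {
  nntrainer::PreprocessTranslateLayer layer;
  layer.setProperty({"random_translate=0.1"});

  layer.initialize(manager);       // register the layer's tensors with the manager
  manager.initializeTensors(true); // replaces the old initialize() + initializeInOuts(true) pair

  layer.forwarding();      // training path (default): inputs are randomly translated
  layer.forwarding(false); // inference path: inputs pass through to the outputs unchanged
}
```

The design point the patch relies on: in inference mode both preprocess layers skip their augmentation entirely and assign the input tensor references straight to the layer outputs, so the added `training` flag makes data augmentation a training-only no-op rather than a separate code path.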