From e50a09519362c676da888e7fdeec6aebaa818c0e Mon Sep 17 00:00:00 2001
From: Parichay Kapoor
Date: Fri, 21 May 2021 18:17:58 +0900
Subject: [PATCH] [layer] Move distribute to LayerNode

Move distribute to LayerNode from Layer.

Further, the distribute layer is now supposed to be used as a wrapper of
a layer maintained by the LayerNode, and must not be maintained
externally. Rather, use getDistribute() to check if the node is
distributed and treat the layer as a regular layer.

**Self evaluation:**
1. Build test: [x]Passed [ ]Failed [ ]Skipped
2. Run test: [x]Passed [ ]Failed [ ]Skipped

Signed-off-by: Parichay Kapoor
---
 nntrainer/compiler/ini_interpreter.cpp      |  14 +---
 nntrainer/graph/graph_node.h                |   2 +-
 nntrainer/graph/network_graph.cpp           | 108 +++++++++++-----------------
 nntrainer/graph/network_graph.h             |  16 ++---
 nntrainer/layers/layer.cpp                  |   8 ---
 nntrainer/layers/layer_internal.h           |  42 ++++-------
 nntrainer/layers/layer_node.cpp             |  67 ++++++++++++++++-
 nntrainer/layers/layer_node.h               |  64 ++++++++++++-----
 nntrainer/layers/time_dist.h                |   6 +-
 test/unittest/unittest_nntrainer_layers.cpp |  37 +++++++---
 10 files changed, 210 insertions(+), 154 deletions(-)
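Usage note (a sketch, not part of the commit): the call pattern this patch
settles on, mirroring the nntrainer_LayerNode tests added at the end of the
diff; all identifiers are taken from the diff itself:

    // Distribution is toggled on the node; the TimeDistLayer wrapper is
    // created inside LayerNode::setProperty() and never escapes it.
    auto lnode =
      nntrainer::createLayerNode(nntrainer::FullyConnectedLayer::type);
    lnode->setProperty({"distribute=true"});

    if (lnode->getDistribute()) {
      // getObject() resolves to the wrapped (effective) layer, so the
      // node can be treated as a regular layer from here on.
      std::shared_ptr<nntrainer::Layer> &layer = lnode->getObject();
    }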
diff --git a/nntrainer/compiler/ini_interpreter.cpp b/nntrainer/compiler/ini_interpreter.cpp
index a62a35a..fb8e2e4 100644
--- a/nntrainer/compiler/ini_interpreter.cpp
+++ b/nntrainer/compiler/ini_interpreter.cpp
@@ -133,20 +133,8 @@ section2layer(dictionary *ini, const std::string &sec_name,
     << FUNC_TAG << "section type is invalid for section name: " << sec_name;
 
   auto properties = section2properties(ini, sec_name);
-  std::shared_ptr<nntrainer::Layer> nntr_layer =
-    ac.createObject<nntrainer::Layer>(layer_type);
-
-  if (nntr_layer->getDistribute()) {
-    ml_logd("This %s layer is going to distributed", sec_name.c_str());
-    std::shared_ptr<nntrainer::Layer> dist_layer =
-      nntrainer::createLayer(TimeDistLayer::type);
-    std::dynamic_pointer_cast<TimeDistLayer>(dist_layer)
-      ->setDistLayer(nntr_layer);
-
-    nntr_layer = dist_layer;
-  }
-
-  auto layer = createLayerNode(nntr_layer, properties);
+  auto layer = createLayerNode(ac.createObject<nntrainer::Layer>(layer_type),
+                               properties);
 
   return layer;
 }
diff --git a/nntrainer/graph/graph_node.h b/nntrainer/graph/graph_node.h
index 4e30331..2701be8 100644
--- a/nntrainer/graph/graph_node.h
+++ b/nntrainer/graph/graph_node.h
@@ -35,7 +35,7 @@ public:
    * @brief     Get index of the node
    *
    */
-  virtual size_t getIndex() = 0;
+  virtual size_t getIndex() const = 0;
 
   /**
    * @brief     Set index of the node
diff --git a/nntrainer/graph/network_graph.cpp b/nntrainer/graph/network_graph.cpp
index 6fa9fe1..5490f0c 100644
--- a/nntrainer/graph/network_graph.cpp
+++ b/nntrainer/graph/network_graph.cpp
@@ -43,13 +43,6 @@ namespace nntrainer {
 static const std::vector<std::string> in_place_layers = {
   ActivationLayer::type, BatchNormalizationLayer::type};
 
-static std::shared_ptr<Layer> distributeLayer(std::shared_ptr<Layer> l) {
-  std::shared_ptr<Layer> layer = nntrainer::createLayer(TimeDistLayer::type);
-  std::dynamic_pointer_cast<TimeDistLayer>(layer)->setDistLayer(l);
-
-  return layer;
-}
-
 int NetworkGraph::compile(const LossType loss_type) {
   int status = ML_ERROR_NONE;
 
@@ -121,8 +114,14 @@ void NetworkGraph::countNonTrainableLayersAtBegin() {
   skip_non_trainable_layers = graph.size();
 }
 
-int NetworkGraph::realizeMultiInputType(Layer &current) {
+int NetworkGraph::realizeMultiInputType(
+  const std::shared_ptr<LayerNode> &in_node) {
+  Layer &current = *in_node->getObject();
   int status = ML_ERROR_NONE;
+  /**
+   * Multi-input works with time distribution layer by itself
+   *
+   */
   if (current.getNumInputs() == 1)
     return ML_ERROR_NONE;
 
@@ -148,7 +147,9 @@ int NetworkGraph::realizeMultiInputType(Layer &current) {
   return status;
 }
 
-int NetworkGraph::realizeFlattenType(Layer &current) {
+int NetworkGraph::realizeFlattenType(
+  const std::shared_ptr<LayerNode> &in_node) {
+  Layer &current = *in_node->getObject();
   if (current.getType() == FlattenLayer::type) {
     ml_loge(
       "It is not allowed to realize flatten layer, possibly flatten layer is "
@@ -172,7 +173,9 @@ int NetworkGraph::realizeFlattenType(Layer &current) {
   return ML_ERROR_NONE;
 }
 
-int NetworkGraph::realizeActivationType(Layer &current) {
+int NetworkGraph::realizeActivationType(
+  const std::shared_ptr<LayerNode> &in_node) {
+  Layer &current = *in_node->getObject();
   int status = ML_ERROR_NONE;
 
   ActivationType act = current.getActivationType();
@@ -203,19 +206,16 @@ int NetworkGraph::realizeActivationType(Layer &current) {
   }
 
   std::shared_ptr<LayerNode> lnode = createLayerNode(ActivationLayer::type);
-  std::shared_ptr<Layer> layer = lnode->getObject();
-
-  layer->setActivation(act);
   graph.ensureName(*lnode, current.getName());
 
-  if (current.getType() == TimeDistLayer::type) {
-    std::string unit_str = layer->getName();
-    graph.ensureName(*lnode, "", "_unit");
-    layer = distributeLayer(layer);
-    lnode = std::make_shared<LayerNode>(layer);
-    layer->setName(unit_str);
+  if (in_node->getDistribute()) {
+    lnode->setProperty({"distribute=true"});
+    graph.ensureName(*lnode, "", "_distribute");
   }
 
+  std::shared_ptr<Layer> layer = lnode->getObject();
+  layer->setActivation(act);
+  layer->setNumInputs(current.getNumInputs());
   layer->input_layers.clear();
   layer->input_layers.push_back(current.getName());
 
@@ -228,9 +228,16 @@ int NetworkGraph::realizeActivationType(Layer &current) {
   return status;
 }
 
-int NetworkGraph::realizeMultiOutputType(Layer &current) {
+int NetworkGraph::realizeMultiOutputType(
+  const std::shared_ptr<LayerNode> &in_node) {
+  Layer &current = *in_node->getObject();
   int status = ML_ERROR_NONE;
-  if (current.output_layers.size() == 1)
+  /**
+   * Multi-output works with time distribution layer by itself
+   *
+   */
+
+  if (current.getNumOutputs() == 1)
     return ML_ERROR_NONE;
 
   std::shared_ptr<LayerNode> lnode = createLayerNode(OutputLayer::type);
@@ -260,20 +267,13 @@ int NetworkGraph::realizeMultiOutputType(Layer &current) {
 
 /** TODO: this needs special attention */
 int NetworkGraph::addLossLayer(const LossType loss_type) {
   int status = ML_ERROR_NONE;
-  auto const &last_node = graph.getSortedNode(graph.size() - 1);
+  auto const &last_node = LNODE(graph.getSortedNode(graph.size() - 1));
   auto last_layer_node = getSortedLayerNode(graph.size() - 1);
 
   if (last_node->getType() == LossLayer::type)
     return status;
 
-  if (last_node->getType() == TimeDistLayer::type) {
-    if (std::static_pointer_cast<TimeDistLayer>(last_layer_node->getObject())
-          ->getDistLayerType() == LossLayer::type)
-      return status;
-  }
-
   if (loss_type == LossType::LOSS_NONE) {
     return ML_ERROR_NONE;
   }
@@ -282,11 +282,6 @@ int NetworkGraph::addLossLayer(const LossType loss_type) {
 
   if (updated_loss_type == LossType::LOSS_ENTROPY) {
     auto type = last_node->getType();
-    if (type == TimeDistLayer::type) {
-      type =
-        std::dynamic_pointer_cast<TimeDistLayer>(last_layer_node->getObject())
-          ->getDistLayerType();
-    }
 
     if (type != "activation") {
       ml_loge("Error: Cross Entropy need last layer to have softmax or sigmoid"
@@ -296,7 +291,7 @@ int NetworkGraph::addLossLayer(const LossType loss_type) {
 
     graph.removeLastNode();
 
-    switch (last_layer_node->getObject()->getActivationType()) {
+    switch (last_layer_node->getActivationType()) {
     case ActivationType::ACT_SIGMOID:
       updated_loss_type = LossType::LOSS_ENTROPY_SIGMOID;
       break;
@@ -320,12 +315,9 @@ int NetworkGraph::addLossLayer(const LossType loss_type) {
 
   std::string input_str = updated_last_node->getName();
 
-  if (updated_last_node->getType() == TimeDistLayer::type) {
-    std::string unit_str = layer->getName();
-    graph.ensureName(*lnode, "", "_unit");
-    layer = distributeLayer(layer);
-    lnode = std::make_shared<LayerNode>(layer);
-    layer->setName(unit_str);
+  if (updated_last_node->getDistribute()) {
+    lnode->setProperty({"distribute=true"});
+    graph.ensureName(*lnode, "", "_distribute");
   }
 
   last_layer_node = LNODE(updated_last_node);
@@ -482,18 +474,18 @@ int NetworkGraph::realizeGraph() {
 
     if (l.getType() != AdditionLayer::type &&
         l.getType() != ConcatLayer::type) {
-      status = realizeMultiInputType(l);
+      status = realizeMultiInputType(lnode);
       NN_RETURN_STATUS();
     }
 
     if (l.getType() != ActivationLayer::type) {
-      status = realizeActivationType(l);
+      status = realizeActivationType(lnode);
       NN_RETURN_STATUS();
     }
 
     // Flatten in TimeDistLayer is not supported.
-    if (lnode->getFlatten() && l.getType() != TimeDistLayer::type) {
-      status = realizeFlattenType(l);
+    if (lnode->getFlatten() && !lnode->getDistribute()) {
+      status = realizeFlattenType(lnode);
       NN_RETURN_STATUS();
     }
   }
@@ -509,9 +501,10 @@ int NetworkGraph::realizeGraph() {
   node_list = graph.getNodes();
 
   for (unsigned int i = 0; i < num_nodes; ++i) {
-    Layer &l = *LNODE(node_list[i])->getObject();
-    if (l.getType() != OutputLayer::type && l.getType() != SplitLayer::type) {
-      status = realizeMultiOutputType(l);
+    auto const &lnode = LNODE(node_list[i]);
+    if (lnode->getType() != OutputLayer::type &&
+        lnode->getType() != SplitLayer::type) {
+      status = realizeMultiOutputType(lnode);
       NN_RETURN_STATUS();
     }
   }
@@ -704,9 +697,6 @@ void NetworkGraph::inPlaceOptimize(const std::string &layer_type,
     auto layer_node = *iter;
     auto &l = layer_node->getObject();
     std::string l_type = l->getType();
-    if (l_type == TimeDistLayer::type) {
-      l_type = std::dynamic_pointer_cast<TimeDistLayer>(l)->getDistLayerType();
-    }
 
     if (l_type == layer_type &&
         l->getActivationType() != ActivationType::ACT_SOFTMAX) {
@@ -806,26 +796,14 @@ int NetworkGraph::initialize(std::shared_ptr<Manager> manager) {
     auto const &lnode = getSortedLayerNode(idx);
     auto &lptr = lnode->getObject();
     ml_logd("layer name : %s", lptr->getName().c_str());
-    std::string cur_type;
-    if (lptr->getType() == TimeDistLayer::type) {
-      cur_type =
-        std::dynamic_pointer_cast<TimeDistLayer>(lptr)->getDistLayerType();
-    } else {
-      cur_type = lptr->getType();
-    }
+    std::string cur_type = lptr->getType();
 
     /**
      * Set input dimension for all the layers.
      * For input layer, as input dimension is known, set input tensor.
      */
     if (!first) {
-      std::string l_pre_type =
-        getSortedLayerNode(idx - 1)->getObject()->getType();
-      if (l_pre_type == TimeDistLayer::type) {
-        l_pre_type = std::dynamic_pointer_cast<TimeDistLayer>(
-                       getSortedLayerNode(idx - 1)->getObject())
-                       ->getDistLayerType();
-      }
+      std::string l_pre_type = getSortedLayerNode(idx - 1)->getType();
 
       if (istrequal(l_pre_type, ActivationLayer::type) &&
           istrequal(cur_type, ActivationLayer::type)) {
diff --git a/nntrainer/graph/network_graph.h b/nntrainer/graph/network_graph.h
index 977d08c..78862b5 100644
--- a/nntrainer/graph/network_graph.h
+++ b/nntrainer/graph/network_graph.h
@@ -323,35 +323,35 @@ private:
 
   /**
    * @brief     check and add Multi Input Layer : addition or concat Layer
-   * @param[in] current layer
+   * @param[in] in_node layer node
    * @retval #ML_ERROR_NONE Successful.
   * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
   */
-  int realizeMultiInputType(Layer &current);
+  int realizeMultiInputType(const std::shared_ptr<LayerNode> &in_node);
 
   /**
    * @brief     check and add Multi output Layer : output Layer
-   * @param[in] current layer
+   * @param[in] in_node layer node
    * @retval #ML_ERROR_NONE Successful.
    * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
    */
-  int realizeMultiOutputType(Layer &current);
+  int realizeMultiOutputType(const std::shared_ptr<LayerNode> &in_node);
 
   /**
    * @brief     Realize act type to layer and insert it to layers
-   * @param[in] current layer
+   * @param[in] in_node layer node
    * @retval #ML_ERROR_NONE Successful.
    * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
    */
-  int realizeActivationType(Layer &current);
+  int realizeActivationType(const std::shared_ptr<LayerNode> &in_node);
 
   /**
    * @brief     Realize flatten type to layer and insert it to layers
-   * @param[in] current layer
+   * @param[in] in_node layer node
    * @retval #ML_ERROR_NONE Successful.
    * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
    */
-  int realizeFlattenType(Layer &current);
+  int realizeFlattenType(const std::shared_ptr<LayerNode> &in_node);
 
   /**
    * @brief     adding loss layer at last position
diff --git a/nntrainer/layers/layer.cpp b/nntrainer/layers/layer.cpp
index ef508cf..57637a5 100644
--- a/nntrainer/layers/layer.cpp
+++ b/nntrainer/layers/layer.cpp
@@ -86,7 +86,6 @@ void Layer::copy(std::shared_ptr<Layer> l) {
   this->weight_regularizer_constant = l->weight_regularizer_constant;
   this->weight_initializer = l->weight_initializer;
   this->trainable = l->trainable;
-  this->distribute = l->distribute;
 }
 
 sharedConstTensors Layer::forwarding_with_val(sharedConstTensors input,
@@ -282,12 +281,6 @@ void Layer::setProperty(const PropertyType type, const std::string &value) {
       throw_status(status);
     }
     break;
-  case PropertyType::distribute:
-    if (!value.empty()) {
-      status = setBoolean(distribute, value);
-      throw_status(status);
-    }
-    break;
   default:
     std::string msg = "[Layer] Unknown Layer Property Key for value " +
                       std::string(value);
@@ -335,7 +328,6 @@ void Layer::printPropertiesMeta(std::ostream &out) {
 
 void Layer::printProperties(std::ostream &out) {
   out << "Trainable: " << trainable << std::endl;
-  out << "Distributed: " << distribute << std::endl;
   printIfValid(out, PropertyType::weight_regularizer,
                static_cast<int>(weight_regularizer));
   printIfValid(out, PropertyType::weight_regularizer_constant,
diff --git a/nntrainer/layers/layer_internal.h b/nntrainer/layers/layer_internal.h
index b271e1e..dcf213c 100644
--- a/nntrainer/layers/layer_internal.h
+++ b/nntrainer/layers/layer_internal.h
@@ -63,8 +63,7 @@ public:
         WeightInitializer weight_initializer_ =
           WeightInitializer::WEIGHT_XAVIER_UNIFORM,
         WeightInitializer bias_initializer_ = WeightInitializer::WEIGHT_ZEROS,
-        bool trainable_ = true, bool flatten_ = false,
-        bool distribute_ = false) :
+        bool trainable_ = true) :
     layer_props(props::Name()),
     loss(0.0f),
     activation_type(activation_type_),
     weight_regularizer(weight_regularizer_),
     weight_regularizer_constant(weight_regularizer_constant_),
     weight_initializer(weight_initializer_),
     bias_initializer(bias_initializer_),
-    trainable(trainable_),
-    distribute(distribute_) {
+    trainable(trainable_) {
     setNumInputs(1);
     setNumOutputs(1);
   }
@@ -297,8 +295,9 @@ public:
   /**
    * @brief     Activation Type Getter
   * @retval    Activation Type.
+   * @todo      This function will soon be removed
    */
-  ActivationType getActivationType() { return this->activation_type; }
+  virtual ActivationType getActivationType() { return this->activation_type; }
 
   /**
    * @brief     Copy Layer
@@ -357,18 +356,6 @@ public:
   virtual bool getTrainable() noexcept { return trainable; }
 
   /**
-   * @brief     set distribute for this layer
-   * @param[in] dist to enable/disable distribute
-   */
-  virtual void setDistribute(bool dist) { distribute = dist; }
-
-  /**
-   * @brief     get distribute for this layer
-   * @retval    dist to enable/disable distribute
-   */
-  virtual bool getDistribute() noexcept { return distribute; }
-
-  /**
    * @brief     get all weights of the layer
    * @retval    vector of all params
    */
@@ -605,6 +592,14 @@ public:
     return output_layers;
   }
 
+  /**
+   * @brief     Activation Setter
+   * @param[in] activation activation type
+   * @throw std::invalid_argument when ActivationType is unknown
+   * @todo      This function will soon be removed
+   */
+  virtual void setActivation(ActivationType activation);
+
 protected:
   /**
    * @brief     Print Options when printing layer info
@@ -670,25 +665,12 @@ protected:
    */
   bool trainable;
 
-  // TODO: remove this from here
-  /**
-   * @brief making this true will iterating along with time distribution
-   */
-  bool distribute;
-
   /**
    * @brief weight_list in this layer. This contains all weights of the
    * layer.
   */
   std::vector<Weight> weights;
 
-  /**
-   * @brief     Activation Setter
-   * @param[in] activation activation type
-   * @throw std::invalid_argument when ActivationType is unknown
-   */
-  virtual void setActivation(ActivationType activation);
-
 private:
   // TODO: remove this from here
   /**
diff --git a/nntrainer/layers/layer_node.cpp b/nntrainer/layers/layer_node.cpp
index 080dae6..24d6cdf 100644
--- a/nntrainer/layers/layer_node.cpp
+++ b/nntrainer/layers/layer_node.cpp
@@ -15,9 +15,20 @@
 #include
 #include
 #include
+#include <time_dist.h>
 
 namespace nntrainer {
 
+LayerNode::LayerNode(std::shared_ptr<Layer> l, size_t idx) :
+  layer(l),
+  index(idx),
+  flatten(false),
+  distribute(false),
+  activation_type(ActivationType::ACT_NONE) {
+  if (layer->getType() == TimeDistLayer::type)
+    distribute = true;
+}
+
 /**
  * @brief Layer factory creator with constructor
  */
@@ -76,21 +87,35 @@ int LayerNode::setProperty(std::vector<std::string> properties) {
     }
   }
 
-  status = layer->setProperty(remainder);
+  status = getLayer()->setProperty(remainder);
   return status;
 }
 
 void LayerNode::setProperty(const nntrainer::Layer::PropertyType type,
                             const std::string &value) {
   int status = ML_ERROR_NONE;
+  using PropertyType = nntrainer::Layer::PropertyType;
 
   switch (type) {
-  case nntrainer::Layer::PropertyType::flatten:
+  case PropertyType::flatten:
     if (!value.empty()) {
       status = setBoolean(flatten, value);
       throw_status(status);
     }
     break;
+  case PropertyType::distribute:
+    if (!value.empty()) {
+      status = setBoolean(distribute, value);
+      throw_status(status);
+      if (distribute) {
+        auto &ac = nntrainer::AppContext::Global();
+        std::shared_ptr<nntrainer::Layer> dlayer =
+          ac.createObject<nntrainer::Layer>(TimeDistLayer::type);
+        std::dynamic_pointer_cast<TimeDistLayer>(dlayer)->setDistLayer(layer);
+        layer = dlayer;
+      }
+    }
+    break;
   default:
     throw std::invalid_argument("Unknown property.");
   }
@@ -115,4 +140,42 @@ std::ostream &operator<<(std::ostream &out, const LayerNode &l) {
   return out;
 }
 
+std::string LayerNode::getDistLayerType() const {
+  if (distribute)
+    return std::dynamic_pointer_cast<TimeDistLayer>(layer)->getDistLayerType();
+  else
+    throw std::runtime_error(
+      "Get distribution layer type for non-distributed layer");
+}
+
+ActivationType LayerNode::getActivationType() {
+  return getLayer()->getActivationType();
+}
+
+const std::string LayerNode::getType() const { return getLayer()->getType(); }
+
+std::shared_ptr<Layer> &LayerNode::getObject() { return getLayer(); }
+
+const std::shared_ptr<Layer> &LayerNode::getObject() const {
+  return getLayer();
+}
+
+bool LayerNode::getTrainable() const noexcept {
+  return getLayer()->getTrainable();
+}
+
+const std::shared_ptr<Layer> &LayerNode::getLayer() const {
+  if (distribute)
+    return std::dynamic_pointer_cast<TimeDistLayer>(layer)->getDistLayer();
+  else
+    return layer;
+}
+
+std::shared_ptr<Layer> &LayerNode::getLayer() {
+  if (distribute)
+    return std::dynamic_pointer_cast<TimeDistLayer>(layer)->getDistLayer();
+  else
+    return layer;
+}
+
 }; // namespace nntrainer
diff --git a/nntrainer/layers/layer_node.h b/nntrainer/layers/layer_node.h
index 9bc7f9c..6073441 100644
--- a/nntrainer/layers/layer_node.h
+++ b/nntrainer/layers/layer_node.h
@@ -36,11 +36,7 @@ public:
    * @brief     Constructor of LayerNode class
    *
    */
-  LayerNode(std::shared_ptr<Layer> l, size_t idx = 0) :
-    layer(l),
-    index(idx),
-    flatten(false),
-    activation_type(ActivationType::ACT_NONE) {}
+  LayerNode(std::shared_ptr<Layer> l, size_t idx = 0);
 
   /**
    * @brief     Destructor of LayerNode Class
@@ -62,7 +58,7 @@ public:
    *
    * @return const std::string type representation
    */
-  const std::string getType() const { return layer->getType(); }
+  const std::string getType() const;
 
   /**
    * @brief     set Property of layer
@@ -92,7 +88,7 @@ public:
    * @note      This name might be changed once this layer is added to the model
    * to keep the name unique to the model
    */
-  const std::string getName() const noexcept { return layer->getName(); }
+  const std::string getName() const noexcept { return getLayer()->getName(); }
 
   /**
    * @brief     Get name of the layer
@@ -102,7 +98,7 @@ public:
    * @note      This name might be changed once this layer is added to the model
    * to keep the name unique to the model
   */
-  std::string getName() noexcept { return layer->getName(); }
+  std::string getName() noexcept { return getLayer()->getName(); }
 
   /**
    * Support all the interface requirements by GraphNode
@@ -112,26 +108,26 @@ public:
    * @brief     Get underlying object
    *
    */
-  std::shared_ptr<Layer> &getObject() { return layer; }
+  std::shared_ptr<Layer> &getObject();
 
   /**
    * @brief     Get underlying object
    *
    */
-  const std::shared_ptr<Layer> &getObject() const { return layer; }
+  const std::shared_ptr<Layer> &getObject() const;
 
   /**
    * @brief     Get index of the node
    *
    */
-  size_t getIndex() { return index; }
+  size_t getIndex() const { return index; }
 
   /**
    * @brief     Get the trainable property of the underlying object
    *
    * @return boolean true if trainable, else false
    */
-  bool getTrainable() noexcept { return layer->getTrainable(); }
+  bool getTrainable() const noexcept;
 
   /**
    * Support interfaces for the properties intercepted from layer
@@ -141,7 +137,25 @@ public:
    * @brief     get if the output of this layer must be flatten
    * @retval    flatten value
    */
-  bool getFlatten() { return flatten; }
+  bool getFlatten() const { return flatten; }
+
+  /**
+   * @brief     get distribute for this layer
+   * @retval    dist to enable/disable distribute
+   */
+  bool getDistribute() const noexcept { return distribute; }
+
+  /**
+   * @brief     get the type of the layer wrapped by the distribute layer
+   * @retval    type of the distributed layer
+   */
+  std::string getDistLayerType() const;
+
+  /**
+   * @brief     Activation Type Getter
+   * @retval    Activation Type.
+   */
+  ActivationType getActivationType();
 
 #ifdef PROFILE
   int event_key;
@@ -160,10 +174,10 @@ private:
   std::vector<std::string> input_layers;  /**< input layer names */
   std::vector<std::string> output_layers; /**< output layer names */
 
-  bool flatten; /**< flatten the output of this node */
+  bool flatten;    /**< flatten the output of this node */
+  bool distribute; /**< to enable iterating along with time distribution */
   ActivationType
     activation_type; /**< activation applied to the output of this node */
-  bool distribute;
 
   /**
    * These properties are set for the layer by the user but are intercepted
@@ -181,8 +195,24 @@ private:
    * the particular layer
    * @exception std::invalid_argument invalid argument
    */
-  virtual void setProperty(const nntrainer::Layer::PropertyType type,
-                           const std::string &value = "");
+  void setProperty(const nntrainer::Layer::PropertyType type,
+                   const std::string &value = "");
+
+  /**
+   * @brief     Get the effective layer managed by this layer node
+   *
+   * @details   this is the layer inside the distribution layer if this layer
+   * node is distributed.
+   */
+  const std::shared_ptr<Layer> &getLayer() const;
+
+  /**
+   * @brief     Get the effective layer managed by this layer node
+   *
+   * @details   this is the layer inside the distribution layer if this layer
+   * node is distributed.
+   */
+  std::shared_ptr<Layer> &getLayer();
 };
 
 /**
diff --git a/nntrainer/layers/time_dist.h b/nntrainer/layers/time_dist.h
index 98841e2..bb76f92 100644
--- a/nntrainer/layers/time_dist.h
+++ b/nntrainer/layers/time_dist.h
@@ -95,7 +95,7 @@ public:
    * @brief     get distribute layer
    * @retval    dist_layer std::shared_ptr<Layer>
    */
-  std::shared_ptr<Layer> getDistLayer() { return dist_layer; };
+  std::shared_ptr<Layer> &getDistLayer() { return dist_layer; };
 
   /**
    * @brief     get transposed Tensor according to time iteration axis
@@ -126,6 +126,10 @@ public:
    */
   void setProperty(const PropertyType type,
                    const std::string &value = "") override {
+    /**
+     * @note assumption: name of the dist_layer is set via setName() and not
+     * with setProperty()
+     */
     dist_layer->setProperty(type, value);
   }
diff --git a/test/unittest/unittest_nntrainer_layers.cpp b/test/unittest/unittest_nntrainer_layers.cpp
index a9ab750..69c77ba 100644
--- a/test/unittest/unittest_nntrainer_layers.cpp
+++ b/test/unittest/unittest_nntrainer_layers.cpp
@@ -24,6 +24,7 @@
 #include
 #include
 #include
+#include <layer_node.h>
 #include
 #include
 #include
@@ -139,7 +140,6 @@ protected:
                    const nntrainer::TensorDim &dim) {
     nntrainer::Tensor golden(dim);
     loadFile(expected, golden);
-    /** FIXME: golden.length() is possibly 0 many times, verify and fix this */
     matchOutput(result, golden.getData(), golden.length());
   }
 
@@ -713,14 +713,6 @@ TEST_F(nntrainer_FullyConnectedLayer, setActivation_02_n) {
 }
 
 /**
- * @brief Fully Connected Layer
- */
-TEST_F(nntrainer_FullyConnectedLayer, setDistribute_01_p) {
-  status = layer.setProperty({"distribute=true"});
-  EXPECT_EQ(status, ML_ERROR_NONE);
-}
-
-/**
  * @brief FullyConnected Layer
  */
 TEST_F(nntrainer_FullyConnectedLayer, checkValidation_01_p) {
@@ -2636,6 +2628,33 @@ TEST_F(nntrainer_SplitLayer, forwarding_backwarding_01_p) {
 }
 
 /**
+ * @brief Layer Node
+ */
+TEST(nntrainer_LayerNode, setDistribute_01_p) {
+  int status = ML_ERROR_NONE;
+
+  auto lnode = nntrainer::createLayerNode(nntrainer::FullyConnectedLayer::type);
+
+  EXPECT_EQ(false, lnode->getDistribute());
+
+  status = lnode->setProperty({"distribute=true"});
+  EXPECT_EQ(status, ML_ERROR_NONE);
+
+  EXPECT_EQ(true, lnode->getDistribute());
+}
+
+/**
+ * @brief Layer Node
+ */
+TEST(nntrainer_LayerNode, setFlatten_01_p) {
+  int status = ML_ERROR_NONE;
+
+  auto lnode = nntrainer::createLayerNode(nntrainer::FullyConnectedLayer::type);
+  status = lnode->setProperty({"flatten=true"});
+  EXPECT_EQ(status, ML_ERROR_NONE);
+}
+
+/**
  * @brief Main gtest
  */
 int main(int argc, char **argv) {
-- 
2.7.4
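Postscript (a sketch, not part of the patch): the observable effect of the
internal getLayer() unwrapping, assuming, as the pre-existing code in this
diff does, that TimeDistLayer::getDistLayerType() reports the wrapped
layer's type:

    auto lnode =
      nntrainer::createLayerNode(nntrainer::FullyConnectedLayer::type);
    lnode->setProperty({"distribute=true"});

    // getType() forwards through the private getLayer() to the wrapped
    // layer, so the node reports the inner type, not TimeDistLayer::type.
    assert(lnode->getType() == nntrainer::FullyConnectedLayer::type);

    // getDistLayerType() names the wrapped layer explicitly; it throws
    // std::runtime_error when the node is not distributed.
    assert(lnode->getDistLayerType() == nntrainer::FullyConnectedLayer::type);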