From c8b8327ac731adacecd7e9937b3612cd51476da9 Mon Sep 17 00:00:00 2001
From: Jihoon Lee <jhoon.it.lee@samsung.com>
Date: Mon, 12 Oct 2020 17:18:09 +0900
Subject: [PATCH] [IntegratedTest] Add methods and types for tests

**Changes proposed in this PR:**
- Open up getter for layer::num_weights and neuralnet flatGraph
- Use getOutputDimension for some codes
- Add types for future compatibility

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
---
 nntrainer/include/layer.h     |  7 +++++++
 nntrainer/include/neuralnet.h | 23 ++++++++++++++++++-----
 nntrainer/src/neuralnet.cpp   | 17 +++++++----------
 3 files changed, 32 insertions(+), 15 deletions(-)

diff --git a/nntrainer/include/layer.h b/nntrainer/include/layer.h
index 8345b1b..a93267e 100644
--- a/nntrainer/include/layer.h
+++ b/nntrainer/include/layer.h
@@ -453,6 +453,13 @@ protected:
   }
 
   /**
+   * @brief Get the number of weights
+   *
+   * @return unsigned int number of weights
+   */
+  unsigned int getNumWeights() { return num_weights; }
+
+  /**
    * @brief weight_list in this layer. This contains trainable weights of
    * layers.
    */
diff --git a/nntrainer/include/neuralnet.h b/nntrainer/include/neuralnet.h
index eb0c894..5d0b1e1 100644
--- a/nntrainer/include/neuralnet.h
+++ b/nntrainer/include/neuralnet.h
@@ -71,6 +71,11 @@ class NeuralNetwork {
   friend class ModelLoader; /** access private members of ModelLoader */
 
 public:
+  using NodeType = std::shared_ptr<Layer>; /** Type of a Node */
+  using GraphType = std::vector<NodeType>; /** actual graph type */
+  using FlatGraphType =
+    std::vector<NodeType>; /** topological sorted, iterable 1-D list of nodes */
+
   /**
    * @brief Constructor of NeuralNetwork Class
    */
@@ -220,10 +225,11 @@ public:
 
   /**
    * @brief add layer into neural network model
+   * @param[in] layer layer to add
    * @retval #ML_ERROR_NONE Successful.
    * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
    */
-  int addLayer(std::shared_ptr<Layer> layer);
+  int addLayer(NodeType layer);
 
   /**
    * @brief set optimizer for the neural network model
@@ -239,7 +245,7 @@ public:
    * @retval #ML_ERROR_NONE Successful.
    * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
    */
-  int getLayer(const char *name, std::shared_ptr<Layer> *layer);
+  int getLayer(const char *name, NodeType *layer);
 
   /*
    * @brief get input dimension of neural network
@@ -254,6 +260,14 @@ public:
   TensorDim getOutputDimension() { return layers.back()->getOutputDimension(); }
 
   /**
+   * @brief get FlatGraph of current graph
+   * @note flat graph contains pointer to the actual nodes, which is not deeply
+   * copied.
+   * @retval flatGraph of the current graph
+   */
+  FlatGraphType getFlatGraph() { return layers; }
+
+  /**
    * @brief Set loss type for the neural network.
    * @param[in] loss Type of the loss.
    * @retval #ML_ERROR_NONE Successful.
@@ -310,8 +324,7 @@ private:
 
   NetType net_type; /**< Network Type */
 
-  std::vector<std::shared_ptr<Layer>>
-    layers; /**< vector for store layer pointers */
+  GraphType layers; /**< vector for store layer pointers */
 
   std::shared_ptr<DataBuffer> data_buffer; /**< Data Buffer to get Input */
 
@@ -401,7 +414,7 @@ private:
   /**
    * @brief Ensure that layer has a name
    */
-  void ensureName(std::shared_ptr<Layer> layer, std::string prefix = "");
+  void ensureName(NodeType layer, const std::string &prefix = "");
 
   /**
    * @brief Swap function for the class
diff --git a/nntrainer/src/neuralnet.cpp b/nntrainer/src/neuralnet.cpp
index 1a4a27a..3c5b50b 100644
--- a/nntrainer/src/neuralnet.cpp
+++ b/nntrainer/src/neuralnet.cpp
@@ -79,7 +79,7 @@ int NeuralNetwork::initLossLayer() {
     return ML_ERROR_NOT_SUPPORTED;
   }
 
-  std::shared_ptr<Layer> act_layer = layers.back();
+  NodeType act_layer = layers.back();
   layers.pop_back();
 
   switch (act_layer->getActivationType()) {
@@ -98,7 +98,7 @@ int NeuralNetwork::initLossLayer() {
   std::shared_ptr<LossLayer> loss_layer = std::make_shared<LossLayer>();
   ensureName(loss_layer);
 
-  loss_layer->setInputDimension(layers.back()->getOutputDimension());
+  loss_layer->setInputDimension(getOutputDimension());
   status = loss_layer->initialize();
   NN_RETURN_STATUS();
 
@@ -411,8 +411,7 @@ int NeuralNetwork::train(std::vector<std::string> values) {
   setBatchSize(batch_size);
 
   /** Setup data buffer properties */
-  status =
-    data_buffer->setClassNum(layers.back()->getOutputDimension().width());
+  status = data_buffer->setClassNum(getOutputDimension().width());
   NN_RETURN_STATUS();
 
   status = data_buffer->setFeatureSize(layers[0]->getInputDimension());
@@ -449,8 +448,7 @@ int NeuralNetwork::train_run() {
   int count = 0;
 
   sharedTensor in = MAKE_SHARED_TENSOR(getInputDimension());
-  sharedTensor label =
-    MAKE_SHARED_TENSOR(layers.back()->getOutputDimension());
+  sharedTensor label = MAKE_SHARED_TENSOR(getOutputDimension());
 
   while (true) {
     if (data_buffer->getDataFromBuffer(nntrainer::BUF_TRAIN, in->getData(),
@@ -558,7 +556,7 @@ int NeuralNetwork::isInitializable() {
   return ML_ERROR_NONE;
 }
 
-int NeuralNetwork::addLayer(std::shared_ptr<Layer> layer) {
+int NeuralNetwork::addLayer(NodeType layer) {
   int status = ML_ERROR_NONE;
 
   if (initialized) {
@@ -608,8 +606,7 @@ int NeuralNetwork::setDataBuffer(std::shared_ptr<DataBuffer> data_buffer) {
   return ML_ERROR_NONE;
 }
 
-void NeuralNetwork::ensureName(std::shared_ptr<Layer> layer,
-                               std::string prefix) {
+void NeuralNetwork::ensureName(NodeType layer, const std::string &prefix) {
   if (layer->getName().empty()) {
     std::set<std::string>::iterator iter;
     std::string name;
@@ -623,7 +620,7 @@ void NeuralNetwork::ensureName(std::shared_ptr<Layer> layer,
   }
 }
 
-int NeuralNetwork::getLayer(const char *name, std::shared_ptr<Layer> *layer) {
+int NeuralNetwork::getLayer(const char *name, NodeType *layer) {
   int status = ML_ERROR_INVALID_PARAMETER;
   std::string name_str(name);
 
-- 
2.7.4