From 41e5355d0b0d83bc5f2f0a00b1619f974081fd1a Mon Sep 17 00:00:00 2001
From: Jihoon Lee
Date: Thu, 24 Sep 2020 09:52:18 +0900
Subject: [PATCH] Add print preset

**Changes proposed in this PR:**
- Add print preset for model and layer
- Add model print flags
- Move the print flag enums and print(out, flags) out of the public API

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Cc: Parichay Kapoor
Signed-off-by: Jihoon Lee
---
 api/capi/src/nntrainer.cpp                  |   2 +-
 nntrainer/include/layer.h                   |  53 +++++++++------
 nntrainer/include/neuralnet.h               |  34 ++++++++--
 nntrainer/src/layer.cpp                     |  20 ++++++
 nntrainer/src/neuralnet.cpp                 | 100 ++++++++++++++++------------
 test/unittest/unittest_nntrainer_layers.cpp |   8 +--
 6 files changed, 140 insertions(+), 77 deletions(-)

diff --git a/api/capi/src/nntrainer.cpp b/api/capi/src/nntrainer.cpp
index 7f32f47..15a5770 100644
--- a/api/capi/src/nntrainer.cpp
+++ b/api/capi/src/nntrainer.cpp
@@ -314,7 +314,7 @@ static int ml_train_model_get_summary_util(ml_train_model_h model,
   }
 
   returnable f = [&]() {
-    NN->print(ss, verbosity);
+    NN->printPreset(ss, verbosity);
     return ML_ERROR_NONE;
   };
 
diff --git a/nntrainer/include/layer.h b/nntrainer/include/layer.h
index a2233d6..ee6b4f9 100644
--- a/nntrainer/include/layer.h
+++ b/nntrainer/include/layer.h
@@ -63,26 +63,19 @@ enum class LayerType {
 };
 
 /**
- * @brief     Print Options when printing layer info
- */
-typedef enum {
-  // clang-format off
-  PRINT_INST_INFO  = (1 << 0), /**< Option to print type & instance address info */
-  PRINT_SHAPE_INFO = (1 << 1), /**< Option to print shape information, invalid before initiation*/
-  PRINT_PROP       = (1 << 2), /**< Option to print properties */
-  PRINT_PROP_META  = (1 << 3), /**< Option to print properties that describe meta info
-                                    e.g) layer activation type for non-activation layer. */
-  PRINT_WEIGHTS    = (1 << 4), /**< Option to print weights */
-  PRINT_METRIC     = (1 << 5)  /**< Option to print metrics (currently loss only) */
-  // clang-format on
-} LayerPrintOption;
-
-/**
  * @class   Layer Base class for layers
  * @brief   Base class for all layers
  */
 class Layer {
 public:
+  enum class PrintPreset {
+    PRINT_NONE = 0,     /**< Print nothing */
+    PRINT_SUMMARY,      /**< Print preset including summary information */
+    PRINT_SUMMARY_META, /**< Print summary preset that includes meta information */
+    PRINT_ALL           /**< Print everything possible */
+  };
+
   Layer() :
     name(std::string()),
     type(LayerType::LAYER_UNKNOWN),
@@ -366,6 +359,15 @@ public:
   virtual std::string getBaseName() = 0;
 
   /**
+   * @brief     print using PrintPreset
+   *
+   * @param out outstream
+   * @param preset preset to be used
+   */
+  void printPreset(std::ostream &out,
+                   PrintPreset preset = PrintPreset::PRINT_SUMMARY);
+
+  /**
    * @brief     Print layer related information. Do not override without clear
    * reason. It is recommended to override printShapeInfo, printPropertiesMeta,
    * printProperties, printMetric instead
@@ -388,6 +390,21 @@ public:
 
 protected:
   /**
+   * @brief     Print Options when printing layer info
+   */
+  typedef enum {
+    // clang-format off
+    PRINT_INST_INFO  = (1 << 0), /**< Option to print type & instance address info */
+    PRINT_SHAPE_INFO = (1 << 1), /**< Option to print shape information, invalid before initialization */
+    PRINT_PROP       = (1 << 2), /**< Option to print properties */
+    PRINT_PROP_META  = (1 << 3), /**< Option to print properties that describe meta info
+                                      e.g) layer activation type for non-activation layer. */
+    PRINT_WEIGHTS    = (1 << 4), /**< Option to print weights */
+    PRINT_METRIC     = (1 << 5)  /**< Option to print metrics (currently loss only) */
+    // clang-format on
+  } PrintOption;
+
+  /**
    * @brief     Name of the layer (works as the identifier)
    */
   std::string name;
@@ -547,11 +564,7 @@ private:
 template <typename T, typename std::enable_if_t<std::is_base_of<Layer, T>::value, T> * = nullptr>
 std::ostream &operator<<(std::ostream &out, T &l) {
-  unsigned int option = nntrainer::LayerPrintOption::PRINT_INST_INFO |
-                        nntrainer::LayerPrintOption::PRINT_SHAPE_INFO |
-                        nntrainer::LayerPrintOption::PRINT_PROP |
-                        nntrainer::LayerPrintOption::PRINT_PROP_META;
-  l.print(out, option);
+  l.print(out, Layer::PrintPreset::PRINT_SUMMARY);
   return out;
 }
 
diff --git a/nntrainer/include/neuralnet.h b/nntrainer/include/neuralnet.h
index 4035c68..139de19 100644
--- a/nntrainer/include/neuralnet.h
+++ b/nntrainer/include/neuralnet.h
@@ -36,6 +36,7 @@
 #include
 #include
 #include
+#include <layer.h>
 #include
 #include
 #include
@@ -272,12 +273,12 @@ public:
   };
 
   /**
-   * @brief     print function for neuralnet
-   * @param[in] out outstream
-   * @param[in] flags verbosity from ml_train_summary_type_e
+   * @brief     Print model info with the given preset. The function delegates
+   * to `print`.
+   * @param out std::ostream to print to
+   * @param preset preset from `ml_train_summary_type_e`
    */
-  /// @todo: change print to use NeuralNetPrintOption and add way to print out
-  void print(std::ostream &out, unsigned int flags = 0);
+  void printPreset(std::ostream &out, unsigned int preset);
 
   /**
    * @brief     print metrics function for neuralnet
@@ -287,6 +288,19 @@
   void printMetrics(std::ostream &out, unsigned int flags = 0);
 
 private:
+  /**
+   * @brief     Print Options when printing model info
+   */
+  typedef enum {
+    // clang-format off
+    PRINT_INST_INFO  = (1 << 0), /**< Option to print type & instance address info */
+    PRINT_GRAPH_INFO = (1 << 1), /**< Option to print graph topology info */
+    PRINT_PROP       = (1 << 2), /**< Option to print properties */
+    PRINT_OPTIMIZER  = (1 << 3), /**< Option to print optimizer */
+    PRINT_METRIC     = (1 << 4), /**< Option to print metrics (only meaningful when the network is set to training) */
+    // clang-format on
+  } PrintOption;
+
   bool is_train; /**< is train or inference */
 
   unsigned int batch_size; /**< batch size */
@@ -330,6 +344,16 @@ private:
   RunStats testing; /** testing statistics of the model */
 
   /**
+   * @brief     print function for neuralnet
+   * @param[in] out outstream
+   * @param[in] flags bit combination of NeuralNetwork::PrintOption
+   * @param[in] layerPrintPreset preset to use when printing layer properties
+   */
+  void print(
+    std::ostream &out, unsigned int flags = 0,
+    Layer::PrintPreset layerPrintPreset = Layer::PrintPreset::PRINT_SUMMARY);
+
+  /**
    * @brief     Sets up and initialize the loss layer
    */
   int initLossLayer();
diff --git a/nntrainer/src/layer.cpp b/nntrainer/src/layer.cpp
index 7f1a898..09557dd 100644
--- a/nntrainer/src/layer.cpp
+++ b/nntrainer/src/layer.cpp
@@ -230,6 +230,26 @@ void Layer::printMetric(std::ostream &out) {
   }
 }
 
+void Layer::printPreset(std::ostream &out, PrintPreset preset) {
+  unsigned int flags = 0;
+  switch (preset) {
+  case PrintPreset::PRINT_ALL:
+    flags = PRINT_WEIGHTS | PRINT_METRIC;
+    /// fall through intended
+  case PrintPreset::PRINT_SUMMARY_META:
+    flags |= PRINT_PROP_META;
+    /// fall through intended
+  case PrintPreset::PRINT_SUMMARY:
+    flags |= PRINT_INST_INFO | PRINT_SHAPE_INFO | PRINT_PROP;
+    break;
+  case PrintPreset::PRINT_NONE:
+    return;
+  default:
+    throw ::std::invalid_argument("undefined preset given");
+  }
+  print(out, flags);
+}
+
 void Layer::print(std::ostream &out, unsigned int flags) {
   if (flags & PRINT_INST_INFO) {
     out << "===================";
diff --git a/nntrainer/src/neuralnet.cpp b/nntrainer/src/neuralnet.cpp
index 8e34598..c748f64 100644
--- a/nntrainer/src/neuralnet.cpp
+++ b/nntrainer/src/neuralnet.cpp
@@ -711,37 +711,7 @@ int NeuralNetwork::setLoss(LossType loss_type) {
   return ML_ERROR_NONE;
 }
 
-static unsigned int getLayerFlag(ml_train_summary_type_e verbosity,
-                                 bool initialized = false) {
-  unsigned int flag = 0;
-
-  switch (verbosity) {
-  case ML_TRAIN_SUMMARY_TENSOR:
-    flag |= LayerPrintOption::PRINT_WEIGHTS;
-    /// no break intended
-
-  case ML_TRAIN_SUMMARY_LAYER:
-    if (!initialized)
-      flag |= LayerPrintOption::PRINT_PROP_META;
-    else
-      flag |= LayerPrintOption::PRINT_METRIC;
-    flag |= LayerPrintOption::PRINT_PROP;
-    /// no break intended
-
-  case ML_TRAIN_SUMMARY_MODEL:
-    flag |=
-      LayerPrintOption::PRINT_INST_INFO | LayerPrintOption::PRINT_SHAPE_INFO;
-    break;
-
-  default:
-    throw std::invalid_argument("given verbosity is invalid");
-  }
-
-  return flag;
-}
-
 void NeuralNetwork::printMetrics(std::ostream &out, unsigned int flags) {
-
   switch (flags) {
   case ML_TRAIN_SUMMARY_MODEL_TRAIN_LOSS:
     out << training.loss << std::endl;
     break;
@@ -760,28 +730,70 @@ void NeuralNetwork::printMetrics(std::ostream &out, unsigned int flags) {
   }
 }
 
-void NeuralNetwork::print(std::ostream &out, unsigned int flags) {
-  /// @todo print neuralnet property
-  /// @todo print optimizer (with print optimizer prop)
-  /// @todo print loss function when it is not initialized. (if it is
-  /// initialized, loss layer will be printed)
-
+void NeuralNetwork::printPreset(std::ostream &out, unsigned int preset) {
   /** print neuralnet metrics */
-  printMetrics(out, flags);
-  if (flags > ML_TRAIN_SUMMARY_TENSOR)
+  printMetrics(out, preset);
+  if (preset > ML_TRAIN_SUMMARY_TENSOR)
     return;
 
-  /** print layer properties */
+  Layer::PrintPreset layer_preset = Layer::PrintPreset::PRINT_NONE;
+
+  /// @todo match flags with preset
+  unsigned int flags = PRINT_INST_INFO | PRINT_GRAPH_INFO | PRINT_PROP |
+                       PRINT_OPTIMIZER | PRINT_METRIC;
+
+  switch (preset) {
+  case ML_TRAIN_SUMMARY_TENSOR:
+    layer_preset = Layer::PrintPreset::PRINT_ALL;
+    break;
+  case ML_TRAIN_SUMMARY_LAYER:
+    layer_preset = initialized ? Layer::PrintPreset::PRINT_SUMMARY
+                               : Layer::PrintPreset::PRINT_SUMMARY_META;
+    break;
+  case ML_TRAIN_SUMMARY_MODEL:
+    break;
+  default:
+    throw std::invalid_argument("given verbosity is invalid");
+  }
+
+  print(out, flags, layer_preset);
+}
+
+void NeuralNetwork::print(std::ostream &out, unsigned int flags,
+                          Layer::PrintPreset layerPrintPreset) {
+  if (flags & PRINT_INST_INFO) {
+    out << "===================";
+    printInstance(out, this);
+  }
+
+  if (flags & PRINT_GRAPH_INFO) {
+    out << "graph contains " << layers.size() << " operation nodes\n";
+    /// @todo print graph info
+  }
+
+  if (flags & PRINT_PROP) {
+    /// @todo print neuralnet property
+    /// @todo print mode (if it is eval or training)
+  }
+
+  if (flags & PRINT_OPTIMIZER) {
+    /// @todo print optimizer (with print optimizer prop)
+  }
+
+  if (flags & PRINT_METRIC) {
+    /// @todo print metric (currently it is done at printPreset as a workaround)
+    /// @todo print loss function when it is not initialized. (if it is
+    /// initialized, loss layer will be printed)
+  }
+
   if (layers.empty()) {
     out << "model is empty!" << std::endl;
     return;
   }
 
-  unsigned int layerFlag =
-    getLayerFlag((ml_train_summary_type_e)flags, initialized);
-  for (auto &layer : layers) {
-    layer->print(out, layerFlag);
-  }
+  /** print layer properties */
+  for (auto &layer : layers)
+    layer->printPreset(out, layerPrintPreset);
 
   /// @todo Add status to check neuralnet has been run. #290
 }
diff --git a/test/unittest/unittest_nntrainer_layers.cpp b/test/unittest/unittest_nntrainer_layers.cpp
index 97678b6..3ba432b 100644
--- a/test/unittest/unittest_nntrainer_layers.cpp
+++ b/test/unittest/unittest_nntrainer_layers.cpp
@@ -968,13 +968,7 @@ protected:
 
 TEST_F(nntrainer_Conv2DLayer, print_01_p) {
   std::stringstream ss;
-  unsigned int option = nntrainer::LayerPrintOption::PRINT_INST_INFO |
-                        nntrainer::LayerPrintOption::PRINT_SHAPE_INFO |
-                        nntrainer::LayerPrintOption::PRINT_PROP |
-                        nntrainer::LayerPrintOption::PRINT_PROP_META |
-                        nntrainer::LayerPrintOption::PRINT_WEIGHTS |
-                        nntrainer::LayerPrintOption::PRINT_METRIC;
-  layer.print(ss, option);
+  layer.printPreset(ss, nntrainer::Layer::PrintPreset::PRINT_ALL);
  EXPECT_GT(ss.str().size(), 100);
 }
 
-- 
2.7.4
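For reviewers, a minimal usage sketch of the API after this patch, not part of the patch itself. Only `printPreset` on both classes comes from this change; the header paths, the default-constructed `FullyConnectedLayer`, and the `main` harness are illustrative assumptions based on the existing nntrainer headers.

```cpp
// Illustrative sketch only -- not part of the patch.
#include <iostream>
#include <sstream>

#include <fc_layer.h>  // assumed path for nntrainer::FullyConnectedLayer
#include <neuralnet.h> // nntrainer::NeuralNetwork; pulls in layer.h per this patch

int main() {
  // Layer side: a preset replaces the old hand-assembled LayerPrintOption
  // mask (mirrors the updated Conv2D unittest, which uses PRINT_ALL).
  nntrainer::FullyConnectedLayer fc; // assumed default-constructible
  std::stringstream ss;
  fc.printPreset(ss, nntrainer::Layer::PrintPreset::PRINT_SUMMARY_META);
  std::cout << ss.str();

  // Model side: printPreset() takes the ml_train_summary_type_e verbosity,
  // maps it to NeuralNetwork::PrintOption bits plus a Layer::PrintPreset,
  // and delegates to the now-private print(). An empty model just prints
  // "model is empty!".
  nntrainer::NeuralNetwork model;
  model.printPreset(std::cout, ML_TRAIN_SUMMARY_MODEL);

  return 0;
}
```

This mirrors the capi path, where ml_train_model_get_summary_util() now forwards the user-supplied verbosity straight to NeuralNetwork::printPreset(), so the bit-flag enums no longer need to be part of the public surface.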