[Save] Prepare to save ini
author    Jihoon Lee <jhoon.it.lee@samsung.com>
Tue, 24 Aug 2021 12:48:53 +0000 (21:48 +0900)
committer Jijoong Moon <jijoong.moon@samsung.com>
Fri, 27 Aug 2021 09:20:23 +0000 (18:20 +0900)
This patch adds a skeleton for saving a model to the INI format.
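
Once applied, a compiled model can be written back out through the ccapi.
A minimal usage sketch, mirroring the unit test added below (file names
illustrative, status checks omitted):

```cpp
#include <model.h> // nntrainer ccapi

auto model = ml::train::createModel(ml::train::ModelType::NEURAL_NET);
model->loadFromConfig("simple.ini");
model->compile();
model->initialize();
// serialize the compiled graph back to INI, one section per layer node
model->save("simple_saved.ini", ml::train::ModelFormat::MODEL_FORMAT_INI);
```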

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
nntrainer/compiler/ini_interpreter.cpp
nntrainer/compiler/ini_interpreter.h
nntrainer/compiler/interpreter.h
nntrainer/compiler/tflite_interpreter.cpp
nntrainer/compiler/tflite_interpreter.h
nntrainer/layers/layer_node.cpp
nntrainer/layers/layer_node.h
nntrainer/models/neuralnet.cpp
test/ccapi/unittest_ccapi.cpp
test/unittest/unittest_base_properties.cpp

diff --git a/nntrainer/compiler/ini_interpreter.cpp b/nntrainer/compiler/ini_interpreter.cpp
index edeb936e93e29d6847808b6566ad72e66998c7f8..1d46d69368c498d81c32e928b9000ffced9fc998 100644
@@ -252,12 +252,11 @@ getMergeableGraph(std::shared_ptr<const GraphRepresentation> graph,
 
 } // namespace
 
-void IniGraphInterpreter::serialize(
-  std::shared_ptr<const GraphRepresentation> representation,
-  const std::string &out) {
+void IniGraphInterpreter::serialize(const GraphRepresentation &representation,
+                                    const std::string &out) {
 
   std::vector<IniSection> sections;
-  for (auto iter = representation->cbegin(); iter != representation->cend();
+  for (auto iter = representation.cbegin(); iter != representation.cend();
        iter++) {
     const auto &ln = *iter;
 
diff --git a/nntrainer/compiler/ini_interpreter.h b/nntrainer/compiler/ini_interpreter.h
index 1cfc19dbb160176a97343ac6a4930ae66f2a173b..9a917d19656b31df3f08787b7200e884a26a6ed9 100644
@@ -48,9 +48,10 @@ public:
   virtual ~IniGraphInterpreter(){};
 
   /**
-   * @copydoc GraphInterpreter::serialize(const std::string &out)
+   * @copydoc GraphInterpreter::serialize(const GraphRepresentation
+   * representation, const std::string &out)
    */
-  void serialize(std::shared_ptr<const GraphRepresentation> representation,
+  void serialize(const GraphRepresentation &representation,
                  const std::string &out) override;
 
   /**
diff --git a/nntrainer/compiler/interpreter.h b/nntrainer/compiler/interpreter.h
index 23487ca69e7e7e2f30ae57daf77f6e1e10e3b1f5..aa391b24413e9423fd9c936124bcdf6b803e38aa 100644
@@ -61,9 +61,8 @@ public:
    * @param representation graph representation
    * @param out output file name
    */
-  virtual void
-  serialize(std::shared_ptr<const GraphRepresentation> representation,
-            const std::string &out) = 0;
+  virtual void serialize(const GraphRepresentation &representation,
+                         const std::string &out) = 0;
 
   /**
    * @brief deserialize graph from a stream
diff --git a/nntrainer/compiler/tflite_interpreter.cpp b/nntrainer/compiler/tflite_interpreter.cpp
index c6539156e2cba1180ffb8a4134e9dcb6c02ede6e..5f08359c8fd2e5f561060f0e9c4f29e05c7edd2a 100644
@@ -236,12 +236,11 @@ private:
   std::vector<int> outputs;
 };
 
-TfOpNodes
-buildOpNodes(std::shared_ptr<const GraphRepresentation> representation) {
+TfOpNodes buildOpNodes(const GraphRepresentation &representation) {
   TfOpNodes nodes;
   /// @todo, look ahead of layers to get nodes that can be fused
   /// we will need to have a dedicated builder
-  for (auto iter = representation->cbegin(); iter != representation->cend();
+  for (auto iter = representation.cbegin(); iter != representation.cend();
        iter++) {
     const auto &ln = *iter;
     Exporter e;
@@ -429,9 +428,8 @@ buildSubGraphs(const TfOpNodes &nodes, const TfOpIdxMap &map,
 
 } // namespace
 
-void TfliteInterpreter::serialize(
-  std::shared_ptr<const GraphRepresentation> representation,
-  const std::string &out) {
+void TfliteInterpreter::serialize(const GraphRepresentation &representation,
+                                  const std::string &out) {
   /// @todo check if graph is finalized & initialized and ready to serialize.
   /// 1. The graph must have weights, input dims, output dims set
   flatbuffers::FlatBufferBuilder fbb;
diff --git a/nntrainer/compiler/tflite_interpreter.h b/nntrainer/compiler/tflite_interpreter.h
index 9ab5fee25725bc2a8abb8502561a057cc12057a2..1a91f8d1cb967d40cb9fdc4152b3f9f498ab10a3 100644
@@ -40,7 +40,7 @@ public:
   /**
    * @copydoc GraphInterpreter::serialize(const std::string &out)
    */
-  void serialize(std::shared_ptr<const GraphRepresentation> representation,
+  void serialize(const GraphRepresentation &representation,
                  const std::string &out) override;
 
   /**
diff --git a/nntrainer/layers/layer_node.cpp b/nntrainer/layers/layer_node.cpp
index b173df0d0f8f73fd003743603d1931dd2dc54445..06113b15bd70240942085177e53817fa02c4fcb8 100644
@@ -71,12 +71,19 @@ public:
 
   /**
    * @brief LossSpec validator
-   *
+   * @todo  detecting when the loss becomes NaN would be useful, but it will
+   * need a dedicated throw
    * @param v float to validate
-   * @retval true if it is greater or equal than 0.0
-   * @retval false if it is samller than 0.0
+   * @retval true always; a nan value is accepted
+   * but logged as a warning
    */
-  bool isValid(const float &v) const override { return !std::isnan(v); }
+  bool isValid(const float &v) const override {
+    if (std::isnan(v)) {
+      ml_logw("loss value is NAN");
+    }
+
+    return true;
+  }
 };
 
 } // namespace props
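
The net effect is that a NaN loss no longer fails validation; it is only
reported. A short sketch of the before/after behavior, assuming (as in
nntrainer's property base) that set() throws std::invalid_argument when
isValid() returns false:

```cpp
#include <cmath>

props::Loss loss;        // node-internal property, see layer_node.h below
loss.set(std::nanf("")); // before this patch: throws, since isValid() was
                         // !std::isnan(v); now: succeeds and logs a warning
```
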
@@ -124,8 +131,8 @@ LayerNode::LayerNode(std::unique_ptr<nntrainer::Layer> &&l) :
   finalized(false),
   activation_type(ActivationType::ACT_NONE),
   layer_node_props(new PropsType(props::Name(), props::Flatten(),
-                                 props::Distribute(), props::Trainable(),
-                                 props::Loss())),
+                                 props::Distribute(), props::Trainable())),
+  loss(new props::Loss()),
   regularization_loss(0.0f),
   exec_order({0, 0, 0}) {
   if (layer && layer->getType() == TimeDistLayer::type) {
@@ -373,8 +380,7 @@ void LayerNode::finalize() {
  * @brief     Forward Propagation of a layer
  */
 void LayerNode::forwarding(bool training) {
-  std::get<props::Loss>(*layer_node_props)
-    .set(run_context.getRegularizationLoss());
+  loss->set(run_context.getRegularizationLoss());
   layer->forwarding(run_context, training);
 }
 
@@ -422,11 +428,9 @@ bool LayerNode::requireLabel() const { return getLayer()->requireLabel(); }
 float LayerNode::getLoss() const {
   /** add loss only for loss layers */
   if (requireLabel())
-    std::get<props::Loss>(*layer_node_props)
-      .set(std::get<props::Loss>(*layer_node_props).get() +
-           run_context.getLoss());
+    loss->set(*loss + run_context.getLoss());
 
-  return std::get<props::Loss>(*layer_node_props).get();
+  return *loss;
 }
 
 /**
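
Note that getLoss() is const yet still updates the loss value. This is
well-formed because const-ness applies to the unique_ptr member itself, not
to its pointee (the same held for the tuple behind layer_node_props). A
standalone illustration of the pattern, with hypothetical names:

```cpp
#include <memory>

struct Node {
  std::unique_ptr<float> loss{new float(0.0f)};

  // const member function: the pointer cannot be reseated,
  // but the pointed-to float stays mutable
  float addLoss(float delta) const {
    *loss += delta;
    return *loss;
  }
};
```
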
diff --git a/nntrainer/layers/layer_node.h b/nntrainer/layers/layer_node.h
index bac8468c0ebbfc30e4781d21f2b7ad28f1c8082d..abb36d37f466f4c26273d8f4b04b56419d272423 100644
@@ -617,12 +617,13 @@ private:
                     properties in the context/graph unless intended. */
 
   using PropsType = std::tuple<props::Name, props::Flatten, props::Distribute,
-                               props::Trainable, props::Loss>;
+                               props::Trainable>;
   /**
    * These properties are set for the layer by the user but are intercepted
    * and used in the node which forms the basic element of the graph.
    */
   std::unique_ptr<PropsType> layer_node_props; /**< properties for the node */
+  std::unique_ptr<props::Loss> loss;           /**< loss */
   float regularization_loss;
   ExecutionOrder exec_order; /**< order/location of execution for this node
                                    in forward and backwarding operations */
diff --git a/nntrainer/models/neuralnet.cpp b/nntrainer/models/neuralnet.cpp
index 141e8482a8a508618036610b8397ecb604a44c93..32bdc456fae1a70d422255a65e8b62944197f513 100644
@@ -27,6 +27,7 @@
 #include <sstream>
 
 #include <databuffer.h>
+#include <ini_interpreter.h>
 #include <model_loader.h>
 #include <neuralnet.h>
 #include <nntrainer_error.h>
@@ -372,8 +373,17 @@ void NeuralNetwork::save(const std::string &file_path,
     model_file.close();
     break;
   }
-  case ml::train::ModelFormat::MODEL_FORMAT_INI:
-    [[fallthrough]]; // NYI
+  case ml::train::ModelFormat::MODEL_FORMAT_INI: {
+    IniGraphInterpreter interpreter;
+
+    /// @note this is to ensure permission checks are done
+    checkedOpenStream<std::ofstream>(file_path, std::ios::out);
+    /// @todo serialize model props
+    /// @todo serialize dataset props
+    /// @todo serialize optimizer props
+    interpreter.serialize(model_graph, file_path);
+    break;
+  }
   default:
     throw nntrainer::exception::not_supported(
       "saving with given format is not supported yet");
diff --git a/test/ccapi/unittest_ccapi.cpp b/test/ccapi/unittest_ccapi.cpp
index 61afb767b669c17c889e31e11ba61a790f469022..a11167c6dd0e589f08d7552005e50999ba9610c4 100644
@@ -405,6 +405,23 @@ TEST(nntrainer_ccapi, train_with_config_02_n) {
   EXPECT_EQ(model->train(), ML_ERROR_INVALID_PARAMETER);
 }
 
+TEST(nntrainer_ccapi, save_ini_p) {
+  std::unique_ptr<ml::train::Model> model;
+  model = ml::train::createModel(ml::train::ModelType::NEURAL_NET);
+  ScopedIni s("simple_ini", {model_base + "batch_size = 16", optimizer,
+                             dataset + "-BufferSize", inputlayer, outputlayer});
+  EXPECT_EQ(model->loadFromConfig(s.getIniName()), ML_ERROR_NONE);
+  EXPECT_EQ(model->compile(), ML_ERROR_NONE);
+  EXPECT_EQ(model->initialize(), ML_ERROR_NONE);
+  auto saved_ini_name = s.getIniName() + "_saved";
+  model->save(saved_ini_name, ml::train::ModelFormat::MODEL_FORMAT_INI);
+
+  if (remove(saved_ini_name.c_str())) {
+    std::cerr << "remove ini " << saved_ini_name
+              << "failed, reason: " << strerror(errno);
+  }
+}
+
 /**
  * @brief Main gtest
  */
diff --git a/test/unittest/unittest_base_properties.cpp b/test/unittest/unittest_base_properties.cpp
index 0bfa649ac2bcb0bed98fd9b86ce3e4a0616d51f1..e0ceec47537b8c507e10728a1c2b2f9b81e1d1d1 100644
@@ -285,7 +285,7 @@ TEST(BasicProperty, valid_p) {
 
     auto result = e.getResult<nntrainer::ExportMethods::METHOD_STRINGVECTOR>();
     auto pair1 = std::pair<std::string, std::string>("unit", "1");
-    EXPECT_EQ(result->at(2), pair1);
+    EXPECT_EQ(result->at(1), pair1);
   }
 
   { /**< load from layer */