[SWAP] Add lookahead property
author Jiho Chu <jiho.chu@samsung.com>
Thu, 15 Dec 2022 01:54:24 +0000 (10:54 +0900)
committer Jijoong Moon <jijoong.moon@samsung.com>
Thu, 9 Feb 2023 22:56:24 +0000 (07:56 +0900)
This patch adds the lookahead property.

Signed-off-by: Jiho Chu <jiho.chu@samsung.com>
nntrainer.ini.in
nntrainer/models/model_common_properties.cpp
nntrainer/models/model_common_properties.h
nntrainer/models/neuralnet.cpp
nntrainer/models/neuralnet.h

index a48c372..9bee8cd 100644 (file)
@@ -13,3 +13,6 @@ memory_swap = @MEMORY_SWAP@
 
 # path to save swap file
 memory_swap_path = @MEMORY_SWAP_PATH@
+
+# look ahead window size
+memory_swap_lookahead = @MEMORY_SWAP_LOOKAHEAD@
index 2f83a80..f47e75b 100644 (file)
@@ -33,4 +33,8 @@ MemorySwap::MemorySwap(bool value) { set(value); }
 
 MemorySwapPath::MemorySwapPath(const std::string &value) { set(value); }
 
+MemorySwapLookahead::MemorySwapLookahead(const unsigned int &value) {
+  set(value);
+}
+
 } // namespace nntrainer::props
index 5c33ce9..3792aff 100644 (file)
@@ -161,6 +161,24 @@ public:
   MemorySwapPath(const std::string &value = ".");
 };
 
+/**
+ * @brief swap lookahead window size property
+ *
+ */
+class MemorySwapLookahead : public Property<unsigned int> {
+public:
+  static constexpr const char *key =
+    "memory_swap_lookahead";      /**< unique key to access */
+  using prop_tag = uint_prop_tag; /**< property type */
+
+  /**
+   * @brief Constructor
+   *
+   * @param value value to set, defaults to 0 (no lookahead)
+   */
+  MemorySwapLookahead(const unsigned int &value = 0);
+};
+
 } // namespace nntrainer::props
 
 #endif
index 7faa69a..f7612c4 100644 (file)
@@ -66,10 +66,10 @@ namespace nntrainer {
 
 NeuralNetwork::NeuralNetwork() :
   model_props(props::LossType(), {}, {}, props::ClipGradByGlobalNorm()),
-  model_flex_props(props::Epochs(), props::TrainingBatchSize(),
-                   props::SavePath(), props::ContinueTrain(),
-                   props::SaveBestPath(), props::MemoryOptimization(),
-                   props::MemorySwap(), props::MemorySwapPath()),
+  model_flex_props(
+    props::Epochs(), props::TrainingBatchSize(), props::SavePath(),
+    props::ContinueTrain(), props::SaveBestPath(), props::MemoryOptimization(),
+    props::MemorySwap(), props::MemorySwapPath(), props::MemorySwapLookahead()),
   load_path(std::string()),
   epoch_idx(0),
   iter(0),
@@ -83,10 +83,10 @@ NeuralNetwork::NeuralNetwork() :
 
 NeuralNetwork::NeuralNetwork(AppContext app_context_) :
   model_props(props::LossType(), {}, {}, props::ClipGradByGlobalNorm()),
-  model_flex_props(props::Epochs(), props::TrainingBatchSize(),
-                   props::SavePath(), props::ContinueTrain(),
-                   props::SaveBestPath(), props::MemoryOptimization(),
-                   props::MemorySwap(), props::MemorySwapPath()),
+  model_flex_props(
+    props::Epochs(), props::TrainingBatchSize(), props::SavePath(),
+    props::ContinueTrain(), props::SaveBestPath(), props::MemoryOptimization(),
+    props::MemorySwap(), props::MemorySwapPath(), props::MemorySwapLookahead()),
   load_path(std::string()),
   epoch_idx(0),
   iter(0),
@@ -168,7 +168,9 @@ int NeuralNetwork::compile() {
   bool memory_swap = std::get<props::MemorySwap>(model_flex_props);
   const std::string memory_swap_path =
     std::get<props::MemorySwapPath>(model_flex_props);
-  model_graph = NetworkGraph(memory_swap, memory_swap_path);
+  unsigned int lookahead =
+    std::get<props::MemorySwapLookahead>(model_flex_props);
+  model_graph = NetworkGraph(memory_swap, memory_swap_path, lookahead);
 
   model_graph.setMemoryOptimizations(
     std::get<props::MemoryOptimization>(model_flex_props));
@@ -257,7 +259,7 @@ int NeuralNetwork::initialize() {
 /**
  * @brief     free layers
  */
-NeuralNetwork::~NeuralNetwork() = default;
+NeuralNetwork::~NeuralNetwork() { deallocate(); }
 
 /**
  * @brief     forward propagation using layers object which has layer
@@ -784,11 +786,12 @@ int NeuralNetwork::train_run(std::function<bool(void *userdata)> stop_cb) {
 
   auto train_for_iteration = [this, stop_cb](RunStats &stat,
                                              DataBuffer &buffer) {
-    model_graph.flushCache();
-
     forwarding(true, stop_cb);
     backwarding(iter++, stop_cb);
 
+    // To avoid unconsidered memory leak, we need to clear the cache
+    model_graph.flushCache();
+
     if (!stop_cb(nullptr)) {
       std::cout << "#" << epoch_idx << "/" << getEpochs();
       ml_logi("# %d / %d", epoch_idx, getEpochs());
index e851c9f..99ba960 100644 (file)
@@ -556,7 +556,7 @@ private:
     std::tuple<props::Epochs, props::TrainingBatchSize, props::SavePath,
                props::ContinueTrain, props::SaveBestPath,
                props::MemoryOptimization, props::MemorySwap,
-               props::MemorySwapPath>;
+               props::MemorySwapPath, props::MemorySwapLookahead>;
   using RigidPropTypes =
     std::tuple<props::LossType, std::vector<props::InputConnection>,
                std::vector<props::LabelLayer>, props::ClipGradByGlobalNorm>;