[ NEURALNET ] change the loss scale property to Rigid Property
author jijoong.moon <jijoong.moon@samsung.com>
Fri, 26 Apr 2024 11:07:28 +0000 (20:07 +0900)
committer MyungJoo Ham <myungjoo.ham@samsung.com>
Sat, 4 May 2024 05:20:40 +0000 (14:20 +0900)
Loss Scale is more like Rigid Property of model, rather than flexible
property.

Resolves:

**Self evaluation:**
1. Build test:  [X]Passed [ ]Failed [ ]Skipped
2. Run test:  [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: jijoong.moon <jijoong.moon@samsung.com>
nntrainer/layers/layer_context.h
nntrainer/models/neuralnet.cpp
nntrainer/models/neuralnet.h

index fe3938f75356c5cc19ec5ab66791d087f0c9475c..e5c6759638b840bb6de1414bbb69a4b7634aa6a9 100644 (file)
@@ -172,8 +172,7 @@ public:
   /**
    * @brief Request a new weight for the layer
    *
-   * @param dim_v dimension of Variagble of the weight
-   * @param dim_g dimension of Gradient of the weight
+   * @param dim dimension of Variable of the weight
    * @param init initializer for the weight
    * @param reg regularizer for the weight
    * @param reg_const regularization constant for the weight
index 62eececf0f26b3e97aa5c3142cd033dc5e7d1de7..d0e542825fa736265938cb0ae042c07fd3415227 100644 (file)
 namespace nntrainer {
 
 NeuralNetwork::NeuralNetwork() :
-  model_props(props::LossType(), {}, {}, props::ClipGradByGlobalNorm()),
+  model_props(props::LossType(), {}, {}, props::ClipGradByGlobalNorm(),
+              props::LossScale()),
   model_flex_props(
     props::Epochs(), props::TrainingBatchSize(), props::SavePath(),
     props::ContinueTrain(), props::SaveBestPath(), props::MemoryOptimization(),
     props::MemorySwap(), props::MemorySwapPath(), props::MemorySwapLookahead(),
-    props::TensorFormat(), props::ModelTensorDataType(), props::LossScale()),
+    props::TensorFormat(), props::ModelTensorDataType()),
   load_path(std::string()),
   epoch_idx(0),
   iter(0),
@@ -83,12 +84,13 @@ NeuralNetwork::NeuralNetwork() :
 }
 
 NeuralNetwork::NeuralNetwork(AppContext app_context_) :
-  model_props(props::LossType(), {}, {}, props::ClipGradByGlobalNorm()),
+  model_props(props::LossType(), {}, {}, props::ClipGradByGlobalNorm(),
+              props::LossScale()),
   model_flex_props(
     props::Epochs(), props::TrainingBatchSize(), props::SavePath(),
     props::ContinueTrain(), props::SaveBestPath(), props::MemoryOptimization(),
     props::MemorySwap(), props::MemorySwapPath(), props::MemorySwapLookahead(),
-    props::TensorFormat(), props::ModelTensorDataType(), props::LossScale()),
+    props::TensorFormat(), props::ModelTensorDataType()),
   load_path(std::string()),
   epoch_idx(0),
   iter(0),
@@ -179,9 +181,8 @@ int NeuralNetwork::compile() {
   const std::string tensor_type =
     to_string(std::get<props::ModelTensorDataType>(model_flex_props));
 
-  const float loss_scale = std::get<props::LossScale>(model_flex_props);
   model_graph = NetworkGraph(memory_swap, memory_swap_path, lookahead,
-                             tensor_format, tensor_type, loss_scale);
+                             tensor_format, tensor_type);
 
   model_graph.setMemoryOptimizations(
     std::get<props::MemoryOptimization>(model_flex_props));
index a2923ae8a7a9714603885b56def7e5b283bac19e..da1571a32875749d3a652016a61988d70a1c57b9 100644 (file)
@@ -624,14 +624,16 @@    * @retval shared_ptr<const Tensor>
                const std::string file_path) override;
 
 private:
-  using FlexiblePropTypes = std::tuple<
-    props::Epochs, props::TrainingBatchSize, props::SavePath,
-    props::ContinueTrain, props::SaveBestPath, props::MemoryOptimization,
-    props::MemorySwap, props::MemorySwapPath, props::MemorySwapLookahead,
-    props::TensorFormat, props::ModelTensorDataType, props::LossScale>;
+  using FlexiblePropTypes =
+    std::tuple<props::Epochs, props::TrainingBatchSize, props::SavePath,
+               props::ContinueTrain, props::SaveBestPath,
+               props::MemoryOptimization, props::MemorySwap,
+               props::MemorySwapPath, props::MemorySwapLookahead,
+               props::TensorFormat, props::ModelTensorDataType>;
   using RigidPropTypes =
     std::tuple<props::LossType, std::vector<props::InputConnection>,
-               std::vector<props::LabelLayer>, props::ClipGradByGlobalNorm>;
+               std::vector<props::LabelLayer>, props::ClipGradByGlobalNorm,
+               props::LossScale>;
 
   RigidPropTypes model_props;         /**< model props */
   FlexiblePropTypes model_flex_props; /**< model train props */