Refactor nntrainer load from config
authorJihoon Lee <jhoon.it.lee@samsung.com>
Tue, 14 Jul 2020 09:38:15 +0000 (18:38 +0900)
committerJijoong Moon <jijoong.moon@samsung.com>
Thu, 16 Jul 2020 05:09:08 +0000 (14:09 +0900)
`loadFromConfig` has many duplicated logics with layer::setProperty and
others

This PR refactors `loadFromConfig` to reuse already present logic.

**Changes proposed in this PR:**
- Add `std::out_of_range` exception to setProperty to signal that a property
type is not valid for the particular layer
- Change error to warning when input_dim is not present at the head of
the network (for ini)
- Change `weightIni` -> `weight_ini` in `ini` for consistency
- Change unittest accordingly
- Separate dataset, network parser

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
nntrainer/include/layer.h
nntrainer/include/neuralnet.h
nntrainer/src/flatten_layer.cpp
nntrainer/src/layer.cpp
nntrainer/src/neuralnet.cpp
test/include/nntrainer_test_util.h
test/tizen_capi/unittest_tizen_capi.cpp
test/unittest/unittest_nntrainer_internal.cpp

index 09773d8..5ebc8bb 100644 (file)
@@ -537,11 +537,16 @@ protected:
    * @param[in] type property type to be passed
    * @param[in] value value to be passed, if empty string is passed, do nothing
    * but throws error when @a type is invalid
+   * @exception std::out_of_range     when property type is not valid for the
+   * particular layer
    * @exception std::invalid_argument invalid argument
    */
   virtual void setProperty(const PropertyType type,
                            const std::string &value = "");
 
+  /// @todo move virtual void setProperty to public and remove this
+  friend class NeuralNetwork;
+
 private:
   /**
    * @brief     Set containing all the names of layers
index 9626240..f68b449 100644 (file)
@@ -406,6 +406,18 @@ private:
    * @brief     Ensure that layer has a name
    */
   void ensureName(std::shared_ptr<Layer> layer, std::string prefix = "");
+
+  /**
+   * @brief     load dataset config from ini
+   * @param[in] ini will be casted to iniparser::dictionary *
+   */
+  int loadDatasetConfig(void *ini);
+
+  /**
+   * @brief     load network config from ini
+   * @param[in] ini will be casted to iniparser::dictionary *
+   */
+  int loadNetworkConfig(void *ini);
 };
 
 } /* namespace nntrainer */
index ec6f825..92a3f5c 100644 (file)
@@ -59,7 +59,7 @@ Tensor FlattenLayer::backwarding(Tensor in, int iteration) {
 
 void FlattenLayer::setProperty(const PropertyType type,
                                const std::string &value) {
-  throw std::invalid_argument("[Flatten Layer] setProperty not supported");
+  throw std::out_of_range("[Flatten Layer] setProperty not supported");
 }
 
 void FlattenLayer::copy(std::shared_ptr<Layer> l) {
index 1a77cf3..89b57be 100644 (file)
@@ -162,32 +162,32 @@ void Layer::setProperty(const PropertyType type, const std::string &value) {
   case PropertyType::name:
     if (!value.empty()) {
       status = setName(value);
+      throw_status(status);
     }
-    throw_status(status);
     break;
   case PropertyType::input_shape:
     if (!value.empty()) {
       status = input_dim.setTensorDim(value.c_str());
+      throw_status(status);
     }
-    throw_status(status);
     break;
   case PropertyType::bias_init_zero:
     if (!value.empty()) {
       status = setBoolean(this->bias_init_zero, value);
+      throw_status(status);
     }
-    throw_status(status);
     break;
   case PropertyType::activation:
     if (!value.empty()) {
       status = setActivation((ActiType)parseType(value, TOKEN_ACTI));
+      throw_status(status);
     }
-    throw_status(status);
     break;
   case PropertyType::flatten:
     if (!value.empty()) {
       status = setBoolean(flatten, value);
+      throw_status(status);
     }
-    throw_status(status);
     break;
   case PropertyType::weight_decay:
     if (!value.empty()) {
@@ -211,7 +211,7 @@ void Layer::setProperty(const PropertyType type, const std::string &value) {
   default:
     std::string msg =
       "[Layer] Unknown Layer Property Key for value" + std::string(value);
-    throw std::invalid_argument(msg);
+    throw std::out_of_range(msg);
   }
 }
 
@@ -228,7 +228,7 @@ void Layer::printIfValid(std::ostream &out, const PropertyType type,
                          const T target) {
   try {
     setProperty(type);
-  } catch (std::invalid_argument &e) {
+  } catch (std::out_of_range &e) {
     return;
   }
 
index 23b8faf..6b081c6 100644 (file)
@@ -70,22 +70,6 @@ static bool is_file_exist(std::string file_name) {
   return infile.good();
 }
 
-static int parseWeightDecay(dictionary *ini, std::string layer_name,
-                            WeightDecayParam &weight_decay) {
-  char unknown[] = "Unknown";
-  int status = ML_ERROR_NONE;
-  weight_decay.type = (WeightDecayType)parseType(
-    iniparser_getstring(ini, (layer_name + ":Weight_Decay").c_str(), unknown),
-    TOKEN_WEIGHT_DECAY);
-
-  weight_decay.lambda = 0.0;
-  if (weight_decay.type == WeightDecayType::l2norm) {
-    weight_decay.lambda = iniparser_getdouble(
-      ini, (layer_name + ":Weight_Decay_Lambda").c_str(), 0.0);
-  }
-  return status;
-}
-
 /**
  * @brief     Parsing Layer Name
  * @param[in] string layer name
@@ -123,6 +107,7 @@ NeuralNetwork::NeuralNetwork(std::string config) :
 
 int NeuralNetwork::setConfig(std::string config) {
   int status = ML_ERROR_NONE;
+
   if (!is_file_exist(config)) {
     ml_loge("Error: Cannot open model configuration file");
     return ML_ERROR_INVALID_PARAMETER;
@@ -132,58 +117,10 @@ int NeuralNetwork::setConfig(std::string config) {
   return status;
 }
 
-int NeuralNetwork::loadFromConfig() {
-  int status = ML_ERROR_NONE;
-  std::string ini_file = config;
-  int num_ini_sec = 0;
+int NeuralNetwork::loadNetworkConfig(void *_ini) {
+  dictionary *ini = static_cast<dictionary *>(_ini);
   char unknown[] = "Unknown";
-  char model_name[] = "model.bin";
-  dictionary *ini;
-  std::vector<std::string> section_names;
-  std::vector<std::string>::iterator section_names_iter;
-
-  if (ini_file.empty()) {
-    ml_loge("Error: Configuration File is not defined");
-    return ML_ERROR_INVALID_PARAMETER;
-  }
-
-  /** Parse ini file */
-  ini = iniparser_load(ini_file.c_str());
-  if (ini == NULL) {
-    ml_loge("Error: cannot parse file: %s\n", ini_file.c_str());
-    return ML_ERROR_INVALID_PARAMETER;
-  }
-
-  /** Get number of sections in the file */
-  num_ini_sec = iniparser_getnsec(ini);
-  if (num_ini_sec < 0) {
-    ml_loge("Error: invalid number of sections.");
-    return ML_ERROR_INVALID_PARAMETER;
-  }
-
-  /** Get all the section names */
-  for (int idx = 0; idx < num_ini_sec; ++idx) {
-    const char *sec_name = iniparser_getsecname(ini, idx);
-    if (!sec_name) {
-      ml_loge("Error: Unable to retrieve section names from ini.");
-      return ML_ERROR_INVALID_PARAMETER;
-    }
-    std::string sec_name_lower(sec_name);
-    std::transform(sec_name_lower.begin(), sec_name_lower.end(),
-                   sec_name_lower.begin(),
-                   [](unsigned char c) { return std::tolower(c); });
-    section_names.push_back(sec_name_lower);
-  }
-
-  /** Parse the Network section and its properties */
-  section_names_iter =
-    std::find(section_names.begin(), section_names.end(), "network");
-  if (section_names_iter == section_names.end()) {
-    ml_loge("Error: Network section not found in the .");
-    return ML_ERROR_INVALID_PARAMETER;
-  } else {
-    section_names.erase(section_names_iter);
-  }
+  int status = ML_ERROR_NONE;
 
   /** Default to neural network model type */
   net_type = (nntrainer::NetType)parseType(
@@ -191,12 +128,12 @@ int NeuralNetwork::loadFromConfig() {
   epoch = iniparser_getint(ini, "Network:Epoch", epoch);
   cost = (CostType)parseType(iniparser_getstring(ini, "Network:Cost", unknown),
                              TOKEN_COST);
-  model = iniparser_getstring(ini, "Network:Model", model_name);
+  model = iniparser_getstring(ini, "Network:Model", "model.bin");
   batch_size = iniparser_getint(ini, "Network:Minibatch", batch_size);
 
   /** Default to adam optimizer */
   status = opt.setType((OptType)parseType(
-    iniparser_getstring(ini, "Network:Optimizer", unknown), TOKEN_OPT));
+    iniparser_getstring(ini, "Network:Optimizer", "adam"), TOKEN_OPT));
   NN_INI_RETURN_STATUS();
 
   OptParam popt(opt.getType());
@@ -213,252 +150,129 @@ int NeuralNetwork::loadFromConfig() {
   status = opt.setOptParam(popt);
   NN_INI_RETURN_STATUS();
 
-  /** Parse the DataSet section */
-  section_names_iter =
-    std::find(section_names.begin(), section_names.end(), "dataset");
-  if (section_names_iter != section_names.end()) {
-    section_names.erase(section_names_iter);
+  return status;
+}
+
+int NeuralNetwork::loadDatasetConfig(void *_ini) {
+  int status = ML_ERROR_NONE;
 
-    if (iniparser_find_entry(ini, "DataSet:Tflite")) {
-      ml_loge("Error: Tflite dataset is not yet implemented!");
-      return ML_ERROR_INVALID_PARAMETER;
-    } else {
-      data_buffer = std::make_shared<DataBufferFromDataFile>();
-      std::shared_ptr<DataBufferFromDataFile> dbuffer =
-        std::static_pointer_cast<DataBufferFromDataFile>(data_buffer);
+  dictionary *ini = static_cast<dictionary *>(_ini);
 
-      status = dbuffer->setDataFile(
-        iniparser_getstring(ini, "DataSet:TrainData", ""), DATA_TRAIN);
-      NN_INI_RETURN_STATUS();
-      status = dbuffer->setDataFile(
-        iniparser_getstring(ini, "DataSet:ValidData", ""), DATA_VAL);
-      NN_INI_RETURN_STATUS();
-      status = dbuffer->setDataFile(
-        iniparser_getstring(ini, "DataSet:TestData", ""), DATA_TEST);
-      NN_INI_RETURN_STATUS();
-      status = dbuffer->setDataFile(
-        iniparser_getstring(ini, "DataSet:LabelData", ""), DATA_LABEL);
-      NN_INI_RETURN_STATUS();
-      status = data_buffer->setBufSize(
-        iniparser_getint(ini, "DataSet:BufferSize", batch_size));
-      NN_INI_RETURN_STATUS();
-    }
+  if (iniparser_find_entry(ini, "DataSet:Tflite")) {
+    ml_loge("Error: Tflite dataset is not yet implemented!");
+    return ML_ERROR_INVALID_PARAMETER;
   } else {
-    data_buffer = std::make_shared<DataBufferFromCallback>();
+    data_buffer = std::make_shared<DataBufferFromDataFile>();
+    std::shared_ptr<DataBufferFromDataFile> dbuffer =
+      std::static_pointer_cast<DataBufferFromDataFile>(data_buffer);
+
+    status = dbuffer->setDataFile(
+      iniparser_getstring(ini, "DataSet:TrainData", ""), DATA_TRAIN);
+    NN_INI_RETURN_STATUS();
+    status = dbuffer->setDataFile(
+      iniparser_getstring(ini, "DataSet:ValidData", ""), DATA_VAL);
+    NN_INI_RETURN_STATUS();
+    status = dbuffer->setDataFile(
+      iniparser_getstring(ini, "DataSet:TestData", ""), DATA_TEST);
+    NN_INI_RETURN_STATUS();
+    status = dbuffer->setDataFile(
+      iniparser_getstring(ini, "DataSet:LabelData", ""), DATA_LABEL);
+    NN_INI_RETURN_STATUS();
+    /// fixme: #299
+    status = data_buffer->setBufSize(
+      iniparser_getint(ini, "DataSet:BufferSize", batch_size));
+    NN_INI_RETURN_STATUS();
   }
 
-  /** Parse all the layers defined as sections in order */
-  for (section_names_iter = section_names.begin();
-       section_names_iter != section_names.end(); ++section_names_iter) {
-    std::string layer_name = *section_names_iter;
-    std::string layer_type_str =
-      iniparser_getstring(ini, (layer_name + ":Type").c_str(), unknown);
-    LayerType layer_type = (LayerType)parseType(layer_type_str, TOKEN_LAYER);
-    bool b_zero =
-      iniparser_getboolean(ini, (layer_name + ":bias_init_zero").c_str(), true);
-
-    switch (layer_type) {
-    case LAYER_IN: {
-      std::shared_ptr<InputLayer> input_layer = std::make_shared<InputLayer>();
-
-      status = input_layer->setName(layer_name);
-      if (status != ML_ERROR_NONE)
-        return status;
-
-      std::string input_shape_str = iniparser_getstring(
-        ini, (layer_name + ":Input_Shape").c_str(), unknown);
-
-      if (input_shape_str.compare("Unknown") == 0) {
-        status = ML_ERROR_INVALID_PARAMETER;
-        NN_INI_RETURN_STATUS();
-      }
-
-      TensorDim d;
-      status = d.setTensorDim(input_shape_str);
-      NN_INI_RETURN_STATUS();
-      input_layer->setInputDimension(d);
-
-      input_layer->setNormalization(iniparser_getboolean(
-        ini, (layer_name + ":Normalization").c_str(), false));
-      input_layer->setStandardization(iniparser_getboolean(
-        ini, (layer_name + ":Standardization").c_str(), false));
-      addLayer(input_layer);
-    } break;
-    case LAYER_CONV2D: {
-      int size[CONV2D_DIM];
-      WeightDecayParam weight_decay;
-      std::shared_ptr<Conv2DLayer> conv2d_layer =
-        std::make_shared<Conv2DLayer>();
-
-      status = conv2d_layer->setName(layer_name);
-      NN_INI_RETURN_STATUS();
-
-      std::string input_shape_str = iniparser_getstring(
-        ini, (layer_name + ":Input_Shape").c_str(), unknown);
-
-      if (input_shape_str.compare("Unknown") != 0) {
-        TensorDim d;
-        d.setTensorDim(input_shape_str);
-        conv2d_layer->setInputDimension(d);
-      } else if (section_names_iter == section_names.begin()) {
-        ml_loge("Error: %s layer input shape not specified.",
-                layer_name.c_str());
-        status = ML_ERROR_INVALID_PARAMETER;
-        NN_INI_RETURN_STATUS();
-      }
-
-      status = getValues(CONV2D_DIM,
-                         iniparser_getstring(
-                           ini, (layer_name + ":kernel_size").c_str(), unknown),
-                         (int *)size);
-      NN_INI_RETURN_STATUS();
-      status = conv2d_layer->setSize(size, Layer::PropertyType::kernel_size);
-      NN_INI_RETURN_STATUS();
-
-      status =
-        getValues(CONV2D_DIM,
-                  iniparser_getstring(ini, (layer_name + ":stride").c_str(),
-                                      getValues({1, 1})),
-                  (int *)size);
-      NN_INI_RETURN_STATUS();
-      status = conv2d_layer->setSize(size, Layer::PropertyType::stride);
-      NN_INI_RETURN_STATUS();
-
-      status =
-        getValues(CONV2D_DIM,
-                  iniparser_getstring(ini, (layer_name + ":padding").c_str(),
-                                      getValues({0, 0})),
-                  (int *)size);
-
-      NN_INI_RETURN_STATUS();
-      status = conv2d_layer->setSize(size, Layer::PropertyType::padding);
-      NN_INI_RETURN_STATUS();
-
-      status = conv2d_layer->setFilter(
-        iniparser_getint(ini, (layer_name + ":filter").c_str(), 0));
-      NN_INI_RETURN_STATUS();
-
-      conv2d_layer->setBiasZero(b_zero);
-      conv2d_layer->setWeightInit((WeightIniType)parseType(
-        iniparser_getstring(ini, (layer_name + ":WeightIni").c_str(),
-                            "xavier_uniform"),
-        TOKEN_WEIGHTINI));
-
-      status = parseWeightDecay(ini, layer_name, weight_decay);
-      NN_INI_RETURN_STATUS();
-
-      conv2d_layer->setWeightDecay(weight_decay);
-      NN_INI_RETURN_STATUS();
-
-      addLayer(conv2d_layer);
-    } break;
-
-    case LAYER_POOLING2D: {
-      int size[POOLING2D_DIM];
-      std::shared_ptr<Pooling2DLayer> pooling2d_layer =
-        std::make_shared<Pooling2DLayer>();
-
-      status = pooling2d_layer->setName(layer_name);
-      NN_INI_RETURN_STATUS();
-
-      status = getValues(
-        POOLING2D_DIM,
-        iniparser_getstring(ini, (layer_name + ":pooling_size").c_str(),
-                            getValues({1, 1})),
-        (int *)size);
-
-      NN_INI_RETURN_STATUS();
-      status =
-        pooling2d_layer->setSize(size, Layer::PropertyType::pooling_size);
-
-      NN_INI_RETURN_STATUS();
-      status =
-        getValues(POOLING2D_DIM,
-                  iniparser_getstring(ini, (layer_name + ":stride").c_str(),
-                                      getValues({1, 1})),
-                  (int *)size);
-      NN_INI_RETURN_STATUS();
-      status = pooling2d_layer->setSize(size, Layer::PropertyType::stride);
-      NN_INI_RETURN_STATUS();
-      status =
-        getValues(POOLING2D_DIM,
-                  iniparser_getstring(ini, (layer_name + ":padding").c_str(),
-                                      getValues({0, 0})),
-                  (int *)size);
-      NN_INI_RETURN_STATUS();
-      status = pooling2d_layer->setSize(size, Layer::PropertyType::padding);
-      NN_INI_RETURN_STATUS();
-
-      pooling2d_layer->setPoolingType(
-        (nntrainer::Pooling2DLayer::PoolingType)parseType(
-          iniparser_getstring(ini, (layer_name + ":pooling").c_str(),
-                              "average"),
-          TOKEN_POOLING));
-
-      addLayer(pooling2d_layer);
-    } break;
-
-    case LAYER_FLATTEN: {
-      std::shared_ptr<FlattenLayer> flatten_layer =
-        std::make_shared<FlattenLayer>();
+  return status;
+}
 
-      status = flatten_layer->setName(layer_name);
-      NN_INI_RETURN_STATUS();
+int NeuralNetwork::loadFromConfig() {
+  int status = ML_ERROR_NONE;
+  std::string ini_file = config;
+  int num_ini_sec = 0;
+  dictionary *ini;
+  const char network_str[] = "network";
+  unsigned int network_len = strlen(network_str);
+  const char dataset_str[] = "dataset";
+  unsigned int dataset_len = strlen(dataset_str);
 
-      addLayer(flatten_layer);
-    } break;
+  if (ini_file.empty()) {
+    ml_loge("Error: Configuration File is not defined");
+    return ML_ERROR_INVALID_PARAMETER;
+  }
 
-    case LAYER_FC: {
-      WeightDecayParam weight_decay;
-      std::shared_ptr<FullyConnectedLayer> fc_layer =
-        std::make_shared<FullyConnectedLayer>();
+  /** Parse ini file */
+  ini = iniparser_load(ini_file.c_str());
+  if (ini == NULL) {
+    ml_loge("Error: cannot parse file: %s\n", ini_file.c_str());
+    return ML_ERROR_INVALID_PARAMETER;
+  }
 
-      status = fc_layer->setName(layer_name);
-      NN_INI_RETURN_STATUS();
+  /** Get number of sections in the file */
+  num_ini_sec = iniparser_getnsec(ini);
+  if (num_ini_sec < 0) {
+    ml_loge("Error: invalid number of sections.");
+    return ML_ERROR_INVALID_PARAMETER;
+  }
 
-      std::string input_shape_str = iniparser_getstring(
-        ini, (layer_name + ":Input_Shape").c_str(), unknown);
-
-      if (input_shape_str.compare("Unknown") != 0) {
-        TensorDim d;
-        d.setTensorDim(input_shape_str);
-        fc_layer->setInputDimension(d);
-      } else if (section_names_iter == section_names.begin()) {
-        ml_loge("Error: %s layer input shape not specified.",
-                layer_name.c_str());
-        status = ML_ERROR_INVALID_PARAMETER;
-        NN_INI_RETURN_STATUS();
-      }
+  if (iniparser_find_entry(ini, "network") == 0) {
+    ml_loge("there is no [network] section in given ini file");
+    return ML_ERROR_INVALID_PARAMETER;
+  }
 
-      fc_layer->setUnit(static_cast<unsigned int>(
-        iniparser_getint(ini, (layer_name + ":Unit").c_str(), 0)));
+  /** Get all the section names */
+  ml_logi(
+    "parsing ini... invalid properties does not cause error, rather be ignored "
+    "only invalid value for valid property will abort the process");
+  for (int idx = 0; idx < num_ini_sec; ++idx) {
+    const char *sec_name = iniparser_getsecname(ini, idx);
 
-      fc_layer->setBiasZero(b_zero);
-      fc_layer->setWeightInit((WeightIniType)parseType(
-        iniparser_getstring(ini, (layer_name + ":WeightIni").c_str(),
-                            "xavier_uniform"),
-        TOKEN_WEIGHTINI));
+    if (!sec_name) {
+      ml_loge("Error: Unable to retrieve section names from ini.");
+      return ML_ERROR_INVALID_PARAMETER;
+    }
 
-      status = parseWeightDecay(ini, layer_name, weight_decay);
-      NN_INI_RETURN_STATUS();
+    if (strncasecmp(network_str, sec_name, network_len) == 0) {
+      status = loadNetworkConfig((void *)ini);
+      NN_RETURN_STATUS();
+      continue;
+    }
 
-      fc_layer->setWeightDecay(weight_decay);
+    if (strncasecmp(dataset_str, sec_name, dataset_len) == 0) {
+      status = loadDatasetConfig((void *)ini);
+      NN_RETURN_STATUS();
+      continue;
+    }
 
-      addLayer(fc_layer);
-    } break;
-    case LAYER_BN: {
-      std::shared_ptr<BatchNormalizationLayer> bn_layer =
-        std::make_shared<BatchNormalizationLayer>();
+    /** Parse all the layers defined as sections in order */
+    std::string layer_name(sec_name);
 
-      status = bn_layer->setName(layer_name);
-      NN_INI_RETURN_STATUS();
+    std::string layer_type_str =
+      iniparser_getstring(ini, (layer_name + ":Type").c_str(), "Unknown");
+    LayerType layer_type = (LayerType)parseType(layer_type_str, TOKEN_LAYER);
 
-      // fixme: deprecate this.
-      layers.back()->setBNfollow(true);
+    std::shared_ptr<Layer> layer;
 
-      addLayer(bn_layer);
-      NN_INI_RETURN_STATUS();
-    } break;
+    switch (layer_type) {
+    case LAYER_IN:
+      layer = std::make_shared<InputLayer>();
+      break;
+    case LAYER_CONV2D:
+      layer = std::make_shared<Conv2DLayer>();
+      break;
+    case LAYER_POOLING2D:
+      layer = std::make_shared<Pooling2DLayer>();
+      break;
+    case LAYER_FLATTEN:
+      layer = std::make_shared<FlattenLayer>();
+      break;
+    case LAYER_FC:
+      layer = std::make_shared<FullyConnectedLayer>();
+      break;
+    case LAYER_BN:
+      layer = std::make_shared<BatchNormalizationLayer>();
+      break;
     case LAYER_UNKNOWN:
     default:
       ml_loge("Error: Unknown layer type");
@@ -467,21 +281,42 @@ int NeuralNetwork::loadFromConfig() {
       break;
     }
 
-    /** Add activation layer */
-    const char *acti_str =
-      iniparser_getstring(ini, (layer_name + ":Activation").c_str(), unknown);
-    ActiType act = (ActiType)parseType(acti_str, TOKEN_ACTI);
-    layers.back()->setActivation(act);
+    unsigned int property_end =
+      static_cast<unsigned int>(Layer::PropertyType::unknown);
+
+    for (unsigned int i = 0; i < property_end; ++i) {
+      std::string prop = propToStr(i);
+      std::string value =
+        iniparser_getstring(ini, (layer_name + ":" + prop).c_str(), "");
 
-    /** Add flatten layer */
-    bool flatten =
-      iniparser_getboolean(ini, (layer_name + ":Flatten").c_str(), false);
-    layers.back()->setFlatten(flatten);
+      try {
+        /// if problem setting property, it will throw std::invalid_argument
+        layer->setProperty(static_cast<Layer::PropertyType>(i), value);
+      } catch (std::out_of_range &e) {
+        /// intended
+      }
+    }
+
+    status = layer->setName(layer_name);
+    NN_INI_RETURN_STATUS();
+
+    status = addLayer(layer);
+    NN_INI_RETURN_STATUS();
+  }
+
+  /**< Additional validation and handling for the neural network */
+  if (!data_buffer) {
+    data_buffer = std::make_shared<DataBufferFromCallback>();
   }
 
   status = data_buffer->setMiniBatch(batch_size);
   NN_INI_RETURN_STATUS();
 
+  if (layers.empty()) {
+    ml_loge("there is no layer section in the ini file");
+    return ML_ERROR_INVALID_PARAMETER;
+  }
+
   iniparser_freedict(ini);
   return status;
 }
index 6f579bb..ae4fc54 100644 (file)
@@ -157,7 +157,7 @@ const std::string config_str2 = "[Network]"
                                 "\n"
                                 "padding=0,0"
                                 "\n"
-                                "weightIni=xavier_uniform"
+                                "weight_ini=xavier_uniform"
                                 "\n"
                                 "flatten = false"
                                 "\n"
index 49ad707..f617a4f 100644 (file)
@@ -290,7 +290,7 @@ TEST(nntrainer_capi_nnmodel, train_03_n) {
   int status = ML_ERROR_NONE;
   std::string config_file = "./test_train_01_p.ini";
   RESET_CONFIG(config_file.c_str());
-  replaceString("Input_Shape = 32:1:1:62720", "Input_Shape=32:1:1:62720",
+  replaceString("Input_Shape = 32:1:1:62720", "input_shape=32:1:1:62720",
                 config_file, config_str);
   replaceString("minibatch = 32", "minibatch = 16", config_file, config_str);
   replaceString("BufferSize=100", "", config_file, config_str);
index d723bab..c947765 100644 (file)
@@ -114,9 +114,10 @@ TEST(nntrainer_NeuralNetwork, load_config_03_n) {
 }
 
 /**
- * @brief Neural Network Model initialization
+ * @brief Neural Network Model initialization, there is warning when no
+ * input_shape is provided
  */
-TEST(nntrainer_NeuralNetwork, load_config_04_n) {
+TEST(nntrainer_NeuralNetwork, load_config_04_p) {
   int status = ML_ERROR_NONE;
   RESET_CONFIG("./test.ini");
   replaceString("Input_Shape = 32:1:1:62720", "", "./test.ini", config_str);
@@ -124,7 +125,7 @@ TEST(nntrainer_NeuralNetwork, load_config_04_n) {
   status = NN.setConfig("./test.ini");
   EXPECT_EQ(status, ML_ERROR_NONE);
   status = NN.loadFromConfig();
-  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+  EXPECT_EQ(status, ML_ERROR_NONE);
 }
 
 /**
@@ -156,6 +157,21 @@ TEST(nntrainer_NeuralNetwork, load_config_06_p) {
   EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
 }
 
+/*
+ * @brief Neural Network Model initialization
+ */
+TEST(nntrainer_NeuralNetwork, load_config_07_p) {
+  int status = ML_ERROR_NONE;
+  RESET_CONFIG("./test.ini");
+  replaceString("bias_init_zero = true", "Bias_Init_Zero = false", "./test.ini",
+                config_str);
+  nntrainer::NeuralNetwork NN;
+  status = NN.setConfig("./test.ini");
+  EXPECT_EQ(status, ML_ERROR_NONE);
+  status = NN.loadFromConfig();
+  EXPECT_EQ(status, ML_ERROR_NONE);
+}
+
 /**
  * @brief Neural Network Model initialization
  */