*/
nntrainer::NeuralNetwork NN;
NN.setConfig(config);
+ NN.loadFromConfig();
NN.init();
NN.readModel();
*/
nntrainer::NeuralNetwork NN;
NN.setConfig(config);
+ NN.loadFromConfig();
NN.init();
NN.readModel();
/**
* @brief Initialize NN
*/
+ NN.loadFromConfig();
NN.init();
if (!training)
NN.readModel();
/**
* @brief initialize mainNet & Target Net
*/
+ mainNet.loadFromConfig();
mainNet.init();
+ targetNet.loadFromConfig();
targetNet.init();
/**
"beta1=0.9", "beta2=0.9999", "epsilon=1e-7", NULL);
NN_RETURN_STATUS();
+ /* set optimizer */
+ status = ml_nnmodel_set_optimizer (model, optimizer);
+ NN_RETURN_STATUS ();
+
/* compile model with cross entropy loss function */
- status = ml_nnmodel_compile(model, optimizer, "loss=cross", NULL);
- NN_RETURN_STATUS();
+ status = ml_nnmodel_compile (model, "loss=cross", NULL);
+ NN_RETURN_STATUS ();
/* train model with data files : epochs = 10 and store model file named
* "model.bin" */
"beta1=0.9", "beta2=0.9999", "epsilon=1e-7", NULL);
NN_RETURN_STATUS();
+ /* set optimizer */
+ status = ml_nnmodel_set_optimizer (model, optimizer);
+ NN_RETURN_STATUS ();
+
/* compile model with cross entropy loss function */
- status = ml_nnmodel_compile(model, optimizer, "loss=cross", NULL);
- NN_RETURN_STATUS();
+ status = ml_nnmodel_compile (model, "loss=cross", NULL);
+ NN_RETURN_STATUS ();
/* train model with data files : epochs = 10 and store model file named
* "model.bin" */
int status = ML_ERROR_NONE;
ml_nnmodel_h handle = NULL;
const char *config_file = "./Tizen_CAPI_config.ini";
- status = ml_nnmodel_construct(&handle);
+ status = ml_nnmodel_construct_with_conf (config_file, &handle);
if (status != ML_ERROR_NONE)
return status;
- status = ml_nnmodel_compile_with_conf(config_file, handle);
+ status = ml_nnmodel_compile (handle, NULL);
if (status != ML_ERROR_NONE)
return status;
- status = ml_nnmodel_train_with_file(handle);
+ status = ml_nnmodel_train_with_file (handle, NULL);
if (status != ML_ERROR_NONE)
return status;
status = ml_nnmodel_destruct(handle);
*/
nntrainer::NeuralNetwork NN;
NN.setConfig(config);
+ NN.loadFromConfig();
NN.init();
/**
int ml_nnmodel_construct(ml_nnmodel_h *model);
/**
- * @brief Initialize the neural network model with the given configuration file.
- * @details Use this function to initialize neural network model
+ * @brief Construct the neural network model with the given configuration file.
+ * @details Use this function to create neural network model with the given configuration file.
* @since_tizen 6.x
* @param[in] model_conf The location of nntrainer model configuration file.
- * @param[in] model The NNTrainer model handler from the given description.
+ * @param[out] model The NNTrainer model handler from the given description.
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_INVALID_PARAMETER Invalid Parameter.
*/
-int ml_nnmodel_compile_with_conf(const char *model_conf, ml_nnmodel_h model);
+int ml_nnmodel_construct_with_conf(const char *model_conf, ml_nnmodel_h *model);
/**
* @brief initialize the neural network model.
- * @details Use this function to initialize neural network model
+ * @details Use this function to compile the neural network model. Once compiled, addition of new layers is not permitted. Further, updating the properties of added layers is restricted.
* @since_tizen 6.x
* @param[in] model The NNTrainer model handler from the given description.
- * @param[in] optimizer The NNTrainer optimizer handler from the given
- * description.
* @param[in] ... hyper parameter for compile model
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_INVALID_PARAMETER Invalid Parameter.
*/
-int ml_nnmodel_compile(ml_nnmodel_h model, ml_nnopt_h optimizer, ...);
+int ml_nnmodel_compile(ml_nnmodel_h model, ...);
/**
* @brief train the neural network model.
int ml_nnmodel_destruct(ml_nnmodel_h model);
/**
- * @brief add layer into the neural network model
- * @details Use this function to add layer
+ * @brief Add a layer at the end of the existing layers in the neural network model.
+ * @details Use this function to add a layer to the model.
* @since_tizen 6.x
- * @param[out] model The NNTrainer model handler from the given description.
- * @param[int] layer The NNTrainer layer handler
+ * @param[in] model The NNTrainer model handler from the given description.
+ * @param[in] layer The NNTrainer layer handler
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_INVALID_PARAMETER Invalid Parameter.
int ml_nnmodel_add_layer(ml_nnmodel_h model, ml_nnlayer_h layer);
/**
+ * @brief Set the neural network optimizer.
+ * @details Use this function to set Neural Network Optimizer.
+ * @since_tizen 6.x
+ * @param[in] model The NNTrainer model handler from the given description.
+ * @param[in] optimizer The NNTrainer Optimizer handler
+ * @return @c 0 on success. Otherwise a negative error value.
+ * @retval #ML_ERROR_NONE Successful.
+ * @retval #ML_ERROR_INVALID_PARAMETER Invalid Parameter.
+ */
+int ml_nnmodel_set_optimizer(ml_nnmodel_h model, ml_nnopt_h optimizer);
+
+/**
* @brief Create the neural network layer.
* @details Use this function to create Neural Network Layer.
* @since_tizen 6.x
* @brief Create the neural network optimizer.
* @details Use this function to create Neural Network Optimizer.
* @since_tizen 6.x
- * @param[out] layer The NNTrainer Optimizer handler from the given description.
- * @param[in] type The NNTrainer Optimizer type
+ * @param[out] optimizer The NNTrainer Optimizer handler
+ * @param[in] type The NNTrainer Optimizer type
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
- * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter.
+ * @retval #ML_ERROR_INVALID_PARAMETER Invalid Parameter.
*/
-int ml_nnoptimizer_create(ml_nnopt_h *opt, const char *type);
+int ml_nnoptimizer_create(ml_nnopt_h *optimizer, const char *type);
/**
* @brief Delete the neural network optimizer.
* @details Use this function to delete Neural Network Optimizer.
* @since_tizen 6.x
- * @param[in] layer The NNTrainer optimizer handler from the given description.
+ * @param[in] optimizer The NNTrainer optimizer handler from the given description.
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_INVALID_PARAMETER Invalid Parameter.
*/
-int ml_nnoptimizer_delete(ml_nnopt_h opt);
+int ml_nnoptimizer_delete(ml_nnopt_h optimizer);
/**
* @brief Set the neural network optimizer property.
* @details Use this function to set Neural Network Optimizer Property.
* @since_tizen 6.x
- * @param[in] layer The NNTrainer Optimizer handler from the given description.
+ * @param[in] optimizer The NNTrainer Optimizer handler from the given description.
* @param[in] ... Property values with NULL at the end.
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter.
*/
-int ml_nnoptimizer_set_property(ml_nnopt_h opt, ...);
+int ml_nnoptimizer_set_property(ml_nnopt_h optimizer, ...);
/**
* @}
return status;
}
-int ml_nnmodel_compile_with_conf(const char *model_conf, ml_nnmodel_h model) {
+int ml_nnmodel_construct_with_conf(const char *model_conf, ml_nnmodel_h *model) {
int status = ML_ERROR_NONE;
ml_nnmodel *nnmodel;
- std::shared_ptr<nntrainer::NeuralNetwork> nn;
+ std::shared_ptr<nntrainer::NeuralNetwork> NN;
+ returnable f;
std::ifstream conf_file(model_conf);
if (!conf_file.good()) {
return ML_ERROR_INVALID_PARAMETER;
}
- ML_NNTRAINER_CHECK_MODEL_VALIDATION(nnmodel, model);
- nn = nnmodel->network;
-
- returnable f = [&]() { return nn->setConfig(model_conf); };
-
- status = nntrainer_exception_boundary(f);
+ status = ml_nnmodel_construct(model);
if (status != ML_ERROR_NONE)
return status;
- f = [&]() { return nn->checkValidation(); };
+ nnmodel = (ml_nnmodel *)(*model);
+ NN = nnmodel->network;
+ f = [&]() { return NN->setConfig(model_conf); };
status = nntrainer_exception_boundary(f);
- if (status != ML_ERROR_NONE)
+ if (status != ML_ERROR_NONE) {
+ ml_nnmodel_destruct(*model);
return status;
+ }
- f = [&]() { return nn->init(); };
-
+ f = [&]() { return NN->loadFromConfig(); };
status = nntrainer_exception_boundary(f);
+ if (status != ML_ERROR_NONE) {
+ ml_nnmodel_destruct(*model);
+ }
+
return status;
}
-int ml_nnmodel_compile(ml_nnmodel_h model, ml_nnopt_h optimizer, ...) {
+int ml_nnmodel_compile(ml_nnmodel_h model, ...) {
int status = ML_ERROR_NONE;
const char *data;
ml_nnmodel *nnmodel;
- ml_nnopt *nnopt;
-
- std::shared_ptr<nntrainer::NeuralNetwork> NN;
- std::shared_ptr<nntrainer::Optimizer> opti;
+ returnable f;
ML_NNTRAINER_CHECK_MODEL_VALIDATION(nnmodel, model);
- ML_NNTRAINER_CHECK_OPT_VALIDATION(nnopt, optimizer);
-
std::vector<std::string> arg_list;
-
va_list arguments;
- va_start(arguments, optimizer);
+ va_start(arguments, model);
while ((data = va_arg(arguments, const char *))) {
arg_list.push_back(data);
}
va_end(arguments);
+ std::shared_ptr<nntrainer::NeuralNetwork> NN;
NN = nnmodel->network;
- opti = nnopt->optimizer;
- returnable f = [&]() { return NN->init(opti, arg_list); };
+ f = [&]() { return NN->setProperty(arg_list); };
+ status = nntrainer_exception_boundary(f);
+ if (status != ML_ERROR_NONE)
+ return status;
+ f = [&]() { return NN->init(); };
+ status = nntrainer_exception_boundary(f);
+ if (status != ML_ERROR_NONE)
+ return status;
+
+ f = [&]() { return NN->checkValidation(); };
status = nntrainer_exception_boundary(f);
return status;
int status = ML_ERROR_NONE;
ml_nnmodel *nnmodel;
ml_nnlayer *nnlayer;
+
ML_NNTRAINER_CHECK_MODEL_VALIDATION(nnmodel, model);
ML_NNTRAINER_CHECK_LAYER_VALIDATION(nnlayer, layer);
return status;
}
+int ml_nnmodel_set_optimizer(ml_nnmodel_h model, ml_nnopt_h optimizer) {
+ int status = ML_ERROR_NONE;
+ ml_nnmodel *nnmodel;
+ ml_nnopt *nnopt;
+
+ ML_NNTRAINER_CHECK_MODEL_VALIDATION(nnmodel, model);
+ ML_NNTRAINER_CHECK_OPT_VALIDATION(nnopt, optimizer);
+
+ std::shared_ptr<nntrainer::NeuralNetwork> NN;
+ std::shared_ptr<nntrainer::Optimizer> opt;
+
+ NN = nnmodel->network;
+ opt = nnopt->optimizer;
+
+ returnable f = [&]() { return NN->setOptimizer(opt); };
+
+ status = nntrainer_exception_boundary(f);
+
+ return status;
+}
+
int ml_nnlayer_create(ml_nnlayer_h *layer, ml_layer_type_e type) {
int status = ML_ERROR_NONE;
returnable f;
NL = nnlayer->layer;
returnable f = [&]() { return NL->setProperty(arg_list); };
-
status = nntrainer_exception_boundary(f);
return status;
}
-int ml_nnoptimizer_create(ml_nnopt_h *opt, const char *type) {
+int ml_nnoptimizer_create(ml_nnopt_h *optimizer, const char *type) {
int status = ML_ERROR_NONE;
ml_nnopt *nnopt = new ml_nnopt;
nnopt->magic = ML_NNTRAINER_MAGIC;
nnopt->optimizer = std::make_shared<nntrainer::Optimizer>();
- *opt = nnopt;
+ *optimizer = nnopt;
returnable f = [&]() {
return nnopt->optimizer->setType(
return status;
}
-int ml_nnoptimizer_delete(ml_nnopt_h opt) {
+int ml_nnoptimizer_delete(ml_nnopt_h optimizer) {
int status = ML_ERROR_NONE;
ml_nnopt *nnopt;
- ML_NNTRAINER_CHECK_OPT_VALIDATION(nnopt, opt);
+ ML_NNTRAINER_CHECK_OPT_VALIDATION(nnopt, optimizer);
delete nnopt;
return status;
}
-int ml_nnoptimizer_set_property(ml_nnopt_h opt, ...) {
+int ml_nnoptimizer_set_property(ml_nnopt_h optimizer, ...) {
int status = ML_ERROR_NONE;
ml_nnopt *nnopt;
const char *data;
- nnopt = (ml_nnopt *)opt;
- ML_NNTRAINER_CHECK_OPT_VALIDATION(nnopt, opt);
+ nnopt = (ml_nnopt *)optimizer;
+ ML_NNTRAINER_CHECK_OPT_VALIDATION(nnopt, optimizer);
std::vector<std::string> arg_list;
va_list arguments;
- va_start(arguments, opt);
+ va_start(arguments, optimizer);
while ((data = va_arg(arguments, const char *))) {
arg_list.push_back(data);
va_end(arguments);
- std::shared_ptr<nntrainer::Optimizer> Opt;
- Opt = nnopt->optimizer;
+ std::shared_ptr<nntrainer::Optimizer> opt;
+ opt = nnopt->optimizer;
- returnable f = [&]() { return Opt->setProperty(arg_list); };
+ returnable f = [&]() { return opt->setProperty(arg_list); };
status = nntrainer_exception_boundary(f);
* @brief set if the output of this layer must be flattened
* @param[in] flatten true if the output of this layer is to be flattened
*/
+ void setFlatten(bool flatten) { this->flatten = flatten; }
+
+ /**
+ * @brief get if the output of this layer must be flatten
+ * @retval flatten value
+ */
bool getFlatten() { return flatten; }
/**
void setLoss(float l);
/**
- * @brief Initialize Network. This should be called after set all hyper
- * parmeters.
+ * @brief Create and load the Network with the given configuration file.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
*/
- int init();
+ int loadFromConfig();
/**
* @brief set Property of Network
int setProperty(std::vector<std::string> values);
/**
- * @brief Initialize Network
- * @param[in] opimizer optimizer instance
- * @param[in] arg_list argument list
- * "loss = cross | msr"
+ * @brief Initialize Network. This should be called after setting all
+ * hyperparameters.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
*/
- int init(std::shared_ptr<Optimizer> optimizer,
- std::vector<std::string> arg_list);
+ int init();
/**
* @brief forward propagation
*/
int addLayer(std::shared_ptr<Layer> layer);
+ /**
+ * @brief set optimizer for the neural network model
+ * @retval #ML_ERROR_NONE Successful.
+ * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
+ */
+ int setOptimizer(std::shared_ptr<Optimizer> optimizer);
+
enum class PropertyType {
loss = 0,
cost = 1,
return status;
}
-int NeuralNetwork::init() {
+int NeuralNetwork::loadFromConfig() {
int status = ML_ERROR_NONE;
std::string ini_file = config;
int num_ini_sec = 0;
/** Parse all the layers defined as sections in order */
TensorDim previous_dim;
for (section_names_iter = section_names.begin();
- section_names_iter != section_names.end(); ++section_names_iter) {
- bool last = false;
+ section_names_iter != section_names.end(); ++section_names_iter) {
std::string layer_name = *section_names_iter;
std::string layer_type_str =
iniparser_getstring(ini, (layer_name + ":Type").c_str(), unknown);
bool b_zero =
iniparser_getboolean(ini, (layer_name + ":bias_init_zero").c_str(), true);
- last = (section_names_iter + 1) == section_names.end();
-
switch (layer_type) {
case LAYER_IN: {
std::shared_ptr<InputLayer> input_layer = std::make_shared<InputLayer>();
input_layer->setInputDimension(previous_dim);
- status = input_layer->initialize(last);
- NN_INI_RETURN_STATUS();
- input_layer->setBiasZero(b_zero);
-
input_layer->setNormalization(iniparser_getboolean(
ini, (layer_name + ":Normalization").c_str(), false));
input_layer->setStandardization(iniparser_getboolean(
layer_name.c_str());
status = ML_ERROR_INVALID_PARAMETER;
NN_INI_RETURN_STATUS();
- } else {
- conv2d_layer->setInputDimension(previous_dim);
- }
-
- if (last) {
- status = conv2d_layer->setCost(cost);
- NN_INI_RETURN_STATUS();
}
status = getValues(CONV2D_DIM,
conv2d_layer->setWeightDecay(weight_decay);
NN_INI_RETURN_STATUS();
- status = conv2d_layer->initialize(last);
- NN_INI_RETURN_STATUS();
-
- status = conv2d_layer->setOptimizer(opt);
- NN_INI_RETURN_STATUS();
-
addLayer(conv2d_layer);
} break;
std::shared_ptr<Pooling2DLayer> pooling2d_layer =
std::make_shared<Pooling2DLayer>();
- pooling2d_layer->setInputDimension(previous_dim);
-
status = getValues(
POOLING2D_DIM,
iniparser_getstring(ini, (layer_name + ":pooling_size").c_str(),
"average"),
TOKEN_POOLING));
- status = pooling2d_layer->initialize(last);
- NN_INI_RETURN_STATUS();
addLayer(pooling2d_layer);
} break;
std::shared_ptr<FlattenLayer> flatten_layer =
std::make_shared<FlattenLayer>();
- flatten_layer->setInputDimension(previous_dim);
-
- status = flatten_layer->initialize(last);
- NN_INI_RETURN_STATUS();
addLayer(flatten_layer);
} break;
layer_name.c_str());
status = ML_ERROR_INVALID_PARAMETER;
NN_INI_RETURN_STATUS();
- } else {
- fc_layer->setInputDimension(previous_dim);
- }
-
- if (last) {
- status = fc_layer->setCost(cost);
- NN_INI_RETURN_STATUS();
}
fc_layer->setUnit(static_cast<unsigned int>(
"xavier_uniform"),
TOKEN_WEIGHTINI));
- status = fc_layer->initialize(last);
- NN_INI_RETURN_STATUS();
-
status = parseWeightDecay(ini, layer_name, weight_decay);
NN_INI_RETURN_STATUS();
fc_layer->setWeightDecay(weight_decay);
- status = fc_layer->setOptimizer(opt);
- NN_INI_RETURN_STATUS();
addLayer(fc_layer);
} break;
case LAYER_BN: {
std::shared_ptr<BatchNormalizationLayer> bn_layer =
std::make_shared<BatchNormalizationLayer>();
- bn_layer->setInputDimension(previous_dim);
-
- status = bn_layer->initialize(last);
- NN_INI_RETURN_STATUS();
-
- bn_layer->setBiasZero(b_zero);
-
- status = bn_layer->setOptimizer(opt);
- NN_INI_RETURN_STATUS();
-
// fixme: deprecate this.
layers.back()->setBNfollow(true);
const char *acti_str =
iniparser_getstring(ini, (layer_name + ":Activation").c_str(), unknown);
ActiType act = (ActiType)parseType(acti_str, TOKEN_ACTI);
-
layers.back()->setActivation(act);
- status = initActivationLayer(act);
- NN_INI_RETURN_STATUS();
/** Add flatten layer */
bool flatten =
iniparser_getboolean(ini, (layer_name + ":Flatten").c_str(), false);
- if (flatten) {
- status = initFlattenLayer();
- NN_INI_RETURN_STATUS();
- }
+ layers.back()->setFlatten(flatten);
+
previous_dim = layers.back()->getOutputDimension();
}
- /** Add the last layer as loss layer */
- status = initLossLayer();
- NN_INI_RETURN_STATUS();
-
status = data_buffer->setMiniBatch(batch_size);
NN_INI_RETURN_STATUS();
- initialized = true;
iniparser_freedict(ini);
return status;
}
return status;
}
-int NeuralNetwork::init(std::shared_ptr<Optimizer> optimizer,
- std::vector<std::string> arg_list) {
+int NeuralNetwork::init() {
int status = ML_ERROR_NONE;
bool last = false;
TensorDim previous_dim;
- opt = *optimizer.get();
- status = setProperty(arg_list);
- NN_RETURN_STATUS();
/** Note: number of entries in layers will change. */
for (unsigned int i = 0; i < layers.size(); ++i) {
int NeuralNetwork::checkValidation() {
int status = ML_ERROR_NONE;
- if (!config.empty())
- return status;
- if (layers.size()) {
+ if (layers.empty()) {
return ML_ERROR_INVALID_PARAMETER;
} else {
for (std::vector<std::shared_ptr<nntrainer::Layer>>::iterator layer =
layers.begin();
layer != layers.end(); ++layer) {
- if (!(*layer)->checkValidation())
- return ML_ERROR_INVALID_PARAMETER;
+ status = (*layer)->checkValidation();
+ if (status != ML_ERROR_NONE)
+ return status;
}
}
return status;
}
+int NeuralNetwork::setOptimizer(std::shared_ptr<Optimizer> optimizer) {
+
+ if (optimizer->getType() == OptType::unknown)
+ return ML_ERROR_INVALID_PARAMETER;
+
+ if (initialized) {
+ return ML_ERROR_NOT_SUPPORTED;
+ }
+
+ opt = *optimizer.get();
+
+ return ML_ERROR_NONE;
+}
+
std::shared_ptr<Layer>
NeuralNetwork::_make_act_layer(ActiType act, std::shared_ptr<Layer> prev) {
if (layers.back()->getType() == LAYER_ACTIVATION) {
RESET_CONFIG(config_file.c_str());
replaceString("Layers = inputlayer outputlayer",
"Layers = inputlayer outputlayer", config_file, config_str);
- status = ml_nnmodel_construct(&handle);
+ status = ml_nnmodel_construct_with_conf(config_file.c_str(), &handle);
EXPECT_EQ(status, ML_ERROR_NONE);
- status = ml_nnmodel_compile_with_conf(config_file.c_str(), handle);
+ status = ml_nnmodel_compile(handle, NULL);
EXPECT_EQ(status, ML_ERROR_NONE);
status = ml_nnmodel_destruct(handle);
EXPECT_EQ(status, ML_ERROR_NONE);
/**
* @brief Neural Network Model Compile Test
*/
-TEST(nntrainer_capi_nnmodel, compile_02_n) {
+TEST(nntrainer_capi_nnmodel, construct_conf_01_n) {
ml_nnmodel_h handle = NULL;
int status = ML_ERROR_NONE;
std::string config_file = "/test/cannot_find.ini";
- status = ml_nnmodel_construct(&handle);
- EXPECT_EQ(status, ML_ERROR_NONE);
- status = ml_nnmodel_compile_with_conf(config_file.c_str(), handle);
+ status = ml_nnmodel_construct_with_conf(config_file.c_str(), &handle);
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
- status = ml_nnmodel_destruct(handle);
- EXPECT_EQ(status, ML_ERROR_NONE);
}
/**
* @brief Neural Network Model Compile Test
*/
-TEST(nntrainer_capi_nnmodel, compile_03_n) {
+TEST(nntrainer_capi_nnmodel, construct_conf_02_n) {
ml_nnmodel_h handle = NULL;
int status = ML_ERROR_NONE;
std::string config_file = "./test_compile_03_n.ini";
RESET_CONFIG(config_file.c_str());
replaceString("Input_Shape = 32:1:1:62720", "Input_Shape= 32:1:1:0",
config_file, config_str);
- status = ml_nnmodel_construct(&handle);
- EXPECT_EQ(status, ML_ERROR_NONE);
- status = ml_nnmodel_compile_with_conf(config_file.c_str(), handle);
+ status = ml_nnmodel_construct_with_conf(config_file.c_str(), &handle);
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
- status = ml_nnmodel_destruct(handle);
- EXPECT_EQ(status, ML_ERROR_NONE);
}
/**
* @brief Neural Network Model Compile Test
*/
-TEST(nntrainer_capi_nnmodel, compile_04_n) {
+TEST(nntrainer_capi_nnmodel, compile_02_n) {
int status = ML_ERROR_NONE;
std::string config_file = "./test_compile_03_n.ini";
- status = ml_nnmodel_compile_with_conf(config_file.c_str(), NULL);
+ status = ml_nnmodel_compile(NULL);
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
}
"beta1=0.002", "beta2=0.001", "epsilon=1e-7", NULL);
EXPECT_EQ(status, ML_ERROR_NONE);
- status = ml_nnmodel_compile(model, optimizer, "loss=cross", NULL);
+ status = ml_nnmodel_set_optimizer(model, optimizer);
+ EXPECT_EQ(status, ML_ERROR_NONE);
+
+ status = ml_nnmodel_compile(model, "loss=cross", NULL);
EXPECT_EQ(status, ML_ERROR_NONE);
status = ml_nnlayer_delete(layers[0]);
config_file, config_str);
replaceString("minibatch = 32", "minibatch = 16", config_file, config_str);
replaceString("BufferSize=100", "", config_file, config_str);
- status = ml_nnmodel_construct(&handle);
+ status = ml_nnmodel_construct_with_conf(config_file.c_str(), &handle);
EXPECT_EQ(status, ML_ERROR_NONE);
- status = ml_nnmodel_compile_with_conf(config_file.c_str(), handle);
+ status = ml_nnmodel_compile(handle, NULL);
EXPECT_EQ(status, ML_ERROR_NONE);
status = ml_nnmodel_train_with_file(handle, NULL);
EXPECT_EQ(status, ML_ERROR_NONE);
replaceString("Layers = inputlayer outputlayer",
"Layers = inputlayer outputlayer", config_file, config_str);
- status = ml_nnmodel_construct(&model);
+ status = ml_nnmodel_construct_with_conf(config_file.c_str(), &model);
EXPECT_EQ(status, ML_ERROR_NONE);
- status = ml_nnmodel_compile_with_conf(config_file.c_str(), model);
+ status = ml_nnmodel_compile(model, NULL);
EXPECT_EQ(status, ML_ERROR_NONE);
status = ml_nnlayer_create(&layer, ML_LAYER_TYPE_FC);
"beta1=0.002", "beta2=0.001", "epsilon=1e-7", NULL);
EXPECT_EQ(status, ML_ERROR_NONE);
- status = ml_nnmodel_compile(model, optimizer, "loss=cross", NULL);
+ status = ml_nnmodel_set_optimizer(model, optimizer);
+ EXPECT_EQ(status, ML_ERROR_NONE);
+
+ status = ml_nnmodel_compile(model, "loss=cross", NULL);
EXPECT_EQ(status, ML_ERROR_NONE);
status = ml_nnmodel_train_with_file(
"beta1=0.002", "beta2=0.001", "epsilon=1e-7", NULL);
EXPECT_EQ(status, ML_ERROR_NONE);
- status = ml_nnmodel_compile(model, optimizer, "loss=cross", NULL);
+ status = ml_nnmodel_set_optimizer(model, optimizer);
EXPECT_EQ(status, ML_ERROR_NONE);
+
+ status = ml_nnmodel_compile(model, "loss=cross", NULL);
+ EXPECT_EQ(status, ML_ERROR_NONE);
+
status = ml_nnmodel_train_with_generator(
model, getMiniBatch_train, getMiniBatch_val, NULL, "epochs=2",
"batch_size=16", "buffer_size=100", "model_file=model.bin", NULL);
nntrainer::NeuralNetwork NN;
status = NN.setConfig(config_file);
EXPECT_EQ(status, ML_ERROR_NONE);
+ status = NN.loadFromConfig();
+ EXPECT_EQ(status, ML_ERROR_NONE);
status = NN.init();
EXPECT_EQ(status, ML_ERROR_NONE);
}
/**
* @brief Neural Network Model initialization
*/
-TEST(nntrainer_NeuralNetwork, init_02_n) {
+TEST(nntrainer_NeuralNetwork, load_config_01_n) {
int status = ML_ERROR_NONE;
RESET_CONFIG("./test.ini");
replaceString("[Network]", "", "./test.ini", config_str);
nntrainer::NeuralNetwork NN;
status = NN.setConfig("./test.ini");
EXPECT_EQ(status, ML_ERROR_NONE);
- status = NN.init();
+ status = NN.loadFromConfig();
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
}
/**
* @brief Neural Network Model initialization
*/
-TEST(nntrainer_NeuralNetwork, init_03_n) {
+TEST(nntrainer_NeuralNetwork, load_config_02_n) {
int status = ML_ERROR_NONE;
RESET_CONFIG("./test.ini");
replaceString("adam", "aaaadam", "./test.ini", config_str);
nntrainer::NeuralNetwork NN;
status = NN.setConfig("./test.ini");
EXPECT_EQ(status, ML_ERROR_NONE);
- status = NN.init();
+ status = NN.loadFromConfig();
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
}
/**
* @brief Neural Network Model initialization
*/
-TEST(nntrainer_NeuralNetwork, init_04_n) {
+TEST(nntrainer_NeuralNetwork, load_config_03_n) {
int status = ML_ERROR_NONE;
RESET_CONFIG("./test.ini");
replaceString("Input_Shape = 32:1:1:62720", "Input_Shape = 32:1:1:0",
nntrainer::NeuralNetwork NN;
status = NN.setConfig("./test.ini");
EXPECT_EQ(status, ML_ERROR_NONE);
- EXPECT_THROW(NN.init(), std::invalid_argument);
+ EXPECT_THROW(NN.loadFromConfig(), std::invalid_argument);
}
/**
* @brief Neural Network Model initialization
*/
-TEST(nntrainer_NeuralNetwork, init_05_n) {
+TEST(nntrainer_NeuralNetwork, load_config_04_n) {
int status = ML_ERROR_NONE;
RESET_CONFIG("./test.ini");
replaceString("Input_Shape = 32:1:1:62720", "", "./test.ini", config_str);
nntrainer::NeuralNetwork NN;
status = NN.setConfig("./test.ini");
EXPECT_EQ(status, ML_ERROR_NONE);
- status = NN.init();
+ status = NN.loadFromConfig();
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
}
/**
* @brief Neural Network Model initialization
*/
-TEST(nntrainer_NeuralNetwork, init_06_n) {
+TEST(nntrainer_NeuralNetwork, load_config_05_n) {
int status = ML_ERROR_NONE;
RESET_CONFIG("./test.ini");
replaceString("Learning_rate = 0.0001", "Learning_rate = -0.0001",
nntrainer::NeuralNetwork NN;
status = NN.setConfig("./test.ini");
EXPECT_EQ(status, ML_ERROR_NONE);
- status = NN.init();
+ status = NN.loadFromConfig();
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
}
/**
* @brief Neural Network Model initialization
*/
-TEST(nntrainer_NeuralNetwork, init_07_p) {
+TEST(nntrainer_NeuralNetwork, load_config_06_p) {
int status = ML_ERROR_NONE;
RESET_CONFIG("./test.ini");
replaceString("TrainData = trainingSet.dat", "", "./test.ini", config_str);
nntrainer::NeuralNetwork NN;
status = NN.setConfig("./test.ini");
EXPECT_EQ(status, ML_ERROR_NONE);
- status = NN.init();
+ status = NN.loadFromConfig();
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
}
/**
* @brief Neural Network Model initialization
*/
-TEST(nntrainer_NeuralNetwork, init_08_n) {
+TEST(nntrainer_NeuralNetwork, init_02_p) {
int status = ML_ERROR_NONE;
RESET_CONFIG("./test.ini");
replaceString("TestData = testSet.dat", "", "./test.ini", config_str);
nntrainer::NeuralNetwork NN;
status = NN.setConfig("./test.ini");
EXPECT_EQ(status, ML_ERROR_NONE);
+ status = NN.loadFromConfig();
+ EXPECT_EQ(status, ML_ERROR_NONE);
status = NN.init();
EXPECT_EQ(status, ML_ERROR_NONE);
}
/**
* @brief Neural Network Model initialization
*/
-TEST(nntrainer_NeuralNetwork, init_09_n) {
+TEST(nntrainer_NeuralNetwork, load_config_07_n) {
int status = ML_ERROR_NONE;
RESET_CONFIG("./test.ini");
replaceString("LabelData = label.dat", "", "./test.ini", config_str);
nntrainer::NeuralNetwork NN;
status = NN.setConfig("./test.ini");
EXPECT_EQ(status, ML_ERROR_NONE);
- status = NN.init();
+ status = NN.loadFromConfig();
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
}
/**
* @brief Neural Network Model initialization
*/
-TEST(nntrainer_NeuralNetwork, init_10_p) {
+TEST(nntrainer_NeuralNetwork, init_03_p) {
int status = ML_ERROR_NONE;
std::string config_file = "./test.ini";
RESET_CONFIG(config_file.c_str());
nntrainer::NeuralNetwork NN;
status = NN.setConfig(config_file);
EXPECT_EQ(status, ML_ERROR_NONE);
+ status = NN.loadFromConfig();
+ EXPECT_EQ(status, ML_ERROR_NONE);
status = NN.init();
EXPECT_EQ(status, ML_ERROR_NONE);
}
nntrainer::NeuralNetwork NN;
status = NN.setConfig(config_file);
EXPECT_EQ(status, ML_ERROR_NONE);
+ status = NN.loadFromConfig();
+ EXPECT_EQ(status, ML_ERROR_NONE);
status = NN.init();
EXPECT_EQ(status, ML_ERROR_NONE);
}
nntrainer::NeuralNetwork NN;
status = NN.setConfig(config_file);
EXPECT_EQ(status, ML_ERROR_NONE);
+ status = NN.loadFromConfig();
+ EXPECT_EQ(status, ML_ERROR_NONE);
status = NN.init();
EXPECT_EQ(status, ML_ERROR_NONE);
}
nntrainer::NeuralNetwork NN;
status = NN.setConfig(config_file);
EXPECT_EQ(status, ML_ERROR_NONE);
+ status = NN.loadFromConfig();
+ EXPECT_EQ(status, ML_ERROR_NONE);
status = NN.init();
EXPECT_EQ(status, ML_ERROR_NONE);
}