From: Parichay Kapoor
Date: Thu, 22 Oct 2020 08:28:26 +0000 (+0900)
Subject: [model] Add support of ini based backbone to the model
X-Git-Tag: accepted/tizen/unified/20201120.125454~52
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=4b0073b82dda23922983df8a3e6f3a3467984928;p=platform%2Fcore%2Fml%2Fnntrainer.git

[model] Add support of ini based backbone to the model

This patch adds support for ini-based backbones to the neural network model.

From the point of view of the ini file, a backbone is treated as a layer
itself, which allows a whole graph of layers to be represented as a single
layer in the ini file. With this design, a backbone must be specified as a
layer with the property `backbone`, as in the sample pseudo-ini below:

```ini
[Block1]
backbone: base_block.ini

[PoolLayer]
type: pooling2d

[Block2]
backbone: base_block.ini
```

ModelLoader loads the layer configuration from the backbone ini independently
and then extends the existing graph of the main model with the newly created
graph. The names of all layers inserted from a backbone into the model are
prefixed with the backbone's name, which makes it easier for the user to
identify and manage the layers coming from a backbone.

The patch allows nested backbones as well as multiple backbones in a model
description (a pseudo-ini sketch of nesting follows the sign-off below).
Unit tests for this backbone support will follow in the next patch.

See also #660

**Self evaluation:**
1. Build test: [x]Passed [ ]Failed [ ]Skipped
2. Run test: [x]Passed [ ]Failed [ ]Skipped

Signed-off-by: Parichay Kapoor
---
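
The pseudo-ini below is a sketch of how nesting can compose under this design;
the file name `base_cell.ini` and the layer types are illustrative assumptions,
not names taken from this patch. The point is only that a backbone file is an
ordinary layer-only description, so it may reference yet another backbone, and
that each backbone section name becomes the prefix of the layers it pulls in.

```ini
# base_block.ini -- a layer-only description loaded as bare layers.
# It may itself reference another backbone, which is what enables nesting.
[SubBlock]
backbone: base_cell.ini

[Conv]
type: conv2d

# main_model.ini -- Block1 and Block2 are expanded in the order they appear;
# layers imported from each backbone are prefixed with that section name,
# so the two copies of base_block.ini stay distinct in the final graph.
[Block1]
backbone: base_block.ini

[PoolLayer]
type: pooling2d

[Block2]
backbone: base_block.ini
```
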
diff --git a/nntrainer/include/model_loader.h b/nntrainer/include/model_loader.h
index 7f1c850..e3ed36a 100644
--- a/nntrainer/include/model_loader.h
+++ b/nntrainer/include/model_loader.h
@@ -45,11 +45,20 @@ public:
 
 private:
   /**
+   * @brief load all of model from given config file
+   * @param[in] config config file path
+   * @param[in/out] model model to be loaded
+   * @param[in] bare_layers load only the layers as backbone if enabled
+   */
+  int loadFromConfig(std::string config, NeuralNetwork &model,
+                     bool bare_layers);
+
+  /**
    * @brief load all of model and dataset from ini
    * @param[in] config config file path
    * @param[in/out] model model to be loaded
    */
-  int loadFromIni(std::string ini_file, NeuralNetwork &model);
+  int loadFromIni(std::string ini_file, NeuralNetwork &model, bool bare_layers);
 
   /**
    * @brief load dataset config from ini
@@ -72,7 +81,17 @@ private:
    * @param[in] layer_name name of the layer to be loaded
    */
   int loadLayerConfigIni(dictionary *ini, std::shared_ptr<Layer> &layer,
-                         std::string layer_name);
+                         const std::string &layer_name);
+
+  /**
+   * @brief load backbone config from ini
+   * @param[in] backbone_config config file containing the backbone config
+   * @param[in/out] model model to be added the backbone to
+   * @param[in] backbone_name name of the backbone to be loaded
+   */
+  int loadBackboneConfigIni(const std::string &backbone_config,
+                            NeuralNetwork &model,
+                            const std::string &backbone_name);
 
   const char *unknown = "Unknown";
 };
diff --git a/nntrainer/include/neuralnet.h b/nntrainer/include/neuralnet.h
index e0edbc1..3d9525d 100644
--- a/nntrainer/include/neuralnet.h
+++ b/nntrainer/include/neuralnet.h
@@ -243,6 +243,15 @@ public:
   int addLayer(NodeType layer);
 
   /**
+   * @brief join passed graph into the existing graph model
+   * @param[in] graph graph to be added/to extend
+   * @note It is assumed that this model is valid by itself
+   * @retval #ML_ERROR_NONE Successful.
+   * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
+   */
+  int extendGraph(GraphType graph, std::string prefix = "");
+
+  /**
    * @brief set optimizer for the neural network model
    * @retval #ML_ERROR_NONE Successful.
   * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
@@ -288,6 +297,14 @@ public:
   FlatGraphType getFlatGraph() { return layers; }
 
   /**
+   * @brief get current graph from the model
+   * @note graph contains pointer to the actual nodes, which is not deeply
+   * copied.
+   * @retval current graph
+   */
+  GraphType getGraph() { return layers; }
+
+  /**
    * @brief Set loss type for the neural network.
    * @param[in] loss Type of the loss.
    * @retval #ML_ERROR_NONE Successful.
@@ -424,7 +441,8 @@ private:
   /**
    * @brief Ensure that layer has a name
    */
-  void ensureName(NodeType layer, const std::string &prefix = "");
+  void ensureName(NodeType layer, const std::string &prefix = "",
+                  bool force_rename = false);
 
   /**
    * @brief Swap function for the class
diff --git a/nntrainer/src/model_loader.cpp b/nntrainer/src/model_loader.cpp
index 7d2f858..72848f0 100644
--- a/nntrainer/src/model_loader.cpp
+++ b/nntrainer/src/model_loader.cpp
@@ -165,7 +165,7 @@ int ModelLoader::loadDatasetConfigIni(dictionary *ini, NeuralNetwork &model) {
 
 int ModelLoader::loadLayerConfigIni(dictionary *ini,
                                     std::shared_ptr<Layer> &layer,
-                                    std::string layer_name) {
+                                    const std::string &layer_name) {
   int status = ML_ERROR_NONE;
 
   std::string layer_type_str =
@@ -215,10 +215,26 @@ int ModelLoader::loadLayerConfigIni(dictionary *ini,
   return ML_ERROR_NONE;
 }
 
+int ModelLoader::loadBackboneConfigIni(const std::string &backbone_config,
+                                       NeuralNetwork &model,
+                                       const std::string &backbone_name) {
+  int status = ML_ERROR_NONE;
+  NeuralNetwork backbone;
+
+  status = loadFromConfig(backbone_config, backbone, true);
+  NN_RETURN_STATUS();
+
+  status = model.extendGraph(backbone.getGraph(), backbone_name);
+  NN_RETURN_STATUS();
+
+  return ML_ERROR_NONE;
+}
+
 /**
  * @brief load all of model and dataset from ini
  */
-int ModelLoader::loadFromIni(std::string ini_file, NeuralNetwork &model) {
+int ModelLoader::loadFromIni(std::string ini_file, NeuralNetwork &model,
+                             bool bare_layers) {
   int status = ML_ERROR_NONE;
   int num_ini_sec = 0;
   dictionary *ini;
@@ -253,11 +269,13 @@ int ModelLoader::loadFromIni(std::string ini_file, NeuralNetwork &model) {
     NN_INI_RETURN_STATUS();
   }
 
-  status = loadModelConfigIni(ini, model);
-  NN_INI_RETURN_STATUS();
+  if (!bare_layers) {
+    status = loadModelConfigIni(ini, model);
+    NN_INI_RETURN_STATUS();
 
-  status = loadDatasetConfigIni(ini, model);
-  NN_INI_RETURN_STATUS();
+    status = loadDatasetConfigIni(ini, model);
+    NN_INI_RETURN_STATUS();
+  }
 
   ml_logd("parsing ini started");
   /** Get all the section names */
@@ -266,21 +284,33 @@ int ModelLoader::loadFromIni(std::string ini_file, NeuralNetwork &model) {
   ml_logi("not-allowed property for the layer throws error");
   ml_logi("valid property with invalid value throws error as well");
   for (int idx = 0; idx < num_ini_sec; ++idx) {
-    const char *sec_name = iniparser_getsecname(ini, idx);
-    ml_logd("probing section name: %s", sec_name);
+    std::string sec_name = iniparser_getsecname(ini, idx);
+    ml_logd("probing section name: %s", sec_name.c_str());
 
-    if (!sec_name) {
+    if (sec_name.empty()) {
       ml_loge("Error: Unable to retrieve section names from ini.");
       status = ML_ERROR_INVALID_PARAMETER;
       NN_INI_RETURN_STATUS();
     }
 
-    if (strncasecmp(model_str, sec_name, model_len) == 0)
+    if (strncasecmp(model_str, sec_name.c_str(), model_len) == 0)
      continue;
 
-    if (strncasecmp(dataset_str, sec_name, dataset_len) == 0)
+    if (strncasecmp(dataset_str, sec_name.c_str(), dataset_len) == 0)
      continue;
 
+    /**
+     * If this section is a backbone, load backbone section from this
+     * @note The order of backbones in the ini file defines the order on the
+     * backbones in the model graph
+     */
+    const char *backbone =
+      iniparser_getstring(ini, (sec_name + ":Backbone").c_str(), nullptr);
+    if (backbone != nullptr) {
+      loadBackboneConfigIni(backbone, model, sec_name);
+      continue;
+    }
+
     /** Parse all the layers defined as sections in order */
     std::shared_ptr<Layer> layer;
     status = loadLayerConfigIni(ini, layer, sec_name);
@@ -304,12 +334,20 @@ int ModelLoader::loadFromIni(std::string ini_file, NeuralNetwork &model) {
  * @brief load all of model and dataset from given config file
  */
 int ModelLoader::loadFromConfig(std::string config, NeuralNetwork &model) {
+  return loadFromConfig(config, model, false);
+}
+
+/**
+ * @brief load all of model and dataset from given config file
+ */
+int ModelLoader::loadFromConfig(std::string config, NeuralNetwork &model,
+                                bool bare_layers) {
   size_t position = config.find_last_of(".");
   if (position == std::string::npos)
     throw std::invalid_argument("Extension missing in config file");
 
   if (config.substr(position + 1) == "ini") {
-    return loadFromIni(config, model);
+    return loadFromIni(config, model, bare_layers);
   }
 
   return ML_ERROR_INVALID_PARAMETER;
diff --git a/nntrainer/src/neuralnet.cpp b/nntrainer/src/neuralnet.cpp
index 6d7862b..9a2d9f5 100644
--- a/nntrainer/src/neuralnet.cpp
+++ b/nntrainer/src/neuralnet.cpp
@@ -565,6 +565,7 @@ int NeuralNetwork::addLayer(NodeType layer) {
     return ML_ERROR_NOT_SUPPORTED;
   }
 
+  /** Ensure that the layer has a name and is unique */
   ensureName(layer);
 
   /** Validate the layer to be added */
@@ -588,6 +589,25 @@ int NeuralNetwork::addLayer(NodeType layer) {
   return status;
 }
 
+int NeuralNetwork::extendGraph(GraphType graph, std::string prefix) {
+  if (initialized) {
+    return ML_ERROR_NOT_SUPPORTED;
+  }
+
+  /** Insert the layer to the graph */
+  for (auto layer : graph) {
+    /**
+     * Add prefix to the existing layer name,
+     * and ensure it is unique in this new graph
+     */
+    ensureName(layer, prefix, true);
+
+    layers.push_back(layer);
+  }
+
+  return ML_ERROR_NONE;
+}
+
 int NeuralNetwork::setOptimizer(
   std::shared_ptr<Optimizer> optimizer) {
 
@@ -609,18 +629,35 @@ int NeuralNetwork::setDataBuffer(std::shared_ptr<DataBuffer> data_buffer) {
   return ML_ERROR_NONE;
 }
 
-void NeuralNetwork::ensureName(NodeType layer, const std::string &prefix) {
-  if (layer->getName().empty()) {
-    std::set<std::string>::iterator iter;
-    std::string name;
-
-    do {
-      name = prefix + layer->getBaseName() + std::to_string(def_name_count++);
-      iter = layer_names.find(name);
-    } while (iter != layer_names.end());
+void NeuralNetwork::ensureName(NodeType layer, const std::string &prefix,
+                               bool force_rename) {
+  std::string orig_name = layer->getName();
+  bool orig_name_empty = orig_name.empty();
+  if (!orig_name_empty && !force_rename &&
+      layer_names.end() == layer_names.find(orig_name))
+    return;
 
-    layer->setName(name);
+  /** If just prefix with layer name makes it unique - directly set the name */
+  if (!orig_name_empty) {
+    std::string direct_name = prefix + orig_name;
+    if (layer_names.find(direct_name) != layer_names.end()) {
+      layer->setName(direct_name);
+      return;
+    }
   }
+
+  std::set<std::string>::iterator iter;
+  std::string name;
+  if (orig_name_empty)
+    orig_name = layer->getBaseName();
+  std::string direct_name = prefix + orig_name;
+
+  do {
+    name = direct_name + std::to_string(def_name_count++);
+    iter = layer_names.find(name);
+  } while (iter != layer_names.end());
+
+  layer->setName(name);
 }
 
 int NeuralNetwork::getLayer(const char *name,
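
The ensureName() rework in the last hunk is what renames layers when a backbone
graph is merged through extendGraph(). The standalone program below is a
simplified sketch of that naming scheme (prefix the original layer name with
the backbone section name, and fall back to a numbered name on a collision);
the function and variable names are invented for illustration, and it is not a
copy of the nntrainer implementation.

```cpp
// Sketch only: emulate prefix-based renaming with a fallback counter,
// the behaviour the commit message describes for backbone layers.
#include <iostream>
#include <set>
#include <string>

// Return "prefix + name" if it is still free, otherwise append an
// increasing counter until a free name is found, and register the result.
std::string uniqueName(std::set<std::string> &taken, const std::string &prefix,
                       const std::string &name, unsigned &counter) {
  std::string candidate = prefix + name;
  while (taken.count(candidate))
    candidate = prefix + name + std::to_string(counter++);
  taken.insert(candidate);
  return candidate;
}

int main() {
  std::set<std::string> taken;
  unsigned counter = 0;
  // Two backbones built from the same base_block.ini, each holding a "conv" layer.
  std::cout << uniqueName(taken, "Block1", "conv", counter) << '\n'; // Block1conv
  std::cout << uniqueName(taken, "Block2", "conv", counter) << '\n'; // Block2conv
  // Re-using a prefix/name pair triggers the numbered fallback.
  std::cout << uniqueName(taken, "Block1", "conv", counter) << '\n'; // Block1conv0
  return 0;
}
```

Keeping one registry of taken names across the whole model is what lets the
same backbone file be instantiated several times without name clashes.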