private:
/**
+ * @brief load all of model from given config file
+ * @param[in] config config file path
+ * @param[in,out] model model to be loaded
+ * @param[in] bare_layers load only the layers as backbone if enabled
+ */
+ int loadFromConfig(std::string config, NeuralNetwork &model,
+ bool bare_layers);
+
+ /**
* @brief load all of model and dataset from ini
* @param[in] config config file path
* @param[in,out] model model to be loaded
*/
- int loadFromIni(std::string ini_file, NeuralNetwork &model);
+ int loadFromIni(std::string ini_file, NeuralNetwork &model, bool bare_layers);
/**
* @brief load layer config from ini
* @param[in] layer_name name of the layer to be loaded
*/
int loadLayerConfigIni(dictionary *ini, std::shared_ptr<Layer> &layer,
- std::string layer_name);
+ const std::string &layer_name);
+
+ /**
+ * @brief load backbone config from ini
+ * @param[in] backbone_config config file containing the backbone config
+ * @param[in,out] model model to add the backbone to
+ * @param[in] backbone_name name of the backbone to be loaded
+ */
+ int loadBackboneConfigIni(const std::string &backbone_config,
+ NeuralNetwork &model,
+ const std::string &backbone_name);
const char *unknown = "Unknown";
};
int addLayer(NodeType layer);
/**
+ * @brief join passed graph into the existing graph model
+ * @param[in] graph graph to be added/to extend
+ * @param[in] prefix prefix to be added to the names of the layers in the graph
+ * @note It is assumed that this model is valid by itself
+ * @retval #ML_ERROR_NONE Successful.
+ * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
+ */
+ int extendGraph(GraphType graph, std::string prefix = "");
+
+ /**
* @brief set optimizer for the neural network model
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
FlatGraphType getFlatGraph() { return layers; }
/**
+ * @brief get current graph from the model
+ * @note graph contains pointer to the actual nodes, which is not deeply
+ * copied.
+ * @retval current graph
+ */
+ GraphType getGraph() { return layers; }
+
+ /**
* @brief Set loss type for the neural network.
* @param[in] loss Type of the loss.
* @retval #ML_ERROR_NONE Successful.
/**
 * @brief Ensure that layer has a name, generating or renaming to keep it unique
 * @param[in] prefix prefix to be prepended to the layer name
 * @param[in] force_rename rename the layer even if it already has a valid name
 */
- void ensureName(NodeType layer, const std::string &prefix = "");
+ void ensureName(NodeType layer, const std::string &prefix = "",
+ bool force_rename = false);
/**
* @brief Swap function for the class
int ModelLoader::loadLayerConfigIni(dictionary *ini,
std::shared_ptr<Layer> &layer,
- std::string layer_name) {
+ const std::string &layer_name) {
int status = ML_ERROR_NONE;
std::string layer_type_str =
return ML_ERROR_NONE;
}
+int ModelLoader::loadBackboneConfigIni(const std::string &backbone_config,
+ NeuralNetwork &model,
+ const std::string &backbone_name) {
+ int status = ML_ERROR_NONE;
+ NeuralNetwork backbone;
+
+ status = loadFromConfig(backbone_config, backbone, true);
+ NN_RETURN_STATUS();
+
+ status = model.extendGraph(backbone.getGraph(), backbone_name);
+ NN_RETURN_STATUS();
+
+ return ML_ERROR_NONE;
+}
+
/**
* @brief load all of model and dataset from ini
*/
-int ModelLoader::loadFromIni(std::string ini_file, NeuralNetwork &model) {
+int ModelLoader::loadFromIni(std::string ini_file, NeuralNetwork &model,
+ bool bare_layers) {
int status = ML_ERROR_NONE;
int num_ini_sec = 0;
dictionary *ini;
NN_INI_RETURN_STATUS();
}
- status = loadModelConfigIni(ini, model);
- NN_INI_RETURN_STATUS();
+ if (!bare_layers) {
+ status = loadModelConfigIni(ini, model);
+ NN_INI_RETURN_STATUS();
- status = loadDatasetConfigIni(ini, model);
- NN_INI_RETURN_STATUS();
+ status = loadDatasetConfigIni(ini, model);
+ NN_INI_RETURN_STATUS();
+ }
ml_logd("parsing ini started");
/** Get all the section names */
ml_logi("not-allowed property for the layer throws error");
ml_logi("valid property with invalid value throws error as well");
for (int idx = 0; idx < num_ini_sec; ++idx) {
- const char *sec_name = iniparser_getsecname(ini, idx);
- ml_logd("probing section name: %s", sec_name);
+ std::string sec_name = iniparser_getsecname(ini, idx);
+ ml_logd("probing section name: %s", sec_name.c_str());
- if (!sec_name) {
+ if (sec_name.empty()) {
ml_loge("Error: Unable to retrieve section names from ini.");
status = ML_ERROR_INVALID_PARAMETER;
NN_INI_RETURN_STATUS();
}
- if (strncasecmp(model_str, sec_name, model_len) == 0)
+ if (strncasecmp(model_str, sec_name.c_str(), model_len) == 0)
continue;
- if (strncasecmp(dataset_str, sec_name, dataset_len) == 0)
+ if (strncasecmp(dataset_str, sec_name.c_str(), dataset_len) == 0)
continue;
+ /**
+ * If this section is a backbone, load backbone section from this
+ * @note The order of backbones in the ini file defines the order on the
+ * backbones in the model graph
+ */
+ const char *backbone =
+ iniparser_getstring(ini, (sec_name + ":Backbone").c_str(), nullptr);
+ if (backbone != nullptr) {
+ loadBackboneConfigIni(backbone, model, sec_name);
+ continue;
+ }
+
/** Parse all the layers defined as sections in order */
std::shared_ptr<Layer> layer;
status = loadLayerConfigIni(ini, layer, sec_name);
* @brief load all of model and dataset from given config file
*/
int ModelLoader::loadFromConfig(std::string config, NeuralNetwork &model) {
+ return loadFromConfig(config, model, false);
+}
+
+/**
+ * @brief load all of model and dataset from given config file
+ * @note when bare_layers is true, only the layer sections are loaded
+ */
+int ModelLoader::loadFromConfig(std::string config, NeuralNetwork &model,
+ bool bare_layers) {
size_t position = config.find_last_of(".");
if (position == std::string::npos)
throw std::invalid_argument("Extension missing in config file");
if (config.substr(position + 1) == "ini") {
- return loadFromIni(config, model);
+ return loadFromIni(config, model, bare_layers);
}
return ML_ERROR_INVALID_PARAMETER;
return ML_ERROR_NOT_SUPPORTED;
}
+ /** Ensure that the layer has a name and is unique */
ensureName(layer);
/** Validate the layer to be added */
return status;
}
+int NeuralNetwork::extendGraph(GraphType graph, std::string prefix) {
+ if (initialized) {
+ return ML_ERROR_NOT_SUPPORTED;
+ }
+
+ /** Insert the layer to the graph */
+ for (auto layer : graph) {
+ /**
+ * Add prefix to the existing layer name,
+ * and ensure it is unique in this new graph
+ */
+ ensureName(layer, prefix, true);
+
+ layers.push_back(layer);
+ }
+
+ return ML_ERROR_NONE;
+}
+
int NeuralNetwork::setOptimizer(
std::shared_ptr<ml::train::Optimizer> optimizer) {
return ML_ERROR_NONE;
}
-void NeuralNetwork::ensureName(NodeType layer, const std::string &prefix) {
- if (layer->getName().empty()) {
- std::set<std::string>::iterator iter;
- std::string name;
-
- do {
- name = prefix + layer->getBaseName() + std::to_string(def_name_count++);
- iter = layer_names.find(name);
- } while (iter != layer_names.end());
+void NeuralNetwork::ensureName(NodeType layer, const std::string &prefix,
+ bool force_rename) {
+ std::string orig_name = layer->getName();
+ bool orig_name_empty = orig_name.empty();
+ if (!orig_name_empty && !force_rename &&
+ layer_names.end() == layer_names.find(orig_name))
+ return;
- layer->setName(name);
+ /** If just prefix with layer name makes it unique - directly set the name */
+ if (!orig_name_empty) {
+ std::string direct_name = prefix + orig_name;
+ if (layer_names.find(direct_name) != layer_names.end()) {
+ layer->setName(direct_name);
+ return;
+ }
}
+
+ std::set<std::string>::iterator iter;
+ std::string name;
+ if (orig_name_empty)
+ orig_name = layer->getBaseName();
+ std::string direct_name = prefix + orig_name;
+
+ do {
+ name = direct_name + std::to_string(def_name_count++);
+ iter = layer_names.find(name);
+ } while (iter != layer_names.end());
+
+ layer->setName(name);
}
int NeuralNetwork::getLayer(const char *name,