friend class ModelLoader; /**< gives ModelLoader access to this class's private members */
public:
+ using NodeType = std::shared_ptr<Layer>; /**< Type of a Node */
+ using GraphType = std::vector<NodeType>; /**< actual graph type */
+ using FlatGraphType =
+ std::vector<NodeType>; /**< topologically sorted, iterable 1-D list of nodes */
+
/**
* @brief Constructor of NeuralNetwork Class
*/
/**
* @brief add layer into neural network model
+ * @param[in] layer layer to add
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
*/
- int addLayer(std::shared_ptr<Layer> layer);
+ int addLayer(NodeType layer);
/**
* @brief set optimizer for the neural network model
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
*/
- int getLayer(const char *name, std::shared_ptr<Layer> *layer);
+ int getLayer(const char *name, NodeType *layer);
/*
* @brief get input dimension of neural network
TensorDim getOutputDimension() { return layers.back()->getOutputDimension(); }
/**
+ * @brief get FlatGraph of current graph
+ * @note the flat graph holds pointers to the actual nodes; the nodes are not
+ * deeply copied.
+ * @retval flatGraph of the current graph
+ */
+ FlatGraphType getFlatGraph() { return layers; }
+
+ /**
* @brief Set loss type for the neural network.
* @param[in] loss Type of the loss.
* @retval #ML_ERROR_NONE Successful.
NetType net_type; /**< Network Type */
- std::vector<std::shared_ptr<Layer>>
- layers; /**< vector for store layer pointers */
+ GraphType layers; /**< vector to store layer pointers */
std::shared_ptr<DataBuffer> data_buffer; /**< Data Buffer to get Input */
/**
* @brief Ensure that layer has a name
*/
- void ensureName(std::shared_ptr<Layer> layer, std::string prefix = "");
+ void ensureName(NodeType layer, const std::string &prefix = "");
/**
* @brief Swap function for the class
return ML_ERROR_NOT_SUPPORTED;
}
- std::shared_ptr<Layer> act_layer = layers.back();
+ NodeType act_layer = layers.back();
layers.pop_back();
switch (act_layer->getActivationType()) {
std::shared_ptr<LossLayer> loss_layer = std::make_shared<LossLayer>();
ensureName(loss_layer);
- loss_layer->setInputDimension(layers.back()->getOutputDimension());
+ loss_layer->setInputDimension(getOutputDimension());
status = loss_layer->initialize();
NN_RETURN_STATUS();
setBatchSize(batch_size);
/** Setup data buffer properties */
- status =
- data_buffer->setClassNum(layers.back()->getOutputDimension().width());
+ status = data_buffer->setClassNum(getOutputDimension().width());
NN_RETURN_STATUS();
status = data_buffer->setFeatureSize(layers[0]->getInputDimension());
int count = 0;
sharedTensor in = MAKE_SHARED_TENSOR(getInputDimension());
- sharedTensor label =
- MAKE_SHARED_TENSOR(layers.back()->getOutputDimension());
+ sharedTensor label = MAKE_SHARED_TENSOR(getOutputDimension());
while (true) {
if (data_buffer->getDataFromBuffer(nntrainer::BUF_TRAIN, in->getData(),
return ML_ERROR_NONE;
}
-int NeuralNetwork::addLayer(std::shared_ptr<Layer> layer) {
+int NeuralNetwork::addLayer(NodeType layer) {
int status = ML_ERROR_NONE;
if (initialized) {
return ML_ERROR_NONE;
}
-void NeuralNetwork::ensureName(std::shared_ptr<Layer> layer,
- std::string prefix) {
+void NeuralNetwork::ensureName(NodeType layer, const std::string &prefix) {
if (layer->getName().empty()) {
std::set<std::string>::iterator iter;
std::string name;
}
}
-int NeuralNetwork::getLayer(const char *name, std::shared_ptr<Layer> *layer) {
+int NeuralNetwork::getLayer(const char *name, NodeType *layer) {
int status = ML_ERROR_INVALID_PARAMETER;
std::string name_str(name);