This patch applies the previous input realizer when compiling the model, replacing the implicit NetworkGraph::addDefaultInputLayers() step.
**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped
Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
* @note This method does not add the provided layers themselves but adds a
* deep copy of the passed layers to the model. The layers passed to this
* function can be reused later.
+ * @note @a reference is a set of layers connected to each other to form
+ * part of or the whole of a graph, which can be loaded into a model and
+ * run. More specifically, the graph must be directed and acyclic, with
+ * every node having its incoming or outgoing connections defined, given
+ * that a non-input layer with no explicit connection is treated as
+ * directly connected to the previous layer. An 'input layer' is a layer
+ * that has no incoming connection; it is identified via @a start_layers
+ * or by specifying its input shape explicitly.
*
* @param reference a group of layers being referred to.
* @param type type of reference layers
$(NNTRAINER_ROOT)/nntrainer/compiler/ini_interpreter.cpp \
$(NNTRAINER_ROOT)/nntrainer/compiler/flatten_realizer.cpp \
$(NNTRAINER_ROOT)/nntrainer/compiler/recurrent_realizer.cpp \
+ $(NNTRAINER_ROOT)/nntrainer/compiler/previous_input_realizer.cpp \
$(NNTRAINER_ROOT)/nntrainer/compiler/remap_realizer.cpp \
$(NNTRAINER_ROOT)/nntrainer/compiler/slice_realizer.cpp \
$(NNTRAINER_ROOT)/nntrainer/compiler/input_realizer.cpp \
}
}
-void NetworkGraph::addDefaultInputLayers() {
- for (auto iter = cbegin() + 1; iter != cend(); iter++) {
- auto layer = *iter;
- auto prev_layer = *(iter - 1);
- if (layer->getNumInputConnections() == 0 &&
- !layer->hasInputShapeProperty()) {
- ml_logd("default input added %s->%s", prev_layer->getName().c_str(),
- layer->getName().c_str());
- layer->addInputLayers(prev_layer->getName());
- }
- }
-}
-
void NetworkGraph::addLayerNode(std::unique_ptr<Layer> layer) {
graph.addNode(std::make_unique<LayerNode>(std::move(layer)));
}
int NetworkGraph::realizeGraph() {
int status = ML_ERROR_NONE;
- addDefaultInputLayers();
-
/**
* invariant: the new realized nodes are added to the end,
* otherwise this iteration becomes invalid. So, every iteration must be
void setOutputLayers();
/**
- * @brief set default input layer connections
- */
- void addDefaultInputLayers();
-
- /**
* @brief Ensure that layer has a name.
* @param[in] layer Layer whose name is to be ensured to be valid
* @param[in] prefix Prefix to be attached to the layer name
#include <nntrainer_log.h>
#include <node_exporter.h>
#include <optimizer_context.h>
+#include <previous_input_realizer.h>
#include <profiler.h>
#include <recurrent_realizer.h>
#include <remap_realizer.h>
? std::string()
: std::get<props::LossType>(model_props);
+ auto &input_layer_prop =
+ std::get<std::vector<props::InputLayer>>(model_props);
+ /// @note the label layer might need to be treated in a similar way as well
+
+ std::vector<std::string> input_layers = {};
+ if (!input_layer_prop.empty()) {
+ input_layers = std::vector<std::string>(input_layer_prop.begin(),
+ input_layer_prop.end());
+ }
+
/// @todo make NetworkGraph compiled at the construction instead of having
/// graph.compile(), neuralnetwork have ownership of list of layer nodes,
/// which will be passed at compile time.
rep.push_back(*iter);
}
- FlattenRealizer fr;
- rep = fr.realize(rep);
+ std::vector<std::unique_ptr<GraphRealizer>> realizers;
+
+ realizers.emplace_back(new PreviousInputRealizer(input_layers));
+ realizers.emplace_back(new FlattenRealizer());
+
+ for (auto &realizer : realizers) {
+ rep = realizer->realize(rep);
+ }
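Conceptually, the PreviousInputRealizer added to this chain takes over what the removed NetworkGraph::addDefaultInputLayers() did. Below is a minimal sketch of that behavior, modeled directly on the removed loop; it omits the explicit start/input layer handling for brevity, and GraphRepresentation is assumed to be the vector of layer nodes passed through realize():

```cpp
// Sketch, not the actual implementation: connect every node that has neither
// an incoming connection nor an explicit input shape to the node preceding
// it in the representation, mirroring the removed addDefaultInputLayers().
GraphRepresentation sketchPreviousInputRealize(GraphRepresentation rep) {
  for (auto iter = rep.begin() + 1; iter != rep.end(); ++iter) {
    auto &node = *iter;
    auto &prev_node = *(iter - 1);
    if (node->getNumInputConnections() == 0 &&
        !node->hasInputShapeProperty()) {
      node->addInputLayers(prev_node->getName());
    }
  }
  return rep;
}
```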
model_graph = NetworkGraph();
model_graph.setMemoryOptimizations(
forwarding(false);
};
- auto update_eval_stat = [this, batch_size, &update_train_stat](
+ auto update_eval_stat = [batch_size, &update_train_stat](
RunStats &stat, const std::vector<Tensor> &outputs,
const std::vector<Tensor> &labels) {
auto model_out = outputs[0].argmax();
auto end_layers_ = normalize(end_layers);
std::vector<std::unique_ptr<GraphRealizer>> realizers;
+
+ realizers.emplace_back(new PreviousInputRealizer(start_layers));
realizers.emplace_back(new SliceRealizer(start_layers_, end_layers_));
if (!input_layers_.empty()) {
"bias_initializer=ones", "weight_initializer=ones"});
auto fc1_zeroed = LayerRepresentation(
- "fully_connected",
- {"name=fc1", "unit=2", "bias_initializer=ones", "weight_initializer=ones"});
+ "fully_connected", {"name=fc1", "unit=2", "bias_initializer=ones",
+ "weight_initializer=ones", "input_layers=fc0"});
auto g = makeGraph({fc0_zeroed, fc1_zeroed});
PreviousInputRealizer r({"fc1", "fc4"});
realizeAndEqual(r, before, after);
}
+ { /// model input appears as an intermediate node; unconnected layers are auto connected
+ std::vector<LayerRepresentation> before = {
+ {"fully_connected",
+ {"name=fc1", "input_layers=fc2"}}, // intermediate node
+ {"fully_connected", {"name=fc2", "input_shape=1"}}, // model input
+ {"fully_connected", {"name=fc3"}}, // auto connected to fc3
+ {"fully_connected", {"name=fc4"}}, // auto connected to fc 3
+ };
+
+ std::vector<LayerRepresentation> after = {
+ {"fully_connected", {"name=fc1", "input_layers=fc2"}},
+ {"fully_connected", {"name=fc2", "input_shape=1"}},
+ {"fully_connected", {"name=fc3", "input_layers=fc2"}},
+ {"fully_connected", {"name=fc4", "input_layers=fc3"}},
+ };
+ PreviousInputRealizer r({});
+ realizeAndEqual(r, before, after);
+ }
}
TEST(PreviousInputRealizer, user_not_identifying_first_input_n) {