}
};
+// Layer functor for the GraphBuilder DSL: invoking it creates a loco::ReLU
+// node in the builder's graph, wires its input to the node on top of the
+// builder's operand stack, and pushes the new node for the next layer.
+struct ReLULayer final
+{
+ // This "Return" is unnecessary for ReLU (ReLU has no attributes), but
+ // introduced for consistency with the Return classes of other layers.
+ class Return
+ {
+ public:
+ Return(loco::ReLU *node) : _node{node}
+ {
+ // DO NOTHING
+ }
+
+ public:
+ // Access the ReLU node created by operator()
+ loco::ReLU *node(void) { return _node; }
+
+ private:
+ loco::ReLU *_node = nullptr;
+ };
+
+ // Create a ReLU node and splice it into the operand stack:
+ // pop the current top as input, push the ReLU as the new top.
+ std::unique_ptr<Return> operator()(GraphBuilder::Context *ctx)
+ {
+ auto relu_node = ctx->graph()->nodes()->create<loco::ReLU>();
+
+ // The node on top of the stack becomes this ReLU's input
+ relu_node->input(ctx->stack()->pop());
+
+ // Expose the new node so subsequent layers can consume it
+ ctx->stack()->push(relu_node);
+
+ return stdex::make_unique<Return>(relu_node);
+ }
+};
+
#endif // __GRAPH_BUILDER_H__
// Create a sample network
_graph = loco::make_graph();
+ auto graph_builder = make_graph_builder(_graph.get());
+
+ pull_node = graph_builder->push<InputLayer>()->name("input")->node();
+ relu_node = graph_builder->push<ReLULayer>()->node();
+ push_node = graph_builder->push<OutputLayer>()->name("output")->node();
+
+// TODO Remove deprecated code
+#if 0
+ // Create a sample network
+ _graph = loco::make_graph();
+
// Create Nodes
pull_node = _graph->nodes()->create<loco::Pull>();
graph_output->name("output");
loco::link(graph_output, push_node);
push_node->index(0);
+#endif
}
public: