$(NNTRAINER_ROOT)/nntrainer/utils/base_properties.cpp \
$(NNTRAINER_ROOT)/nntrainer/compiler/ini_interpreter.cpp \
$(NNTRAINER_ROOT)/nntrainer/compiler/flatten_realizer.cpp \
+ $(NNTRAINER_ROOT)/nntrainer/compiler/recurrent_realizer.cpp \
$(NNTRAINER_ROOT)/nntrainer/app_context.cpp
ifeq ($(ENABLE_TFLITE_INTERPRETER), 1)
graph.push_back(layer);
}
+ /// @todo if graph Model Type is of recurrent_wrapper, parse model and
+ /// realize before return
} catch (...) {
/** clean up and rethrow */
freedict();
compiler_sources = [
'ini_interpreter.cpp',
- 'flatten_realizer.cpp'
+ 'flatten_realizer.cpp',
+ 'recurrent_realizer.cpp'
]
compiler_headers = []
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2021 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file recurrent_realizer.cpp
+ * @date 12 October 2021
+ * @brief NNTrainer graph realizer to create unrolled graph from a graph
+ * realizer
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug No known bugs except for NYI items
+ */
+
+#include <recurrent_realizer.h>
+
+#include <common_properties.h>
+
+#include <nntrainer_error.h>
+#include <node_exporter.h>
+
+namespace nntrainer {
+
+namespace props {
+
+/**
+ * @brief Property check unroll_for
+ *
+ */
+class UnrollFor final : public PositiveIntegerProperty {
+public:
+  static constexpr const char *key = "unroll_for"; /**< property key */
+  using prop_tag = uint_prop_tag;                  /**< property type tag */
+
+  /**
+   * @brief Construct with the number of timesteps to unroll (defaults to 1)
+   */
+  UnrollFor(const unsigned &value = 1) { set(value); }
+};
+
+/**
+ * @brief Property for recurrent inputs
+ *
+ */
+class RecurrentInput final : public Name {
+public:
+  static constexpr const char *key = "recurrent_input"; /**< property key */
+  using prop_tag = str_prop_tag;                        /**< property type tag */
+
+  /**
+   * @brief Construct with no name set
+   */
+  RecurrentInput() = default;
+
+  /**
+   * @brief Construct from the given layer name
+   */
+  RecurrentInput(const std::string &name) { set(name); }
+};
+
+/**
+ * @brief Property for recurrent outputs
+ *
+ */
+class RecurrentOutput final : public Name {
+public:
+  static constexpr const char *key = "recurrent_output"; /**< property key */
+  using prop_tag = str_prop_tag;                         /**< property type tag */
+
+  /**
+   * @brief Construct with no name set
+   */
+  RecurrentOutput() = default;
+
+  /**
+   * @brief Construct from the given layer name
+   */
+  RecurrentOutput(const std::string &name) { set(name); }
+};
+} // namespace props
+
+RecurrentRealizer::RecurrentRealizer(
+  const std::vector<std::string> &properties,
+  const std::vector<std::string> &external_input_layers) :
+  recurrent_props(new PropTypes({}, {}, {}, {}, props::ReturnSequences(false),
+                                props::UnrollFor(1))) {
+  auto left = loadProperties(properties, *recurrent_props);
+
+  /// the first four tuple entries (input_layers, output_layers,
+  /// recurrent_input, recurrent_output) have no meaningful default,
+  /// so they must be supplied explicitly
+  auto throw_if_empty = [](auto &&prop) {
+    if (prop.empty()) {
+      throw std::invalid_argument(
+        "there is unfilled property for recurrent realizer, key: " +
+        std::string(getPropKey(prop)));
+    }
+  };
+
+  throw_if_empty(std::get<0>(*recurrent_props));
+  throw_if_empty(std::get<1>(*recurrent_props));
+  throw_if_empty(std::get<2>(*recurrent_props));
+  throw_if_empty(std::get<3>(*recurrent_props));
+  NNTR_THROW_IF(!left.empty(), std::invalid_argument)
+    << "There are unparsed properties";
+
+  auto &input_layers =
+    std::get<std::vector<props::InputLayer>>(*recurrent_props);
+  auto external_layers = std::vector<props::Name>(external_input_layers.begin(),
+                                                  external_input_layers.end());
+  NNTR_THROW_IF(input_layers.size() != external_layers.size(),
+                std::invalid_argument)
+    << "input_layers and external input_layers size does not match: "
+    << to_string(input_layers) << " vs " << to_string(external_layers);
+
+  /// build the internal-name -> external-name map consumed at realize() time
+  std::transform(input_layers.begin(), input_layers.end(),
+                 external_layers.begin(), std::inserter(id_map, id_map.end()),
+                 [](const std::string &key, const std::string &val) {
+                   return std::pair<std::string, std::string>(key, val);
+                 });
+}
+
+RecurrentRealizer::RecurrentRealizer(
+  const char *ini_path, const std::vector<std::string> &external_input_layers) {
+  /// NYI!
+  /// @note recurrent_props is left null by this constructor; code that
+  /// dereferences it must not run until the ini loading path is implemented
+}
+
+RecurrentRealizer::~RecurrentRealizer() = default;
+
+GraphRepresentation
+RecurrentRealizer::realize(const GraphRepresentation &reference) {
+  /// @todo remap identifier input_layers -> external_input_layers
+  /// (the id_map member built by the constructor holds this mapping)
+
+  /// @todo copy the layers to loop and remap with numbers
+  /// 1. define layer node copy in this context
+  /// 2. copy and remap layers to be looped
+
+  /// @todo if return sequence is true, remap identifier and concat output
+  /// layers
+
+  /// NYI! currently a pass-through: the reference is returned unchanged
+  return reference;
+}
+
+} // namespace nntrainer
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2021 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file recurrent_realizer.h
+ * @date 12 October 2021
+ * @brief NNTrainer graph realizer to create unrolled graph from a graph
+ * realizer
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug No known bugs except for NYI items
+ */
+#ifndef __RECURRENT_REALIZER_H__
+#define __RECURRENT_REALIZER_H__
+
+#include <realizer.h>
+
+#include <memory>
+#include <string>
+#include <tuple>
+#include <vector>
+#include <unordered_map>
+
+namespace nntrainer {
+
+namespace props {
+class UnrollFor;
+class ReturnSequences;
+class InputLayer;
+class OutputLayer;
+class RecurrentInput;
+class RecurrentOutput;
+} // namespace props
+
+/**
+ * @brief Recurrent Realizer which unrolls graph from given graph representation
+ *
+ */
+class RecurrentRealizer final : public GraphRealizer {
+public:
+  /**
+   * @brief Construct a new Recurrent Realizer object
+   *
+   * @param properties
+   * unroll_for = <int> // define timestep of unrolling
+   * return_sequences = <bool> // return sequences
+   * input_layers = <vector<std::string>> // internal input name
+   * output_layers = <vector<std::string>> // internal output name
+   * recurrent_input = <std::string> // start of the loop
+   * recurrent_output = <std::string> // end of the loop
+   * @param external_input_layers input layer from outer side
+   */
+  RecurrentRealizer(const std::vector<std::string> &properties,
+                    const std::vector<std::string> &external_input_layers);
+
+  /**
+   * @brief Construct a new Recurrent Realizer object
+   *
+   * @param ini ini to load recurrent properties from
+   * @param external_input_layers external input layers to map input layers
+   */
+  RecurrentRealizer(const char *ini,
+                    const std::vector<std::string> &external_input_layers);
+
+  /**
+   * @brief Destroy the Recurrent Realizer object
+   *
+   */
+  ~RecurrentRealizer();
+
+  /**
+   * @brief realized graph
+   *
+   * @param reference reference to realize graph
+   * @return GraphRepresentation realized graph
+   */
+  GraphRepresentation realize(const GraphRepresentation &reference) override;
+
+private:
+  using PropTypes =
+    std::tuple<std::vector<props::InputLayer>, std::vector<props::OutputLayer>,
+               props::RecurrentInput,
+               props::RecurrentOutput, props::ReturnSequences,
+               props::UnrollFor>;
+
+  std::unique_ptr<PropTypes> recurrent_props; /**< recurrent properties */
+  std::unordered_map<std::string, std::string> id_map; /**< internal input name to external input name map */
+};
+
+} // namespace nntrainer
+
+#endif // __RECURRENT_REALIZER_H__
InputLayer::InputLayer() : Name() {}
InputLayer::InputLayer(const std::string &name) : Name(name) {}
+// OutputLayer delegates name storage/validation to the Name property base
+OutputLayer::OutputLayer() : Name() {}
+OutputLayer::OutputLayer(const std::string &name) : Name(name) {}
+
LabelLayer::LabelLayer() : Name() {}
LabelLayer::LabelLayer(const std::string &name) : Name(name) {}
using prop_tag = str_prop_tag;
};
+/**
+ * @brief Output Layer name property which saves a single connection
+ * (practically, std::vector<OutputLayer> is used)
+ *
+ */
+class OutputLayer : public Name {
+public:
+  OutputLayer();
+  OutputLayer(const std::string &name);
+  static constexpr const char *key = "output_layers";
+  using prop_tag = str_prop_tag;
+};
+
/**
* @brief label Layer name property which saves a single
* connection (practically, std::vector<LabelLayers> is used)
#include <flatten_realizer.h>
#include <realizer.h>
+#include <recurrent_realizer.h>
#include <compiler_test_util.h>
graphEqual(processed, expected_graph);
}
-TEST(flattenRealizer, flatten_p) {
+TEST(FlattenRealizer, flatten_p) {
FlattenRealizer fr;
LayerRepresentation input1 = {"fully_connected",
realizeAndEqual(fr, {input1}, {expected1, expected2});
}
+
+TEST(RecurrentRealizer, recurrent_p) {
+  /// smoke test: constructing with a full property set must not throw, and
+  /// realize() currently passes the representation through unchanged
+  RecurrentRealizer r({"unroll_for=3", "return_sequences=true",
+                       "input_layers=fc1", "output_layers=fc2",
+                       "recurrent_input=fc_1", "recurrent_output=fc_2"},
+                      {"outter_input"});
+
+  LayerRepresentation fc = {"fully_connected", {"name=layer1", "flatten=true"}};
+
+  realizeAndEqual(r, {fc}, {fc});
+}