From 8831ad843856e38bd9f34c01fa833f65b66e4b59 Mon Sep 17 00:00:00 2001
From: DongHak Park
Date: Tue, 14 Feb 2023 19:57:40 +0900
Subject: [PATCH] [Flatbuffer] Add flatbuffer_opnode

Add flatbuffer_opnode.cpp & flatbuffer_opnode.h

- For the flatbuffer interpreter: bring in the op node and build its
  variables (inputs, outputs, weights)
- Currently only the Fully Connected layer is supported
- Currently only NCHW format tensors are supported

Support for more layers and formats will be added in follow-up patches.

Signed-off-by: DongHak Park
---
 nntrainer/compiler/flatbuffer_opnode.cpp |  89 +++++++++++++
 nntrainer/compiler/flatbuffer_opnode.h   | 213 +++++++++++++++++++++++++++++++
 2 files changed, 302 insertions(+)
 create mode 100644 nntrainer/compiler/flatbuffer_opnode.cpp
 create mode 100644 nntrainer/compiler/flatbuffer_opnode.h

diff --git a/nntrainer/compiler/flatbuffer_opnode.cpp b/nntrainer/compiler/flatbuffer_opnode.cpp
new file mode 100644
index 0000000..6460a39
--- /dev/null
+++ b/nntrainer/compiler/flatbuffer_opnode.cpp
@@ -0,0 +1,89 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2023 DongHak Park
+ *
+ * @file   flatbuffer_opnode.cpp
+ * @date   10 February 2023
+ * @brief  NNTrainer flatbuffer opnode
+ * @see    https://github.com/nnstreamer/nntrainer
+ * @author Donghak Park
+ * @bug    No known bugs except for NYI items
+ */
+
+#include <flatbuffer_opnode.h>
+
+#include <layer_context.h>
+#include <layer_node.h>
+#include <nntrainer_error.h>
+
+namespace nntrainer {
+
+FlatBufferOpNode::FlatBufferOpNode() :
+  is_input(false),
+  is_output(false),
+  is_virtual(false),
+  op_type(nntr::BuiltinOperator_ADD),
+  builtin_option_type(nntr::BuiltinOptions_NONE) {}
+
+void FlatBufferOpNode::setLayerNode(const LayerNode &layer) {
+  is_input = (layer.getNumInputConnections() == 0);
+  is_output = (layer.getNumOutputConnections() == 0);
+
+  /// @todo Now only mse and cross loss are supported
+  static const std::set<std::string> loss_type = {"mse", "cross"};
+
+  /// A node whose single output connection goes to a loss layer is also a
+  /// model output
+  if (layer.getNumOutputConnections() == 1) {
+    for (auto &loss : loss_type) {
+      if (layer.getOutputConnections()[0].find(loss) != std::string::npos) {
+        is_output = true;
+      }
+    }
+  }
+
+  is_virtual = (layer.getType() == "multiout");
+
+  auto &context = layer.getRunContext();
+
+  auto create_variables = [](auto tensor_getter, unsigned size) {
+    Variables v;
+    v.reserve(size);
+    for (unsigned i = 0; i < size; ++i) {
+      v.emplace_back(tensor_getter(i));
+    }
+    return v;
+  };
+
+  inputs = create_variables(
+    [&context](unsigned idx) { return &context.getInput(idx); },
+    context.getNumInputs());
+  outputs = create_variables(
+    [&context](unsigned idx) { return &context.getOutput(idx); },
+    context.getNumOutputs());
+  weights = create_variables(
+    [&context](unsigned idx) {
+      auto &t = context.getWeight(idx);
+      NNTR_THROW_IF(t.empty() || !t.isAllocated(), std::invalid_argument)
+        << "Every weight must be allocated";
+      return &t;
+    },
+    context.getNumWeights());
+}
+
+flatbuffers::Offset<void> FlatBufferOpNode::getBuiltinOps() const {
+  switch (op_type) {
+  // Currently only the Fully Connected layer is supported (for testing)
+  case nntr::BuiltinOperator_FULLY_CONNECTED:
+    return builtin_ops;
+  default:
+    throw std::runtime_error("Unsupported operator");
+  }
+}
+
+void FlatBufferOpNode::setBuiltinOptions(
+  nntr::BuiltinOptions builtin_option_type_,
+  const flatbuffers::Offset<void> &builtin_ops_) {
+  builtin_ops = builtin_ops_;
+  builtin_option_type = builtin_option_type_;
+}
+
+} // namespace nntrainer
diff --git a/nntrainer/compiler/flatbuffer_opnode.h b/nntrainer/compiler/flatbuffer_opnode.h
new file mode 100644
index 0000000..917e29c
--- /dev/null
+++ b/nntrainer/compiler/flatbuffer_opnode.h
@@ -0,0 +1,213 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2023 DongHak Park
+ *
+ * @file   flatbuffer_opnode.h
+ * @date   10 February 2023
+ * @brief  NNTrainer flatbuffer opnode
+ * @see    https://github.com/nnstreamer/nntrainer
+ * @author Donghak Park
+ * @bug    No known bugs except for NYI items
+ */
+
+#ifndef __FLATBUFFER_OPNODE_H__
+#define __FLATBUFFER_OPNODE_H__
+
+#include <functional>
+#include <utility>
+#include <vector>
+
+#include <nntrainer_schema_generated.h>
+#include <tensor.h>
+
+namespace nntrainer {
+
+class LayerNode;
+class RunLayerContext;
+
+/**
+ * @brief FlatBufferOpNode class
+ *
+ */
+class FlatBufferOpNode {
+public:
+  using Variables = std::vector<const Tensor *>;
+
+  /**
+   * @brief Construct a new FlatBufferOpNode object
+   *
+   */
+  FlatBufferOpNode();
+
+  /**
+   * @brief Set the LayerNode object
+   *
+   * @param layer layer node
+   */
+  void setLayerNode(const LayerNode &layer);
+
+  /**
+   * @brief Set the op type
+   *
+   * @param op_type_ op type to set
+   */
+  void setOpType(nntr::BuiltinOperator op_type_) { op_type = op_type_; }
+
+  /**
+   * @brief Set the builtin options
+   *
+   * @param builtin_option_type_ builtin option type
+   * @param builtin_ops_ flatbuffer offset of the builtin options
+   */
+  void setBuiltinOptions(nntr::BuiltinOptions builtin_option_type_,
+                         const flatbuffers::Offset<void> &builtin_ops_);
+
+  /**
+   * @brief Get the inputs
+   *
+   * @return Variables& inputs
+   */
+  Variables &getInputs() { return inputs; }
+
+  /**
+   * @brief Get the inputs
+   *
+   * @return const Variables& inputs
+   */
+  const Variables &getInputs() const { return inputs; }
+
+  /**
+   * @brief Get the weights
+   *
+   * @return Variables& weights
+   */
+  Variables &getWeights() { return weights; }
+
+  /**
+   * @brief Get the weights
+   *
+   * @return const Variables& weights
+   */
+  const Variables &getWeights() const { return weights; }
+
+  /**
+   * @brief Get the outputs
+   *
+   * @return Variables& outputs
+   */
+  Variables &getOutputs() { return outputs; }
+
+  /**
+   * @brief Get the outputs
+   *
+   * @return const Variables& outputs
+   */
+  const Variables &getOutputs() const { return outputs; }
+
+  /**
+   * @brief Check if the node is a model input
+   *
+   * @return true if the op node is a model input
+   * @return false if the op node is not a model input
+   */
+  bool isInputNode() const { return is_input; }
+
+  /**
+   * @brief Check if the node is a model output
+   *
+   * @return true if the op node is a model output
+   * @return false if the op node is not a model output
+   */
+  bool isOutputNode() const { return is_output; }
+
+  /**
+   * @brief Check if the node is a virtual node
+   *
+   * @return true if the op node is a virtual node
+   * @return false if the op node is not a virtual node
+   */
+  bool isVirtualNode() const { return is_virtual; }
+
+  /**
+   * @brief Get the op type
+   *
+   * @return const nntr::BuiltinOperator
+   */
+  const nntr::BuiltinOperator getOpType() const { return op_type; }
+
+  /**
+   * @brief Get the builtin option type
+   *
+   * @return const nntr::BuiltinOptions
+   */
+  const nntr::BuiltinOptions getOptionType() const {
+    return builtin_option_type;
+  }
+
+  /**
+   * @brief Get the builtin options offset
+   *
+   * @return flatbuffers::Offset<void> builtin options
+   */
+  flatbuffers::Offset<void> getBuiltinOps() const;
+
+  /**
+   * @brief Get the input nodes
+   *
+   * @return const std::vector<FlatBufferOpNode *> &input_nodes
+   */
+  const std::vector<FlatBufferOpNode *> &getInputNodes() const {
+    return input_nodes;
+  }
+
+  /**
+   * @brief Set arity
+   *
+   * @param value value to set
+   */
+  void arity(size_t value) { input_nodes.resize(value); }
+
+  /**
+   * @brief Get arity
+   *
+   * @return const unsigned input_nodes size
+   */
+  const unsigned arity() const { return input_nodes.size(); }
+
+  /**
+   * @brief Set the argument node at the given index
+   *
+   * @param index argument index to set
+   * @param node the node to be set as the argument
+   */
+  void setArg(size_t index, FlatBufferOpNode *node) {
+    input_nodes.at(index) = node;
+  }
+
+  /**
+   * @brief Get the argument node at the given index
+   *
+   * @param index argument index to get
+   * @return FlatBufferOpNode *input_nodes.at(index)
+   */
+  FlatBufferOpNode *arg(size_t index) const { return input_nodes.at(index); }
+
+private:
+  Variables inputs;  /**< input variables */
+  Variables outputs; /**< output variables */
+  Variables weights; /**< weight variables */
+  std::vector<FlatBufferOpNode *> input_nodes; /**< input nodes */
+
+  bool is_input;   /**< true if given input is model input */
+  bool is_output;  /**< true if given output is model output */
+  bool is_virtual; /**< true if given node is virtual */
+
+  nntr::BuiltinOperator op_type;            /**< op type */
+  nntr::BuiltinOptions builtin_option_type; /**< builtin option type */
+  flatbuffers::Offset<void> builtin_ops;    /**< builtin options offset */
+};
+
+} // namespace nntrainer
+
+#endif // __FLATBUFFER_OPNODE_H__
--
2.7.4
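
For context, a minimal sketch of the call sequence a future flatbuffer exporter could use with this class. The generated helper nntr::CreateFullyConnectedOptions() and the enum value nntr::BuiltinOptions_FullyConnectedOptions are assumed to exist in nntrainer_schema_generated.h; both names are illustrative and not part of this patch.

// Sketch only: drives FlatBufferOpNode for a Fully Connected layer, assuming
// generated FullyConnectedOptions helpers that are not part of this patch.
#include <flatbuffer_opnode.h>
#include <layer_node.h>
#include <nntrainer_schema_generated.h>

using namespace nntrainer;

flatbuffers::Offset<void>
exportFullyConnected(flatbuffers::FlatBufferBuilder &fbb,
                     const LayerNode &fc_layer) {
  FlatBufferOpNode op_node;

  // Collect inputs / outputs / weights from the layer's run context
  op_node.setLayerNode(fc_layer);

  // Only the Fully Connected operator is supported by this patch
  op_node.setOpType(nntr::BuiltinOperator_FULLY_CONNECTED);

  // Build the (assumed) generated options table and attach it as a union
  auto fc_options = nntr::CreateFullyConnectedOptions(fbb);
  op_node.setBuiltinOptions(nntr::BuiltinOptions_FullyConnectedOptions,
                            fc_options.Union());

  // The serializer later reads this offset back when writing the model buffer
  return op_node.getBuiltinOps();
}

The node can then be wired to its producer nodes via arity() / setArg() while the exporter walks the graph.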