From bc1db126d1d7f4c68fcb11ae096df436dcf62d08 Mon Sep 17 00:00:00 2001 From: Jihoon Lee Date: Wed, 28 Apr 2021 19:46:58 +0900 Subject: [PATCH] [Tflite] Separate `tfopnode` This patch separates tfopnode to a file. tfOpNode will be directly built from the exporter if it doesn't need to be fused. **Self evaluation:** 1. Build test: [X]Passed [ ]Failed [ ]Skipped 2. Run test: [X]Passed [ ]Failed [ ]Skipped Signed-off-by: Jihoon Lee --- jni/Android.mk | 1 + nntrainer/compiler/meson.build | 5 +- nntrainer/compiler/tflite_interpreter.cpp | 185 +--------------------------- nntrainer/compiler/tflite_opnode.cpp | 75 ++++++++++++ nntrainer/compiler/tflite_opnode.h | 196 ++++++++++++++++++++++++++++++ 5 files changed, 277 insertions(+), 185 deletions(-) create mode 100644 nntrainer/compiler/tflite_opnode.cpp create mode 100644 nntrainer/compiler/tflite_opnode.h diff --git a/jni/Android.mk b/jni/Android.mk index 813834e..6e070ce 100644 --- a/jni/Android.mk +++ b/jni/Android.mk @@ -138,6 +138,7 @@ NNTRAINER_SRCS := $(NNTRAINER_ROOT)/nntrainer/models/neuralnet.cpp \ $(NNTRAINER_ROOT)/nntrainer/utils/node_exporter.cpp \ $(NNTRAINER_ROOT)/nntrainer/utils/base_properties.cpp \ $(NNTRAINER_ROOT)/nntrainer/compiler/ini_interpreter.cpp \ + $(NNTRAINER_ROOT)/nntrainer/compiler/tflite_opnode.cpp \ $(NNTRAINER_ROOT)/nntrainer/compiler/tflite_interpreter.cpp \ $(NNTRAINER_ROOT)/nntrainer/app_context.cpp diff --git a/nntrainer/compiler/meson.build b/nntrainer/compiler/meson.build index 039205a..d860a64 100644 --- a/nntrainer/compiler/meson.build +++ b/nntrainer/compiler/meson.build @@ -14,7 +14,10 @@ if get_option('enable-tflite-interpreter') command: [flatc, '-c', '@INPUT@']) nntrainer_sources += flat_header - compiler_sources += 'tflite_interpreter.cpp' + compiler_sources += [ + 'tflite_interpreter.cpp', + 'tflite_opnode.cpp' + ] endif foreach s : compiler_sources diff --git a/nntrainer/compiler/tflite_interpreter.cpp b/nntrainer/compiler/tflite_interpreter.cpp index ddf09cd..06384a9 
100644 --- a/nntrainer/compiler/tflite_interpreter.cpp +++ b/nntrainer/compiler/tflite_interpreter.cpp @@ -25,6 +25,7 @@ #include #include #include +#include #include #include @@ -58,190 +59,6 @@ void builder2file(const flatbuffers::FlatBufferBuilder &builder, os.close(); } -/** - * @brief tensorflow operational node representation. This class contains, - * information to build operation flatbuffer - * - */ -class TfOpNode { -public: - using Variables = std::vector; - - TfOpNode() = default; - - /** - * @brief Construct a new Tf Op Node object from layer - * @note this is a shortcut to skip if layer does not need to be devided or - * fused - * @param layer layer that is converted to TfOpNode - */ - TfOpNode(const Layer &layer) : is_input(false), is_output(false) { - setInOut(layer); - setInputs(layer.getInputRef()); - setOutputs(layer.getOutputRef()); - setWeights(layer.getWeightsRef()); - setOpType(layer.getType()); - } - - /** - * @brief Set the In Out object - * - */ - void setInOut(const Layer &layer) { - auto &in = layer.getInputLayers(); - is_input = std::find(in.begin(), in.end(), "__data__") != in.end(); - - auto &out = layer.getOutputLayers(); - is_output = std::find(out.begin(), out.end(), "__exit__") != out.end(); - } - - /** - * @brief Set the Inputs object from layer - * - * @param inputs_ input to be inserted - */ - void setInputs(const std::vector> &inputs_) { - inputs.reserve(inputs_.size()); - std::transform(inputs_.begin(), inputs_.end(), std::back_inserter(inputs), - [](const auto &data) { return data.get(); }); - } - - /** - * @brief Set the Outputs object - * - * @param outputs_ output to be inserted - */ - void setOutputs(const std::vector> &outputs_) { - outputs.reserve(outputs_.size()); - std::transform(outputs_.begin(), outputs_.end(), - std::back_inserter(outputs), - [](const auto &data) { return data.get(); }); - } - - /** - * @brief Set the Weights object - * - * @param weights_ set weights from the object - */ - void setWeights(const 
std::vector &weights_) { - weights.reserve(weights_.size()); - std::transform(weights_.begin(), weights_.end(), - std::back_inserter(weights), - [](const auto &data) { return &data; }); - } - - /** - * @brief Set the Op Type object - * @todo Considering number of alternatives to optimize this, for now it is - * just workable. - * 1. add and maintain global unordered map - * 2. Save information in the appcontext later we can retrieve - * 3. let type be an immutable property and let exporter handle this instead - * of this method (preferrable) - * @param type type to convert - */ - void setOpType(const std::string &type) { - if (istrequal(type, FullyConnectedLayer::type)) { - setOpType(tflite::BuiltinOperator_FULLY_CONNECTED); - setBuiltinOptions(tflite::BuiltinOptions_FullyConnectedOptions, - flatbuffers::Offset()); - return; - } - - throw std::invalid_argument("not supported type"); - } - - /** - * @brief Set the Builtin Options object, - * @note this can go private, export from a layer and fill this out - * - * @param builtin_option_type_ builtin option type - * @param builtin_ops_ flatbuffer offset of builtin_ops - */ - void setBuiltinOptions(tflite::BuiltinOptions builtin_option_type_, - const flatbuffers::Offset &builtin_ops_) { - builtin_ops = builtin_ops_; - builtin_option_type = builtin_option_type_; - } - - /** - * @brief Get the Inputs object - * - * @return Variables& inputs - */ - Variables &getInputs() { return inputs; } - const Variables &getInputs() const { return inputs; } - - /** - * @brief Get the Outputs object - * - * @return Variables& - */ - Variables &getOutputs() { return outputs; } - const Variables &getOutputs() const { return outputs; } - - /** - * @brief Get the Weights object - * - * @return Variables& - */ - Variables &getWeights() { return weights; } - const Variables &getWeights() const { return weights; } - - /** - * @brief check if this op node is model input - * - * @return true if op node is model input - * @return false if op 
node is not model input - */ - bool isInputNode() const { return is_input; } - - /** - * @brief check if this op node is model output - * - * @return true if op node is model output - * @return false if op node is not model output - */ - bool isOutputNode() const { return is_output; } - - /** - * @brief Get the Op Type object - * - * @return const tflite::BuiltinOperator - */ - const tflite::BuiltinOperator getOpType() const { return op_type; } - - /** - * @brief Get the Op Type object - * - * @return const tflite::BuiltinOperator - */ - const tflite::BuiltinOptions getOptionType() const { - return builtin_option_type; - } - -private: - /** - * @brief Set the Op Type object - * - * @param op_type_ operation type - */ - void setOpType(tflite::BuiltinOperator op_type_) { op_type = op_type_; } - - Variables inputs; /**< input variables */ - Variables outputs; /**< output variables */ - Variables weights; /**< weight variables */ - - bool is_input; /**< true if given input is input; */ - bool is_output; /**< true if given output is output; */ - - tflite::BuiltinOperator op_type; - - /// retrieve this from export_to - flatbuffers::Offset builtin_ops; - tflite::BuiltinOptions builtin_option_type; -}; - using TfOpNodes = std::vector; /** diff --git a/nntrainer/compiler/tflite_opnode.cpp b/nntrainer/compiler/tflite_opnode.cpp new file mode 100644 index 0000000..a13d1e0 --- /dev/null +++ b/nntrainer/compiler/tflite_opnode.cpp @@ -0,0 +1,75 @@ +// SPDX-License-Identifier: Apache-2.0 +/** + * Copyright (C) 2021 Jihoon Lee + * + * @file tflite_opnode.cpp + * @date 28 April 2021 + * @brief contains tflite opnode which has information to convert to tflite file + * @see https://github.com/nnstreamer/nntrainer + * @author Jihoon Lee + * @bug No known bugs except for NYI items + */ + +#include + +#include + +namespace nntrainer { + +TfOpNode::TfOpNode(const Layer &layer) : TfOpNode() { + setInOut(layer); + setInputs(layer.getInputRef()); + setOutputs(layer.getOutputRef()); + 
setWeights(layer.getWeightsRef()); + setOpType(layer.getType()); +} + +void TfOpNode::setInOut(const Layer &layer) { + auto &in = layer.getInputLayers(); + is_input = std::find(in.begin(), in.end(), "__data__") != in.end(); + + auto &out = layer.getOutputLayers(); + is_output = std::find(out.begin(), out.end(), "__exit__") != out.end(); +} + +void TfOpNode::setInputs( + const std::vector> &inputs_) { + + inputs.reserve(inputs_.size()); + std::transform(inputs_.begin(), inputs_.end(), std::back_inserter(inputs), + [](const auto &data) { return data.get(); }); +} + +void TfOpNode::setOutputs( + const std::vector> &outputs_) { + outputs.reserve(outputs_.size()); + std::transform(outputs_.begin(), outputs_.end(), std::back_inserter(outputs), + [](const auto &data) { return data.get(); }); +} + +void TfOpNode::setWeights(const std::vector &weights_) { + weights.reserve(weights_.size()); + std::transform(weights_.begin(), weights_.end(), std::back_inserter(weights), + [](const auto &data) { return &data; }); +} + +void TfOpNode::setOpType(const std::string &type) { + + if (istrequal(type, FullyConnectedLayer::type)) { + setOpType(tflite::BuiltinOperator_FULLY_CONNECTED); + setBuiltinOptions(tflite::BuiltinOptions_FullyConnectedOptions, + flatbuffers::Offset()); + return; + } + + throw std::invalid_argument("not supported type"); +} + +void TfOpNode::setBuiltinOptions( + tflite::BuiltinOptions builtin_option_type_, + const flatbuffers::Offset &builtin_ops_) { + builtin_ops = builtin_ops_; + builtin_option_type = builtin_option_type_; +} + +} // namespace nntrainer diff --git a/nntrainer/compiler/tflite_opnode.h b/nntrainer/compiler/tflite_opnode.h new file mode 100644 index 0000000..c071f04 --- /dev/null +++ b/nntrainer/compiler/tflite_opnode.h @@ -0,0 +1,196 @@ +// SPDX-License-Identifier: Apache-2.0 +/** + * Copyright (C) 2021 Jihoon Lee + * + * @file tflite_opnode.h + * @date 28 April 2021 + * @brief contains tflite opnode which has information to convert to tflite 
file + * @see https://github.com/nnstreamer/nntrainer + * @author Jihoon Lee + * @bug No known bugs except for NYI items + */ + +#ifndef __TFLITE_OPNODE_H__ +#define __TFLITE_OPNODE_H__ + +#include +#include + +#include + +#include +#include + +namespace nntrainer { +/** + * @brief tensorflow operational node representation. This class contains + * information to build operation flatbuffer + * + */ +class TfOpNode { +public: + using Variables = std::vector; + + /** + * @brief Construct a new Tf object + * + */ + TfOpNode() = default; + + /** + * @brief Construct a new Tf Op Node object from layer + * @note this is a shortcut to skip if layer does not need to be divided or + * fused + * @param layer layer that is converted to TfOpNode + */ + TfOpNode(const Layer &layer); + + /** + * @brief Check and set if layer has model in/out + * + * @param layer layer to check + */ + void setInOut(const Layer &layer); + + /** + * @brief Set the Inputs object from layer + * + * @param inputs_ input to be inserted + */ + void setInputs(const std::vector> &inputs_); + + /** + * @brief Set the Outputs object + * + * @param outputs_ + */ + void setOutputs(const std::vector> &outputs_); + + /** + * @brief Set the Weights object + * + * @param weights_ set weights from the object + */ + void setWeights(const std::vector &weights_); + + /** + * @brief Set the Op Type object + * @todo Considering number of alternatives to optimize this, for now it is + * just workable. + * 1. add and maintain global unordered map + * 2. Save information in the appcontext later we can retrieve + * 3. 
let type be an immutable property and let exporter handle this instead + * of this method (preferable) + * @param type type to convert + */ + void setOpType(const std::string &type); + + /** + * @brief Set the Builtin Options object, + * @note this can go private, export from a layer and fill this out + * + * @param builtin_option_type_ builtin option type + * @param builtin_ops_ flatbuffer offset of builtin_ops + */ + void setBuiltinOptions(tflite::BuiltinOptions builtin_option_type_, + const flatbuffers::Offset &builtin_ops_); + + /** + * @brief Get the Inputs object + * + * @return Variables& inputs + */ + Variables &getInputs() { return inputs; } + + /** + * @brief Get the Inputs object + * + * @return const Variables& inputs + */ + const Variables &getInputs() const { return inputs; } + + /** + * @brief Get the Outputs object + * + * @return Variables& + */ + Variables &getOutputs() { return outputs; } + + /** + * @brief Get the Outputs object + * + * @return const Variables& outputs + */ + const Variables &getOutputs() const { return outputs; } + + /** + * @brief Get the Weights object + * + * @return Variables& + */ + Variables &getWeights() { return weights; } + + /** + * @brief Get the Weights object + * + * @return const Variables& weights + */ + const Variables &getWeights() const { return weights; } + + /** + * @brief check if this op node is model input + * + * @return true if op node is model input + * @return false if op node is not model input + */ + bool isInputNode() const { return is_input; } + + /** + * @brief check if this op node is model output + * + * @return true if op node is model output + * @return false if op node is not model output + */ + bool isOutputNode() const { return is_output; } + + /** + * @brief Get the Op Type object + * + * @return const tflite::BuiltinOperator + */ + const tflite::BuiltinOperator getOpType() const { return op_type; } + + /** + * @brief Get the Option Type object + * + * @return const tflite::BuiltinOptions + 
*/ + const tflite::BuiltinOptions getOptionType() const { + return builtin_option_type; + } + +private: + /** + * @brief Set the Op Type object + * + * @param op_type_ operation type + */ + void setOpType(tflite::BuiltinOperator op_type_) { op_type = op_type_; } + + Variables inputs; /**< input variables */ + Variables outputs; /**< output variables */ + Variables weights; /**< weight variables */ + + bool is_input; /**< true if given input is input; */ + bool is_output; /**< true if given output is output; */ + + tflite::BuiltinOperator op_type; + + /// retrieve this from export_to + flatbuffers::Offset builtin_ops; + tflite::BuiltinOptions builtin_option_type; +}; + +} // namespace nntrainer + +#endif // __TFLITE_OPNODE_H__ -- 2.7.4