From 806d7a74c2dd7740db68b017495a441964724aac Mon Sep 17 00:00:00 2001
From: Jonghyun Park
Date: Tue, 6 Mar 2018 10:33:00 +0900
Subject: [PATCH] Introduce (Dummy) Custom Operators

This commit introduces dummy custom operators.

Signed-off-by: Jonghyun Park
---
 tools/tflite_run/CMakeLists.txt    |   2 +
 tools/tflite_run/src/operators.cc  | 168 +++++++++++++++++++++++++++++++++++++
 tools/tflite_run/src/operators.h   |   9 ++
 tools/tflite_run/src/tflite_run.cc |   9 ++
 4 files changed, 188 insertions(+)
 create mode 100644 tools/tflite_run/src/operators.cc
 create mode 100644 tools/tflite_run/src/operators.h

diff --git a/tools/tflite_run/CMakeLists.txt b/tools/tflite_run/CMakeLists.txt
index 0c513a2..f4cbb31 100644
--- a/tools/tflite_run/CMakeLists.txt
+++ b/tools/tflite_run/CMakeLists.txt
@@ -1,6 +1,8 @@
 list(APPEND TFLITE_RUN_SRCS "src/tflite_run.cc")
+list(APPEND TFLITE_RUN_SRCS "src/operators.cc")
 
 add_executable(tflite_run ${TFLITE_RUN_SRCS})
+target_include_directories(tflite_run PRIVATE src)
 target_link_libraries(tflite_run tensorflow_lite)
 
 install(TARGETS tflite_run DESTINATION bin)
diff --git a/tools/tflite_run/src/operators.cc b/tools/tflite_run/src/operators.cc
new file mode 100644
index 0000000..d01383a
--- /dev/null
+++ b/tools/tflite_run/src/operators.cc
@@ -0,0 +1,168 @@
+#include "tensorflow/contrib/lite/context.h"
+
+#include <iostream>
+
+//
+// Cast
+//
+namespace {
+
+struct CASTOp
+{
+};
+
+void* InitCAST(TfLiteContext* context, const char* buffer, size_t length)
+{
+  std::cerr << __FUNCTION__ << "(length: " << length << ")" << std::endl;
+  // TODO Implement this!
+  return new CASTOp;
+}
+
+void FreeCAST(TfLiteContext *context, void *buffer)
+{
+  // TODO Implement this!
+  delete static_cast<CASTOp *>(buffer);
+}
+
+TfLiteStatus PrepareCAST(TfLiteContext* context, TfLiteNode* node)
+{
+  std::cerr << __FUNCTION__ << "(...)" << std::endl;
+  // TODO Implement this!
+  return kTfLiteOk;
+}
+
+TfLiteStatus EvalCAST(TfLiteContext* context, TfLiteNode* node)
+{
+  std::cerr << __FUNCTION__ << "(...)" << std::endl;
+  // TODO Implement this!
+  return kTfLiteError;
+}
+
+}
+
+//
+// Stack
+//
+namespace {
+
+struct StackOp
+{
+};
+
+void* InitStack(TfLiteContext* context, const char* buffer, size_t length)
+{
+  std::cerr << __FUNCTION__ << "(length: " << length << ")" << std::endl;
+  // TODO Implement this!
+  return new StackOp;
+}
+
+void FreeStack(TfLiteContext *context, void *buffer)
+{
+  // TODO Implement this!
+  delete static_cast<StackOp *>(buffer);
+}
+
+TfLiteStatus PrepareStack(TfLiteContext* context, TfLiteNode* node)
+{
+  std::cerr << __FUNCTION__ << "(...)" << std::endl;
+  // TODO Implement this!
+  return kTfLiteOk;
+}
+
+TfLiteStatus EvalStack(TfLiteContext* context, TfLiteNode* node)
+{
+  std::cerr << __FUNCTION__ << "(...)" << std::endl;
+  // TODO Implement this!
+  return kTfLiteError;
+}
+
+}
+
+//
+// ArgMax
+//
+namespace {
+
+struct ArgMaxOp
+{
+};
+
+void* InitArgMax(TfLiteContext* context, const char* buffer, size_t length)
+{
+  std::cerr << __FUNCTION__ << "(length: " << length << ")" << std::endl;
+  // TODO Implement this!
+  return new ArgMaxOp;
+}
+
+void FreeArgMax(TfLiteContext *context, void *buffer)
+{
+  // TODO Implement this!
+  delete static_cast<ArgMaxOp *>(buffer);
+}
+
+TfLiteStatus PrepareArgMax(TfLiteContext* context, TfLiteNode* node)
+{
+  std::cerr << __FUNCTION__ << "(...)" << std::endl;
+  // TODO Implement this!
+  return kTfLiteOk;
+}
+
+TfLiteStatus EvalArgMax(TfLiteContext* context, TfLiteNode* node)
+{
+  std::cerr << __FUNCTION__ << "(...)" << std::endl;
+  // TODO Implement this!
+  return kTfLiteError;
+}
+
+}
+
+//
+// TensorFlowMax
+//
+namespace {
+
+struct TensorFlowMaxOp
+{
+};
+
+void* InitTensorFlowMax(TfLiteContext* context, const char* buffer, size_t length)
+{
+  std::cerr << __FUNCTION__ << "(length: " << length << ")" << std::endl;
+  // TODO Implement this!
+  return new TensorFlowMaxOp;
+}
+
+void FreeTensorFlowMax(TfLiteContext *context, void *buffer)
+{
+  // TODO Implement this!
+  delete static_cast<TensorFlowMaxOp *>(buffer);
+}
+
+TfLiteStatus PrepareTensorFlowMax(TfLiteContext* context, TfLiteNode* node)
+{
+  std::cerr << __FUNCTION__ << "(...)" << std::endl;
+  // TODO Implement this!
+  return kTfLiteOk;
+}
+
+TfLiteStatus EvalTensorFlowMax(TfLiteContext* context, TfLiteNode* node)
+{
+  std::cerr << __FUNCTION__ << "(...)" << std::endl;
+  // TODO Implement this!
+  return kTfLiteError;
+}
+
+}
+
+#define REGISTER_FUNCTION(Name)                                      \
+  TfLiteRegistration* Register_##Name(void)                          \
+  {                                                                  \
+    static TfLiteRegistration r = { ::Init##Name, ::Free##Name,      \
+                                    ::Prepare##Name, ::Eval##Name }; \
+    return &r;                                                       \
+  }
+REGISTER_FUNCTION(CAST)
+REGISTER_FUNCTION(Stack)
+REGISTER_FUNCTION(ArgMax)
+REGISTER_FUNCTION(TensorFlowMax)
+#undef REGISTER_FUNCTION
diff --git a/tools/tflite_run/src/operators.h b/tools/tflite_run/src/operators.h
new file mode 100644
index 0000000..b691126
--- /dev/null
+++ b/tools/tflite_run/src/operators.h
@@ -0,0 +1,9 @@
+#ifndef __TFLITE_RUN_OPERATORS_H__
+#define __TFLITE_RUN_OPERATORS_H__
+
+TfLiteRegistration* Register_CAST(void);
+TfLiteRegistration* Register_Stack(void);
+TfLiteRegistration* Register_ArgMax(void);
+TfLiteRegistration* Register_TensorFlowMax(void);
+
+#endif // __TFLITE_RUN_OPERATORS_H__
diff --git a/tools/tflite_run/src/tflite_run.cc b/tools/tflite_run/src/tflite_run.cc
index 143216b..e4c78ee 100644
--- a/tools/tflite_run/src/tflite_run.cc
+++ b/tools/tflite_run/src/tflite_run.cc
@@ -1,6 +1,8 @@
 #include "tensorflow/contrib/lite/kernels/register.h"
 #include "tensorflow/contrib/lite/model.h"
 
+#include "operators.h"
+
 #include <iostream>
 
 using namespace tflite;
@@ -23,6 +25,13 @@ int main(int argc, char **argv)
 
   BuiltinOpResolver resolver;
 
+#define REGISTER(Name) { resolver.AddCustom(#Name, Register_##Name()); }
+  REGISTER(CAST);
+  REGISTER(Stack);
+  REGISTER(ArgMax);
+  REGISTER(TensorFlowMax);
+#undef REGISTER
+
   InterpreterBuilder builder(*model, resolver);
   std::unique_ptr<Interpreter> interpreter;
 
-- 
2.7.4
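
Note (not part of the patch): the Prepare/Eval hooks above are deliberate stubs. For reference, a minimal sketch of how one of them might later be fleshed out, assuming the single-input/single-output layout of CAST and only the TfLiteContext/TfLiteTensor API already included via "tensorflow/contrib/lite/context.h"; the body below is a hypothetical illustration, not code from this commit.

/* Hypothetical fill-in for PrepareCAST: CAST preserves the shape of its
 * operand, so Prepare only has to forward the input dimensions to the
 * output tensor.
 */
TfLiteStatus PrepareCAST(TfLiteContext* context, TfLiteNode* node)
{
  // Expect exactly one input and one output.
  if (node->inputs->size != 1 || node->outputs->size != 1)
  {
    return kTfLiteError;
  }

  const TfLiteTensor* input = &context->tensors[node->inputs->data[0]];
  TfLiteTensor* output = &context->tensors[node->outputs->data[0]];

  // Resize the output to a copy of the input shape; the runtime takes
  // ownership of the TfLiteIntArray passed to ResizeTensor.
  return context->ResizeTensor(context, output, TfLiteIntArrayCopy(input->dims));
}

Eval would follow the same pattern, reading input->data and writing output->data once the element-type conversion rules for CAST are decided.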