Add 'tflitekit' (#192)
author박종현/동작제어Lab(SR)/Senior Engineer/삼성전자 <jh1302.park@samsung.com>
Fri, 4 May 2018 01:13:47 +0000 (10:13 +0900)
committerGitHub Enterprise <noreply-CODE@samsung.com>
Fri, 4 May 2018 01:13:47 +0000 (10:13 +0900)
This commit adds 'tflitekit', which provides various commands
related to TensorFlow Lite.

Signed-off-by: Jonghyun Park <jh1302.park@samsung.com>
contrib/tflitekit/CMakeLists.txt [new file with mode: 0644]
contrib/tflitekit/src/tflitekit.cpp [new file with mode: 0644]

diff --git a/contrib/tflitekit/CMakeLists.txt b/contrib/tflitekit/CMakeLists.txt
new file mode 100644 (file)
index 0000000..5f95cdf
--- /dev/null
@@ -0,0 +1,11 @@
# Build 'tflitekit' only when the TensorFlow Lite package is available;
# otherwise silently skip this directory.
nncc_find_package(TensorFlowLite QUIET)

if(NOT TensorFlowLite_FOUND)
  return()
endif(NOT TensorFlowLite_FOUND)

# Collect every C++ translation unit under src/.
file(GLOB_RECURSE SOURCES "src/*.cpp")

add_executable(tflitekit ${SOURCES})
target_link_libraries(tflitekit nncc_foundation tensorflowlite)
diff --git a/contrib/tflitekit/src/tflitekit.cpp b/contrib/tflitekit/src/tflitekit.cpp
new file mode 100644 (file)
index 0000000..1612153
--- /dev/null
@@ -0,0 +1,90 @@
+// Abstract interface for a 'tflitekit' sub-command (e.g. "run").
+//
+// Each concrete Command receives only the arguments that FOLLOW the
+// command name on the command line (main invokes run(argc - 2, argv + 2)).
+struct Command
+{
+  virtual ~Command() = default;
+
+  // Execute the command; the return value is used as the process exit
+  // status (0 on success).
+  virtual int run(int argc, char **argv) const = 0;
+};
+
+#include "tensorflow/contrib/lite/kernels/register.h"
+#include "tensorflow/contrib/lite/model.h"
+
+#include <cassert>
+#include <iostream>
+#include <memory>
+
+using namespace tflite;
+using namespace tflite::ops::builtin;
+
+// 'run' sub-command: loads a .tflite model, invokes it once (without
+// filling input tensors), and prints the number of output tensors.
+class RunCommand final : public Command
+{
+public:
+  // argv[0] is expected to be the path to a .tflite file.
+  int run(int argc, char **argv) const override;
+};
+
+int RunCommand::run(int argc, char **argv) const
+{
+  // USAGE: HEADER run [.tflite]
+  const auto filename = argv[0];
+
+  StderrReporter error_reporter;
+
+  auto model = FlatBufferModel::BuildFromFile(filename, &error_reporter);
+
+  std::unique_ptr<Interpreter> interpreter;
+
+  TfLiteStatus status = kTfLiteError;
+
+  BuiltinOpResolver resolver;
+  InterpreterBuilder builder(*model, resolver);
+
+  status = builder(&interpreter);
+  assert(status == kTfLiteOk);
+
+  interpreter->SetNumThreads(1);
+
+  status = interpreter->AllocateTensors();
+  assert(status == kTfLiteOk);
+
+  status = interpreter->Invoke();
+  assert(status == kTfLiteOk);
+
+  std::cout << "# of outputs: " << interpreter->outputs().size() << std::endl;
+
+  return 0;
+}
+
+#include <nncc/foundation/Memory.h>
+
+#include <map>
+#include <memory>
+#include <string>
+
+int main(int argc, char **argv)
+{
+  std::map<std::string, std::unique_ptr<Command>> commands;
+
+  commands["run"] = nncc::foundation::make_unique<RunCommand>();
+
+  if (argc < 2)
+  {
+    std::cerr << "ERROR: COMMAND is not provided" << std::endl;
+    std::cerr << std::endl;
+    std::cerr << "USAGE: " << argv[0] << " [COMMAND] ..." << std::endl;
+    return 255;
+  }
+
+  // USAGE: HEADER [command] ...
+  if (commands.find(argv[1]) == commands.end())
+  {
+    std::cerr << "ERROR: '" << argv[1] << "' is not a valid command" << std::endl;
+    std::cerr << std::endl;
+    std::cerr << "USAGE: " << argv[0] << " [COMMAND] ..." << std::endl;
+    std::cerr << std::endl;
+    std::cerr << "SUPPORTED COMMANDS:" << std::endl;
+    for (auto it = commands.begin(); it != commands.end(); ++it)
+    {
+      std::cerr << "  " << it->first << std::endl;
+    }
+    return 255;
+  }
+
+  return commands.at(argv[1])->run(argc - 2, argv + 2);
+}