+++ /dev/null
-nncc_find_package(TensorFlowLite QUIET)
-
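-# Build tflitekit only when the TensorFlow Lite package is available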
-if(NOT TensorFlowLite_FOUND)
- return()
-endif(NOT TensorFlowLite_FOUND)
-
-file(GLOB_RECURSE SOURCES "src/*.cpp")
-
-add_executable(tflitekit ${SOURCES})
-target_link_libraries(tflitekit nncc_foundation)
-target_link_libraries(tflitekit cli)
-target_link_libraries(tflitekit tensorflowlite)
+++ /dev/null
-#include "tensorflow/contrib/lite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-
-#include <cli/Command.h>
-#include <iostream>
-
-using namespace tflite;
-using namespace tflite::ops::builtin;
-
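-// Loads a .tflite model and runs it once through the TensorFlow Lite interpreter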
-class RunCommand final : public cli::Command
-{
-public:
- int run(int argc, const char * const *argv) const override;
-};
-
-int RunCommand::run(int argc, const char * const *argv) const
-{
- // USAGE: HEADER run [.tflite]
- assert(argc >= 1);
- const auto filename = argv[0];
-
- StderrReporter error_reporter;
-
- // Load the serialized .tflite model from disk
- auto model = FlatBufferModel::BuildFromFile(filename, &error_reporter);
- assert(model != nullptr);
-
- std::unique_ptr<Interpreter> interpreter;
-
- TfLiteStatus status = kTfLiteError;
-
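- // Resolve builtin operator codes in the model to their kernel implementations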
- BuiltinOpResolver resolver;
- InterpreterBuilder builder(*model, resolver);
-
- status = builder(&interpreter);
- assert(status == kTfLiteOk);
-
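- // Run the interpreter on a single thread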
- interpreter->SetNumThreads(1);
-
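- // Allocate buffers for all input, output, and intermediate tensors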
- status = interpreter->AllocateTensors();
- assert(status == kTfLiteOk);
-
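- // Run inference once (input tensors are left uninitialized here)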
- status = interpreter->Invoke();
- assert(status == kTfLiteOk);
-
- std::cout << "# of outputs: " << interpreter->outputs().size() << std::endl;
-
- return 0;
-}
-
-#include <nncc/foundation/Memory.h>
-#include <cli/App.h>
-
-#include <map>
-#include <memory>
-#include <string>
-
-int main(int argc, char **argv)
-{
- cli::App app{argv[0]};
-
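- // Register the "run" subcommand; app.run dispatches on the remaining arguments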
- app.insert("run", nncc::foundation::make_unique<RunCommand>());
-
- return app.run(argc - 1, argv + 1);
-}