# This script uses nnkit to run inference for given model on a given data
# Messages are printed to stderr
# Usage:
# -b - specifies path to nnkit build folder, inside which tools/run is located
# -f - specifies framework ('tfl' for tflite or 'caf' for caffe) that the model belongs to
# -t - specifies path to testcase folder (see its structure in readme)
# -p - allow some sort of parallelism by processing only a subset of files,
# Filename suffix for raw binary data files (usage is outside this chunk —
# presumably input/reference tensors in the testcase folder; verify against callers).
bin_suffix = '.dat'
def get_command_caf(infilename, outfilename, proto, caffemodel):
    """Build the nnkit-run command line for Caffe-backend inference.

    infilename  -- path to the HDF5 input file imported before inference
    outfilename -- path to the HDF5 output file exported after inference
    proto       -- path to the Caffe network description (.prototxt)
    caffemodel  -- path to the trained Caffe weights (.caffemodel)

    Returns the command as an argument list (suitable for subprocess).
    """
    # NOTE(review): the original dropped proto/caffemodel entirely and passed
    # --post-arg with no --post action; both are restored here so the Caffe
    # backend actually receives the model and the result is written out.
    return [build_path + "/tools/run/nnkit-run",
            "--backend", build_path + "/backends/caffe/libnnkit_caffe_backend.so",
            # the Caffe backend takes the network description, then the weights
            "--backend-arg", proto,
            "--backend-arg", caffemodel,
            "--pre", build_path + "/actions/HDF5/libnnkit_HDF5_import_action.so",
            "--pre-arg", infilename,
            "--post", build_path + "/actions/HDF5/libnnkit_HDF5_export_action.so",
            "--post-arg", outfilename]
def get_command_tfl(infilename, outfilename, model_file):
    return [build_path + "/tools/run/nnkit-run",
"--pre", build_path + "/actions/HDF5/libnnkit_HDF5_import_action.so",
"--pre-arg", infilename,
"--backend", build_path + "/backends/tflite/libnnkit_tflite_backend.so",