--- /dev/null
+import h5py
+import argparse
+from argparse import RawTextHelpFormatter
+
+
+def regular_step():
+    """
+    Decompose the steps needed to obtain run information from the command line.
+
+    :return: argparse.Namespace holding the parsed arguments:
+        ``model`` (list of NN model file paths), ``input`` (path to the
+        network input data file) and ``output_path`` (output directory
+        prefix, default: current directory).
+    """
+    parser = argparse.ArgumentParser(formatter_class=RawTextHelpFormatter)
+
+    # -m may take one or two paths: two for caffe/caffe2 (topology + weights),
+    # one for onnx/tflite.
+    parser.add_argument(
+        '-m',
+        '--model',
+        help=("specify input file with NN model, \n[depends from model, "
+              " two for caffe and caffe2, one for onnx and tflite]"),
+        nargs='+')
+    parser.add_argument('-i', '--input', help=(" specify file with neural"
+                        " network input data, hdf5 for caffe caffe2 tflite "
+                        "and pb for onnx"), required=True)
+    parser.add_argument('-o', '--output_path',
+                        help='here you specify which place will hold your output, default here', default='')
+
+    args = parser.parse_args()
+
+    # Cheap existence check for the input file: open it and discard the handle.
+    # NOTE(review): on failure this only prints and continues; callers still
+    # receive the bad path — confirm this best-effort behavior is intended.
+    try:
+        with open(args.input) as f:
+            pass
+    except IOError as e:
+        print('input file your enter doesnt exist!')
+
+    # Same existence check for every model file passed with -m.
+    # NOTE(review): if -m is omitted, args.model is None and this for-loop
+    # raises TypeError (not IOError) — confirm -m is effectively required.
+    try:
+        for i in args.model:
+            with open(i) as f:
+                pass
+    except IOError as e:
+        print('model you enter doesnt exist, write correct PATH ')
+
+    return args
+
+
+def save_result(output_path, output_data):
+    """
+    Save the result of the network run both as text and as HDF5.
+
+    :param output_path: directory (path prefix) where results are stored
+    :param output_data: data written to ``responce.txt`` and ``responce.hdf5``
+    :return: None
+    """
+    # Human-readable copy of the output.
+    # NOTE(review): 'responce' is a typo, but both file names are part of the
+    # tool's external contract — renaming would break downstream consumers.
+    with open(output_path + 'responce.txt', 'w+') as f:
+        f.write(str(output_data))
+    # Binary copy, stored as a float32 dataset named 'out'.
+    # NOTE(review): the HDF5 handle is closed manually; an exception inside
+    # create_dataset would leak it — a with-block would be safer.
+    f = h5py.File(output_path + 'responce.hdf5', 'w')
+    f.create_dataset('out', dtype='float32', data=output_data)
+    f.close()
+
+
+def read_input(input_path):
+    """
+    Read the input tensor from an HDF5 file.
+
+    :param input_path: path to the .hdf5 file holding the network input
+    :return: numpy array with the contents of the file's last top-level dataset
+    """
+    h5f = h5py.File(input_path, 'r')
+    # Remember the name of the last top-level dataset seen while iterating.
+    # NOTE(review): if the file holds several tensors, only the last one (in
+    # h5py iteration order) is returned; with zero datasets, tensorName is
+    # unbound and the return line raises NameError.
+    for t in h5f:
+        tensorName = str(t)
+    # NOTE(review): the file is opened a second time here and neither handle
+    # is closed explicitly.
+    return h5py.File(input_path, 'r')[tensorName][:]
+++ /dev/null
-import h5py
-import onnx
-import caffe
-import argparse
-import tensorflow as tf
-import caffe2.python.onnx.backend
-from argparse import RawTextHelpFormatter
-from caffe2.python import workspace
-
-
-def regular_step():
- """
- This function is intended to decompose the necessary steps to obtain information from the command line.
-
- :return: argparse object, which hold paths to nn model and input data
- """
- parser = argparse.ArgumentParser(formatter_class=RawTextHelpFormatter)
- # add command line flags and options
- parser.add_argument(
- "--caffe",
- help="treat input file as Caffe model",
- action="store_true")
- parser.add_argument(
- "--onnx",
- help="treat input file as onnx model",
- action="store_true")
- parser.add_argument(
- "--tflite",
- help="treat input file as tflite model",
- action="store_true")
- parser.add_argument(
- "--caffe2",
- help="treat input file as caffe2 model",
- action="store_true")
-
- parser.add_argument(
- '-m',
- '--model',
- help=("specify input file with NN model, \n[depends from model, "
- " two for caffe and caffe2, one for onnx and tflite]"),
- nargs='+')
- parser.add_argument('-i', '--input', help=(" specify file with neural"
- " network input data, hdf5 for caffe caffe2 tflite "
- "and pb for onnx"), required=True)
- parser.add_argument('-o', '--output_path',
- help='here you specify which place will hold your output, default here', default='')
-
-
- args = parser.parse_args()
- # added to check is our input file or not. most simple way
- try:
- with open(args.input) as f:
- pass
- except IOError as e:
- print('input file your enter doesnt exist!')
-
- # added to check is our model right or not
- try:
- for i in args.model:
- with open(i) as f:
- pass
- except IOError as e:
- print('model you enter doesnt exist, write correct PATH ')
-
- # check correct flag input
- if (args.caffe + args.caffe2 + args.tflite + args.onnx != 1):
- raise Exception('You aren\'t add necessary key caffe caffe2 onnx tflite! or add more then one')
-
- return args
-
-
-def save_result(output_path, output_data):
- """
- This function save result of nn working in .hdf5 file
- :param output_path: you specify directory to store your result
- :param output_data: information that you write to .hdf5 file
- :return:
- """
- with open(output_path + 'responce.txt', 'w+') as f:
- f.write(str(output_data))
- f = h5py.File(output_path + 'responce.hdf5', 'w')
- f.create_dataset('out', dtype='float32', data=output_data)
- f.close()
-
-
-def read_input(input_path):
- h5f = h5py.File(input_path, 'r')
- for t in h5f:
- tensorName = str(t)
- return h5py.File(input_path, 'r')[tensorName][:]
-
-
-def run_caffe2(init_net, predict_net, input_path, output_path =''):
- x = read_input(input_path)
- with open(init_net, 'rb') as f:
- init_net = f.read()
-
- with open(predict_net, 'rb') as f:
- predict_net = f.read()
- p = workspace.Predictor(init_net, predict_net)
- # TODO get 'data' parameter more universal, blobs contain other names
- results = p.run({'data': x})
- print(results)
- save_result(output_path, results)
-
-
-def run_tflite(model, input_path, output_path=''):
- input = read_input(input_path)
-
- interpreter = tf.contrib.lite.Interpreter(model_path=model)
- interpreter.allocate_tensors()
-
- input_details = interpreter.get_input_details()
- output_details = interpreter.get_output_details()
- input_data = input
- interpreter.set_tensor(input_details[0]['index'], input_data)
-
- interpreter.invoke()
- output_data = interpreter.get_tensor(output_details[0]['index'])
- print(output_data)
- save_result(output_path, output_data)
-
-
-def run_onnx(model, input_path, output_path=''): #args.model[0] , args.input
- path = model
-
- #I'll leave it in case anyone needs to read the .pb file.
- #proto_arr = onnx.TensorProto()
- #with open(input_path, 'rb') as f:
- # proto_arr.ParseFromString(f.read())
- # input_arr = onnx.numpy_helper.to_array(proto_arr)
-
- modelFile = onnx.load(path, 'rb')
- input_arr = read_input(input_path)
- output = caffe2.python.onnx.backend.run_model(modelFile, input_arr)
-
- print(output)
- save_result(output_path, output)
-
-
-def run_caffe(model_topology, model_weight, input_path, output_path=''):
- path = model_topology
- path_w = model_weight
-
- net = caffe.Net(path_w, path, caffe.TEST)
- # TODO get 'data' parameter more universal, blobs contain other names
- net.blobs['data'].data[...] = read_input(input_path)
- out = net.forward()
- all_names = [n for n in net._layer_names]
- out = out[all_names[-1]]
- save_result(output_path, out)
- print(out)
-
-
-if __name__ == '__main__':
- args = regular_step()
-
- # run one of model runner
- if args.caffe:
- run_caffe(args.model[0], args.model[1], args.input, args.output_path)
- elif args.caffe2:
- run_caffe2(args.model[0], args.model[1], args.input, args.output_path)
- elif args.tflite:
- run_tflite(args.model[0], args.input, args.output_path)
- elif args.onnx:
- run_onnx(args.model[0], args.input, args.output_path)
-
--- /dev/null
+from common_place import *
+import caffe
+
+
+def run_caffe(model_topology, model_weight, input_path, output_path=''):
+    """
+    Run a Caffe model on the given HDF5 input and save the result.
+
+    :param model_topology: path to the first -m file
+    :param model_weight: path to the second -m file
+    :param input_path: path to the .hdf5 input tensor
+    :param output_path: directory prefix for the saved results
+    """
+    path = model_topology
+    path_w = model_weight
+
+    # NOTE(review): the parameter NAMES look swapped relative to usage — the
+    # README example passes the .caffemodel first and the .prototxt second,
+    # and caffe.Net receives path_w before path; confirm against caffe.Net's
+    # (prototxt, weights, phase) signature for the installed caffe version.
+    net = caffe.Net(path_w, path, caffe.TEST)
+    # TODO get 'data' parameter more universal, blobs contain other names
+    net.blobs['data'].data[...] = read_input(input_path)
+    out = net.forward()
+    # Keep only the output blob of the last layer.
+    all_names = [n for n in net._layer_names]
+    out = out[all_names[-1]]
+    save_result(output_path, out)
+    print(out)
+
+
+if __name__ == '__main__':
+    # Expects two -m paths (see run_caffe's parameter notes above).
+    args = regular_step()
+
+    run_caffe(args.model[0], args.model[1], args.input, args.output_path)
\ No newline at end of file
--- /dev/null
+from common_place import *
+
+from caffe2.python import workspace
+
+
+def run_caffe2(init_net, predict_net, input_path, output_path=''):
+    """
+    Run a Caffe2 model (init/predict protobuf pair) and save the result.
+
+    :param init_net: path to the init net .pb (weight initialisation)
+    :param predict_net: path to the predict net .pb (network definition)
+    :param input_path: path to the .hdf5 input tensor
+    :param output_path: directory prefix for the saved results
+    """
+    x = read_input(input_path)
+    # Read both protobufs as raw bytes; the parameter names are rebound from
+    # file paths to file contents, which is what Predictor expects.
+    with open(init_net, 'rb') as f:
+        init_net = f.read()
+
+    with open(predict_net, 'rb') as f:
+        predict_net = f.read()
+    p = workspace.Predictor(init_net, predict_net)
+    # TODO get 'data' parameter more universal, blobs contain other names
+    results = p.run({'data': x})
+    print(results)
+    save_result(output_path, results)
+
+
+if __name__ == '__main__':
+    # Expects two -m paths: model[0] = init net, model[1] = predict net.
+    args = regular_step()
+
+    run_caffe2(args.model[0], args.model[1], args.input, args.output_path)
\ No newline at end of file
--- /dev/null
+from common_place import *
+
+import onnx
+import caffe2.python.onnx.backend
+
+def run_onnx(model, input_path, output_path=''):  # args.model[0], args.input
+    """
+    Run an ONNX model through the caffe2 onnx backend and save the result.
+
+    :param model: path to the .onnx model file
+    :param input_path: path to the .hdf5 input tensor
+    :param output_path: directory prefix for the saved results
+    """
+    path = model
+
+    # I'll leave it in case anyone needs to read the .pb file.
+    # proto_arr = onnx.TensorProto()
+    # with open(input_path, 'rb') as f:
+    #     proto_arr.ParseFromString(f.read())
+    #     input_arr = onnx.numpy_helper.to_array(proto_arr)
+
+    # NOTE(review): onnx.load's second positional parameter is `format`, not a
+    # file mode — passing 'rb' here looks accidental; confirm against the
+    # onnx API docs for the pinned onnx version.
+    modelFile = onnx.load(path, 'rb')
+    input_arr = read_input(input_path)
+    output = caffe2.python.onnx.backend.run_model(modelFile, input_arr)
+
+    print(output)
+    save_result(output_path, output)
+
+
+if __name__ == '__main__':
+    args = regular_step()
+
+    run_onnx(args.model[0], args.input, args.output_path)
--- /dev/null
+from common_place import *
+import tensorflow as tf
+
+def run_tflite(model, input_path, output_path=''):
+    """
+    Run a TFLite model with the TensorFlow Lite interpreter and save the result.
+
+    :param model: path to the .tflite model file
+    :param input_path: path to the .hdf5 input tensor
+    :param output_path: directory prefix for the saved results
+    """
+    # NOTE(review): 'input' shadows the builtin; also tf.contrib.lite is the
+    # pre-TF-1.13 module location (tf.lite afterwards) — confirm the TF
+    # version this is pinned to.
+    input = read_input(input_path)
+
+    interpreter = tf.contrib.lite.Interpreter(model_path=model)
+    interpreter.allocate_tensors()
+
+    input_details = interpreter.get_input_details()
+    output_details = interpreter.get_output_details()
+    input_data = input
+    # Feed the single input tensor, run inference, read the single output.
+    interpreter.set_tensor(input_details[0]['index'], input_data)
+
+    interpreter.invoke()
+    output_data = interpreter.get_tensor(output_details[0]['index'])
+    print(output_data)
+    save_result(output_path, output_data)
+
+
+if __name__ == '__main__':
+    args = regular_step()
+
+    run_tflite(args.model[0], args.input, args.output_path)
caffe:
```
-$ python3 model_runner.py --caffe -m caffe1_runer/inception-v3_ref.caffemodel caffe1_runer/inception-v3_ref.prototxt -i caffe1_runer/ILSVRC2012_val_00000002.JPEG.tfl.hdf5
+$ python3 model_runner.py -m caffe1_runer/inception-v3_ref.caffemodel caffe1_runer/inception-v3_ref.prototxt -i caffe1_runer/ILSVRC2012_val_00000002.JPEG.tfl.hdf5
```
caffe2:
```
-$ python model_runner.py --caffe2 -m caffe2_runer_and_photo/caffe2_models/init_net.pb caffe2_runer_and_photo/caffe2_models/predict_net.pb -i randomInput.hdf5
+$ python model_runner.py -m caffe2_runer_and_photo/caffe2_models/init_net.pb caffe2_runer_and_photo/caffe2_models/predict_net.pb -i randomInput.hdf5
```
tflite:
```
-$ python model_runner.py --tflite -m tflite_runer_and_photo/TST-1-2\ AVARAGE_POOP_2D.tflite -i tflite_runer_and_photo/in.hdf5
+$ python model_runner.py -m tflite_runer_and_photo/TST-1-2\ AVARAGE_POOP_2D.tflite -i tflite_runer_and_photo/in.hdf5
```
onnx:
```
-$ python model_runner.py --onnx -m onnx_runer/model.onnx -i onnx_runer/input_0.pb
+$ python model_runner.py -m onnx_runer/model.onnx -i RANDOM.hdf5
```
------
-m mean pre learned model which you run
-i mean model's input
- --caffe --caffe2 --onnx --tflite - keys which talk script how to work with input files
+