--- /dev/null
+# indicates that this folder is a Python package
--- /dev/null
+import os
+import tensorflow as tf
+import model_freezer_util as util
+
+
+class BaseFreezer(object):
+ def __init__(self, path):
+ # files generated by child class will be stored under this path.
+ self.root_output_path = path
+
+ def getOutputDirectory(self):
+ ''' abstract method
+ the overriding method should return the directory under self.root_output_path where all pb, pbtxt, checkpoint, and tensorboard log files are saved '''
+ raise NotImplementedError("please implement this")
+
+ def getTopNodeName(self):
+ ''' abstract method
+ the overriding method should return the name of the top node of the frozen graph '''
+ raise NotImplementedError("please implement this")
+
+ def getModelName(self):
+ ''' abstract method
+ the overriding method should return the name of the model you defined '''
+ raise NotImplementedError("please implement this")
+
+ def buildModel(self, sess):
+ ''' abstract method
+ the overriding method should return (input_node_list, output_node_list) after building a model with all variables set '''
+ raise NotImplementedError("please implement this")
+
+ def saveRelatedFiles(self, sess, input_node_list, output_node_list):
+ ''' saves pb, pbtxt, and ckpt files, then freezes the graph under top_node_name into the output directory '''
+ ''' produce pb, pbtxt, and ckpt files '''
+ (pb_path, pbtxt_path, checkpoint_path) = util.savePbAndCkpt(
+ sess, self.getOutputDirectory())
+
+ print("")
+ print("# 1. Created {}, {}, {}\n".format(pb_path, pbtxt_path, checkpoint_path))
+ '''
+ produce frozen_graph files
+ include only nodes below the top node. nodes for gradient descent (reduce_mean, GradientDescentOptimizer, ...) will not be included
+ '''
+ sess.close()
+ (frozen_pb_path, frozen_pbtxt_path) = util.freezeGraph(pb_path, checkpoint_path,
+ self.getTopNodeName())
+
+ print("")
+ print("\n# 2. Created {}, {}\n".format(frozen_pb_path, frozen_pbtxt_path))
+ ''' generate tflite file. '''
+ # manually put back imported modules. refer to https://github.com/tensorflow/tensorflow/issues/15410#issuecomment-352189481
+ import tempfile
+ import subprocess
+ tf.contrib.lite.tempfile = tempfile
+ tf.contrib.lite.subprocess = subprocess
+
+ tflite_path = os.path.join(self.getOutputDirectory(), "graph_frozen.tflite")
+
+ tf.reset_default_graph()
+ sess = tf.Session()
+
+ util.importGraphIntoSession(sess, frozen_pb_path, "")
+ tflite_model = tf.contrib.lite.toco_convert(sess.graph_def, input_node_list,
+ output_node_list)
+ with open(tflite_path, "wb") as tflite_file:
+ tflite_file.write(tflite_model)
+ print("# 3. Created {}\n".format(tflite_path))
+
+ return (pb_path, frozen_pb_path, tflite_path)
+
+ def generateTensorboardLog(self, pb_path, frozen_pb_path):
+ ''' generates tensorboard logs to compare the original pb and the frozen pb '''
+ tensorboardLogDir = util.generateTensorboardLog([pb_path, frozen_pb_path],
+ ['original', 'frozen'],
+ self.getOutputDirectory())
+
+ print("# You can view original graph and frozen graph with tensorboard.")
+ print(" Run the following: $ tensorboard --logdir={} ".format(tensorboardLogDir))
+
+ def createSaveFreezeModel(self):
+ ''' method that is actually called by the main() function. '''
+ print("")
+ print("-------------------- freezing {} ------------------------".format(
+ self.getModelName()))
+ print("# files will be saved into " + self.getOutputDirectory())
+ print("")
+
+ # without this, the graph used in the previous session is reused : https://stackoverflow.com/questions/42706761/closing-session-in-tensorflow-doesnt-reset-graph
+ tf.reset_default_graph()
+
+ # TODO-nnfw session life cycle here is too tangled
+ sess = tf.Session()
+
+ # build model
+ (input_node_list, output_node_list) = self.buildModel(sess)
+ ''' Now, save to protocol buffer format and checkpoint '''
+ (pb_path, frozen_pb_path, tflite_path) = self.saveRelatedFiles(
+ sess, input_node_list, output_node_list)
+
+ sess.close()
+
+ self.generateTensorboardLog(pb_path, frozen_pb_path)
+++ /dev/null
-import os
-import sys
-import platform
-import tensorflow as tf
-import argparse
-
-import model_freezer_util as util
-
-
-# --------
-class TensorFlowModelFreezer(object):
- def __init__(self, path):
- # files generated by child class will be stored under this path.
- self.root_output_path = path
-
- def createSaveFreezeModel(self):
- ''' abstract class '''
- raise NotImplementedError("please implement this")
-
- def getOutputDirectory(self):
- ''' abstract class
- override method should return directory under self.root_output_path where all pb, pbtxt, checkpoing, tensorboard log are saved '''
- raise NotImplementedError("please implement this")
-
- def getTopNodeName(self):
- ''' abstract class
- override method should return top node of frozen graph '''
- raise NotImplementedError("please implement this")
-
- def saveRelatedFiles(self, sess):
- ''' saves pb, pbtxt, chpt files and then freeze graph under top_node_name into directory '''
- ''' produce pb, pbtxt, and ckpt files '''
- (pb_path, pbtxt_path, checkpoint_path) = util.savePbAndCkpt(
- sess, self.getOutputDirectory())
-
- print("")
- print("# Success. pb, pbtxt, checkpoint files are created")
- print(" -> {}, {}, {}\n".format(pb_path, pbtxt_path, checkpoint_path))
- '''
- produce frozen_graph files
- include only nodes below softmax node. nodes for gradient descent (reduce_mean, GradientDescentOptimizer, ...) will not be included
- '''
- (frozen_pb_path, frozen_pbtxt_path) = util.freezeGraph(pb_path, checkpoint_path,
- self.getTopNodeName())
-
- print("")
- print("\n# Success. A frozen pb and pbtxt are created")
- print(" -> {}, {}\n".format(frozen_pb_path, frozen_pbtxt_path))
-
- return (pb_path, frozen_pb_path)
-
- def generateTensorboardLog(self, pb_path, frozen_pb_path):
- ''' generating tensorboard logs to compare original pb and frozen pb '''
- tensorboardLogDir = util.generateTensorboardLog([pb_path, frozen_pb_path],
- ['original', 'frozen'],
- self.getOutputDirectory())
-
- print("# You can view original graph and frozen graph with tensorboard.")
- print(" Run the following: $ tensorboard --logdir={} ".format(tensorboardLogDir))
-
-
-# --------
-class SoftmaxTestModelFreezer(TensorFlowModelFreezer):
- ''' class to create, save, and freeze Softmax(wx+b) '''
-
- def __init__(self, path):
- super(SoftmaxTestModelFreezer, self).__init__(path)
-
- def getOutputDirectory(self):
- assert self.root_output_path
- return os.path.join(self.root_output_path, 'softmax')
-
- def getTopNodeName(self):
- return "SOFTMAX_TOP"
-
- def createSaveFreezeModel(self):
- ''' this sample product frozen graph for 'softmax(wx+b)' into 'product/softmax' directory '''
-
- print("")
- print("-------------------- freezing Softmax(wx+b) ------------------------")
- print("# files will be saved into " + self.getOutputDirectory())
- print("")
-
- tf.reset_default_graph(
- ) # without this, graph used previous session is reused : https://stackoverflow.com/questions/42706761/closing-session-in-tensorflow-doesnt-reset-graph
-
- X = tf.placeholder("float", [None, 4])
- Y = tf.placeholder("float", [None, 3])
-
- W = tf.Variable(tf.random_normal([4, 3]), name='weight')
- b = tf.Variable(tf.random_normal([3]), name='bias')
-
- softmax_top = tf.nn.softmax(tf.matmul(X, W) + b, name=self.getTopNodeName())
-
- cost_function = tf.reduce_mean(-tf.reduce_sum(Y * tf.log(softmax_top), axis=1))
-
- gd = tf.train.GradientDescentOptimizer(learning_rate=0.11).minimize(cost_function)
-
- with tf.Session() as sess:
- sess.run(tf.global_variables_initializer())
-
- print("# start to training")
- x_input = [[1, 2, 3, 4], [5, 6, 7, 8], [1, 2, 3, 4], [5, 6, 7, 8],
- [1, 3, 5, 7], [7, 5, 3, 1], [1, 3, 2, 5], [4, 7, 6, 9]]
- y_input = [[0, 0, 1], [0, 1, 0], [1, 0, 0], [0, 0, 1], [0, 1, 0], [1, 0, 0],
- [0, 0, 1], [0, 1, 0]]
-
- for step in range(500):
- sess.run(gd, feed_dict={X: x_input, Y: y_input})
-
- print("# training is done. Now we have some weight and bias values")
- ''' Now, save to proto buffer format and checkpoint '''
- (pb_path, frozen_pb_path) = self.saveRelatedFiles(sess)
-
- self.generateTensorboardLog(pb_path, frozen_pb_path)
-
-
-# --------
-class ReluTestModelFreezer(TensorFlowModelFreezer):
- ''' class to create, save, and freeze relu(wx+b) '''
-
- def __init__(self, path):
- super(ReluTestModelFreezer, self).__init__(path)
-
- def getOutputDirectory(self):
- return os.path.join(self.root_output_path, 'relu')
-
- def getTopNodeName(self):
- return "RELU_TOP"
-
- def createSaveFreezeModel(self):
-
- print("")
- print("-------------------- freezing Relu(wx+b) ------------------------")
- print("# files will be saved into " + self.getOutputDirectory())
- print("")
-
- tf.reset_default_graph(
- ) # without this, graph used previous session is reused : https://stackoverflow.com/questions/42706761/closing-session-in-tensorflow-doesnt-reset-graph
-
- X = tf.placeholder(tf.float32, shape=[None, 3], name='X_placeholder') # input
- W = tf.get_variable(
- "W_var", [3, 2], dtype=tf.float32, initializer=tf.zeros_initializer())
- b = tf.get_variable(
- "b_var", [2, 2], dtype=tf.float32, initializer=tf.zeros_initializer())
-
- O = tf.nn.relu(
- tf.matmul(X, W) + b, name=self.getTopNodeName()) # activation / output
-
- init_op = tf.global_variables_initializer()
-
- with tf.Session() as sess:
- sess.run(init_op)
- # normally you would do some training here
- # we will just assign something to W
- sess.run(tf.assign(W, [[2, 4], [8, 16], [32, 64]]))
- sess.run(tf.assign(b, [[128, 256], [512, 1024]]))
- ''' Now, save to proto buffer format and checkpoint '''
- (pb_path, frozen_pb_path) = self.saveRelatedFiles(sess)
-
- self.generateTensorboardLog(pb_path, frozen_pb_path)
-
-
-# --------
-def main():
-
- global root_output_path
-
- if platform.python_version()[0] != '2':
- print("python version must be 2.x")
- sys.exit(0)
-
- freezer = SoftmaxTestModelFreezer(root_output_path)
- freezer.createSaveFreezeModel()
-
- freezer = ReluTestModelFreezer(root_output_path)
- freezer.createSaveFreezeModel()
-
-
-# --------
-# --------
-if __name__ == "__main__":
-
- parser = argparse.ArgumentParser(
- description='Converted Tensorflow model in python to frozen model.')
- parser.add_argument(
- "out_dir",
- help=
- "directory where generated pb, pbtxt, checkpoint and Tensorboard log files are stored."
- )
- parser.add_argument(
- "-r",
- action='store_true',
- help=
- "remove any existing out_dir. If -r is not provided, the directory is overwritten."
- )
-
- args = parser.parse_args()
- root_output_path = args.out_dir
-
- if args.r == True:
- if os.path.exists(root_output_path):
- import shutil
- shutil.rmtree(root_output_path)
- print("# Removed directory " + root_output_path)
-
- main()
This tool (python module) does the following:
-- Programmer can add Tensorflow code to build a specific graph. --(1)
+- A programmer can add Tensorflow Python code to build a specific graph. -- (1)
- This tool saves the graph as graph def (pb, pbtxt) and checkpoint files. -- (2)
- Files from (2) are frozen and a frozen graph file is generated. -- (3)
-- Additionally, the visual structure of original graph (2) and frozen graph (3) are viewed using Tensorboard.
+ - Additionally, the visual structure of the original graph (2) and the frozen graph (3) can be viewed using Tensorboard.
+- Files from (3) are converted into a tflite file (which can be used by Tensorflow Lite).
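+
+The freeze and tflite helpers used by this tool live in `model_freezer_util.py` and `tf.contrib.lite`, which are not shown in this README. As a rough sketch only (TF 1.x APIs; the function name and parameters below are made up for illustration), step (3) and the tflite conversion amount to:
+
+```python
+import tensorflow as tf
+
+
+def freeze_and_convert(sess, input_tensors, output_tensors, top_node_name, tflite_path):
+    # (3) replace variables with constants so the graph no longer needs a checkpoint
+    frozen_graph_def = tf.graph_util.convert_variables_to_constants(
+        sess, sess.graph_def, [top_node_name])
+    # convert the frozen graph into a tflite flatbuffer
+    tflite_model = tf.contrib.lite.toco_convert(frozen_graph_def, input_tensors,
+                                                output_tensors)
+    with open(tflite_path, "wb") as f:
+        f.write(tflite_model)
+```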
## How to use
+
- This tool is tested with Python 2.7 and Tensorflow 1.6
-- Programmer can add his/her graph into freeze_programmed_tensor_graph.py
- - Add code as a subclass of `TensorFlowModelFreezer`
- - Currently two sample subclasses are written:
- - ReluTestModelFreezer: Relu(wx+b)
+- A programmer can add his/her graph by writing a subclass of BaseFreezer (see the sketch at the end of this section).
+ - Please refer to existing examples:
+ - Add1x1Freezer: Add(x+const of shape [1])
+ - Add5x5Freezer: Add(x+const of shape [5,5])
+ - ReluFreezer: Relu(wx+b)
- SoftmaxFreezer: Softmax(x+const)
-- Run `$ (nnfw root) python tools/tensorflow_model_freezer/freeze_programmed_tensor_graph.py -r output_directory` and model files will be generated under output_directory.
- - `-r` means the output_directory will be removed before generating files. If `-r` is not provided, the directory will be overwritten.
-- To launch Tensorboard, run, e.g., `$ tensorboard --logdir=output_directory/relu/.tensorbrd`
-
+- Add code to call your class in `freeze_tf_models.py`.
+- Run `~/nnfw$ PYTHONPATH=$PYTHONPATH:./tools/tensorflow_model_freezer/ python tools/tensorflow_model_freezer/sample/run_converter.py -r model_dir`
+ - model_dir : directory where models are generated
+ - `-r` : model_dir (if any) will be removed before generating files. If `-r` is not provided, the directory will be overwritten.
+- To launch Tensorboard, run, e.g., `$ tensorboard --logdir=model_dir/relu/.tensorbrd`
+ - Two graphs (before / after freezing) will be shown.
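+
+Below is a minimal, hypothetical sketch of such a subclass (class, directory, and node names are made up; it only follows the `BaseFreezer` interface and the pattern of the existing samples):
+
+```python
+import os
+import tensorflow as tf
+
+import base_freezer
+
+
+class MulByConstFreezer(base_freezer.BaseFreezer):
+    ''' hypothetical example: mul(x, const) '''
+
+    def __init__(self, path):
+        super(MulByConstFreezer, self).__init__(path)
+
+    def getOutputDirectory(self):
+        # all generated files go under <model_dir>/mul_const
+        return os.path.join(self.root_output_path, 'mul_const')
+
+    def getTopNodeName(self):
+        return "MUL_TOP"
+
+    def getModelName(self):
+        return "mul(x, const)"
+
+    def buildModel(self, sess):
+        # input placeholder; the input name must be given
+        x_input = tf.placeholder(tf.float32, [3], name='X_input')
+        y = tf.get_variable(
+            "y_var", [3], dtype=tf.float32, initializer=tf.zeros_initializer())
+
+        mul_top = tf.multiply(x_input, y, name=self.getTopNodeName())
+
+        sess.run(tf.global_variables_initializer())
+        # normally you would train here; we just assign a value to y
+        sess.run(tf.assign(y, [2., 3., 4.]))
+
+        # return (input_node_list, output_node_list)
+        return ([x_input], [mul_top])
+```
+
+Then construct the class with your output directory and call `createSaveFreezeModel()` from the runner script, as the existing samples do.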
\ No newline at end of file
--- /dev/null
+# indicates that this folder is a Python package
--- /dev/null
+import os
+import sys
+import platform
+import tensorflow as tf
+
+import base_freezer
+
+
+class Add1x1Freezer(base_freezer.BaseFreezer):
+ ''' class to define add(x, y) '''
+
+ def __init__(self, path):
+ super(Add1x1Freezer, self).__init__(path)
+
+ def getOutputDirectory(self):
+ return os.path.join(self.root_output_path, 'add')
+
+ def getTopNodeName(self):
+ return "ADD_TOP"
+
+ def getModelName(self):
+ return "add(x, y)"
+
+ def buildModel(self, sess):
+
+ # input placeholders. input name must be given
+ x_input = tf.placeholder(tf.float32, [1], name='X_input')
+
+ y = tf.get_variable(
+ "y_var", [1], dtype=tf.float32, initializer=tf.zeros_initializer())
+
+ addTop = tf.add(x_input, y, name=self.getTopNodeName())
+
+ init_op = tf.global_variables_initializer()
+
+ sess.run(init_op)
+ # normally you would do some training here
+ # we will just assign something to y
+ sess.run(tf.assign(y, [2.]))
+
+ print(sess.run(addTop, feed_dict={x_input: [1.]}))
+
+ # returning (input_node_list, output_node_list)
+ return ([x_input], [addTop])
--- /dev/null
+import os
+import sys
+import platform
+import tensorflow as tf
+
+import base_freezer
+
+
+class Add5x5Freezer(base_freezer.BaseFreezer):
+ ''' class to define add(x, y) where x and y have shape [5, 5] '''
+
+ def __init__(self, path):
+ super(Add5x5Freezer, self).__init__(path)
+
+ def getOutputDirectory(self):
+ return os.path.join(self.root_output_path, 'add5x5')
+
+ def getTopNodeName(self):
+ return "ADD_TOP"
+
+ def getModelName(self):
+ return "add(x5x5, y5x5)"
+
+ def buildModel(self, sess):
+
+ # input placeholders. input name must be given
+ x_input = tf.placeholder(tf.float32, [5, 5], name='X_input')
+
+ y = tf.get_variable(
+ "y_var", [5, 5], dtype=tf.float32, initializer=tf.zeros_initializer())
+
+ addTop = tf.add(x_input, y, name=self.getTopNodeName())
+
+ init_op = tf.global_variables_initializer()
+
+ sess.run(init_op)
+ # normally you would do some training here
+ # we will just assign something to y
+ y_value = [[1.1, 1.1, 1.1, 1.1, 1.1], [2.2, 2.2, 2.2, 2.2, 2.2],
+ [3.3, 3.3, 3.3, 3.3, 3.3], [4.4, 4.4, 4.4, 4.4, 4.4],
+ [5.5, 5.5, 5.5, 5.5, 5.5]]
+ sess.run(tf.assign(y, y_value))
+
+ # returning (input_node_list, output_node_list)
+ return ([x_input], [addTop])
--- /dev/null
+import os
+import sys
+import platform
+import tensorflow as tf
+
+import base_freezer
+
+
+class ReluFreezer(base_freezer.BaseFreezer):
+ ''' class to create, save, and freeze relu(wx+b) '''
+
+ def __init__(self, path):
+ super(ReluFreezer, self).__init__(path)
+
+ def getOutputDirectory(self):
+ return os.path.join(self.root_output_path, 'relu')
+
+ def getTopNodeName(self):
+ return "RELU_TOP"
+
+ def getModelName(self):
+ return "relu(wx+b)"
+
+ def buildModel(self, sess):
+
+ # input placeholders. input name must be given
+ X = tf.placeholder(tf.float32, shape=[2, 3], name='X_input')
+
+ W = tf.get_variable(
+ "W_var", [3, 2], dtype=tf.float32, initializer=tf.zeros_initializer())
+ b = tf.get_variable(
+ "b_var", [2, 2], dtype=tf.float32, initializer=tf.zeros_initializer())
+
+ O = tf.nn.relu(
+ tf.matmul(X, W) + b, name=self.getTopNodeName()) # activation / output
+
+ init_op = tf.global_variables_initializer()
+
+ sess.run(init_op)
+ # normally you would do some training here
+ # we will just assign something to W
+ sess.run(tf.assign(W, [[2, 4], [8, 16], [32, 64]]))
+ sess.run(tf.assign(b, [[128, 256], [512, 1024]]))
+
+ # returning (input_node_list, output_node_list)
+ return ([X], [O])
--- /dev/null
+import os
+import sys
+import platform
+import tensorflow as tf
+import argparse
+
+import relu_freezer
+import softmax_freezer
+import add1x1_freezer
+import add5x5_freezer
+
+
+# --------
+def main():
+
+ global root_output_path
+
+ if platform.python_version()[0] != '2':
+ print("python version must be 2.x")
+ sys.exit(0)
+
+ freezer = softmax_freezer.SoftmaxFreezer(root_output_path)
+ freezer.createSaveFreezeModel()
+
+ freezer = relu_freezer.ReluFreezer(root_output_path)
+ freezer.createSaveFreezeModel()
+
+ freezer = add1x1_freezer.Add1x1Freezer(root_output_path)
+ freezer.createSaveFreezeModel()
+
+ freezer = add5x5_freezer.Add5x5Freezer(root_output_path)
+ freezer.createSaveFreezeModel()
+
+
+# --------
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser(
+ description='Convert a Tensorflow model defined in Python code into a frozen model.')
+ parser.add_argument(
+ "out_dir",
+ help=
+ "directory where generated pb, pbtxt, checkpoint and Tensorboard log files are stored."
+ )
+ parser.add_argument(
+ "-r",
+ action='store_true',
+ help=
+ "remove any existing out_dir. If -r is not provided, the directory is overwritten."
+ )
+
+ args = parser.parse_args()
+ root_output_path = args.out_dir
+
+ if args.r:
+ if os.path.exists(root_output_path):
+ import shutil
+ shutil.rmtree(root_output_path)
+ print("# Removed directory " + root_output_path)
+
+ main()
--- /dev/null
+import os
+import sys
+import platform
+import tensorflow as tf
+import argparse
+
+import base_freezer
+
+
+class SoftmaxFreezer(base_freezer.BaseFreezer):
+ ''' class to create, save, and freeze Softmax(x + const) '''
+
+ def __init__(self, path):
+ super(SoftmaxFreezer, self).__init__(path)
+
+ def getOutputDirectory(self):
+ assert self.root_output_path
+ return os.path.join(self.root_output_path, 'softmax')
+
+ def getTopNodeName(self):
+ return "SOFTMAX_TOP"
+
+ def getModelName(self):
+ return "softmax(x)"
+
+ def buildModel(self, sess):
+ ''' this sample produces a frozen graph for 'softmax(x + const)' under the 'softmax' directory '''
+
+ # input placeholders. input name must be given
+ X = tf.placeholder("float", [4], name='X_input')
+
+ Y = tf.get_variable(
+ "y_var", [4], dtype=tf.float32, initializer=tf.zeros_initializer())
+ add_node = X + Y
+ softmax_top = tf.nn.softmax(add_node, name=self.getTopNodeName())
+
+ sess.run(tf.global_variables_initializer())
+
+ print(sess.run(softmax_top, feed_dict={X: [100., 50., 25., 12.5]}))
+
+ # returning (input_node_list, output_node_list)
+ return ([X], [softmax_top])