[Relay][Frontend][TFlite] Add support for quantized LOGISTIC (#4696)
author Ina Dobreva <55383260+inadob@users.noreply.github.com>
Fri, 7 Feb 2020 10:23:55 +0000 (10:23 +0000)
committer GitHub <noreply@github.com>
Fri, 7 Feb 2020 10:23:55 +0000 (18:23 +0800)
* [Relay][Frontend][TFlite] Add support for quantized LOGISTIC

 * add qnn implementation
 * add qnn test case for quantized LOGISTIC

* Helper functions for quantize and dequantize.
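
The conversion follows the usual dequantize -> float op -> quantize pattern: the quantized input
is dequantized with the input tensor's (scale, zero_point), float sigmoid is applied, and the
result is requantized with the output tensor's quantization parameters. A minimal standalone
sketch of that pattern with Relay's qnn ops (illustrative only; the shape, scale and zero point
values below are made up, whereas the frontend reads the real ones from the TFLite model):

    from tvm import relay

    # uint8 input with hypothetical quantization parameters
    data = relay.var("data", shape=(1, 6), dtype="uint8")
    deq = relay.qnn.op.dequantize(data,
                                  input_scale=relay.const(0.0392157),  # roughly 10/255 for a [-5, 5] range
                                  input_zero_point=relay.const(128))
    sig = relay.sigmoid(deq)
    # sigmoid output lies in [0, 1], so a scale of 1/256 with zero point 0 fits uint8
    out = relay.qnn.op.quantize(sig,
                                output_scale=relay.const(1.0 / 256.0),
                                output_zero_point=relay.const(0),
                                out_dtype="uint8")
    print(relay.Function([data], out))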

python/tvm/relay/frontend/tflite.py
tests/python/frontend/tflite/test_forward.py

index 7e4c37a..95d5738 100644
@@ -274,6 +274,23 @@ class OperatorConverter(object):
         first_tensor = input_tensors[0]
         return first_tensor.qnn_params is not None
 
+    def quantize(self, expr, tensor_to_quantize):
+        """ Helper function to quantize a tensor with Relay """
+        tensor_type = tensor_to_quantize.tensor.Type()
+        tensor_type_str = self.get_tensor_type_str(tensor_type)
+        quantized = _qnn.op.quantize(data=expr,
+                                     output_scale=tensor_to_quantize.qnn_params['scale'],
+                                     output_zero_point=tensor_to_quantize.qnn_params['zero_point'],
+                                     out_dtype=tensor_type_str)
+        return quantized
+
+    def dequantize(self, expr, tensor):
+        """ Helper function to dequantize a tensor with Relay """
+        dequantized = _qnn.op.dequantize(data=expr,
+                                         input_scale=tensor.qnn_params['scale'],
+                                         input_zero_point=tensor.qnn_params['zero_point'])
+        return dequantized
+
     def convert_conv2d(self, op):
         """Convert TFLite conv2d"""
         return self.convert_conv(op, "conv2d")
@@ -391,7 +408,16 @@ class OperatorConverter(object):
         input_tensor = input_tensors[0]
         in_expr = self.get_expr(input_tensor.tensor_idx)
 
+        output_tensors = self.get_output_tensors(op)
+        assert len(output_tensors) == 1, "output tensors length should be 1"
+        output_tensor = output_tensors[0]
+
+        if input_tensor.qnn_params:
+            in_expr = self.dequantize(in_expr, input_tensor)
         out = _op.sigmoid(in_expr)
+        if output_tensor.qnn_params:
+            out = self.quantize(out, output_tensor)
+
         return out
 
     def convert_softmax(self, op):
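
For intuition, the two new helpers wrap the usual affine mapping real = scale * (q - zero_point).
A quick NumPy illustration with made-up parameters (not TVM code, just the arithmetic behind
_qnn.op.quantize and _qnn.op.dequantize):

    import numpy as np

    scale, zero_point = 1.0 / 256.0, 0   # hypothetical params for a sigmoid output in [0, 1]

    def quantize(x):
        # float -> uint8
        return np.clip(np.round(x / scale) + zero_point, 0, 255).astype(np.uint8)

    def dequantize(q):
        # uint8 -> float
        return scale * (q.astype(np.float32) - zero_point)

    q = quantize(1.0 / (1.0 + np.exp(-np.arange(6.0, dtype=np.float32))))
    print(q, dequantize(q))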
index acc25d9..a3c582d 100644
@@ -1223,17 +1223,25 @@ def test_forward_unpack():
 # Logistic
 # --------
 
-def _test_logistic(data):
+def _test_logistic(data, quantized=False):
     """ One iteration of LOGISTIC """
     with tf.Graph().as_default():
-        in_data = array_ops.placeholder(shape=data.shape, dtype=data.dtype)
-        out = math_ops.sigmoid(in_data)
-        compare_tflite_with_tvm(data, 'Placeholder:0', [in_data], [out])
+        in_data = array_ops.placeholder(shape=data.shape, dtype='float32', name='in_0')
+
+        if quantized:
+            inq_data = tf.quantization.fake_quant_with_min_max_args(in_data, min=-5, max=5, name="inq_0")
+            input_range = {'inq_0': (-5, 5)}
+            out = math_ops.sigmoid(inq_data)
+            out = tf.quantization.fake_quant_with_min_max_args(out, min=0, max=1, name="out")
+            compare_tflite_with_tvm(data, 'inq_0:0', [inq_data], [out], quantized=True, input_range=input_range)
+        else:
+            out = math_ops.sigmoid(in_data)
+            compare_tflite_with_tvm(data, 'in_0:0', [in_data], [out])
 
 def test_forward_logistic():
     """ LOGISTIC """
     _test_logistic(np.arange(6.0, dtype=np.float32).reshape((1, 6)))
-
+    _test_logistic(np.random.uniform(0, 255, (3, 6)).astype(np.uint8), quantized=True)
 
 #######################################################################
 # Softmax