Imported Upstream version 1.25.0
[platform/core/ml/nnfw.git] / compiler / visq / visqlib / DumpFakeQuantFM.py
index 0248428..b8dde38 100644 (file)
@@ -16,9 +16,9 @@
 # NOTE This script runs on dalgona
 
 import numpy as np
+import json
 
 from pathlib import Path
-from Util import to_filename
 
 # Fake-quantized Op has the postfix of fq_postfix
 # TODO Remove coupling with fake quantization codes
@@ -39,28 +39,46 @@ def _name_before_fq(name):
 #
 # Before
 # self._dir/
-#  tensors.txt
+#  tensors.json
 #
 # After
 # self._dir/
-#  tensors.txt
-#  <TENSOR_NAME>.npy
-# NOTE TENSOR_NAME is transformed by to_filename
+#  tensors.json
+#  <TENSOR_ID>.npy
+# NOTE tensors.json has a dictionary {TENSOR_NAME -> TENSOR_ID}
 class DumpFakeQuantFM:
     def StartAnalysis(self, args):
         self._dir = Path(args)
         self._num_data = 0
-        with open(self._dir / 'tensors.txt') as f:
-            self._target_tensors = set([line.rstrip() for line in f])
+        with open(self._dir / 'tensors.json') as f:
+            self._tname_to_tid = json.load(f)
+        self._scale_map = {}
 
     def EndNetworkExecution(self, outputs: list):
         self._num_data += 1
 
     # TODO Use DequantizePost when dalgona supports it
-    def DefaultOpPost(self, name, opcode, inputs, output):
+    def DefaultOpPost(self, name, opcode, inputs, outputs):
         if opcode == 'Dequantize':
-            orig_name = _name_before_fq(name)
-            if orig_name in self._target_tensors:
-                data_path = self._dir / str(self._num_data)
-                data_path.mkdir(parents=False, exist_ok=True)
-                np.save(str(data_path / to_filename(orig_name)), output['data'])
+            for output in outputs:
+                name = output['name']
+                data = output['data']
+                orig_name = _name_before_fq(name)
+                if orig_name in self._tname_to_tid:
+                    tid = self._tname_to_tid[orig_name]
+                    data_path = self._dir / str(self._num_data)
+                    data_path.mkdir(parents=False, exist_ok=True)
+                    np.save(str(data_path / str(tid)), data)
+                    # Record this tensor's scale once; the scale is constant across executions
+                    if orig_name not in self._scale_map:
+                        assert len(inputs) == 1
+                        assert 'quantparam' in inputs[0]
+                        assert 'scale' in inputs[0]['quantparam']
+                        assert len(inputs[0]['quantparam']['scale']) == 1
+                        scale = inputs[0]['quantparam']['scale'][0]
+                        self._scale_map[orig_name] = scale
+
+    def EndAnalysis(self):
+        # Dump the collected {tensor name -> scale} map into scales.txt as JSON
+        with open(self._dir / 'scales.txt', 'w') as f:
+            json.dump(self._scale_map, f)