[Test] Prepare layer golden test data
author Jihoon Lee <jhoon.it.lee@samsung.com>
Thu, 9 Sep 2021 10:14:55 +0000 (19:14 +0900)
committer Jijoong Moon <jijoong.moon@samsung.com>
Thu, 23 Sep 2021 05:55:33 +0000 (14:55 +0900)
This patch prepares code to generate binary and package two tests for
example

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
.gitignore
packaging/unittest_layers_v2.tar.gz [new file with mode: 0644]
test/input_gen/genLayerTests.py [new file with mode: 0644]
test/input_gen/recorder.py
test/unittest/meson.build

index 64735c9..c4d8534 100644 (file)
@@ -42,6 +42,7 @@ Applications/**/*.bin
 *.o
 *.a
 *.o.d
+*.nnlayergolden
 
 # log files
 *.log
diff --git a/packaging/unittest_layers_v2.tar.gz b/packaging/unittest_layers_v2.tar.gz
new file mode 100644 (file)
index 0000000..cfc4b07
Binary files /dev/null and b/packaging/unittest_layers_v2.tar.gz differ
diff --git a/test/input_gen/genLayerTests.py b/test/input_gen/genLayerTests.py
new file mode 100644 (file)
index 0000000..c248598
--- /dev/null
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: Apache-2.0
+##
+# Copyright (C) 2021 Jihoon Lee <jhoon.it.lee@samsung.com>
+#
+# @file genLayerTests.py
+# @date 13 Sep 2020
+# @brief Generate *.nnlayergolden file
+# *.nnlayergolden file is expected to contain following information **in order**
+# ## TBA ##
+#
+#
+# @author Jihoon Lee <jhoon.it.lee@samsung.com>
+
+from multiprocessing.sharedctypes import Value
+import warnings
+import random
+from functools import partial
+
+from recorder import record_single
+
+with warnings.catch_warnings():
+    warnings.filterwarnings("ignore", category=FutureWarning)
+    import numpy as np
+    import tensorflow as tf
+    from tensorflow.python import keras as K
+
+from transLayer import attach_trans_layer as TL
+
+
+##
+# @brief inspect if the file is created correctly
+# @note this just checks if the offset is correctly set; the result has to be
+# inspected manually
+def inspect_file(file_name):
+    import struct
+    with open(file_name, "rb") as f:
+        while True:
+            sz = int.from_bytes(f.read(4), byteorder='little')
+            if not sz:
+                break
+            print("size: ", sz)
+            print(np.fromfile(f, dtype='float32', count=sz))
+
+if __name__ == "__main__":
+    fc = K.layers.Dense(5)
+    record_single(fc, (3, 1, 1, 10), "fc_golden_plain.nnlayergolden")
+    fc = K.layers.Dense(4)
+    record_single(fc, (1, 1, 1, 10), "fc_golden_single_batch.nnlayergolden")
+
+# inspect_file("fc_golden.nnlayergolden")
+
+
+
index 16a90ba..316f2db 100644 (file)
@@ -23,7 +23,7 @@ with warnings.catch_warnings():
 
 from transLayer import attach_trans_layer, MultiOutLayer
 
-__all__ = ["record"]
+__all__ = ["record", "record_single"]
 
 tf.compat.v1.enable_eager_execution()
 # Fix the seeds across frameworks
@@ -61,6 +61,7 @@ def _get_writer(file):
                 item.numpy().tofile(file)
             except AttributeError:
                 pass
+
         return items
 
     return write_fn
@@ -193,8 +194,6 @@ def train_step(model, optimizer, loss_fn, initial_input, label, writer_fn, **kwa
             layer_input = [initial_input]
 
         gradients = tape.gradient(loss, layer.trainable_weights)
-        # if layer.name == 'target':
-            # print(tape.gradient(loss, layer.hi))
         optimizer.apply_gradients(zip(gradients, layer.trainable_weights))
 
         if isinstance(optimizer, tf.keras.optimizers.Adam):
@@ -390,3 +389,41 @@ def record(
             train_step(model, optimizer, loss_fn, initial_input, label, write, **kwargs)
 
         # self.inference_step(initial_input, label, write)
+
+
+##
+# @brief record a single layer
+def record_single(layer, input_shape, file_name):
+    layer = attach_trans_layer(layer)
+    inputs = _rand_like(input_shape)
+
+    with tf.GradientTape(persistent=True) as tape:
+        tape.watch(inputs)
+        outputs = layer(inputs)
+        dy_constant = outputs * 2 # set incoming derivative to 2 instead of 1
+
+    weights = layer.weights.copy()
+    gradients = tape.gradient(dy_constant, layer.trainable_weights)
+    derivatives = tape.gradient(dy_constant, inputs)
+
+    try:
+        gradients = layer.to_nntr_trainable_weights(gradients)
+    except AttributeError:
+        pass
+
+    with open(file_name, "wb") as f:
+        writer = _get_writer(f)
+
+        def write_tensor(*tensors):
+            for tensor in tensors:
+                # print(tensor)
+                writer(tf.size(tensor), tensor)
+
+        ## @todo inputs outputs derivatives can be more than one
+        write_tensor(*weights)
+        write_tensor(inputs)
+        write_tensor(outputs)
+        write_tensor(*gradients)
+        write_tensor(derivatives)
+
+
index 96f543d..66970b6 100644 (file)
@@ -9,6 +9,7 @@ unzip_target = [
   ['valset.tar.gz', ''],
   ['testset.tar.gz', ''],
   ['unittest_layers.tar.gz', 'unittest_layers'],
+  ['unittest_layers_v2.tar.gz', 'unittest_layers'],
   ['unittest_models.tar.gz', 'unittest_models'],
 ]