--- /dev/null
+#!/usr/bin/env python3
+# SPDX-License-Identifier: Apache-2.0
+##
+# Copyright (C) 2021 Jihoon Lee <jhoon.it.lee@samsung.com>
+#
+# @file getLayerTests.py
+# @date 13 Sep 2020
+# @brief Generate *.nnlayergolden file
+# *.nnlayergolden file is expected to contain following information **in order**
+# ## TBA ##
+#
+#
+# @author Jihoon Lee <jhoon.it.lee@samsung.com>
+
+from multiprocessing.sharedctypes import Value
+import warnings
+import random
+from functools import partial
+
+from recorder import record_single
+
+with warnings.catch_warnings():
+ warnings.filterwarnings("ignore", category=FutureWarning)
+ import numpy as np
+ import tensorflow as tf
+ from tensorflow.python import keras as K
+
+from transLayer import attach_trans_layer as TL
+
+
+##
+# @brief inspect if file is created correctly
+# @note this just checks if offset is correctly set; the printed values have
+# to be inspected manually
+# @param file_name path of the *.nnlayergolden file to read
+def inspect_file(file_name):
+    # NOTE(review): struct is imported but never used in this function
+    import struct
+    with open(file_name, "rb") as f:
+        while True:
+            # each record starts with a 4-byte little-endian element count;
+            # an empty read (EOF) yields 0 and ends the loop
+            sz = int.from_bytes(f.read(4), byteorder='little')
+            if not sz:
+                break
+            print("size: ", sz)
+            # the count is in float32 elements, not bytes
+            print(np.fromfile(f, dtype='float32', count=sz))
+
+if __name__ == "__main__":
+    # record goldens for a fully-connected layer: a multi-batch case and a
+    # single-batch case
+    fc = K.layers.Dense(5)
+    record_single(fc, (3, 1, 1, 10), "fc_golden_plain.nnlayergolden")
+    fc = K.layers.Dense(4)
+    record_single(fc, (1, 1, 1, 10), "fc_golden_single_batch.nnlayergolden")
+
+# inspect_file("fc_golden.nnlayergolden")
+
+
+
from transLayer import attach_trans_layer, MultiOutLayer
-__all__ = ["record"]
+__all__ = ["record", "record_single"]
tf.compat.v1.enable_eager_execution()
# Fix the seeds across frameworks
item.numpy().tofile(file)
except AttributeError:
pass
+
return items
return write_fn
layer_input = [initial_input]
gradients = tape.gradient(loss, layer.trainable_weights)
- # if layer.name == 'target':
- # print(tape.gradient(loss, layer.hi))
optimizer.apply_gradients(zip(gradients, layer.trainable_weights))
if isinstance(optimizer, tf.keras.optimizers.Adam):
train_step(model, optimizer, loss_fn, initial_input, label, write, **kwargs)
# self.inference_step(initial_input, label, write)
+
+
+##
+# @brief record a single layer's forward/backward pass into a golden file
+# @param layer keras layer to record (gets wrapped by attach_trans_layer)
+# @param input_shape shape of the random input tensor to feed the layer
+# @param file_name output *.nnlayergolden file path
+# @note tensors are written in order: weights, inputs, outputs, gradients,
+# input derivatives; each is preceded by its element count
+def record_single(layer, input_shape, file_name):
+    layer = attach_trans_layer(layer)
+    inputs = _rand_like(input_shape)
+
+    # persistent tape: gradient() is called twice (weights, then inputs)
+    with tf.GradientTape(persistent=True) as tape:
+        tape.watch(inputs)
+        outputs = layer(inputs)
+        dy_constant = outputs * 2 # set incoming derivative to 2 instead of 1
+
+    # copy before gradient computation so the recorded weights are the
+    # forward-pass values
+    weights = layer.weights.copy()
+    gradients = tape.gradient(dy_constant, layer.trainable_weights)
+    derivatives = tape.gradient(dy_constant, inputs)
+
+    try:
+        # layers wrapped by transLayer may reorder/transform gradients to
+        # match nntrainer's expected layout; plain layers lack this hook
+        gradients = layer.to_nntr_trainable_weights(gradients)
+    except AttributeError:
+        pass
+
+    with open(file_name, "wb") as f:
+        writer = _get_writer(f)
+
+        # write each tensor prefixed by its element count
+        def write_tensor(*tensors):
+            for tensor in tensors:
+                # print(tensor)
+                writer(tf.size(tensor), tensor)
+
+        ## @todo inputs outputs derivatives can be more than one
+        write_tensor(*weights)
+        write_tensor(inputs)
+        write_tensor(outputs)
+        write_tensor(*gradients)
+        write_tensor(derivatives)
+
+