[layer/test] Add unittests for conv1d
[platform/core/ml/nntrainer.git] / test / input_gen / genLayerTests.py
1 #!/usr/bin/env python3
2 # SPDX-License-Identifier: Apache-2.0
3 ##
4 # Copyright (C) 2021 Jihoon Lee <jhoon.it.lee@samsung.com>
5 #
# @file genLayerTests.py
# @date 13 Sep 2020
8 # @brief Generate *.nnlayergolden file
9 # *.nnlayergolden file is expected to contain following information **in order**
10 # - Initial Weights
11 # - inputs
12 # - outputs
13 # - *gradients
14 # - weights
15 # - derivatives
16 #
17 # @author Jihoon Lee <jhoon.it.lee@samsung.com>
18
19 from multiprocessing.sharedctypes import Value
20 import warnings
21 import random
22 from functools import partial
23
24 from recorder import record_single
25
# Import the heavyweight numeric/ML dependencies with FutureWarning
# suppressed; TF/Keras emit deprecation noise at import time.  The filter
# is scoped to this block only, so warnings raised later still surface.
with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=FutureWarning)
    import numpy as np
    import tensorflow as tf
    from tensorflow.python import keras as K
31
##
# @brief inspect if the file is created correctly
# @note this just checks if the offset is correctly set; the result has to be
# inspected manually
def inspect_file(file_name):
    """Read back a ``*.nnlayergolden`` file and print each tensor record.

    The file layout is a sequence of ``[uint32 size][size x float32]``
    records (little-endian).  Iteration stops on a zero size field, or on
    end-of-file / a truncated size header.

    @param file_name path of the golden file to inspect
    @return list of numpy float32 arrays, one per record (also printed,
            so the output can be eyeballed against expectations)
    """
    tensors = []
    with open(file_name, "rb") as f:
        while True:
            header = f.read(4)
            # fewer than 4 bytes means EOF (or a truncated file); don't
            # let int.from_bytes silently turn b"" into 0
            if len(header) < 4:
                break
            sz = int.from_bytes(header, byteorder="little")
            if not sz:
                break
            print("size: ", sz)
            data = np.fromfile(f, dtype="float32", count=sz)
            print(data)
            tensors.append(data)
    return tensors
44
45
if __name__ == "__main__":
    # Each record_single(layer, input_shape, base_name, call_args, input_type)
    # call builds the Keras layer, runs it once and dumps weights / inputs /
    # outputs / gradients / derivatives into <base_name>.nnlayergolden
    # (see recorder.py).  NOTE(review): statement order matters here —
    # record_single presumably consumes shared RNG state, so reordering
    # these calls would change the generated goldens; confirm against
    # recorder.py before restructuring.

    # fully connected: multi-batch and single-batch
    fc = K.layers.Dense(5)
    record_single(fc, (3, 1, 1, 10), "fc_plain")
    fc = K.layers.Dense(4)
    record_single(fc, (1, 1, 1, 10), "fc_single_batch")
    # batch normalization over a 4D (channel) input and a flat 2D input,
    # each in training and inference mode
    bn = K.layers.BatchNormalization()
    record_single(bn, (2, 4, 2, 3), "bn_channels_training", {"training": True})
    record_single(bn, (2, 4, 2, 3), "bn_channels_inference", {"training": False})
    bn = K.layers.BatchNormalization()
    record_single(bn, (2, 10), "bn_width_training", {"training": True})
    record_single(bn, (2, 10), "bn_width_inference", {"training": False})

    # conv2d: kernel exactly covers the input once
    conv = K.layers.Conv2D(3, 2)
    record_single(conv, (1, 1, 4, 4), "conv2d_sb_minimum")
    record_single(conv, (3, 1, 4, 4), "conv2d_mb_minimum")

    # conv2d: "same" padding keeps the spatial size
    conv = K.layers.Conv2D(2, 3, padding="same")
    record_single(conv, (1, 1, 4, 4), "conv2d_sb_same_remain")
    record_single(conv, (3, 1, 4, 4), "conv2d_mb_same_remain", input_type='float')

    # conv2d: strided "same" padding with uneven leftover
    conv = K.layers.Conv2D(2, 3, strides=2, padding="same")
    record_single(conv, (1, 3, 4, 4), "conv2d_sb_same_uneven_remain")
    record_single(conv, (3, 3, 4, 4), "conv2d_mb_same_uneven_remain")

    # conv2d: "valid" padding drops the last partial window
    conv = K.layers.Conv2D(2, 3, strides=2, padding="valid")
    record_single(conv, (1, 3, 7, 7), "conv2d_sb_valid_drop_last")
    record_single(conv, (3, 3, 7, 7), "conv2d_mb_valid_drop_last")

    # conv2d: stride larger than kernel, so windows never overlap
    conv = K.layers.Conv2D(3, 2, strides=3)
    record_single(conv, (1, 2, 5, 5), "conv2d_sb_no_overlap")
    record_single(conv, (3, 2, 5, 5), "conv2d_mb_no_overlap")

    # conv2d: 1x1 (pointwise) kernel with stride
    conv = K.layers.Conv2D(3, 1, strides=2)
    record_single(conv, (1, 2, 5, 5), "conv2d_sb_1x1_kernel")
    record_single(conv, (3, 2, 5, 5), "conv2d_mb_1x1_kernel")

    # use float data to generate input here
    # attention: [query, value] shares value as key; then with explicit key
    attention = K.layers.Attention()
    record_single(attention, [(1, 5, 7), (1, 3, 7)],
                 "attention_shared_kv", {}, input_type='float')
    attention = K.layers.Attention()
    record_single(attention, [(2, 5, 7), (2, 3, 7)],
                 "attention_shared_kv_batched", {}, input_type='float')
    attention = K.layers.Attention()
    record_single(attention, [(2, 5, 7), (2, 3, 7), (2, 3, 7)],
                 "attention_batched", {}, input_type='float')

    # lstm: last-output only
    lstm = K.layers.LSTM(units=5,
                         recurrent_activation="sigmoid",
                         activation="tanh",
                         return_sequences=False,
                         return_state=False)
    record_single(lstm, (3, 1, 7), "lstm_single_step")
    record_single(lstm, (3, 4, 7), "lstm_multi_step")

    # lstm: full output sequence
    lstm = K.layers.LSTM(units=5,
                         recurrent_activation="sigmoid",
                         activation="tanh",
                         return_sequences=True,
                         return_state=False)
    record_single(lstm, (3, 1, 7), "lstm_single_step_seq")
    record_single(lstm, (3, 4, 7), "lstm_multi_step_seq")

    # lstm: activations deliberately swapped to exercise non-default configs
    lstm = K.layers.LSTM(units=5,
                         recurrent_activation="tanh",
                         activation="sigmoid",
                         return_sequences=True,
                         return_state=False)
    record_single(lstm, (3, 4, 7), "lstm_multi_step_seq_act")

    # gru: same matrix of cases as lstm above
    gru = K.layers.GRU(units=5,
                         recurrent_activation="sigmoid",
                         activation="tanh",
                         return_sequences=False,
                         return_state=False)
    record_single(gru, (3, 1, 7), "gru_single_step")
    record_single(gru, (3, 4, 7), "gru_multi_step")

    gru = K.layers.GRU(units=5,
                         recurrent_activation="sigmoid",
                         activation="tanh",
                         return_sequences=True,
                         return_state=False)
    record_single(gru, (3, 1, 7), "gru_single_step_seq")
    record_single(gru, (3, 4, 7), "gru_multi_step_seq", input_type='float')

    gru = K.layers.GRU(units=5,
                         recurrent_activation="tanh",
                         activation="sigmoid",
                         return_sequences=True,
                         return_state=False)
    record_single(gru, (3, 4, 7), "gru_multi_step_seq_act", input_type='float')

    # dropout: nominal rate, plus the 0% and ~100% boundary cases
    dropout = K.layers.Dropout(rate=0.2)
    record_single(dropout, (2, 3, 2, 3), "dropout_20_training", {"training": True})
    record_single(dropout, (2, 3, 2, 3), "dropout_20_inference", {"training": False})

    dropout = K.layers.Dropout(rate=0.0)
    record_single(dropout, (2, 3, 2, 3), "dropout_0_training", {"training": True})

    # rate=1.0 is rejected by keras, hence 0.9999 for the "drop all" case
    dropout = K.layers.Dropout(rate=0.9999)
    record_single(dropout, (2, 3, 2, 3), "dropout_100_training", {"training": True})

    # conv1d: mirrors the conv2d cases above on a height-1 input
    conv = K.layers.Conv1D(3, 2)
    record_single(conv, (1, 1, 1, 4), "conv1d_sb_minimum")
    record_single(conv, (3, 1, 1, 4), "conv1d_mb_minimum")

    conv = K.layers.Conv1D(2, 3, padding="same")
    record_single(conv, (1, 1, 1, 4), "conv1d_sb_same_remain")
    record_single(conv, (3, 1, 1, 4), "conv1d_mb_same_remain")

    conv = K.layers.Conv1D(2, 3, strides=2, padding="same")
    record_single(conv, (1, 3, 1, 4), "conv1d_sb_same_uneven_remain")
    record_single(conv, (3, 3, 1, 4), "conv1d_mb_same_uneven_remain")

    conv = K.layers.Conv1D(2, 3, strides=2, padding="valid")
    record_single(conv, (1, 3, 1, 7), "conv1d_sb_valid_drop_last")
    record_single(conv, (3, 3, 1, 7), "conv1d_mb_valid_drop_last")

    conv = K.layers.Conv1D(3, 2, strides=3)
    record_single(conv, (1, 2, 1, 5), "conv1d_sb_no_overlap")
    record_single(conv, (3, 2, 1, 5), "conv1d_mb_no_overlap")

    conv = K.layers.Conv1D(3, 1, strides=2)
    record_single(conv, (1, 2, 1, 5), "conv1d_sb_1x1_kernel")
    record_single(conv, (3, 2, 1, 5), "conv1d_mb_1x1_kernel")

173 inspect_file("dropout_20_training.nnlayergolden")
174