    loss_fn_str="cross_softmax",
)
+record(
+    file_name="conv_bn.info",
+    model=[
+        K.Input(shape=(2, 3, 5)),
+        K.layers.Conv2D(filters=2, kernel_size=(2, 2)),
+        K.layers.BatchNormalization(),
+        K.layers.Activation("relu"),
+        K.layers.Flatten(),
+        K.layers.Dense(10),
+        K.layers.Activation("softmax"),
+    ],
+    optimizer=opt.SGD(learning_rate=0.1),
+    iteration=10,
+    input_shape=(3, 2, 3, 5),
+    label_shape=(3, 10),
+    loss_fn_str="cross_softmax",
+    # debug=["summary", "initial_weights"]
+)
+
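# A quick cross-check sketch (not part of the patch; names are illustrative):
# the NCHW shape passed to record() above lines up with the INI fields added
# for the same "conv_bn" test further below.
batch, channel, height, width = 3, 2, 3, 5       # record(input_shape=(3, 2, 3, 5))
assert f"{channel}:{height}:{width}" == "2:3:5"  # INI: input_shape=2:3:5
assert batch == 3                                # INI: batch_size=3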
pool_layer_tc = lambda pool_layer: partial(
    record,
    model=[
##
-# @brief Translayer for batch normalization layer
-class BatchNormTransLayer(IdentityTransLayer):
-    def to_nntr_weights(self, tensorOrList):
-        x = tensorOrList
-        assert len(x) == 4
-        return [x[2], x[3], x[0], x[1]]
-
-
-##
# @brief Translayer to translate channel last <-> channel first
# @note This class relies on the Permute layer. It should be skipped when
# iterating through keras layers
)
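# A minimal sketch of the channel translation described above (illustrative,
# not the patch code). Permute dims exclude the batch axis, so (2, 3, 1)
# maps NCHW -> NHWC and (3, 1, 2) maps back.
from tensorflow import keras as K

to_channel_last = K.layers.Permute((2, 3, 1))   # (N, C, H, W) -> (N, H, W, C)
to_channel_first = K.layers.Permute((3, 1, 2))  # (N, H, W, C) -> (N, C, H, W)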
##
+# @brief Translayer for batch normalization layer
+class BatchNormTransLayer(IdentityTransLayer):
+    def build(self, input_shape):
+        if len(input_shape) > 3:
+            self.tf_layer = ChannelLastTransLayer(self.tf_layer)
+        self.tf_layer.build(input_shape)
+
+    def call(self, input, training=None):
+        # pass training as a keyword so __call__ routes it correctly
+        return self.tf_layer(input, training=training)
+
+    def to_nntr_weights(self, tensorOrList):
+        x = tensorOrList
+        assert len(x) == 4
+        return [x[2], x[3], x[0], x[1]]
+
+
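# A self-contained sketch of the reorder performed by to_nntr_weights(),
# assuming the standard Keras BatchNormalization weight order
# [gamma, beta, moving_mean, moving_variance]; the NNTrainer-side order
# follows from the index mapping [2, 3, 0, 1] above.
keras_order = ["gamma", "beta", "moving_mean", "moving_variance"]
nntr_order = [keras_order[i] for i in (2, 3, 0, 1)]
assert nntr_order == ["moving_mean", "moving_variance", "gamma", "beta"]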
+##
# @brief Multiout wrapper layer; this class separates the gradient
# to calculate derivatives properly
# when called, this returns [x] * @a num_output just like the output layer does in NNTrainer
# @brief init function
# @param tf_layer tf_layer used to infer the number of outputs
# @param num_output explicit number of outputs to generate
-    def __init__(self, tf_layer = None, *args, num_output, **kwargs):
+    def __init__(self, tf_layer=None, *args, num_output, **kwargs):
        if not tf_layer:
            tf_layer = K.layers.Layer()
        return [layer(tf_output) for layer in self.stub_layers]
+
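# A standalone sketch of the multiout idea (illustrative names, not the
# patch code): wrapping one tensor in num_output identity layers gives each
# downstream consumer its own graph node, so TensorFlow accumulates a
# separate gradient per path instead of fusing them at a single tensor.
from tensorflow import keras as K

inp = K.Input(shape=(4,))
stub_layers = [K.layers.Lambda(lambda t: t) for _ in range(2)]  # identity stubs
outputs = [stub(inp) for stub in stub_layers]  # [x] * num_output, per the @brief above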
##
# @brief A factory function to attach a translayer to an existing layer
# if nothing needs to be attached, the layer is returned unchanged
}
);
+INI conv_bn(
+  "conv_bn",
+  {
+    nn_base + "learning_rate=0.1 | optimizer=sgd | loss=cross | batch_size=3",
+    I("input_layer") + input_base + "input_shape=2:3:5",
+    I("conv2d_c1") + conv_base + "kernel_size = 2,2 | filters=2",
+    I("bn") + bn_base,
+    I("act_1") + relu_base,
+    I("flatten", "type=flatten"),
+    I("outputlayer") + fc_base + "unit = 10",
+    I("act_2") + softmax_base
+  }
+);
+
INI conv_same_padding_multi_stride(
  "conv_same_padding_multi_stride",
  mkModelTc(conv_uneven_strides, "3:1:1:10", 10),
  mkModelTc(conv_uneven_strides2, "3:1:1:10", 10),
  mkModelTc(conv_uneven_strides3, "3:1:1:10", 10),
+  mkModelTc(conv_bn, "3:1:1:10", 10),
  mkModelTc(conv_same_padding_multi_stride, "3:1:1:10", 10),
  mkModelTc(conv_no_loss_validate, "3:1:1:10", 1),
  mkModelTc(conv_none_loss_validate, "3:1:1:10", 1),