[layer] object initialization bugfix
author Parichay Kapoor <pk.kapoor@samsung.com>
Mon, 15 Jun 2020 08:37:36 +0000 (17:37 +0900)
committer Jijoong Moon <jijoong.moon@samsung.com>
Mon, 15 Jun 2020 09:37:17 +0000 (18:37 +0900)
Added bugfix for object initialization: initialize Layer members via the
member initializer list and give WeightDecayParam a default constructor so
both start from known default values.
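
As a rough illustration (a minimal standalone sketch, not part of the patch
itself; main() and the assertions are only for demonstration), the
WeightDecayParam default constructor means a value declared without explicit
setup now starts from known defaults instead of indeterminate memory:

    #include <cassert>

    enum class WeightDecayType { l2norm = 0, regression = 1, unknown = 2 };

    typedef struct WeightDecayParam_ {
      WeightDecayType type;
      float lambda;

      WeightDecayParam_() : type(WeightDecayType::unknown), lambda(0.0) {}
    } WeightDecayParam;

    int main() {
      WeightDecayParam p; // no explicit initialization by the caller
      // Well-defined after this patch; reading these members of an
      // automatic-storage WeightDecayParam was previously undefined behavior.
      assert(p.type == WeightDecayType::unknown);
      assert(p.lambda == 0.0f);
      return 0;
    }

Layer's members are likewise moved from assignments in the constructor body
in layer.cpp to an in-class member initializer list in layer.h.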

Signed-off-by: Parichay Kapoor <pk.kapoor@samsung.com>
nntrainer/include/layer.h
nntrainer/include/optimizer.h
nntrainer/src/layer.cpp

diff --git a/nntrainer/include/layer.h b/nntrainer/include/layer.h
index 51689af..c372f6e 100644
--- a/nntrainer/include/layer.h
+++ b/nntrainer/include/layer.h
@@ -98,7 +98,17 @@ typedef enum {
  */
 class Layer {
 public:
-  Layer();
+  Layer() :
+    last_layer(false),
+    init_zero(false),
+    type(LAYER_UNKNOWN),
+    activation(NULL),
+    activation_prime(NULL),
+    activation_type(ACT_UNKNOWN),
+    bn_follow(false),
+    weight_decay(),
+    weight_ini_type(WEIGHT_UNKNOWN) {}
+
   /**
    * @brief     Destructor of Layer Class
    */
diff --git a/nntrainer/include/optimizer.h b/nntrainer/include/optimizer.h
index 8e7a0ef..5be118d 100644
--- a/nntrainer/include/optimizer.h
+++ b/nntrainer/include/optimizer.h
@@ -47,9 +47,11 @@ enum class WeightDecayType { l2norm = 0, regression = 1, unknown = 2 };
 /**
  * @brief     type for the Weight Decay hyper-parameter
  */
-typedef struct {
+typedef struct WeightDecayParam_ {
   WeightDecayType type;
   float lambda;
+
+  WeightDecayParam_() : type(WeightDecayType::unknown), lambda(0.0) {}
 } WeightDecayParam;
 
 /**
diff --git a/nntrainer/src/layer.cpp b/nntrainer/src/layer.cpp
index fe7b46a..940658c 100644
--- a/nntrainer/src/layer.cpp
+++ b/nntrainer/src/layer.cpp
@@ -69,19 +69,6 @@ static void RandUniform(unsigned int b_n, Tensor &w, Args &&... args) {
   }
 }
 
-Layer::Layer() {
-  type = LAYER_UNKNOWN;
-  activation_type = ACT_UNKNOWN;
-  last_layer = false;
-  init_zero = false;
-  activation = NULL;
-  activation_prime = NULL;
-  bn_follow = false;
-  weight_decay.type = WeightDecayType::unknown;
-  weight_decay.lambda = 0.0;
-  weight_ini_type = WEIGHT_UNKNOWN;
-}
-
 int Layer::setActivation(ActiType acti) {
   int status = ML_ERROR_NONE;
   if (acti == ACT_UNKNOWN) {