[svace] Fix svace issues
author: Parichay Kapoor <pk.kapoor@samsung.com>
Tue, 20 Jul 2021 11:15:50 +0000 (20:15 +0900)
committer: Jijoong Moon <jijoong.moon@samsung.com>
Thu, 22 Jul 2021 11:47:24 +0000 (20:47 +0900)
Fix svace issues for layer_v2 branch

Signed-off-by: Parichay Kapoor <pk.kapoor@samsung.com>
Applications/Custom/LayerClient/jni/main.cpp
Applications/SimpleShot/layers/centroid_knn.h
nntrainer/layers/bn_layer.h
nntrainer/layers/conv2d_layer.h
nntrainer/layers/fc_layer.h
nntrainer/layers/gru.h
nntrainer/layers/layer_node.cpp
nntrainer/layers/lstm.h
nntrainer/layers/pooling2d_layer.h
nntrainer/layers/rnn.h
nntrainer/tensor/var_grad.h

index c288069..bb9b294 100644 (file)
@@ -218,12 +218,9 @@ int main(int argc, char *argv[]) {
     } else {
       return ini_model_run(arg);
     }
-  } catch (std::invalid_argument &e) {
+  } catch (std::exception &e) {
     std::cerr << "failed to run the model, reason: " << e.what() << std::endl;
     return 1;
-  } catch (std::regex_error &e) {
-    std::cerr << "failed to run the model, reaseon: " << e.what() << std::endl;
-    return 1;
   }
 
   /// should not reach here
index 5a7b356..9e67aea 100644 (file)
@@ -33,7 +33,7 @@ public:
    * @brief Construct a new NearestNeighbors Layer object that does elementwise
    * subtraction from mean feature vector
    */
-  CentroidKNN() : Layer(), num_class(0) {}
+  CentroidKNN() : Layer(), num_class(0), weight_idx({0}) {}
 
   /**
    *  @brief  Move constructor.
index 1107c61..f2df4af 100644 (file)
@@ -53,7 +53,8 @@ public:
     momentum(momentum),
     axis(axis),
     initializers{moving_variance_initializer, moving_variance_initializer,
-                 gamma_initializer, beta_initializer} {}
+                 gamma_initializer, beta_initializer},
+    wt_idx({0}) {}
 
   /**
    * @brief     Destructor of BatchNormalizationLayer
index 510b6c6..600b24f 100644 (file)
@@ -42,7 +42,8 @@ public:
     filter_size(filter_size_),
     kernel_size(kernel_size_),
     stride(stride_),
-    padding(padding_) {}
+    padding(padding_),
+    wt_idx({0}) {}
 
   /**
    * @brief     Destructor of Conv 2D Layer
index 587790e..7b71c7b 100644 (file)
@@ -29,7 +29,10 @@ public:
   /**
    * @brief     Constructor of Fully Connected Layer
    */
-  FullyConnectedLayer() : LayerImpl(), fc_props(props::Unit()) {}
+  FullyConnectedLayer() :
+    LayerImpl(),
+    fc_props(props::Unit()),
+    weight_idx({0}) {}
 
   /**
    * @brief     Destructor of Fully Connected Layer
index 338b71f..2cab519 100644 (file)
@@ -36,12 +36,13 @@ public:
     bool sequence = false, float dropout = 0.0) :
     LayerImpl(),
     props(props::Unit()),
+    wt_idx({0}),
     hidden_state_activation_type(hidden_state_activation_type_),
     acti_func(hidden_state_activation_type, true),
     recurrent_activation_type(recurrent_activation_type_),
     recurrent_acti_func(recurrent_activation_type, true),
     return_sequences(sequence),
-    dropout_rate(dropout){};
+    dropout_rate(dropout) {}
 
   /**
    * @brief     Destructor of GRULayer
index 0039cae..88af9bc 100644 (file)
@@ -116,7 +116,12 @@ int LayerNode::setProperty(std::vector<std::string> properties) {
     auto &ac = nntrainer::AppContext::Global();
     std::unique_ptr<nntrainer::Layer> dlayer =
       ac.createObject<nntrainer::Layer>(TimeDistLayer::type);
-    dynamic_cast<TimeDistLayer *>(dlayer.get())->setDistLayer(std::move(layer));
+    if (dlayer.get() == nullptr)
+      throw std::invalid_argument("Error creating time distribution layer");
+    auto *time_dist_layer = dynamic_cast<TimeDistLayer *>(dlayer.get());
+    if (time_dist_layer == nullptr)
+      throw std::invalid_argument("Error casting to time distribution layer");
+    time_dist_layer->setDistLayer(std::move(layer));
     layer = std::move(dlayer);
   }
 
index ae7a3f3..237a41d 100644 (file)
@@ -36,12 +36,13 @@ public:
     bool sequence = false, float dropout = 0.0) :
     LayerImpl(),
     props(props::Unit()),
+    wt_idx({0}),
     hidden_state_activation_type(hidden_state_activation_type_),
     acti_func(hidden_state_activation_type, true),
     recurrent_activation_type(recurrent_activation_type_),
     recurrent_acti_func(recurrent_activation_type, true),
     return_sequences(sequence),
-    dropout_rate(dropout){};
+    dropout_rate(dropout) {}
 
   /**
    * @brief     Destructor of LSTMLayer
index 81cfced..d8e257b 100644 (file)
@@ -67,6 +67,7 @@ public:
     stride(stride_),
     padding(padding_),
     pool2d_props(),
+    pool_helper_idx(0),
     pooling_type(pooling_type_) {}
 
   /**
index 84d3ed8..f3c701c 100644 (file)
@@ -35,6 +35,7 @@ public:
     bool ret_sequence = false, float dropout = 0.0) :
     LayerImpl(),
     props(props::Unit()),
+    wt_idx({0}),
     hidden_state_activation_type(hidden_state_activation_type_),
     acti_func(hidden_state_activation_type, true),
     return_sequences(ret_sequence),
index f27b47a..7f98f4c 100644 (file)
@@ -38,7 +38,7 @@ public:
    * @brief Var_Grad default constructor
    * @note Default variable is not need_gradient as gradient is 0 dim tensor
    */
-  Var_Grad() = default;
+  Var_Grad() : Var_Grad(TensorDim()) {}
 
   /**
    * @brief Var_Grad default destructor