[ahub] fix ahub issues
authorSeungbaek Hong <sb92.hong@samsung.com>
Wed, 19 Jul 2023 02:21:02 +0000 (11:21 +0900)
committerJijoong Moon <jijoong.moon@samsung.com>
Fri, 21 Jul 2023 06:18:32 +0000 (15:18 +0900)
Fix some issues reported by the Svace and Coverity static analyzers.

**Self evaluation:**
1. Build test:  [X]Passed [ ]Failed [ ]Skipped
2. Run test:  [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Seungbaek Hong <sb92.hong@samsung.com>
nntrainer/layers/layer_node.cpp
nntrainer/models/neuralnet.cpp
nntrainer/tensor/tensor.cpp

index 2ef2da6e518ad798c64426f27dfe8be0154e48c6..9adacece9917b4eed80c0d54cb59d2b56daa3bd3 100644 (file)
@@ -162,6 +162,7 @@ LayerNode::LayerNode(std::unique_ptr<nntrainer::Layer> &&l) :
   needs_calc_derivative(false),
   needs_calc_gradient(false),
   output_connections(),
+  tensor_type(TensorDim::Format::NCHW),
   run_context(nullptr),
   layer_node_props(
     new PropsType(props::Name(), props::Distribute(), props::Trainable(), {},
index 4d61932f923521e26e4eb1f6728aa8221961a5d9..0db418fbc3c60eb6ada96827953e9d5ebc10933f 100644 (file)
@@ -266,7 +266,14 @@ int NeuralNetwork::initialize() {
 /**
  * @brief     free layers
  */
-NeuralNetwork::~NeuralNetwork() { deallocate(); }
+NeuralNetwork::~NeuralNetwork() {
+  try {
+    deallocate();
+  } catch (const std::runtime_error &e) {
+    std::cerr << "Error occured during destroying NeuralNetwork: " << e.what()
+              << std::endl;
+  }
+}
 
 /**
  * @brief     forward propagation using layers object which has layer
index 3160aa4927d83a6e4ec276112f86ec3bfa0b2711..1362e322b4af51ea2578c4b757dc018e69e0cc42 100644 (file)
@@ -1863,7 +1863,7 @@ void Tensor::print_(std::ostream &out, uint opt) const {
           for (unsigned int j = 0; j < height(); j++) {
             out << "{";
             for (unsigned int l = 0; l < width(); l++) {
-              if (l < channel() - 1)
+              if (l < width() - 1)
                 out << std::setw(10) << std::setprecision(10)
                     << this->getValue(k, l, i, j) << ", ";
               else