[Loss] Fix loss layer allocating new memory
author Jihoon Lee <jhoon.it.lee@samsung.com>
Fri, 22 Jan 2021 08:09:38 +0000 (17:09 +0900)
committer Jijoong Moon <jijoong.moon@samsung.com>
Tue, 2 Mar 2021 01:27:55 +0000 (10:27 +0900)
As allocation is managed in `manager`, a layer shouldn't allocate new
memory for tensors that the manager owns.
However, the loss layer was allocating new memory. This patch fixes the issue.
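For reference, the pattern the patch moves to: the out-of-place overloads write
into a caller-supplied tensor, and the `_i` variants mutate in place and return
an error code (as the `divide_i` checks in the diff suggest). A minimal
before/after sketch using only calls that appear in this patch, with tensor
names as in `calcDerivative()`:

```cpp
// Before: every chained call allocates a fresh Tensor, and the final
// assignment replaces the manager-owned buffer of ret_derivative.
ret_derivative = y.subtract(y2).multiply(2).divide(y.getDim().getDataLen());

// After: write into the preallocated tensor, then mutate it in place.
y.subtract(y2, ret_derivative);   // out-of-place into the existing buffer
ret_derivative.multiply_i(2);     // in-place, no allocation
if (ret_derivative.divide_i(y.length()) != ML_ERROR_NONE)
  throw std::runtime_error(
    "[Loss::calcDerivative] Error when calculating loss");
```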

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
Cc: Parichay Kapoor <pk.kapoor@samsung.com>
nntrainer/layers/loss_layer.cpp
test/unittest/unittest_nntrainer_models.cpp

index 9e1d185..a0c53b0 100644
@@ -129,15 +129,30 @@ void LossLayer::calcDerivative() {
   /// @todo loss backwarding is allocating a new memory. loss layer shouldn't!!
   switch (loss_type) {
   case LossType::LOSS_MSE:
-    ret_derivative = y.subtract(y2).multiply(2).divide(y.getDim().getDataLen());
+    y.subtract(y2, ret_derivative);
+    ret_derivative.multiply_i(2);
+    if (ret_derivative.divide_i(y.length()) != ML_ERROR_NONE) {
+      throw std::runtime_error(
+        "[Loss::calcDerivative] Error when calculating loss");
+    }
     break;
   case LossType::LOSS_ENTROPY_SIGMOID:
-    ret = y.apply(ActivationLayer::sigmoid, ret);
-    ret_derivative = ret.subtract(y2).divide(ret.getDim().getDataLen());
+    y.apply(ActivationLayer::sigmoid, ret_derivative);
+    ret_derivative.subtract_i(y2);
+    if (ret_derivative.divide_i(ret_derivative.length()) != ML_ERROR_NONE) {
+      throw std::runtime_error(
+        "[Loss::calcDerivative] Error when calculating loss");
+    }
     break;
   case LossType::LOSS_ENTROPY_SOFTMAX:
-    ret = y.apply(ActivationLayer::softmax, ret);
-    ret_derivative = ret.subtract(y2).divide(ret.batch());
+    /// @note y and ret_derivative can be the same here, so this has to be
+    /// an out-of-place operation
+    y.apply(ActivationLayer::softmax, ret);
+    ret.subtract(y2, ret_derivative);
+    if (ret_derivative.divide_i(ret.batch()) != ML_ERROR_NONE) {
+      throw std::runtime_error(
+        "[Loss::calcDerivative] Error when calculating loss");
+    }
     break;
   case LossType::LOSS_ENTROPY:
     throw std::runtime_error(
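The softmax branch is the subtle one: as the new `@note` says, `y` and
`ret_derivative` may be the same tensor, so the softmax result has to land in
the temporary `ret` first. A hedged illustration of the hazard (names as in
the patch; the aliasing setup is the assumption):

```cpp
// Assume ret_derivative aliases y's buffer. Writing softmax straight into
// ret_derivative would read and overwrite the same buffer mid-computation.
// Routing the result through the temporary `ret` keeps y intact:
y.apply(ActivationLayer::softmax, ret);  // y's buffer is only read
ret.subtract(y2, ret_derivative);        // aliased buffer is written last
```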
index 1ad3714..599e936 100644
@@ -281,11 +281,11 @@ void NodeWatcher::backward(int iteration, bool verify_deriv, bool verify_grad) {
   std::vector<nntrainer::Tensor> out = node.layer->getDerivatives();
 
   if (verify_grad) {
-    verifyGrad(err_msg);
+    verifyGrad(err_msg + " grad");
   }
 
   if (verify_deriv) {
-    verify(out[0], expected_dx, err_msg);
+    verify(out[0], expected_dx, err_msg + " deriv");
   }
 
   verifyWeight(err_msg);