From: Jihoon Lee
Date: Fri, 22 Jan 2021 08:09:38 +0000 (+0900)
Subject: [Loss] Fix loss layer allocating new memory
X-Git-Tag: accepted/tizen/unified/20210305.034114~31
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=587a6cb9fb7bc1eccfe1731ab6294fa00ddeddcd;p=platform%2Fcore%2Fml%2Fnntrainer.git

[Loss] Fix loss layer allocating new memory

As allocation is managed in `manager`, a layer shouldn't allocate new
memory for tensors that are already managed by the manager. However, the
loss layer was allocating new memory in `calcDerivative()`. This patch
fixes the issue.

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee
Cc: Parichay Kapoor
---

diff --git a/nntrainer/layers/loss_layer.cpp b/nntrainer/layers/loss_layer.cpp
index 9e1d185..a0c53b0 100644
--- a/nntrainer/layers/loss_layer.cpp
+++ b/nntrainer/layers/loss_layer.cpp
@@ -129,15 +129,30 @@ void LossLayer::calcDerivative() {
   /// @todo loss backwarding is allocating a new memory. loss layer shouldn't!!
   switch (loss_type) {
   case LossType::LOSS_MSE:
-    ret_derivative = y.subtract(y2).multiply(2).divide(y.getDim().getDataLen());
+    y.subtract(y2, ret_derivative);
+    ret_derivative.multiply_i(2);
+    if (ret_derivative.divide_i(y.length()) != ML_ERROR_NONE) {
+      throw std::runtime_error(
+        "[Loss::calcDerivative] Error when calculating loss");
+    }
     break;
   case LossType::LOSS_ENTROPY_SIGMOID:
-    ret = y.apply(ActivationLayer::sigmoid, ret);
-    ret_derivative = ret.subtract(y2).divide(ret.getDim().getDataLen());
+    y.apply(ActivationLayer::sigmoid, ret_derivative);
+    ret_derivative.subtract_i(y2);
+    if (ret_derivative.divide_i(ret_derivative.length()) != ML_ERROR_NONE) {
+      throw std::runtime_error(
+        "[Loss::calcDerivative] Error when calculating loss");
+    }
     break;
   case LossType::LOSS_ENTROPY_SOFTMAX:
-    ret = y.apply(ActivationLayer::softmax, ret);
-    ret_derivative = ret.subtract(y2).divide(ret.batch());
+    /// @note y and ret_derivative can be same here, so this has to be an
+    /// out-of-place operation
+    y.apply(ActivationLayer::softmax, ret);
+    ret.subtract(y2, ret_derivative);
+    if (ret_derivative.divide_i(ret.batch()) != ML_ERROR_NONE) {
+      throw std::runtime_error(
+        "[Loss::calcDerivative] Error when calculating loss");
+    }
     break;
   case LossType::LOSS_ENTROPY:
     throw std::runtime_error(
diff --git a/test/unittest/unittest_nntrainer_models.cpp b/test/unittest/unittest_nntrainer_models.cpp
index 1ad3714..599e936 100644
--- a/test/unittest/unittest_nntrainer_models.cpp
+++ b/test/unittest/unittest_nntrainer_models.cpp
@@ -281,11 +281,11 @@ void NodeWatcher::backward(int iteration, bool verify_deriv, bool verify_grad) {
   std::vector<nntrainer::Tensor> out = node.layer->getDerivatives();
 
   if (verify_grad) {
-    verifyGrad(err_msg);
+    verifyGrad(err_msg + " grad");
   }
 
   if (verify_deriv) {
-    verify(out[0], expected_dx, err_msg);
+    verify(out[0], expected_dx, err_msg + " deriv");
   }
 
   verifyWeight(err_msg);
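
For context, a minimal, self-contained sketch of the pattern this patch
adopts. The toy `Tensor` below is a hypothetical stand-in, not the
nntrainer API; only the method names mirror the diff. The point it
illustrates: chained out-of-place calls such as
`y.subtract(y2).multiply(2)` allocate a fresh buffer at every step, while
the out-argument and `_i` (in-place) variants write into a buffer that is
allocated once up front, as `manager` does for the real layers.

// Toy sketch (assumed simplified Tensor, not nntrainer code).
#include <cstddef>
#include <stdexcept>
#include <vector>

struct Tensor {
  std::vector<float> data;

  explicit Tensor(size_t len) : data(len, 0.0f) {}

  size_t length() const { return data.size(); }

  // Out-of-place: returns a brand-new buffer. Chaining these is what the
  // old code did on every backward pass.
  Tensor subtract(const Tensor &other) const {
    Tensor out(length());
    for (size_t i = 0; i < length(); ++i)
      out.data[i] = data[i] - other.data[i];
    return out;
  }

  // Out-argument: writes into a caller-provided, preallocated tensor,
  // like the patched y.subtract(y2, ret_derivative).
  void subtract(const Tensor &other, Tensor &out) const {
    if (out.length() != length())
      throw std::runtime_error("size mismatch");
    for (size_t i = 0; i < length(); ++i)
      out.data[i] = data[i] - other.data[i];
  }

  // In-place: mutates this tensor, no allocation at all.
  void multiply_i(float v) {
    for (float &x : data)
      x *= v;
  }

  // Returns an error code instead of throwing, mirroring the divide_i /
  // ML_ERROR_NONE convention checked in the patch (0 stands in for
  // ML_ERROR_NONE here).
  int divide_i(size_t v) {
    if (v == 0)
      return -1;
    for (float &x : data)
      x /= static_cast<float>(v);
    return 0;
  }
};

int main() {
  // ret_derivative models a buffer owned by a manager: allocated once.
  Tensor y(4), y2(4), ret_derivative(4);

  // Old pattern: allocates a temporary, then assigns over the managed buffer.
  Tensor old_result = y.subtract(y2);

  // Patched pattern: every intermediate lands in ret_derivative directly.
  y.subtract(y2, ret_derivative);
  ret_derivative.multiply_i(2);
  if (ret_derivative.divide_i(y.length()) != 0)
    throw std::runtime_error(
      "[Loss::calcDerivative] Error when calculating loss");

  (void)old_result;
  return 0;
}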