From 830cb7499f2c0070624dfefbe2d815519d265314 Mon Sep 17 00:00:00 2001
From: Parichay Kapoor
Date: Thu, 11 Mar 2021 22:54:44 +0900
Subject: [PATCH] [layer/unittest] Delete old unittest

Delete the negative unittest of the loss layer which tests that
forwarding fails without a label. Forwarding without a label is now a
valid operation for the loss layer.

The unittest currently fails because the number of inputs is not
managed correctly. The previous PR fixes this by ensuring that the
number of inputs and outputs stays correct; with that change, the test
instead segfaults, because the layer has not been initialized and its
inputs and outputs have not been assigned.

Signed-off-by: Parichay Kapoor
---
 test/unittest/unittest_nntrainer_layers.cpp | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/test/unittest/unittest_nntrainer_layers.cpp b/test/unittest/unittest_nntrainer_layers.cpp
index c557b06..137b84c 100644
--- a/test/unittest/unittest_nntrainer_layers.cpp
+++ b/test/unittest/unittest_nntrainer_layers.cpp
@@ -1933,14 +1933,6 @@ TEST(nntrainer_LossLayer, setLoss_02_n) {
   EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
 }
 
-TEST(nntrainer_LossLayer, DISABLED_forward_nolabel_n) {
-  nntrainer::LossLayer layer;
-  nntrainer::Tensor a = constant(1.0, 1, 1, 1, 1);
-  layer.setProperty({"input_shape=1:1:1:1"});
-  EXPECT_THROW(layer.forwarding_with_val({MAKE_SHARED_TENSOR(a)}),
-               std::invalid_argument);
-}
-
 TEST(nntrainer_LossLayer, forward_loss_unknown_n) {
   nntrainer::LossLayer layer;
   nntrainer::Tensor a = constant(1.0, 1, 1, 1, 1);
-- 
2.7.4
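
Note (reviewer context, not part of the patch): the removed test cannot simply
stay enabled as a negative test because EXPECT_THROW only catches C++
exceptions, while reading the unassigned inputs/outputs of an uninitialized
layer is undefined behavior that typically kills the test binary with a
segfault before GTest can record anything. The minimal sketch below
illustrates that distinction with a hypothetical toy layer; it deliberately
does not use the real nntrainer::LossLayer API, whose initialization and
forwarding details are not shown in this patch.

#include <gtest/gtest.h>
#include <memory>
#include <stdexcept>
#include <vector>

/* Hypothetical toy layer, for illustration only; not the nntrainer API. */
class ToyLossLayer {
public:
  /* The input buffer is assigned only after initialize() is called. */
  void initialize(size_t len) {
    input_ = std::make_unique<std::vector<float>>(len, 0.0f);
  }

  /* Validates its state and throws, so EXPECT_THROW can observe the misuse. */
  float forwarding_checked() {
    if (!input_)
      throw std::invalid_argument("forwarding called before initialize");
    return (*input_)[0];
  }

  /* No validation: dereferencing the unassigned buffer is undefined behavior
     and usually crashes the process, which no GTest assertion can catch. */
  float forwarding_unchecked() { return (*input_)[0]; }

private:
  std::unique_ptr<std::vector<float>> input_; /* null until initialize() */
};

/* A negative test is only meaningful when misuse surfaces as an exception. */
TEST(ToyLossLayer, forward_uninitialized_n) {
  ToyLossLayer layer;
  EXPECT_THROW(layer.forwarding_checked(), std::invalid_argument);
}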