[bugfix] Resolves Android build warnings
author: Donghyeon Jeong <dhyeon.jeong@samsung.com>
Tue, 16 Jul 2024 00:34:06 +0000 (09:34 +0900)
committer: Jijoong Moon <jijoong.moon@samsung.com>
Wed, 17 Jul 2024 07:47:12 +0000 (16:47 +0900)
This PR resolves warnings that occur during the Android build. The list is as follows.

**Changes proposed in this PR:**
- Resolves "explicitly defaulted function is implicitly deleted" warnings by declaring (rather than defaulting) the affected move constructors and move assignment operators.
- Marks functions that override virtual functions with the `override` specifier.
- Resolves clang warnings about expressions with side effects being used as `typeid` operands, by binding the dereferenced pointer to a reference first.

**Self-evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test:   [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Donghyeon Jeong <dhyeon.jeong@samsung.com>
nntrainer/layers/layer_node.h
nntrainer/layers/lstm.h
nntrainer/models/neuralnet.h
nntrainer/tensor/cache_pool.h
nntrainer/tensor/manager.h
test/unittest/unittest_nntrainer_appcontext.cpp
test/unittest/unittest_nntrainer_tensor.cpp
test/unittest/unittest_nntrainer_tensor_fp16.cpp
test/unittest/unittest_nntrainer_tensor_nhwc.cpp

index f37338660565c2a45d6783a501d7905a8b2cd78a..55b24f44e9656d923956e41ccd58583d45e346bf 100644 (file)
@@ -384,7 +384,7 @@ public:
    *
    * @return boolean true if trainable, else false
    */
-  bool getTrainable() const;
+  bool getTrainable() const override;
 
   /**
    * @brief     get if the output of this layer must be flatten
index f35fdf88154d483b7a49d6e3853d427b7547f7c5..21a7e495a1a4465fd6af1e9270fe1bc59c4a79c7 100644 (file)
@@ -41,13 +41,13 @@ public:
    *  @brief  Move constructor.
    *  @param[in] LSTMLayer &&
    */
-  LSTMLayer(LSTMLayer &&rhs) noexcept = default;
+  LSTMLayer(LSTMLayer &&rhs) noexcept;
 
   /**
    * @brief  Move assignment operator.
    * @parma[in] rhs LSTMLayer to be moved.
    */
-  LSTMLayer &operator=(LSTMLayer &&rhs) = default;
+  LSTMLayer &operator=(LSTMLayer &&rhs);
 
   /**
    * @copydoc Layer::finalize(InitLayerContext &context)
index da1571a32875749d3a652016a61988d70a1c57b9..30d2288fd5c54a7880ec5a7ad7a560201149515d 100644 (file)
@@ -194,7 +194,7 @@ public:
    * @retval #ML_ERROR_NONE Successful.
    * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
    */
-  int allocate(ExecutionMode mode = ExecutionMode::TRAIN);
+  int allocate(ExecutionMode mode = ExecutionMode::TRAIN) override;
 
   /**
    * @brief     Deallocate memory for the model.
index a23e6f87abcc7a3a9a6c5444452d7a56ae50d5a0..a986f18c3da3b5af58b5aa7eb50e011868abbd96 100644 (file)
@@ -76,7 +76,7 @@ public:
     size_t bytes, unsigned int start_time, unsigned int end_time,
     std::vector<unsigned int> exec_order = std::vector<unsigned int>(),
     TensorLifespan lifespan = TensorLifespan::MAX_LIFESPAN,
-    bool is_wgrad = false);
+    bool is_wgrad = false) override;
   /**
    * @brief Get the allocated cache
    *
@@ -86,7 +86,7 @@ public:
    *
    * @details This function will throw if called before allocation.
    */
-  virtual std::shared_ptr<MemoryData> getMemory(unsigned int id);
+  virtual std::shared_ptr<MemoryData> getMemory(unsigned int id) override;
 
   /**
    * @brief Is the cache pool allocated
index ab1c018153ceadf6e3668550b005814d8f6f689e..8ae5aa890af1f427c13c0bdcee672078f049690e 100644 (file)
@@ -168,14 +168,14 @@ public:
    * @brief Move Construct a new Manager object
    *
    */
-  Manager(Manager &&) noexcept = default;
+  Manager(Manager &&) noexcept;
 
   /**
    * @brief Move assign a new Manager object
    *
    * @return Manager& reference to newly assign
    */
-  Manager &operator=(Manager &&) noexcept = default;
+  Manager &operator=(Manager &&) noexcept;
 
   /**
    * @brief     Destructor of Manager
index c7ef039df8abcd3a1e453d7f36ce362fe825cd21..c7249a4dc7cda0ab1b2b542da50173f745c1b125 100644 (file)
@@ -176,9 +176,12 @@ TEST_P(AppContextTest, RegisterCreateCustomOptimizer_p) {
   EXPECT_EQ(num_id, ((int_key == -1) ? (-1) * int_key : int_key));
   auto opt = ac.createObject<nntrainer::Optimizer>(
     ((key == "") ? "identity_optimizer" : key), {});
-  EXPECT_EQ(typeid(*opt).hash_code(), typeid(CustomOptimizer).hash_code());
+  auto &optimizer = *opt.get();
+  EXPECT_EQ(typeid(optimizer).hash_code(), typeid(CustomOptimizer).hash_code());
   opt = ac.createObject<nntrainer::Optimizer>(num_id, {});
-  EXPECT_EQ(typeid(*opt).hash_code(), typeid(CustomOptimizer).hash_code());
+  auto &new_optimizer = *opt.get();
+  EXPECT_EQ(typeid(new_optimizer).hash_code(),
+            typeid(CustomOptimizer).hash_code());
 }
 
 GTEST_PARAMETER_TEST(RegisterCreateCustomOptimizerTests, AppContextTest,
index 94aa01836d4e11f301c6b43b78ba3474692407be..12c887305595d3dbb445c65047bb59075f78f930 100644 (file)
@@ -2225,7 +2225,7 @@ TEST(nntrainer_Tensor, multiple_sum_invalid_args_01_n) {
 
 TEST(nntrainer_Tensor, multiple_sum_out_of_range_n) {
   nntrainer::Tensor t = constant(1.0, 1, 1, 1, 1);
-  EXPECT_THROW(t.sum({7}), std::out_of_range);
+  EXPECT_THROW(t.sum(7), std::out_of_range);
 }
 
 TEST(nntrainer_Tensor, multiple_sum_p) {
index 2b0d9c040dc5c950ce8a9446c4f54c783d12f123..c0b060108d141c5ad2438abc62f156fd809af126 100644 (file)
@@ -3686,7 +3686,7 @@ TEST(nntrainer_Tensor, multiple_sum_invalid_args_01_n) {
 TEST(nntrainer_Tensor, multiple_sum_out_of_range_n) {
   nntrainer::Tensor t = constant(1.0, 1, 1, 1, 1, nntrainer::Tformat::NCHW,
                                  nntrainer::Tdatatype::FP16);
-  EXPECT_THROW(t.sum({7}), std::out_of_range);
+  EXPECT_THROW(t.sum(7), std::out_of_range);
 }
 
 TEST(nntrainer_Tensor, multiple_sum_p) {
index 11f91a4189565beb2a0dd979d645f88396fa3ab5..f65e1b4eda60a80aab4b40fad021e6e42fadac7a 100644 (file)
@@ -2527,7 +2527,7 @@ TEST(nntrainer_Tensor, multiple_sum_invalid_args_01_hnwc_n) {
 
 TEST(nntrainer_Tensor, multiple_sum_out_of_range_nhwc_n) {
   nntrainer::Tensor t = constant(1.0, 1, 1, 1, 1, NHWC_, FP32_);
-  EXPECT_THROW(t.sum({7}), std::out_of_range);
+  EXPECT_THROW(t.sum(7), std::out_of_range);
 }
 
 TEST(nntrainer_Tensor, multiple_sum_nhwc_p) {