[Coverity] Fix DIVIDE_BY_ZERO issue
authorEunju Yang <ej.yang@samsung.com>
Wed, 5 Feb 2025 04:52:00 +0000 (13:52 +0900)
committerjijoong.moon <jijoong.moon@samsung.com>
Tue, 11 Feb 2025 05:43:35 +0000 (14:43 +0900)
- This commit resolves coverity issue of DIVIDE_BY_ZERO.
- This commit updates preprocess_l2norm_layer.cpp/.h

Signed-off-by: Eunju Yang <ej.yang@samsung.com>
nntrainer/layers/preprocess_l2norm_layer.cpp
nntrainer/layers/preprocess_l2norm_layer.h

index e8b9dd5eaaddea728c383e72c79549a3e3beec87..6aeee4aa37e3f23b440f0b53ddc332f09a4edc6c 100644 (file)
@@ -39,6 +39,7 @@ void PreprocessL2NormLayer::finalize(InitLayerContext &context) {
 
 void PreprocessL2NormLayer::forwarding(RunLayerContext &context,
                                        bool training) {
+  const float epsilon = std::get<props::Epsilon>(l2norm_props);
   auto &hidden_ = context.getOutput(SINGLE_INOUT_IDX);
 
   auto &input_ = context.getInput(SINGLE_INOUT_IDX);
@@ -46,7 +47,7 @@ void PreprocessL2NormLayer::forwarding(RunLayerContext &context,
   for (uint b = 0; b < input_.batch(); ++b) {
     auto input_slice = input_.getBatchSlice(b, 1);
     auto hidden_slice = hidden_.getBatchSlice(b, 1);
-    input_slice.multiply(1 / input_slice.l2norm(), hidden_slice);
+    input_slice.multiply(1 / (input_slice.l2norm() + epsilon), hidden_slice);
   }
 }
 
@@ -57,11 +58,10 @@ void PreprocessL2NormLayer::calcDerivative(RunLayerContext &context) {
 
 void PreprocessL2NormLayer::setProperty(
   const std::vector<std::string> &values) {
-  if (!values.empty()) {
-    std::string msg = "[FlattenLayer] Unknown Layer Properties count " +
-                      std::to_string(values.size());
-    throw exception::not_supported(msg);
-  }
+  auto remain_props = loadProperties(values, l2norm_props);
+  NNTR_THROW_IF(!remain_props.empty(), std::invalid_argument)
+    << "[PreprocessL2Norm Layer] Unknown Layer Properties count " +
+         std::to_string(remain_props.size());
 }
 
 } // namespace nntrainer
index 9f28debb704fd2f887b2f2c1d4f9f7dce429fb90..7fe1cb7737642faa780c58db7683b60d701865d2 100644 (file)
@@ -31,7 +31,7 @@ public:
    * @brief Construct a new L2norm Layer object
    * that normalizes given feature with l2norm
    */
-  PreprocessL2NormLayer() : Layer() {}
+  PreprocessL2NormLayer() : Layer(), l2norm_props(props::Epsilon()) {}
 
   /**
    *  @brief  Move constructor.
@@ -91,6 +91,9 @@ public:
   void setProperty(const std::vector<std::string> &values) override;
 
   inline static const std::string type = "preprocess_l2norm";
+
+private:
+  std::tuple<props::Epsilon> l2norm_props;
 };
 } // namespace nntrainer