[layers] Dump acti_func into header
author: skykongkong8 <ss.kong@samsung.com>
Thu, 10 Aug 2023 06:46:44 +0000 (15:46 +0900)
committer: Jijoong Moon <jijoong.moon@samsung.com>
Mon, 21 Aug 2023 06:29:23 +0000 (15:29 +0900)
- For easier maintenance, dump everything into the header, since there are only a few functions left after applying templates in acti_func.cpp

Resolves:

**Self evaluation:**
1. Build test:     [X]Passed [ ]Failed [ ]Skipped
2. Run test:     [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: skykongkong8 <ss.kong@samsung.com>
nntrainer/layers/acti_func.cpp [deleted file]
nntrainer/layers/acti_func.h
nntrainer/layers/meson.build

diff --git a/nntrainer/layers/acti_func.cpp b/nntrainer/layers/acti_func.cpp
deleted file mode 100644 (file)
index 039237b..0000000
+++ /dev/null
@@ -1,58 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0
-/**
- * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
- *
- * @file   acti_func.cpp
- * @date   22 March 2021
- * @see    https://github.com/nnstreamer/nntrainer
- * @author Jihoon Lee <jhoon.it.lee@samsung.com>
- * @author Jijoong Moon <jijoong.moon@samsung.com>
- * @bug    No known bugs except for NYI items
- * @brief  This is Activation Layer Class for Neural Network
- *
- */
-
-#include <algorithm>
-#include <cmath>
-#include <fstream>
-#include <functional>
-#include <iostream>
-#include <vector>
-
-#include <acti_func.h>
-#include <blas_interface.h>
-#include <lazy_tensor.h>
-#include <nntrainer_error.h>
-#include <nntrainer_log.h>
-#include <tensor.h>
-#include <util_func.h>
-
-namespace nntrainer {
-
-ActiFunc::~ActiFunc() {}
-
-void ActiFunc::run_fn(Tensor const &input, Tensor &output) {
-  _act_fn(input, output);
-}
-
-Tensor &ActiFunc::run_prime_fn(Tensor &input, Tensor &output,
-                               Tensor &outgoing_derivative,
-                               Tensor const &incoming_derivative) {
-  return _act_prime_fn(input, output, outgoing_derivative, incoming_derivative);
-}
-
-Tensor &ActiFunc::run_prime_fn(Tensor &output, Tensor &outgoing_derivative,
-                               Tensor const &incoming_derivative) {
-  return _act_prime_fn(Tensor(), output, outgoing_derivative,
-                       incoming_derivative);
-}
-
-bool ActiFunc::supportInPlace() const { return in_place; }
-
-void ActiFunc::executeInPlace(bool val) {
-  if (val && !supportInPlace())
-    throw std::runtime_error("Error setting activation layer to work in-place");
-
-  in_place = val;
-}
-}; // namespace nntrainer
index 1e2595f..5f194dd 100644 (file)
@@ -44,7 +44,7 @@ public:
   /**
    * @brief     Destructor of ActiFunc
    */
-  ~ActiFunc();
+  ~ActiFunc(){};
 
   /**
    * @brief setActivation by preset ActivationType
@@ -93,7 +93,7 @@ public:
    * @param[in] input : input
    * @param[out] output : output
    */
-  void run_fn(Tensor const &input, Tensor &output);
+  void run_fn(Tensor const &input, Tensor &output) { _act_fn(input, output); }
 
   /**
    * @brief run prime function
@@ -106,7 +106,10 @@ public:
    */
   Tensor &run_prime_fn(Tensor &input, Tensor &output,
                        Tensor &outgoing_derivative,
-                       Tensor const &incoming_derivative);
+                       Tensor const &incoming_derivative) {
+    return _act_prime_fn(input, output, outgoing_derivative,
+                         incoming_derivative);
+  }
 
   /**
    * @brief run prime function
@@ -117,12 +120,15 @@ public:
    * @retVal    Tensor
    */
   Tensor &run_prime_fn(Tensor &output, Tensor &outgoing_derivative,
-                       Tensor const &incoming_derivative);
+                       Tensor const &incoming_derivative) {
+    return _act_prime_fn(Tensor(), output, outgoing_derivative,
+                         incoming_derivative);
+  }
 
   /**
    * @copydoc Layer::supportInPlace()
    */
-  bool supportInPlace() const;
+  bool supportInPlace() const { return in_place; }
 
   /**
    * @brief       Calculate softmax for Tensor Type
@@ -573,7 +579,13 @@ public:
    *
    * @param val True if execute in-place, else false
    */
-  void executeInPlace(bool val);
+  void executeInPlace(bool val) {
+    if (val && !supportInPlace())
+      throw std::runtime_error(
+        "Error setting activation layer to work in-place");
+
+    in_place = val;
+  }
 
 private:
   std::function<Tensor &(Tensor const &, Tensor &)> _act_fn;
index 0433730..9d42a3e 100644 (file)
@@ -25,7 +25,6 @@ layer_sources = [
   'embedding.cpp',
   'rnn.cpp',
   'rnncell.cpp',
-  'acti_func.cpp',
   'lstm.cpp',
   'lstmcell.cpp',
   'lstmcell_core.cpp',