[TFLite Export] Update node_exporter
authorDongHak Park <donghak.park@samsung.com>
Fri, 14 Apr 2023 09:00:01 +0000 (18:00 +0900)
committerJijoong Moon <jijoong.moon@samsung.com>
Thu, 27 Jul 2023 10:38:19 +0000 (19:38 +0900)
Add Epsilon Props to additional_props for fusing
- For fusing we need the Epsilon prop of batch normalization
Add padding, stride props to props_vector
- For Conv fusing we need to make a new BuiltinOption, and to build a new BuiltinOption with a FUSED activation we need the padding and stride props

Signed-off-by: DongHak Park <donghak.park@samsung.com>
nntrainer/utils/node_exporter.cpp
nntrainer/utils/node_exporter.h

index bc76ec4..a1b4a50 100644 (file)
@@ -7,6 +7,7 @@
  * @brief NNTrainer Node exporter
  * @see        https://github.com/nnstreamer/nntrainer
  * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @author Donghak Park <donghak.park@samsung.com>
  * @bug No known bugs except for NYI items
  */
 #include <node_exporter.h>
@@ -143,6 +144,24 @@ void Exporter::saveTflResult(const std::tuple<props::Activation> &props,
 
 template <>
 void Exporter::saveTflResult(
+  const std::tuple<props::Epsilon, props::BNPARAMS_MU_INIT,
+                   props::BNPARAMS_VAR_INIT, props::BNPARAMS_BETA_INIT,
+                   props::BNPARAMS_GAMMA_INIT, props::Momentum, props::Axis,
+                   props::WeightDecay, props::BiasDecay> &props,
+  const BatchNormalizationLayer *self) {
+  createIfNull(tf_node);
+
+  auto epsilon = std::get<props::Epsilon>(props).get();
+  tf_node->AppendAdditionalProps(epsilon);
+
+  tf_node->setOpType(tflite::BuiltinOperator_MUL);
+  auto options =
+    tflite::CreateMulOptions(*fbb, tflite::ActivationFunctionType_NONE).Union();
+  tf_node->setBuiltinOptions(tflite::BuiltinOptions_MulOptions, options);
+}
+
+template <>
+void Exporter::saveTflResult(
   const std::tuple<props::FilterSize, std::array<props::KernelSize, CONV2D_DIM>,
                    std::array<props::Stride, CONV2D_DIM>, props::Padding2D,
                    std::array<props::Dilation, CONV2D_DIM>> &props,
@@ -183,6 +202,11 @@ void Exporter::saveTflResult(
   auto options = tflite::CreateConv2DOptions(*fbb, tflite_padding(padding),
                                              strides.at(0), strides.at(1))
                    .Union();
+
+  tf_node->AppendProps(tflite_padding(padding));
+  tf_node->AppendProps(strides.at(0));
+  tf_node->AppendProps(strides.at(1));
+
   tf_node->setBuiltinOptions(tflite::BuiltinOptions_Conv2DOptions, options);
 }
 
index e8220d9..dd7b8c6 100644 (file)
@@ -7,6 +7,7 @@
  * @brief NNTrainer Node exporter
  * @see        https://github.com/nnstreamer/nntrainer
  * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @author Donghak Park <donghak.park@samsung.com>
  * @bug No known bugs except for NYI items
  */
 #ifndef __NODE_EXPORTER_H__
@@ -22,6 +23,7 @@
 #include <base_properties.h>
 #include <common.h>
 #include <common_properties.h>
+#include <layer.h>
 #include <nntrainer_error.h>
 #include <util_func.h>
 
@@ -230,6 +232,7 @@ class InputConnection;
 class ClipGradByGlobalNorm;
 class DisableBias;
 class Activation;
+class BatchNormalization;
 } // namespace props
 
 class LayerNode;
@@ -245,6 +248,19 @@ void Exporter::saveTflResult(
                    props::ClipGradByGlobalNorm> &props,
   const LayerNode *self);
 
+class BatchNormalizationLayer;
+/**
+ * @copydoc template <typename PropsType, typename NodeType> void
+ * Exporter::saveTflResult(const PropsType &props, const NodeType *self);
+ */
+template <>
+void Exporter::saveTflResult(
+  const std::tuple<props::Epsilon, props::BNPARAMS_MU_INIT,
+                   props::BNPARAMS_VAR_INIT, props::BNPARAMS_BETA_INIT,
+                   props::BNPARAMS_GAMMA_INIT, props::Momentum, props::Axis,
+                   props::WeightDecay, props::BiasDecay> &props,
+  const BatchNormalizationLayer *self);
+
 class LayerImpl;
 
 /**