Use static casting to convert enum type (#7104)
author: 오형석 / On-Device Lab (SR) / Staff Engineer / 삼성전자 <hseok82.oh@samsung.com>
Tue, 3 Sep 2019 02:24:21 +0000 (11:24 +0900)
committer: GitHub Enterprise <noreply-CODE@samsung.com>
Tue, 3 Sep 2019 02:24:21 +0000 (11:24 +0900)
To resolve an implicit enum-type conversion warning, introduce a conversion helper and use explicit static casting.

Signed-off-by: Hyeongseok Oh <hseok82.oh@samsung.com>
runtimes/neurun/frontend/tflite/loader.cc

index 7764c32..3339119 100644 (file)
 
 #include "cpp14/memory.h"
 
+namespace
+{
+
+using namespace tflite;
+using namespace neurun;
+
+// Convert a TFLite fused-activation enum value to the corresponding
+// neurun model::Activation value.
+// NOTE(review): this mapping relies on both enums assigning identical
+// numeric values to each activation kind -- confirm they stay in sync
+// when either enum changes.
+model::Activation convertActivation(const ActivationFunctionType type)
+{
+  // static_cast converts directly between enumeration types; an
+  // intermediate integer cast is unnecessary.
+  return static_cast<model::Activation>(type);
+}
+
+} // namespace
+
 namespace tflite_loader
 {
 using namespace tflite;
@@ -158,7 +171,7 @@ template <typename Param> void Loader::loadPool2D(Param &param, const Pool2DOpti
   param.kw = options->filter_width();
   param.kh = options->filter_height();
   // Activation
-  param.activation = neurun::model::Activation(options->fused_activation_function());
+  param.activation = convertActivation(options->fused_activation_function());
 }
 
 void Loader::loadConv2D(const tflite::Operator *op)
@@ -170,7 +183,7 @@ void Loader::loadConv2D(const tflite::Operator *op)
 
   model::operation::Conv2DNode::Param param;
   const auto *options = op->builtin_options_as_Conv2DOptions();
-  param.activation = neurun::model::Activation(options->fused_activation_function());
+  param.activation = convertActivation(options->fused_activation_function());
   loadStridesAndPaddings(param, options);
   // Dilation h/w factor unused
   std::unique_ptr<model::Operation> new_op(
@@ -187,7 +200,7 @@ void Loader::loadDepthwiseConv2D(const tflite::Operator *op)
 
   model::operation::DepthwiseConv2DNode::Param param;
   const auto *options = op->builtin_options_as_DepthwiseConv2DOptions();
-  param.activation = neurun::model::Activation(options->fused_activation_function());
+  param.activation = convertActivation(options->fused_activation_function());
   loadStridesAndPaddings(param, options);
   // Multiplier
   model::Shape shape;
@@ -294,7 +307,7 @@ void Loader::loadFC(const tflite::Operator *op)
   model::operation::FullyConnectedNode::Param param;
   const auto *options = op->builtin_options_as_FullyConnectedOptions();
 
-  param.activation = neurun::model::Activation(options->fused_activation_function());
+  param.activation = convertActivation(options->fused_activation_function());
   // weights_format unused
 
   std::unique_ptr<model::Operation> new_op(
@@ -312,7 +325,7 @@ void Loader::loadAdd(const tflite::Operator *op)
   model::operation::AddNode::Param param;
   const auto *options = op->builtin_options_as_AddOptions();
 
-  param.activation = neurun::model::Activation(options->fused_activation_function());
+  param.activation = convertActivation(options->fused_activation_function());
 
   std::unique_ptr<model::Operation> new_op(new model::operation::AddNode(inputs, outputs, param));
   _graph.addOperation(std::move(new_op));
@@ -328,7 +341,7 @@ void Loader::loadMul(const tflite::Operator *op)
   model::operation::MulNode::Param param;
   const auto *options = op->builtin_options_as_MulOptions();
 
-  param.activation = neurun::model::Activation(options->fused_activation_function());
+  param.activation = convertActivation(options->fused_activation_function());
 
   std::unique_ptr<model::Operation> new_op(new model::operation::MulNode(inputs, outputs, param));
   _graph.addOperation(std::move(new_op));
@@ -344,7 +357,7 @@ void Loader::loadSub(const tflite::Operator *op)
   model::operation::SubNode::Param param;
   const auto *options = op->builtin_options_as_SubOptions();
 
-  param.activation = neurun::model::Activation(options->fused_activation_function());
+  param.activation = convertActivation(options->fused_activation_function());
 
   std::unique_ptr<model::Operation> new_op(new model::operation::SubNode(inputs, outputs, param));
   _graph.addOperation(std::move(new_op));