From: 오형석/On-Device Lab(SR)/Staff Engineer/삼성전자 Date: Tue, 3 Sep 2019 02:24:21 +0000 (+0900) Subject: Use static casting to convert enum type (#7104) X-Git-Tag: accepted/tizen/unified/20190904.110638~33 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=10d36953be07f7766a528240e2fda9220e8950e3;p=platform%2Fcore%2Fml%2Fnnfw.git Use static casting to convert enum type (#7104) To resolve implicit enum type casting warning, introduce convert helper and use static casting Signed-off-by: Hyeongseok Oh --- diff --git a/runtimes/neurun/frontend/tflite/loader.cc b/runtimes/neurun/frontend/tflite/loader.cc index 7764c32..3339119 100644 --- a/runtimes/neurun/frontend/tflite/loader.cc +++ b/runtimes/neurun/frontend/tflite/loader.cc @@ -20,6 +20,19 @@ #include "cpp14/memory.h" +namespace +{ + +using namespace tflite; +using namespace neurun; + +model::Activation convertActivation(const ActivationFunctionType type) +{ + return static_cast<model::Activation>(static_cast<int>(type)); +} + +} // namespace anonymous + namespace tflite_loader { using namespace tflite; @@ -158,7 +171,7 @@ template void Loader::loadPool2D(Param &param, const Pool2DOpti param.kw = options->filter_width(); param.kh = options->filter_height(); // Activation - param.activation = neurun::model::Activation(options->fused_activation_function()); + param.activation = convertActivation(options->fused_activation_function()); } void Loader::loadConv2D(const tflite::Operator *op) @@ -170,7 +183,7 @@ void Loader::loadConv2D(const tflite::Operator *op) model::operation::Conv2DNode::Param param; const auto *options = op->builtin_options_as_Conv2DOptions(); - param.activation = neurun::model::Activation(options->fused_activation_function()); + param.activation = convertActivation(options->fused_activation_function()); loadStridesAndPaddings(param, options); // Dilation h/w factor unused std::unique_ptr new_op( @@ -187,7 +200,7 @@ void Loader::loadDepthwiseConv2D(const tflite::Operator *op) model::operation::DepthwiseConv2DNode::Param 
param; const auto *options = op->builtin_options_as_DepthwiseConv2DOptions(); - param.activation = neurun::model::Activation(options->fused_activation_function()); + param.activation = convertActivation(options->fused_activation_function()); loadStridesAndPaddings(param, options); // Multiplier model::Shape shape; @@ -294,7 +307,7 @@ void Loader::loadFC(const tflite::Operator *op) model::operation::FullyConnectedNode::Param param; const auto *options = op->builtin_options_as_FullyConnectedOptions(); - param.activation = neurun::model::Activation(options->fused_activation_function()); + param.activation = convertActivation(options->fused_activation_function()); // weights_format unused std::unique_ptr new_op( @@ -312,7 +325,7 @@ void Loader::loadAdd(const tflite::Operator *op) model::operation::AddNode::Param param; const auto *options = op->builtin_options_as_AddOptions(); - param.activation = neurun::model::Activation(options->fused_activation_function()); + param.activation = convertActivation(options->fused_activation_function()); std::unique_ptr new_op(new model::operation::AddNode(inputs, outputs, param)); _graph.addOperation(std::move(new_op)); @@ -328,7 +341,7 @@ void Loader::loadMul(const tflite::Operator *op) model::operation::MulNode::Param param; const auto *options = op->builtin_options_as_MulOptions(); - param.activation = neurun::model::Activation(options->fused_activation_function()); + param.activation = convertActivation(options->fused_activation_function()); std::unique_ptr new_op(new model::operation::MulNode(inputs, outputs, param)); _graph.addOperation(std::move(new_op)); @@ -344,7 +357,7 @@ void Loader::loadSub(const tflite::Operator *op) model::operation::SubNode::Param param; const auto *options = op->builtin_options_as_SubOptions(); - param.activation = neurun::model::Activation(options->fused_activation_function()); + param.activation = convertActivation(options->fused_activation_function()); std::unique_ptr new_op(new 
model::operation::SubNode(inputs, outputs, param)); _graph.addOperation(std::move(new_op));