/*
 * Copyright (c) 2023 Samsung Electronics Co., Ltd. All Rights Reserved
 * Copyright 2017 The TensorFlow Authors. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef LUCI_INTERPRETER_PAL_APPLY_ACTIVATION_TO_VECTOR_H
#define LUCI_INTERPRETER_PAL_APPLY_ACTIVATION_TO_VECTOR_H

#include <algorithm>
#include <cassert>
#include <cmath>

#include "tensorflow/lite/c/builtin_op_data.h"
28 namespace luci_interpreter_pal
31 // Dynamic (non-fused) activation functor. perhaps it is worth having
32 // template instantiation?
33 // TODO(aselle): Make this more efficient by pulling the switch to conv_eval
34 // using template inlining.
35 class ActivationFunctor
38 explicit ActivationFunctor(TfLiteFusedActivation act) : act_(act) {}
40 float operator()(float a) const
47 return a < 0.f ? 0.f : a;
49 return std::max(0.f, std::min(a, 6.f));
52 case kTfLiteActSigmoid:
53 return 1.0f / (1.0f + std::exp(-a));
55 assert(false && "Activation functor is not supported");
60 TfLiteFusedActivation act_;
63 inline void ApplyActivationToVector(const float *vector, int v_size,
64 TfLiteFusedActivation activation, float *result)
66 auto activation_func = ActivationFunctor(activation);
67 for (int v = 0; v < v_size; v++)
69 *result++ = (activation_func)(*vector++);
73 } // namespace luci_interpreter_pal
75 #endif // LUCI_INTERPRETER_PAL_APPLY_ACTIVATION_TO_VECTOR_H