// Copyright (c) 2016 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
///////////////////////////////////////////////////////////////////////////////////////////////////
#pragma once
#include "primitive.hpp"

namespace cldnn {
/// @addtogroup cpp_api C++ API
/// @{
/// @addtogroup cpp_topology Network Topology
/// @{
/// @addtogroup cpp_primitives Primitives
/// @{
/// @brief activation functions
enum class activation_func {
    logistic,             // 1/(1 + exp(-val))
    hyperbolic_tan,       // tanh(val)
    relu_negative_slope,  // max(0, val) + a * min(0, val) (a is additional param)
    clamp,                // max(a, min(b, val)) (a,b are additional params)
    softrelu,             // log(1 + exp(val))
    linear,               // a*val + b (a,b are additional params)
    elu,                  // max(0, val) + a * (exp(min(0, val)) - 1) (a is additional param)
    reciprocal,           // (1/val)
    erf,                  // Gauss error function
    hard_sigmoid,         // max(0, min(1, a * val + b)) (a,b are additional params)
    selu,                 // for val <= 0: b * (a * e^val - a); for val > 0: b * val (a,b are additional params)
    sign,                 // val > 0: 1; val < 0: -1; val == 0: 0
    softplus,             // ln(exp(val) + 1)
    softsign              // (val/(1+|val|))
};
/// @brief activation gradient functions
enum class activation_grad_func {
    relu,                 // val * (input > 0)
    relu_negative_slope,  // val * ((input > 0) + a * (input <= 0)) (a is additional param)
};
/// @brief activation additional params
struct activation_additional_params {
    float a, b;
};
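
// Illustrative note: how the generic (a, b) pair is read depends on the chosen
// activation_func, e.g. clamp treats a as the lower and b as the upper bound,
// while linear computes a*val + b. For example (values hypothetical):
//
//     activation_additional_params relu6_params = {0.f, 6.f};   // clamp to [0, 6]
//     activation_additional_params scale_shift  = {2.f, 0.5f};  // linear: 2*val + 0.5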
/// @brief Activation using rectified linear unit or parameterized rectified linear unit.
/// @details Can use a single negative slope shared across all channels, or one negative slope per channel.
/// @par Algorithm:
///   out(i,x,y) = max(0, in(i,x,y)) + slope(i) * min(0, in(i,x,y))
/// @par Where:
///   @li out(i,x,y) : value at x, y from i-th feature map after activation.
///   @li in(i,x,y) : value at x, y from i-th feature map before activation.
///   @li slope(i) : the slope value of the i-th feature map (can be shared across channels or one slope per channel).
struct activation : public primitive_base<activation> {
    CLDNN_DECLARE_PRIMITIVE(activation)
    /// @brief Constructs activation primitive.
    /// @param id This primitive id.
    /// @param input Input primitive id.
    /// @param activation_function activation function.
    /// @param additional_params additional params (slope/max_val/linear a,b).
    activation(const primitive_id& id,
               const primitive_id& input,
               activation_func activation_function,
               activation_additional_params additional_params = {0.f, 0.f},
               const padding& output_padding = padding())
        : primitive_base(id, {input}, output_padding),
          activation_function(activation_function),
          additional_params(additional_params),
          additional_params_input("") {}
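
    // A minimal usage sketch (assumes a cldnn::topology named "topology" and an
    // existing primitive with id "conv1"; both names are hypothetical):
    //
    //     // ReLU6-style clamp: out = max(0, min(6, in))
    //     topology.add(activation("act1", "conv1", activation_func::clamp, {0.f, 6.f}));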
    /// @brief Constructs activation primitive with additional params provided per feature.
    /// @param id This primitive id.
    /// @param input Input primitive id.
    /// @param additional_params_input additional params stored in memory.
    /// Input x dimension should be equal to the input feature size (one value per channel; in the case of linear, one pair per channel).
    /// All other dimensions should be 1.
    /// @param activation_function activation function.
    activation(const primitive_id& id,
               const primitive_id& input,
               const primitive_id& additional_params_input,
               activation_func activation_function,
               const padding& output_padding = padding())
        : primitive_base(id, {input}, output_padding),
          activation_function(activation_function),
          additional_params({0, 0}),
          additional_params_input(additional_params_input) {}
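
    // A per-channel sketch (assumes the slope values were uploaded via a cldnn::data
    // primitive with id "slopes", whose x dimension equals the input feature count;
    // all names are hypothetical):
    //
    //     topology.add(data("slopes", slopes_mem));  // one slope value per channel
    //     topology.add(activation("prelu1", "conv1", "slopes", activation_func::relu_negative_slope));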
    /// @brief activation function.
    activation_func activation_function;

    /// @brief activation additional params.
    activation_additional_params additional_params;

    /// @brief PRelu activation slope input primitive id.
    /// Input x dimension should be equal to input feature size (one slope per channel).
    /// All other dimensions should be 1.
    primitive_id additional_params_input;
protected:
    std::vector<std::reference_wrapper<const primitive_id>> get_dependencies() const override {
        if (additional_params_input.empty())
            return {};
        return {additional_params_input};
    }
};
/// @}
/// @}
/// @}
}  // namespace cldnn