Publishing 2019 R3 content (platform/upstream/dldt.git)

diff --git a/inference-engine/thirdparty/clDNN/api/activation.hpp b/inference-engine/thirdparty/clDNN/api/activation.hpp
@@ -16,7 +16,6 @@
 
 ///////////////////////////////////////////////////////////////////////////////////////////////////
 #pragma once
-#include "../C/activation.h"
 #include "primitive.hpp"
 #include <vector>
 
@@ -28,6 +27,60 @@ namespace cldnn {
 /// @addtogroup cpp_primitives Primitives
 /// @{
 
+/// @brief activation functions
+enum class activation_func {
+    none,                 // val
+    logistic,             // 1/(1 + exp(-val))
+    hyperbolic_tan,       // tanh(val)
+    relu,                 // max(0, val)
+    relu_negative_slope,  // max(0, val) + a * min(0, val)    (a is additional param)
+    clamp,                // max(a, min(b, val))              (a,b are additional params)
+    softrelu,             // log(1 + exp(val))
+    abs,                  // abs(val)
+    linear,               // a*val + b                        (a,b are additional params)
+    square,               // val*val
+    sqrt,                 // sqrt(val)
+    elu,                  // max(0, val) + a * (exp(min(0, val)) - 1)  (a is additional param)
+    sin,                  // sin(val)
+    asin,                 // asin(val)
+    sinh,                 // sinh(val)
+    asinh,                // asinh(val)
+    cos,                  // cos(val)
+    acos,                 // acos(val)
+    cosh,                 // cosh(val)
+    acosh,                // acosh(val)
+    log,                  // log(val)
+    log2,                 // log2(val)
+    exp,                  // exp(val)
+    tan,                  // tan(val)
+    atan,                 // atan(val)
+    atanh,                // atanh(val)
+    floor,                // floor(val)
+    ceil,                 // ceil(val)
+    negative,             // -val
+    negation,             // !val
+    pow,                  // pow(val, a)
+    reciprocal,           // (1/val)
+    erf,                  // Gauss error function
+    hard_sigmoid,         // max(0, min(1, a * val + b))       (a,b are additional params)
+    selu,                 // for val <= 0: b * (a * e^val - a); for val > 0: b * val (a,b are additional params)
+    sign,                 // val > 0: 1; val < 0: -1; val == 0: 0
+    softplus,             // ln(exp(val) + 1)
+    softsign              // (val/(1+|val|))
+};
+
+/// @brief activation gradient functions
+enum class activation_grad_func {
+    none,                 // val
+    relu,                 // val * (input > 0)
+    relu_negative_slope,  // val * ((input > 0) + a * (input <= 0))   (a is additional param)
+};
+
+/// @brief activation additional params
+struct activation_additional_params {
+    float a, b;
+};
+
 /// @brief Activation using rectified linear unit or parameterized rectified linear unit.
 /// @details Takes either a single negative slope or one negative slope per channel.
 /// @par Algorithm:
@@ -36,7 +89,7 @@ namespace cldnn {
 ///   @li out(i,x,y) : value at x, y from i-th feature map after activation.
 ///   @li in(i,x,y) : value at x, y from i-th feature map before activation.
 ///   @li slope(i) : the slope value of the i-th feature map (can be shared across channels or one slope per channel).
-struct activation : public primitive_base<activation, CLDNN_PRIMITIVE_DESC(activation)> {
+struct activation : public primitive_base<activation> {
     CLDNN_DECLARE_PRIMITIVE(activation)
 
     /// @brief Constructs activation primitive.
@@ -46,11 +99,11 @@ struct activation : public primitive_base<activation, CLDNN_PRIMITIVE_DESC(activ
     /// @param additional_params additional params (slope/max_val/linear a,b).
     activation(const primitive_id& id,
                const primitive_id& input,
-               cldnn_activation_func activation_func,
-               cldnn_activation_additional_params additional_params = {0.f, 0.f},
+               activation_func activation_function,
+               activation_additional_params additional_params = {0.f, 0.f},
                const padding& output_padding = padding())
         : primitive_base(id, {input}, output_padding),
-          activation_func(activation_func),
+          activation_function(activation_function),
           additional_params(additional_params),
           additional_params_input("") {}
 
@@ -63,25 +116,18 @@ struct activation : public primitive_base<activation, CLDNN_PRIMITIVE_DESC(activ
     activation(const primitive_id& id,
                const primitive_id& input,
                const primitive_id& additional_params_input,
-               cldnn_activation_func activation_func,
+               activation_func activation_function,
                const padding& output_padding = padding())
         : primitive_base(id, {input}, output_padding),
-          activation_func(activation_func),
+          activation_function(activation_function),
           additional_params({0, 0}),
           additional_params_input(additional_params_input) {}
 
-    /// @brief Constructs a copy from basic C API @CLDNN_PRIMITIVE_DESC{activation}
-    activation(const dto* dto)
-        : primitive_base(dto),
-          activation_func(dto->activation_func),
-          additional_params(dto->additional_params),
-          additional_params_input(dto->additional_params_input) {}
-
     /// @brief activation function.
-    cldnn_activation_func activation_func;
+    activation_func activation_function;
 
     /// @brief activation additional params.
-    cldnn_activation_additional_params additional_params;
+    activation_additional_params additional_params;
 
     /// @brief PRelu activation slope input primitive id.
     /// Input x dimension should be equal to input feature size (one slope per channel).
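The second constructor covers the per-channel (PRelu) case: instead of a scalar {a, b} pair it takes the id of another primitive that supplies the slopes. A hedged sketch, assuming a data primitive "slopes" whose x dimension equals the input feature count, as the comment above requires:

    // "slopes" is an assumed data primitive holding one slope value per channel.
    cldnn::activation prelu("prelu", "conv1", "slopes",
                            cldnn::activation_func::relu_negative_slope);
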
@@ -94,14 +140,8 @@ protected:
             return {};
         return {additional_params_input};
     }
-
-    void update_dto(dto& dto) const override {
-        dto.activation_func = activation_func;
-        dto.additional_params = additional_params;
-        dto.additional_params_input = additional_params_input.c_str();
-    }
 };
 /// @}
 /// @}
 /// @}
-}  // namespace cldnn
\ No newline at end of file
+}  // namespace cldnn
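
For context, a minimal end-to-end sketch of the reworked interface. The include paths, the default-constructed engine, and all ids are assumptions based on the 2019 R3 clDNN C++ API, not part of this change:

    #include <api/engine.hpp>
    #include <api/topology.hpp>
    #include <api/network.hpp>
    #include <api/input_layout.hpp>
    #include <api/activation.hpp>

    int main() {
        cldnn::engine engine;  // default engine configuration
        const cldnn::layout in_layout(cldnn::data_types::f32, cldnn::format::bfyx,
                                      cldnn::tensor(cldnn::batch(1), cldnn::feature(3),
                                                    cldnn::spatial(4, 4)));
        cldnn::topology topology;
        topology.add(cldnn::input_layout("input", in_layout));
        // The scoped enum replaces the former cldnn_activation_func constants.
        topology.add(cldnn::activation("tanh_act", "input",
                                       cldnn::activation_func::hyperbolic_tan));
        cldnn::network network(engine, topology);
        return 0;
    }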