inference-engine/thirdparty/mkl-dnn/src/cpu/jit_uni_softmax.hpp
/*******************************************************************************
* Copyright 2017 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*     http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/

#ifndef CPU_JIT_UNI_SOFTMAX_HPP
#define CPU_JIT_UNI_SOFTMAX_HPP

#include <assert.h>

#include "c_types_map.hpp"
#include "cpu_softmax_pd.hpp"
#include "cpu_engine.hpp"
#include "type_helpers.hpp"
#include "utils.hpp"
#include "jit_primitive_conf.hpp"
#include "jit_uni_softmax_kernel_f32.hpp"
#include "mkldnn_types.h"

namespace mkldnn {
namespace impl {
namespace cpu {

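/* Forward softmax primitive backed by a JIT-generated f32 kernel. The
 * template parameter selects the target CPU ISA (a cpu_isa_t value such as
 * sse42, avx2, or avx512_common). pd_t::init() verifies that the problem is
 * supported before the kernel is built. */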
template <cpu_isa_t isa>
struct jit_uni_softmax_fwd_t : public cpu_primitive_t {
    struct pd_t : public cpu_softmax_fwd_pd_t {
        pd_t(engine_t *engine, const softmax_desc_t *adesc,
             const primitive_attr_t *attr,
             const softmax_fwd_pd_t *hint_fwd_pd)
            : cpu_softmax_fwd_pd_t(engine, adesc, attr, hint_fwd_pd) {}

        DECLARE_COMMON_PD_T(
                JIT_IMPL_NAME_HELPER("jit:", isa, ""),
                jit_uni_softmax_fwd_t<isa>);

        virtual status_t init() override {
            using namespace prop_kind;

            assert(engine()->kind() == engine_kind::cpu);

            auto ndims = desc_.data_desc.ndims;
            auto dims = desc_.data_desc.dims;
            auto axis = desc_.softmax_axis;

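            // Number of elements inner to the softmax axis (product of all
            // dims after it); the checks below require it to be > 1.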
            size_t inner_size = utils::array_product(dims + axis + 1, ndims - axis - 1);

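            // Only plain NC[D]HW-style layouts are supported, selected by the
            // number of dimensions of the data tensor.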
            memory_format_t desired_fmt;
            switch (ndims) {
                case 3: desired_fmt = memory_format::ncw; break;
                case 4: desired_fmt = memory_format::nchw; break;
                case 5: desired_fmt = memory_format::ncdhw; break;
                default: return status::unimplemented;
            }

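            // The implementation applies only when: the ISA is available at
            // runtime, the pass is forward, the data is f32, the source
            // memory is dense, both source and destination use the expected
            // plain format, and the inner dimension is non-trivial.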
            bool ok = mayiuse(isa)
                      && utils::one_of(desc()->prop_kind, forward_training,
                                       forward_inference)
                      && utils::everyone_is(data_type::f32, desc()->data_desc.data_type)
                      && memory_desc_wrapper(src_pd()).is_dense(true)
                      && utils::everyone_is(desired_fmt, src_pd()->desc()->format,
                                            dst_pd()->desc()->format)
                      && inner_size > 1;

            if (!ok) return status::unimplemented;

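            // Delegate the remaining checks to the kernel; on success jpp_
            // holds the configuration used to generate the JIT code.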
            return jit_uni_softmax_kernel_f32<isa>::init_conf(jpp_, desc_,
                                                              src_pd()->desc(), dst_pd()->desc());
        }
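        // Kernel parameters produced by init_conf() above.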
        jit_softmax_conf_t jpp_;
    };

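    /* The constructor is expected to instantiate the JIT kernel from
     * pd()->jpp_; the destructor releases it (both defined in the .cpp). */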
    jit_uni_softmax_fwd_t(const pd_t *apd, const input_vector &inputs,
                          const output_vector &outputs);
    ~jit_uni_softmax_fwd_t();

    using data_t = prec_traits<data_type::f32>::type;

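    // Run the forward pass and signal completion to the caller.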
    virtual void execute(event_t *e) const override {
        execute_forward();
        e->set_state(event_t::ready);
    }

private:
    void execute_forward() const;
    const pd_t *pd() const { return (const pd_t *)primitive_t::pd(); }
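    // Owned JIT kernel; its lifetime is managed by the ctor/dtor.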
    jit_uni_softmax_kernel_f32<isa> *kernel_;
};

} // namespace cpu
} // namespace impl
} // namespace mkldnn

#endif // CPU_JIT_UNI_SOFTMAX_HPP