// Source: inference-engine/thirdparty/mkl-dnn/src/cpu/jit_uni_softmax.hpp
// (imported as part of the 2019 R1 content publication of platform/upstream/dldt.git)
1 /*******************************************************************************
2 * Copyright 2017 Intel Corporation
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *     http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 *******************************************************************************/
16
17 #ifndef CPU_JIT_UNI_SOFTMAX_HPP
18 #define CPU_JIT_UNI_SOFTMAX_HPP
19
20 #include <assert.h>
21
22 #include "c_types_map.hpp"
23 #include "cpu_softmax_pd.hpp"
24 #include "cpu_engine.hpp"
25 #include "type_helpers.hpp"
26 #include "utils.hpp"
27 #include "jit_primitive_conf.hpp"
28 #include "jit_uni_softmax_kernel_f32.hpp"
29 #include "mkldnn_types.h"
30
31
32 namespace mkldnn {
33 namespace impl {
34 namespace cpu {
35
36 template <cpu_isa_t isa>
37 struct jit_uni_softmax_fwd_t : public cpu_primitive_t {
38     struct pd_t : public cpu_softmax_fwd_pd_t {
39         pd_t(engine_t *engine, const softmax_desc_t *adesc,
40              const primitive_attr_t *attr,
41              const softmax_fwd_pd_t *hint_fwd_pd)
42             : cpu_softmax_fwd_pd_t(engine, adesc, attr, hint_fwd_pd) {}
43
44         DECLARE_COMMON_PD_T(
45                 JIT_IMPL_NAME_HELPER("jit:", isa, ""),
46                 jit_uni_softmax_fwd_t<isa>);
47
48         virtual status_t init() override {
49             using namespace prop_kind;
50
51             auto desired_fmt = memory_format::nchw;
52
53             assert(engine()->kind() == engine_kind::cpu);
54
55             auto ndims = desc_.data_desc.ndims;
56             auto dims = desc_.data_desc.dims;
57             auto axis = desc_.softmax_axis;
58
59             size_t inner_size = utils::array_product(dims + axis + 1, ndims - axis - 1);
60
61             bool ok = mayiuse(isa)
62                       && utils::one_of(desc()->prop_kind, forward_training,
63                                        forward_inference)
64                       && utils::everyone_is(data_type::f32, desc()->data_desc.data_type)
65                       && memory_desc_wrapper(src_pd()).is_dense(true)
66                       && utils::everyone_is(desired_fmt, src_pd()->desc()->format,
67                                             dst_pd()->desc()->format)
68                       && inner_size > 1;
69
70             if (!ok) return status::unimplemented;
71
72
73             return jit_uni_softmax_kernel_f32<isa>::init_conf(jpp_, desc_,
74                                                               src_pd()->desc(), dst_pd()->desc());
75         }
76         jit_softmax_conf_t jpp_;
77     };
78
79     jit_uni_softmax_fwd_t(const pd_t *apd, const input_vector &inputs,
80                        const output_vector &outputs);
81     ~jit_uni_softmax_fwd_t();
82
83     using data_t = prec_traits<data_type::f32>::type;
84
85     virtual void execute(event_t *e) const override {
86         execute_forward();
87         e->set_state(event_t::ready);
88     }
89
90 private:
91     void execute_forward() const;
92     const pd_t *pd() const { return (const pd_t *)primitive_t::pd(); }
93     jit_uni_softmax_kernel_f32<isa> *kernel_;
94 };
95
96 }
97 }
98 }
99
100 #endif