Publishing R3
[platform/upstream/dldt.git] / inference-engine / tests / unit / engines / mkldnn / graph / layers / internal / graph_softmax_test.cpp
1 // Copyright (C) 2018 Intel Corporation
2 //
3 // SPDX-License-Identifier: Apache-2.0
4 //
5
6 #include <gtest/gtest.h>
7 #include <gmock/gmock-spec-builders.h>
8 #include "mkldnn_plugin/mkldnn_graph.h"
9 #include "mock_mkldnn_primitive.hpp"
10
11 #include "test_graph.hpp"
12
13 #include "single_layer_common.hpp"
14 #include <mkldnn_plugin/mkldnn_extension_utils.h>
15 #include <inference_engine/cnn_network_impl.hpp>
16 #include "tests_common.hpp"
17
18
19 using namespace ::testing;
20 using namespace std;
21 using namespace mkldnn;
22
23
// Parameters for one SoftMax graph test case.
struct softmax_test_params {
    // Input tensor dimensions, NCHW layout.
    struct {
        size_t n;
        size_t c;
        size_t h;
        size_t w;
    } in;

    // Axis the SoftMax is applied along (0 = N, 1 = C, 2 = H, 3 = W).
    int axis;

    // Expected lower bound on the number of supported primitive descriptors.
    size_t num_prim_desc;

    // Bitmask the selected implementation type must match
    // (checked as `impl & selectedType == selectedType`).
    int selectedType;
    // Implementations to request via the layer's PrimitivesPriority attribute.
    std::vector<MKLDNNPlugin::impl_desc_type> preferTypes;

    // Optional checks, applied index-wise to the supported primitive descriptors.
    std::vector<std::function<void(MKLDNNPlugin::PrimitiveDescInfo)>> comp;
};
41
42 template <typename data_t>
43 void check_softmax_fwd(const InferenceEngine::TBlob<data_t> &src, softmax_test_params prm)
44 {
45     const data_t *src_data = src.readOnly();
46
47     size_t W = prm.in.w;
48     size_t H = prm.in.h;
49     size_t C = prm.in.c;
50     size_t MB = 1;
51
52     auto off = [=](int n, int c, int h, int w)
53     {
54         return (n * W * H * C + c * W * H + h * W + w);
55     };
56
57     auto check_norm = [=](double res) {
58         if(res < 0.999f || res > 1.001) {
59             ASSERT_TRUE(res > 0.99f && res < 1.01);
60         }
61     };
62
63     if(prm.axis == 0) {
64
65         for (int c = 0; c < C; ++c) {
66
67             for (int h = 0; h < H; ++h) {
68                 for (int w = 0; w < W; ++w) {
69                     double result = 0.0f;
70
71                     for (int n = 0; n < MB; ++n) {
72                         result += src_data[off(n, c, h, w)];//dst_ptr[map_index(dst_pd, off(n, c, h, w))];
73                     }
74                     check_norm(result);
75                 }
76             }
77         }
78     }
79     else if(prm.axis == 1) {
80         for (int n = 0; n < MB; ++n) {
81             for (int h = 0; h < H; ++h) {
82                 for (int w = 0; w < W; ++w) {
83                     double result = 0.0f;
84
85                     for (int c = 0; c < C; ++c) {
86                         result += src_data[off(n, c, h, w)];//dst_ptr[map_index(dst_pd, off(n, c, h, w))];
87                     }
88
89                     check_norm(result);
90                 }
91             }
92         }
93     }
94     else if(prm.axis == 2) {
95         for (int n = 0; n < MB; ++n) {
96             for (int c = 0; c < C; ++c) {
97                 for (int w = 0; w < W; ++w) {
98                     double result = 0.0f;
99
100                     for (int h = 0; h < H; ++h) {
101                         result += src_data[off(n, c, w, w)];//dst_ptr[map_index(dst_pd, off(n, c, h, w))];
102                     }
103
104                     check_norm(result);
105                 }
106             }
107         }
108     }
109     else if(prm.axis == 3) {
110         for (int n = 0; n < MB; ++n) {
111             for (int c = 0; c < C; ++c) {
112                 for (int h = 0; h < H; ++h) {
113                     double result = 0.0f;
114
115                     for (int w = 0; w < W; ++w) {
116                         result += src_data[off(n, c, h, w)];//dst_ptr[map_index(dst_pd, off(n, c, h, w))];
117                     }
118
119                     check_norm(result);
120                 }
121             }
122         }
123     }
124 }
125
126 class MKLDNNGraphSoftMaxTests: public TestsCommon,
127                                      public WithParamInterface<softmax_test_params> {
128     std::string model_t = R"V0G0N(
129 <Net Name="Lrn_Only" version="2" precision="FP32" batch="1">
130     <layers>
131         <layer name="in1" type="Input" precision="FP32" id="0">
132             <output>
133                 <port id="0">
134                     <dim>_IN_</dim>
135                     <dim>_IC_</dim>
136                     <dim>_IH_</dim>
137                     <dim>_IW_</dim>
138                 </port>
139             </output>
140         </layer>
141         <layer name="norm" id="1" type="Softmax" precision="FP32">
142             <data PrimitivesPriority="_IMPLS_"/>
143             <input>
144                 <port id="1">
145                     <dim>_IN_</dim>
146                     <dim>_IC_</dim>
147                     <dim>_IH_</dim>
148                     <dim>_IW_</dim>
149                 </port>
150             </input>
151             <output>
152                 <port id="2">
153                     <dim>_IN_</dim>
154                     <dim>_IC_</dim>
155                     <dim>_IH_</dim>
156                     <dim>_IW_</dim>
157                 </port>
158             </output>
159         </layer>
160     </layers>
161     <edges>
162         <edge from-layer="0" from-port="0" to-layer="1" to-port="1"/>
163     </edges>
164 </Net>
165 )V0G0N";
166
167 protected:
168     std::string getModel(softmax_test_params p) {
169         std::string model = model_t;
170
171         REPLACE_WITH_NUM(model, "_IW_", p.in.w);
172         REPLACE_WITH_NUM(model, "_IH_", p.in.h);
173         REPLACE_WITH_NUM(model, "_IC_", p.in.c);
174         REPLACE_WITH_NUM(model, "_IN_", p.in.n);
175         std::string impls;
176         for (const auto& preferType : p.preferTypes) {
177             if (!impls.empty())
178                 impls += ",";
179             impls += "cpu:" + MKLDNNGraphTestClass::getStrPrimitiveDescriptorType(preferType);
180         }
181         REPLACE_WITH_STR(model, "_IMPLS_", impls);
182
183         return model;
184     }
185
186     virtual void TearDown() {
187     }
188
189     virtual void SetUp() {
190         try {
191             TestsCommon::SetUp();
192             softmax_test_params p = ::testing::WithParamInterface<softmax_test_params>::GetParam();
193             std::string model = getModel(p);
194
195             InferenceEngine::CNNNetReader net_reader;
196             ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));
197
198             MKLDNNGraphTestClass graph;
199             graph.CreateGraph(net_reader.getNetwork());
200             auto& nodes = graph.getNodes();
201             for (int i = 0; i < nodes.size(); i++) {
202                 if (nodes[i]->getType() == MKLDNNPlugin::SoftMax) {
203                     ASSERT_LE(p.num_prim_desc, nodes[i]->getSupportedPrimitiveDescriptors().size());
204                     for (size_t j = 0; j < p.num_prim_desc && j < p.comp.size(); j++) {
205                         p.comp.at(j)(nodes[i]->getSupportedPrimitiveDescriptors().at(j));
206                     }
207                     ASSERT_NE(nullptr, nodes[i]->getSelectedPrimitiveDescriptor());
208                     ASSERT_EQ(p.selectedType, nodes[i]->getSelectedPrimitiveDescriptor()->getImplementationType() & p.selectedType);
209                 }
210             }
211
212             InferenceEngine::SizeVector dims_src = {p.in.n, p.in.c, p.in.h, p.in.w};
213
214             InferenceEngine::Blob::Ptr src = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, InferenceEngine::NCHW, dims_src);
215             src->allocate();
216             fill_data(src->buffer(), src->size());
217
218             InferenceEngine::TBlob<float>* srcPtr = dynamic_cast<InferenceEngine::TBlob<float>*>(src.get());
219
220             if (srcPtr == nullptr)
221                 FAIL() << "Cannot cast blob to TBlob<float>.";
222
223             InferenceEngine::BlobMap srcs;
224             srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src));
225
226             InferenceEngine::OutputsDataMap out;
227             out = net_reader.getNetwork().getOutputsInfo();
228             InferenceEngine::BlobMap outputBlobs;
229
230             std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();
231
232             InferenceEngine::TBlob<float>::Ptr output;
233             output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
234             output->allocate();
235             outputBlobs[item.first] = output;
236
237             graph.Infer(srcs, outputBlobs);
238
239             check_softmax_fwd(*output, p);
240         } catch (const InferenceEngine::details::InferenceEngineException &e) {
241             FAIL() << e.what();
242         }
243     }
244 };
245
TEST_P(MKLDNNGraphSoftMaxTests, TestsSoftMax) {}


// Static-batch coverage: SoftMax over axis 1 (channels) on a 1x3x228x228
// input, once with the jit implementation and once forcing the reference one
// via PrimitivesPriority.
INSTANTIATE_TEST_CASE_P(
        TestsSoftMax, MKLDNNGraphSoftMaxTests,
        ::testing::Values(
                softmax_test_params{{1, 3, 228, 228}, 1, 3, MKLDNNPlugin::impl_desc_type::jit},
                softmax_test_params{{1, 3, 228, 228}, 1, 3, MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}}));
254
255 class MKLDNNGraphDynBatchSoftMaxTests: public MKLDNNGraphSoftMaxTests {
256 protected:
257     virtual void SetUp() {
258         try {
259             TestsCommon::SetUp();
260             softmax_test_params p = ::testing::WithParamInterface<softmax_test_params>::GetParam();
261             std::string model = getModel(p);
262             size_t MB = p.in.n;
263             if (MB < 2)
264                 MB = 2;
265
266             InferenceEngine::CNNNetReader net_reader;
267             ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));
268             InferenceEngine::CNNNetwork network = net_reader.getNetwork();
269             auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
270             ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
271             InferenceEngine::ResponseDesc resp;
272             InferenceEngine::StatusCode sts  = implNet->setBatchSizeReshape(MB, &resp);
273             ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;
274
275             MKLDNNGraphTestClass graph;
276             graph.setProperty({{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED, InferenceEngine::PluginConfigParams::YES}});
277             graph.CreateGraph(net_reader.getNetwork());
278
279             InferenceEngine::SizeVector dims_src = {MB, p.in.c, p.in.h, p.in.w};
280
281             InferenceEngine::Blob::Ptr src = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, InferenceEngine::NCHW, dims_src);
282             src->allocate();
283             fill_data(src->buffer(), src->size());
284
285             InferenceEngine::TBlob<float>* srcPtr = dynamic_cast<InferenceEngine::TBlob<float>*>(src.get());
286
287             if (srcPtr == nullptr)
288                 FAIL() << "Cannot cast blob to TBlob<float>.";
289
290             InferenceEngine::BlobMap srcs;
291             srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src));
292
293             InferenceEngine::OutputsDataMap out;
294             out = net_reader.getNetwork().getOutputsInfo();
295             InferenceEngine::BlobMap outputBlobs;
296
297             std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();
298
299             InferenceEngine::TBlob<float>::Ptr output;
300             output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
301             output->allocate();
302             outputBlobs[item.first] = output;
303
304             auto checkSoftmax = [](const MKLDNNPlugin::MKLDNNNodePtr& node) {
305                 return node->getType() == MKLDNNPlugin::SoftMax;
306             };
307
308             graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkSoftmax);
309             graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkSoftmax);
310         } catch (const InferenceEngine::details::InferenceEngineException &e) {
311             FAIL() << e.what();
312         }
313     }
314 };
315
TEST_P(MKLDNNGraphDynBatchSoftMaxTests, TestsDynBatchSoftMax) {}


// Dynamic-batch coverage with the same parameter sets as the static-batch
// suite: SoftMax over axis 1 on a 1x3x228x228 input, jit and ref variants.
INSTANTIATE_TEST_CASE_P(
        TestsDynBatchSoftMax, MKLDNNGraphDynBatchSoftMaxTests,
        ::testing::Values(
                softmax_test_params{{1, 3, 228, 228}, 1, 3, MKLDNNPlugin::impl_desc_type::jit},
                softmax_test_params{{1, 3, 228, 228}, 1, 3, MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}}));