inference-engine/tests/unit/engines/mkldnn/graph/layers/internal/graph_pooling_test.cpp
// Copyright (C) 2018 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//

#include <gtest/gtest.h>
#include <gmock/gmock-spec-builders.h>
#include "mkldnn_plugin/mkldnn_graph.h"
#include "mock_mkldnn_primitive.hpp"

#include "test_graph.hpp"

#include "single_layer_common.hpp"
#include <mkldnn_plugin/mkldnn_extension_utils.h>
#include <inference_engine/cnn_network_impl.hpp>
#include "tests_common.hpp"


using namespace ::testing;
using namespace std;
using namespace mkldnn;

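// Test parameters: input shape (NCHW), kernel/stride/padding, the expected number of
// supported primitive descriptors, the expected implementation type, optional preferred
// implementations, and per-descriptor check callbacks.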
struct pooling_test_params {
    struct {
        size_t n;
        size_t c;
        size_t h;
        size_t w;
    } in;

    size_t krn_w;
    size_t krn_h;
    size_t str_w;
    size_t str_h;
    size_t pad_w;
    size_t pad_h;

    size_t num_prim_desc;

    MKLDNNPlugin::impl_desc_type selectedType;
    std::vector<MKLDNNPlugin::impl_desc_type> preferTypes;

    std::vector<std::function<void(MKLDNNPlugin::PrimitiveDescInfo)>> comp;
};

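// Reference max pooling: for every output element, take the maximum over the corresponding
// kernel window in the source blob, skipping positions that fall into the padding area.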
template <typename data_t>
void ref_pool(const InferenceEngine::TBlob<data_t> &src, InferenceEngine::TBlob<data_t> &dst, pooling_test_params prm)
{
    size_t KW = prm.krn_w;
    size_t KH = prm.krn_h;

    size_t IW = prm.in.w;
    size_t IH = prm.in.h;

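    // Output spatial size: (input + 2 * pad - kernel) / stride + 1 (integer division),
    // the same formula used for the output dims substituted into the IR below.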
    size_t OW = (IW + 2 * prm.pad_w - prm.krn_w) / prm.str_w + 1;
    size_t OH = (IH + 2 * prm.pad_h - prm.krn_h) / prm.str_h + 1;
    size_t OC = prm.in.c;

    const data_t *src_data = src.readOnly();
    data_t *dst_data = dst.data();

    IE_ASSERT(OC == dst.dims()[2]);

    for (size_t c = 0; c < OC; c++) {
        for (size_t oh = 0; oh < OH; oh++) {
            for (size_t ow = 0; ow < OW; ow++) {
                size_t oidx = c * OH * OW
                              + oh * OW + ow;
                data_t out_ref = data_t(0);
                bool is_initialized = false;
                for (uint32_t kh = 0; kh < KH; kh++) {
                    for (uint32_t kw = 0; kw < KW; kw++) {
                        int32_t iw = ow * prm.str_w - prm.pad_w + kw;
                        int32_t ih = oh * prm.str_h - prm.pad_h + kh;
                        if (iw < 0 || iw >= IW || ih < 0
                            || ih >= IH)
                            continue;
                        uint32_t iidx = c * IH * IW + ih * IW + iw;

                        data_t d = src_data[iidx];
                        if (!is_initialized) {
                            out_ref = d;
                            is_initialized = true;
                        } else {
                            if (out_ref < d)
                                out_ref = d;
                        }
                    }
                }
                dst_data[oidx] = out_ref;
            }
        }
    }
}

class MKLDNNGraphPoolingTests: public TestsCommon,
                               public WithParamInterface<pooling_test_params> {
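    // Single-layer IR: an Input feeding a max Pooling layer. The _*_ placeholders are
    // substituted by getModel() from the test parameters.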
    std::string model_t = R"V0G0N(
<Net Name="Pooling_Only" version="2" precision="FP32" batch="1">
    <layers>
        <layer name="in1" type="Input" precision="FP32" id="0">
            <output>
                <port id="0">
                    <dim>_IN_</dim>
                    <dim>_IC_</dim>
                    <dim>_IH_</dim>
                    <dim>_IW_</dim>
                </port>
            </output>
        </layer>
        <layer name="pool" id="1" type="Pooling" precision="FP32">

            <pooling stride-x="_SW_" stride-y="_SH_"
                     pad-x="_PW_" pad-y="_PH_"
                     kernel-x="_KW_" kernel-y="_KH_"
                     method="MAX" round="Ceil" PrimitivesPriority="_IMPLS_"/>

            <input>
                <port id="1">
                    <dim>_IN_</dim>
                    <dim>_IC_</dim>
                    <dim>_IH_</dim>
                    <dim>_IW_</dim>
                </port>
            </input>
            <output>
                <port id="1">
                    <dim>_IN_</dim>
                    <dim>_IC_</dim>
                    <dim>_OH_</dim>
                    <dim>_OW_</dim>
                </port>
            </output>
        </layer>
    </layers>
    <edges>
        <edge from-layer="0" from-port="0" to-layer="1" to-port="1"/>
    </edges>
</Net>
)V0G0N";

protected:
    std::string getModel(pooling_test_params p) {
        std::string model = model_t;

        REPLACE_WITH_NUM(model, "_IW_", p.in.w);
        REPLACE_WITH_NUM(model, "_IH_", p.in.h);
        REPLACE_WITH_NUM(model, "_IC_", p.in.c);
        REPLACE_WITH_NUM(model, "_IN_", p.in.n);

        REPLACE_WITH_NUM(model, "_KW_", p.krn_w);
        REPLACE_WITH_NUM(model, "_KH_", p.krn_h);
        REPLACE_WITH_NUM(model, "_SW_", p.str_w);
        REPLACE_WITH_NUM(model, "_SH_", p.str_h);
        REPLACE_WITH_NUM(model, "_PW_", p.pad_w);
        REPLACE_WITH_NUM(model, "_PH_", p.pad_h);

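        // Output dims are computed with the same formula as ref_pool so the IR and the
        // reference implementation stay consistent.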
        REPLACE_WITH_NUM(model, "_OW_", (p.in.w + 2 * p.pad_w - p.krn_w) / p.str_w + 1);
        REPLACE_WITH_NUM(model, "_OH_", (p.in.h + 2 * p.pad_h - p.krn_h) / p.str_h + 1);

        std::string impls;
        for (const auto& preferType : p.preferTypes) {
            if (!impls.empty())
                impls += ",";
            impls += "cpu:" + MKLDNNGraphTestClass::getStrPrimitiveDescriptorType(preferType);
        }
        REPLACE_WITH_STR(model, "_IMPLS_", impls);
        return model;
    }

    virtual void TearDown() {
    }

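    // Builds the MKLDNN graph from the generated IR, checks the supported and selected
    // primitive descriptors of the Pooling node, runs inference on random data, and
    // compares the result against ref_pool().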
    virtual void SetUp() {
        try {
            TestsCommon::SetUp();
            pooling_test_params p = ::testing::WithParamInterface<pooling_test_params>::GetParam();
            std::string model = getModel(p);

            InferenceEngine::CNNNetReader net_reader;
            ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));

            MKLDNNGraphTestClass graph;
            graph.CreateGraph(net_reader.getNetwork());
            auto& nodes = graph.getNodes();
            for (int i = 0; i < nodes.size(); i++) {
                if (nodes[i]->getType() == MKLDNNPlugin::Pooling) {
                    ASSERT_LE(p.num_prim_desc, nodes[i]->getSupportedPrimitiveDescriptors().size());
                    for (size_t j = 0; j < p.num_prim_desc && j < p.comp.size(); j++) {
                        p.comp.at(j)(nodes[i]->getSupportedPrimitiveDescriptors().at(j));
                    }
                    ASSERT_NE(nullptr, nodes[i]->getSelectedPrimitiveDescriptor());
                    ASSERT_TRUE(nodes[i]->getSelectedPrimitiveDescriptor()->getImplementationType() & p.selectedType);
                }
            }

            InferenceEngine::SizeVector dims_src = {p.in.n, p.in.c, p.in.h, p.in.w};

            InferenceEngine::Blob::Ptr src = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, InferenceEngine::NCHW, dims_src);
            src->allocate();
            fill_data(src->buffer(), src->size());

            InferenceEngine::TBlob<float>* srcPtr = dynamic_cast<InferenceEngine::TBlob<float>*>(src.get());

            if (srcPtr == nullptr)
                FAIL() << "Cannot cast blob to TBlob<float>.";

            InferenceEngine::BlobMap srcs;
            srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src));

            InferenceEngine::OutputsDataMap out;
            out = net_reader.getNetwork().getOutputsInfo();
            InferenceEngine::BlobMap outputBlobs;

            std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();

            InferenceEngine::TBlob<float>::Ptr output;
            output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
            output->allocate();
            outputBlobs[item.first] = output;

            graph.Infer(srcs, outputBlobs);

            InferenceEngine::TBlob<float> dst_ref(item.second->getTensorDesc());
            dst_ref.allocate();

            ref_pool(*srcPtr, dst_ref, p);

            compare(*output, dst_ref);
        } catch (const InferenceEngine::details::InferenceEngineException &e) {
            FAIL() << e.what();
        }
    }
};

TEST_P(MKLDNNGraphPoolingTests, TestsPooling) {}

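// Parameter layout: {n, c, h, w}, kernel w/h, stride w/h, pad w/h, expected number of
// primitive descriptors, expected implementation type[, preferred implementations].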
INSTANTIATE_TEST_CASE_P(
        TestsPooling, MKLDNNGraphPoolingTests,
        ::testing::Values(
                pooling_test_params{{1, 3, 228, 228}, 2, 2, 2, 2, 0, 0, 6, MKLDNNPlugin::impl_desc_type::jit},
                pooling_test_params{{1, 3, 228, 228}, 4, 2, 2, 2, 0, 0, 4, MKLDNNPlugin::impl_desc_type::jit},
                pooling_test_params{{1, 3, 228, 228}, 4, 2, 2, 1, 0, 0, 4, MKLDNNPlugin::impl_desc_type::jit},
                pooling_test_params{{1, 3, 228, 228}, 2, 2, 2, 2, 0, 0, 6, MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1, 3, 228, 228}, 4, 2, 2, 2, 0, 0, 4, MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1, 3, 228, 228}, 4, 2, 2, 1, 0, 0, 4, MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}}));


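// Same single-layer topology, but built with dynamic batch enabled: the network batch is
// reshaped up front and inference is then checked for both the full and a reduced batch size.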
class MKLDNNGraphDynBatchPoolingTests: public MKLDNNGraphPoolingTests {
protected:
    virtual void SetUp() {
        try {
            TestsCommon::SetUp();
            pooling_test_params p = ::testing::WithParamInterface<pooling_test_params>::GetParam();
            std::string model = getModel(p);
            size_t MB = p.in.n;
            if (MB < 2)
                MB = 2;

            InferenceEngine::CNNNetReader net_reader;
            ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));
            InferenceEngine::CNNNetwork network = net_reader.getNetwork();
            auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
            ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
            InferenceEngine::ResponseDesc resp;
            InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
            ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;

            MKLDNNGraphTestClass graph;
            graph.setProperty({{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED, InferenceEngine::PluginConfigParams::YES}});
            graph.CreateGraph(net_reader.getNetwork());

            InferenceEngine::SizeVector dims_src = {MB, p.in.c, p.in.h, p.in.w};

            InferenceEngine::Blob::Ptr src = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, InferenceEngine::NCHW, dims_src);
            src->allocate();
            fill_data(src->buffer(), src->size());

            InferenceEngine::TBlob<float>* srcPtr = dynamic_cast<InferenceEngine::TBlob<float>*>(src.get());

            if (srcPtr == nullptr)
                FAIL() << "Cannot cast blob to TBlob<float>.";

            InferenceEngine::BlobMap srcs;
            srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src));

            InferenceEngine::OutputsDataMap out;
            out = net_reader.getNetwork().getOutputsInfo();
            InferenceEngine::BlobMap outputBlobs;

            std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();

            InferenceEngine::TBlob<float>::Ptr output;
            output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
            output->allocate();
            outputBlobs[item.first] = output;

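            // Run inference with the full batch and then with batch 1 to verify the Pooling
            // node respects the dynamically set batch size.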
            auto checkPooling = [](const MKLDNNPlugin::MKLDNNNodePtr& node) {
                return node->getType() == MKLDNNPlugin::Pooling;
            };
            graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkPooling);
            graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkPooling);
        } catch (const InferenceEngine::details::InferenceEngineException &e) {
            FAIL() << e.what();
        }
    }
};

TEST_P(MKLDNNGraphDynBatchPoolingTests, TestsDynBatchPooling) {}

INSTANTIATE_TEST_CASE_P(
        TestsDynBatchPooling, MKLDNNGraphDynBatchPoolingTests,
        ::testing::Values(
                pooling_test_params{{1, 3, 228, 228}, 4, 2, 2, 1, 0, 0, 4, MKLDNNPlugin::impl_desc_type::jit},
                pooling_test_params{{1, 3, 228, 228}, 2, 2, 2, 2, 0, 0, 6, MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1, 3, 228, 228}, 4, 2, 2, 2, 0, 0, 4, MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1, 3, 228, 228}, 4, 2, 2, 1, 0, 0, 4, MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}}));