// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <gtest/gtest.h>
#include <gmock/gmock-spec-builders.h>
#include "mkldnn_plugin/mkldnn_graph.h"

#include "test_graph.hpp"

#include "single_layer_common.hpp"
#include <ie_layers.h>
#include <mkldnn_plugin/mkldnn_extension_utils.h>
#include <inference_engine/cnn_network_impl.hpp>
#include "tests_common.hpp"
#include "ir_gen_helper.hpp"
#include <math.h>

using namespace InferenceEngine;
using namespace ::testing;
using namespace std;
using namespace mkldnn;
using namespace single_layer_tests;

struct pooling_test_params {
    // Formats: NCHW, NCDHW
    vector<size_t> dims;
    // Formats: WH, WHD
    vector<size_t> kernel;
    vector<size_t> strides;
    vector<size_t> pads_begin;
    vector<size_t> pads_end;

    PoolingLayer::PoolType _type;
    bool _exclude_pad;

    size_t num_prim_desc;

    MKLDNNPlugin::impl_desc_type selectedType;
    vector<MKLDNNPlugin::impl_desc_type> preferTypes;

    vector<std::function<void(MKLDNNPlugin::PrimitiveDescInfo)>> comp;
};
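
// Example (taken from the instantiations below): a 2x2 max-pool with stride 2
// and no padding over a 1x3x228x228 input, expecting at least 6 supported
// primitive descriptors and a jit implementation to be selected:
//   pooling_test_params{{1, 3, 228, 228}, {2, 2}, {2, 2}, {0, 0}, {0, 0},
//                       PoolingLayer::MAX, false, 6, MKLDNNPlugin::impl_desc_type::jit}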

template <typename data_t>
void ref_pool(const InferenceEngine::TBlob<data_t> &src, InferenceEngine::TBlob<data_t> &dst, pooling_test_params prm)
{
    int dims_size = prm.dims.size();

    int KW = prm.kernel[X_AXIS];
    int KH = prm.kernel[Y_AXIS];
    int KD = dims_size == 5 ? prm.kernel[Z_AXIS] : 1;

    int SW = prm.strides[X_AXIS];
    int SH = prm.strides[Y_AXIS];
    int SD = prm.strides.size() > Z_AXIS ? prm.strides[Z_AXIS] : 1;

    int IW = prm.dims[dims_size - 1];
    int IH = prm.dims[dims_size - 2];
    int ID = dims_size == 5 ? prm.dims[dims_size - 3] : 1;

    int PWB = prm.pads_begin[X_AXIS];
    int PHB = prm.pads_begin[Y_AXIS];
    int PDB = prm.pads_begin.size() > Z_AXIS ? prm.pads_begin[Z_AXIS] : 0;
    int PWE = prm.pads_end[X_AXIS];
    int PHE = prm.pads_end[Y_AXIS];
    int PDE = prm.pads_end.size() > Z_AXIS ? prm.pads_end[Z_AXIS] : 0;

    int OW = (IW + PWB + PWE - KW) / SW + 1;
    int OH = (IH + PHB + PHE - KH) / SH + 1;
    int OD = dims_size == 5 ? (ID + PDB + PDE - KD) / SD + 1 : 1;
    int OC = prm.dims[1];
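    // e.g. for the 1x3x228x228 cases below with a 2x2 kernel, stride 2 and no
    // padding: OW = OH = (228 + 0 + 0 - 2) / 2 + 1 = 114.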

    const data_t *src_data = src.readOnly();
    data_t *dst_data = dst.data();

    IE_ASSERT(OC == dst.dims()[dims_size - 2]);

    int k1 = OH * OW,
        k2 = k1 * OD,
        k3 = IH * IW,
        k4 = k3 * ID;

    if (prm._type == PoolingLayer::MAX) {
        for (int c = 0; c < OC; c++) {
            int cc = c * k2;
            for (int od = 0; od < OD; od++) {
                int cd = cc + od * k1;
                for (int oh = 0; oh < OH; oh++) {
                    int ch = cd + oh * OW;
                    for (int ow = 0; ow < OW; ow++) {

                        int oidx = ch + ow;
                        data_t out_ref = data_t(0);
                        bool is_initialized = false;

                        for (int kd = 0; kd < KD; kd++) {
                            int id = dims_size == 5 ? od * SD - PDB + kd : 0;
                            if (id < 0 || id >= ID) continue;
                            for (int kh = 0; kh < KH; kh++) {
                                int ih = oh * SH - PHB + kh;
                                if (ih < 0 || ih >= IH) continue;
                                for (int kw = 0; kw < KW; kw++) {
                                    int iw = ow * SW - PWB + kw;
                                    if (iw < 0 || iw >= IW) continue;
                                    int iidx = c * k4
                                                + id * k3
                                                + ih * IW
                                                + iw;

                                    data_t d = src_data[iidx];
                                    if (!is_initialized) {
                                        out_ref = d;
                                        is_initialized = true;
                                    } else {
                                        if (out_ref < d)
                                            out_ref = d;
                                    }
                                }
                            }
                        }
                        dst_data[oidx] = out_ref;
                    }
                }
            }
        }
    } else if (prm._type == PoolingLayer::AVG) {

        bool include_padding = false;
        bool not_zero_l = false;
        for (auto lr : prm.pads_begin) {
            if (lr) {
                not_zero_l = true;
                break;
            }
        }
        if (!prm._exclude_pad && not_zero_l)
            include_padding = true;

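        // e.g. for the AVG cases below with pads_begin = {1, 0} and exclude-pad
        // set to false, padded positions are kept in the divisor: a 2x2 window
        // hanging one column over the left border still divides its sum by 4.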
        int PDBKD = KD - PDB,
            PHBKH = KH - PHB,
            PWBKW = KW - PWB,
            IDPDE = ID + PDE,
            IHPHE = IH + PHE,
            IWPWE = IW + PWE;

        for (int c = 0; c < OC; c++) {
            int cc = c * k2;
            for (int od = 0; od < OD; od++) {
                int cd = cc + od * k1;
                int id_start = od * SD - PDB;
                int id_end = std::min(od * SD + PDBKD, IDPDE);
                for (int oh = 0; oh < OH; oh++) {
                    int ch = cd + oh * OW;
                    int ih_start = oh * SH - PHB;
                    int ih_end = std::min(oh * SH + PHBKH, IHPHE);
                    for (int ow = 0; ow < OW; ow++) {
                        size_t oidx = ch + ow;
                        dst_data[oidx] = (data_t)0;
                        int iw_start = ow * SW - PWB;
                        int iw_end = std::min(ow * SW + PWBKW, IWPWE);

                        // include_padding: divisor counts padded positions (clamped to the real borders below)
                        double num_summands = (ih_end - ih_start) * (iw_end - iw_start) * (id_end - id_start);

                        id_start = std::max(id_start, 0);
                        ih_start = std::max(ih_start, 0);
                        iw_start = std::max(iw_start, 0);
                        id_end = std::min(id_end, ID);
                        ih_end = std::min(ih_end, IH);
                        iw_end = std::min(iw_end, IW);

                        if (!include_padding)
                            num_summands = (id_end - id_start) * (ih_end - ih_start) * (iw_end - iw_start);
                        if (num_summands == 0.0) continue;

                        double dst = 0.0;
                        for (int id = id_start; id < id_end; ++id) {
                            for (int ih = ih_start; ih < ih_end; ++ih) {
                                for (int iw = iw_start; iw < iw_end; ++iw) {
                                    size_t iidx = c * k4
                                                + id * k3
                                                + ih * IW
                                                + iw;

                                    dst += (double)src_data[iidx];
                                }
                            }
                        }

                        dst_data[oidx] = (data_t)(dst / num_summands);
                    }
                }
            }
        }
    }
}
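
// Quick sanity check of the reference above (illustrative numbers only): for a
// single 2x2 window holding {1, 2, 3, 4}, MAX pooling yields 4 and AVG pooling
// yields (1 + 2 + 3 + 4) / 4 = 2.5.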

class MKLDNNGraphPoolingTests: public TestsCommon,
                                     public WithParamInterface<pooling_test_params> {
    std::string layers_t = R"V0G0N(
        <layer name="pool" id="1" type="Pooling" precision="FP32">

            <pooling kernel="_K_"
                     strides="_KS_"
                     pads_begin="_PB_" pads_end="_PE_"
                     pool-method="_PM_" exclude-pad="_EP_" rounding_type="floor"
                     PrimitivesPriority="_IMPLS_"/>

            <input>
                <port id="1">
                    __SRC_DIMS__
                </port>
            </input>
            <output>
                <port id="1">
                    <dim>_IN_</dim>
                    <dim>_IC_</dim>
                    __DST_DIMS__
                </port>
            </output>
        </layer>
)V0G0N";

    std::string edges_t = R"V0G0N(
        <edge from-layer="0" from-port="0" to-layer="1" to-port="1"/>
)V0G0N";

protected:
    std::string getModel(pooling_test_params p) {
        std::string model = layers_t;

        std::string s_dims;
        for (auto& dim : p.dims) {
            s_dims += "\n                    <dim>";
            s_dims += std::to_string(dim) + "</dim>";
        }
        REPLACE_WITH_STR(model, "__SRC_DIMS__", s_dims);

        s_dims = "";
        int k_len = p.kernel.size();
        for (size_t i = 2lu; i < p.dims.size(); i++) {
            size_t inx = k_len - i + 1lu;
            size_t dim = (p.dims[i] + p.pads_begin[inx] + p.pads_end[inx] - p.kernel[inx]) / p.strides[inx] + 1lu;
            s_dims += "\n                    <dim>";
            s_dims += std::to_string(dim) + "</dim>";
        }
        REPLACE_WITH_STR(model, "__DST_DIMS__", s_dims);

        std::string pool_method;
        switch (p._type) {
            case PoolingLayer::AVG: pool_method = "avg";
                break;
            case PoolingLayer::ROI: pool_method = "roi";
                break;
            default: pool_method = "max";
        }
        REPLACE_WITH_STR(model, "_PM_", pool_method);

        std::string exclude_pad = "false";
        if (p._exclude_pad) exclude_pad = "true";
        REPLACE_WITH_STR(model, "_EP_", exclude_pad);

        REPLACE_WITH_NUM(model, "_IN_", p.dims[0]);
        REPLACE_WITH_NUM(model, "_IC_", p.dims[1]);

        REPLACE_WITH_NUM_VECTOR_REVERSE(model, "_K_", p.kernel);
        REPLACE_WITH_NUM_VECTOR_REVERSE(model, "_KS_", p.strides);
        REPLACE_WITH_NUM_VECTOR_REVERSE(model, "_PB_", p.pads_begin);
        REPLACE_WITH_NUM_VECTOR_REVERSE(model, "_PE_", p.pads_end);

        std::string impls;
        for (const auto& preferType : p.preferTypes) {
            if (!impls.empty())
                impls += ",";
            impls += "cpu:" + MKLDNNGraphTestClass::getStrPrimitiveDescriptorType(preferType);
        }
        REPLACE_WITH_STR(model, "_IMPLS_", impls);

        model = IRTemplateGenerator::getIRTemplate("Pooling_Only", p.dims, "FP32", model, edges_t);

        return model;
    }
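
    // Illustration of the substitution above for one of the cases below (the exact
    // separator written by REPLACE_WITH_NUM_VECTOR_REVERSE is assumed to be a
    // comma): dims {1, 3, 228, 228} with kernel {4, 2} (W, H), strides {2, 2} and
    // zero pads give kernel="2,4", strides="2,2" and destination dims 114 x 113,
    // i.e. (228 - 2) / 2 + 1 and (228 - 4) / 2 + 1.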

    virtual void TearDown() {
    }

    virtual void SetUp() {
        try {
            TestsCommon::SetUp();
            pooling_test_params p = ::testing::WithParamInterface<pooling_test_params>::GetParam();
            std::string model = getModel(p);

            InferenceEngine::CNNNetReader net_reader;
            ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));

            MKLDNNGraphTestClass graph;
            graph.CreateGraph(net_reader.getNetwork());
            auto& nodes = graph.getNodes();
            for (int i = 0; i < nodes.size(); i++) {
                if (nodes[i]->getType() == MKLDNNPlugin::Pooling) {
                    ASSERT_LE(p.num_prim_desc, nodes[i]->getSupportedPrimitiveDescriptors().size());
                    for (size_t j = 0; j < p.num_prim_desc && j < p.comp.size(); j++) {
                        p.comp.at(j)(nodes[i]->getSupportedPrimitiveDescriptors().at(j));
                    }
                    ASSERT_NE(nullptr, nodes[i]->getSelectedPrimitiveDescriptor());
                    // Bitwise AND: the selected implementation must match the expected type mask.
                    ASSERT_TRUE(nodes[i]->getSelectedPrimitiveDescriptor()->getImplementationType() & p.selectedType);
                }
            }

            InferenceEngine::Layout layout = ANY;
            switch (p.dims.size()) {
                case 4:
                    layout = InferenceEngine::NCHW;
                    break;
                case 5:
                    layout = InferenceEngine::NCDHW;
                    break;
            }

            InferenceEngine::Blob::Ptr src =
                InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, p.dims);
            src->allocate();
            fill_data(src->buffer(), src->size());

            InferenceEngine::TBlob<float>* srcPtr = dynamic_cast<InferenceEngine::TBlob<float>*>(src.get());

            if (srcPtr == nullptr)
                FAIL() << "Cannot cast blob to TBlob<float>.";

            InferenceEngine::BlobMap srcs;
            srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src));

            InferenceEngine::OutputsDataMap out;
            out = net_reader.getNetwork().getOutputsInfo();
            InferenceEngine::BlobMap outputBlobs;

            std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();

            InferenceEngine::TBlob<float>::Ptr output;
            output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
            output->allocate();
            outputBlobs[item.first] = output;

            graph.Infer(srcs, outputBlobs);

            InferenceEngine::TBlob<float> dst_ref(item.second->getTensorDesc());
            dst_ref.allocate();

            ref_pool(*srcPtr, dst_ref, p);

            compare(*output, dst_ref, 0.0001f);
        } catch (const InferenceEngine::details::InferenceEngineException &e) {
            FAIL() << e.what();
        }
    }
};

TEST_P(MKLDNNGraphPoolingTests, TestsPooling) {}
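
// To run only these cases (standard GoogleTest filtering; the unit-test binary
// name depends on the local build):
//   <unit_tests_binary> --gtest_filter=TestsPooling/MKLDNNGraphPoolingTests.*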

INSTANTIATE_TEST_CASE_P(
        TestsPooling, MKLDNNGraphPoolingTests,
        ::testing::Values(
        /*0*/   pooling_test_params{{1, 3, 228, 228}, {2, 2}, {2, 2}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 6, MKLDNNPlugin::impl_desc_type::jit},
                pooling_test_params{{1, 3, 228, 228}, {4, 2}, {2, 2}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 4, MKLDNNPlugin::impl_desc_type::jit},
                pooling_test_params{{1, 3, 228, 228}, {4, 2}, {2, 1}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 4, MKLDNNPlugin::impl_desc_type::jit},
                pooling_test_params{{1, 3, 228, 228}, {2, 2}, {2, 2}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 6, MKLDNNPlugin::impl_desc_type::ref,
                            {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1, 3, 228, 228}, {4, 2}, {2, 2}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 4, MKLDNNPlugin::impl_desc_type::ref,
                            {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1, 3, 228, 228}, {4, 2}, {2, 1}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 4, MKLDNNPlugin::impl_desc_type::ref,
                            {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {1u, 0u}, {0u, 0u}, PoolingLayer::AVG, false, 3u,
                            MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {1u, 0u}, {0u, 0u}, PoolingLayer::AVG, false, 3u,
                            MKLDNNPlugin::impl_desc_type::jit },
                pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {0u, 0u}, {0u, 0u}, PoolingLayer::AVG, true, 3u,
                            MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
        /*9*/   pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {0u, 0u}, {0u, 0u}, PoolingLayer::AVG, true, 3u,
                            MKLDNNPlugin::impl_desc_type::jit },
                pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, PoolingLayer::AVG, true, 3u,
                            MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, PoolingLayer::AVG, false, 3u,
                            MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, PoolingLayer::MAX, false, 3u,
                            MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                // TODO: fix the jit implementation for non-zero end paddings
//                pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, {2u, 0u}, PoolingLayer::AVG, true, 3u,
//                            MKLDNNPlugin::impl_desc_type::jit },
//                pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, {2u, 0u}, PoolingLayer::AVG, false, 3u,
//                            MKLDNNPlugin::impl_desc_type::jit },
//                pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, {2u, 0u}, PoolingLayer::MAX, false, 3u,
//                            MKLDNNPlugin::impl_desc_type::jit },

                // 5D tensor
                pooling_test_params{{1u, 3u, 16u, 32u, 32u}, {2u, 2u, 2u}, {1u, 1u, 1u}, {0u, 0u, 0u}, {0u, 0u, 0u}, PoolingLayer::MAX, false, 3u,
                            MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1u, 3u, 16u, 32u, 32u}, {2u, 2u, 2u}, {1u, 1u, 1u}, {0u, 0u, 0u}, {0u, 0u, 0u}, PoolingLayer::MAX, false, 3u,
                            MKLDNNPlugin::impl_desc_type::jit },
                pooling_test_params{{1u, 3u, 16u, 32u, 32u}, {2u, 2u, 2u}, {1u, 1u, 1u}, {1u, 1u, 1u}, {1u, 1u, 1u}, PoolingLayer::MAX, false, 3u,
                            MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1u, 32u, 60u, 60u, 60u}, {2u, 3u, 4u}, {2u, 2u, 2u}, {1u, 1u, 1u}, {1u, 2u, 3u}, PoolingLayer::MAX, false, 3u,
                            MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
        /*17*/  pooling_test_params{{1u, 3u, 16u, 32u, 32u}, {2u, 2u, 2u}, {1u, 1u, 1u}, {1u, 2u, 3u}, {1u, 2u, 3u}, PoolingLayer::MAX, false, 3u,
                            MKLDNNPlugin::impl_desc_type::jit },
                pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {1u, 0u, 0u}, {0u, 0u, 0u}, PoolingLayer::AVG, false, 3u,
                            MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {1u, 0u, 0u}, {0u, 0u, 0u}, PoolingLayer::AVG, false, 3u,
                            MKLDNNPlugin::impl_desc_type::jit },
                pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {0u, 0u, 0u}, {0u, 0u, 0u}, PoolingLayer::AVG, true, 3u,
                            MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {0u, 0u, 0u}, {0u, 0u, 0u}, PoolingLayer::AVG, true, 3u,
                            MKLDNNPlugin::impl_desc_type::jit },
                pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {0u, 0u, 0u}, PoolingLayer::AVG, true, 3u,
                            MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {2u, 2u, 2u}, PoolingLayer::AVG, true, 3u,
                            MKLDNNPlugin::impl_desc_type::jit },
                pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {2u, 2u, 2u}, PoolingLayer::AVG, false, 3u,
                            MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {2u, 2u, 2u}, PoolingLayer::AVG, false, 3u,
                            MKLDNNPlugin::impl_desc_type::jit } ));


class MKLDNNGraphDynBatchPoolingTests: public MKLDNNGraphPoolingTests {
protected:
    virtual void SetUp() {
        try {
            TestsCommon::SetUp();
            pooling_test_params p = ::testing::WithParamInterface<pooling_test_params>::GetParam();
            std::string model = getModel(p);
            size_t MB = p.dims[0];
            if (MB < 2)
                MB = 2;

            InferenceEngine::CNNNetReader net_reader;
            ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));
            InferenceEngine::CNNNetwork network = net_reader.getNetwork();
            auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
            ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
            InferenceEngine::ResponseDesc resp;
            InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
            ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;

            MKLDNNGraphTestClass graph;
            graph.setProperty({{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED, InferenceEngine::PluginConfigParams::YES}});
            graph.CreateGraph(net_reader.getNetwork());

            InferenceEngine::Layout layout = ANY;
            switch (p.dims.size()) {
                case 4:
                    layout = InferenceEngine::NCHW;
                    break;
                case 5:
                    layout = InferenceEngine::NCDHW;
                    break;
            }
            InferenceEngine::Blob::Ptr src =
                InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, p.dims);
            src->allocate();
            fill_data(src->buffer(), src->size());

            InferenceEngine::TBlob<float>* srcPtr = dynamic_cast<InferenceEngine::TBlob<float>*>(src.get());

            if (srcPtr == nullptr)
                FAIL() << "Cannot cast blob to TBlob<float>.";

            InferenceEngine::BlobMap srcs;
            srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src));

            InferenceEngine::OutputsDataMap out;
            out = net_reader.getNetwork().getOutputsInfo();
            InferenceEngine::BlobMap outputBlobs;

            std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();

            InferenceEngine::TBlob<float>::Ptr output;
            output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
            output->allocate();
            outputBlobs[item.first] = output;

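            // checkDynBatch runs inference with the full batch MB and again with
            // batch 1; the predicate below selects the Pooling node whose dynamic
            // batch handling is verified.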
            auto checkPooling = [](const MKLDNNPlugin::MKLDNNNodePtr& node) {
                return node->getType() == MKLDNNPlugin::Pooling;
            };
            graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkPooling);
            graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkPooling);
        } catch (const InferenceEngine::details::InferenceEngineException &e) {
            FAIL() << e.what();
        }
    }
};

TEST_P(MKLDNNGraphDynBatchPoolingTests, TestsDynBatchPooling) {}

INSTANTIATE_TEST_CASE_P(
        TestsDynBatchPooling, MKLDNNGraphDynBatchPoolingTests,
        ::testing::Values(
                pooling_test_params{{1, 3, 228, 228}, {4, 2}, {2, 1}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 4, MKLDNNPlugin::impl_desc_type::jit},
                pooling_test_params{{1, 3, 228, 228}, {2, 2}, {2, 2}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 6, MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1, 3, 228, 228}, {4, 2}, {2, 2}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 4, MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
                pooling_test_params{{1, 3, 228, 228}, {4, 2}, {2, 1}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 4, MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}}));