// Copyright (C) 2018 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

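// Unit tests for the MKLDNN graph Eltwise layer: output correctness against a
// scalar reference, dynamic batch handling, and mixed input precisions.
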
#include <gtest/gtest.h>
#include <gmock/gmock-spec-builders.h>
#include "mkldnn_plugin/mkldnn_graph.h"

#include "test_graph.hpp"

#include "single_layer_common.hpp"
#include <mkldnn_plugin/mkldnn_extension_utils.h>
#include <inference_engine/cnn_network_impl.hpp>
#include "tests_common.hpp"

using namespace ::testing;
using namespace std;
using namespace mkldnn;

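// Parameters of a single Eltwise test case: input dimensions, the operation to
// perform, optional per-input coefficients ("scales"), and the expected number,
// type, and properties of the supported primitive descriptors.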
struct eltwise_test_params {
    // Formats: NCHW, NCDHW
    vector<size_t> dims;

    enum opType {
        Sum = 0, Prod = 1, Max = 2
    };

    opType op;

    std::string scales;

    size_t num_prim_desc;

    MKLDNNPlugin::impl_desc_type selectedType;

    std::vector<std::function<void(MKLDNNPlugin::PrimitiveDescInfo)>> comp;
};

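// Scalar reference implementation: the first input initializes the destination
// (scaled for Sum), and every remaining input is folded in according to the
// selected operation (Sum with per-input coefficients, Prod, or Max).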
template<typename data_t>
void ref_eltwise(const std::vector<InferenceEngine::TBlob<data_t>> &src, InferenceEngine::TBlob<data_t> &dst, eltwise_test_params prm) {
    std::vector<float> scales;
    if (!prm.scales.empty()) {
        std::istringstream stream(prm.scales);
        std::string str;
        while (getline(stream, str, ',')) {
            float val = std::stof(str);
            scales.push_back(val);
        }
    } else {
        for (size_t i = 0; i < src.size(); i++) {
            scales.push_back(1.0f);
        }
    }

    data_t *dst_data = dst.data();

    const data_t *src_data = src[0].readOnly();

    for (size_t i = 0; i < src[0].size(); i++) {
        switch (prm.op) {
            case eltwise_test_params::Sum:
                dst_data[i] = scales[0] * src_data[i];
                break;
            default:
                dst_data[i] = src_data[i];
                break;
        }
    }

    for (size_t n = 1; n < src.size(); n++) {
        src_data = src[n].readOnly();

        for (size_t i = 0; i < src[n].size(); i++) {
            switch (prm.op) {
                case eltwise_test_params::Sum:
                    dst_data[i] += scales[n] * src_data[i];
                    break;

                case eltwise_test_params::Prod:
                    dst_data[i] *= src_data[i];
                    break;

                case eltwise_test_params::Max:
                    dst_data[i] = (std::max)(dst_data[i], src_data[i]);
                    break;
            }
        }
    }
}

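// Builds a three-input Eltwise network from the XML template below, creates the
// MKLDNN graph, validates the supported and selected primitive descriptors, and
// compares the inference output against ref_eltwise.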
class MKLDNNGraphEltwiseTests: public TestsCommon,
                               public WithParamInterface<eltwise_test_params> {
    std::string model_t = R"V0G0N(
<net name="EltwiseOnly" version="3" precision="FP32" batch="1">
    <layers>
        <layer name="in1" type="Input" precision="FP32" id="1">
            <output>
                <port id="1">__SRC_DIMS__
                </port>
            </output>
        </layer>
        <layer name="in2" type="Input" precision="FP32" id="2">
            <output>
                <port id="2">__SRC_DIMS__
                </port>
            </output>
        </layer>
        <layer name="in3" type="Input" precision="FP32" id="3">
            <output>
                <port id="3">__SRC_DIMS__
                </port>
            </output>
        </layer>
        <layer name="con" id="4" type="Eltwise" precision="FP32">
            <data operation="_OP_" _COEFF_/>
            <input>
                <port id="1">__SRC_DIMS__
                </port>
                <port id="2">__SRC_DIMS__
                </port>
                <port id="3">__SRC_DIMS__
                </port>
            </input>
            <output>
                <port id="4">__SRC_DIMS__
                </port>
            </output>
        </layer>
    </layers>
    <edges>
        <edge from-layer="1" from-port="1" to-layer="4" to-port="1"/>
        <edge from-layer="2" from-port="2" to-layer="4" to-port="2"/>
        <edge from-layer="3" from-port="3" to-layer="4" to-port="3"/>
    </edges>
</net>
)V0G0N";

protected:
    std::string getModel(eltwise_test_params p) {
        std::string model = model_t;
        std::string op;

        if (p.op == eltwise_test_params::Sum) {
            op = "sum";
        } else if (p.op == eltwise_test_params::Prod) {
            op = "mul";
        } else if (p.op == eltwise_test_params::Max) {
            op = "max";
        }

        std::string src_dims;
        for (auto& dim : p.dims) {
            src_dims += "\n                    <dim>";
            src_dims += std::to_string(dim) + "</dim>";
        }
        REPLACE_WITH_STR(model, "__SRC_DIMS__", src_dims);

        std::string scale;
        if (!p.scales.empty()) {
            scale = std::string("coeff=\"") + p.scales + std::string("\"");
        }
        REPLACE_WITH_STR(model, "_OP_", op);
        REPLACE_WITH_STR(model, "_COEFF_", scale);
        return model;
    }

    virtual void TearDown() {
    }

    virtual void SetUp() {
        try {
            TestsCommon::SetUp();
            eltwise_test_params p = ::testing::WithParamInterface<eltwise_test_params>::GetParam();
            std::string model = getModel(p);

            InferenceEngine::CNNNetReader net_reader;
            ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));

            MKLDNNGraphTestClass graph;
            graph.CreateGraph(net_reader.getNetwork());

            auto& nodes = graph.getNodes();
            for (size_t i = 0; i < nodes.size(); i++) {
                if (nodes[i]->getType() == MKLDNNPlugin::Eltwise) {
                    ASSERT_EQ(p.num_prim_desc, nodes[i]->getSupportedPrimitiveDescriptors().size());
                    for (size_t j = 0; j < p.num_prim_desc && j < p.comp.size(); j++) {
                        p.comp.at(j)(nodes[i]->getSupportedPrimitiveDescriptors().at(j));
                    }
                    ASSERT_NE(nullptr, nodes[i]->getSelectedPrimitiveDescriptor());
                    ASSERT_EQ(p.selectedType, nodes[i]->getSelectedPrimitiveDescriptor()->getImplementationType());
                }
            }

            InferenceEngine::SizeVector dims_src = p.dims;
            InferenceEngine::Layout layout = InferenceEngine::ANY;
            switch (p.dims.size()) {
                case 4:
                    layout = InferenceEngine::NCHW;
                    break;
                case 5:
                    layout = InferenceEngine::NCDHW;
                    break;
            }

            InferenceEngine::Blob::Ptr src1 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, dims_src);
            src1->allocate();

            InferenceEngine::TBlob<float>* srcPtr1 = dynamic_cast<InferenceEngine::TBlob<float>*>(src1.get());

            if (srcPtr1 == nullptr)
                FAIL() << "Cannot cast blob to TBlob<float>.";

            fill_data(src1->buffer(), src1->size());
            InferenceEngine::Blob::Ptr src2 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, dims_src);
            src2->allocate();

            InferenceEngine::TBlob<float>* srcPtr2 = dynamic_cast<InferenceEngine::TBlob<float>*>(src2.get());

            if (srcPtr2 == nullptr)
                FAIL() << "Cannot cast blob to TBlob<float>.";
            fill_data(src2->buffer(), src2->size());
            InferenceEngine::Blob::Ptr src3 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, dims_src);
            src3->allocate();

            InferenceEngine::TBlob<float>* srcPtr3 = dynamic_cast<InferenceEngine::TBlob<float>*>(src3.get());

            if (srcPtr3 == nullptr)
                FAIL() << "Cannot cast blob to TBlob<float>.";
            fill_data(src3->buffer(), src3->size());
            InferenceEngine::BlobMap srcs;
            srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src1));
            srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in2", src2));
            srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in3", src3));

            InferenceEngine::OutputsDataMap out;
            out = net_reader.getNetwork().getOutputsInfo();
            InferenceEngine::BlobMap outputBlobs;

            std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();

            InferenceEngine::TBlob<float>::Ptr output;
            output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
            output->allocate();
            outputBlobs[item.first] = output;

            graph.Infer(srcs, outputBlobs);

            InferenceEngine::TBlob<float> dst_ref(item.second->getTensorDesc());
            dst_ref.allocate();

            std::vector<InferenceEngine::TBlob<float>> src_vec = {*srcPtr1, *srcPtr2, *srcPtr3};

            ref_eltwise(src_vec, dst_ref, p);

            compare(*output, dst_ref, 0.0005f);
        } catch (const InferenceEngine::details::InferenceEngineException &e) {
            FAIL() << e.what();
        }
    }
};

TEST_P(MKLDNNGraphEltwiseTests, TestsEltwise) {}

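// Sum/Prod/Max over 4D (NCHW) and 5D (NCDHW) inputs, with and without explicit
// coefficients; every case is expected to select the reference implementation.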
INSTANTIATE_TEST_CASE_P(
        TestsEltwise, MKLDNNGraphEltwiseTests,
        ::testing::Values(
                eltwise_test_params{{1, 3, 3, 3}, eltwise_test_params::opType::Sum, "", 3, MKLDNNPlugin::impl_desc_type::ref, {
                        [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                            ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref, impl.getImplementationType());
                            ASSERT_EQ(3, impl.getConfig().inConfs.size());
                            ASSERT_EQ(1, impl.getConfig().outConfs.size());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(1).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
                        }
                } },
                eltwise_test_params{{1, 3, 3, 3}, eltwise_test_params::opType::Sum, "1.0,1.0,1.0", 3, MKLDNNPlugin::impl_desc_type::ref, {
                        [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                            ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref, impl.getImplementationType());
                            ASSERT_EQ(3, impl.getConfig().inConfs.size());
                            ASSERT_EQ(1, impl.getConfig().outConfs.size());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(1).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
                        }
                } },
                eltwise_test_params{{1, 3, 3, 3}, eltwise_test_params::opType::Sum, "1.5,0.5,-2.0", 3, MKLDNNPlugin::impl_desc_type::ref, {
                        [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                            ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref, impl.getImplementationType());
                            ASSERT_EQ(3, impl.getConfig().inConfs.size());
                            ASSERT_EQ(1, impl.getConfig().outConfs.size());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(1).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(2).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
                        }
                } },
                eltwise_test_params{{1, 3, 3, 3}, eltwise_test_params::opType::Prod, "", 3, MKLDNNPlugin::impl_desc_type::ref, {
                        [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                            ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref, impl.getImplementationType());
                            ASSERT_EQ(3, impl.getConfig().inConfs.size());
                            ASSERT_EQ(1, impl.getConfig().outConfs.size());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(1).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(2).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
                        }
                } },
                eltwise_test_params{{1, 3, 3, 3}, eltwise_test_params::opType::Max, "", 3, MKLDNNPlugin::impl_desc_type::ref, {
                        [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                            ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref, impl.getImplementationType());
                            ASSERT_EQ(3, impl.getConfig().inConfs.size());
                            ASSERT_EQ(1, impl.getConfig().outConfs.size());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(1).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(2).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
                        }
                } },
                eltwise_test_params{{1, 32, 16, 16, 16}, eltwise_test_params::opType::Sum, "", 3, MKLDNNPlugin::impl_desc_type::ref, {
                        [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                            ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref, impl.getImplementationType());
                            ASSERT_EQ(3, impl.getConfig().inConfs.size());
                            ASSERT_EQ(1, impl.getConfig().outConfs.size());
                            ASSERT_EQ(InferenceEngine::Layout::NCDHW, impl.getConfig().inConfs.at(0).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCDHW, impl.getConfig().inConfs.at(1).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCDHW, impl.getConfig().inConfs.at(2).desc.getLayout());
                            ASSERT_EQ(InferenceEngine::Layout::NCDHW, impl.getConfig().outConfs.at(0).desc.getLayout());
                        }
                } }
        ));

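// Same topology as above, but the network is reshaped to a larger batch and the
// graph is created with KEY_DYN_BATCH_ENABLED, so inference is exercised for both
// the full and a reduced batch size.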
class MKLDNNGraphDynBatchEltwiseTests: public MKLDNNGraphEltwiseTests {
protected:
    virtual void SetUp() {
        try {
            TestsCommon::SetUp();
            eltwise_test_params p = ::testing::WithParamInterface<eltwise_test_params>::GetParam();
            std::string model = getModel(p);
            size_t MB = p.dims[0];
            if (MB < 2)
                MB = 2;

            InferenceEngine::CNNNetReader net_reader;
            ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));
            InferenceEngine::CNNNetwork network = net_reader.getNetwork();
            auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
            ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
            InferenceEngine::ResponseDesc resp;
            InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
            ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;

            MKLDNNGraphTestClass graph;
            graph.setProperty({{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED, InferenceEngine::PluginConfigParams::YES}});
            graph.CreateGraph(net_reader.getNetwork());

            InferenceEngine::SizeVector dims_src = p.dims;
            InferenceEngine::Layout layout = InferenceEngine::ANY;
            switch (p.dims.size()) {
                case 4:
                    layout = InferenceEngine::NCHW;
                    break;
                case 5:
                    layout = InferenceEngine::NCDHW;
                    break;
            }

            InferenceEngine::Blob::Ptr src1 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, dims_src);
            src1->allocate();

            InferenceEngine::TBlob<float>* srcPtr1 = dynamic_cast<InferenceEngine::TBlob<float>*>(src1.get());

            if (srcPtr1 == nullptr)
                FAIL() << "Cannot cast blob to TBlob<float>.";

            fill_data(src1->buffer(), src1->size());
            InferenceEngine::Blob::Ptr src2 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, dims_src);
            src2->allocate();

            InferenceEngine::TBlob<float>* srcPtr2 = dynamic_cast<InferenceEngine::TBlob<float>*>(src2.get());

            if (srcPtr2 == nullptr)
                FAIL() << "Cannot cast blob to TBlob<float>.";
            fill_data(src2->buffer(), src2->size());
            InferenceEngine::Blob::Ptr src3 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, dims_src);
            src3->allocate();

            InferenceEngine::TBlob<float>* srcPtr3 = dynamic_cast<InferenceEngine::TBlob<float>*>(src3.get());

            if (srcPtr3 == nullptr)
                FAIL() << "Cannot cast blob to TBlob<float>.";
            fill_data(src3->buffer(), src3->size());
            InferenceEngine::BlobMap srcs;
            srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src1));
            srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in2", src2));
            srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in3", src3));

            InferenceEngine::OutputsDataMap out;
            out = net_reader.getNetwork().getOutputsInfo();
            InferenceEngine::BlobMap outputBlobs;

            std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();

            InferenceEngine::TBlob<float>::Ptr output;
            output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
            output->allocate();
            outputBlobs[item.first] = output;

            // The dynamic batch check only needs to identify the Eltwise nodes.
            auto checkEltwise = [](const MKLDNNPlugin::MKLDNNNodePtr& node) {
                return node->getType() == MKLDNNPlugin::Eltwise;
            };

            graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkEltwise);
            graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkEltwise);
        } catch (const InferenceEngine::details::InferenceEngineException &e) {
            FAIL() << e.what();
        }
    }
};

TEST_P(MKLDNNGraphDynBatchEltwiseTests, TestsDynBatchEltwise) {}

INSTANTIATE_TEST_CASE_P(
        TestsDynBatchEltwise, MKLDNNGraphDynBatchEltwiseTests,
        ::testing::Values(
                eltwise_test_params{{1, 3, 3, 3}, eltwise_test_params::opType::Sum, "", 3, MKLDNNPlugin::impl_desc_type::ref},
                eltwise_test_params{{1, 3, 3, 3}, eltwise_test_params::opType::Sum, "1.0,1.0,1.0", 3, MKLDNNPlugin::impl_desc_type::ref},
                eltwise_test_params{{1, 3, 3, 3}, eltwise_test_params::opType::Sum, "1.5,0.5,-2.0", 3, MKLDNNPlugin::impl_desc_type::ref},
                eltwise_test_params{{1, 3, 3, 3}, eltwise_test_params::opType::Prod, "", 3, MKLDNNPlugin::impl_desc_type::ref},
                eltwise_test_params{{1, 3, 3, 3}, eltwise_test_params::opType::Max, "", 3, MKLDNNPlugin::impl_desc_type::ref}));

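// Parameters of the mixed-precision tests: precisions of the two Eltwise inputs,
// the expected total number of graph nodes, and how many of them must be
// U8->FP32 Reorder nodes.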
struct precisions_test_2params {
    struct {
        std::string precision0;
        std::string precision1;
    } in;

    size_t num_nodes;
    size_t num_reorder_nodes;
};

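// Feeds a two-input Eltwise network with FP32 and/or U8 inputs and verifies that
// a U8->FP32 Reorder node is inserted for every U8 input.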
class MKLDNNGraphEltwise2PrecisionsTests : public TestsCommon,
                                           public WithParamInterface<precisions_test_2params> {

    std::string model_t = R"V0G0N(
<net name="default" version="2" batch="1">
    <layers>
        <layer name="second_input" type="Input" precision="_IP1_" id="1">
            <output>
                <port id="1">
                    <dim>1</dim>
                    <dim>2</dim>
                    <dim>3</dim>
                </port>
            </output>
        </layer>
        <layer name="data" type="Input" precision="_IP0_" id="0">
            <output>
                <port id="0">
                    <dim>1</dim>
                    <dim>2</dim>
                    <dim>3</dim>
                </port>
            </output>
        </layer>
        <layer name="output" type="Eltwise" precision="FP32" id="2">
            <elementwise_data operation="sum" coeff=""/>
            <input>
                <port id="2">
                    <dim>1</dim>
                    <dim>2</dim>
                    <dim>3</dim>
                </port>
                <port id="3">
                    <dim>1</dim>
                    <dim>2</dim>
                    <dim>3</dim>
                </port>
            </input>
            <output>
                <port id="4">
                    <dim>1</dim>
                    <dim>2</dim>
                    <dim>3</dim>
                </port>
            </output>
        </layer>
    </layers>
    <edges>
        <edge from-layer="0" from-port="0" to-layer="2" to-port="2"/>
        <edge from-layer="1" from-port="1" to-layer="2" to-port="3"/>
    </edges>
</net>
)V0G0N";

protected:
    std::string getModel(precisions_test_2params p) {
        std::string model = model_t;

        REPLACE_WITH_STR(model, "_IP0_", p.in.precision0);
        REPLACE_WITH_STR(model, "_IP1_", p.in.precision1);
        return model;
    }

    virtual void TearDown() {
    }

    virtual void SetUp() {
        try {
            TestsCommon::SetUp();
            precisions_test_2params p = ::testing::WithParamInterface<precisions_test_2params>::GetParam();
            std::string model = getModel(p);

            InferenceEngine::CNNNetReader net_reader;
            ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));

            MKLDNNGraphTestClass graph;
            ASSERT_NO_THROW(graph.CreateGraph(net_reader.getNetwork()));

            auto& nodes = graph.getNodes();
            ASSERT_EQ(nodes.size(), p.num_nodes);

            size_t actual_reorder_nodes = 0;
            for (size_t i = 0; i < nodes.size(); i++) {
                if (nodes[i]->getType() == MKLDNNPlugin::Type::Reorder &&
                    FIND_STR(nodes[i]->getName(), "_U8_FP32_"))
                    actual_reorder_nodes++;
            }
            ASSERT_EQ(actual_reorder_nodes, p.num_reorder_nodes);
        } catch (const InferenceEngine::details::InferenceEngineException &e) {
            FAIL() << e.what();
        }
    }
};

TEST_P(MKLDNNGraphEltwise2PrecisionsTests, TestsEltwise2Precisions) {}

INSTANTIATE_TEST_CASE_P(
        TestsEltwise2Precisions, MKLDNNGraphEltwise2PrecisionsTests,
        ::testing::Values(
            precisions_test_2params{ {"FP32", "FP32"}, 4, 0 },
            precisions_test_2params{ {  "U8", "FP32"}, 5, 1 },
            precisions_test_2params{ {"FP32",   "U8"}, 5, 1 },
            precisions_test_2params{ {  "U8",   "U8"}, 6, 2 }
        ));