// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <gtest/gtest.h>
#include <gmock/gmock-spec-builders.h>
#include "mkldnn_plugin/mkldnn_graph.h"

#include "test_graph.hpp"

#include <mkldnn_plugin/mkldnn_extension_utils.h>
#include "tests_common.hpp"


using namespace ::testing;
using namespace std;
using namespace mkldnn;

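// Expected results for a single test case: the number of supported primitive descriptors,
// the implementation type that must be selected, and optional per-descriptor checks.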
struct input_test_params {
    size_t num_prim_desc;

    MKLDNNPlugin::impl_desc_type selectedType;

    std::vector<std::function<void(MKLDNNPlugin::PrimitiveDescInfo)>> comp;
};

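// Builds a graph with three Input layers (two 4D, one 2D) feeding Power layers and verifies
// the primitive descriptors reported and selected for every Input/Output node.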
class MKLDNNGraphInputTests: public TestsCommon,
                                     public WithParamInterface<input_test_params> {
    std::string model_t = R"V0G0N(
<net name="InputsOnly" version="2" precision="FP32" batch="1">
    <layers>
        <layer name="in1" type="Input" precision="FP32" id="1">
            <output>
                <port id="1">
                    <dim>1</dim>
                    <dim>3</dim>
                    <dim>3</dim>
                    <dim>3</dim>
                </port>
            </output>
        </layer>
        <layer name="in2" type="Input" precision="FP32" id="2">
            <output>
                <port id="2">
                    <dim>1</dim>
                    <dim>3</dim>
                    <dim>3</dim>
                    <dim>3</dim>
                </port>
            </output>
        </layer>
        <layer name="in3" type="Input" precision="FP32" id="3">
            <output>
                <port id="3">
                    <dim>1</dim>
                    <dim>3</dim>
                </port>
            </output>
        </layer>
        <layer name="power1" id="4" type="Power" precision="FP32">
            <power_data power="1" scale="1" shift="1"/>
            <input>
                <port id="4">
                    <dim>1</dim>
                    <dim>3</dim>
                    <dim>3</dim>
                    <dim>3</dim>
                </port>
            </input>
            <output>
                <port id="5">
                    <dim>1</dim>
                    <dim>3</dim>
                    <dim>3</dim>
                    <dim>3</dim>
                </port>
            </output>
        </layer>
        <layer name="power2" id="5" type="Power" precision="FP32">
            <power_data power="1" scale="1" shift="1"/>
            <input>
                <port id="6">
                    <dim>1</dim>
                    <dim>3</dim>
                    <dim>3</dim>
                    <dim>3</dim>
                </port>
            </input>
            <output>
                <port id="7">
                    <dim>1</dim>
                    <dim>3</dim>
                    <dim>3</dim>
                    <dim>3</dim>
                </port>
            </output>
        </layer>
        <layer name="power3" id="6" type="Power" precision="FP32">
            <power_data power="1" scale="1" shift="1"/>
            <input>
                <port id="8">
                    <dim>1</dim>
                    <dim>3</dim>
                </port>
            </input>
            <output>
                <port id="9">
                    <dim>1</dim>
                    <dim>3</dim>
                </port>
            </output>
        </layer>
    </layers>
    <edges>
        <edge from-layer="1" from-port="1" to-layer="4" to-port="4"/>
        <edge from-layer="2" from-port="2" to-layer="5" to-port="6"/>
        <edge from-layer="3" from-port="3" to-layer="6" to-port="8"/>
    </edges>
</net>
)V0G0N";

    std::string getModel(input_test_params p) {
        return model_t;
    }

protected:
    virtual void TearDown() {
    }

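    // SetUp does all the work: it parses the IR above, builds the MKLDNN test graph and runs the
    // per-descriptor checks, so the TEST_P body below stays empty.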
    virtual void SetUp() {
        try {
            TestsCommon::SetUp();
            input_test_params p = ::testing::WithParamInterface<input_test_params>::GetParam();
            std::string model = getModel(p);

            InferenceEngine::CNNNetReader net_reader;
            ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));

            MKLDNNGraphTestClass graph;
            graph.CreateGraph(net_reader.getNetwork());

            auto& nodes = graph.getNodes();
            for (size_t i = 0; i < nodes.size(); i++) {
                if (nodes[i]->getType() == MKLDNNPlugin::Input || nodes[i]->getType() == MKLDNNPlugin::Output) {
                    ASSERT_EQ(p.num_prim_desc, nodes[i]->getSupportedPrimitiveDescriptors().size());
                    // Pick the comparator matching this node: 0/1 for 4D/2D inputs, 2/3 for 4D/2D outputs.
                    size_t count = (nodes[i]->getType() == MKLDNNPlugin::Input) ? 0 : 2;
                    if (nodes[i]->getName() == "in3") {
                        count = 1;
                    }
                    if (nodes[i]->getName() == "out_power3") {
                        count = 3;
                    }
                    for (size_t j = 0; j < p.num_prim_desc && j < p.comp.size(); j++) {
                        p.comp.at(count)(nodes[i]->getSupportedPrimitiveDescriptors().at(j));
                    }
                    ASSERT_NE(nullptr, nodes[i]->getSelectedPrimitiveDescriptor());
                    ASSERT_EQ(p.selectedType, nodes[i]->getSelectedPrimitiveDescriptor()->getImplementationType());
                }
            }
        } catch (const InferenceEngine::details::InferenceEngineException &e) {
            FAIL() << e.what();
        }
    }
};

TEST_P(MKLDNNGraphInputTests, TestsInput) {}


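// A single parameter set: each Input/Output node must expose exactly one primitive descriptor of
// type 'unknown'. The four comparators cover, in order, a 4D input (NCHW output config), the 2D
// input in3 (NC), a 4D output (NCHW input config), and the 2D output of power3 (NC).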
INSTANTIATE_TEST_CASE_P(
        TestsInput, MKLDNNGraphInputTests,
        ::testing::Values(
                input_test_params{1, MKLDNNPlugin::impl_desc_type::unknown, {
                        [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                            ASSERT_EQ(MKLDNNPlugin::impl_desc_type::unknown, impl.getImplementationType());
                            ASSERT_EQ(0, impl.getConfig().inConfs.size());
                            ASSERT_EQ(1, impl.getConfig().outConfs.size());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
                        },
                        [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                            ASSERT_EQ(MKLDNNPlugin::impl_desc_type::unknown, impl.getImplementationType());
                            ASSERT_EQ(0, impl.getConfig().inConfs.size());
                            ASSERT_EQ(1, impl.getConfig().outConfs.size());
                            ASSERT_EQ(InferenceEngine::Layout::NC, impl.getConfig().outConfs.at(0).desc.getLayout());
                        },
                        [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                            ASSERT_EQ(MKLDNNPlugin::impl_desc_type::unknown, impl.getImplementationType());
                            ASSERT_EQ(1, impl.getConfig().inConfs.size());
                            ASSERT_EQ(0, impl.getConfig().outConfs.size());
                            ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
                        },
                        [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                            ASSERT_EQ(MKLDNNPlugin::impl_desc_type::unknown, impl.getImplementationType());
                            ASSERT_EQ(1, impl.getConfig().inConfs.size());
                            ASSERT_EQ(0, impl.getConfig().outConfs.size());
                            ASSERT_EQ(InferenceEngine::Layout::NC, impl.getConfig().inConfs.at(0).desc.getLayout());
                        }
                } }
        ));

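// Checks that constant inputs are handled correctly: the model concatenates a regular Input layer
// with a Const layer whose data comes from the weights blob, and the result is compared against
// the original source values.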
class MKLDNNGraphConstInputTests: public TestsCommon {
    std::string model_t = R"V0G0N(
<net name="ConcatOnly" version="2" precision="FP32" batch="1">
    <layers>
        <layer name="in1" type="Input" precision="FP32" id="1">
            <output>
                <port id="1">
                    <dim>1</dim>
                    <dim>3</dim>
                    <dim>2</dim>
                    <dim>2</dim>
                </port>
            </output>
            <blobs>
                <custom offset="0" size="48"/>
            </blobs>
        </layer>
        <layer name="in2" type="Const" precision="FP32" id="2">
            <output>
                <port id="2">
                    <dim>1</dim>
                    <dim>3</dim>
                    <dim>1</dim>
                    <dim>2</dim>
                </port>
            </output>
            <blobs>
                <custom offset="48" size="24"/>
            </blobs>
        </layer>
        <layer name="con" id="3" type="Concat" precision="FP32">
            <concat_data axis="2"/>
            <input>
                <port id="1">
                    <dim>1</dim>
                    <dim>3</dim>
                    <dim>2</dim>
                    <dim>2</dim>
                </port>
                <port id="2">
                    <dim>1</dim>
                    <dim>3</dim>
                    <dim>1</dim>
                    <dim>2</dim>
                </port>
            </input>
            <output>
                <port id="3">
                    <dim>1</dim>
                    <dim>3</dim>
                    <dim>3</dim>
                    <dim>2</dim>
                </port>
            </output>
        </layer>
    </layers>
    <edges>
        <edge from-layer="1" from-port="1" to-layer="3" to-port="1"/>
        <edge from-layer="2" from-port="2" to-layer="3" to-port="2"/>
    </edges>
</net>
)V0G0N";

protected:
    virtual void TearDown() {
    }

    virtual void SetUp() {
        try {
            TestsCommon::SetUp();
            std::string model = model_t;

            InferenceEngine::CNNNetReader net_reader;
            ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));

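            // The weights blob holds 72 bytes: 12 floats (offset 0, size 48) referenced by in1
            // and 6 floats (offset 48, size 24) backing the Const layer in2, as declared in the IR above.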
            InferenceEngine::TBlob<uint8_t> *weights = new InferenceEngine::TBlob<uint8_t>(InferenceEngine::Precision::U8, InferenceEngine::C, {72});
            weights->allocate();
            float *data = weights->buffer();

            InferenceEngine::SizeVector dims_src1 = {1, 3, 2, 2};
            InferenceEngine::SizeVector dims_src2 = {1, 3, 1, 2};

            // Fill the first 12 floats of the weights and the in1 user blob with 1.f.
            InferenceEngine::Blob::Ptr src1 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, InferenceEngine::NCHW, dims_src1);
            src1->allocate();
            float *srcData = src1->buffer();
            for (size_t i = 0; i < 12; i++, data++, srcData++) {
                *data = 1;
                *srcData = 1;
            }

            // The next 6 floats back the Const layer in2; src2 itself is never fed to the graph
            // and only serves as a reference for the comparison below.
            InferenceEngine::Blob::Ptr src2 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, InferenceEngine::NCHW, dims_src2);
            src2->allocate();
            srcData = src2->buffer();
            for (size_t i = 0; i < 6; i++, data++, srcData++) {
                *data = 2;
                *srcData = 2;
            }
            InferenceEngine::TBlob<uint8_t>::Ptr weights_ptr = InferenceEngine::TBlob<uint8_t>::Ptr(weights);

            net_reader.SetWeights(weights_ptr);

            MKLDNNGraphTestClass graph;
            graph.CreateGraph(net_reader.getNetwork());
            auto& nodes = graph.getNodes();
            ASSERT_LE(3, nodes.size());

            InferenceEngine::BlobMap srcs;
            srcs["in1"] = src1;
            InferenceEngine::OutputsDataMap out;
            out = net_reader.getNetwork().getOutputsInfo();
            InferenceEngine::BlobMap outputBlobs;

            std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();

            InferenceEngine::TBlob<float>::Ptr output;
            output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
            output->allocate();
            outputBlobs[item.first] = output;

            graph.Infer(srcs, outputBlobs);

            // Compare the concatenated output against the two source blobs.
            float *src1_ptr = src1->buffer();
            size_t src1_size = src1->size();
            float *src2_ptr = src2->buffer();
            size_t src2_size = src2->size();
            float *dst_ptr = output->buffer();
            size_t dst_size = output->size();

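            // The Concat over axis 2 interleaves len1 elements taken from src1 with len2 elements
            // taken from src2 on every cycle.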
            int len1 = 1, len2 = 1, cycles;
            for (size_t dim = 2; dim < output->dims().size(); dim++) {
                len1 *= src1->dims()[dim];
                len2 *= src2->dims()[dim];
            }
            cycles = 2;

            int index1 = 0, index2 = 0, index = 0;
            for (int cycle = 0; cycle < cycles; cycle++) {
                for (int i1 = 0; i1 < len1; i1++) {
                    if (src1_ptr[index1] != dst_ptr[index])
                    {
                        FAIL() << "index: " << index << " src: " << src1_ptr[index1] << ", dst: " << dst_ptr[index];
                    }
                    index1++; index++;
                }
                for (int i2 = 0; i2 < len2; i2++) {
                    if (src2_ptr[index2] != dst_ptr[index])
                    {
                        FAIL() << "index: " << index << " src: " << src2_ptr[index2] << ", dst: " << dst_ptr[index];
                    }
                    index2++; index++;
                }
            }
        } catch (const InferenceEngine::details::InferenceEngineException &e) {
            FAIL() << e.what();
        }
    }
};

TEST_F(MKLDNNGraphConstInputTests, TestsConstInput) {}


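// Parameters for the layout tests: the layout of the user input blob and the expected output
// values (selectedType and comp are not used by this test).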
struct input_layout_test_params {
    InferenceEngine::Layout layout;
    std::vector<float> reference;
    MKLDNNPlugin::impl_desc_type selectedType;
    std::vector<std::function<void(MKLDNNPlugin::PrimitiveDescInfo)>> comp;
};

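// Feeds the same 1x3x2x2 network an input blob in either NCHW or NHWC layout. The IR defines
// per-channel mean subtraction (1.0, 2.0, 3.0) as pre-processing and a Power layer that adds 1,
// so the output must match the precomputed reference values.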
class MKLDNNGraphInputLayoutTest : public TestsCommon, public WithParamInterface<input_layout_test_params> {
    std::string model_t = R"V0G0N(
<net name="InputLayers" version="2" batch="1">
    <layers>
        <layer name="input" type="Input" precision="FP32" id="0">
            <output>
                <port id="0">
                    <dim>1</dim>
                    <dim>3</dim>
                    <dim>2</dim>
                    <dim>2</dim>
                </port>
            </output>
        </layer>
        <layer name="power1" id="1" type="Power" precision="FP32">
            <power_data power="1" scale="1" shift="1"/>
            <input>
                <port id="1">
                    <dim>1</dim>
                    <dim>3</dim>
                    <dim>2</dim>
                    <dim>2</dim>
                </port>
            </input>
            <output>
                <port id="2">
                    <dim>1</dim>
                    <dim>3</dim>
                    <dim>2</dim>
                    <dim>2</dim>
                </port>
            </output>
        </layer>
    </layers>
    <edges>
        <edge from-layer="0" from-port="0" to-layer="1" to-port="1"/>
    </edges>
    <pre-process reference-layer-name="input" mean-precision="FP32">
        <channel id="0">
            <mean value="1.0"/>
        </channel>
        <channel id="1">
            <mean value="2.0"/>
        </channel>
        <channel id="2">
            <mean value="3.0"/>
        </channel>
    </pre-process>
</net>
)V0G0N";

protected:
    virtual void TearDown() {
    }

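    // SetUp builds the graph, infers with the parameterized input layout and compares the output
    // buffer against the reference values.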
    virtual void SetUp() {
        try {
            TestsCommon::SetUp();
            input_layout_test_params p = ::testing::WithParamInterface<input_layout_test_params>::GetParam();
            std::string model = model_t;

            InferenceEngine::CNNNetReader net_reader;
            ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));

            MKLDNNGraphTestClass graph;
            graph.CreateGraph(net_reader.getNetwork());

            InferenceEngine::TensorDesc desc(InferenceEngine::Precision::FP32, { 1, 3, 2, 2 }, p.layout);
            InferenceEngine::Blob::Ptr src = InferenceEngine::make_shared_blob<float>(desc);
            src->allocate();
            fill_data_dbgval(src->buffer(), src->size());
            InferenceEngine::BlobMap srcs;
            srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("input", src));

            InferenceEngine::OutputsDataMap out = net_reader.getNetwork().getOutputsInfo();
            std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();
            InferenceEngine::TBlob<float>::Ptr output;
            output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
            output->allocate();
            InferenceEngine::BlobMap outputBlobs;
            outputBlobs[item.first] = output;

            graph.Infer(srcs, outputBlobs);
            // Check the results: the reference is compared byte by byte, so size it in bytes.
            if (memcmp((*output).data(), &p.reference[0], p.reference.size() * sizeof(float)) != 0)
                FAIL() << "Result does not match the reference!";
        }
        catch (const InferenceEngine::details::InferenceEngineException &e) {
            FAIL() << e.what();
        }
    }
};

TEST_P(MKLDNNGraphInputLayoutTest, TestsLayoutInput) {}

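// The reference values account for the per-channel mean subtraction (1, 2, 3) and the +1 shift
// applied by the Power layer.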
INSTANTIATE_TEST_CASE_P(
    TestsLayoutInput, MKLDNNGraphInputLayoutTest,
    ::testing::Values(
        input_layout_test_params{ InferenceEngine::NCHW, { 0,1,2,3,3,4,5,6,6,7,8,9 }, MKLDNNPlugin::impl_desc_type::unknown },
        input_layout_test_params{ InferenceEngine::NHWC, { 0,0,0,3,3,3,6,6,6,9,9,9 }, MKLDNNPlugin::impl_desc_type::unknown }
));