1 // Copyright (C) 2018-2019 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
5 #include <gtest/gtest.h>
6 #include <gmock/gmock-spec-builders.h>
7 #include "mkldnn_plugin/mkldnn_graph.h"
9 #include "test_graph.hpp"
11 #include "single_layer_common.hpp"
12 #include <mkldnn_plugin/mkldnn_extension_utils.h>
13 #include <unordered_set>
14 #include <inference_engine/cnn_network_impl.hpp>
15 #include "tests_common.hpp"
17 using namespace ::testing;
19 using namespace mkldnn;
// Parameters for one Concat test case: the expected selected implementation
// type plus optional per-primitive-descriptor check callbacks.
// NOTE(review): the remaining fields (input dims in1/in2, axis, num_prim_desc)
// are referenced by the fixtures below but elided from this excerpt — confirm
// against the full file.
22 struct concat_test_params {
23 // Formats: NCHW, NCDHW
31 MKLDNNPlugin::impl_desc_type selectedType;
33 std::vector<std::function<void(MKLDNNPlugin::PrimitiveDescInfo)>> comp;
// Value-parameterized test: builds a minimal two-input Concat-only network
// from an XML template, checks the Concat node's supported/selected primitive
// descriptors, runs inference, and compares the output against a reference
// interleaving of the two source blobs.
// NOTE(review): this chunk is sparsely excerpted — XML template rows, closing
// braces and some statements of the original file are not visible here, so
// the code is kept byte-identical and only annotated.
36 class MKLDNNGraphConcatTests: public TestsCommon,
37 public WithParamInterface<concat_test_params> {
38 std::string model_t = R"V0G0N(
39 <net name="ConcatOnly" version="3" precision="FP32" batch="1">
41 <layer name="in1" type="Input" precision="FP32" id="1">
43 <port id="1">__SRC_DIMS_1__
47 <layer name="in2" type="Input" precision="FP32" id="2">
49 <port id="2">__SRC_DIMS_2__
53 <layer name="con" id="3" type="Concat" precision="FP32">
54 <concat_data axis="_AXIS_"/>
56 <port id="1">__SRC_DIMS_1__
58 <port id="2">__SRC_DIMS_2__
62 <port id="3">__DST_DIMS__
68 <edge from-layer="1" from-port="1" to-layer="3" to-port="1"/>
69 <edge from-layer="2" from-port="2" to-layer="3" to-port="2"/>
74 std::string getModel(concat_test_params p) {  // substitutes dim/axis placeholders in model_t for this case
75 std::string model = model_t;
77 for (auto& dim : p.in1) {
79 s_dims += std::to_string(dim) + "</dim>";
81 REPLACE_WITH_STR(model, "__SRC_DIMS_1__", s_dims);
84 for (auto& dim : p.in2) {
86 s_dims += std::to_string(dim) + "</dim>";
88 REPLACE_WITH_STR(model, "__SRC_DIMS_2__", s_dims);
91 for (size_t i = 0; i < p.in1.size(); i++) {
92 size_t dim = p.axis == i ? p.in1[i] + p.in2[i] : p.in1[i];  // concat-axis dim is the sum of both inputs
94 s_dims += std::to_string(dim) + "</dim>";
96 REPLACE_WITH_STR(model, "__DST_DIMS__", s_dims);
98 REPLACE_WITH_NUM(model, "_AXIS_", p.axis);
103 virtual void TearDown() {
106 virtual void SetUp() {  // all verification happens here; the TEST_P body below is intentionally empty
108 TestsCommon::SetUp();
109 concat_test_params p = ::testing::WithParamInterface<concat_test_params>::GetParam();
110 std::string model = getModel(p);
112 InferenceEngine::CNNNetReader net_reader;
113 ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));
115 MKLDNNGraphTestClass graph;
116 graph.CreateGraph(net_reader.getNetwork());
117 auto& nodes = graph.getNodes();
118 for (int i = 0; i < nodes.size(); i++) {
119 if (nodes[i]->getType() == MKLDNNPlugin::Concatenation) {
120 ASSERT_EQ(p.num_prim_desc, nodes[i]->getSupportedPrimitiveDescriptors().size());
121 for (size_t j = 0; j < p.num_prim_desc && j < p.comp.size(); j++) {  // run optional per-descriptor check callbacks
122 p.comp.at(j)(nodes[i]->getSupportedPrimitiveDescriptors().at(j));
124 ASSERT_NE(nullptr, nodes[i]->getSelectedPrimitiveDescriptor());
125 ASSERT_EQ(p.selectedType, nodes[i]->getSelectedPrimitiveDescriptor()->getImplementationType());
128 ASSERT_LE(3, nodes.size());  // at minimum: two inputs plus the concat node
130 InferenceEngine::SizeVector dims_src1 = p.in1;
131 InferenceEngine::SizeVector dims_src2 = p.in2;
132 InferenceEngine::Layout layout = InferenceEngine::ANY;
133 switch (p.in1.size()) {  // 4D -> NCHW, 5D -> NCDHW (case labels elided in this excerpt)
135 layout = InferenceEngine::NCHW;
138 layout = InferenceEngine::NCDHW;
142 InferenceEngine::Blob::Ptr src1 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, dims_src1);
145 fill_data(src1->buffer(), src1->size());
146 InferenceEngine::Blob::Ptr src2 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, dims_src2);
148 fill_data(src2->buffer(), src2->size());
149 InferenceEngine::BlobMap srcs;
150 srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src1));
151 srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in2", src2));
153 InferenceEngine::OutputsDataMap out;
154 out = net_reader.getNetwork().getOutputsInfo();
155 InferenceEngine::BlobMap outputBlobs;
157 std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();
159 InferenceEngine::TBlob<float>::Ptr output;
160 output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
162 outputBlobs[item.first] = output;
164 graph.Infer(srcs, outputBlobs);
167 float *src1_ptr = src1->buffer();
168 size_t src1_size = src1->size();
169 float *src2_ptr = src2->buffer();
170 size_t src2_size = src2->size();
171 float *dst_ptr = output->buffer();
172 size_t dst_size = output->size();
174 int len1 = 1, len2 = 1, cycles;
175 for (int dim = p.axis; dim < output->dims().size(); dim++) {  // len{1,2}: contiguous chunk size per source from the concat axis down
176 len1 *= src1->dims()[dim];
177 len2 *= src2->dims()[dim];
182 int index1 = 0, index2 = 0, index = 0;
183 for (int cycle = 0; cycle < cycles; cycle ++) {  // reference check: dst alternates a len1 chunk of src1 and a len2 chunk of src2
184 for (int i1 = 0; i1 < len1; i1++) {
185 if (src1_ptr[index1] != dst_ptr[index])
187 FAIL() << "index: " << index << " src: " << src1_ptr[index1] << ", dst: " << dst_ptr[index];
191 for (int i2 = 0; i2 < len2; i2++) {
192 if (src2_ptr[index2] != dst_ptr[index])
194 FAIL() << "index: " << index << " src: " << src2_ptr[index2] << ", dst: " << dst_ptr[index];
199 } catch (const InferenceEngine::details::InferenceEngineException &e) {
// Body intentionally empty: all checks run in the fixture's SetUp() above.
205 TEST_P(MKLDNNGraphConcatTests, TestsConcat) {}
// Concrete Concat cases: input shape pairs with concat axis, expected count of
// supported primitive descriptors, and expected selected implementation type.
// Most initializer rows are elided in this excerpt.
207 INSTANTIATE_TEST_CASE_P(
208 TestsConcat, MKLDNNGraphConcatTests,
218 2, 1, MKLDNNPlugin::impl_desc_type::ref
228 1, 6, MKLDNNPlugin::impl_desc_type::unknown
231 {1, 64, 16, 16, 16, 1},
232 {1, 64, 16, 16, 16, 1},
233 5, 1, MKLDNNPlugin::impl_desc_type::ref
// Dynamic-batch variant: the template fixes the batch dim to 1; SetUp reshapes
// the network to the requested max batch (MB), enables dynamic batching via
// plugin config and verifies inference at both batch MB and batch 1.
// NOTE(review): sparsely excerpted — elided lines are kept out; code is
// byte-identical, only annotated.
236 class MKLDNNGraphDynBatchConcatTests: public TestsCommon, public WithParamInterface<concat_test_params> {
237 std::string model_t = R"V0G0N(
238 <net name="ConcatOnly" version="2" precision="FP32" batch="1">
240 <layer name="in1" type="Input" precision="FP32" id="1">
243 <dim>1</dim>__SRC_DIMS_1__
247 <layer name="in2" type="Input" precision="FP32" id="2">
250 <dim>1</dim>__SRC_DIMS_2__
254 <layer name="con" id="3" type="Concat" precision="FP32">
255 <concat_data axis="_AXIS_"/>
258 <dim>1</dim>__SRC_DIMS_1__
261 <dim>1</dim>__SRC_DIMS_2__
266 <dim>1</dim>__DST_DIMS__
272 <edge from-layer="1" from-port="1" to-layer="3" to-port="1"/>
273 <edge from-layer="2" from-port="2" to-layer="3" to-port="2"/>
278 std::string getModel(concat_test_params p) {  // batch dim stays "1" in the template; only dims [1..] are substituted
279 std::string model = model_t;
281 for (size_t i = 1; i < p.in1.size(); i++) {
282 s_dims += "\n <dim>";
283 s_dims += std::to_string(p.in1[i]) + "</dim>";
285 REPLACE_WITH_STR(model, "__SRC_DIMS_1__", s_dims);
288 for (size_t i = 1; i < p.in2.size(); i++) {
289 s_dims += "\n <dim>";
290 s_dims += std::to_string(p.in2[i]) + "</dim>";
292 REPLACE_WITH_STR(model, "__SRC_DIMS_2__", s_dims);
295 for (size_t i = 1; i < p.in1.size(); i++) {
296 size_t dim = p.axis == i ? p.in1[i] + p.in2[i] : p.in1[i];  // concat-axis dim is the sum of both inputs
297 s_dims += "\n <dim>";
298 s_dims += std::to_string(dim) + "</dim>";
300 REPLACE_WITH_STR(model, "__DST_DIMS__", s_dims);
302 REPLACE_WITH_NUM(model, "_AXIS_", p.axis);
307 virtual void TearDown() {
310 virtual void SetUp() {
312 TestsCommon::SetUp();
313 concat_test_params p = ::testing::WithParamInterface<concat_test_params>::GetParam();
314 std::string model = getModel(p);
315 size_t MB = p.in1[0];  // requested max batch comes from the first input's batch dim
319 InferenceEngine::CNNNetReader net_reader;
320 ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));
321 InferenceEngine::CNNNetwork network = net_reader.getNetwork();
322 auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
323 ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
324 InferenceEngine::ResponseDesc resp;
325 InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);  // reshape batch 1 -> MB before graph creation
326 ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;
328 MKLDNNGraphTestClass graph;
329 graph.setProperty({{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED, InferenceEngine::PluginConfigParams::YES}});
330 graph.CreateGraph(net_reader.getNetwork());
332 InferenceEngine::SizeVector dims_src1 = p.in1;
333 InferenceEngine::SizeVector dims_src2 = p.in2;
334 InferenceEngine::Layout layout = InferenceEngine::ANY;
335 switch (p.in1.size()) {  // 4D -> NCHW, 5D -> NCDHW (case labels elided in this excerpt)
337 layout = InferenceEngine::NCHW;
340 layout = InferenceEngine::NCDHW;
344 InferenceEngine::Blob::Ptr src1 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, dims_src1);
347 fill_data(src1->buffer(), src1->size());
348 InferenceEngine::Blob::Ptr src2 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, dims_src2);
350 fill_data(src2->buffer(), src2->size());
351 InferenceEngine::BlobMap srcs;
352 srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src1));
353 srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in2", src2));
355 InferenceEngine::OutputsDataMap out;
356 out = net_reader.getNetwork().getOutputsInfo();
357 InferenceEngine::BlobMap outputBlobs;
359 std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();
361 InferenceEngine::TBlob<float>::Ptr output;
362 output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
364 outputBlobs[item.first] = output;
367 auto checkConcat = [](const MKLDNNPlugin::MKLDNNNodePtr& node) {
368 return node->getType() == MKLDNNPlugin::Concatenation;
371 MKLDNNGraphTestClass::CheckDynBatchType checkType = MKLDNNGraphTestClass::CheckDynBatchType::Both;
372 if (p.selectedType == MKLDNNPlugin::impl_desc_type::unknown)  // 'unknown' concat may be optimized out of the graph; check only its children
373 checkType = MKLDNNGraphTestClass::CheckDynBatchType::Child;
375 graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkConcat, checkType);
376 graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkConcat, checkType);  // also exercise the smallest dynamic batch
377 } catch (const InferenceEngine::details::InferenceEngineException &e) {
// Body intentionally empty: all checks run in the fixture's SetUp() above.
383 TEST_P(MKLDNNGraphDynBatchConcatTests, TestsDynBatchConcat) {}
// Dynamic-batch Concat cases; rows list expected primitive-descriptor count
// and expected selected implementation type (input-shape rows are elided in
// this excerpt).
386 INSTANTIATE_TEST_CASE_P(
387 TestsDynBatchConcat, MKLDNNGraphDynBatchConcatTests,
392 2, 1, MKLDNNPlugin::impl_desc_type::ref
397 1, 2, MKLDNNPlugin::impl_desc_type::unknown
402 1, 2, MKLDNNPlugin::impl_desc_type::unknown
407 3, 1, MKLDNNPlugin::impl_desc_type::ref
412 1, 4, MKLDNNPlugin::impl_desc_type::unknown
417 1, 2, MKLDNNPlugin::impl_desc_type::unknown
422 1, 2, MKLDNNPlugin::impl_desc_type::unknown
// Describes one Concat layer in the two-concat topology; based on usage below
// it carries a name, an axis and two input selectors (field list elided in
// this excerpt — confirm against the full file).
425 struct concat_param {
// Parameters for the two-concat test: three input shapes (elided here) plus
// descriptors for both Concat layers.
432 struct two_concat_test_params {
433 // Formats: NCHW, NCDHW
438 concat_param concat1;
439 concat_param concat2;
// Builds a topology with three inputs feeding two Concat layers in
// configurable wirings, infers, and verifies each concat output against its
// two selected sources.
// NOTE(review): sparsely excerpted — many template rows, case labels and
// assignments (e.g. to concat11/concat12/concat21/concat22 and src1_c/src2_c)
// are not visible here; code is kept byte-identical, only annotated.
442 class MKLDNNGraphTwoConcatTests: public TestsCommon,
443 public WithParamInterface<two_concat_test_params> {
444 std::string model_t = R"V0G0N(
445 <net name="TwoConcatsDiffFwd" version="2" precision="FP32" batch="1">
447 <layer name="in1" type="Input" precision="FP32" id="1">
449 <port id="1">__SRC_DIMS_1__
453 <layer name="in2" type="Input" precision="FP32" id="2">
455 <port id="1">__SRC_DIMS_2__
459 <layer name="in3" type="Input" precision="FP32" id="3">
461 <port id="1">__SRC_DIMS_3__
465 <layer name="_CONCAT1_NAME_" id="4" type="Concat" precision="FP32">
466 <concat_data axis="_CONCAT1_AXIS_"/>
484 <port id="3">__CO_DIMS_1__
488 <layer name="_CONCAT2_NAME_" id="5" type="Concat" precision="FP32">
489 <concat_data axis="_CONCAT2_AXIS_"/>
507 <port id="3">__CO_DIMS_2__
513 <edge from-layer="1" from-port="1" to-layer="_FL11_" to-port="_FP11_"/>
514 <edge from-layer="2" from-port="1" to-layer="_FL21_" to-port="_FP21_"/>
515 <edge from-layer="3" from-port="1" to-layer="_FL31_" to-port="_FP31_"/>
516 <edge from-layer="_FSL_" from-port="_FSP_" to-layer="_FSLTL_" to-port="_FSLTP_"/>
520 void changeEdgeToLayer(std::string& model, int f_l, int f_p, int t_l, int t_p, vector<size_t> dims) {  // rewires one templated edge and fills the matching _CI..._ dim placeholders
521 std::string TL = "_FL" + std::to_string(f_l) + std::to_string(f_p) + "_";
522 std::string TP = "_FP" + std::to_string(f_l) + std::to_string(f_p) + "_";
523 if (!FIND_STR(model, TL) || !FIND_STR(model, TP)) {  // direct tokens already consumed: fall back to the spare-edge placeholders
524 if (!FIND_STR(model, "_FSL_") || !FIND_STR(model, "_FSP_") ||
525 !FIND_STR(model, "_FSLTL_") || !FIND_STR(model, "_FSLTP_")) {
526 THROW_IE_EXCEPTION << "Incorrect configuration!";
528 REPLACE_WITH_NUM(model, "_FSL_", f_l);
529 REPLACE_WITH_NUM(model, "_FSP_", f_p);
530 REPLACE_WITH_NUM(model, "_FSLTL_", t_l);
531 REPLACE_WITH_NUM(model, "_FSLTP_", t_p);
533 REPLACE_WITH_NUM(model, TL, t_l);
534 REPLACE_WITH_NUM(model, TP, t_p);
537 std::string CI = "_CI" + std::to_string(t_l) + std::to_string(t_p);  // per-destination-port input-dim placeholders
538 auto dims_size = dims.size();
539 REPLACE_WITH_NUM(model, CI + "N_", dims[0]);
540 REPLACE_WITH_NUM(model, CI + "C_", dims[1]);
541 REPLACE_WITH_NUM(model, CI + "H_", dims[dims_size - 2]);
542 REPLACE_WITH_NUM(model, CI + "W_", dims[dims_size - 1]);
543 if (dims_size < 5) REMOVE_LINE(model, std::string("<dim>") + CI + std::string("D_") + "</dim>");  // 4D input: drop the depth row entirely
544 else REPLACE_WITH_NUM(model, CI + "D_", dims[dims_size - 3]);
548 std::string getModel(two_concat_test_params p) {
549 std::string model = model_t;
551 for (size_t i = 0; i < p.in1.size(); i++) {
552 s_dims += "\n <dim>";
553 s_dims += std::to_string(p.in1[i]) + "</dim>";
555 REPLACE_WITH_STR(model, "__SRC_DIMS_1__", s_dims);
558 for (size_t i = 0; i < p.in2.size(); i++) {
559 s_dims += "\n <dim>";
560 s_dims += std::to_string(p.in2[i]) + "</dim>";
562 REPLACE_WITH_STR(model, "__SRC_DIMS_2__", s_dims);
565 for (size_t i = 0; i < p.in3.size(); i++) {
566 s_dims += "\n <dim>";
567 s_dims += std::to_string(p.in3[i]) + "</dim>";
569 REPLACE_WITH_STR(model, "__SRC_DIMS_3__", s_dims);
571 vector<size_t> concat11;
572 switch (p.concat1.input1) {  // concat11 is presumably assigned the chosen input's dims in elided lines — TODO confirm
574 changeEdgeToLayer(model, 2, 1, 4, 1, p.in2);
578 changeEdgeToLayer(model, 3, 1, 4, 1, p.in3);
582 changeEdgeToLayer(model, 1, 1, 4, 1, p.in1);
586 vector<size_t> concat12;
587 switch (p.concat1.input2) {
589 changeEdgeToLayer(model, 2, 1, 4, 2, p.in2);
593 changeEdgeToLayer(model, 3, 1, 4, 2, p.in3);
597 changeEdgeToLayer(model, 1, 1, 4, 2, p.in1);
601 vector<size_t> concat21;
602 switch (p.concat2.input1) {
604 changeEdgeToLayer(model, 2, 1, 5, 1, p.in2);
608 changeEdgeToLayer(model, 3, 1, 5, 1, p.in3);
612 changeEdgeToLayer(model, 1, 1, 5, 1, p.in1);
616 vector<size_t> concat22;
617 switch (p.concat2.input2) {
619 changeEdgeToLayer(model, 2, 1, 5, 2, p.in2);
623 changeEdgeToLayer(model, 3, 1, 5, 2, p.in3);
627 changeEdgeToLayer(model, 1, 1, 5, 2, p.in1);
632 for (size_t i = 0; i < p.in2.size(); i++) {
// NOTE(review): the else-branch reads concat21[i] (concat2's first input) while
// computing concat1's output dims; by symmetry with the concat2 loop below it
// should presumably be concat11[i] — looks like a copy/paste slip, TODO confirm
// against the full file.
633 size_t concat = p.concat1.axis == i ? concat11[i] + concat12[i] : concat21[i];
634 s_dims += "\n <dim>";
635 s_dims += std::to_string(concat) + "</dim>";
637 REPLACE_WITH_STR(model, "__CO_DIMS_1__", s_dims);
639 REPLACE_WITH_NUM(model, "_CONCAT1_AXIS_", p.concat1.axis);
640 REPLACE_WITH_STR(model, "_CONCAT1_NAME_", p.concat1.name);
643 for (size_t i = 0; i < p.in2.size(); i++) {
644 size_t concat = p.concat2.axis == i ? concat21[i] + concat22[i] : concat21[i];  // axis dim summed; other dims follow input 1
645 s_dims += "\n <dim>";
646 s_dims += std::to_string(concat) + "</dim>";
648 REPLACE_WITH_STR(model, "__CO_DIMS_2__", s_dims);
650 REPLACE_WITH_NUM(model, "_CONCAT2_AXIS_", p.concat2.axis);
651 REPLACE_WITH_STR(model, "_CONCAT2_NAME_", p.concat2.name);
656 virtual void TearDown() {
659 virtual void SetUp() {
661 TestsCommon::SetUp();
662 two_concat_test_params p = ::testing::WithParamInterface<two_concat_test_params>::GetParam();
663 std::string model = getModel(p);
665 InferenceEngine::CNNNetReader net_reader;
666 ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));
668 MKLDNNGraphTestClass graph;
669 graph.CreateGraph(net_reader.getNetwork());
671 InferenceEngine::SizeVector dims_src1 = p.in1;
672 InferenceEngine::SizeVector dims_src2 = p.in2;
673 InferenceEngine::SizeVector dims_src3 = p.in3;
674 InferenceEngine::Layout layout = InferenceEngine::ANY;
675 switch (p.in1.size()) {  // 4D -> NCHW, 5D -> NCDHW (case labels elided in this excerpt)
677 layout = InferenceEngine::NCHW;
680 layout = InferenceEngine::NCDHW;
684 InferenceEngine::Blob::Ptr src1 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, dims_src1);
686 fill_data(src1->buffer(), src1->size());
688 InferenceEngine::Blob::Ptr src2 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, dims_src2);
690 fill_data(src2->buffer(), src2->size());
692 InferenceEngine::Blob::Ptr src3 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, dims_src3);
694 fill_data(src3->buffer(), src3->size());
696 InferenceEngine::BlobMap srcs;
697 srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src1));
698 srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in2", src2));
699 srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in3", src3));
701 InferenceEngine::OutputsDataMap out;
702 out = net_reader.getNetwork().getOutputsInfo();
703 InferenceEngine::BlobMap outputBlobs;
705 for (auto it = out.begin(); it != out.end(); it++) {  // one output blob per concat layer
706 std::pair<std::string, InferenceEngine::DataPtr> item = *it;
707 InferenceEngine::TBlob<float>::Ptr output;
708 output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
710 outputBlobs[item.first] = output;
713 graph.Infer(srcs, outputBlobs);
715 for (auto concat : {p.concat1, p.concat2}) {  // verify each concat output against its two selected sources
720 InferenceEngine::Blob::Ptr src1_c;
721 InferenceEngine::Blob::Ptr src2_c;
723 switch (concat.input1) {  // pick pointer/size of the blob wired to the concat's first port (src1_c presumably assigned in elided lines)
725 src1_ptr = src2->buffer();
726 src1_size = src2->size();
730 src1_ptr = src3->buffer();
731 src1_size = src3->size();
735 src1_ptr = src1->buffer();
736 src1_size = src1->size();
740 switch (concat.input2) {
742 src2_ptr = src2->buffer();
743 src2_size = src2->size();
747 src2_ptr = src3->buffer();
748 src2_size = src3->size();
752 src2_ptr = src1->buffer();
753 src2_size = src1->size();
757 float *dst_ptr = outputBlobs[concat.name]->buffer();
758 size_t dst_size = outputBlobs[concat.name]->size();
760 int len1 = 1, len2 = 1, cycles;
761 for (int dim = concat.axis; dim < outputBlobs[concat.name]->dims().size(); dim++) {  // chunk sizes from the concat axis down
762 len1 *= src1_c->dims()[dim];
763 len2 *= src2_c->dims()[dim];
765 cycles = concat.axis;
767 int index1 = 0, index2 = 0, index = 0;
768 for (int cycle = 0; cycle < cycles; cycle ++) {  // dst alternates a len1 chunk of input1 and a len2 chunk of input2
769 for (int i1 = 0; i1 < len1; i1++) {
770 if (src1_ptr[index1] != dst_ptr[index])
772 FAIL() << concat.name << " index: " << index << " src: "
773 << src1_ptr[index1] << ", dst: " << dst_ptr[index];
777 for (int i2 = 0; i2 < len2; i2++) {
778 if (src2_ptr[index2] != dst_ptr[index])
780 FAIL() << concat.name << " index: " << index << " src: "
781 << src2_ptr[index2] << ", dst: " << dst_ptr[index];
787 } catch (const InferenceEngine::details::InferenceEngineException &e) {
// Body intentionally empty: all checks run in the fixture's SetUp() above.
793 TEST_P(MKLDNNGraphTwoConcatTests, TestsTwoConcat) {}
// Two-concat wiring cases; each row shown is a concat1 descriptor
// {name, axis, input1, input2} (shape rows and concat2 descriptors are elided
// in this excerpt).
795 INSTANTIATE_TEST_CASE_P(
796 TestsTwoConcat, MKLDNNGraphTwoConcatTests,
798 two_concat_test_params {
802 {"concat1", 0, 0, 1},
805 two_concat_test_params {
809 {"concat1", 1, 0, 1},
812 two_concat_test_params {
816 {"concat1", 1, 0, 1},
819 two_concat_test_params {
823 {"concat1", 0, 0, 1},
826 two_concat_test_params {
830 {"concat1", 1, 0, 1},
833 two_concat_test_params {
837 {"concat1", 1, 0, 1},
// Regression test: one input (in1) fans out to three consumers (ReLU, Power,
// and the Concat's second port) while in2 feeds the Concat's first port;
// verifies the concat output is in2 followed by in1 along axis 1.
// NOTE(review): sparsely excerpted — template rows and closing braces are not
// visible; code is kept byte-identical, only annotated.
842 class MKLDNNGraphTwoInputInConcatTests: public TestsCommon {
843 std::string model_t = R"V0G0N(
844 <net name="TwoConcatsDiffFwd" version="2" precision="FP32" batch="1">
846 <layer name="in1" type="Input" precision="FP32" id="1">
856 <layer name="in2" type="Input" precision="FP32" id="2">
866 <layer name="norm" id="3" type="ReLU" precision="FP32">
884 <layer name="power" id="4" type="Power" precision="FP32">
885 <power_data power="-1" scale="-1" shift="0"/>
903 <layer name="o_concat" id="5" type="Concat" precision="FP32">
904 <concat_data axis="1"/>
930 <edge from-layer="1" from-port="1" to-layer="3" to-port="1"/>
931 <edge from-layer="1" from-port="1" to-layer="5" to-port="2"/>
932 <edge from-layer="1" from-port="1" to-layer="4" to-port="1"/>
933 <edge from-layer="2" from-port="1" to-layer="5" to-port="1"/>
939 virtual void TearDown() {
942 virtual void SetUp() {
944 TestsCommon::SetUp();
945 std::string model = model_t;
947 InferenceEngine::CNNNetReader net_reader;
948 ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));
950 MKLDNNGraphTestClass graph;
951 graph.CreateGraph(net_reader.getNetwork());
953 InferenceEngine::SizeVector dims_src1 = {1, 3, 2, 2};
954 InferenceEngine::SizeVector dims_src2 = {1, 2, 2, 2};
956 InferenceEngine::Blob::Ptr src1 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, InferenceEngine::NCHW, dims_src1);
958 float *src1_data = src1->buffer();
959 for (size_t i = 0; i < src1->size(); i++) {  // deterministic 1..N ramp so fan-out consumers see distinct values
960 src1_data[i] = i + 1;
963 InferenceEngine::Blob::Ptr src2 = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, InferenceEngine::NCHW, dims_src2);
965 fill_data(src2->buffer(), src2->size());
967 InferenceEngine::BlobMap srcs;
968 srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src1));
969 srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in2", src2));
971 InferenceEngine::OutputsDataMap out;
972 out = net_reader.getNetwork().getOutputsInfo();
973 InferenceEngine::BlobMap outputBlobs;
975 for (auto it = out.begin(); it != out.end(); it++) {
976 std::pair<std::string, InferenceEngine::DataPtr> item = *it;
977 InferenceEngine::TBlob<float>::Ptr output;
978 output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
980 outputBlobs[item.first] = output;
983 graph.Infer(srcs, outputBlobs);
985 float *src1_ptr = src2->buffer();  // intentionally swapped: concat port 1 is fed by in2 (see edges above)
986 size_t src1_size = src2->size();
987 float *src2_ptr = src1->buffer();  // ...and port 2 by in1
988 size_t src2_size = src1->size();
990 float *dst_ptr = outputBlobs["o_concat"]->buffer();
991 size_t dst_size = outputBlobs["o_concat"]->size();
993 int len1 = 1, len2 = 1, cycles;
994 for (int dim = 1; dim < outputBlobs["o_concat"]->dims().size(); dim++) {  // chunk sizes from the concat axis (1) down
995 len1 *= src2->dims()[dim];
996 len2 *= src1->dims()[dim];
1000 int index1 = 0, index2 = 0, index = 0;
1001 for (int cycle = 0; cycle < cycles; cycle ++) {  // dst alternates a len1 chunk of in2 and a len2 chunk of in1
1002 for (int i1 = 0; i1 < len1; i1++) {
1003 if (src1_ptr[index1] != dst_ptr[index])
1005 FAIL() << "concat index: " << index << " src: "
1006 << src1_ptr[index1] << ", dst: " << dst_ptr[index];
1010 for (int i2 = 0; i2 < len2; i2++) {
1011 if (src2_ptr[index2] != dst_ptr[index])
1013 FAIL() << "concat index: " << index << " src: "
1014 << src2_ptr[index2] << ", dst: " << dst_ptr[index];
1019 } catch (const InferenceEngine::details::InferenceEngineException &e) {
// Body intentionally empty: all checks run in the fixture's SetUp() above.
1025 TEST_F(MKLDNNGraphTwoInputInConcatTests, TestSecondInputToConcat) {}
// Negative tests: the parametrized input shapes are deliberately inconsistent
// for the given concat axis, so CNNNetReader::ReadNetwork is expected to
// throw InferenceEngineException during validation.
// NOTE(review): sparsely excerpted — code kept byte-identical, only annotated.
1027 class MKLDNNGraphIncorrectConcatTests: public TestsCommon,
1028 public WithParamInterface<concat_test_params> {
1029 std::string model_t = R"V0G0N(
1030 <net name="ConcatOnly" version="2" precision="FP32" batch="1">
1032 <layer name="in1" type="Input" precision="FP32" id="1">
1034 <port id="1">__SRC_DIMS_1__
1038 <layer name="in2" type="Input" precision="FP32" id="2">
1040 <port id="2">__SRC_DIMS_2__
1044 <layer name="con" id="3" type="Concat" precision="FP32">
1045 <concat_data axis="_AXIS_"/>
1047 <port id="1">__SRC_DIMS_1__
1049 <port id="2">__SRC_DIMS_2__
1053 <port id="3">__DST_DIMS__
1059 <edge from-layer="1" from-port="1" to-layer="3" to-port="1"/>
1060 <edge from-layer="2" from-port="2" to-layer="3" to-port="2"/>
1065 std::string getModel(concat_test_params p) {
1066 std::string model = model_t;
1068 for (auto& dim : p.in1) {
1069 s_dims += "\n <dim>";
1070 s_dims += std::to_string(dim) + "</dim>";
1072 REPLACE_WITH_STR(model, "__SRC_DIMS_1__", s_dims);
1075 for (auto& dim : p.in2) {
1076 s_dims += "\n <dim>";
1077 s_dims += std::to_string(dim) + "</dim>";
1079 REPLACE_WITH_STR(model, "__SRC_DIMS_2__", s_dims);
1082 for (size_t i = 0; i < p.in1.size(); i++) {
1083 size_t dim = p.axis == i ? p.in1[i] + p.in2[i] : p.in1[i];  // dst computed from in1; mismatched in2 makes the model invalid
1084 s_dims += "\n <dim>";
1085 s_dims += std::to_string(dim) + "</dim>";
1087 REPLACE_WITH_STR(model, "__DST_DIMS__", s_dims);
1089 REPLACE_WITH_NUM(model, "_AXIS_", p.axis);
1094 virtual void TearDown() {
1097 virtual void SetUp() {
1099 TestsCommon::SetUp();
1100 concat_test_params p = ::testing::WithParamInterface<concat_test_params>::GetParam();
1101 std::string model = getModel(p);
1103 InferenceEngine::CNNNetReader net_reader;
1104 ASSERT_THROW(net_reader.ReadNetwork(model.data(), model.length()),
1105 InferenceEngine::details::InferenceEngineException);  // the invalid dims must be rejected at parse/validate time
1106 } catch (const InferenceEngine::details::InferenceEngineException &e) {
1115 INSTANTIATE_TEST_CASE_P(
1116 TestsIncorrectConcat, MKLDNNGraphIncorrectConcatTests,
1118 concat_test_params {
1123 concat_test_params {