1 // Copyright (C) 2018-2019 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
5 #include <gtest/gtest.h>
6 #include <gmock/gmock-spec-builders.h>
7 #include "mkldnn_plugin/mkldnn_graph.h"
9 #include "test_graph.hpp"
11 #include "single_layer_common.hpp"
12 #include <ie_layers.h>
13 #include <mkldnn_plugin/mkldnn_extension_utils.h>
14 #include <inference_engine/cnn_network_impl.hpp>
15 #include "tests_common.hpp"
16 #include "ir_gen_helper.hpp"
19 using namespace InferenceEngine;
20 using namespace ::testing;
22 using namespace mkldnn;
23 using namespace single_layer_tests;
25 struct pooling_test_params {
// Parameters describing one pooling-layer test case (consumed by ref_pool()
// and by the graph tests below via GoogleTest's WithParamInterface).
// NOTE(review): this listing appears truncated — members referenced later in
// the file (dims, _exclude_pad, num_prim_desc) are not visible here; confirm
// against the full source before relying on field order.
26 // Formats: NCHW, NCDHW
// Pooling window sizes per spatial axis (indexed by X_AXIS/Y_AXIS/Z_AXIS).
29 vector<size_t> kernel;
// Window strides per spatial axis.
30 vector<size_t> strides;
// Paddings added before/after each spatial axis.
31 vector<size_t> pads_begin;
32 vector<size_t> pads_end;
// Pooling method: MAX / AVG (ROI also handled when building the IR string).
34 PoolingLayer::PoolType _type;
// Implementation type expected to be selected for the Pooling node.
39 MKLDNNPlugin::impl_desc_type selectedType;
// Implementations to request via the PrimitivesPriority layer attribute.
40 vector<MKLDNNPlugin::impl_desc_type> preferTypes;
// Optional per-descriptor validation callbacks, applied in order to the
// node's supported primitive descriptors.
42 vector<std::function<void(MKLDNNPlugin::PrimitiveDescInfo)>> comp;
// Scalar reference implementation of 2D/3D max and average pooling, used to
// validate MKLDNN results. Supports NCHW (dims_size == 4) and NCDHW
// (dims_size == 5) inputs; for 4D inputs the depth dimension degenerates to 1.
// NOTE(review): this listing appears truncated — several closing braces and
// some statements (e.g. OC and the iidx/oidx computations) are on lines not
// visible here.
45 template <typename data_t>
46 void ref_pool(const InferenceEngine::TBlob<data_t> &src, InferenceEngine::TBlob<data_t> &dst, pooling_test_params prm)
48 int dims_size = prm.dims.size();
// Kernel extents; depth kernel only meaningful for 5D input.
50 int KW = prm.kernel[X_AXIS];
51 int KH = prm.kernel[Y_AXIS];
52 int KD = dims_size == 5 ? prm.kernel[Z_AXIS] : 1;
// Strides; SD falls back to 1 when no Z stride was provided.
54 int SW = prm.strides[X_AXIS];
55 int SH = prm.strides[Y_AXIS];
56 int SD = prm.strides.size() > Z_AXIS ? prm.strides[Z_AXIS] : 1;
// Input spatial sizes, taken from the innermost dims (W, H, then D for 5D).
58 int IW = prm.dims[dims_size - 1];
59 int IH = prm.dims[dims_size - 2];
60 int ID = dims_size == 5 ? prm.dims[dims_size - 3] : 1;
// Begin/end paddings; depth paddings default to 0 when absent.
62 int PWB = prm.pads_begin[X_AXIS];
63 int PHB = prm.pads_begin[Y_AXIS];
64 int PDB = prm.pads_begin.size() > Z_AXIS ? prm.pads_begin[Z_AXIS] : 0;
65 int PWE = prm.pads_end[X_AXIS];
66 int PHE = prm.pads_end[Y_AXIS];
67 int PDE = prm.pads_end.size() > Z_AXIS ? prm.pads_end[Z_AXIS] : 0;
// Output sizes with floor rounding: (I + pad_b + pad_e - K) / S + 1.
69 int OW = (IW + PWB + PWE - KW) / SW + 1;
70 int OH = (IH + PHB + PHE - KH) / SH + 1;
71 int OD = dims_size == 5 ? (ID + PDB + PDE - KD) / SD + 1 : 1;
74 const data_t *src_data = src.readOnly();
75 data_t *dst_data = dst.data();
// Sanity check: channel count must match the destination blob.
77 IE_ASSERT(OC == dst.dims()[dims_size - 2]);
84 if (prm._type == PoolingLayer::MAX) {
// MAX pooling: for each output element take the maximum over the window,
// skipping positions that fall into padding.
85 for (int c = 0; c < OC; c++) {
87 for (int od = 0; od < OD; od++) {
88 int cd = cc + od * k1;
89 for (int oh = 0; oh < OH; oh++) {
90 int ch = cd + oh * OW;
91 for (int ow = 0; ow < OW; ow++) {
// out_ref starts uninitialized-by-convention; the first in-bounds
// element initializes it (is_initialized flag).
94 data_t out_ref = data_t(0);
95 bool is_initialized = false;
97 for (int kd = 0; kd < KD; kd++) {
// NOTE(review): mixed int/0lu ternary promotes the expression to
// unsigned long before the int conversion; works on common
// platforms but is fragile — worth cleaning up in the full source.
98 int id = dims_size == 5 ? od * SD - PDB + kd : 0lu;
99 if (id < 0 || id >= ID) continue;
100 for (int kh = 0; kh < KH; kh++) {
101 int ih = oh * SH - PHB + kh;
102 if (ih < 0 || ih >= IH) continue;
103 for (int kw = 0; kw < KW; kw++) {
104 int iw = ow * SW - PWB + kw;
105 if (iw < 0 || iw >= IW) continue;
111 data_t d = src_data[iidx];
112 if (!is_initialized) {
114 is_initialized = true;
122 dst_data[oidx] = out_ref;
127 } else if (prm._type == PoolingLayer::AVG) {
// AVG pooling. include_padding controls whether padded positions count
// toward the divisor: padding is included only when exclude-pad is off
// AND there is a non-zero begin padding.
129 bool include_padding = false;
130 bool not_zero_l = false;
131 for (auto lr : prm.pads_begin) {
137 if (!prm._exclude_pad && not_zero_l)
138 include_padding = true;
// Precomputed window-end helpers (K - pad_begin), used to clip window
// upper bounds against the padded input extents below.
140 int PDBKD = KD - PDB,
147 for (int c = 0; c < OC; c++) {
149 for (int od = 0; od < OD; od++) {
150 int cd = cc + od * k1;
151 int id_start = od * SD - PDB;
152 int id_end = std::min(od * SD + PDBKD, IDPDE);
153 for (int oh = 0; oh < OH; oh++) {
154 int ch = cd + oh * OW;
155 int ih_start = oh * SH - PHB;
156 int ih_end = std::min(oh * SH + PHBKH, IHPHE);
157 for (int ow = 0; ow < OW; ow++) {
158 size_t oidx = ch + ow;
159 dst_data[oidx] = (data_t)0;
160 int iw_start = ow * SW - PWB;
161 int iw_end = std::min(ow * SW + PWBKW, IWPWE);
// Divisor including padded positions (window size clipped to the
// padded extent).
164 double num_summands = (ih_end - ih_start) * (iw_end - iw_start) * (id_end - id_start);
// Clip the window to the real (unpadded) input.
166 id_start = std::max(id_start, 0);
167 ih_start = std::max(ih_start, 0);
168 iw_start = std::max(iw_start, 0);
169 id_end = std::min(id_end, ID);
170 ih_end = std::min(ih_end, IH);
171 iw_end = std::min(iw_end, IW);
// Excluding padding: divisor counts only in-bounds elements.
173 if (!include_padding)
174 num_summands = (id_end - id_start) * (ih_end - ih_start) * (iw_end - iw_start);
175 if (num_summands == 0.0) continue;
// Accumulate in double to avoid precision loss for float data.
178 for (int id = id_start; id < id_end; ++id) {
179 for (int ih = ih_start; ih < ih_end; ++ih) {
180 for (int iw = iw_start; iw < iw_end; ++iw) {
186 dst += (double)src_data[iidx];
189 dst_data[oidx] = (data_t)(dst / num_summands);
// Parameterized test fixture: builds a single-Pooling-layer IR from the test
// parameters, runs it through the MKLDNN graph, and compares the output
// against the scalar reference implementation (ref_pool).
// NOTE(review): this listing appears truncated — several closing braces and
// statements are on lines not visible here; only the assertion operator below
// was changed.
193 class MKLDNNGraphPoolingTests: public TestsCommon,
194 public WithParamInterface<pooling_test_params> {
// IR fragment for the pooling layer; _K_/_KS_/_PB_/_PE_/_PM_/_EP_/_IMPLS_
// placeholders are substituted in getModel().
195 std::string layers_t = R"V0G0N(
196 <layer name="pool" id="1" type="Pooling" precision="FP32">
198 <pooling kernel="_K_"
200 pads_begin="_PB_" pads_end="_PE_"
201 pool-method="_PM_" exclude-pad="_EP_" rounding_type="floor"
202 PrimitivesPriority="_IMPLS_"/>
219 std::string edges_t = R"V0G0N(
220 <edge from-layer="0" from-port="0" to-layer="1" to-port="1"/>
// Builds the complete IR XML for the given parameters.
224 std::string getModel(pooling_test_params p) {
225 std::string model = layers_t;
// Source dims substituted verbatim.
228 for (auto& dim : p.dims) {
229 s_dims += "\n                    <dim>";
230 s_dims += std::to_string(dim) + "</dim>";
232 REPLACE_WITH_STR(model, "__SRC_DIMS__", s_dims);
// Destination dims computed with floor rounding; spatial params are stored
// reversed relative to dims, hence the inx remapping.
235 int k_len = p.kernel.size();
236 for (size_t i = 2lu; i < p.dims.size(); i++) {
237 size_t inx = k_len - i + 1lu;
238 size_t dim = (p.dims[i] + p.pads_begin[inx] + p.pads_end[inx] - p.kernel[inx]) / p.strides[inx] + 1lu;
239 s_dims += "\n                    <dim>";
240 s_dims += std::to_string(dim) + "</dim>";
242 REPLACE_WITH_STR(model, "__DST_DIMS__", s_dims);
244 std::string pool_method;
246 case PoolingLayer::AVG: pool_method = "avg";
248 case PoolingLayer::ROI: pool_method = "roi";
250 default: pool_method = "max";
252 REPLACE_WITH_STR(model, "_PM_", pool_method);
254 std::string exclude_pad = "false";
255 if (p._exclude_pad) exclude_pad = "true";
256 REPLACE_WITH_STR(model, "_EP_", exclude_pad);
258 REPLACE_WITH_NUM(model, "_IN_", p.dims[0]);
259 REPLACE_WITH_NUM(model, "_IC_", p.dims[1]);
261 REPLACE_WITH_NUM_VECTOR_REVERSE(model, "_K_", p.kernel);
262 REPLACE_WITH_NUM_VECTOR_REVERSE(model, "_KS_", p.strides);
263 REPLACE_WITH_NUM_VECTOR_REVERSE(model, "_PB_", p.pads_begin);
264 REPLACE_WITH_NUM_VECTOR_REVERSE(model, "_PE_", p.pads_end);
// Requested implementation priority list, e.g. "cpu:jit,cpu:ref".
267 for (const auto& preferType : p.preferTypes) {
270 impls += "cpu:" + MKLDNNGraphTestClass::getStrPrimitiveDescriptorType(preferType);
272 REPLACE_WITH_STR(model, "_IMPLS_", impls);
274 model = IRTemplateGenerator::getIRTemplate("Pooling_Only", p.dims, "FP32", model, edges_t);
279 virtual void TearDown() {
282 virtual void SetUp() {
284 TestsCommon::SetUp();
285 pooling_test_params p = ::testing::WithParamInterface<pooling_test_params>::GetParam();
286 std::string model = getModel(p);
288 InferenceEngine::CNNNetReader net_reader;
289 ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));
291 MKLDNNGraphTestClass graph;
292 graph.CreateGraph(net_reader.getNetwork());
293 auto& nodes = graph.getNodes();
294 for (int i = 0; i < nodes.size(); i++) {
295 if (nodes[i]->getType() == MKLDNNPlugin::Pooling) {
296 ASSERT_LE(p.num_prim_desc, nodes[i]->getSupportedPrimitiveDescriptors().size());
297 for (size_t j = 0; j < p.num_prim_desc && j < p.comp.size(); j++) {
298 p.comp.at(j)(nodes[i]->getSupportedPrimitiveDescriptors().at(j));
300 ASSERT_NE(nullptr, nodes[i]->getSelectedPrimitiveDescriptor());
// FIX: was `... | p.selectedType`, which is non-zero (assert always passes)
// whenever either operand is non-zero, so the selected implementation was
// never actually checked. impl_desc_type values are bit flags; `&` tests
// that the selected implementation contains the expected type bits.
301 ASSERT_TRUE(nodes[i]->getSelectedPrimitiveDescriptor()->getImplementationType() & p.selectedType);
// Choose layout from rank: 4D -> NCHW, 5D -> NCDHW.
305 InferenceEngine::Layout layout = ANY;
306 switch (p.dims.size()) {
308 layout = InferenceEngine::NCHW;
311 layout = InferenceEngine::NCDHW;
315 InferenceEngine::Blob::Ptr src =
316 InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, p.dims);
318 fill_data(src->buffer(), src->size());
320 InferenceEngine::TBlob<float>* srcPtr = dynamic_cast<InferenceEngine::TBlob<float>*>(src.get());
322 if (srcPtr == nullptr)
323 FAIL() << "Cannot cast blob to TBlob<float>.";
325 InferenceEngine::BlobMap srcs;
326 srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src));
328 InferenceEngine::OutputsDataMap out;
329 out = net_reader.getNetwork().getOutputsInfo();
330 InferenceEngine::BlobMap outputBlobs;
332 std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();
334 InferenceEngine::TBlob<float>::Ptr output;
335 output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
337 outputBlobs[item.first] = output;
339 graph.Infer(srcs, outputBlobs);
// Compare MKLDNN output with the scalar reference within 1e-4.
341 InferenceEngine::TBlob<float> dst_ref(item.second->getTensorDesc());
344 ref_pool(*srcPtr, dst_ref, p);
346 compare(*output, dst_ref, 0.0001f);
347 } catch (const InferenceEngine::details::InferenceEngineException &e) {
353 TEST_P(MKLDNNGraphPoolingTests, TestsPooling) {}
// Test-case table. Initializer order (per pooling_test_params): input dims,
// kernel, strides, pads_begin, pads_end, pool type, exclude-pad, expected
// number of primitive descriptors, expected selected impl type, and an
// optional preferred-impl list. 4D (NCHW) cases first, then 5D (NCDHW).
// NOTE(review): listing appears truncated; confirm field order against the
// full struct definition.
355 INSTANTIATE_TEST_CASE_P(
356 TestsPooling, MKLDNNGraphPoolingTests,
358 /*0*/ pooling_test_params{{1, 3, 228, 228}, {2, 2}, {2, 2}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 6, MKLDNNPlugin::impl_desc_type::jit},
359 pooling_test_params{{1, 3, 228, 228}, {4, 2}, {2, 2}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 4, MKLDNNPlugin::impl_desc_type::jit},
360 pooling_test_params{{1, 3, 228, 228}, {4, 2}, {2, 1}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 4, MKLDNNPlugin::impl_desc_type::jit},
361 pooling_test_params{{1, 3, 228, 228}, {2, 2}, {2, 2}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 6, MKLDNNPlugin::impl_desc_type::ref,
362 {MKLDNNPlugin::impl_desc_type::ref_any}},
363 pooling_test_params{{1, 3, 228, 228}, {4, 2}, {2, 2}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 4, MKLDNNPlugin::impl_desc_type::ref,
364 {MKLDNNPlugin::impl_desc_type::ref_any}},
365 pooling_test_params{{1, 3, 228, 228}, {4, 2}, {2, 1}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 4, MKLDNNPlugin::impl_desc_type::ref,
366 {MKLDNNPlugin::impl_desc_type::ref_any}},
367 pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {1u, 0u}, {0u, 0u}, PoolingLayer::AVG, false, 3u,
368 MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
369 pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {1u, 0u}, {0u, 0u}, PoolingLayer::AVG, false, 3u,
370 MKLDNNPlugin::impl_desc_type::jit },
371 pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {0u, 0u}, {0u, 0u}, PoolingLayer::AVG, true, 3u,
372 MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
373 /*9*/ pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {0u, 0u}, {0u, 0u}, PoolingLayer::AVG, true, 3u,
374 MKLDNNPlugin::impl_desc_type::jit },
375 pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, PoolingLayer::AVG, true, 3u,
376 MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
377 pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, PoolingLayer::AVG, false, 3u,
378 MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
379 pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, PoolingLayer::MAX, false, 3u,
380 MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
381 // TODO Fix jit implementation. End paddings
382 // pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, {2u, 0u}, PoolingLayer::AVG, true, 3u,
383 //                     MKLDNNPlugin::impl_desc_type::jit },
384 // pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, {2u, 0u}, PoolingLayer::AVG, false, 3u,
385 //                     MKLDNNPlugin::impl_desc_type::jit },
386 // pooling_test_params{{1u, 4u, 128u, 128u}, {2u, 2u}, {2u, 2u}, {2u, 2u}, {2u, 0u}, PoolingLayer::MAX, false, 3u,
387 //                     MKLDNNPlugin::impl_desc_type::jit },
// 5D (NCDHW) cases start here.
390 pooling_test_params{{1u, 3u, 16u, 32u, 32u}, {2u, 2u, 2u}, {1u, 1u, 1u}, {0u, 0u, 0u}, {0u, 0u, 0u}, PoolingLayer::MAX, false, 3u,
391 MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
392 pooling_test_params{{1u, 3u, 16u, 32u, 32u}, {2u, 2u, 2u}, {1u, 1u, 1u}, {0u, 0u, 0u}, {0u, 0u, 0u}, PoolingLayer::MAX, false, 3u,
393 MKLDNNPlugin::impl_desc_type::jit },
394 pooling_test_params{{1u, 3u, 16u, 32u, 32u}, {2u, 2u, 2u}, {1u, 1u, 1u}, {1u, 1u, 1u}, {1u, 1u, 1u}, PoolingLayer::MAX, false, 3u,
395 MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
396 pooling_test_params{{1u, 32u, 60u, 60u, 60u}, {2u, 3u, 4u}, {2u, 2u, 2u}, {1u, 1u, 1u}, {1u, 2u, 3u}, PoolingLayer::MAX, false, 3u,
397 MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
398 /*20*/ pooling_test_params{{1u, 3u, 16u, 32u, 32u}, {2u, 2u, 2u}, {1u, 1u, 1u}, {1u, 2u, 3u}, {1u, 2u, 3u}, PoolingLayer::MAX, false, 3u,
399 MKLDNNPlugin::impl_desc_type::jit },
400 pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {1u, 0u, 0u}, {0u, 0u, 0u}, PoolingLayer::AVG, false, 3u,
401 MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
402 pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {1u, 0u, 0u}, {0u, 0u, 0u}, PoolingLayer::AVG, false, 3u,
403 MKLDNNPlugin::impl_desc_type::jit },
404 pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {0u, 0u, 0u}, {0u, 0u, 0u}, PoolingLayer::AVG, true, 3u,
405 MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
406 pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {0u, 0u, 0u}, {0u, 0u, 0u}, PoolingLayer::AVG, true, 3u,
407 MKLDNNPlugin::impl_desc_type::jit },
408 pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {0u, 0u, 0u}, PoolingLayer::AVG, true, 3u,
409 MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
410 pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {2u, 2u, 2u}, PoolingLayer::AVG, true, 3u,
411 MKLDNNPlugin::impl_desc_type::jit },
412 pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {2u, 2u, 2u}, PoolingLayer::AVG, false, 3u,
413 MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
414 pooling_test_params{{1u, 4u, 128u, 128u, 128u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {2u, 2u, 2u}, {2u, 2u, 2u}, PoolingLayer::AVG, false, 3u,
415 MKLDNNPlugin::impl_desc_type::jit } ));
// Dynamic-batch variant: reuses the fixture's getModel(), enables the
// KEY_DYN_BATCH_ENABLED plugin property, and checks that inference on the
// Pooling node still works when the batch is reduced at runtime.
// NOTE(review): this listing appears truncated — several closing braces and
// statements are on lines not visible here.
418 class MKLDNNGraphDynBatchPoolingTests: public MKLDNNGraphPoolingTests {
420 virtual void SetUp() {
422 TestsCommon::SetUp();
423 pooling_test_params p = ::testing::WithParamInterface<pooling_test_params>::GetParam();
424 std::string model = getModel(p);
// MB is the full (maximum) batch; the network is reshaped to it below.
425 size_t MB = p.dims[0];
429 InferenceEngine::CNNNetReader net_reader;
430 ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));
431 InferenceEngine::CNNNetwork network = net_reader.getNetwork();
// setBatchSizeReshape is only available on the concrete CNNNetworkImpl.
432 auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
433 ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
434 InferenceEngine::ResponseDesc resp;
435 InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
436 ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;
438 MKLDNNGraphTestClass graph;
439 graph.setProperty({{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED, InferenceEngine::PluginConfigParams::YES}});
440 graph.CreateGraph(net_reader.getNetwork());
// Choose layout from rank: 4D -> NCHW, 5D -> NCDHW.
443 InferenceEngine::Layout layout = ANY;
444 switch (p.dims.size()) {
446 layout = InferenceEngine::NCHW;
449 layout = InferenceEngine::NCDHW;
452 InferenceEngine::Blob::Ptr src =
453 InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, layout, p.dims);
455 fill_data(src->buffer(), src->size());
457 InferenceEngine::TBlob<float>* srcPtr = dynamic_cast<InferenceEngine::TBlob<float>*>(src.get());
459 if (srcPtr == nullptr)
460 FAIL() << "Cannot cast blob to TBlob<float>.";
462 InferenceEngine::BlobMap srcs;
463 srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src));
465 InferenceEngine::OutputsDataMap out;
466 out = net_reader.getNetwork().getOutputsInfo();
467 InferenceEngine::BlobMap outputBlobs;
469 std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();
471 InferenceEngine::TBlob<float>::Ptr output;
472 output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
474 outputBlobs[item.first] = output;
// Predicate identifying the Pooling node for the dyn-batch checker.
476 auto checkPooling = [](const MKLDNNPlugin::MKLDNNNodePtr& node) {
477 return node->getType() == MKLDNNPlugin::Pooling;
// Verify inference at the full batch (MB) and at batch 1.
479 graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkPooling);
480 graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkPooling);
481 } catch (const InferenceEngine::details::InferenceEngineException &e) {
487 TEST_P(MKLDNNGraphDynBatchPoolingTests, TestsDynBatchPooling) {}
// Dynamic-batch test-case table: 4D MAX-pooling cases with jit and ref
// implementations (same pooling_test_params initializer order as above).
489 INSTANTIATE_TEST_CASE_P(
490 TestsDynBatchPooling, MKLDNNGraphDynBatchPoolingTests,
492 pooling_test_params{{1, 3, 228, 228}, {4, 2}, {2, 1}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 4, MKLDNNPlugin::impl_desc_type::jit},
493 pooling_test_params{{1, 3, 228, 228}, {2, 2}, {2, 2}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 6, MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
494 pooling_test_params{{1, 3, 228, 228}, {4, 2}, {2, 2}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 4, MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}},
495 pooling_test_params{{1, 3, 228, 228}, {4, 2}, {2, 1}, {0, 0}, {0, 0}, PoolingLayer::MAX, false, 4, MKLDNNPlugin::impl_desc_type::ref, {MKLDNNPlugin::impl_desc_type::ref_any}}));