1 // Copyright (C) 2018-2019 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
5 #include <gtest/gtest.h>
6 #include <gmock/gmock-spec-builders.h>
7 #include "mkldnn_plugin/mkldnn_graph.h"
9 #include "test_graph.hpp"
11 #include "single_layer_common.hpp"
12 #include <mkldnn_plugin/mkldnn_extension_utils.h>
13 #include <inference_engine/cnn_network_impl.hpp>
14 #include "tests_common.hpp"
17 using namespace ::testing;
19 using namespace mkldnn;
// Parameters describing one LRN (Local Response Normalization) test case
// (input dims, LRN hyper-parameters, expected implementation — most members
// are declared elsewhere in this struct, outside this excerpt).
struct lrn_test_params {
    // Optional per-descriptor checkers: entry j is invoked with the j-th
    // supported primitive descriptor of the LRN node and asserts on its
    // implementation type and input/output tensor layouts.
    std::vector<std::function<void(MKLDNNPlugin::PrimitiveDescInfo)>> comp;
// Scalar reference implementation of across-channel LRN, used as the golden
// result to compare against the MKLDNN graph output:
//   dst[c,h,w] = src[c,h,w] * (1 + alpha * sum / local_size) ^ (-beta)
// where 'sum' accumulates squared activations over a window of up to
// local_size channels centred on c (the accumulation statement itself is
// outside this excerpt — NOTE(review): confirm it is `sum += s * s`).
template <typename data_t>
void ref_lrn(const InferenceEngine::TBlob<data_t> &src, InferenceEngine::TBlob<data_t> &dst, lrn_test_params prm)
    const data_t *src_data = src.readOnly();  // input read-only
    data_t *dst_data = dst.data();            // output written in place
    for (uint32_t c = 0; c < IC; c++) {
        for (uint32_t h = 0; h < IH; h++) {
            for (uint32_t w = 0; w < IW; w++) {
                // Linear offset of the current (c, h, w) element, NCHW order.
                uint32_t oidx = c * IH * IW
                uint32_t sz = prm.local_size;
                // Channel window [c_start, c_end) centred on c, clamped to
                // the valid range [0, IC). NOTE(review): (c - sz / 2) is
                // computed in unsigned arithmetic and relies on the wrapped
                // value converting back to a negative int32_t for small c.
                int32_t c_start = c - sz / 2;
                int32_t c_end = c_start + sz;
                if (c_start < 0) c_start = 0;
                if (c_end > (int32_t)IC) c_end = IC;
                // Walk the (clamped) channel window around c.
                for (int32_t c1 = c_start; c1 < c_end; c1++) {
                    uint32_t idx = c1 * IH * IW + h * IW + w;
                    data_t s = src_data[idx];
                // Normalization coefficient. Note the divisor is the full
                // local_size, not the clamped window width.
                data_t norm_coef = powf(1. + prm.alpha * sum / sz, -prm.beta);
                dst_data[oidx] = norm_coef * src_data[oidx];
// Value-parameterized fixture: builds a single-layer Input -> LRN network
// from an IR XML template, creates an MKLDNN graph from it, validates the
// LRN node's supported/selected primitive descriptors, then runs inference
// and compares the output against the scalar reference ref_lrn().
// All work happens in SetUp(); the TEST_P body is empty.
class MKLDNNGraphLrnTests: public TestsCommon,
                           public WithParamInterface<lrn_test_params> {
    // IR template. The _IW_/_IH_/_IC_/_IN_ (input dims) and _LS_/_A_/_B_/_K_
    // (LRN hyper-parameter) placeholders are substituted by getModel().
    // Region is fixed to ACROSS-channel normalization.
    std::string model_t = R"V0G0N(
<Net Name="Lrn_Only" version="2" precision="FP32" batch="1">
    <layer name="in1" type="Input" precision="FP32" id="0">
    <layer name="norm" id="1" type="LRN" precision="FP32">
        <lrn local_size="_LS_" alpha="_A_" beta="_B_" k="_K_" region="ACROSS" />
    <edge from-layer="0" from-port="0" to-layer="1" to-port="1"/>

    // Returns the IR XML with every template placeholder replaced by the
    // values from the given test parameters.
    std::string getModel(lrn_test_params p) {
        std::string model = model_t;

        // Input tensor dimensions (NCHW).
        REPLACE_WITH_NUM(model, "_IW_", p.in.w);
        REPLACE_WITH_NUM(model, "_IH_", p.in.h);
        REPLACE_WITH_NUM(model, "_IC_", p.in.c);
        REPLACE_WITH_NUM(model, "_IN_", p.in.n);

        // LRN hyper-parameters.
        REPLACE_WITH_NUM(model, "_LS_", p.local_size);
        REPLACE_WITH_NUM(model, "_A_", p.alpha);
        REPLACE_WITH_NUM(model, "_B_", p.beta);
        REPLACE_WITH_NUM(model, "_K_", p.k);

    virtual void TearDown() {

    // Entire test scenario: parse IR, build graph, check descriptors,
    // infer, and compare with the reference.
    virtual void SetUp() {
        TestsCommon::SetUp();
        lrn_test_params p = ::testing::WithParamInterface<lrn_test_params>::GetParam();
        std::string model = getModel(p);

        // Parse the generated IR.
        InferenceEngine::CNNNetReader net_reader;
        ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));

        // Build the MKLDNN graph and inspect the LRN node.
        MKLDNNGraphTestClass graph;
        graph.CreateGraph(net_reader.getNetwork());
        auto& nodes = graph.getNodes();
        for (int i = 0; i < nodes.size(); i++) {
            if (nodes[i]->getType() == MKLDNNPlugin::Lrn) {
                // At least num_prim_desc implementations must be offered.
                ASSERT_LE(p.num_prim_desc, nodes[i]->getSupportedPrimitiveDescriptors().size());
                // Run each per-descriptor checker supplied by the test case.
                for (size_t j = 0; j < p.num_prim_desc && j < p.comp.size(); j++) {
                    p.comp.at(j)(nodes[i]->getSupportedPrimitiveDescriptors().at(j));
                ASSERT_NE(nullptr, nodes[i]->getSelectedPrimitiveDescriptor());
                // selectedType acts as a bit mask: the selected implementation
                // must carry all bits the test case requested.
                ASSERT_EQ(p.selectedType,
                          nodes[i]->getSelectedPrimitiveDescriptor()->getImplementationType() & p.selectedType);
        // Expected graph: input node, LRN node, output node.
        ASSERT_EQ(3, nodes.size());

        // Create and fill the input blob.
        InferenceEngine::SizeVector dims_src = {p.in.n, p.in.c, p.in.h, p.in.w};
        InferenceEngine::Blob::Ptr src = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, InferenceEngine::NCHW, dims_src);
        fill_data(src->buffer(), src->size());
        InferenceEngine::TBlob<float>* srcPtr = dynamic_cast<InferenceEngine::TBlob<float>*>(src.get());
        if (srcPtr == nullptr)
            FAIL() << "Cannot cast blob to TBlob<float>.";

        // "in1" must match the input layer name in the IR template.
        InferenceEngine::BlobMap srcs;
        srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src));

        // Allocate an output blob matching the network's (single) output.
        InferenceEngine::OutputsDataMap out;
        out = net_reader.getNetwork().getOutputsInfo();
        InferenceEngine::BlobMap outputBlobs;
        std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();
        InferenceEngine::TBlob<float>::Ptr output;
        output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
        outputBlobs[item.first] = output;

        // Run inference and compare against the scalar reference result.
        graph.Infer(srcs, outputBlobs);
        InferenceEngine::TBlob<float> dst_ref(item.second->getTensorDesc());
        ref_lrn(*srcPtr, dst_ref, p);
        compare(*output, dst_ref);
    } catch (const InferenceEngine::details::InferenceEngineException &e) {
204 TEST_P(MKLDNNGraphLrnTests, TestsLrn) {}
// Static-batch cases. The first case expects the reference implementation
// (ref_any) and verifies the layouts of each offered primitive descriptor;
// the last case (16 channels) expects the jit implementation to be selected.
INSTANTIATE_TEST_CASE_P(
        TestsLrn, MKLDNNGraphLrnTests,
                5, 0.0001f, 0.75f, 1, 3, MKLDNNPlugin::impl_desc_type::ref_any, {
                    // Descriptor 0: reference impl with plain NCHW in/out.
                    [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                        ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref_any, impl.getImplementationType());
                        ASSERT_EQ(1, impl.getConfig().inConfs.size());
                        ASSERT_EQ(1, impl.getConfig().outConfs.size());
                        ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
                        ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
                    // Descriptor 1: reference impl with blocked in/out layouts.
                    [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                        ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref_any, impl.getImplementationType());
                        ASSERT_EQ(1, impl.getConfig().inConfs.size());
                        ASSERT_EQ(1, impl.getConfig().outConfs.size());
                        ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().inConfs.at(0).desc.getLayout());
                        ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().outConfs.at(0).desc.getLayout());
                    // Descriptor 2: reference impl with blocked in/out layouts.
                    [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                        ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref_any, impl.getImplementationType());
                        ASSERT_EQ(1, impl.getConfig().inConfs.size());
                        ASSERT_EQ(1, impl.getConfig().outConfs.size());
                        ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().inConfs.at(0).desc.getLayout());
                        ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().outConfs.at(0).desc.getLayout());
                // 16-channel input: jit implementation expected.
                lrn_test_params{{1, 16, 228, 228}, 5, 0.0001f, 0.75f, 1, 3, MKLDNNPlugin::impl_desc_type::jit}));
// Same topology as MKLDNNGraphLrnTests, but exercises dynamic batching:
// the network is reshaped to batch MB, the graph is created with
// DYN_BATCH_ENABLED, and inference is verified both at the full batch and
// at batch 1 via graph.checkDynBatch().
class MKLDNNGraphDynBatchLrnTests: public MKLDNNGraphLrnTests {
    virtual void SetUp() {
        TestsCommon::SetUp();
        lrn_test_params p = ::testing::WithParamInterface<lrn_test_params>::GetParam();
        std::string model = getModel(p);
        // NOTE(review): MB (the maximum/dynamic batch size) is defined
        // outside this excerpt, presumably derived from p.in.n — confirm.

        // Parse the generated IR.
        InferenceEngine::CNNNetReader net_reader;
        ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));
        InferenceEngine::CNNNetwork network = net_reader.getNetwork();
        // Reshape the network to the maximum batch through the internal
        // CNNNetworkImpl interface (public API has no such reshape here).
        auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
        ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
        InferenceEngine::ResponseDesc resp;
        InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
        ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;

        // Build the graph with dynamic batching enabled.
        MKLDNNGraphTestClass graph;
        graph.setProperty({{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED, InferenceEngine::PluginConfigParams::YES}});
        graph.CreateGraph(net_reader.getNetwork());

        // Input blob sized for the maximum batch MB.
        InferenceEngine::SizeVector dims_src = {MB, p.in.c, p.in.h, p.in.w};
        InferenceEngine::Blob::Ptr src = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, InferenceEngine::NCHW, dims_src);
        fill_data(src->buffer(), src->size());
        InferenceEngine::TBlob<float>* srcPtr = dynamic_cast<InferenceEngine::TBlob<float>*>(src.get());
        if (srcPtr == nullptr)
            FAIL() << "Cannot cast blob to TBlob<float>.";

        // "in1" must match the input layer name in the IR template.
        InferenceEngine::BlobMap srcs;
        srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src));

        // Allocate an output blob matching the network's (single) output.
        InferenceEngine::OutputsDataMap out;
        out = net_reader.getNetwork().getOutputsInfo();
        InferenceEngine::BlobMap outputBlobs;
        std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();
        InferenceEngine::TBlob<float>::Ptr output;
        output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
        outputBlobs[item.first] = output;

        // Predicate selecting the LRN node whose dynamic-batch behaviour is
        // being verified.
        auto checkLRN = [](const MKLDNNPlugin::MKLDNNNodePtr& node) {
            return node->getType() == MKLDNNPlugin::Lrn;
        // Run at the full batch and at batch 1; both must succeed.
        graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkLRN);
        graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkLRN);
    } catch (const InferenceEngine::details::InferenceEngineException &e) {
296 TEST_P(MKLDNNGraphDynBatchLrnTests, TestsDynBatchLrn) {}
// Dynamic-batch cases: 3-channel input expecting the reference (ref_any)
// implementation, and 16-channel input expecting the jit implementation.
INSTANTIATE_TEST_CASE_P(
        TestsDynBatchLrn, MKLDNNGraphDynBatchLrnTests,
                lrn_test_params{{1, 3, 228, 228}, 5, 0.0001f, 0.75f, 1, 3, MKLDNNPlugin::impl_desc_type::ref_any},
                lrn_test_params{{1, 16, 228, 228}, 5, 0.0001f, 0.75f, 1, 3, MKLDNNPlugin::impl_desc_type::jit}));