// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <gtest/gtest.h>
#include <gmock/gmock-spec-builders.h>
#include <cmath>
#include "mkldnn_plugin/mkldnn_graph.h"

#include "test_graph.hpp"

#include "single_layer_common.hpp"
#include <mkldnn_plugin/mkldnn_extension_utils.h>
#include <inference_engine/cnn_network_impl.hpp>
#include "tests_common.hpp"


using namespace ::testing;
using namespace std;
using namespace mkldnn;


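// Parameters of a single LRN test case: input shape (NCHW), the LRN attributes
// (local_size, alpha, beta, k), the expected number of supported primitive
// descriptors, the expected implementation type, and optional per-descriptor
// checks.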
struct lrn_test_params {
    struct {
        size_t n;
        size_t c;
        size_t h;
        size_t w;
    } in;

    size_t local_size;
    float alpha;
    float beta;
    size_t k;

    size_t num_prim_desc;

    int selectedType;

    std::vector<std::function<void(MKLDNNPlugin::PrimitiveDescInfo)>> comp;
};

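// Reference across-channel LRN. For each (c, h, w) position:
//   dst = src * (k + alpha * sum(src^2) / local_size)^(-beta),
// where the sum runs over the local_size channels centered on c, clamped to
// [0, C). The reference walks a single batch; every instantiated test case
// below uses n == 1.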
template <typename data_t>
void ref_lrn(const InferenceEngine::TBlob<data_t> &src, InferenceEngine::TBlob<data_t> &dst, lrn_test_params prm)
{
    size_t IW = prm.in.w;
    size_t IH = prm.in.h;
    size_t IC = prm.in.c;

    const data_t *src_data = src.readOnly();
    data_t *dst_data = dst.data();

    for (uint32_t c = 0; c < IC; c++) {
        for (uint32_t h = 0; h < IH; h++) {
            for (uint32_t w = 0; w < IW; w++) {
                uint32_t oidx = c * IH * IW
                                + h * IW + w;

                uint32_t sz = prm.local_size;
                int32_t c_start = (int32_t)c - (int32_t)(sz / 2);
                int32_t c_end = c_start + sz;
                if (c_start < 0) c_start = 0;
                if (c_end > (int32_t)IC) c_end = IC;
                data_t sum = 0.0;
                for (int32_t c1 = c_start; c1 < c_end; c1++) {
                    uint32_t idx = c1 * IH * IW + h * IW + w;
                    data_t s = src_data[idx];

                    sum += s * s;
                }

                data_t norm_coef = powf((data_t)prm.k + prm.alpha * sum / sz, -prm.beta);
                dst_data[oidx] = norm_coef * src_data[oidx];
            }
        }
    }
}

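// Builds a one-layer "Input -> LRN" IR from the template below, runs it through
// the MKLDNN graph, and compares the result against ref_lrn().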
class MKLDNNGraphLrnTests: public TestsCommon,
                           public WithParamInterface<lrn_test_params> {
    std::string model_t = R"V0G0N(
<Net Name="Lrn_Only" version="2" precision="FP32" batch="1">
    <layers>
        <layer name="in1" type="Input" precision="FP32" id="0">
            <output>
                <port id="0">
                    <dim>_IN_</dim>
                    <dim>_IC_</dim>
                    <dim>_IH_</dim>
                    <dim>_IW_</dim>
                </port>
            </output>
        </layer>
        <layer name="norm" id="1" type="LRN" precision="FP32">
            <lrn local_size="_LS_" alpha="_A_" beta="_B_" k="_K_" region="ACROSS" />

            <input>
                <port id="1">
                    <dim>_IN_</dim>
                    <dim>_IC_</dim>
                    <dim>_IH_</dim>
                    <dim>_IW_</dim>
                </port>
            </input>
            <output>
                <port id="2">
                    <dim>_IN_</dim>
                    <dim>_IC_</dim>
                    <dim>_IH_</dim>
                    <dim>_IW_</dim>
                </port>
            </output>
        </layer>
    </layers>
    <edges>
        <edge from-layer="0" from-port="0" to-layer="1" to-port="1"/>
    </edges>
</Net>
)V0G0N";

protected:
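    // Instantiates the IR template with the shape and LRN attributes of the
    // given test case.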
    std::string getModel(lrn_test_params p) {
        std::string model = model_t;

        REPLACE_WITH_NUM(model, "_IW_", p.in.w);
        REPLACE_WITH_NUM(model, "_IH_", p.in.h);
        REPLACE_WITH_NUM(model, "_IC_", p.in.c);
        REPLACE_WITH_NUM(model, "_IN_", p.in.n);

        REPLACE_WITH_NUM(model, "_LS_", p.local_size);
        REPLACE_WITH_NUM(model, "_A_", p.alpha);
        REPLACE_WITH_NUM(model, "_B_", p.beta);
        REPLACE_WITH_NUM(model, "_K_", p.k);

        return model;
    }

    virtual void TearDown() {
    }

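    // Creates the graph, validates the supported primitive descriptors of the
    // LRN node, then infers random input and checks the output against the
    // reference implementation.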
    virtual void SetUp() {
        try {
            TestsCommon::SetUp();
            lrn_test_params p = ::testing::WithParamInterface<lrn_test_params>::GetParam();
            std::string model = getModel(p);

            InferenceEngine::CNNNetReader net_reader;
            ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));

            MKLDNNGraphTestClass graph;
            graph.CreateGraph(net_reader.getNetwork());
            auto& nodes = graph.getNodes();
            for (size_t i = 0; i < nodes.size(); i++) {
                if (nodes[i]->getType() == MKLDNNPlugin::Lrn) {
                    ASSERT_LE(p.num_prim_desc, nodes[i]->getSupportedPrimitiveDescriptors().size());
                    for (size_t j = 0; j < p.num_prim_desc && j < p.comp.size(); j++) {
                        p.comp.at(j)(nodes[i]->getSupportedPrimitiveDescriptors().at(j));
                    }
                    ASSERT_NE(nullptr, nodes[i]->getSelectedPrimitiveDescriptor());
                    ASSERT_EQ(p.selectedType,
                              nodes[i]->getSelectedPrimitiveDescriptor()->getImplementationType() & p.selectedType);
                }
            }
            ASSERT_EQ(3, nodes.size());

            InferenceEngine::SizeVector dims_src = {p.in.n, p.in.c, p.in.h, p.in.w};

            InferenceEngine::Blob::Ptr src = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, InferenceEngine::NCHW, dims_src);
            src->allocate();
            fill_data(src->buffer(), src->size());

            InferenceEngine::TBlob<float>* srcPtr = dynamic_cast<InferenceEngine::TBlob<float>*>(src.get());

            if (srcPtr == nullptr)
                FAIL() << "Cannot cast blob to TBlob<float>.";

            InferenceEngine::BlobMap srcs;
            srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src));

            InferenceEngine::OutputsDataMap out;
            out = net_reader.getNetwork().getOutputsInfo();
            InferenceEngine::BlobMap outputBlobs;

            std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();

            InferenceEngine::TBlob<float>::Ptr output;
            output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
            output->allocate();
            outputBlobs[item.first] = output;

            graph.Infer(srcs, outputBlobs);

            InferenceEngine::TBlob<float> dst_ref(item.second->getTensorDesc());
            dst_ref.allocate();

            ref_lrn(*srcPtr, dst_ref, p);

            compare(*output, dst_ref);
        } catch (const InferenceEngine::details::InferenceEngineException &e) {
            FAIL() << e.what();
        }
    }
};

TEST_P(MKLDNNGraphLrnTests, TestsLrn) {}

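// The 3-channel case expects the reference implementation and checks the
// layouts offered by each supported descriptor; the 16-channel case expects a
// JIT implementation to be selected.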
INSTANTIATE_TEST_CASE_P(
        TestsLrn, MKLDNNGraphLrnTests,
        ::testing::Values(
                lrn_test_params{
                        {1, 3, 228, 228},
                        5, 0.0001f, 0.75f, 1, 3, MKLDNNPlugin::impl_desc_type::ref_any, {
                                [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                                    ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref_any, impl.getImplementationType());
                                    ASSERT_EQ(1, impl.getConfig().inConfs.size());
                                    ASSERT_EQ(1, impl.getConfig().outConfs.size());
                                    ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
                                    ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
                                },
                                [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                                    ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref_any, impl.getImplementationType());
                                    ASSERT_EQ(1, impl.getConfig().inConfs.size());
                                    ASSERT_EQ(1, impl.getConfig().outConfs.size());
                                    ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().inConfs.at(0).desc.getLayout());
                                    ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().outConfs.at(0).desc.getLayout());
                                },
                                [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                                    ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref_any, impl.getImplementationType());
                                    ASSERT_EQ(1, impl.getConfig().inConfs.size());
                                    ASSERT_EQ(1, impl.getConfig().outConfs.size());
                                    ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().inConfs.at(0).desc.getLayout());
                                    ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().outConfs.at(0).desc.getLayout());
                                }
                        }},
                lrn_test_params{{1, 16, 228, 228}, 5, 0.0001f, 0.75f, 1, 3, MKLDNNPlugin::impl_desc_type::jit}));

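// Same topology, but built with dynamic batch enabled: the network batch is
// reshaped to at least 2 and inference is verified both at the full batch and
// at batch 1.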
class MKLDNNGraphDynBatchLrnTests: public MKLDNNGraphLrnTests {
protected:
    virtual void SetUp() {
        try {
            TestsCommon::SetUp();
            lrn_test_params p = ::testing::WithParamInterface<lrn_test_params>::GetParam();
            std::string model = getModel(p);
            size_t MB = p.in.n;
            if (MB < 2)
                MB = 2;

            InferenceEngine::CNNNetReader net_reader;
            ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));
            InferenceEngine::CNNNetwork network = net_reader.getNetwork();
            auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
            ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
            InferenceEngine::ResponseDesc resp;
            InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
            ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;

            MKLDNNGraphTestClass graph;
            graph.setProperty({{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED, InferenceEngine::PluginConfigParams::YES}});
            graph.CreateGraph(net_reader.getNetwork());

            InferenceEngine::SizeVector dims_src = {MB, p.in.c, p.in.h, p.in.w};

            InferenceEngine::Blob::Ptr src = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, InferenceEngine::NCHW, dims_src);
            src->allocate();
            fill_data(src->buffer(), src->size());

            InferenceEngine::TBlob<float>* srcPtr = dynamic_cast<InferenceEngine::TBlob<float>*>(src.get());

            if (srcPtr == nullptr)
                FAIL() << "Cannot cast blob to TBlob<float>.";

            InferenceEngine::BlobMap srcs;
            srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src));

            InferenceEngine::OutputsDataMap out;
            out = net_reader.getNetwork().getOutputsInfo();
            InferenceEngine::BlobMap outputBlobs;

            std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();

            InferenceEngine::TBlob<float>::Ptr output;
            output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
            output->allocate();
            outputBlobs[item.first] = output;

            auto checkLRN = [](const MKLDNNPlugin::MKLDNNNodePtr& node) {
                return node->getType() == MKLDNNPlugin::Lrn;
            };
            graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkLRN);
            graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkLRN);
        } catch (const InferenceEngine::details::InferenceEngineException &e) {
            FAIL() << e.what();
        }
    }
};

TEST_P(MKLDNNGraphDynBatchLrnTests, TestsDynBatchLrn) {}

INSTANTIATE_TEST_CASE_P(
        TestsDynBatchLrn, MKLDNNGraphDynBatchLrnTests,
        ::testing::Values(
                lrn_test_params{{1, 3, 228, 228}, 5, 0.0001f, 0.75f, 1, 3, MKLDNNPlugin::impl_desc_type::ref_any},
                lrn_test_params{{1, 16, 228, 228}, 5, 0.0001f, 0.75f, 1, 3, MKLDNNPlugin::impl_desc_type::jit}));