platform/upstream/dldt.git: inference-engine/tests/unit/engines/mkldnn/graph/layers/internal/graph_lrn_test.cpp
// Copyright (C) 2018 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//

#include <gtest/gtest.h>
#include <gmock/gmock-spec-builders.h>
#include "mkldnn_plugin/mkldnn_graph.h"
#include "mock_mkldnn_primitive.hpp"

#include "test_graph.hpp"

#include "single_layer_common.hpp"
#include <mkldnn_plugin/mkldnn_extension_utils.h>
#include <inference_engine/cnn_network_impl.hpp>
#include "tests_common.hpp"


using namespace ::testing;
using namespace std;
using namespace mkldnn;

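// Parameters for one LRN graph test case: input shape, the LRN layer
// attributes (local_size, alpha, beta, k), the minimum number of supported
// primitive descriptors expected for the node, the expected implementation
// type, and optional per-descriptor checks.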
struct lrn_test_params {
    struct {
        size_t n;
        size_t c;
        size_t h;
        size_t w;
    } in;

    size_t local_size;
    float alpha;
    float beta;
    size_t k;

    size_t num_prim_desc;

    int selectedType;

    std::vector<std::function<void(MKLDNNPlugin::PrimitiveDescInfo)>> comp;
};

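// Reference across-channel LRN for a single image (batch 1):
//   dst(c, h, w) = src(c, h, w) * (k + alpha / local_size * sum over c' of src(c', h, w)^2)^(-beta)
// where the sum runs over a window of local_size channels centred on c,
// clamped to [0, C).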
template <typename data_t>
void ref_lrn(const InferenceEngine::TBlob<data_t> &src, InferenceEngine::TBlob<data_t> &dst, lrn_test_params prm)
{
    size_t IW = prm.in.w;
    size_t IH = prm.in.h;
    size_t IC = prm.in.c;

    const data_t *src_data = src.readOnly();
    data_t *dst_data = dst.data();

    for (uint32_t c = 0; c < IC; c++) {
        for (uint32_t h = 0; h < IH; h++) {
            for (uint32_t w = 0; w < IW; w++) {
                uint32_t oidx = c * IH * IW
                                + h * IW + w;

                // Channel window of local_size elements centred on c, clamped to [0, IC).
                uint32_t sz = prm.local_size;
                int32_t c_start = static_cast<int32_t>(c) - static_cast<int32_t>(sz) / 2;
                int32_t c_end = c_start + sz;
                if (c_start < 0) c_start = 0;
                if (c_end > (int32_t)IC) c_end = IC;

                // Sum of squares over the channel window.
                data_t sum = 0.0;
                for (int32_t c1 = c_start; c1 < c_end; c1++) {
                    uint32_t idx = c1 * IH * IW + h * IW + w;
                    data_t s = src_data[idx];

                    sum += s * s;
                }

                data_t norm_coef = powf(static_cast<float>(prm.k) + prm.alpha * sum / sz, -prm.beta);
                dst_data[oidx] = norm_coef * src_data[oidx];
            }
        }
    }
}

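// Builds a single-layer LRN network from the IR template below, checks the
// supported and selected primitive descriptors of the LRN node, then runs
// inference on random input and compares the output against ref_lrn().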
class MKLDNNGraphLrnTests: public TestsCommon,
                                     public WithParamInterface<lrn_test_params> {
    std::string model_t = R"V0G0N(
<Net Name="Lrn_Only" version="2" precision="FP32" batch="1">
    <layers>
        <layer name="in1" type="Input" precision="FP32" id="0">
            <output>
                <port id="0">
                    <dim>_IN_</dim>
                    <dim>_IC_</dim>
                    <dim>_IH_</dim>
                    <dim>_IW_</dim>
                </port>
            </output>
        </layer>
        <layer name="norm" id="1" type="LRN" precision="FP32">
            <lrn local_size="_LS_" alpha="_A_" beta="_B_" k="_K_" region="ACROSS" />

            <input>
                <port id="1">
                    <dim>_IN_</dim>
                    <dim>_IC_</dim>
                    <dim>_IH_</dim>
                    <dim>_IW_</dim>
                </port>
            </input>
            <output>
                <port id="2">
                    <dim>_IN_</dim>
                    <dim>_IC_</dim>
                    <dim>_IH_</dim>
                    <dim>_IW_</dim>
                </port>
            </output>
        </layer>
    </layers>
    <edges>
        <edge from-layer="0" from-port="0" to-layer="1" to-port="1"/>
    </edges>
</Net>
)V0G0N";

protected:
    std::string getModel(lrn_test_params p) {
        std::string model = model_t;

        REPLACE_WITH_NUM(model, "_IW_", p.in.w);
        REPLACE_WITH_NUM(model, "_IH_", p.in.h);
        REPLACE_WITH_NUM(model, "_IC_", p.in.c);
        REPLACE_WITH_NUM(model, "_IN_", p.in.n);

        REPLACE_WITH_NUM(model, "_LS_", p.local_size);
        REPLACE_WITH_NUM(model, "_A_", p.alpha);
        REPLACE_WITH_NUM(model, "_B_", p.beta);
        REPLACE_WITH_NUM(model, "_K_", p.k);

        return model;
    }

    virtual void TearDown() {
    }

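    // Builds the graph, validates the LRN node's primitive descriptors against
    // the test parameters, then infers on random data and checks the result
    // against the reference implementation.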
    virtual void SetUp() {
        try {
            TestsCommon::SetUp();
            lrn_test_params p = ::testing::WithParamInterface<lrn_test_params>::GetParam();
            std::string model = getModel(p);

            InferenceEngine::CNNNetReader net_reader;
            ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));

            MKLDNNGraphTestClass graph;
            graph.CreateGraph(net_reader.getNetwork());
            auto& nodes = graph.getNodes();
            for (size_t i = 0; i < nodes.size(); i++) {
                if (nodes[i]->getType() == MKLDNNPlugin::Lrn) {
                    // The node must expose at least the expected number of primitive
                    // descriptors, and the selected one must match the expected type.
                    ASSERT_LE(p.num_prim_desc, nodes[i]->getSupportedPrimitiveDescriptors().size());
                    for (size_t j = 0; j < p.num_prim_desc && j < p.comp.size(); j++) {
                        p.comp.at(j)(nodes[i]->getSupportedPrimitiveDescriptors().at(j));
                    }
                    ASSERT_NE(nullptr, nodes[i]->getSelectedPrimitiveDescriptor());
                    ASSERT_EQ(p.selectedType,
                              nodes[i]->getSelectedPrimitiveDescriptor()->getImplementationType() & p.selectedType);
                }
            }
            // The graph should contain exactly three nodes: input, LRN and output.
            ASSERT_EQ(3, nodes.size());

            InferenceEngine::SizeVector dims_src = {p.in.n, p.in.c, p.in.h, p.in.w};

            InferenceEngine::Blob::Ptr src = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, InferenceEngine::NCHW, dims_src);
            src->allocate();
            fill_data(src->buffer(), src->size());

            InferenceEngine::TBlob<float>* srcPtr = dynamic_cast<InferenceEngine::TBlob<float>*>(src.get());

            if (srcPtr == nullptr)
                FAIL() << "Cannot cast blob to TBlob<float>.";

            InferenceEngine::BlobMap srcs;
            srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src));

            InferenceEngine::OutputsDataMap out;
            out = net_reader.getNetwork().getOutputsInfo();
            InferenceEngine::BlobMap outputBlobs;

            std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();

            InferenceEngine::TBlob<float>::Ptr output;
            output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
            output->allocate();
            outputBlobs[item.first] = output;

            graph.Infer(srcs, outputBlobs);

            InferenceEngine::TBlob<float> dst_ref(item.second->getTensorDesc());
            dst_ref.allocate();

            ref_lrn(*srcPtr, dst_ref, p);

            compare(*output, dst_ref);
        } catch (const InferenceEngine::details::InferenceEngineException &e) {
            FAIL() << e.what();
        }
    }
};

TEST_P(MKLDNNGraphLrnTests, TestsLrn) {}

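// Two cases: a 3-channel input expected to select the reference (ref_any)
// implementation, with layout checks on its first three primitive descriptors,
// and a 16-channel input expected to select the JIT implementation.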
INSTANTIATE_TEST_CASE_P(
        TestsLrn, MKLDNNGraphLrnTests,
        ::testing::Values(
                lrn_test_params{
                        {1, 3, 228, 228},
                        5, 0.0001f, 0.75f, 1, 3, MKLDNNPlugin::impl_desc_type::ref_any, {
                                [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                                    ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref_any, impl.getImplementationType());
                                    ASSERT_EQ(1, impl.getConfig().inConfs.size());
                                    ASSERT_EQ(1, impl.getConfig().outConfs.size());
                                    ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
                                    ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
                                },
                                [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                                    ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref_any, impl.getImplementationType());
                                    ASSERT_EQ(1, impl.getConfig().inConfs.size());
                                    ASSERT_EQ(1, impl.getConfig().outConfs.size());
                                    ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().inConfs.at(0).desc.getLayout());
                                    ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().outConfs.at(0).desc.getLayout());
                                },
                                [](MKLDNNPlugin::PrimitiveDescInfo impl) {
                                    ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref_any, impl.getImplementationType());
                                    ASSERT_EQ(1, impl.getConfig().inConfs.size());
                                    ASSERT_EQ(1, impl.getConfig().outConfs.size());
                                    ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().inConfs.at(0).desc.getLayout());
                                    ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().outConfs.at(0).desc.getLayout());
                                }
                        }},
                lrn_test_params{{1, 16, 228, 228}, 5, 0.0001f, 0.75f, 1, 3, MKLDNNPlugin::impl_desc_type::jit}));

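// Same single-LRN network, but built with dynamic batching enabled
// (KEY_DYN_BATCH_ENABLED): the network is reshaped to a batch of at least 2
// and checkDynBatch() exercises the LRN node at the full batch size and at
// batch 1.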
class MKLDNNGraphDynBatchLrnTests: public MKLDNNGraphLrnTests {
protected:
    virtual void SetUp() {
        try {
            TestsCommon::SetUp();
            lrn_test_params p = ::testing::WithParamInterface<lrn_test_params>::GetParam();
            std::string model = getModel(p);
            size_t MB = p.in.n;
            if (MB < 2)
                MB = 2;

            InferenceEngine::CNNNetReader net_reader;
            ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));
            InferenceEngine::CNNNetwork network = net_reader.getNetwork();
            auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
            ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
            InferenceEngine::ResponseDesc resp;
            InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
            ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;

            MKLDNNGraphTestClass graph;
            graph.setProperty({{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED, InferenceEngine::PluginConfigParams::YES}});
            graph.CreateGraph(net_reader.getNetwork());

            InferenceEngine::SizeVector dims_src = {MB, p.in.c, p.in.h, p.in.w};

            InferenceEngine::Blob::Ptr src = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, InferenceEngine::NCHW, dims_src);
            src->allocate();
            fill_data(src->buffer(), src->size());

            InferenceEngine::TBlob<float>* srcPtr = dynamic_cast<InferenceEngine::TBlob<float>*>(src.get());

            if (srcPtr == nullptr)
                FAIL() << "Cannot cast blob to TBlob<float>.";

            InferenceEngine::BlobMap srcs;
            srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src));

            InferenceEngine::OutputsDataMap out;
            out = net_reader.getNetwork().getOutputsInfo();
            InferenceEngine::BlobMap outputBlobs;

            std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();

            InferenceEngine::TBlob<float>::Ptr output;
            output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
            output->allocate();
            outputBlobs[item.first] = output;

            auto checkLRN = [](const MKLDNNPlugin::MKLDNNNodePtr& node) {
                return node->getType() == MKLDNNPlugin::Lrn;
            };
            graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkLRN);
            graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkLRN);
        } catch (const InferenceEngine::details::InferenceEngineException &e) {
            FAIL() << e.what();
        }
    }
};

TEST_P(MKLDNNGraphDynBatchLrnTests, TestsDynBatchLrn) {}

INSTANTIATE_TEST_CASE_P(
        TestsDynBatchLrn, MKLDNNGraphDynBatchLrnTests,
        ::testing::Values(
                lrn_test_params{{1, 3, 228, 228}, 5, 0.0001f, 0.75f, 1, 3, MKLDNNPlugin::impl_desc_type::ref_any},
                lrn_test_params{{1, 16, 228, 228}, 5, 0.0001f, 0.75f, 1, 3, MKLDNNPlugin::impl_desc_type::jit}));