Publishing R3
[platform/upstream/dldt.git] / inference-engine / tests / unit / engines / mkldnn / graph / layers / internal / graph_relu_test.cpp
1 // Copyright (C) 2018 Intel Corporation
2 //
3 // SPDX-License-Identifier: Apache-2.0
4 //
5
6 #include <gtest/gtest.h>
7 #include <gmock/gmock-spec-builders.h>
8 #include "mkldnn_plugin/mkldnn_graph.h"
9 #include "mock_mkldnn_primitive.hpp"
10
11 #include "test_graph.hpp"
12
13 #include "single_layer_common.hpp"
14 #include <mkldnn_plugin/mkldnn_extension_utils.h>
15 #include "tests_common.hpp"
16
17
18 using namespace ::testing;
19 using namespace std;
20 using namespace mkldnn;
21
22
// Parameters for one parameterized ReLU graph test case.
struct relu_test_params {
    // Input tensor dimensions in NCHW order.
    struct {
        size_t n;
        size_t c;
        size_t h;
        size_t w;
    } in;

    // Negative slope for leaky ReLU; 0.0f gives standard ReLU.
    // NOTE(review): "n_clope" looks like a historical typo for "n_slope";
    // kept as-is because the name is part of the aggregate's interface.
    float n_clope;

    // Lower bound on the number of supported primitive descriptors the
    // Activation node must report (checked with ASSERT_LE in SetUp).
    size_t num_prim_desc;

    // Implementation type expected for the selected primitive descriptor.
    MKLDNNPlugin::impl_desc_type selectedType;

    // Optional per-descriptor validation callbacks; callback j is invoked on
    // supported primitive descriptor j.
    std::vector<std::function<void(MKLDNNPlugin::PrimitiveDescInfo)>> comp;
};
39
40 template <typename data_t>
41 void ref_relu(const InferenceEngine::TBlob<data_t> &src, InferenceEngine::TBlob<data_t> &dst, relu_test_params prm)
42 {
43     size_t IW = prm.in.w;
44     size_t IH = prm.in.h;
45     size_t IC = prm.in.c;
46
47     const data_t *src_data = src.readOnly();
48     data_t *dst_data = dst.data();
49
50     for (uint32_t c = 0; c < IC; c++) {
51         for (uint32_t h = 0; h < IH; h++) {
52             for (uint32_t w = 0; w < IW; w++) {
53                 uint32_t oidx = c * IH * IW
54                                 + h * IW + w;
55
56                 dst_data[oidx] = src_data[oidx] >= 0.0 ?
57                                  src_data[oidx] :
58                                  src_data[oidx] * prm.n_clope;
59             }
60         }
61     }
62 }
63
64 class MKLDNNGraphReluTests: public TestsCommon,
65                                      public WithParamInterface<relu_test_params> {
66     std::string model_t = R"V0G0N(
67 <Net Name="Relu_Only" version="2" precision="FP32" batch="1">
68     <layers>
69         <layer name="in1" type="Input" precision="FP32" id="0">
70             <output>
71                 <port id="0">
72                     <dim>_IN_</dim>
73                     <dim>_IC_</dim>
74                     <dim>_IH_</dim>
75                     <dim>_IW_</dim>
76                 </port>
77             </output>
78         </layer>
79         <layer name="norm" id="1" type="ReLU" precision="FP32">
80             <input>
81                 <port id="1">
82                     <dim>_IN_</dim>
83                     <dim>_IC_</dim>
84                     <dim>_IH_</dim>
85                     <dim>_IW_</dim>
86                 </port>
87             </input>
88             <output>
89                 <port id="2">
90                     <dim>_IN_</dim>
91                     <dim>_IC_</dim>
92                     <dim>_IH_</dim>
93                     <dim>_IW_</dim>
94                 </port>
95             </output>
96         </layer>
97     </layers>
98     <edges>
99         <edge from-layer="0" from-port="0" to-layer="1" to-port="1"/>
100     </edges>
101 </Net>
102 )V0G0N";
103
104     std::string getModel(relu_test_params p) {
105         std::string model = model_t;
106
107         REPLACE_WITH_NUM(model, "_IW_", p.in.w);
108         REPLACE_WITH_NUM(model, "_IH_", p.in.h);
109         REPLACE_WITH_NUM(model, "_IC_", p.in.c);
110         REPLACE_WITH_NUM(model, "_IN_", p.in.n);
111
112         return model;
113     }
114
115 protected:
116     virtual void TearDown() {
117     }
118
119     virtual void SetUp() {
120         try {
121             TestsCommon::SetUp();
122             relu_test_params p = ::testing::WithParamInterface<relu_test_params>::GetParam();
123             std::string model = getModel(p);
124
125             InferenceEngine::CNNNetReader net_reader;
126             ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length()));
127
128             MKLDNNGraphTestClass graph;
129             graph.CreateGraph(net_reader.getNetwork());
130             auto& nodes = graph.getNodes();
131             for (int i = 0; i < nodes.size(); i++) {
132                 if (nodes[i]->getType() == MKLDNNPlugin::Activation) {
133                     ASSERT_LE(p.num_prim_desc, nodes[i]->getSupportedPrimitiveDescriptors().size());
134                     for (size_t j = 0; j < p.num_prim_desc && j < p.comp.size(); j++) {
135                         p.comp.at(j)(nodes[i]->getSupportedPrimitiveDescriptors().at(j));
136                     }
137                     ASSERT_NE(nullptr, nodes[i]->getSelectedPrimitiveDescriptor());
138                     ASSERT_TRUE(nodes[i]->getSelectedPrimitiveDescriptor()->getImplementationType() | p.selectedType);
139                 }
140             }
141
142             InferenceEngine::SizeVector dims_src = {p.in.n, p.in.c, p.in.h, p.in.w};
143
144             InferenceEngine::Blob::Ptr src = InferenceEngine::make_shared_blob<float, const InferenceEngine::SizeVector>(InferenceEngine::Precision::FP32, InferenceEngine::NCHW, dims_src);
145             src->allocate();
146             fill_data(src->buffer(), src->size());
147
148             InferenceEngine::TBlob<float>* srcPtr = dynamic_cast<InferenceEngine::TBlob<float>*>(src.get());
149
150             if (srcPtr == nullptr)
151                 FAIL() << "Cannot cast blob to TBlob<float>.";
152
153             InferenceEngine::BlobMap srcs;
154             srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src));
155
156             InferenceEngine::OutputsDataMap out;
157             out = net_reader.getNetwork().getOutputsInfo();
158             InferenceEngine::BlobMap outputBlobs;
159
160             std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();
161
162             InferenceEngine::TBlob<float>::Ptr output;
163             output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
164             output->allocate();
165             outputBlobs[item.first] = output;
166
167             graph.Infer(srcs, outputBlobs);
168
169             InferenceEngine::TBlob<float> dst_ref(item.second->getTensorDesc());
170             dst_ref.allocate();
171
172             ref_relu(*srcPtr, dst_ref, p);
173
174             compare(*output, dst_ref);
175         } catch (const InferenceEngine::details::InferenceEngineException &e) {
176             FAIL() << e.what();
177         }
178     }
179 };
180
// Body intentionally empty: all validation for each relu_test_params value
// runs in the fixture's SetUp().
TEST_P(MKLDNNGraphReluTests, TestsRelu) {}
182
183
184 INSTANTIATE_TEST_CASE_P(
185         TestsRelu, MKLDNNGraphReluTests,
186         ::testing::Values(
187                 relu_test_params{
188                         {1, 3, 228, 228}, 0.0f, 9, MKLDNNPlugin::impl_desc_type::jit, {
189                                 [](MKLDNNPlugin::PrimitiveDescInfo impl) {
190                                     ASSERT_TRUE(impl.getImplementationType() | MKLDNNPlugin::impl_desc_type::jit);
191                                     ASSERT_EQ(1, impl.getConfig().inConfs.size());
192                                     ASSERT_EQ(1, impl.getConfig().outConfs.size());
193                                     ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
194                                     ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
195                                 },
196                                 [](MKLDNNPlugin::PrimitiveDescInfo impl) {
197                                     ASSERT_TRUE(impl.getImplementationType() | MKLDNNPlugin::impl_desc_type::jit);
198                                     ASSERT_EQ(1, impl.getConfig().inConfs.size());
199                                     ASSERT_EQ(1, impl.getConfig().outConfs.size());
200                                     ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
201                                     ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
202                                 }
203                         }}));