// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include <gtest/gtest.h>
#include <inference_engine/graph_tools.hpp>
#include <gmock/gmock-generated-function-mockers.h>
#include <gmock/gmock-generated-matchers.h>
#include <gmock/gmock-more-actions.h>

#include <algorithm>
#include <initializer_list>
#include <map>
#include <memory>
#include <string>
#include <unordered_set>
#include <vector>

#include "mock_icnn_network.hpp"
#include "cpp/ie_cnn_network.h"
#include "details/ie_cnn_network_tools.h"

namespace GraphTest {

using namespace InferenceEngine;
using namespace std;

/**
 * @brief Base test fixture in which all layers start out as unconnected inputs.
 * Input layers are identified by the absence of insData.
 */
class GraphTestsBase : public ::testing::Test {

 protected:

    // Mock callbacks for recording layer visits in graph traversal tests.
    MOCK_METHOD2(visited, void(size_t, int));
    MOCK_METHOD2(visited2, void(size_t, int));

    std::vector<CNNLayerPtr> layers;
    std::vector<std::vector<DataPtr>> datas;

    MockICNNNetwork mockNet;
    InferenceEngine::CNNNetwork wrap = InferenceEngine::CNNNetwork(&mockNet);

    /**
     * @brief Layers used as the left-hand (source) and right-hand (sink) sides
     * of directed CONNECT operations.
     */
    std::unordered_set<CNNLayerPtr> lhsLayers;
    std::unordered_set<CNNLayerPtr> rhsLayers;
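    /**
     * @brief Fills inputsMap with an InputInfo entry for every layer that has no
     * input data (i.e. every graph input) on either side of recorded connections.
     */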
    virtual void prepareInputs(InputsDataMap &inputsMap, int batchSize = 1) {
        auto prepareInputsInternal = [&inputsMap, &batchSize](std::unordered_set<CNNLayerPtr> & layersSet) {
            for (auto layer = layersSet.begin(); layer != layersSet.end(); layer++) {
                if ((*layer)->insData.empty()) {
                    auto info = make_shared<InputInfo>();
                    auto data = make_shared<Data>((*layer)->name, Precision::FP32, Layout::NC);
                    SizeVector dims = data->getDims();
                    dims.push_back(batchSize);
                    dims.push_back(batchSize);
                    data->setDims(dims);
                    for (auto output : (*layer)->outData) {
                        // Take over the consumers of the layer's output data
                        // (if there are several outputs, the last one wins).
                        data->getInputTo() = output->inputTo;
                    }
                    data->creatorLayer = (*layer);
                    info->setInputData(data);
                    inputsMap[(*layer)->name] = info;
                }
            }
        };
        prepareInputsInternal(lhsLayers);
        prepareInputsInternal(rhsLayers);
    }

    /**
     * @brief Returns the layer with the given name from the topologically sorted
     * network, or nullptr if no such layer exists.
     */
    CNNLayerPtr layerByName(std::string name) {
        auto sorted = InferenceEngine::details::CNNNetSortTopologically(mockNet);

        auto i = std::find_if(sorted.begin(), sorted.end(), [&](CNNLayerPtr l){
            return l->name == name;
        });
        if (i != sorted.end()) {
            return *i;
        }
        return nullptr;
    }

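    // Connection-assertion helpers: each checks that the layers named by the
    // token arguments are linked by exactly n edges, counted both forward
    // (outData -> inputTo) and backward (insData -> creatorLayer).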
    #define ASSERT_N_CONNECTIONS(a, b, n) \
        ASSERT_EQ(countForwardConnections(#a, #b), n);\
        ASSERT_EQ(countBackwardConnections(#a, #b), n);

    #define ASSERT_CONNECTION(a, b) \
        ASSERT_N_CONNECTIONS(a, b, 1);

    #define ASSERT_2_CONNECTIONS(a, b) \
        ASSERT_N_CONNECTIONS(a, b, 2);

    #define ASSERT_3_CONNECTIONS(a, b) \
        ASSERT_N_CONNECTIONS(a, b, 3);

    /**
     * @brief check that there is no connection in either direction
     */
    #define ASSERT_NO_CONNECTION(a, b) \
        ASSERT_EQ(countConnections(#a, #b), 0);\
        ASSERT_EQ(countConnections(#b, #a), 0);

    void ASSERT_DIMS(int x, const SizeVector & dims) {
        ASSERT_EQ(datas[x].front()->getDims().size(), dims.size());
        for (size_t i = 0; i != dims.size(); i++) {
            ASSERT_EQ(datas[x].front()->getDims()[i], dims[i]);
        }
    }

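    /**
     * @brief Counts edges from layer a to layer b following forward links
     * (a's outData -> inputTo). Returns 0 if layer a does not exist.
     */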
    int countForwardConnections(std::string a, std::string b) {
        int nForward = 0;
        CNNLayerPtr layerExist;
        try {
            layerExist = wrap.getLayerByName(a.c_str());
            if (!layerExist) {
                return 0;
            }
        } catch(...) {
            return 0;
        }

        for (auto && outData : layerExist->outData) {
            auto &inputMap = outData->inputTo;
            nForward +=
                std::count_if(inputMap.begin(), inputMap.end(), [&](std::map<std::string, CNNLayerPtr>::value_type &vt) {
                    return vt.second->name == b;
                });
        }

        return nForward;
    }

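    /**
     * @brief Counts edges from layer a to layer b following backward links
     * (b's insData -> creatorLayer). Returns 0 if layer b does not exist.
     */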
    int countBackwardConnections(std::string a, std::string b) {
        CNNLayerPtr layerExist;
        try {
            layerExist = wrap.getLayerByName(b.c_str());
            if (!layerExist) {
                return 0;
            }
        } catch(...) {
            return 0;
        }

        auto prevData = layerExist->insData;

        auto nBackward = std::count_if(prevData.begin(), prevData.end(), [&](DataWeakPtr wp) {
            return wp.lock()->getCreatorLayer().lock()->name == a;
        });

        return nBackward;
    }

    int countConnections(std::string a, std::string b) {
        return countForwardConnections(a, b) + countBackwardConnections(a, b);
    }

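    // Counter that gives each generic layer created below a unique type name.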
    int numCreated = 0;

    /**
     * @brief Creates a standalone generic layer named @p name with a single
     * 1x1 FP32 output data attached.
     */
    CNNLayerPtr createGenericLayer(std::string name) {
        auto newData = std::make_shared<Data>(name,
                                              SizeVector({1, 1}),
                                              Precision::FP32,
                                              Layout::NC);

        CNNLayerPtr newLayer = make_shared<GenericLayer>(LayerParams({name, "Generic_" + std::to_string(numCreated++), Precision::FP32}));
        newData->creatorLayer = newLayer;
        newLayer->outData.push_back(newData);

        return newLayer;
    }

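    /**
     * @brief Same as prepareInputs(), but only registers the input layers whose
     * names match one of the indices listed in @p inputLayers.
     */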
    void prepareSomeInputs(InputsDataMap &inputsMap, std::initializer_list<int> inputLayers, int batchSize = 1) {
        for (auto layer = lhsLayers.begin(); layer != lhsLayers.end(); layer++) {
            if ((*layer)->insData.empty()) {
                auto isMarked = std::find_if(begin(inputLayers), end(inputLayers), [&](int value) {
                    return std::to_string(value) == (*layer)->name;
                });
                if (isMarked == end(inputLayers))
                    continue;
                auto info = make_shared<InputInfo>();
                auto data = make_shared<Data>((*layer)->name, Precision::FP32, Layout::NC);
                SizeVector dims = data->getDims();
                dims.push_back(batchSize);
                dims.push_back(batchSize);
                data->setDims(dims);
                for (auto output : (*layer)->outData) {
                    data->getInputTo() = output->inputTo;
                }
                info->setInputData(data);
                inputsMap[(*layer)->name] = info;
            }
        }
    }

    /**
     * @brief Collects network outputs; only leaf layers (layers whose outputs
     * have no consumers) are treated as outputs here.
     * @param outputMap map to be filled with the output data objects
     */
    void prepareOutputs(OutputsDataMap & outputMap) {
        for (auto layer = rhsLayers.begin(); layer != rhsLayers.end(); layer++) {
            bool notLast = false;
            for (auto && outData : (*layer)->outData) {
                if (!outData->getInputTo().empty()) {
                    notLast = true;
                    break;
                }
            }
            if (notLast) continue;
            for (auto && outData : (*layer)->outData) {
                outputMap[outData->getName()] = outData;
            }
        }
    }

    int _batchSize = 1;

    /**
     * @brief Creates 10 independent layers without connections.
     * The data object corresponding to each layer is stored in its outData,
     * and the layer is in turn registered as that data's creator layer.
     */
    void SetUp() override {
        datas.resize(10);
        for (int i = 0; i < 10; i++) {
            layers.push_back(make_shared<CNNLayer>(LayerParams({std::to_string(i)})));
            datas[i].push_back(make_shared<Data>(std::to_string(i), Precision::FP32, Layout::NC));
            datas[i].back()->getCreatorLayer() = layers[i];

            SizeVector dims = datas[i].back()->getDims();
            dims.push_back(_batchSize);
            dims.push_back(_batchSize);
            datas[i].back()->setDims(dims);

            layers.back()->outData.push_back(datas[i].back());
        }
    }

    void TearDown() override {
        // Break shared_ptr circular dependencies to mitigate memory leaks.
        for (auto& items : datas) {
            for (auto& data : items) {
                for (auto& input : data->getInputTo()) {
                    input.second.reset();
                }
            }
        }
    }

    /**
     * @brief Returns the index of the given layer in the fixture's layers vector,
     * or -1 if the layer was not created by this fixture.
     */
    int ID(const CNNLayerPtr &ptr) {
        for (size_t i = 0; i < layers.size(); i++) {
            if (layers[i].get() == ptr.get())
                return static_cast<int>(i);
        }
        return -1;
    }

    void ADD_ATTR(int layer, std::string attr, std::string value) {
        layers[layer]->params[attr] = value;
    }

    /**
     * @brief Adds a directed edge from layer x to layer y, making the graph connected.
     * @param x source (output) layer index
     * @param y destination (input) layer index
     */
    void CONNECT(int x, int y) {
        datas[x].front()->getInputTo()[std::to_string(y)] = layers[y];
        layers[y]->insData.push_back(datas[x].front());
        lhsLayers.insert(layers[x]);
        rhsLayers.insert(layers[y]);
    }

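    /**
     * @brief Like CONNECT(), but attaches layer y to a specific output port of
     * layer x, creating the port's data object on demand.
     */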
    void CONNECT_FROM_PORT(int x, int port, int y) {
        // Create output data objects up to the requested port if they are missing
        // (a while loop keeps datas[x][port] valid even when ports are skipped).
        while (datas[x].size() <= static_cast<size_t>(port)) {
            datas[x].push_back(make_shared<Data>(std::string("split_") + std::to_string(datas[x].size()), Precision::FP32, Layout::NC));
            datas[x].back()->getCreatorLayer() = layers[x];

            SizeVector dims = datas[x].back()->getDims();
            dims.push_back(_batchSize);
            dims.push_back(_batchSize);
            datas[x].back()->setDims(dims);
            layers[x]->outData.push_back(datas[x].back());
        }
        datas[x][port]->getInputTo()[std::to_string(y)] = layers[y];
        layers[y]->insData.push_back(datas[x][port]);
        lhsLayers.insert(layers[x]);
        rhsLayers.insert(layers[y]);
    }

    void SET_DIMS(int x, const SizeVector & dims) {
        datas[x].front()->setDims(dims);
    }

    void SET_TYPE(int x, std::string name) {
        layers[x]->type = name;
    }
};
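
// Illustrative sketch (not part of the original file): a test built on this
// fixture typically wires layers together by index and then asserts on the
// resulting edges. The derived fixture name below is hypothetical.
//
//   class MyGraphTests : public GraphTestsBase {};
//
//   TEST_F(MyGraphTests, connectsTwoLayers) {
//       CONNECT(0, 1);               // edge from layer "0" to layer "1"
//       CONNECT(0, 2);               // fan-out from the same output data
//       ASSERT_CONNECTION(0, 1);     // exactly one edge in each direction
//       ASSERT_NO_CONNECTION(1, 2);  // siblings are not connected
//   }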
291
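/**
 * @brief Function object that forwards layer copying to a gmock method, so tests
 * can set expectations on how often and with which layers copying is invoked.
 */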
class MockCopier {
 public:
    CNNLayerPtr operator()(CNNLayerPtr layer) const {
        return copyLayer(layer);
    }
    MOCK_CONST_METHOD1(copyLayer, CNNLayerPtr(CNNLayerPtr));
};

}  // namespace GraphTest

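// gmock matcher checking that a <= arg <= b (inclusive on both ends).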
MATCHER_P2(IsBetween, a, b, std::string(negation ? "isn't" : "is") + " between "
           + ::testing::PrintToString(a) + " and " + ::testing::PrintToString(b)) {
    return a <= arg && arg <= b;
}