1 // Copyright (C) 2018-2019 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
5 #include <gtest/gtest.h>
6 #include <inference_engine/graph_tools.hpp>
7 #include <gmock/gmock-generated-function-mockers.h>
8 #include <gmock/gmock-generated-matchers.h>
9 #include <gmock/gmock-more-actions.h>
10 #include "mock_icnn_network.hpp"
11 #include "cpp/ie_cnn_network.h"
12 #include "details/ie_cnn_network_tools.h"
16 using namespace InferenceEngine;
 * @brief Base test fixture in which every layer starts as an input with no connections.
 * Input layers are identified by the absence of input data (insData).
// Base fixture for graph_tools tests: builds small synthetic CNN graphs out
// of CNNLayer/Data objects on top of a mocked ICNNNetwork, so traversal and
// topological-sort helpers can be exercised without a real network.
class GraphTestsBase : public ::testing::Test {
    // Mock hooks the tests use to record traversal callbacks; arguments are
    // presumably (layer id, visit order/depth) — confirm against the tests.
    MOCK_METHOD2(visited, void(size_t, int));
    MOCK_METHOD2(visited2, void(size_t, int));
    // layers[i] is the i-th synthetic layer; datas[i] holds its output Data
    // objects (vector index corresponds to the output port).
    std::vector<CNNLayerPtr> layers;
    std::vector<std::vector<DataPtr>> datas;
    MockICNNNetwork mockNet;
    // NOTE(review): CNNNetwork is constructed from a raw pointer to a stack
    // member — `wrap` must not outlive `mockNet`, and this assumes the
    // CNNNetwork(ICNNNetwork*) constructor is non-owning; verify.
    InferenceEngine::CNNNetwork wrap = InferenceEngine::CNNNetwork(&mockNet);
    /**
     * Layers that have been used as the lhs (producer) side of an oriented
     * CONNECT operation; rhsLayers holds the consumer side.
     */
    std::unordered_set<CNNLayerPtr> lhsLayers;
    std::unordered_set<CNNLayerPtr> rhsLayers;
    // Populates inputsMap with an InputInfo for every registered layer (from
    // both lhsLayers and rhsLayers) that has no inputs, i.e. is a graph
    // source. batchSize is pushed twice into the dims, matching the NC
    // layout used throughout the fixture.
    virtual void prepareInputs(InputsDataMap &inputsMap, int batchSize = 1) {
        auto prepareInputsInternal = [&inputsMap, &batchSize](std::unordered_set<CNNLayerPtr> & layersSet) {
            for (auto layer = layersSet.begin(); layer != layersSet.end(); layer++) {
                // A layer with empty insData is an input layer by convention.
                if ((*layer)->insData.empty()) {
                    auto info = make_shared<InputInfo>();
                    auto data = make_shared<Data>((*layer)->name, Precision::FP32, Layout::NC);
                    SizeVector dims = data->getDims();
                    dims.push_back(batchSize);
                    dims.push_back(batchSize);
                    // Re-wire the consumers of the layer's outputs onto the
                    // freshly created input Data object.
                    for (auto output : (*layer)->outData) {
                        data->getInputTo() = output->inputTo;
                    data->creatorLayer = (*layer);
                    info->setInputData(data);
                    inputsMap[(*layer)->name] = info;
        prepareInputsInternal(lhsLayers);
        prepareInputsInternal(rhsLayers);
    // Looks a layer up by name in the topologically sorted view of mockNet.
    CNNLayerPtr layerByName(std::string name) {
        auto sorted = InferenceEngine::details::CNNNetSortTopologically(mockNet);
        // Linear search by exact name match over the sorted layer list.
        auto i = std::find_if(sorted.begin(), sorted.end(), [&](CNNLayerPtr l){
            return l->name == name;
        if (i != sorted.end()) {
// Asserts exactly n edges from layer a to layer b, checked from both the
// producer side (forward) and the consumer side (backward).
#define ASSERT_N_CONNECTIONS(a, b, n) \
    ASSERT_EQ(countForwardConnections(#a, #b), n);\
    ASSERT_EQ(countBackwardConnections(#a, #b), n);

// Exactly one edge a -> b.
#define ASSERT_CONNECTION(a, b) \
    ASSERT_N_CONNECTIONS(a,b,1);

// Exactly two parallel edges a -> b.
#define ASSERT_2_CONNECTIONS(a, b) \
    ASSERT_N_CONNECTIONS(a,b,2);

// Exactly three parallel edges a -> b.
#define ASSERT_3_CONNECTIONS(a, b) \
    ASSERT_N_CONNECTIONS(a,b,3);

/**
 * @brief check that there is no connection between a and b in either direction
 */
#define ASSERT_NO_CONNECTION(a, b) \
    ASSERT_EQ(countConnections(#a, #b), 0);\
    ASSERT_EQ(countConnections(#b, #a), 0);\
    // Asserts that the first output Data of layer x has exactly `dims`,
    // comparing size first and then each element.
    void ASSERT_DIMS(int x, const SizeVector & dims) {
        ASSERT_EQ(datas[x].front()->getDims().size(), dims.size());
        for(size_t i = 0; i != dims.size(); i++) {
            ASSERT_EQ(datas[x].front()->getDims()[i], dims[i]);
    // Counts edges a -> b as seen from the producer side: walks layer a's
    // outData and counts inputTo entries whose consumer layer is named b.
    int countForwardConnections(std::string a, std::string b) {
        long int nForward = 0;
        CNNLayerPtr layerExist;
        layerExist = wrap.getLayerByName(a.c_str());
        for (auto && outData : layerExist->outData) {
            auto &inputMap = outData->inputTo;
            // NOTE(review): as visible here the std::count_if result is
            // discarded — confirm it is accumulated into nForward.
            std::count_if(inputMap.begin(), inputMap.end(), [&](std::map<std::string, CNNLayerPtr>::value_type &vt) {
                return vt.second->name == b;
    // Counts edges a -> b as seen from the consumer side: walks layer b's
    // insData and counts inputs whose creator layer is named a.
    int countBackwardConnections(std::string a, std::string b) {
        CNNLayerPtr layerExist;
        layerExist = wrap.getLayerByName(b.c_str());
        auto prevData = layerExist->insData;
        auto nBackward = std::count_if(prevData.begin(), prevData.end(), [&](DataWeakPtr wp) {
            // NOTE(review): both lock() results are dereferenced unchecked —
            // assumes the Data and its creator layer are still alive.
            return wp.lock()->getCreatorLayer().lock()->name == a;
    // Total a -> b edge count measured from both endpoints; a consistent
    // graph yields an even total (forward == backward).
    int countConnections(std::string a, std::string b) {
        return countForwardConnections(a, b) + countBackwardConnections(a, b);
    // Creates a standalone generic layer with one output Data named `name`;
    // the layer type is "Generic_<k>" using a running counter (numCreated,
    // declared outside the visible lines) to keep types unique.
    CNNLayerPtr createGenericLayer (std::string name) {
        auto newData = std::make_shared<Data>(name,
        CNNLayerPtr newLayer = make_shared<GenericLayer>(LayerParams({name, "Generic_" + std::to_string(numCreated++), Precision::FP32}));
        newData->creatorLayer = newLayer;
        newLayer->outData.push_back(newData);
    // Like prepareInputs, but only source layers whose numeric name appears
    // in `inputLayers` are registered as network inputs.
    void prepareSomeInputs(InputsDataMap &inputsMap, std::initializer_list<int> inputLayers, int batchSize = 1) {
        for (auto layer = lhsLayers.begin(); layer != lhsLayers.end(); layer++) {
            if ((*layer)->insData.empty()) {
                // Layer names are stringified indices (see SetUp), so match
                // the candidate list by converting each index to a string.
                auto isMarked = std::find_if(begin(inputLayers), end(inputLayers), [&](int value) {
                    return std::to_string(value) ==(*layer)->name;
                if (isMarked == end(inputLayers))
                auto info = make_shared<InputInfo>();
                auto data = make_shared<Data>((*layer)->name, Precision::FP32, Layout::NC);
                SizeVector dims = data->getDims();
                dims.push_back(batchSize);
                dims.push_back(batchSize);
                // Re-wire consumers of the layer's outputs onto the new Data.
                for (auto output : (*layer)->outData) {
                    data->getInputTo() = output->inputTo;
                info->setInputData(data);
                inputsMap[(*layer)->name] = info;
    /**
     * @brief Output layers are considered to be leafs only: a layer counts
     * as an output when none of its outData has any consumer.
     */
    void prepareOutputs(OutputsDataMap & outputMap) {
        for (auto layer = rhsLayers.begin(); layer != rhsLayers.end(); layer++) {
            bool notLast = false;
            // If any output Data has a consumer, the layer is not a leaf.
            for (auto && outData : (*layer)->outData) {
                if (!outData->getInputTo().empty()) {
            if (notLast) continue;
            // Register every output Data of a leaf layer as a network output.
            for (auto && outData : (*layer)->outData) {
                outputMap[outData->getName()] = outData;
    /**
     * @brief Creates 10 independent layers without connections.
     * The Data object corresponding to each layer is set up in outData;
     * likewise the creator layer is set for each Data via getCreatorLayer.
     */
    void SetUp() override {
        for (int i = 0; i < 10; i++) {
            // Layer i is named "i"; its first output Data shares that name.
            layers.push_back(make_shared<CNNLayer>(LayerParams({std::to_string(i)})));
            datas[i].push_back(make_shared<Data>(std::to_string(i), Precision::FP32, Layout::NC));
            datas[i].back()->getCreatorLayer() = layers[i];
            // Dims are (_batchSize, _batchSize) to match the NC layout;
            // _batchSize is declared outside the visible lines.
            SizeVector dims = datas[i].back()->getDims();
            dims.push_back(_batchSize);
            dims.push_back(_batchSize);
            datas[i].back()->setDims(dims);
            layers.back()->outData.push_back(datas[i].back());
    void TearDown() override {
        // Reset shared_pointer circular dependencies to mitigate memory
        // leaks: Data objects hold shared_ptrs to their consumer layers
        // (inputTo) while layers reference the Data back, so the cycle must
        // be broken explicitly before the fixture is destroyed.
        for (auto& items : datas) {
            for (auto& data : items) {
                for (auto& input : data->getInputTo()) {
                    input.second.reset();
    // Returns the fixture index of `ptr` inside `layers`, comparing by
    // object identity (raw pointer equality), not by name.
    int ID(const CNNLayerPtr &ptr) {
        for (int i = 0; i < layers.size(); i++) {
            if (layers[i].get() == ptr.get())
    // Sets (or overwrites) a string attribute in the layer's params map.
    void ADD_ATTR(int layer, std::string attr, std::string value) {
        layers[layer]->params[attr] = value;
    /**
     * @brief Adds an edge from layer x to layer y, building up the graph.
     * @param x producer (output) layer index
     * @param y consumer (input) layer index
     */
    void CONNECT(int x, int y) {
        // Wire x's first output Data into y, then record both endpoints so
        // prepareInputs/prepareOutputs can find sources and leafs later.
        datas[x].front()->getInputTo()[std::to_string(y)] = layers[y];
        layers[y]->insData.push_back(datas[x].front());
        lhsLayers.insert(layers[x]);
        rhsLayers.insert(layers[y]);
    // Like CONNECT, but connects from output port `port` of layer x,
    // creating an extra output Data ("split_<k>") on demand.
    // NOTE(review): only one Data is appended per call, so this assumes
    // ports are used in increasing order without gaps — confirm.
    void CONNECT_FROM_PORT(int x, int port, int y) {
        if (datas[x].size() <= port) {
            datas[x].push_back(make_shared<Data>(std::string("split_") + std::to_string(datas[x].size()), Precision::FP32, Layout::NC));
            datas[x].back()->getCreatorLayer() = layers[x];
            // New port Data gets the same (_batchSize, _batchSize) dims as
            // the Data objects created in SetUp.
            SizeVector dims = datas[x].back()->getDims();
            dims.push_back(_batchSize);
            dims.push_back(_batchSize);
            datas[x].back()->setDims(dims);
            layers[x]->outData.push_back(datas[x].back());
        datas[x][port]->getInputTo()[std::to_string(y)] = layers[y];
        layers[y]->insData.push_back(datas[x][port]);
        lhsLayers.insert(layers[x]);
        rhsLayers.insert(layers[y]);
    // Replaces the dims of layer x's first output Data.
    void SET_DIMS(int x, const SizeVector & dims) {
        datas[x].front()->setDims(dims);
    // Sets the type string of layer x.
    void SET_TYPE(int x, std::string name) {
        layers[x]->type = name;
    // Functor entry point of the mock layer copier (enclosing class declared
    // outside the visible lines): forwards to the mocked copyLayer so tests
    // can set gmock expectations on copy operations.
    CNNLayerPtr operator()(CNNLayerPtr layer) const {
        return copyLayer(layer);
    MOCK_CONST_METHOD1(copyLayer, CNNLayerPtr(CNNLayerPtr));
// gmock matcher: succeeds when arg lies in the closed interval [a, b];
// the description string is used in failure messages.
MATCHER_P2(IsBetween, a, b, std::string(negation ? "isn't" : "is") + " between " + ::testing::PrintToString(a) + " and " + ::testing::PrintToString(b)) { return a <= arg && arg <= b; }