1 // Copyright (C) 2018-2020 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
15 #include <unordered_map>
16 #include <unordered_set>
20 #include "cnn_network_impl.hpp"
21 #include "ie_algorithm.hpp"
22 #include "ie_icnn_network.hpp"
23 #include "layer_transform.hpp"
25 IE_SUPPRESS_DEPRECATED_START
27 namespace InferenceEngine {
31 * @brief Iterate over all layers followed by certain CNNLayer layer, and suitable to use ranged loops for output layers
33 class OutLayersIterator {
// Iterates over all consumer layers of a layer's outData vector: for each
// DataPtr in turn it walks the getInputTo() map, skipping data objects that
// have no consumers (see moveToNextNonEmptyData).
34 std::vector<DataPtr>::iterator dataCntIteratorCurrent;
35 std::vector<DataPtr>::iterator dataCntIteratorEnd;
// Iterator into the current DataPtr's inputTo map (layer name -> consumer).
37 using OutdataIterator = std::map<std::string, CNNLayerPtr>::iterator;
// true when this iterator is the past-the-end sentinel; a default-constructed
// iterator therefore compares equal to end().
38 bool pointingToEnd = true;
39 OutdataIterator currentIterator;
// Default construction yields the end sentinel (pointingToEnd == true).
42 OutLayersIterator() = default;
// Builds an iterator positioned at the first consumer of the first non-empty
// outData entry of `origin`; stays the end sentinel when there is none.
44 static OutLayersIterator make_begin(std::vector<DataPtr>& origin) {
50 it.dataCntIteratorCurrent = origin.begin();
51 it.dataCntIteratorEnd = origin.end();
52 it.moveToNextNonEmptyData();
// Two iterators are equal when both are at end, or when they reference the
// same outData entry and the same position inside its inputTo map.
57 bool operator==(const OutLayersIterator& it) const {
58 if (pointingToEnd || it.pointingToEnd) {
59 return pointingToEnd && it.pointingToEnd;
61 return it.dataCntIteratorCurrent == dataCntIteratorCurrent && it.currentIterator == currentIterator;
64 bool operator!=(const OutLayersIterator& it) const {
65 return !this->operator==(it);
// Advance: move within the current inputTo map first; once exhausted, step to
// the next outData entry that has consumers.
69 if (dataCntIteratorCurrent == dataCntIteratorEnd) {
77 if (currentIterator != dataCntIteratorCurrent->get()->getInputTo().end()) {
81 dataCntIteratorCurrent++;
82 moveToNextNonEmptyData();
// Dereference yields the consumer layer currently pointed to.
85 CNNLayerPtr operator*() const {
86 return currentIterator->second;
// Skips outData entries whose inputTo map is empty; leaves the iterator at
// the first consumer found, or as the end sentinel if none remain.
90 void moveToNextNonEmptyData() {
92 for (; dataCntIteratorCurrent != dataCntIteratorEnd; dataCntIteratorCurrent++) {
93 if (!dataCntIteratorCurrent->get()->getInputTo().empty()) {
94 currentIterator = dataCntIteratorCurrent->get()->getInputTo().begin();
95 pointingToEnd = false;
102 class OutInfoWrapper {
// Range adapter over a CNNLayer's output consumers: begin()/end() allow a
// range-based for loop to visit every layer fed by `origin`'s outData.
// Holds a non-owning pointer; `origin` must outlive the wrapper.
103 CNNLayer* origin = nullptr;
106 explicit OutInfoWrapper(CNNLayer* origin): origin(origin) {}
107 OutLayersIterator begin() const {
108 return OutLayersIterator::make_begin(origin->outData);
// end() presumably returns a default-constructed (sentinel) OutLayersIterator
// — the body is not visible in this excerpt; confirm against the full file.
111 OutLayersIterator end() const {
// Default child-ordering used by DFS: wraps `layer` so that its output
// consumers are visited in outData order, then inputTo map order.
116 inline OutInfoWrapper default_order(CNNLayer* layer) {
117 return OutInfoWrapper(layer);
121 * @brief implementation of DFS with visited checking to avoid multiple entries
122 * @param visited - set to store visited layers
123 * @param layer - current layer to start DFS from
124 * @param visit - user callback on visited node
125 * @param visitBefore - indicates whether the callback happens before visiting all child nodes or after
126 * @return false if cycle detected
128 template <class T, class Ordering = std::function<OutInfoWrapper(CNNLayer*)>>
129 inline bool DFS(std::unordered_map<CNNLayer*, bool>& visited, const InferenceEngine::CNNLayerPtr& layer, const T& visit,
130 bool visitBefore, const Ordering& order = &default_order) {
// A null starting layer is handled explicitly (branch body not visible here).
131 if (layer == nullptr) {
// Pre-order visit happens before the node is marked as in-progress.
135 if (visitBefore) visit(layer);
// `false` means "entered but not yet completed" — used for cycle detection:
// re-encountering such a node below means a back edge, i.e. a cycle.
136 visited[layer.get()] = false;
137 for (auto outLayerPtr : order(layer.get())) {
138 auto i = visited.find(outLayerPtr.get());
139 if (i != visited.end()) {
141 * cycle detected we entered still not completed node
// Propagate a cycle report (false) from any recursive call upward.
148 if (!DFS(visited, outLayerPtr, visit, visitBefore, order)) {
// Post-order visit, then mark the node as fully processed.
153 if (!visitBefore) visit(layer);
154 visited[layer.get()] = true;
159 * @brief implementation of DFS in an undirected graph, meaning next layers include not only children but also parents
160 * @param visited - set to store visited layers
161 * @param layer - current layer to start UnorderedDFS from
162 * @param visit - user callback on visited node
163 * @param visitBefore - indicates when callback is happened before all child nodes or after
// Traverses the graph treating edges as undirected: from each layer it
// enqueues both the consumers of its outputs and the creators of its inputs.
// Implemented iteratively with an explicit queue.
166 inline void UnorderedDFS(std::unordered_set<CNNLayer*>& visited, const InferenceEngine::CNNLayerPtr& layer,
167 const T& visit, bool visitBefore) {
168 std::queue<InferenceEngine::CNNLayerPtr> layers;
// Processes the queue front: skips null / already-visited layers, runs the
// user callback, then enqueues all neighbours (children and parents).
169 auto cycleDFS = [&]() {
170 if (layers.empty()) return;
171 auto cnnLayer = layers.front();
174 if (cnnLayer == nullptr) {
177 if (visited.end() != visited.find(cnnLayer.get())) {
181 if (visitBefore) visit(cnnLayer);
182 visited.insert(cnnLayer.get());
// Children: every consumer of every output data object.
185 for (auto& od : cnnLayer->outData) {
186 for (auto nl : od->getInputTo()) {
187 layers.push(nl.second);
// Parents: the creator layer of every input data object; a null input data
// entry is reported as an error.
192 for (size_t i = 0; i < cnnLayer->insData.size(); i++) {
193 auto& input = cnnLayer->insData[i];
195 THROW_IE_EXCEPTION << "Data " << i << " inserted into layer " << cnnLayer->name << " is nullptr";
197 auto creatorLayer = input.lock()->getCreatorLayer().lock();
199 layers.push(creatorLayer);
204 if (!visitBefore) visit(cnnLayer);
// Drain the queue until every reachable layer has been processed.
207 while (!layers.empty()) {
213 * @brief implementation of BFS with visited checking to avoid multiple entries
214 * @param visited - set to store visited layers
215 * @param layer - current layer to start DFS from
216 * @param visit - user callback on visited node
// Breadth-first traversal over output edges only. maxDepth == -1 (any
// non-zero negative value) effectively disables the depth limit; the
// per-level depth decrement is not visible in this excerpt.
219 inline void BFS(InferenceEngine::CNNLayerPtr layer, const T& visit, int maxDepth) {
220 std::set<InferenceEngine::CNNLayer*> visited;
221 std::list<InferenceEngine::CNNLayerPtr> nextLayers;
222 nextLayers.push_back(layer);
// Number of layers remaining on the current BFS level.
224 int layersOnLevel = 1;
225 for (; !nextLayers.empty() && maxDepth != 0;) {
226 visit(*nextLayers.begin());
227 for (auto& od : (*nextLayers.begin())->outData) {
228 for (auto nl : od->getInputTo()) {
// Enqueue each consumer only once.
// NOTE(review): the starting layer itself is never inserted into `visited`,
// so it could be re-visited if reachable from itself — confirm intended.
229 if (visited.find(nl.second.get()) == visited.end()) {
230 nextLayers.push_back(nl.second);
231 visited.insert(nl.second.get());
235 nextLayers.pop_front();
// Level boundary reached: the queue now holds exactly the next level.
237 if (!--layersOnLevel) {
238 layersOnLevel = nextLayers.size();
247 * Generic DFS algorithm traverser
248 * @param layer - starting layer
249 * @param visit - callback to be called upon visiting
250 * @param visitBefore - indicates when callback is happened before all child nodes or after
// Public entry point: DFS from a single layer with a per-call visited map.
// Returns the result of details::DFS — false when a cycle is detected.
252 template <class T, class Ordering = std::function<details::OutInfoWrapper(CNNLayer*)>>
253 inline bool CNNNetDFS(const InferenceEngine::CNNLayerPtr& layer, const T& visit, bool visitBefore = true,
254 const Ordering& order = &details::default_order) {
// Null start layer handled explicitly (branch body not visible here).
255 if (layer == nullptr) {
259 std::unordered_map<CNNLayer*, bool> visited;
260 return details::DFS(visited, layer, visit, visitBefore, order);
263 * DFS algorithm with multiple starting data
264 * @param layer - starting data
265 * @param visit - callback to be called upon visiting
266 * @param visitBefore - indicates when callback is happened before all child nodes or after
// DFS over a forest rooted at data objects: starts one DFS from every
// consumer of every head data object. A single shared visited map ensures
// layers reachable from several heads are visited only once.
269 inline bool CNNNetForestDFS(const std::vector<DataPtr>& heads, const T& visit, bool bVisitBefore) {
270 std::unordered_map<CNNLayer*, bool> visited;
271 for (const auto& in : heads) {
272 for (const auto& to : in->getInputTo()) {
// Skip consumers already reached via an earlier head.
273 if (visited.find(to.second.get()) != visited.end()) continue;
// Abort and report failure when DFS detects a cycle.
274 if (!details::DFS(visited, to.second, visit, bVisitBefore)) {
283 * DFS algorithm with multiple starting nodes
284 * @param layer - starting layer
285 * @param visit - callback to be called upon visiting
286 * @param visitBefore - indicates when callback is happened before all child nodes or after
288 template <class Forest, class T>
// DFS over a forest of starting layers with the default child ordering;
// the shared visited map deduplicates across roots. Returns false on cycle.
289 inline bool CNNNetForestDFS(const Forest& heads, const T& visit, bool bVisitBefore) {
294 std::unordered_map<CNNLayer*, bool> visited;
295 for (auto& layer : heads) {
296 if (!details::DFS(visited, layer, visit, bVisitBefore)) {
304 * DFS algorithm with multiple starting nodes
305 * @param layer - starting layer
306 * @param visit - callback to be called upon visiting
307 * @param visitBefore - indicates when callback is happened before all child nodes or after
309 template <class Ordering, class Forest, class T>
// Same as the Forest overload above, but walks children using the
// caller-supplied `order` callback. Returns false on cycle.
310 inline bool CNNNetForestDFS(const Forest& heads, const T& visit, bool bVisitBefore, const Ordering& order) {
315 std::unordered_map<CNNLayer*, bool> visited;
316 for (auto& layer : heads) {
317 if (!details::DFS(visited, layer, visit, bVisitBefore, order)) {
325 * Generic BFS algorithm traverser
326 * @param layer - starting layer
327 * @param visit - callback to be called upon visiting
// BFS from `layer` with the depth limit disabled (maxDepth = -1).
330 inline void CNNNetBFS(const InferenceEngine::CNNLayerPtr& layer, const T& visit) {
334 details::BFS(layer, visit, -1);
338 * @brief pointer of previous layers
339 * @param idx - index in previous layer collection
// Returns whether `layer` has a creator (previous) layer at input index `idx`.
342 inline bool CNNNetHasPrevLayer(const InferenceEngine::CNNLayer* layer, int idx = 0) {
343 IE_ASSERT(layer != nullptr);
// Out-of-range index branch (body not visible; presumably returns false).
344 if (layer->insData.empty() || static_cast<int>(layer->insData.size()) <= idx) {
// NOTE(review): prevData from weak_ptr::lock() is dereferenced without a null
// check; an expired input data object would crash — confirm lifetime guarantees.
347 auto prevData = layer->insData[idx].lock();
348 return !!prevData->getCreatorLayer().lock();
352 * @brief to allow storing of LayersSP in collections ordered by names
355 class LayerNameLess {
// Strict weak ordering of layer shared pointers by layer name, so that
// CNNLayerPtr values can live in name-sorted std::set / std::map containers.
357 bool operator()(const CNNLayerPtr& lhs, const CNNLayerPtr& rhs) const {
358 return std::less<std::string>()(lhs->name, rhs->name);
// Set of layers kept ordered by name.
362 using CNNLayerSet = std::set<CNNLayerPtr, LayerNameLess>;
365 * @brief returns all layers that are input or memory
367 * @return set of input layers
// Collects every source layer of the network: starting from each declared
// input, walks the whole connected component (undirected, via UnorderedDFS)
// and keeps layers that have no input data (Input/Const/Memory-like sources).
369 inline CNNLayerSet CNNNetGetAllInputLayers(const ICNNNetwork& network) {
370 InputsDataMap inputs;
371 network.getInputsInfo(inputs);
373 CNNLayerSet inputLayers;
// Shared across all starts so each component is traversed only once.
374 std::unordered_set<CNNLayer*> allLayers;
376 if (inputs.empty()) return inputLayers;
378 for (const auto& input : inputs) {
379 auto& secondLayers = input.second->getInputData()->getInputTo();
// Input data with no consumers contributes nothing.
381 if (secondLayers.empty()) continue;
// One arbitrary consumer suffices as the component's entry point.
383 details::UnorderedDFS(
384 allLayers, secondLayers.begin()->second,
385 [&](CNNLayerPtr layer) {
386 if (layer->insData.empty()) {
387 inputLayers.insert(layer);
396 * @brief returns all layers that are input or memory , search started from arbitrary location in network
398 * @return set of input layers
// Overload starting from an arbitrary layer instead of the network inputs.
400 inline CNNLayerSet CNNNetGetAllInputLayers(CNNLayer* layer) {
401 CNNLayerSet inputLayers;
402 std::unordered_set<CNNLayer*> allLayers;
// Non-owning shared_ptr with a no-op deleter: `layer` is owned elsewhere,
// this wrapper only satisfies UnorderedDFS's CNNLayerPtr interface.
404 CNNLayerPtr layerPtr(layer, [](CNNLayer*) {});
406 details::UnorderedDFS(
// Keep layers that have no inputs — graph sources.
408 [&](CNNLayerPtr layer) {
409 if (layer->insData.empty()) {
410 inputLayers.insert(layer);
418 * @brief Sorts CNNNetwork graph in topological order, using a custom ordering when walking among child nodes
419 * @param network - input CNNNetwork
420 * @param ordering - callback that returns output layers for given CNNLayer pointer, see default_order function
421 * @return sorted CNNNetwork layers
423 template <class LayerOrdering>
424 std::vector<CNNLayerPtr> CNNNetSortTopologicallyEx(const ICNNNetwork& network, LayerOrdering ordering) {
425 std::vector<CNNLayerPtr> stackOfVisited;
// Post-order DFS from all network source layers; reversing the finish order
// below yields a topological order of the whole graph.
426 bool res = CNNNetForestDFS(
427 CNNNetGetAllInputLayers(network),
428 [&](CNNLayerPtr current) {
429 stackOfVisited.push_back(current);
// A cycle makes topological sorting impossible.
434 THROW_IE_EXCEPTION << "Sorting not possible, due to existed loop.";
437 std::reverse(std::begin(stackOfVisited), std::end(stackOfVisited));
439 return stackOfVisited;
442 using CNNNetPtr = std::shared_ptr<ICNNNetwork>;
443 using CNNNetCPtr = std::shared_ptr<const ICNNNetwork>;
446 * @brief deep copy of the entire network, structure using custom copier for layers
447 * @param input - source network
448 * @param cp - custom copier object, ex: [](CNNLayerPtr lp) { return injectData<EmptyStruct>(lp); }
449 * @return copied network
// Deep-copies `input` into a new CNNNetworkImpl. Pass 1 clones every layer
// through the user-supplied copier `cp`; pass 2 rewires the clones'
// outData/insData edges via the old->new layer map; finally input info,
// output info and batch size are transferred. Throws on cycles and on
// data objects without a creator layer.
451 template <class Copier>
452 inline CNNNetPtr CNNNetCopy(const ICNNNetwork& input, const Copier& cp) {
453 auto net = std::make_shared<details::CNNNetworkImpl>();
// Network-level precision is a deprecated attribute; copied under suppression.
456 IE_SUPPRESS_DEPRECATED_START
457 net->setPrecision(input.getPrecision());
458 IE_SUPPRESS_DEPRECATED_END
460 net->setName(input.getName());
462 // rest info is layer dependent so have to create graph clone
463 std::unordered_map<CNNLayer*, CNNLayerPtr> oldToNewLayers;
465 auto starters = CNNNetGetAllInputLayers(input);
467 // 1st pass node creation
468 bool res = CNNNetForestDFS(
470 [&](CNNLayerPtr current) {
471 auto newLayer = cp(current);
472 oldToNewLayers[current.get()] = newLayer;
473 net->addLayer(newLayer);
478 THROW_IE_EXCEPTION << "Copying of network not possible, due to existed loop.";
481 // internal utility to locate out data idx in layer
482 auto findOutDataIdx = [&](DataPtr sourceData) {
484 auto sourceLayer = sourceData->getCreatorLayer().lock();
486 THROW_IE_EXCEPTION << "Data " << sourceData->getName() << " has no creator layer";
// Linear scan over the creator's outData for a pointer match.
488 for (size_t j = 0; j < sourceLayer->outData.size(); j++) {
489 if (sourceData.get() == sourceLayer->outData[j].get()) {
490 dataIdx = static_cast<int>(j);
494 IE_ASSERT(dataIdx != -1);
498 // compares data objects between the copied network and the old network
499 auto areEqualDatas = [&](DataPtr source, DataPtr target) {
500 if (source.get() == target.get()) {
// Compare by rank and name rather than exact dims:
505 // actual dims value might be incorrect due to a synthetic case,
506 // when getBatchSize() returns a value not reflected in actual data
508 if (source->getTensorDesc().getDims().size() != target->getTensorDesc().getDims().size()) {
513 if (source->getName() != target->getName()) {
517 // inputTo layers are identical by design
520 // internal utility to locate input data idx in layer
521 auto findInsDataIdx = [&](DataPtr sourceData, CNNLayerPtr layer) {
523 auto sourceLayerMap = sourceData->getInputTo();
// Find the entry of `layer` among sourceData's consumers, then locate which
// of `layer`'s insData slots corresponds to sourceData.
524 for (auto& layersMapping : sourceLayerMap) {
525 if (layersMapping.second.get() != layer.get()) {
528 for (size_t j = 0; j < layer->insData.size(); j++) {
529 if (areEqualDatas(layer->insData[j].lock(), sourceData)) {
530 dataIdx = static_cast<int>(j);
537 IE_ASSERT(dataIdx != -1);
541 // 2nd pass edges creation
544 [&](CNNLayerPtr current) {
545 auto newLayer = oldToNewLayers[current.get()];
// Point each cloned output data back at the cloned creator layer.
547 for (size_t i = 0; i != current->outData.size(); i++) {
548 newLayer->outData[i]->getCreatorLayer() = CNNLayerWeakPtr(newLayer);
550 // transfer data info for getData routine
551 net->getData(newLayer->outData[i]->getName()) = newLayer->outData[i];
// Remap every consumer entry from the old layer to its clone.
553 for (auto inputTo = std::begin(newLayer->outData[i]->getInputTo());
554 inputTo != std::end(newLayer->outData[i]->getInputTo()); inputTo++) {
555 inputTo->second = oldToNewLayers[inputTo->second.get()];
559 for (size_t i = 0; i != current->insData.size(); i++) {
560 // found that data IDX
561 auto sourceData = current->insData[i].lock();
562 auto sourceLayer = sourceData->getCreatorLayer().lock();
564 THROW_IE_EXCEPTION << "Data " << sourceData->getName() << " has no creator layer";
566 // find insData Entry in outData of sourceLayer
567 newLayer->insData[i] = oldToNewLayers[sourceLayer.get()]->outData[findOutDataIdx(sourceData)];
572 // transfer input info
573 InputsDataMap inputsInfo;
574 input.getInputsInfo(inputsInfo);
575 std::set<DataPtr> insDatas;
// Rebuild each InputInfo so it points at the cloned input data object,
// preserving the original pre-processing settings.
576 for (auto&& info : inputsInfo) {
577 for (auto secondLayer : info.second->getInputData()->getInputTo()) {
578 auto secondLayerNew = oldToNewLayers[secondLayer.second.get()];
579 InputInfo::Ptr infoNew = std::make_shared<InputInfo>();
580 infoNew->setInputData(
581 secondLayerNew->insData[findInsDataIdx(info.second->getInputData(), secondLayer.second)].lock());
582 infoNew->getPreProcess() = info.second->getPreProcess();
583 net->setInputInfo(infoNew);
587 // transfer output info
588 OutputsDataMap outmap;
589 input.getOutputsInfo(outmap);
590 for (auto&& data : outmap) {
592 if (OK != net->addOutput(data.second->getCreatorLayer().lock()->name, findOutDataIdx(data.second), &dsc)) {
593 THROW_IE_EXCEPTION << dsc.msg;
598 // transfer batch size
599 if (OK != net->setBatchSize(input.getBatchSize(), &dsc)) {
600 THROW_IE_EXCEPTION << dsc.msg;
607 * @brief deep copy of the entire network
// Convenience overload: deep copy with a default copier that clones each
// layer via injectData with an empty payload struct.
611 inline CNNNetPtr CNNNetCopy(const ICNNNetwork& input) {
612 struct EmptyStruct {};
613 auto copier = [](CNNLayerPtr lp) {
614 return injectData<EmptyStruct>(lp);
616 return InferenceEngine::CNNNetCopy(input, copier);
620 * @brief Replaces layer with newLayer in network
621 * @param network - graph containing the layer
622 * @param layer - layer which need to replace
623 * @param newLayer - new layer instead of layer; it must have same name like a layer for replace
// Declaration only — the implementation lives in a translation unit elsewhere.
625 void CNNNetSubstituteLayer(InferenceEngine::ICNNNetwork& network, const InferenceEngine::CNNLayerPtr& layer,
626 const InferenceEngine::CNNLayerPtr& newLayer);
631 * The structure to wrap network as lists of input and output data objects
632 * Each layer of network is achievable by DFS started from inputs.
634 * NB! The input collection may contain a "fake" data object which is not a
635 * real input to network, but just a holder to keep "const" and "memory"
636 * layers alive. Fake data object points layers with empty creator field.
637 * The fake data object always has "UNSPECIFIED" precision attribute.
// Entry data objects of the subnet; per the note above, this may include
// "fake" holder data objects that keep const/memory layers reachable.
640 std::vector<DataPtr> inputs;
// Result data objects of the subnet.
641 std::vector<DataPtr> outputs;
645 * @brief Detect all input data object, not only provided as entry point.
646 * @param heads collection of some input into graph
647 * @return all input data objects including "fake" data (layers holder).
// Returns `heads` plus one freshly created "fake" holder DataPtr for every
// source layer (layer with empty insData) found by an undirected walk from
// the heads — keeping const/memory placeholder layers reachable via data.
649 inline std::vector<DataPtr> CNNSubnetGetAllInputs(const std::vector<DataPtr>& heads) {
650 CNNLayerSet inputLayers;
651 std::unordered_set<CNNLayer*> allLayers;
653 // Define all start layers
654 for (const auto& data : heads) {
655 auto& secondLayers = data->getInputTo();
657 if (secondLayers.empty()) continue;
// One arbitrary consumer is enough to reach the whole component.
659 details::UnorderedDFS(
660 allLayers, secondLayers.begin()->second,
661 [&](CNNLayerPtr layer) {
662 if (layer->insData.empty()) {
663 inputLayers.insert(layer);
669 std::vector<DataPtr> res = heads;
670 // Add fake input data to point on not achievable
671 // layers from head (like const placeholders)
672 for (auto& starter : inputLayers) {
// The holder's only role is to keep `starter` in its inputTo map; its name
// is derived from the layer name with an ":input_holder" suffix.
673 DataPtr holder(new Data(starter->name + ":input_holder", starter->precision));
674 holder->getInputTo()[starter->name] = starter;
675 res.push_back(holder);
682 * @brief Sorts CNNSubnet graph representation in topological order
683 * @param subnet input object
684 * @return layer collection sorted in topological order
686 inline std::vector<CNNLayerPtr> CNNSubnetSortTopologically(const CNNSubnet& subnet) {
687 std::vector<CNNLayerPtr> stackOfVisited;
// Post-order DFS over the forest of all subnet inputs (including fake
// holders); the reversed finish order is a topological order.
688 bool res = CNNNetForestDFS(
689 CNNSubnetGetAllInputs(subnet.inputs),
690 [&](CNNLayerPtr current) {
691 stackOfVisited.push_back(current);
// A cycle makes topological sorting impossible.
695 THROW_IE_EXCEPTION << "Sorting not possible, due to existed loop.";
698 std::reverse(stackOfVisited.begin(), stackOfVisited.end());
699 return stackOfVisited;
702 } // namespace details
703 } // namespace InferenceEngine
705 IE_SUPPRESS_DEPRECATED_END