// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include "ie_util_internal.hpp"
#include "graph_tools.hpp"
#include "details/caseless.hpp"
#include "ie_utils.hpp"
#include "ie_icnn_network_stats.hpp"
#include "details/ie_cnn_network_tools.h"

#include <ie_layers.h>

#include <vector>
#include <unordered_set>
#include <unordered_map>
#include <deque>
#include <string>
#include <cassert>
#include <memory>
#include <utility>
#include <algorithm>
#include <sstream>
#include <iomanip>

using std::string;

namespace InferenceEngine {

using namespace details;

namespace {
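// Recursively walks the output edges of 'layer', calling 'visitor' for every
// (current, next) layer pair. Recursion continues into 'next' only while the
// visitor returns true, so the visitor controls how far the walk expands.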
template<typename Visitor>
void groupSubgraphsHelper(const InferenceEngine::CNNLayerPtr& layer,
                          Visitor&& visitor) {
    for (auto&& out : layer->outData) {
        for (auto&& out_link : out->getInputTo()) {
            auto& nextLayer = out_link.second;
            if (nullptr != nextLayer &&
                visitor(layer, nextLayer)) {
                groupSubgraphsHelper(nextLayer, std::forward<Visitor>(visitor));
            }
        }
    }
}
}  // namespace

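// Splits the network into connected subgraphs. Traversal starts from the
// network inputs; 'splitter' is called for every traversed edge and returns
// true when the two layers must end up in different subgraphs. A layer on the
// far side of a split edge is re-queued and becomes the seed of a new subgraph.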
std::vector<std::vector<CNNLayerPtr> >
groupSubgraphs(ICNNNetwork& network,
               std::function<bool(const CNNLayerPtr&,
                                  const CNNLayerPtr&)> splitter) {
    // TODO: the splitter std::function is heavy and can be replaced with an
    // llvm::function_ref-like lightweight callable when we add one
    std::unordered_set<InferenceEngine::CNNLayerPtr> visitedObjects;
    std::deque<InferenceEngine::CNNLayerPtr> layersToCheck;
    InputsDataMap inputs;
    network.getInputsInfo(inputs);
    for (auto&& input : inputs) {
        auto data = input.second->getInputData();
        for (auto&& to : data->getInputTo()) {
            auto nextLayer = to.second;
            assert(nullptr != nextLayer);
            layersToCheck.push_front(nextLayer);
        }
    }

    std::vector<std::vector<InferenceEngine::CNNLayerPtr>> ret;

    while (!layersToCheck.empty()) {
        auto layer = layersToCheck.back();
        layersToCheck.pop_back();
        if (visitedObjects.find(layer) == visitedObjects.end()) {
            visitedObjects.insert(layer);
            std::vector<InferenceEngine::CNNLayerPtr> subgraph;
            subgraph.push_back(layer);
            groupSubgraphsHelper(layer,
                                 [&](const InferenceEngine::CNNLayerPtr& layer1,
                                     const InferenceEngine::CNNLayerPtr& layer2) {
                if (visitedObjects.find(layer2) == visitedObjects.end()) {
                    if (splitter(layer1, layer2)) {
                        // The layer belongs to a different subgraph.
                        // Do not add it to the visited objects list here,
                        // because we need to visit it during a later while iteration
                        layersToCheck.push_front(layer2);
                        return false;
                    } else {
                        // The layer belongs to the same subgraph; add it to the list
                        subgraph.push_back(layer2);
                        visitedObjects.insert(layer2);
                        return true;
                    }
                }
                return false;
            });
            ret.emplace_back(std::move(subgraph));
        }
    }

    return ret;
}


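// Creates a copy of a Data object that is detached from the graph: the clone
// keeps the name, precision and tensor descriptor of the source, but has no
// creator layer and no consumers.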
DataPtr cloneData(const InferenceEngine::Data& source) {
    auto cloned = std::make_shared<InferenceEngine::Data>(source);
    cloned->getCreatorLayer().reset();
    cloned->getInputTo().clear();
    return cloned;
}

namespace {
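// Clones 'source' as a layer of the concrete type T. Returns nullptr when
// 'source' is not actually a T, which lets the caller probe a list of
// candidate types until one matches.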
template<typename T>
CNNLayerPtr layerCloneImpl(const CNNLayer* source) {
    auto layer = dynamic_cast<const T*>(source);
    if (nullptr != layer) {
        auto newLayer = std::make_shared<T>(*layer);
        newLayer->_fusedWith = nullptr;
        newLayer->outData.clear();
        newLayer->insData.clear();
        return std::static_pointer_cast<CNNLayer>(newLayer);
    }
    return nullptr;
}

}  // namespace

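// Clones a layer while preserving its most derived type. The cloners table is
// probed in order, so more derived types must be listed before their bases;
// the final CNNLayer entry guarantees that some cloner always succeeds.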
CNNLayerPtr clonelayer(const CNNLayer& source) {
    using fptr = CNNLayerPtr (*)(const CNNLayer*);
    // The most derived layer types must go first in this list
    static const fptr cloners[] = {
        &layerCloneImpl<BatchNormalizationLayer>,
        &layerCloneImpl<PowerLayer             >,
        &layerCloneImpl<ScaleShiftLayer        >,
        &layerCloneImpl<PReLULayer             >,
        &layerCloneImpl<TileLayer              >,
        &layerCloneImpl<ReshapeLayer           >,
        &layerCloneImpl<CropLayer              >,
        &layerCloneImpl<EltwiseLayer           >,
        &layerCloneImpl<GemmLayer              >,
        &layerCloneImpl<PadLayer               >,
        &layerCloneImpl<GatherLayer            >,
        &layerCloneImpl<StridedSliceLayer      >,
        &layerCloneImpl<ShuffleChannelsLayer   >,
        &layerCloneImpl<DepthToSpaceLayer      >,
        &layerCloneImpl<SpaceToDepthLayer      >,
        &layerCloneImpl<ReverseSequenceLayer   >,
        &layerCloneImpl<SqueezeLayer           >,
        &layerCloneImpl<UnsqueezeLayer         >,
        &layerCloneImpl<RangeLayer             >,
        &layerCloneImpl<FillLayer              >,
        &layerCloneImpl<ExpandLayer            >,
        &layerCloneImpl<ClampLayer             >,
        &layerCloneImpl<ReLULayer              >,
        &layerCloneImpl<SoftMaxLayer           >,
        &layerCloneImpl<GRNLayer               >,
        &layerCloneImpl<MVNLayer               >,
        &layerCloneImpl<NormLayer              >,
        &layerCloneImpl<SplitLayer             >,
        &layerCloneImpl<ConcatLayer            >,
        &layerCloneImpl<FullyConnectedLayer    >,
        &layerCloneImpl<PoolingLayer           >,
        &layerCloneImpl<DeconvolutionLayer     >,
        &layerCloneImpl<ConvolutionLayer       >,
        &layerCloneImpl<TensorIterator         >,
        &layerCloneImpl<RNNSequenceLayer       >,
        &layerCloneImpl<RNNCellBase            >,
        &layerCloneImpl<QuantizeLayer          >,
        &layerCloneImpl<BinaryConvolutionLayer >,
        &layerCloneImpl<WeightableLayer        >,
        &layerCloneImpl<CNNLayer               >
    };
    for (auto cloner : cloners) {
        auto cloned = cloner(&source);
        if (nullptr != cloned) {
            return cloned;
        }
    }
    assert(!"All layers derive from CNNLayer, so we must never get here");
    return nullptr;  // Silence "control may reach end of non-void function" warning
}

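// Clones a whole network: collects all layers through CNNNetworkIterator,
// delegates the structural copy to the layer-list overload below, and then
// restores the network-level attributes (outputs, precision, name, target
// device, input precisions and preprocessing).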
details::CNNNetworkImplPtr cloneNet(const ICNNNetwork &network) {
    std::vector<CNNLayerPtr> layers;
    details::CNNNetworkIterator i(const_cast<ICNNNetwork *>(&network));
    while (i != details::CNNNetworkIterator()) {
        layers.push_back(*i);
        i++;
    }

    InferenceEngine::ICNNNetworkStats* pstatsSrc = nullptr;
    if (StatusCode::OK != network.getStats(&pstatsSrc, nullptr)) {
        pstatsSrc = nullptr;
    }
    // Copy of the network
    details::CNNNetworkImplPtr net = cloneNet(layers, pstatsSrc);
    // Go over the output layers and duplicate them:
    OutputsDataMap outputs;
    network.getOutputsInfo(outputs);
    for (auto o : outputs) {
        net->addOutput(o.first);
    }
    net->setPrecision(network.getPrecision());
    net->setName(network.getName());
    net->setTargetDevice(network.getTargetDevice());

    InputsDataMap externalInputsData;
    network.getInputsInfo(externalInputsData);

    InputsDataMap clonedInputs;
    net->getInputsInfo(clonedInputs);
    for (auto &&it : externalInputsData) {
        auto inp = clonedInputs.find(it.first);
        if (inp != clonedInputs.end() && nullptr != inp->second) {
            inp->second->setInputPrecision(it.second->getInputPrecision());
            inp->second->getPreProcess() = it.second->getPreProcess();
        }
    }

    return net;
}


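// Clones the given set of layers into a fresh CNNNetworkImpl, rebuilding all
// Data objects and connections from scratch. A Data object consumed by a layer
// outside the cloned set becomes a network output; a Data object whose creator
// is outside the set gets its Input/Const creator cloned, or a synthetic Input
// layer when there is no suitable original creator.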
details::CNNNetworkImplPtr cloneNet(const std::vector<CNNLayerPtr>& layers,
                                    const ICNNNetworkStats* networkStats,
                                    std::function<CNNLayerPtr(const CNNLayer&)> layerCloner) {
    // TODO: the layerCloner std::function is heavy and can be replaced with an
    // llvm::function_ref-like lightweight callable when we add one
    auto net = std::make_shared<InferenceEngine::details::CNNNetworkImpl>();

    // Src to cloned data map
    std::unordered_map<InferenceEngine::DataPtr, InferenceEngine::DataPtr> dataMap;
    // Cloned to src data map
    std::unordered_map<InferenceEngine::DataPtr, InferenceEngine::DataPtr> clonedDataMap;
    std::vector<InferenceEngine::DataPtr> clonedDatas;

    auto createDataImpl = [&](const InferenceEngine::DataPtr& data) {
        assert(nullptr != data);
        if (!contains(dataMap, data)) {
            auto clonedData = cloneData(*data);
            dataMap[data] = clonedData;
            clonedDataMap[clonedData] = data;
            clonedDatas.push_back(clonedData);
            net->getData(clonedData->getName()) = clonedData;
            return clonedData;
        }
        return dataMap[data];
    };

    auto cloneLayerImpl = [&](const CNNLayer &srcLayer) {
        CNNLayerPtr clonedLayer = layerCloner(srcLayer);
        clonedLayer->_fusedWith = nullptr;
        // We will need to reconstruct all connections in the new graph
        clonedLayer->outData.clear();
        clonedLayer->insData.clear();
        net->addLayer(clonedLayer);
        return clonedLayer;
    };

    for (auto&& srcLayer : layers) {
        CNNLayerPtr clonedLayer = cloneLayerImpl(*srcLayer);
        for (auto&& src : srcLayer->insData) {
            auto data = src.lock();
            auto clonedData = createDataImpl(data);

            string inputName;
            // Find the input name
            for (auto&& inp : data->getInputTo()) {
                if (srcLayer == inp.second) {
                    inputName = inp.first;
                    break;
                }
            }
            assert(!inputName.empty());
            clonedData->getInputTo().insert({ inputName, clonedLayer });
            clonedLayer->insData.push_back(clonedData);
        }

        for (auto&& data : srcLayer->outData) {
            auto clonedData = createDataImpl(data);
            clonedData->getCreatorLayer() = clonedLayer;
            clonedLayer->outData.push_back(clonedData);
            for (auto&& inp : data->getInputTo()) {
                auto layer = inp.second;
                // TODO(amalyshe): is this the best place to check for priorbox
                // and to remove such edges from the outputs?
                if (std::find(layers.begin(), layers.end(), layer) == layers.end() &&
                    !(CaselessEq<string>()(layer->type, "priorbox") ||
                      CaselessEq<string>()(layer->type, "PriorBoxClustered"))) {
                    net->addOutput(data->getName());
                    break;
                }
            }
        }
    }

    for (auto&& data : clonedDatas) {
        auto layer = data->getCreatorLayer().lock();
        // Create an artificial input layer because logic in some algorithms
        // relies on the existence of such layers in the network
        if (nullptr == layer) {
            assert(contains(clonedDataMap, data));
            auto originalData = clonedDataMap[data];
            assert(nullptr != originalData);

            if (auto originalLayer = originalData->getCreatorLayer().lock()) {
                if (CaselessEq<string>()(originalLayer->type, "input") ||
                    CaselessEq<string>()(originalLayer->type, "const")) {
                    layer = cloneLayerImpl(*originalLayer);
                    layer->outData.push_back(data);
                    data->getCreatorLayer() = layer;
                }
            }

            if (nullptr == layer) {
                LayerParams params;
                params.name = data->getName();
                params.precision = data->getPrecision();
                params.type = "Input";
                layer = std::make_shared<CNNLayer>(params);
                // These updates should ideally happen transactionally
                layer->outData.push_back(data);
                data->getCreatorLayer() = layer;
                net->addLayer(layer);
            }
        }
        if (CaselessEq<string>()(layer->type, "input")) {
            auto input = std::make_shared<InferenceEngine::InputInfo>();
            input->setInputData(data);
            net->setInputInfo(input);
        }
    }

    net->resolveOutput();

    // Clone the statistics
    InferenceEngine::ICNNNetworkStats* pstatsTarget = nullptr;
    if (networkStats != nullptr && !networkStats->isEmpty()) {
        StatusCode st = net->getStats(&pstatsTarget, nullptr);
        if (st == StatusCode::OK && pstatsTarget) {
            pstatsTarget->setNodesStats(networkStats->getNodesStats());
        }
    }

    return net;
}

namespace traverse {

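// Expands a traversal in the forward direction: queues every consumer of the
// layer's output data.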
void forward(const CNNLayerPtr& layer, std::deque<InferenceEngine::CNNLayerPtr>& layers) {
    for (const auto& out : layer->outData) {
        for (const auto& out_link : out->getInputTo()) {
            const auto& nextLayer = out_link.second;
            if (nullptr != nextLayer) {
                layers.emplace_back(nextLayer);
            }
        }
    }
}

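// Expands a traversal in the backward direction: queues the creator of every
// input data object, skipping Input layers which have no predecessors.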
void backward(const CNNLayerPtr& layer, std::deque<InferenceEngine::CNNLayerPtr>& layers) {
    for (const auto& data : layer->insData) {
        const auto data_ptr = data.lock();
        const auto creatorLayer = data_ptr->getCreatorLayer().lock();
        if (nullptr != creatorLayer &&
            creatorLayer->type != "Input" &&
            creatorLayer->type != "input") {
            layers.emplace_back(creatorLayer);
        }
    }
}

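// Starts a traversal from the layers directly attached to the network inputs
// and delegates to the traverse() overload that operates on the collected
// layer list, repeatedly applying 'apply' and expanding via 'expand'.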
void traverse(InferenceEngine::ICNNNetwork& network,
              std::function<void(InferenceEngine::CNNLayerPtr& layer)> apply,
              std::function<void(const InferenceEngine::CNNLayerPtr& layer, std::deque<InferenceEngine::CNNLayerPtr>& layers)> expand) {
    std::vector<InferenceEngine::CNNLayerPtr> layers;

    InferenceEngine::InputsDataMap inputs;
    network.getInputsInfo(inputs);
    for (const auto& input : inputs) {
        const auto data = input.second->getInputData();
        for (const auto& to : data->getInputTo()) {
            const auto nextLayer = to.second;
            assert(nullptr != nextLayer);
            layers.emplace_back(nextLayer);
        }
    }

    traverse(layers, apply, expand);
}

}  // namespace traverse


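// Helper that emits graph nodes and edges in DOT (GraphViz) syntax. Layers and
// Data objects become differently styled nodes; the bookkeeping sets make sure
// each node is printed at most once.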
struct NodePrinter {
    enum FILL_COLOR { DATA, SUPPORTED_LAYER, UNSUPPORTED_LAYER };

    std::unordered_set<InferenceEngine::Data*> printed_data;
    std::unordered_set<InferenceEngine::CNNLayer*> printed_layers;
    std::ostream &out;

    printer_callback layer_cb;

    explicit NodePrinter(std::ostream &os, printer_callback cb)
        : out(os), layer_cb(std::move(cb)) {}

    bool isPrinted(const CNNLayerPtr &layer) {
        return static_cast<bool>(printed_layers.count(layer.get()));
    }

    bool isPrinted(const DataPtr &datum) {
        return static_cast<bool>(printed_data.count(datum.get()));
    }

    string colorToStr(FILL_COLOR color) {
        switch (color) {
            case DATA:
                return "#FCF6E3";
            case SUPPORTED_LAYER:
                return "#D9EAD3";
            case UNSUPPORTED_LAYER:
                return "#F4CCCC";
            default:
                return "#FFFFFF";
        }
    }

    string formatSize_(const std::vector<unsigned int>& spatialDims) {
        string result;
        if (spatialDims.empty()) return result;
        result = std::to_string(spatialDims[0]);
        // The first dimension is already in 'result'; append the remaining ones
        for (size_t i = 1; i < spatialDims.size(); ++i) {
            result += "x" + std::to_string(spatialDims[i]);
        }
        return result;
    }

    string cleanNodeName_(string node_name) const {
        // Remove dots and replace dashes and colons in the node name:
        // these characters are displayed incorrectly in xdot
        node_name.erase(std::remove(node_name.begin(), node_name.end(), '.'), node_name.end());
        std::replace(node_name.begin(), node_name.end(), '-', '_');
        std::replace(node_name.begin(), node_name.end(), ':', '_');
        return node_name;
    }

    void printLayerNode(const CNNLayerPtr &layer) {
        auto node_name = "layer_" + cleanNodeName_(layer->name);
        printed_layers.insert(layer.get());

        ordered_properties printed_properties;

        ordered_properties node_properties = {
            {"shape", "box"},
            {"style", "filled"},
            {"fillcolor", colorToStr(SUPPORTED_LAYER)}
        };

        auto type = layer->type;
        printed_properties.emplace_back("type", type);

        if (type == "Convolution") {
            auto* conv = dynamic_cast<ConvolutionLayer*>(layer.get());

            unsigned int
                depth = conv->_out_depth,
                group = conv->_group;

            // Each range below covers the full [begin, end) span of the property vector
            printed_properties.emplace_back("kernel size", formatSize_({&(conv->_kernel[0]), &(conv->_kernel[0]) + conv->_kernel.size()}));
            printed_properties.emplace_back("output depth", std::to_string(depth));
            printed_properties.emplace_back("group", std::to_string(group));
            printed_properties.emplace_back("padding begin", formatSize_({&(conv->_padding[0]), &(conv->_padding[0]) + conv->_padding.size()}));
            printed_properties.emplace_back("padding end", formatSize_({&(conv->_pads_end[0]), &(conv->_pads_end[0]) + conv->_pads_end.size()}));
            printed_properties.emplace_back("strides", formatSize_({&(conv->_stride[0]), &(conv->_stride[0]) + conv->_stride.size()}));
            printed_properties.emplace_back("dilations", formatSize_({&(conv->_dilation[0]), &(conv->_dilation[0]) + conv->_dilation.size()}));
        } else if (type == "Pooling") {
            auto* pool = dynamic_cast<PoolingLayer*>(layer.get());

            printed_properties.emplace_back("window size", formatSize_({&(pool->_kernel[0]), &(pool->_kernel[0]) + pool->_kernel.size()}));
            printed_properties.emplace_back("padding begin", formatSize_({&(pool->_padding[0]), &(pool->_padding[0]) + pool->_padding.size()}));
            printed_properties.emplace_back("padding end", formatSize_({&(pool->_pads_end[0]), &(pool->_pads_end[0]) + pool->_pads_end.size()}));
            printed_properties.emplace_back("strides", formatSize_({&(pool->_stride[0]), &(pool->_stride[0]) + pool->_stride.size()}));
        } else if (type == "ReLU") {
            auto* relu = dynamic_cast<ReLULayer*>(layer.get());

            float negative_slope = relu->negative_slope;

            if (negative_slope != 0.0f)
                printed_properties.emplace_back("negative_slope", std::to_string(negative_slope));
        } else if (type == "Eltwise") {
            auto* eltwise = dynamic_cast<EltwiseLayer*>(layer.get());

            std::string operation;
            switch (eltwise->_operation) {
                case EltwiseLayer::Sum:           operation = "Sum";           break;
                case EltwiseLayer::Prod:          operation = "Prod";          break;
                case EltwiseLayer::Max:           operation = "Max";           break;
                case EltwiseLayer::Sub:           operation = "Sub";           break;
                case EltwiseLayer::Min:           operation = "Min";           break;
                case EltwiseLayer::Div:           operation = "Div";           break;
                case EltwiseLayer::Squared_diff:  operation = "Squared_diff";  break;
                case EltwiseLayer::Equal:         operation = "Equal";         break;
                case EltwiseLayer::Not_equal:     operation = "Not_equal";     break;
                case EltwiseLayer::Less:          operation = "Less";          break;
                case EltwiseLayer::Less_equal:    operation = "Less_equal";    break;
                case EltwiseLayer::Greater:       operation = "Greater";       break;
                case EltwiseLayer::Greater_equal: operation = "Greater_equal"; break;
                case EltwiseLayer::Logical_AND:   operation = "Logical_AND";   break;
                case EltwiseLayer::Logical_OR:    operation = "Logical_OR";    break;
                case EltwiseLayer::Logical_XOR:   operation = "Logical_XOR";   break;
                default:                          break;  // leave 'operation' empty for unlisted values
            }

            printed_properties.emplace_back("operation", operation);
        }

        if (layer_cb != nullptr) {
            layer_cb(layer, printed_properties, node_properties);
        }

        printNode(node_name, layer->name, node_properties, printed_properties);
    }

    void printDataNode(const std::shared_ptr<Data> &data) {
        auto node_name = "data_" + cleanNodeName_(data->getName());
        printed_data.insert(data.get());

        ordered_properties printed_properties;
        ordered_properties node_properties = {
            {"shape", "ellipse"},
            {"style", "filled"},
            {"fillcolor", colorToStr(DATA)}
        };

        std::stringstream dims_ss;
        size_t idx = data->getTensorDesc().getDims().size();
        dims_ss << '[';
        for (auto &dim : data->getTensorDesc().getDims()) {
            dims_ss << dim << ((--idx) != 0u ? ", " : "");
        }
        dims_ss << ']';

        printed_properties.emplace_back("dims", dims_ss.str());
        printed_properties.emplace_back("precision", data->getPrecision().name());

        printNode(node_name, data->getName(), node_properties, printed_properties);
    }

    void printNode(string const &node_name, const string &node_title,
                   ordered_properties const &node_properties,
                   ordered_properties const &printed_properties) {
        // Normalize the names by replacing prohibited symbols such as "/"
        string nodeNameN = node_name;
        std::replace(nodeNameN.begin(), nodeNameN.end(), '/', '_');
        string dataNameN = node_title;
        std::replace(dataNameN.begin(), dataNameN.end(), '/', '_');

        out << '\t' << nodeNameN << " [";
        for (auto &node_property : node_properties) {
            out << node_property.first << "=\"" << node_property.second << "\", ";
        }

        out << "label=\"" << node_title;
        for (auto &printed_property : printed_properties) {
            out << "\\n" << printed_property.first << ": " << printed_property.second;
        }
        out << "\"];\n";
    }

    void printEdge(const CNNLayerPtr &from_, const DataPtr &to_, bool reverse) {
        auto from_name = "layer_" + cleanNodeName_(from_->name);
        auto to_name = "data_" + cleanNodeName_(to_->getName());
        std::replace(from_name.begin(), from_name.end(), '/', '_');
        std::replace(to_name.begin(), to_name.end(), '/', '_');
        if (reverse)
            std::swap(from_name, to_name);
        out << '\t' << from_name << " -> " << to_name << ";\n";
    }
};

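// Dumps the network to 'out' in DOT (GraphViz) format. Layers are printed in
// topological order, and the optional 'layer_cb' lets a caller amend the
// printed properties of each layer node. A minimal usage sketch (assuming
// 'network' is an ICNNNetwork reference):
//     std::ofstream file("graph.dot");
//     saveGraphToDot(network, file, nullptr);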
void saveGraphToDot(InferenceEngine::ICNNNetwork &network, std::ostream &out, printer_callback layer_cb) {
    NodePrinter printer(out, std::move(layer_cb));

    CNNLayerSet inputs;
    for (auto& data : getRootDataObjects(network)) {
        assert(nullptr != data);
        for (auto& l : data->getInputTo()) {
            inputs.insert(l.second);
        }
    }

    out << "strict digraph Network {\n";
    // Traverse the graph and print the nodes
    for (const auto &layer : details::CNNNetSortTopologically(network)) {
        printer.printLayerNode(layer);

        // Print output Data objects
        for (auto &dataptr : layer->outData) {
            if (!printer.isPrinted(dataptr)) {
                printer.printDataNode(dataptr);
            }
            printer.printEdge(layer, dataptr, false);
        }

        // Print input Data objects
        for (auto &datum : layer->insData) {
            auto dataptr = datum.lock();
            if (!printer.isPrinted(dataptr)) {
                printer.printDataNode(dataptr);
            }
            // To avoid keeping an additional set of printed edges, the
            // 'strict' keyword of the digraph is used to remove duplicates
            printer.printEdge(layer, dataptr, true);
        }
    }
    out << "}" << std::endl;
}

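// Collects the Data objects produced by Input and Const layers, i.e. the
// roots from which the rest of the graph can be reached.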
std::unordered_set<DataPtr> getRootDataObjects(ICNNNetwork &network) {
    std::unordered_set<DataPtr> ret;
    details::CNNNetworkIterator i(&network);
    while (i != details::CNNNetworkIterator()) {
        CNNLayer::Ptr layer = *i;

        // TODO: Data without creatorLayer
        if (CaselessEq<string>()(layer->type, "input") ||
            CaselessEq<string>()(layer->type, "const")) {
            ret.insert(layer->outData.begin(), layer->outData.end());
        }
        i++;
    }
    return ret;
}

}  // namespace InferenceEngine