1 // Copyright (C) 2018-2020 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
5 #include "ie_util_internal.hpp"
14 #include <unordered_map>
15 #include <unordered_set>
19 #include "details/caseless.hpp"
20 #include "details/ie_cnn_network_tools.h"
21 #include "details/os/os_filesystem.hpp"
22 #include "file_utils.h"
23 #include "graph_tools.hpp"
25 #include "precision_utils.h"
29 namespace InferenceEngine {
31 using namespace details;
33 DataPtr cloneData(const InferenceEngine::Data& source) {
34 auto cloned = std::make_shared<InferenceEngine::Data>(source);
35 if (cloned != nullptr) {
36 cloned->getCreatorLayer().reset();
37 cloned->getInputTo().clear();
44 CNNLayerPtr layerCloneImpl(const CNNLayer* source) {
45 auto layer = dynamic_cast<const T*>(source);
46 if (nullptr != layer) {
47 auto newLayer = std::make_shared<T>(*layer);
48 newLayer->_fusedWith = nullptr;
49 newLayer->outData.clear();
50 newLayer->insData.clear();
51 return std::static_pointer_cast<CNNLayer>(newLayer);
56 /* Make this function explicit for TensorIterator layer
57 * because of specific handling of the body field */
59 CNNLayerPtr layerCloneImpl<TensorIterator>(const CNNLayer* source) {
60 auto layer = dynamic_cast<const TensorIterator*>(source);
61 if (nullptr != layer) {
62 auto newLayer = std::make_shared<TensorIterator>(*layer);
63 newLayer->_fusedWith = nullptr;
64 newLayer->outData.clear();
65 newLayer->insData.clear();
67 newLayer->body = NetPass::CopyTIBody(newLayer->body);
69 return std::static_pointer_cast<CNNLayer>(newLayer);
76 CNNLayerPtr clonelayer(const CNNLayer& source) {
77 using fptr = CNNLayerPtr (*)(const CNNLayer*);
78 // Most derived layers must go first in this list
79 static const fptr cloners[] = {&layerCloneImpl<ExperimentalDetectronTopKROIs>,
80 &layerCloneImpl<ExperimentalDetectronGenerateProposalsSingleImageLayer>,
81 &layerCloneImpl<ExperimentalDetectronPriorGridGeneratorLayer>,
82 &layerCloneImpl<ScatterUpdateLayer>,
83 &layerCloneImpl<ScatterElementsUpdateLayer>,
84 &layerCloneImpl<NonMaxSuppressionLayer>,
85 &layerCloneImpl<SelectLayer>,
86 &layerCloneImpl<BatchNormalizationLayer>,
87 &layerCloneImpl<TopKLayer>,
88 &layerCloneImpl<PowerLayer>,
89 &layerCloneImpl<ScaleShiftLayer>,
90 &layerCloneImpl<PReLULayer>,
91 &layerCloneImpl<TileLayer>,
92 &layerCloneImpl<ReshapeLayer>,
93 &layerCloneImpl<CropLayer>,
94 &layerCloneImpl<EltwiseLayer>,
95 &layerCloneImpl<GemmLayer>,
96 &layerCloneImpl<PadLayer>,
97 &layerCloneImpl<GatherLayer>,
98 &layerCloneImpl<StridedSliceLayer>,
99 &layerCloneImpl<ShuffleChannelsLayer>,
100 &layerCloneImpl<DepthToSpaceLayer>,
101 &layerCloneImpl<SpaceToDepthLayer>,
102 &layerCloneImpl<SpaceToBatchLayer>,
103 &layerCloneImpl<BatchToSpaceLayer>,
104 &layerCloneImpl<SparseFillEmptyRowsLayer>,
105 &layerCloneImpl<SparseSegmentReduceLayer>,
106 &layerCloneImpl<ExperimentalSparseWeightedReduceLayer>,
107 &layerCloneImpl<SparseToDenseLayer>,
108 &layerCloneImpl<BucketizeLayer>,
109 &layerCloneImpl<ReverseSequenceLayer>,
110 &layerCloneImpl<RangeLayer>,
111 &layerCloneImpl<FillLayer>,
112 &layerCloneImpl<BroadcastLayer>,
113 &layerCloneImpl<MathLayer>,
114 &layerCloneImpl<ReduceLayer>,
115 &layerCloneImpl<ClampLayer>,
116 &layerCloneImpl<ReLULayer>,
117 &layerCloneImpl<SoftMaxLayer>,
118 &layerCloneImpl<GRNLayer>,
119 &layerCloneImpl<MVNLayer>,
120 &layerCloneImpl<NormLayer>,
121 &layerCloneImpl<SplitLayer>,
122 &layerCloneImpl<ConcatLayer>,
123 &layerCloneImpl<FullyConnectedLayer>,
124 &layerCloneImpl<PoolingLayer>,
125 &layerCloneImpl<DeconvolutionLayer>,
126 &layerCloneImpl<DeformableConvolutionLayer>,
127 &layerCloneImpl<ConvolutionLayer>,
128 &layerCloneImpl<TensorIterator>,
129 &layerCloneImpl<RNNSequenceLayer>,
130 &layerCloneImpl<LSTMCell>,
131 &layerCloneImpl<GRUCell>,
132 &layerCloneImpl<RNNCell>,
133 &layerCloneImpl<QuantizeLayer>,
134 &layerCloneImpl<BinaryConvolutionLayer>,
135 &layerCloneImpl<WeightableLayer>,
136 &layerCloneImpl<OneHotLayer>,
137 &layerCloneImpl<CNNLayer>,
138 &layerCloneImpl<UniqueLayer>};
139 for (auto cloner : cloners) {
140 auto cloned = cloner(&source);
141 if (nullptr != cloned) {
145 assert(!"All layers derived from CNNLayer so we must never get here");
146 return nullptr; // Silence "control may reach end of non-void function" warning
// Creates a copy of the given network.
// For nGraph-based networks (getFunction() != nullptr) a CNNNetwork is built
// from the function and the input/output precision, layout and preprocess
// info of the original are propagated onto the clone; legacy networks are
// delegated to cloneNet(). NOTE(review): the closing braces and the return
// statement of the nGraph branch appear to be missing from this view of the
// file — confirm against upstream before relying on this text.
std::shared_ptr<ICNNNetwork> cloneNetwork(const ICNNNetwork& network) {
if (auto func = network.getFunction()) {
CNNNetwork net(func);
// Snapshot the original I/O maps and fetch the clone's I/O maps so the
// metadata can be copied across by name.
InputsDataMap originInputs;
OutputsDataMap originOutputs;
network.getInputsInfo(originInputs);
network.getOutputsInfo(originOutputs);
InputsDataMap clonedInputs = net.getInputsInfo();
OutputsDataMap clonedOutputs = net.getOutputsInfo();
// Every original output must exist in the clone; copy precision/layout.
for (const auto& outputInfo : originOutputs) {
if (clonedOutputs.find(outputInfo.first) == clonedOutputs.end())
THROW_IE_EXCEPTION << "Cannot clone network! Cloned network doesn't contain all outputs";
clonedOutputs[outputInfo.first]->setPrecision(outputInfo.second->getPrecision());
clonedOutputs[outputInfo.first]->setLayout(outputInfo.second->getLayout());
// Same for inputs, additionally copying preprocessing settings.
for (const auto& inputInfo : originInputs) {
if (clonedInputs.find(inputInfo.first) == clonedInputs.end())
THROW_IE_EXCEPTION << "Cannot clone network! Cloned network doesn't contain all inputs";
clonedInputs[inputInfo.first]->setPrecision(inputInfo.second->getPrecision());
clonedInputs[inputInfo.first]->setLayout(inputInfo.second->getLayout());
clonedInputs[inputInfo.first]->getPreProcess() = inputInfo.second->getPreProcess();
// Legacy (non-nGraph) path: deep-copy via the layer-based cloner.
return cloneNet(network);
// Creates a legacy CNNNetworkImpl deep copy of the given network, then
// re-aligns outputs, precision, name and input metadata with the original.
details::CNNNetworkImplPtr cloneNet(const ICNNNetwork& origin_network) {
std::shared_ptr<ICNNNetwork> clonedNetwork;
// Call conversion only on the copy of nGraph function
if (auto func = origin_network.getFunction()) {
clonedNetwork = cloneNetwork(origin_network);
// Operate on the converted clone when one was made, otherwise directly
// on the original network.
const ICNNNetwork& network = (clonedNetwork) ? *clonedNetwork : origin_network;
// Collect all layers by iterating the graph.
// NOTE(review): as written the loop body never advances `i` — the
// iterator increment appears to have been lost in this view; confirm
// against upstream.
std::vector<CNNLayerPtr> layers;
details::CNNNetworkIterator i(&network);
while (i != details::CNNNetworkIterator()) {
layers.push_back(*i);
// copy of the network
details::CNNNetworkImplPtr net = cloneNet(layers);
// going over output layers and aligning output ports and outputs
OutputsDataMap outputs;
network.getOutputsInfo(outputs);
OutputsDataMap outputInfo;
net->getOutputsInfo(outputInfo);
// Keep outputs that exist in both; register missing ones explicitly.
for (auto o : outputs) {
auto it = outputInfo.find(o.first);
if (it != outputInfo.end()) {
outputInfo.erase(it);
net->addOutput(o.first);
// remove output ports which unconnected with outputs
for (auto o : outputInfo) {
net->removeOutput(o.first);
// setPrecision is deprecated API; suppress warnings while mirroring it.
IE_SUPPRESS_DEPRECATED_START
net->setPrecision(network.getPrecision());
IE_SUPPRESS_DEPRECATED_END
net->setName(network.getName());
// Propagate per-input precision and preprocessing onto the clone.
InputsDataMap externalInputsData;
network.getInputsInfo(externalInputsData);
InputsDataMap clonedInputs;
net->getInputsInfo(clonedInputs);
for (auto&& it : externalInputsData) {
auto inp = clonedInputs.find(it.first);
if (inp != clonedInputs.end() && nullptr != inp->second) {
inp->second->setPrecision(it.second->getPrecision());
inp->second->getPreProcess() = it.second->getPreProcess();
// Builds a new CNNNetworkImpl from deep copies of the given layers,
// reconstructing every layer<->data connection from scratch. Data objects
// reachable from the layers are cloned exactly once (dataMap memoizes them);
// data with no creator inside the layer set gets an artificial Input/Const/
// Memory layer so downstream algorithms always see well-formed graphs.
details::CNNNetworkImplPtr cloneNet(const std::vector<CNNLayerPtr>& layers) {
auto net = std::make_shared<InferenceEngine::details::CNNNetworkImpl>();
// Src to cloned data map
std::unordered_map<InferenceEngine::DataPtr, InferenceEngine::DataPtr> dataMap;
// Cloned to src data map
std::unordered_map<InferenceEngine::DataPtr, InferenceEngine::DataPtr> clonedDataMap;
std::vector<InferenceEngine::DataPtr> clonedDatas;
// Memoized per-Data clone: clones on first sight, registers the clone in
// the network, and always returns the clone for the given source Data.
auto createDataImpl = [&](const InferenceEngine::DataPtr& data) {
assert(nullptr != data);
if (!contains(dataMap, data)) {
auto clonedData = cloneData(*data);
dataMap[data] = clonedData;
clonedDataMap[clonedData] = data;
clonedDatas.push_back(clonedData);
net->getData(clonedData->getName()) = clonedData;
return dataMap[data];
// Clones a layer with all its connections stripped and adds it to `net`.
auto cloneLayerImpl = [&](const CNNLayer& srcLayer) {
CNNLayerPtr clonedLayer = clonelayer(srcLayer);
clonedLayer->_fusedWith = nullptr;
// We will need to reconstruct all connections in new graph
clonedLayer->outData.clear();
clonedLayer->insData.clear();
net->addLayer(clonedLayer);
// Pass 1: clone each layer and rebuild its input/output edges.
for (auto&& srcLayer : layers) {
CNNLayerPtr clonedLayer = cloneLayerImpl(*srcLayer);
for (auto&& src : srcLayer->insData) {
auto data = src.lock();
auto clonedData = createDataImpl(data);
// Recover the input-port name under which this layer consumes `data`.
for (auto&& inp : data->getInputTo()) {
if (srcLayer == inp.second) {
inputName = inp.first;
assert(!inputName.empty());
clonedData->getInputTo().insert({inputName, clonedLayer});
clonedLayer->insData.push_back(clonedData);
for (auto&& data : srcLayer->outData) {
auto clonedData = createDataImpl(data);
clonedData->getCreatorLayer() = clonedLayer;
clonedLayer->outData.push_back(clonedData);
// A consumer outside the cloned layer set means this data object is
// an external output of the sub-network being cloned.
for (auto&& inp : data->getInputTo()) {
auto layer = inp.second;
// TODO(amalyshe) is it the best place to check priorbox and remove
// such edge from outputs?
if (std::find(layers.begin(), layers.end(), layer) == layers.end() &&
!(CaselessEq<string>()(layer->type, "priorbox") ||
CaselessEq<string>()(layer->type, "PriorBoxClustered"))) {
net->addOutput(data->getName());
// Pass 2: give creator-less cloned data objects an artificial producer.
for (auto&& data : clonedDatas) {
auto layer = data->getCreatorLayer().lock();
// create an artificial input layer because logic in some algorithms rely
// on existence of these layers in the network
if (nullptr == layer) {
assert(contains(clonedDataMap, data));
auto originalData = clonedDataMap[data];
assert(nullptr != originalData);
// Prefer cloning the original Input/Const/Memory producer when known.
if (auto originalLayer = originalData->getCreatorLayer().lock()) {
if (CaselessEq<string>()(originalLayer->type, "input") ||
CaselessEq<string>()(originalLayer->type, "const") ||
CaselessEq<string>()(originalLayer->type, "memory")) {
layer = cloneLayerImpl(*originalLayer);
layer->outData.push_back(data);
data->getCreatorLayer() = layer;
// Otherwise synthesize a plain Input layer for this data object.
if (nullptr == layer) {
LayerParams params = { data->getName(), std::string("Input"), data->getPrecision() };
layer = std::make_shared<CNNLayer>(params);
// this place should be transactional
layer->outData.push_back(data);
data->getCreatorLayer() = layer;
net->addLayer(layer);
// Register Input-typed producers in the network's InputsInfo map.
if (CaselessEq<string>()(layer->type, "input")) {
auto input = std::make_shared<InferenceEngine::InputInfo>();
input->setInputData(data);
net->setInputInfo(input);
net->resolveOutput();
// Fill colors for DOT nodes: data objects vs. supported/unsupported layers.
// (UNSOPPORTED_LAYER keeps its original spelling — it is part of the enum's
// identifier and may be referenced elsewhere.)
enum FILL_COLOR { DATA, SUPPORTED_LAYER, UNSOPPORTED_LAYER };
// Nodes already emitted, used to avoid duplicate DOT nodes during traversal.
std::unordered_set<InferenceEngine::Data*> printed_data;
std::unordered_set<InferenceEngine::CNNLayer*> printed_layers;
// Optional user hook invoked per layer to tweak printed/node properties.
printer_callback layer_cb;
explicit NodePrinter(std::ostream& os, printer_callback cb): out(os), layer_cb(std::move(cb)) {}
354 bool isPrinted(const CNNLayerPtr& layer) {
355 return static_cast<bool>(printed_layers.count(layer.get()));
358 bool isPrinted(const DataPtr& datum) {
359 return static_cast<bool>(printed_data.count(datum.get()));
// Maps a FILL_COLOR value to a DOT "fillcolor" attribute string.
// NOTE(review): the switch body (return statements and the DATA/default
// cases) appears to be truncated in this view — confirm against upstream.
string colorToStr(FILL_COLOR color) {
case SUPPORTED_LAYER:
case UNSOPPORTED_LAYER:
// Joins dimensions with 'x', e.g. {3, 3} -> "3x3x3" as written.
// NOTE(review): `result` is seeded with spatialDims[0] and the range-for
// then appends *every* element including the first, so the first dimension
// appears twice. The visible call sites (printLayerNode) compensate by
// constructing the argument from a pointer range that excludes the last
// element, so the combined output has the right length for uniform dims —
// do not "fix" either side in isolation; confirm intent before changing.
string formatSize_(const std::vector<unsigned int>& spatialDims) {
if (spatialDims.empty()) return result;
result = std::to_string(spatialDims[0]);
for (auto dim : spatialDims) {
result += "x" + std::to_string(dim);
385 string cleanNodeName_(string node_name) const {
386 // remove dot and dash symbols from node name. It is incorrectly displayed in xdot
387 node_name.erase(remove(node_name.begin(), node_name.end(), '.'), node_name.end());
388 std::replace(node_name.begin(), node_name.end(), '-', '_');
389 std::replace(node_name.begin(), node_name.end(), ':', '_');
// Emits one DOT node for the given layer, with type-specific attributes
// (kernel/stride/padding for Convolution and Pooling, negative_slope for
// ReLU, the operation name for Eltwise). The optional layer_cb hook may
// adjust both property lists before the node is printed.
// NOTE(review): several lines appear truncated in this view — the "style"
// node property, the "padding end" labels, and the `operation = "...";`
// assignments of the Eltwise chain; confirm against upstream.
void printLayerNode(const CNNLayerPtr& layer) {
auto node_name = "layer_" + cleanNodeName_(layer->name);
printed_layers.insert(layer.get());
ordered_properties printed_properties;
ordered_properties node_properties = {{"shape", "box"},
{"fillcolor", colorToStr(SUPPORTED_LAYER)}};
auto type = layer->type;
printed_properties.emplace_back("type", type);
if (type == "Convolution") {
auto* conv = dynamic_cast<ConvolutionLayer*>(layer.get());
if (conv != nullptr) {
unsigned int depth = conv->_out_depth, group = conv->_group;
// The pointer-pair ranges below intentionally exclude the last element;
// formatSize_ re-emits the first element, compensating (see formatSize_).
printed_properties.emplace_back(
"kernel size", formatSize_({&(conv->_kernel[0]), &(conv->_kernel[conv->_kernel.size() - 1])}));
printed_properties.emplace_back("output depth", std::to_string(depth));
printed_properties.emplace_back("group", std::to_string(group));
printed_properties.emplace_back(
"padding begin", formatSize_({&(conv->_padding[0]), &(conv->_padding[conv->_padding.size() - 1])}));
printed_properties.emplace_back(
formatSize_({&(conv->_pads_end[0]), &(conv->_pads_end[conv->_pads_end.size() - 1])}));
printed_properties.emplace_back(
"strides", formatSize_({&(conv->_stride[0]), &(conv->_stride[conv->_stride.size() - 1])}));
printed_properties.emplace_back(
"dilations", formatSize_({&(conv->_dilation[0]), &(conv->_dilation[conv->_dilation.size() - 1])}));
} else if (type == "Pooling") {
auto* pool = dynamic_cast<PoolingLayer*>(layer.get());
if (pool != nullptr) {
printed_properties.emplace_back(
"window size", formatSize_({&(pool->_kernel[0]), &(pool->_kernel[pool->_kernel.size() - 1])}));
printed_properties.emplace_back(
"padding begin", formatSize_({&(pool->_padding[0]), &(pool->_padding[pool->_padding.size() - 1])}));
printed_properties.emplace_back(
formatSize_({&(pool->_pads_end[0]), &(pool->_pads_end[pool->_pads_end.size() - 1])}));
printed_properties.emplace_back(
"strides", formatSize_({&(pool->_stride[0]), &(pool->_stride[pool->_stride.size() - 1])}));
} else if (type == "ReLU") {
auto* relu = dynamic_cast<ReLULayer*>(layer.get());
if (relu != nullptr) {
float negative_slope = relu->negative_slope;
// Only worth printing when it deviates from plain ReLU behavior.
if (negative_slope != 0.0f)
printed_properties.emplace_back("negative_slope", CNNLayer::ie_serialize_float(negative_slope));
} else if (type == "Eltwise") {
auto* eltwise = dynamic_cast<EltwiseLayer*>(layer.get());
if (eltwise != nullptr) {
// Translate the operation enum into a human-readable string.
std::string operation;
if (eltwise->_operation == EltwiseLayer::Sum)
else if (eltwise->_operation == EltwiseLayer::Prod)
else if (eltwise->_operation == EltwiseLayer::Max)
else if (eltwise->_operation == EltwiseLayer::Sub)
else if (eltwise->_operation == EltwiseLayer::Min)
else if (eltwise->_operation == EltwiseLayer::Div)
else if (eltwise->_operation == EltwiseLayer::Squared_diff)
operation = "Squared_diff";
else if (eltwise->_operation == EltwiseLayer::Equal)
else if (eltwise->_operation == EltwiseLayer::Not_equal)
operation = "Not_equal";
else if (eltwise->_operation == EltwiseLayer::Less)
else if (eltwise->_operation == EltwiseLayer::Less_equal)
operation = "Less_equal";
else if (eltwise->_operation == EltwiseLayer::Greater)
operation = "Greater";
else if (eltwise->_operation == EltwiseLayer::Greater_equal)
operation = "Greater_equal";
else if (eltwise->_operation == EltwiseLayer::Logical_NOT)
operation = "Logical_NOT";
else if (eltwise->_operation == EltwiseLayer::Logical_AND)
operation = "Logical_AND";
else if (eltwise->_operation == EltwiseLayer::Logical_OR)
operation = "Logical_OR";
else if (eltwise->_operation == EltwiseLayer::Logical_XOR)
operation = "Logical_XOR";
else if (eltwise->_operation == EltwiseLayer::Floor_mod)
operation = "Floor_mod";
else if (eltwise->_operation == EltwiseLayer::Pow)
else if (eltwise->_operation == EltwiseLayer::Mean)
printed_properties.emplace_back("operation", operation);
// Let the user callback adjust properties before emission.
if (layer_cb != nullptr) {
layer_cb(layer, printed_properties, node_properties);
printNode(node_name, layer->name, node_properties, printed_properties);
// Emits one DOT node (ellipse) for a data object, listing its dims,
// precision, layout, name and — when known — its creator layer.
void printDataNode(const std::shared_ptr<Data>& data) {
auto node_name = "data_" + cleanNodeName_(data->getName());
printed_data.insert(data.get());
ordered_properties printed_properties;
ordered_properties node_properties = {{"shape", "ellipse"},
{"fillcolor", colorToStr(DATA)}};
// Render dims as a comma-separated list, e.g. "1, 3, 224, 224".
std::stringstream dims_ss;
size_t idx = data->getTensorDesc().getDims().size();
for (auto& dim : data->getTensorDesc().getDims()) {
dims_ss << dim << ((--idx) != 0u ? ", " : "");
printed_properties.emplace_back("dims", dims_ss.str());
printed_properties.emplace_back("precision", data->getPrecision().name());
// Layout has an operator<< overload; go through a stream to stringify it.
std::stringstream ss;
ss << data->getTensorDesc().getLayout();
printed_properties.emplace_back("layout", ss.str());
printed_properties.emplace_back("name", data->getName());
if (data->getCreatorLayer().lock() != nullptr)
printed_properties.emplace_back("creator layer", data->getCreatorLayer().lock()->name);
printNode(node_name, data->getName(), node_properties, printed_properties);
// Emits one DOT node statement: attribute list from node_properties, then a
// label built from the title plus each printed property on its own line.
// NOTE(review): the statement terminator output ("\"];") appears to be
// truncated from this view — confirm against upstream.
void printNode(string const& node_name, const string& node_title, ordered_properties const& node_properties,
ordered_properties const& printed_properties) {
// normalization of names, removing all prohibited symbols like "/"
string nodeNameN = node_name;
std::replace(nodeNameN.begin(), nodeNameN.end(), '/', '_');
string dataNameN = node_title;
std::replace(dataNameN.begin(), dataNameN.end(), '/', '_');
out << '\t' << nodeNameN << " [";
for (auto& node_property : node_properties) {
out << node_property.first << "=\"" << node_property.second << "\", ";
// "\n" inside a DOT label must be emitted as the two characters \ and n.
out << "label=\"" << node_title;
for (auto& printed_property : printed_properties) {
out << "\\n" << printed_property.first << ": " << printed_property.second;
556 void printEdge(const CNNLayerPtr& from_, const DataPtr& to_, bool reverse) {
557 auto from_name = "layer_" + cleanNodeName_(from_->name);
558 auto to_name = "data_" + cleanNodeName_(to_->getName());
559 std::replace(from_name.begin(), from_name.end(), '/', '_');
560 std::replace(to_name.begin(), to_name.end(), '/', '_');
561 if (reverse) std::swap(from_name, to_name);
562 out << '\t' << from_name << " -> " << to_name << ";\n";
566 void saveGraphToDot(InferenceEngine::ICNNNetwork& network, std::ostream& out, printer_callback layer_cb) {
567 NodePrinter printer(out, std::move(layer_cb));
569 out << "digraph Network {\n";
570 // Traverse graph and print nodes
571 for (const auto& layer : details::CNNNetSortTopologically(network)) {
572 printer.printLayerNode(layer);
574 // Print output Data Object
575 for (auto& dataptr : layer->outData) {
576 if (!printer.isPrinted(dataptr)) {
577 printer.printDataNode(dataptr);
579 printer.printEdge(layer, dataptr, false);
582 // Print input Data objects
583 for (auto& datum : layer->insData) {
584 auto dataptr = datum.lock();
585 if (!printer.isPrinted(dataptr)) {
586 printer.printDataNode(dataptr);
588 printer.printEdge(layer, dataptr, true);
591 out << "}" << std::endl;
594 std::unordered_set<DataPtr> getRootDataObjects(ICNNNetwork& network) {
595 std::unordered_set<DataPtr> ret;
596 details::CNNNetworkIterator i(&network);
597 while (i != details::CNNNetworkIterator()) {
598 CNNLayer::Ptr layer = *i;
600 // TODO: Data without creatorLayer
601 if (CaselessEq<string>()(layer->type, "input") || CaselessEq<string>()(layer->type, "const") ||
602 CaselessEq<string>()(layer->type, "memory")) {
603 ret.insert(layer->outData.begin(), layer->outData.end());
610 } // namespace InferenceEngine