// inference-engine/src/mkldnn_plugin/mkldnn_graph_dumper.cpp
//
// Copyright 2016-2018 Intel Corporation.
//
// This software and the related documents are Intel copyrighted materials,
// and your use of them is governed by the express license under which they
// were provided to you (End User License Agreement for the Intel(R) Software
// Development Products (Version May 2017)). Unless the License provides
// otherwise, you may not use, modify, copy, publish, distribute, disclose or
// transmit this software or the related documents without Intel's prior
// written permission.
//
// This software and the related documents are provided as is, with no
// express or implied warranties, other than those that are expressly
// stated in the License.
//

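// Dumps the runtime (executable) CPU graph built by the MKLDNN plugin, either
// as an InferenceEngine network object or in Graphviz .dot format.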
#include "mkldnn_graph_dumper.h"
#include "cnn_network_impl.hpp"
#include "ie_util_internal.hpp"
#include "exec_graph_info.hpp"

#include <vector>
#include <string>
#include <memory>
#include <map>

using namespace InferenceEngine;

namespace MKLDNNPlugin {

static void copy_node_metadata(const MKLDNNNodePtr &, CNNLayer::Ptr &);
static void drawer_callback(const InferenceEngine::CNNLayerPtr, ordered_properties &, ordered_properties &);

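// Builds a lightweight CNNLayer that mirrors a single MKLDNN graph node:
// metadata is copied via copy_node_metadata() and the input/output slots are
// sized from the node's selected primitive descriptor.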
CNNLayer::Ptr convert_node(const MKLDNNNodePtr &node) {
    CNNLayer::Ptr layer(new CNNLayer({"name", "type", Precision::FP32}));
    copy_node_metadata(node, layer);

    auto &cfg = node->getSelectedPrimitiveDescriptor()->getConfig();
    layer->insData.resize(cfg.inConfs.size());
    layer->outData.resize(cfg.outConfs.size());

    return layer;
}

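// Converts the executable MKLDNN graph into an ICNNNetwork representation so
// that existing InferenceEngine serialization and visualization utilities can
// consume it.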
std::shared_ptr<ICNNNetwork> dump_graph_as_ie_net(const MKLDNNGraph &graph) {
    auto net = std::make_shared<details::CNNNetworkImpl>();

    net->setPrecision(Precision::FP32);
    net->setName("runtime_cpu_graph");
    std::map<MKLDNNNodePtr, CNNLayerPtr> node2layer;

    // Copy all nodes to network
    for (auto &node : graph.graphNodes) {
        auto layer = convert_node(node);
        node2layer[node] = layer;
        net->addLayer(layer);
    }

    // Copy all edges to network
    for (auto &node : graph.graphNodes) {
        auto pr = node2layer[node];
        auto ch_edges = node->getChildEdges();

        for (size_t i = 0; i < ch_edges.size(); i++) {
            auto edge = node->getChildEdgeAt(i);
            int out_port = edge->getInputNum();   // parent-side port index (currently unused)
            int in_port = edge->getOutputNum();   // child-side port index
            auto ch_node = edge->getChild();
            auto ch  = node2layer[ch_node];

            DataPtr data;
            if (i < pr->outData.size()) {
                std::string data_name = node->getName() + "_out" + std::to_string(i);
                pr->outData[i] = std::make_shared<Data>(data_name, edge->getDesc());
                data = pr->outData[i];
                data->creatorLayer = pr;
            } else {
                // More child edges than output slots: reuse the first output data object
                data = pr->outData[0];
            }

            data->inputTo[ch->name] = ch;
            ch->insData[in_port] = data;
        }
    }

    // Specify input data
    for (auto kvp : graph.inputNodes) {
        auto in_node = kvp.second;
        auto in_layer = node2layer[in_node];

        auto in_info = std::make_shared<InputInfo>();
        in_info->setInputData(in_layer->outData[0]);
        net->setInputInfo(in_info);
    }

    return net;
}

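// Serializes the runtime CPU graph in Graphviz .dot format.
//
// Illustrative usage (hypothetical caller; assumes a compiled MKLDNNGraph
// named `graph` and an open output stream):
//
//     std::ofstream dot_file("cpu_exec_graph.dot");
//     MKLDNNPlugin::dump_graph_as_dot(graph, dot_file);
//
// The resulting file can then be rendered with Graphviz, e.g. `dot -Tpng`.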
void dump_graph_as_dot(const MKLDNNGraph &graph, std::ostream &out) {
    auto dump_net = dump_graph_as_ie_net(graph);
    InferenceEngine::saveGraphToDot(*dump_net, out, drawer_callback);
}

//**********************************
// Special converters of metadata
//**********************************

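// Maps internal MKLDNN node types to the layer type names shown in the dumped graph.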
static std::map<Type, std::string> type_n2l {
    {Unknown, "Unknown"},
    {Generic, "Unknown"},
    {Reorder, "Reorder"},
    {Copy, "Reorder"},
    {Input, "Input"},
    {Output, "Output"},
    {Convolution, "Conv"},
    {Deconvolution, "Deconv"},
    {Convolution_Sum, "Conv_Eltw"},
    {Convolution_Activation, "Conv_Activ"},
    {Convolution_Sum_Activation, "Conv_Eltw_Activ"},
    {Activation, "Activation"},
    {Depthwise, "Depthwise"},
    {Lrn, "Lrn"},
    {Pooling, "Pool"},
    {FullyConnected, "FC"},
    {FullyConnected_Activation, "FC_Activ"},
    {SoftMax, "SoftMax"},
    {Split, "Split"},
    {Concatenation, "Concat"},
    {Power, "Power"},
    {Eltwise, "Eltwise"},
    {Crop, "Crop"},
    {Reshape, "Reshape"},
    {Tile, "Tile"},
    {SimplerNMS, "Proposal"},
    {ROIPooling, "ROIPooling"},
    {BatchNormalization, "BatchNorm"},
    {Flatten, "Flatten"},
    {Permute, "Permute"},
    {Quantize, "Quantize"},
    {BinaryConvolution, "BinaryConvolution"},
    {MemoryOutput, "MemoryIn"},
    {MemoryInput, "MemoryOut"}
};

// Node fill colors used in the .dot output
static const char BLUE[]  = "#D8D9F1";
static const char GREEN[] = "#D9EAD3";

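// Fills the CNNLayer with the node's type, name, original layer names,
// implementation type, an estimated precision, and its average perf counter.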
void copy_node_metadata(const MKLDNNNodePtr &node, CNNLayer::Ptr &layer) {
    layer->type = type_n2l[node->getType()];
    layer->name = node->getName();  // the node name serves as a unique ID

    // Original layers
    layer->params[ExecGraphInfoSerialization::ORIGIN_NAMES] = node->getOriginalLayers();

    // Implementation type name
    layer->params[ExecGraphInfoSerialization::IMPL_TYPE] = node->getPrimitiveDescriptorType();

    // Precision
    // TODO: This mapping from implementation type to precision is not fully correct.
    std::string precision = "FP32";
    auto desc = node->getSelectedPrimitiveDescriptor();
    if (desc == nullptr) {
        THROW_IE_EXCEPTION << "Internal error - descriptor is empty";
    }
    impl_desc_type impl_type = desc->getImplementationType();

    if (impl_type == gemm_blas &&
        node->getParentEdgeAt(0)->getDesc().getPrecision() == Precision::U8)  precision = "INT8";

    if ((impl_type & jit) && (impl_type & avx512) &&
        node->getParentEdgeAt(0)->getDesc().getPrecision() == Precision::U8)  precision = "INT8";

    layer->params[ExecGraphInfoSerialization::PRECISION] = precision;

    // Performance
    if (node->PerfCounter().avg() != 0) {
        layer->params[ExecGraphInfoSerialization::PERF_COUNTER] = std::to_string(node->PerfCounter().avg());
    } else {
        layer->params[ExecGraphInfoSerialization::PERF_COUNTER] = "not_executed";  // counter has not been collected yet
    }
}

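// Callback for saveGraphToDot(): exposes the collected metadata as printed
// properties and colors each node by its detected precision.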
void drawer_callback(const InferenceEngine::CNNLayerPtr layer,
        ordered_properties &printed_properties,
        ordered_properties &node_properties) {
    const auto &params = layer->params;

    // Implementation
    auto impl = params.find(ExecGraphInfoSerialization::IMPL_TYPE);
    if (impl != params.end()) {
        printed_properties.push_back({"impl", impl->second});
    }

    // Original names
    auto orig = params.find(ExecGraphInfoSerialization::ORIGIN_NAMES);
    if (orig != params.end()) {
        printed_properties.push_back({"originals", orig->second});
    }

    // Precision
    auto prec = params.find(ExecGraphInfoSerialization::PRECISION);
    if (prec != params.end()) {
        printed_properties.push_back({"precision", prec->second});
    }

    // Set color (guard against a missing precision entry; non-FP32 nodes get BLUE)
    node_properties.push_back({"fillcolor",
        (prec != params.end() && prec->second == "FP32") ? GREEN : BLUE});

    // Set xlabel containing perf counter data if available
    auto perf = layer->params.find(ExecGraphInfoSerialization::PERF_COUNTER);
    node_properties.push_back({"xlabel", (perf != layer->params.end()) ? perf->second : ""});
}

}  // namespace MKLDNNPlugin