Publishing 2019 R1 content
[platform/upstream/dldt.git] / inference-engine / src / inference_engine / builders / ie_network_builder.cpp
1 // Copyright (C) 2018-2019 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
3 //
4
#include <ie_builders.hpp>
#include "graph_tools.hpp"

#include <algorithm>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <memory>
#include <vector>
#include <string>
#include <limits>
#include <map>

#include <shape_infer/ie_reshaper.hpp>
#include "ie_format_parser.h"
#include "ie_layer_parsers.h"
#include "blob_factory.hpp"
#include <details/caseless.hpp>

#include "ie_cnn_layer_builder.h"
#include "ie_memcpy.h"
25
26 using namespace InferenceEngine;
27
28 /******************************************************************************
29  Network builder
30  ******************************************************************************/
// Delegating constructors: every public way to build a Network funnels into
// the corresponding (Context, ...) overload with a default-constructed Context.
Builder::Network::Network(const std::string &name): Builder::Network(Context(), name) {}
Builder::Network::Network(const INetwork &network): Builder::Network(Context(), network) {}
Builder::Network::Network(const ICNNNetwork &network): Builder::Network(Context(), network) {}
34
35 Builder::Network::Network(const Context& ieContext, const std::string &name) {
36     parameters["name"] = name;
37     parameters["context"] = ieContext;
38     parameters["version"] = 3;
39     parameters["layers"] = std::vector<Layer::Ptr>();
40     parameters["connections"] = std::vector<Connection>();
41 }
42
43 Builder::Network::Network(const Context& ieContext, const INetwork &network): Network(ieContext, network.getName()) {
44     for (const auto& layer : network) {
45         parameters["layers"].as<std::vector<Layer::Ptr>>().push_back(std::make_shared<Layer>(layer));
46         const auto layerConnections = network.getLayerConnections(layer->getId());
47         for (const auto& connection : layerConnections) {
48             bool found = false;
49             for (const auto& con : parameters["connections"].as<std::vector<Connection>>()) {
50                 if (con == connection) {
51                     found = true;
52                     break;
53                 }
54             }
55             if (!found) {
56                 parameters["connections"].as<std::vector<Connection>>().push_back(connection);
57             }
58         }
59     }
60 }
61
// Imports a legacy ICNNNetwork into the builder representation: every CNNLayer
// becomes a builder Layer (weights/biases/custom blobs are split off into Const
// layers feeding extra input ports), and the Data objects between layers become
// Connections. "version" = 0 marks the network as converted from ICNNNetwork.
Builder::Network::Network(const Context& ieContext, const ICNNNetwork &network): Network(ieContext, network.getName()) {
    parameters["version"] = 0;
    auto allInputs = CNNNetGetAllInputLayers(network);
    InputsDataMap inputs;
    network.getInputsInfo(inputs);
    if (inputs.empty() && allInputs.empty())
        THROW_IE_EXCEPTION << "Cannot create graph! No inputs for the topology " << network.getName();

    std::unordered_map<std::string, idx_t> name2id;  // CNN layer name -> builder layer id
    std::unordered_set<Data*> dataPtrs;              // every Data node seen; drives the connection pass below
    std::vector<CNNLayerPtr> queueLayers;            // BFS queue over the CNN graph

    // Converts one CNNLayer into a builder layer: registers its in/out Data in
    // dataPtrs, moves "weights"/"biases"/other blobs into Const layers wired to
    // ports appended after the real inputs, and records the name->id mapping.
    auto createGenericFromCNNLayer = [&](const CNNLayerPtr& cnnLayer) {
        for (const auto& data : cnnLayer->insData) {
            auto lockedData = data.lock();
            if (!lockedData)
                continue;
            if (dataPtrs.find(lockedData.get()) == dataPtrs.end()) {
                dataPtrs.insert(lockedData.get());
            }
        }
        for (const auto& data : cnnLayer->outData) {
            if (dataPtrs.find(data.get()) == dataPtrs.end()) {
                dataPtrs.insert(data.get());
            }
        }
        std::map<std::string, Blob::Ptr> blobs = cnnLayer->blobs;
        // Count only inputs whose Data is still alive; Const ports start after them.
        size_t inputsCount(0);
        for (const auto& data : cnnLayer->insData) {
            auto lockedData = data.lock();
            if (!lockedData)
                continue;
            inputsCount++;
        }
        const auto layer = builderFromCNNLayer(cnnLayer);
        idx_t layerId = addLayer(layer);

        if (blobs.find("weights") != blobs.end()) {
            idx_t constLayerId = addLayer(ConstLayer("weights").setData(blobs["weights"]));
            connect({constLayerId}, {layerId, inputsCount++});
        }
        if (blobs.find("biases") != blobs.end()) {
            // Keep the conventional port order: biases always occupy the port
            // after the weights slot, even when the weights blob is absent.
            if (blobs.find("weights") == blobs.end()) ++inputsCount;

            idx_t constLayerId = addLayer(ConstLayer("biases").setData(blobs["biases"]));
            connect({constLayerId}, {layerId, inputsCount++});
        }
        for (const auto& it : blobs) {
            if (it.first == "weights" || it.first == "biases")
                continue;
            idx_t constLayerId = addLayer(ConstLayer(it.first).setData(it.second));
            connect({constLayerId}, {layerId, inputsCount++});
        }
        name2id[layer.getName()] = layerId;
        return layerId;
    };

    // Copies the pre-processing info of a declared input onto the matching
    // builder input layer (skipped when the layer slot is empty).
    auto addPreProcessFor = [&](const InputInfo::Ptr& inputInfo) {
        auto inputLayer = getLayer(name2id[inputInfo->name()]);
        if (inputLayer->getType().empty() && inputLayer->getName().empty())
            return;

        inputLayer->getParameters()["preProcess"] = inputInfo->getPreProcess();
    };

    // Seed the traversal with the declared network inputs.
    for (auto input : inputs) {
        auto inputLayer = input.second->getInputData()->getCreatorLayer().lock();

        if (dataPtrs.find(input.second->getInputData().get()) == dataPtrs.end()) {
            dataPtrs.insert(input.second->getInputData().get());
        }

        if (!inputLayer) {
            // For v1 parser: inputs have no creator layer, so synthesize one.
            inputLayer.reset(new CNNLayer({input.second->getInputData()->getName(),
                                           "Input",
                                           input.second->getInputData()->getPrecision()}));

            inputLayer->outData.push_back(input.second->getInputData());
        }
        const auto layer = InputLayer(inputLayer->name).setPort(Port(inputLayer->outData[0]->getTensorDesc().getDims()));
        name2id[layer.getName()] = addLayer(layer);

        for (const auto &nlayer : input.second->getInputData()->getInputTo()) {
            queueLayers.push_back(nlayer.second);
        }
    }
    // Handle "input-like" layers (e.g. Memory) that are not declared inputs.
    for (auto input : allInputs) {
        auto isRealInput = std::find_if(std::begin(inputs), std::end(inputs),
                                        [&](InputsDataMap::value_type &inputInfo) {
                                            return inputInfo.second->getInputData()->getName() == input->name;
                                        });
        if (isRealInput != std::end(inputs)) {
            continue;
        }

        details::CaselessEq<std::string> eq;
        CNNLayerPtr cnnLayer = input;

        if (eq(input->type, "Memory")) {
            // A Memory layer acting as an input is remapped to MemoryInput; the
            // memory id is embedded into the name to keep it unique.
            auto memoryId = input->GetParamAsString("id");
            cnnLayer.reset(new CNNLayer({input->name + "/id=" + memoryId, "MemoryInput", input->precision}));
            cnnLayer->params = input->params;
            cnnLayer->outData = input->outData;
        }

        createGenericFromCNNLayer(cnnLayer);

        size_t count_out = 0;
        for (auto &&outData : input->outData) {
            for (auto &&nlayer : outData->getInputTo()) {
                queueLayers.push_back(nlayer.second);
            }
            count_out++;
        }
    }
    // Breadth-first conversion of every layer reachable from the inputs;
    // name2id doubles as the "visited" set.
    while (!queueLayers.empty()) {
        auto cnnLayerPtr = *queueLayers.begin();

        if (name2id.find(cnnLayerPtr->name) == name2id.end()) {
            createGenericFromCNNLayer(cnnLayerPtr);

            for (auto &&outData : cnnLayerPtr->outData) {
                for (auto &&nlayer : outData->getInputTo()) {
                    queueLayers.push_back(nlayer.second);
                }
            }
        }

        queueLayers.erase(queueLayers.begin());
    }
    // Create an explicit "Output" layer for every declared network output and
    // connect it to the producing port of its creator layer.
    std::map<std::string, DataPtr> output;
    network.getOutputsInfo(output);

    for (auto it = output.begin(); it != output.end(); it++) {
        CNNLayerPtr creator = (*it).second->getCreatorLayer().lock();
        if (name2id.find(creator->name) == name2id.end())
            THROW_IE_EXCEPTION << "Cannot find output layer " << creator->name;

        auto lastLayer = getLayer(name2id[creator->name]);
        if (lastLayer->getName() == "" && lastLayer->getType().empty())
            THROW_IE_EXCEPTION << "Cannot find output layer " << creator->name;

        std::string name = "out_" + lastLayer->getName();

        CNNLayerPtr cnnOutLayer(new CNNLayer({name, "Output", creator->outData[0]->getPrecision()}));
        cnnOutLayer->insData.push_back((*it).second);

        idx_t outLayerId = createGenericFromCNNLayer(cnnOutLayer);

        // Which output port of the creator produces this Data?
        idx_t inIdx(0);
        for (size_t i = 0; i < creator->outData.size(); i++) {
            if (creator->outData[i] == (*it).second) {
                inIdx = i;
                break;
            }
        }

        parameters["connections"].as<std::vector<Connection>>().push_back(Connection({lastLayer->getId(), inIdx}, {outLayerId}));
    }

    // Translate every Data edge into Connections: producer output port -> each
    // consumer input port registered in the Data's inputTo map.
    for (const auto dataPtr : dataPtrs) {
        auto cnnInputLayer = dataPtr->getCreatorLayer().lock();
        idx_t inIdx(0);
        if (!cnnInputLayer) {
            // For v1 parser: a Data without a creator is a network input.
            cnnInputLayer.reset(new CNNLayer({dataPtr->getName(),
                                              "Input",
                                              dataPtr->getPrecision()}));
        } else {
            for (size_t i = 0; i < cnnInputLayer->outData.size(); i++) {
                if (cnnInputLayer->outData[i].get() == dataPtr) {
                    inIdx = i;
                    break;
                }
            }
        }
        for (const auto& it : dataPtr->inputTo) {
            if (name2id.find(cnnInputLayer->name) == name2id.end() || name2id.find(it.second->name) == name2id.end())
                THROW_IE_EXCEPTION << "Cannot create connections between nodes: " << cnnInputLayer->name << " -> " << it.second->name;
            idx_t outIdx(0);

            for (size_t i = 0; i < it.second->insData.size(); i++) {
                const auto lockedData = it.second->insData[i].lock();
                if (lockedData && lockedData.get() == dataPtr) {
                    outIdx = i;
                    break;
                }
            }
            parameters["connections"].as<std::vector<Connection>>()
                .push_back(Connection({name2id[cnnInputLayer->name], inIdx}, {name2id[it.second->name], outIdx}));
        }
    }

    for (const auto &input : inputs) {
        addPreProcessFor(input.second);
    }
}
260
// Read-only access to the stored layer list.
const std::vector<Builder::Layer::Ptr>& Builder::Network::getLayers() const {
    return parameters.at("layers").as<std::vector<Layer::Ptr>>();
}
// Mutable access to the stored layer list (operator[] inserts the entry if
// it is somehow absent, unlike the const overload's .at()).
std::vector<Builder::Layer::Ptr>& Builder::Network::getLayers() {
    return parameters["layers"].as<std::vector<Layer::Ptr>>();
}
267
268 idx_t Builder::Network::addLayer(const std::vector<PortInfo> &inputs,
269                                  const Layer& layer) {
270     auto layer_id = addLayer(layer);
271     for (size_t i = 0; i < inputs.size(); i++) {
272         connect({inputs[i].layerId(), inputs[i].portId()}, {layer_id, i});
273     }
274     return layer_id;
275 }
276
277 idx_t Builder::Network::addLayer(const Layer& layer) {
278     auto getAvailableId = [&](idx_t defaultId) {
279         if (defaultId == (std::numeric_limits<idx_t>::max)())
280             defaultId = 0;
281
282         auto it = parameters["layers"].as<std::vector<Layer::Ptr>>().begin();
283         while (it != parameters["layers"].as<std::vector<Layer::Ptr>>().end()) {
284             for (it = parameters["layers"].as<std::vector<Layer::Ptr>>().begin();
285                     it != parameters["layers"].as<std::vector<Layer::Ptr>>().end(); it++) {
286                 if ((*it)->getId() == defaultId) {
287                     defaultId++;
288                     break;
289                 }
290             }
291         }
292         return defaultId;
293     };
294     auto generateAvailableName = [&](const std::string& name, idx_t id) {
295         const std::string idName = "id" + std::to_string(id);
296         std::string generatedName(name);
297         if (generatedName.empty())
298             generatedName = idName;
299         bool nameIsUnique(false);
300         while (!nameIsUnique) {
301             nameIsUnique = true;
302             for (const auto& layer : parameters["layers"].as<std::vector<Layer::Ptr>>()) {
303                 if (generatedName == layer->getName()) {
304                     nameIsUnique = false;
305                     generatedName += "_" + idName;
306                 }
307             }
308         }
309         return generatedName;
310     };
311     idx_t generatedId = getAvailableId(layer.getId());
312     const auto name = generateAvailableName(layer.getName(), generatedId);
313     parameters["layers"].as<std::vector<Layer::Ptr>>().emplace_back(std::make_shared<Layer>(generatedId, layer));
314     parameters["layers"].as<std::vector<Layer::Ptr>>()[parameters["layers"].as<std::vector<Layer::Ptr>>().size() - 1]->setName(name);
315     return generatedId;
316 }
317
// Registers a connection from output port `input` to input port `output`.
// Before storing it, the PortData objects of the two endpoints are reconciled
// so that both sides share one data object; throws when the ports carry
// genuinely conflicting parameters or blobs.
void Builder::Network::connect(const PortInfo& input, const PortInfo& output) {
    const auto mergePortData = [&]() -> bool {
        // True when `test` equals `ref` or carries no information of its own
        // (zero size, UNSPECIFIED precision, ANY layout, empty dims, null buffer).
        // NOTE(review): memcmp runs before the null-buffer alternative is
        // checked — presumably cbuffer()/byteSize() are benign for empty blobs
        // here; confirm against the Blob implementation.
        const auto blobEqualOrEmpty = [](const Blob::Ptr& ref, const Blob::Ptr& test) -> bool {
            return (ref->size() == test->size() || test->size() == 0) &&
                   (!memcmp(ref->cbuffer(), test->cbuffer(), test->byteSize())) &&
                   (ref->getTensorDesc().getPrecision() == test->getTensorDesc().getPrecision() ||
                    test->getTensorDesc().getPrecision() == Precision::UNSPECIFIED) &&
                   (ref->getTensorDesc().getLayout() == test->getTensorDesc().getLayout() ||
                    test->getTensorDesc().getLayout() == Layout::ANY) &&
                   (ref->getTensorDesc().getDims() == test->getTensorDesc().getDims() ||
                    test->getTensorDesc().getDims().empty()) &&
                   (ref->cbuffer().as<char *>() == test->cbuffer().as<char *>() ||
                    test->cbuffer() == nullptr);
        };

        const auto srcPortData = getLayer(input.layerId())->getOutputPorts()[input.portId()].getData();
        const auto dstPortData = getLayer(output.layerId())->getInputPorts()[output.portId()].getData();
        if (srcPortData == dstPortData)
            return true;  // already sharing one PortData object

        // Both sides have parameters and they differ -> irreconcilable.
        if (srcPortData->getParameters() != dstPortData->getParameters() &&
                !srcPortData->getParameters().empty() &&
                !dstPortData->getParameters().empty())
            return false;

        // Score which endpoint carries more information; the richer side's
        // PortData wins and is propagated to the other endpoint below.
        size_t srcDataCount(0), dstDataCount(0);
        if (!srcPortData->getParameters().empty()) srcDataCount++;
        if (!dstPortData->getParameters().empty()) dstDataCount++;

        const auto srcBlb = srcPortData->getData();
        const auto dstBlb = dstPortData->getData();
        if (srcBlb == dstBlb || (srcBlb->size() == dstBlb->size() &&
                srcBlb->getTensorDesc() == dstBlb->getTensorDesc() &&
                ((srcBlb->cbuffer().as<char *>() == dstBlb->cbuffer().as<char *>()) ||
                    (srcBlb->cbuffer() != nullptr && dstBlb->cbuffer() != nullptr &&
                    !memcmp(srcBlb->cbuffer(), dstBlb->cbuffer(), dstBlb->byteSize()))))) {
            // Blobs are identical: neither side gains an advantage.
            srcDataCount++;
            dstDataCount++;
        } else if (blobEqualOrEmpty(srcBlb, dstBlb)) {
            srcDataCount++;   // destination blob is empty/compatible -> source wins
        } else if (blobEqualOrEmpty(dstBlb, srcBlb)) {
            dstDataCount++;   // source blob is empty/compatible -> destination wins
        } else {
            return false;     // blobs conflict
        }

        if (dstDataCount > srcDataCount) {
            // Change source and all src destination data
            for (const auto& connection : getLayerConnections(input.layerId())) {
                if (connection.from() != input)
                    continue;
                getLayer(connection.to().layerId())->getInputPorts()[connection.to().portId()].setData(dstPortData);
            }
            getLayer(input.layerId())->getOutputPorts()[input.portId()].setData(dstPortData);
        } else {
            // Change destination data
            getLayer(output.layerId())->getInputPorts()[output.portId()].setData(srcPortData);
        }

        return true;
    };

    if (!mergePortData())
        THROW_IE_EXCEPTION << "Cannot connect two ports with different data!";

    parameters["connections"].as<std::vector<Connection>>().emplace_back(input, output);
}
385
386 void Builder::Network::removeLayer(idx_t layerId) {
387     auto it = parameters["layers"].as<std::vector<Layer::Ptr>>().begin();
388     for (; it != parameters["layers"].as<std::vector<Layer::Ptr>>().end(); it++) {
389         if ((*it)->getId() == layerId) {
390             break;
391         }
392     }
393     if (it != parameters["layers"].as<std::vector<Layer::Ptr>>().end())
394         parameters["layers"].as<std::vector<Layer::Ptr>>().erase(it);
395 }
396
397 void Builder::Network::disconnect(const Connection& connection) {
398     auto it = parameters["connections"].as<std::vector<Connection>>().begin();
399     for (; it != parameters["connections"].as<std::vector<Connection>>().end(); it++) {
400         if (connection == *it)
401             break;
402     }
403     if (it != parameters["connections"].as<std::vector<Connection>>().end())
404         parameters["connections"].as<std::vector<Connection>>().erase(it);
405
406     try {
407         auto layer = getLayer(connection.to().layerId());
408         layer->getInputPorts()[connection.to().portId()].setData(std::make_shared<PortData>());
409     } catch (InferenceEngine::details::InferenceEngineException& ex) {}
410 }
411
412 const INetwork::CPtr Builder::Network::build() {
413     validate();
414     InferenceEngine::Builder::Network::Ptr network =
415             std::make_shared<InferenceEngine::Builder::Network>(static_cast<const INetwork&>(*this));
416     return network;
417 }
418
// Validates the whole network before build():
//  1) every input/output port of every layer must be connected,
//  2) every connection must reference existing layers,
//  3) shape inference must succeed (with a fallback when shapes already agree),
//  4) every layer's own build() validation must pass.
void Builder::Network::validate() {
    // Check that all ports are connected
    for (const auto& layer : getLayers()) {
        std::vector<bool> existInCon(layer->getInputPorts().size());
        for (size_t i = 0; i < layer->getInputPorts().size(); i++) {
            // Ports carrying a "type" parameter (presumably Const-fed
            // weight/bias ports, cf. convertToICNNNetwork) count as connected.
            if (layer->getInputPorts()[i].getParameters().find("type") != layer->getInputPorts()[i].getParameters().end())
                existInCon[i] = true;
        }
        std::vector<bool> existOutCon(layer->getOutputPorts().size());

        const auto layerConnections = getLayerConnections(layer->getId());
        for (const auto& connection : layerConnections) {
            if (connection.from().layerId() == layer->getId()) {
                existOutCon[connection.from().portId()] = true;
                getLayer(connection.to().layerId());  // throws if the peer id is unknown
            }
            if (connection.to().layerId() == layer->getId()) {
                existInCon[connection.to().portId()] = true;
                getLayer(connection.from().layerId());  // throws if the peer id is unknown
            }
        }
        // AND-fold both flag vectors; any false means an unconnected port.
        bool allPortsConnected = true;
        for (const auto& cons : {existInCon, existOutCon}) {
            for (const auto &existCon : cons) {
                allPortsConnected = allPortsConnected && existCon;
            }
        }
        if (!allPortsConnected)
            THROW_IE_EXCEPTION << "Not all ports of layer " << layer->getName() << " were connected!";
    }

    // Check all layers
    for (const auto& connection : getConnections()) {
        if (!getLayer(connection.to().layerId()))
            THROW_IE_EXCEPTION << "Cannot find layer with id: " << connection.to().layerId();
        if (!getLayer(connection.from().layerId()))
            THROW_IE_EXCEPTION << "Cannot find layer with id: " << connection.from().layerId();
    }

    // Collect the declared input shapes and run shape inference from them.
    std::map<std::string, SizeVector> inputShapes;
    for (const auto& input : getInputs())
        inputShapes[input->getName()] = input->getOutputPorts()[0].shape();

    // A non-zero version selects the matching IR parser behavior.
    if (parameters.at("version").as<int>()) {
        details::BaseCreator::version_ = parameters.at("version");
    }

    ShapeInfer::Reshaper reshaper(this);
    ResponseDesc resp;
    StatusCode sts = reshaper.run(inputShapes, &resp);
    // Not all implementations may be registered if all shapes were read from IR.
    if (sts == NOT_FOUND) {
        // Accept NOT_FOUND when every connection already has matching,
        // non-empty shapes on both ends (i.e. shapes came from the IR itself).
        bool allShapesLooksGood = true;
        for (const auto& connection : getConnections()) {
            if (getLayer(connection.from().layerId())->getOutputPorts()[connection.from().portId()].shape() !=
                getLayer(connection.to().layerId())->getInputPorts()[connection.to().portId()].shape() ||
                getLayer(connection.to().layerId())->getInputPorts()[connection.to().portId()].shape().empty()) {
                allShapesLooksGood = false;
                break;
            }
        }
        if (allShapesLooksGood)
            sts = OK;
    }

    if (sts != OK)
        THROW_IE_EXCEPTION << resp.msg;

    // Check all parameters
    for (const auto& layer : getLayers()) {
        try {
            layer->build();
        } catch(InferenceEngine::details::InferenceEngineException& ex) {
            THROW_IE_EXCEPTION << "Cannot build layer " << layer->getName() << ": " << ex.what();
        } catch(std::bad_cast& ex) {
            THROW_IE_EXCEPTION << "Cannot build layer " << layer->getName() << ": " << ex.what();
        }
    }
}
498
// Converts a built INetwork back into the legacy CNNNetwork representation.
// Known layer types are produced by dedicated converters; anything else falls
// back to a generic CNNLayer. Const and Output layers are folded away rather
// than materialized as CNN layers.
const std::shared_ptr<ICNNNetwork> Builder::convertToICNNNetwork(const INetwork::CPtr& network) {
    auto createCNNLayer = [](const std::shared_ptr<const ILayer>& layer, Precision precision) {
        // Registry of type-name -> concrete CNNLayer converters.
        static std::vector<std::shared_ptr<BaseConverter>> convertors = {
                std::make_shared<LayerConverter<InferenceEngine::PowerLayer>>("Power"),
                std::make_shared<LayerConverter<InferenceEngine::ConvolutionLayer>>("Convolution"),
                std::make_shared<LayerConverter<InferenceEngine::DeconvolutionLayer>>("Deconvolution"),
                std::make_shared<LayerConverter<InferenceEngine::PoolingLayer>>("Pooling"),
                std::make_shared<LayerConverter<InferenceEngine::FullyConnectedLayer>>("InnerProduct"),
                std::make_shared<LayerConverter<InferenceEngine::FullyConnectedLayer>>("FullyConnected"),
                std::make_shared<LayerConverter<InferenceEngine::NormLayer>>("LRN"),
                std::make_shared<LayerConverter<InferenceEngine::NormLayer>>("Norm"),
                std::make_shared<LayerConverter<InferenceEngine::SoftMaxLayer>>("Softmax"),
                std::make_shared<LayerConverter<InferenceEngine::GRNLayer>>("GRN"),
                std::make_shared<LayerConverter<InferenceEngine::MVNLayer>>("MVN"),
                std::make_shared<LayerConverter<InferenceEngine::ReLULayer>>("ReLU"),
                std::make_shared<LayerConverter<InferenceEngine::ClampLayer>>("Clamp"),
                std::make_shared<LayerConverter<InferenceEngine::SplitLayer>>("Split"),
                std::make_shared<LayerConverter<InferenceEngine::SplitLayer>>("Slice"),
                std::make_shared<LayerConverter<InferenceEngine::ConcatLayer>>("Concat"),
                std::make_shared<LayerConverter<InferenceEngine::EltwiseLayer>>("Eltwise"),
                std::make_shared<LayerConverter<InferenceEngine::ScaleShiftLayer>>("ScaleShift"),
                std::make_shared<LayerConverter<InferenceEngine::PReLULayer>>("PReLU"),
                std::make_shared<LayerConverter<InferenceEngine::CropLayer>>("Crop"),
                std::make_shared<LayerConverter<InferenceEngine::ReshapeLayer>>("Reshape"),
                std::make_shared<LayerConverter<InferenceEngine::ReshapeLayer>>("Flatten"),
                std::make_shared<LayerConverter<InferenceEngine::TileLayer>>("Tile"),
                std::make_shared<LayerConverter<InferenceEngine::PadLayer>>("Pad"),
                std::make_shared<ActivationConverter>(),
                std::make_shared<RNNSequenceConverter>(),
                std::make_shared<LayerConverter<InferenceEngine::BatchNormalizationLayer>>("BatchNormalization"),
        };
        for (auto &convertor : convertors) {
            if (!convertor->canCreate(layer->getType()))
                continue;
            return convertor->createLayer(layer, precision);
        }
        // Fallback: a generic CNNLayer for unknown types.
        static LayerConverter<CNNLayer> genericCreator("");
        return genericCreator.createLayer(layer, precision);
    };

    // Registers `in_data` as a network input, normalizing precisions that have
    // no native execution support while keeping the element size.
    auto keep_input_info = [](std::unique_ptr<details::CNNNetworkImpl>& network, DataPtr &in_data,
            PreProcessInfo preProc) {
        InputInfo::Ptr info(new InputInfo());
        info->getPreProcess() = preProc;
        info->setInputData(in_data);
        Precision prc = info->getInputPrecision();

        // Convert precision into native format (keep element size)
        prc = prc == Precision::Q78 ? Precision::I16 :
              prc == Precision::FP16 ? Precision::FP32 :
              static_cast<Precision::ePrecision>(prc);

        info->setInputPrecision(prc);
        network->setInputInfo(info);
    };

    std::unique_ptr<details::CNNNetworkImpl> cnnNetworkImpl(new details::CNNNetworkImpl());

    // Detect the working precision from the first port that specifies one;
    // default to FP32 when the whole network is UNSPECIFIED.
    Precision detectedPrecision = Precision::UNSPECIFIED;
    for (const auto& layer : *network) {
        for (const auto& port : layer->getInputPorts()) {
            Precision prc = port.getData()->getData()->getTensorDesc().getPrecision();
            if (prc != Precision::UNSPECIFIED) {
                detectedPrecision = prc;
                break;
            }
        }
        for (const auto& port : layer->getOutputPorts()) {
            Precision prc = port.getData()->getData()->getTensorDesc().getPrecision();
            if (prc != Precision::UNSPECIFIED) {
                detectedPrecision = prc;
                break;
            }
        }
        if (detectedPrecision != Precision::UNSPECIFIED)
            break;
    }
    if (detectedPrecision == Precision::UNSPECIFIED)
        detectedPrecision = Precision::FP32;

    details::CaselessEq<std::string> eq;
    cnnNetworkImpl->setName(network->getName());
    cnnNetworkImpl->setPrecision(Precision::UNSPECIFIED);
    // First pass: create CNN layers, skipping internal ones. A Const layer is
    // internal when every consumer port carries a "type" parameter (i.e. it
    // only feeds weights/biases); Output layers are always internal.
    for (const auto& layer : *network) {
        bool isInternalLayer = eq(layer->getType(), "Const");
        for (const auto& connection : network->getLayerConnections(layer->getId())) {
            if (!isInternalLayer)
                break;
            if (connection.from().layerId() != layer->getId())
                continue;
            const auto& port = network->getLayer(connection.to().layerId())->getInputPorts()[connection.to().portId()];
            isInternalLayer = isInternalLayer &&
                    port.getParameters().find("type") != port.getParameters().end();
        }
        isInternalLayer = isInternalLayer || eq(layer->getType(), "Output");

        if (isInternalLayer)
            continue;

        CNNLayerPtr cnnLayer = createCNNLayer(layer, detectedPrecision);
        if (cnnNetworkImpl->getPrecision() == Precision::UNSPECIFIED) {
            cnnNetworkImpl->setPrecision(cnnLayer->precision);
        } else if (cnnNetworkImpl->getPrecision() == Precision::MIXED &&
                   cnnNetworkImpl->getPrecision() != cnnLayer->precision) {
            // NOTE(review): this condition looks off — it only (re)assigns
            // MIXED when the precision is already MIXED; presumably the intent
            // was to switch to MIXED when a layer's precision differs from the
            // network's. Confirm before relying on the reported precision.
            cnnNetworkImpl->setPrecision(Precision::MIXED);
        }

        // Size insData/outData from the real (non-Const-typed) connections.
        auto connections = network->getLayerConnections(layer->getId());
        std::unordered_set<idx_t> inputNum, outputNum;
        for (const auto& connection : connections) {
            if (connection.from().layerId() != layer->getId()) {
                const auto& port = layer->getInputPorts()[connection.to().portId()];
                if (port.getParameters().find("type") == port.getParameters().end())
                    inputNum.insert(connection.to().portId());
            } else {
                outputNum.insert(connection.from().portId());
            }
        }
        cnnLayer->insData.resize(inputNum.size());
        cnnLayer->outData.resize(outputNum.size());
        cnnNetworkImpl->addLayer(cnnLayer);
    }

    // Second pass: create the Data objects between layers and wire them up.
    for (const auto& layer : *network) {
        auto connections = network->getLayerConnections(layer->getId());
        CNNLayerPtr cnnLayer;
        StatusCode sts = cnnNetworkImpl->getLayerByName(layer->getName().c_str(), cnnLayer, nullptr);

        if (sts != OK && (eq(layer->getType(), "Output") || eq(layer->getType(), "Const")))
            continue;  // internal layers were intentionally not materialized
        else if (sts != OK)
            THROW_IE_EXCEPTION << "Cannot find CNNLayer by name " << layer->getName();

        for (const auto& connection : connections) {
            if (connection.from().layerId() != layer->getId())
                continue;  // only walk outgoing edges; incoming are handled by the producer

            const auto& outLayer = network->getLayer(connection.to().layerId());

            CNNLayerPtr cnnOutLayer;
            sts = cnnNetworkImpl->getLayerByName(outLayer->getName().c_str(), cnnOutLayer, nullptr);
            if (sts != OK && !eq(outLayer->getType(), "Output") && !eq(layer->getType(), "Const"))
                THROW_IE_EXCEPTION << "Cannot find CNNLayer by name " << outLayer->getName();

            // Data naming convention: "<layer>" for a single output,
            // "<layer>.<port>" when the layer has several outputs.
            std::string dataName = layer->getName();
            if (cnnLayer->outData.size() > 1) {
                dataName += "." + std::to_string(connection.from().portId());
            }
            DataPtr& data = cnnNetworkImpl->getData(dataName);
            if (!data) {
                TensorDesc dataDesc(detectedPrecision, layer->getOutputPorts()[connection.from().portId()].shape(),
                                    TensorDesc::getLayoutByDims(layer->getOutputPorts()[connection.from().portId()].shape()));
                data = std::make_shared<Data>(dataName, dataDesc);
                data->creatorLayer = cnnLayer;
            }
            cnnLayer->outData[connection.from().portId()] = data;

            // Map the builder port index to the CNN input index by skipping
            // Const-fed ("type"-parameterized) ports, which were not counted.
            idx_t realPortId(0);
            const auto inputPorts = outLayer->getInputPorts();
            for (size_t i = 0; i < connection.to().portId() && i < inputPorts.size(); i++) {
                if (inputPorts[i].getParameters().find("type") == inputPorts[i].getParameters().end())
                    realPortId++;
            }
            if (cnnOutLayer) {
                data->inputTo[outLayer->getName()] = cnnOutLayer;
                cnnOutLayer->insData[realPortId] = data;
            } else {
                // Consumer was an Output layer: register a network output instead.
                cnnNetworkImpl->addOutput(data->getName());
            }
        }

        cnnLayer->validateLayer();
        if (eq(cnnLayer->type, "Input")) {
            PreProcessInfo preProc;
            if (layer->getParameters().find("preProcess") != layer->getParameters().end())
                preProc = layer->getParameters().at("preProcess");
            keep_input_info(cnnNetworkImpl, *cnnLayer->outData.begin(), preProc);
        }
    }

    // Set default output precision to FP32 (for back-compatibility)
    OutputsDataMap outputsInfo;
    cnnNetworkImpl->getOutputsInfo(outputsInfo);
    for (auto outputInfo : outputsInfo) {
        if (outputInfo.second->getPrecision() != Precision::FP32 &&
            outputInfo.second->getPrecision() != Precision::I32) {
            outputInfo.second->setPrecision(Precision::FP32);
        }
    }

    return std::shared_ptr<ICNNNetwork>(cnnNetworkImpl.release());
}
691
692 Builder::Network::operator const INetwork::CPtr() {
693     return build();
694 }
695
696 const ILayer::CPtr Builder::Network::getLayer(idx_t layerId) const noexcept {
697     try {
698         for (auto& layer : getLayers()) {
699             if (layer->getId() == layerId)
700                 return layer->build();
701         }
702     } catch(...) {}
703
704     return nullptr;
705 }
706
707 Builder::Layer::Ptr Builder::Network::getLayer(idx_t layerId) {
708     for (auto& layer : getLayers()) {
709         if (layer->getId() == layerId)
710             return layer;
711     }
712     THROW_IE_EXCEPTION << "Cannot find layer with id: " << layerId;
713 }
714
const std::string& Builder::Network::getName() const noexcept {
    // Network name stored by the constructor (parameters["name"]).
    // NOTE(review): .at() (and the Parameter-to-string conversion) can throw
    // inside a noexcept function, which would terminate; "name" is always set
    // by the constructors, so this should be unreachable — confirm. Also
    // presumably the Parameter conversion yields a reference into its own
    // storage (otherwise this would return a dangling reference) — verify.
    return parameters.at("name");
}
718
const Context& Builder::Network::getContext() const noexcept {
    // Shared builder context ("context" parameter), set at construction time.
    // NOTE(review): .at() can throw inside noexcept; "context" is always set
    // by the constructors, so this should be unreachable — confirm.
    return parameters.at("context");
}
722
Context& Builder::Network::getContext() noexcept {
    // Mutable access to the shared builder context ("context" parameter).
    // NOTE(review): .at() can throw inside noexcept; "context" is always set
    // by the constructors, so this should be unreachable — confirm.
    return parameters.at("context");
}
726
727 Builder::Network::const_iterator Builder::Network::begin() const noexcept {
728     try {
729         return Network::const_iterator(this);
730     } catch (...) {
731         return Network::const_iterator(this, true);
732     }
733 }
734
735
736 Builder::Network::const_iterator Builder::Network::end() const noexcept {
737     return Network::const_iterator(this, true);
738 }
739
740 size_t Builder::Network::size() const noexcept {
741     return static_cast<size_t>(std::distance(std::begin(*this), std::end(*this)));
742 }
743
744 Builder::Network::iterator Builder::Network::begin() {
745     return Network::iterator(this);
746 }
747
748 Builder::Network::iterator Builder::Network::end() {
749     return Network::iterator(this, true);
750 }
751
752 const std::vector<ILayer::CPtr> Builder::Network::getInputs() const noexcept {
753     std::vector<ILayer::CPtr> inputs;
754     for (const auto& layer : parameters.at("layers").as<std::vector<Layer::Ptr>>()) {
755         bool isInputLayer = true;
756         for (const auto& connection : getLayerConnections(layer->getId())) {
757             if (connection.to().layerId() == layer->getId()) {
758                 isInputLayer = false;
759                 break;
760             }
761         }
762         if (isInputLayer) {
763             inputs.push_back(layer->build());
764         }
765     }
766     return inputs;
767 }
768
769 std::vector<Builder::Layer::Ptr> Builder::Network::getInputs() {
770     std::vector<Builder::Layer::Ptr> inputs;
771     for (auto& layer : parameters.at("layers").as<std::vector<Layer::Ptr>>()) {
772         bool isInputLayer = true;
773         for (const auto& connection : getLayerConnections(layer->getId())) {
774             if (connection.to().layerId() == layer->getId()) {
775                 isInputLayer = false;
776                 break;
777             }
778         }
779         if (isInputLayer) {
780             inputs.push_back(layer);
781         }
782     }
783     return inputs;
784 }
785
786 const std::vector<ILayer::CPtr> Builder::Network::getOutputs() const noexcept {
787     std::vector<ILayer::CPtr> outputs;
788     for (const auto& layer : parameters.at("layers").as<std::vector<Layer::Ptr>>()) {
789         bool isOutputLayer = true;
790         for (const auto& connection : getLayerConnections(layer->getId())) {
791             if (connection.from().layerId() == layer->getId()) {
792                 isOutputLayer = false;
793                 break;
794             }
795         }
796         if (isOutputLayer) {
797             outputs.push_back(layer->build());
798         }
799     }
800     return outputs;
801 }
802
803 std::vector<Builder::Layer::Ptr> Builder::Network::getOutputs() {
804     std::vector<Builder::Layer::Ptr> outputs;
805     for (auto& layer : parameters.at("layers").as<std::vector<Layer::Ptr>>()) {
806         bool isOutputLayer = true;
807         for (const auto& connection : getLayerConnections(layer->getId())) {
808             if (connection.from().layerId() == layer->getId()) {
809                 isOutputLayer = false;
810                 break;
811             }
812         }
813         if (isOutputLayer) {
814             outputs.push_back(layer);
815         }
816     }
817     return outputs;
818 }
819
const std::vector<Connection>& Builder::Network::getConnections() const {
    // Direct read access to the live connection list held in the parameter
    // map ("connections"). Presumably as<>() returns a reference into the
    // Parameter's own storage (otherwise this would dangle) — verify; the
    // reference is only valid while this Network is alive and the parameter
    // is not reassigned.
    return parameters.at("connections").as<std::vector<Connection>>();
}
823
824 const std::vector<Connection> Builder::Network::getLayerConnections(idx_t layerId) const noexcept {
825     std::vector<Connection> layerConnections;
826     for (const auto connection : parameters.at("connections").as<std::vector<Connection>>()) {
827         if (connection.from().layerId() == layerId || connection.to().layerId() == layerId)
828             layerConnections.push_back(connection);
829     }
830     return layerConnections;
831 }