1 // Copyright (C) 2018-2019 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
5 #include <ie_builders.hpp>
6 #include "graph_tools.hpp"
8 #include <unordered_map>
9 #include <unordered_set>
17 #include <shape_infer/ie_reshaper.hpp>
18 #include "ie_format_parser.h"
19 #include "ie_layer_parsers.h"
20 #include "blob_factory.hpp"
21 #include <details/caseless.hpp>
23 #include "ie_cnn_layer_builder.h"
24 #include "ie_memcpy.h"
26 using namespace InferenceEngine;
28 /******************************************************************************
30 ******************************************************************************/
31 Builder::Network::Network(const std::string &name): Builder::Network(Context(), name) {}
32 Builder::Network::Network(const INetwork &network): Builder::Network(Context(), network) {}
33 Builder::Network::Network(const ICNNNetwork &network): Builder::Network(Context(), network) {}
// Base constructor: initializes the generic parameter map that backs all
// network state (name, context, IR version, layer list, connection list).
Builder::Network::Network(const Context& ieContext, const std::string &name) {
    parameters["name"] = name;
    parameters["context"] = ieContext;
    parameters["version"] = 3;  // default IR version for builder-made networks
    parameters["layers"] = std::vector<Layer::Ptr>();
    parameters["connections"] = std::vector<Connection>();
// Deep-copies an INetwork: clones every layer and copies its connections.
// Each connection is reported by both of its endpoint layers, so duplicates
// are filtered before insertion.
Builder::Network::Network(const Context& ieContext, const INetwork &network): Network(ieContext, network.getName()) {
    for (const auto& layer : network) {
        parameters["layers"].as<std::vector<Layer::Ptr>>().push_back(std::make_shared<Layer>(layer));
        const auto layerConnections = network.getLayerConnections(layer->getId());
        for (const auto& connection : layerConnections) {
            // Scan already-recorded connections to skip duplicates
            // (the edge was possibly added from the peer layer's side).
            for (const auto& con : parameters["connections"].as<std::vector<Connection>>()) {
                if (con == connection) {
            // Reached only for connections not yet recorded.
            parameters["connections"].as<std::vector<Connection>>().push_back(connection);
// Imports a classic ICNNNetwork: walks the CNN graph breadth-first from its
// inputs, wraps each CNNLayer in a builder Layer, converts weight/bias blobs
// into Const layers wired as extra inputs, and recreates all data edges as
// builder Connections. Version 0 marks a network imported this way.
Builder::Network::Network(const Context& ieContext, const ICNNNetwork &network): Network(ieContext, network.getName()) {
    parameters["version"] = 0;
    auto allInputs = CNNNetGetAllInputLayers(network);
    network.getInputsInfo(inputs);
    if (inputs.empty() && allInputs.empty())
        THROW_IE_EXCEPTION << "Cannot create graph! No inputs for the topology " << network.getName();

    std::unordered_map<std::string, idx_t> name2id;  // CNN layer name -> builder layer id
    std::unordered_set<Data*> dataPtrs;              // Data objects already visited
    std::vector<CNNLayerPtr> queueLayers;            // BFS work queue

    // Wraps one CNNLayer: registers its in/out Data, creates the builder
    // layer, and attaches every blob ("weights", "biases", then the rest)
    // as a Const layer connected to the next free input port.
    auto createGenericFromCNNLayer = [&](const CNNLayerPtr& cnnLayer) {
        for (const auto& data : cnnLayer->insData) {
            auto lockedData = data.lock();
            if (dataPtrs.find(lockedData.get()) == dataPtrs.end()) {
                dataPtrs.insert(lockedData.get());
        for (const auto& data : cnnLayer->outData) {
            if (dataPtrs.find(data.get()) == dataPtrs.end()) {
                dataPtrs.insert(data.get());
        std::map<std::string, Blob::Ptr> blobs = cnnLayer->blobs;
        // Count real data inputs so Const blob layers start after them.
        size_t inputsCount(0);
        for (const auto& data : cnnLayer->insData) {
            auto lockedData = data.lock();
        const auto layer = builderFromCNNLayer(cnnLayer);
        idx_t layerId = addLayer(layer);
        // "weights" and "biases" keep fixed port positions for compatibility.
        if (blobs.find("weights") != blobs.end()) {
            idx_t constLayerId = addLayer(ConstLayer("weights").setData(blobs["weights"]));
            connect({constLayerId}, {layerId, inputsCount++});
        if (blobs.find("biases") != blobs.end()) {
            if (blobs.find("weights") == blobs.end()) ++inputsCount;  // keep the biases port index stable
            idx_t constLayerId = addLayer(ConstLayer("biases").setData(blobs["biases"]));
            connect({constLayerId}, {layerId, inputsCount++});
        // Remaining custom blobs become Const layers named after their key.
        for (const auto& it : blobs) {
            if (it.first == "weights" || it.first == "biases")
            idx_t constLayerId = addLayer(ConstLayer(it.first).setData(it.second));
            connect({constLayerId}, {layerId, inputsCount++});
        name2id[layer.getName()] = layerId;

    // Copies the InputInfo pre-processing onto the matching builder layer.
    auto addPreProcessFor = [&](const InputInfo::Ptr& inputInfo) {
        auto inputLayer = getLayer(name2id[inputInfo->name()]);
        if (inputLayer->getType().empty() && inputLayer->getName().empty())
        inputLayer->getParameters()["preProcess"] = inputInfo->getPreProcess();

    // Seed the BFS with the network's declared inputs; synthesize a CNNLayer
    // when an input Data has no creator layer.
    for (auto input : inputs) {
        auto inputLayer = input.second->getInputData()->getCreatorLayer().lock();
        if (dataPtrs.find(input.second->getInputData().get()) == dataPtrs.end()) {
            dataPtrs.insert(input.second->getInputData().get());
        inputLayer.reset(new CNNLayer({input.second->getInputData()->getName(),
                                       input.second->getInputData()->getPrecision()}));
        inputLayer->outData.push_back(input.second->getInputData());
        const auto layer = InputLayer(inputLayer->name).setPort(Port(inputLayer->outData[0]->getTensorDesc().getDims()));
        name2id[layer.getName()] = addLayer(layer);
        for (const auto &nlayer : input.second->getInputData()->getInputTo()) {
            queueLayers.push_back(nlayer.second);

    // Handle the remaining "input-like" layers (Const, Memory) that are not
    // declared as real network inputs.
    for (auto input : allInputs) {
        auto isRealInput = std::find_if(std::begin(inputs), std::end(inputs),
                                        [&](InputsDataMap::value_type &inputInfo) {
                                            return inputInfo.second->getInputData()->getName() == input->name;
        if (isRealInput != std::end(inputs)) {
        details::CaselessEq<std::string> eq;
        CNNLayerPtr cnnLayer = input;
        // Memory layers are renamed/retyped to MemoryInput so that the
        // assign/read pair can be distinguished by id.
        if (eq(input->type, "Memory")) {
            auto memoryId = input->GetParamAsString("id");
            cnnLayer.reset(new CNNLayer({input->name + "/id=" + memoryId, "MemoryInput", input->precision}));
            cnnLayer->params = input->params;
            cnnLayer->outData = input->outData;
        createGenericFromCNNLayer(cnnLayer);
        size_t count_out = 0;
        for (auto &&outData : input->outData) {
            for (auto &&nlayer : outData->getInputTo()) {
                queueLayers.push_back(nlayer.second);

    // BFS over the rest of the graph, converting each layer exactly once.
    while (!queueLayers.empty()) {
        auto cnnLayerPtr = *queueLayers.begin();
        if (name2id.find(cnnLayerPtr->name) == name2id.end()) {
            createGenericFromCNNLayer(cnnLayerPtr);
            for (auto &&outData : cnnLayerPtr->outData) {
                for (auto &&nlayer : outData->getInputTo()) {
                    queueLayers.push_back(nlayer.second);
        queueLayers.erase(queueLayers.begin());

    // Create explicit "Output" layers for every declared network output and
    // connect them to the producing port of the creator layer.
    std::map<std::string, DataPtr> output;
    network.getOutputsInfo(output);
    for (auto it = output.begin(); it != output.end(); it++) {
        CNNLayerPtr creator = (*it).second->getCreatorLayer().lock();
        if (name2id.find(creator->name) == name2id.end())
            THROW_IE_EXCEPTION << "Cannot find output layer " << creator->name;
        auto lastLayer = getLayer(name2id[creator->name]);
        if (lastLayer->getName() == "" && lastLayer->getType().empty())
            THROW_IE_EXCEPTION << "Cannot find output layer " << creator->name;
        std::string name = "out_" + lastLayer->getName();
        CNNLayerPtr cnnOutLayer(new CNNLayer({name, "Output", creator->outData[0]->getPrecision()}));
        cnnOutLayer->insData.push_back((*it).second);
        idx_t outLayerId = createGenericFromCNNLayer(cnnOutLayer);
        // Find which output port of the creator produces this Data.
        for (size_t i = 0; i < creator->outData.size(); i++) {
            if (creator->outData[i] == (*it).second) {
        parameters["connections"].as<std::vector<Connection>>().push_back(Connection({lastLayer->getId(), inIdx}, {outLayerId}));

    // Recreate every data edge between already-registered builder layers.
    for (const auto dataPtr : dataPtrs) {
        auto cnnInputLayer = dataPtr->getCreatorLayer().lock();
        if (!cnnInputLayer) {
            // Data without a creator: synthesize a placeholder layer.
            cnnInputLayer.reset(new CNNLayer({dataPtr->getName(),
                                              dataPtr->getPrecision()}));
        // Locate the producing port index of this Data on its creator.
        for (size_t i = 0; i < cnnInputLayer->outData.size(); i++) {
            if (cnnInputLayer->outData[i].get() == dataPtr) {
        for (const auto& it : dataPtr->inputTo) {
            if (name2id.find(cnnInputLayer->name) == name2id.end() || name2id.find(it.second->name) == name2id.end())
                THROW_IE_EXCEPTION << "Cannot create connections between nodes: " << cnnInputLayer->name << " -> " << it.second->name;
            // Locate the consuming port index on the destination layer.
            for (size_t i = 0; i < it.second->insData.size(); i++) {
                const auto lockedData = it.second->insData[i].lock();
                if (lockedData && lockedData.get() == dataPtr) {
            parameters["connections"].as<std::vector<Connection>>()
                .push_back(Connection({name2id[cnnInputLayer->name], inIdx}, {name2id[it.second->name], outIdx}));

    // Finally attach pre-processing info to the input layers.
    for (const auto &input : inputs) {
        addPreProcessFor(input.second);
// Read-only access to the layer list stored in the parameter map.
const std::vector<Builder::Layer::Ptr>& Builder::Network::getLayers() const {
    return parameters.at("layers").as<std::vector<Layer::Ptr>>();
// Mutable access to the layer list stored in the parameter map.
std::vector<Builder::Layer::Ptr>& Builder::Network::getLayers() {
    return parameters["layers"].as<std::vector<Layer::Ptr>>();
// Adds a layer and immediately connects the given source ports to its input
// ports 0..N-1, in order. Returns the new layer's id.
idx_t Builder::Network::addLayer(const std::vector<PortInfo> &inputs,
                                 const Layer& layer) {
    auto layer_id = addLayer(layer);
    for (size_t i = 0; i < inputs.size(); i++) {
        connect({inputs[i].layerId(), inputs[i].portId()}, {layer_id, i});
// Adds a copy of the given layer, assigning it a free unique id (keeping the
// requested id when it is explicitly set and unused) and a unique name
// (suffixing "_id<N>" until no clash remains). Returns the assigned id.
idx_t Builder::Network::addLayer(const Layer& layer) {
    // Picks layer.getId() when free; max() means "no id requested".
    auto getAvailableId = [&](idx_t defaultId) {
        if (defaultId == (std::numeric_limits<idx_t>::max)())
        // Restart the scan whenever the candidate id is found to be taken.
        auto it = parameters["layers"].as<std::vector<Layer::Ptr>>().begin();
        while (it != parameters["layers"].as<std::vector<Layer::Ptr>>().end()) {
            for (it = parameters["layers"].as<std::vector<Layer::Ptr>>().begin();
                 it != parameters["layers"].as<std::vector<Layer::Ptr>>().end(); it++) {
                if ((*it)->getId() == defaultId) {
    // Derives a unique layer name; an empty name becomes "id<N>".
    auto generateAvailableName = [&](const std::string& name, idx_t id) {
        const std::string idName = "id" + std::to_string(id);
        std::string generatedName(name);
        if (generatedName.empty())
            generatedName = idName;
        bool nameIsUnique(false);
        while (!nameIsUnique) {
            for (const auto& layer : parameters["layers"].as<std::vector<Layer::Ptr>>()) {
                if (generatedName == layer->getName()) {
                    nameIsUnique = false;
                    generatedName += "_" + idName;
        return generatedName;
    idx_t generatedId = getAvailableId(layer.getId());
    const auto name = generateAvailableName(layer.getName(), generatedId);
    parameters["layers"].as<std::vector<Layer::Ptr>>().emplace_back(std::make_shared<Layer>(generatedId, layer));
    parameters["layers"].as<std::vector<Layer::Ptr>>()[parameters["layers"].as<std::vector<Layer::Ptr>>().size() - 1]->setName(name);
318 void Builder::Network::connect(const PortInfo& input, const PortInfo& output) {
319 const auto mergePortData = [&]() -> bool {
320 const auto blobEqualOrEmpty = [](const Blob::Ptr& ref, const Blob::Ptr& test) -> bool {
321 return (ref->size() == test->size() || test->size() == 0) &&
322 (!memcmp(ref->cbuffer(), test->cbuffer(), test->byteSize())) &&
323 (ref->getTensorDesc().getPrecision() == test->getTensorDesc().getPrecision() ||
324 test->getTensorDesc().getPrecision() == Precision::UNSPECIFIED) &&
325 (ref->getTensorDesc().getLayout() == test->getTensorDesc().getLayout() ||
326 test->getTensorDesc().getLayout() == Layout::ANY) &&
327 (ref->getTensorDesc().getDims() == test->getTensorDesc().getDims() ||
328 test->getTensorDesc().getDims().empty()) &&
329 (ref->cbuffer().as<char *>() == test->cbuffer().as<char *>() ||
330 test->cbuffer() == nullptr);
333 const auto srcPortData = getLayer(input.layerId())->getOutputPorts()[input.portId()].getData();
334 const auto dstPortData = getLayer(output.layerId())->getInputPorts()[output.portId()].getData();
335 if (srcPortData == dstPortData)
338 if (srcPortData->getParameters() != dstPortData->getParameters() &&
339 !srcPortData->getParameters().empty() &&
340 !dstPortData->getParameters().empty())
343 size_t srcDataCount(0), dstDataCount(0);
344 if (!srcPortData->getParameters().empty()) srcDataCount++;
345 if (!dstPortData->getParameters().empty()) dstDataCount++;
347 const auto srcBlb = srcPortData->getData();
348 const auto dstBlb = dstPortData->getData();
349 if (srcBlb == dstBlb || (srcBlb->size() == dstBlb->size() &&
350 srcBlb->getTensorDesc() == dstBlb->getTensorDesc() &&
351 ((srcBlb->cbuffer().as<char *>() == dstBlb->cbuffer().as<char *>()) ||
352 (srcBlb->cbuffer() != nullptr && dstBlb->cbuffer() != nullptr &&
353 !memcmp(srcBlb->cbuffer(), dstBlb->cbuffer(), dstBlb->byteSize()))))) {
356 } else if (blobEqualOrEmpty(srcBlb, dstBlb)) {
358 } else if (blobEqualOrEmpty(dstBlb, srcBlb)) {
364 if (dstDataCount > srcDataCount) {
365 // Change source and all src destination data
366 for (const auto& connection : getLayerConnections(input.layerId())) {
367 if (connection.from() != input)
369 getLayer(connection.to().layerId())->getInputPorts()[connection.to().portId()].setData(dstPortData);
371 getLayer(input.layerId())->getOutputPorts()[input.portId()].setData(dstPortData);
373 // Change destination data
374 getLayer(output.layerId())->getInputPorts()[output.portId()].setData(srcPortData);
380 if (!mergePortData())
381 THROW_IE_EXCEPTION << "Cannot connect two ports with different data!";
383 parameters["connections"].as<std::vector<Connection>>().emplace_back(input, output);
// Removes the layer with the given id from the layer list; silently does
// nothing when no such layer exists. Connections are not touched here.
void Builder::Network::removeLayer(idx_t layerId) {
    auto it = parameters["layers"].as<std::vector<Layer::Ptr>>().begin();
    for (; it != parameters["layers"].as<std::vector<Layer::Ptr>>().end(); it++) {
        if ((*it)->getId() == layerId) {
    if (it != parameters["layers"].as<std::vector<Layer::Ptr>>().end())
        parameters["layers"].as<std::vector<Layer::Ptr>>().erase(it);
// Removes the given connection (if recorded) and resets the destination
// port's data to a fresh empty PortData so stale payload is not kept.
void Builder::Network::disconnect(const Connection& connection) {
    auto it = parameters["connections"].as<std::vector<Connection>>().begin();
    for (; it != parameters["connections"].as<std::vector<Connection>>().end(); it++) {
        if (connection == *it)
    if (it != parameters["connections"].as<std::vector<Connection>>().end())
        parameters["connections"].as<std::vector<Connection>>().erase(it);

    auto layer = getLayer(connection.to().layerId());
    layer->getInputPorts()[connection.to().portId()].setData(std::make_shared<PortData>());
    // NOTE(review): failures while resetting the port are deliberately
    // swallowed here (disconnect stays best-effort for dangling layer ids).
    } catch (InferenceEngine::details::InferenceEngineException& ex) {}
// Finalizes the network: produces an immutable INetwork snapshot by copying
// the current builder state. Validation occurs as part of the build flow.
const INetwork::CPtr Builder::Network::build() {
    InferenceEngine::Builder::Network::Ptr network =
        std::make_shared<InferenceEngine::Builder::Network>(static_cast<const INetwork&>(*this));
// Validates the network graph before building: every port must be connected
// (ports carrying a "type" parameter, i.e. weights/biases, are exempt), all
// connection endpoints must exist, shapes must be inferable, and every layer
// must pass its own parameter validation.
void Builder::Network::validate() {
    // Check that all ports are connected
    for (const auto& layer : getLayers()) {
        std::vector<bool> existInCon(layer->getInputPorts().size());
        for (size_t i = 0; i < layer->getInputPorts().size(); i++) {
            // Ports with a "type" parameter are pre-bound (weights/biases).
            if (layer->getInputPorts()[i].getParameters().find("type") != layer->getInputPorts()[i].getParameters().end())
                existInCon[i] = true;
        std::vector<bool> existOutCon(layer->getOutputPorts().size());

        const auto layerConnections = getLayerConnections(layer->getId());
        for (const auto& connection : layerConnections) {
            if (connection.from().layerId() == layer->getId()) {
                existOutCon[connection.from().portId()] = true;
                getLayer(connection.to().layerId());  // also checks peer exists
            if (connection.to().layerId() == layer->getId()) {
                existInCon[connection.to().portId()] = true;
                getLayer(connection.from().layerId());
        bool allPortsConnected = true;
        for (const auto& cons : {existInCon, existOutCon}) {
            for (const auto &existCon : cons) {
                allPortsConnected = allPortsConnected && existCon;
        if (!allPortsConnected)
            THROW_IE_EXCEPTION << "Not all ports of layer " << layer->getName() << " were connected!";

    // Check that both endpoints of every connection resolve to real layers.
    for (const auto& connection : getConnections()) {
        if (!getLayer(connection.to().layerId()))
            THROW_IE_EXCEPTION << "Cannot find layer with id: " << connection.to().layerId();
        if (!getLayer(connection.from().layerId()))
            THROW_IE_EXCEPTION << "Cannot find layer with id: " << connection.from().layerId();

    // Run shape inference from the input shapes.
    std::map<std::string, SizeVector> inputShapes;
    for (const auto& input : getInputs())
        inputShapes[input->getName()] = input->getOutputPorts()[0].shape();

    if (parameters.at("version").as<int>()) {
        details::BaseCreator::version_ = parameters.at("version");

    ShapeInfer::Reshaper reshaper(this);
    StatusCode sts = reshaper.run(inputShapes, &resp);
    // Not all implementations may be registered if all shapes were read from IR.
    if (sts == NOT_FOUND) {
        bool allShapesLooksGood = true;
        for (const auto& connection : getConnections()) {
            // Matching, non-empty shapes on both ends mean IR shapes suffice.
            if (getLayer(connection.from().layerId())->getOutputPorts()[connection.from().portId()].shape() !=
                getLayer(connection.to().layerId())->getInputPorts()[connection.to().portId()].shape() ||
                getLayer(connection.to().layerId())->getInputPorts()[connection.to().portId()].shape().empty()) {
                allShapesLooksGood = false;
        if (allShapesLooksGood)
    THROW_IE_EXCEPTION << resp.msg;

    // Check all parameters
    for (const auto& layer : getLayers()) {
        } catch(InferenceEngine::details::InferenceEngineException& ex) {
            THROW_IE_EXCEPTION << "Cannot build layer " << layer->getName() << ": " << ex.what();
        } catch(std::bad_cast& ex) {
            THROW_IE_EXCEPTION << "Cannot build layer " << layer->getName() << ": " << ex.what();
499 const std::shared_ptr<ICNNNetwork> Builder::convertToICNNNetwork(const INetwork::CPtr& network) {
500 auto createCNNLayer = [](const std::shared_ptr<const ILayer>& layer, Precision precision) {
501 static std::vector<std::shared_ptr<BaseConverter>> convertors = {
502 std::make_shared<LayerConverter<InferenceEngine::PowerLayer>>("Power"),
503 std::make_shared<LayerConverter<InferenceEngine::ConvolutionLayer>>("Convolution"),
504 std::make_shared<LayerConverter<InferenceEngine::DeconvolutionLayer>>("Deconvolution"),
505 std::make_shared<LayerConverter<InferenceEngine::PoolingLayer>>("Pooling"),
506 std::make_shared<LayerConverter<InferenceEngine::FullyConnectedLayer>>("InnerProduct"),
507 std::make_shared<LayerConverter<InferenceEngine::FullyConnectedLayer>>("FullyConnected"),
508 std::make_shared<LayerConverter<InferenceEngine::NormLayer>>("LRN"),
509 std::make_shared<LayerConverter<InferenceEngine::NormLayer>>("Norm"),
510 std::make_shared<LayerConverter<InferenceEngine::SoftMaxLayer>>("Softmax"),
511 std::make_shared<LayerConverter<InferenceEngine::GRNLayer>>("GRN"),
512 std::make_shared<LayerConverter<InferenceEngine::MVNLayer>>("MVN"),
513 std::make_shared<LayerConverter<InferenceEngine::ReLULayer>>("ReLU"),
514 std::make_shared<LayerConverter<InferenceEngine::ClampLayer>>("Clamp"),
515 std::make_shared<LayerConverter<InferenceEngine::SplitLayer>>("Split"),
516 std::make_shared<LayerConverter<InferenceEngine::SplitLayer>>("Slice"),
517 std::make_shared<LayerConverter<InferenceEngine::ConcatLayer>>("Concat"),
518 std::make_shared<LayerConverter<InferenceEngine::EltwiseLayer>>("Eltwise"),
519 std::make_shared<LayerConverter<InferenceEngine::ScaleShiftLayer>>("ScaleShift"),
520 std::make_shared<LayerConverter<InferenceEngine::PReLULayer>>("PReLU"),
521 std::make_shared<LayerConverter<InferenceEngine::CropLayer>>("Crop"),
522 std::make_shared<LayerConverter<InferenceEngine::ReshapeLayer>>("Reshape"),
523 std::make_shared<LayerConverter<InferenceEngine::ReshapeLayer>>("Flatten"),
524 std::make_shared<LayerConverter<InferenceEngine::TileLayer>>("Tile"),
525 std::make_shared<LayerConverter<InferenceEngine::PadLayer>>("Pad"),
526 std::make_shared<ActivationConverter>(),
527 std::make_shared<RNNSequenceConverter>(),
528 std::make_shared<LayerConverter<InferenceEngine::BatchNormalizationLayer>>("BatchNormalization"),
530 for (auto &convertor : convertors) {
531 if (!convertor->canCreate(layer->getType()))
533 return convertor->createLayer(layer, precision);
535 static LayerConverter<CNNLayer> genericCreator("");
536 return genericCreator.createLayer(layer, precision);
539 auto keep_input_info = [](std::unique_ptr<details::CNNNetworkImpl>& network, DataPtr &in_data,
540 PreProcessInfo preProc) {
541 InputInfo::Ptr info(new InputInfo());
542 info->getPreProcess() = preProc;
543 info->setInputData(in_data);
544 Precision prc = info->getInputPrecision();
546 // Convert precision into native format (keep element size)
547 prc = prc == Precision::Q78 ? Precision::I16 :
548 prc == Precision::FP16 ? Precision::FP32 :
549 static_cast<Precision::ePrecision>(prc);
551 info->setInputPrecision(prc);
552 network->setInputInfo(info);
555 std::unique_ptr<details::CNNNetworkImpl> cnnNetworkImpl(new details::CNNNetworkImpl());
557 Precision detectedPrecision = Precision::UNSPECIFIED;
558 for (const auto& layer : *network) {
559 for (const auto& port : layer->getInputPorts()) {
560 Precision prc = port.getData()->getData()->getTensorDesc().getPrecision();
561 if (prc != Precision::UNSPECIFIED) {
562 detectedPrecision = prc;
566 for (const auto& port : layer->getOutputPorts()) {
567 Precision prc = port.getData()->getData()->getTensorDesc().getPrecision();
568 if (prc != Precision::UNSPECIFIED) {
569 detectedPrecision = prc;
573 if (detectedPrecision != Precision::UNSPECIFIED)
576 if (detectedPrecision == Precision::UNSPECIFIED)
577 detectedPrecision = Precision::FP32;
579 details::CaselessEq<std::string> eq;
580 cnnNetworkImpl->setName(network->getName());
581 cnnNetworkImpl->setPrecision(Precision::UNSPECIFIED);
582 for (const auto& layer : *network) {
583 bool isInternalLayer = eq(layer->getType(), "Const");
584 for (const auto& connection : network->getLayerConnections(layer->getId())) {
585 if (!isInternalLayer)
587 if (connection.from().layerId() != layer->getId())
589 const auto& port = network->getLayer(connection.to().layerId())->getInputPorts()[connection.to().portId()];
590 isInternalLayer = isInternalLayer &&
591 port.getParameters().find("type") != port.getParameters().end();
593 isInternalLayer = isInternalLayer || eq(layer->getType(), "Output");
598 CNNLayerPtr cnnLayer = createCNNLayer(layer, detectedPrecision);
599 if (cnnNetworkImpl->getPrecision() == Precision::UNSPECIFIED) {
600 cnnNetworkImpl->setPrecision(cnnLayer->precision);
601 } else if (cnnNetworkImpl->getPrecision() == Precision::MIXED &&
602 cnnNetworkImpl->getPrecision() != cnnLayer->precision) {
603 cnnNetworkImpl->setPrecision(Precision::MIXED);
606 auto connections = network->getLayerConnections(layer->getId());
607 std::unordered_set<idx_t> inputNum, outputNum;
608 for (const auto& connection : connections) {
609 if (connection.from().layerId() != layer->getId()) {
610 const auto& port = layer->getInputPorts()[connection.to().portId()];
611 if (port.getParameters().find("type") == port.getParameters().end())
612 inputNum.insert(connection.to().portId());
614 outputNum.insert(connection.from().portId());
617 cnnLayer->insData.resize(inputNum.size());
618 cnnLayer->outData.resize(outputNum.size());
619 cnnNetworkImpl->addLayer(cnnLayer);
622 for (const auto& layer : *network) {
623 auto connections = network->getLayerConnections(layer->getId());
624 CNNLayerPtr cnnLayer;
625 StatusCode sts = cnnNetworkImpl->getLayerByName(layer->getName().c_str(), cnnLayer, nullptr);
627 if (sts != OK && (eq(layer->getType(), "Output") || eq(layer->getType(), "Const")))
630 THROW_IE_EXCEPTION << "Cannot find CNNLayer by name " << layer->getName();
632 for (const auto& connection : connections) {
633 if (connection.from().layerId() != layer->getId())
636 const auto& outLayer = network->getLayer(connection.to().layerId());
638 CNNLayerPtr cnnOutLayer;
639 sts = cnnNetworkImpl->getLayerByName(outLayer->getName().c_str(), cnnOutLayer, nullptr);
640 if (sts != OK && !eq(outLayer->getType(), "Output") && !eq(layer->getType(), "Const"))
641 THROW_IE_EXCEPTION << "Cannot find CNNLayer by name " << outLayer->getName();
643 std::string dataName = layer->getName();
644 if (cnnLayer->outData.size() > 1) {
645 dataName += "." + std::to_string(connection.from().portId());
647 DataPtr& data = cnnNetworkImpl->getData(dataName);
649 TensorDesc dataDesc(detectedPrecision, layer->getOutputPorts()[connection.from().portId()].shape(),
650 TensorDesc::getLayoutByDims(layer->getOutputPorts()[connection.from().portId()].shape()));
651 data = std::make_shared<Data>(dataName, dataDesc);
652 data->creatorLayer = cnnLayer;
654 cnnLayer->outData[connection.from().portId()] = data;
657 const auto inputPorts = outLayer->getInputPorts();
658 for (size_t i = 0; i < connection.to().portId() && i < inputPorts.size(); i++) {
659 if (inputPorts[i].getParameters().find("type") == inputPorts[i].getParameters().end())
663 data->inputTo[outLayer->getName()] = cnnOutLayer;
664 cnnOutLayer->insData[realPortId] = data;
666 cnnNetworkImpl->addOutput(data->getName());
670 cnnLayer->validateLayer();
671 if (eq(cnnLayer->type, "Input")) {
672 PreProcessInfo preProc;
673 if (layer->getParameters().find("preProcess") != layer->getParameters().end())
674 preProc = layer->getParameters().at("preProcess");
675 keep_input_info(cnnNetworkImpl, *cnnLayer->outData.begin(), preProc);
679 // Set default output precision to FP32 (for back-compatibility)
680 OutputsDataMap outputsInfo;
681 cnnNetworkImpl->getOutputsInfo(outputsInfo);
682 for (auto outputInfo : outputsInfo) {
683 if (outputInfo.second->getPrecision() != Precision::FP32 &&
684 outputInfo.second->getPrecision() != Precision::I32) {
685 outputInfo.second->setPrecision(Precision::FP32);
689 return std::shared_ptr<ICNNNetwork>(cnnNetworkImpl.release());
// Implicit conversion to an immutable INetwork snapshot (via build()).
Builder::Network::operator const INetwork::CPtr() {
// Looks up a layer by id and returns its immutable ILayer view; noexcept,
// so a missing id yields a null/empty result rather than an exception.
const ILayer::CPtr Builder::Network::getLayer(idx_t layerId) const noexcept {
    for (auto& layer : getLayers()) {
        if (layer->getId() == layerId)
            return layer->build();
// Looks up a mutable layer by id; throws when the id is unknown.
Builder::Layer::Ptr Builder::Network::getLayer(idx_t layerId) {
    for (auto& layer : getLayers()) {
        if (layer->getId() == layerId)
    THROW_IE_EXCEPTION << "Cannot find layer with id: " << layerId;
// Network name accessor (stored in the generic parameter map).
const std::string& Builder::Network::getName() const noexcept {
    return parameters.at("name");
// Read-only access to the execution Context the network was created with.
const Context& Builder::Network::getContext() const noexcept {
    return parameters.at("context");
// Mutable access to the execution Context.
Context& Builder::Network::getContext() noexcept {
    return parameters.at("context");
// Const iterator over layers in topological order. Two return paths exist:
// the normal iterator and an end() fallback — presumably wrapped in a
// try/catch for graphs whose topology cannot be iterated (verify in full source).
Builder::Network::const_iterator Builder::Network::begin() const noexcept {
    return Network::const_iterator(this);
    return Network::const_iterator(this, true);
// Const past-the-end iterator.
Builder::Network::const_iterator Builder::Network::end() const noexcept {
    return Network::const_iterator(this, true);
// Number of layers reachable by iteration (not necessarily all stored layers).
size_t Builder::Network::size() const noexcept {
    return static_cast<size_t>(std::distance(std::begin(*this), std::end(*this)));
// Mutable iterator over layers in topological order.
Builder::Network::iterator Builder::Network::begin() {
    return Network::iterator(this);
// Mutable past-the-end iterator.
Builder::Network::iterator Builder::Network::end() {
    return Network::iterator(this, true);
// Returns immutable views of all input layers: layers that are the
// destination of no connection.
const std::vector<ILayer::CPtr> Builder::Network::getInputs() const noexcept {
    std::vector<ILayer::CPtr> inputs;
    for (const auto& layer : parameters.at("layers").as<std::vector<Layer::Ptr>>()) {
        bool isInputLayer = true;
        for (const auto& connection : getLayerConnections(layer->getId())) {
            if (connection.to().layerId() == layer->getId()) {
                isInputLayer = false;
        inputs.push_back(layer->build());
// Mutable counterpart of getInputs(): layers with no incoming connection.
std::vector<Builder::Layer::Ptr> Builder::Network::getInputs() {
    std::vector<Builder::Layer::Ptr> inputs;
    for (auto& layer : parameters.at("layers").as<std::vector<Layer::Ptr>>()) {
        bool isInputLayer = true;
        for (const auto& connection : getLayerConnections(layer->getId())) {
            if (connection.to().layerId() == layer->getId()) {
                isInputLayer = false;
        inputs.push_back(layer);
// Returns immutable views of all output layers: layers that are the
// source of no connection.
const std::vector<ILayer::CPtr> Builder::Network::getOutputs() const noexcept {
    std::vector<ILayer::CPtr> outputs;
    for (const auto& layer : parameters.at("layers").as<std::vector<Layer::Ptr>>()) {
        bool isOutputLayer = true;
        for (const auto& connection : getLayerConnections(layer->getId())) {
            if (connection.from().layerId() == layer->getId()) {
                isOutputLayer = false;
        outputs.push_back(layer->build());
// Mutable counterpart of getOutputs(): layers with no outgoing connection.
std::vector<Builder::Layer::Ptr> Builder::Network::getOutputs() {
    std::vector<Builder::Layer::Ptr> outputs;
    for (auto& layer : parameters.at("layers").as<std::vector<Layer::Ptr>>()) {
        bool isOutputLayer = true;
        for (const auto& connection : getLayerConnections(layer->getId())) {
            if (connection.from().layerId() == layer->getId()) {
                isOutputLayer = false;
        outputs.push_back(layer);
// Read-only access to the full connection list.
const std::vector<Connection>& Builder::Network::getConnections() const {
    return parameters.at("connections").as<std::vector<Connection>>();
// Collects every connection that touches the given layer (as source or
// destination).
const std::vector<Connection> Builder::Network::getLayerConnections(idx_t layerId) const noexcept {
    std::vector<Connection> layerConnections;
    // NOTE(review): 'const auto' copies each Connection per iteration;
    // 'const auto&' would avoid the copies — confirm and change in full source.
    for (const auto connection : parameters.at("connections").as<std::vector<Connection>>()) {
        if (connection.from().layerId() == layerId || connection.to().layerId() == layerId)
            layerConnections.push_back(connection);
    return layerConnections;