-// Copyright (C) 2018 Intel Corporation
+// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <builders/ie_convolution_layer.hpp>
-#include <details/caseless.hpp>
+#include <ie_cnn_layer_builder.h>
#include <vector>
#include <string>
+#include <limits>
using namespace InferenceEngine;
-Builder::ConvolutionLayer::ConvolutionLayer(const std::string& name): LayerFragment("Convolution", name) {
- getLayer().getInputPorts().resize(1);
- getLayer().getOutputPorts().resize(1);
+Builder::ConvolutionLayer::ConvolutionLayer(const std::string& name): LayerDecorator("Convolution", name) {
+ getLayer()->getInputPorts().resize(3);
+ getLayer()->getInputPorts()[1].setParameter("type", "weights");
+ getLayer()->getInputPorts()[2].setParameter("type", "biases");
+ getLayer()->getOutputPorts().resize(1);
+ setGroup(1);
+ setKernel({});
+ setOutDepth(0);
+ setStrides({});
+ setDilation({});
+ setPaddingsEnd({});
+ setPaddingsBegin({});
}
-Builder::ConvolutionLayer::ConvolutionLayer(Layer& genLayer): LayerFragment(genLayer) {
- if (!details::CaselessEq<std::string>()(getLayer().getType(), "Convolution"))
- THROW_IE_EXCEPTION << "Cannot create ConvolutionLayer decorator for layer " << getLayer().getType();
+Builder::ConvolutionLayer::ConvolutionLayer(const Layer::Ptr& layer): LayerDecorator(layer) {
+ checkType("Convolution");
}
-Builder::ConvolutionLayer::operator Builder::Layer() const {
- Layer genLayer(getLayer());
-
- std::vector<size_t> l_kernel = getKernel();
- std::vector<size_t> l_dilation = getDilation();
- std::vector<size_t> l_paddingBegin = getPaddingsBegin();
- std::vector<size_t> l_paddingEnd = getPaddingsEnd();
- std::vector<size_t> l_strides = getStrides();
-
- if (l_paddingBegin.empty() && !l_kernel.empty())
- l_paddingBegin.resize(l_kernel.size(), 0);
- if (l_paddingEnd.empty() && !l_kernel.empty())
- l_paddingEnd.resize(l_kernel.size(), 0);
- if (l_dilation.empty() && !l_kernel.empty())
- l_dilation.resize(l_kernel.size(), 1);
- if (l_strides.empty() && !l_kernel.empty())
- l_strides.resize(l_kernel.size(), 1);
-
- if (!getOutDepth() || l_kernel.empty() || l_kernel.size() != l_paddingBegin.size() || l_kernel.size() != l_paddingEnd.size() ||
- l_kernel.size() != l_dilation.size() || l_kernel.size() != l_strides.size())
- THROW_IE_EXCEPTION << genLayer.getType() << " node " << genLayer.getName() << " contains incorrect parameters!";
-
- genLayer.getParameters()["kernel"] = l_kernel;
- genLayer.getParameters()["strides"] = l_strides;
- genLayer.getParameters()["pads_begin"] = l_paddingBegin;
- genLayer.getParameters()["pads_end"] = l_paddingEnd;
- genLayer.getParameters()["dilations"] = l_dilation;
- return genLayer;
+Builder::ConvolutionLayer::ConvolutionLayer(const Layer::CPtr& layer): LayerDecorator(layer) {
+ checkType("Convolution");
}
// Renames the wrapped layer; returns *this for call chaining.
Builder::ConvolutionLayer& Builder::ConvolutionLayer::setName(const std::string& name) {
    getLayer()->setName(name);
    return *this;
}
// Accessor for the data input port (port 0; ports 1/2 hold weights/biases).
const Port& Builder::ConvolutionLayer::getInputPort() const {
    const auto& inPorts = getLayer()->getInputPorts();
    return inPorts[0];
}

Builder::ConvolutionLayer& Builder::ConvolutionLayer::setInputPort(const Port& port) {
    auto& inPorts = getLayer()->getInputPorts();
    inPorts[0] = port;
    return *this;
}

// Accessor for the single output port.
const Port& Builder::ConvolutionLayer::getOutputPort() const {
    const auto& outPorts = getLayer()->getOutputPorts();
    return outPorts[0];
}

Builder::ConvolutionLayer& Builder::ConvolutionLayer::setOutputPort(const Port& port) {
    auto& outPorts = getLayer()->getOutputPorts();
    outPorts[0] = port;
    return *this;
}
const std::vector<size_t> Builder::ConvolutionLayer::getKernel() const {
- return uInts2size_t(getLayer().getParameters()["kernel"].asUInts({}));
+ return getLayer()->getParameters().at("kernel");
}
Builder::ConvolutionLayer& Builder::ConvolutionLayer::setKernel(const std::vector<size_t>& kernel) {
- getLayer().getParameters()["kernel"] = kernel;
+ getLayer()->getParameters()["kernel"] = kernel;
return *this;
}
const std::vector<size_t> Builder::ConvolutionLayer::getStrides() const {
- return uInts2size_t(getLayer().getParameters()["strides"].asUInts({}));
+ return getLayer()->getParameters().at("strides");
}
Builder::ConvolutionLayer& Builder::ConvolutionLayer::setStrides(const std::vector<size_t>& strides) {
- getLayer().getParameters()["strides"] = strides;
+ getLayer()->getParameters()["strides"] = strides;
return *this;
}
const std::vector<size_t> Builder::ConvolutionLayer::getDilation() const {
- return uInts2size_t(getLayer().getParameters()["dilations"].asUInts({}));
+ return getLayer()->getParameters().at("dilations");
}
Builder::ConvolutionLayer& Builder::ConvolutionLayer::setDilation(const std::vector<size_t>& dilation) {
- getLayer().getParameters()["dilations"] = dilation;
+ getLayer()->getParameters()["dilations"] = dilation;
return *this;
}
const std::vector<size_t> Builder::ConvolutionLayer::getPaddingsBegin() const {
- return uInts2size_t(getLayer().getParameters()["pads_begin"].asUInts({}));
+ return getLayer()->getParameters().at("pads_begin");
}
Builder::ConvolutionLayer& Builder::ConvolutionLayer::setPaddingsBegin(const std::vector<size_t>& paddings) {
- getLayer().getParameters()["pads_begin"] = paddings;
+ getLayer()->getParameters()["pads_begin"] = paddings;
return *this;
}
const std::vector<size_t> Builder::ConvolutionLayer::getPaddingsEnd() const {
- return uInts2size_t(getLayer().getParameters()["pads_end"].asUInts({}));
+ return getLayer()->getParameters().at("pads_end");
}
Builder::ConvolutionLayer& Builder::ConvolutionLayer::setPaddingsEnd(const std::vector<size_t>& paddings) {
- getLayer().getParameters()["pads_end"] = paddings;
+ getLayer()->getParameters()["pads_end"] = paddings;
return *this;
}
// Number of convolution groups (1 == ordinary convolution).
size_t Builder::ConvolutionLayer::getGroup() const {
    const auto& params = getLayer()->getParameters();
    return params.at("group");
}

Builder::ConvolutionLayer& Builder::ConvolutionLayer::setGroup(size_t group) {
    auto& params = getLayer()->getParameters();
    params["group"] = group;
    return *this;
}

// Number of output channels produced by the convolution.
size_t Builder::ConvolutionLayer::getOutDepth() const {
    const auto& params = getLayer()->getParameters();
    return params.at("output");
}

Builder::ConvolutionLayer& Builder::ConvolutionLayer::setOutDepth(size_t outDepth) {
    auto& params = getLayer()->getParameters();
    params["output"] = outDepth;
    return *this;
}
-void Builder::ConvolutionLayer::validate(const Layer& layer) {
- Layer convLayer = layer;
- Builder::ConvolutionLayer convBuilder(convLayer);
- std::vector<size_t> l_kernel = convBuilder.getKernel();
-
+REG_VALIDATOR_FOR(Convolution, [] (const InferenceEngine::Builder::Layer::CPtr& layer, bool partial) {
// WA for old IRs
- if (l_kernel.empty() && layer.getParameters().find("kernel-x") != layer.getParameters().end() &&
- layer.getParameters().find("kernel-y") != layer.getParameters().end())
+ if (layer->getParameters().find("kernel") == layer->getParameters().end() &&
+ layer->getParameters().find("kernel-x") != layer->getParameters().end() &&
+ layer->getParameters().find("kernel-y") != layer->getParameters().end())
return;
+ Builder::ConvolutionLayer convBuilder(layer);
+ std::vector<size_t> l_kernel = convBuilder.getKernel();
std::vector<size_t> l_dilation = convBuilder.getDilation();
std::vector<size_t> l_paddingBegin = convBuilder.getPaddingsBegin();
std::vector<size_t> l_paddingEnd = convBuilder.getPaddingsEnd();
if (l_strides.empty() && !l_kernel.empty())
l_strides.resize(l_kernel.size(), 1);
- if (!convBuilder.getOutDepth() || l_kernel.empty() || l_kernel.size() != l_paddingBegin.size() || l_kernel.size() != l_paddingEnd.size() ||
- l_kernel.size() != l_dilation.size() || l_kernel.size() != l_strides.size())
- THROW_IE_EXCEPTION << layer.getType() << " node " << layer.getName() << " contains incorrect parameters!";
-}
+ if (l_kernel.empty()) {
+ THROW_IE_EXCEPTION << "Kernel is empty!";
+ }
+
+ if (l_paddingBegin.size() != l_paddingEnd.size()) {
+ THROW_IE_EXCEPTION << "Padding_begin dimension is not equal to padding_end dimension";
+ }
+
+ if (!l_paddingBegin.empty() && l_kernel.size() != l_paddingBegin.size()) {
+ THROW_IE_EXCEPTION << "Padding dimension is not equal to kernel dimension";
+ }
+
+ if (l_kernel.size() != l_strides.size()) {
+ THROW_IE_EXCEPTION << "Stride dimension is not equal to kernel dimension";
+ }
+
+ if (!l_dilation.empty() && l_kernel.size() != l_dilation.size()) {
+ THROW_IE_EXCEPTION << "Dilation dimension is not equal to kernel dimension";
+ }
+
+ if (convBuilder.getOutDepth() == 0) {
+ THROW_IE_EXCEPTION << "OutDepth parameter should be more than 0";
+ }
+
+ for (size_t kernel_dim : l_kernel) {
+ if (kernel_dim == 0) {
+ THROW_IE_EXCEPTION << "Kernel dimensions should be more than 0";
+ }
+ }
+
+ for (size_t i_stride : l_strides) {
+ if (i_stride == 0) {
+ THROW_IE_EXCEPTION << "Strides should be more than 0";
+ }
+ }
+
+ for (size_t dil : l_dilation) {
+ if (dil == 0)
+ THROW_IE_EXCEPTION << "Dilation should be more than 0";
+ }
+
+ if (!convBuilder.getGroup())
+ THROW_IE_EXCEPTION << "Group should be more than 0";
+
+ if (convBuilder.getInputPort().shape().empty())
+ return;
+
+ const size_t IC = convBuilder.getInputPort().shape()[1];
+ if (IC % convBuilder.getGroup())
+ THROW_IE_EXCEPTION << "Number of input channels (" << IC <<
+ ") is not divided by group number (" << convBuilder.getGroup() << ")";
+
+ size_t weight_size = convBuilder.getOutDepth() * IC / convBuilder.getGroup();
+ for (size_t kernel_dim : l_kernel) {
+ if (static_cast<double>(weight_size) * kernel_dim > std::numeric_limits<size_t>::max()) {
+ THROW_IE_EXCEPTION << "Weight size exceeds the size_t max";
+ }
+ weight_size *= kernel_dim;
+ }
+
+ if (partial)
+ return;
+
+ const auto weights = layer->getInputPorts()[1].getData()->getData();
+ if (weights->size() != weight_size) {
+ THROW_IE_EXCEPTION << "Weight size is not correct!";
+ }
+
+ const auto biases = layer->getInputPorts()[2].getData()->getData();
+ if (biases && biases->cbuffer() && biases->size() != convBuilder.getOutDepth())
+ THROW_IE_EXCEPTION << "Biases size is incorrect!";
+});
+
+REG_CONVERTER_FOR(Convolution, [](const CNNLayerPtr& cnnLayer, Builder::Layer& layer) {
+ // WA for old IRs
+ if (cnnLayer->params.find("kernel") == cnnLayer->params.end() &&
+ cnnLayer->params.find("kernel-x") != cnnLayer->params.end() &&
+ cnnLayer->params.find("kernel-y") != cnnLayer->params.end())
+ return;
-REG_VALIDATOR_FOR(Convolution, Builder::ConvolutionLayer::validate);
+ std::vector<unsigned int> tmp = cnnLayer->GetParamAsUInts("kernel");
+ std::vector<size_t> cur(tmp.size());
+ for (size_t i = 0; i < tmp.size(); ++i) {
+ cur[i] = static_cast<size_t>(tmp[i]);
+ }
+ layer.getParameters()["kernel"] = cur;
+
+ tmp = cnnLayer->GetParamAsUInts("strides");
+ cur.resize(tmp.size());
+ for (size_t i = 0; i < tmp.size(); ++i) {
+ cur[i] = static_cast<size_t>(tmp[i]);
+ }
+ layer.getParameters()["strides"] = cur;
+
+ tmp = cnnLayer->GetParamAsUInts("dilations");
+ cur.resize(tmp.size());
+ for (size_t i = 0; i < tmp.size(); ++i) {
+ cur[i] = static_cast<size_t>(tmp[i]);
+ }
+ layer.getParameters()["dilations"] = cur;
+
+ tmp = cnnLayer->GetParamAsUInts("pads_begin");
+ cur.resize(tmp.size());
+ for (size_t i = 0; i < tmp.size(); ++i) {
+ cur[i] = static_cast<size_t>(tmp[i]);
+ }
+ layer.getParameters()["pads_begin"] = cur;
+
+ tmp = cnnLayer->GetParamAsUInts("pads_end");
+ cur.resize(tmp.size());
+ for (size_t i = 0; i < tmp.size(); ++i) {
+ cur[i] = static_cast<size_t>(tmp[i]);
+ }
+ layer.getParameters()["pads_end"] = cur;
+
+ layer.getParameters()["group"] = static_cast<size_t>(cnnLayer->GetParamAsUInt("group"));
+ layer.getParameters()["output"] = static_cast<size_t>(cnnLayer->GetParamAsUInt("output"));
+});