// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <builders/ie_batch_normalization_layer.hpp>
#include <ie_cnn_layer_builder.h>

#include <string>

using namespace InferenceEngine;

// Creates a fresh BatchNormalization builder layer with three input ports
// (data, weights, biases), one output port, and a default epsilon of 1e-8.
Builder::BatchNormalizationLayer::BatchNormalizationLayer(const std::string& name): LayerDecorator("BatchNormalization", name) {
    getLayer()->getInputPorts().resize(3);
    getLayer()->getInputPorts()[1].setParameter("type", "weights");
    getLayer()->getInputPorts()[2].setParameter("type", "biases");
    getLayer()->getOutputPorts().resize(1);
    setEpsilon(0.00000001f);
}

// Wraps an existing generic layer, checking that its type is BatchNormalization.
Builder::BatchNormalizationLayer::BatchNormalizationLayer(const Layer::Ptr& layer): LayerDecorator(layer) {
    checkType("BatchNormalization");
}

// Const overload of the wrapping constructor.
Builder::BatchNormalizationLayer::BatchNormalizationLayer(const Layer::CPtr& layer): LayerDecorator(layer) {
    checkType("BatchNormalization");
}

Builder::BatchNormalizationLayer& Builder::BatchNormalizationLayer::setName(const std::string& name) {
    getLayer()->setName(name);
    return *this;
}

const Port& Builder::BatchNormalizationLayer::getPort() const {
    return getLayer()->getOutputPorts()[0];
}

// BatchNormalization preserves the tensor shape, so the same port describes
// both the data input and the output.
Builder::BatchNormalizationLayer& Builder::BatchNormalizationLayer::setPort(const Port& port) {
    getLayer()->getOutputPorts()[0] = port;
    getLayer()->getInputPorts()[0] = port;
    return *this;
}

float Builder::BatchNormalizationLayer::getEpsilon() const {
    return getLayer()->getParameters().at("epsilon");
}

Builder::BatchNormalizationLayer& Builder::BatchNormalizationLayer::setEpsilon(float eps) {
    getLayer()->getParameters()["epsilon"] = eps;
    return *this;
}

// Validator: unless the layer is only partially defined, the weights and biases
// blobs must be present and allocated.
REG_VALIDATOR_FOR(BatchNormalization, [](const Builder::Layer::CPtr& layer, bool partial) {
    Builder::BatchNormalizationLayer batchNormBuilder(layer);  // constructing the decorator checks the layer type
    if (partial)
        return;
    auto weights = layer->getInputPorts()[1].getData()->getData();
    auto biases = layer->getInputPorts()[2].getData()->getData();
    if (!weights || weights->cbuffer() == nullptr || !biases || biases->cbuffer() == nullptr)
        THROW_IE_EXCEPTION << "Cannot create BatchNormalization layer! Weights and biases are required!";
});

// Converter: copies the epsilon parameter from an existing CNNLayer into the builder layer.
REG_CONVERTER_FOR(BatchNormalization, [](const CNNLayerPtr& cnnLayer, Builder::Layer& layer) {
    layer.getParameters()["epsilon"] = cnnLayer->GetParamAsFloat("epsilon");
});
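
// Usage sketch (illustrative only, not part of the original file): only the
// setters defined above are used. The layer name "bn1", the port shape
// {1, 64, 56, 56}, and the Port(SizeVector) constructor are assumptions taken
// from the builder API as used elsewhere in this directory; the resulting layer
// would typically be added to a Builder::Network afterwards.
//
//   Builder::BatchNormalizationLayer bn("bn1");
//   bn.setPort(Port({1, 64, 56, 56}));  // shared shape for the data input and the output
//   bn.setEpsilon(1e-5f);               // overrides the 1e-8 default set in the constructor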