platform/upstream/dldt.git: inference-engine/src/mkldnn_plugin/nodes/mkldnn_roi_pooling_node.cpp
// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include "mkldnn_roi_pooling_node.h"
#include "desc_iterator.hpp"
#include <ie_layers.h>
#include <mkldnn.hpp>
#include <string>
#include <vector>
#include <mkldnn_extension_utils.h>

using namespace mkldnn;
using namespace MKLDNNPlugin;
using namespace InferenceEngine;

MKLDNNROIPoolingNode::MKLDNNROIPoolingNode(const InferenceEngine::CNNLayerPtr& layer, const mkldnn::engine& eng) : MKLDNNNode(layer, eng) {}

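// Collects the memory descriptors this node can consume and produce. ROI pooling takes two
// inputs: input 0 is the feature map, input 1 is the ROI blob (one row per region: batch index
// plus four box coordinates), which is why the second input is always described with the plain
// memory::nc layout below.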
void MKLDNNROIPoolingNode::getSupportedDescriptors() {
    if (!descs.empty())
        return;

    // Only FP32 is used for this primitive: any other input/output precision is forced to FP32
    // before converting to the corresponding mkldnn data type.
    InferenceEngine::Precision precision = getCnnLayer()->insData[0].lock()->getPrecision();
    if (precision != InferenceEngine::Precision::FP32)
        precision = InferenceEngine::Precision::FP32;
    auto inputDataType = MKLDNNExtensionUtils::IEPrecisionToDataType(precision);
    precision = getCnnLayer()->outData[0]->getPrecision();
    if (precision != InferenceEngine::Precision::FP32)
        precision = InferenceEngine::Precision::FP32;
    auto outputDataType = MKLDNNExtensionUtils::IEPrecisionToDataType(precision);

    GenericLayer* genericLayer = getCnnLayer().get();

    if (genericLayer == nullptr)
        THROW_IE_EXCEPTION << "Cannot convert ROIPooling layer.";

    if (getParentEdges().empty())
        THROW_IE_EXCEPTION << "Incorrect number of input edges for layer " << getName();
    if (getChildEdges().empty())
        THROW_IE_EXCEPTION << "Incorrect number of output edges for layer " << getName();

    // Layer parameters from the IR: output spatial size, coordinate scale and pooling method.
    pooled_h = genericLayer->GetParamAsInt("pooled_h");
    pooled_w = genericLayer->GetParamAsInt("pooled_w");
    spatial_scale = genericLayer->GetParamAsFloat("spatial_scale");
    std::string m = genericLayer->GetParamAsString("method", "max");
    if (m == "max") {
        method = mkldnn::algorithm::roi_pooling_max;
    } else if (m == "bilinear") {
        method = mkldnn::algorithm::roi_pooling_bilinear;
    } else {
        THROW_IE_EXCEPTION << "Unsupported ROI pooling method: " << m;
    }

    // Create one descriptor per layout available for the feature-map input; the ROI input
    // always uses the plain nc layout.
    auto parentDims = getParentEdgeAt(0)->getDims();
    for (auto format : getAvailableFormatsForDims(parentDims)) {
        std::vector<InferenceEngine::TensorDesc> srcs;
        srcs.push_back(MKLDNNMemoryDesc(getParentEdgeAt(0)->getDims(), inputDataType, format));
        srcs.push_back(MKLDNNMemoryDesc(getParentEdgeAt(1)->getDims(), inputDataType, memory::nc));
        MKLDNNMemoryDesc out_candidate(getChildEdgeAt(0)->getDims(), outputDataType, format);

        createDescriptor(srcs, {out_candidate});
    }
}

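// Builds the mkldnn roi_pooling_forward primitive from the memory descriptors of the edges
// that were actually allocated, using the pooling parameters parsed in getSupportedDescriptors().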
void MKLDNNROIPoolingNode::createPrimitive() {
    if (prim)
        return;

    std::vector<memory::desc> srcs;
    for (size_t i = 0; i < getParentEdges().size(); i++) {
        srcs.push_back(getParentEdgeAt(i)->getMemory().GetDescriptor());
    }

    memory::desc out_candidate = getChildEdgeAt(0)->getMemory().GetDescriptor();

    MKLDNNDescriptor desc(std::shared_ptr<roi_pooling_forward::desc>(
            new roi_pooling_forward::desc(prop_kind::forward_scoring, method, srcs, out_candidate, pooled_h, pooled_w,
                                          spatial_scale)));

    // Replace the descriptor built during getSupportedDescriptors() with one based on the
    // memory layouts that were actually selected.
    descs[0] = desc;
    std::shared_ptr<roi_pooling_forward::desc> selected_desc_ptr = descs[0];

    const PrimitiveDescInfo *selected_pd = getSelectedPrimitiveDescriptor();
    if (selected_pd == nullptr)
        THROW_IE_EXCEPTION << "Preferable primitive descriptor is not set for node " << getName() << ".";

    auto prim_desc = roi_pooling_forward::primitive_desc(*selected_desc_ptr, getEngine());
    // Note: itpd is created but not used further.
    primitive_desc_iterator itpd = descs[0].createPrimitiveDescriptorIterator(getEngine());

    std::vector<primitive::at> src_p;
    for (size_t i = 0; i < getParentEdges().size(); i++) {
        src_p.push_back(getParentEdgeAt(i)->getMemoryPtr()->GetPrimitive());
    }
    prim.reset(new roi_pooling_forward(prim_desc, src_p, getChildEdgeAt(0)->getMemory().GetPrimitive()));
}

bool MKLDNNROIPoolingNode::created() const {
    return getType() == ROIPooling;
}

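// Builds a roi_pooling_forward descriptor for one candidate combination of input/output tensor
// descriptors; called once per available format from getSupportedDescriptors().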
void MKLDNNROIPoolingNode::createDescriptor(const std::vector<InferenceEngine::TensorDesc> &inputDesc,
                                            const std::vector<InferenceEngine::TensorDesc> &outputDesc) {
    std::vector<memory::desc> srcs;
    srcs.push_back(MKLDNNMemoryDesc(inputDesc[0]));
    srcs.push_back(MKLDNNMemoryDesc(inputDesc[1]));
    MKLDNNMemoryDesc out_candidate(outputDesc[0]);

    MKLDNNDescriptor desc(std::shared_ptr<roi_pooling_forward::desc>(
            new roi_pooling_forward::desc(prop_kind::forward_scoring, method, srcs, out_candidate, pooled_h, pooled_w,
                                          spatial_scale)));
    descs.push_back(desc);
}
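
// For reference, the layer parameters read in getSupportedDescriptors() come from the IR's
// ROIPooling layer entry. The snippet below is illustrative only (names and values are made up,
// not taken from any particular model):
//
//   <layer name="roi_pool" type="ROIPooling" precision="FP32">
//       <data pooled_h="6" pooled_w="6" spatial_scale="0.0625" method="max"/>
//       <!-- input 0: feature map [N, C, H, W]; input 1: ROI blob [num_rois, 5] -->
//       <!-- output: [num_rois, C, pooled_h, pooled_w] -->
//   </layer>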