[platform/upstream/dldt.git] inference-engine/src/inference_engine/shape_infer/built-in/ie_roi_pooling_shape_infer.hpp
// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include <description_buffer.hpp>
#include "ie_built_in_impl.hpp"
#include <ie_layers.h>
#include <map>
#include <memory>
#include <string>
#include <vector>

namespace InferenceEngine {
namespace ShapeInfer {

/**
 * @brief Implementation of shape inference for the RoiPooling layer
 */
class RoiPoolingShapeProp : public BuiltInShapeInferImpl {
public:
    explicit RoiPoolingShapeProp(const std::string& type) : BuiltInShapeInferImpl(type) {}

    void inferShapesImpl(const std::vector<Blob::CPtr>& inBlobs,
                         const std::map<std::string, std::string>& params,
                         const std::map<std::string, Blob::Ptr>& blobs,
                         std::vector<SizeVector>& outShapes) override {
        LayerParams lp{};
        CNNLayer cnnLayer(lp);
        cnnLayer.params = params;
        cnnLayer.type = _type;
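        // Validate the assembled layer parameters and input blobs before
        // computing the output shape.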
        validate(&cnnLayer, inBlobs, params, blobs);

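        // inShapes is populated by the base class from inBlobs: inShapes[0] is the
        // feature-map input (N, C, H, W) and inShapes[1] is the ROI blob.
        // The output batch equals the number of ROIs, channels come from the
        // feature map, and the pooled spatial dimensions are appended below.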
        SizeVector out_shapes = {inShapes[1][0], inShapes[0][1]};
        for (auto attr : {"pooled_d", "pooled_h", "pooled_w"}) {  // desired IR format: pooled="...,d,h,w"
            int pooled = cnnLayer.GetParamAsInt(attr, -1);
            if (pooled >= 0) {
                out_shapes.push_back(static_cast<size_t>(pooled));
            }
        }
        outShapes.push_back(out_shapes);
    }
};

}  // namespace ShapeInfer
}  // namespace InferenceEngine
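
The shape arithmetic above is terse, so here is a minimal standalone sketch of the
same computation (illustrative types and dimensions only, not the real Inference
Engine API): for a feature map of shape {N, C, H, W}, an ROI blob of shape
{num_rois, 5} and pooled output bins, the inferred output shape is
{num_rois, C, pooled_h, pooled_w}, with a leading pooled_d when depth is present.

// roi_pooling_shape_example.cpp -- standalone sketch, not part of the repository.
#include <cstddef>
#include <iostream>
#include <vector>

using SizeVector = std::vector<std::size_t>;

// Mirrors RoiPoolingShapeProp::inferShapesImpl: batch = number of ROIs,
// channels from the feature-map input, then every non-negative pooled dim.
SizeVector roiPoolingOutShape(const SizeVector& featureMap,      // e.g. {1, 256, 14, 14}
                              const SizeVector& rois,            // e.g. {300, 5}
                              const std::vector<int>& pooled) {  // pooled_d/h/w; -1 means "absent"
    SizeVector out = {rois[0], featureMap[1]};
    for (int p : pooled) {
        if (p >= 0) out.push_back(static_cast<std::size_t>(p));
    }
    return out;
}

int main() {
    // 300 ROIs pooled into 6x6 bins over a 256-channel feature map
    // -> inferred output shape {300, 256, 6, 6}.
    for (std::size_t d : roiPoolingOutShape({1, 256, 14, 14}, {300, 5}, {-1, 6, 6}))
        std::cout << d << ' ';
    std::cout << '\n';
}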