inference-engine/src/inference_engine/shape_infer/built-in/ie_resample_shape_infer.hpp
// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include <description_buffer.hpp>
#include "ie_built_in_impl.hpp"
#include <ie_layers.h>
#include <map>
#include <memory>
#include <string>
#include <vector>

namespace InferenceEngine {
namespace ShapeInfer {

/**
 * @brief Implementation of shape inference for the Resample layer
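 *
 * When the layer has a second input, the values of that blob are read as the
 * target output shape. Otherwise the batch and channel dimensions of the first
 * input are kept and the remaining dimensions are multiplied by the integer
 * "factor" parameter.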
 */
class ResampleShapeProp : public BuiltInShapeInferImpl {
public:
    explicit ResampleShapeProp(const std::string& type) : BuiltInShapeInferImpl(type) {}

    void inferShapesImpl(const std::vector<Blob::CPtr>& inBlobs,
                         const std::map<std::string, std::string>& params,
                         const std::map<std::string, Blob::Ptr>& blobs,
                         std::vector<SizeVector>& outShapes) override {
        LayerParams lp{};
        CNNLayer cnnLayer(lp);
        cnnLayer.params = params;
        cnnLayer.type = _type;
        validate(&cnnLayer, inBlobs, params, blobs);
        SizeVector outShape;
        if (inBlobs.size() == 2) {
            // The second input holds the target output shape as a list of values.
            auto* buffer = inBlobs[1]->cbuffer().as<float*>();
            if (buffer != nullptr) {
                for (size_t i = 0; i < inBlobs[1]->size(); i++) {
                    outShape.push_back(static_cast<size_t>(buffer[i]));
                }
            } else {
                THROW_IE_EXCEPTION << "Second input must have allocated data";
            }
        } else {
            // No explicit shape input: keep the batch and channel dimensions of the
            // first input and scale the remaining ones by the "factor" parameter.
            auto scale = static_cast<size_t>(cnnLayer.GetParamAsInt("factor"));
            outShape = {inShapes[0][0], inShapes[0][1]};
            for (size_t i = 2; i < inShapes[0].size(); i++)
                outShape.push_back(inShapes[0][i] * scale);
        }
        outShapes.push_back(outShape);
    }
};
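
// Worked example (illustrative): for an NCHW input with inShapes[0] = {1, 3, 224, 224}
// and params["factor"] = "2", the single-input branch keeps the batch and channel
// dimensions and scales the spatial ones, producing {1, 3, 448, 448}. With a second
// input blob holding the values {1, 3, 112, 112}, those values are copied directly
// into the output shape instead.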

}  // namespace ShapeInfer
}  // namespace InferenceEngine