// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
#pragma once

#include <description_buffer.hpp>
#include "ie_built_in_impl.hpp"
namespace InferenceEngine {
namespace ShapeInfer {
19 *@brief Implementation of Shape inference for Resample layer
21 class ResampleShapeProp : public BuiltInShapeInferImpl {
23 explicit ResampleShapeProp(const std::string& type) : BuiltInShapeInferImpl(type) {}
25 void inferShapesImpl(const std::vector<Blob::CPtr>& inBlobs,
26 const std::map<std::string, std::string>& params,
27 const std::map<std::string, Blob::Ptr>& blobs,
28 std::vector<SizeVector>& outShapes) override {
30 CNNLayer cnnLayer(lp);
31 cnnLayer.params = params;
32 cnnLayer.type = _type;
33 validate(&cnnLayer, inBlobs, params, blobs);
35 if (inBlobs.size() == 2) {
36 auto* buffer = inBlobs[1]->cbuffer().as<float*>();
37 if (buffer != nullptr) {
38 for (int i = 0; i < inBlobs[1]->size(); i++) {
39 outShape.push_back(static_cast<unsigned long>(buffer[i]));
42 THROW_IE_EXCEPTION << "Second input must have allocated data";
45 auto scale = static_cast<size_t>(cnnLayer.GetParamAsInt("factor"));
46 outShape = {inShapes[0][0], inShapes[0][1]};
47 for (int i = 2; i < inShapes[0].size(); i++)
48 outShape.push_back(inShapes[0][i] * scale);
50 outShapes.push_back(outShape);
}  // namespace ShapeInfer
}  // namespace InferenceEngine