inference-engine/src/inference_engine/shape_infer/built-in/ie_detection_output_shape_infer.hpp
// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include <description_buffer.hpp>
#include "ie_built_in_impl.hpp"
#include <ie_layers.h>
#include <map>
#include <memory>
#include <string>
#include <vector>

namespace InferenceEngine {
namespace ShapeInfer {

/**
 * @brief Implementation of shape inference for the DetectionOutput layer
 */
class DetectionOutputShapeProp : public BuiltInShapeInferImpl {
public:
    explicit DetectionOutputShapeProp(const std::string& type) : BuiltInShapeInferImpl(type) {}

    void inferShapesImpl(const std::vector<Blob::CPtr>& inBlobs,
                         const std::map<std::string, std::string>& params,
                         const std::map<std::string, Blob::Ptr>& blobs,
                         std::vector<SizeVector>& outShapes) override {
        // Wrap the raw string parameters in a temporary CNNLayer so they can be
        // validated with the layer-specific validator for this layer type.
        LayerParams lp{};
        CNNLayer cnnLayer(lp);
        cnnLayer.params = params;
        cnnLayer.type = _type;
        validate(&cnnLayer, inBlobs, params, blobs);

        // DetectionOutput keeps at most keep_top_k detections per image and emits one
        // 7-element row per detection, so the output shape is {1, 1, keep_top_k * batch, 7}.
        int top_k = cnnLayer.GetParamAsInt("keep_top_k");
        outShapes.push_back({1, 1, static_cast<size_t>(top_k) * inShapes[0][0], 7});
    }
};
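
// A minimal illustration (not part of the original file): the helper below is a
// hypothetical sketch that reproduces the shape computed by inferShapesImpl above,
// assuming the conventional DetectionOutput row layout of
// [image_id, label, confidence, xmin, ymin, xmax, ymax].
// For example, a batch of 2 with keep_top_k = 200 yields {1, 1, 400, 7}.
inline SizeVector detectionOutputShapeSketch(size_t batch, int keep_top_k) {
    // One 7-element row per kept detection, keep_top_k detections per image,
    // flattened across the batch into a single {1, 1, N, 7} blob.
    return {1, 1, static_cast<size_t>(keep_top_k) * batch, 7};
}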

}  // namespace ShapeInfer
}  // namespace InferenceEngine