Publishing 2019 R1 content
diff --git a/inference-engine/src/inference_engine/shape_infer/built-in/ie_priorbox_shape_infer.hpp b/inference-engine/src/inference_engine/shape_infer/built-in/ie_priorbox_shape_infer.hpp
index 03a8d9c..867651d 100644
--- a/inference-engine/src/inference_engine/shape_infer/built-in/ie_priorbox_shape_infer.hpp
+++ b/inference-engine/src/inference_engine/shape_infer/built-in/ie_priorbox_shape_infer.hpp
@@ -1,4 +1,4 @@
-// Copyright (C) 2018 Intel Corporation
+// Copyright (C) 2018-2019 Intel Corporation
 // SPDX-License-Identifier: Apache-2.0
 //
 
@@ -22,7 +22,7 @@ class PriorBoxShapeProp : public BuiltInShapeInferImpl {
 public:
     explicit PriorBoxShapeProp(const std::string& type) : BuiltInShapeInferImpl(type) {}
 
-    void inferShapesImpl(const std::vector<SizeVector>& inShapes,
+    void inferShapesImpl(const std::vector<Blob::CPtr>& inBlobs,
                          const std::map<std::string, std::string>& params,
                          const std::map<std::string, Blob::Ptr>& blobs,
                          std::vector<SizeVector>& outShapes) override {
@@ -30,7 +30,7 @@ public:
         CNNLayer cnnLayer(lp);
         cnnLayer.params = params;
         cnnLayer.type = _type;
-        validate(&cnnLayer, inShapes, params, blobs);
+        validate(&cnnLayer, inBlobs, params, blobs);
         std::vector<float> min_sizes = cnnLayer.GetParamAsFloats("min_size", {});
         std::vector<float> max_sizes = cnnLayer.GetParamAsFloats("max_size", {});
         bool flip = static_cast<bool>(cnnLayer.GetParamAsInt("flip"));
@@ -45,7 +45,9 @@ public:
             num_priors = (flip ? 2 : 1) * aspect_ratios.size() + min_sizes.size() - 1;
         }
 
-        size_t res_prod = num_priors * inShapes[0][2] * inShapes[0][3] * 4;
+        size_t res_prod = num_priors * 4;
+        for (int i = 2; i < inShapes[0].size(); i++)
+            res_prod *= inShapes[0][i];
         outShapes.push_back({1, 2, res_prod});
     }
 };
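
The last hunk replaces the hard-coded H*W product (inShapes[0][2] * inShapes[0][3]) with a loop over every dimension of the first input from index 2 onward, so the output size no longer assumes a 4-D (NCHW) feature map. A minimal standalone sketch of that computation follows; it is not the Inference Engine API, and the helper name priorBoxOutputShape, its parameters, and the example numbers are illustrative only.

    #include <cstddef>
    #include <iostream>
    #include <vector>

    using SizeVector = std::vector<size_t>;

    // Given the shape of the first input (the feature map) and the number of
    // priors per location, multiply num_priors * 4 box coordinates by every
    // spatial dimension starting at index 2, as the patched loop does.
    SizeVector priorBoxOutputShape(const SizeVector& featureMapShape, size_t num_priors) {
        size_t res_prod = num_priors * 4;
        for (size_t i = 2; i < featureMapShape.size(); i++)
            res_prod *= featureMapShape[i];
        return {1, 2, res_prod};
    }

    int main() {
        // Example: NCHW feature map 1x256x10x10 with 6 priors per location.
        // 6 priors * 4 coordinates * 10 * 10 locations = 2400.
        for (size_t d : priorBoxOutputShape({1, 256, 10, 10}, 6))
            std::cout << d << ' ';   // prints: 1 2 2400
        std::cout << '\n';
    }

For a 4-D input the result is unchanged from the old formula; the loop simply folds any additional trailing spatial dimensions into the same product.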