Publishing 2019 R1 content

diff --git a/inference-engine/tests/unit/shape_infer/built_in_shape_infer_general_test.hpp b/inference-engine/tests/unit/shape_infer/built_in_shape_infer_general_test.hpp
index 5eac622..89f7b5a 100644
--- a/inference-engine/tests/unit/shape_infer/built_in_shape_infer_general_test.hpp
+++ b/inference-engine/tests/unit/shape_infer/built_in_shape_infer_general_test.hpp
@@ -1,55 +1,54 @@
-// Copyright (C) 2018 Intel Corporation
+// Copyright (C) 2018-2019 Intel Corporation
 // SPDX-License-Identifier: Apache-2.0
 //
 
 #pragma once
 
 #include <gtest/gtest.h>
+#include <inference_engine/blob_factory.hpp>
 #include <inference_engine/shape_infer/built-in/ie_built_in_holder.hpp>
 #include <utility>
 #include <inference_engine/ie_format_parser.h>
 #include <xml_helper.hpp>
 #include <xml_net_builder.hpp>
 #include <single_layer_common.hpp>
+#include <tests_common.hpp>
 
-class BaseTestCreator {
-protected:
-    std::string _type;
-public:
-    explicit BaseTestCreator(const std::string &type) : _type(type) {}
-
-    virtual InferenceEngine::CNNLayerPtr create(const std::string &type) = 0;
-
-    virtual bool shouldCreate(const std::string &type) = 0;
-};
-
-template<class LT>
-class LayerTestCreator : public BaseTestCreator {
-public:
-    explicit LayerTestCreator(const std::string &type) : BaseTestCreator(type) {}
+namespace IE = InferenceEngine;
 
-    InferenceEngine::CNNLayerPtr create(const std::string &type) override {
-        InferenceEngine::LayerParams params;
-        params.type = type;
-        return std::make_shared<LT>(params);
+struct param_size {
+    // dimensions order: x, y, z, ...
+    std::vector<unsigned> dims;
+    param_size() = default;
+    param_size(std::initializer_list<unsigned> dims) {
+        this->dims = dims;
     }
-
-    bool shouldCreate(const std::string &type) override {
-        return type == _type;
+    bool empty() const {
+        return dims.empty();
     }
-};
-
-struct param_size {
-    unsigned x;
-    unsigned y;
 
     friend std::ostream &operator<<(std::ostream &os, param_size const &paramSize) {
-        os << "x=" << std::to_string(paramSize.x) << ", y=" << std::to_string(paramSize.y);
+        auto d_size = paramSize.dims.size();
+        if (d_size > 0) {
+            os << "dims[" << std::to_string(0) << "]=" << std::to_string(paramSize.dims[0]);
+            for (int i = 1; i < paramSize.dims.size(); i++)
+                os << ", dims[" << std::to_string(i) << "]=" << std::to_string(paramSize.dims[i]);
+        }
         return os;
     };
 
     std::string toSeparetedRow(const char *separator) {
-        std::string res = std::to_string(y) + separator + std::to_string(x);
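+        // Emit dims from last to first, so the 2-D case keeps the previous "y<separator>x" output.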
+        auto d_size = dims.size();
+        std::string res;
+        if (d_size > 0) {
+            res = std::to_string(dims[d_size - 1]);
+            for (int i = static_cast<int>(d_size) - 2; i >= 0; i--) {
+                res += separator + std::to_string(dims[i]);
+            }
+        }
         return res;
     }
 };
@@ -60,7 +59,7 @@ PRETTY_PARAM(stride, param_size);
 
 PRETTY_PARAM(pad, param_size);
 
-PRETTY_PARAM(padrb, param_size);
+PRETTY_PARAM(pad_end, param_size);
 
 PRETTY_PARAM(auto_pad, std::string);
 
@@ -78,9 +77,9 @@ PRETTY_PARAM(LayerType, std::string)
 
 PRETTY_PARAM(LayerDataName, std::string)
 
-PRETTY_PARAM(InOutShapes, testing::InOutData)
+PRETTY_PARAM(InOutShapes, testing::InOutShapes)
 
-PRETTY_PARAM(NewInOutShapes, testing::InOutData)
+PRETTY_PARAM(NewInOutShapes, testing::InOutShapes)
 
 PRETTY_PARAM(MapParams, MapStrStr)
 
@@ -94,107 +93,45 @@ PRETTY_PARAM(ModelPath, std::string);
 
 static size_t BATCH = 100;
 
-class BuiltInShapeInferCommon : public ::testing::Test {
+class BuiltInShapeInferCommon : public TestsCommon {
 protected:
     void SetUp() override {
-        holder = std::make_shared<InferenceEngine::ShapeInfer::BuiltInShapeInferHolder>();
+        holder = std::make_shared<IE::ShapeInfer::BuiltInShapeInferHolder>();
     }
 
-    InferenceEngine::IShapeInferImpl::Ptr getShapeInferImpl(const std::string &type) {
-        InferenceEngine::IShapeInferImpl::Ptr impl;
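+    // Query the holder for the built-in shape-infer implementation of the given layer type; throws if the holder reports an error.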
+    IE::IShapeInferImpl::Ptr getShapeInferImpl(const std::string &type) {
+        IE::IShapeInferImpl::Ptr impl;
         sts = holder->getShapeInferImpl(impl, type.c_str(), &resp);
-        if (sts != InferenceEngine::StatusCode::OK) THROW_IE_EXCEPTION << resp.msg;
+        if (sts != IE::StatusCode::OK) THROW_IE_EXCEPTION << resp.msg;
         return impl;
     }
 
 protected:
-    InferenceEngine::StatusCode sts = InferenceEngine::StatusCode::GENERAL_ERROR;
-    InferenceEngine::ResponseDesc resp;
-    std::shared_ptr<InferenceEngine::IShapeInferExtension> holder;
+    IE::StatusCode sts = IE::StatusCode::GENERAL_ERROR;
+    IE::ResponseDesc resp;
+    std::shared_ptr<IE::IShapeInferExtension> holder;
 };
 
 template<class T>
 class BuiltInShapeInferTestWithParam : public BuiltInShapeInferCommon,
                                        public testing::WithParamInterface<T> {
-    const std::vector<std::shared_ptr<BaseTestCreator>> &getCreators() const {
-        // there should be unique_ptr but it cant be used with initializer lists
-        static std::vector<std::shared_ptr<BaseTestCreator> > creators = {
-                std::make_shared<LayerTestCreator<InferenceEngine::PowerLayer>>("Power"),
-                std::make_shared<LayerTestCreator<InferenceEngine::ConvolutionLayer>>("Convolution"),
-                std::make_shared<LayerTestCreator<InferenceEngine::DeconvolutionLayer>>("Deconvolution"),
-                std::make_shared<LayerTestCreator<InferenceEngine::PoolingLayer>>("Pooling"),
-                std::make_shared<LayerTestCreator<InferenceEngine::FullyConnectedLayer>>("InnerProduct"),
-                std::make_shared<LayerTestCreator<InferenceEngine::FullyConnectedLayer>>("FullyConnected"),
-                std::make_shared<LayerTestCreator<InferenceEngine::NormLayer>>("LRN"),
-                std::make_shared<LayerTestCreator<InferenceEngine::NormLayer>>("Norm"),
-                std::make_shared<LayerTestCreator<InferenceEngine::SoftMaxLayer>>("Softmax"),
-                std::make_shared<LayerTestCreator<InferenceEngine::SoftMaxLayer>>("SoftMax"),
-                std::make_shared<LayerTestCreator<InferenceEngine::GRNLayer>>("GRN"),
-                std::make_shared<LayerTestCreator<InferenceEngine::MVNLayer>>("MVN"),
-                std::make_shared<LayerTestCreator<InferenceEngine::ReLULayer>>("ReLU"),
-                std::make_shared<LayerTestCreator<InferenceEngine::ClampLayer>>("Clamp"),
-                std::make_shared<LayerTestCreator<InferenceEngine::SplitLayer>>("Split"),
-                std::make_shared<LayerTestCreator<InferenceEngine::SplitLayer>>("Slice"),
-                std::make_shared<LayerTestCreator<InferenceEngine::ConcatLayer>>("Concat"),
-                std::make_shared<LayerTestCreator<InferenceEngine::EltwiseLayer>>("Eltwise"),
-                std::make_shared<LayerTestCreator<InferenceEngine::ScaleShiftLayer>>("ScaleShift"),
-                std::make_shared<LayerTestCreator<InferenceEngine::PReLULayer>>("PReLU"),
-                std::make_shared<LayerTestCreator<InferenceEngine::CropLayer>>("Crop"),
-                std::make_shared<LayerTestCreator<InferenceEngine::ReshapeLayer>>("Reshape"),
-                std::make_shared<LayerTestCreator<InferenceEngine::TileLayer>>("Tile"),
-                std::make_shared<LayerTestCreator<InferenceEngine::BatchNormalizationLayer>>("BatchNormalization"),
-                std::make_shared<LayerTestCreator<InferenceEngine::GemmLayer>>("Gemm"),
-                std::make_shared<LayerTestCreator<InferenceEngine::PadLayer>>("Pad"),
-                std::make_shared<LayerTestCreator<InferenceEngine::GatherLayer>>("Gather")
-        };
-        return creators;
-    }
 
 protected:
-    InferenceEngine::DataPtr
-    getNotEmptyData(std::string const &name = "", const InferenceEngine::SizeVector &dims = {}) {
-        InferenceEngine::TensorDesc desc(InferenceEngine::Precision::UNSPECIFIED, dims,
-                                         InferenceEngine::TensorDesc::getLayoutByDims(dims));
-        return std::make_shared<InferenceEngine::Data>(name, desc);
-    }
-
-    InferenceEngine::CNNLayer::Ptr createLayer(const std::string &type) const {
-        for (auto &creator : getCreators()) {
-            if (!creator->shouldCreate(type))
-                continue;
-            return creator->create(type);
-        }
-        static LayerTestCreator<InferenceEngine::GenericLayer> genericCreator("");
-        return genericCreator.create(type);
-    }
-
-    void initLayer(const InferenceEngine::CNNLayerPtr &layer, const testing::InOutData &inOutData) {
-        for (const auto &in:inOutData.inDims) {
-            auto data = getNotEmptyData("", in);
-            _savedData.push_back(data);
-            layer->insData.push_back(data);
-        }
-        for (const auto &out:inOutData.outDims) {
-            layer->outData.push_back(getNotEmptyData("", out));
-        }
-    }
-
-    static testing::InOutData getFakeData(const testing::InOutData &inOutShapes) {
-        testing::InOutData initial = inOutShapes;
-        for (auto &dims : initial.inDims) {
-            std::fill(dims.begin(), dims.end(), 1);
-        }
-        for (auto &dims : initial.outDims) {
-            std::fill(dims.begin(), dims.end(), 1);
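+    // Create one FP32 blob per requested shape (layout deduced from the dims) to be fed into shape inference.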
+    static std::vector<IE::Blob::CPtr> getBlobs(const std::vector<IE::SizeVector>& shapes) {
+        std::vector<IE::Blob::CPtr> inBlobs;
+        for (auto const& dims : shapes) {
+            IE::TensorDesc desc(IE::Precision::FP32, dims, IE::TensorDesc::getLayoutByDims(dims));
+            auto blob = make_blob_with_precision(desc);
+            inBlobs.push_back(blob);
         }
-        return initial;
+        return inBlobs;
     }
 
-    static InferenceEngine::ICNNNetwork::InputShapes
-    setInputShapes(const InferenceEngine::ICNNNetwork &cnnNetwork,
-                   const std::vector<InferenceEngine::SizeVector> &shapesToSet) {
-        InferenceEngine::ICNNNetwork::InputShapes inputShapes;
-        InferenceEngine::InputsDataMap inputs;
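+    // Collect the network inputs and pair each one with the corresponding shape from shapesToSet.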
+    static IE::ICNNNetwork::InputShapes
+    setInputShapes(const IE::ICNNNetwork &cnnNetwork,
+                   const std::vector<IE::SizeVector> &shapesToSet) {
+        IE::ICNNNetwork::InputShapes inputShapes;
+        IE::InputsDataMap inputs;
         cnnNetwork.getInputsInfo(inputs);
         for (const auto &pair : inputs) {
             auto info = pair.second;
@@ -212,10 +149,10 @@ protected:
         return inputShapes;
     }
 
-    static void checkNetworkInOut(const InferenceEngine::ICNNNetwork &network,
-                                  const testing::InOutData &inOutData) {
-        InferenceEngine::InputsDataMap inputsDataMap;
-        InferenceEngine::OutputsDataMap outputsDataMap;
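+    // Verify that the network's input and output dimensions match the expected shapes.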
+    static void checkNetworkInOut(const IE::ICNNNetwork &network,
+                                  const testing::InOutShapes &inOutData) {
+        IE::InputsDataMap inputsDataMap;
+        IE::OutputsDataMap outputsDataMap;
         network.getInputsInfo(inputsDataMap);
         network.getOutputsInfo(outputsDataMap);
         int i = 0;
@@ -229,20 +166,19 @@ protected:
     }
 
     template<int Version = 3>
-    static InferenceEngine::details::CNNNetworkImplPtr
+    static IE::details::CNNNetworkImplPtr
     buildSingleLayerNetwork(const std::string &layerType,
-                            const testing::InOutData &inOutShapes,
+                            const testing::InOutShapes &inOutShapes,
                             std::map<std::string, std::string> *params,
                             const std::string &layerDataName = "data") {
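+        // Generate a single-layer IR of the requested version and parse it with FormatParser.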
-        auto *parser = new InferenceEngine::details::FormatParser(Version);
+        auto *parser = new IE::details::FormatParser(Version);
         return buildSingleLayerNetworkCommon<Version>(parser, layerType, inOutShapes, params, layerDataName);
     }
 
 protected:
-    std::vector<InferenceEngine::SizeVector> outShapes;
+    std::vector<IE::SizeVector> outShapes;
     std::map<std::string, std::string> params;
-    std::map<std::string, InferenceEngine::Blob::Ptr> blobs;
-    std::vector<InferenceEngine::DataPtr> _savedData;
+    std::map<std::string, IE::Blob::Ptr> blobs;
 };
 
 class BuiltInShapeInferImplTest
@@ -261,8 +197,8 @@ protected:
 
 protected:
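+    // Parameters of a single test case: layer type, original and target in/out shapes, layer attributes, and whether shape inference is expected to succeed.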
     std::string type;
-    testing::InOutData inOutShapes;
-    testing::InOutData newInOutShapes;
+    testing::InOutShapes inOutShapes;
+    testing::InOutShapes newInOutShapes;
     MapStrStr layerParams;
     std::string layerDataName;
     bool canInfer{};