--- /dev/null
+// Copyright (C) 2020 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include <vector>
+#include "single_layer_tests/minimum_maximum.hpp"
+#include "common_test_utils/test_constants.hpp"
+
+using namespace LayerTestsDefinitions;
+
+namespace {
+
+const std::vector<std::vector<std::vector<size_t>>> inShapes = {
+ {{2}, {1}},
+ {{1, 1, 1, 3}, {1}},
+ {{1, 2, 4}, {1}},
+ {{1, 4, 4}, {1}},
+ {{1, 4, 4, 1}, {1}},
+ {{256, 56}, {256, 56}},
+ {{8, 1, 6, 1}, {7, 1, 5}},
+};
+
+const std::vector<InferenceEngine::Precision> netPrecisions = {
+ InferenceEngine::Precision::FP32,
+ InferenceEngine::Precision::FP16,
+};
+
+const std::vector<ngraph::helpers::MinMaxOpType> opType = {
+ ngraph::helpers::MinMaxOpType::MINIMUM,
+ ngraph::helpers::MinMaxOpType::MAXIMUM,
+};
+
+const std::vector<ngraph::helpers::InputLayerType> inputType = {
+ ngraph::helpers::InputLayerType::CONSTANT,
+ ngraph::helpers::InputLayerType::PARAMETER,
+};
+
+INSTANTIATE_TEST_CASE_P(maximum, MaxMinLayerTest,
+ ::testing::Combine(
+ ::testing::ValuesIn(inShapes),
+ ::testing::ValuesIn(opType),
+ ::testing::ValuesIn(netPrecisions),
+ ::testing::ValuesIn(inputType),
+ ::testing::Values(CommonTestUtils::DEVICE_CPU)),
+ MaxMinLayerTest::getTestCaseName);
+
+} // namespace
+++ /dev/null
-// Copyright (C) 2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include <vector>
-#include "single_layer_tests/maximum.hpp"
-#include "common_test_utils/test_constants.hpp"
-
-using namespace LayerTestsDefinitions;
-
-namespace {
-
-std::vector<std::vector<std::vector<size_t>>> inShapes = {
- {{2}},
- {{1, 1, 1, 3}},
- {{1, 2, 4}},
- {{1, 4, 4}},
- {{1, 4, 4, 1}},
-};
-
-std::vector<InferenceEngine::Precision> netPrecisions = {InferenceEngine::Precision::I32,
- InferenceEngine::Precision::FP16,
-};
-
-INSTANTIATE_TEST_CASE_P(maximum, MaximumLayerTest,
- ::testing::Combine(
- ::testing::ValuesIn(inShapes),
- ::testing::ValuesIn(netPrecisions),
- ::testing::Values(CommonTestUtils::DEVICE_MYRIAD)),
- MaximumLayerTest::getTestCaseName);
-
-} // namespace
--- /dev/null
+// Copyright (C) 2020 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include <vector>
+#include "single_layer_tests/minimum_maximum.hpp"
+#include "common_test_utils/test_constants.hpp"
+
+using namespace LayerTestsDefinitions;
+
+namespace {
+
+const std::vector<std::vector<std::vector<size_t>>> inShapes = {
+ {{2}, {1}},
+ {{1, 1, 1, 3}, {1}},
+ {{1, 2, 4}, {1}},
+ {{1, 4, 4}, {1}},
+ {{1, 4, 4, 1}, {1}},
+};
+
+const std::vector<InferenceEngine::Precision> netPrecisions = {
+ InferenceEngine::Precision::I32,
+ InferenceEngine::Precision::FP16,
+};
+
+const std::vector<ngraph::helpers::MinMaxOpType> opType = {
+ ngraph::helpers::MinMaxOpType::MINIMUM,
+ ngraph::helpers::MinMaxOpType::MAXIMUM,
+};
+
+const std::vector<ngraph::helpers::InputLayerType> inputType = {
+ ngraph::helpers::InputLayerType::CONSTANT,
+ ngraph::helpers::InputLayerType::PARAMETER,
+};
+
+INSTANTIATE_TEST_CASE_P(maximum, MaxMinLayerTest,
+ ::testing::Combine(
+ ::testing::ValuesIn(inShapes),
+ ::testing::ValuesIn(opType),
+ ::testing::ValuesIn(netPrecisions),
+ ::testing::ValuesIn(inputType),
+ ::testing::Values(CommonTestUtils::DEVICE_MYRIAD)),
+ MaxMinLayerTest::getTestCaseName);
+
+} // namespace
namespace LayerTestsDefinitions {
-using MaximumParamsTuple = typename std::tuple<
- std::vector<std::vector<size_t>>, //input shapes
- InferenceEngine::Precision, //Network precision
- std::string>; //Device name
+using MaxMinParamsTuple = typename std::tuple<
+ std::vector<std::vector<size_t>>, // Input shapes
+ ngraph::helpers::MinMaxOpType, // OperationType
+ InferenceEngine::Precision, // Network precision
+ ngraph::helpers::InputLayerType, // Secondary input type
+ std::string>; // Device name
-class MaximumLayerTest:
- public testing::WithParamInterface<MaximumParamsTuple>,
+class MaxMinLayerTest:
+ public testing::WithParamInterface<MaxMinParamsTuple>,
public LayerTestsUtils::LayerTestsCommon{
public:
- std::shared_ptr<ngraph::Function> fn;
- static std::string getTestCaseName(const testing::TestParamInfo<MaximumParamsTuple>& obj);
+ static std::string getTestCaseName(const testing::TestParamInfo<MaxMinParamsTuple>& obj);
protected:
void SetUp() override;
};
+++ /dev/null
-// Copyright (C) 2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include <tuple>
-#include <string>
-#include <vector>
-#include <memory>
-#include <functional>
-#include <debug.h>
-#include "ie_core.hpp"
-#include "common_test_utils/common_utils.hpp"
-#include "functional_test_utils/blob_utils.hpp"
-#include "functional_test_utils/precision_utils.hpp"
-#include "functional_test_utils/plugin_cache.hpp"
-#include "functional_test_utils/skip_tests_config.hpp"
-#include "single_layer_tests/maximum.hpp"
-
-namespace LayerTestsDefinitions {
- std::string MaximumLayerTest::getTestCaseName(const testing::TestParamInfo<MaximumParamsTuple> &obj) {
- std::vector<std::vector<size_t>> inputShapes;
- InferenceEngine::Precision netPrecision;
- std::string targetName;
- std::tie(inputShapes, netPrecision, targetName) = obj.param;
- std::ostringstream results;
-
- results << "IS=" << CommonTestUtils::vec2str(inputShapes) << "_";
- results << "netPRC=" << netPrecision.name() << "_";
- results << "targetDevice=" << targetName << "_";
- return results.str();
- }
-
- void MaximumLayerTest::SetUp() {
- std::vector<std::vector<size_t>> inputShapes;
- InferenceEngine::Precision netPrecision;
- std::tie(inputShapes, netPrecision, targetDevice) = this->GetParam();
- const std::size_t inputDim = InferenceEngine::details::product(inputShapes[0]);
- auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
- std::vector<size_t> shapeInput{1, inputDim};
- auto input = ngraph::builder::makeParams(ngPrc, {shapeInput});
- auto constMul = ngraph::builder::makeConstant(ngPrc, ngraph::Shape{1}, std::vector<float>{-1.0f});
- auto max = std::make_shared<ngraph::opset1::Maximum>(input[0], constMul);
- function = std::make_shared<ngraph::Function>(max, input, "maximum");
- }
-
- TEST_P(MaximumLayerTest, CompareWithRefs){
- Run();
- };
-} // namespace LayerTestsDefinitions
--- /dev/null
+// Copyright (C) 2020 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include <tuple>
+#include <string>
+#include <vector>
+#include <memory>
+#include <debug.h>
+
+#include "common_test_utils/common_utils.hpp"
+#include "functional_test_utils/precision_utils.hpp"
+#include "functional_test_utils/skip_tests_config.hpp"
+#include "single_layer_tests/minimum_maximum.hpp"
+
+namespace LayerTestsDefinitions {
+ std::string MaxMinLayerTest::getTestCaseName(const testing::TestParamInfo<MaxMinParamsTuple> &obj) {
+ std::vector<std::vector<size_t>> inputShapes;
+ InferenceEngine::Precision netPrecision;
+ std::string targetName;
+ ngraph::helpers::InputLayerType inputType;
+ ngraph::helpers::MinMaxOpType opType;
+ std::tie(inputShapes, opType, netPrecision, inputType, targetName) = obj.param;
+ std::ostringstream results;
+
+ results << "IS=" << CommonTestUtils::vec2str(inputShapes) << "_";
+ results << "OpType=" << opType << "_";
+ results << "SecondaryInputType=" << inputType << "_";
+ results << "netPRC=" << netPrecision.name() << "_";
+ results << "targetDevice=" << targetName << "_";
+ return results.str();
+ }
+
+ void MaxMinLayerTest::SetUp() {
+ std::vector<std::vector<size_t>> inputShapes;
+ InferenceEngine::Precision netPrecision;
+ ngraph::helpers::InputLayerType inputType;
+ ngraph::helpers::MinMaxOpType opType;
+ std::tie(inputShapes, opType, netPrecision, inputType, targetDevice) = this->GetParam();
+ if (inputShapes.size() != 2) {
+ THROW_IE_EXCEPTION << "Unsupported number of inputs for Minimum/Maximum operation";
+ }
+ auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
+ auto input = ngraph::builder::makeParams(ngPrc, {inputShapes[0]});
+ auto secondaryInput = ngraph::builder::makeInputLayer(ngPrc, inputType, {inputShapes[1]});
+ if (inputType == ngraph::helpers::InputLayerType::PARAMETER) {
+ input.push_back(std::dynamic_pointer_cast<ngraph::opset3::Parameter>(secondaryInput));
+ }
+
+ auto op = ngraph::builder::makeMinMax(input[0], secondaryInput, opType);
+ function = std::make_shared<ngraph::Function>(op, input, "MinMax");
+ }
+
+ TEST_P(MaxMinLayerTest, CompareWithRefs){
+ Run();
+ };
+} // namespace LayerTestsDefinitions
const std::vector<int> &squeeze_indices,
ngraph::helpers::SqueezeOpType opType);
+std::shared_ptr<ngraph::Node> makeMinMax(const ngraph::Output<Node> &in1,
+ const ngraph::Output<Node> &in2,
+ ngraph::helpers::MinMaxOpType opType);
+
std::shared_ptr<ngraph::Node> makeProposal(const ngraph::Output<Node> &class_probs,
const ngraph::Output<Node> &class_logits,
const ngraph::Output<Node> &image_shape,
UNSQUEEZE
};
+enum MinMaxOpType {
+ MINIMUM,
+ MAXIMUM
+};
+
enum QuantizationGranularity {
Pertensor,
Perchannel
--- /dev/null
+// Copyright (C) 2020 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "ngraph_functions/builders.hpp"
+
+namespace ngraph {
+namespace builder {
+
+std::shared_ptr<ngraph::Node> makeMinMax(const ngraph::Output<Node> &in1,
+ const ngraph::Output<Node> &in2,
+ ngraph::helpers::MinMaxOpType opType) {
+ switch (opType) {
+ case ngraph::helpers::MinMaxOpType::MINIMUM:
+ return std::make_shared<ngraph::opset3::Minimum>(in1, in2);
+ case ngraph::helpers::MinMaxOpType::MAXIMUM:
+ return std::make_shared<ngraph::opset3::Maximum>(in1, in2);
+ default:
+ throw std::logic_error("Unsupported operation type");
+ }
+}
+
+} // namespace builder
+} // namespace ngraph