From: Bartosz Sochacki Date: Wed, 28 Oct 2020 11:57:46 +0000 (+0100) Subject: Extended test infrastructure to call per plugin configure function before running... X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=9b4f4999306a0fcd42528d5a7fcb137b79c254fc;p=platform%2Fupstream%2Fdldt.git Extended test infrastructure to call per plugin configure function before running a test. The function allows to pass plugin specific configuration (#2731) --- diff --git a/docs/template_plugin/tests/functional/plugin_config.cpp b/docs/template_plugin/tests/functional/plugin_config.cpp new file mode 100644 index 0000000..53e2dd7 --- /dev/null +++ b/docs/template_plugin/tests/functional/plugin_config.cpp @@ -0,0 +1,8 @@ +// Copyright (C) 2020 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "functional_test_utils/plugin_config.hpp" + +void PreparePluginConfiguration(LayerTestsUtils::LayerTestsCommon* test) { +} diff --git a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/plugin_config.cpp b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/plugin_config.cpp new file mode 100644 index 0000000..53e2dd7 --- /dev/null +++ b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/plugin_config.cpp @@ -0,0 +1,8 @@ +// Copyright (C) 2020 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "functional_test_utils/plugin_config.hpp" + +void PreparePluginConfiguration(LayerTestsUtils::LayerTestsCommon* test) { +} diff --git a/inference-engine/tests/functional/plugin/gna/shared_tests_instances/plugin_config.cpp b/inference-engine/tests/functional/plugin/gna/shared_tests_instances/plugin_config.cpp new file mode 100644 index 0000000..866500c --- /dev/null +++ b/inference-engine/tests/functional/plugin/gna/shared_tests_instances/plugin_config.cpp @@ -0,0 +1,57 @@ +// Copyright (C) 2020 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include + +#include 
"functional_test_utils/plugin_config.hpp" +#include "functional_test_utils/blob_utils.hpp" +#include "legacy/ie_ngraph_utils.hpp" + +void PreparePluginConfiguration(LayerTestsUtils::LayerTestsCommon* test) { + const float MAX_VAL_2B_FEAT = 16384.0f; + auto inputParameters = test->GetFunction()->get_parameters(); + auto& configuration = test->GetConfiguration(); + for (size_t i = 0; i < inputParameters.size(); ++i) { + std::string scaleFactorConfigKey = "GNA_SCALE_FACTOR" + std::string("_") + std::to_string(i); + if (configuration.find(scaleFactorConfigKey) != configuration.end()) { + continue; + } + + auto elementType = inputParameters[i]->get_element_type(); + auto shape = inputParameters[i]->get_shape(); + auto precision = InferenceEngine::details::convertPrecision(elementType); + precision = (precision.getPrecVal() == InferenceEngine::Precision::FP16) ? + InferenceEngine::Precision(InferenceEngine::Precision::FP32) : precision; + + InferenceEngine::SizeVector size(shape); + InferenceEngine::TensorDesc tensor(precision, size, InferenceEngine::Layout::ANY); + InferenceEngine::DataPtr dataPtr = std::make_shared("tmp", tensor); + + InferenceEngine::InputInfo info; + info.setInputData(dataPtr); + info.setPrecision(precision); + + auto blob = test->GenerateInput(info); + float floatScaleFactor = 1.0f; + + auto memory = InferenceEngine::as(blob); + IE_ASSERT(memory); + + const auto lockedMemory = memory->wmap(); + if (precision == InferenceEngine::Precision::FP32) { + float* ptrFloatFeat = lockedMemory.as(); + float max = 0.0; + + for (size_t i = 0; i < blob->size(); i++) { + if (fabs(ptrFloatFeat[i]) > max) { + max = fabs(ptrFloatFeat[i]); + } + } + + floatScaleFactor = (max == 0) ? 
1.0f : MAX_VAL_2B_FEAT / max; + } + + configuration[scaleFactorConfigKey] = std::to_string(floatScaleFactor); + } +} diff --git a/inference-engine/tests/functional/plugin/gna/shared_tests_instances/subgraph_tests/perm_conv_perm_concat.cpp b/inference-engine/tests/functional/plugin/gna/shared_tests_instances/subgraph_tests/perm_conv_perm_concat.cpp index 3032c52..3735d68 100644 --- a/inference-engine/tests/functional/plugin/gna/shared_tests_instances/subgraph_tests/perm_conv_perm_concat.cpp +++ b/inference-engine/tests/functional/plugin/gna/shared_tests_instances/subgraph_tests/perm_conv_perm_concat.cpp @@ -26,7 +26,6 @@ std::vector<InferenceEngine::Precision> netPrecisions = { std::map<std::string, std::string> additional_config = { {"GNA_DEVICE_MODE", "GNA_SW_EXACT"}, - {"GNA_SCALE_FACTOR_0", "1234"} }; } // namespace diff --git a/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/plugin_config.cpp b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/plugin_config.cpp new file mode 100644 index 0000000..53e2dd7 --- /dev/null +++ b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/plugin_config.cpp @@ -0,0 +1,8 @@ +// Copyright (C) 2020 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "functional_test_utils/plugin_config.hpp" + +void PreparePluginConfiguration(LayerTestsUtils::LayerTestsCommon* test) { +} diff --git a/inference-engine/tests/functional/plugin/myriad/shared_tests_instances/plugin_config.cpp b/inference-engine/tests/functional/plugin/myriad/shared_tests_instances/plugin_config.cpp new file mode 100644 index 0000000..53e2dd7 --- /dev/null +++ b/inference-engine/tests/functional/plugin/myriad/shared_tests_instances/plugin_config.cpp @@ -0,0 +1,8 @@ +// Copyright (C) 2020 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "functional_test_utils/plugin_config.hpp" + +void PreparePluginConfiguration(LayerTestsUtils::LayerTestsCommon* test) { +} diff --git 
a/inference-engine/tests/ie_test_utils/functional_test_utils/layer_test_utils.cpp b/inference-engine/tests/ie_test_utils/functional_test_utils/layer_test_utils.cpp index d53d4fb..ebcbb3f 100644 --- a/inference-engine/tests/ie_test_utils/functional_test_utils/layer_test_utils.cpp +++ b/inference-engine/tests/ie_test_utils/functional_test_utils/layer_test_utils.cpp @@ -6,6 +6,7 @@ #include #include "layer_test_utils.hpp" +#include "plugin_config.hpp" namespace LayerTestsUtils { @@ -98,6 +99,7 @@ void LayerTestsCommon::ConfigureNetwork() const { void LayerTestsCommon::LoadNetwork() { cnnNetwork = InferenceEngine::CNNNetwork{function}; + PreparePluginConfiguration(this); ConfigureNetwork(); executableNetwork = core->LoadNetwork(cnnNetwork, targetDevice, configuration); } @@ -212,4 +214,12 @@ void LayerTestsCommon::Validate() { void LayerTestsCommon::SetRefMode(RefMode mode) { refMode = mode; } + +std::shared_ptr<ngraph::Function> LayerTestsCommon::GetFunction() { + return function; +} + +std::map<std::string, std::string>& LayerTestsCommon::GetConfiguration() { + return configuration; +} } // namespace LayerTestsUtils diff --git a/inference-engine/tests/ie_test_utils/functional_test_utils/layer_test_utils.hpp b/inference-engine/tests/ie_test_utils/functional_test_utils/layer_test_utils.hpp index 813c531..2071ca7 100644 --- a/inference-engine/tests/ie_test_utils/functional_test_utils/layer_test_utils.hpp +++ b/inference-engine/tests/ie_test_utils/functional_test_utils/layer_test_utils.hpp @@ -60,6 +60,10 @@ public: virtual void SetRefMode(RefMode mode); + + std::shared_ptr<ngraph::Function> GetFunction(); + + std::map<std::string, std::string>& GetConfiguration(); + protected: LayerTestsCommon(); diff --git a/inference-engine/tests/ie_test_utils/functional_test_utils/plugin_config.hpp b/inference-engine/tests/ie_test_utils/functional_test_utils/plugin_config.hpp new file mode 100644 index 0000000..6bbf7f7 --- --- /dev/null +++ b/inference-engine/tests/ie_test_utils/functional_test_utils/plugin_config.hpp @@ -0,0 +1,9 @@ +// Copyright (C) 2020 Intel Corporation 
+// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include "functional_test_utils/layer_test_utils.hpp" + +void PreparePluginConfiguration(LayerTestsUtils::LayerTestsCommon* test);