Removed getLayerByName from public API (#1110)
author Ilya Lavrenov <ilya.lavrenov@intel.com>
Thu, 25 Jun 2020 17:00:39 +0000 (20:00 +0300)
committer GitHub <noreply@github.com>
Thu, 25 Jun 2020 17:00:39 +0000 (20:00 +0300)
* Fixed tests

* Removed getLayerByName from public API

24 files changed:
inference-engine/include/cpp/ie_cnn_network.h
inference-engine/include/ie_icnn_network.hpp
inference-engine/src/inference_engine/cnn_network_ngraph_impl.cpp
inference-engine/src/inference_engine/cnn_network_ngraph_impl.hpp
inference-engine/src/legacy_api/include/cnn_network_impl.hpp
inference-engine/src/legacy_api/src/ie_deprecated.cpp
inference-engine/tests/functional/inference_engine/cnn_network/cnn_ngraph_impl_tests.cpp
inference-engine/tests/functional/inference_engine/local_test.cpp
inference-engine/tests/functional/inference_engine/ngraph_reshape_tests.cpp
inference-engine/tests/functional/plugin/myriad/ngraph/conversions/dynamic_shape_resolver.cpp
inference-engine/tests/functional/plugin/shared/src/ngraph_conversion_tests/conv_bias_fusion.cpp
inference-engine/tests/ie_test_utils/common_test_utils/common_utils.hpp
inference-engine/tests_deprecated/functional/shared_tests/ie_class/ie_class.hpp
inference-engine/tests_deprecated/functional/shared_tests/transformations/concat_test.cpp
inference-engine/tests_deprecated/functional/shared_tests/transformations/fq_as_output.cpp
inference-engine/tests_deprecated/functional/shared_tests/transformations/quantization_on_inverted_weights_test.cpp
inference-engine/tests_deprecated/functional/shared_tests/transformations/quantization_on_weights_test.cpp
inference-engine/tests_deprecated/unit/cnn_network/cnn_net_reader_impl_test.cpp
inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_deconv_concat_tests.cpp
inference-engine/tests_deprecated/unit/engines/mkldnn/normalizer/supported_fusions_test.cpp
inference-engine/tests_deprecated/unit/graph_tools/graph_copy_tests.cpp
inference-engine/tests_deprecated/unit/graph_tools/graph_test_base.hpp
inference-engine/tests_deprecated/unit/graph_tools/graph_tools_test.cpp
inference-engine/tests_deprecated/unit/inference_engine_tests/util_const_infer_test.cpp

index c7763eb..f71dd32 100644 (file)
@@ -198,18 +198,6 @@ public:
     }
 
     /**
-     * @deprecated Migrate to IR v10 and work with ngraph::Function directly. The method will be removed in 2021.1
-     * @copybrief ICNNNetwork::getLayerByName
-     *
-     * Wraps ICNNNetwork::getLayerByName
-     *
-     * @param layerName Given name of the layer
-     * @return Status code of the operation. InferenceEngine::OK if succeeded
-     */
-    INFERENCE_ENGINE_DEPRECATED("Migrate to IR v10 and work with ngraph::Function directly. The method will be removed in 2021.1")
-    CNNLayerPtr getLayerByName(const char* layerName) const;
-
-    /**
      * @brief Helper method to get collect all input shapes with names of corresponding Data objects
      *
      * @return Map of pairs: input name and its dimension.
index 93a6a5d..a2fff91 100644 (file)
@@ -126,18 +126,6 @@ public:
                                  ResponseDesc* resp = nullptr) noexcept = 0;
 
     /**
-     * @deprecated Migrate to IR v10 and work with ngraph::Function directly. The method will be removed in 2021.1
-     * @brief Gets network layer with the given name
-     *
-     * @param layerName Given name of the layer
-     * @param out Pointer to the found CNNLayer object with the given name
-     * @param resp Pointer to the response message that holds a description of an error if any occurred
-     * @return Status code of the operation. InferenceEngine::OK if succeeded
-     */
-    INFERENCE_ENGINE_DEPRECATED("Migrate to IR v10 and work with ngraph::Function directly. The method will be removed in 2021.1")
-    virtual StatusCode getLayerByName(const char* layerName, CNNLayerPtr& out, ResponseDesc* resp) const noexcept = 0;
-
-    /**
      * @brief Changes the inference batch size.
      *
      * @note There are several limitations and it's not recommended to use it. Set batch to the input shape and call
index 1af35ab..11b8f92 100644 (file)
@@ -226,15 +226,6 @@ void CNNNetworkNGraphImpl::validate(int version) {
         _ngraph_function->validate_nodes_and_infer_types();
 }
 
-StatusCode CNNNetworkNGraphImpl::getLayerByName(const char* layerName, CNNLayerPtr& out, ResponseDesc* resp) const
-    noexcept {
-    if (!cnnNetwork) {
-        const_cast<CNNNetworkNGraphImpl *>(this)->convertToCNNNetworkImpl();
-    }
-    if (!cnnNetwork) return GENERAL_ERROR;
-    return cnnNetwork->getLayerByName(layerName, out, resp);
-}
-
 StatusCode CNNNetworkNGraphImpl::addOutput(const std::string& layerName, size_t outputIndex,
                                            ResponseDesc* resp) noexcept {
     IE_PROFILING_AUTO_SCOPE(addOutput)
index 0210cf2..7bdbe8a 100644 (file)
@@ -64,9 +64,6 @@ public:
     INFERENCE_ENGINE_DEPRECATED("Use ngraph::Function directly")
     void addLayer(const CNNLayerPtr& layer) noexcept override;
 
-    INFERENCE_ENGINE_DEPRECATED("Use ngraph::Function directly")
-    StatusCode getLayerByName(const char* layerName, CNNLayerPtr& out, ResponseDesc* resp) const noexcept override;
-
     // public version
     StatusCode setBatchSize(size_t size, ResponseDesc* responseDesc) noexcept override;
 
index 1aad2c9..9967de2 100644 (file)
@@ -98,7 +98,7 @@ public:
 
     void removeData(const std::string& dataName);
 
-    StatusCode getLayerByName(const char* layerName, CNNLayerPtr& out, ResponseDesc* resp) const noexcept override;
+    StatusCode getLayerByName(const char* layerName, CNNLayerPtr& out, ResponseDesc* resp) const noexcept;
 
     // public version
     StatusCode setBatchSize(size_t size, ResponseDesc* responseDesc) noexcept override;
index 331191a..37ec7e8 100644 (file)
@@ -12,12 +12,6 @@ IE_SUPPRESS_DEPRECATED_START
 
 namespace InferenceEngine {
 
-CNNLayerPtr CNNNetwork::getLayerByName(const char* layerName) const {
-    CNNLayerPtr layer;
-    CALL_STATUS_FNC(getLayerByName, layerName, layer);
-    return layer;
-}
-
 CNNLayer::CNNLayer(const LayerParams& prms)
     : node(nullptr), name(prms.name), type(prms.type), precision(prms.precision), userValue({0}) {}
 
index c3626e7..8249568 100644 (file)
@@ -30,6 +30,7 @@
 #include <ngraph/op/result.hpp>
 
 #include "common_test_utils/file_utils.hpp"
+#include "common_test_utils/common_utils.hpp"
 #include "transformations/rt_info/primitives_priority_attribute.hpp"
 #include "cnn_network_ngraph_impl.hpp"
 
@@ -205,7 +206,7 @@ TEST(CNNNGraphImplTests, TestSaveAffinity) {
     }
 
     InferenceEngine::CNNNetwork cnnNet(ngraph);
-    auto cnnLayer = cnnNet.getLayerByName("testReLU");
+    auto cnnLayer = CommonTestUtils::getLayerByName(cnnNet, "testReLU");
     ASSERT_NE(nullptr, cnnLayer);
     ASSERT_EQ(cnnLayer->affinity, testAffinity);
 }
@@ -350,15 +351,15 @@ TEST(CNNNGraphImplTests, SaveAttributesAfterConversion) {
     }
 
     InferenceEngine::details::CNNNetworkNGraphImpl cnnNet(ngraph);
-    CNNLayerPtr layer;
-    ASSERT_EQ(OK, cnnNet.getLayerByName(name.c_str(), layer, nullptr));
+    auto * icnnnetwork = static_cast<InferenceEngine::ICNNNetwork*>(&cnnNet);
+    CNNLayerPtr layer = CommonTestUtils::getLayerByName(icnnnetwork, name);
     layer->params["test"] = "2";
-    ASSERT_EQ(OK, cnnNet.getLayerByName(name.c_str(), layer, nullptr));
+    layer = CommonTestUtils::getLayerByName(icnnnetwork, name);
     ASSERT_TRUE(layer->params.find("test") != layer->params.end());
     ASSERT_EQ(layer->params["test"], "2");
 
     cnnNet.convertToCNNNetworkImpl();
-    ASSERT_EQ(OK, cnnNet.getLayerByName(name.c_str(), layer, nullptr));
+    layer = CommonTestUtils::getLayerByName(icnnnetwork, name);
     ASSERT_TRUE(layer->params.find("test") != layer->params.end());
     ASSERT_EQ(layer->params["test"], "2");
 }
index 54bbc72..eeab57a 100644 (file)
@@ -6,6 +6,7 @@
 
 #include <ie_core.hpp>
 #include <net_pass.h>
+#include "common_test_utils/common_utils.hpp"
 
 using namespace ::testing;
 using namespace std;
@@ -213,12 +214,12 @@ protected:
 
         IE_SUPPRESS_DEPRECATED_START
         if (!isLSTM) {
-            auto power_layer = dynamic_pointer_cast<PowerLayer>(net.getLayerByName("power"));
+            auto power_layer = dynamic_pointer_cast<PowerLayer>(CommonTestUtils::getLayerByName(net, "power"));
             ASSERT_EQ(power_layer->scale, 0.75f);
             ASSERT_EQ(power_layer->offset, 0.35f);
             ASSERT_EQ(power_layer->power, 0.5f);
 
-            auto sum_layer = dynamic_pointer_cast<EltwiseLayer>(net.getLayerByName("sum"));
+            auto sum_layer = dynamic_pointer_cast<EltwiseLayer>(CommonTestUtils::getLayerByName(net, "sum"));
             std::vector<float> ref_coeff{0.77f, 0.33f};
             ASSERT_EQ(sum_layer->coeff, ref_coeff);
 
@@ -230,7 +231,7 @@ protected:
             InferenceEngine::NetPass::UnrollRNN_if(net, [] (const RNNCellBase& rnn) -> bool { return true; });
             net.serialize("UnrollRNN_if.xml");
             EXPECT_EQ(0, std::remove("UnrollRNN_if.xml"));
-            auto lstmcell_layer = dynamic_pointer_cast<ClampLayer>(net.getLayerByName("LSTMCell:split_clip"));
+            auto lstmcell_layer = dynamic_pointer_cast<ClampLayer>(CommonTestUtils::getLayerByName(net, "LSTMCell:split_clip"));
 
             float ref_coeff = 0.2f;
             ASSERT_EQ(lstmcell_layer->min_value, -ref_coeff);
index fd1b212..1e30167 100644 (file)
@@ -28,6 +28,7 @@
 #include "common_test_utils/test_common.hpp"
 #include "common_test_utils/data_utils.hpp"
 #include "common_test_utils/file_utils.hpp"
+#include "common_test_utils/common_utils.hpp"
 #include "generic_ie.hpp"
 
 IE_SUPPRESS_DEPRECATED_START
@@ -282,7 +283,7 @@ TEST_F(NGraphReshapeTests, ReshapeNewIRWithNewExtension1) {
     SizeVector outDims = output["activation"]->getTensorDesc().getDims();
     ASSERT_EQ(outDims, refAfterReshape);
     // Convert to CNNNetwork
-    auto layer = network.getLayerByName("activation");
+    auto layer = CommonTestUtils::getLayerByName(network, "activation");
     ASSERT_EQ("CustomTestLayer", layer->type);
 }
 
@@ -352,7 +353,7 @@ TEST_F(NGraphReshapeTests, ReshapeNewIRWithNewExtension2) {
     SizeVector outDims = output["activation"]->getTensorDesc().getDims();
     ASSERT_EQ(outDims, refAfterReshape);
     // Convert to CNNNetwork
-    auto layer = network.getLayerByName("activation");
+    auto layer = CommonTestUtils::getLayerByName(network, "activation");
     ASSERT_EQ("CustomTestLayer", layer->type);
     ASSERT_EQ("false", layer->params["test1"]);
     ASSERT_EQ("3", layer->params["test2"]);
index 9632c89..86e0fcf 100644 (file)
@@ -11,6 +11,7 @@
 #include "ie_common.h"
 
 #include "common_test_utils/test_common.hpp"
+#include "common_test_utils/common_utils.hpp"
 #include "details/ie_cnn_network_iterator.hpp"
 
 #include <gtest/gtest.h>
@@ -36,7 +37,7 @@ public:
 
 protected:
     InferenceEngine::CNNLayerPtr getDynamicShapeResolverLayer() const {
-        return cnnNetwork.getLayerByName(s_FriendlyName);
+        return CommonTestUtils::getLayerByName(cnnNetwork, s_FriendlyName);
     }
     InferenceEngine::CNNNetwork cnnNetwork;
 
index 93a5518..7b28e01 100644 (file)
@@ -3,6 +3,7 @@
 // SPDX-License-Identifier: Apache-2.0
 //
 
+#include "common_test_utils/common_utils.hpp"
 #include "ngraph_conversion_tests/conv_bias_fusion.hpp"
 #include <ngraph/variant.hpp>
 
@@ -58,7 +59,7 @@ TEST_P(ConvBiasFusion, ConvBiasFusion) {
         }
     } else {
         IE_SUPPRESS_DEPRECATED_START
-        auto add_layer = net.getLayerByName(getOutputName().c_str());
+        auto add_layer = CommonTestUtils::getLayerByName(net, getOutputName());
         ASSERT_EQ(add_layer->params["originalLayersNames"], "add,conv");
         IE_SUPPRESS_DEPRECATED_END
     }
index aa7cf5a..f00f296 100644 (file)
@@ -9,6 +9,9 @@
 #include <iterator>
 #include <vector>
 
+#include <cpp/ie_cnn_network.h>
+#include <details/ie_cnn_network_iterator.hpp>
+
 namespace CommonTestUtils {
 
 template<typename vecElementType>
@@ -32,4 +35,27 @@ inline std::string vec2str(const std::vector<std::vector<vecElementType>> &vec)
     return result.str();
 }
 
+inline InferenceEngine::CNNLayerPtr getLayerByName(const InferenceEngine::ICNNNetwork * icnnnetwork,
+                                                   const std::string & layerName) {
+    IE_SUPPRESS_DEPRECATED_START
+    InferenceEngine::details::CNNNetworkIterator i(icnnnetwork), end;
+    while (i != end) {
+        auto layer = *i;
+        if (layer->name == layerName)
+            return layer;
+        ++i;
+    }
+
+    std::stringstream stream;
+    stream << "Layer " << layerName << " not found in network";
+    throw InferenceEngine::NotFound(stream.str());
+    IE_SUPPRESS_DEPRECATED_END
+}
+
+inline InferenceEngine::CNNLayerPtr getLayerByName(const InferenceEngine::CNNNetwork & network,
+                                                   const std::string & layerName) {
+    const InferenceEngine::ICNNNetwork & icnnnetwork = static_cast<const InferenceEngine::ICNNNetwork&>(network);
+    return getLayerByName(&icnnnetwork, layerName);
+}
+
 }  // namespace CommonTestUtils
index 28638e5..9baa9d4 100644 (file)
@@ -22,6 +22,7 @@
 #include <ngraph/op/subtract.hpp>
 
 #include "common_test_utils/file_utils.hpp"
+#include "common_test_utils/common_utils.hpp"
 #include "common_test_utils/unicode_utils.hpp"
 #include "ngraph_functions/subgraph_builders.hpp"
 
@@ -1293,7 +1294,7 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkHETEROwithMULTINoThrowv7) {
 
         for (auto && layer : result.supportedLayersMap) {
             IE_SUPPRESS_DEPRECATED_START
-            EXPECT_NO_THROW(actualNetwork.getLayerByName(layer.first.c_str()));
+            EXPECT_NO_THROW(CommonTestUtils::getLayerByName(actualNetwork, layer.first));
             IE_SUPPRESS_DEPRECATED_END
         }
     } else {
@@ -1321,7 +1322,7 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkMULTIwithHETERONoThrowv7) {
 
         for (auto && layer : result.supportedLayersMap) {
             IE_SUPPRESS_DEPRECATED_START
-            EXPECT_NO_THROW(actualNetwork.getLayerByName(layer.first.c_str()));
+            EXPECT_NO_THROW(CommonTestUtils::getLayerByName(actualNetwork, layer.first));
             IE_SUPPRESS_DEPRECATED_END
         }
     } else {
index 82635ce..e043c3f 100644 (file)
@@ -5,6 +5,7 @@
 #include "low_precision_transformer_single_layer_tests.hpp"
 #include "low_precision_transformations/concat.hpp"
 #include "low_precision_transformations/eltwise.hpp"
+#include "common_test_utils/common_utils.hpp"
 
 ConcatTestModel::ConcatTestModel(
     const bool signedIntervals,
@@ -90,14 +91,14 @@ bool ConcatTestModel::transform(CNNNetwork& network, LayerTransformation::Params
     LowPrecisionTransformer transformer(transformations);
     transformer.transform(network);
 
-    const CNNLayerPtr concatLayer = network.getLayerByName("concat");
+    const CNNLayerPtr concatLayer = CommonTestUtils::getLayerByName(network, "concat");
     if (concatLayer == nullptr) {
         THROW_IE_EXCEPTION << "concat layer was not found";
     }
 
     const std::vector<size_t> dims = concatLayer->outData[0]->getDims();
     if (dims.size() == 4ul) {
-        const CNNLayerPtr fakeQuantizeLayer1 = network.getLayerByName("fakeQuantize1");
+        const CNNLayerPtr fakeQuantizeLayer1 = CommonTestUtils::getLayerByName(network, "fakeQuantize1");
         QuantizeLayer* fakeQuantize1 = dynamic_cast<QuantizeLayer*>(fakeQuantizeLayer1.get());
         if (fakeQuantize1 == nullptr) {
             THROW_IE_EXCEPTION << "incorrect type for layer " << fakeQuantizeLayer1->name;
@@ -106,7 +107,7 @@ bool ConcatTestModel::transform(CNNNetwork& network, LayerTransformation::Params
             //
         }
 
-        const CNNLayerPtr fakeQuantizeLayer2 = network.getLayerByName("fakeQuantize2");
+        const CNNLayerPtr fakeQuantizeLayer2 = CommonTestUtils::getLayerByName(network, "fakeQuantize2");
         QuantizeLayer* fakeQuantize2 = dynamic_cast<QuantizeLayer*>(fakeQuantizeLayer2.get());
         if (fakeQuantize2 == nullptr) {
             THROW_IE_EXCEPTION << "incorrect type for layer " << fakeQuantizeLayer2->name;
index 7e22b47..2be9ab0 100644 (file)
@@ -3,6 +3,7 @@
 //
 
 #include "low_precision_transformer_single_layer_tests.hpp"
+#include "common_test_utils/common_utils.hpp"
 
 std::string FakeQuantizeAsOutputTest::getName() const {
     return "FakeQuantizeAsOutputTest";
@@ -14,7 +15,7 @@ bool FakeQuantizeAsOutputTest::transform(CNNNetwork& network, LayerTransformatio
     LowPrecisionTransformer transformer(LowPrecisionTransformer::getAllTransformations(params));
     transformer.transform(network);
 
-    const auto fq = network.getLayerByName("FakeQuantize12");
+    const auto fq = CommonTestUtils::getLayerByName(network, "FakeQuantize12");
     if (fq == nullptr)
         THROW_IE_EXCEPTION << "Layer 'FakeQuantize12' should not be transformed";
 
index 5a7e37f..aee0405 100644 (file)
@@ -3,6 +3,7 @@
 //
 
 #include "low_precision_transformer_single_layer_tests.hpp"
+#include "common_test_utils/common_utils.hpp"
 
 std::string QuantizationOnInvertedWeightsTestModel::getModel(SingleLayerTransformationsTestParams& p) const {
     size_t type_size = sizeof(InferenceEngine::PrecisionTrait<InferenceEngine::Precision::FP32>::value_type);
@@ -59,13 +60,13 @@ std::string QuantizationOnInvertedWeightsTestModel::getName() const {
 }
 
 bool QuantizationOnInvertedWeightsTestModel::transform(CNNNetwork& network, LayerTransformation::Params& params) const {
-    CNNLayerPtr weightsFakeQuantize = network.getLayerByName("FakeQuantize12");
+    CNNLayerPtr weightsFakeQuantize = CommonTestUtils::getLayerByName(network, "FakeQuantize12");
     Blob::Ptr weights = CNNNetworkHelper::quantizeWeights(*weightsFakeQuantize, false);
 
-    CNNLayerPtr biasesConvolutionConst = network.getLayerByName("Const13");
+    CNNLayerPtr biasesConvolutionConst = CommonTestUtils::getLayerByName(network, "Const13");
     Blob::Ptr biases = getBlob(biasesConvolutionConst, "custom");
 
-    CNNLayerPtr convolution = network.getLayerByName("Convolution14");
+    CNNLayerPtr convolution = CommonTestUtils::getLayerByName(network, "Convolution14");
     convolution->blobs.emplace("weights", weights);
     convolution->blobs.emplace("biases", biases);
 
@@ -73,15 +74,15 @@ bool QuantizationOnInvertedWeightsTestModel::transform(CNNNetwork& network, Laye
     weightableLayer->_weights = weights;
     weightableLayer->_biases = biases;
 
-    CNNLayerPtr weightsConstInput = network.getLayerByName("Const7");
+    CNNLayerPtr weightsConstInput = CommonTestUtils::getLayerByName(network, "Const7");
     CNNNetworkHelper::removeLayer(network, weightsConstInput);
-    CNNLayerPtr weightsConstInputLow = network.getLayerByName("Const8");
+    CNNLayerPtr weightsConstInputLow = CommonTestUtils::getLayerByName(network, "Const8");
     CNNNetworkHelper::removeLayer(network, weightsConstInputLow);
-    CNNLayerPtr weightsConstInputHigh = network.getLayerByName("Const9");
+    CNNLayerPtr weightsConstInputHigh = CommonTestUtils::getLayerByName(network, "Const9");
     CNNNetworkHelper::removeLayer(network, weightsConstInputHigh);
-    CNNLayerPtr weightsConstOutputLow = network.getLayerByName("Const10");
+    CNNLayerPtr weightsConstOutputLow = CommonTestUtils::getLayerByName(network, "Const10");
     CNNNetworkHelper::removeLayer(network, weightsConstOutputLow);
-    CNNLayerPtr weightsConstOutputHigh = network.getLayerByName("Const11");
+    CNNLayerPtr weightsConstOutputHigh = CommonTestUtils::getLayerByName(network, "Const11");
     CNNNetworkHelper::removeLayer(network, weightsConstOutputHigh);
 
     CNNNetworkHelper::removeLayer(network, weightsFakeQuantize);
index 92f47d6..9e37a17 100644 (file)
@@ -3,6 +3,7 @@
 //
 
 #include "low_precision_transformer_single_layer_tests.hpp"
+#include "common_test_utils/common_utils.hpp"
 
 std::string QuantizationOnWeightsTestModel::getModel(SingleLayerTransformationsTestParams& p) const {
     size_t type_size = sizeof(InferenceEngine::PrecisionTrait<InferenceEngine::Precision::FP32>::value_type);
@@ -59,13 +60,13 @@ std::string QuantizationOnWeightsTestModel::getName() const {
 }
 
 bool QuantizationOnWeightsTestModel::transform(CNNNetwork& network, LayerTransformation::Params& params) const {
-    CNNLayerPtr weightsFakeQuantize = network.getLayerByName("FakeQuantize12");
+    CNNLayerPtr weightsFakeQuantize = CommonTestUtils::getLayerByName(network, "FakeQuantize12");
     Blob::Ptr weights = CNNNetworkHelper::quantizeWeights(*weightsFakeQuantize, false);
 
-    CNNLayerPtr biasesConvolutionConst = network.getLayerByName("Const13");
+    CNNLayerPtr biasesConvolutionConst = CommonTestUtils::getLayerByName(network, "Const13");
     Blob::Ptr biases = getBlob(biasesConvolutionConst, "custom");
 
-    CNNLayerPtr convolution = network.getLayerByName("Convolution14");
+    CNNLayerPtr convolution = CommonTestUtils::getLayerByName(network, "Convolution14");
     convolution->blobs.emplace("weights", weights);
     convolution->blobs.emplace("biases", biases);
 
@@ -73,15 +74,15 @@ bool QuantizationOnWeightsTestModel::transform(CNNNetwork& network, LayerTransfo
     weightableLayer->_weights = weights;
     weightableLayer->_biases = biases;
 
-    CNNLayerPtr weightsConstInput = network.getLayerByName("Const7");
+    CNNLayerPtr weightsConstInput = CommonTestUtils::getLayerByName(network, "Const7");
     CNNNetworkHelper::removeLayer(network, weightsConstInput);
-    CNNLayerPtr weightsConstInputLow = network.getLayerByName("Const8");
+    CNNLayerPtr weightsConstInputLow = CommonTestUtils::getLayerByName(network, "Const8");
     CNNNetworkHelper::removeLayer(network, weightsConstInputLow);
-    CNNLayerPtr weightsConstInputHigh = network.getLayerByName("Const9");
+    CNNLayerPtr weightsConstInputHigh = CommonTestUtils::getLayerByName(network, "Const9");
     CNNNetworkHelper::removeLayer(network, weightsConstInputHigh);
-    CNNLayerPtr weightsConstOutputLow = network.getLayerByName("Const10");
+    CNNLayerPtr weightsConstOutputLow = CommonTestUtils::getLayerByName(network, "Const10");
     CNNNetworkHelper::removeLayer(network, weightsConstOutputLow);
-    CNNLayerPtr weightsConstOutputHigh = network.getLayerByName("Const11");
+    CNNLayerPtr weightsConstOutputHigh = CommonTestUtils::getLayerByName(network, "Const11");
     CNNNetworkHelper::removeLayer(network, weightsConstOutputHigh);
 
     CNNNetworkHelper::removeLayer(network, weightsFakeQuantize);
index e8ff393..e8f873f 100644 (file)
@@ -11,6 +11,7 @@
 
 #include "unit_test_utils/mocks/mock_icnn_network.hpp"
 #include "unit_test_utils/mocks/mock_iformat_parser.hpp"
+#include "common_test_utils/common_utils.hpp"
 
 using namespace testing;
 using namespace InferenceEngine;
@@ -1748,8 +1749,7 @@ TEST_F(CNNNetReaderImplTest, canRead3DConvolution) {
     ResponseDesc resp;
     auto network = reader.getNetwork(&resp);
 
-    CNNLayerPtr layer;
-    ASSERT_EQ(OK, network->getLayerByName("3D_conv", layer, nullptr));
+    CNNLayerPtr layer = CommonTestUtils::getLayerByName(network, "3D_conv");
     auto* conv = dynamic_cast<ConvolutionLayer*>(layer.get());
     ASSERT_NE(nullptr, conv);
     ASSERT_EQ(conv->_kernel[X_AXIS], 5);
@@ -1816,9 +1816,7 @@ TEST_F(CNNNetReaderImplTest, canRead3DPooling) {
     ResponseDesc resp;
     auto network = reader.getNetwork(&resp);
 
-    CNNLayerPtr layer;
-
-    ASSERT_EQ(OK, network->getLayerByName("3D_pooling", layer, nullptr));
+    CNNLayerPtr layer = CommonTestUtils::getLayerByName(network, "3D_pooling");
     auto* pool = dynamic_cast<PoolingLayer*>(layer.get());
     ASSERT_NE(nullptr, pool);
     ASSERT_EQ(pool->_kernel[X_AXIS], 5);
@@ -2025,9 +2023,7 @@ TEST_F(CNNNetReaderImplTest, canParseSimpleTI) {
     auto network = reader.getNetwork(&resp);
     ASSERT_NE(nullptr, network) << resp.msg;
 
-    CNNLayerPtr layer;
-    sts = network->getLayerByName("SomeTI", layer, &resp);
-    ASSERT_EQ(OK, sts) << resp.msg;
+    CNNLayerPtr layer = CommonTestUtils::getLayerByName(network, "SomeTI");
 
     auto* ti = dynamic_cast<TensorIterator*>(layer.get());
     ASSERT_NE(nullptr, ti);
@@ -2125,9 +2121,7 @@ TEST_F(CNNNetReaderImplTest, canParseScalar) {
     auto net = reader.getNetwork(&resp);
 
     ASSERT_NE(nullptr, net) << resp.msg;
-    CNNLayerPtr layer;
-    sts = net->getLayerByName("scalar", layer, &resp);
-    ASSERT_EQ(OK, sts) << resp.msg;
+    CNNLayerPtr layer = CommonTestUtils::getLayerByName(net, "scalar");
     ASSERT_NE(nullptr, layer.get());
     ASSERT_EQ(layer->type, "Const");
     auto actualBlob = layer->blobs.begin()->second;
index 6c500cd..ec928fa 100644 (file)
@@ -9,6 +9,7 @@
 #include "ir_gen_helper.hpp"
 #include <ie_core.hpp>
 #include "common_test_utils/common_layers_params.hpp"
+#include "common_test_utils/common_utils.hpp"
 
 using namespace ::testing;
 using namespace std;
@@ -331,7 +332,7 @@ protected:
 
             // Compare with reference
 
-            auto deconv = network.getLayerByName("Deconvolution_1");
+            auto deconv = CommonTestUtils::getLayerByName(network, "Deconvolution_1");
             InferenceEngine::TBlob<float> deconv_ref(deconv->outData[0]->getTensorDesc());
             deconv_ref.allocate();
 
index c21f108..e291238 100644 (file)
@@ -13,7 +13,7 @@
 #include "common_test_utils/xml_net_builder/xml_net_builder.hpp"
 #include "common_test_utils/common_layers_params.hpp"
 #include "common_test_utils/data_utils.hpp"
-
+#include "common_test_utils/common_utils.hpp"
 
 struct conv_eltwise_params {
     std::vector<size_t> in1;
@@ -88,8 +88,8 @@ protected:
             InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
             ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, {}));
 
-            auto conv = network.getLayerByName("Convolution2");
-            auto eltwise = network.getLayerByName("Eltwise3");
+            auto conv = CommonTestUtils::getLayerByName(network, "Convolution2");
+            auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
 
             ASSERT_EQ(conv->precision, InferenceEngine::Precision::I8);
             ASSERT_EQ(conv->outData[0]->getPrecision(), InferenceEngine::Precision::I8);
@@ -148,9 +148,9 @@ protected:
             InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
             ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, { }));
 
-            auto conv = network.getLayerByName("Convolution2");
-            auto eltwise = network.getLayerByName("Eltwise3");
-            auto relu4 = network.getLayerByName("ReLU4");
+            auto conv = CommonTestUtils::getLayerByName(network, "Convolution2");
+            auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
+            auto relu4 = CommonTestUtils::getLayerByName(network, "ReLU4");
 
             ASSERT_EQ(conv->precision, InferenceEngine::Precision::I8);
             ASSERT_EQ(conv->outData[0]->getPrecision(), InferenceEngine::Precision::I8);
@@ -209,9 +209,9 @@ protected:
             InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
             ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, { }));
 
-            auto conv2 = network.getLayerByName("Convolution2");
-            auto conv3 = network.getLayerByName("Convolution3");
-            auto eltwise = network.getLayerByName("Eltwise3");
+            auto conv2 = CommonTestUtils::getLayerByName(network, "Convolution2");
+            auto conv3 = CommonTestUtils::getLayerByName(network, "Convolution3");
+            auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
 
             ASSERT_EQ(conv2->precision, InferenceEngine::Precision::I8);
             ASSERT_EQ(conv2->outData[0]->getPrecision(), InferenceEngine::Precision::I8);
@@ -268,10 +268,10 @@ protected:
             InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
             ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, { }));
 
-            auto conv2 = network.getLayerByName("Convolution2");
-            auto conv3 = network.getLayerByName("Convolution3");
-            auto eltwise = network.getLayerByName("Eltwise3");
-            auto relu5 = network.getLayerByName("ReLU5");
+            auto conv2 = CommonTestUtils::getLayerByName(network, "Convolution2");
+            auto conv3 = CommonTestUtils::getLayerByName(network, "Convolution3");
+            auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
+            auto relu5 = CommonTestUtils::getLayerByName(network, "ReLU5");
 
             ASSERT_EQ(conv2->precision, InferenceEngine::Precision::I8);
             ASSERT_EQ(conv2->outData[0]->getPrecision(), InferenceEngine::Precision::I8);
@@ -344,10 +344,10 @@ protected:
             InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
             ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, {}));
 
-            auto conv2 = network.getLayerByName("Convolution2");
-            auto conv3 = network.getLayerByName("Convolution3");
-            auto eltwise = network.getLayerByName("Eltwise3");
-            auto relu5 = network.getLayerByName("ReLU5");
+            auto conv2 = CommonTestUtils::getLayerByName(network, "Convolution2");
+            auto conv3 = CommonTestUtils::getLayerByName(network, "Convolution3");
+            auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
+            auto relu5 = CommonTestUtils::getLayerByName(network, "ReLU5");
 
             ASSERT_EQ(conv2->precision, InferenceEngine::Precision::I8);
             ASSERT_EQ(conv2->outData[0]->getPrecision(), InferenceEngine::Precision::I8);
index 525fbc0..c4bd362 100644 (file)
@@ -5,6 +5,7 @@
 #include <gtest/gtest.h>
 #include <graph_tools.hpp>
 #include <common_test_utils/test_assertions.hpp>
+#include <common_test_utils/common_utils.hpp>
 #include <unordered_set>
 #include <gmock/gmock-generated-function-mockers.h>
 #include <gmock/gmock-generated-matchers.h>
@@ -93,8 +94,8 @@ TEST_F(GraphCopyTests, canPreserveAttributes) {
     ADD_ATTR(1, "id", "r-1-2-3");
     ADD_ATTR(2, "id", "r-1-2-3");
     CNNNetwork cloned (clone);
-    auto idMemOutput = cloned.getLayerByName("1")->GetParamAsString("id");
-    auto idMemInput  = cloned.getLayerByName("2")->GetParamAsString("id");
+    auto idMemOutput = CommonTestUtils::getLayerByName(cloned, "1")->GetParamAsString("id");
+    auto idMemInput  = CommonTestUtils::getLayerByName(cloned, "2")->GetParamAsString("id");
 
     ASSERT_STREQ(idMemInput.c_str(), idMemOutput.c_str());
     ASSERT_STREQ(idMemInput.c_str(), "r-1-2-3");
@@ -111,12 +112,12 @@ TEST_F(GraphCopyTests, canQuantizeTopology) {
     auto iclone = ModelQuantizer<FP32_2_FP32>().quantize(*mockNet, std::vector<float >({1.0f, 1.0f}));
     auto clone = CNNNetwork(iclone);
 
-    CNNNetBFS(clone.getLayerByName("1"), [&](CNNLayerPtr layer) {
+    CNNNetBFS(CommonTestUtils::getLayerByName(clone, "1"), [&](CNNLayerPtr layer) {
         auto params = getInjectedData<QuantizedLayerParams>(layer);
         ASSERT_NE(params, nullptr);
     });
 
-    CNNNetBFS(clone.getLayerByName("3"), [&](CNNLayerPtr layer) {
+    CNNNetBFS(CommonTestUtils::getLayerByName(clone, "3"), [&](CNNLayerPtr layer) {
         auto params = getInjectedData<QuantizedLayerParams>(layer);
         ASSERT_NE(params, nullptr);
     });
@@ -176,7 +177,7 @@ TEST(CNNSpecificGraphCopyTests, copyNetworkWithClampLayer) {
     auto copied_net = CNNNetwork(copied_net_ptr);
 
     //check that Clamp layer was properly copied
-    auto layer = std::dynamic_pointer_cast<ClampLayer>(copied_net.getLayerByName("ClampLayer"));
+    auto layer = std::dynamic_pointer_cast<ClampLayer>(CommonTestUtils::getLayerByName(copied_net, "ClampLayer"));
     ASSERT_NE(layer, nullptr) << "Could not perform dynamic cast from base pointer to Clamp layer pointer. "
             "Net copy could be incorrect.";
 }
@@ -310,7 +311,7 @@ TEST(CNNSpecificGraphCopyTests, copyNetworkWithDeconvolution) {
     auto copied_net = CNNNetwork(copied_net_ptr);
 
     // check that Clamp layer was properly copied
-    auto layer = std::dynamic_pointer_cast<DeconvolutionLayer>(copied_net.getLayerByName("upsample_merged"));
+    auto layer = std::dynamic_pointer_cast<DeconvolutionLayer>(CommonTestUtils::getLayerByName(copied_net, "upsample_merged"));
     ASSERT_NE(layer, nullptr) << "Could not perform dynamic cast from base pointer to Deconvolution layer pointer. "
                                  "Net copy could be incorrect.";
 }
index dcd1488..cf9a585 100644 (file)
@@ -11,6 +11,7 @@
 #include "details/ie_cnn_network_tools.h"
 
 #include "unit_test_utils/mocks/mock_icnn_network.hpp"
+#include "common_test_utils/common_utils.hpp"
 
 namespace GraphTest {
 
@@ -116,7 +117,7 @@ class GraphTestsBase : public ::testing::Test {
         long int nForward = 0;
         CNNLayerPtr layerExist;
         try {
-            layerExist = wrap.getLayerByName(a.c_str());
+            layerExist = CommonTestUtils::getLayerByName(wrap, a.c_str());
             if (!layerExist) {
                 return 0;
             }
@@ -144,7 +145,7 @@ class GraphTestsBase : public ::testing::Test {
     int countBackwardConnections(std::string a, std::string b, int from_port_id=-1) {
         CNNLayerPtr layerExist;
         try {
-            layerExist = wrap.getLayerByName(b.c_str());
+            layerExist = CommonTestUtils::getLayerByName(wrap, b.c_str());
             if (!layerExist) {
                 return 0;
             }
index 6deda6d..601cf70 100644 (file)
@@ -14,6 +14,7 @@
 #include <memory>
 #include "details/ie_cnn_network_tools.h"
 #include "details/ie_cnn_network_iterator.hpp"
+#include <common_test_utils/common_utils.hpp>
 
 using namespace testing;
 using namespace InferenceEngine;
@@ -346,7 +347,7 @@ TEST_F(GraphToolsTest, CNNNetSwapLayersSwapWithItself) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("2");
+    auto l = CommonTestUtils::getLayerByName(wrap, "2");
 
     ASSERT_NO_THROW(CNNNetSwapLayers(l, l));
 
@@ -366,8 +367,8 @@ TEST_F(GraphToolsTest, CNNNetSwapLayersSimpleCase_1) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("1");
-    auto r = wrap.getLayerByName("2");
+    auto l = CommonTestUtils::getLayerByName(wrap, "1");
+    auto r = CommonTestUtils::getLayerByName(wrap, "2");
 
     ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
 
@@ -387,8 +388,8 @@ TEST_F(GraphToolsTest, CNNNetSwapLayersSimpleCase_2) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("2");
-    auto r = wrap.getLayerByName("3");
+    auto l = CommonTestUtils::getLayerByName(wrap, "2");
+    auto r = CommonTestUtils::getLayerByName(wrap, "3");
 
     ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
 
@@ -409,8 +410,8 @@ TEST_F(GraphToolsTest, CNNNetSwapLayersSimpleCase_3) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("1");
-    auto r = wrap.getLayerByName("2");
+    auto l = CommonTestUtils::getLayerByName(wrap, "1");
+    auto r = CommonTestUtils::getLayerByName(wrap, "2");
 
     ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
 
@@ -435,8 +436,8 @@ TEST_F(GraphToolsTest, CNNNetSwapLayersDoesSwapDims) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("1");
-    auto r = wrap.getLayerByName("2");
+    auto l = CommonTestUtils::getLayerByName(wrap, "1");
+    auto r = CommonTestUtils::getLayerByName(wrap, "2");
 
     ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
 
@@ -462,8 +463,8 @@ TEST_F(GraphToolsTest, CNNNetSwapLayersSimpleCase_4) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("2");
-    auto r = wrap.getLayerByName("4");
+    auto l = CommonTestUtils::getLayerByName(wrap, "2");
+    auto r = CommonTestUtils::getLayerByName(wrap, "4");
 
     ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
 
@@ -486,8 +487,8 @@ TEST_F(GraphToolsTest, CNNNetSwapLayersSplit) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("2");
-    auto r = wrap.getLayerByName("3");
+    auto l = CommonTestUtils::getLayerByName(wrap, "2");
+    auto r = CommonTestUtils::getLayerByName(wrap, "3");
 
     ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
 
@@ -507,8 +508,8 @@ TEST_F(GraphToolsTest, CNNNetSwapLayersSplit_2) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("1");
-    auto r = wrap.getLayerByName("2");
+    auto l = CommonTestUtils::getLayerByName(wrap, "1");
+    auto r = CommonTestUtils::getLayerByName(wrap, "2");
 
     ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
 
@@ -532,8 +533,8 @@ TEST_F(GraphToolsTest, CNNNetSwapLayersSplit_3) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("1");
-    auto r = wrap.getLayerByName("2");
+    auto l = CommonTestUtils::getLayerByName(wrap, "1");
+    auto r = CommonTestUtils::getLayerByName(wrap, "2");
 
     ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
 
@@ -560,8 +561,8 @@ TEST_F(GraphToolsTest, CNNNetSwapLayersSplit_4) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("1");
-    auto r = wrap.getLayerByName("2");
+    auto l = CommonTestUtils::getLayerByName(wrap, "1");
+    auto r = CommonTestUtils::getLayerByName(wrap, "2");
 
     ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
 
@@ -590,8 +591,8 @@ TEST_F(GraphToolsTest, CanNotInsertLayerIntoNonAdjiacendLayers) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("1");
-    auto r = wrap.getLayerByName("3");
+    auto l = CommonTestUtils::getLayerByName(wrap, "1");
+    auto r = CommonTestUtils::getLayerByName(wrap, "3");
 
     ASSERT_ANY_THROW(CNNNetworkInsertLayer(l, r, createGenericLayer("3")));
 }
@@ -608,8 +609,8 @@ TEST_F(GraphToolsTest, CNNNetworkInsertLayerSimpleCase) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("1");
-    auto r = wrap.getLayerByName("2");
+    auto l = CommonTestUtils::getLayerByName(wrap, "1");
+    auto r = CommonTestUtils::getLayerByName(wrap, "2");
 
     CNNNetworkInsertLayer(l, r, createGenericLayer("3"));
 
@@ -630,8 +631,8 @@ TEST_F(GraphToolsTest, CNNNetworkInsertLayerSimpleCaseWithMultipleOutputs) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("1");
-    auto r = wrap.getLayerByName("3");
+    auto l = CommonTestUtils::getLayerByName(wrap, "1");
+    auto r = CommonTestUtils::getLayerByName(wrap, "3");
 
     CNNNetworkInsertLayer(l, r, createGenericLayer("4"));
 
@@ -654,8 +655,8 @@ TEST_F(GraphToolsTest, CNNNetworkInsertLayerSimpleCaseWithMultipleInputs) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("3");
-    auto r = wrap.getLayerByName("2");
+    auto l = CommonTestUtils::getLayerByName(wrap, "3");
+    auto r = CommonTestUtils::getLayerByName(wrap, "2");
 
     CNNNetworkInsertLayer(l, r, createGenericLayer("4"));
 
@@ -678,9 +679,9 @@ TEST_F(GraphToolsTest, CNNNetworkInsertLayerSplitAndConcat) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("1");
-    auto r = wrap.getLayerByName("2");
-    auto r2 = wrap.getLayerByName("3");
+    auto l = CommonTestUtils::getLayerByName(wrap, "1");
+    auto r = CommonTestUtils::getLayerByName(wrap, "2");
+    auto r2 = CommonTestUtils::getLayerByName(wrap, "3");
 
     CNNNetworkInsertLayer(l, r, createGenericLayer("4"), 1);
     CNNNetworkInsertLayer(l, r2, createGenericLayer("5"), 2);
@@ -705,7 +706,7 @@ TEST_F(GraphToolsTest, CNNNetworkInsertAfterLastLayer) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    auto l = wrap.getLayerByName("2");
+    auto l = CommonTestUtils::getLayerByName(wrap, "2");
 
     CNNNetworkInsertLayer(l, nullptr, createGenericLayer("3"));
 
@@ -726,7 +727,7 @@ TEST_F(GraphToolsTest, CNNNetworkInsertAfterAll) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    CNNNetworkInsertLayer(wrap.getLayerByName("1"), nullptr, createGenericLayer("5"));
+    CNNNetworkInsertLayer(CommonTestUtils::getLayerByName(wrap, "1"), nullptr, createGenericLayer("5"));
 
     ASSERT_CONNECTION(1, 5);
     ASSERT_CONNECTION(5, 2);
@@ -747,7 +748,7 @@ TEST_F(GraphToolsTest, CNNNetworkInsertAllAfterSplit) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    CNNNetworkInsertLayer(wrap.getLayerByName("1"), nullptr, createGenericLayer("5"));
+    CNNNetworkInsertLayer(CommonTestUtils::getLayerByName(wrap, "1"), nullptr, createGenericLayer("5"));
 
     ASSERT_CONNECTION(1, 5);
     ASSERT_CONNECTION(5, 2);
@@ -769,7 +770,7 @@ TEST_F(GraphToolsTest, CNNNetworkInsert1AfterSplitBeforeEltwise) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    CNNNetworkInsertLayer(wrap.getLayerByName("1"), wrap.getLayerByName("4"), createGenericLayer("5"));
+    CNNNetworkInsertLayer(CommonTestUtils::getLayerByName(wrap, "1"), CommonTestUtils::getLayerByName(wrap, "4"), createGenericLayer("5"));
 
     ASSERT_CONNECTION(1, 3);
     ASSERT_CONNECTION(1, 5);
@@ -792,7 +793,7 @@ TEST_F(GraphToolsTest, CNNNetworkInsert1AfterSplit) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    CNNNetworkInsertLayer(wrap.getLayerByName("1"), wrap.getLayerByName("4"), createGenericLayer("5"));
+    CNNNetworkInsertLayer(CommonTestUtils::getLayerByName(wrap, "1"), CommonTestUtils::getLayerByName(wrap, "4"), createGenericLayer("5"));
 
     ASSERT_CONNECTION(1, 2);
     ASSERT_CONNECTION(1, 3);
@@ -815,7 +816,7 @@ TEST_F(GraphToolsTest, CNNNetworkInsertAfter2ConnectionsToEltwise) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    CNNNetworkInsertLayer(wrap.getLayerByName("1"), wrap.getLayerByName("2"), createGenericLayer("5"));
+    CNNNetworkInsertLayer(CommonTestUtils::getLayerByName(wrap, "1"), CommonTestUtils::getLayerByName(wrap, "2"), createGenericLayer("5"));
 
     ASSERT_CONNECTION(1, 5);
     ASSERT_MN_CONNECTIONS(5, 2, 1, 2);
@@ -855,8 +856,8 @@ TEST_F(GraphToolsTest, CNNNetworkRemoveInputOrOutputLayer) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    ASSERT_ANY_THROW(CNNNetworkRemoveLayer(wrap.getLayerByName("1")));
-    ASSERT_ANY_THROW(CNNNetworkRemoveLayer(wrap.getLayerByName("3")));
+    ASSERT_ANY_THROW(CNNNetworkRemoveLayer(CommonTestUtils::getLayerByName(wrap, "1")));
+    ASSERT_ANY_THROW(CNNNetworkRemoveLayer(CommonTestUtils::getLayerByName(wrap, "3")));
 }
 
 TEST_F(GraphToolsTest, CNNNetworkRemoveLayerThaHas2Outputs) {
@@ -876,7 +877,7 @@ TEST_F(GraphToolsTest, CNNNetworkRemoveLayerThaHas2Outputs) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    CNNNetworkRemoveLayer(wrap.getLayerByName("2"));
+    CNNNetworkRemoveLayer(CommonTestUtils::getLayerByName(wrap, "2"));
 
     ASSERT_2_CONNECTIONS(1, 3);
     ASSERT_CONNECTION(1, 4);
@@ -904,7 +905,7 @@ TEST_F(GraphToolsTest, CNNNetworkRemoveLayerSplit) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    CNNNetworkRemoveLayer(wrap.getLayerByName("2"));
+    CNNNetworkRemoveLayer(CommonTestUtils::getLayerByName(wrap, "2"));
 
     ASSERT_2_CONNECTIONS(1, 3);
     // means all remained references removed
@@ -934,7 +935,7 @@ TEST_F(GraphToolsTest, CNNNetworkRemoveLayerSplit2) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    CNNNetworkRemoveLayer(wrap.getLayerByName("2"));
+    CNNNetworkRemoveLayer(CommonTestUtils::getLayerByName(wrap, "2"));
 
     ASSERT_2_CONNECTIONS(1, 3);
     ASSERT_3_CONNECTIONS(1, 4);
@@ -962,7 +963,7 @@ TEST_F(GraphToolsTest, CNNNetworkRemoveSimpleLayer) {
         return l== nullptr ? GENERAL_ERROR : OK;
     })));
 
-    CNNNetworkRemoveLayer(wrap.getLayerByName("2"));
+    CNNNetworkRemoveLayer(CommonTestUtils::getLayerByName(wrap, "2"));
 
     ASSERT_CONNECTION(1, 3);
 
index 2c77dc9..a3bf05f 100644 (file)
@@ -19,6 +19,7 @@
 #include "util_const_infer_test.hpp"
 #include <details/ie_cnn_network_tools.h>
 #include <precision_utils.h>
+#include "common_test_utils/common_utils.hpp"
 
 namespace IE = InferenceEngine;
 
@@ -241,8 +242,8 @@ TEST_F(RemoveLayerTests, canTrimL2) {
 
     ASSERT_EQ(newLayer_names, refNewLayers);
     IE::CNNNetwork cnnNetwork(net);
-    ASSERT_THROW(cnnNetwork.getLayerByName("layer2"), IE::NotFound);
-    auto newLayer = cnnNetwork.getLayerByName(refNewLayers[0].c_str());
+    ASSERT_THROW(CommonTestUtils::getLayerByName(cnnNetwork, "layer2"), IE::NotFound);
+    auto newLayer = CommonTestUtils::getLayerByName(cnnNetwork, refNewLayers[0].c_str());
     ASSERT_EQ(newLayer->type, "Const");
     ASSERT_EQ(constData["data5"], newLayer->blobs.at("custom"));
     ASSERT_EQ(nullptr, net->getData("data7"));
@@ -283,11 +284,14 @@ TEST_F(RemoveLayerTests, canTrimI1andL1) {
     for (auto layer : newLayers) newLayer_names.push_back(layer->name);
 
     ASSERT_EQ(newLayer_names, refNewLayers);
+    IE::CNNLayerPtr layer;
+    ASSERT_EQ(IE::NOT_FOUND, net->getLayerByName("input1", layer, nullptr));
+    ASSERT_EQ(nullptr, layer);
+    ASSERT_EQ(IE::NOT_FOUND, net->getLayerByName("layer1", layer, nullptr));
+    ASSERT_EQ(nullptr, layer);
     IE::CNNNetwork cnnNetwork(net);
-    ASSERT_THROW(cnnNetwork.getLayerByName("input1"), IE::NotFound);
-    ASSERT_THROW(cnnNetwork.getLayerByName("layer1"), IE::NotFound);
-    auto newLayerD4 = cnnNetwork.getLayerByName(refNewLayers[0].c_str());
-    auto newLayerD7 = cnnNetwork.getLayerByName(refNewLayers[1].c_str());
+    auto newLayerD4 = CommonTestUtils::getLayerByName(cnnNetwork, refNewLayers[0]);
+    auto newLayerD7 = CommonTestUtils::getLayerByName(cnnNetwork, refNewLayers[1]);
     auto newData4 = net->getData("data4__layer4");
     auto newData7 = net->getData("data7__layer2");
     ASSERT_EQ(newLayerD4->type, "Const");
@@ -471,7 +475,7 @@ TEST_F(RemoveLayerTests, notTrimFirstConstInput) {
 
         ASSERT_EQ(net->allLayers().size(), originalLayersNum);
         IE::CNNNetwork cnnNetwork(net);
-        auto input4 = cnnNetwork.getLayerByName(constLayer->name.c_str());
+        auto input4 = CommonTestUtils::getLayerByName(cnnNetwork, constLayer->name.c_str());
         ASSERT_EQ(data10->getInputTo().size(), 1);
         ASSERT_EQ(data10->getCreatorLayer().lock(), input4);
         ASSERT_EQ(layer6->insData.size(), 2);
@@ -491,7 +495,7 @@ TEST_F(RemoveLayerTests, canSaveConstForEltWise) {
     testTransformator->trimShapeInputs({input2}, sortedLayers);
 
     IE::CNNNetwork cnnNetwork(net);
-    ASSERT_NO_THROW(input2 = cnnNetwork.getLayerByName(input2->name.c_str()));
+    ASSERT_NO_THROW(input2 = CommonTestUtils::getLayerByName(cnnNetwork, input2->name.c_str()));
     ASSERT_EQ(net->allLayers().size(), 10);
     ASSERT_EQ(layer1->insData.size(), 2);
     ASSERT_EQ(layer1->insData[1].lock(), data2);
@@ -512,7 +516,7 @@ TEST_F(RemoveLayerTests, canSaveDataWithMultipleInputTo) {
     testTransformator->trimShapeInputs({input3}, sortedLayers);
 
     IE::CNNNetwork cnnNetwork(net);
-    ASSERT_NO_THROW(input3 = cnnNetwork.getLayerByName(input3->name.c_str()));
+    ASSERT_NO_THROW(input3 = CommonTestUtils::getLayerByName(cnnNetwork, input3->name.c_str()));
     ASSERT_EQ(net->allLayers().size(), originalLayersNum);
     ASSERT_EQ(layer2->insData.size(), 2);
     ASSERT_EQ(layer2->insData[0].lock(), getData("data2"));
@@ -542,7 +546,7 @@ TEST_F(RemoveLayerTests, canFoldConstSubgraphToConst) {
     ASSERT_EQ(net->allLayers().size(), originalLayersNum - 7);
     ASSERT_EQ(newLayer_names, refNewLayers);
     IE::CNNNetwork cnnNetwork(net);
-    auto newLayer = cnnNetwork.getLayerByName(refNewLayers[0].c_str());
+    auto newLayer = CommonTestUtils::getLayerByName(cnnNetwork, refNewLayers[0].c_str());
     ASSERT_EQ(newLayer->type, "Const");
     ASSERT_EQ(newLayer->outData[0], getData("data9"));
 }
@@ -604,7 +608,7 @@ TEST_F(RemoveLayerTests, canFoldConstSubgraphs) {
 
     IE::CNNNetwork cnnNetwork(net);
     ASSERT_EQ(net->allLayers().size(), originalLayersNum - 7);
-    auto newLayer = cnnNetwork.getLayerByName(refNewLayers[0].c_str());
+    auto newLayer = CommonTestUtils::getLayerByName(cnnNetwork, refNewLayers[0].c_str());
     auto actualBlob = newLayer->blobs["custom"];
     ASSERT_NE(actualBlob, nullptr);
     ASSERT_FALSE(actualBlob->buffer() == nullptr);
@@ -677,7 +681,7 @@ TEST_F(RemoveLayerTests, canFullTrim) {
 
     IE::CNNNetwork cnnNetwork(net);
     std::string newName = "layer5__data9__Const";
-    ASSERT_THROW(cnnNetwork.getLayerByName(newName.c_str()), IE::NotFound);
+    ASSERT_THROW(CommonTestUtils::getLayerByName(cnnNetwork, newName.c_str()), IE::NotFound);
     ASSERT_EQ(net->allLayers().size(), 2);
     ASSERT_EQ(layer6->insData.size(), 1);
     ASSERT_EQ(layer6->insData[0].lock(), getData("data10"));
@@ -754,14 +758,14 @@ TEST_F(AdvancedShapeInferTests, canReshape) {
     std::map<std::string, IE::SizeVector> inputShapes = {{"data2", newShape}};
     cnnNetwork.reshape(inputShapes);
 
-    ASSERT_NO_THROW(cnnNetwork.getLayerByName("layer2"));
+    ASSERT_NO_THROW(CommonTestUtils::getLayerByName(cnnNetwork, "layer2"));
     ASSERT_EQ(getData("data3")->getTensorDesc().getDims(), IE::SizeVector{3});
     ASSERT_EQ(net->allLayers().size(), originalLayersNum);
 
     IE::ConstTransformer transformator(net.get());
     transformator.fullTrim();
 
-    ASSERT_THROW(cnnNetwork.getLayerByName("layer2"), IE::NotFound);
+    ASSERT_THROW(CommonTestUtils::getLayerByName(cnnNetwork, "layer2"), IE::NotFound);
     ASSERT_EQ(getData("data4")->getTensorDesc().getDims(), newShape);
     ASSERT_EQ(net->allLayers().size(), originalLayersNum - 1);
 }
@@ -1038,7 +1042,9 @@ TEST_F(AdvancedShapeInferTests, MulWithTensorConstInferTest) {
         transformator.foldConstSubgraphs();
         switch(precisionOutData) {
             case IE::Precision::U8: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<uint8_t *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<uint8_t *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1046,7 +1052,9 @@ TEST_F(AdvancedShapeInferTests, MulWithTensorConstInferTest) {
                 break;
             }
             case IE::Precision::I32: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1054,7 +1062,9 @@ TEST_F(AdvancedShapeInferTests, MulWithTensorConstInferTest) {
                 break;
             }
             case IE::Precision::I64: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<long long int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<long long int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1062,7 +1072,9 @@ TEST_F(AdvancedShapeInferTests, MulWithTensorConstInferTest) {
                 break;
             }
             case IE::Precision::U64: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1070,7 +1082,9 @@ TEST_F(AdvancedShapeInferTests, MulWithTensorConstInferTest) {
                 break;
             }
             case IE::Precision::FP16: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1078,7 +1092,9 @@ TEST_F(AdvancedShapeInferTests, MulWithTensorConstInferTest) {
                 break;
             }
             case IE::Precision::FP32: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<float *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<float *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1165,7 +1181,9 @@ TEST_F(AdvancedShapeInferTests, MulWithScalarConstInferTest) {
         transformator.foldConstSubgraphs();
         switch(precisionOutData) {
             case IE::Precision::U8: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<uint8_t *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<uint8_t *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1173,7 +1191,9 @@ TEST_F(AdvancedShapeInferTests, MulWithScalarConstInferTest) {
                 break;
             }
             case IE::Precision::I32: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1181,7 +1201,9 @@ TEST_F(AdvancedShapeInferTests, MulWithScalarConstInferTest) {
                 break;
             }
             case IE::Precision::I64: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<long long int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<long long int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1189,7 +1211,9 @@ TEST_F(AdvancedShapeInferTests, MulWithScalarConstInferTest) {
                 break;
             }
             case IE::Precision::U64: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1197,7 +1221,9 @@ TEST_F(AdvancedShapeInferTests, MulWithScalarConstInferTest) {
                 break;
             }
             case IE::Precision::FP16: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1205,7 +1231,9 @@ TEST_F(AdvancedShapeInferTests, MulWithScalarConstInferTest) {
                 break;
             }
             case IE::Precision::FP32: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<float *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<float *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1291,7 +1319,9 @@ TEST_F(AdvancedShapeInferTests, AddWithScalarConstInferTest) {
         transformator.foldConstSubgraphs();
         switch(precisionOutData) {
             case IE::Precision::U8: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<uint8_t *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<uint8_t *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1299,7 +1329,9 @@ TEST_F(AdvancedShapeInferTests, AddWithScalarConstInferTest) {
                 break;
             }
             case IE::Precision::I32: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1307,7 +1339,9 @@ TEST_F(AdvancedShapeInferTests, AddWithScalarConstInferTest) {
                 break;
             }
             case IE::Precision::I64: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<long long int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<long long int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1315,7 +1349,9 @@ TEST_F(AdvancedShapeInferTests, AddWithScalarConstInferTest) {
                 break;
             }
             case IE::Precision::U64: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1323,7 +1359,9 @@ TEST_F(AdvancedShapeInferTests, AddWithScalarConstInferTest) {
                 break;
             }
             case IE::Precision::FP16: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1331,7 +1369,9 @@ TEST_F(AdvancedShapeInferTests, AddWithScalarConstInferTest) {
                 break;
             }
             case IE::Precision::FP32: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<float *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<float *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1417,7 +1457,9 @@ TEST_F(AdvancedShapeInferTests, AddWithTensorConstInferTest) {
         transformator.foldConstSubgraphs();
         switch(precisionOutData) {
             case IE::Precision::U8: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<uint8_t *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<uint8_t *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1425,7 +1467,9 @@ TEST_F(AdvancedShapeInferTests, AddWithTensorConstInferTest) {
                 break;
             }
             case IE::Precision::I32: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1433,7 +1477,9 @@ TEST_F(AdvancedShapeInferTests, AddWithTensorConstInferTest) {
                 break;
             }
             case IE::Precision::I64: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<long long int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<long long int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1441,7 +1487,9 @@ TEST_F(AdvancedShapeInferTests, AddWithTensorConstInferTest) {
                 break;
             }
             case IE::Precision::U64: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1449,7 +1497,9 @@ TEST_F(AdvancedShapeInferTests, AddWithTensorConstInferTest) {
                 break;
             }
             case IE::Precision::FP16: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1457,7 +1507,9 @@ TEST_F(AdvancedShapeInferTests, AddWithTensorConstInferTest) {
                 break;
             }
             case IE::Precision::FP32: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<float *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<float *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1543,7 +1595,9 @@ TEST_F(AdvancedShapeInferTests, AddWithBroadcastingConstInferTest) {
         transformator.foldConstSubgraphs();
         switch(precisionOutData) {
             case IE::Precision::U8: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<uint8_t *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<uint8_t *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1551,7 +1605,9 @@ TEST_F(AdvancedShapeInferTests, AddWithBroadcastingConstInferTest) {
                 break;
             }
             case IE::Precision::I32: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1559,7 +1615,9 @@ TEST_F(AdvancedShapeInferTests, AddWithBroadcastingConstInferTest) {
                 break;
             }
             case IE::Precision::I64: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<long long int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<long long int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1567,7 +1625,9 @@ TEST_F(AdvancedShapeInferTests, AddWithBroadcastingConstInferTest) {
                 break;
             }
             case IE::Precision::U64: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1575,7 +1635,9 @@ TEST_F(AdvancedShapeInferTests, AddWithBroadcastingConstInferTest) {
                 break;
             }
             case IE::Precision::FP16: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1583,7 +1645,9 @@ TEST_F(AdvancedShapeInferTests, AddWithBroadcastingConstInferTest) {
                 break;
             }
             case IE::Precision::FP32: {
-                auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<float *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<float *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1669,7 +1733,9 @@ TEST_F(AdvancedShapeInferTests, MulWithBroadcastingConstInferTest) {
         transformator.foldConstSubgraphs();
         switch(precisionOutData) {
             case IE::Precision::U8: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<uint8_t *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<uint8_t *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1677,7 +1743,9 @@ TEST_F(AdvancedShapeInferTests, MulWithBroadcastingConstInferTest) {
                 break;
             }
             case IE::Precision::I32: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1685,7 +1753,9 @@ TEST_F(AdvancedShapeInferTests, MulWithBroadcastingConstInferTest) {
                 break;
             }
             case IE::Precision::I64: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<long long int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<long long int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1693,7 +1763,9 @@ TEST_F(AdvancedShapeInferTests, MulWithBroadcastingConstInferTest) {
                 break;
             }
             case IE::Precision::U64: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1701,7 +1773,9 @@ TEST_F(AdvancedShapeInferTests, MulWithBroadcastingConstInferTest) {
                 break;
             }
             case IE::Precision::FP16: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);
@@ -1709,7 +1783,9 @@ TEST_F(AdvancedShapeInferTests, MulWithBroadcastingConstInferTest) {
                 break;
             }
             case IE::Precision::FP32: {
-                auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<float *>();
+                InferenceEngine::CNNLayerPtr layer;
+                ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+                auto *l = layer->blobs.at("custom")->cbuffer().as<float *>();
                 ASSERT_EQ(l[0], ref[0]);
                 ASSERT_EQ(l[1], ref[1]);
                 ASSERT_EQ(l[2], ref[2]);