}
/**
- * @deprecated Migrate to IR v10 and work with ngraph::Function directly. The method will be removed in 2021.1
- * @copybrief ICNNNetwork::getLayerByName
- *
- * Wraps ICNNNetwork::getLayerByName
- *
- * @param layerName Given name of the layer
- * @return Status code of the operation. InferenceEngine::OK if succeeded
- */
- INFERENCE_ENGINE_DEPRECATED("Migrate to IR v10 and work with ngraph::Function directly. The method will be removed in 2021.1")
- CNNLayerPtr getLayerByName(const char* layerName) const;
-
- /**
* @brief Helper method to get collect all input shapes with names of corresponding Data objects
*
* @return Map of pairs: input name and its dimension.
ResponseDesc* resp = nullptr) noexcept = 0;
/**
- * @deprecated Migrate to IR v10 and work with ngraph::Function directly. The method will be removed in 2021.1
- * @brief Gets network layer with the given name
- *
- * @param layerName Given name of the layer
- * @param out Pointer to the found CNNLayer object with the given name
- * @param resp Pointer to the response message that holds a description of an error if any occurred
- * @return Status code of the operation. InferenceEngine::OK if succeeded
- */
- INFERENCE_ENGINE_DEPRECATED("Migrate to IR v10 and work with ngraph::Function directly. The method will be removed in 2021.1")
- virtual StatusCode getLayerByName(const char* layerName, CNNLayerPtr& out, ResponseDesc* resp) const noexcept = 0;
-
- /**
* @brief Changes the inference batch size.
*
* @note There are several limitations and it's not recommended to use it. Set batch to the input shape and call
_ngraph_function->validate_nodes_and_infer_types();
}
-StatusCode CNNNetworkNGraphImpl::getLayerByName(const char* layerName, CNNLayerPtr& out, ResponseDesc* resp) const
- noexcept {
- if (!cnnNetwork) {
- const_cast<CNNNetworkNGraphImpl *>(this)->convertToCNNNetworkImpl();
- }
- if (!cnnNetwork) return GENERAL_ERROR;
- return cnnNetwork->getLayerByName(layerName, out, resp);
-}
-
StatusCode CNNNetworkNGraphImpl::addOutput(const std::string& layerName, size_t outputIndex,
ResponseDesc* resp) noexcept {
IE_PROFILING_AUTO_SCOPE(addOutput)
INFERENCE_ENGINE_DEPRECATED("Use ngraph::Function directly")
void addLayer(const CNNLayerPtr& layer) noexcept override;
- INFERENCE_ENGINE_DEPRECATED("Use ngraph::Function directly")
- StatusCode getLayerByName(const char* layerName, CNNLayerPtr& out, ResponseDesc* resp) const noexcept override;
-
// public version
StatusCode setBatchSize(size_t size, ResponseDesc* responseDesc) noexcept override;
void removeData(const std::string& dataName);
- StatusCode getLayerByName(const char* layerName, CNNLayerPtr& out, ResponseDesc* resp) const noexcept override;
+ StatusCode getLayerByName(const char* layerName, CNNLayerPtr& out, ResponseDesc* resp) const noexcept;
// public version
StatusCode setBatchSize(size_t size, ResponseDesc* responseDesc) noexcept override;
namespace InferenceEngine {
-CNNLayerPtr CNNNetwork::getLayerByName(const char* layerName) const {
- CNNLayerPtr layer;
- CALL_STATUS_FNC(getLayerByName, layerName, layer);
- return layer;
-}
-
CNNLayer::CNNLayer(const LayerParams& prms)
: node(nullptr), name(prms.name), type(prms.type), precision(prms.precision), userValue({0}) {}
#include <ngraph/op/result.hpp>
#include "common_test_utils/file_utils.hpp"
+#include "common_test_utils/common_utils.hpp"
#include "transformations/rt_info/primitives_priority_attribute.hpp"
#include "cnn_network_ngraph_impl.hpp"
}
InferenceEngine::CNNNetwork cnnNet(ngraph);
- auto cnnLayer = cnnNet.getLayerByName("testReLU");
+ auto cnnLayer = CommonTestUtils::getLayerByName(cnnNet, "testReLU");
ASSERT_NE(nullptr, cnnLayer);
ASSERT_EQ(cnnLayer->affinity, testAffinity);
}
}
InferenceEngine::details::CNNNetworkNGraphImpl cnnNet(ngraph);
- CNNLayerPtr layer;
- ASSERT_EQ(OK, cnnNet.getLayerByName(name.c_str(), layer, nullptr));
+ auto * icnnnetwork = static_cast<InferenceEngine::ICNNNetwork*>(&cnnNet);
+ CNNLayerPtr layer = CommonTestUtils::getLayerByName(icnnnetwork, name);
layer->params["test"] = "2";
- ASSERT_EQ(OK, cnnNet.getLayerByName(name.c_str(), layer, nullptr));
+ layer = CommonTestUtils::getLayerByName(icnnnetwork, name);
ASSERT_TRUE(layer->params.find("test") != layer->params.end());
ASSERT_EQ(layer->params["test"], "2");
cnnNet.convertToCNNNetworkImpl();
- ASSERT_EQ(OK, cnnNet.getLayerByName(name.c_str(), layer, nullptr));
+ layer = CommonTestUtils::getLayerByName(icnnnetwork, name);
ASSERT_TRUE(layer->params.find("test") != layer->params.end());
ASSERT_EQ(layer->params["test"], "2");
}
#include <ie_core.hpp>
#include <net_pass.h>
+#include "common_test_utils/common_utils.hpp"
using namespace ::testing;
using namespace std;
IE_SUPPRESS_DEPRECATED_START
if (!isLSTM) {
- auto power_layer = dynamic_pointer_cast<PowerLayer>(net.getLayerByName("power"));
+ auto power_layer = dynamic_pointer_cast<PowerLayer>(CommonTestUtils::getLayerByName(net, "power"));
ASSERT_EQ(power_layer->scale, 0.75f);
ASSERT_EQ(power_layer->offset, 0.35f);
ASSERT_EQ(power_layer->power, 0.5f);
- auto sum_layer = dynamic_pointer_cast<EltwiseLayer>(net.getLayerByName("sum"));
+ auto sum_layer = dynamic_pointer_cast<EltwiseLayer>(CommonTestUtils::getLayerByName(net, "sum"));
std::vector<float> ref_coeff{0.77f, 0.33f};
ASSERT_EQ(sum_layer->coeff, ref_coeff);
InferenceEngine::NetPass::UnrollRNN_if(net, [] (const RNNCellBase& rnn) -> bool { return true; });
net.serialize("UnrollRNN_if.xml");
EXPECT_EQ(0, std::remove("UnrollRNN_if.xml"));
- auto lstmcell_layer = dynamic_pointer_cast<ClampLayer>(net.getLayerByName("LSTMCell:split_clip"));
+ auto lstmcell_layer = dynamic_pointer_cast<ClampLayer>(CommonTestUtils::getLayerByName(net, "LSTMCell:split_clip"));
float ref_coeff = 0.2f;
ASSERT_EQ(lstmcell_layer->min_value, -ref_coeff);
#include "common_test_utils/test_common.hpp"
#include "common_test_utils/data_utils.hpp"
#include "common_test_utils/file_utils.hpp"
+#include "common_test_utils/common_utils.hpp"
#include "generic_ie.hpp"
IE_SUPPRESS_DEPRECATED_START
SizeVector outDims = output["activation"]->getTensorDesc().getDims();
ASSERT_EQ(outDims, refAfterReshape);
// Convert to CNNNetwork
- auto layer = network.getLayerByName("activation");
+ auto layer = CommonTestUtils::getLayerByName(network, "activation");
ASSERT_EQ("CustomTestLayer", layer->type);
}
SizeVector outDims = output["activation"]->getTensorDesc().getDims();
ASSERT_EQ(outDims, refAfterReshape);
// Convert to CNNNetwork
- auto layer = network.getLayerByName("activation");
+ auto layer = CommonTestUtils::getLayerByName(network, "activation");
ASSERT_EQ("CustomTestLayer", layer->type);
ASSERT_EQ("false", layer->params["test1"]);
ASSERT_EQ("3", layer->params["test2"]);
#include "ie_common.h"
#include "common_test_utils/test_common.hpp"
+#include "common_test_utils/common_utils.hpp"
#include "details/ie_cnn_network_iterator.hpp"
#include <gtest/gtest.h>
protected:
InferenceEngine::CNNLayerPtr getDynamicShapeResolverLayer() const {
- return cnnNetwork.getLayerByName(s_FriendlyName);
+ return CommonTestUtils::getLayerByName(cnnNetwork, s_FriendlyName);
}
InferenceEngine::CNNNetwork cnnNetwork;
// SPDX-License-Identifier: Apache-2.0
//
+#include "common_test_utils/common_utils.hpp"
#include "ngraph_conversion_tests/conv_bias_fusion.hpp"
#include <ngraph/variant.hpp>
}
} else {
IE_SUPPRESS_DEPRECATED_START
- auto add_layer = net.getLayerByName(getOutputName().c_str());
+ auto add_layer = CommonTestUtils::getLayerByName(net, getOutputName());
ASSERT_EQ(add_layer->params["originalLayersNames"], "add,conv");
IE_SUPPRESS_DEPRECATED_END
}
#include <iterator>
#include <vector>
+#include <cpp/ie_cnn_network.h>
+#include <details/ie_cnn_network_iterator.hpp>
+
namespace CommonTestUtils {
template<typename vecElementType>
return result.str();
}
+// Test helper: returns the layer with the exact name @p layerName by linearly
+// scanning all layers of @p icnnnetwork with the (deprecated) CNNNetworkIterator.
+// Replaces the removed ICNNNetwork::getLayerByName API in tests.
+// @param icnnnetwork Network to search; assumed non-null — TODO confirm callers never pass nullptr.
+// @param layerName   Exact layer name to match (case-sensitive string equality).
+// @return Shared pointer to the first layer whose name matches.
+// @throws InferenceEngine::NotFound if no layer with that name exists.
+inline InferenceEngine::CNNLayerPtr getLayerByName(const InferenceEngine::ICNNNetwork * icnnnetwork,
+                                                   const std::string & layerName) {
+    // START/END are compile-time pragma-style macros, so placing END after the
+    // throw is fine: it only needs to bracket the deprecated-type usage below.
+    IE_SUPPRESS_DEPRECATED_START
+    InferenceEngine::details::CNNNetworkIterator i(icnnnetwork), end;
+    while (i != end) {
+        auto layer = *i;
+        if (layer->name == layerName)
+            return layer;
+        ++i;
+    }
+
+    // No match found: report which name was requested to ease test debugging.
+    std::stringstream stream;
+    stream << "Layer " << layerName << " not found in network";
+    throw InferenceEngine::NotFound(stream.str());
+    IE_SUPPRESS_DEPRECATED_END
+}
+
+// Convenience overload for the CNNNetwork wrapper: upcasts to the underlying
+// ICNNNetwork interface and delegates to the pointer overload.
+// @param network   Network wrapper to search.
+// @param layerName Exact layer name to match.
+// @return Shared pointer to the matching layer.
+// @throws InferenceEngine::NotFound if no layer with that name exists.
+inline InferenceEngine::CNNLayerPtr getLayerByName(const InferenceEngine::CNNNetwork & network,
+                                                   const std::string & layerName) {
+    const InferenceEngine::ICNNNetwork & icnnnetwork = static_cast<const InferenceEngine::ICNNNetwork&>(network);
+    return getLayerByName(&icnnnetwork, layerName);
+}
+
} // namespace CommonTestUtils
#include <ngraph/op/subtract.hpp>
#include "common_test_utils/file_utils.hpp"
+#include "common_test_utils/common_utils.hpp"
#include "common_test_utils/unicode_utils.hpp"
#include "ngraph_functions/subgraph_builders.hpp"
for (auto && layer : result.supportedLayersMap) {
IE_SUPPRESS_DEPRECATED_START
- EXPECT_NO_THROW(actualNetwork.getLayerByName(layer.first.c_str()));
+ EXPECT_NO_THROW(CommonTestUtils::getLayerByName(actualNetwork, layer.first));
IE_SUPPRESS_DEPRECATED_END
}
} else {
for (auto && layer : result.supportedLayersMap) {
IE_SUPPRESS_DEPRECATED_START
- EXPECT_NO_THROW(actualNetwork.getLayerByName(layer.first.c_str()));
+ EXPECT_NO_THROW(CommonTestUtils::getLayerByName(actualNetwork, layer.first));
IE_SUPPRESS_DEPRECATED_END
}
} else {
#include "low_precision_transformer_single_layer_tests.hpp"
#include "low_precision_transformations/concat.hpp"
#include "low_precision_transformations/eltwise.hpp"
+#include "common_test_utils/common_utils.hpp"
ConcatTestModel::ConcatTestModel(
const bool signedIntervals,
LowPrecisionTransformer transformer(transformations);
transformer.transform(network);
- const CNNLayerPtr concatLayer = network.getLayerByName("concat");
+ const CNNLayerPtr concatLayer = CommonTestUtils::getLayerByName(network, "concat");
if (concatLayer == nullptr) {
THROW_IE_EXCEPTION << "concat layer was not found";
}
const std::vector<size_t> dims = concatLayer->outData[0]->getDims();
if (dims.size() == 4ul) {
- const CNNLayerPtr fakeQuantizeLayer1 = network.getLayerByName("fakeQuantize1");
+ const CNNLayerPtr fakeQuantizeLayer1 = CommonTestUtils::getLayerByName(network, "fakeQuantize1");
QuantizeLayer* fakeQuantize1 = dynamic_cast<QuantizeLayer*>(fakeQuantizeLayer1.get());
if (fakeQuantize1 == nullptr) {
THROW_IE_EXCEPTION << "incorrect type for layer " << fakeQuantizeLayer1->name;
//
}
- const CNNLayerPtr fakeQuantizeLayer2 = network.getLayerByName("fakeQuantize2");
+ const CNNLayerPtr fakeQuantizeLayer2 = CommonTestUtils::getLayerByName(network, "fakeQuantize2");
QuantizeLayer* fakeQuantize2 = dynamic_cast<QuantizeLayer*>(fakeQuantizeLayer2.get());
if (fakeQuantize2 == nullptr) {
THROW_IE_EXCEPTION << "incorrect type for layer " << fakeQuantizeLayer2->name;
//
#include "low_precision_transformer_single_layer_tests.hpp"
+#include "common_test_utils/common_utils.hpp"
std::string FakeQuantizeAsOutputTest::getName() const {
return "FakeQuantizeAsOutputTest";
LowPrecisionTransformer transformer(LowPrecisionTransformer::getAllTransformations(params));
transformer.transform(network);
- const auto fq = network.getLayerByName("FakeQuantize12");
+ const auto fq = CommonTestUtils::getLayerByName(network, "FakeQuantize12");
if (fq == nullptr)
THROW_IE_EXCEPTION << "Layer 'FakeQuantize12' should not be transformed";
//
#include "low_precision_transformer_single_layer_tests.hpp"
+#include "common_test_utils/common_utils.hpp"
std::string QuantizationOnInvertedWeightsTestModel::getModel(SingleLayerTransformationsTestParams& p) const {
size_t type_size = sizeof(InferenceEngine::PrecisionTrait<InferenceEngine::Precision::FP32>::value_type);
}
bool QuantizationOnInvertedWeightsTestModel::transform(CNNNetwork& network, LayerTransformation::Params& params) const {
- CNNLayerPtr weightsFakeQuantize = network.getLayerByName("FakeQuantize12");
+ CNNLayerPtr weightsFakeQuantize = CommonTestUtils::getLayerByName(network, "FakeQuantize12");
Blob::Ptr weights = CNNNetworkHelper::quantizeWeights(*weightsFakeQuantize, false);
- CNNLayerPtr biasesConvolutionConst = network.getLayerByName("Const13");
+ CNNLayerPtr biasesConvolutionConst = CommonTestUtils::getLayerByName(network, "Const13");
Blob::Ptr biases = getBlob(biasesConvolutionConst, "custom");
- CNNLayerPtr convolution = network.getLayerByName("Convolution14");
+ CNNLayerPtr convolution = CommonTestUtils::getLayerByName(network, "Convolution14");
convolution->blobs.emplace("weights", weights);
convolution->blobs.emplace("biases", biases);
weightableLayer->_weights = weights;
weightableLayer->_biases = biases;
- CNNLayerPtr weightsConstInput = network.getLayerByName("Const7");
+ CNNLayerPtr weightsConstInput = CommonTestUtils::getLayerByName(network, "Const7");
CNNNetworkHelper::removeLayer(network, weightsConstInput);
- CNNLayerPtr weightsConstInputLow = network.getLayerByName("Const8");
+ CNNLayerPtr weightsConstInputLow = CommonTestUtils::getLayerByName(network, "Const8");
CNNNetworkHelper::removeLayer(network, weightsConstInputLow);
- CNNLayerPtr weightsConstInputHigh = network.getLayerByName("Const9");
+ CNNLayerPtr weightsConstInputHigh = CommonTestUtils::getLayerByName(network, "Const9");
CNNNetworkHelper::removeLayer(network, weightsConstInputHigh);
- CNNLayerPtr weightsConstOutputLow = network.getLayerByName("Const10");
+ CNNLayerPtr weightsConstOutputLow = CommonTestUtils::getLayerByName(network, "Const10");
CNNNetworkHelper::removeLayer(network, weightsConstOutputLow);
- CNNLayerPtr weightsConstOutputHigh = network.getLayerByName("Const11");
+ CNNLayerPtr weightsConstOutputHigh = CommonTestUtils::getLayerByName(network, "Const11");
CNNNetworkHelper::removeLayer(network, weightsConstOutputHigh);
CNNNetworkHelper::removeLayer(network, weightsFakeQuantize);
//
#include "low_precision_transformer_single_layer_tests.hpp"
+#include "common_test_utils/common_utils.hpp"
std::string QuantizationOnWeightsTestModel::getModel(SingleLayerTransformationsTestParams& p) const {
size_t type_size = sizeof(InferenceEngine::PrecisionTrait<InferenceEngine::Precision::FP32>::value_type);
}
bool QuantizationOnWeightsTestModel::transform(CNNNetwork& network, LayerTransformation::Params& params) const {
- CNNLayerPtr weightsFakeQuantize = network.getLayerByName("FakeQuantize12");
+ CNNLayerPtr weightsFakeQuantize = CommonTestUtils::getLayerByName(network, "FakeQuantize12");
Blob::Ptr weights = CNNNetworkHelper::quantizeWeights(*weightsFakeQuantize, false);
- CNNLayerPtr biasesConvolutionConst = network.getLayerByName("Const13");
+ CNNLayerPtr biasesConvolutionConst = CommonTestUtils::getLayerByName(network, "Const13");
Blob::Ptr biases = getBlob(biasesConvolutionConst, "custom");
- CNNLayerPtr convolution = network.getLayerByName("Convolution14");
+ CNNLayerPtr convolution = CommonTestUtils::getLayerByName(network, "Convolution14");
convolution->blobs.emplace("weights", weights);
convolution->blobs.emplace("biases", biases);
weightableLayer->_weights = weights;
weightableLayer->_biases = biases;
- CNNLayerPtr weightsConstInput = network.getLayerByName("Const7");
+ CNNLayerPtr weightsConstInput = CommonTestUtils::getLayerByName(network, "Const7");
CNNNetworkHelper::removeLayer(network, weightsConstInput);
- CNNLayerPtr weightsConstInputLow = network.getLayerByName("Const8");
+ CNNLayerPtr weightsConstInputLow = CommonTestUtils::getLayerByName(network, "Const8");
CNNNetworkHelper::removeLayer(network, weightsConstInputLow);
- CNNLayerPtr weightsConstInputHigh = network.getLayerByName("Const9");
+ CNNLayerPtr weightsConstInputHigh = CommonTestUtils::getLayerByName(network, "Const9");
CNNNetworkHelper::removeLayer(network, weightsConstInputHigh);
- CNNLayerPtr weightsConstOutputLow = network.getLayerByName("Const10");
+ CNNLayerPtr weightsConstOutputLow = CommonTestUtils::getLayerByName(network, "Const10");
CNNNetworkHelper::removeLayer(network, weightsConstOutputLow);
- CNNLayerPtr weightsConstOutputHigh = network.getLayerByName("Const11");
+ CNNLayerPtr weightsConstOutputHigh = CommonTestUtils::getLayerByName(network, "Const11");
CNNNetworkHelper::removeLayer(network, weightsConstOutputHigh);
CNNNetworkHelper::removeLayer(network, weightsFakeQuantize);
#include "unit_test_utils/mocks/mock_icnn_network.hpp"
#include "unit_test_utils/mocks/mock_iformat_parser.hpp"
+#include "common_test_utils/common_utils.hpp"
using namespace testing;
using namespace InferenceEngine;
ResponseDesc resp;
auto network = reader.getNetwork(&resp);
- CNNLayerPtr layer;
- ASSERT_EQ(OK, network->getLayerByName("3D_conv", layer, nullptr));
+ CNNLayerPtr layer = CommonTestUtils::getLayerByName(network, "3D_conv");
auto* conv = dynamic_cast<ConvolutionLayer*>(layer.get());
ASSERT_NE(nullptr, conv);
ASSERT_EQ(conv->_kernel[X_AXIS], 5);
ResponseDesc resp;
auto network = reader.getNetwork(&resp);
- CNNLayerPtr layer;
-
- ASSERT_EQ(OK, network->getLayerByName("3D_pooling", layer, nullptr));
+ CNNLayerPtr layer = CommonTestUtils::getLayerByName(network, "3D_pooling");
auto* pool = dynamic_cast<PoolingLayer*>(layer.get());
ASSERT_NE(nullptr, pool);
ASSERT_EQ(pool->_kernel[X_AXIS], 5);
auto network = reader.getNetwork(&resp);
ASSERT_NE(nullptr, network) << resp.msg;
- CNNLayerPtr layer;
- sts = network->getLayerByName("SomeTI", layer, &resp);
- ASSERT_EQ(OK, sts) << resp.msg;
+ CNNLayerPtr layer = CommonTestUtils::getLayerByName(network, "SomeTI");
auto* ti = dynamic_cast<TensorIterator*>(layer.get());
ASSERT_NE(nullptr, ti);
auto net = reader.getNetwork(&resp);
ASSERT_NE(nullptr, net) << resp.msg;
- CNNLayerPtr layer;
- sts = net->getLayerByName("scalar", layer, &resp);
- ASSERT_EQ(OK, sts) << resp.msg;
+ CNNLayerPtr layer = CommonTestUtils::getLayerByName(net, "scalar");
ASSERT_NE(nullptr, layer.get());
ASSERT_EQ(layer->type, "Const");
auto actualBlob = layer->blobs.begin()->second;
#include "ir_gen_helper.hpp"
#include <ie_core.hpp>
#include "common_test_utils/common_layers_params.hpp"
+#include "common_test_utils/common_utils.hpp"
using namespace ::testing;
using namespace std;
// Compare with reference
- auto deconv = network.getLayerByName("Deconvolution_1");
+ auto deconv = CommonTestUtils::getLayerByName(network, "Deconvolution_1");
InferenceEngine::TBlob<float> deconv_ref(deconv->outData[0]->getTensorDesc());
deconv_ref.allocate();
#include "common_test_utils/xml_net_builder/xml_net_builder.hpp"
#include "common_test_utils/common_layers_params.hpp"
#include "common_test_utils/data_utils.hpp"
-
+#include "common_test_utils/common_utils.hpp"
struct conv_eltwise_params {
std::vector<size_t> in1;
InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, {}));
- auto conv = network.getLayerByName("Convolution2");
- auto eltwise = network.getLayerByName("Eltwise3");
+ auto conv = CommonTestUtils::getLayerByName(network, "Convolution2");
+ auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
ASSERT_EQ(conv->precision, InferenceEngine::Precision::I8);
ASSERT_EQ(conv->outData[0]->getPrecision(), InferenceEngine::Precision::I8);
InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, { }));
- auto conv = network.getLayerByName("Convolution2");
- auto eltwise = network.getLayerByName("Eltwise3");
- auto relu4 = network.getLayerByName("ReLU4");
+ auto conv = CommonTestUtils::getLayerByName(network, "Convolution2");
+ auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
+ auto relu4 = CommonTestUtils::getLayerByName(network, "ReLU4");
ASSERT_EQ(conv->precision, InferenceEngine::Precision::I8);
ASSERT_EQ(conv->outData[0]->getPrecision(), InferenceEngine::Precision::I8);
InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, { }));
- auto conv2 = network.getLayerByName("Convolution2");
- auto conv3 = network.getLayerByName("Convolution3");
- auto eltwise = network.getLayerByName("Eltwise3");
+ auto conv2 = CommonTestUtils::getLayerByName(network, "Convolution2");
+ auto conv3 = CommonTestUtils::getLayerByName(network, "Convolution3");
+ auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
ASSERT_EQ(conv2->precision, InferenceEngine::Precision::I8);
ASSERT_EQ(conv2->outData[0]->getPrecision(), InferenceEngine::Precision::I8);
InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, { }));
- auto conv2 = network.getLayerByName("Convolution2");
- auto conv3 = network.getLayerByName("Convolution3");
- auto eltwise = network.getLayerByName("Eltwise3");
- auto relu5 = network.getLayerByName("ReLU5");
+ auto conv2 = CommonTestUtils::getLayerByName(network, "Convolution2");
+ auto conv3 = CommonTestUtils::getLayerByName(network, "Convolution3");
+ auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
+ auto relu5 = CommonTestUtils::getLayerByName(network, "ReLU5");
ASSERT_EQ(conv2->precision, InferenceEngine::Precision::I8);
ASSERT_EQ(conv2->outData[0]->getPrecision(), InferenceEngine::Precision::I8);
InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, {}));
- auto conv2 = network.getLayerByName("Convolution2");
- auto conv3 = network.getLayerByName("Convolution3");
- auto eltwise = network.getLayerByName("Eltwise3");
- auto relu5 = network.getLayerByName("ReLU5");
+ auto conv2 = CommonTestUtils::getLayerByName(network, "Convolution2");
+ auto conv3 = CommonTestUtils::getLayerByName(network, "Convolution3");
+ auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
+ auto relu5 = CommonTestUtils::getLayerByName(network, "ReLU5");
ASSERT_EQ(conv2->precision, InferenceEngine::Precision::I8);
ASSERT_EQ(conv2->outData[0]->getPrecision(), InferenceEngine::Precision::I8);
#include <gtest/gtest.h>
#include <graph_tools.hpp>
#include <common_test_utils/test_assertions.hpp>
+#include <common_test_utils/common_utils.hpp>
#include <unordered_set>
#include <gmock/gmock-generated-function-mockers.h>
#include <gmock/gmock-generated-matchers.h>
ADD_ATTR(1, "id", "r-1-2-3");
ADD_ATTR(2, "id", "r-1-2-3");
CNNNetwork cloned (clone);
- auto idMemOutput = cloned.getLayerByName("1")->GetParamAsString("id");
- auto idMemInput = cloned.getLayerByName("2")->GetParamAsString("id");
+ auto idMemOutput = CommonTestUtils::getLayerByName(cloned, "1")->GetParamAsString("id");
+ auto idMemInput = CommonTestUtils::getLayerByName(cloned, "2")->GetParamAsString("id");
ASSERT_STREQ(idMemInput.c_str(), idMemOutput.c_str());
ASSERT_STREQ(idMemInput.c_str(), "r-1-2-3");
auto iclone = ModelQuantizer<FP32_2_FP32>().quantize(*mockNet, std::vector<float >({1.0f, 1.0f}));
auto clone = CNNNetwork(iclone);
- CNNNetBFS(clone.getLayerByName("1"), [&](CNNLayerPtr layer) {
+ CNNNetBFS(CommonTestUtils::getLayerByName(clone, "1"), [&](CNNLayerPtr layer) {
auto params = getInjectedData<QuantizedLayerParams>(layer);
ASSERT_NE(params, nullptr);
});
- CNNNetBFS(clone.getLayerByName("3"), [&](CNNLayerPtr layer) {
+ CNNNetBFS(CommonTestUtils::getLayerByName(clone, "3"), [&](CNNLayerPtr layer) {
auto params = getInjectedData<QuantizedLayerParams>(layer);
ASSERT_NE(params, nullptr);
});
auto copied_net = CNNNetwork(copied_net_ptr);
//check that Clamp layer was properly copied
- auto layer = std::dynamic_pointer_cast<ClampLayer>(copied_net.getLayerByName("ClampLayer"));
+ auto layer = std::dynamic_pointer_cast<ClampLayer>(CommonTestUtils::getLayerByName(copied_net, "ClampLayer"));
ASSERT_NE(layer, nullptr) << "Could not perform dynamic cast from base pointer to Clamp layer pointer. "
"Net copy could be incorrect.";
}
auto copied_net = CNNNetwork(copied_net_ptr);
// check that Clamp layer was properly copied
- auto layer = std::dynamic_pointer_cast<DeconvolutionLayer>(copied_net.getLayerByName("upsample_merged"));
+ auto layer = std::dynamic_pointer_cast<DeconvolutionLayer>(CommonTestUtils::getLayerByName(copied_net, "upsample_merged"));
ASSERT_NE(layer, nullptr) << "Could not perform dynamic cast from base pointer to Deconvolution layer pointer. "
"Net copy could be incorrect.";
}
#include "details/ie_cnn_network_tools.h"
#include "unit_test_utils/mocks/mock_icnn_network.hpp"
+#include "common_test_utils/common_utils.hpp"
namespace GraphTest {
long int nForward = 0;
CNNLayerPtr layerExist;
try {
- layerExist = wrap.getLayerByName(a.c_str());
+ layerExist = CommonTestUtils::getLayerByName(wrap, a.c_str());
if (!layerExist) {
return 0;
}
int countBackwardConnections(std::string a, std::string b, int from_port_id=-1) {
CNNLayerPtr layerExist;
try {
- layerExist = wrap.getLayerByName(b.c_str());
+ layerExist = CommonTestUtils::getLayerByName(wrap, b.c_str());
if (!layerExist) {
return 0;
}
#include <memory>
#include "details/ie_cnn_network_tools.h"
#include "details/ie_cnn_network_iterator.hpp"
+#include <common_test_utils/common_utils.hpp>
using namespace testing;
using namespace InferenceEngine;
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("2");
+ auto l = CommonTestUtils::getLayerByName(wrap, "2");
ASSERT_NO_THROW(CNNNetSwapLayers(l, l));
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("1");
- auto r = wrap.getLayerByName("2");
+ auto l = CommonTestUtils::getLayerByName(wrap, "1");
+ auto r = CommonTestUtils::getLayerByName(wrap, "2");
ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("2");
- auto r = wrap.getLayerByName("3");
+ auto l = CommonTestUtils::getLayerByName(wrap, "2");
+ auto r = CommonTestUtils::getLayerByName(wrap, "3");
ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("1");
- auto r = wrap.getLayerByName("2");
+ auto l = CommonTestUtils::getLayerByName(wrap, "1");
+ auto r = CommonTestUtils::getLayerByName(wrap, "2");
ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("1");
- auto r = wrap.getLayerByName("2");
+ auto l = CommonTestUtils::getLayerByName(wrap, "1");
+ auto r = CommonTestUtils::getLayerByName(wrap, "2");
ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("2");
- auto r = wrap.getLayerByName("4");
+ auto l = CommonTestUtils::getLayerByName(wrap, "2");
+ auto r = CommonTestUtils::getLayerByName(wrap, "4");
ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("2");
- auto r = wrap.getLayerByName("3");
+ auto l = CommonTestUtils::getLayerByName(wrap, "2");
+ auto r = CommonTestUtils::getLayerByName(wrap, "3");
ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("1");
- auto r = wrap.getLayerByName("2");
+ auto l = CommonTestUtils::getLayerByName(wrap, "1");
+ auto r = CommonTestUtils::getLayerByName(wrap, "2");
ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("1");
- auto r = wrap.getLayerByName("2");
+ auto l = CommonTestUtils::getLayerByName(wrap, "1");
+ auto r = CommonTestUtils::getLayerByName(wrap, "2");
ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("1");
- auto r = wrap.getLayerByName("2");
+ auto l = CommonTestUtils::getLayerByName(wrap, "1");
+ auto r = CommonTestUtils::getLayerByName(wrap, "2");
ASSERT_NO_THROW(CNNNetSwapLayers(l, r));
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("1");
- auto r = wrap.getLayerByName("3");
+ auto l = CommonTestUtils::getLayerByName(wrap, "1");
+ auto r = CommonTestUtils::getLayerByName(wrap, "3");
ASSERT_ANY_THROW(CNNNetworkInsertLayer(l, r, createGenericLayer("3")));
}
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("1");
- auto r = wrap.getLayerByName("2");
+ auto l = CommonTestUtils::getLayerByName(wrap, "1");
+ auto r = CommonTestUtils::getLayerByName(wrap, "2");
CNNNetworkInsertLayer(l, r, createGenericLayer("3"));
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("1");
- auto r = wrap.getLayerByName("3");
+ auto l = CommonTestUtils::getLayerByName(wrap, "1");
+ auto r = CommonTestUtils::getLayerByName(wrap, "3");
CNNNetworkInsertLayer(l, r, createGenericLayer("4"));
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("3");
- auto r = wrap.getLayerByName("2");
+ auto l = CommonTestUtils::getLayerByName(wrap, "3");
+ auto r = CommonTestUtils::getLayerByName(wrap, "2");
CNNNetworkInsertLayer(l, r, createGenericLayer("4"));
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("1");
- auto r = wrap.getLayerByName("2");
- auto r2 = wrap.getLayerByName("3");
+ auto l = CommonTestUtils::getLayerByName(wrap, "1");
+ auto r = CommonTestUtils::getLayerByName(wrap, "2");
+ auto r2 = CommonTestUtils::getLayerByName(wrap, "3");
CNNNetworkInsertLayer(l, r, createGenericLayer("4"), 1);
CNNNetworkInsertLayer(l, r2, createGenericLayer("5"), 2);
return l== nullptr ? GENERAL_ERROR : OK;
})));
- auto l = wrap.getLayerByName("2");
+ auto l = CommonTestUtils::getLayerByName(wrap, "2");
CNNNetworkInsertLayer(l, nullptr, createGenericLayer("3"));
return l== nullptr ? GENERAL_ERROR : OK;
})));
- CNNNetworkInsertLayer(wrap.getLayerByName("1"), nullptr, createGenericLayer("5"));
+ CNNNetworkInsertLayer(CommonTestUtils::getLayerByName(wrap, "1"), nullptr, createGenericLayer("5"));
ASSERT_CONNECTION(1, 5);
ASSERT_CONNECTION(5, 2);
return l== nullptr ? GENERAL_ERROR : OK;
})));
- CNNNetworkInsertLayer(wrap.getLayerByName("1"), nullptr, createGenericLayer("5"));
+ CNNNetworkInsertLayer(CommonTestUtils::getLayerByName(wrap, "1"), nullptr, createGenericLayer("5"));
ASSERT_CONNECTION(1, 5);
ASSERT_CONNECTION(5, 2);
return l== nullptr ? GENERAL_ERROR : OK;
})));
- CNNNetworkInsertLayer(wrap.getLayerByName("1"), wrap.getLayerByName("4"), createGenericLayer("5"));
+ CNNNetworkInsertLayer(CommonTestUtils::getLayerByName(wrap, "1"), CommonTestUtils::getLayerByName(wrap, "4"), createGenericLayer("5"));
ASSERT_CONNECTION(1, 3);
ASSERT_CONNECTION(1, 5);
return l== nullptr ? GENERAL_ERROR : OK;
})));
- CNNNetworkInsertLayer(wrap.getLayerByName("1"), wrap.getLayerByName("4"), createGenericLayer("5"));
+ CNNNetworkInsertLayer(CommonTestUtils::getLayerByName(wrap, "1"), CommonTestUtils::getLayerByName(wrap, "4"), createGenericLayer("5"));
ASSERT_CONNECTION(1, 2);
ASSERT_CONNECTION(1, 3);
return l== nullptr ? GENERAL_ERROR : OK;
})));
- CNNNetworkInsertLayer(wrap.getLayerByName("1"), wrap.getLayerByName("2"), createGenericLayer("5"));
+ CNNNetworkInsertLayer(CommonTestUtils::getLayerByName(wrap, "1"), CommonTestUtils::getLayerByName(wrap, "2"), createGenericLayer("5"));
ASSERT_CONNECTION(1, 5);
ASSERT_MN_CONNECTIONS(5, 2, 1, 2);
return l== nullptr ? GENERAL_ERROR : OK;
})));
- ASSERT_ANY_THROW(CNNNetworkRemoveLayer(wrap.getLayerByName("1")));
- ASSERT_ANY_THROW(CNNNetworkRemoveLayer(wrap.getLayerByName("3")));
+ ASSERT_ANY_THROW(CNNNetworkRemoveLayer(CommonTestUtils::getLayerByName(wrap, "1")));
+ ASSERT_ANY_THROW(CNNNetworkRemoveLayer(CommonTestUtils::getLayerByName(wrap, "3")));
}
TEST_F(GraphToolsTest, CNNNetworkRemoveLayerThaHas2Outputs) {
return l== nullptr ? GENERAL_ERROR : OK;
})));
- CNNNetworkRemoveLayer(wrap.getLayerByName("2"));
+ CNNNetworkRemoveLayer(CommonTestUtils::getLayerByName(wrap, "2"));
ASSERT_2_CONNECTIONS(1, 3);
ASSERT_CONNECTION(1, 4);
return l== nullptr ? GENERAL_ERROR : OK;
})));
- CNNNetworkRemoveLayer(wrap.getLayerByName("2"));
+ CNNNetworkRemoveLayer(CommonTestUtils::getLayerByName(wrap, "2"));
ASSERT_2_CONNECTIONS(1, 3);
// means all remained references removed
return l== nullptr ? GENERAL_ERROR : OK;
})));
- CNNNetworkRemoveLayer(wrap.getLayerByName("2"));
+ CNNNetworkRemoveLayer(CommonTestUtils::getLayerByName(wrap, "2"));
ASSERT_2_CONNECTIONS(1, 3);
ASSERT_3_CONNECTIONS(1, 4);
return l== nullptr ? GENERAL_ERROR : OK;
})));
- CNNNetworkRemoveLayer(wrap.getLayerByName("2"));
+ CNNNetworkRemoveLayer(CommonTestUtils::getLayerByName(wrap, "2"));
ASSERT_CONNECTION(1, 3);
#include "util_const_infer_test.hpp"
#include <details/ie_cnn_network_tools.h>
#include <precision_utils.h>
+#include "common_test_utils/common_utils.hpp"
namespace IE = InferenceEngine;
ASSERT_EQ(newLayer_names, refNewLayers);
IE::CNNNetwork cnnNetwork(net);
- ASSERT_THROW(cnnNetwork.getLayerByName("layer2"), IE::NotFound);
- auto newLayer = cnnNetwork.getLayerByName(refNewLayers[0].c_str());
+ ASSERT_THROW(CommonTestUtils::getLayerByName(cnnNetwork, "layer2"), IE::NotFound);
+ auto newLayer = CommonTestUtils::getLayerByName(cnnNetwork, refNewLayers[0].c_str());
ASSERT_EQ(newLayer->type, "Const");
ASSERT_EQ(constData["data5"], newLayer->blobs.at("custom"));
ASSERT_EQ(nullptr, net->getData("data7"));
for (auto layer : newLayers) newLayer_names.push_back(layer->name);
ASSERT_EQ(newLayer_names, refNewLayers);
+ IE::CNNLayerPtr layer;
+ ASSERT_EQ(IE::NOT_FOUND, net->getLayerByName("input1", layer, nullptr));
+ ASSERT_EQ(nullptr, layer);
+ ASSERT_EQ(IE::NOT_FOUND, net->getLayerByName("layer1", layer, nullptr));
+ ASSERT_EQ(nullptr, layer);
IE::CNNNetwork cnnNetwork(net);
- ASSERT_THROW(cnnNetwork.getLayerByName("input1"), IE::NotFound);
- ASSERT_THROW(cnnNetwork.getLayerByName("layer1"), IE::NotFound);
- auto newLayerD4 = cnnNetwork.getLayerByName(refNewLayers[0].c_str());
- auto newLayerD7 = cnnNetwork.getLayerByName(refNewLayers[1].c_str());
+ auto newLayerD4 = CommonTestUtils::getLayerByName(cnnNetwork, refNewLayers[0]);
+ auto newLayerD7 = CommonTestUtils::getLayerByName(cnnNetwork, refNewLayers[1]);
auto newData4 = net->getData("data4__layer4");
auto newData7 = net->getData("data7__layer2");
ASSERT_EQ(newLayerD4->type, "Const");
ASSERT_EQ(net->allLayers().size(), originalLayersNum);
IE::CNNNetwork cnnNetwork(net);
- auto input4 = cnnNetwork.getLayerByName(constLayer->name.c_str());
+ auto input4 = CommonTestUtils::getLayerByName(cnnNetwork, constLayer->name.c_str());
ASSERT_EQ(data10->getInputTo().size(), 1);
ASSERT_EQ(data10->getCreatorLayer().lock(), input4);
ASSERT_EQ(layer6->insData.size(), 2);
testTransformator->trimShapeInputs({input2}, sortedLayers);
IE::CNNNetwork cnnNetwork(net);
- ASSERT_NO_THROW(input2 = cnnNetwork.getLayerByName(input2->name.c_str()));
+ ASSERT_NO_THROW(input2 = CommonTestUtils::getLayerByName(cnnNetwork, input2->name.c_str()));
ASSERT_EQ(net->allLayers().size(), 10);
ASSERT_EQ(layer1->insData.size(), 2);
ASSERT_EQ(layer1->insData[1].lock(), data2);
testTransformator->trimShapeInputs({input3}, sortedLayers);
IE::CNNNetwork cnnNetwork(net);
- ASSERT_NO_THROW(input3 = cnnNetwork.getLayerByName(input3->name.c_str()));
+ ASSERT_NO_THROW(input3 = CommonTestUtils::getLayerByName(cnnNetwork, input3->name.c_str()));
ASSERT_EQ(net->allLayers().size(), originalLayersNum);
ASSERT_EQ(layer2->insData.size(), 2);
ASSERT_EQ(layer2->insData[0].lock(), getData("data2"));
ASSERT_EQ(net->allLayers().size(), originalLayersNum - 7);
ASSERT_EQ(newLayer_names, refNewLayers);
IE::CNNNetwork cnnNetwork(net);
- auto newLayer = cnnNetwork.getLayerByName(refNewLayers[0].c_str());
+ auto newLayer = CommonTestUtils::getLayerByName(cnnNetwork, refNewLayers[0].c_str());
ASSERT_EQ(newLayer->type, "Const");
ASSERT_EQ(newLayer->outData[0], getData("data9"));
}
IE::CNNNetwork cnnNetwork(net);
ASSERT_EQ(net->allLayers().size(), originalLayersNum - 7);
- auto newLayer = cnnNetwork.getLayerByName(refNewLayers[0].c_str());
+ auto newLayer = CommonTestUtils::getLayerByName(cnnNetwork, refNewLayers[0].c_str());
auto actualBlob = newLayer->blobs["custom"];
ASSERT_NE(actualBlob, nullptr);
ASSERT_FALSE(actualBlob->buffer() == nullptr);
IE::CNNNetwork cnnNetwork(net);
std::string newName = "layer5__data9__Const";
- ASSERT_THROW(cnnNetwork.getLayerByName(newName.c_str()), IE::NotFound);
+ ASSERT_THROW(CommonTestUtils::getLayerByName(cnnNetwork, newName.c_str()), IE::NotFound);
ASSERT_EQ(net->allLayers().size(), 2);
ASSERT_EQ(layer6->insData.size(), 1);
ASSERT_EQ(layer6->insData[0].lock(), getData("data10"));
std::map<std::string, IE::SizeVector> inputShapes = {{"data2", newShape}};
cnnNetwork.reshape(inputShapes);
- ASSERT_NO_THROW(cnnNetwork.getLayerByName("layer2"));
+ ASSERT_NO_THROW(CommonTestUtils::getLayerByName(cnnNetwork, "layer2"));
ASSERT_EQ(getData("data3")->getTensorDesc().getDims(), IE::SizeVector{3});
ASSERT_EQ(net->allLayers().size(), originalLayersNum);
IE::ConstTransformer transformator(net.get());
transformator.fullTrim();
- ASSERT_THROW(cnnNetwork.getLayerByName("layer2"), IE::NotFound);
+ ASSERT_THROW(CommonTestUtils::getLayerByName(cnnNetwork, "layer2"), IE::NotFound);
ASSERT_EQ(getData("data4")->getTensorDesc().getDims(), newShape);
ASSERT_EQ(net->allLayers().size(), originalLayersNum - 1);
}
transformator.foldConstSubgraphs();
switch(precisionOutData) {
case IE::Precision::U8: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<uint8_t *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<uint8_t *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::I32: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::I64: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<long long int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<long long int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::U64: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::FP16: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::FP32: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<float *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<float *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
transformator.foldConstSubgraphs();
switch(precisionOutData) {
case IE::Precision::U8: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<uint8_t *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<uint8_t *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::I32: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::I64: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<long long int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<long long int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::U64: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::FP16: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::FP32: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<float *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<float *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
transformator.foldConstSubgraphs();
switch(precisionOutData) {
case IE::Precision::U8: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<uint8_t *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<uint8_t *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::I32: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::I64: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<long long int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<long long int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::U64: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::FP16: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::FP32: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<float *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<float *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
transformator.foldConstSubgraphs();
switch(precisionOutData) {
case IE::Precision::U8: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<uint8_t *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<uint8_t *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::I32: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::I64: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<long long int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<long long int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::U64: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::FP16: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::FP32: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<float *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<float *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
transformator.foldConstSubgraphs();
switch(precisionOutData) {
case IE::Precision::U8: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<uint8_t *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<uint8_t *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::I32: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::I64: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<long long int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<long long int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::U64: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::FP16: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::FP32: {
- auto *l = cnnNetwork.getLayerByName("addLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<float *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("addLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<float *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
transformator.foldConstSubgraphs();
switch(precisionOutData) {
case IE::Precision::U8: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<uint8_t *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<uint8_t *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::I32: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::I64: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<long long int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<long long int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::U64: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<unsigned long long int *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::FP16: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<IE::ie_fp16 *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);
break;
}
case IE::Precision::FP32: {
- auto *l = cnnNetwork.getLayerByName("mulLayer__data3__Const").get()->blobs.at("custom")->cbuffer().as<float *>();
+ InferenceEngine::CNNLayerPtr layer;
+ ASSERT_EQ(InferenceEngine::OK, net->getLayerByName("mulLayer__data3__Const", layer, nullptr));
+ auto *l = layer->blobs.at("custom")->cbuffer().as<float *>();
ASSERT_EQ(l[0], ref[0]);
ASSERT_EQ(l[1], ref[1]);
ASSERT_EQ(l[2], ref[2]);