#include <ie_parameter.hpp>
#include <ie_core.hpp>
#include <legacy/net_pass.h>
+#include <generic_ie.hpp>
+#include <legacy/convert_function_to_cnn_network.hpp>
+#include <transformations/convert_opset1_to_legacy/convert_opset1_to_legacy.hpp>
+#include <ngraph/pass/manager.hpp>
#include <ngraph/opsets/opset3.hpp>
#include <ngraph/function.hpp>
auto outputs = network.getOutputsInfo();
ASSERT_NE(outputs.find("exp.0"), outputs.end());
}
+
+// Regression test: the name of the layer feeding a Result ("text_features")
+// must be reported as the network output name both right after reading a
+// v10 IR that contains a generic (non-opset) operation, and again after the
+// nGraph function has been run through the legacy conversion passes and
+// converted back to an ICNNNetwork.
+TEST(CNNNGraphImplTests, SaveOriginalResultNameForMultiOutputOp) {
+    std::string model = R"V0G0N(
+<net name="Activation" version="10">
+    <layers>
+        <layer name="in1" type="Parameter" id="0" version="opset1">
+            <data shape="1,3,22,22" element_type="f32"/>
+            <output>
+                <port id="0" precision="FP32">
+                    <dim>1</dim>
+                    <dim>3</dim>
+                    <dim>22</dim>
+                    <dim>22</dim>
+                </port>
+            </output>
+        </layer>
+        <layer name="exp" id="1" type="ExperimentalDetectronROIFeatureExtractor" version="experimental">
+            <input>
+                <port id="1" precision="FP32">
+                    <dim>1</dim>
+                    <dim>3</dim>
+                    <dim>22</dim>
+                    <dim>22</dim>
+                </port>
+            </input>
+            <output>
+                <port id="2" precision="FP32">
+                    <dim>1</dim>
+                    <dim>3</dim>
+                    <dim>22</dim>
+                    <dim>22</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="2" name="fake_const" type="Const" version="opset1">
+            <data offset="0" size="4" shape="1,1,1,1" element_type="f32"/>
+            <output>
+                <port id="1" precision="FP32">
+                    <dim>1</dim>
+                    <dim>1</dim>
+                    <dim>1</dim>
+                    <dim>1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="3" name="text_features" type="Add" version="opset1">
+            <input>
+                <port id="0">
+                    <dim>1</dim>
+                    <dim>3</dim>
+                    <dim>22</dim>
+                    <dim>22</dim>
+                </port>
+                <port id="1">
+                    <dim>1</dim>
+                    <dim>1</dim>
+                    <dim>1</dim>
+                    <dim>1</dim>
+                </port>
+            </input>
+            <output>
+                <port id="2" precision="FP32">
+                    <dim>1</dim>
+                    <dim>3</dim>
+                    <dim>22</dim>
+                    <dim>22</dim>
+                </port>
+            </output>
+        </layer>
+        <layer name="output" type="Result" id="4" version="opset1">
+            <input>
+                <port id="0" precision="FP32">
+                    <dim>1</dim>
+                    <dim>3</dim>
+                    <dim>22</dim>
+                    <dim>22</dim>
+                </port>
+            </input>
+        </layer>
+    </layers>
+    <edges>
+        <edge from-layer="0" from-port="0" to-layer="1" to-port="1"/>
+        <edge from-layer="1" from-port="2" to-layer="3" to-port="0"/>
+        <edge from-layer="2" from-port="1" to-layer="3" to-port="1"/>
+        <edge from-layer="3" from-port="2" to-layer="4" to-port="0"/>
+    </edges>
+</net>
+)V0G0N";
+    InferenceEngine::Core core;
+    // Weights blob backing the "fake_const" layer (it reads 4 bytes at offset 0).
+    Blob::Ptr data = make_shared_blob<float>(TensorDesc(Precision::FP32, {4}, Layout::C));
+    data->allocate();
+    {
+        // Zero-fill so the constant's contents are deterministic.
+        auto lockData = data->buffer();
+        float *dataPtr = lockData.as<float*>();
+
+        for (size_t i = 0; i < 4; ++i) {
+            dataPtr[i] = 0;
+        }
+    }
+    // Parse the v10 IR together with its weights blob.
+    CNNNetwork network = core.ReadNetwork(model, data);
+    {
+        // Before any conversion: the output is named after the producing layer.
+        auto outputs = network.getOutputsInfo();
+        ASSERT_NE(outputs.find("text_features"), outputs.end());
+    }
+
+    auto nGraphFunc = network.getFunction();
+    // Disable shape inference (WA for generic operations)
+    ngraph::op::GenericIE::DisableReshape noReshape(nGraphFunc);
+
+    // Run the opset1 -> legacy conversion pipeline over the nGraph function.
+    ngraph::pass::Manager manager;
+
+    manager.register_pass<ngraph::pass::ConvertOpSet1ToLegacy>();
+
+    manager.run_passes(nGraphFunc);
+
+    // Convert the transformed function back to the legacy ICNNNetwork form.
+    auto clonedNetwork = InferenceEngine::details::convertFunctionToICNNNetwork(nGraphFunc, network);
+    {
+        // After conversion the original output name must still be present.
+        OutputsDataMap outputs;
+        clonedNetwork->getOutputsInfo(outputs);
+        ASSERT_NE(outputs.find("text_features"), outputs.end());
+    }
+}
IE_SUPPRESS_DEPRECATED_END