Remove GetOutputElement op (#1604)
author: Ilya Churaev <ilya.churaev@intel.com>
Tue, 11 Aug 2020 12:28:14 +0000 (15:28 +0300)
committer: GitHub <noreply@github.com>
Tue, 11 Aug 2020 12:28:14 +0000 (15:28 +0300)
43 files changed:
inference-engine/src/inference_engine/cnn_network_ngraph_impl.cpp
inference-engine/src/legacy_api/src/ie_cnn_layer_builder_ngraph.cpp
inference-engine/tests/functional/plugin/cpu/bfloat16/topk_inputs_i32.cpp
inference-engine/tests/unit/frontends/onnx_import/onnx_importer_test.cpp
ngraph/core/include/ngraph/ngraph.hpp
ngraph/core/include/ngraph/node.hpp
ngraph/core/include/ngraph/node_output.hpp
ngraph/core/include/ngraph/op/get_output_element.hpp [deleted file]
ngraph/core/include/ngraph/op/op_version_tbl.hpp
ngraph/core/include/ngraph/ops.hpp
ngraph/core/include/ngraph/pass/get_output_element_elimination.hpp [deleted file]
ngraph/core/include/ngraph/pattern/matcher.hpp
ngraph/core/include/ngraph/pattern/op/any_of.hpp
ngraph/core/src/builder/autobroadcast.cpp
ngraph/core/src/builder/split.cpp
ngraph/core/src/node.cpp
ngraph/core/src/node_output.cpp
ngraph/core/src/op/batch_norm.cpp
ngraph/core/src/op/get_output_element.cpp [deleted file]
ngraph/core/src/op/tensor_iterator.cpp
ngraph/core/src/op/util/rnn_cell_base.cpp
ngraph/core/src/pass/get_output_element_elimination.cpp [deleted file]
ngraph/core/src/pass/visualize_tree.cpp
ngraph/core/src/pattern/matcher.cpp
ngraph/frontend/onnx_import/include/onnx_import/op/identity.hpp
ngraph/frontend/onnx_import/src/op/loop.cpp
ngraph/frontend/onnx_import/src/op/lstm.cpp
ngraph/frontend/onnx_import/src/op/onehot.cpp
ngraph/frontend/onnx_import/src/utils/arg_min_max_factory.cpp
ngraph/test/CMakeLists.txt
ngraph/test/algebraic_simplification.cpp
ngraph/test/backend/fused_op.in.cpp
ngraph/test/backend/topk.in.cpp
ngraph/test/control_dependencies.cpp
ngraph/test/op_is.cpp
ngraph/test/replace_node.cpp
ngraph/test/runtime/ie/ie_backend.cpp
ngraph/test/runtime/ie/ie_executable.cpp
ngraph/test/runtime/interpreter/int_executable.hpp
ngraph/test/runtime/opset0_tbl.hpp
ngraph/test/runtime/pass/fused_op_decomposition.cpp
ngraph/test/type_prop/get_output_element.cpp [deleted file]
ngraph/test/util/engine/ie_engines.cpp

index 965486a..ec04e39 100644 (file)
@@ -17,7 +17,6 @@
 #include <ngraph/graph_util.hpp>
 #include <ngraph/pass/constant_folding.hpp>
 #include <ngraph/pass/manager.hpp>
-#include <ngraph/pass/get_output_element_elimination.hpp>
 #include <set>
 #include <string>
 
@@ -47,12 +46,6 @@ static std::shared_ptr<ngraph::Function> copyFunction(const std::shared_ptr<cons
     if (constFolding) {
         ngraph::pass::ConstantFolding().run_on_function(specialized_function);
     }
-    // TODO: remove this code after the fix on the nGraph side
-    ::ngraph::pass::GetOutputElementElimination goe_elimination;
-    for (auto n : specialized_function->get_ops()) {
-        goe_elimination.run_on_node(n);
-    }
-    specialized_function->set_friendly_name(func->get_friendly_name());
     return specialized_function;
 }
 
index b2cd51d..72efb5e 100644 (file)
@@ -264,11 +264,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::TensorIterator>::createLayer(const std::
     for (const auto& desc : tensor_iterator->get_output_descriptions()) {
         auto result = results[desc->m_body_value_index]->input(0).get_source_output();
 
-        // GetOutputElement layer can be inserted by ngraph deep copy functions
-        // Take the previous layer.
-        if (::ngraph::is_type<ngraph::op::GetOutputElement>(result.get_node_shared_ptr())) {
-            result = result.get_node()->input(0).get_source_output();
-        }
         std::string name = result.get_node()->get_friendly_name();
         if (result.get_node()->get_output_size() > 1) {
             name += "." + std::to_string(result.get_index());
@@ -328,11 +323,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::TensorIterator>::createLayer(const std::
 
                 auto result = results[input_desc->m_body_value_index]->inputs()[0].get_source_output();
 
-                // GetOutputElement layer can be inserted by ngraph deep copy functions
-                // Take the previous layer.
-                if (::ngraph::is_type<ngraph::op::GetOutputElement>(result.get_node_shared_ptr())) {
-                    result = result.get_node()->input(0).get_source_output();
-                }
                 // Create correct name for output.
                 std::string output_name = result.get_node()->get_friendly_name();
                 if (result.get_node()->get_output_size() > 1) {
index 9774b29..c0cdfa1 100644 (file)
@@ -97,9 +97,6 @@ protected:
         auto argmaxNode = std::make_shared<opset1::TopK>(convNode, k, axis, mode, sort);
         argmaxNode->set_friendly_name("TopK_1");
 
-        auto goe0 = make_shared<op::GetOutputElement>(argmaxNode, 0);
-        auto goe1 = make_shared<op::GetOutputElement>(argmaxNode, 1);
-
         // convolution
         std::shared_ptr<ngraph::opset1::Constant> weightsNode2 = nullptr;
         ngraph::Shape convFilterShape2 = { 1, 1, 3, 3 };  // out channels, input channels, kernel h, kernel w
@@ -116,7 +113,7 @@ protected:
         }
 
         std::shared_ptr<ngraph::Node> convNode2 = std::make_shared<ngraph::opset1::Convolution>(
-            goe0, weightsNode2,
+            argmaxNode->output(0), weightsNode2->output(0),
             ngraph::Strides({ 1, 1 }),   // strides
             ngraph::CoordinateDiff({ 0, 0 }),  // pad begin
             ngraph::CoordinateDiff({ 0, 0 }),   // pad end
@@ -124,7 +121,7 @@ protected:
             ngraph::op::PadType::EXPLICIT);   // pad type
         convNode2->set_friendly_name("Convolution_2");
 
-        return std::make_shared<ngraph::Function>(ngraph::NodeVector{convNode2, goe1}, ngraph::ParameterVector{input1});
+        return std::make_shared<ngraph::Function>(ngraph::OutputVector{convNode2->output(0), argmaxNode->output(1)}, ngraph::ParameterVector{input1});
     }
     void SetUp() override {
         std::tie(inputPrecision, netPrecision, inputShapes, newInputShapes, targetDevice) = this->GetParam();
index 2c8a325..6b16ac7 100644 (file)
@@ -71,7 +71,6 @@ TEST(ONNX_Importer_Tests, ImportModelWithMultiOutput) {
         const auto op_type = std::string(op->get_type_name());
         count_topk += (op_type == "TopK" ? 1 : 0);
         count_constants += (op_type == "Constant" ? 1 : 0);
-        count_goe += (op_type == "GetOutputElement" ? 1 : 0);
         count_parameters += (op_type == "Parameter" ? 1 : 0);
     }
 
@@ -84,7 +83,6 @@ TEST(ONNX_Importer_Tests, ImportModelWithMultiOutput) {
     ASSERT_EQ(function->get_output_shape(1), ngraph::Shape({3, 3}));
     ASSERT_EQ(count_topk, 1);
     ASSERT_EQ(count_constants, 1);
-    ASSERT_EQ(count_goe, 2);
     ASSERT_EQ(count_parameters, 1);
 }
 
index 2ec215d..27d81bc 100644 (file)
@@ -98,7 +98,6 @@ namespace ngraph
 #include "ngraph/opsets/opset.hpp"
 
 // nGraph passes
-#include "ngraph/pass/get_output_element_elimination.hpp"
 #include "ngraph/pass/graph_rewrite.hpp"
 #include "ngraph/pass/manager.hpp"
 #include "ngraph/pass/visualize_tree.hpp"
index 3a4a4f5..7fb73ae 100644 (file)
@@ -302,11 +302,6 @@ namespace ngraph
         /// Returns the partial shape for output i
         const PartialShape& get_output_partial_shape(size_t i) const;
 
-        /// Second argument is ignored
-        /// Returns the node if i=0 and the node has 1 output, otherwise a GetOutputElement
-        /// If the node is a GetOutputElement, applies to the underlying node
-        std::shared_ptr<Node> get_output_as_single_output_node(size_t i);
-
         /// Return the output to use when converting to an Output<Node> with no index specified.
         /// Throws when not supported.
         Output<const Node> get_default_output() const;
index 09605ba..4999a54 100644 (file)
@@ -39,9 +39,6 @@ namespace ngraph
     template <>
     class NGRAPH_API Output<Node>
     {
-        NGRAPH_DEPRECATED("Remove when GetOrderedOutput is removed")
-        void eliminate_goe();
-
     public:
         /// \brief Constructs a Output.
         /// \param node A pointer to the node for the output handle.
@@ -76,10 +73,6 @@ namespace ngraph
         ///
         /// TODO: Make a plan to deprecate this.
         std::shared_ptr<Node> get_node_shared_ptr() const;
-        /// \return A useable shared pointer to this output. If index 0, the node,
-        /// otherwise find or create a GOE.
-        NGRAPH_DEPRECATED("Transitional.")
-        std::shared_ptr<Node> as_single_output_node() const;
 
         /// \return The index of the output referred to by this output handle.
         size_t get_index() const;
@@ -122,8 +115,6 @@ namespace ngraph
     template <>
     class NGRAPH_API Output<const Node>
     {
-        void eliminate_goe();
-
     public:
         /// \brief Constructs a Output.
         /// \param node A pointer to the node for the output handle.
diff --git a/ngraph/core/include/ngraph/op/get_output_element.hpp b/ngraph/core/include/ngraph/op/get_output_element.hpp
deleted file mode 100644 (file)
index 52eb580..0000000
+++ /dev/null
@@ -1,68 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#pragma once
-
-#include "ngraph/op/op.hpp"
-
-namespace ngraph
-{
-    namespace op
-    {
-        NGRAPH_API
-        NodeVector get_output_elements(const std::shared_ptr<Node>& mon);
-
-        namespace v0
-        {
-            /// \brief Operation to get an output from a node.
-            class NGRAPH_API GetOutputElement : public Op
-            {
-            public:
-                static constexpr NodeTypeInfo type_info{"GetOutputElement", 0};
-                const NodeTypeInfo& get_type_info() const override { return type_info; }
-                GetOutputElement() = default;
-                /// \brief Constructs a get-tuple-element operation.
-                ///
-                /// \param arg The input tuple.
-                /// \param n The index of the tuple element to get.
-                GetOutputElement(const std::shared_ptr<Node>& arg, size_t n);
-
-                /// Return the equivalent Output<Node>
-                Output<Node> get_as_output() const;
-
-                std::shared_ptr<Node>
-                    clone_with_new_inputs(const OutputVector& inputs) const override;
-                void validate_and_infer_types() override;
-
-                /// \return The index of the tuple element to get.
-                size_t get_n() const { return m_n; }
-            protected:
-                size_t m_n;
-            };
-        }
-        using v0::GetOutputElement;
-    }
-
-    inline std::shared_ptr<Node> get_output_element(const Output<Node>& output)
-    {
-        return output.get_node_shared_ptr()->get_output_as_single_output_node(output.get_index());
-    }
-
-    inline std::shared_ptr<Node> get_output_element(const std::shared_ptr<Node> node, size_t i = 0)
-    {
-        return node->get_output_as_single_output_node(i);
-    }
-}
index 79e6d0f..d4babea 100644 (file)
@@ -84,7 +84,6 @@ NGRAPH_OP(Gather, ngraph::op::v1, 1)
 NGRAPH_OP(GatherND, ngraph::op::v0, 0)
 NGRAPH_OP(GatherTree, ngraph::op::v1, 1)
 NGRAPH_OP(Gelu, ngraph::op::v0, 0)
-NGRAPH_OP(GetOutputElement, ngraph::op::v0, 0)
 NGRAPH_OP(Greater, ngraph::op::v0, 0)
 NGRAPH_OP(Greater, ngraph::op::v1, 1)
 NGRAPH_OP(GreaterEq, ngraph::op::v0, 0)
index ca5d940..46a4bea 100644 (file)
@@ -86,7 +86,6 @@
 #include "ngraph/op/gather.hpp"
 #include "ngraph/op/gather_nd.hpp"
 #include "ngraph/op/gather_tree.hpp"
-#include "ngraph/op/get_output_element.hpp"
 #include "ngraph/op/greater.hpp"
 #include "ngraph/op/greater_eq.hpp"
 #include "ngraph/op/group_conv.hpp"
diff --git a/ngraph/core/include/ngraph/pass/get_output_element_elimination.hpp b/ngraph/core/include/ngraph/pass/get_output_element_elimination.hpp
deleted file mode 100644 (file)
index cfa1692..0000000
+++ /dev/null
@@ -1,37 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#pragma once
-
-#include "ngraph/pass/pass.hpp"
-
-namespace ngraph
-{
-    namespace pass
-    {
-        class GetOutputElementElimination;
-    }
-}
-
-NGRAPH_SUPPRESS_DEPRECATED_START
-class NGRAPH_API ngraph::pass::GetOutputElementElimination : public NodePass
-{
-public:
-    NGRAPH_RTTI_DECLARATION;
-
-    bool run_on_node(std::shared_ptr<Node> node) override;
-};
-NGRAPH_SUPPRESS_DEPRECATED_END
index 2583f1d..46fa869 100644 (file)
@@ -151,12 +151,7 @@ namespace ngraph
             OutputVector& get_matched_values() { return m_matched_list; }
             void reset() {}
             const std::string& get_name() { return m_name; }
-            std::shared_ptr<Node> get_pattern()
-            {
-                NGRAPH_SUPPRESS_DEPRECATED_START
-                return m_pattern_node.as_single_output_node();
-                NGRAPH_SUPPRESS_DEPRECATED_END
-            }
+            std::shared_ptr<Node> get_pattern() { return m_pattern_node.get_node_shared_ptr(); }
             Output<Node> get_pattern_value() { return m_pattern_node; }
             std::shared_ptr<Node> get_match_root();
             Output<Node> get_match_value();
index 83d65cd..4cd5b88 100644 (file)
@@ -58,9 +58,7 @@ namespace ngraph
                     : AnyOf(type,
                             s,
                             [pred](const Output<Node>& value) {
-                                NGRAPH_SUPPRESS_DEPRECATED_START
-                                return pred(value.as_single_output_node());
-                                NGRAPH_SUPPRESS_DEPRECATED_END
+                                return pred(value.get_node_shared_ptr());
                             },
                             as_output_vector(wrapped_values))
                 {
index b7f4dec..18a6eec 100644 (file)
@@ -139,9 +139,7 @@ namespace ngraph
                                                      const Shape& output_shape,
                                                      const Shape& source_shape)
         {
-            NGRAPH_SUPPRESS_DEPRECATED_START
-            shared_ptr<Node> broadcasted_node = value.as_single_output_node();
-            NGRAPH_SUPPRESS_DEPRECATED_END
+            shared_ptr<Node> broadcasted_node = value.get_node_shared_ptr();
             // If node already has the required shape, return original node
             if (output_shape == value.get_shape())
             {
@@ -202,9 +200,7 @@ namespace ngraph
             // If node already has the required shape, return original node
             if (output_shape == value_shape)
             {
-                NGRAPH_SUPPRESS_DEPRECATED_START
-                return value.as_single_output_node();
-                NGRAPH_SUPPRESS_DEPRECATED_END
+                return value.get_node_shared_ptr();
             }
 
             if (axis == -1)
@@ -253,10 +249,8 @@ namespace ngraph
             // Handle the trivial case...
             if (arg1_in_shape == arg2_in_shape)
             {
-                NGRAPH_SUPPRESS_DEPRECATED_START
-                return make_pair(args.first.as_single_output_node(),
-                                 args.second.as_single_output_node());
-                NGRAPH_SUPPRESS_DEPRECATED_END
+                return make_pair(args.first.get_node_shared_ptr(),
+                                 args.second.get_node_shared_ptr());
             }
 
             NodeVector bcasted_outputs =
index 9fd8da4..54d6aa3 100644 (file)
@@ -15,7 +15,6 @@
 //*****************************************************************************
 
 #include "ngraph/builder/split.hpp"
-#include "ngraph/op/get_output_element.hpp"
 #include "ngraph/op/slice.hpp"
 #include "ngraph/opsets/opset1.hpp"
 
index 1e03239..1d6eb09 100644 (file)
@@ -25,7 +25,6 @@
 #include "ngraph/itt.hpp"
 #include "ngraph/node.hpp"
 #include "ngraph/op/constant.hpp"
-#include "ngraph/op/get_output_element.hpp"
 #include "ngraph/op/parameter.hpp"
 #include "ngraph/op/result.hpp"
 #include "ngraph/pattern/matcher.hpp"
@@ -74,25 +73,6 @@ std::shared_ptr<Node> Node::copy_with_new_inputs(const OutputVector& inputs) con
     return copy_with_new_inputs(inputs, get_control_dependencies());
 }
 
-std::shared_ptr<Node> Node::get_output_as_single_output_node(size_t i)
-{
-    if (i == 0 && get_output_size() == 1)
-    {
-        return shared_from_this();
-    }
-    else
-    {
-        for (auto in : output(i).get_target_inputs())
-        {
-            if (is_type<op::GetOutputElement>(in.get_node()))
-            {
-                return in.get_node()->shared_from_this();
-            }
-        }
-        return make_shared<op::GetOutputElement>(shared_from_this(), i);
-    }
-}
-
 Output<const Node> Node::get_default_output() const
 {
     return output(get_default_output_index());
index 5f2e3f4..a32f4b4 100644 (file)
@@ -17,7 +17,6 @@
 #include "ngraph/node_output.hpp"
 #include "ngraph/log.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/get_output_element.hpp"
 
 namespace ngraph
 {
@@ -25,18 +24,12 @@ namespace ngraph
         : m_node(node->shared_from_this())
         , m_index(index)
     {
-        NGRAPH_SUPPRESS_DEPRECATED_START
-        eliminate_goe();
-        NGRAPH_SUPPRESS_DEPRECATED_END
     }
 
     Output<Node>::Output(const std::shared_ptr<Node>& node, size_t index)
         : m_node(node)
         , m_index(index)
     {
-        NGRAPH_SUPPRESS_DEPRECATED_START
-        eliminate_goe();
-        NGRAPH_SUPPRESS_DEPRECATED_END
     }
 
     void Output<Node>::reset()
@@ -51,11 +44,6 @@ namespace ngraph
     }
     Node* Output<Node>::get_node() const { return m_node.get(); }
     std::shared_ptr<Node> Output<Node>::get_node_shared_ptr() const { return m_node; }
-    std::shared_ptr<Node> Output<Node>::as_single_output_node() const
-    {
-        return m_node->get_output_as_single_output_node(m_index);
-    }
-
     size_t Output<Node>::get_index() const { return m_index; }
     descriptor::Tensor& Output<Node>::get_tensor() const
     {
@@ -97,11 +85,7 @@ namespace ngraph
     {
         for (auto& input : get_target_inputs())
         {
-            // GOEs are used as handles in passes
-            if (!is_type<op::GetOutputElement>(input.get_node()))
-            {
-                input.replace_source_output(replacement);
-            }
+            input.replace_source_output(replacement);
         }
     }
 
@@ -120,26 +104,16 @@ namespace ngraph
     }
     bool Output<Node>::operator<=(const Output& other) const { return !(*this > other); }
     bool Output<Node>::operator>=(const Output& other) const { return !(*this < other); }
-    void Output<Node>::eliminate_goe()
-    {
-        while (is_type<op::GetOutputElement>(m_node))
-        {
-            *this = m_node->input_value(0);
-        }
-    }
-
     Output<const Node>::Output(const Node* node, size_t index)
         : m_node(node->shared_from_this())
         , m_index(index)
     {
-        eliminate_goe();
     }
 
     Output<const Node>::Output(const std::shared_ptr<const Node>& node, size_t index)
         : m_node(node)
         , m_index(index)
     {
-        eliminate_goe();
     }
 
     void Output<const Node>::reset()
@@ -201,16 +175,6 @@ namespace ngraph
     }
     bool Output<const Node>::operator<=(const Output& other) const { return !(*this > other); }
     bool Output<const Node>::operator>=(const Output& other) const { return !(*this < other); }
-    void Output<const Node>::eliminate_goe()
-    {
-        while (is_type<const op::GetOutputElement>(m_node))
-        {
-            auto value = m_node->input_value(0);
-            m_node = value.get_node_shared_ptr();
-            m_index = value.get_index();
-        }
-    }
-
     std::ostream& operator<<(std::ostream& out, const Output<Node>& output)
     {
         return output.get_node()->write_description(out, 0) << "[" << output.get_index()
index 2bbdfc3..04470f9 100644 (file)
@@ -18,7 +18,6 @@
 
 #include "ngraph/attribute_visitor.hpp"
 #include "ngraph/op/batch_norm.hpp"
-#include "ngraph/op/get_output_element.hpp"
 #include "ngraph/validation_util.hpp"
 
 using namespace std;
diff --git a/ngraph/core/src/op/get_output_element.cpp b/ngraph/core/src/op/get_output_element.cpp
deleted file mode 100644 (file)
index 9cd3091..0000000
+++ /dev/null
@@ -1,67 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#include <sstream>
-
-#include "ngraph/op/get_output_element.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-constexpr NodeTypeInfo op::GetOutputElement::type_info;
-
-op::GetOutputElement::GetOutputElement(const shared_ptr<Node>& arg, size_t n)
-    : Op({arg->output(n)})
-    , m_n{n}
-{
-    constructor_validate_and_infer_types();
-}
-
-void op::GetOutputElement::validate_and_infer_types()
-{
-    NODE_VALIDATION_CHECK(this,
-                          m_n < input_value(0).get_node()->get_output_size(),
-                          "Output at index ",
-                          m_n,
-                          " requested, but node has only ",
-                          get_input_size(),
-                          " inputs.");
-
-    set_output_type(0, get_input_element_type(0), get_input_partial_shape(0));
-}
-
-shared_ptr<Node> op::GetOutputElement::clone_with_new_inputs(const OutputVector& inputs) const
-{
-    auto& value = inputs.at(0);
-    return make_shared<op::GetOutputElement>(value.get_node_shared_ptr(), value.get_index());
-}
-
-Output<Node> op::GetOutputElement::get_as_output() const
-{
-    return input_value(0);
-}
-
-NodeVector op::get_output_elements(const shared_ptr<Node>& mon)
-{
-    NodeVector goes(mon->get_output_size());
-    for (auto o : mon->outputs())
-    {
-        NGRAPH_SUPPRESS_DEPRECATED_START
-        goes.at(o.get_index()) = o.as_single_output_node();
-        NGRAPH_SUPPRESS_DEPRECATED_END
-    }
-    return goes;
-}
index f449f57..03a5d9b 100644 (file)
@@ -17,7 +17,6 @@
 #include "ngraph/op/tensor_iterator.hpp"
 #include "ngraph/factory.hpp"
 #include "ngraph/graph_util.hpp"
-#include "ngraph/pass/get_output_element_elimination.hpp"
 #include "ngraph/specialize_function.hpp"
 
 using namespace std;
@@ -665,13 +664,6 @@ std::shared_ptr<Node>
     op->m_body =
         std::make_shared<BodyLambda>(spec_func->get_results(), spec_func->get_parameters());
 
-    // TODO: remove this code after the fix on the nGraph side (GetOutputElements)
-    ::ngraph::pass::GetOutputElementElimination goe_elimination;
-    for (const auto& n : spec_func->get_ops())
-    {
-        goe_elimination.run_on_node(n);
-    }
-
     for (auto& input_description : m_input_descriptions)
     {
         op->m_input_descriptions.push_back(input_description->copy());
index b619450..43fed86 100644 (file)
@@ -111,9 +111,7 @@ shared_ptr<Node> op::util::RNNCellBase::clip(const Output<Node>& data) const
 {
     if (m_clip == 0.f)
     {
-        NGRAPH_SUPPRESS_DEPRECATED_START
-        return data.as_single_output_node();
-        NGRAPH_SUPPRESS_DEPRECATED_END
+        return data.get_node_shared_ptr();
     }
 
     return make_shared<op::Clamp>(data, -m_clip, m_clip);
diff --git a/ngraph/core/src/pass/get_output_element_elimination.cpp b/ngraph/core/src/pass/get_output_element_elimination.cpp
deleted file mode 100644 (file)
index f948c6b..0000000
+++ /dev/null
@@ -1,52 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#include <set>
-
-#include "get_output_element_elimination.hpp"
-#include "ngraph/graph_util.hpp"
-#include "ngraph/log.hpp"
-#include "ngraph/op/avg_pool.hpp"
-#include "ngraph/op/broadcast.hpp"
-#include "ngraph/op/constant.hpp"
-#include "ngraph/op/convolution.hpp"
-#include "ngraph/op/get_output_element.hpp"
-#include "ngraph/op/max_pool.hpp"
-#include "ngraph/op/pad.hpp"
-#include "ngraph/op/product.hpp"
-#include "ngraph/op/sum.hpp"
-
-using namespace ngraph;
-using namespace std;
-
-NGRAPH_RTTI_DEFINITION(ngraph::pass::GetOutputElementElimination,
-                       "ngraph::pass::GetOutputElementElimination",
-                       0);
-
-bool pass::GetOutputElementElimination::run_on_node(shared_ptr<Node> n)
-{
-    bool optimized = false;
-    for (auto& input : n->inputs())
-    {
-        if (auto goe = dynamic_cast<op::GetOutputElement*>(input.get_source_output().get_node()))
-        {
-            input.replace_source_output(goe->input_value(0));
-            // we don't need to fix anything w.r.t GetOutputElement as it will become unreachable
-            optimized = true;
-        }
-    }
-    return optimized;
-}
index 6d00a54..82a4b21 100644 (file)
@@ -22,7 +22,6 @@
 #include "ngraph/graph_util.hpp"
 #include "ngraph/node.hpp"
 #include "ngraph/op/constant.hpp"
-#include "ngraph/op/get_output_element.hpp"
 #include "ngraph/op/parameter.hpp"
 #include "ngraph/op/util/op_types.hpp"
 #include "ngraph/pass/pass.hpp"
@@ -162,10 +161,6 @@ static std::string label_edge(const std::shared_ptr<Node>& /* src */,
     if (getenv_bool("NGRAPH_VISUALIZE_EDGE_LABELS"))
     {
         size_t output = 0;
-        if (auto goe = as_type_ptr<op::GetOutputElement>(dst))
-        {
-            output = goe->get_as_output().get_index();
-        }
         stringstream label_edge;
         label_edge << "[label=\" " << output << " -> " << arg_index << " \"]";
         ss << label_edge.str();
index d4a2422..616a9fb 100644 (file)
@@ -21,7 +21,6 @@
 #include "ngraph/env_util.hpp"
 #include "ngraph/graph_util.hpp"
 #include "ngraph/log.hpp"
-#include "ngraph/op/get_output_element.hpp"
 #include "ngraph/op/parameter.hpp"
 #include "ngraph/op/util/op_types.hpp"
 
index 817fbdf..0791482 100644 (file)
@@ -19,8 +19,8 @@
 #include <memory>
 
 #include "ngraph/node.hpp"
-#include "ngraph/op/get_output_element.hpp"
 #include "onnx_import/core/node.hpp"
+#include "onnx_import/default_opset.hpp"
 
 namespace ngraph
 {
index 8778eed..865b985 100644 (file)
@@ -193,15 +193,11 @@ namespace ngraph
                     OutputVector node_outputs;
                     for (const auto& v : final_values)
                     {
-                        NGRAPH_SUPPRESS_DEPRECATED_START
-                        node_outputs.push_back(v.as_single_output_node());
-                        NGRAPH_SUPPRESS_DEPRECATED_END
+                        node_outputs.push_back(v);
                     }
                     for (const auto& v : scan_outputs)
                     {
-                        NGRAPH_SUPPRESS_DEPRECATED_START
-                        node_outputs.push_back(v.as_single_output_node());
-                        NGRAPH_SUPPRESS_DEPRECATED_END
+                        node_outputs.push_back(v);
                     }
                     return node_outputs;
                 }
index 8ebb65d..8a78824 100644 (file)
@@ -28,7 +28,6 @@
 #include "ngraph/op/add.hpp"
 #include "ngraph/op/constant.hpp"
 #include "ngraph/op/fused/lstm_sequence.hpp"
-#include "ngraph/op/get_output_element.hpp"
 #include "ngraph/op/util/attr_types.hpp"
 #include "ngraph/shape.hpp"
 #include "ngraph/type/element_type.hpp"
index d508306..229b4ed 100644 (file)
@@ -41,10 +41,8 @@ namespace ngraph
                     auto split_axis = default_opset::Constant::create(element::i64, {}, {0});
                     auto off_on_values =
                         std::make_shared<default_opset::Split>(values, split_axis, 2);
-                    auto off_value =
-                        reshape::interpret_as_scalar(get_output_element(off_on_values, size_t{0}));
-                    auto on_value =
-                        reshape::interpret_as_scalar(get_output_element(off_on_values, size_t{1}));
+                    auto off_value = reshape::interpret_as_scalar(off_on_values->output(0));
+                    auto on_value = reshape::interpret_as_scalar(off_on_values->output(1));
 
                     auto axis = node.get_attribute_value<std::int64_t>("axis", -1);
 
index 4f1caea..c869501 100644 (file)
@@ -15,7 +15,6 @@
 //*****************************************************************************
 
 #include "onnx_import/utils/arg_min_max_factory.hpp"
-#include "ngraph/op/get_output_element.hpp"
 #include "ngraph/validation_util.hpp"
 #include "onnx_import/default_opset.hpp"
 
index 2447040..3254d8a 100644 (file)
@@ -121,7 +121,6 @@ set(SRC
     type_prop/gather.cpp
     type_prop/gather_nd.cpp
     type_prop/gather_tree.cpp
-    type_prop/get_output_element.cpp
     type_prop/grn.cpp
     type_prop/group_convolution.cpp
     type_prop/group_convolution_backprop_data.cpp
index 23b4edf..40de990 100644 (file)
@@ -31,7 +31,6 @@
 #include "ngraph/op/constant.hpp"
 #include "ngraph/op/divide.hpp"
 #include "ngraph/op/exp.hpp"
-#include "ngraph/op/get_output_element.hpp"
 #include "ngraph/op/log.hpp"
 #include "ngraph/op/multiply.hpp"
 #include "ngraph/op/negative.hpp"
index 33cb738..9647d98 100644 (file)
@@ -1630,7 +1630,7 @@ NGRAPH_TEST(${BACKEND_NAME}, lstm_cell_zero_bias_peepholes)
     const auto lstm_cell = make_shared<op::LSTMCell>(
         X, H_t, C_t, W, R, B, P, hidden_size, op::LSTMWeightsFormat::IOFC);
 
-    auto ht_function = make_shared<Function>(make_shared<op::GetOutputElement>(lstm_cell, 0),
+    auto ht_function = make_shared<Function>(OutputVector{lstm_cell->output(0)},
                                              ParameterVector{X, H_t, C_t, W, R, B, P});
     auto ht_test_case = test::TestCase<TestEngine>(ht_function);
 
@@ -1670,7 +1670,7 @@ NGRAPH_TEST(${BACKEND_NAME}, lstm_cell_zero_bias_peepholes)
         {0.81457126f, 0.61109227f, 0.769522f, 0.52239674f, 0.4324641f, 0.63183f});
     ht_test_case.run();
 
-    auto ct_function = make_shared<Function>(make_shared<op::GetOutputElement>(lstm_cell, 1),
+    auto ct_function = make_shared<Function>(OutputVector{lstm_cell->output(1)},
                                              ParameterVector{X, H_t, C_t, W, R, B, P});
     auto ct_test_case = test::TestCase<TestEngine>(ct_function);
     ct_test_case.add_multiple_inputs(
@@ -1701,7 +1701,7 @@ NGRAPH_TEST(${BACKEND_NAME}, lstm_cell_bias_peepholes)
     const auto lstm_cell = make_shared<op::LSTMCell>(
         X, H_t, C_t, W, R, B, P, hidden_size, op::LSTMWeightsFormat::IOFC);
 
-    auto ht_function = make_shared<Function>(make_shared<op::GetOutputElement>(lstm_cell, 0),
+    auto ht_function = make_shared<Function>(OutputVector{lstm_cell->output(0)},
                                              ParameterVector{X, H_t, C_t, W, R, B, P});
     auto ht_test_case = test::TestCase<TestEngine>(ht_function);
 
@@ -1760,7 +1760,7 @@ NGRAPH_TEST(${BACKEND_NAME}, lstm_cell_bias_peepholes)
         {0.9218244f, 0.78787273f, 0.8754273f, 0.7361462f, 0.70927656f, 0.83522964f});
     ht_test_case.run();
 
-    auto ct_function = make_shared<Function>(make_shared<op::GetOutputElement>(lstm_cell, 1),
+    auto ct_function = make_shared<Function>(OutputVector{lstm_cell->output(1)},
                                              ParameterVector{X, H_t, C_t, W, R, B, P});
     auto ct_test_case = test::TestCase<TestEngine>(ct_function);
     ct_test_case.add_multiple_inputs(
@@ -1804,7 +1804,7 @@ NGRAPH_TEST(${BACKEND_NAME}, lstm_cell_bias_peepholes_clip_input_forget)
                                                      vector<float>{},
                                                      clip_threshold,
                                                      input_forget);
-    auto ht_function = make_shared<Function>(make_shared<op::GetOutputElement>(lstm_cell, 0),
+    auto ht_function = make_shared<Function>(OutputVector{lstm_cell->output(0)},
                                              ParameterVector{X, H_t, C_t, W, R, B, P});
     auto ht_test_case = test::TestCase<TestEngine>(ht_function);
 
@@ -1863,7 +1863,7 @@ NGRAPH_TEST(${BACKEND_NAME}, lstm_cell_bias_peepholes_clip_input_forget)
         {0.71485436f, 0.71844107f, 0.72704613f, 0.6235602f, 0.68306124f, 0.6978715f});
     ht_test_case.run();
 
-    auto ct_function = make_shared<Function>(make_shared<op::GetOutputElement>(lstm_cell, 1),
+    auto ct_function = make_shared<Function>(OutputVector{lstm_cell->output(1)},
                                              ParameterVector{X, H_t, C_t, W, R, B, P});
     auto ct_test_case = test::TestCase<TestEngine>(ct_function);
     ct_test_case.add_multiple_inputs(
@@ -1910,7 +1910,7 @@ NGRAPH_TEST(${BACKEND_NAME}, lstm_cell_activaction_functions)
                                                      activation_beta,
                                                      clip_threshold,
                                                      input_forget);
-    auto ht_function = make_shared<Function>(make_shared<op::GetOutputElement>(lstm_cell, 0),
+    auto ht_function = make_shared<Function>(OutputVector{lstm_cell->output(0)},
                                              ParameterVector{X, H_t, C_t, W, R, B, P});
     auto ht_test_case = test::TestCase<TestEngine>(ht_function);
 
@@ -1969,7 +1969,7 @@ NGRAPH_TEST(${BACKEND_NAME}, lstm_cell_activaction_functions)
         {0.96834344f, 0.9695254f, 0.97068775f, 0.9077866f, 0.94161016f, 0.96599925f});
     ht_test_case.run();
 
-    auto ct_function = make_shared<Function>(make_shared<op::GetOutputElement>(lstm_cell, 1),
+    auto ct_function = make_shared<Function>(OutputVector{lstm_cell->output(1)},
                                              ParameterVector{X, H_t, C_t, W, R, B, P});
     auto ct_test_case = test::TestCase<TestEngine>(ct_function);
     ct_test_case.add_multiple_inputs(
index cbe30d5..bee6b3c 100644 (file)
@@ -24,7 +24,6 @@
 
 #include "gtest/gtest.h"
 #include "ngraph/op/constant.hpp"
-#include "ngraph/op/get_output_element.hpp"
 #include "ngraph/op/parameter.hpp"
 #include "ngraph/op/result.hpp"
 #include "ngraph/op/topk.hpp"
@@ -65,11 +64,11 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_resnet50)
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 5, true);
     auto C = make_shared<op::TopK>(A, 1, element::i32, 1, true);
-    auto out5_value = make_shared<op::GetOutputElement>(B, 1);
-    auto out5_index = make_shared<op::GetOutputElement>(B, 0);
-    auto out1_value = make_shared<op::GetOutputElement>(C, 1);
-    auto out1_index = make_shared<op::GetOutputElement>(C, 0);
-    auto f = make_shared<Function>(NodeVector{out5_value, out5_index, out1_value, out1_index},
+    auto out5_value = B->output(1);
+    auto out5_index = B->output(0);
+    auto out1_value = C->output(1);
+    auto out1_index = C->output(0);
+    auto f = make_shared<Function>(OutputVector{out5_value, out5_index, out1_value, out1_index},
                                    ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
@@ -133,9 +132,9 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_max_sort_none)
     Shape rshape{128, 5};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 5, true, op::TopK::SortType::NONE);
-    auto out_value = make_shared<op::GetOutputElement>(B, 1);
-    auto out_index = make_shared<op::GetOutputElement>(B, 0);
-    auto f = make_shared<Function>(NodeVector{out_value, out_index}, ParameterVector{A});
+    auto out_value = B->output(1);
+    auto out_index = B->output(0);
+    auto f = make_shared<Function>(OutputVector{out_value, out_index}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -184,9 +183,9 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_min_sort_none)
     Shape rshape{128, 5};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 5, false, op::TopK::SortType::NONE);
-    auto out_value = make_shared<op::GetOutputElement>(B, 1);
-    auto out_index = make_shared<op::GetOutputElement>(B, 0);
-    auto f = make_shared<Function>(NodeVector{out_value, out_index}, ParameterVector{A});
+    auto out_value = B->output(1);
+    auto out_index = B->output(0);
+    auto f = make_shared<Function>(OutputVector{out_value, out_index}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -235,9 +234,9 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_max_sort_value)
     Shape rshape{128, 5};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 5, true, op::TopK::SortType::SORT_VALUES);
-    auto out_value = make_shared<op::GetOutputElement>(B, 1);
-    auto out_index = make_shared<op::GetOutputElement>(B, 0);
-    auto f = make_shared<Function>(NodeVector{out_value, out_index}, ParameterVector{A});
+    auto out_value = B->output(1);
+    auto out_index = B->output(0);
+    auto f = make_shared<Function>(OutputVector{out_value, out_index}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -282,9 +281,9 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_min_sort_value)
     Shape rshape{128, 5};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 5, false, op::TopK::SortType::SORT_VALUES);
-    auto out_value = make_shared<op::GetOutputElement>(B, 1);
-    auto out_index = make_shared<op::GetOutputElement>(B, 0);
-    auto f = make_shared<Function>(NodeVector{out_value, out_index}, ParameterVector{A});
+    auto out_value = B->output(1);
+    auto out_index = B->output(0);
+    auto f = make_shared<Function>(OutputVector{out_value, out_index}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -333,9 +332,9 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_max_sort_index)
     Shape rshape{128, 5};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 5, true, op::TopK::SortType::SORT_INDICES);
-    auto out_value = make_shared<op::GetOutputElement>(B, 1);
-    auto out_index = make_shared<op::GetOutputElement>(B, 0);
-    auto f = make_shared<Function>(NodeVector{out_value, out_index}, ParameterVector{A});
+    auto out_value = B->output(1);
+    auto out_index = B->output(0);
+    auto f = make_shared<Function>(OutputVector{out_value, out_index}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -384,9 +383,9 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_min_sort_index)
     Shape rshape{128, 5};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 5, false, op::TopK::SortType::SORT_INDICES);
-    auto out_value = make_shared<op::GetOutputElement>(B, 1);
-    auto out_index = make_shared<op::GetOutputElement>(B, 0);
-    auto f = make_shared<Function>(NodeVector{out_value, out_index}, ParameterVector{A});
+    auto out_value = B->output(1);
+    auto out_index = B->output(0);
+    auto f = make_shared<Function>(OutputVector{out_value, out_index}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -435,8 +434,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_1d_max_all)
     Shape rshape{6};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 0, element::i32, 0, true);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -461,8 +460,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_1d_i32_max_all)
     Shape rshape{6};
     auto A = make_shared<op::Parameter>(element::i32, shape);
     auto B = make_shared<op::TopK>(A, 0, element::i32, 0, true);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -486,8 +485,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_1d_max_partial)
     Shape rshape{3};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 0, element::i32, 3, true);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -512,8 +511,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_1d_max_one)
     Shape rshape{1};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 0, element::i32, 1, true);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -538,8 +537,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_1d_min_all)
     Shape rshape{6};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 0, element::i32, 0, false);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -564,8 +563,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_1d_min_partial)
     Shape rshape{3};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 0, element::i32, 3, false);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -590,8 +589,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_1d_min_one)
     Shape rshape{1};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 0, element::i32, 1, false);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -616,8 +615,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_3d_max_all)
     Shape rshape{2, 3, 2};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 0, true);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -643,8 +642,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_int64)
     Shape rshape{2, 3, 2};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i64, 0, true);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -670,8 +669,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_5d_max_partial)
     Shape rshape{2, 2, 3, 2, 4};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 2, true);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -734,8 +733,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_3d_max_partial)
     Shape rshape{2, 2, 2};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 2, true);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -761,8 +760,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_3d_max_one)
     Shape rshape{2, 1, 2};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 1, true);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -787,8 +786,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_3d_min_all)
     Shape rshape{2, 3, 2};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 0, false);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -814,8 +813,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_3d_min_partial)
     Shape rshape{2, 2, 2};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 2, false);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -841,8 +840,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_3d_min_one)
     Shape rshape{2, 1, 2};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 1, false);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -867,8 +866,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_2d_max_all)
     Shape rshape{4, 3};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 0, element::i32, 4, true);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -894,8 +893,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_2d_max_partial)
     Shape rshape{2, 3};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 0, element::i32, 2, true);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -921,8 +920,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_2d_max_one)
     Shape rshape{1, 3};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 0, element::i32, 1, true);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -947,8 +946,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_2d_max_one_with_equal_values)
     Shape rshape{2, 1};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 1, true);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -973,8 +972,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_2d_min_all)
     Shape rshape{4, 3};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 0, element::i32, 4, false);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -1000,8 +999,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_2d_min_partial)
     Shape rshape{2, 3};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 0, element::i32, 2, false);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -1026,8 +1025,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_2d_min_one)
     Shape rshape{1, 3};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 0, element::i32, 1, false);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto f1 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto f1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -1053,10 +1052,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_3d_large_input_max)
 
     auto B = make_shared<op::TopK>(A, 1, element::i32, 10, true);
 
-    auto interp_f_0 =
-        make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto interp_f_1 =
-        make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto interp_f_0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto interp_f_1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
     auto gpu_f_0 = ngraph::clone_function(*interp_f_0);
     auto gpu_f_1 = ngraph::clone_function(*interp_f_1);
 
@@ -1092,10 +1089,8 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_3d_large_input_min)
 
     auto B = make_shared<op::TopK>(A, 1, element::i32, 10, false);
 
-    auto interp_f_0 =
-        make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
-    auto interp_f_1 =
-        make_shared<Function>(make_shared<op::GetOutputElement>(B, 1), ParameterVector{A});
+    auto interp_f_0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
+    auto interp_f_1 = make_shared<Function>(OutputVector{B->output(1)}, ParameterVector{A});
     auto gpu_f_0 = ngraph::clone_function(*interp_f_0);
     auto gpu_f_1 = ngraph::clone_function(*interp_f_1);
 
@@ -1130,7 +1125,7 @@ NGRAPH_TEST(${BACKEND_NAME}, topk_3d_single_output)
     Shape rshape{2, 2, 2};
     auto A = make_shared<op::Parameter>(element::f32, shape);
     auto B = make_shared<op::TopK>(A, 1, element::i32, 2, false);
-    auto f0 = make_shared<Function>(make_shared<op::GetOutputElement>(B, 0), ParameterVector{A});
+    auto f0 = make_shared<Function>(OutputVector{B->output(0)}, ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
index 96b011b..8df585a 100644 (file)
@@ -26,7 +26,6 @@
 #include "ngraph/log.hpp"
 #include "ngraph/ngraph.hpp"
 #include "ngraph/op/batch_norm.hpp"
-#include "ngraph/op/get_output_element.hpp"
 #include "ngraph/op/parameter.hpp"
 #include "ngraph/pass/manager.hpp"
 #include "ngraph/pass/visualize_tree.hpp"
index de3d1a3..2b199ec 100644 (file)
@@ -378,15 +378,6 @@ namespace
         EXPECT_FALSE(op::is_binary_elementwise_logical(&node));
     }
 
-    void op_is_GetOutputElement()
-    {
-        op::GetOutputElement node;
-        EXPECT_FALSE(op::is_unary_elementwise_arithmetic(&node));
-        EXPECT_FALSE(op::is_binary_elementwise_arithmetic(&node));
-        EXPECT_FALSE(op::is_binary_elementwise_comparison(&node));
-        EXPECT_FALSE(op::is_binary_elementwise_logical(&node));
-    }
-
     void op_is_Greater()
     {
         op::Greater node;
index fad4c79..33241b4 100644 (file)
@@ -120,59 +120,3 @@ TEST(replace_node, replace_nodes)
     ASSERT_EQ(z_replacement->get_input_node_shared_ptr(0), x_replacement);
     ASSERT_EQ(z_replacement->get_input_node_shared_ptr(1), mul);
 }
-
-TEST(replace_node, replace_nodes_output_order)
-{
-    auto data = make_shared<op::Parameter>(element::f16, Shape{4, 3});
-    auto topk_v0 = make_shared<op::v0::TopK>(data, 0, element::i32, 2, true);
-
-    auto topk_v1 = make_shared<op::v1::TopK>(data,
-                                             op::Constant::create(element::i32, Shape{}, {2}),
-                                             0,
-                                             op::v1::TopK::Mode::MAX,
-                                             op::v1::TopK::SortType::SORT_VALUES,
-                                             element::i32);
-
-    auto values = make_shared<op::GetOutputElement>(topk_v1, 0);
-    auto indices = make_shared<op::GetOutputElement>(topk_v1, 1);
-
-    ASSERT_EQ(values->get_input_element_type(0), element::f16);
-    ASSERT_EQ(indices->get_input_element_type(0), element::i32);
-
-    std::vector<int64_t> output_order{1, 0};
-    replace_node(topk_v1, topk_v0, output_order);
-
-    ASSERT_EQ(values->get_input_element_type(0), element::f16);
-    ASSERT_EQ(indices->get_input_element_type(0), element::i32);
-}
-
-TEST(replace_node, replace_nodes_output_order_incorrect_size)
-{
-    auto data = make_shared<op::Parameter>(element::f16, Shape{4, 3});
-    auto topk_v0 = make_shared<op::v0::TopK>(data, 0, element::i32, 2, true);
-
-    auto topk_v1 = make_shared<op::v1::TopK>(data,
-                                             op::Constant::create(element::i32, Shape{}, {2}),
-                                             0,
-                                             op::v1::TopK::Mode::MAX,
-                                             op::v1::TopK::SortType::SORT_VALUES,
-                                             element::i32);
-
-    auto values = make_shared<op::GetOutputElement>(topk_v1, 0);
-    auto indices = make_shared<op::GetOutputElement>(topk_v1, 1);
-
-    std::vector<int64_t> output_order{2, 1, 0};
-    try
-    {
-        replace_node(topk_v1, topk_v0, output_order);
-        FAIL() << "Incorrect output order size exception not detected";
-    }
-    catch (const ngraph_error& error)
-    {
-        EXPECT_HAS_SUBSTRING(error.what(), std::string("Target output size: "));
-    }
-    catch (...)
-    {
-        FAIL() << "Incorrect output order size exception not thrown for unexpected reason";
-    }
-}
index 6658f96..27adc6e 100644 (file)
@@ -22,7 +22,6 @@
 #include "ie_tensor.hpp"
 #include "ngraph/graph_util.hpp"
 #include "ngraph/log.hpp"
-#include "ngraph/op/get_output_element.hpp"
 #include "ngraph/opsets/opset.hpp"
 #include "ngraph/util.hpp"
 
@@ -49,15 +48,7 @@ shared_ptr<runtime::Executable> runtime::ie::IE_Backend::compile(shared_ptr<Func
 bool runtime::ie::IE_Backend::is_supported(const Node& node) const
 {
     const auto& opset = get_opset1();
-    if (node.get_type_info() == op::GetOutputElement::type_info)
-    {
-        // IE currently can handle this op
-        return true;
-    }
-    else
-    {
-        return opset.contains_op_type(&node);
-    }
+    return opset.contains_op_type(&node);
 }
 
 shared_ptr<runtime::Tensor>
index 85bae07..2ba251d 100644 (file)
@@ -16,7 +16,6 @@
 
 #include "ie_executable.hpp"
 #include "ie_tensor.hpp"
-#include "ngraph/op/get_output_element.hpp"
 #include "ngraph/opsets/opset.hpp"
 #include "ngraph/pass/manager.hpp"
 #include "ngraph/shape.hpp"
@@ -102,16 +101,8 @@ runtime::ie::IE_Executable::IE_Executable(shared_ptr<Function> func, string devi
     {
         if (ie_ops.find(node->get_type_info()) == ie_ops.end())
         {
-            if (node->get_type_info() == op::GetOutputElement::type_info)
-            {
-                // IE currently can handle GetOutuputElement op;
-                continue;
-            }
-            else
-            {
-                cout << "UNSUPPORTED OP DETECTED: " << node->get_type_info().name << endl;
-                THROW_IE_EXCEPTION << "Detected op not belonging to opset1!";
-            }
+            cout << "UNSUPPORTED OP DETECTED: " << node->get_type_info().name << endl;
+            THROW_IE_EXCEPTION << "Detected op not belonging to opset1!";
         }
     }
 
index cb7abc4..114d9f2 100644 (file)
@@ -247,13 +247,6 @@ protected:
                                    avg_pool->get_include_padding_in_avg_computation());
             break;
         }
-        case OP_TYPEID::GetOutputElement:
-        {
-            size_t element_count = shape_size(node.get_output_shape(0));
-            size_t num_bytes = element_count * node.get_output_element_type(0).size();
-            std::memcpy(out[0]->get_data_ptr<T>(), args[0]->get_data_ptr<T>(), num_bytes);
-            break;
-        }
         case OP_TYPEID::BatchNormInference:
         {
             const ngraph::op::BatchNormInference* bn =
index e1c5eb5..337232a 100644 (file)
@@ -85,7 +85,6 @@ NGRAPH_OP(GRUCell, ngraph::op)
 NGRAPH_OP(Gather, ngraph::op)
 NGRAPH_OP(GatherND, ngraph::op)
 NGRAPH_OP(Gelu, ngraph::op)
-NGRAPH_OP(GetOutputElement, ngraph::op)
 NGRAPH_OP(Greater, ngraph::op)
 NGRAPH_OP(GreaterEq, ngraph::op)
 NGRAPH_OP(GroupConvolution, ngraph::op::v0)
index b24d55a..75256d4 100644 (file)
@@ -15,7 +15,6 @@
 //*****************************************************************************
 #include "fused_op_decomposition.hpp"
 #include "ngraph/graph_util.hpp"
-#include "ngraph/op/get_output_element.hpp"
 #include "ngraph/op/util/op_types.hpp"
 #include "ngraph/provenance.hpp"
 
@@ -74,25 +73,7 @@ bool pass::FusedOpDecomposition::run_on_node(shared_ptr<Node> node)
                 std::set<Input<Node>> fop_users = node->outputs().at(i).get_target_inputs();
                 for (auto fop_user : fop_users)
                 {
-                    if (auto goe = as_type<op::GetOutputElement>(fop_user.get_node()))
-                    {
-                        Output<Node> goe_output = goe->get_as_output();
-                        if (goe_output.get_index() == i &&
-                            !goe->output(0).get_target_inputs().empty())
-                        {
-                            // Replace GOE users
-                            std::set<Input<Node>> goe_users =
-                                goe->outputs().at(0).get_target_inputs();
-                            for (auto goe_user : goe_users)
-                            {
-                                goe_user.replace_source_output(output_node->output(j));
-                            }
-                        }
-                    }
-                    else
-                    {
-                        fop_user.replace_source_output(output_node->output(j));
-                    }
+                    fop_user.replace_source_output(output_node->output(j));
                 }
             }
         }
diff --git a/ngraph/test/type_prop/get_output_element.cpp b/ngraph/test/type_prop/get_output_element.cpp
deleted file mode 100644 (file)
index 5383646..0000000
+++ /dev/null
@@ -1,58 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#include "gtest/gtest.h"
-#include "ngraph/ngraph.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-TEST(type_prop, get_output_element_partial_et_dynamic)
-{
-    auto a = make_shared<op::Parameter>(element::dynamic, Shape{1, 2, 3, 4});
-    auto b = make_shared<op::Parameter>(element::dynamic, Shape{1, 2, 3, 4});
-    auto add = make_shared<op::Add>(a, b);
-    auto goe = make_shared<op::GetOutputElement>(add, 0);
-
-    ASSERT_EQ(goe->get_output_element_type(0), element::dynamic);
-    ASSERT_EQ(goe->get_output_shape(0), (Shape{1, 2, 3, 4}));
-}
-
-TEST(type_prop, get_output_element_partial_rank_dynamic)
-{
-    auto a = make_shared<op::Parameter>(element::i32, PartialShape::dynamic());
-    auto b = make_shared<op::Parameter>(element::i32, PartialShape::dynamic());
-    auto add = make_shared<op::Add>(a, b);
-    auto goe = make_shared<op::GetOutputElement>(add, 0);
-
-    ASSERT_EQ(goe->get_output_element_type(0), element::i32);
-    ASSERT_TRUE(goe->get_output_partial_shape(0).rank().is_dynamic());
-}
-
-TEST(type_prop, get_output_element_partial_rank_static_dynamic)
-{
-    auto a = make_shared<op::Parameter>(
-        element::i32, PartialShape{Dimension::dynamic(), 2, 3, Dimension::dynamic()});
-    auto b = make_shared<op::Parameter>(
-        element::i32, PartialShape{Dimension::dynamic(), 2, Dimension::dynamic(), 4});
-    auto add = make_shared<op::Add>(a, b);
-    auto goe = make_shared<op::GetOutputElement>(add, 0);
-
-    ASSERT_EQ(goe->get_output_element_type(0), element::i32);
-    ASSERT_TRUE(
-        goe->get_output_partial_shape(0).same_scheme(PartialShape{Dimension::dynamic(), 2, 3, 4}));
-}
index 788766b..e36a56f 100644 (file)
@@ -16,7 +16,6 @@
 
 #include "ie_engines.hpp"
 
-#include "ngraph/op/get_output_element.hpp"
 #include "ngraph/opsets/opset.hpp"
 #include "ngraph/pass/manager.hpp"
 #include "pass/opset1_upgrade.hpp"
@@ -182,16 +181,8 @@ std::shared_ptr<Function>
     {
         if (ie_ops.find(node->get_type_info()) == ie_ops.end())
         {
-            if (node->get_type_info() == op::GetOutputElement::type_info)
-            {
-                // IE currently can handle GetOutputElement op;
-                continue;
-            }
-            else
-            {
-                THROW_IE_EXCEPTION << "Unsupported operator detected in the graph: "
-                                   << node->get_type_info().name;
-            }
+            THROW_IE_EXCEPTION << "Unsupported operator detected in the graph: "
+                               << node->get_type_info().name;
         }
     }