Remove get_arguments (#1323)
author: Ilya Churaev <ilya.churaev@intel.com>
Fri, 17 Jul 2020 06:50:06 +0000 (09:50 +0300)
committer: GitHub <noreply@github.com>
Fri, 17 Jul 2020 06:50:06 +0000 (09:50 +0300)
* Removed get_arguments

* Fixed code style

42 files changed:
inference-engine/src/transformations/src/transformations/convert_broadcast_to_tiles.cpp
inference-engine/src/transformations/src/transformations/convert_opset1_to_legacy/convert_one_hot_to_one_hot_ie.cpp
inference-engine/src/transformations/src/transformations/convert_opset1_to_legacy/convert_strided_slice_to_crop.cpp
inference-engine/src/transformations/src/transformations/convert_opset1_to_legacy/convert_tile_to_ie_tile.cpp
inference-engine/src/vpu/common/src/ngraph/operations/static_shape_reshape.cpp
inference-engine/src/vpu/common/src/ngraph/transformations/dynamic_to_static_shape_reduce.cpp
inference-engine/src/vpu/common/src/ngraph/transformations/dynamic_to_static_shape_reshape.cpp
inference-engine/src/vpu/common/src/ngraph/transformations/dynamic_to_static_shape_transpose.cpp
ngraph/src/ngraph/node.cpp
ngraph/src/ngraph/node.hpp
ngraph/src/ngraph/op/get_output_element.cpp
ngraph/src/ngraph/op/get_output_element.hpp
ngraph/src/ngraph/op/group_conv.cpp
ngraph/src/ngraph/op/quantized_convolution.hpp
ngraph/src/ngraph/op/quantized_dot.hpp
ngraph/src/ngraph/op/util/fused_op.cpp
ngraph/src/ngraph/pass/algebraic_simplification.cpp
ngraph/src/ngraph/pass/batch_fusion.cpp
ngraph/src/ngraph/pass/concat_fusion.cpp
ngraph/src/ngraph/pass/constant_folding_quantize.cpp
ngraph/src/ngraph/pass/core_fusion.cpp
ngraph/src/ngraph/pass/fused_op_decomposition.cpp
ngraph/src/ngraph/pass/implicit_broadcast_elimination.cpp
ngraph/src/ngraph/pass/like_replacement.cpp
ngraph/src/ngraph/pass/reshape_elimination.cpp
ngraph/src/ngraph/pass/reshape_sinking.cpp
ngraph/src/ngraph/pass/zero_dim_tensor_elimination.cpp
ngraph/src/ngraph/pattern/matcher.hpp
ngraph/test/algebraic_simplification.cpp
ngraph/test/build_graph.cpp
ngraph/test/constant_folding.cpp
ngraph/test/control_dependencies.cpp
ngraph/test/copy.cpp
ngraph/test/cse.cpp
ngraph/test/dyn_elimination.cpp
ngraph/test/input_output_assign.cpp
ngraph/test/pattern.cpp
ngraph/test/reshape_elimination.cpp
ngraph/test/reshape_sinking.cpp
ngraph/test/specialize_function.cpp
ngraph/test/type_prop/binary_elementwise.cpp
ngraph/test/util/test_tools.cpp

index b303fca..5d263d6 100644 (file)
@@ -23,9 +23,9 @@ void ngraph::pass::ConvertBroadcastToTiles::convert_broadcast_to_tiles() {
             return false;
         }
 
-        auto data_node = broadcast->get_argument(0);
-        auto shape_node = std::dynamic_pointer_cast<ngraph::opset1::Constant>(broadcast->get_argument(1));
-        auto axes_node = std::dynamic_pointer_cast<ngraph::opset1::Constant>(broadcast->get_argument(2));
+        auto data_node = broadcast->input_value(0).get_node_shared_ptr();
+        auto shape_node = std::dynamic_pointer_cast<ngraph::opset1::Constant>(broadcast->input_value(1).get_node_shared_ptr());
+        auto axes_node = std::dynamic_pointer_cast<ngraph::opset1::Constant>(broadcast->input_value(2).get_node_shared_ptr());
         if (!data_node || !shape_node || !axes_node) return false;
 
         auto output_shape = shape_node->get_vector<int64_t>();
index 4119455..507491e 100644 (file)
@@ -39,7 +39,7 @@ void ngraph::pass::ConvertOneHotToOneHotIE::convert_one_hot() {
         auto on_value = std::stof(on_value_node->convert_value_to_string(0));
         auto off_value = std::stof(off_value_node->convert_value_to_string(0));
 
-        auto one_hot_ie = std::make_shared<ngraph::op::OneHotIE>(one_hot->get_argument(0),
+        auto one_hot_ie = std::make_shared<ngraph::op::OneHotIE>(one_hot->input_value(0),
                                                                  one_hot->get_axis(), depth_value, on_value, off_value, output_type);
         one_hot_ie->set_friendly_name(one_hot->get_friendly_name());
 
@@ -64,4 +64,4 @@ void ngraph::pass::ConvertOneHotToOneHotIE::convert_one_hot() {
 bool ngraph::pass::ConvertOneHotToOneHotIE::run_on_function(std::shared_ptr<ngraph::Function> f) {
     is_f16 = ngraph::op::util::has_f16_constants(f);
     return GraphRewrite::run_on_function(f);
-}
\ No newline at end of file
+}
index 96e2915..59b1593 100644 (file)
@@ -28,10 +28,10 @@ void ngraph::pass::ConvertStridedSliceToCrop::convert_strided_slice_to_crop() {
             return false;
         }
 
-        auto data_node = slice->get_argument(0);
-        auto begin_node = std::dynamic_pointer_cast<ngraph::opset1::Constant>(slice->get_argument(1));
-        auto end_node = std::dynamic_pointer_cast<ngraph::opset1::Constant>(slice->get_argument(2));
-        auto stride_node = std::dynamic_pointer_cast<ngraph::opset1::Constant>(slice->get_argument(3));
+        auto data_output = slice->input_value(0);
+        auto begin_node = std::dynamic_pointer_cast<ngraph::opset1::Constant>(slice->input_value(1).get_node_shared_ptr());
+        auto end_node = std::dynamic_pointer_cast<ngraph::opset1::Constant>(slice->input_value(2).get_node_shared_ptr());
+        auto stride_node = std::dynamic_pointer_cast<ngraph::opset1::Constant>(slice->input_value(3).get_node_shared_ptr());
 
         auto partial_input_shape = slice->get_input_partial_shape(0);
 
@@ -192,19 +192,20 @@ void ngraph::pass::ConvertStridedSliceToCrop::convert_strided_slice_to_crop() {
         if (!new_axis_mask.empty()) {
             auto new_shape = std::make_shared<ngraph::opset1::Constant>(element::i64,
                                                                     ngraph::Shape{reshape_pattern.size()}, reshape_pattern);
-            data_node = std::make_shared<ngraph::opset1::Reshape>(data_node, new_shape, true);
+            auto data_node = std::make_shared<ngraph::opset1::Reshape>(data_output, new_shape, true);
             data_node->set_friendly_name(slice->get_friendly_name() + "/Reshape_before");
             new_ops.push_back(data_node);
+            data_output = data_node->output(0);
         }
 
-        auto data_node_shape = data_node->get_output_shape(0);
+        auto data_node_shape = data_output.get_shape();
         // MKLDNN: "Crop supports only 2d, 4d and 5d blobs."
         if (data_node_shape.size() != 2 && data_node_shape.size() != 4 && data_node_shape.size() != 5) {
             return false;
         }
 
         // Crop
-        data_node = std::make_shared<ngraph::op::CropIE> (data_node, axes, dim, offset);
+        std::shared_ptr<ngraph::Node> data_node = std::make_shared<ngraph::op::CropIE> (data_output, axes, dim, offset);
         data_node->set_friendly_name(slice->get_friendly_name());
         new_ops.push_back(data_node);
 
@@ -214,7 +215,7 @@ void ngraph::pass::ConvertStridedSliceToCrop::convert_strided_slice_to_crop() {
         if (!shrink_axis_mask.empty()) {
             auto new_shape = std::make_shared<ngraph::opset1::Constant>(element::i64, ngraph::Shape{output_shape.size()},
                                                                     output_shape);
-            data_node = std::make_shared<ngraph::opset1::Reshape>(data_node, new_shape, true);
+            data_node = std::make_shared<ngraph::opset1::Reshape>(data_node->output(0), new_shape, true);
             crop_data_node->set_friendly_name(slice->get_friendly_name() + "/Crop");
             data_node->set_friendly_name(slice->get_friendly_name());
             new_ops.push_back(data_node);
index 4819944..a9863ed 100644 (file)
@@ -23,8 +23,8 @@ void ngraph::pass::ConvertTileToIETile::convert_tile() {
             return false;
         }
 
-        auto data_node = tile->get_argument(0);
-        auto tiles_node = std::dynamic_pointer_cast<ngraph::opset1::Constant> (tile->get_argument(1));
+        auto data_node = tile->input_value(0).get_node_shared_ptr();
+        auto tiles_node = std::dynamic_pointer_cast<ngraph::opset1::Constant> (tile->input_value(1).get_node_shared_ptr());
         if (!data_node || !tiles_node) return false;
 
         auto tiles = tiles_node->get_vector<int64_t>();
index 682de1c..8cf6b38 100644 (file)
@@ -16,7 +16,7 @@ StaticShapeReshape::StaticShapeReshape(const Output<Node>& arg, const Output<Nod
 }
 
 StaticShapeReshape::StaticShapeReshape(const std::shared_ptr<ngraph::opset3::Reshape>& reshape)
-    : StaticShapeReshape(reshape->get_argument(0), reshape->get_argument(1), reshape->get_special_zero()) {
+    : StaticShapeReshape(reshape->input_value(0), reshape->input_value(1), reshape->get_special_zero()) {
 }
 
 void StaticShapeReshape::validate_and_infer_types() {
index d304e7c..f36f9fa 100644 (file)
@@ -26,11 +26,11 @@ void dynamicToStaticShapeReduce(std::shared_ptr<ngraph::Node> target) {
                      target->get_friendly_name(), target->get_type_info());
 
 
-    const auto axes_const_node = ngraph::as_type_ptr<ngraph::opset3::Constant>(target->get_argument(1));
+    const auto axes_const_node = ngraph::as_type_ptr<ngraph::opset3::Constant>(target->input_value(1).get_node_shared_ptr());
     VPU_THROW_UNLESS(axes_const_node,
                      "dynamicToStaticShapeReduce transformation for {} of type {} expects {} as input with index {}, but it has {} node of type {} instead",
                      target->get_friendly_name(), target->get_type_info(), ngraph::opset3::Constant::type_info, 1,
-                     target->get_argument(1)->get_friendly_name(), target->get_argument(1)->get_type_info());
+                     target->input_value(1).get_node_shared_ptr()->get_friendly_name(), target->input_value(1).get_node_shared_ptr()->get_type_info());
 
     const auto axes = axes_const_node->cast_vector<int64_t>();
 
index e6255b2..5c0fe8a 100644 (file)
 namespace vpu {
 
 void dynamicToStaticShapeReshape(std::shared_ptr<ngraph::Node> target) {
-    const auto dsr = target->get_argument(0);
+    const auto dsr = target->input_value(0).get_node_shared_ptr();
     VPU_THROW_UNLESS(ngraph::as_type_ptr<ngraph::vpu::op::DynamicShapeResolver>(dsr),
                      "DynamicToStaticShape transformation for {} of type {} expects {} as input with index {}",
                      target->get_friendly_name(), target->get_type_info(), ngraph::vpu::op::DynamicShapeResolver::type_info, 0);
 
     const auto reshape = std::dynamic_pointer_cast<ngraph::opset3::Reshape>(target);
-    const auto outShapeDescriptor = reshape->get_argument(1);
+    const auto outShapeDescriptor = reshape->input_value(1).get_node_shared_ptr();
 
     const auto replacement = ngraph::as_type_ptr<ngraph::opset3::Constant>(outShapeDescriptor)
         ? reshape->clone_with_new_inputs(reshape->input_values())
index f45a441..f1b9266 100644 (file)
 namespace vpu {
 
 void dynamicToStaticShapeTranspose(std::shared_ptr<ngraph::Node> target) {
-    const auto dsr = target->get_argument(0);
+    const auto dsr = target->input_value(0).get_node_shared_ptr();
     VPU_THROW_UNLESS(ngraph::as_type_ptr<ngraph::vpu::op::DynamicShapeResolver>(dsr),
         "DynamicToStaticShape transformation for {} of type {} expects {} as input with index {}",
         target->get_friendly_name(), target->get_type_info(), ngraph::vpu::op::DynamicShapeResolver::type_info, 0);
 
-    const auto transposition = target->get_argument(1);
+    const auto transposition = target->input_value(1).get_node_shared_ptr();
     VPU_THROW_UNLESS(ngraph::as_type_ptr<ngraph::opset3::Constant>(transposition),
         "DynamicToStaticShape transformation for {] of type {} expects {} as input with index {}",
         target->get_friendly_name(), target->get_type_info(), ngraph::opset3::Constant::type_info, 1);
 
     const auto transpose = std::dynamic_pointer_cast<ngraph::opset3::Transpose>(target);
-    const auto copied = transpose->copy_with_new_args(target->get_arguments());
+    const auto copied = transpose->clone_with_new_inputs(target->input_values());
     const auto shape = dsr->input(1).get_source_output();
 
     const auto axis = std::make_shared<ngraph::opset3::Constant>(
index 62b1036..f46159b 100644 (file)
@@ -493,13 +493,6 @@ void Node::transfer_provenance_tags(const shared_ptr<Node>& replacement)
     traverse_nodes({replacement}, set_prov_new_nodes, common_args);
 }
 
-std::shared_ptr<Node> Node::get_argument(size_t index) const
-{
-    NGRAPH_CHECK(
-        index < m_inputs.size(), "index '", index, "' out of range in get_argument(size_t index)");
-    return input_value(index).as_single_output_node();
-}
-
 Node* Node::get_input_node_ptr(size_t index) const
 {
     NGRAPH_CHECK(
@@ -519,18 +512,6 @@ Output<Node> Node::get_input_source_output(size_t i) const
     return input(i).get_source_output();
 }
 
-NodeVector Node::get_arguments() const
-{
-    NodeVector result;
-    for (size_t i = 0; i < get_input_size(); ++i)
-    {
-        {
-            result.push_back(get_argument(i));
-        }
-    }
-    return result;
-}
-
 const std::vector<std::shared_ptr<Node>>& Node::get_control_dependencies() const
 {
     return m_control_dependencies;
@@ -720,13 +701,6 @@ shared_ptr<descriptor::Tensor> Node::get_output_tensor_ptr() const
     return m_outputs[0].get_tensor_ptr();
 }
 
-const std::vector<descriptor::Input*>& Node::get_output_inputs(size_t i) const
-{
-    NGRAPH_CHECK(
-        i < m_outputs.size(), "index '", i, "' out of range in get_output_inputs(size_t i)");
-    return m_outputs[i].get_inputs();
-}
-
 std::set<Input<Node>> Node::get_output_target_inputs(size_t i) const
 {
     std::set<Input<Node>> result;
index 6db50d8..9f1a9aa 100644 (file)
@@ -394,10 +394,6 @@ namespace ngraph
             "use &node->output(i).get_tensor() instead; insert a check that the node has only one "
             "output, or update calling code not to assume only one output");
 
-        /// Returns the set of inputs using output i
-        const std::vector<descriptor::Input*>& get_output_inputs(size_t i) const
-            NGRAPH_DEPRECATED("use node->output(i).get_target_inputs() instead");
-
         std::set<Input<Node>> get_output_target_inputs(size_t i) const;
 
         /// Returns the number of inputs for the op
@@ -421,10 +417,6 @@ namespace ngraph
         std::unordered_set<descriptor::Tensor*> liveness_new_list;
         std::unordered_set<descriptor::Tensor*> liveness_free_list;
 
-        virtual NodeVector get_arguments() const NGRAPH_DEPRECATED("Use input_values().");
-        std::shared_ptr<Node> get_argument(size_t index) const
-            NGRAPH_DEPRECATED("use input_value(i).");
-
         Node* get_input_node_ptr(size_t index) const;
         std::shared_ptr<Node> get_input_node_shared_ptr(size_t index) const;
         Output<Node> get_input_source_output(size_t i) const;
@@ -663,11 +655,11 @@ namespace ngraph
     void check_new_args_count(const Node* node, T new_args)
     {
         NODE_VALIDATION_CHECK(node,
-                              new_args.size() == node->get_arguments().size(),
+                              new_args.size() == node->input_values().size(),
                               "copy_with_new_args() expected ",
-                              node->get_arguments().size(),
+                              node->input_values().size(),
                               " argument",
-                              (node->get_arguments().size() == 1 ? "" : "s"),
+                              (node->input_values().size() == 1 ? "" : "s"),
                               " but got ",
                               new_args.size());
     }
index b0dea14..a89e425 100644 (file)
@@ -54,11 +54,6 @@ Output<Node> op::GetOutputElement::get_as_output() const
     return input_value(0);
 }
 
-NodeVector op::GetOutputElement::get_arguments() const
-{
-    return NodeVector{input_value(0).get_node_shared_ptr()};
-}
-
 NodeVector op::get_output_elements(const shared_ptr<Node>& mon)
 {
     NodeVector goes(mon->get_output_size());
index 21498ac..52eb580 100644 (file)
@@ -49,8 +49,6 @@ namespace ngraph
 
                 /// \return The index of the tuple element to get.
                 size_t get_n() const { return m_n; }
-                NodeVector get_arguments() const override;
-
             protected:
                 size_t m_n;
             };
index bbc2f99..715ecd2 100644 (file)
@@ -518,7 +518,7 @@ NodeVector op::v1::GroupConvolutionBackpropData::decompose_op() const
 
     for (auto i = 0; i < groups; ++i)
     {
-        if (get_arguments().size() == 3)
+        if (input_values().size() == 3)
         {
             conv_groups.push_back(
                 std::make_shared<op::v1::ConvolutionBackpropData>(sliced_data[i],
index 0ebef18..486cbf1 100644 (file)
@@ -80,8 +80,11 @@ namespace ngraph
                 const CoordinateDiff& get_padding_below() const { return m_padding_below; }
                 const CoordinateDiff& get_padding_above() const { return m_padding_above; }
                 const Strides& get_data_dilation_strides() const { return m_data_dilation_strides; }
-                std::shared_ptr<Node> get_filters() { return get_argument(1); }
-                std::shared_ptr<Node> get_data_batch() { return get_argument(0); }
+                std::shared_ptr<Node> get_filters() { return input_value(1).get_node_shared_ptr(); }
+                std::shared_ptr<Node> get_data_batch()
+                {
+                    return input_value(0).get_node_shared_ptr();
+                }
                 const ngraph::element::Type& get_output_type() const { return m_output_type; }
                 const ngraph::AxisSet& get_input_axes() const { return m_input_axes; }
                 const ngraph::AxisSet& get_filter_axes() const { return m_filter_axes; }
index 0cde52f..b04f1b0 100644 (file)
@@ -58,8 +58,8 @@ namespace ngraph
                              const AxisSet& input1_axes = ngraph::AxisSet{},
                              const AxisSet& output_axes = ngraph::AxisSet{});
 
-                std::shared_ptr<Node> get_input0() { return get_argument(0); }
-                std::shared_ptr<Node> get_input1() { return get_argument(1); }
+                std::shared_ptr<Node> get_input0() { return input_value(0).get_node_shared_ptr(); }
+                std::shared_ptr<Node> get_input1() { return input_value(1).get_node_shared_ptr(); }
                 const ngraph::element::Type& get_output_type() const { return m_output_type; }
                 const ngraph::AxisSet& get_input0_axes() const { return m_input0_axes; }
                 const ngraph::AxisSet& get_input1_axes() const { return m_input1_axes; }
index 31ccd7b..94743a3 100644 (file)
@@ -40,7 +40,10 @@ void op::util::FusedOp::validate_and_infer_types()
     }
 
     auto subgraph_outputs = decompose_op();
-    auto subgraph = extract_subgraph(subgraph_outputs, get_arguments());
+    NodeVector nodes;
+    for (auto& val : input_values())
+        nodes.emplace_back(val.get_node_shared_ptr());
+    auto subgraph = extract_subgraph(subgraph_outputs, nodes);
     validate_nodes_and_infer_types(subgraph);
 
     size_t i = 0;
index 26062f7..c7c6593 100644 (file)
@@ -662,7 +662,8 @@ static bool simplify_reduction(shared_ptr<Node> n)
     auto cnst = as_type_ptr<op::Constant>(broadcast->input_value(0).get_node_shared_ptr());
     if (!cnst || cnst->get_shape().size() > 0 /*not a scalar*/)
     {
-        NGRAPH_DEBUG << broadcast->get_argument(0)->get_name() << " isn't a scalar constant";
+        NGRAPH_DEBUG << broadcast->input_value(0).get_node_shared_ptr()->get_name()
+                     << " isn't a scalar constant";
         return false;
     }
 
index e7510a3..02dd36c 100644 (file)
@@ -100,8 +100,9 @@ std::shared_ptr<Node> fuse_group_convolution(const std::shared_ptr<Node>& n)
         return {nullptr};
     }
 
-    for (auto arg : n->get_arguments())
+    for (auto val : n->input_values())
     {
+        auto arg = val.get_node_shared_ptr();
         if (!matcher->match(arg))
         {
             NGRAPH_DEBUG << arg->get_name() << " doesn't match";
@@ -160,7 +161,7 @@ std::shared_ptr<Node> fuse_group_convolution(const std::shared_ptr<Node>& n)
                                                            sconv->get_padding_below(),
                                                            sconv->get_padding_above(),
                                                            sconv->get_data_dilation_strides(),
-                                                           n->get_arguments().size());
+                                                           n->input_values().size());
 
     return move(new_conv);
 }
index 1473deb..2fdbd6e 100644 (file)
@@ -37,8 +37,10 @@ namespace
 {
     bool check_self_concat_op(const std::shared_ptr<Node>& op)
     {
-        auto input_args = op->get_arguments();
-        std::set<std::shared_ptr<Node>> input_args_set(input_args.begin(), input_args.end());
+        auto input_vals = op->input_values();
+        std::set<std::shared_ptr<Node>> input_args_set;
+        for (auto val : input_vals)
+            input_args_set.emplace(val.get_node_shared_ptr());
         return (input_args_set.size() == 1);
     }
 
@@ -213,7 +215,7 @@ void ngraph::pass::SelfConcatFusion::update_concat_pattern_vectors(
     for (auto& concat_pattern_vec : this->m_concat_pattern_vectors)
     {
         auto last_op_in_pattern_vec = concat_pattern_vec.back();
-        if ((concat_op->get_argument(0) == last_op_in_pattern_vec) &&
+        if ((concat_op->input_value(0).get_node_shared_ptr() == last_op_in_pattern_vec) &&
             (check_concat_has_no_fan_out(last_op_in_pattern_vec)))
         {
             concat_pattern_vec.push_back(concat_op);
@@ -264,7 +266,7 @@ bool ngraph::pass::SelfConcatFusion::replace_patterns(const NodeVector& bounded_
     auto concat_axis_vector = get_concatenation_axis_vector(bounded_concat_ops);
 
     auto& first_bounded_concat = (*bounded_concat_ops.begin());
-    auto driver_op = first_bounded_concat->get_argument(0);
+    auto driver_op = first_bounded_concat->input_value(0);
     const Shape& input_shape = first_bounded_concat->get_input_shape(0);
 
     auto scalarized_shape = scalarize_dim(concat_axis_vector, input_shape);
index 27117ee..5c00107 100644 (file)
@@ -66,7 +66,6 @@ void pass::ConstantFolding::construct_constant_quantize()
 
         NGRAPH_CHECK(revalidate_and_ensure_static(quantize_op));
 
-        auto args = quant_match->get_arguments();
         auto scale = static_pointer_cast<op::Constant>(quant_match->get_input_node_shared_ptr(1));
         auto offset = static_pointer_cast<op::Constant>(quant_match->get_input_node_shared_ptr(2));
 
index 7cb6a09..272372a 100644 (file)
@@ -205,7 +205,8 @@ void pass::CoreFusion::construct_folded_batch_norm()
         auto pattern_map = m.get_pattern_map();
 
         auto m_bn = static_pointer_cast<op::BatchNormInference>(m.get_match_root());
-        auto m_conv = static_pointer_cast<op::Convolution>(m_bn->get_argument(2));
+        auto m_conv =
+            static_pointer_cast<op::Convolution>(m_bn->input_value(2).get_node_shared_ptr());
 
         if (m_conv->get_users().size() > 1)
         {
@@ -325,13 +326,13 @@ void pass::CoreFusion::construct_conv_affine_folding()
         auto get_bcast_input = [](const shared_ptr<op::Broadcast>& bcast) {
             if (bcast->get_input_shape(0).size() == 1)
             {
-                return bcast->get_argument(0);
+                return bcast->input_value(0).get_node_shared_ptr();
             }
             if (bcast->get_input_shape(0).size() == 2)
             {
                 Shape bshape{bcast->get_input_shape(0)[1]};
                 return static_pointer_cast<Node>(
-                    make_shared<op::Reshape>(bcast->get_argument(0), AxisVector{0, 1}, bshape));
+                    make_shared<op::Reshape>(bcast->input_value(0), AxisVector{0, 1}, bshape));
             }
             throw ngraph_error("Unexpected shape for bcast input");
         };
@@ -392,10 +393,8 @@ static shared_ptr<Node> reduce_broadcast(shared_ptr<Node> broadcast)
     Shape shape_w1{matched_broadcast_w1->get_shape()};
     shape_w1[H] /= 2;
     shape_w1[W] /= 2;
-    auto new_broadcast_w1 =
-        std::make_shared<op::Broadcast>(matched_broadcast_w1->get_argument(0),
-                                        shape_w1,
-                                        matched_broadcast_w1->get_broadcast_axes());
+    auto new_broadcast_w1 = std::make_shared<op::Broadcast>(
+        matched_broadcast_w1->input_value(0), shape_w1, matched_broadcast_w1->get_broadcast_axes());
     return move(new_broadcast_w1);
 }
 
@@ -435,7 +434,8 @@ void pass::CoreFusion::construct_reshape_broadcast()
 
         auto pattern_map = m.get_pattern_map();
         auto broadcast_m = static_pointer_cast<op::Broadcast>(m.get_match_root());
-        auto reshape1_m = static_pointer_cast<op::Reshape>(broadcast_m->get_argument(0));
+        auto reshape1_m =
+            static_pointer_cast<op::Reshape>(broadcast_m->input_value(0).get_node_shared_ptr());
         auto input_m = m.get_pattern_value_map()[input];
 
         // it doesn't seem to make sense to support shapes : [0] or [1]
@@ -507,8 +507,10 @@ void pass::CoreFusion::construct_reshape_softmax_reshape()
 
         auto pattern_map = m.get_pattern_map();
         auto reshape2_m = static_pointer_cast<op::Reshape>(m.get_match_root());
-        auto softmax_m = static_pointer_cast<op::Softmax>(reshape2_m->get_argument(0));
-        auto reshape1_m = static_pointer_cast<op::Reshape>(softmax_m->get_argument(0));
+        auto softmax_m =
+            static_pointer_cast<op::Softmax>(reshape2_m->input_value(0).get_node_shared_ptr());
+        auto reshape1_m =
+            static_pointer_cast<op::Reshape>(softmax_m->input_value(0).get_node_shared_ptr());
         auto input_m = m.get_pattern_map()[input];
 
         if (!reshape2_m->get_is_transpose() || !reshape1_m->get_is_transpose())
@@ -794,11 +796,13 @@ void pass::CoreFusion::construct_conv_bias()
                      << m.get_match_root()->get_name();
         auto pattern_map = m.get_pattern_map();
 
-        auto conv_m = as_type_ptr<op::Convolution>(m.get_match_root()->get_argument(0));
+        auto conv_m =
+            as_type_ptr<op::Convolution>(m.get_match_root()->input_value(0).get_node_shared_ptr());
 
         if (conv_m == nullptr)
         {
-            conv_m = static_pointer_cast<op::Convolution>(m.get_match_root()->get_argument(1));
+            conv_m = static_pointer_cast<op::Convolution>(
+                m.get_match_root()->input_value(1).get_node_shared_ptr());
         }
 
         if (conv_m->get_shape().size() > 5 || conv_m->get_element_type() != element::f32)
@@ -819,7 +823,7 @@ void pass::CoreFusion::construct_conv_bias()
             }
         }
 
-        auto bias = bcast_m->get_argument(0);
+        auto bias = bcast_m->input_value(0).get_node_shared_ptr();
         if (bias->get_shape().size() > 1)
         {
             NGRAPH_DEBUG << "mpattern = " << m.get_match_root()->get_name()
@@ -866,13 +870,14 @@ void pass::CoreFusion::construct_conv_bias_add()
 
         auto add_m = m.get_match_root();
         auto pattern_map = m.get_pattern_map();
-        auto conv_m = as_type_ptr<op::ConvolutionBias>(add_m->get_argument(1));
-        auto add_input_m = add_m->get_argument(0);
+        auto conv_m = as_type_ptr<op::ConvolutionBias>(add_m->input_value(1).get_node_shared_ptr());
+        auto add_input_m = add_m->input_value(0).get_node_shared_ptr();
 
         if (!conv_m)
         {
-            conv_m = static_pointer_cast<op::ConvolutionBias>(add_m->get_argument(0));
-            add_input_m = add_m->get_argument(1);
+            conv_m = static_pointer_cast<op::ConvolutionBias>(
+                add_m->input_value(0).get_node_shared_ptr());
+            add_input_m = add_m->input_value(1).get_node_shared_ptr();
         }
 
         if (get_user_count(conv_m.get()) > 1)
index 1ca879c..feababd 100644 (file)
@@ -55,7 +55,10 @@ bool pass::FusedOpDecomposition::run_on_node(shared_ptr<Node> node)
         }
 
         // Run recursively until no more fused ops
-        auto subgraph = extract_subgraph(subgraph_outputs, node->get_arguments());
+        NodeVector nodes;
+        for (auto& val : node->input_values())
+            nodes.emplace_back(val.get_node_shared_ptr());
+        auto subgraph = extract_subgraph(subgraph_outputs, nodes);
         for (auto subgraph_node : subgraph)
         {
             run_on_node(subgraph_node);
@@ -73,7 +76,8 @@ bool pass::FusedOpDecomposition::run_on_node(shared_ptr<Node> node)
                     if (auto goe = as_type<op::GetOutputElement>(fop_user->get_raw_pointer_node()))
                     {
                         Output<Node> goe_output = goe->get_as_output();
-                        if (goe_output.get_index() == i && !goe->get_output_inputs(0).empty())
+                        if (goe_output.get_index() == i &&
+                            !goe->output(0).get_target_inputs().empty())
                         {
                             // Replace GOE users
                             set<descriptor::Input*> goe_users{
index 62fee8f..15bf33e 100644 (file)
@@ -50,7 +50,8 @@ NodeVector ngraph::pass::explicit_broadcast(std::shared_ptr<Node>& node)
         auto autob = node->get_autob();
         if (autob.m_type == op::AutoBroadcastType::NONE)
         {
-            rc = node->get_arguments();
+            for (auto& val : node->input_values())
+                rc.emplace_back(val.get_node_shared_ptr());
         }
         else if (autob.m_type == op::AutoBroadcastType::NUMPY)
         {
index 692e3f8..c7362b6 100644 (file)
@@ -39,7 +39,7 @@ static bool replace_broadcast_like(const std::shared_ptr<ngraph::Node>& node)
     // argument
     auto broadcast_like = as_type_ptr<op::BroadcastLike>(node);
     replace_node(node,
-                 make_shared<op::Broadcast>(broadcast_like->get_argument(0),
+                 make_shared<op::Broadcast>(broadcast_like->input_value(0),
                                             broadcast_like->get_broadcast_shape(),
                                             broadcast_like->get_broadcast_axes()));
     return true;
index a077cd7..d54b5dc 100644 (file)
@@ -90,7 +90,7 @@ void pass::ReshapeElimination::construct_reshapex2_pattern()
         auto gop = pattern_map[op];
 
         auto r2 = static_pointer_cast<op::Reshape>(m.get_match_root());
-        auto r1 = static_pointer_cast<op::Reshape>(r2->get_argument(0));
+        auto r1 = static_pointer_cast<op::Reshape>(r2->input_value(0).get_node_shared_ptr());
 
         if (gop->get_shape() != m.get_match_root()->get_shape())
         {
@@ -167,14 +167,14 @@ void pass::ReshapeElimination::construct_dot_transpose_pattern()
             return false;
         }
 
-        auto mdot = mtranspose->get_argument(0);
+        auto mdot = mtranspose->input_value(0).get_node_shared_ptr();
         if (mdot->get_shape().size() != 2)
         {
             NGRAPH_DEBUG << "Dot has the wrong shape. " << vector_to_string(mdot->get_shape());
             return false;
         }
 
-        auto arg0 = mdot->get_argument(0);
+        auto arg0 = mdot->input_value(0).get_node_shared_ptr();
         if (arg0->get_shape().size() != 2)
         {
             NGRAPH_DEBUG << "Arg0 has the wrong shape. " << vector_to_string(arg0->get_shape());
@@ -183,7 +183,7 @@ void pass::ReshapeElimination::construct_dot_transpose_pattern()
         auto reshape0_shape = Shape{arg0->get_shape().at(1), arg0->get_shape().at(0)};
         auto reshape0 = make_shared<op::Reshape>(arg0, AxisVector{1, 0}, reshape0_shape);
 
-        auto arg1 = mdot->get_argument(1);
+        auto arg1 = mdot->input_value(1).get_node_shared_ptr();
         if (arg1->get_shape().size() != 2)
         {
             NGRAPH_DEBUG << "Arg1 has the wrong shape. " << vector_to_string(arg1->get_shape());
@@ -213,7 +213,7 @@ void pass::RecurrentReshapeElimination::construct_recurrent_reshape()
 
     auto callback = [op, reshape_label](pattern::RecurrentMatcher& m) {
         NGRAPH_DEBUG << "In callback for construct_recurrent_reshape against node = "
-                     << reshape_label->get_argument(0)->get_name();
+                     << reshape_label->input_value(0).get_node_shared_ptr()->get_name();
         auto reshape_node_vector = m.get_bound_nodes_for_pattern(reshape_label);
 
         // The bound node vector is in reverse order. It is convenient to have the
@@ -221,7 +221,7 @@ void pass::RecurrentReshapeElimination::construct_recurrent_reshape()
         std::reverse(std::begin(reshape_node_vector), std::end(reshape_node_vector));
 
         auto first_bound_reshape_op = reshape_node_vector.front();
-        auto driver_op = first_bound_reshape_op->get_argument(0);
+        auto driver_op = first_bound_reshape_op->input_value(0);
         auto last_bound_reshape_op = reshape_node_vector.back();
 
         // Need to check if the user of the last bound op is a reshape since the last reshape is
@@ -282,7 +282,7 @@ void pass::RecurrentReshapeElimination::construct_recurrent_reshape()
             }
 
             auto first_reshape = as_type_ptr<op::Reshape>(sub_pattern.front());
-            auto input_to_first_reshape = first_reshape->get_argument(0);
+            auto input_to_first_reshape = first_reshape->input_value(0);
             auto last_reshape = as_type_ptr<op::Reshape>(sub_pattern.back());
 
             auto new_input_order = first_reshape->get_input_order();
index 45125c2..1eda989 100644 (file)
@@ -51,7 +51,7 @@ static string describe_reshape(shared_ptr<Node> node)
     ss << reshape->get_name()
        << " ( axis order = " << ngraph::vector_to_string(reshape->get_input_order())
        << " , shape = " << vector_to_string(reshape->get_shape()) << " ) "
-       << " , child = " << reshape->get_argument(0)->get_name();
+       << " , child = " << reshape->input_value(0).get_node_shared_ptr()->get_name();
 
     return ss.str();
 }
@@ -86,7 +86,8 @@ static shared_ptr<op::Reshape> combine_reshapes(shared_ptr<op::Reshape> r1,
     auto default_order = ngraph::get_default_order(r1->get_shape());
     auto perm_r1 = apply_permutation(default_order, r1->get_input_order());
     auto perm_r2 = apply_permutation(perm_r1, r2->get_input_order());
-    auto rreshape = make_reshape(r2->get_argument(0), perm_r2, r2->get_shape());
+    auto rreshape =
+        make_reshape(r2->input_value(0).get_node_shared_ptr(), perm_r2, r2->get_shape());
     NGRAPH_DEBUG << "Combining " << describe_reshape(r1) << " and " << describe_reshape(r2)
                  << " into " << describe_reshape(rreshape);
     return rreshape;
@@ -109,7 +110,7 @@ static void delete_reshape(shared_ptr<Node> reshape)
     NGRAPH_DEBUG << "Removing reshape " << reshape->get_name();
     if (!reshape->get_users().empty())
     {
-        ngraph::replace_node(reshape, reshape->get_argument(0));
+        ngraph::replace_node(reshape, reshape->input_value(0).get_node_shared_ptr());
     }
 }
 
@@ -185,7 +186,7 @@ void swim(Input<Node> input, shared_ptr<op::Reshape> reshape)
             Swimmer nsw{n->input(0), csw.reshape};
             work_queue.push_back(nsw);
             NGRAPH_DEBUG << "Propagating reshape " << describe_reshape(csw.reshape) << " for "
-                         << n->get_name() << " to " << n->get_argument(0);
+                         << n->get_name() << " to " << n->input_value(0).get_node_shared_ptr();
         }
         else if (is_type<op::Broadcast>(n))
         {
@@ -218,7 +219,7 @@ void swim(Input<Node> input, shared_ptr<op::Reshape> reshape)
                 }
             }
 
-            auto broadcast_input = old_broadcast->get_argument(0);
+            auto broadcast_input = old_broadcast->input_value(0).get_node_shared_ptr();
             if (!in_order)
             {
                 AxisVector new_source_axes_sorted{new_source_axes};
@@ -289,10 +290,10 @@ static void materialize_shapes(shared_ptr<Node> n,
         return;
     }
 
-    for (size_t i = 0; i < n->get_arguments().size(); i++)
+    for (size_t i = 0; i < n->input_values().size(); i++)
     {
         // materialize all pending reshapes, flush pending reshapes
-        auto arg = n->get_argument(i);
+        auto arg = n->input_value(i).get_node_shared_ptr();
         if (reorders.count(arg) != 0)
         {
             auto arg_reshape = reorders.at(arg);
@@ -316,7 +317,7 @@ static void sink_reshape(shared_ptr<op::Reshape> reshape,
                          set<shared_ptr<Node>>& reshapes_to_delete)
 {
     NGRAPH_DEBUG << "Sinking Reshape :" << describe_reshape(reshape);
-    auto orig_reshape = reorders.at(reshape->get_argument(0));
+    auto orig_reshape = reorders.at(reshape->input_value(0).get_node_shared_ptr());
     // 1) Not a Transpose or 2) Rank changing operation.
     if ((reshape->get_output_shape(0).size() != reshape->get_input_order().size()) ||
         (!reshape->get_is_transpose()))
@@ -345,7 +346,7 @@ static void sink_unary(shared_ptr<Node> n,
                        ReshapeMap& reorders,
                        set<shared_ptr<Node>>& /* reshapes_to_delete */)
 {
-    auto arg_reshape = read_reshapemap(reorders, n->get_argument(0));
+    auto arg_reshape = read_reshapemap(reorders, n->input_value(0).get_node_shared_ptr());
     NGRAPH_DEBUG << "Propagating " << describe_reshape(arg_reshape) << " for " << n->get_name();
     write_reshapemap(reorders, n, arg_reshape);
 }
@@ -354,8 +355,8 @@ static void sink_binary(shared_ptr<Node> binary,
                         ReshapeMap& reorders,
                         set<shared_ptr<Node>>& reshapes_to_delete)
 {
-    auto left = binary->get_argument(0);
-    auto right = binary->get_argument(1);
+    auto left = binary->input_value(0).get_node_shared_ptr();
+    auto right = binary->input_value(1).get_node_shared_ptr();
 
     if (reorders.at(left)->get_input_order() == reorders.at(right)->get_input_order())
     {
@@ -392,7 +393,7 @@ static void sink_slice(shared_ptr<op::Slice> n,
                        ReshapeMap& reorders,
                        set<shared_ptr<Node>>& /* reshapes_to_delete */)
 {
-    auto arg_reshape = reorders.at(n->get_argument(0));
+    auto arg_reshape = reorders.at(n->input_value(0).get_node_shared_ptr());
     auto order = arg_reshape->get_input_order();
 
     // we need the correct input shape to produce the right output shape
@@ -407,7 +408,7 @@ static void sink_slice(shared_ptr<op::Slice> n,
     auto new_upper = ngraph::apply_permutation(n->get_upper_bounds(), def_order);
     auto new_strides = ngraph::apply_permutation(n->get_strides(), def_order);
     auto new_slice = make_shared<op::Slice>(dummy_correct_shape, new_lower, new_upper, new_strides);
-    ngraph::replace_node(dummy_correct_shape, n->get_argument(0));
+    ngraph::replace_node(dummy_correct_shape, n->input_value(0).get_node_shared_ptr());
     NGRAPH_DEBUG << "Replacing " << n->get_name() << " with " << new_slice->get_name();
     ngraph::replace_node(n, new_slice);
 
@@ -420,7 +421,7 @@ static void sink_pad(shared_ptr<op::Pad> n,
                      ReshapeMap& reorders,
                      set<shared_ptr<Node>>& /* reshapes_to_delete */)
 {
-    auto arg_reshape = reorders.at(n->get_argument(0));
+    auto arg_reshape = reorders.at(n->input_value(0).get_node_shared_ptr());
     auto order = arg_reshape->get_input_order();
     // we need the correct input shape to produce the right output shape
     // we are going to create a label of the right input shape,
@@ -432,9 +433,12 @@ static void sink_pad(shared_ptr<op::Pad> n,
 
     auto new_lower = ngraph::apply_permutation(n->get_padding_below(), def_order);
     auto new_upper = ngraph::apply_permutation(n->get_padding_above(), def_order);
-    auto new_pad = make_shared<op::Pad>(
-        dummy_correct_shape, n->get_argument(1), new_lower, new_upper, n->get_pad_mode());
-    ngraph::replace_node(dummy_correct_shape, n->get_argument(0));
+    auto new_pad = make_shared<op::Pad>(dummy_correct_shape,
+                                        n->input_value(1).get_node_shared_ptr(),
+                                        new_lower,
+                                        new_upper,
+                                        n->get_pad_mode());
+    ngraph::replace_node(dummy_correct_shape, n->input_value(0).get_node_shared_ptr());
     NGRAPH_DEBUG << "Replacing " << n->get_name() << " with " << new_pad->get_name();
     ngraph::replace_node(n, new_pad);
     auto new_reshape = make_reshape(new_pad, order, n->get_shape());
@@ -445,12 +449,12 @@ static void sink_quantize(shared_ptr<op::Quantize> quantize,
                           ReshapeMap& reorders,
                           set<shared_ptr<Node>>& /* reshapes_to_delete */)
 {
-    auto arg_reshape = reorders.at(quantize->get_argument(0));
+    auto arg_reshape = reorders.at(quantize->input_value(0).get_node_shared_ptr());
     AxisSet axes_in_def_order =
         get_quantization_axes_in_default_order(arg_reshape, quantize->get_axes());
-    auto new_quantize = make_shared<op::Quantize>(quantize->get_argument(0),
-                                                  quantize->get_argument(1),
-                                                  quantize->get_argument(2),
+    auto new_quantize = make_shared<op::Quantize>(quantize->input_value(0),
+                                                  quantize->input_value(1),
+                                                  quantize->input_value(2),
                                                   quantize->get_element_type(),
                                                   axes_in_def_order,
                                                   quantize->get_round_mode());
@@ -463,7 +467,7 @@ static void sink_concat(shared_ptr<op::Concat> n,
                         ReshapeMap& reorders,
                         set<shared_ptr<Node>>& reshapes_to_delete)
 {
-    auto arg_reshape = reorders.at(n->get_argument(0));
+    auto arg_reshape = reorders.at(n->input_value(0).get_node_shared_ptr());
     auto order = arg_reshape->get_input_order();
     // we need the correct input shape to produce the right output shape
     // we are going to create a label of the right input shape,
@@ -478,7 +482,7 @@ static void sink_concat(shared_ptr<op::Concat> n,
 
     for (size_t i = 1; i < n->get_input_size(); i++)
     {
-        auto iarg_reshape = reorders.at(n->get_argument(i));
+        auto iarg_reshape = reorders.at(n->input_value(i).get_node_shared_ptr());
         auto iorder = iarg_reshape->get_input_order();
         if (iorder != order)
         {
@@ -498,7 +502,7 @@ static void sink_concat(shared_ptr<op::Concat> n,
     // put back the original arguments
     for (size_t i = 0; i < new_concat->get_input_size(); i++)
     {
-        ngraph::replace_node(new_args.at(i), n->get_argument(i));
+        ngraph::replace_node(new_args.at(i), n->input_value(i).get_node_shared_ptr());
     }
     NGRAPH_DEBUG << "Replacing " << n->get_name() << " with " << new_concat->get_name();
     ngraph::replace_node(n, new_concat);
@@ -512,12 +516,12 @@ static void sink_dequantize(shared_ptr<op::Dequantize> dequantize,
                             ReshapeMap& reorders,
                             set<shared_ptr<Node>>& /* reshapes_to_delete */)
 {
-    auto arg_reshape = reorders.at(dequantize->get_argument(0));
+    auto arg_reshape = reorders.at(dequantize->input_value(0).get_node_shared_ptr());
     AxisSet axes_in_def_order =
         get_quantization_axes_in_default_order(arg_reshape, dequantize->get_axes());
-    auto new_dequantize = make_shared<op::Dequantize>(dequantize->get_argument(0),
-                                                      dequantize->get_argument(1),
-                                                      dequantize->get_argument(2),
+    auto new_dequantize = make_shared<op::Dequantize>(dequantize->input_value(0),
+                                                      dequantize->input_value(1),
+                                                      dequantize->input_value(2),
                                                       dequantize->get_element_type(),
                                                       axes_in_def_order);
 
@@ -586,7 +590,7 @@ bool ngraph::pass::ReshapeSinking::run_on_function(shared_ptr<ngraph::Function>
             // significant time increase on graphs with many slice ops,
             // so for now we are removing "true" check and let backend
             // handle reshape sinking for slice operation.
-            if (slice->get_argument(0)->get_users().size() == 1)
+            if (slice->input_value(0).get_node_shared_ptr()->get_users().size() == 1)
             {
                 sink_slice(slice, reorders, reshapes_to_delete);
             }
@@ -620,11 +624,12 @@ bool ngraph::pass::ReshapeSinking::run_on_function(shared_ptr<ngraph::Function>
     for (auto r : results)
     {
         NGRAPH_CHECK(r->get_shape() == r->get_input_shape(0) &&
-                         r->get_element_type() == r->get_argument(0)->get_element_type(),
+                         r->get_element_type() ==
+                             r->input_value(0).get_node_shared_ptr()->get_element_type(),
                      " op::Result = ",
                      *r,
                      ", Arg = ",
-                     *r->get_argument(0));
+                     *r->input_value(0).get_node_shared_ptr());
     }
 
     // STEP 3: fix wrong shape info wholesale
index 33fb74f..72d1711 100644 (file)
@@ -116,7 +116,7 @@ bool pass::ZeroDimTensorElimination::run_on_function(shared_ptr<Function> f)
         if (auto concat = as_type_ptr<op::Concat>(n))
         {
             OutputVector non_zero_dim_args;
-            for (auto arg : concat->get_arguments())
+            for (auto arg : concat->input_values())
             {
                 if (!has_zero_dim(arg))
                 {
index 37ef6f2..a0ce67e 100644 (file)
@@ -128,9 +128,9 @@ namespace ngraph
             static std::shared_ptr<T> unique_match(std::shared_ptr<Node> node)
             {
                 std::shared_ptr<T> matched;
-                for (auto arg : node->get_arguments())
+                for (auto arg : node->input_values())
                 {
-                    if (auto t_casted = as_type_ptr<T>(arg))
+                    if (auto t_casted = as_type_ptr<T>(arg.get_node_shared_ptr()))
                     {
                         if (matched)
                         {
index 91a8d74..9e6a00a 100644 (file)
@@ -85,10 +85,15 @@ TEST(algebraic_simplification, add_types_shapes)
             auto results = f->get_results();
             for (size_t i = 0; i < results.size(); i++)
             {
-                ASSERT_EQ(expected.at(i),
-                          (results.at(i)->get_argument(0)->input_values().size()
-                               ? results.at(i)->get_argument(0)->get_argument(0)
-                               : results.at(i)->get_argument(0)));
+                ASSERT_EQ(
+                    expected.at(i),
+                    (results.at(i)->input_value(0).get_node_shared_ptr()->input_values().size()
+                         ? results.at(i)
+                               ->input_value(0)
+                               .get_node_shared_ptr()
+                               ->input_value(0)
+                               .get_node_shared_ptr()
+                         : results.at(i)->input_value(0).get_node_shared_ptr()));
             }
         }
     }
@@ -124,10 +129,15 @@ TEST(algebraic_simplification, DISABLED_add_v1_types_shapes)
             auto results = f->get_results();
             for (size_t i = 0; i < results.size(); i++)
             {
-                ASSERT_EQ(expected.at(i),
-                          (results.at(i)->get_argument(0)->input_values().size()
-                               ? results.at(i)->get_argument(0)->get_argument(0)
-                               : results.at(i)->get_argument(0)));
+                ASSERT_EQ(
+                    expected.at(i),
+                    (results.at(i)->input_value(0).get_node_shared_ptr()->input_values().size()
+                         ? results.at(i)
+                               ->input_value(0)
+                               .get_node_shared_ptr()
+                               ->input_value(0)
+                               .get_node_shared_ptr()
+                         : results.at(i)->input_value(0).get_node_shared_ptr()));
             }
         }
     }
@@ -160,9 +170,13 @@ TEST(algebraic_simplification, add_broadcast)
     for (size_t i = 0; i < results.size(); i++)
     {
         ASSERT_EQ(expected.at(i),
-                  (results.at(i)->get_argument(0)->input_values().size()
-                       ? results.at(i)->get_argument(0)->get_argument(0)
-                       : results.at(i)->get_argument(0)));
+                  (results.at(i)->input_value(0).get_node_shared_ptr()->input_values().size()
+                       ? results.at(i)
+                             ->input_value(0)
+                             .get_node_shared_ptr()
+                             ->input_value(0)
+                             .get_node_shared_ptr()
+                       : results.at(i)->input_value(0).get_node_shared_ptr()));
     }
 }
 
@@ -194,9 +208,13 @@ TEST(algebraic_simplification, DISABLED_add_v1_broadcast_v1)
     for (size_t i = 0; i < results.size(); i++)
     {
         ASSERT_EQ(expected.at(i),
-                  (results.at(i)->get_argument(0)->input_values().size()
-                       ? results.at(i)->get_argument(0)->get_argument(0)
-                       : results.at(i)->get_argument(0)));
+                  (results.at(i)->input_value(0).get_node_shared_ptr()->input_values().size()
+                       ? results.at(i)
+                             ->input_value(0)
+                             .get_node_shared_ptr()
+                             ->input_value(0)
+                             .get_node_shared_ptr()
+                       : results.at(i)->input_value(0).get_node_shared_ptr()));
     }
 }
 
@@ -227,9 +245,13 @@ TEST(algebraic_simplification, multiply_broadcast_0)
     for (size_t i = 0; i < results.size(); i++)
     {
         ASSERT_EQ(expected.at(i),
-                  (results.at(i)->get_argument(0)->input_values().size()
-                       ? results.at(i)->get_argument(0)->get_argument(0)
-                       : results.at(i)->get_argument(0)));
+                  (results.at(i)->input_value(0).get_node_shared_ptr()->input_values().size()
+                       ? results.at(i)
+                             ->input_value(0)
+                             .get_node_shared_ptr()
+                             ->input_value(0)
+                             .get_node_shared_ptr()
+                       : results.at(i)->input_value(0).get_node_shared_ptr()));
     }
 }
 
@@ -260,9 +282,13 @@ TEST(algebraic_simplification, DISABLED_multiply_v1_broadcast_v1_0)
     for (size_t i = 0; i < results.size(); i++)
     {
         ASSERT_EQ(expected.at(i),
-                  (results.at(i)->get_argument(0)->input_values().size()
-                       ? results.at(i)->get_argument(0)->get_argument(0)
-                       : results.at(i)->get_argument(0)));
+                  (results.at(i)->input_value(0).get_node_shared_ptr()->input_values().size()
+                       ? results.at(i)
+                             ->input_value(0)
+                             .get_node_shared_ptr()
+                             ->input_value(0)
+                             .get_node_shared_ptr()
+                       : results.at(i)->input_value(0).get_node_shared_ptr()));
     }
 }
 
@@ -291,9 +317,13 @@ TEST(algebraic_simplification, multiply_broadcast_1)
     for (size_t i = 0; i < results.size(); i++)
     {
         ASSERT_EQ(expected.at(i),
-                  (results.at(i)->get_argument(0)->input_values().size()
-                       ? results.at(i)->get_argument(0)->get_argument(0)
-                       : results.at(i)->get_argument(0)));
+                  (results.at(i)->input_value(0).get_node_shared_ptr()->input_values().size()
+                       ? results.at(i)
+                             ->input_value(0)
+                             .get_node_shared_ptr()
+                             ->input_value(0)
+                             .get_node_shared_ptr()
+                       : results.at(i)->input_value(0).get_node_shared_ptr()));
     }
 }
 
@@ -322,9 +352,13 @@ TEST(algebraic_simplification, DISABLED_multiply_v1_broadcast_v1_1)
     for (size_t i = 0; i < results.size(); i++)
     {
         ASSERT_EQ(expected.at(i),
-                  (results.at(i)->get_argument(0)->input_values().size()
-                       ? results.at(i)->get_argument(0)->get_argument(0)
-                       : results.at(i)->get_argument(0)));
+                  (results.at(i)->input_value(0).get_node_shared_ptr()->input_values().size()
+                       ? results.at(i)
+                             ->input_value(0)
+                             .get_node_shared_ptr()
+                             ->input_value(0)
+                             .get_node_shared_ptr()
+                       : results.at(i)->input_value(0).get_node_shared_ptr()));
     }
 }
 
@@ -348,8 +382,19 @@ TEST(algebraic_simplification, zero_plus_zero_commutativity)
                                         ParameterVector{a, b, c});
     pass_manager.run_passes(f);
 
-    ASSERT_TRUE(ngraph::is_zero(f->get_results().at(2)->get_argument(0)->get_argument(0)));
-    ASSERT_EQ(f->get_results().at(4)->get_argument(0)->get_argument(0), b);
+    ASSERT_TRUE(ngraph::is_zero(f->get_results()
+                                    .at(2)
+                                    ->input_value(0)
+                                    .get_node_shared_ptr()
+                                    ->input_value(0)
+                                    .get_node_shared_ptr()));
+    ASSERT_EQ(f->get_results()
+                  .at(4)
+                  ->input_value(0)
+                  .get_node_shared_ptr()
+                  ->input_value(0)
+                  .get_node_shared_ptr(),
+              b);
 }
 
 TEST(algebraic_simplification, DISABLED_zero_plus_zero_commutativity_v1)
@@ -372,8 +417,19 @@ TEST(algebraic_simplification, DISABLED_zero_plus_zero_commutativity_v1)
                                         ParameterVector{a, b, c});
     pass_manager.run_passes(f);
 
-    ASSERT_TRUE(ngraph::is_zero(f->get_results().at(2)->get_argument(0)->get_argument(0)));
-    ASSERT_EQ(f->get_results().at(4)->get_argument(0)->get_argument(0), b);
+    ASSERT_TRUE(ngraph::is_zero(f->get_results()
+                                    .at(2)
+                                    ->input_value(0)
+                                    .get_node_shared_ptr()
+                                    ->input_value(0)
+                                    .get_node_shared_ptr()));
+    ASSERT_EQ(f->get_results()
+                  .at(4)
+                  ->input_value(0)
+                  .get_node_shared_ptr()
+                  ->input_value(0)
+                  .get_node_shared_ptr(),
+              b);
 }
 
 TEST(algebraic_simplification, zero_multiply_zero_one)
@@ -395,8 +451,18 @@ TEST(algebraic_simplification, zero_multiply_zero_one)
                                         ParameterVector{a, b, c});
     pass_manager.run_passes(f);
 
-    ASSERT_TRUE(ngraph::is_zero(f->get_results().at(2)->get_argument(0)->get_argument(0)));
-    ASSERT_TRUE(ngraph::is_zero(f->get_results().at(4)->get_argument(0)->get_argument(0)));
+    ASSERT_TRUE(ngraph::is_zero(f->get_results()
+                                    .at(2)
+                                    ->input_value(0)
+                                    .get_node_shared_ptr()
+                                    ->input_value(0)
+                                    .get_node_shared_ptr()));
+    ASSERT_TRUE(ngraph::is_zero(f->get_results()
+                                    .at(4)
+                                    ->input_value(0)
+                                    .get_node_shared_ptr()
+                                    ->input_value(0)
+                                    .get_node_shared_ptr()));
 }
 
 TEST(algebraic_simplification, DISABLED_zero_multiply_zero_one_v1)
@@ -418,8 +484,18 @@ TEST(algebraic_simplification, DISABLED_zero_multiply_zero_one_v1)
                                         ParameterVector{a, b, c});
     pass_manager.run_passes(f);
 
-    ASSERT_TRUE(ngraph::is_zero(f->get_results().at(2)->get_argument(0)->get_argument(0)));
-    ASSERT_TRUE(ngraph::is_zero(f->get_results().at(4)->get_argument(0)->get_argument(0)));
+    ASSERT_TRUE(ngraph::is_zero(f->get_results()
+                                    .at(2)
+                                    ->input_value(0)
+                                    .get_node_shared_ptr()
+                                    ->input_value(0)
+                                    .get_node_shared_ptr()));
+    ASSERT_TRUE(ngraph::is_zero(f->get_results()
+                                    .at(4)
+                                    ->input_value(0)
+                                    .get_node_shared_ptr()
+                                    ->input_value(0)
+                                    .get_node_shared_ptr()));
 }
 
 TEST(algebraic_simplification, add_negative_tests)
@@ -447,7 +523,7 @@ TEST(algebraic_simplification, add_negative_tests)
     auto results = f->get_results();
     for (size_t i = 0; i < results.size(); i++)
     {
-        ASSERT_EQ(expected.at(i), results.at(i)->get_argument(0));
+        ASSERT_EQ(expected.at(i), results.at(i)->input_value(0).get_node_shared_ptr());
     }
 }
 
@@ -476,7 +552,7 @@ TEST(algebraic_simplification, DISABLED_add_negative_tests_v1)
     auto results = f->get_results();
     for (size_t i = 0; i < results.size(); i++)
     {
-        ASSERT_EQ(expected.at(i), results.at(i)->get_argument(0));
+        ASSERT_EQ(expected.at(i), results.at(i)->input_value(0).get_node_shared_ptr());
     }
 }
 
@@ -505,7 +581,7 @@ TEST(algebraic_simplification, DISABLED_multiply_negative_tests_v1)
     auto results = f->get_results();
     for (size_t i = 0; i < results.size(); i++)
     {
-        ASSERT_EQ(expected.at(i), results.at(i)->get_argument(0));
+        ASSERT_EQ(expected.at(i), results.at(i)->input_value(0).get_node_shared_ptr());
     }
 }
 
@@ -534,7 +610,7 @@ TEST(algebraic_simplification, multiply_negative_tests)
     auto results = f->get_results();
     for (size_t i = 0; i < results.size(); i++)
     {
-        ASSERT_EQ(expected.at(i), results.at(i)->get_argument(0));
+        ASSERT_EQ(expected.at(i), results.at(i)->input_value(0).get_node_shared_ptr());
     }
 }
 
@@ -549,9 +625,10 @@ TEST(algebraic_simplification, multiply_prod_vector_one)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{prod_fconst1}, ParameterVector{});
     pass_manager.run_passes(f);
-    auto new_broadcast = as_type_ptr<op::Broadcast>(f->get_results().at(0)->get_argument(0));
+    auto new_broadcast =
+        as_type_ptr<op::Broadcast>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_broadcast);
-    auto new_const = as_type_ptr<op::Constant>(new_broadcast->get_argument(0));
+    auto new_const = as_type_ptr<op::Constant>(new_broadcast->input_value(0).get_node_shared_ptr());
     auto values = new_const->get_vector<double>();
     ASSERT_EQ(values.size(), 1);
     ASSERT_EQ(values.at(0), 32);
@@ -568,7 +645,8 @@ TEST(algebraic_simplification, multiply_prod_scalar_one)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{prod_fconst1}, ParameterVector{});
     pass_manager.run_passes(f);
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values = new_const->get_vector<double>();
     ASSERT_EQ(values.size(), 1);
@@ -586,7 +664,7 @@ TEST(algebraic_simplification, multiply_prod_negative)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{prod_fconst1}, ParameterVector{});
     pass_manager.run_passes(f);
-    auto f_prod = f->get_results().at(0)->get_argument(0);
+    auto f_prod = f->get_results().at(0)->input_value(0).get_node_shared_ptr();
     ASSERT_EQ(f_prod, prod_fconst1);
 }
 
@@ -601,7 +679,8 @@ TEST(algebraic_simplification, multiply_sum_scalar_one)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{sum_fconst1}, ParameterVector{});
     pass_manager.run_passes(f);
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values = new_const->get_vector<double>();
     ASSERT_EQ(values.size(), 1);
@@ -619,9 +698,10 @@ TEST(algebraic_simplification, multiply_sum_vector_one)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{sum_fconst1}, ParameterVector{});
     pass_manager.run_passes(f);
-    auto new_broadcast = as_type_ptr<op::Broadcast>(f->get_results().at(0)->get_argument(0));
+    auto new_broadcast =
+        as_type_ptr<op::Broadcast>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_broadcast);
-    auto new_const = as_type_ptr<op::Constant>(new_broadcast->get_argument(0));
+    auto new_const = as_type_ptr<op::Constant>(new_broadcast->input_value(0).get_node_shared_ptr());
     auto values = new_const->get_vector<double>();
     ASSERT_EQ(values.size(), 1);
     ASSERT_EQ(values.at(0), 5);
@@ -638,7 +718,7 @@ TEST(algebraic_simplification, multiply_sum_negative)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{sum_fconst1}, ParameterVector{});
     pass_manager.run_passes(f);
-    auto f_sum = f->get_results().at(0)->get_argument(0);
+    auto f_sum = f->get_results().at(0)->input_value(0).get_node_shared_ptr();
     ASSERT_EQ(f_sum, sum_fconst1);
 }
 
@@ -661,7 +741,7 @@ TEST(algebraic_simplification, concat_reshape_slice)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{concat}, ParameterVector{a});
     pass_manager.run_passes(f);
-    ASSERT_TRUE(is_type<op::Reshape>(f->get_results().at(0)->get_argument(0)));
+    ASSERT_TRUE(is_type<op::Reshape>(f->get_results().at(0)->input_value(0).get_node_shared_ptr()));
 }
 
 TEST(algebraic_simplification, concat_slice)
@@ -679,7 +759,7 @@ TEST(algebraic_simplification, concat_slice)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{concat}, ParameterVector{a});
     pass_manager.run_passes(f);
-    ASSERT_EQ(f->get_results().at(0)->get_argument(0), a);
+    ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(), a);
 }
 
 TEST(algebraic_simplification, concat_parameter_slice)
@@ -697,7 +777,7 @@ TEST(algebraic_simplification, concat_parameter_slice)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{concat}, ParameterVector{a});
     pass_manager.run_passes(f);
-    ASSERT_EQ(f->get_results().at(0)->get_argument(0), a);
+    ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(), a);
 }
 
 TEST(algebraic_simplification, concat_parameter_slices_reversed)
@@ -715,7 +795,7 @@ TEST(algebraic_simplification, concat_parameter_slices_reversed)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{concat}, ParameterVector{a});
     pass_manager.run_passes(f);
-    ASSERT_EQ(f->get_results().at(0)->get_argument(0), concat);
+    ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(), concat);
 }
 
 TEST(algebraic_simplification, concat_parameter_slices_element_count)
@@ -734,7 +814,7 @@ TEST(algebraic_simplification, concat_parameter_slices_element_count)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{concat}, ParameterVector{a});
     pass_manager.run_passes(f);
-    ASSERT_EQ(f->get_results().at(0)->get_argument(0), concat);
+    ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(), concat);
 }
 
 TEST(algebraic_simplification, concat_parameter_non_uniform_slices)
@@ -752,7 +832,7 @@ TEST(algebraic_simplification, concat_parameter_non_uniform_slices)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{concat}, ParameterVector{a});
     pass_manager.run_passes(f);
-    ASSERT_EQ(f->get_results().at(0)->get_argument(0), concat);
+    ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(), concat);
 }
 
 TEST(algebraic_simplification, concat_different_inputs)
@@ -775,7 +855,7 @@ TEST(algebraic_simplification, concat_different_inputs)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{concat}, ParameterVector{a});
     pass_manager.run_passes(f);
-    ASSERT_EQ(f->get_results().at(0)->get_argument(0), concat);
+    ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(), concat);
 }
 
 TEST(algebraic_simplification, log_neg_neg)
@@ -796,12 +876,12 @@ TEST(algebraic_simplification, log_neg_neg)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{neg4}, ParameterVector{a, b});
     pass_manager.run_passes(f);
-    auto sub = as_type_ptr<op::Subtract>(neg_inner->get_argument(0));
+    auto sub = as_type_ptr<op::Subtract>(neg_inner->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(sub != nullptr);
-    ASSERT_EQ(sub->get_argument(0), a);
-    auto new_log = as_type_ptr<op::Log>(sub->get_argument(1));
+    ASSERT_EQ(sub->input_value(0).get_node_shared_ptr(), a);
+    auto new_log = as_type_ptr<op::Log>(sub->input_value(1).get_node_shared_ptr());
     ASSERT_TRUE(new_log != nullptr);
-    ASSERT_EQ(new_log->get_argument(0), b);
+    ASSERT_EQ(new_log->input_value(0).get_node_shared_ptr(), b);
 }
 
 TEST(algebraic_simplification, log_no_exp)
@@ -822,7 +902,7 @@ TEST(algebraic_simplification, log_no_exp)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{neg4}, ParameterVector{a, b});
     pass_manager.run_passes(f);
-    ASSERT_EQ(neg_inner->get_argument(0), log_div);
+    ASSERT_EQ(neg_inner->input_value(0).get_node_shared_ptr(), log_div);
 }
 
 TEST(algebraic_simplification, log_no_divide)
@@ -843,7 +923,7 @@ TEST(algebraic_simplification, log_no_divide)
 
     auto f = std::make_shared<Function>(ngraph::NodeVector{neg4}, ParameterVector{a, b});
     pass_manager.run_passes(f);
-    ASSERT_EQ(neg_inner->get_argument(0), log_mul);
+    ASSERT_EQ(neg_inner->input_value(0).get_node_shared_ptr(), log_mul);
 }
 
 TEST(algebraic_simplification, pass_property)
@@ -1093,17 +1173,21 @@ TEST(algebraic_simplification, gather_shapeof)
 
         ASSERT_EQ(count_ops_of_type<op::v1::Gather>(baseline_f), 1) << casename;
 
-        auto last_node = optimized_f->get_results()[0]->get_argument(0);
+        auto last_node = optimized_f->get_results()[0]->input_value(0).get_node_shared_ptr();
         if (is_scalar_index)
         {
             ASSERT_EQ(count_ops_of_type<op::v3::ShapeOf>(optimized_f), 1) << casename;
             ASSERT_EQ(count_ops_of_type<op::v1::Gather>(optimized_f), 1) << casename;
-            EXPECT_TRUE(as_type_ptr<op::v1::Gather>(last_node->get_argument(0))) << casename;
+            EXPECT_TRUE(
+                as_type_ptr<op::v1::Gather>(last_node->input_value(0).get_node_shared_ptr()))
+                << casename;
         }
         else
         {
             ASSERT_EQ(count_ops_of_type<op::v0::Concat>(optimized_f), 1) << casename;
-            EXPECT_TRUE(as_type_ptr<op::v0::Concat>(last_node->get_argument(0))) << casename;
+            EXPECT_TRUE(
+                as_type_ptr<op::v0::Concat>(last_node->input_value(0).get_node_shared_ptr()))
+                << casename;
         }
     };
 
index 9ce7198..0144629 100644 (file)
@@ -35,12 +35,12 @@ TEST(build_graph, build_simple)
     auto broadcast_1 = make_shared<op::Broadcast>(arg3, Shape{10, 32, 7}, AxisSet{0});
     auto b1 = make_shared<op::Broadcast>(arg3, Shape{10, 32, 7}, AxisSet{0});
     auto dot = make_shared<op::Dot>(arg2, arg0);
-    ASSERT_EQ(dot->get_arguments()[0], arg2);
-    ASSERT_EQ(dot->get_arguments()[1], arg0);
+    ASSERT_EQ(dot->input_value(0).get_node_shared_ptr(), arg2);
+    ASSERT_EQ(dot->input_value(1).get_node_shared_ptr(), arg0);
 
     auto cluster_0 = make_shared<Function>(dot, ParameterVector{arg0, arg1, arg2, arg3});
 
-    ASSERT_EQ(cluster_0->get_output_op(0)->get_argument(0), dot);
+    ASSERT_EQ(cluster_0->get_output_op(0)->input_value(0).get_node_shared_ptr(), dot);
 }
 
 // Check node comparisons
@@ -67,8 +67,8 @@ TEST(build_graph, literal)
     ASSERT_EQ(float0->get_element_type(), element::f32);
     ASSERT_EQ(float0->get_shape(), Shape{});
     auto d = make_shared<op::Dot>(float0, float0);
-    ASSERT_EQ(d->get_arguments().at(0), float0);
-    ASSERT_EQ(d->get_arguments().at(1), float0);
+    ASSERT_EQ(d->input_values().at(0).get_node_shared_ptr(), float0);
+    ASSERT_EQ(d->input_values().at(1).get_node_shared_ptr(), float0);
 
     vector<int32_t> int32{3};
     auto int32_0 = make_shared<op::Constant>(element::i32, Shape{}, int32);
@@ -87,8 +87,8 @@ TEST(build_graph, tensor)
     ASSERT_EQ(float0->get_element_type(), element::f32);
     ASSERT_EQ(float0->get_shape(), shape);
     auto d = make_shared<op::Add>(float0, float0);
-    ASSERT_EQ(d->get_arguments().at(0), float0);
-    ASSERT_EQ(d->get_arguments().at(1), float0);
+    ASSERT_EQ(d->input_values().at(0).get_node_shared_ptr(), float0);
+    ASSERT_EQ(d->input_values().at(1).get_node_shared_ptr(), float0);
 
     Shape ishape{3, 5};
     vector<int32_t> idata(shape_size(ishape), 0);
@@ -108,8 +108,8 @@ TEST(build_graph, function_undeclared_parameters)
     auto broadcast_1 = make_shared<op::Broadcast>(arg3, Shape{10, 32, 7}, AxisSet{0});
     auto b1 = make_shared<op::Broadcast>(arg3, Shape{10, 32, 7}, AxisSet{0});
     auto dot = make_shared<op::Dot>(arg2, arg0);
-    ASSERT_EQ(dot->get_arguments()[0], arg2);
-    ASSERT_EQ(dot->get_arguments()[1], arg0);
+    ASSERT_EQ(dot->input_values()[0].get_node_shared_ptr(), arg2);
+    ASSERT_EQ(dot->input_values()[1].get_node_shared_ptr(), arg0);
     try
     {
         auto f = make_shared<Function>(dot, ParameterVector{arg0, arg1, arg3});
index 472a7bf..4ff3c63 100644 (file)
@@ -28,7 +28,8 @@ using namespace std;
 template <typename T>
 static std::vector<T> get_result_constant(std::shared_ptr<Function> f, size_t pos)
 {
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(pos)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(pos)->input_value(0).get_node_shared_ptr());
     return new_const->cast_vector<T>();
 }
 
@@ -71,7 +72,8 @@ TEST(constant_folding, acosh)
     EXPECT_EQ(count_ops_of_type<op::Constant>(f), 1);
     ASSERT_EQ(f->get_results().size(), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results()[0]->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results()[0]->input_value(0).get_node_shared_ptr());
     EXPECT_TRUE(new_const);
 
     auto values_out = new_const->get_vector<float>();
@@ -100,7 +102,8 @@ TEST(constant_folding, asinh)
     EXPECT_EQ(count_ops_of_type<op::Constant>(f), 1);
     ASSERT_EQ(f->get_results().size(), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results()[0]->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results()[0]->input_value(0).get_node_shared_ptr());
     EXPECT_TRUE(new_const);
 
     auto values_out = new_const->get_vector<float>();
@@ -129,7 +132,8 @@ TEST(constant_folding, atanh)
     EXPECT_EQ(count_ops_of_type<op::Constant>(f), 1);
     ASSERT_EQ(f->get_results().size(), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results()[0]->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results()[0]->input_value(0).get_node_shared_ptr());
     EXPECT_TRUE(new_const);
 
     auto values_out = new_const->get_vector<float>();
@@ -156,7 +160,8 @@ TEST(constant_folding, constant_squeeze)
     ASSERT_EQ(count_ops_of_type<op::Squeeze>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_shape(), shape_out);
 
@@ -184,7 +189,8 @@ TEST(constant_folding, constant_unsqueeze)
     ASSERT_EQ(count_ops_of_type<op::Unsqueeze>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_shape(), shape_out);
 
@@ -209,7 +215,8 @@ TEST(constant_folding, constant_reshape)
     ASSERT_EQ(count_ops_of_type<op::Reshape>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<float>();
 
@@ -233,7 +240,8 @@ TEST(constant_folding, DISABLED_constant_reshape_permute)
     ASSERT_EQ(count_ops_of_type<op::Reshape>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<double>();
 
@@ -258,7 +266,8 @@ TEST(constant_folding, constant_broadcast)
     ASSERT_EQ(count_ops_of_type<op::Broadcast>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int>();
 
@@ -284,7 +293,8 @@ TEST(constant_folding, constant_broadcast_v1)
     ASSERT_EQ(count_ops_of_type<op::v1::Broadcast>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int32_t>();
 
@@ -308,7 +318,8 @@ TEST(constant_folding, constant_broadcast_v1_with_target_shape)
     ASSERT_EQ(count_ops_of_type<op::v1::Broadcast>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int32_t>();
 
@@ -332,7 +343,8 @@ TEST(constant_folding, constant_broadcast_v1_numpy)
     ASSERT_EQ(count_ops_of_type<op::v1::Broadcast>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int32_t>();
 
@@ -361,7 +373,8 @@ TEST(constant_folding, constant_pad_exterior)
     ASSERT_EQ(count_ops_of_type<op::Pad>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int>();
 
@@ -529,7 +542,8 @@ TEST(constant_folding, const_dequantize)
     ASSERT_EQ(count_ops_of_type<op::Dequantize>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<output_c_type>();
 
@@ -563,7 +577,8 @@ TEST(constant_folding, const_quantize)
     ASSERT_EQ(count_ops_of_type<op::Quantize>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<output_c_type>();
 
@@ -587,7 +602,8 @@ TEST(constant_folding, const_convert)
     ASSERT_EQ(count_ops_of_type<op::Convert>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_output_element_type(0), element::u64);
     auto values_out = new_const->get_vector<uint64_t>();
@@ -611,7 +627,8 @@ TEST(constant_folding, shape_of_v0)
     ASSERT_EQ(count_ops_of_type<op::v0::ShapeOf>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_output_element_type(0), element::i64);
     auto values_out = new_const->get_vector<int64_t>();
@@ -634,7 +651,8 @@ TEST(constant_folding, shape_of_v3)
     ASSERT_EQ(count_ops_of_type<op::v3::ShapeOf>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_output_element_type(0), element::i64);
     auto values_out = new_const->get_vector<int64_t>();
@@ -657,7 +675,8 @@ TEST(constant_folding, shape_of_i32_v3)
     ASSERT_EQ(count_ops_of_type<op::v3::ShapeOf>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_output_element_type(0), element::i32);
     auto values_out = new_const->get_vector<int32_t>();
@@ -682,7 +701,8 @@ TEST(constant_folding, shape_of_dynamic_v0)
     ASSERT_EQ(count_ops_of_type<op::Concat>(f), 1);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 8);
 
-    auto result_as_concat = as_type_ptr<op::Concat>(f->get_results().at(0)->get_argument(0));
+    auto result_as_concat =
+        as_type_ptr<op::Concat>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(result_as_concat);
     ASSERT_EQ(result_as_concat->get_output_shape(0), Shape{7});
 }
@@ -704,7 +724,8 @@ TEST(constant_folding, shape_of_dynamic_v3)
     ASSERT_EQ(count_ops_of_type<op::Concat>(f), 1);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 8);
 
-    auto result_as_concat = as_type_ptr<op::Concat>(f->get_results().at(0)->get_argument(0));
+    auto result_as_concat =
+        as_type_ptr<op::Concat>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(result_as_concat);
     ASSERT_EQ(result_as_concat->get_output_shape(0), Shape{7});
     ASSERT_EQ(result_as_concat->get_output_element_type(0), element::i64);
@@ -727,7 +748,8 @@ TEST(constant_folding, shape_of_dynamic_i32_v3)
     ASSERT_EQ(count_ops_of_type<op::Concat>(f), 1);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 8);
 
-    auto result_as_concat = as_type_ptr<op::Concat>(f->get_results().at(0)->get_argument(0));
+    auto result_as_concat =
+        as_type_ptr<op::Concat>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(result_as_concat);
     ASSERT_EQ(result_as_concat->get_output_shape(0), Shape{7});
     ASSERT_EQ(result_as_concat->get_output_element_type(0), element::i32);
@@ -752,7 +774,8 @@ TEST(constant_folding, shape_of_dynamic_double_folding_v0)
     ASSERT_EQ(count_ops_of_type<op::Concat>(f), 1);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 8);
 
-    auto result_as_concat = as_type_ptr<op::Concat>(f->get_results().at(0)->get_argument(0));
+    auto result_as_concat =
+        as_type_ptr<op::Concat>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(result_as_concat);
     ASSERT_EQ(result_as_concat->get_output_shape(0), Shape{7});
 }
@@ -775,7 +798,8 @@ TEST(constant_folding, shape_of_dynamic_double_folding_v3)
     ASSERT_EQ(count_ops_of_type<op::Concat>(f), 1);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 8);
 
-    auto result_as_concat = as_type_ptr<op::Concat>(f->get_results().at(0)->get_argument(0));
+    auto result_as_concat =
+        as_type_ptr<op::Concat>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(result_as_concat);
     ASSERT_EQ(result_as_concat->get_output_shape(0), Shape{7});
 }
@@ -836,7 +860,8 @@ TEST(constant_folding, const_reverse)
     ASSERT_EQ(count_ops_of_type<op::Reverse>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int32_t>();
 
@@ -860,7 +885,8 @@ TEST(constant_folding, const_product)
     ASSERT_EQ(count_ops_of_type<op::Product>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int32_t>();
 
@@ -888,7 +914,8 @@ TEST(constant_folding, const_reduceprod)
     ASSERT_EQ(count_ops_of_type<op::v1::ReduceProd>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_shape(), output_shape);
 
@@ -919,7 +946,8 @@ TEST(constant_folding, const_reduceprod_keepdims)
     ASSERT_EQ(count_ops_of_type<op::v1::ReduceProd>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_shape(), output_shape);
 
@@ -946,7 +974,8 @@ TEST(constant_folding, const_sum)
     ASSERT_EQ(count_ops_of_type<op::Sum>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int32_t>();
 
@@ -975,7 +1004,8 @@ TEST(constant_folding, const_reducesum)
     ASSERT_EQ(count_ops_of_type<op::v1::ReduceSum>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_shape(), output_shape);
 
@@ -1006,7 +1036,8 @@ TEST(constant_folding, const_reducesum_keepdims)
     ASSERT_EQ(count_ops_of_type<op::v1::ReduceSum>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_shape(), output_shape);
 
@@ -1033,7 +1064,8 @@ TEST(constant_folding, const_max)
     ASSERT_EQ(count_ops_of_type<op::Max>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int32_t>();
 
@@ -1062,7 +1094,8 @@ TEST(constant_folding, const_reducemax)
     ASSERT_EQ(count_ops_of_type<op::v1::ReduceMax>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_shape(), output_shape);
 
@@ -1093,7 +1126,8 @@ TEST(constant_folding, const_reducemax_keepdims)
     ASSERT_EQ(count_ops_of_type<op::v1::ReduceMax>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_shape(), output_shape);
 
@@ -1120,7 +1154,8 @@ TEST(constant_folding, const_min)
     ASSERT_EQ(count_ops_of_type<op::Min>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int32_t>();
 
@@ -1149,7 +1184,8 @@ TEST(constant_folding, const_reducemin)
     ASSERT_EQ(count_ops_of_type<op::v1::ReduceMin>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_shape(), output_shape);
 
@@ -1180,7 +1216,8 @@ TEST(constant_folding, const_reducemin_keepdims)
     ASSERT_EQ(count_ops_of_type<op::v1::ReduceMin>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_shape(), output_shape);
 
@@ -1211,7 +1248,8 @@ TEST(constant_folding, const_reducemean)
     ASSERT_EQ(count_ops_of_type<op::v1::ReduceMean>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_shape(), output_shape);
 
@@ -1242,7 +1280,8 @@ TEST(constant_folding, const_reducemean_keepdims)
     ASSERT_EQ(count_ops_of_type<op::v1::ReduceMean>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(new_const->get_shape(), output_shape);
 
@@ -1270,7 +1309,8 @@ TEST(constant_folding, const_reduce_logical_and__no_keepdims)
     ASSERT_EQ(count_ops_of_type<op::v1::ReduceLogicalAnd>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    const auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    const auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
 
     const Shape expected_out_shape{3};
@@ -1300,7 +1340,8 @@ TEST(constant_folding, const_reduce_logical_and__keepdims)
     ASSERT_EQ(count_ops_of_type<op::v1::ReduceLogicalAnd>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    const auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    const auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
 
     // the output shape is expected to have 'ones' at the positions specified in the reduction axes
@@ -1332,7 +1373,8 @@ TEST(constant_folding, const_reduce_logical_and__keepdims_3d)
     ASSERT_EQ(count_ops_of_type<op::v1::ReduceLogicalAnd>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    const auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    const auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
 
     const Shape expected_out_shape{1, 2, 1};
@@ -1361,7 +1403,8 @@ TEST(constant_folding, const_any)
     ASSERT_EQ(count_ops_of_type<op::Any>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<char>();
 
@@ -1387,7 +1430,8 @@ TEST(constant_folding, const_reduce_logical_or__no_keepdims)
     ASSERT_EQ(count_ops_of_type<op::v1::ReduceLogicalAnd>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    const auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    const auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
 
     const Shape expected_out_shape{3};
@@ -1415,7 +1459,8 @@ TEST(constant_folding, const_concat)
     ASSERT_EQ(count_ops_of_type<op::Concat>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int32_t>();
 
@@ -1438,7 +1483,8 @@ TEST(constant_folding, const_not)
     ASSERT_EQ(count_ops_of_type<op::Not>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<char>();
 
@@ -1463,7 +1509,8 @@ TEST(constant_folding, const_equal)
     ASSERT_EQ(count_ops_of_type<op::Equal>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<char>();
 
@@ -1488,7 +1535,8 @@ TEST(constant_folding, const_not_equal)
     ASSERT_EQ(count_ops_of_type<op::NotEqual>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<char>();
 
@@ -1513,7 +1561,8 @@ TEST(constant_folding, const_greater)
     ASSERT_EQ(count_ops_of_type<op::Greater>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<char>();
 
@@ -1538,7 +1587,8 @@ TEST(constant_folding, const_greater_eq)
     ASSERT_EQ(count_ops_of_type<op::GreaterEq>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<char>();
 
@@ -1563,7 +1613,8 @@ TEST(constant_folding, const_less)
     ASSERT_EQ(count_ops_of_type<op::Less>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<char>();
 
@@ -1588,7 +1639,8 @@ TEST(constant_folding, const_less_eq)
     ASSERT_EQ(count_ops_of_type<op::LessEq>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<char>();
 
@@ -1613,7 +1665,8 @@ TEST(constant_folding, const_or)
     ASSERT_EQ(count_ops_of_type<op::Or>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<char>();
 
@@ -1638,7 +1691,8 @@ TEST(constant_folding, const_xor)
     ASSERT_EQ(count_ops_of_type<op::Xor>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<char>();
 
@@ -1661,7 +1715,8 @@ TEST(constant_folding, const_ceiling)
     ASSERT_EQ(count_ops_of_type<op::Ceiling>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<float>();
 
@@ -1684,7 +1739,8 @@ TEST(constant_folding, const_floor)
     ASSERT_EQ(count_ops_of_type<op::Floor>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<float>();
 
@@ -1712,7 +1768,8 @@ TEST(constant_folding, const_gather)
     ASSERT_EQ(count_ops_of_type<op::v0::Gather>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<float>();
 
@@ -1740,7 +1797,8 @@ TEST(constant_folding, const_gather_v1)
     ASSERT_EQ(count_ops_of_type<op::v1::Gather>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<float>();
 
@@ -1768,7 +1826,8 @@ TEST(constant_folding, const_gather_v1_scalar)
     ASSERT_EQ(count_ops_of_type<op::v1::Gather>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<float>();
 
@@ -1801,7 +1860,8 @@ TEST(constant_folding, const_gather_v1_subgraph)
     ASSERT_EQ(count_ops_of_type<op::v1::Gather>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    const auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    const auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
 
     const auto values_out = new_const->get_vector<float>();
@@ -1832,7 +1892,8 @@ TEST(constant_folding, const_gather_v1_subgraph_neg_axis)
     ASSERT_EQ(count_ops_of_type<op::v1::Gather>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    const auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    const auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
 
     const auto values_out = new_const->get_vector<float>();
@@ -1995,7 +2056,8 @@ TEST(constant_folding, const_slice)
     ASSERT_EQ(count_ops_of_type<op::Slice>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int>();
 
@@ -2023,7 +2085,8 @@ TEST(constant_folding, constant_dyn_reshape)
     ASSERT_EQ(count_ops_of_type<op::v1::Reshape>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<float>();
 
@@ -2059,7 +2122,8 @@ TEST(constant_folding, constant_dyn_reshape_shape_not_originally_constant)
     ASSERT_EQ(count_ops_of_type<op::v1::Reshape>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<float>();
 
@@ -2086,7 +2150,8 @@ TEST(constant_folding, constant_transpose)
     ASSERT_EQ(count_ops_of_type<op::Transpose>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<double>();
 
@@ -2114,7 +2179,8 @@ void range_test(T start, T stop, T step, const vector<T>& values_expected)
     ASSERT_EQ(count_ops_of_type<op::Range>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
 
     auto values_out = new_const->template get_vector<T>();
@@ -2155,7 +2221,8 @@ TEST(constant_folding, constant_select)
     ASSERT_EQ(count_ops_of_type<op::Select>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int64_t>();
 
@@ -2184,7 +2251,8 @@ TEST(constant_folding, constant_v1_select)
     ASSERT_EQ(count_ops_of_type<op::Select>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int64_t>();
 
@@ -2209,9 +2277,12 @@ TEST(constant_folding, constant_v1_split)
     ASSERT_EQ(count_ops_of_type<op::v1::Split>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), num_splits);
 
-    auto res1 = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
-    auto res2 = as_type_ptr<op::Constant>(f->get_results().at(1)->get_argument(0));
-    auto res3 = as_type_ptr<op::Constant>(f->get_results().at(2)->get_argument(0));
+    auto res1 =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
+    auto res2 =
+        as_type_ptr<op::Constant>(f->get_results().at(1)->input_value(0).get_node_shared_ptr());
+    auto res3 =
+        as_type_ptr<op::Constant>(f->get_results().at(2)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(res1);
     ASSERT_TRUE(res2);
     ASSERT_TRUE(res3);
@@ -2244,9 +2315,12 @@ TEST(constant_folding, constant_v1_split_specialized)
     ASSERT_EQ(count_ops_of_type<op::v1::Split>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), num_splits);
 
-    auto res1 = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
-    auto res2 = as_type_ptr<op::Constant>(f->get_results().at(1)->get_argument(0));
-    auto res3 = as_type_ptr<op::Constant>(f->get_results().at(2)->get_argument(0));
+    auto res1 =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
+    auto res2 =
+        as_type_ptr<op::Constant>(f->get_results().at(1)->input_value(0).get_node_shared_ptr());
+    auto res3 =
+        as_type_ptr<op::Constant>(f->get_results().at(2)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(res1);
     ASSERT_TRUE(res2);
     ASSERT_TRUE(res3);
@@ -2283,10 +2357,14 @@ TEST(constant_folding, constant_v1_split_axis_1_4_splits)
     ASSERT_EQ(count_ops_of_type<op::v1::Split>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), num_splits);
 
-    auto res1 = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
-    auto res2 = as_type_ptr<op::Constant>(f->get_results().at(1)->get_argument(0));
-    auto res3 = as_type_ptr<op::Constant>(f->get_results().at(2)->get_argument(0));
-    auto res4 = as_type_ptr<op::Constant>(f->get_results().at(3)->get_argument(0));
+    auto res1 =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
+    auto res2 =
+        as_type_ptr<op::Constant>(f->get_results().at(1)->input_value(0).get_node_shared_ptr());
+    auto res3 =
+        as_type_ptr<op::Constant>(f->get_results().at(2)->input_value(0).get_node_shared_ptr());
+    auto res4 =
+        as_type_ptr<op::Constant>(f->get_results().at(3)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(res1);
     ASSERT_TRUE(res2);
     ASSERT_TRUE(res3);
@@ -2330,8 +2408,10 @@ TEST(constant_folding, constant_v1_split_axis_1_2_splits)
     ASSERT_EQ(count_ops_of_type<op::v1::Split>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), num_splits);
 
-    auto res1 = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
-    auto res2 = as_type_ptr<op::Constant>(f->get_results().at(1)->get_argument(0));
+    auto res1 =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
+    auto res2 =
+        as_type_ptr<op::Constant>(f->get_results().at(1)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(res1);
     ASSERT_TRUE(res2);
 
@@ -2372,8 +2452,10 @@ TEST(constant_folding, constant_v1_variadic_split_axis_1_2_splits)
     ASSERT_EQ(count_ops_of_type<op::v1::VariadicSplit>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), values_lengths.size());
 
-    auto res1 = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
-    auto res2 = as_type_ptr<op::Constant>(f->get_results().at(1)->get_argument(0));
+    auto res1 =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
+    auto res2 =
+        as_type_ptr<op::Constant>(f->get_results().at(1)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(res1);
     ASSERT_TRUE(res2);
 
@@ -2414,9 +2496,12 @@ TEST(constant_folding, constant_v1_variadic_split_axis_1_3_splits_neg_length)
     ASSERT_EQ(count_ops_of_type<op::v1::VariadicSplit>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), values_lengths.size());
 
-    auto res1 = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
-    auto res2 = as_type_ptr<op::Constant>(f->get_results().at(1)->get_argument(0));
-    auto res3 = as_type_ptr<op::Constant>(f->get_results().at(2)->get_argument(0));
+    auto res1 =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
+    auto res2 =
+        as_type_ptr<op::Constant>(f->get_results().at(1)->input_value(0).get_node_shared_ptr());
+    auto res3 =
+        as_type_ptr<op::Constant>(f->get_results().at(2)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(res1);
     ASSERT_TRUE(res2);
     ASSERT_TRUE(res3);
@@ -2456,7 +2541,8 @@ TEST(constant_folding, constant_v1_one_hot)
     ASSERT_EQ(count_ops_of_type<op::v1::OneHot>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto res = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto res =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(res);
 
     ASSERT_EQ((Shape{3, 3}), res->get_output_shape(0));
@@ -2495,7 +2581,8 @@ TEST(constant_folding, constant_v1_one_hot_negative_axes)
     ASSERT_EQ(count_ops_of_type<op::v1::OneHot>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto res = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto res =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(res);
 
     ASSERT_EQ((Shape{4, 3}), res->get_output_shape(0));
@@ -2537,7 +2624,8 @@ TEST(constant_folding, constant_v1_one_hot_negative_axes_2)
     ASSERT_EQ(count_ops_of_type<op::v1::OneHot>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto res = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto res =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(res);
 
     ASSERT_EQ((Shape{2, 2, 3}), res->get_output_shape(0));
@@ -2576,7 +2664,8 @@ TEST(constant_folding, constant_tile_1d)
     ASSERT_EQ(count_ops_of_type<op::Tile>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int>();
 
@@ -2604,7 +2693,8 @@ TEST(constant_folding, constant_tile_3d_small_data_rank)
     ASSERT_EQ(count_ops_of_type<op::Tile>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int>();
 
@@ -2632,7 +2722,8 @@ TEST(constant_folding, constant_tile_3d_few_repeats)
     ASSERT_EQ(count_ops_of_type<op::Tile>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int>();
 
@@ -2660,7 +2751,8 @@ TEST(constant_folding, constant_tile_1d_0_repeats)
     ASSERT_EQ(count_ops_of_type<op::Tile>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int>();
 
@@ -2688,7 +2780,8 @@ TEST(constant_folding, constant_tile_0_rank_data)
     ASSERT_EQ(count_ops_of_type<op::Tile>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<int>();
 
@@ -2718,7 +2811,8 @@ TEST(constant_folding, constant_non_zero_0D)
     ASSERT_EQ(count_ops_of_type<op::v3::NonZero>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    const auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    const auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     const auto values_out = new_const->get_vector<int64_t>();
 
@@ -2741,7 +2835,8 @@ TEST(constant_folding, constant_non_zero_1D)
     ASSERT_EQ(count_ops_of_type<op::v3::NonZero>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    const auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    const auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     const auto values_out = new_const->get_vector<int64_t>();
 
@@ -2764,7 +2859,8 @@ TEST(constant_folding, constant_non_zero_int32_output_type)
     ASSERT_EQ(count_ops_of_type<op::v3::NonZero>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    const auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    const auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(element::i32, new_const->get_element_type());
     const auto values_out = new_const->get_vector<int32_t>();
@@ -2788,7 +2884,8 @@ TEST(constant_folding, constant_non_zero_1D_all_indices)
     ASSERT_EQ(count_ops_of_type<op::v3::NonZero>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    const auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    const auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     const auto values_out = new_const->get_vector<int64_t>();
 
@@ -2811,7 +2908,8 @@ TEST(constant_folding, constant_non_zero_2D)
     ASSERT_EQ(count_ops_of_type<op::v3::NonZero>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    const auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    const auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     const auto values_out = new_const->get_vector<int64_t>();
 
@@ -2834,7 +2932,8 @@ TEST(constant_folding, DISABLED_constant_non_zero_2D_all_indices)
     ASSERT_EQ(count_ops_of_type<op::v3::NonZero>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    const auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    const auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     const auto values_out = new_const->get_vector<int64_t>();
 
@@ -2858,7 +2957,8 @@ TEST(constant_folding, DISABLED_constant_non_zero_2D_all_zeros)
     ASSERT_EQ(count_ops_of_type<op::v3::NonZero>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    const auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    const auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     ASSERT_EQ(shape_size(new_const->get_shape()), 0);
 }
@@ -2877,7 +2977,8 @@ TEST(constant_folding, constant_non_zero_3D)
     ASSERT_EQ(count_ops_of_type<op::v3::NonZero>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    const auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    const auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     const auto values_out = new_const->get_vector<int64_t>();
 
@@ -2911,7 +3012,8 @@ TEST(constant_folding, constant_scatter_elements_update_basic)
     ASSERT_EQ(count_ops_of_type<op::v3::ScatterElementsUpdate>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto result_node = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto result_node =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(result_node);
     ASSERT_EQ(data_shape, result_node->get_output_shape(0));
     std::vector<float> expected{2.f, 1.1f, 0.0f, 1.f, 0.0f, 2.2f, 0.f, 2.1f, 1.2f};
@@ -2942,7 +3044,8 @@ TEST(constant_folding, constant_scatter_elements_update_negative_axis)
     ASSERT_EQ(count_ops_of_type<op::v3::ScatterElementsUpdate>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto result_node = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto result_node =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(result_node);
     ASSERT_EQ(data_shape, result_node->get_output_shape(0));
     std::vector<float> expected{1.1f, 1.0f, 1.2f, 2.0f, 2.2f, 2.1f, 0.0f, 0.0f, 0.0f};
@@ -2973,7 +3076,8 @@ TEST(constant_folding, constant_scatter_elements_update_1d_axis)
     ASSERT_EQ(count_ops_of_type<op::v3::ScatterElementsUpdate>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto result_node = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto result_node =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(result_node);
     ASSERT_EQ(data_shape, result_node->get_output_shape(0));
     std::vector<float> expected{2.f, 1.1f, 0.0f, 1.f, 0.0f, 2.2f, 0.f, 2.1f, 1.2f};
@@ -3004,7 +3108,8 @@ TEST(constant_folding, constant_scatter_elements_update_3d_i16)
     ASSERT_EQ(count_ops_of_type<op::v3::ScatterElementsUpdate>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto result_node = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto result_node =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(result_node);
     ASSERT_EQ(data_shape, result_node->get_output_shape(0));
     std::vector<int16_t> expected{4, 2, 0, 1, 0, 6, 0, 5, 3, 10, 0, 12, 0, 11,
@@ -3034,7 +3139,8 @@ TEST(constant_folding, constant_scatter_elements_update_one_elem)
     ASSERT_EQ(count_ops_of_type<op::v3::ScatterElementsUpdate>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto result_node = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto result_node =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(result_node);
     ASSERT_EQ(data_shape, result_node->get_output_shape(0));
     std::vector<int32_t> expected{input_data};
@@ -3061,7 +3167,8 @@ void test_constant_folding_reshape_v1(Shape& shape_in,
     ASSERT_EQ(count_ops_of_type<op::v1::Reshape>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_const = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto new_const =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_const);
     auto values_out = new_const->get_vector<float>();
 
index 491eee7..a951b4f 100644 (file)
@@ -129,7 +129,7 @@ TEST(control_dependencies, clone_function_cdop)
     test_ordered_ops(f, NodeVector{absn});
     auto clone = ngraph::clone_function(*f.get());
     auto matcher = std::make_shared<pattern::Matcher>(cdop);
-    auto cdop_clone = clone->get_results().at(0)->get_argument(0);
+    auto cdop_clone = clone->get_results().at(0)->input_value(0).get_node_shared_ptr();
     ASSERT_TRUE(matcher->match(cdop_clone));
     auto cloned_deps = cdop_clone->get_control_dependencies();
     ASSERT_EQ(cloned_deps.size(), 1);
@@ -150,7 +150,12 @@ TEST(control_dependencies, clone_function_cdop_abs)
     auto f = make_shared<Function>(absn_cdop, ParameterVector{A, B});
     auto clone = ngraph::clone_function(*f.get());
     auto matcher = std::make_shared<pattern::Matcher>(cdop);
-    auto cdop_clone = clone->get_results().at(0)->get_argument(0)->get_argument(0);
+    auto cdop_clone = clone->get_results()
+                          .at(0)
+                          ->input_value(0)
+                          .get_node_shared_ptr()
+                          ->input_value(0)
+                          .get_node_shared_ptr();
     ASSERT_TRUE(matcher->match(cdop_clone));
     auto cloned_deps = cdop_clone->get_control_dependencies();
     ASSERT_EQ(cloned_deps.size(), 2);
@@ -197,7 +202,7 @@ TEST(control_dependencies, serialize_cdop)
     shared_ptr<Function> clone = deserialize(js);
 
     auto matcher = std::make_shared<pattern::Matcher>(cdop);
-    auto cdop_clone = clone->get_results().at(0)->get_argument(0);
+    auto cdop_clone = clone->get_results().at(0)->input_value(0).get_node_shared_ptr();
     ASSERT_TRUE(matcher->match(cdop_clone));
     auto cloned_deps = cdop_clone->get_control_dependencies();
     ASSERT_EQ(cloned_deps.size(), 1);
@@ -221,7 +226,12 @@ TEST(control_dependencies, serialize_cdop_abs)
     string js = serialize(f, 4);
     shared_ptr<Function> clone = deserialize(js);
     auto matcher = std::make_shared<pattern::Matcher>(cdop);
-    auto cdop_clone = clone->get_results().at(0)->get_argument(0)->get_argument(0);
+    auto cdop_clone = clone->get_results()
+                          .at(0)
+                          ->input_value(0)
+                          .get_node_shared_ptr()
+                          ->input_value(0)
+                          .get_node_shared_ptr();
     ASSERT_TRUE(matcher->match(cdop_clone));
     auto cloned_deps = cdop_clone->get_control_dependencies();
     ASSERT_EQ(cloned_deps.size(), 2);
index 04fd719..386335d 100644 (file)
@@ -132,7 +132,7 @@ TEST(copy, constant)
     auto node_cast = as_type_ptr<op::Constant>(new_node);
     ASSERT_NE(node_cast, nullptr);
     ASSERT_TRUE(nullptr != new_node);
-    ASSERT_TRUE(NodeVector{} == new_node->get_arguments());
+    ASSERT_TRUE(OutputVector{} == new_node->input_values());
     ASSERT_TRUE(node_cast->get_vector<float>() == c);
     ASSERT_TRUE(node_cast->get_shape() == shape);
     ASSERT_TRUE(node_cast->get_element_type() == et);
@@ -151,7 +151,7 @@ TEST(copy, convert)
     ASSERT_NE(node_cast, nullptr);
 
     ASSERT_TRUE(nullptr != new_node);
-    ASSERT_TRUE(new_args == as_output_vector(new_node->get_arguments()));
+    ASSERT_TRUE(new_args == new_node->input_values());
     ASSERT_TRUE(et == node_cast->get_convert_element_type());
 }
 
@@ -249,7 +249,7 @@ TEST(copy, parameter)
     ASSERT_NE(node_cast, nullptr);
 
     ASSERT_TRUE(nullptr != new_node);
-    ASSERT_TRUE(new_node->get_arguments().size() == 0);
+    ASSERT_TRUE(new_node->input_values().size() == 0);
     ASSERT_TRUE(node->has_same_type(new_node));
 }
 
@@ -273,7 +273,7 @@ TEST(copy, reshape)
     ASSERT_NE(node_cast, nullptr);
 
     ASSERT_TRUE(nullptr != new_node);
-    ASSERT_TRUE(new_args == as_output_vector(new_node->get_arguments()));
+    ASSERT_TRUE(new_args == new_node->input_values());
     ASSERT_TRUE(axes == node_cast->get_input_order());
     ASSERT_TRUE(shape_out == node_cast->get_output_shape(0));
 }
@@ -328,7 +328,7 @@ TEST(copy, slice)
     ASSERT_NE(node_cast, nullptr);
 
     ASSERT_TRUE(nullptr != new_node);
-    ASSERT_TRUE(new_args == as_output_vector(new_node->get_arguments()));
+    ASSERT_TRUE(new_args == new_node->input_values());
     ASSERT_TRUE(lower == node_cast->get_lower_bounds());
     ASSERT_TRUE(upper == node_cast->get_upper_bounds());
     ASSERT_TRUE(strides == node_cast->get_strides());
@@ -346,13 +346,14 @@ TEST(copy, sum)
     auto arg0 = make_shared<op::Parameter>(element::f32, shape);
 
     auto node = make_shared<op::Sum>(arg0, axes);
-    OutputVector new_args{make_shared<op::Parameter>(element::f32, shape), node->get_argument(1)};
+    OutputVector new_args{make_shared<op::Parameter>(element::f32, shape),
+                          node->input_value(1).get_node_shared_ptr()};
     auto new_node = node->clone_with_new_inputs(new_args);
     auto node_cast = as_type_ptr<op::Sum>(new_node);
     ASSERT_NE(node_cast, nullptr);
 
     ASSERT_TRUE(nullptr != new_node);
-    ASSERT_TRUE(new_args == as_output_vector(new_node->get_arguments()));
+    ASSERT_TRUE(new_args == new_node->input_values());
     ASSERT_TRUE(axes == node_cast->get_reduction_axes());
 }
 
index ab1b99e..d8a8c7b 100644 (file)
@@ -48,7 +48,8 @@ TEST(CSE, abs_abs)
 
     pass_manager.register_pass<ngraph::pass::CommonSubexpressionElimination>();
     pass_manager.run_passes(f);
-    ASSERT_EQ(f->get_results().at(0)->get_argument(0), f->get_results().at(1)->get_argument(0));
+    ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(),
+              f->get_results().at(1)->input_value(0).get_node_shared_ptr());
 }
 
 TEST(CSE, abs_abs_negative)
@@ -63,8 +64,8 @@ TEST(CSE, abs_abs_negative)
 
     pass_manager.register_pass<ngraph::pass::CommonSubexpressionElimination>();
     pass_manager.run_passes(f);
-    ASSERT_EQ(f->get_results().at(0)->get_argument(0), abs1);
-    ASSERT_EQ(f->get_results().at(1)->get_argument(0), abs2);
+    ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(), abs1);
+    ASSERT_EQ(f->get_results().at(1)->input_value(0).get_node_shared_ptr(), abs2);
 }
 
 TEST(CSE, add_add)
@@ -79,7 +80,8 @@ TEST(CSE, add_add)
 
     pass_manager.register_pass<ngraph::pass::CommonSubexpressionElimination>();
     pass_manager.run_passes(f);
-    ASSERT_EQ(f->get_results().at(0)->get_argument(0), f->get_results().at(1)->get_argument(0));
+    ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(),
+              f->get_results().at(1)->input_value(0).get_node_shared_ptr());
 }
 
 TEST(CSE, add_add_commutative)
@@ -94,7 +96,8 @@ TEST(CSE, add_add_commutative)
 
     pass_manager.register_pass<ngraph::pass::CommonSubexpressionElimination>();
     pass_manager.run_passes(f);
-    ASSERT_EQ(f->get_results().at(0)->get_argument(0), f->get_results().at(1)->get_argument(0));
+    ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(),
+              f->get_results().at(1)->input_value(0).get_node_shared_ptr());
 }
 
 TEST(CSE, add_add_negative)
@@ -111,8 +114,8 @@ TEST(CSE, add_add_negative)
 
     pass_manager.register_pass<ngraph::pass::CommonSubexpressionElimination>();
     pass_manager.run_passes(f);
-    ASSERT_EQ(f->get_results().at(0)->get_argument(0), add1);
-    ASSERT_EQ(f->get_results().at(1)->get_argument(0), add2);
+    ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(), add1);
+    ASSERT_EQ(f->get_results().at(1)->input_value(0).get_node_shared_ptr(), add2);
 }
 
 TEST(CSE, abs_add)
@@ -131,7 +134,8 @@ TEST(CSE, abs_add)
 
     pass_manager.register_pass<ngraph::pass::CommonSubexpressionElimination>();
     pass_manager.run_passes(f);
-    ASSERT_EQ(f->get_results().at(0)->get_argument(0), f->get_results().at(1)->get_argument(0));
+    ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(),
+              f->get_results().at(1)->input_value(0).get_node_shared_ptr());
 }
 
 TEST(CSE, abs_add_reshape_broadcast)
@@ -157,7 +161,8 @@ TEST(CSE, abs_add_reshape_broadcast)
 
         pass_manager.register_pass<ngraph::pass::CommonSubexpressionElimination>();
         pass_manager.run_passes(f);
-        ASSERT_EQ(f->get_results().at(0)->get_argument(0), f->get_results().at(1)->get_argument(0));
+        ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(),
+                  f->get_results().at(1)->input_value(0).get_node_shared_ptr());
     }
     {
         // fail case
@@ -168,7 +173,8 @@ TEST(CSE, abs_add_reshape_broadcast)
 
         pass_manager.register_pass<ngraph::pass::CommonSubexpressionElimination>();
         pass_manager.run_passes(f);
-        ASSERT_NE(f->get_results().at(0)->get_argument(0), f->get_results().at(1)->get_argument(0));
+        ASSERT_NE(f->get_results().at(0)->input_value(0).get_node_shared_ptr(),
+                  f->get_results().at(1)->input_value(0).get_node_shared_ptr());
     }
     {
         // fail case
@@ -180,7 +186,8 @@ TEST(CSE, abs_add_reshape_broadcast)
 
         pass_manager.register_pass<ngraph::pass::CommonSubexpressionElimination>();
         pass_manager.run_passes(f);
-        ASSERT_NE(f->get_results().at(0)->get_argument(0), f->get_results().at(1)->get_argument(0));
+        ASSERT_NE(f->get_results().at(0)->input_value(0).get_node_shared_ptr(),
+                  f->get_results().at(1)->input_value(0).get_node_shared_ptr());
     }
 }
 
@@ -205,7 +212,8 @@ TEST(CSE, abs_add_abs_add)
 
     pass_manager.register_pass<ngraph::pass::CommonSubexpressionElimination>();
     pass_manager.run_passes(f);
-    ASSERT_EQ(f->get_results().at(0)->get_argument(0), f->get_results().at(1)->get_argument(0));
+    ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(),
+              f->get_results().at(1)->input_value(0).get_node_shared_ptr());
 }
 
 TEST(CSE, abs_add_abs_add_negative)
@@ -230,13 +238,14 @@ TEST(CSE, abs_add_abs_add_negative)
 
     pass_manager.register_pass<ngraph::pass::CommonSubexpressionElimination>();
     pass_manager.run_passes(f);
-    auto oadd3 = f->get_results().at(0)->get_argument(0);
-    auto oadd4 = f->get_results().at(1)->get_argument(0);
+    auto oadd3 = f->get_results().at(0)->input_value(0).get_node_shared_ptr();
+    auto oadd4 = f->get_results().at(1)->input_value(0).get_node_shared_ptr();
     ASSERT_EQ(oadd3, add3);
     ASSERT_EQ(oadd4, add4);
-    ASSERT_EQ(oadd3->get_argument(1), C);
-    ASSERT_EQ(oadd4->get_argument(1), D);
-    ASSERT_EQ(oadd3->get_argument(0), oadd4->get_argument(0));
+    ASSERT_EQ(oadd3->input_value(1).get_node_shared_ptr(), C);
+    ASSERT_EQ(oadd4->input_value(1).get_node_shared_ptr(), D);
+    ASSERT_EQ(oadd3->input_value(0).get_node_shared_ptr(),
+              oadd4->input_value(0).get_node_shared_ptr());
 }
 
 template <typename T>
@@ -260,9 +269,12 @@ static void execute_cse_reduction_test()
     pass_manager.register_pass<ngraph::pass::CommonSubexpressionElimination>();
     pass_manager.run_passes(f);
 
-    ASSERT_EQ(sub_aa->get_argument(0), sub_aa->get_argument(1));
-    ASSERT_NE(sub_ab->get_argument(0), sub_ab->get_argument(1));
-    ASSERT_NE(f->get_results().at(2)->get_argument(0), sub_aa->get_argument(0));
+    ASSERT_EQ(sub_aa->input_value(0).get_node_shared_ptr(),
+              sub_aa->input_value(1).get_node_shared_ptr());
+    ASSERT_NE(sub_ab->input_value(0).get_node_shared_ptr(),
+              sub_ab->input_value(1).get_node_shared_ptr());
+    ASSERT_NE(f->get_results().at(2)->input_value(0).get_node_shared_ptr(),
+              sub_aa->input_value(0).get_node_shared_ptr());
 }
 
 TEST(CSE, reduction_ops)
@@ -300,11 +312,16 @@ TEST(CSE, constant)
     pass_manager.register_pass<ngraph::pass::CommonSubexpressionElimination>();
     pass_manager.run_passes(f);
 
-    ASSERT_EQ(abs0->get_argument(0), abs0_1->get_argument(0));
-    ASSERT_EQ(abs1->get_argument(0), abs1_1->get_argument(0));
-    ASSERT_NE(abs0->get_argument(0), abs1->get_argument(0));
-    ASSERT_NE(abs0->get_argument(0), absf->get_argument(0));
-    ASSERT_NE(abs111->get_argument(0), abs112->get_argument(0));
+    ASSERT_EQ(abs0->input_value(0).get_node_shared_ptr(),
+              abs0_1->input_value(0).get_node_shared_ptr());
+    ASSERT_EQ(abs1->input_value(0).get_node_shared_ptr(),
+              abs1_1->input_value(0).get_node_shared_ptr());
+    ASSERT_NE(abs0->input_value(0).get_node_shared_ptr(),
+              abs1->input_value(0).get_node_shared_ptr());
+    ASSERT_NE(abs0->input_value(0).get_node_shared_ptr(),
+              absf->input_value(0).get_node_shared_ptr());
+    ASSERT_NE(abs111->input_value(0).get_node_shared_ptr(),
+              abs112->input_value(0).get_node_shared_ptr());
 }
 
 TEST(CSE, one_hot)
@@ -319,7 +336,8 @@ TEST(CSE, one_hot)
         auto onehot2 = std::make_shared<op::OneHot>(A, out_shape, 1);
         auto f = std::make_shared<Function>(NodeVector{onehot1, onehot2}, ParameterVector{A});
         pass_manager.run_passes(f);
-        ASSERT_EQ(f->get_results().at(0)->get_argument(0), f->get_results().at(1)->get_argument(0));
+        ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(),
+                  f->get_results().at(1)->input_value(0).get_node_shared_ptr());
     }
     {
         Shape param_shape{8, 1};
@@ -331,7 +349,8 @@ TEST(CSE, one_hot)
         auto onehot2 = std::make_shared<op::OneHot>(reshape2, out_shape, 1);
         auto f = std::make_shared<Function>(NodeVector{onehot1, onehot2}, ParameterVector{A});
         pass_manager.run_passes(f);
-        ASSERT_EQ(f->get_results().at(0)->get_argument(0), f->get_results().at(1)->get_argument(0));
+        ASSERT_EQ(f->get_results().at(0)->input_value(0).get_node_shared_ptr(),
+                  f->get_results().at(1)->input_value(0).get_node_shared_ptr());
     }
 }
 
index fc626e4..c39d477 100644 (file)
@@ -46,7 +46,8 @@ TEST(dyn_elimination, transpose)
     ASSERT_EQ(count_ops_of_type<op::Transpose>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Reshape>(f), 1);
 
-    auto new_reshape = as_type_ptr<op::Reshape>(f->get_results().at(0)->get_argument(0));
+    auto new_reshape =
+        as_type_ptr<op::Reshape>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_reshape);
 
     ASSERT_EQ(new_reshape->get_input_order(), (AxisVector{2, 3, 1, 0}));
@@ -78,7 +79,8 @@ TEST(dyn_elimination, transpose_dyn_shape)
     ASSERT_EQ(count_ops_of_type<op::Transpose>(f), 1);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto new_transpose = as_type_ptr<op::Transpose>(f->get_results().at(0)->get_argument(0));
+    auto new_transpose =
+        as_type_ptr<op::Transpose>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_transpose);
 
     ASSERT_EQ(new_transpose->get_output_element_type(0), element::boolean);
@@ -106,7 +108,8 @@ TEST(dyn_elimination, range)
     ASSERT_EQ(count_ops_of_type<op::Range>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto replacement = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto replacement =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
 
     ASSERT_NE(replacement, nullptr);
     ASSERT_EQ(replacement->get_element_type(), element::i64);
@@ -137,7 +140,8 @@ TEST(dyn_elimination, range_f64)
     ASSERT_EQ(count_ops_of_type<op::Range>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
-    auto replacement = as_type_ptr<op::Constant>(f->get_results().at(0)->get_argument(0));
+    auto replacement =
+        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
 
     ASSERT_NE(replacement, nullptr);
     ASSERT_EQ(replacement->get_element_type(), element::f64);
index a12b0e4..cc67f72 100644 (file)
@@ -52,6 +52,6 @@ TEST(input_output, simple_output)
     ASSERT_EQ(2, add->get_input_size());
     for (size_t i = 0; i < add->get_input_size(); i++)
     {
-        ASSERT_EQ(add->get_argument(i), nodes.at(i));
+        ASSERT_EQ(add->input_value(i).get_node_shared_ptr(), nodes.at(i));
     }
 }
index 39e3b71..cdd95aa 100644 (file)
@@ -95,15 +95,16 @@ public:
         auto callback = [pattern](pattern::Matcher& m) {
             NGRAPH_DEBUG << "In a callback for construct_multiply_by_one against "
                          << m.get_match_root()->get_name();
-            NGRAPH_CHECK(m.get_match_root()->get_arguments().size() == 2);
+            NGRAPH_CHECK(m.get_match_root()->input_values().size() == 2);
 
             auto pattern_map = m.get_pattern_map();
 
             size_t const_node_index =
-                m.get_match_root()->get_arguments().at(0) == pattern_map[pattern];
-            auto const_node =
-                as_type_ptr<op::Constant>(m.get_match_root()->get_arguments().at(const_node_index));
-            auto second_node = m.get_match_root()->get_arguments().at(const_node_index);
+                m.get_match_root()->input_value(0).get_node_shared_ptr() == pattern_map[pattern];
+            auto const_node = as_type_ptr<op::Constant>(
+                m.get_match_root()->input_value(const_node_index).get_node_shared_ptr());
+            auto second_node =
+                m.get_match_root()->input_value(const_node_index).get_node_shared_ptr();
             NGRAPH_DEBUG << "second_node = " << second_node->get_name()
                          << " , pattern = " << pattern_map[pattern]->get_name();
 
@@ -141,15 +142,16 @@ public:
         auto callback = [pattern](pattern::Matcher& m) {
             NGRAPH_DEBUG << "In a callback for construct_add_zero against "
                          << m.get_match_root()->get_name();
-            NGRAPH_CHECK(m.get_match_root()->get_arguments().size() == 2);
+            NGRAPH_CHECK(m.get_match_root()->input_values().size() == 2);
 
             auto pattern_map = m.get_pattern_map();
 
             size_t const_node_index =
-                m.get_match_root()->get_arguments().at(0) == pattern_map[pattern];
-            auto const_node =
-                as_type_ptr<op::Constant>(m.get_match_root()->get_arguments().at(const_node_index));
-            auto second_node = m.get_match_root()->get_arguments().at(const_node_index);
+                m.get_match_root()->input_value(0).get_node_shared_ptr() == pattern_map[pattern];
+            auto const_node = as_type_ptr<op::Constant>(
+                m.get_match_root()->input_value(const_node_index).get_node_shared_ptr());
+            auto second_node =
+                m.get_match_root()->input_value(const_node_index).get_node_shared_ptr();
             NGRAPH_DEBUG << "second_node = " << second_node->get_name()
                          << " , pattern = " << pattern_map[pattern]->get_name();
 
@@ -227,7 +229,7 @@ TEST(pattern, graph_rewrite)
         auto sum = (a + iconst0);
         auto graph = b + sum;
         run_passes(pass_manager, graph, {a, b});
-        ASSERT_EQ(graph->get_arguments().at(1), a);
+        ASSERT_EQ(graph->input_value(1).get_node_shared_ptr(), a);
         ASSERT_EQ(graph->input_value(1), a->output(0)); // graph's input points to a's output
         ASSERT_TRUE(sum->output(0)
                         .get_target_inputs()
@@ -243,7 +245,7 @@ TEST(pattern, graph_rewrite)
         auto mul = (a * iconst1);
         auto graph = b + mul;
         run_passes(pass_manager, graph, {a, b});
-        ASSERT_EQ(graph->get_arguments().at(1), a);
+        ASSERT_EQ(graph->input_value(1).get_node_shared_ptr(), a);
         ASSERT_EQ(graph->input_value(1), a->output(0)); // graph's input points to a's output
         ASSERT_TRUE(mul->output(0)
                         .get_target_inputs()
@@ -258,7 +260,7 @@ TEST(pattern, graph_rewrite)
         auto iconst1 = construct_constant_node(1);
         auto graph = ((((a * iconst1) * iconst1) * iconst1) * iconst1) + b;
         run_passes(pass_manager, graph, {a, b});
-        ASSERT_EQ(graph->get_arguments().at(0), a);
+        ASSERT_EQ(graph->input_value(0).get_node_shared_ptr(), a);
         ASSERT_EQ(graph->input_value(0), a->output(0)); // graph's input points to a's output
         ASSERT_TRUE(a->get_output_target_inputs(0).count(
             graph->input(0))); // a's output feeds into graph's input
@@ -271,7 +273,7 @@ TEST(pattern, graph_rewrite)
         auto iconst1 = construct_constant_node(1);
         auto graph = b + (iconst0 + ((a + iconst0) * iconst1));
         run_passes(pass_manager, graph, {a, b});
-        ASSERT_EQ(graph->get_arguments().at(1), a);
+        ASSERT_EQ(graph->input_value(1).get_node_shared_ptr(), a);
         ASSERT_EQ(graph->input_value(1), a->output(0)); // graph's input points to a's output
         ASSERT_TRUE(a->get_output_target_inputs(0).count(
             graph->input(1))); // a's output feeds into graph's input
@@ -283,7 +285,7 @@ TEST(pattern, graph_rewrite)
         auto iconst1 = construct_constant_node(1);
         auto graph = b + (iconst1 * (iconst1 * (iconst1 * (iconst1 * a))));
         run_passes(pass_manager, graph, {a, b});
-        ASSERT_EQ(graph->get_arguments().at(1), a);
+        ASSERT_EQ(graph->input_value(1).get_node_shared_ptr(), a);
         ASSERT_EQ(graph->input_value(1), a->output(0)); // graph's input points to a's output
         ASSERT_TRUE(a->get_output_target_inputs(0).count(
             graph->input(1))); // a's output feeds into graph's input
@@ -699,12 +701,12 @@ TEST(pattern, recurrent_graph_rewrite)
         auto f = std::make_shared<Function>(ngraph::NodeVector{graph}, ParameterVector{a, b});
         pass_manager.run_passes(f);
 
-        auto left_abs = graph->get_argument(0);
-        auto add_a = left_abs->get_argument(0);
+        auto left_abs = graph->input_value(0).get_node_shared_ptr();
+        auto add_a = left_abs->input_value(0).get_node_shared_ptr();
         ASSERT_EQ(add_a, a);
 
-        auto right_abs = graph->get_argument(1);
-        auto add_b = right_abs->get_argument(0);
+        auto right_abs = graph->input_value(1).get_node_shared_ptr();
+        auto add_b = right_abs->input_value(0).get_node_shared_ptr();
         ASSERT_EQ(add_b, b);
     }
 }
index 457cc0d..4066758 100644 (file)
@@ -149,12 +149,12 @@ TEST(reshape_elimination, dot_transpose_to_dot_w_transpose_args)
     pass_manager.register_pass<pass::ReshapeElimination>();
     auto func = make_shared<Function>(graph, ParameterVector{W, x});
     pass_manager.run_passes(func);
-    auto gdot = graph->get_argument(0);
+    auto gdot = graph->input_value(0).get_node_shared_ptr();
     ASSERT_TRUE(as_type_ptr<op::Dot>(gdot));
-    ASSERT_TRUE(as_type_ptr<op::Reshape>(gdot->get_argument(0)));
-    ASSERT_TRUE(as_type_ptr<op::Reshape>(gdot->get_argument(1)));
-    ASSERT_EQ(gdot->get_argument(0)->get_argument(0), x);
-    ASSERT_EQ(gdot->get_argument(1)->get_argument(0), W);
+    ASSERT_TRUE(as_type_ptr<op::Reshape>(gdot->input_value(0).get_node_shared_ptr()));
+    ASSERT_TRUE(as_type_ptr<op::Reshape>(gdot->input_value(1).get_node_shared_ptr()));
+    ASSERT_EQ(gdot->input_value(0).get_node_shared_ptr()->input_value(0).get_node_shared_ptr(), x);
+    ASSERT_EQ(gdot->input_value(1).get_node_shared_ptr()->input_value(0).get_node_shared_ptr(), W);
     ASSERT_EQ(gdot->get_shape(), (Shape{1, 2}));
 }
 
index 51778b8..57928c8 100644 (file)
@@ -61,8 +61,9 @@ TEST(reshape_sinking, edge_splitting)
     pass_manager.register_pass<pass::ReshapeElimination>();
     pass_manager.register_pass<pass::CommonSubexpressionElimination>();
     pass_manager.run_passes(func);
-    ASSERT_EQ(func->get_results().at(1)->get_argument(0), sum);
-    auto new_reshape = as_type_ptr<op::Reshape>(func->get_results().at(0)->get_argument(0));
+    ASSERT_EQ(func->get_results().at(1)->input_value(0).get_node_shared_ptr(), sum);
+    auto new_reshape =
+        as_type_ptr<op::Reshape>(func->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(new_reshape);
     ASSERT_EQ(new_reshape->get_shape(), shape_nchw);
 }
@@ -101,7 +102,7 @@ TEST(reshape_sinking, broadcast_swimming)
 
     ASSERT_EQ(add->get_shape(), conv_nchw);
     ASSERT_EQ(add->get_input_shape(0), conv_nchw);
-    ASSERT_EQ(add->get_argument(1), conv);
+    ASSERT_EQ(add->input_value(1).get_node_shared_ptr(), conv);
 }
 
 TEST(reshape_sinking, concat)
index 362d1b3..f9f0dbb 100644 (file)
@@ -133,11 +133,12 @@ TEST(specialize_function, et_static_shape_rank_static_dynamic_subst_val)
     ASSERT_EQ(g->get_output_shape(0), (Shape{1, 2, 3}));
     ASSERT_EQ(g->get_output_element_type(0), element::f32);
 
-    auto plus_node = as_type_ptr<op::Add>(g->get_results().at(0)->get_argument(0));
+    auto plus_node =
+        as_type_ptr<op::Add>(g->get_results().at(0)->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(plus_node);
-    auto convert_node = as_type_ptr<op::Convert>(plus_node->get_argument(1));
+    auto convert_node = as_type_ptr<op::Convert>(plus_node->input_value(1).get_node_shared_ptr());
     ASSERT_TRUE(convert_node);
-    auto const_node = as_type_ptr<op::Constant>(convert_node->get_argument(0));
+    auto const_node = as_type_ptr<op::Constant>(convert_node->input_value(0).get_node_shared_ptr());
     ASSERT_TRUE(const_node);
 
     ASSERT_EQ(const_node->get_output_element_type(0), element::i32);
index 7719fc0..372c32c 100644 (file)
@@ -75,7 +75,7 @@ void test_binary(std::string /* node_type */,
 
     auto test_binary_good_arguments = [&](const shared_ptr<Node>& x, const shared_ptr<Node>& y) {
         auto node = f(x, y);
-        EXPECT_TRUE(node->has_same_type(node->get_arguments()[0]));
+        EXPECT_TRUE(node->has_same_type(node->input_values()[0].get_node_shared_ptr()));
     };
     test_binary_good_arguments(tv0_2_4_param_0, tv0_2_4_param_1);
 }
@@ -188,7 +188,7 @@ void test_binary_logical(std::string /* node_type */,
 
     auto test_binary_good_arguments = [&](const shared_ptr<Node>& x, const shared_ptr<Node>& y) {
         auto node = f(x, y);
-        EXPECT_TRUE(node->has_same_type(node->get_arguments()[0]));
+        EXPECT_TRUE(node->has_same_type(node->input_values()[0].get_node_shared_ptr()));
     };
     test_binary_good_arguments(tv0_2_4_param_0, tv0_2_4_param_1);
 }
index 7961e09..3d016cf 100644 (file)
@@ -31,7 +31,9 @@ bool validate_list(const vector<shared_ptr<Node>>& nodes)
     for (auto it = nodes.rbegin(); it != nodes.rend(); it++)
     {
         auto node_tmp = *it;
-        auto dependencies_tmp = node_tmp->get_arguments();
+        NodeVector dependencies_tmp;
+        for (auto& val : node_tmp->input_values())
+            dependencies_tmp.emplace_back(val.get_node_shared_ptr());
         vector<Node*> dependencies;
 
         for (shared_ptr<Node> n : dependencies_tmp)