Remove obsoleted v0::Not operator (#2846)
author Mateusz Tabaka <mateusz.tabaka@intel.com>
Fri, 30 Oct 2020 04:33:26 +0000 (05:33 +0100)
committer GitHub <noreply@github.com>
Fri, 30 Oct 2020 04:33:26 +0000 (07:33 +0300)
* Remove obsoleted v0::Not operator in favour of v1::LogicalNot (see the migration sketch below)

* Restore tests from ngraph/test/backend

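For reference, a minimal migration sketch (not part of this change set; it assumes the
ngraph/ngraph.hpp umbrella header is available and wraps the snippet in an illustrative
main()): code that previously built graphs with the deprecated op::v0::Not now constructs
op::v1::LogicalNot, exactly as the updated tests in this diff do.

    #include <memory>
    #include "ngraph/ngraph.hpp"

    using namespace ngraph;

    int main()
    {
        Shape shape{2, 2};
        auto A = std::make_shared<op::Parameter>(element::boolean, shape);

        // Before this commit (declaration now removed from ngraph/op/not.hpp):
        //   auto negated = std::make_shared<op::v0::Not>(A);

        // After this commit: the opset1 logical negation op is used instead.
        auto negated = std::make_shared<op::v1::LogicalNot>(A);

        auto f = std::make_shared<Function>(negated, ParameterVector{A});
        return f ? 0 : 1;
    }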
14 files changed:
ngraph/core/include/ngraph/op/not.hpp
ngraph/core/include/ngraph/op/op_version_tbl.hpp
ngraph/core/src/op/not.cpp
ngraph/test/CMakeLists.txt
ngraph/test/backend/logical_not.in.cpp [moved from ngraph/test/backend/not.in.cpp with 92% similarity]
ngraph/test/backend/zero_sized.in.cpp
ngraph/test/constant_folding.cpp
ngraph/test/eval.cpp
ngraph/test/op_is.cpp
ngraph/test/runtime/interpreter/int_executable.hpp
ngraph/test/runtime/opset0_tbl.hpp
ngraph/test/runtime/pass/opset0_downgrade.cpp
ngraph/test/runtime/pass/opset1_upgrade.cpp
ngraph/test/type_prop/binary_elementwise.cpp

index be33b16..85eb2da 100644 (file)
@@ -45,36 +45,5 @@ namespace ngraph
                               const HostTensorVector& inputs) const override;
             };
         }
-        namespace v0
-        {
-            /// \brief Elementwise logical negation operation.
-            class NGRAPH_DEPRECATED(
-                "This operation is deprecated and will be removed soon. "
-                "Use v1::LogicalNot instead of it.") NGRAPH_API Not : public Op
-            {
-                NGRAPH_SUPPRESS_DEPRECATED_START
-            public:
-                static constexpr NodeTypeInfo type_info{"Not", 0};
-                const NodeTypeInfo& get_type_info() const override { return type_info; }
-                /// \brief Constructs a logical negation operation.
-                Not() = default;
-                /// \brief Constructs a logical negation operation.
-                ///
-                /// \param arg Node that produces the input tensor.
-                Not(const Output<Node>& arg);
-
-                void validate_and_infer_types() override;
-
-                virtual std::shared_ptr<Node>
-                    clone_with_new_inputs(const OutputVector& new_args) const override;
-                bool evaluate(const HostTensorVector& outputs,
-                              const HostTensorVector& inputs) const override;
-                NGRAPH_SUPPRESS_DEPRECATED_END
-            };
-        }
-
-        NGRAPH_SUPPRESS_DEPRECATED_START
-        using v0::Not;
-        NGRAPH_SUPPRESS_DEPRECATED_END
     } // namespace op
 } // namespace ngraph
index 6dc5f72..d412cab 100644 (file)
@@ -117,7 +117,6 @@ NGRAPH_OP(NonMaxSuppression, ngraph::op::v1, 1)
 NGRAPH_OP(NonMaxSuppression, ngraph::op::v3, 3)
 NGRAPH_OP(NonZero, ngraph::op::v3, 3)
 NGRAPH_OP(NormalizeL2, ngraph::op::v0, 0)
-NGRAPH_OP(Not, ngraph::op::v0, 0)
 NGRAPH_OP(NotEqual, ngraph::op::v0, 0)
 NGRAPH_OP(NotEqual, ngraph::op::v1, 1)
 NGRAPH_OP(OneHot, ngraph::op::v1, 1)
index 877f75e..5deb3db 100644 (file)
@@ -101,33 +101,3 @@ bool op::v1::LogicalNot::evaluate(const HostTensorVector& outputs,
     OV_ITT_SCOPED_TASK(itt::domains::nGraphOp, "op::v1::LogicalNot::evaluate");
     return notop::evaluate_not(inputs[0], outputs[0], shape_size(get_output_shape(0)));
 }
-
-constexpr NodeTypeInfo op::v0::Not::type_info;
-
-op::v0::Not::Not(const Output<Node>& arg)
-    : Op({arg})
-{
-    constructor_validate_and_infer_types();
-}
-
-// TODO(amprocte): Update this to allow only boolean, for consistency with logical binops.
-void op::v0::Not::validate_and_infer_types()
-{
-    auto args_et_pshape = ngraph::op::util::validate_and_infer_elementwise_args(this);
-    element::Type& args_et = std::get<0>(args_et_pshape);
-    PartialShape& args_pshape = std::get<1>(args_et_pshape);
-
-    set_output_type(0, args_et, args_pshape);
-}
-
-shared_ptr<Node> op::v0::Not::clone_with_new_inputs(const OutputVector& new_args) const
-{
-    check_new_args_count(this, new_args);
-    return make_shared<v0::Not>(new_args.at(0));
-}
-
-bool op::Not::evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const
-{
-    OV_ITT_SCOPED_TASK(itt::domains::nGraphOp, "op::Not::evaluate");
-    return notop::evaluate_not(inputs[0], outputs[0], shape_size(get_output_shape(0)));
-}
index e951601..78be12a 100644 (file)
@@ -297,6 +297,7 @@ set(MULTI_TEST_SRC
     backend/interpolate.in.cpp
     backend/log.in.cpp
     backend/log_softmax.in.cpp
+    backend/logical_not.in.cpp
     backend/logical_or.in.cpp
     backend/logical_xor.in.cpp
     backend/lrn.in.cpp
@@ -309,7 +310,6 @@ set(MULTI_TEST_SRC
     backend/negative.in.cpp
     backend/node_name.in.cpp
     backend/normalize_l2.in.cpp
-    backend/not.in.cpp
     backend/non_zero.in.cpp
     backend/numeric.in.cpp
     backend/one_hot.in.cpp
similarity index 92%
rename from ngraph/test/backend/not.in.cpp
rename to ngraph/test/backend/logical_not.in.cpp
index 7816176..c59654b 100644 (file)
@@ -49,7 +49,7 @@ NGRAPH_TEST(${BACKEND_NAME}, not)
 {
     Shape shape{2, 2};
     auto A = make_shared<op::Parameter>(element::boolean, shape);
-    auto f = make_shared<Function>(make_shared<op::Not>(A), ParameterVector{A});
+    auto f = make_shared<Function>(make_shared<op::v1::LogicalNot>(A), ParameterVector{A});
 
     std::vector<char> a{1, 0, 1, 0};
 
@@ -63,7 +63,7 @@ NGRAPH_TEST(${BACKEND_NAME}, not_i32)
 {
     Shape shape{2, 2};
     auto A = make_shared<op::Parameter>(element::i32, shape);
-    auto f = make_shared<Function>(make_shared<op::Not>(A), ParameterVector{A});
+    auto f = make_shared<Function>(make_shared<op::v1::LogicalNot>(A), ParameterVector{A});
 
     std::vector<int32_t> a{1, 0, 2, 0};
 
index 5cb82c1..dce14e9 100644 (file)
@@ -194,27 +194,6 @@ NGRAPH_TEST(${BACKEND_NAME}, zero_sized_negative)
     make_unary_empty_test<op::Negative>("${BACKEND_NAME}");
 }
 
-NGRAPH_TEST(${BACKEND_NAME}, zero_sized_not)
-{
-    Shape shape{0};
-    auto A = make_shared<op::Parameter>(element::from<char>(), shape);
-    auto f = make_shared<Function>(make_shared<op::Not>(A), ParameterVector{A});
-
-    auto backend = runtime::Backend::create("${BACKEND_NAME}");
-
-    auto a = backend->create_tensor(element::from<char>(), shape);
-    auto result = backend->create_tensor(element::from<char>(), shape);
-
-    auto handle = backend->compile(f);
-    handle->call_with_validate({result}, {a});
-
-    auto in_vec = read_vector<char>(a);
-    auto out_vec = read_vector<char>(result);
-
-    EXPECT_EQ(in_vec.size(), 0);
-    EXPECT_EQ(out_vec.size(), 0);
-}
-
 NGRAPH_TEST(${BACKEND_NAME}, zero_sized_sign)
 {
     make_unary_empty_test<op::Sign>("${BACKEND_NAME}");
index 5115516..3dedc58 100644 (file)
@@ -1444,11 +1444,11 @@ TEST(constant_folding, const_concat_axis_1_bool_type)
     ASSERT_EQ(values_expected, values_out);
 }
 
-TEST(constant_folding, const_not)
+TEST(constant_folding, const_logical_not)
 {
     auto constant =
         op::Constant::create(element::boolean, Shape{2, 3}, vector<char>{0, 1, 0, 0, 1, 1});
-    auto logical_not = make_shared<op::Not>(constant);
+    auto logical_not = make_shared<op::v1::LogicalNot>(constant);
     logical_not->set_friendly_name("test");
     auto f = make_shared<Function>(logical_not, ParameterVector{});
 
@@ -1456,7 +1456,7 @@ TEST(constant_folding, const_not)
     pass_manager.register_pass<pass::ConstantFolding>();
     pass_manager.run_passes(f);
 
-    ASSERT_EQ(count_ops_of_type<op::Not>(f), 0);
+    ASSERT_EQ(count_ops_of_type<op::v1::LogicalNot>(f), 0);
     ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
 
     auto new_const =
index a783a1f..64471ce 100644 (file)
@@ -1158,36 +1158,6 @@ TEST(eval, evaluate_tanh)
     ASSERT_FLOAT_VECTORS_EQ(input, result_val);
 }
 
-TEST(eval, evaluate_not)
-{
-    auto p = make_shared<op::Parameter>(element::boolean, Shape{2, 2});
-    auto op_not = make_shared<op::Not>(p);
-    auto fun = make_shared<Function>(OutputVector{op_not}, ParameterVector{p});
-    auto result = make_shared<HostTensor>();
-
-    ASSERT_TRUE(fun->evaluate(
-        {result}, {make_host_tensor<element::Type_t::boolean>(Shape{2, 2}, {1, 0, 1, 0})}));
-    EXPECT_EQ(result->get_element_type(), element::boolean);
-    auto result_val = read_vector<char>(result);
-    vector<char> expec{0, 1, 0, 1};
-    ASSERT_EQ(result_val, expec);
-}
-
-TEST(eval, evaluate_not_i32)
-{
-    auto p = make_shared<op::Parameter>(element::i32, Shape{2, 2});
-    auto op_not = make_shared<op::Not>(p);
-    auto fun = make_shared<Function>(OutputVector{op_not}, ParameterVector{p});
-    auto result = make_shared<HostTensor>();
-
-    ASSERT_TRUE(fun->evaluate(
-        {result}, {make_host_tensor<element::Type_t::i32>(Shape{2, 2}, {100, 0, -2, 0})}));
-    EXPECT_EQ(result->get_element_type(), element::i32);
-    auto result_val = read_vector<int32_t>(result);
-    vector<int32_t> expec{0, 1, 0, 1};
-    ASSERT_EQ(result_val, expec);
-}
-
 TEST(eval, evaluate_logical_not)
 {
     auto p = make_shared<op::Parameter>(element::boolean, Shape{2, 2});
index 7f65756..faf6c4c 100644 (file)
@@ -524,15 +524,6 @@ namespace
         EXPECT_FALSE(op::is_binary_elementwise_logical(&node));
     }
 
-    void op_is_Not()
-    {
-        op::Not node;
-        EXPECT_FALSE(op::is_unary_elementwise_arithmetic(&node));
-        EXPECT_FALSE(op::is_binary_elementwise_arithmetic(&node));
-        EXPECT_FALSE(op::is_binary_elementwise_comparison(&node));
-        EXPECT_FALSE(op::is_binary_elementwise_logical(&node));
-    }
-
     void op_is_NotEqual()
     {
         op::NotEqual node;
index 749c14f..4d62d67 100644 (file)
@@ -862,7 +862,6 @@ protected:
             break;
         }
         case OP_TYPEID::LogicalNot_v1:
-        case OP_TYPEID::Not:
         {
             size_t element_count = shape_size(node.get_output_shape(0));
             reference::logical_not(
index 5a05440..fe95306 100644 (file)
@@ -98,7 +98,6 @@ NGRAPH_OP(Minimum, ngraph::op)
 NGRAPH_OP(Multiply, ngraph::op)
 NGRAPH_OP(MVN, ngraph::op)
 NGRAPH_OP(Negative, ngraph::op)
-NGRAPH_OP(Not, ngraph::op)
 NGRAPH_OP(NotEqual, ngraph::op)
 NGRAPH_OP(Or, ngraph::op)
 NGRAPH_OP(Parameter, ngraph::op)
index b04edd1..2a1b3ec 100644 (file)
@@ -301,13 +301,6 @@ namespace opset0_downgrade
         return op_cast_binary_elementwise_node<op::v0::LessEq, op::v1::LessEqual>(node);
     }
 
-    shared_ptr<Node> op_cast(shared_ptr<op::v1::LogicalNot> node)
-    {
-        auto replacement_node = make_shared<op::v0::Not>(node->input_value(0));
-        replace_node(node, replacement_node);
-        return replacement_node;
-    }
-
     shared_ptr<Node> op_cast(shared_ptr<op::v1::LogicalOr> node)
     {
         return op_cast_binary_elementwise_node<op::v0::Or, op::v1::LogicalOr>(node);
index 1f189cb..3494662 100644 (file)
@@ -272,13 +272,6 @@ namespace opset1_upgrade
         return op_cast_binary_elementwise_node<op::v0::Multiply, op::v1::Multiply>(node);
     }
 
-    shared_ptr<Node> op_cast(shared_ptr<op::Not> node)
-    {
-        auto replacement_node = make_shared<op::v1::LogicalNot>(node->input_value(0));
-        replace_node(node, replacement_node);
-        return replacement_node;
-    }
-
     shared_ptr<Node> op_cast(shared_ptr<op::NotEqual> node)
     {
         return op_cast_binary_elementwise_node<op::v0::NotEqual, op::v1::NotEqual>(node);
index 564c2e5..d96e1a0 100644 (file)
@@ -536,9 +536,9 @@ TEST(type_prop, logic_arith_compare_partial_et)
         return std::make_shared<op::Greater>(param0, param1);
     };
 
-    auto test_not = [](element::Type et) -> std::shared_ptr<Node> {
+    auto test_logical_not = [](element::Type et) -> std::shared_ptr<Node> {
         auto param = std::make_shared<op::Parameter>(et, Shape{1, 2, 3});
-        return std::make_shared<op::Not>(param);
+        return std::make_shared<op::v1::LogicalNot>(param);
     };
 
     // Arith ops:
@@ -598,7 +598,7 @@ TEST(type_prop, logic_arith_compare_partial_et)
     // int -> !
     // boo -> boo
     // dyn -> boo
-    ASSERT_EQ(test_not(element::i32)->get_element_type(), element::i32);
-    ASSERT_EQ(test_not(element::boolean)->get_element_type(), element::boolean);
-    ASSERT_EQ(test_not(element::dynamic)->get_element_type(), element::dynamic);
+    ASSERT_EQ(test_logical_not(element::i32)->get_element_type(), element::i32);
+    ASSERT_EQ(test_logical_not(element::boolean)->get_element_type(), element::boolean);
+    ASSERT_EQ(test_logical_not(element::dynamic)->get_element_type(), element::dynamic);
 }