Enable NGRAPH_DEPRECATED (#1617)
author Ilya Churaev <ilya.churaev@intel.com>
Wed, 5 Aug 2020 08:13:05 +0000 (11:13 +0300)
committer GitHub <noreply@github.com>
Wed, 5 Aug 2020 08:13:05 +0000 (11:13 +0300)
* Enable NGRAPH_DEPRECATED

* Try to fix the Windows build

* Added NGRAPH_SUPPRESS_DEPRECATED_END for headers

* Removed tests for downgrade/upgrade passes
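
Most of the hunks below repeat one pattern: code that still declares or calls a deprecated API is wrapped in the new suppression macros so the now default-enabled deprecation pragmas do not break the build. A minimal sketch of that pattern, reusing the hypothetical function frobnicate that the comment in deprecated.hpp already mentions:

    #include "ngraph/deprecated.hpp"

    NGRAPH_DEPRECATED("Use frobnicate_v2 instead")
    inline void frobnicate() {}

    void caller()
    {
        NGRAPH_SUPPRESS_DEPRECATED_START
        frobnicate(); // deprecation warning suppressed only inside this region
        NGRAPH_SUPPRESS_DEPRECATED_END
    }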

61 files changed:
ngraph/CMakeLists.txt
ngraph/src/ngraph/builder/autobroadcast.cpp
ngraph/src/ngraph/deprecated.hpp
ngraph/src/ngraph/frontend/onnx_import/op/loop.cpp
ngraph/src/ngraph/node_output.cpp
ngraph/src/ngraph/node_output.hpp
ngraph/src/ngraph/op/get_output_element.cpp
ngraph/src/ngraph/op/util/rnn_cell_base.cpp
ngraph/src/ngraph/pass/constant_folding_arithmetic_reduction.cpp
ngraph/src/ngraph/pass/constant_folding_convert.cpp
ngraph/src/ngraph/pass/constant_folding_dequantize.cpp
ngraph/src/ngraph/pass/constant_folding_gather.cpp
ngraph/src/ngraph/pass/constant_folding_logical_reduction.cpp
ngraph/src/ngraph/pass/constant_folding_one_hot.cpp
ngraph/src/ngraph/pass/constant_folding_pad.cpp
ngraph/src/ngraph/pass/constant_folding_quantize.cpp
ngraph/src/ngraph/pass/constant_folding_reverse.cpp
ngraph/src/ngraph/pass/constant_folding_scatter.cpp
ngraph/src/ngraph/pass/constant_folding_select.cpp
ngraph/src/ngraph/pass/constant_folding_slice.cpp
ngraph/src/ngraph/pass/constant_folding_split.cpp
ngraph/src/ngraph/pass/constant_folding_variadic_split.cpp
ngraph/src/ngraph/pass/convert_fp32_to_fp16.cpp
ngraph/src/ngraph/pass/get_output_element_elimination.hpp
ngraph/src/ngraph/pass/graph_rewrite.cpp
ngraph/src/ngraph/pass/graph_rewrite.hpp
ngraph/src/ngraph/pass/manager.cpp
ngraph/src/ngraph/pass/pass.cpp
ngraph/src/ngraph/pass/pass.hpp
ngraph/src/ngraph/pattern/matcher.hpp
ngraph/src/ngraph/pattern/op/any_of.hpp
ngraph/test/CMakeLists.txt
ngraph/test/onnx/onnx_import_provenance.in.cpp
ngraph/test/opset_pass/binary_elementwise_opset_pass.cpp [deleted file]
ngraph/test/opset_pass/broadcast_opset_pass.cpp [deleted file]
ngraph/test/opset_pass/convolution_opset_pass.cpp [deleted file]
ngraph/test/opset_pass/gather_opset_pass.cpp [deleted file]
ngraph/test/opset_pass/logical_not_opset_pass.cpp [deleted file]
ngraph/test/opset_pass/logical_or_opset_pass.cpp [deleted file]
ngraph/test/opset_pass/logical_xor_opset_pass.cpp [deleted file]
ngraph/test/opset_pass/one_hot_opset_pass.cpp [deleted file]
ngraph/test/opset_pass/pad_opset_pass.cpp [deleted file]
ngraph/test/opset_pass/reduction_opset_pass.cpp [deleted file]
ngraph/test/opset_pass/reverse_opset_pass.cpp [deleted file]
ngraph/test/opset_pass/select_opset_pass.cpp [deleted file]
ngraph/test/opset_pass/slice_opset_pass.cpp [deleted file]
ngraph/test/opset_pass/softmax_opset_pass.cpp [deleted file]
ngraph/test/opset_pass/topk_opset_pass.cpp [deleted file]
ngraph/test/opset_pass/transpose_opset_pass.cpp [deleted file]
ngraph/test/pattern.cpp
ngraph/test/provenance.cpp
ngraph/test/runtime/ie/ie_executable.cpp
ngraph/test/runtime/interpreter/int_executable.cpp
ngraph/test/runtime/opset0_downgrade.hpp
ngraph/test/runtime/opset1_downgrade.hpp
ngraph/test/runtime/opset1_upgrade.hpp
ngraph/test/runtime/pass/dyn_elimination.cpp
ngraph/test/runtime/pass/fused_op_decomposition.cpp
ngraph/test/runtime/pass/fused_op_decomposition.hpp
ngraph/test/runtime/pass/implicit_broadcast_elimination.hpp
ngraph/test/util/engine/ie_engines.cpp

index a2af18d..c8cee9b 100644 (file)
@@ -114,7 +114,7 @@ option(NGRAPH_UNIT_TEST_ENABLE "Control the building of unit tests" TRUE)
 option(NGRAPH_TEST_UTIL_ENABLE "Control the building of test utility" TRUE)
 option(NGRAPH_INTERPRETER_ENABLE "Control the building of the INTERPRETER backend" TRUE)
 option(NGRAPH_DEBUG_ENABLE "Enable output for NGRAPH_DEBUG statements" FALSE)
-option(NGRAPH_DEPRECATED_ENABLE "Enable compiler deprecation pragmas for deprecated APIs (recommended only for development use)" FALSE)
+option(NGRAPH_DEPRECATED_ENABLE "Enable compiler deprecation pragmas for deprecated APIs (recommended only for development use)" TRUE)
 option(NGRAPH_ONNX_IMPORT_ENABLE "Enable ONNX importer" FALSE)
 option(NGRAPH_CODE_COVERAGE_ENABLE "Enable code coverage data collection" FALSE)
 option(NGRAPH_LIB_VERSIONING_ENABLE "Enable shared library versioning" FALSE)
index 82bd614..b7f4dec 100644 (file)
@@ -139,7 +139,9 @@ namespace ngraph
                                                      const Shape& output_shape,
                                                      const Shape& source_shape)
         {
+            NGRAPH_SUPPRESS_DEPRECATED_START
             shared_ptr<Node> broadcasted_node = value.as_single_output_node();
+            NGRAPH_SUPPRESS_DEPRECATED_END
             // If node already has the required shape, return original node
             if (output_shape == value.get_shape())
             {
@@ -200,7 +202,9 @@ namespace ngraph
             // If node already has the required shape, return original node
             if (output_shape == value_shape)
             {
+                NGRAPH_SUPPRESS_DEPRECATED_START
                 return value.as_single_output_node();
+                NGRAPH_SUPPRESS_DEPRECATED_END
             }
 
             if (axis == -1)
@@ -249,8 +253,10 @@ namespace ngraph
             // Handle the trivial case...
             if (arg1_in_shape == arg2_in_shape)
             {
+                NGRAPH_SUPPRESS_DEPRECATED_START
                 return make_pair(args.first.as_single_output_node(),
                                  args.second.as_single_output_node());
+                NGRAPH_SUPPRESS_DEPRECATED_END
             }
 
             NodeVector bcasted_outputs =
index 19bc3ae..b2694d2 100644 (file)
 // deprecation attribute supported by the compiler, so any use of `frobnicate` will produce a
 // compiler warning. Otherwise, `NGRAPH_DEPRECATED` has no effect.
 //
-// TODO(amprocte): Test to make sure if this works in compilers other than clang. (Should be no
-// problem for the moment if it doesn't, since it defaults to `OFF` and we can investigate later
-// ways to implement this in other compilers.)
-//
 #ifdef NGRAPH_DEPRECATED_ENABLE
+#if defined(_WIN32)
+#define NGRAPH_DEPRECATED(msg) __declspec(deprecated(msg))
+#elif defined __INTEL_COMPILER
+#define NGRAPH_DEPRECATED(msg) __attribute__((deprecated(msg)))
+#elif defined(__GNUC__)
 #define NGRAPH_DEPRECATED(msg) __attribute__((deprecated((msg))))
+#else
+#define NGRAPH_DEPRECATED(msg)
+#endif
 #define NGRAPH_DEPRECATED_DOC /// \deprecated
 #else
 #define NGRAPH_DEPRECATED(msg)
 #define NGRAPH_DEPRECATED_DOC
 #endif
+
+// Suppress warning "-Wdeprecated-declarations" / C4996
+#if defined(_MSC_VER)
+#define NGRAPH_DO_PRAGMA(x) __pragma(x)
+#elif defined(__GNUC__)
+#define NGRAPH_DO_PRAGMA(x) _Pragma(#x)
+#else
+#define NGRAPH_DO_PRAGMA(x)
+#endif
+
+#if defined(_MSC_VER) && !defined(__clang__)
+#define NGRAPH_SUPPRESS_DEPRECATED_START                                                           \
+    NGRAPH_DO_PRAGMA(warning(push))                                                                \
+    NGRAPH_DO_PRAGMA(warning(disable : 4996))
+#define NGRAPH_SUPPRESS_DEPRECATED_END NGRAPH_DO_PRAGMA(warning(pop))
+#elif defined(__INTEL_COMPILER)
+#define NGRAPH_SUPPRESS_DEPRECATED_START                                                           \
+    NGRAPH_DO_PRAGMA(warning(push))                                                                \
+    NGRAPH_DO_PRAGMA(warning(disable : 1478))
+NGRAPH_DO_PRAGMA(warning(disable : 1786))
+#define NGRAPH_SUPPRESS_DEPRECATED_END NGRAPH_DO_PRAGMA(warning(pop))
+#elif defined(__clang__) || ((__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ > 405))
+#define NGRAPH_SUPPRESS_DEPRECATED_START                                                           \
+    NGRAPH_DO_PRAGMA(GCC diagnostic push)                                                          \
+    NGRAPH_DO_PRAGMA(GCC diagnostic ignored "-Wdeprecated-declarations")
+#define NGRAPH_SUPPRESS_DEPRECATED_END NGRAPH_DO_PRAGMA(GCC diagnostic pop)
+#else
+#define NGRAPH_SUPPRESS_DEPRECATED_START
+#define NGRAPH_SUPPRESS_DEPRECATED_END
+#endif
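
For orientation, a sketch of what the suppression pair above roughly expands to on GCC and clang builds; MSVC uses warning(push)/warning(disable : 4996)/warning(pop) instead, and ICC disables diagnostics 1478/1786:

    // Approximate expansion of NGRAPH_SUPPRESS_DEPRECATED_START ... _END on GCC/clang
    #pragma GCC diagnostic push
    #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
    //   ... code that touches a NGRAPH_DEPRECATED entity ...
    #pragma GCC diagnostic pop
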
index 80b3f87..f44c7e0 100644 (file)
@@ -193,11 +193,15 @@ namespace ngraph
                     OutputVector node_outputs;
                     for (const auto& v : final_values)
                     {
+                        NGRAPH_SUPPRESS_DEPRECATED_START
                         node_outputs.push_back(v.as_single_output_node());
+                        NGRAPH_SUPPRESS_DEPRECATED_END
                     }
                     for (const auto& v : scan_outputs)
                     {
+                        NGRAPH_SUPPRESS_DEPRECATED_START
                         node_outputs.push_back(v.as_single_output_node());
+                        NGRAPH_SUPPRESS_DEPRECATED_END
                     }
                     return node_outputs;
                 }
index 23d32c5..5f2e3f4 100644 (file)
@@ -25,14 +25,18 @@ namespace ngraph
         : m_node(node->shared_from_this())
         , m_index(index)
     {
+        NGRAPH_SUPPRESS_DEPRECATED_START
         eliminate_goe();
+        NGRAPH_SUPPRESS_DEPRECATED_END
     }
 
     Output<Node>::Output(const std::shared_ptr<Node>& node, size_t index)
         : m_node(node)
         , m_index(index)
     {
+        NGRAPH_SUPPRESS_DEPRECATED_START
         eliminate_goe();
+        NGRAPH_SUPPRESS_DEPRECATED_END
     }
 
     void Output<Node>::reset()
index af32517..09605ba 100644 (file)
@@ -39,7 +39,8 @@ namespace ngraph
     template <>
     class NGRAPH_API Output<Node>
     {
-        void eliminate_goe() NGRAPH_DEPRECATED("Remove when GetOrderedOutput is removed");
+        NGRAPH_DEPRECATED("Remove when GetOrderedOutput is removed")
+        void eliminate_goe();
 
     public:
         /// \brief Constructs a Output.
@@ -77,7 +78,8 @@ namespace ngraph
         std::shared_ptr<Node> get_node_shared_ptr() const;
         /// \return A useable shared pointer to this output. If index 0, the node,
         /// otherwise find or create a GOE.
-        std::shared_ptr<Node> as_single_output_node() const NGRAPH_DEPRECATED("Transitional.");
+        NGRAPH_DEPRECATED("Transitional.")
+        std::shared_ptr<Node> as_single_output_node() const;
 
         /// \return The index of the output referred to by this output handle.
         size_t get_index() const;
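
Moving NGRAPH_DEPRECATED in front of the declaration is what makes this compile on MSVC: __declspec(deprecated(...)) must precede the entity it annotates, while the GNU __attribute__ form also tolerates the trailing position the old code used. A minimal sketch of the portable placement (Widget is a made-up type, not part of nGraph):

    #include <memory>
    #include "ngraph/deprecated.hpp"

    class Widget
    {
    public:
        // Leading position works for both __declspec(deprecated(...)) on MSVC
        // and __attribute__((deprecated(...))) on GCC/clang; the old trailing
        // placement only compiled with the GNU attribute.
        NGRAPH_DEPRECATED("Transitional.")
        std::shared_ptr<Widget> as_single_output_node() const;
    };
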
index a89e425..9cd3091 100644 (file)
@@ -59,7 +59,9 @@ NodeVector op::get_output_elements(const shared_ptr<Node>& mon)
     NodeVector goes(mon->get_output_size());
     for (auto o : mon->outputs())
     {
+        NGRAPH_SUPPRESS_DEPRECATED_START
         goes.at(o.get_index()) = o.as_single_output_node();
+        NGRAPH_SUPPRESS_DEPRECATED_END
     }
     return goes;
 }
index 276187f..b619450 100644 (file)
@@ -111,7 +111,9 @@ shared_ptr<Node> op::util::RNNCellBase::clip(const Output<Node>& data) const
 {
     if (m_clip == 0.f)
     {
+        NGRAPH_SUPPRESS_DEPRECATED_START
         return data.as_single_output_node();
+        NGRAPH_SUPPRESS_DEPRECATED_END
     }
 
     return make_shared<op::Clamp>(data, -m_clip, m_clip);
index ddf4cee..610db1a 100644 (file)
@@ -205,7 +205,9 @@ void pass::ConstantFolding::construct_constant_arithmetic_reduction()
 
     auto arithmetic_reduction_matcher =
         make_shared<pattern::Matcher>(reduction, "ConstantFolding.ConstantArithmeticReduction");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(arithmetic_reduction_matcher,
                       constant_arithmetic_reduction_callback,
                       PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index f10b2f0..f829142 100644 (file)
@@ -180,6 +180,8 @@ void pass::ConstantFolding::construct_constant_convert()
 
     auto convert_matcher =
         make_shared<pattern::Matcher>(convert_op, "ConstantFolding.ConstantConvert");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(
         convert_matcher, constant_convert_callback, PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index 4d6c75f..d571f97 100644 (file)
@@ -94,6 +94,8 @@ void pass::ConstantFolding::construct_constant_dequantize()
 
     auto dequantize_matcher =
         make_shared<pattern::Matcher>(dequant, "ConstantFolding.ConstantDequantize");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(
         dequantize_matcher, constant_dequantize_callback, PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index 604b62e..2d154ce 100644 (file)
@@ -82,6 +82,8 @@ void pass::ConstantFolding::construct_constant_gather_with_subgraph()
 
     auto gather_matcher_v1 = make_shared<pattern::Matcher>(
         gather_v1, "ConstantFolding.ConstantGatherV1WithDynamicSubgraph");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(
         gather_matcher_v1, concat_gather_callback, PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index 9e52806..26cd609 100644 (file)
@@ -101,7 +101,9 @@ void pass::ConstantFolding::construct_constant_logical_reduction()
 
     auto logical_reduction_matcher =
         make_shared<pattern::Matcher>(reduction, "ConstantFolding.ConstantLogicalReduction");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(logical_reduction_matcher,
                       constant_logical_reduction_callback,
                       PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index 35c57ee..fba249f 100644 (file)
@@ -201,5 +201,7 @@ void pass::ConstantFolding::construct_constant_one_hot()
     };
     auto one_hot_matcher =
         make_shared<pattern::Matcher>(ont_hot_pattern, "ConstantFolding.ConstantOneHot");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(one_hot_matcher, one_hot_callback, PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index 182c683..c534ffc 100644 (file)
@@ -149,5 +149,7 @@ void pass::ConstantFolding::construct_constant_pad()
     };
 
     auto pad_matcher = make_shared<pattern::Matcher>(pad, "ConstantFolding.ConstantPad");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(pad_matcher, constant_pad_callback, PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index 5c00107..54ba538 100644 (file)
@@ -96,6 +96,8 @@ void pass::ConstantFolding::construct_constant_quantize()
 
     auto quantize_matcher =
         make_shared<pattern::Matcher>(quant, "ConstantFolding.ConstantQuantize");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(
         quantize_matcher, constant_quantize_callback, PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index 1695e4c..7fc0dc3 100644 (file)
@@ -111,6 +111,8 @@ void pass::ConstantFolding::construct_constant_reverse()
 
     auto convert_matcher =
         make_shared<pattern::Matcher>(convert_op, "ConstantFolding.ConstantReverse");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(
         convert_matcher, constant_reverse_callback, PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index 6bc112d..e117619 100644 (file)
@@ -263,7 +263,9 @@ void pass::ConstantFolding::construct_constant_scatter_elements_update()
 
     auto scatter_elem_updt_matcher = make_shared<pattern::Matcher>(
         scatter_elem_updt, "ConstantFolding.ConstantScatterElementsUpdateV3");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(scatter_elem_updt_matcher,
                       constant_scatter_elem_updt_callback,
                       PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index 41cacfa..a6ebc69 100644 (file)
@@ -137,6 +137,7 @@ void pass::ConstantFolding::construct_constant_select()
         return true;
     };
 
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(
         make_shared<pattern::Matcher>(select_v0_op, "ConstantFolding.ConstantSelectV0"),
         constant_select_callback,
@@ -145,4 +146,5 @@ void pass::ConstantFolding::construct_constant_select()
         make_shared<pattern::Matcher>(select_v1_op, "ConstantFolding.ConstantSelectV1"),
         constant_select_callback,
         PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index fa2d78c..a529778 100644 (file)
@@ -117,5 +117,7 @@ void pass::ConstantFolding::construct_constant_slice()
     };
 
     auto slice_matcher = make_shared<pattern::Matcher>(slice_op, "ConstantFolding.ConstantSlice");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(slice_matcher, constant_slice_callback, PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index 275eef4..8c40634 100644 (file)
@@ -140,5 +140,7 @@ void pass::ConstantFolding::construct_constant_split()
     };
     auto split_matcher =
         make_shared<pattern::Matcher>(split_pattern, "ConstantFolding.ConstantSplit");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(split_matcher, constant_split_callback, PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index 5a5be23..4229253 100644 (file)
@@ -171,7 +171,9 @@ void pass::ConstantFolding::construct_constant_variadic_split()
     };
     auto variadic_split_matcher = make_shared<pattern::Matcher>(
         variadic_split_pattern, "ConstantFolding.ConstantVariadicSplit");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(variadic_split_matcher,
                       constant_variadic_split_callback,
                       PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index 569e5ad..377f750 100644 (file)
@@ -51,7 +51,9 @@ void pass::ConvertFP32ToFP16::convert_constants_precision()
     };
 
     auto m = std::make_shared<ngraph::pattern::Matcher>(constant, "ConvertFP32ToFP16");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(m, callback, PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
 
 void pass::ConvertFP32ToFP16::convert_parameters_precision()
@@ -69,5 +71,7 @@ void pass::ConvertFP32ToFP16::convert_parameters_precision()
     };
 
     auto m = std::make_shared<ngraph::pattern::Matcher>(constant, "ConvertFP32ToFP16");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     this->add_matcher(m, callback, PassProperty::CHANGE_DYNAMIC_STATE);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index bca6e4a..fa248c8 100644 (file)
@@ -26,8 +26,10 @@ namespace ngraph
     }
 }
 
+NGRAPH_SUPPRESS_DEPRECATED_START
 class NGRAPH_API ngraph::pass::GetOutputElementElimination : public NodePass
 {
 public:
     bool run_on_node(std::shared_ptr<Node> node) override;
 };
+NGRAPH_SUPPRESS_DEPRECATED_END
index 06e187a..05484da 100644 (file)
@@ -248,9 +248,11 @@ void pass::GraphRewrite::add_matcher(const shared_ptr<pattern::Matcher>& m,
 void pass::GraphRewrite::add_matcher(const shared_ptr<pattern::Matcher>& m,
                                      const graph_rewrite_callback& callback)
 {
+    NGRAPH_SUPPRESS_DEPRECATED_START
     // TODO: before deprecate this function, by default expect the
     // callback require static shape.
     add_matcher(m, callback, {PassProperty::REQUIRE_STATIC_SHAPE});
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
 
 void pass::RecurrentGraphRewrite::add_matcher(
@@ -353,4 +355,4 @@ bool ngraph::pass::MatcherPass::apply(std::shared_ptr<ngraph::Node> node)
 {
     m_new_nodes.clear();
     return m_handler(node);
-}
\ No newline at end of file
+}
index 821a8a9..4a564d7 100644 (file)
@@ -131,14 +131,14 @@ public:
         m_matchers.push_back(pass);
         return pass;
     }
-
+    NGRAPH_DEPRECATED("Use MatcherPass instead")
     void add_matcher(const std::shared_ptr<pattern::Matcher>& m,
                      const ngraph::graph_rewrite_callback& callback,
-                     const PassPropertyMask& property) NGRAPH_DEPRECATED("Use MatcherPass instead");
+                     const PassPropertyMask& property);
 
+    NGRAPH_DEPRECATED("Use MatcherPass instead")
     void add_matcher(const std::shared_ptr<pattern::Matcher>& m,
-                     const ngraph::graph_rewrite_callback& callback)
-        NGRAPH_DEPRECATED("Use MatcherPass instead");
+                     const ngraph::graph_rewrite_callback& callback);
 
     bool run_on_function(std::shared_ptr<ngraph::Function> f) override;
 
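
Both legacy add_matcher() overloads now carry the "Use MatcherPass instead" hint. A rough sketch of the suggested replacement, assuming the MatcherPass API of this revision matches later OpenVINO code (register_matcher(), ngraph::matcher_pass_callback, pattern::any_input()); the pass name MyFolding is made up:

    #include <memory>
    #include "ngraph/pass/graph_rewrite.hpp"
    #include "ngraph/pattern/matcher.hpp"

    class MyFolding : public ngraph::pass::MatcherPass
    {
    public:
        MyFolding()
        {
            auto any = ngraph::pattern::any_input();
            auto m = std::make_shared<ngraph::pattern::Matcher>(any, "MyFolding");
            ngraph::matcher_pass_callback callback = [](ngraph::pattern::Matcher& m) {
                // inspect m.get_match_root() and rewrite the graph here
                return false; // report that nothing was changed
            };
            register_matcher(m, callback);
        }
    };
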
index 61d3512..506e458 100644 (file)
@@ -66,6 +66,7 @@ void pass::Manager::run_passes(shared_ptr<Function> func, bool /* transitive */)
             pass->set_callback(m_transformation_callback);
         }
 
+        NGRAPH_SUPPRESS_DEPRECATED_START
         if (auto module_pass = dynamic_pointer_cast<ModulePass>(pass))
         {
             if (auto vt_pass = dynamic_pointer_cast<pass::VisualizeTree>(module_pass))
@@ -138,6 +139,7 @@ void pass::Manager::run_passes(shared_ptr<Function> func, bool /* transitive */)
             }
             function_changed = call_graph_pass->run_on_call_graph(func->get_ordered_ops());
         }
+        NGRAPH_SUPPRESS_DEPRECATED_END
 
         if (m_visualize)
         {
index 0d796db..39a5cd5 100644 (file)
@@ -25,6 +25,8 @@
 using namespace std;
 using namespace ngraph;
 
+NGRAPH_SUPPRESS_DEPRECATED_START
+
 pass::PassBase::PassBase()
     : m_property{all_pass_property_off}
 {
index b9bd6e6..40bead1 100644 (file)
@@ -30,25 +30,6 @@ namespace ngraph
 {
     namespace pass
     {
-        class PassBase;
-        class ModulePass;
-        class FunctionPass;
-        class NodePass NGRAPH_DEPRECATED("Use MatcherPass or FunctionPass instead.");
-        class CallGraphPass NGRAPH_DEPRECATED("Use MatcherPass or FunctionPass instead.");
-        class Manager;
-        enum class FusionType : uint32_t
-        {
-            //`DIFFERENTIABLE_FUSIONS` produce ops that support autodiff
-            // i.e. implement `generate_adjoints`
-            DIFFERENTIABLE_FUSIONS = 0x1,
-            REGULAR_FUSIONS = 0x2,
-            //`FOP_FUSIONS` produce ops in the FusedOps category that might
-            // not be supported by all backends
-            FOP_FUSIONS = 0x4,
-            ALL_FUSIONS = 0xFFFFFFFF
-        };
-        typedef EnumMask<FusionType> FusionTypeMask;
-
         enum class PassProperty : uint32_t
         {
             // Pass requires node shapes to be static
@@ -57,77 +38,83 @@ namespace ngraph
             CHANGE_DYNAMIC_STATE = 1 << 1,
         };
 
-        using param_callback = std::function<bool(const std::shared_ptr<const ::ngraph::Node>)>;
-    }
-}
-
-template class NGRAPH_API ngraph::EnumMask<ngraph::pass::PassProperty>;
-
-namespace ngraph
-{
-    namespace pass
-    {
         typedef EnumMask<PassProperty> PassPropertyMask;
         const PassPropertyMask all_pass_property_off;
-    }
-}
+        using param_callback = std::function<bool(const std::shared_ptr<const ::ngraph::Node>)>;
 
-class NGRAPH_API ngraph::pass::PassBase
-{
-    friend class Manager;
+        class NGRAPH_API PassBase
+        {
+            friend class Manager;
 
-public:
-    PassBase();
-    virtual ~PassBase() {}
-    /// Check if this pass has all the pass properties.
-    bool get_property(const PassPropertyMask& prop_mask) const;
+        public:
+            PassBase();
+            virtual ~PassBase() {}
+            /// Check if this pass has all the pass properties.
+            bool get_property(const PassPropertyMask& prop_mask) const;
 
-    void set_name(const std::string& name) { m_name = name; }
-    std::string get_name() const;
+            void set_name(const std::string& name) { m_name = name; }
+            std::string get_name() const;
 
-    void set_callback(const param_callback& callback);
+            void set_callback(const param_callback& callback);
 
-protected:
-    ManagerState& get_state();
-    void set_state(ManagerState&);
-    void set_property(const PassPropertyMask& prop, bool value);
+        protected:
+            ManagerState& get_state();
+            void set_state(ManagerState&);
+            void set_property(const PassPropertyMask& prop, bool value);
 
-    param_callback m_transformation_callback = [](const std::shared_ptr<const Node>&) -> bool {
-        return false;
-    };
-    bool m_has_default_callback = true;
+            param_callback m_transformation_callback =
+                [](const std::shared_ptr<const Node>&) -> bool { return false; };
+            bool m_has_default_callback = true;
 
-private:
-    PassPropertyMask m_property;
-    ManagerState* m_state{nullptr};
-    std::string m_name;
-};
+        private:
+            PassPropertyMask m_property;
+            ManagerState* m_state{nullptr};
+            std::string m_name;
+        };
 
-class NGRAPH_API ngraph::pass::ModulePass : public PassBase
-{
-public:
-    virtual ~ModulePass();
-    virtual bool run_on_module(std::vector<std::shared_ptr<ngraph::Function>>&) = 0;
-};
+        class NGRAPH_API ModulePass : public PassBase
+        {
+        public:
+            virtual ~ModulePass();
+            virtual bool run_on_module(std::vector<std::shared_ptr<ngraph::Function>>&) = 0;
+        };
 
-class NGRAPH_API ngraph::pass::FunctionPass : public PassBase
-{
-public:
-    virtual ~FunctionPass();
-    virtual bool run_on_function(std::shared_ptr<ngraph::Function>) = 0;
-};
+        class NGRAPH_API FunctionPass : public PassBase
+        {
+        public:
+            virtual ~FunctionPass();
+            virtual bool run_on_function(std::shared_ptr<ngraph::Function>) = 0;
+        };
 
-class NGRAPH_API ngraph::pass::NodePass : public PassBase
-{
-public:
-    virtual ~NodePass();
-    virtual bool run_on_node(std::shared_ptr<ngraph::Node>) = 0;
-};
+        class NGRAPH_DEPRECATED("Use MatcherPass or FunctionPass instead.") NGRAPH_API NodePass
+            : public PassBase
+        {
+        public:
+            virtual ~NodePass();
+            virtual bool run_on_node(std::shared_ptr<ngraph::Node>) = 0;
+        };
 
-class NGRAPH_API ngraph::pass::CallGraphPass : public PassBase
-{
-public:
-    virtual ~CallGraphPass();
-    virtual bool run_on_call_graph(const std::list<std::shared_ptr<ngraph::Node>>&) = 0;
-    virtual bool run_on_call_graph(const std::vector<std::shared_ptr<ngraph::Node>>&);
-};
+        class NGRAPH_DEPRECATED("Use MatcherPass or FunctionPass instead.") NGRAPH_API CallGraphPass
+            : public PassBase
+        {
+        public:
+            virtual ~CallGraphPass();
+            virtual bool run_on_call_graph(const std::list<std::shared_ptr<ngraph::Node>>&) = 0;
+            virtual bool run_on_call_graph(const std::vector<std::shared_ptr<ngraph::Node>>&);
+        };
+
+        class Manager;
+        enum class FusionType : uint32_t
+        {
+            //`DIFFERENTIABLE_FUSIONS` produce ops that support autodiff
+            // i.e. implement `generate_adjoints`
+            DIFFERENTIABLE_FUSIONS = 0x1,
+            REGULAR_FUSIONS = 0x2,
+            //`FOP_FUSIONS` produce ops in the FusedOps category that might
+            // not be supported by all backends
+            FOP_FUSIONS = 0x4,
+            ALL_FUSIONS = 0xFFFFFFFF
+        };
+        typedef EnumMask<FusionType> FusionTypeMask;
+    }
+}
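
Because NodePass and CallGraphPass are now declared with NGRAPH_DEPRECATED, merely deriving from them warns unless the code opts out, which is exactly what the get_output_element_elimination.hpp hunk above does. A minimal sketch of that usage (MyNodePass is a made-up name):

    #include "ngraph/pass/pass.hpp"

    NGRAPH_SUPPRESS_DEPRECATED_START
    class MyNodePass : public ngraph::pass::NodePass
    {
    public:
        // Deriving from the deprecated base would otherwise warn (or fail with
        // -Werror) now that NGRAPH_DEPRECATED_ENABLE defaults to TRUE.
        bool run_on_node(std::shared_ptr<ngraph::Node> node) override;
    };
    NGRAPH_SUPPRESS_DEPRECATED_END
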
index 9ba73f8..2583f1d 100644 (file)
@@ -151,7 +151,12 @@ namespace ngraph
             OutputVector& get_matched_values() { return m_matched_list; }
             void reset() {}
             const std::string& get_name() { return m_name; }
-            std::shared_ptr<Node> get_pattern() { return m_pattern_node.as_single_output_node(); }
+            std::shared_ptr<Node> get_pattern()
+            {
+                NGRAPH_SUPPRESS_DEPRECATED_START
+                return m_pattern_node.as_single_output_node();
+                NGRAPH_SUPPRESS_DEPRECATED_END
+            }
             Output<Node> get_pattern_value() { return m_pattern_node; }
             std::shared_ptr<Node> get_match_root();
             Output<Node> get_match_value();
index 6269544..83d65cd 100644 (file)
@@ -58,7 +58,9 @@ namespace ngraph
                     : AnyOf(type,
                             s,
                             [pred](const Output<Node>& value) {
+                                NGRAPH_SUPPRESS_DEPRECATED_START
                                 return pred(value.as_single_output_node());
+                                NGRAPH_SUPPRESS_DEPRECATED_END
                             },
                             as_output_vector(wrapped_values))
                 {
index d1a54cf..f65ec78 100644 (file)
@@ -78,22 +78,6 @@ set(SRC
     op_eval/strided_slice.cpp
     op_is.cpp
     opset1.cpp
-    opset_pass/binary_elementwise_opset_pass.cpp
-    opset_pass/broadcast_opset_pass.cpp
-    opset_pass/convolution_opset_pass.cpp
-    opset_pass/logical_not_opset_pass.cpp
-    opset_pass/logical_or_opset_pass.cpp
-    opset_pass/logical_xor_opset_pass.cpp
-    opset_pass/one_hot_opset_pass.cpp
-    opset_pass/gather_opset_pass.cpp
-    opset_pass/pad_opset_pass.cpp
-    opset_pass/reduction_opset_pass.cpp
-    opset_pass/reverse_opset_pass.cpp
-    opset_pass/select_opset_pass.cpp
-    opset_pass/slice_opset_pass.cpp
-    opset_pass/softmax_opset_pass.cpp
-    opset_pass/topk_opset_pass.cpp
-    opset_pass/transpose_opset_pass.cpp
     partial_shape.cpp
     pass_liveness.cpp
     pass_manager.cpp
index aeb91be..b92fd89 100644 (file)
@@ -116,6 +116,8 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_provenance_tagging_parameters)
     test_provenance_tags<default_opset::Parameter>(function, "<ONNX Input (input_B) Shape:{}>");
 }
 
+NGRAPH_SUPPRESS_DEPRECATED_START
+
 NGRAPH_TEST(${BACKEND_NAME}, onnx_provenance_tag_downgrade_pass)
 {
     test::ProvenanceEnabler provenance_enabler;
diff --git a/ngraph/test/opset_pass/binary_elementwise_opset_pass.cpp b/ngraph/test/opset_pass/binary_elementwise_opset_pass.cpp
deleted file mode 100644 (file)
index 1035598..0000000
--- a/ngraph/test/opset_pass/binary_elementwise_opset_pass.cpp
+++ /dev/null
@@ -1,274 +0,0 @@
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "opset0_downgrade.hpp"
-#include "opset1_upgrade.hpp"
-#include "util/test_control.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-//------------------------------------------------------------------------------
-//
-//                  Helper Functions
-//
-//------------------------------------------------------------------------------
-
-template <typename OpV0, typename OpV1>
-void test_type_prop_opset0_downgrade_pass(const element::Type& output_type,
-                                          const element::Type& input_type = element::f32,
-                                          const string node_name = "")
-{
-    auto A = make_shared<op::Parameter>(input_type, Shape{1, 3, 2});
-    auto B = make_shared<op::Parameter>(input_type, Shape{1, 2});
-    const op::AutoBroadcastSpec np_auto_b = op::AutoBroadcastSpec(op::AutoBroadcastType::NUMPY);
-
-    auto v1_node = make_shared<OpV1>(A, B);
-    auto result = make_shared<op::Result>(v1_node);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{A, B});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    auto v0_result = f->get_results().at(0);
-    auto node = v0_result->input_value(0).get_node_shared_ptr();
-    auto v0_node = as_type_ptr<OpV0>(node);
-
-    ASSERT_TRUE(v0_node);
-    EXPECT_EQ(v0_node->get_autob(), np_auto_b);
-    EXPECT_EQ(v0_node->get_output_element_type(0), output_type);
-    EXPECT_EQ(v0_node->get_output_shape(0), (Shape{1, 3, 2}));
-}
-
-template <typename OpV0, typename OpV1>
-void test_opset0_arithmetic_downgrade_pass()
-{
-    test_type_prop_opset0_downgrade_pass<OpV0, OpV1>(element::f32);
-}
-
-template <typename OpV0, typename OpV1>
-void test_opset0_comparison_downgrade_pass()
-{
-    test_type_prop_opset0_downgrade_pass<OpV0, OpV1>(element::boolean);
-}
-
-template <typename OpV0, typename OpV1>
-void test_type_prop_opset1_upgrade_pass(const element::Type& output_type,
-                                        const element::Type& input_type = element::f32,
-                                        const string node_name = "")
-{
-    auto A = make_shared<op::Parameter>(input_type, Shape{1, 3, 2});
-    auto B = make_shared<op::Parameter>(input_type, Shape{1, 3, 2});
-    const op::AutoBroadcastSpec none_auto_b = op::AutoBroadcastSpec(op::AutoBroadcastType::NONE);
-
-    auto v0_node = make_shared<OpV0>(A, B);
-    auto result = make_shared<op::Result>(v0_node);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{A, B});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    auto v1_result = f->get_results().at(0);
-    auto node = v1_result->get_input_node_shared_ptr(0);
-    auto v1_node = as_type_ptr<OpV1>(node);
-    ASSERT_TRUE(v1_node);
-    EXPECT_EQ(v1_node->get_autob(), none_auto_b);
-    EXPECT_EQ(v1_node->get_output_element_type(0), output_type);
-    EXPECT_EQ(v1_node->get_output_shape(0), (Shape{1, 3, 2}));
-}
-
-template <typename OpV0, typename OpV1>
-void test_opset1_arithmetic_upgrade_pass()
-{
-    test_type_prop_opset1_upgrade_pass<OpV0, OpV1>(element::f32);
-}
-
-template <typename OpV0, typename OpV1>
-void test_opset1_comparison_upgrade_pass()
-{
-    test_type_prop_opset1_upgrade_pass<OpV0, OpV1>(element::boolean);
-}
-
-//------------------------------------------------------------------------------
-//
-//                  Test Cases
-//
-//------------------------------------------------------------------------------
-
-TEST(opset_transform, opset0_add_downgrade_pass)
-{
-    test_opset0_arithmetic_downgrade_pass<op::v0::Add, op::v1::Add>();
-}
-
-TEST(opset_transform, opset1_add_upgrade_pass)
-{
-    test_opset1_arithmetic_upgrade_pass<op::v0::Add, op::v1::Add>();
-}
-
-TEST(opset_transform, opset0_divide_downgrade_pass)
-{
-    auto A = make_shared<op::Parameter>(element::f32, Shape{1, 3, 2});
-    auto B = make_shared<op::Parameter>(element::f32, Shape{1, 2});
-    const op::AutoBroadcastSpec np_auto_b = op::AutoBroadcastSpec(op::AutoBroadcastType::NUMPY);
-    const bool pydiv = false;
-
-    auto divide_v1 = make_shared<op::v1::Divide>(A, B);
-    divide_v1->set_is_pythondiv(pydiv);
-    auto result = make_shared<op::Result>(divide_v1);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{A, B});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    auto divide_v0_result = f->get_results().at(0);
-    auto node = divide_v0_result->get_input_node_shared_ptr(0);
-    auto divide_v0_node = as_type_ptr<op::v0::Divide>(node);
-    ASSERT_TRUE(divide_v0_node);
-    EXPECT_EQ(divide_v0_node->is_pythondiv(), pydiv);
-    EXPECT_EQ(divide_v0_node->get_autob(), np_auto_b);
-    EXPECT_EQ(divide_v0_node->get_output_element_type(0), element::f32);
-    EXPECT_EQ(divide_v0_node->get_output_shape(0), (Shape{1, 3, 2}));
-}
-
-TEST(opset_transform, opset1_divide_upgrade_pass)
-{
-    auto A = make_shared<op::Parameter>(element::f32, Shape{1, 3, 2});
-    auto B = make_shared<op::Parameter>(element::f32, Shape{1, 3, 2});
-    const op::AutoBroadcastSpec none_auto_b = op::AutoBroadcastSpec(op::AutoBroadcastType::NONE);
-    const bool pydiv = false;
-
-    auto div_v0 = make_shared<op::v0::Divide>(A, B, pydiv);
-    auto result = make_shared<op::Result>(div_v0);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{A, B});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    auto divide_v1_result = f->get_results().at(0);
-    auto node = divide_v1_result->get_input_node_shared_ptr(0);
-    auto divide_v1_node = as_type_ptr<op::v1::Divide>(node);
-    ASSERT_TRUE(divide_v1_node);
-    EXPECT_EQ(divide_v1_node->is_pythondiv(), pydiv);
-    EXPECT_EQ(divide_v1_node->get_autob(), none_auto_b);
-    EXPECT_EQ(divide_v1_node->get_output_element_type(0), element::f32);
-    EXPECT_EQ(divide_v1_node->get_output_shape(0), (Shape{1, 3, 2}));
-}
-
-TEST(opset_transform, opset0_equal_downgrade_pass)
-{
-    test_opset0_comparison_downgrade_pass<op::v0::Equal, op::v1::Equal>();
-}
-
-TEST(opset_transform, opset1_equal_upgrade_pass)
-{
-    test_opset1_comparison_upgrade_pass<op::v0::Equal, op::v1::Equal>();
-}
-
-TEST(opset_transform, opset0_greater_downgrade_pass)
-{
-    test_opset0_comparison_downgrade_pass<op::v0::Greater, op::v1::Greater>();
-}
-
-TEST(opset_transform, opset1_greater_upgrade_pass)
-{
-    test_opset1_comparison_upgrade_pass<op::v0::Greater, op::v1::Greater>();
-}
-
-TEST(opset_transform, opset0_greater_eq_downgrade_pass)
-{
-    test_opset0_comparison_downgrade_pass<op::v0::GreaterEq, op::v1::GreaterEqual>();
-}
-
-TEST(opset_transform, opset1_greater_eq_upgrade_pass)
-{
-    test_opset1_comparison_upgrade_pass<op::v0::GreaterEq, op::v1::GreaterEqual>();
-}
-
-TEST(opset_transform, opset0_less_downgrade_pass)
-{
-    test_opset0_comparison_downgrade_pass<op::v0::Less, op::v1::Less>();
-}
-
-TEST(opset_transform, opset1_less_upgrade_pass)
-{
-    test_opset1_comparison_upgrade_pass<op::v0::Less, op::v1::Less>();
-}
-
-TEST(opset_transform, opset0_less_eq_downgrade_pass)
-{
-    test_type_prop_opset0_downgrade_pass<op::v0::LessEq, op::v1::LessEqual>(
-        element::boolean, element::f32, "LessEq");
-}
-
-TEST(opset_transform, opset1_less_eq_upgrade_pass)
-{
-    test_type_prop_opset1_upgrade_pass<op::v0::LessEq, op::v1::LessEqual>(
-        element::boolean, element::f32, "LessEqual");
-}
-
-TEST(opset_transform, opset0_maximum_downgrade_pass)
-{
-    test_opset0_arithmetic_downgrade_pass<op::v0::Maximum, op::v1::Maximum>();
-}
-
-TEST(opset_transform, opset1_maximum_upgrade_pass)
-{
-    test_opset1_arithmetic_upgrade_pass<op::v0::Maximum, op::v1::Maximum>();
-}
-
-TEST(opset_transform, opset0_minimum_downgrade_pass)
-{
-    test_opset0_arithmetic_downgrade_pass<op::v0::Minimum, op::v1::Minimum>();
-}
-
-TEST(opset_transform, opset1_minimum_upgrade_pass)
-{
-    test_opset1_arithmetic_upgrade_pass<op::v0::Minimum, op::v1::Minimum>();
-}
-
-TEST(opset_transform, opset0_multiply_downgrade_pass)
-{
-    test_opset0_arithmetic_downgrade_pass<op::v0::Multiply, op::v1::Multiply>();
-}
-
-TEST(opset_transform, opset1_multiply_upgrade_pass)
-{
-    test_opset1_arithmetic_upgrade_pass<op::v0::Multiply, op::v1::Multiply>();
-}
-
-TEST(opset_transform, opset0_not_equal_downgrade_pass)
-{
-    test_opset0_comparison_downgrade_pass<op::v0::NotEqual, op::v1::NotEqual>();
-}
-
-TEST(opset_transform, opset1_not_equal_upgrade_pass)
-{
-    test_opset1_comparison_upgrade_pass<op::v0::NotEqual, op::v1::NotEqual>();
-}
-
-TEST(opset_transform, opset0_power_downgrade_pass)
-{
-    test_opset0_arithmetic_downgrade_pass<op::v0::Power, op::v1::Power>();
-}
-
-TEST(opset_transform, opset1_power_upgrade_pass)
-{
-    test_opset1_arithmetic_upgrade_pass<op::v0::Power, op::v1::Power>();
-}
-
-TEST(opset_transform, opset0_subtract_downgrade_pass)
-{
-    test_opset0_arithmetic_downgrade_pass<op::v0::Subtract, op::v1::Subtract>();
-}
-
-TEST(opset_transform, opset1_subtract_upgrade_pass)
-{
-    test_opset1_arithmetic_upgrade_pass<op::v0::Subtract, op::v1::Subtract>();
-}
diff --git a/ngraph/test/opset_pass/broadcast_opset_pass.cpp b/ngraph/test/opset_pass/broadcast_opset_pass.cpp
deleted file mode 100644 (file)
index db80b44..0000000
--- a/ngraph/test/opset_pass/broadcast_opset_pass.cpp
+++ /dev/null
@@ -1,61 +0,0 @@
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/op/util/attr_types.hpp"
-#include "ngraph/op/util/op_types.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "opset0_downgrade.hpp"
-#include "opset1_upgrade.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-TEST(opset_transform, opset1_broadcast_upgrade_pass)
-{
-    auto arg = make_shared<op::Parameter>(element::f32, Shape{5, 6});
-
-    auto bcast_v0 = make_shared<op::v0::Broadcast>(arg, Shape{3, 5, 4, 6}, AxisSet{0, 2});
-    auto f = make_shared<Function>(NodeVector{bcast_v0}, ParameterVector{arg});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    auto bcast_v1 = as_type_ptr<op::v1::Broadcast>(
-        f->get_results().at(0)->input_value(0).get_node_shared_ptr());
-
-    ASSERT_TRUE(bcast_v1);
-    EXPECT_EQ(bcast_v1->get_broadcast_spec(), op::AutoBroadcastSpec());
-    EXPECT_EQ(bcast_v1->get_broadcast_axes(), (std::make_pair<bool, AxisSet>(true, AxisSet{0, 2})));
-    ASSERT_TRUE(op::is_constant(bcast_v1->input_value(1).get_node()));
-    ASSERT_TRUE(op::is_constant(bcast_v1->input_value(2).get_node()));
-    EXPECT_EQ(
-        as_type_ptr<op::Constant>(bcast_v1->input_value(1).get_node_shared_ptr())->get_shape_val(),
-        (Shape{3, 5, 4, 6}));
-    EXPECT_EQ(as_type_ptr<op::Constant>(bcast_v1->input_value(2).get_node_shared_ptr())
-                  ->get_axis_set_val(),
-              (AxisSet{1, 3}));
-}
-
-TEST(opset_transform, opset1_broadcast_downgrade_pass)
-{
-    auto arg = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3});
-    auto target_shape = op::Constant::create<int64_t>(element::i64, Shape{5}, {3, 1, 4, 2, 3});
-    auto axes_mapping = op::Constant::create<int64_t>(element::i64, Shape{3}, {1, 3, 4});
-
-    auto bcast_v1 = make_shared<op::v1::Broadcast>(arg, target_shape, axes_mapping);
-    auto f = make_shared<Function>(NodeVector{bcast_v1}, ParameterVector{arg});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    auto bcast_v0 = as_type_ptr<op::v0::Broadcast>(
-        f->get_results().at(0)->input_value(0).get_node_shared_ptr());
-
-    ASSERT_TRUE(bcast_v0);
-    EXPECT_EQ(bcast_v0->get_broadcast_shape(), (Shape{3, 1, 4, 2, 3}));
-    EXPECT_EQ(bcast_v0->get_broadcast_axes(), (AxisSet{0, 2}));
-}
diff --git a/ngraph/test/opset_pass/convolution_opset_pass.cpp b/ngraph/test/opset_pass/convolution_opset_pass.cpp
deleted file mode 100644 (file)
index 0a0b042..0000000
--- a/ngraph/test/opset_pass/convolution_opset_pass.cpp
+++ /dev/null
@@ -1,179 +0,0 @@
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "op/convolution.hpp"
-#include "op/group_conv.hpp"
-#include "opset0_downgrade.hpp"
-#include "opset1_upgrade.hpp"
-#include "util/test_control.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-TEST(opset_transform, opset1_convolution_upgrade_pass)
-{
-    auto data = make_shared<op::Parameter>(element::f32, Shape{1, 3, 6, 9});
-    auto filters = make_shared<op::Parameter>(element::f32, Shape{1, 3, 3, 3});
-    CoordinateDiff pads_begin{0, 0};
-    CoordinateDiff pads_end{0, 0};
-    Strides strides{1, 1};
-    Strides dilations{1, 1};
-    Strides data_dilations_strides{1, 1};
-    op::PadType pad_type = op::PadType::EXPLICIT;
-
-    auto convolution_v0 = make_shared<op::v0::Convolution>(
-        data, filters, strides, dilations, pads_begin, pads_end, data_dilations_strides, pad_type);
-    auto result = make_shared<op::Result>(convolution_v0);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data, filters});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    auto convolution_s1_result = f->get_results().at(0);
-    auto node = convolution_s1_result->get_input_node_shared_ptr(0);
-    auto convolution_v1_node = as_type_ptr<op::v1::Convolution>(node);
-
-    ASSERT_TRUE(convolution_v1_node);
-
-    EXPECT_EQ(convolution_v1_node->get_pads_begin(), pads_begin);
-    EXPECT_EQ(convolution_v1_node->get_pads_end(), pads_end);
-    EXPECT_EQ(convolution_v1_node->get_strides(), strides);
-    EXPECT_EQ(convolution_v1_node->get_auto_pad(), pad_type);
-    EXPECT_EQ(convolution_v1_node->get_dilations(), dilations);
-}
-
-TEST(opset_transform, opset1_convolution_downgrade_pass)
-{
-    auto data = make_shared<op::Parameter>(element::f32, Shape{1, 3, 6, 9});
-    auto filters = make_shared<op::Parameter>(element::f32, Shape{1, 3, 3, 3});
-    CoordinateDiff pads_begin{1, 1};
-    CoordinateDiff pads_end{2, 2};
-    Strides strides{1, 1};
-    Strides dilations{1, 1};
-    op::PadType pad_type = op::PadType::EXPLICIT;
-
-    auto convolution_v1 = make_shared<op::v1::Convolution>(
-        data, filters, strides, pads_begin, pads_end, dilations, pad_type);
-    auto result = make_shared<op::Result>(convolution_v1);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data, filters});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    auto conv_s0_result = f->get_results().at(0);
-    auto node = conv_s0_result->get_input_node_shared_ptr(0);
-    auto conv_v0_node = as_type_ptr<op::v0::Convolution>(node);
-
-    ASSERT_TRUE(conv_v0_node);
-    EXPECT_EQ(conv_v0_node->get_window_movement_strides(), strides);
-    EXPECT_EQ(conv_v0_node->get_window_dilation_strides(), dilations);
-    EXPECT_EQ(conv_v0_node->get_padding_below(), pads_begin);
-    EXPECT_EQ(conv_v0_node->get_padding_above(), pads_end);
-    EXPECT_EQ(conv_v0_node->get_data_dilation_strides(), (Strides{1, 1}));
-    EXPECT_EQ(conv_v0_node->get_pad_type(), pad_type);
-}
-
-TEST(opset_transform, opset1_convolution_backprop_data_downgrade_pass)
-{
-    auto data_batch_shape = op::Constant::create<int64_t>(element::i64, Shape{1}, {100});
-    auto filters = make_shared<op::Parameter>(element::f32, Shape{128, 3, 10});
-    auto delta = make_shared<op::Parameter>(element::f32, Shape{64, 128, 96});
-    auto strides = Strides{1};
-    auto dilations = Strides{1};
-    auto padding_begin = CoordinateDiff{2};
-    auto padding_end = CoordinateDiff{3};
-
-    auto conv = make_shared<op::v1::ConvolutionBackpropData>(
-        delta, filters, data_batch_shape, strides, padding_begin, padding_end, dilations);
-    auto result = make_shared<op::Result>(conv);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{filters, delta});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    auto conv_s0_result = f->get_results().at(0);
-    auto node = conv_s0_result->get_input_node_shared_ptr(0);
-    auto conv_v0_node = as_type_ptr<op::v0::ConvolutionBackpropData>(node);
-
-    ASSERT_TRUE(conv_v0_node);
-    EXPECT_EQ(conv_v0_node->get_data_batch_shape(), (Shape{64, 3, 100}));
-    EXPECT_EQ(conv_v0_node->get_window_movement_strides_forward(), strides);
-    EXPECT_EQ(conv_v0_node->get_window_dilation_strides_forward(), dilations);
-    EXPECT_EQ(conv_v0_node->get_padding_below_forward(), padding_begin);
-    EXPECT_EQ(conv_v0_node->get_padding_above_forward(), padding_end);
-    EXPECT_EQ(conv_v0_node->get_data_dilation_strides_forward(), (Strides{1}));
-}
-
-TEST(opset_transform, opset1_group_convolution_backprop_data_downgrade_pass)
-{
-    auto output_shape = op::Constant::create<int64_t>(element::i64, Shape{1}, {100});
-    auto filters = make_shared<op::Parameter>(element::f32, Shape{2, 128, 3, 10});
-    auto delta = make_shared<op::Parameter>(element::f32, Shape{64, 256, 96});
-    size_t groups = 2;
-    auto strides = Strides{1};
-    auto dilations = Strides{1};
-
-    auto padding_begin = CoordinateDiff{2};
-    auto padding_end = CoordinateDiff{3};
-
-    auto group_conv_backprop = make_shared<op::v1::GroupConvolutionBackpropData>(
-        delta, filters, output_shape, strides, padding_begin, padding_end, dilations);
-    auto result = make_shared<op::Result>(group_conv_backprop);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{filters, delta});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    auto group_conv_backprop_s0_result = f->get_results().at(0);
-    auto node = group_conv_backprop_s0_result->get_input_node_shared_ptr(0);
-    auto group_conv_backprop_v0_node = as_type_ptr<op::v0::GroupConvolutionBackpropData>(node);
-
-    ASSERT_TRUE(group_conv_backprop_v0_node);
-    EXPECT_EQ(group_conv_backprop_v0_node->get_window_movement_strides(), strides);
-    EXPECT_EQ(group_conv_backprop_v0_node->get_window_dilation_strides(), dilations);
-    EXPECT_EQ(group_conv_backprop_v0_node->get_padding_below(), padding_begin);
-    EXPECT_EQ(group_conv_backprop_v0_node->get_padding_above(), padding_end);
-    EXPECT_EQ(group_conv_backprop_v0_node->get_input_shape(1), (Shape{256, 3, 10}));
-    EXPECT_EQ(group_conv_backprop_v0_node->get_groups(), groups);
-}
-
-TEST(opset_transform, opset1_group_convolution_backprop_data_upgrade_pass)
-{
-    auto data_batch_shape = op::Constant::create<int64_t>(element::i64, Shape{64, 12, 100}, {0});
-    auto filters = make_shared<op::Parameter>(element::f32, Shape{128, 3, 10});
-    auto delta = make_shared<op::Parameter>(element::f32, Shape{64, 128, 96});
-    auto strides = Strides{1};
-    auto dilations = Strides{1};
-    auto padding_begin = CoordinateDiff{2};
-    auto padding_end = CoordinateDiff{3};
-    size_t groups = 4;
-
-    auto group_conv_backprop = make_shared<op::v0::GroupConvolutionBackpropData>(
-        data_batch_shape, filters, delta, strides, dilations, padding_begin, padding_end, groups);
-    auto result = make_shared<op::Result>(group_conv_backprop);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{filters, delta});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    auto group_conv_backprop_s1_result = f->get_results().at(0);
-    auto node = group_conv_backprop_s1_result->get_input_node_shared_ptr(0);
-    auto group_conv_backprop_v1_node = as_type_ptr<op::v1::GroupConvolutionBackpropData>(node);
-
-    ASSERT_TRUE(group_conv_backprop_v1_node);
-    EXPECT_EQ(group_conv_backprop_v1_node->get_strides(), strides);
-    EXPECT_EQ(group_conv_backprop_v1_node->get_dilations(), dilations);
-    EXPECT_EQ(group_conv_backprop_v1_node->get_pads_begin(), padding_begin);
-    EXPECT_EQ(group_conv_backprop_v1_node->get_pads_end(), padding_end);
-    EXPECT_EQ(node->get_output_shape(0), (data_batch_shape->get_shape()));
-    EXPECT_EQ(group_conv_backprop_v1_node->get_auto_pad(), op::PadType::EXPLICIT);
-    EXPECT_EQ(group_conv_backprop_v1_node->get_output_padding(), (CoordinateDiff{0}));
-}
diff --git a/ngraph/test/opset_pass/gather_opset_pass.cpp b/ngraph/test/opset_pass/gather_opset_pass.cpp
deleted file mode 100644 (file)
index 776694a..0000000
--- a/ngraph/test/opset_pass/gather_opset_pass.cpp
+++ /dev/null
@@ -1,47 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "opset1_upgrade.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-TEST(opset_transform, opset1_gather_upgrade_pass)
-{
-    auto params = make_shared<op::Parameter>(element::f32, Shape{5, 6});
-    auto indices = make_shared<op::Parameter>(element::i64, Shape{4});
-    size_t axis = 1;
-
-    auto gather_v0 = make_shared<op::v0::Gather>(params, indices, axis);
-    auto result = make_shared<op::Result>(gather_v0);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{params, indices});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    auto gather_s1_result = f->get_results().at(0);
-    auto gather_v1_node =
-        as_type_ptr<op::v1::Gather>(gather_s1_result->get_input_node_shared_ptr(0));
-    ASSERT_TRUE(gather_v1_node);
-    EXPECT_EQ(gather_v1_node->get_axis(), axis);
-}
diff --git a/ngraph/test/opset_pass/logical_not_opset_pass.cpp b/ngraph/test/opset_pass/logical_not_opset_pass.cpp
deleted file mode 100644 (file)
index 8cf8547..0000000
--- a/ngraph/test/opset_pass/logical_not_opset_pass.cpp
+++ /dev/null
@@ -1,65 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "opset0_downgrade.hpp"
-#include "opset1_upgrade.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-TEST(opset_transform, opset1_logical_not_upgrade_pass)
-{
-    const auto a = make_shared<op::Parameter>(element::boolean, Shape{5, 10, 15});
-    const auto not_v0 = make_shared<op::v0::Not>(a);
-    const auto result = make_shared<op::Result>(not_v0);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{a});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    const auto pass_replacement_node = f->get_result()->get_input_node_shared_ptr(0);
-    const auto not_v1 = as_type_ptr<op::v1::LogicalNot>(pass_replacement_node);
-    ASSERT_TRUE(not_v1);
-
-    const auto values_out_element_type = not_v1->get_output_element_type(0);
-    EXPECT_EQ(values_out_element_type, element::boolean);
-}
-
-TEST(opset_transform, opset1_logical_not_downgrade_pass)
-{
-    const auto a = make_shared<op::Parameter>(element::boolean, Shape{5, 10, 15});
-    const auto not_v1 = make_shared<op::v1::LogicalNot>(a);
-    const auto result = make_shared<op::Result>(not_v1);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{a});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    const auto pass_replacement_node = f->get_result()->get_input_node_shared_ptr(0);
-    const auto not_v0 = as_type_ptr<op::v0::Not>(pass_replacement_node);
-    ASSERT_TRUE(not_v0);
-
-    const auto values_out_element_type = not_v0->get_output_element_type(0);
-    EXPECT_EQ(values_out_element_type, element::boolean);
-}
diff --git a/ngraph/test/opset_pass/logical_or_opset_pass.cpp b/ngraph/test/opset_pass/logical_or_opset_pass.cpp
deleted file mode 100644 (file)
index 8c0904f..0000000
--- a/ngraph/test/opset_pass/logical_or_opset_pass.cpp
+++ /dev/null
@@ -1,67 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "opset0_downgrade.hpp"
-#include "opset1_upgrade.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-TEST(opset_transform, opset1_logical_or_upgrade_pass)
-{
-    const auto a = make_shared<op::Parameter>(element::boolean, Shape{5, 10, 15});
-    const auto b = make_shared<op::Parameter>(element::boolean, Shape{5, 10, 15});
-    const auto or_v0 = make_shared<op::v0::Or>(a, b);
-    const auto result = make_shared<op::Result>(or_v0);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{a, b});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    const auto pass_replacement_node = f->get_result()->get_input_node_shared_ptr(0);
-    const auto or_v1 = as_type_ptr<op::v1::LogicalOr>(pass_replacement_node);
-    ASSERT_TRUE(or_v1);
-
-    const auto values_out_element_type = or_v1->get_output_element_type(0);
-    EXPECT_EQ(values_out_element_type, element::boolean);
-}
-
-TEST(opset_transform, opset1_logical_or_downgrade_pass)
-{
-    const auto a = make_shared<op::Parameter>(element::boolean, Shape{5, 10, 15});
-    const auto b = make_shared<op::Parameter>(element::boolean, Shape{5, 10, 15});
-    const auto or_v1 = make_shared<op::v1::LogicalOr>(a, b);
-    const auto result = make_shared<op::Result>(or_v1);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{a, b});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    const auto pass_replacement_node = f->get_result()->get_input_node_shared_ptr(0);
-    const auto or_v0 = as_type_ptr<op::v0::Or>(pass_replacement_node);
-    ASSERT_TRUE(or_v0);
-
-    const auto values_out_element_type = or_v0->get_output_element_type(0);
-    EXPECT_EQ(values_out_element_type, element::boolean);
-}
diff --git a/ngraph/test/opset_pass/logical_xor_opset_pass.cpp b/ngraph/test/opset_pass/logical_xor_opset_pass.cpp
deleted file mode 100644 (file)
index 344c02e..0000000
+++ /dev/null
@@ -1,67 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "opset0_downgrade.hpp"
-#include "opset1_upgrade.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-TEST(opset_transform, opset1_logical_xor_upgrade_pass)
-{
-    const auto a = make_shared<op::Parameter>(element::boolean, Shape{5, 10, 15});
-    const auto b = make_shared<op::Parameter>(element::boolean, Shape{5, 10, 15});
-    const auto xor_v0 = make_shared<op::v0::Xor>(a, b);
-    const auto result = make_shared<op::Result>(xor_v0);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{a, b});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    const auto pass_replacement_node = f->get_result()->get_input_node_shared_ptr(0);
-    const auto xor_v1 = as_type_ptr<op::v1::LogicalXor>(pass_replacement_node);
-    ASSERT_TRUE(xor_v1);
-
-    const auto values_out_element_type = xor_v1->get_output_element_type(0);
-    EXPECT_EQ(values_out_element_type, element::boolean);
-}
-
-TEST(opset_transform, opset1_logical_xor_downgrade_pass)
-{
-    const auto a = make_shared<op::Parameter>(element::boolean, Shape{5, 10, 15});
-    const auto b = make_shared<op::Parameter>(element::boolean, Shape{5, 10, 15});
-    const auto xor_v1 = make_shared<op::v1::LogicalXor>(a, b);
-    const auto result = make_shared<op::Result>(xor_v1);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{a, b});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    const auto pass_replacement_node = f->get_result()->get_input_node_shared_ptr(0);
-    const auto xor_v0 = as_type_ptr<op::v0::Xor>(pass_replacement_node);
-    ASSERT_TRUE(xor_v0);
-
-    const auto values_out_element_type = xor_v0->get_output_element_type(0);
-    EXPECT_EQ(values_out_element_type, element::boolean);
-}
diff --git a/ngraph/test/opset_pass/one_hot_opset_pass.cpp b/ngraph/test/opset_pass/one_hot_opset_pass.cpp
deleted file mode 100644 (file)
index 3caf299..0000000
+++ /dev/null
@@ -1,128 +0,0 @@
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "opset0_downgrade.hpp"
-#include "opset1_upgrade.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-TEST(opset_transform, opset1_one_hot_upgrade_pass)
-{
-    auto indices = make_shared<op::Parameter>(element::i64, Shape{1, 3, 2, 3});
-    const auto depth = 4;
-    PartialShape shape{1, 3, 2, depth, 3};
-    size_t one_hot_axis = 3;
-    auto one_hot_v0 = make_shared<op::v0::OneHot>(indices, shape, one_hot_axis);
-
-    auto result = make_shared<op::Result>(one_hot_v0);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{indices});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    const auto pass_replacement_node = f->get_result()->get_input_node_shared_ptr(0);
-    const auto one_hot_v1 = as_type_ptr<op::v1::OneHot>(pass_replacement_node);
-    ASSERT_TRUE(one_hot_v1);
-    EXPECT_EQ(one_hot_v1->get_axis(), one_hot_axis);
-
-    auto one_hot_v1_depth =
-        as_type_ptr<op::Constant>(one_hot_v1->input_value(1).get_node_shared_ptr());
-    EXPECT_EQ(one_hot_v1_depth->get_vector<int64_t>()[0], depth);
-
-    auto one_hot_v1_on_value =
-        as_type_ptr<op::Constant>(one_hot_v1->input_value(2).get_node_shared_ptr());
-    EXPECT_EQ(one_hot_v1_on_value->get_vector<int64_t>()[0], 1);
-
-    auto one_hot_v1_off_value =
-        as_type_ptr<op::Constant>(one_hot_v1->input_value(3).get_node_shared_ptr());
-    EXPECT_EQ(one_hot_v1_off_value->get_vector<int64_t>()[0], 0);
-}
-
-TEST(opset_transform, opset1_one_hot_downgrade_pass)
-{
-    auto indices = make_shared<op::Parameter>(element::i64, Shape{1, 3, 2, 3});
-    auto depth = op::Constant::create(element::i64, Shape{}, {4});
-    auto on_value = op::Constant::create(element::u32, Shape{}, {5});
-    auto off_value = op::Constant::create(element::u32, Shape{}, {10});
-    int64_t axis = 3;
-    auto one_hot_v1 = make_shared<op::v1::OneHot>(indices, depth, on_value, off_value, axis);
-
-    auto result = make_shared<op::Result>(one_hot_v1);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{indices});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    const auto pass_replacement_node = f->get_result()->input_value(0).get_node_shared_ptr();
-    ASSERT_FALSE(is_type<op::v1::OneHot>(pass_replacement_node));
-
-    EXPECT_EQ(pass_replacement_node->get_shape(), (Shape{1, 3, 2, 4, 3}));
-}
-
-TEST(opset_transform, opset1_one_hot_downgrade_pass_depth_not_constant)
-{
-    auto indices = make_shared<op::Parameter>(element::i64, Shape{1, 3, 2, 3});
-    auto depth = make_shared<op::Parameter>(element::i64, Shape{});
-    auto on_value = op::Constant::create(element::u32, Shape{}, {5});
-    auto off_value = op::Constant::create(element::u32, Shape{}, {10});
-    int64_t axis = 3;
-    auto one_hot_v1 = make_shared<op::v1::OneHot>(indices, depth, on_value, off_value, axis);
-
-    auto result = make_shared<op::Result>(one_hot_v1);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{indices, depth});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-
-    try
-    {
-        pass_manager.run_passes(f);
-        // Should have thrown, so fail if it didn't
-        FAIL() << "Not constant depth not detected";
-    }
-    catch (const ngraph_error& error)
-    {
-        EXPECT_HAS_SUBSTRING(error.what(), std::string("depth input must be constant"));
-    }
-    catch (...)
-    {
-        FAIL() << "OneHot downgrade failed for unexpected reason";
-    }
-}
-
-TEST(opset_transform, opset1_one_hot_downgrade_pass_output_shape_not_static)
-{
-    auto indices = make_shared<op::Parameter>(element::i64, PartialShape::dynamic());
-    auto depth = op::Constant::create(element::i64, Shape{}, {4});
-    auto on_value = op::Constant::create(element::u32, Shape{}, {5});
-    auto off_value = op::Constant::create(element::u32, Shape{}, {10});
-    int64_t axis = 3;
-    auto one_hot_v1 = make_shared<op::v1::OneHot>(indices, depth, on_value, off_value, axis);
-
-    auto result = make_shared<op::Result>(one_hot_v1);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{indices});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-
-    try
-    {
-        pass_manager.run_passes(f);
-        // Should have thrown, so fail if it didn't
-        FAIL() << "Not static output shape not detected";
-    }
-    catch (const ngraph_error& error)
-    {
-        EXPECT_HAS_SUBSTRING(error.what(), std::string("output shape must be static"));
-    }
-    catch (...)
-    {
-        FAIL() << "OneHot downgrade failed for unexpected reason";
-    }
-}
diff --git a/ngraph/test/opset_pass/pad_opset_pass.cpp b/ngraph/test/opset_pass/pad_opset_pass.cpp
deleted file mode 100644 (file)
index fcbd630..0000000
+++ /dev/null
@@ -1,65 +0,0 @@
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "opset0_downgrade.hpp"
-#include "opset1_upgrade.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-TEST(opset_transform, opset1_pad_upgrade_pass)
-{
-    auto arg = make_shared<op::Parameter>(element::f32, Shape{5, 6});
-    auto arg_pad_value = make_shared<op::Parameter>(element::f32, Shape{});
-    CoordinateDiff padding_below{1, 2};
-    CoordinateDiff padding_above{3, 4};
-    auto pad_mode = op::PadMode::EDGE;
-
-    auto pad_v0 =
-        make_shared<op::v0::Pad>(arg, arg_pad_value, padding_below, padding_above, pad_mode);
-    auto result = make_shared<op::Result>(pad_v0);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{arg, arg_pad_value});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    auto pad_s1_result = f->get_results().at(0);
-    auto node = pad_s1_result->get_input_node_shared_ptr(0);
-    auto pad_v1_node = as_type_ptr<op::v1::Pad>(node);
-    ASSERT_TRUE(pad_v1_node);
-    EXPECT_EQ(pad_v1_node->get_pad_mode(), pad_mode);
-
-    EXPECT_EQ(pad_v1_node->get_pads_begin(), padding_below);
-    EXPECT_EQ(pad_v1_node->get_pads_end(), padding_above);
-}
-
-TEST(opset_transform, opset1_pad_downgrade_pass)
-{
-    auto arg = make_shared<op::Parameter>(element::f32, Shape{5, 6});
-    auto arg_pad_value = make_shared<op::Parameter>(element::f32, Shape{});
-    const auto pads_begin =
-        make_shared<op::Constant>(element::i64, Shape{2}, vector<int64_t>{1, 2});
-    const auto pads_end = make_shared<op::Constant>(element::i64, Shape{2}, vector<int64_t>{3, 4});
-    auto pad_mode = op::PadMode::EDGE;
-
-    auto pad_v1 = make_shared<op::v1::Pad>(arg, pads_begin, pads_end, arg_pad_value, pad_mode);
-    auto result = make_shared<op::Result>(pad_v1);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{arg, arg_pad_value});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    auto pad_s0_result = f->get_results().at(0);
-    auto node = pad_s0_result->get_input_node_shared_ptr(0);
-    auto pad_v0_node = as_type_ptr<op::v0::Pad>(node);
-    ASSERT_TRUE(pad_v0_node);
-    EXPECT_EQ(pad_v0_node->get_pad_mode(), pad_mode);
-
-    EXPECT_EQ(pad_v0_node->get_padding_below(), CoordinateDiff({1, 2}));
-    EXPECT_EQ(pad_v0_node->get_padding_above(), CoordinateDiff({3, 4}));
-}
diff --git a/ngraph/test/opset_pass/reduction_opset_pass.cpp b/ngraph/test/opset_pass/reduction_opset_pass.cpp
deleted file mode 100644 (file)
index dad17da..0000000
+++ /dev/null
@@ -1,297 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "opset0_downgrade.hpp"
-#include "opset1_upgrade.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-//------------------------------------------------------------------------------
-//
-//                  Helper Functions
-//
-//------------------------------------------------------------------------------
-
-template <typename OpV0, typename OpV1>
-void test_reduce_op_opset1_upgrade_pass()
-{
-    const auto data = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3});
-    const AxisSet reduction_axes{1, 2};
-
-    const auto v0_node = make_shared<OpV0>(data, reduction_axes);
-    const auto result = make_shared<op::Result>(v0_node);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    const auto pass_replacement_node = f->get_result()->input_value(0).get_node_shared_ptr();
-    const auto v1_node = as_type_ptr<OpV1>(pass_replacement_node);
-
-    ASSERT_TRUE(v1_node);
-    EXPECT_EQ(v1_node->get_keep_dims(), false);
-    EXPECT_EQ(v1_node->get_output_element_type(0), element::f32);
-    EXPECT_EQ(v1_node->get_output_shape(0), (Shape{1}));
-}
-
-template <typename OpV0, typename OpV1>
-void test_reduce_op_opset0_downgrade_pass()
-{
-    const auto data = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3});
-    const auto axes = make_shared<op::Constant>(element::i64, Shape{2}, vector<int64_t>{0, 1});
-
-    const auto v1_node = make_shared<OpV1>(data, axes, true);
-    const auto result = make_shared<op::Result>(v1_node);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    const auto reshape_replacement_node = f->get_result()->input_value(0).get_node_shared_ptr();
-    const auto reshape_node = as_type_ptr<op::Reshape>(reshape_replacement_node);
-    ASSERT_TRUE(reshape_node);
-    EXPECT_EQ(reshape_node->get_output_element_type(0), element::f32);
-    EXPECT_EQ(reshape_node->get_output_shape(0), (Shape{1, 1, 3}));
-
-    const auto op_replace_node = reshape_replacement_node->input_value(0).get_node_shared_ptr();
-    const auto v0_node = as_type_ptr<OpV0>(op_replace_node);
-    ASSERT_TRUE(v0_node);
-}
-
-template <typename OpV1>
-void test_reduce_op_opset0_downgrade_pass_axes_not_constant()
-{
-    const auto data = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3});
-    const auto axes = make_shared<op::Parameter>(element::f32, Shape{1});
-
-    const auto v1_node = make_shared<OpV1>(data, axes, true);
-    const auto result = make_shared<op::Result>(v1_node);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data, axes});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    try
-    {
-        pass_manager.run_passes(f);
-        FAIL() << "Exception after Opset0Downgrade pass was not thrown.";
-    }
-    catch (const ngraph_error& error)
-    {
-        EXPECT_HAS_SUBSTRING(error.what(),
-                             string("reduction axes are not constant (for keep_dims=true)"));
-    }
-    catch (...)
-    {
-        FAIL() << "ReduceSum pass failed for unexpected reason";
-    }
-}
-
-template <typename OpV1>
-void test_reduce_op_opset0_downgrade_pass_output_not_static()
-{
-    const auto data = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
-    const auto axes = make_shared<op::Constant>(element::i64, Shape{2}, vector<int64_t>{0, 1});
-
-    const auto v1_node = make_shared<OpV1>(data, axes, true);
-    const auto result = make_shared<op::Result>(v1_node);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    try
-    {
-        pass_manager.run_passes(f);
-        FAIL() << "Exception after Opset0Downgrade pass was not thrown.";
-    }
-    catch (const ngraph_error& error)
-    {
-        EXPECT_HAS_SUBSTRING(error.what(), string("output shape is dynamic (for keep_dims=true)"));
-    }
-    catch (...)
-    {
-        FAIL() << "ReduceSum pass failed for unexpected reason";
-    }
-}
-
-template <typename OpV1>
-void test_reduce_op_opset0_downgrade_pass_out_shape_if_keep_dims()
-{
-    auto arg = make_shared<op::Parameter>(element::f32, Shape{3, 4, 5});
-    auto axes = make_shared<op::Constant>(element::i64, Shape{2}, vector<int64_t>{1, 2});
-    auto keep_dims = true;
-    auto v1_node = make_shared<OpV1>(arg, axes, keep_dims);
-    const auto result = make_shared<op::Result>(v1_node);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{arg});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    const auto replacement_node = f->get_result()->input_value(0).get_node_shared_ptr();
-    ASSERT_TRUE(replacement_node->get_output_partial_shape(0).compatible(PartialShape{3, 1, 1}));
-}
-
-template <typename OpV1>
-void test_reduce_op_opset0_downgrade_pass_out_shape_if_not_keep_dims()
-{
-    auto arg = make_shared<op::Parameter>(element::f32, Shape{3, 4, 5});
-    auto axes = make_shared<op::Constant>(element::i64, Shape{2}, vector<int64_t>{1, 2});
-    auto keep_dims = false;
-    auto v1_node = make_shared<OpV1>(arg, axes, keep_dims);
-    const auto result = make_shared<op::Result>(v1_node);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{arg});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    const auto replacement_node = f->get_result()->input_value(0).get_node_shared_ptr();
-    ASSERT_TRUE(replacement_node->get_output_partial_shape(0).compatible(PartialShape{3}));
-}
-
-//------------------------------------------------------------------------------
-//
-//                  Test Cases
-//
-//------------------------------------------------------------------------------
-
-TEST(opset_transform, opset1_reduce_sum_upgrade_pass)
-{
-    test_reduce_op_opset1_upgrade_pass<op::Sum, op::v1::ReduceSum>();
-}
-
-TEST(opset_transform, opset0_reduce_sum_downgrade_pass)
-{
-    test_reduce_op_opset0_downgrade_pass<op::v0::Sum, op::v1::ReduceSum>();
-}
-
-TEST(opset_transform, opset0_reduce_sum_downgrade_pass_axes_not_constant_axes)
-{
-    test_reduce_op_opset0_downgrade_pass_axes_not_constant<op::v1::ReduceSum>();
-}
-
-TEST(opset_transform, opset0_reduce_sum_downgrade_pass_output_not_static)
-{
-    test_reduce_op_opset0_downgrade_pass_output_not_static<op::v1::ReduceSum>();
-}
-
-TEST(opset_transform, opset0_reduce_sum_downgrade_pass_out_shape_if_keep_dims)
-{
-    test_reduce_op_opset0_downgrade_pass_out_shape_if_keep_dims<op::v1::ReduceSum>();
-}
-
-TEST(opset_transform, opset0_reduce_sum_downgrade_pass_out_shape_if_not_keep_dims)
-{
-    test_reduce_op_opset0_downgrade_pass_out_shape_if_not_keep_dims<op::v1::ReduceSum>();
-}
-
-TEST(opset_transform, opset1_reduce_prod_upgrade_pass)
-{
-    test_reduce_op_opset1_upgrade_pass<op::Product, op::v1::ReduceProd>();
-}
-
-TEST(opset_transform, opset0_reduce_prod_downgrade_pass)
-{
-    test_reduce_op_opset0_downgrade_pass<op::v0::Product, op::v1::ReduceProd>();
-}
-
-TEST(opset_transform, opset0_reduce_prod_downgrade_pass_axes_not_constant_axes)
-{
-    test_reduce_op_opset0_downgrade_pass_axes_not_constant<op::v1::ReduceProd>();
-}
-
-TEST(opset_transform, opset0_reduce_prod_downgrade_pass_output_not_static)
-{
-    test_reduce_op_opset0_downgrade_pass_output_not_static<op::v1::ReduceProd>();
-}
-
-TEST(opset_transform, opset0_reduce_prod_downgrade_pass_out_shape_if_keep_dims)
-{
-    test_reduce_op_opset0_downgrade_pass_out_shape_if_keep_dims<op::v1::ReduceProd>();
-}
-
-TEST(opset_transform, opset0_reduce_prod_downgrade_pass_out_shape_if_not_keep_dims)
-{
-    test_reduce_op_opset0_downgrade_pass_out_shape_if_not_keep_dims<op::v1::ReduceProd>();
-}
-
-TEST(opset_transform, opset1_reduce_max_upgrade_pass)
-{
-    test_reduce_op_opset1_upgrade_pass<op::Max, op::v1::ReduceMax>();
-}
-
-TEST(opset_transform, opset0_reduce_max_downgrade_pass)
-{
-    test_reduce_op_opset0_downgrade_pass<op::v0::Max, op::v1::ReduceMax>();
-}
-
-TEST(opset_transform, opset0_reduce_max_downgrade_pass_axes_not_constant_axes)
-{
-    test_reduce_op_opset0_downgrade_pass_axes_not_constant<op::v1::ReduceMax>();
-}
-
-TEST(opset_transform, opset0_reduce_max_downgrade_pass_output_not_static)
-{
-    test_reduce_op_opset0_downgrade_pass_output_not_static<op::v1::ReduceMax>();
-}
-
-TEST(opset_transform, opset0_reduce_max_downgrade_pass_out_shape_if_keep_dims)
-{
-    test_reduce_op_opset0_downgrade_pass_out_shape_if_keep_dims<op::v1::ReduceMax>();
-}
-
-TEST(opset_transform, opset0_reduce_max_downgrade_pass_out_shape_if_not_keep_dims)
-{
-    test_reduce_op_opset0_downgrade_pass_out_shape_if_not_keep_dims<op::v1::ReduceMax>();
-}
-
-TEST(opset_transform, opset1_reduce_min_upgrade_pass)
-{
-    test_reduce_op_opset1_upgrade_pass<op::Min, op::v1::ReduceMin>();
-}
-
-TEST(opset_transform, opset0_reduce_min_downgrade_pass)
-{
-    test_reduce_op_opset0_downgrade_pass<op::v0::Min, op::v1::ReduceMin>();
-}
-
-TEST(opset_transform, opset0_reduce_min_downgrade_pass_axes_not_constant_axes)
-{
-    test_reduce_op_opset0_downgrade_pass_axes_not_constant<op::v1::ReduceMin>();
-}
-
-TEST(opset_transform, opset0_reduce_min_downgrade_pass_output_not_static)
-{
-    test_reduce_op_opset0_downgrade_pass_output_not_static<op::v1::ReduceMin>();
-}
-
-TEST(opset_transform, opset0_reduce_min_downgrade_pass_out_shape_if_keep_dims)
-{
-    test_reduce_op_opset0_downgrade_pass_out_shape_if_keep_dims<op::v1::ReduceMin>();
-}
-
-TEST(opset_transform, opset0_reduce_min_downgrade_pass_out_shape_if_not_keep_dims)
-{
-    test_reduce_op_opset0_downgrade_pass_out_shape_if_not_keep_dims<op::v1::ReduceMin>();
-}
diff --git a/ngraph/test/opset_pass/reverse_opset_pass.cpp b/ngraph/test/opset_pass/reverse_opset_pass.cpp
deleted file mode 100644 (file)
index 5581182..0000000
+++ /dev/null
@@ -1,119 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "opset0_downgrade.hpp"
-#include "opset1_upgrade.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-TEST(opset_transform, opset1_reverse_upgrade_pass)
-{
-    const auto data = make_shared<op::Parameter>(element::f32, Shape{2, 2, 2});
-    const AxisSet reverse_axes{1, 2};
-
-    const auto reverse_v0 = make_shared<op::v0::Reverse>(data, reverse_axes);
-    const auto result = make_shared<op::Result>(reverse_v0);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    const auto pass_replacement_node = f->get_result()->get_input_node_shared_ptr(0);
-    const auto reverse_v1 = as_type_ptr<op::v1::Reverse>(pass_replacement_node);
-    ASSERT_TRUE(reverse_v1);
-    EXPECT_EQ(reverse_v1->get_mode(), op::v1::Reverse::Mode::INDEX);
-
-    const auto& rev_axes_input_shape = reverse_v1->get_input_shape(1);
-    // should match the number of elements of v0::Reverse reverse_axes attribute
-    EXPECT_EQ(rev_axes_input_shape, Shape{2});
-}
-
-TEST(opset_transform, opset0_reverse_downgrade_pass_index_mode)
-{
-    const auto data = make_shared<op::Parameter>(element::f32, Shape{2, 2, 2});
-    const auto reverse_axes =
-        make_shared<op::Constant>(element::i64, Shape{2}, vector<int64_t>{1, 2});
-    auto mode = op::v1::Reverse::Mode::INDEX;
-
-    const auto reverse_v1 = make_shared<op::v1::Reverse>(data, reverse_axes, mode);
-    const auto result = make_shared<op::Result>(reverse_v1);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    const auto pass_replacement_node = f->get_result()->get_input_node_shared_ptr(0);
-    const auto reverse_v0 = as_type_ptr<op::v0::Reverse>(pass_replacement_node);
-    ASSERT_TRUE(reverse_v0);
-    EXPECT_EQ(reverse_v0->get_reversed_axes(), AxisSet({1, 2}));
-}
-
-TEST(opset_transform, opset0_reverse_downgrade_pass_mask_mode)
-{
-    const auto data = make_shared<op::Parameter>(element::f32, Shape{2, 2, 2});
-    const auto reverse_axes =
-        make_shared<op::Constant>(element::boolean, Shape{3}, vector<bool>{true, false, true});
-    auto mode = op::v1::Reverse::Mode::MASK;
-
-    const auto reverse_v1 = make_shared<op::v1::Reverse>(data, reverse_axes, mode);
-    const auto result = make_shared<op::Result>(reverse_v1);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    const auto pass_replacement_node = f->get_result()->get_input_node_shared_ptr(0);
-    const auto reverse_v0 = as_type_ptr<op::v0::Reverse>(pass_replacement_node);
-    ASSERT_TRUE(reverse_v0);
-    EXPECT_EQ(reverse_v0->get_reversed_axes(), AxisSet({0, 2}));
-}
-
-TEST(opset_transform, opset0_reverse_downgrade_pass_axes_not_constant)
-{
-    const auto data = make_shared<op::Parameter>(element::f32, Shape{2, 2, 2});
-    const auto axes = make_shared<op::Parameter>(element::boolean, Shape{3});
-
-    const auto reverse_v1 = make_shared<op::v1::Reverse>(data, axes, op::v1::Reverse::Mode::MASK);
-    const auto result = make_shared<op::Result>(reverse_v1);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data, axes});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    try
-    {
-        pass_manager.run_passes(f);
-        FAIL() << "Exception after Opset0Downgrade pass was not thrown.";
-    }
-    catch (const ngraph_error& error)
-    {
-        EXPECT_HAS_SUBSTRING(error.what(),
-                             std::string("Unable to convert Reverse:v1 to Reverse:v0"));
-    }
-    catch (...)
-    {
-        FAIL() << "Reverse:v1 pass failed for unexpected reason";
-    }
-}
diff --git a/ngraph/test/opset_pass/select_opset_pass.cpp b/ngraph/test/opset_pass/select_opset_pass.cpp
deleted file mode 100644 (file)
index c25b047..0000000
+++ /dev/null
@@ -1,59 +0,0 @@
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "opset0_downgrade.hpp"
-#include "opset1_upgrade.hpp"
-#include "util/test_control.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-TEST(opset_transform, opset0_select_downgrade_pass)
-{
-    auto cond = make_shared<op::Parameter>(element::boolean, Shape{2});
-    auto ptrue = make_shared<op::Parameter>(element::f32, Shape{4, 2});
-    auto pfalse = make_shared<op::Parameter>(element::f32, Shape{4, 2});
-
-    auto v1_node = make_shared<op::v1::Select>(cond, ptrue, pfalse);
-    auto result = make_shared<op::Result>(v1_node);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{cond, ptrue, pfalse});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    auto v0_result = f->get_results().at(0);
-    auto node = v0_result->input_value(0).get_node_shared_ptr();
-    auto v0_node = as_type_ptr<op::v0::Select>(node);
-
-    ASSERT_TRUE(v0_node);
-    EXPECT_EQ(v0_node->get_output_element_type(0), element::f32);
-    EXPECT_EQ(v0_node->get_output_shape(0), (Shape{4, 2}));
-}
-
-TEST(opset_transform, opset1_select_upgrade_pass)
-{
-    auto cond = make_shared<op::Parameter>(element::boolean, Shape{4, 2});
-    auto ptrue = make_shared<op::Parameter>(element::f32, Shape{4, 2});
-    auto pfalse = make_shared<op::Parameter>(element::f32, Shape{4, 2});
-
-    auto v0_node = make_shared<op::v0::Select>(cond, ptrue, pfalse);
-    auto result = make_shared<op::Result>(v0_node);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{cond, ptrue, pfalse});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    auto v1_result = f->get_results().at(0);
-    auto node = v1_result->input_value(0).get_node_shared_ptr();
-    auto v1_node = as_type_ptr<op::v1::Select>(node);
-
-    ASSERT_TRUE(v1_node);
-    EXPECT_EQ(v1_node->get_auto_broadcast(), op::AutoBroadcastSpec());
-    EXPECT_EQ(v1_node->get_output_element_type(0), element::f32);
-    EXPECT_EQ(v1_node->get_output_shape(0), (Shape{4, 2}));
-}
diff --git a/ngraph/test/opset_pass/slice_opset_pass.cpp b/ngraph/test/opset_pass/slice_opset_pass.cpp
deleted file mode 100644 (file)
index e554d8a..0000000
+++ /dev/null
@@ -1,116 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "opset0_downgrade.hpp"
-#include "opset1_upgrade.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-TEST(opset_transform, opset1_strided_slice_downgrade_pass)
-{
-    auto data = make_shared<op::Parameter>(element::f32, Shape{5, 7, 6, 8});
-    auto begin = op::Constant::create(element::i64, Shape{4}, {1, 2, 1, 2});
-    auto end = op::Constant::create(element::i64, Shape{4}, {3, 4, 5, 6});
-
-    auto strided_slice_v1 = make_shared<op::v1::StridedSlice>(
-        data, begin, end, vector<int64_t>{0, 0, 1, 0}, vector<int64_t>{1, 0, 0, 0});
-
-    const auto result = make_shared<op::Result>(strided_slice_v1);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    const auto pass_replacement_node = f->get_result()->get_input_node_shared_ptr(0);
-    const auto slice_v0 = as_type_ptr<op::v0::Slice>(pass_replacement_node);
-    ASSERT_TRUE(slice_v0);
-    EXPECT_EQ(slice_v0->get_lower_bounds(), Coordinate({1, 2, 0, 2}));
-    EXPECT_EQ(slice_v0->get_upper_bounds(), Coordinate({5, 4, 5, 6}));
-    EXPECT_EQ(slice_v0->get_strides(), Strides({1, 1, 1, 1}));
-}
-
-TEST(opset_transform, opset1_strided_slice_downgrade_pass_dynamic_input_shape)
-{
-    auto data = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
-    auto begin = op::Constant::create(element::i64, Shape{4}, {1, 2, 1, 2});
-    auto end = op::Constant::create(element::i64, Shape{4}, {3, 4, 5, 6});
-
-    auto strided_slice_v1 = make_shared<op::v1::StridedSlice>(
-        data, begin, end, vector<int64_t>{0, 0, 1, 0}, vector<int64_t>{1, 0, 0, 0});
-
-    const auto result = make_shared<op::Result>(strided_slice_v1);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-
-    try
-    {
-        pass_manager.run_passes(f);
-        FAIL() << "Exception after Opset0Downgrade pass was not thrown.";
-    }
-    catch (const ngraph_error& error)
-    {
-        EXPECT_HAS_SUBSTRING(
-            error.what(),
-            std::string(
-                "Unable to convert StridedSlice:v1 to Slice:v0 if input rank is not static."));
-    }
-    catch (...)
-    {
-        FAIL() << "StridedSlice pass failed for unexpected reason";
-    }
-}
-
-TEST(opset_transform, opset1_strided_slice_downgrade_pass_end_not_constant)
-{
-    auto data = make_shared<op::Parameter>(element::f32, Shape{5, 7, 6, 8});
-    auto begin = op::Constant::create(element::i64, Shape{4}, {1, 2, 1, 2});
-    auto end = make_shared<op::Parameter>(element::i64, Shape{4});
-
-    auto strided_slice_v1 = make_shared<op::v1::StridedSlice>(
-        data, begin, end, vector<int64_t>{0, 0, 1, 0}, vector<int64_t>{1, 0, 0, 0});
-
-    const auto result = make_shared<op::Result>(strided_slice_v1);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data, end});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-
-    try
-    {
-        pass_manager.run_passes(f);
-        FAIL() << "Exception after Opset0Downgrade pass was not thrown.";
-    }
-    catch (const ngraph_error& error)
-    {
-        EXPECT_HAS_SUBSTRING(error.what(),
-                             std::string("Unable to convert StridedSlice:v1 to Slice:v0 "
-                                         "if begin, end or strides are not constant"));
-    }
-    catch (...)
-    {
-        FAIL() << "StridedSlice pass failed for unexpected reason";
-    }
-}
diff --git a/ngraph/test/opset_pass/softmax_opset_pass.cpp b/ngraph/test/opset_pass/softmax_opset_pass.cpp
deleted file mode 100644 (file)
index 1d755d1..0000000
+++ /dev/null
@@ -1,75 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "opset1_upgrade.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-TEST(opset_transform, opset1_softmax_upgrade_pass_axis)
-{
-    const size_t axis = 2;
-    const AxisSet axes{axis};
-    auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 3, 4});
-    auto softmax_s0 = make_shared<op::v0::Softmax>(arg, axes);
-    auto result = make_shared<op::Result>(softmax_s0);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{arg});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    auto softmax_s1_result = f->get_results().at(0);
-    auto node = softmax_s1_result->get_input_node_shared_ptr(0);
-    auto softmax_s1_node = as_type_ptr<op::v1::Softmax>(node);
-    ASSERT_TRUE(softmax_s1_node);
-    EXPECT_EQ(softmax_s1_node->get_axis(), axis);
-}
-
-TEST(opset_transform, opset1_softmax_upgrade_pass_axis_exception)
-{
-    const AxisSet axes{1, 2};
-    auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 3, 4});
-    auto softmax_s0 = make_shared<op::v0::Softmax>(arg, axes);
-    auto result = make_shared<op::Result>(softmax_s0);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{arg});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-
-    try
-    {
-        pass_manager.run_passes(f);
-        FAIL() << "Exception after Opset1Upgrade pass was not thrown.";
-    }
-    catch (const ngraph_error& error)
-    {
-        EXPECT_HAS_SUBSTRING(
-            error.what(),
-            std::string(
-                "Unable to convert Softmax:0 to Softmax:1 with zero or more than one axis."));
-    }
-    catch (...)
-    {
-        FAIL() << "Softmax pass failed for unexpected reason";
-    }
-}
diff --git a/ngraph/test/opset_pass/topk_opset_pass.cpp b/ngraph/test/opset_pass/topk_opset_pass.cpp
deleted file mode 100644 (file)
index a2b0b79..0000000
+++ /dev/null
@@ -1,79 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "opset0_downgrade.hpp"
-#include "opset1_upgrade.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-TEST(opset_transform, opset1_topk_upgrade_pass)
-{
-    const size_t axis = 2;
-    const size_t k = 10;
-    const auto data = make_shared<op::Parameter>(element::i32, Shape{5, 10, 15});
-    const auto topk_v0 = make_shared<op::v0::TopK>(data, axis, element::i32, k);
-    const auto result = make_shared<op::Result>(topk_v0->output(0));
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset1Upgrade>();
-    pass_manager.run_passes(f);
-
-    const auto pass_replacement_node = f->get_result()->get_input_node_shared_ptr(0);
-    const auto topk_v1 = as_type_ptr<op::v1::TopK>(pass_replacement_node);
-    ASSERT_TRUE(topk_v1);
-    EXPECT_EQ(topk_v1->get_axis(), axis);
-    EXPECT_EQ(topk_v1->get_mode(), op::v1::TopK::Mode::MAX);
-    EXPECT_EQ(topk_v1->get_sort_type(), op::v1::TopK::SortType::SORT_VALUES);
-
-    const auto values_out_element_type = topk_v1->get_output_element_type(0);
-    EXPECT_EQ(values_out_element_type, data->get_element_type());
-}
-
-TEST(opset_transform, opset1_topk_downgrade_pass)
-{
-    const auto data = make_shared<op::Parameter>(element::i32, Shape{5, 10, 15});
-    const int32_t k = 10;
-    const auto k_node = op::Constant::create(element::i64, Shape{}, {k});
-    const size_t axis = 2;
-    const auto mode = op::v1::TopK::Mode::MAX;
-    const auto sort = op::v1::TopK::SortType::SORT_INDICES;
-    const auto elem_type = element::i64;
-
-    const auto topk_v1 = make_shared<op::v1::TopK>(data, k_node, axis, mode, sort, elem_type);
-    const auto result = make_shared<op::Result>(topk_v1->output(0));
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    const auto pass_replacement_node = f->get_result()->get_input_node_shared_ptr(0);
-    const auto topk_v0 = as_type_ptr<op::v0::TopK>(pass_replacement_node);
-    ASSERT_TRUE(topk_v0);
-    EXPECT_EQ(topk_v0->get_k(), k);
-    EXPECT_EQ(topk_v0->get_top_k_axis(), axis);
-    EXPECT_EQ(topk_v0->get_compute_max(), true);
-    EXPECT_EQ(topk_v0->get_sort(), op::v0::TopK::SortType::SORT_INDICES);
-    EXPECT_EQ(topk_v0->get_index_element_type(), elem_type);
-}
diff --git a/ngraph/test/opset_pass/transpose_opset_pass.cpp b/ngraph/test/opset_pass/transpose_opset_pass.cpp
deleted file mode 100644 (file)
index 47c9b73..0000000
+++ /dev/null
@@ -1,107 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "ngraph/ngraph.hpp"
-#include "ngraph/pass/manager.hpp"
-#include "opset0_downgrade.hpp"
-#include "util/type_prop.hpp"
-
-using namespace std;
-using namespace ngraph;
-
-TEST(opset_transform, opset1_transpose_downgrade_pass)
-{
-    const auto data = make_shared<op::Parameter>(element::f32, Shape{4, 5, 6, 7});
-    AxisVector order{2, 1, 3, 0};
-    const auto order_node = op::Constant::create(element::i64, Shape{order.size()}, order);
-
-    auto transpose = make_shared<op::v1::Transpose>(data, order_node);
-    auto result = make_shared<op::Result>(transpose);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-    pass_manager.run_passes(f);
-
-    auto reshape_result = f->get_results().at(0);
-    auto reshape_node = as_type_ptr<op::v0::Reshape>(reshape_result->get_input_node_shared_ptr(0));
-
-    ASSERT_TRUE(reshape_node);
-    EXPECT_EQ(reshape_node->get_input_order(), order);
-    EXPECT_EQ(reshape_node->get_output_shape(0), Shape({6, 5, 7, 4}));
-}
-
-TEST(opset_transform, opset1_transpose_downgrade_pass_data_shape_not_static)
-{
-    const auto data = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
-    AxisVector order{2, 1, 3, 0};
-    const auto order_node = op::Constant::create(element::i64, Shape{order.size()}, order);
-
-    auto transpose = make_shared<op::v1::Transpose>(data, order_node);
-    auto result = make_shared<op::Result>(transpose);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-
-    try
-    {
-        pass_manager.run_passes(f);
-        FAIL() << "Exception after Transpose Opset0Downgrade pass was not thrown.";
-    }
-    catch (const ngraph_error& error)
-    {
-        EXPECT_HAS_SUBSTRING(error.what(),
-                             std::string("Unable to convert Transpose:v1 to Reshape:v0 "
-                                         "if data shape is dynamic. Node:"));
-    }
-    catch (...)
-    {
-        FAIL() << "Transpose pass failed for unexpected reason";
-    }
-}
-
-TEST(opset_transform, opset1_transpose_downgrade_pass_order_not_constant)
-{
-    const auto data = make_shared<op::Parameter>(element::f32, Shape{4, 5, 6, 7});
-    const auto order_node = make_shared<op::Parameter>(element::i64, Shape{4});
-
-    auto transpose = make_shared<op::v1::Transpose>(data, order_node);
-    auto result = make_shared<op::Result>(transpose);
-    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data, order_node});
-
-    ngraph::pass::Manager pass_manager;
-    pass_manager.register_pass<pass::Opset0Downgrade>();
-
-    try
-    {
-        pass_manager.run_passes(f);
-        FAIL() << "Exception after Transpose Opset0Downgrade pass was not thrown.";
-    }
-    catch (const ngraph_error& error)
-    {
-        EXPECT_HAS_SUBSTRING(error.what(),
-                             std::string("Unable to convert Transpose:v1 to Reshape:v0 "
-                                         "if order node is not constant. Node:"));
-    }
-    catch (...)
-    {
-        FAIL() << "Transpose pass failed for unexpected reason";
-    }
-}
index b542775..478ce00 100644 (file)
@@ -132,7 +132,9 @@ public:
         };
 
         auto m = make_shared<TestMatcher>(pattern * iconst1);
+        NGRAPH_SUPPRESS_DEPRECATED_START
         this->add_matcher(m, callback);
+        NGRAPH_SUPPRESS_DEPRECATED_END
     }
 
     void construct_add_zero()
@@ -180,7 +182,9 @@ public:
 
         auto add = pattern + iconst0;
         auto m = make_shared<TestMatcher>(add);
+        NGRAPH_SUPPRESS_DEPRECATED_START
         this->add_matcher(m, callback);
+        NGRAPH_SUPPRESS_DEPRECATED_END
     }
 
     TestGraphRewrite()
@@ -669,7 +673,9 @@ public:
 
         std::set<std::shared_ptr<pattern::op::Label>> empty_correlated_matches;
         auto rm = make_shared<pattern::RecurrentMatcher>(padd, rpattern, empty_correlated_matches);
+        NGRAPH_SUPPRESS_DEPRECATED_START
         this->add_matcher(rm, callback);
+        NGRAPH_SUPPRESS_DEPRECATED_END
     }
 
     TestRecurrentGraphRewrite()
@@ -806,4 +812,4 @@ TEST(pattern, wrap_type)
         ASSERT_TRUE(matcher->match(static_pointer_cast<Node>(mul1)));
         ASSERT_TRUE(matcher->match(static_pointer_cast<Node>(mul2)));
     }
-}
\ No newline at end of file
+}
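The pattern.cpp hunks above all apply the same idiom: each remaining call to the deprecated GraphRewrite::add_matcher(matcher, callback) overload is bracketed by the suppression macros so the tests still build cleanly once the deprecation attributes are actually emitted. A minimal sketch of the idiom, with placeholder names rather than the real test code:

    // Sketch only: 'm' and 'callback' stand in for whatever matcher and handler
    // the surrounding pass constructs; the point is the bracketing.
    NGRAPH_SUPPRESS_DEPRECATED_START
    this->add_matcher(m, callback);   // deprecated (matcher, callback) overload
    NGRAPH_SUPPRESS_DEPRECATED_END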
index fd573f4..718f3ca 100644 (file)
@@ -36,6 +36,8 @@ using namespace std;
 using namespace ngraph;
 using ::testing::Return;
 
+NGRAPH_SUPPRESS_DEPRECATED_START
+
 using ProvSet = std::unordered_set<std::string>;
 
 TEST(provenance, provenance)
index c4858ed..bd93076 100644 (file)
@@ -26,6 +26,8 @@
 using namespace std;
 using namespace ngraph;
 
+NGRAPH_SUPPRESS_DEPRECATED_START
+
 namespace
 {
     InferenceEngine::Blob::Ptr fill_blob(InferenceEngine::SizeVector shape,
index b363a42..6a4a83a 100644 (file)
@@ -34,6 +34,8 @@
 using namespace std;
 using namespace ngraph;
 
+NGRAPH_SUPPRESS_DEPRECATED_START
+
 using descriptor::layout::DenseTensorLayout;
 
 runtime::interpreter::OP_TYPEID runtime::interpreter::INTExecutable::get_typeid(const Node& node)
index f128b0c..8b517e3 100644 (file)
@@ -19,6 +19,8 @@
 #include "backend_visibility.hpp"
 #include "ngraph/pass/pass.hpp"
 
+NGRAPH_SUPPRESS_DEPRECATED_START
+
 namespace ngraph
 {
     namespace pass
@@ -37,3 +39,5 @@ namespace ngraph
         };
     }
 }
+
+NGRAPH_SUPPRESS_DEPRECATED_END
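In the downgrade/upgrade pass headers the suppression is opened near the top and explicitly closed at the end of the file, so the relaxed diagnostics do not leak into every translation unit that includes the header. As a rough sketch of the mechanism behind such macros (hypothetical names; nGraph's real definitions also cover MSVC), they typically expand to compiler pragmas:

    // Illustrative only -- not nGraph's actual macro definitions.
    #define EXAMPLE_SUPPRESS_DEPRECATED_START                                   \
        _Pragma("GCC diagnostic push")                                          \
        _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"")
    #define EXAMPLE_SUPPRESS_DEPRECATED_END _Pragma("GCC diagnostic pop")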
index 3223018..551974b 100644 (file)
@@ -19,6 +19,8 @@
 #include "backend_visibility.hpp"
 #include "ngraph/pass/pass.hpp"
 
+NGRAPH_SUPPRESS_DEPRECATED_START
+
 namespace ngraph
 {
     namespace pass
@@ -37,3 +39,5 @@ namespace ngraph
         };
     }
 }
+
+NGRAPH_SUPPRESS_DEPRECATED_END
index 2d498fe..a4ae087 100644 (file)
@@ -19,6 +19,8 @@
 #include "backend_visibility.hpp"
 #include "ngraph/pass/pass.hpp"
 
+NGRAPH_SUPPRESS_DEPRECATED_START
+
 namespace ngraph
 {
     namespace pass
@@ -37,3 +39,5 @@ namespace ngraph
         };
     }
 }
+
+NGRAPH_SUPPRESS_DEPRECATED_END
index 952903a..5d74c44 100644 (file)
@@ -83,7 +83,9 @@ void pass::DynElimination::construct_transpose()
     };
 
     auto transpose_matcher = make_shared<pattern::Matcher>(transpose, "DynElimination.Transpose");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     add_matcher(transpose_matcher, transpose_callback, all_pass_property_off);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
 
 template <typename T>
@@ -188,5 +190,7 @@ void pass::DynElimination::construct_range()
     };
 
     auto range_matcher = make_shared<pattern::Matcher>(range_pat, "DynElimination.Range");
+    NGRAPH_SUPPRESS_DEPRECATED_START
     add_matcher(range_matcher, range_callback, all_pass_property_off);
+    NGRAPH_SUPPRESS_DEPRECATED_END
 }
index 545b1b8..b24d55a 100644 (file)
@@ -22,6 +22,8 @@
 using namespace std;
 using namespace ngraph;
 
+NGRAPH_SUPPRESS_DEPRECATED_START
+
 pass::FusedOpDecomposition::FusedOpDecomposition(op_query_t callback)
     : m_has_direct_support{callback}
 {
index e8ab6ca..ea00d84 100644 (file)
@@ -22,6 +22,7 @@
 #include "ngraph/op/util/fused_op.hpp"
 #include "ngraph/pass/pass.hpp"
 
+NGRAPH_SUPPRESS_DEPRECATED_START
 namespace ngraph
 {
     namespace pass
@@ -53,8 +54,7 @@ namespace ngraph
         /// </table>
         class BACKEND_API FusedOpDecomposition : public NodePass
         {
-        public:
-            /// \brief  Function signature type for callback used to check whether provided node
+        public: /// \brief  Function signature type for callback used to check whether provided node
             ///         is supported by backend.
             using op_query_t = std::function<bool(const Node& node)>;
 
@@ -75,3 +75,4 @@ namespace ngraph
         };
     }
 }
+NGRAPH_SUPPRESS_DEPRECATED_END
index 7c526e9..766e68b 100644 (file)
@@ -20,6 +20,7 @@
 #include "ngraph/node.hpp"
 #include "ngraph/pass/pass.hpp"
 
+NGRAPH_SUPPRESS_DEPRECATED_START
 namespace ngraph
 {
     namespace pass
@@ -34,3 +35,4 @@ class BACKEND_API ngraph::pass::ImplicitBroadcastElimination : public ngraph::pa
 public:
     bool run_on_node(std::shared_ptr<ngraph::Node> node) override;
 };
+NGRAPH_SUPPRESS_DEPRECATED_END
index 88b9fd2..38c027f 100644 (file)
@@ -23,6 +23,8 @@
 
 using namespace ngraph;
 
+NGRAPH_SUPPRESS_DEPRECATED_START
+
 namespace
 {
     /// Extracts the data from two blobs and returns them as a pair of vectors.