From a3f600e394956a2b82b1a46b4d09376cc972b7a1 Mon Sep 17 00:00:00 2001
From: Edward Yang
Date: Wed, 6 Feb 2019 08:17:55 -0800
Subject: [PATCH] Revert D13854304: [redo][c10] LayerNorm Registration Example

Differential Revision: D13854304

Original commit changeset: ec463ce22721

fbshipit-source-id: 4262b9a2ef486e1c7c0283ea021331ac97cc5f56
---
 caffe2/operators/layer_norm_op.cc                 | 11 +----------
 caffe2/operators/layer_norm_op.h                  |  7 ++-----
 caffe2/python/operator_test/layer_norm_op_test.py | 15 ---------------
 torch/csrc/jit/register_caffe2_ops.cpp            |  3 ---
 4 files changed, 3 insertions(+), 33 deletions(-)

diff --git a/caffe2/operators/layer_norm_op.cc b/caffe2/operators/layer_norm_op.cc
index caba44c..f452f4b 100644
--- a/caffe2/operators/layer_norm_op.cc
+++ b/caffe2/operators/layer_norm_op.cc
@@ -181,18 +181,9 @@ to the end.)
     .Output(1, "mean", "Mean values for each feature vector")
     .Output(2, "stddev", "Standard deviations for each feature vector");
 
-DEFINE_FUNCTION_SCHEMA_OPERATOR(
-    LayerNorm,
-    (std::vector<c10::Argument>{c10::Argument("input_0"),
-                                c10::Argument("axis", IntType::get()),
-                                c10::Argument("epsilon", FloatType::get())}),
-    (std::vector<c10::Argument>{c10::Argument("output_0"),
-                                c10::Argument("output_1"),
-                                c10::Argument("output_2")}),
-    LayerNormOp);
-
 } // namespace caffe2
 
+
 // Register layer norm with c10
 namespace {
 struct Cache final : public c10::KernelCache {
diff --git a/caffe2/operators/layer_norm_op.h b/caffe2/operators/layer_norm_op.h
index 7138a12..bddb182 100644
--- a/caffe2/operators/layer_norm_op.h
+++ b/caffe2/operators/layer_norm_op.h
@@ -11,16 +11,13 @@
 
 namespace caffe2 {
 
-DECLARE_FUNCTION_SCHEMA_OPERATOR(LayerNorm);
-
 template <class Context>
 class LayerNormOp final : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
 
-  template <class... Args>
-  LayerNormOp(Args&&... args)
-      : Operator<Context>(std::forward<Args>(args)...),
+  LayerNormOp(const OperatorDef& operator_def, Workspace* ws)
+      : Operator<Context>(operator_def, ws),
         OP_SINGLE_ARG(int, "axis", axis_, 1),
         OP_SINGLE_ARG(float, "epsilon", epsilon_, 1e-5f) {}
 
diff --git a/caffe2/python/operator_test/layer_norm_op_test.py b/caffe2/python/operator_test/layer_norm_op_test.py
index fff1aa7..0332591 100644
--- a/caffe2/python/operator_test/layer_norm_op_test.py
+++ b/caffe2/python/operator_test/layer_norm_op_test.py
@@ -167,21 +167,6 @@ class TestLayerNormOp(serial.SerializedTestCase):
         torch.testing.assert_allclose(expected_mean, actual_mean)
         torch.testing.assert_allclose(expected_stdev, actual_stdev)
 
-    @given(X=hu.tensors(n=1), **hu.gcs)
-    def test_layer_norm_op_pytorch_2(self, X, gc, dc):
-        X = X[0]
-        if len(X.shape) == 1:
-            X = np.expand_dims(X, axis=0)
-        axis = np.random.randint(0, len(X.shape))
-        epsilon = 1e-4
-
-        expected_norm, expected_mean, expected_stdev = _layer_norm_ref(axis, epsilon, X)
-        actual_norm, actual_mean, actual_stdev = torch.ops._caffe2.LayerNorm(torch.tensor(X), axis, epsilon)
-
-        torch.testing.assert_allclose(expected_norm, actual_norm)
-        torch.testing.assert_allclose(expected_mean, actual_mean)
-        torch.testing.assert_allclose(expected_stdev, actual_stdev)
-
     @given(X=hu.tensor(min_dim=2), **hu.gcs)
     def test_layer_norm_brew_wrapper(self, X, gc, dc):
         axis = np.random.randint(0, len(X.shape))
diff --git a/torch/csrc/jit/register_caffe2_ops.cpp b/torch/csrc/jit/register_caffe2_ops.cpp
index 993a2b2..ea6295c 100644
--- a/torch/csrc/jit/register_caffe2_ops.cpp
+++ b/torch/csrc/jit/register_caffe2_ops.cpp
@@ -1,8 +1,5 @@
 #include
-#include "caffe2/operators/layer_norm_op.h"
 #define REGISTER_CAFFE2_OP(name)                          \
   static caffe2::CAFFE2_STRUCT_OP_REGISTRATION_##name     \
       CAFFE2_STRUCT_OP_REGISTRATION_DEFN_TORCH_##name;   \
   static auto CAFFE2_OP_EXPORT_##name = torch::jit::RegisterOperators::Caffe2Operator(#name);
-
-REGISTER_CAFFE2_OP(LayerNorm);
-- 
2.7.4