Revert D13854304: [redo][c10] LayerNorm Registration Example
author: Edward Yang <ezyang@fb.com>
Wed, 6 Feb 2019 16:17:55 +0000 (08:17 -0800)
committer: Facebook Github Bot <facebook-github-bot@users.noreply.github.com>
Wed, 6 Feb 2019 16:26:23 +0000 (08:26 -0800)
Differential Revision:
D13854304

Original commit changeset: ec463ce22721

fbshipit-source-id: 4262b9a2ef486e1c7c0283ea021331ac97cc5f56

caffe2/operators/layer_norm_op.cc
caffe2/operators/layer_norm_op.h
caffe2/python/operator_test/layer_norm_op_test.py
torch/csrc/jit/register_caffe2_ops.cpp

index caba44c..f452f4b 100644 (file)
@@ -181,18 +181,9 @@ to the end.)
     .Output(1, "mean", "Mean values for each feature vector")
     .Output(2, "stddev", "Standard deviations for each feature vector");
 
-DEFINE_FUNCTION_SCHEMA_OPERATOR(
-    LayerNorm,
-    (std::vector<c10::Argument>{c10::Argument("input_0"),
-                                c10::Argument("axis", IntType::get()),
-                                c10::Argument("epsilon", FloatType::get())}),
-    (std::vector<c10::Argument>{c10::Argument("output_0"),
-                                c10::Argument("output_1"),
-                                c10::Argument("output_2")}),
-    LayerNormOp<CPUContext>);
-
 } // namespace caffe2
 
+
 // Register layer norm with c10
 namespace {
 struct Cache final : public c10::KernelCache {
index 7138a12..bddb182 100644 (file)
 
 namespace caffe2 {
 
-DECLARE_FUNCTION_SCHEMA_OPERATOR(LayerNorm);
-
 template <class Context>
 class LayerNormOp final : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
 
-  template <class... Args>
-  LayerNormOp(Args&&... args)
-      : Operator<Context>(std::forward<Args>(args)...),
+  LayerNormOp(const OperatorDef& operator_def, Workspace* ws)
+      : Operator<Context>(operator_def, ws),
         OP_SINGLE_ARG(int, "axis", axis_, 1),
         OP_SINGLE_ARG(float, "epsilon", epsilon_, 1e-5f) {}
 
index fff1aa7..0332591 100644 (file)
@@ -167,21 +167,6 @@ class TestLayerNormOp(serial.SerializedTestCase):
         torch.testing.assert_allclose(expected_mean, actual_mean)
         torch.testing.assert_allclose(expected_stdev, actual_stdev)
 
-    @given(X=hu.tensors(n=1), **hu.gcs)
-    def test_layer_norm_op_pytorch_2(self, X, gc, dc):
-        X = X[0]
-        if len(X.shape) == 1:
-            X = np.expand_dims(X, axis=0)
-        axis = np.random.randint(0, len(X.shape))
-        epsilon = 1e-4
-
-        expected_norm, expected_mean, expected_stdev = _layer_norm_ref(axis, epsilon, X)
-        actual_norm, actual_mean, actual_stdev = torch.ops._caffe2.LayerNorm(torch.tensor(X), axis, epsilon)
-
-        torch.testing.assert_allclose(expected_norm, actual_norm)
-        torch.testing.assert_allclose(expected_mean, actual_mean)
-        torch.testing.assert_allclose(expected_stdev, actual_stdev)
-
     @given(X=hu.tensor(min_dim=2), **hu.gcs)
     def test_layer_norm_brew_wrapper(self, X, gc, dc):
         axis = np.random.randint(0, len(X.shape))
index 993a2b2..ea6295c 100644 (file)
@@ -1,8 +1,5 @@
 #include <jit/custom_operator.h>
-#include "caffe2/operators/layer_norm_op.h"
 
 #define REGISTER_CAFFE2_OP(name) \
   static caffe2::CAFFE2_STRUCT_OP_REGISTRATION_##name CAFFE2_STRUCT_OP_REGISTRATION_DEFN_TORCH_##name; \
   static auto CAFFE2_OP_EXPORT_##name = torch::jit::RegisterOperators::Caffe2Operator(#name);
-
-REGISTER_CAFFE2_OP(LayerNorm);