Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/15199
In order to call it from PyTorch, this op schema can't live in caffe2 but must be included from PyTorch.
Moving it to c10. This is not where it should be in the end (that's why there is a large TODO here),
but an intermediate hack to enable this use case and proof-of-concept.
Reviewed By: ezyang
Differential Revision: D13462124
fbshipit-source-id: 1e187b9def8ef049c91e6de947ea4a85758d711b
--- /dev/null
+#include <c10/core/opschema/layer_norm.h>
+#include <c10/core/dispatch/OpSchemaRegistration.h>
+
+C10_DEFINE_OP_SCHEMA(c10::core::opschema::LayerNorm);
#include <c10/core/Tensor.h>
#include <c10/util/Array.h>
-#include "caffe2/core/context_base.h"
-#include "caffe2/core/tensor.h"
-namespace caffe2 {
-namespace ops {
+namespace at {
+class BaseContext;
+}
+namespace c10 {
+namespace core {
+namespace opschema {
+
+// TODO This op schema should probably not live in c10 since it's not a method
+// on Tensor. It's only here as a proof-of-concept op and for LATTE team
+// to be able to call caffe2 layer norm from PyTorch.
struct LayerNorm final {
static constexpr const char* name = "LayerNorm";
int axis,
float epsilon,
Cache* cache,
- BaseContext* context);
+ at::BaseContext* context);
static constexpr size_t num_dispatch_args() {return 1;}
{"input", "output", "output_mean", "output_stddev", "axis", "epsilon", "cache", "context"}};
};
-} // namespace ops
-} // namespace caffe2
+} // namespace opschema
+} // namespace core
+} // namespace c10
-#include "caffe2/operators/experimental/c10/schemas/layer_norm.h"
-#include <c10/core/dispatch/OpSchemaRegistration.h>
+#include <c10/core/opschema/layer_norm.h>
#include "caffe2/core/operator_c10wrapper.h"
-using caffe2::CPUContext;
-
-C10_DEFINE_OP_SCHEMA(caffe2::ops::LayerNorm);
-
namespace {
struct AxisParameter final {
};
} // namespace
+
namespace caffe2 {
REGISTER_C10_OPERATOR_FOR_CAFFE2_DISPATCH_WITH_PARAMETERS(
- ops::LayerNorm,
- ops::LayerNorm::Cache,
+ c10::core::opschema::LayerNorm,
+ c10::core::opschema::LayerNorm::Cache,
C10LayerNorm_DontUseThisOpYet,
ParameterHelper<AxisParameter>,
ParameterHelper<EpsilonParameter>)
#include "caffe2/operators/layer_norm_op.h"
-#include "caffe2/operators/experimental/c10/schemas/layer_norm.h"
#include "caffe2/utils/eigen_utils.h"
+#include <c10/core/opschema/layer_norm.h>
#include <c10/core/dispatch/KernelRegistration.h>
namespace caffe2 {
const c10::C10Tensor& sig_,
int axis,
float epsilon,
- caffe2::ops::LayerNorm::Cache* cache,
+ c10::core::opschema::LayerNorm::Cache* cache,
caffe2::BaseContext* context) {
caffe2::Tensor X(X_);
caffe2::Tensor Y(Y_);
}
}
namespace c10 {
-C10_REGISTER_KERNEL(caffe2::ops::LayerNorm)
+C10_REGISTER_KERNEL(c10::core::opschema::LayerNorm)
.kernel(&layer_norm_c10<float>)
.dispatchKey(c10::DispatchKey<1>{
c10::details::TensorParameterDispatchKey{DeviceTypeId::CPU,