Merge commit for internal changes
author     Akshay Modi <nareshmodi@google.com>
           Tue, 6 Mar 2018 22:09:03 +0000 (14:09 -0800)
committer  Akshay Modi <nareshmodi@google.com>
           Tue, 6 Mar 2018 22:09:03 +0000 (14:09 -0800)
o Fixed simple merge issue in tf/contrib/timeseries/python/timeseries/BUILD

13 files changed:
tensorflow/cc/gradients/nn_grad.cc
tensorflow/contrib/distributions/BUILD
tensorflow/contrib/eager/python/BUILD
tensorflow/contrib/timeseries/python/timeseries/BUILD
tensorflow/core/framework/dataset.h
tensorflow/python/BUILD
tensorflow/python/framework/test_util.py
tensorflow/python/keras/BUILD
tensorflow/python/ops/array_ops.py
tensorflow/python/ops/losses/losses_impl.py
tensorflow/python/ops/math_grad.py
tensorflow/python/ops/math_ops.py
tensorflow/tools/api/golden/tensorflow.pbtxt

@@@ -196,77 -182,10 +182,74 @@@ Status MaxPoolGradV2Helper(const Scope
  }
  REGISTER_GRADIENT_OP("MaxPoolV2", MaxPoolGradV2Helper);
  
 +Status MaxPool3DGradHelper(const Scope& scope, const Operation& op,
 +                           const std::vector<Output>& grad_inputs,
 +                           std::vector<Output>* grad_outputs) {
 +  std::vector<int32> ksize;
 +  std::vector<int32> strides;
 +  string padding;
 +  string data_format;
 +  auto attrs = op.output(0).node()->attrs();
 +  TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "ksize", &ksize));
 +  TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "strides", &strides));
 +  TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "padding", &padding));
 +  TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "data_format", &data_format));
 +  MaxPool3DGrad::Attrs grad_attrs;
 +  grad_attrs.DataFormat(data_format);
 +  auto dx = MaxPool3DGrad(scope, op.input(0), op.output(0), grad_inputs[0],
 +                          ksize, strides, padding, grad_attrs);
 +  grad_outputs->push_back(dx);
 +  return scope.status();
 +}
 +REGISTER_GRADIENT_OP("MaxPool3D", MaxPool3DGradHelper);
 +
 +Status AvgPoolGradHelper(const Scope& scope, const Operation& op,
 +                         const std::vector<Output>& grad_inputs,
 +                         std::vector<Output>* grad_outputs) {
 +  std::vector<int32> ksize;
 +  std::vector<int32> strides;
 +  string padding;
 +  string data_format;
 +  auto attrs = op.output(0).node()->attrs();
 +  TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "ksize", &ksize));
 +  TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "strides", &strides));
 +  TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "padding", &padding));
 +  TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "data_format", &data_format));
 +  internal::AvgPoolGrad::Attrs grad_attrs;
 +  grad_attrs.DataFormat(data_format);
 +  auto dx =
 +      internal::AvgPoolGrad(scope, Shape(scope, op.input(0)), grad_inputs[0],
 +                            ksize, strides, padding, grad_attrs);
 +  grad_outputs->push_back(dx);
 +  return scope.status();
 +}
 +REGISTER_GRADIENT_OP("AvgPool", AvgPoolGradHelper);
 +
 +Status AvgPool3DGradHelper(const Scope& scope, const Operation& op,
 +                           const std::vector<Output>& grad_inputs,
 +                           std::vector<Output>* grad_outputs) {
 +  std::vector<int32> ksize;
 +  std::vector<int32> strides;
 +  string padding;
 +  string data_format;
 +  auto attrs = op.output(0).node()->attrs();
 +  TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "ksize", &ksize));
 +  TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "strides", &strides));
 +  TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "padding", &padding));
 +  TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "data_format", &data_format));
 +  AvgPool3DGrad::Attrs grad_attrs;
 +  grad_attrs.DataFormat(data_format);
 +  auto dx = AvgPool3DGrad(scope, Shape(scope, op.input(0)), grad_inputs[0],
 +                          ksize, strides, padding, grad_attrs);
 +  grad_outputs->push_back(dx);
 +  return scope.status();
 +}
 +REGISTER_GRADIENT_OP("AvgPool3D", AvgPool3DGradHelper);
 +
  Status LRNGradHelper(const Scope& scope, const Operation& op,
                       const std::vector<Output>& grad_inputs,
-                      std::vector<Output>* grad_outputs){
-   internal::LRNGrad::Attrs grad_attrs;
-   auto dx = internal::LRNGrad(scope, grad_inputs[0], op.input(0), op.output(0),
-                               grad_attrs);
+                      std::vector<Output>* grad_outputs) {
+   auto dx = internal::LRNGrad(scope, grad_inputs[0], op.input(0), op.output(0));
    grad_outputs->push_back(dx);
    return scope.status();
  }
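
For context, and not part of this merge: a minimal sketch of how the newly
registered gradients are reached from the C++ API. AddSymbolicGradients
consults the registry populated by REGISTER_GRADIENT_OP, so the "MaxPool3D"
entry above now resolves to MaxPool3DGradHelper (and likewise for "AvgPool"
and "AvgPool3D"). The input shape, ksize, strides, and padding values below
are illustrative assumptions, not values taken from the commit.

  // Sketch: differentiate a MaxPool3D through the newly registered gradient.
  #include "tensorflow/cc/client/client_session.h"
  #include "tensorflow/cc/framework/gradients.h"
  #include "tensorflow/cc/ops/standard_ops.h"

  using namespace tensorflow;
  using namespace tensorflow::ops;

  int main() {
    Scope scope = Scope::NewRootScope();

    // NDHWC input: batch=1, 4x4x4 volume, 1 channel (illustrative shape).
    auto x = Placeholder(scope, DT_FLOAT, Placeholder::Shape({1, 4, 4, 4, 1}));
    auto y = MaxPool3D(scope, x, /*ksize=*/{1, 2, 2, 2, 1},
                       /*strides=*/{1, 2, 2, 2, 1}, /*padding=*/"SAME");

    // Seeds dy with ones, then dispatches to MaxPool3DGradHelper via the
    // gradient registry.
    std::vector<Output> grads;
    TF_CHECK_OK(AddSymbolicGradients(scope, {y}, {x}, &grads));

    Tensor x_val(DT_FLOAT, TensorShape({1, 4, 4, 4, 1}));
    x_val.flat<float>().setRandom();

    ClientSession session(scope);
    std::vector<Tensor> out;
    TF_CHECK_OK(session.Run({{x, x_val}}, grads, &out));
    // out[0] holds the gradient of sum(y) with respect to x.
    return 0;
  }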
Simple merge
@@@ -425,7 -425,7 +425,8 @@@ py_test
      srcs_version = "PY2AND3",
      tags = [
          "no_pip_gpu",  # b/63391119
 +        "no_windows",  # TODO: needs investigation on Windows
+         "nomsan",
      ],
      deps = [
          ":feature_keys",
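
For reference: the two tags added in this hunk come from the two sides of the
merge. "no_windows" and "nomsan" are ordinary Bazel test tags, so a CI
configuration that wants to honor them excludes the test with tag filters,
e.g. bazel test --test_tag_filters=-no_windows,-nomsan
//tensorflow/contrib/timeseries/... (an illustrative invocation, not
something this commit adds).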
Simple merge
Simple merge
Simple merge
Simple merge
Simple merge
Simple merge