From: Elias Ellison
Date: Fri, 22 Mar 2019 22:25:40 +0000 (-0700)
Subject: Better error message for tensor with grad as constant in tracing (#18298)
X-Git-Tag: accepted/tizen/6.5/unified/20211028.231830~671
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=3badea6eb3b835814990291368b03fe63cf29e00;p=platform%2Fupstream%2Fpytorch.git

Better error message for tensor with grad as constant in tracing (#18298)

Summary:
Fix for https://github.com/pytorch/pytorch/issues/17583

There's an unrelated issue right now causing a segfault when printing a tensor, so that might have to be fixed first for this to land.
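As a minimal sketch of the new behavior (not part of this patch; the names f and param are only illustrative), mirroring the tests added below:

    import torch

    param = torch.randn(3).requires_grad_()

    def f(x):
        # param is captured from the enclosing scope, so the tracer would
        # have to bake it into the graph as a constant.
        return x + param

    # The tracer now refuses to insert a tensor that requires grad as a
    # constant and suggests a fix.
    try:
        torch.jit.trace(f, torch.randn(3))
    except RuntimeError as e:
        print(e)  # Cannot insert a Tensor that requires grad as a constant. ...

    # Non-tensor inputs now report the offending type and value.
    try:
        torch.jit.trace(f, (1,))
    except RuntimeError as e:
        print(e)  # ... but instead got value of type int. Value: 1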
Got ", + type); } }; for (IValue& input : inputs) { @@ -429,10 +437,7 @@ void addInputs(Node* n, const char* name, at::IntArrayRef value) { void addInputs(Node* n, const char* name, const ArrayRef& value) { AT_ERROR("Tracing float lists currently not supported!"); } -void addInputs( - Node* n, - const char* name, - const std::vector& value) { +void addInputs(Node* n, const char* name, const std::vector& value) { AT_ERROR("Tracing float lists currently not supported!"); }