        _test_complex((1, 2, 3, 4, 2), 2)
        _test_complex((2, 1, 3, 4, 3, 2), 3)
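+    # gradcheck must reject a function with no differentiable outputs whose
+    # numerical gradient is nonzero: detach() makes the output numerically
+    # equal to the input (identity Jacobian) but never requiring grad.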
+    def test_gradcheck_fail_when_no_differentiable_outputs_and_num_grad_not_zero(self):
+        def autograd_fn(input):
+            output = input.detach()
+            self.assertFalse(output.requires_grad)
+            return output
+
+        f_args_variable = torch.ones(S, S, requires_grad=True)
+        self.assertRaisesRegex(RuntimeError, 'Numerical gradient for function expected to be zero',
+                               lambda: gradcheck(autograd_fn, f_args_variable, eps=1e-6, atol=PRECISION))
+
    def test_variable_traverse(self):
        def get_out_and_unrefed_cycle():
            inp = torch.randn(10, requires_grad=True)
    tupled_inputs = _as_tuple(inputs)
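+    # fail_test raises a RuntimeError when raise_exception=True and returns
+    # False otherwise; returning its result makes gradcheck stop early.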
    if any(t.is_sparse for t in tupled_inputs if isinstance(t, torch.Tensor)) and not check_sparse_nnz:
-        fail_test('gradcheck expects all tensor inputs '
-                  'are dense when check_sparse_nnz is set to False.')
+        return fail_test('gradcheck expects all tensor inputs '
+                         'to be dense when check_sparse_nnz is set to False.')
    # Make sure that gradients are saved for all inputs
    any_input_requiring_grad = False
+    some_input_not_requiring_grad = False
    for inp in tupled_inputs:
        if isinstance(inp, torch.Tensor):
            if inp.requires_grad:
                if inp.dtype != torch.float64:
                    warnings.warn(
                        'At least one of the inputs that requires gradient '
                        'is not of double precision floating point. '
                        'This check will likely fail if all the inputs are '
                        'not of double precision floating point. ')
                any_input_requiring_grad = True
                inp.retain_grad()
+            else:
+                some_input_not_requiring_grad = True
    if not any_input_requiring_grad:
        raise ValueError(
            'gradcheck expects at least one input tensor to require gradient, '
            'but none of them have requires_grad=True.')
-
-    output = _differentiable_outputs(func(*tupled_inputs))
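+    # If some but not all inputs require grad, gradcheck would silently skip
+    # checking the gradients w.r.t. the non-requiring inputs, so reject that case.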
+ if some_input_not_requiring_grad:
+ raise ValueError(
+ 'gradcheck expects if at least one input tensor is required gradient, '
+ 'then all other inputs should have requires_grad=True.')
+
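+    # With no differentiable outputs there is nothing to backpropagate through,
+    # so gradcheck can only verify that the numerical gradient of every output
+    # w.r.t. every input is zero.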
+    func_out = func(*tupled_inputs)
+    output = _differentiable_outputs(func_out)
+
+    if not output:
+        for i, o in enumerate(_as_tuple(func_out)):
+            def fn(input):
+                # fn is evaluated immediately below, so capturing the loop variable i is safe.
+                return _as_tuple(func(*input))[i]
+            numerical = get_numerical_jacobian(fn, tupled_inputs, eps=eps)
+            for n in numerical:
+                if torch.ne(n, 0).any():
+                    return fail_test('Numerical gradient for function expected to be zero')
+        return True
    for i, o in enumerate(output):
        if not o.requires_grad: