From: Michael Suo
Date: Fri, 17 Sep 2021 17:21:43 +0000 (-0700)
Subject: Revert D30993855: [pytorch][PR] OpInfo: nn.functional.conv2d
X-Git-Tag: accepted/tizen/8.0/unified/20231005.095509~112
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=ecfc784e676e51733624b2e96a17320579478735;p=platform%2Fupstream%2Fpytorch.git

Revert D30993855: [pytorch][PR] OpInfo: nn.functional.conv2d

Test Plan: revert-hammer

Differential Revision:
D30993855 (https://github.com/pytorch/pytorch/commit/873255c6d95342d144e9d1b633c16410844b934e)

Original commit changeset: 7402f99addb4

fbshipit-source-id: b0539daa195dc6a3739bce5c264cb2177b7721ff
---

diff --git a/test/test_fx_experimental.py b/test/test_fx_experimental.py
index a46501b..9238a10 100644
--- a/test/test_fx_experimental.py
+++ b/test/test_fx_experimental.py
@@ -1471,7 +1471,6 @@ class TestNormalizeOperators(JitTestCase):
             "igamma",
             "igammac",
             "index_put",
-            "nn.functional.conv2d",
             "nn.functional.dropout",
             "polygamma",
             "special.polygamma",
diff --git a/torch/testing/_internal/common_methods_invocations.py b/torch/testing/_internal/common_methods_invocations.py
index 19b3712..6331c31 100644
--- a/torch/testing/_internal/common_methods_invocations.py
+++ b/torch/testing/_internal/common_methods_invocations.py
@@ -2667,49 +2667,6 @@ def sample_inputs_conv_transpose2d(op_info, device, dtype, requires_grad, **kwar
     return list(generator())
 
-
-def sample_inputs_conv2d(op_info, device, dtype, requires_grad, jit_fail_sample=False, **kwargs):
-    make_arg = partial(make_tensor, device=device, dtype=dtype, requires_grad=requires_grad)
-
-    # Ordered as shapes for input, weight, bias
-    # and a dict of values of (stride, padding, groups, dilation)
-    cases: Tuple = (
-        ((1, 3, 4, 4), (3, 3, 3, 3), (3,),
-            {'stride': (2, 2), 'padding': 2, 'groups': 1}),
-        ((2, 4, 8, 8), (2, 2, 3, 3), (2,),
-            {'stride': (3, 2), 'padding': (2, 1), 'groups': 2, 'dilation': (4, 4)}),
-        ((1, 4, 5, 5), (1, 4, 2, 3), (1,),
-            {'stride': 2, 'padding': 1, 'groups': 1, 'dilation': (2, 3)}),
-        ((1, 4, 5, 5), (1, 4, 2, 3), (1,),
-            {'stride': 2, 'padding': 1, 'groups': 1, 'dilation': (2, 3)}),
-        ((1, 2, 4, 3), (4, 2, 3, 4), None,
-            {'stride': 2, 'padding': 1, 'groups': 1}),
-        ((1, 4, 5, 5), (1, 4, 2, 3), (1,),
-            {'stride': 2, 'padding': "valid"}),
-        ((1, 4, 5, 5), (1, 4, 2, 3), (1,),
-            {'stride': 1, 'padding': "same", 'dilation': 3}),
-        # Below are the group related samples from common_nn.py
-        ((2, 4, 6, 6), (4, 1, 3, 3), (4,), {'groups': 4}),
-        ((2, 4, 6, 6), (8, 1, 3, 3), (8,), {'groups': 4}),
-        ((2, 4, 6, 6), (8, 1, 3, 3), None, {'groups': 4}),
-        ((2, 4, 6, 6), (4, 1, 3, 3), (4,), {'groups': 4, 'stride': (3, 2)}),
-        ((2, 4, 6, 6), (4, 1, 3, 3), (4,), {'groups': 4, 'padding': (1, 1)}),
-        ((2, 4, 5, 5), (4, 1, 2, 2), (4,), {'groups': 4, 'dilation': (2, 2)}),
-        ((2, 4, 6, 5), (6, 2, 3, 2), (6,), {'groups': 2}),
-        # With defaults
-        ((1, 4, 5, 5), (3, 4, 3, 3), None, {}),
-    )
-
-    def generator():
-        for input_shape, weight, bias, kwargs in cases:
-            yield SampleInput(make_arg(input_shape), args=(
-                make_arg(weight),
-                make_arg(bias) if bias is not None else bias
-            ), kwargs=kwargs)
-
-    return list(generator())
-
-
 def sample_inputs_layer_norm(opinfo, device, dtype, requires_grad, **kwargs):
     make_arg = partial(make_tensor, device=device, dtype=dtype, requires_grad=requires_grad)
 
@@ -7593,21 +7550,6 @@ op_db: List[OpInfo] = [
                DecorateInfo(unittest.skip("Skipped!"), 'TestJit', 'test_variant_consistency_jit'),
            ),
            supports_out=False,),
-    # Added 2 entries for conv2d
-    # One runs with cudnn and other without.
-    OpInfo('nn.functional.conv2d',
-           aliases=('conv2d',),
-           aten_name='conv2d',
-           dtypes=floating_types_and(torch.int64),
-           dtypesIfCUDA=floating_types_and(torch.float16, *[torch.bfloat16] if CUDA11OrLater else []),
-           sample_inputs_func=partial(sample_inputs_conv2d),
-           gradcheck_nondet_tol=GRADCHECK_NONDET_TOL if CUDA11OrLater else 0.,
-           skips=(
-               # RuntimeError: !lhs.isAliasOf(rhs)INTERNAL ASSERT FAILED at
-               # "../torch/csrc/jit/passes/utils/check_alias_annotation.cpp":103, please report a bug to PyTorch.
-               DecorateInfo(unittest.skip("Skipped!"), 'TestJit', 'test_variant_consistency_jit'),
-           ),
-           supports_out=False,),
     OpInfo('nn.functional.layer_norm',
            aten_name='layer_norm',
            aliases=('layer_norm',),
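
For context on what the reverted OpInfo exercised, the following is a minimal
standalone sketch (not part of this commit) that runs the first sample case
from the removed sample_inputs_conv2d through torch.nn.functional.conv2d;
the tensor values are random and the printed shape is illustrative only.

    import torch
    import torch.nn.functional as F

    # First case from the removed `cases` tuple: input (1, 3, 4, 4),
    # weight (3, 3, 3, 3), bias (3,), stride=(2, 2), padding=2, groups=1.
    inp = torch.randn(1, 3, 4, 4, requires_grad=True)
    weight = torch.randn(3, 3, 3, 3, requires_grad=True)
    bias = torch.randn(3, requires_grad=True)

    out = F.conv2d(inp, weight, bias, stride=(2, 2), padding=2, groups=1)
    # H_out = floor((4 + 2*2 - (3-1) - 1) / 2) + 1 = 3, likewise for W_out.
    print(out.shape)  # torch.Size([1, 3, 3, 3])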