From d188204323f59590ca80bfb345c7bce7bfea4d9a Mon Sep 17 00:00:00 2001
From: Natalia Gimelshein
Date: Tue, 14 Sep 2021 11:19:07 -0700
Subject: [PATCH] remove SkipInfo class (#64972)

Summary: per title

Pull Request resolved: https://github.com/pytorch/pytorch/pull/64972

Reviewed By: mruberry

Differential Revision: D30924598

Pulled By: ngimel

fbshipit-source-id: 1ac1ec8fd50ca27e3cd36c12a588d334e7466899
---
 .../_internal/common_methods_invocations.py | 29 +++-------------------
 1 file changed, 3 insertions(+), 26 deletions(-)

diff --git a/torch/testing/_internal/common_methods_invocations.py b/torch/testing/_internal/common_methods_invocations.py
index 6dedd22..5dd1cb2 100644
--- a/torch/testing/_internal/common_methods_invocations.py
+++ b/torch/testing/_internal/common_methods_invocations.py
@@ -79,29 +79,6 @@ class DecorateInfo(object):
         )
 
 
-class SkipInfo(DecorateInfo):
-    """Describes which test, or type of tests, should be skipped when testing
-       an operator. Any test that matches all provided arguments will be skipped.
-       The skip will only be checked if the active_if argument is True."""
-
-    def __init__(
-            self, cls_name=None, test_name=None, *, device_type=None, dtypes=None, active_if=True,
-            expected_failure=False):
-        """
-        Args:
-            cls_name: the name of the test class to skip
-            test_name: the name of the test within the test class to skip
-            device_type: the devices for which to skip the tests
-            dtypes: the dtypes for which to skip the tests
-            active_if: whether tests matching the above arguments should be skipped
-            expected_failure: whether to assert that skipped tests fail
-        """
-
-        decorator = unittest.expectedFailure if expected_failure else unittest.skip("Skipped!")
-        super().__init__(decorators=decorator, cls_name=cls_name, test_name=test_name,
-                         device_type=device_type, dtypes=dtypes, active_if=active_if)
-
-
 class SampleInput(object):
     """Represents sample inputs to a function."""
 
@@ -6976,7 +6953,7 @@ op_db: List[OpInfo] = [
            decorators=[skipCUDAIfNoMagma, skipCUDAIfRocm, skipCPUIfNoLapack],
            skips=(
                # Gradcheck for complex hangs for this function, therefore it raises NotImplementedError for now
-               SkipInfo('TestGradients', 'test_forward_mode_AD', dtypes=complex_types()),),
+               DecorateInfo(unittest.skip("Skipped!"), 'TestGradients', 'test_forward_mode_AD', dtypes=complex_types()),),
            ),
     OpInfo('linalg.eigvalsh',
            aten_name='linalg_eigvalsh',
@@ -6987,7 +6964,7 @@ op_db: List[OpInfo] = [
            decorators=[skipCUDAIfNoMagma, skipCUDAIfRocm, skipCPUIfNoLapack],
            skips=(
                # Gradcheck hangs for this function
-               SkipInfo('TestGradients', 'test_forward_mode_AD'),),
+               DecorateInfo(unittest.skip("Skipped!"), 'TestGradients', 'test_forward_mode_AD'),),
            ),
     OpInfo('linalg.householder_product',
            aten_name='linalg_householder_product',
@@ -8442,7 +8419,7 @@ op_db: List[OpInfo] = [
            decorators=[skipCUDAIfNoMagma, skipCUDAIfRocm, skipCPUIfNoLapack],
            skips=(
                # Gradcheck hangs for this function
-               SkipInfo('TestGradients', 'test_forward_mode_AD'),),
+               DecorateInfo(unittest.skip("Skipped!"), 'TestGradients', 'test_forward_mode_AD'),),
            ),
     OpInfo('eig',
            op=torch.eig,
-- 
2.7.4
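
Note: OpInfo entries outside this file that still construct SkipInfo can be ported mechanically, since the removed __init__ above only chose between unittest.skip and unittest.expectedFailure and forwarded everything else to DecorateInfo. A minimal sketch of the two translations, assuming the test class/test name and dtypes filter used in the hunks above; the complex_types import path is an assumption and may differ across releases:

    import unittest

    from torch.testing._internal.common_methods_invocations import DecorateInfo
    # Assumed location of the complex_types() helper used by the skips above.
    from torch.testing._internal.common_dtype import complex_types

    # SkipInfo('TestGradients', 'test_forward_mode_AD', dtypes=complex_types())
    # becomes a plain skip, matching the replacements in this patch:
    skip = DecorateInfo(unittest.skip("Skipped!"), 'TestGradients', 'test_forward_mode_AD',
                        dtypes=complex_types())

    # SkipInfo('TestGradients', 'test_forward_mode_AD', expected_failure=True)
    # becomes an expected failure, per the decorator choice in the removed __init__:
    xfail = DecorateInfo(unittest.expectedFailure, 'TestGradients', 'test_forward_mode_AD')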