1 """Supporting definitions for the Python regression tests."""
# Guard against importing this module directly: it must be imported as part
# of the test package so that package-relative state is set up correctly.
if __name__ != 'test.test_support':
    raise ImportError('test_support must be imported from the test package')
# Public API of this helper module, re-exported by `from test.test_support
# import *`.  Order is preserved from the original definition.
__all__ = [
    "Error", "TestFailed", "ResourceDenied", "import_module",
    "verbose", "use_resources", "max_memuse", "record_original_stdout",
    "get_original_stdout", "unload", "unlink", "rmtree", "forget",
    "is_resource_enabled", "requires", "find_unused_port", "bind_port",
    "fcmp", "have_unicode", "is_jython", "TESTFN", "HOST", "FUZZ",
    "SAVEDCWD", "temp_cwd", "findfile", "sortdict", "check_syntax_error",
    "open_urlresource", "check_warnings", "check_py3k_warnings",
    "CleanImport", "EnvironmentVarGuard", "captured_output",
    "captured_stdout", "TransientResource", "transient_internet",
    "run_with_locale", "set_memlimit", "bigmemtest", "bigaddrspacetest",
    "BasicTestRunner", "run_unittest", "run_doctest", "threading_setup",
    "threading_cleanup", "reap_children", "cpython_only",
    "check_impl_detail", "get_attribute", "py3k_bytes",
]
class Error(Exception):
    """Root of the exception hierarchy raised by the regression tests."""
class TestFailed(Error):
    # Raised by helpers such as run_unittest()/run_doctest() on failure.
    """Test failed."""

class ResourceDenied(unittest.SkipTest):
    """Test skipped because it requested a disallowed resource.

    This is raised when a test calls requires() for a resource that
    has not been enabled. It is used to distinguish between expected
    and unexpected skips.
    """
@contextlib.contextmanager
def _ignore_deprecated_imports(ignore=True):
    """Context manager to suppress package and module deprecation
    warnings when importing them.

    If ignore is False, this context manager has no effect."""
    # NOTE(review): this excerpt is truncated -- the filterwarnings() call
    # below is missing its remaining arguments, and the yield that makes
    # this a usable context manager is not visible.  Confirm against the
    # full file.
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", ".+ (module|package)",
def import_module(name, deprecated=False):
    """Import and return the module to be tested, raising SkipTest if
    it is not available.

    If deprecated is True, any module or package deprecation messages
    will be suppressed."""
    with _ignore_deprecated_imports(deprecated):
        try:
            return importlib.import_module(name)
        except ImportError as msg:
            # An unavailable module is a skip, not an error, so that tests
            # for optional functionality degrade gracefully.
            raise unittest.SkipTest(str(msg))
def _save_and_remove_module(name, orig_modules):
    """Helper function to save and remove a module from sys.modules

    Return value is True if the module was in sys.modules and
    False otherwise."""
    # Stash the existing module object so the caller can restore it later.
    # NOTE(review): surrounding control flow (membership test, removal,
    # return) is elided in this excerpt.
    orig_modules[name] = sys.modules[name]

def _save_and_block_module(name, orig_modules):
    """Helper function to save and block a module in sys.modules

    Return value is True if the module was in sys.modules and
    False otherwise."""
    # Save any existing module object, then poison the entry with None so
    # subsequent imports of `name` fail.
    # NOTE(review): surrounding control flow is elided in this excerpt.
    orig_modules[name] = sys.modules[name]
    sys.modules[name] = None
def import_fresh_module(name, fresh=(), blocked=(), deprecated=False):
    """Imports and returns a module, deliberately bypassing the sys.modules cache
    and importing a fresh copy of the module. Once the import is complete,
    the sys.modules cache is restored to its original state.

    Modules named in fresh are also imported anew if needed by the import.

    Importing of modules named in blocked is prevented while the fresh import
    takes place.

    If deprecated is True, any module or package deprecation messages
    will be suppressed."""
    # NOTE: test_heapq and test_warnings include extra sanity checks to make
    # sure that this utility function is working as expected
    with _ignore_deprecated_imports(deprecated):
        # Keep track of modules saved for later restoration as well
        # as those which just need a blocking entry removed
        # NOTE(review): initialisation of orig_modules/names_to_remove and
        # the try/finally framing are elided in this excerpt.
        _save_and_remove_module(name, orig_modules)
        for fresh_name in fresh:
            _save_and_remove_module(fresh_name, orig_modules)
        for blocked_name in blocked:
            # Only remember names we actually added a blocking entry for.
            if not _save_and_block_module(blocked_name, orig_modules):
                names_to_remove.append(blocked_name)
        fresh_module = importlib.import_module(name)
        # Restore the pre-existing sys.modules entries...
        for orig_name, module in orig_modules.items():
            sys.modules[orig_name] = module
        # ...and drop the blocking entries we introduced.
        for name_to_remove in names_to_remove:
            del sys.modules[name_to_remove]
def get_attribute(obj, name):
    """Get an attribute, raising SkipTest if AttributeError is raised."""
    try:
        attribute = getattr(obj, name)
    except AttributeError:
        # A missing attribute is reported as a skip rather than an error,
        # mirroring import_module()'s treatment of missing modules.
        raise unittest.SkipTest("module %s has no attribute %s" % (
            obj, name))
    else:
        return attribute
# Module-level knobs driven by regrtest.py; tests may inspect them directly.
verbose = 1              # Flag set to 0 by regrtest.py
use_resources = None     # Flag set to [] by regrtest.py
max_memuse = 0           # Disable bigmem tests (they will still be run with
                         # small sizes, to make sure they work.)
# _original_stdout is meant to hold stdout at the time regrtest began.
# This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
# The point is to have some flavor of stdout the user can actually see.
_original_stdout = None

def record_original_stdout(stdout):
    """Remember *stdout* as the stream the test run started with."""
    global _original_stdout
    _original_stdout = stdout

def get_original_stdout():
    """Return the recorded original stdout, falling back to sys.stdout."""
    if _original_stdout:
        return _original_stdout
    return sys.stdout
    # NOTE(review): this line belongs to unload(name); its def line is not
    # visible in this excerpt.
    del sys.modules[name]

def unlink(filename):
    # NOTE(review): the try/os.unlink framing is elided in this excerpt;
    # only the OSError errno filtering is visible.
        # Unix returns ENOENT, Windows returns ESRCH.
        if e.errno not in (errno.ENOENT, errno.ESRCH):
    # NOTE(review): the `def forget(modname):` line and the unload(modname)
    # call are not visible in this excerpt.
    '''"Forget" a module was ever imported by removing it from sys.modules and
    deleting any .pyc and .pyo files.'''
    for dirname in sys.path:
        unlink(os.path.join(dirname, modname + os.extsep + 'pyc'))
        # Deleting the .pyo file cannot be within the 'try' for the .pyc since
        # the chance exists that there is no .pyc (and thus the 'try' statement
        # is exited) but there is a .pyo file.
        unlink(os.path.join(dirname, modname + os.extsep + 'pyo'))
def is_resource_enabled(resource):
    """Test whether a resource is enabled. Known resources are set by
    regrtest.py."""
    # use_resources is None when not running under regrtest; in that case
    # no resource counts as enabled.
    return use_resources is not None and resource in use_resources
def requires(resource, msg=None):
    """Raise ResourceDenied if the specified resource is not available.

    If the caller's module is __main__ then automatically return True. The
    possibility of False being returned occurs when regrtest.py is executing."""
    # NOTE(review): the `return True` for the __main__ branch and the
    # `if msg is None:` guard before the default message are elided in
    # this excerpt.
    # see if the caller's module is __main__ - if so, treat as if
    # the resource was set
    if sys._getframe(1).f_globals.get("__name__") == "__main__":
    if not is_resource_enabled(resource):
            msg = "Use of the `%s' resource not enabled" % resource
        raise ResourceDenied(msg)
def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM):
    """Returns an unused port that should be suitable for binding.  This is
    achieved by creating a temporary socket with the same family and type as
    the 'sock' parameter (default is AF_INET, SOCK_STREAM), binding it to an
    ephemeral port (port 0) so the OS picks an unused one, then closing the
    socket and returning the elicited port.

    Either this method or bind_port() should be used for any tests where a
    server socket needs to be bound to a particular port for the duration of
    the test.  Always prefer bind_port() over find_unused_port() where
    possible; use find_unused_port() only when the port must be handed to a
    constructor or an external program (e.g. the -accept argument to
    openssl's s_server mode).  Hard coded ports should *NEVER* be used: a
    server socket bound to a hard coded port breaks parallel test runs, and
    on Windows (where SO_REUSEADDR has different semantics than on Unix,
    allowing two sockets to bind the same host/port and then wedge on
    accept()) it can hang the whole python process.  See
    http://bugs.python.org/issue2550 and
    http://msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx for the
    gory details; SO_EXCLUSIVEADDRUSE is the Windows-side fix, which
    bind_port() applies.

    XXX: although this approach is a vast improvement on previous attempts to
    elicit unused ports, it rests heavily on the assumption that the ephemeral
    port returned to us by the OS won't immediately be dished back out to some
    other process when we close and delete our temporary socket but before our
    calling code has a chance to bind the returned port.  We can deal with this
    issue if/when we come across it."""
    tempsock = socket.socket(family, socktype)
    try:
        # bind_port() binds to ('HOST', 0) and returns the port the OS chose.
        port = bind_port(tempsock)
    finally:
        # Release the port immediately so the caller can bind it.
        tempsock.close()
    return port
def bind_port(sock, host=HOST):
    """Bind the socket to a free port and return the port number.  Relies on
    ephemeral ports in order to ensure we are using an unbound port.  This is
    important as many tests may be running simultaneously, especially in a
    buildbot environment.  This method raises an exception if the sock.family
    is AF_INET and sock.type is SOCK_STREAM, *and* the socket has SO_REUSEADDR
    or SO_REUSEPORT set on it.  Tests should *never* set these socket options
    for TCP/IP sockets.  The only case for setting these options is testing
    multicasting via multiple UDP sockets.

    Additionally, if the SO_EXCLUSIVEADDRUSE socket option is available (i.e.
    on Windows), it will be set on the socket.  This will prevent anyone else
    from bind()'ing to our host/port for the duration of the test.
    """
    if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM:
        if hasattr(socket, 'SO_REUSEADDR'):
            if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1:
                raise TestFailed("tests should never set the SO_REUSEADDR "
                                 "socket option on TCP/IP sockets!")
        if hasattr(socket, 'SO_REUSEPORT'):
            if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1:
                raise TestFailed("tests should never set the SO_REUSEPORT "
                                 "socket option on TCP/IP sockets!")
        if hasattr(socket, 'SO_EXCLUSIVEADDRUSE'):
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)

    # Port 0 asks the OS for an ephemeral port; report back the one chosen.
    sock.bind((host, 0))
    port = sock.getsockname()[1]
    return port
def fcmp(x, y): # fuzzy comparison function
    # Floats are compared with a relative tolerance derived from FUZZ.
    if isinstance(x, float) or isinstance(y, float):
        # NOTE(review): the try/except framing and the comparison of
        # abs(x - y) against `fuzz` are elided in this excerpt.
        fuzz = (abs(x) + abs(y)) * FUZZ
    elif type(x) == type(y) and isinstance(x, (tuple, list)):
        # Sequences compare element-wise, then by length.
        # NOTE(review): the early return on a non-zero element outcome is
        # elided in this excerpt.
        for i in range(min(len(x), len(y))):
            outcome = fcmp(x[i], y[i])
        return (len(x) > len(y)) - (len(x) < len(y))
    return (x > y) - (x < y)
# NOTE(review): many lines of this platform-dependent setup block (TESTFN
# assignments, else branches, try statements, print arguments) are elided
# in this excerpt; the structure below is only what is visible.
is_jython = sys.platform.startswith('java')

# Filename used for testing
if os.name == 'java':
    # Jython disallows @ in module names
elif os.name == 'riscos':
# Unicode name only used if TEST_FN_ENCODING exists for the platform.

# Assuming sys.getfilesystemencoding()!=sys.getdefaultencoding()
# TESTFN_UNICODE is a filename that can be encoded using the
# file system encoding, but *not* with the default (ascii) encoding
if isinstance('', unicode):
    # XXX perhaps unicode() should accept Unicode strings?
    TESTFN_UNICODE = "@test-\xe0\xf2"
    # 2 latin characters.
    TESTFN_UNICODE = unicode("@test-\xe0\xf2", "latin-1")
    TESTFN_ENCODING = sys.getfilesystemencoding()
    # TESTFN_UNENCODABLE is a filename that should *not* be
    # able to be encoded by *either* the default or filesystem encoding.
    # This test really only makes sense on Windows NT platforms
    # which have special Unicode support in posixmodule.
    if (not hasattr(sys, "getwindowsversion") or
        sys.getwindowsversion()[3] < 2): # 0=win32s or 1=9x/ME
        TESTFN_UNENCODABLE = None
        # Japanese characters (I think - from bug 846133)
        TESTFN_UNENCODABLE = eval('u"@test-\u5171\u6709\u3055\u308c\u308b"')
        # XXX - Note - should be using TESTFN_ENCODING here - but for
        # Windows, "mbcs" currently always operates as if in
        # errors=ignore' mode - hence we get '?' characters rather than
        # the exception. 'Latin1' operates as we expect - ie, fails.
        # See [ 850997 ] mbcs encoding ignores errors
        TESTFN_UNENCODABLE.encode("Latin1")
    except UnicodeEncodeError:
        'WARNING: The filename %r CAN be encoded by the filesystem. ' \
        'Unicode filename tests may not be effective' \

# Disambiguate TESTFN for parallel testing, while letting it remain a valid
TESTFN = "{}_{}_tmp".format(TESTFN, os.getpid())
# Save the initial cwd so helpers such as temp_cwd() can refer back to the
# directory the test run started in.
SAVEDCWD = os.getcwd()
@contextlib.contextmanager
def temp_cwd(name='tempcwd', quiet=False):
    """
    Context manager that creates a temporary directory and set it as CWD.

    The new CWD is created in the current directory and it's named *name*.
    If *quiet* is False (default) and it's not possible to create or change
    the CWD, an error is raised. If it's True, only a warning is raised
    and the original CWD is used.
    """
    if isinstance(name, unicode):
        # NOTE(review): the try framing and `if not quiet:` guard around
        # this encode/SkipTest pair are elided in this excerpt.
            name = name.encode(sys.getfilesystemencoding() or 'ascii')
        except UnicodeEncodeError:
                raise unittest.SkipTest('unable to encode the cwd name with '
                                        'the filesystem encoding.')
    saved_dir = os.getcwd()
    # NOTE(review): directory creation, os.chdir, yield and the restoring
    # finally-block are elided in this excerpt.
            warnings.warn('tests may fail, unable to change the CWD to ' + name,
                          RuntimeWarning, stacklevel=3)
def findfile(file, here=__file__, subdir=None):
    """Try to find a file on sys.path and the working directory. If it is not
    found the argument passed to the function is returned (this does not
    necessarily signal failure; could still be the legitimate path)."""
    if os.path.isabs(file):
        # NOTE(review): the `return file` for absolute paths is elided here.
    if subdir is not None:
        file = os.path.join(subdir, file)
    # NOTE(review): initialisation of `path` from sys.path, the
    # `for dn in path:` loop header and the final fallback return are
    # elided in this excerpt.
    path = [os.path.dirname(here)] + path
        fn = os.path.join(dn, file)
        if os.path.exists(fn): return fn
    # NOTE(review): the `def sortdict(dict):` line and the item-sorting
    # lines are not visible in this excerpt.
    "Like repr(dict), but in sorted order."
    reprpairs = ["%r: %r" % pair for pair in items]
    withcommas = ", ".join(reprpairs)
    return "{%s}" % withcommas

    # NOTE(review): fragment of make_bad_fd(); its def line and the
    # docstring delimiters are not visible in this excerpt.
    Create an invalid file descriptor by opening and closing a file and return
    file = open(TESTFN, "wb")
def check_syntax_error(testcase, statement):
    """Assert, via *testcase*, that compiling *statement* raises SyntaxError."""
    testcase.assertRaises(SyntaxError, compile, statement,
                          '<test string>', 'exec')
def open_urlresource(url, check=None):
    import urlparse, urllib2

    filename = urlparse.urlparse(url)[2].split('/')[-1] # '/': it's URL!

    fn = os.path.join(os.path.dirname(__file__), "data", filename)

    def check_valid_file(fn):
        # NOTE(review): body elided in this excerpt (opens fn and validates
        # it via the `check` callback).

    if os.path.exists(fn):
        f = check_valid_file(fn)
        # NOTE(review): early return and the requires('urlfetch') call are
        # elided here.

    # Verify the requirement before downloading the file
    print >> get_original_stdout(), '\tfetching %s ...' % url
    f = urllib2.urlopen(url, timeout=15)
    # NOTE(review): the copy loop writing the response into fn is elided.
    with open(fn, "wb") as out:
    f = check_valid_file(fn)
    # NOTE(review): the success return is elided here.
    raise TestFailed('invalid resource "%s"' % fn)
class WarningsRecorder(object):
    """Convenience wrapper for the warnings list returned on
    entry to the warnings.catch_warnings() context manager."""
    # NOTE(review): several lines of this class are elided in this excerpt:
    # initialisation of self._last, the `warnings` property definition, and
    # the `def reset(self):` line.
    def __init__(self, warnings_list):
        self._warnings = warnings_list

    def __getattr__(self, attr):
        # Delegate to the most recently recorded warning, if any are new.
        if len(self._warnings) > self._last:
            return getattr(self._warnings[-1], attr)
        elif attr in warnings.WarningMessage._WARNING_DETAILS:
        raise AttributeError("%r has no attribute %r" % (self, attr))

        # Body of the elided `warnings` property: warnings since last reset.
        return self._warnings[self._last:]

        # Body of the elided reset(): forget everything recorded so far.
        self._last = len(self._warnings)
def _filterwarnings(filters, quiet=False):
    """Catch the warnings, then check if all the expected
    warnings have been raised and re-raise unexpected warnings.
    If 'quiet' is True, only re-raise the unexpected warnings.
    """
    # Clear the warning registry of the calling module
    # in order to re-raise the warnings.
    frame = sys._getframe(2)
    registry = frame.f_globals.get('__warningregistry__')
    # NOTE(review): the `if registry: registry.clear()` lines are elided.
    with warnings.catch_warnings(record=True) as w:
        # Set filter "always" to record all warnings. Because
        # test_warnings swap the module, we need to look up in
        # the sys.modules dictionary.
        sys.modules['warnings'].simplefilter("always")
        yield WarningsRecorder(w)
    # Filter the recorded warnings
    reraise = [warning.message for warning in w]
    # NOTE(review): `missing = []` initialisation, the `seen` flag handling,
    # the `message = str(exc)` line and the reraise.remove() bookkeeping are
    # elided in this excerpt.
    for msg, cat in filters:
        for exc in reraise[:]:
                # Filter out the matching messages
                if (re.match(msg, message, re.I) and
                    issubclass(exc.__class__, cat)):
        if not seen and not quiet:
            # This filter caught nothing
            missing.append((msg, cat.__name__))
        raise AssertionError("unhandled warning %r" % reraise[0])
        raise AssertionError("filter (%r, %s) did not catch any warning" %
@contextlib.contextmanager
def check_warnings(*filters, **kwargs):
    """Context manager to silence warnings.

    Accept 2-tuples as positional arguments:
        ("message regexp", WarningCategory)

     - if 'quiet' is True, it does not fail if a filter catches nothing
        (default True without argument,
         default False if some filters are defined)

    Without argument, it defaults to:
        check_warnings(("", Warning), quiet=True)
    """
    quiet = kwargs.get('quiet')
    # NOTE(review): the `if not filters:` guard and the `if quiet is None:`
    # defaulting lines are elided in this excerpt.
        filters = (("", Warning),)
        # Preserve backward compatibility
    return _filterwarnings(filters, quiet)

@contextlib.contextmanager
def check_py3k_warnings(*filters, **kwargs):
    """Context manager to silence py3k warnings.

    Accept 2-tuples as positional arguments:
        ("message regexp", WarningCategory)

     - if 'quiet' is True, it does not fail if a filter catches nothing

    Without argument, it defaults to:
        check_py3k_warnings(("", DeprecationWarning), quiet=False)
    """
    # NOTE(review): the surrounding branch structure (py3k-warning flag
    # checks) is elided in this excerpt.
        filters = (("", DeprecationWarning),)
    # It should not raise any py3k warning
    return _filterwarnings(filters, kwargs.get('quiet'))
class CleanImport(object):
    """Context manager to force import to return a new module reference.

    This is useful for testing module-level behaviours, such as
    the emission of a DeprecationWarning on import.

    Use like this:

        with CleanImport("foo"):
            importlib.import_module("foo") # new reference
    """

    def __init__(self, *module_names):
        # Snapshot sys.modules so __exit__ can put everything back.
        self.original_modules = sys.modules.copy()
        for module_name in module_names:
            if module_name in sys.modules:
                module = sys.modules[module_name]
                # It is possible that module_name is just an alias for
                # another module (e.g. stub for modules renamed in 3.x).
                # In that case, we also need delete the real module to clear
                # the import cache.
                if module.__name__ != module_name:
                    del sys.modules[module.__name__]
                del sys.modules[module_name]

    # NOTE(review): `def __enter__(self): return self` is elided in this
    # excerpt.

    def __exit__(self, *ignore_exc):
        sys.modules.update(self.original_modules)
class EnvironmentVarGuard(UserDict.DictMixin):

    """Class to help protect the environment variable properly. Can be used as
    a context manager."""

    # NOTE(review): the `def __init__(self):` line and the initialisation of
    # self._changed are elided in this excerpt.
        self._environ = os.environ

    def __getitem__(self, envvar):
        return self._environ[envvar]

    def __setitem__(self, envvar, value):
        # Remember the initial value on the first access
        if envvar not in self._changed:
            self._changed[envvar] = self._environ.get(envvar)
        self._environ[envvar] = value

    def __delitem__(self, envvar):
        # Remember the initial value on the first access
        if envvar not in self._changed:
            self._changed[envvar] = self._environ.get(envvar)
        if envvar in self._environ:
            del self._environ[envvar]

    # NOTE(review): the `def keys(self):` line is elided here.
        return self._environ.keys()

    def set(self, envvar, value):
        # NOTE(review): body elided in this excerpt.

    def unset(self, envvar):
        # NOTE(review): body elided in this excerpt.

    # NOTE(review): `def __enter__(self): return self` is elided here.

    def __exit__(self, *ignore_exc):
        # Restore each touched variable; the branches restoring/deleting
        # values are partially elided in this excerpt.
        for (k, v) in self._changed.items():
            if k in self._environ:
        os.environ = self._environ
class DirsOnSysPath(object):
    """Context manager to temporarily add directories to sys.path.

    This makes a copy of sys.path, appends any directories given
    as positional arguments, then reverts sys.path to the copied
    settings when the context ends.

    Note that *all* sys.path modifications in the body of the
    context manager, including replacement of the object,
    will be reverted at the end of the block.
    """

    def __init__(self, *paths):
        # Keep both the list object and a copy of its contents so that a
        # wholesale replacement of sys.path inside the block is also undone.
        self.original_value = sys.path[:]
        self.original_object = sys.path
        sys.path.extend(paths)

    def __enter__(self):
        # All setup happens in __init__; just expose the manager itself.
        return self

    def __exit__(self, *ignore_exc):
        sys.path = self.original_object
        sys.path[:] = self.original_value
class TransientResource(object):

    """Raise ResourceDenied if an exception is raised while the context manager
    is in effect that matches the specified exception and attributes."""

    def __init__(self, exc, **kwargs):
        # NOTE(review): the assignments storing exc and kwargs on self are
        # elided in this excerpt.

    # NOTE(review): `def __enter__(self): return self` is elided here.

    def __exit__(self, type_=None, value=None, traceback=None):
        """If type_ is a subclass of self.exc and value has attributes matching
        self.attrs, raise ResourceDenied. Otherwise let the exception
        propagate (if any)."""
        if type_ is not None and issubclass(self.exc, type_):
            for attr, attr_value in self.attrs.iteritems():
                if not hasattr(value, attr):
                    # NOTE(review): the break statements and the for-else
                    # raising ResourceDenied are elided in this excerpt.
                if getattr(value, attr) != attr_value:
            raise ResourceDenied("an optional resource is not available")
@contextlib.contextmanager
def transient_internet(resource_name, timeout=30.0, errnos=()):
    """Return a context manager that raises ResourceDenied when various issues
    with the Internet connection manifest themselves as exceptions."""
    # NOTE(review): the opening of the default_errnos list and several of
    # its entries are elided in this excerpt.
        ('ECONNREFUSED', 111),
        ('ENETUNREACH', 101),
    default_gai_errnos = [
        # NOTE(review): the EAI_* entries and closing bracket are elided.

    denied = ResourceDenied("Resource '%s' is not available" % resource_name)
    captured_errnos = errnos
    # NOTE(review): the gai_errnos default initialisation is elided here.
    if not captured_errnos:
        captured_errnos = [getattr(errno, name, num)
                           for (name, num) in default_errnos]
        gai_errnos = [getattr(socket, name, num)
                      for (name, num) in default_gai_errnos]

    def filter_error(err):
        n = getattr(err, 'errno', None)
        if (isinstance(err, socket.timeout) or
            (isinstance(err, socket.gaierror) and n in gai_errnos) or
            n in captured_errnos):
            # NOTE(review): the verbosity guard and `raise denied` are
            # elided in this excerpt.
            sys.stderr.write(denied.args[0] + "\n")

    old_timeout = socket.getdefaulttimeout()
    # NOTE(review): the try framing, the yield, and the socket.error
    # handler are elided in this excerpt.
    if timeout is not None:
        socket.setdefaulttimeout(timeout)
    except IOError as err:
        # urllib can wrap original socket errors multiple times (!), we must
        # unwrap to get at the original error.
        # NOTE(review): the unwrapping loop header and `a = err.args` are
        # elided in this excerpt.
        if len(a) >= 1 and isinstance(a[0], IOError):
            # The error can also be wrapped as args[1]:
            #    except socket.error as msg:
            #        raise IOError('socket error', msg).with_traceback(sys.exc_info()[2])
        elif len(a) >= 2 and isinstance(a[1], IOError):
        # XXX should we catch generic exceptions and look for their
        # __cause__ or __context__?
    # NOTE(review): in the full file this restore runs in a finally clause.
    socket.setdefaulttimeout(old_timeout)
@contextlib.contextmanager
def captured_output(stream_name):
    """Run the 'with' statement body using a StringIO object in place of a
    specific attribute on the sys module.
    Example use (with 'stream_name=stdout')::

       with captured_stdout() as s:
           print "hello"
       assert s.getvalue() == "hello"
    """
    orig_stdout = getattr(sys, stream_name)
    setattr(sys, stream_name, StringIO.StringIO())
    try:
        yield getattr(sys, stream_name)
    finally:
        # Always restore the real stream, even if the body raised; otherwise
        # a failing test would leave sys.stdout/stderr/stdin redirected.
        setattr(sys, stream_name, orig_stdout)
def captured_stdout():
    """Shortcut: capture sys.stdout via captured_output()."""
    return captured_output("stdout")

def captured_stdin():
    """Shortcut: capture sys.stdin via captured_output()."""
    return captured_output("stdin")
    # NOTE(review): the `def gc_collect():` line and the gc.collect() calls
    # that follow this docstring are not visible in this excerpt.
    """Force as many objects as possible to be collected.

    In non-CPython implementations of Python, this is needed because timely
    deallocation is not guaranteed by the garbage collector. (Even in CPython
    this can be the case in case of reference cycles.) This means that __del__
    methods may be called later than expected and weakrefs may remain alive for
    longer than expected. This function tries its best to force all garbage
    objects to disappear.
    """
#=======================================================================
# Decorator for running a function in a different locale, correctly resetting
# it afterwards.

def run_with_locale(catstr, *locales):
    # NOTE(review): the intermediate `def decorator(func):` level, the
    # try/except/finally framing and several body lines are elided in this
    # excerpt.
    def inner(*args, **kwds):
            category = getattr(locale, catstr)
            orig_locale = locale.setlocale(category)
        except AttributeError:
            # if the test author gives us an invalid category string
        except:
            # cannot retrieve original locale, so do nothing
            locale = orig_locale = None
        # NOTE(review): the loop trying each candidate locale is elided.
                locale.setlocale(category, loc)
        # now run the function, resetting the locale on exceptions
            return func(*args, **kwds)
        if locale and orig_locale:
            # NOTE(review): in the full file this restore runs in a finally.
            locale.setlocale(category, orig_locale)
        inner.func_name = func.func_name
        inner.__doc__ = func.__doc__
#=======================================================================
# Big-memory-test support. Separate from 'resources' because memory use should be configurable.

# Some handy shorthands. Note that these are used for byte-limits as well
# as size-limits, in the various bigmem tests
# NOTE(review): the _1M/_1G/_2G shorthand definitions are elided in this
# excerpt.
MAX_Py_ssize_t = sys.maxsize

def set_memlimit(limit):
    # NOTE(review): `global max_memuse`, the `sizes` unit mapping and the
    # `if m is None:` guard are elided in this excerpt.
    global real_max_memuse
    # Accept limits like "2.5Gb" (case-insensitive unit suffix).
    m = re.match(r'(\d+(\.\d+)?) (K|M|G|T)b?$', limit,
                 re.IGNORECASE | re.VERBOSE)
        raise ValueError('Invalid memory limit %r' % (limit,))
    memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()])
    real_max_memuse = memlimit
    # Clamp to the platform's maximum object size.
    if memlimit > MAX_Py_ssize_t:
        memlimit = MAX_Py_ssize_t
    if memlimit < _2G - 1:
        raise ValueError('Memory limit %r too low to be useful' % (limit,))
    max_memuse = memlimit
def bigmemtest(minsize, memuse, overhead=5*_1M):
    """Decorator for bigmem tests.

    'minsize' is the minimum useful size for the test (in arbitrary,
    test-interpreted units.) 'memuse' is the number of 'bytes per size' for
    the test, or a good estimate of it. 'overhead' specifies fixed overhead,
    independent of the testsize, and defaults to 5Mb.

    The decorator tries to guess a good value for 'size' and passes it to
    the decorated test function. If minsize * memuse is more than the
    allowed memory use (as defined by max_memuse), the test is skipped.
    Otherwise, minsize is adjusted upward to use up to max_memuse.
    """
    # NOTE(review): the `def decorator(f):` / `def wrapper(self):` framing,
    # the `if not max_memuse:` branching and the returns are elided in this
    # excerpt.
            # If max_memuse is 0 (the default),
            # we still want to run the tests with size set to a few kb,
            # to make sure they work. We still want to avoid using
            # too much memory, though, but we do that noisily.
            self.assertFalse(maxsize * memuse + overhead > 20 * _1M)
            maxsize = int((max_memuse - overhead) / memuse)
            if maxsize < minsize:
                # Really ought to print 'test skipped' or something
                sys.stderr.write("Skipping %s because of memory "
                                 "constraint\n" % (f.__name__,))
            # Try to keep some breathing room in memory use
            maxsize = max(maxsize - 50 * _1M, minsize)
            return f(self, maxsize)
        wrapper.minsize = minsize
        wrapper.memuse = memuse
        wrapper.overhead = overhead
def precisionbigmemtest(size, memuse, overhead=5*_1M):
    # NOTE(review): the inner decorator/wrapper def lines and several
    # branches are elided in this excerpt.
        if not real_max_memuse:
            # NOTE(review): small-size fallback elided.
        if real_max_memuse and real_max_memuse < maxsize * memuse:
            # NOTE(review): the verbosity guard and skip return are elided.
            sys.stderr.write("Skipping %s because of memory "
                             "constraint\n" % (f.__name__,))
            return f(self, maxsize)
        wrapper.memuse = memuse
        wrapper.overhead = overhead

def bigaddrspacetest(f):
    """Decorator for tests that fill the address space."""
    # NOTE(review): the `def wrapper(self):` framing and the test-invoking
    # branch are elided in this excerpt.
    if max_memuse < MAX_Py_ssize_t:
        # NOTE(review): the verbosity guard is elided here.
        sys.stderr.write("Skipping %s because of memory "
                         "constraint\n" % (f.__name__,))
#=======================================================================
# unittest integration.

class BasicTestRunner:
    # NOTE(review): the `def run(self, test):` line and the remainder of the
    # method (running the test, returning the result) are elided in this
    # excerpt.
        result = unittest.TestResult()
def requires_resource(resource):
    # NOTE(review): the success branch returning the identity decorator and
    # the `else:` line are elided in this excerpt.
    if is_resource_enabled(resource):
        return unittest.skip("resource {0!r} is not enabled".format(resource))
def cpython_only(test):
    """
    Decorator for tests only applicable on CPython.
    """
    return impl_detail(cpython=True)(test)

def impl_detail(msg=None, **guards):
    if check_impl_detail(**guards):
        # NOTE(review): the identity-decorator return for this branch is
        # elided in this excerpt.
    guardnames, default = _parse_guards(guards)
    # NOTE(review): the `if msg is None:` branching that selects between
    # these two message templates is elided in this excerpt.
        msg = "implementation detail not available on {0}"
        msg = "implementation detail specific to {0}"
        guardnames = sorted(guardnames.keys())
    msg = msg.format(' or '.join(guardnames))
    return unittest.skip(msg)
def _parse_guards(guards):
    # Returns a tuple ({platform_name: run_me}, default_value)
    # NOTE(review): the `if not guards:` guard for this default return is
    # elided in this excerpt.
        return ({'cpython': True}, False)
    is_true = guards.values()[0]
    assert guards.values() == [is_true] * len(guards) # all True or all False
    return (guards, not is_true)
# Use the following check to guard CPython's implementation-specific tests --
# or to run them only on the implementation(s) guarded by the arguments.
def check_impl_detail(**guards):
    """This function returns True or False depending on the host platform.

       if check_impl_detail():               # only on CPython (default)
       if check_impl_detail(jython=True):    # only on Jython
       if check_impl_detail(cpython=False):  # everywhere except on CPython
    """
    guards, default = _parse_guards(guards)
    return guards.get(platform.python_implementation().lower(), default)
def _run_suite(suite):
    """Run tests from a unittest.TestSuite-derived class."""
    # NOTE(review): the `if verbose:` / `else:` framing selecting between
    # these two runners is elided in this excerpt.
        runner = unittest.TextTestRunner(sys.stdout, verbosity=2)
        runner = BasicTestRunner()

    result = runner.run(suite)
    if not result.wasSuccessful():
        # Single error/failure: report its traceback text directly.
        if len(result.errors) == 1 and not result.failures:
            err = result.errors[0][1]
        elif len(result.failures) == 1 and not result.errors:
            err = result.failures[0][1]
        # NOTE(review): the final `else:` and the verbosity guard around the
        # hint suffix are elided in this excerpt.
            err = "multiple errors occurred"
            err += "; run in verbose mode for details"
        raise TestFailed(err)
def run_unittest(*classes):
    """Run tests from unittest.TestCase-derived classes."""
    valid_types = (unittest.TestSuite, unittest.TestCase)
    suite = unittest.TestSuite()
    # NOTE(review): the `for cls in classes:` loop header is elided here.
        if isinstance(cls, str):
            if cls in sys.modules:
                suite.addTest(unittest.findTestCases(sys.modules[cls]))
            # NOTE(review): the `else:` line is elided here.
                raise ValueError("str arguments must be keys in sys.modules")
        elif isinstance(cls, valid_types):
            # NOTE(review): `suite.addTest(cls)` and the trailing `else:`
            # are elided in this excerpt.
            suite.addTest(unittest.makeSuite(cls))
    # NOTE(review): the final `_run_suite(suite)` call is elided.
#=======================================================================

def run_doctest(module, verbosity=None):
    """Run doctest on the given module. Return (#failures, #tests).

    If optional argument verbosity is not specified (or is None), pass
    test_support's belief about verbosity on to doctest. Else doctest's
    usual behavior is used (it searches sys.argv for -v).
    """
    # NOTE(review): the `import doctest` line and the branches assigning
    # `verbosity` from the module-level `verbose` flag are elided in this
    # excerpt.
    if verbosity is None:

    # Direct doctest output (normally just errors) to real stdout; doctest
    # output shouldn't be compared by regrtest.
    save_stdout = sys.stdout
    sys.stdout = get_original_stdout()
    # NOTE(review): the try/finally framing and the `if f:` guard are
    # elided in this excerpt.
    f, t = doctest.testmod(module, verbose=verbosity)
        raise TestFailed("%d of %d doctests failed" % (f, t))
    sys.stdout = save_stdout
    # NOTE(review): the `if verbose:` guard above this print is elided.
        print 'doctest (%s) ... %d tests with zero failures' % (module.__name__, t)
#=======================================================================
# Threading support to prevent reporting refleaks when running regrtest.py -R

# NOTE: we use thread._count() rather than threading.enumerate() (or the
# moral equivalent thereof) because a threading.Thread object is still alive
# until its __bootstrap() method has returned, even after it has been
# unregistered from the threading module.
# thread._count(), on the other hand, only gets decremented *after* the
# __bootstrap() method has returned, which gives us reliable reference counts
# at the end of a test run.

def threading_setup():
    # NOTE(review): the `if thread:` guard and the thread-less fallback
    # branch are elided in this excerpt.
    return thread._count(),

def threading_cleanup(nb_threads):
    # NOTE(review): the body is mostly elided; only the retry-loop header
    # is visible.  _MAX_COUNT bounds how long we wait for threads to exit.
    for count in range(_MAX_COUNT):
    # XXX print a warning in case of failure?
def reap_threads(func):
    """Use this function when threads are being used. This will
    ensure that the threads are cleaned up even when the test fails.
    If threading is unavailable this function does nothing.
    """
    # NOTE(review): the thread-less early return, the try/finally framing
    # inside `decorator`, and the final `return decorator` are elided in
    # this excerpt.
    @functools.wraps(func)
    def decorator(*args):
        key = threading_setup()
            threading_cleanup(*key)
def reap_children():
    """Use this function at the end of test_main() whenever sub-processes
    are started. This will help ensure that no extra children (zombies)
    stick around to hog resources and create problems when looking
    for refleaks.
    """
    # Reap all our dead child processes so we don't leave zombies around.
    # These hog resources and might be causing some of the buildbots to die.
    if hasattr(os, 'waitpid'):
        # NOTE(review): the `any_process = -1` assignment and the while/try
        # framing around waitpid are elided in this excerpt.
            # This will raise an exception on Windows. That's ok.
            pid, status = os.waitpid(any_process, os.WNOHANG)
    # NOTE(review): the `def py3k_bytes(b):` line is not visible in this
    # excerpt.
    """Emulate the py3k bytes() constructor.

    NOTE: This is only a best effort function.
    """
    # NOTE(review): the try branch (e.g. b.tobytes()) and the isinstance
    # checks preceding the join are elided in this excerpt.
    except AttributeError:
            return b"".join(chr(x) for x in b)
def args_from_interpreter_flags():
    """Return a list of command-line arguments reproducing the current
    settings in sys.flags."""
    # NOTE(review): the `flag_opt_map = {` opening, several entries, the
    # closing brace, the `args = []` initialisation, the `if v > 0:` guard
    # and the final `return args` are elided in this excerpt.
        'bytes_warning': 'b',
        'dont_write_bytecode': 'B',
        'ignore_environment': 'E',
        'no_user_site': 's',
        'py3k_warning': '3',
    for flag, opt in flag_opt_map.items():
        v = getattr(sys.flags, flag)
            # Repeat the option letter v times (e.g. -bb for bytes_warning=2).
            args.append('-' + opt * v)