}hOdk<>TdUa07R(LPI6@!GU$ty4=mwqHG-XVe*n(Yvgdlr+FqIU18!osi)48t~eWX8)&L",
+ "G)Ud^0zz@*AF+2r7E}Nf9Y72K~o-T%}D&z%}#7g2br?oH6ZiYH^%>J3D)TPKV(JY*bwjuw5=DsPB@~CrROZeN",
+ "x>A*H&CHrWt0`EP`m!F%waepl#|w#&`XgVc?~2M3uw$fGX~tf_Il!q#Aa<*8xlzQ2+7r6Z",
+ "^;Laa9F(WB_O&Dy2r>~@kSi16W{=6+i5GV=Uq~KX*~&HUN4oz7*O(gXIr}sDVcD`Ikgw#|",
+ "50ssal8s)Qy;?YGCf;*UKKKN!T4!Kqy_G;7PfQapugqvVBKy12v3TVH^L2",
+ "0?#5*VP~MOYfe$h`*L!7@tiW|_^X1N%<}`7YahiUYtMu5XwmOf3?dr+@zXHwW`z}ZDqZlT",
+ "<2Cs(<1%M!i6o&VK89BY0J7HPIo;O62s=|IbV^@y$N=>i^F00FcHoDl#3",
+ "Mdv&xvBYQl0ssI200dcD"
+ ],
+ "Europe/Dublin": [
+ "{Wp48S^xk9=GL@E0stWa761SMbT8$j;0>b$_+0=h7KxBg5R*;&J77#T_U2R5sleVWFDmK~",
+ "Kzj5oh@`QKHvW^6V{jU-w>qg1tSt0c^vh;?qAqA0%t?;#S~6U8Qi",
+ "v&f1s9IH#g$m1k1a#3+lylw4mwT4QnEUUQdwg+xnEcBlgu31bAVabn41OMZVLGz6NDwG%X",
+ "uQar!b>GI{qSahE`AG}$kRWbuI~JCt;38)Xwbb~Qggs55t+MAHIxgDxzTJ;2xXx99+qCy4",
+ "45kC#v_l8fx|G&jlVvaciR<-wwf22l%4(t@S6tnX39#_K(4S0fu$FUs$isud9IKzCXB78NkARYq@9Dc0TGkhz);NtM_SSzEffN",
+ "l{2^*CKGdp52h!52A)6q9fUSltXF{T*Ehc9Q7u8!W7pE(Fv$D$cKUAt6wY=DA1mGgxC*VX",
+ "q_If3G#FY6-Voj`fIKk`0}Cc72_SD{v>468LV{pyBI33^p0E?}RwDA6Pkq--C~0jF&Z@Pv",
+ "!dx_1SN_)jwz@P$(oK%P!Tk9?fRjK88yxhxlcFtTjjZ$DYssSsa#ufYrR+}}nKS+r384o~",
+ "!Uw$nwTbF~qgRsgr0N#d@KIinx%hQB(SJyjJtDtIy(%mDm}ZBGN}dV6K~om|=U",
+ "VGkbciQ=^$_14|gT21!YQ)@y*Rd0i_lS6gtPBE9+ah%WIJPwzUTjIr+J1XckkmA!6WE16%",
+ "CVAl{Dn&-)=G$Bjh?bh0$Xt1UDcgXJjXzzojuw0>paV~?Sa`VN3FysqFxTzfKVAu*ucq#+m=|KSSMvp_#@-lwd+q*ue",
+ "FQ^5<|<0R-u4qYMbRqzSn&",
+ "Q7jSuvc%b+EZc%>nI(+&0Tl1Y>a6v4`uNFD-7$QrhHgS7Wnv~rDgfH;rQw3+m`LJxoM4v#",
+ "gK@?|B{RHJ*VxZgk#!p<_&-sjxOda0YaiJ1UnG41VPv(Et%ElzKRMcO$AfgU+Xnwg5p2_+",
+ "NrnZ1WfEj^fmHd^sx@%JWKkh#zaK0ox%rdP)zUmGZZnqmZ_9L=%6R8ibJH0bOT$AGhDo6{",
+ "fJ?;_U;D|^>5by2ul@i4Zf()InfFN}00EQ=q#FPL>RM>svBYQl0ssI200dcD"
+ ],
+ "Europe/Lisbon": [
+ "{Wp48S^xk9=GL@E0stWa761SMbT8$j;0=rf*IfWA7KxBg5R*;*X|PN+G3LqthM?xgkNUN_",
+ ")gCt1Sc%YT6^TTomk4yVHXeyvQj8}l<;q&s7K}#Vnc8lII1?)AHh$*>OKUU4S;*h>v*ep0",
+ "xTi1cK2{aY*|2D*-~K<;-{_W+r@NvZ7-|NZv($ek_C%VfP0xjWeZP#CPXD`IKkakjh(kUd",
+ "&H)m;^Q(jGjIyiyrcUMtOP)u3A>sw6ux;Bmp3x$4QvQKMx5TrCx_!$srWQuXNs&`9=^IY1",
+ "yc&C31!sQh7P=Mk*#6x8Z@5^%ehR8UW$OWw0KMw}P1ycI^",
+ "4eh12oBUOV?S>n*d!+EM@>x#9PZD12iD=zaC;7`8dTfkU_6d}OZvSFSbGgXeKw}XyX@D=(",
+ ")D0!^DBGr8pXWBT$S-yhLP>Z3ys^VW3}RQ6{NGGVJG6vf*MH93vvNW6yLjie1;{4tVhg-KnSf|G`!",
+ "Z;j$7gJ1ows~RD=@n7I6aFd8rOR_7Y?E-$clI%1o5gA@O!KPa^(8^iFFeFykI-+z>E$mvp",
+ "E_h`vbHPjqkLs`Dn-0FV`R@z|h!S(Lb;M&|Exr!biY`%bfp$6`hK;GDhdP|^Q",
+ "*Ty*}1d41K>H2B{jrjE9aFK>yAQJBX9CD%-384S;0fw`PlprHGS`^b$oS-`I4VH7ji8ou-",
+ "g|060jfb1XcxiInT0oOoeR7#%e5Ug5#KW)nVSRvLHNe$SQHM@2)`S9L7>RL@Qx%fmm7?3u7P5TywFQ}C@S(pq}|",
+ "eLPT{C^{<0Q?uU&kSVd%!~8q3;Z0s3OqzF`$HRkePL5Ywgiwn{R(zi+jmOBFrVpW;)@UsU#%$8BcV#h@}m$#!Fglo&bwb78aYqOG_W7h{eb(+39&-mk4EIXq_",
+ "_`30=8sfA3=!3TO_TyS5X22~?6nKngZ|bq=grdq=9X)3xAkA42L!~rmS)n3w-~;lgz%Fhn",
+ "(?rXdp2ho~9?wmVs2JwVt~?@FVD%`tN69{(i3oQa;O0$E$lF&~Y#_H6bu6(BiwblJ>;-Fs",
+ "gA$Y$*?=X)n1pFkKn}F~`>=4)+LLQk?L*P!bhAm0;`N~z3QbUIyVrm%kOZ(n1JJsm0pyb8",
+ "!GV{d*C!9KXv;4vD4Q>-k#+x(!V5L@w5M>v2V5a`B>t(|B",
+ "|Fqr4^-{S*%Ep~ojUtx_CRbSQ(uFwu2=KH)Q@EBs@ZqRXn4mU;B!68;;IQs3Ub=n&UU%*m",
+ "k&zwD36&JSwsN(%k&x?H+tN^6)23c`I0=5^N_R0~1>tsFZ`^`3z~rXSXT&qcwa#n!%+Z#P",
+ "PG}(D^_CCILXnF|GKwabBh*xFS?4rwGo2vtJUwzrbv_$5PO+`?$l{H-jGB@X%S!OAhw;D4",
+ "XFycN3!XqQ&EorJOD3>~^U%Luw!jF<;6_q-f-S|6{cQDfZ2(4Xf1MMLr1=SA=MwVf2%Pp%VP;jn)|5Tf!-DbUGn%I-rkYaH7?$$O!t)wwClAisr3eUoeB^~T=U*_P~Y2*KdnO87>B!19sV=xZ5",
+ "yApq26RxgqA|*tmsvtL#OhcF(C<0EGWHP)BFl?h)_*7!{LoJiv%RsOs!q->n+DcV%9~B@RbC_1G_1g6`Yd~8|%-=2l~oGN!~TVv2Bnk>7wW8L@^?vX$f3AiT)(4nrCuTm9%(XC6Nai",
+ "E(;}7&=YZagjAN$O-cN;1u{dTkElmB0GT$|Wa)QMmKrx<|LCJ9qlUoFsUbD^H^6_8(w<0{",
+ "ftj&O1~p_%lh5z;zNV&sP+",
+ "NF2>iK{8KMUf+)<-)VxXbLxD(alL}N$AT-ogNbJSMMYeX+Z{jS)b8TK^PB=FxyBxzfmFto",
+ "eo0R`a(%NO?#aEH9|?Cv00000NIsFh6BW2800DjO0RR918Pu^`vBYQl0ssI200dcD"
+ ],
+ "UTC": [
+ "{Wp48S^xk9=GL@E0stWa761SMbT8$j-~e#|9bEt_7KxBg5R*|3h1|xhHLji!C57qW6L*|H",
+ "pEErm00000ygu;I+>V)?00B92fhY-(AGY&-0RR9100dcD"
+ ]
+ },
+ "metadata": {
+ "version": "2020a"
+ }
+}
\ No newline at end of file
diff --git a/Lib/test/test_zoneinfo/test_zoneinfo.py b/Lib/test/test_zoneinfo/test_zoneinfo.py
new file mode 100644
index 00000000..85703269
--- /dev/null
+++ b/Lib/test/test_zoneinfo/test_zoneinfo.py
@@ -0,0 +1,2099 @@
+from __future__ import annotations
+
+import base64
+import contextlib
+import dataclasses
+import importlib.metadata
+import io
+import json
+import lzma
+import os
+import pathlib
+import pickle
+import re
+import shutil
+import struct
+import tempfile
+import unittest
+from datetime import date, datetime, time, timedelta, timezone
+from functools import cached_property
+
+from . import _support as test_support
+from ._support import OS_ENV_LOCK, TZPATH_TEST_LOCK, ZoneInfoTestBase
+
+py_zoneinfo, c_zoneinfo = test_support.get_modules()
+
+try:
+ importlib.metadata.metadata("tzdata")
+ HAS_TZDATA_PKG = True
+except importlib.metadata.PackageNotFoundError:
+ HAS_TZDATA_PKG = False
+
+ZONEINFO_DATA = None
+ZONEINFO_DATA_V1 = None
+TEMP_DIR = None
+DATA_DIR = pathlib.Path(__file__).parent / "data"
+ZONEINFO_JSON = DATA_DIR / "zoneinfo_data.json"
+
+# Useful constants
+ZERO = timedelta(0)
+ONE_H = timedelta(hours=1)
+
+
+def setUpModule():
+ global TEMP_DIR
+ global ZONEINFO_DATA
+ global ZONEINFO_DATA_V1
+
+ TEMP_DIR = pathlib.Path(tempfile.mkdtemp(prefix="zoneinfo"))
+ ZONEINFO_DATA = ZoneInfoData(ZONEINFO_JSON, TEMP_DIR / "v2")
+ ZONEINFO_DATA_V1 = ZoneInfoData(ZONEINFO_JSON, TEMP_DIR / "v1", v1=True)
+
+
+def tearDownModule():
+ shutil.rmtree(TEMP_DIR)
+
+
+class TzPathUserMixin:
+ """
+    Adds setUp() (and matching cleanup) to make TZPATH manipulations thread-safe.
+
+ Any tests that require manipulation of the TZPATH global are necessarily
+ thread unsafe, so we will acquire a lock and reset the TZPATH variable
+ to the default state before each test and release the lock after the test
+ is through.
+ """
+
+ @property
+ def tzpath(self): # pragma: nocover
+ return None
+
+ @property
+ def block_tzdata(self):
+ return True
+
+ def setUp(self):
+ with contextlib.ExitStack() as stack:
+ stack.enter_context(
+ self.tzpath_context(
+ self.tzpath,
+ block_tzdata=self.block_tzdata,
+ lock=TZPATH_TEST_LOCK,
+ )
+ )
+ self.addCleanup(stack.pop_all().close)
+
+ super().setUp()
+
+
+class DatetimeSubclassMixin:
+ """
+ Replaces all ZoneTransition transition dates with a datetime subclass.
+ """
+
+ class DatetimeSubclass(datetime):
+ @classmethod
+ def from_datetime(cls, dt):
+ return cls(
+ dt.year,
+ dt.month,
+ dt.day,
+ dt.hour,
+ dt.minute,
+ dt.second,
+ dt.microsecond,
+ tzinfo=dt.tzinfo,
+ fold=dt.fold,
+ )
+
+ def load_transition_examples(self, key):
+ transition_examples = super().load_transition_examples(key)
+ for zt in transition_examples:
+ dt = zt.transition
+ new_dt = self.DatetimeSubclass.from_datetime(dt)
+ new_zt = dataclasses.replace(zt, transition=new_dt)
+ yield new_zt
+
+
+class ZoneInfoTest(TzPathUserMixin, ZoneInfoTestBase):
+ module = py_zoneinfo
+ class_name = "ZoneInfo"
+
+ def setUp(self):
+ super().setUp()
+
+ # This is necessary because various subclasses pull from different
+ # data sources (e.g. tzdata, V1 files, etc).
+ self.klass.clear_cache()
+
+ @property
+ def zoneinfo_data(self):
+ return ZONEINFO_DATA
+
+ @property
+ def tzpath(self):
+ return [self.zoneinfo_data.tzpath]
+
+ def zone_from_key(self, key):
+ return self.klass(key)
+
+ def zones(self):
+ return ZoneDumpData.transition_keys()
+
+ def fixed_offset_zones(self):
+ return ZoneDumpData.fixed_offset_zones()
+
+ def load_transition_examples(self, key):
+ return ZoneDumpData.load_transition_examples(key)
+
+ def test_str(self):
+ # Zones constructed with a key must have str(zone) == key
+ for key in self.zones():
+ with self.subTest(key):
+ zi = self.zone_from_key(key)
+
+ self.assertEqual(str(zi), key)
+
+ # Zones with no key constructed should have str(zone) == repr(zone)
+ file_key = self.zoneinfo_data.keys[0]
+ file_path = self.zoneinfo_data.path_from_key(file_key)
+
+ with open(file_path, "rb") as f:
+ with self.subTest(test_name="Repr test", path=file_path):
+ zi_ff = self.klass.from_file(f)
+ self.assertEqual(str(zi_ff), repr(zi_ff))
+
+ def test_repr(self):
+ # The repr is not guaranteed, but I think we can insist that it at
+ # least contain the name of the class.
+ key = next(iter(self.zones()))
+
+ zi = self.klass(key)
+ class_name = self.class_name
+ with self.subTest(name="from key"):
+ self.assertRegex(repr(zi), class_name)
+
+ file_key = self.zoneinfo_data.keys[0]
+ file_path = self.zoneinfo_data.path_from_key(file_key)
+ with open(file_path, "rb") as f:
+ zi_ff = self.klass.from_file(f, key=file_key)
+
+ with self.subTest(name="from file with key"):
+ self.assertRegex(repr(zi_ff), class_name)
+
+ with open(file_path, "rb") as f:
+ zi_ff_nk = self.klass.from_file(f)
+
+ with self.subTest(name="from file without key"):
+ self.assertRegex(repr(zi_ff_nk), class_name)
+
+ def test_key_attribute(self):
+ key = next(iter(self.zones()))
+
+ def from_file_nokey(key):
+ with open(self.zoneinfo_data.path_from_key(key), "rb") as f:
+ return self.klass.from_file(f)
+
+ constructors = (
+ ("Primary constructor", self.klass, key),
+ ("no_cache", self.klass.no_cache, key),
+ ("from_file", from_file_nokey, None),
+ )
+
+ for msg, constructor, expected in constructors:
+ zi = constructor(key)
+
+ # Ensure that the key attribute is set to the input to ``key``
+ with self.subTest(msg):
+ self.assertEqual(zi.key, expected)
+
+ # Ensure that the key attribute is read-only
+ with self.subTest(f"{msg}: readonly"):
+ with self.assertRaises(AttributeError):
+ zi.key = "Some/Value"
+
+ def test_bad_keys(self):
+ bad_keys = [
+ "Eurasia/Badzone", # Plausible but does not exist
+ "BZQ",
+ "America.Los_Angeles",
+            "🇨🇦", # Non-ascii
+ "America/New\ud800York", # Contains surrogate character
+ ]
+
+ for bad_key in bad_keys:
+ with self.assertRaises(self.module.ZoneInfoNotFoundError):
+ self.klass(bad_key)
+
+ def test_bad_keys_paths(self):
+ bad_keys = [
+ "/America/Los_Angeles", # Absolute path
+ "America/Los_Angeles/", # Trailing slash - not normalized
+ "../zoneinfo/America/Los_Angeles", # Traverses above TZPATH
+ "America/../America/Los_Angeles", # Not normalized
+ "America/./Los_Angeles",
+ ]
+
+ for bad_key in bad_keys:
+ with self.assertRaises(ValueError):
+ self.klass(bad_key)
+
+ def test_bad_zones(self):
+ bad_zones = [
+ b"", # Empty file
+ b"AAAA3" + b" " * 15, # Bad magic
+ ]
+
+ for bad_zone in bad_zones:
+ fobj = io.BytesIO(bad_zone)
+ with self.assertRaises(ValueError):
+ self.klass.from_file(fobj)
+
+ def test_fromutc_errors(self):
+ key = next(iter(self.zones()))
+ zone = self.zone_from_key(key)
+
+ bad_values = [
+ (datetime(2019, 1, 1, tzinfo=timezone.utc), ValueError),
+ (datetime(2019, 1, 1), ValueError),
+ (date(2019, 1, 1), TypeError),
+ (time(0), TypeError),
+ (0, TypeError),
+ ("2019-01-01", TypeError),
+ ]
+
+ for val, exc_type in bad_values:
+ with self.subTest(val=val):
+ with self.assertRaises(exc_type):
+ zone.fromutc(val)
+
+ def test_utc(self):
+ zi = self.klass("UTC")
+ dt = datetime(2020, 1, 1, tzinfo=zi)
+
+ self.assertEqual(dt.utcoffset(), ZERO)
+ self.assertEqual(dt.dst(), ZERO)
+ self.assertEqual(dt.tzname(), "UTC")
+
+ def test_unambiguous(self):
+ test_cases = []
+ for key in self.zones():
+ for zone_transition in self.load_transition_examples(key):
+ test_cases.append(
+ (
+ key,
+ zone_transition.transition - timedelta(days=2),
+ zone_transition.offset_before,
+ )
+ )
+
+ test_cases.append(
+ (
+ key,
+ zone_transition.transition + timedelta(days=2),
+ zone_transition.offset_after,
+ )
+ )
+
+ for key, dt, offset in test_cases:
+ with self.subTest(key=key, dt=dt, offset=offset):
+ tzi = self.zone_from_key(key)
+ dt = dt.replace(tzinfo=tzi)
+
+ self.assertEqual(dt.tzname(), offset.tzname, dt)
+ self.assertEqual(dt.utcoffset(), offset.utcoffset, dt)
+ self.assertEqual(dt.dst(), offset.dst, dt)
+
+ def test_folds_and_gaps(self):
+ test_cases = []
+ for key in self.zones():
+ tests = {"folds": [], "gaps": []}
+ for zt in self.load_transition_examples(key):
+ if zt.fold:
+ test_group = tests["folds"]
+ elif zt.gap:
+ test_group = tests["gaps"]
+ else:
+ # Assign a random variable here to disable the peephole
+ # optimizer so that coverage can see this line.
+ # See bpo-2506 for more information.
+ no_peephole_opt = None
+ continue
+
+ # Cases are of the form key, dt, fold, offset
+ dt = zt.anomaly_start - timedelta(seconds=1)
+ test_group.append((dt, 0, zt.offset_before))
+ test_group.append((dt, 1, zt.offset_before))
+
+ dt = zt.anomaly_start
+ test_group.append((dt, 0, zt.offset_before))
+ test_group.append((dt, 1, zt.offset_after))
+
+ dt = zt.anomaly_start + timedelta(seconds=1)
+ test_group.append((dt, 0, zt.offset_before))
+ test_group.append((dt, 1, zt.offset_after))
+
+ dt = zt.anomaly_end - timedelta(seconds=1)
+ test_group.append((dt, 0, zt.offset_before))
+ test_group.append((dt, 1, zt.offset_after))
+
+ dt = zt.anomaly_end
+ test_group.append((dt, 0, zt.offset_after))
+ test_group.append((dt, 1, zt.offset_after))
+
+ dt = zt.anomaly_end + timedelta(seconds=1)
+ test_group.append((dt, 0, zt.offset_after))
+ test_group.append((dt, 1, zt.offset_after))
+
+ for grp, test_group in tests.items():
+ test_cases.append(((key, grp), test_group))
+
+ for (key, grp), tests in test_cases:
+ with self.subTest(key=key, grp=grp):
+ tzi = self.zone_from_key(key)
+
+ for dt, fold, offset in tests:
+ dt = dt.replace(fold=fold, tzinfo=tzi)
+
+ self.assertEqual(dt.tzname(), offset.tzname, dt)
+ self.assertEqual(dt.utcoffset(), offset.utcoffset, dt)
+ self.assertEqual(dt.dst(), offset.dst, dt)
+
+ def test_folds_from_utc(self):
+ for key in self.zones():
+ zi = self.zone_from_key(key)
+ with self.subTest(key=key):
+ for zt in self.load_transition_examples(key):
+ if not zt.fold:
+ continue
+
+ dt_utc = zt.transition_utc
+ dt_before_utc = dt_utc - timedelta(seconds=1)
+ dt_after_utc = dt_utc + timedelta(seconds=1)
+
+ dt_before = dt_before_utc.astimezone(zi)
+ self.assertEqual(dt_before.fold, 0, (dt_before, dt_utc))
+
+ dt_after = dt_after_utc.astimezone(zi)
+ self.assertEqual(dt_after.fold, 1, (dt_after, dt_utc))
+
+ def test_time_variable_offset(self):
+ # self.zones() only ever returns variable-offset zones
+ for key in self.zones():
+ zi = self.zone_from_key(key)
+ t = time(11, 15, 1, 34471, tzinfo=zi)
+
+ with self.subTest(key=key):
+ self.assertIs(t.tzname(), None)
+ self.assertIs(t.utcoffset(), None)
+ self.assertIs(t.dst(), None)
+
+ def test_time_fixed_offset(self):
+ for key, offset in self.fixed_offset_zones():
+ zi = self.zone_from_key(key)
+
+ t = time(11, 15, 1, 34471, tzinfo=zi)
+
+ with self.subTest(key=key):
+ self.assertEqual(t.tzname(), offset.tzname)
+ self.assertEqual(t.utcoffset(), offset.utcoffset)
+ self.assertEqual(t.dst(), offset.dst)
+
+
+class CZoneInfoTest(ZoneInfoTest):
+ module = c_zoneinfo
+
+ def test_fold_mutate(self):
+ """Test that fold isn't mutated when no change is necessary.
+
+ The underlying C API is capable of mutating datetime objects, and
+ may rely on the fact that addition of a datetime object returns a
+ new datetime; this test ensures that the input datetime to fromutc
+ is not mutated.
+ """
+
+ def to_subclass(dt):
+ class SameAddSubclass(type(dt)):
+ def __add__(self, other):
+ if other == timedelta(0):
+ return self
+
+ return super().__add__(other) # pragma: nocover
+
+ return SameAddSubclass(
+ dt.year,
+ dt.month,
+ dt.day,
+ dt.hour,
+ dt.minute,
+ dt.second,
+ dt.microsecond,
+ fold=dt.fold,
+ tzinfo=dt.tzinfo,
+ )
+
+ key = "Europe/London"
+ zi = self.zone_from_key(key)
+ for zt in self.load_transition_examples(key):
+ if zt.fold and zt.offset_after.utcoffset == ZERO:
+ example = zt.transition_utc.replace(tzinfo=zi)
+ break
+
+ for subclass in [False, True]:
+ if subclass:
+ dt = to_subclass(example)
+ else:
+ dt = example
+
+ with self.subTest(subclass=subclass):
+ dt_fromutc = zi.fromutc(dt)
+
+ self.assertEqual(dt_fromutc.fold, 1)
+ self.assertEqual(dt.fold, 0)
+
+
+class ZoneInfoDatetimeSubclassTest(DatetimeSubclassMixin, ZoneInfoTest):
+ pass
+
+
+class CZoneInfoDatetimeSubclassTest(DatetimeSubclassMixin, CZoneInfoTest):
+ pass
+
+
+class ZoneInfoSubclassTest(ZoneInfoTest):
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+
+ class ZISubclass(cls.klass):
+ pass
+
+ cls.class_name = "ZISubclass"
+ cls.parent_klass = cls.klass
+ cls.klass = ZISubclass
+
+ def test_subclass_own_cache(self):
+ base_obj = self.parent_klass("Europe/London")
+ sub_obj = self.klass("Europe/London")
+
+ self.assertIsNot(base_obj, sub_obj)
+ self.assertIsInstance(base_obj, self.parent_klass)
+ self.assertIsInstance(sub_obj, self.klass)
+
+
+class CZoneInfoSubclassTest(ZoneInfoSubclassTest):
+ module = c_zoneinfo
+
+
+class ZoneInfoV1Test(ZoneInfoTest):
+ @property
+ def zoneinfo_data(self):
+ return ZONEINFO_DATA_V1
+
+ def load_transition_examples(self, key):
+ # We will discard zdump examples outside the range epoch +/- 2**31,
+ # because they are not well-supported in Version 1 files.
+ epoch = datetime(1970, 1, 1)
+ max_offset_32 = timedelta(seconds=2 ** 31)
+ min_dt = epoch - max_offset_32
+ max_dt = epoch + max_offset_32
+
+ for zt in ZoneDumpData.load_transition_examples(key):
+ if min_dt <= zt.transition <= max_dt:
+ yield zt
+
+
+class CZoneInfoV1Test(ZoneInfoV1Test):
+ module = c_zoneinfo
+
+
+@unittest.skipIf(
+ not HAS_TZDATA_PKG, "Skipping tzdata-specific tests: tzdata not installed"
+)
+class TZDataTests(ZoneInfoTest):
+ """
+ Runs all the ZoneInfoTest tests, but against the tzdata package
+
+ NOTE: The ZoneDumpData has frozen test data, but tzdata will update, so
+ some of the tests (particularly those related to the far future) may break
+ in the event that the time zone policies in the relevant time zones change.
+ """
+
+ @property
+ def tzpath(self):
+ return []
+
+ @property
+ def block_tzdata(self):
+ return False
+
+ def zone_from_key(self, key):
+ return self.klass(key=key)
+
+
+@unittest.skipIf(
+ not HAS_TZDATA_PKG, "Skipping tzdata-specific tests: tzdata not installed"
+)
+class CTZDataTests(TZDataTests):
+ module = c_zoneinfo
+
+
+class WeirdZoneTest(ZoneInfoTestBase):
+ module = py_zoneinfo
+
+ def test_one_transition(self):
+ LMT = ZoneOffset("LMT", -timedelta(hours=6, minutes=31, seconds=2))
+ STD = ZoneOffset("STD", -timedelta(hours=6))
+
+ transitions = [
+ ZoneTransition(datetime(1883, 6, 9, 14), LMT, STD),
+ ]
+
+ after = "STD6"
+
+ zf = self.construct_zone(transitions, after)
+ zi = self.klass.from_file(zf)
+
+ dt0 = datetime(1883, 6, 9, 1, tzinfo=zi)
+ dt1 = datetime(1883, 6, 10, 1, tzinfo=zi)
+
+ for dt, offset in [(dt0, LMT), (dt1, STD)]:
+ with self.subTest(name="local", dt=dt):
+ self.assertEqual(dt.tzname(), offset.tzname)
+ self.assertEqual(dt.utcoffset(), offset.utcoffset)
+ self.assertEqual(dt.dst(), offset.dst)
+
+ dts = [
+ (
+ datetime(1883, 6, 9, 1, tzinfo=zi),
+ datetime(1883, 6, 9, 7, 31, 2, tzinfo=timezone.utc),
+ ),
+ (
+ datetime(2010, 4, 1, 12, tzinfo=zi),
+ datetime(2010, 4, 1, 18, tzinfo=timezone.utc),
+ ),
+ ]
+
+ for dt_local, dt_utc in dts:
+ with self.subTest(name="fromutc", dt=dt_local):
+ dt_actual = dt_utc.astimezone(zi)
+ self.assertEqual(dt_actual, dt_local)
+
+ dt_utc_actual = dt_local.astimezone(timezone.utc)
+ self.assertEqual(dt_utc_actual, dt_utc)
+
+ def test_one_zone_dst(self):
+ DST = ZoneOffset("DST", ONE_H, ONE_H)
+ transitions = [
+ ZoneTransition(datetime(1970, 1, 1), DST, DST),
+ ]
+
+ after = "STD0DST-1,0/0,J365/25"
+
+ zf = self.construct_zone(transitions, after)
+ zi = self.klass.from_file(zf)
+
+ dts = [
+ datetime(1900, 3, 1),
+ datetime(1965, 9, 12),
+ datetime(1970, 1, 1),
+ datetime(2010, 11, 3),
+ datetime(2040, 1, 1),
+ ]
+
+ for dt in dts:
+ dt = dt.replace(tzinfo=zi)
+ with self.subTest(dt=dt):
+ self.assertEqual(dt.tzname(), DST.tzname)
+ self.assertEqual(dt.utcoffset(), DST.utcoffset)
+ self.assertEqual(dt.dst(), DST.dst)
+
+ def test_no_tz_str(self):
+ STD = ZoneOffset("STD", ONE_H, ZERO)
+ DST = ZoneOffset("DST", 2 * ONE_H, ONE_H)
+
+ transitions = []
+ for year in range(1996, 2000):
+ transitions.append(
+ ZoneTransition(datetime(year, 3, 1, 2), STD, DST)
+ )
+ transitions.append(
+ ZoneTransition(datetime(year, 11, 1, 2), DST, STD)
+ )
+
+ after = ""
+
+ zf = self.construct_zone(transitions, after)
+
+ # According to RFC 8536, local times after the last transition time
+ # with an empty TZ string are unspecified. We will go with "hold the
+ # last transition", but the most we should promise is "doesn't crash."
+ zi = self.klass.from_file(zf)
+
+ cases = [
+ (datetime(1995, 1, 1), STD),
+ (datetime(1996, 4, 1), DST),
+ (datetime(1996, 11, 2), STD),
+ (datetime(2001, 1, 1), STD),
+ ]
+
+ for dt, offset in cases:
+ dt = dt.replace(tzinfo=zi)
+ with self.subTest(dt=dt):
+ self.assertEqual(dt.tzname(), offset.tzname)
+ self.assertEqual(dt.utcoffset(), offset.utcoffset)
+ self.assertEqual(dt.dst(), offset.dst)
+
+ # Test that offsets return None when using a datetime.time
+ t = time(0, tzinfo=zi)
+ with self.subTest("Testing datetime.time"):
+ self.assertIs(t.tzname(), None)
+ self.assertIs(t.utcoffset(), None)
+ self.assertIs(t.dst(), None)
+
+ def test_tz_before_only(self):
+ # From RFC 8536 Section 3.2:
+ #
+ # If there are no transitions, local time for all timestamps is
+ # specified by the TZ string in the footer if present and nonempty;
+ # otherwise, it is specified by time type 0.
+
+ offsets = [
+ ZoneOffset("STD", ZERO, ZERO),
+ ZoneOffset("DST", ONE_H, ONE_H),
+ ]
+
+ for offset in offsets:
+ # Phantom transition to set time type 0.
+ transitions = [
+ ZoneTransition(None, offset, offset),
+ ]
+
+ after = ""
+
+ zf = self.construct_zone(transitions, after)
+ zi = self.klass.from_file(zf)
+
+ dts = [
+ datetime(1900, 1, 1),
+ datetime(1970, 1, 1),
+ datetime(2000, 1, 1),
+ ]
+
+ for dt in dts:
+ dt = dt.replace(tzinfo=zi)
+ with self.subTest(offset=offset, dt=dt):
+ self.assertEqual(dt.tzname(), offset.tzname)
+ self.assertEqual(dt.utcoffset(), offset.utcoffset)
+ self.assertEqual(dt.dst(), offset.dst)
+
+ def test_empty_zone(self):
+ zf = self.construct_zone([], "")
+
+ with self.assertRaises(ValueError):
+ self.klass.from_file(zf)
+
+ def test_zone_very_large_timestamp(self):
+ """Test when a transition is in the far past or future.
+
+ Particularly, this is a concern if something:
+
+ 1. Attempts to call ``datetime.timestamp`` for a datetime outside
+ of ``[datetime.min, datetime.max]``.
+ 2. Attempts to construct a timedelta outside of
+ ``[timedelta.min, timedelta.max]``.
+
+ This actually occurs "in the wild", as some time zones on Ubuntu (at
+ least as of 2020) have an initial transition added at ``-2**58``.
+ """
+
+ LMT = ZoneOffset("LMT", timedelta(seconds=-968))
+ GMT = ZoneOffset("GMT", ZERO)
+
+ transitions = [
+ (-(1 << 62), LMT, LMT),
+ ZoneTransition(datetime(1912, 1, 1), LMT, GMT),
+ ((1 << 62), GMT, GMT),
+ ]
+
+ after = "GMT0"
+
+ zf = self.construct_zone(transitions, after)
+ zi = self.klass.from_file(zf, key="Africa/Abidjan")
+
+ offset_cases = [
+ (datetime.min, LMT),
+ (datetime.max, GMT),
+ (datetime(1911, 12, 31), LMT),
+ (datetime(1912, 1, 2), GMT),
+ ]
+
+ for dt_naive, offset in offset_cases:
+ dt = dt_naive.replace(tzinfo=zi)
+ with self.subTest(name="offset", dt=dt, offset=offset):
+ self.assertEqual(dt.tzname(), offset.tzname)
+ self.assertEqual(dt.utcoffset(), offset.utcoffset)
+ self.assertEqual(dt.dst(), offset.dst)
+
+ utc_cases = [
+ (datetime.min, datetime.min + timedelta(seconds=968)),
+ (datetime(1898, 12, 31, 23, 43, 52), datetime(1899, 1, 1)),
+ (
+ datetime(1911, 12, 31, 23, 59, 59, 999999),
+ datetime(1912, 1, 1, 0, 16, 7, 999999),
+ ),
+ (datetime(1912, 1, 1, 0, 16, 8), datetime(1912, 1, 1, 0, 16, 8)),
+ (datetime(1970, 1, 1), datetime(1970, 1, 1)),
+ (datetime.max, datetime.max),
+ ]
+
+ for naive_dt, naive_dt_utc in utc_cases:
+ dt = naive_dt.replace(tzinfo=zi)
+ dt_utc = naive_dt_utc.replace(tzinfo=timezone.utc)
+
+ self.assertEqual(dt_utc.astimezone(zi), dt)
+ self.assertEqual(dt, dt_utc)
+
+ def test_fixed_offset_phantom_transition(self):
+ UTC = ZoneOffset("UTC", ZERO, ZERO)
+
+ transitions = [ZoneTransition(datetime(1970, 1, 1), UTC, UTC)]
+
+ after = "UTC0"
+ zf = self.construct_zone(transitions, after)
+ zi = self.klass.from_file(zf, key="UTC")
+
+ dt = datetime(2020, 1, 1, tzinfo=zi)
+ with self.subTest("datetime.datetime"):
+ self.assertEqual(dt.tzname(), UTC.tzname)
+ self.assertEqual(dt.utcoffset(), UTC.utcoffset)
+ self.assertEqual(dt.dst(), UTC.dst)
+
+ t = time(0, tzinfo=zi)
+ with self.subTest("datetime.time"):
+ self.assertEqual(t.tzname(), UTC.tzname)
+ self.assertEqual(t.utcoffset(), UTC.utcoffset)
+ self.assertEqual(t.dst(), UTC.dst)
+
+ def construct_zone(self, transitions, after=None, version=3):
+ # These are not used for anything, so we're not going to include
+ # them for now.
+ isutc = []
+ isstd = []
+ leap_seconds = []
+
+ offset_lists = [[], []]
+ trans_times_lists = [[], []]
+ trans_idx_lists = [[], []]
+
+ v1_range = (-(2 ** 31), 2 ** 31)
+ v2_range = (-(2 ** 63), 2 ** 63)
+ ranges = [v1_range, v2_range]
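+        # Version 1 data blocks store 32-bit transition times while version
+        # 2+ blocks store 64-bit times, so transitions outside the 32-bit
+        # range are simply dropped from the version 1 lists below.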
+
+ def zt_as_tuple(zt):
+ # zt may be a tuple (timestamp, offset_before, offset_after) or
+            # a ZoneTransition object; this is to allow the timestamp to be
+ # values that are outside the valid range for datetimes but still
+ # valid 64-bit timestamps.
+ if isinstance(zt, tuple):
+ return zt
+
+ if zt.transition:
+ trans_time = int(zt.transition_utc.timestamp())
+ else:
+ trans_time = None
+
+ return (trans_time, zt.offset_before, zt.offset_after)
+
+ transitions = sorted(map(zt_as_tuple, transitions), key=lambda x: x[0])
+
+ for zt in transitions:
+ trans_time, offset_before, offset_after = zt
+
+ for v, (dt_min, dt_max) in enumerate(ranges):
+ offsets = offset_lists[v]
+ trans_times = trans_times_lists[v]
+ trans_idx = trans_idx_lists[v]
+
+ if trans_time is not None and not (
+ dt_min <= trans_time <= dt_max
+ ):
+ continue
+
+ if offset_before not in offsets:
+ offsets.append(offset_before)
+
+ if offset_after not in offsets:
+ offsets.append(offset_after)
+
+ if trans_time is not None:
+ trans_times.append(trans_time)
+ trans_idx.append(offsets.index(offset_after))
+
+ isutcnt = len(isutc)
+ isstdcnt = len(isstd)
+ leapcnt = len(leap_seconds)
+
+ zonefile = io.BytesIO()
+
+ time_types = ("l", "q")
+ for v in range(min((version, 2))):
+ offsets = offset_lists[v]
+ trans_times = trans_times_lists[v]
+ trans_idx = trans_idx_lists[v]
+ time_type = time_types[v]
+
+ # Translate the offsets into something closer to the C values
+ abbrstr = bytearray()
+ ttinfos = []
+
+ for offset in offsets:
+ utcoff = int(offset.utcoffset.total_seconds())
+ isdst = bool(offset.dst)
+ abbrind = len(abbrstr)
+
+ ttinfos.append((utcoff, isdst, abbrind))
+ abbrstr += offset.tzname.encode("ascii") + b"\x00"
+ abbrstr = bytes(abbrstr)
+
+ typecnt = len(offsets)
+ timecnt = len(trans_times)
+ charcnt = len(abbrstr)
+
+ # Write the header
+ zonefile.write(b"TZif")
+ zonefile.write(b"%d" % version)
+ zonefile.write(b" " * 15)
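+            # The ">6l" block holds the six big-endian 32-bit counts from the
+            # TZif header (RFC 8536): UT/local indicators, standard/wall
+            # indicators, leap-second records, transition times, local time
+            # type records, and total length of the abbreviation strings.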
+ zonefile.write(
+ struct.pack(
+ ">6l", isutcnt, isstdcnt, leapcnt, timecnt, typecnt, charcnt
+ )
+ )
+
+ # Now the transition data
+ zonefile.write(struct.pack(f">{timecnt}{time_type}", *trans_times))
+ zonefile.write(struct.pack(f">{timecnt}B", *trans_idx))
+
+ for ttinfo in ttinfos:
+ zonefile.write(struct.pack(">lbb", *ttinfo))
+
+ zonefile.write(bytes(abbrstr))
+
+ # Now the metadata and leap seconds
+ zonefile.write(struct.pack(f"{isutcnt}b", *isutc))
+ zonefile.write(struct.pack(f"{isstdcnt}b", *isstd))
+ zonefile.write(struct.pack(f">{leapcnt}l", *leap_seconds))
+
+ # Finally we write the TZ string if we're writing a Version 2+ file
+ if v > 0:
+ zonefile.write(b"\x0A")
+ zonefile.write(after.encode("ascii"))
+ zonefile.write(b"\x0A")
+
+ zonefile.seek(0)
+ return zonefile
+
+
+class CWeirdZoneTest(WeirdZoneTest):
+ module = c_zoneinfo
+
+
+class TZStrTest(ZoneInfoTestBase):
+ module = py_zoneinfo
+
+ NORMAL = 0
+ FOLD = 1
+ GAP = 2
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+
+ cls._populate_test_cases()
+ cls.populate_tzstr_header()
+
+ @classmethod
+ def populate_tzstr_header(cls):
+ out = bytearray()
+ # The TZif format always starts with a Version 1 file followed by
+ # the Version 2+ file. In this case, we have no transitions, just
+ # the tzstr in the footer, so up to the footer, the files are
+ # identical and we can just write the same file twice in a row.
+ for _ in range(2):
+ out += b"TZif" # Magic value
+ out += b"3" # Version
+ out += b" " * 15 # Reserved
+
+ # We will not write any of the manual transition parts
+ out += struct.pack(">6l", 0, 0, 0, 0, 0, 0)
+
+ cls._tzif_header = bytes(out)
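+        # The result is two back-to-back "empty" TZif headers (V1 followed by
+        # V2+), each advertising zero transitions and zero local time types;
+        # zone_from_tzstr() only has to append the newline-delimited footer.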
+
+ def zone_from_tzstr(self, tzstr):
+ """Creates a zoneinfo file following a POSIX rule."""
+ zonefile = io.BytesIO(self._tzif_header)
+ zonefile.seek(0, 2)
+
+ # Write the footer
+ zonefile.write(b"\x0A")
+ zonefile.write(tzstr.encode("ascii"))
+ zonefile.write(b"\x0A")
+
+ zonefile.seek(0)
+
+ return self.klass.from_file(zonefile, key=tzstr)
+
+ def test_tzstr_localized(self):
+ for tzstr, cases in self.test_cases.items():
+ with self.subTest(tzstr=tzstr):
+ zi = self.zone_from_tzstr(tzstr)
+
+ for dt_naive, offset, _ in cases:
+ dt = dt_naive.replace(tzinfo=zi)
+
+ with self.subTest(tzstr=tzstr, dt=dt, offset=offset):
+ self.assertEqual(dt.tzname(), offset.tzname)
+ self.assertEqual(dt.utcoffset(), offset.utcoffset)
+ self.assertEqual(dt.dst(), offset.dst)
+
+ def test_tzstr_from_utc(self):
+ for tzstr, cases in self.test_cases.items():
+ with self.subTest(tzstr=tzstr):
+ zi = self.zone_from_tzstr(tzstr)
+
+ for dt_naive, offset, dt_type in cases:
+ if dt_type == self.GAP:
+ continue # Cannot create a gap from UTC
+
+ dt_utc = (dt_naive - offset.utcoffset).replace(
+ tzinfo=timezone.utc
+ )
+
+ # Check that we can go UTC -> Our zone
+ dt_act = dt_utc.astimezone(zi)
+ dt_exp = dt_naive.replace(tzinfo=zi)
+
+ self.assertEqual(dt_act, dt_exp)
+
+ if dt_type == self.FOLD:
+ self.assertEqual(dt_act.fold, dt_naive.fold, dt_naive)
+ else:
+ self.assertEqual(dt_act.fold, 0)
+
+ # Now check that we can go our zone -> UTC
+ dt_act = dt_exp.astimezone(timezone.utc)
+
+ self.assertEqual(dt_act, dt_utc)
+
+ def test_invalid_tzstr(self):
+ invalid_tzstrs = [
+ "PST8PDT", # DST but no transition specified
+ "+11", # Unquoted alphanumeric
+ "GMT,M3.2.0/2,M11.1.0/3", # Transition rule but no DST
+ "GMT0+11,M3.2.0/2,M11.1.0/3", # Unquoted alphanumeric in DST
+ "PST8PDT,M3.2.0/2", # Only one transition rule
+ # Invalid offsets
+ "STD+25",
+ "STD-25",
+ "STD+374",
+ "STD+374DST,M3.2.0/2,M11.1.0/3",
+ "STD+23DST+25,M3.2.0/2,M11.1.0/3",
+ "STD-23DST-25,M3.2.0/2,M11.1.0/3",
+ # Completely invalid dates
+ "AAA4BBB,M1443339,M11.1.0/3",
+ "AAA4BBB,M3.2.0/2,0349309483959c",
+ # Invalid months
+ "AAA4BBB,M13.1.1/2,M1.1.1/2",
+ "AAA4BBB,M1.1.1/2,M13.1.1/2",
+ "AAA4BBB,M0.1.1/2,M1.1.1/2",
+ "AAA4BBB,M1.1.1/2,M0.1.1/2",
+ # Invalid weeks
+ "AAA4BBB,M1.6.1/2,M1.1.1/2",
+ "AAA4BBB,M1.1.1/2,M1.6.1/2",
+ # Invalid weekday
+ "AAA4BBB,M1.1.7/2,M2.1.1/2",
+ "AAA4BBB,M1.1.1/2,M2.1.7/2",
+ # Invalid numeric offset
+ "AAA4BBB,-1/2,20/2",
+ "AAA4BBB,1/2,-1/2",
+ "AAA4BBB,367,20/2",
+ "AAA4BBB,1/2,367/2",
+ # Invalid julian offset
+ "AAA4BBB,J0/2,J20/2",
+ "AAA4BBB,J20/2,J366/2",
+ ]
+
+ for invalid_tzstr in invalid_tzstrs:
+ with self.subTest(tzstr=invalid_tzstr):
+ # Not necessarily a guaranteed property, but we should show
+ # the problematic TZ string if that's the cause of failure.
+ tzstr_regex = re.escape(invalid_tzstr)
+ with self.assertRaisesRegex(ValueError, tzstr_regex):
+ self.zone_from_tzstr(invalid_tzstr)
+
+ @classmethod
+ def _populate_test_cases(cls):
+ # This method uses a somewhat unusual style in that it populates the
+ # test cases for each tzstr by using a decorator to automatically call
+ # a function that mutates the current dictionary of test cases.
+ #
+ # The population of the test cases is done in individual functions to
+ # give each set of test cases its own namespace in which to define
+ # its offsets (this way we don't have to worry about variable reuse
+ # causing problems if someone makes a typo).
+ #
+ # The decorator for calling is used to make it more obvious that each
+ # function is actually called (if it's not decorated, it's not called).
+ def call(f):
+ """Decorator to call the addition methods.
+
+ This will call a function which adds at least one new entry into
+ the `cases` dictionary. The decorator will also assert that
+ something was added to the dictionary.
+ """
+ prev_len = len(cases)
+ f()
+ assert len(cases) > prev_len, "Function did not add a test case!"
+
+ NORMAL = cls.NORMAL
+ FOLD = cls.FOLD
+ GAP = cls.GAP
+
+ cases = {}
+
+ @call
+ def _add():
+            # Transition to EDT on the 2nd Sunday in March at 4 AM, and
+            # transition back on the first Sunday in November at 3 AM
+ tzstr = "EST5EDT,M3.2.0/4:00,M11.1.0/3:00"
+
+ EST = ZoneOffset("EST", timedelta(hours=-5), ZERO)
+ EDT = ZoneOffset("EDT", timedelta(hours=-4), ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2019, 3, 9), EST, NORMAL),
+ (datetime(2019, 3, 10, 3, 59), EST, NORMAL),
+ (datetime(2019, 3, 10, 4, 0, fold=0), EST, GAP),
+ (datetime(2019, 3, 10, 4, 0, fold=1), EDT, GAP),
+ (datetime(2019, 3, 10, 4, 1, fold=0), EST, GAP),
+ (datetime(2019, 3, 10, 4, 1, fold=1), EDT, GAP),
+ (datetime(2019, 11, 2), EDT, NORMAL),
+ (datetime(2019, 11, 3, 1, 59, fold=1), EDT, NORMAL),
+ (datetime(2019, 11, 3, 2, 0, fold=0), EDT, FOLD),
+ (datetime(2019, 11, 3, 2, 0, fold=1), EST, FOLD),
+ (datetime(2020, 3, 8, 3, 59), EST, NORMAL),
+ (datetime(2020, 3, 8, 4, 0, fold=0), EST, GAP),
+ (datetime(2020, 3, 8, 4, 0, fold=1), EDT, GAP),
+ (datetime(2020, 11, 1, 1, 59, fold=1), EDT, NORMAL),
+ (datetime(2020, 11, 1, 2, 0, fold=0), EDT, FOLD),
+ (datetime(2020, 11, 1, 2, 0, fold=1), EST, FOLD),
+ )
+
+ @call
+ def _add():
+            # Transition to BST on the last Sunday in March at 1 AM GMT, and
+            # transition back on the last Sunday in October at 2 AM BST
+ tzstr = "GMT0BST-1,M3.5.0/1:00,M10.5.0/2:00"
+
+ GMT = ZoneOffset("GMT", ZERO, ZERO)
+ BST = ZoneOffset("BST", ONE_H, ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2019, 3, 30), GMT, NORMAL),
+ (datetime(2019, 3, 31, 0, 59), GMT, NORMAL),
+ (datetime(2019, 3, 31, 2, 0), BST, NORMAL),
+ (datetime(2019, 10, 26), BST, NORMAL),
+ (datetime(2019, 10, 27, 0, 59, fold=1), BST, NORMAL),
+ (datetime(2019, 10, 27, 1, 0, fold=0), BST, GAP),
+ (datetime(2019, 10, 27, 2, 0, fold=1), GMT, GAP),
+ (datetime(2020, 3, 29, 0, 59), GMT, NORMAL),
+ (datetime(2020, 3, 29, 2, 0), BST, NORMAL),
+ (datetime(2020, 10, 25, 0, 59, fold=1), BST, NORMAL),
+ (datetime(2020, 10, 25, 1, 0, fold=0), BST, FOLD),
+ (datetime(2020, 10, 25, 2, 0, fold=1), GMT, NORMAL),
+ )
+
+ @call
+ def _add():
+            # Australian time zone - DST start is chronologically first
+ tzstr = "AEST-10AEDT,M10.1.0/2,M4.1.0/3"
+
+ AEST = ZoneOffset("AEST", timedelta(hours=10), ZERO)
+ AEDT = ZoneOffset("AEDT", timedelta(hours=11), ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2019, 4, 6), AEDT, NORMAL),
+ (datetime(2019, 4, 7, 1, 59), AEDT, NORMAL),
+ (datetime(2019, 4, 7, 1, 59, fold=1), AEDT, NORMAL),
+ (datetime(2019, 4, 7, 2, 0, fold=0), AEDT, FOLD),
+ (datetime(2019, 4, 7, 2, 1, fold=0), AEDT, FOLD),
+ (datetime(2019, 4, 7, 2, 0, fold=1), AEST, FOLD),
+ (datetime(2019, 4, 7, 2, 1, fold=1), AEST, FOLD),
+ (datetime(2019, 4, 7, 3, 0, fold=0), AEST, NORMAL),
+ (datetime(2019, 4, 7, 3, 0, fold=1), AEST, NORMAL),
+ (datetime(2019, 10, 5, 0), AEST, NORMAL),
+ (datetime(2019, 10, 6, 1, 59), AEST, NORMAL),
+ (datetime(2019, 10, 6, 2, 0, fold=0), AEST, GAP),
+ (datetime(2019, 10, 6, 2, 0, fold=1), AEDT, GAP),
+ (datetime(2019, 10, 6, 3, 0), AEDT, NORMAL),
+ )
+
+ @call
+ def _add():
+ # Irish time zone - negative DST
+ tzstr = "IST-1GMT0,M10.5.0,M3.5.0/1"
+
+ GMT = ZoneOffset("GMT", ZERO, -ONE_H)
+ IST = ZoneOffset("IST", ONE_H, ZERO)
+
+ cases[tzstr] = (
+ (datetime(2019, 3, 30), GMT, NORMAL),
+ (datetime(2019, 3, 31, 0, 59), GMT, NORMAL),
+ (datetime(2019, 3, 31, 2, 0), IST, NORMAL),
+ (datetime(2019, 10, 26), IST, NORMAL),
+ (datetime(2019, 10, 27, 0, 59, fold=1), IST, NORMAL),
+ (datetime(2019, 10, 27, 1, 0, fold=0), IST, FOLD),
+ (datetime(2019, 10, 27, 1, 0, fold=1), GMT, FOLD),
+ (datetime(2019, 10, 27, 2, 0, fold=1), GMT, NORMAL),
+ (datetime(2020, 3, 29, 0, 59), GMT, NORMAL),
+ (datetime(2020, 3, 29, 2, 0), IST, NORMAL),
+ (datetime(2020, 10, 25, 0, 59, fold=1), IST, NORMAL),
+ (datetime(2020, 10, 25, 1, 0, fold=0), IST, FOLD),
+ (datetime(2020, 10, 25, 2, 0, fold=1), GMT, NORMAL),
+ )
+
+ @call
+ def _add():
+ # Pacific/Kosrae: Fixed offset zone with a quoted numerical tzname
+ tzstr = "<+11>-11"
+
+ cases[tzstr] = (
+ (
+ datetime(2020, 1, 1),
+ ZoneOffset("+11", timedelta(hours=11)),
+ NORMAL,
+ ),
+ )
+
+ @call
+ def _add():
+ # Quoted STD and DST, transitions at 24:00
+ tzstr = "<-04>4<-03>,M9.1.6/24,M4.1.6/24"
+
+ M04 = ZoneOffset("-04", timedelta(hours=-4))
+ M03 = ZoneOffset("-03", timedelta(hours=-3), ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2020, 5, 1), M04, NORMAL),
+ (datetime(2020, 11, 1), M03, NORMAL),
+ )
+
+ @call
+ def _add():
+ # Permanent daylight saving time is modeled with transitions at 0/0
+ # and J365/25, as mentioned in RFC 8536 Section 3.3.1
+ tzstr = "EST5EDT,0/0,J365/25"
+
+ EDT = ZoneOffset("EDT", timedelta(hours=-4), ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2019, 1, 1), EDT, NORMAL),
+ (datetime(2019, 6, 1), EDT, NORMAL),
+ (datetime(2019, 12, 31, 23, 59, 59, 999999), EDT, NORMAL),
+ (datetime(2020, 1, 1), EDT, NORMAL),
+ (datetime(2020, 3, 1), EDT, NORMAL),
+ (datetime(2020, 6, 1), EDT, NORMAL),
+ (datetime(2020, 12, 31, 23, 59, 59, 999999), EDT, NORMAL),
+ (datetime(2400, 1, 1), EDT, NORMAL),
+ (datetime(2400, 3, 1), EDT, NORMAL),
+ (datetime(2400, 12, 31, 23, 59, 59, 999999), EDT, NORMAL),
+ )
+
+ @call
+ def _add():
+ # Transitions on March 1st and November 1st of each year
+ tzstr = "AAA3BBB,J60/12,J305/12"
+
+ AAA = ZoneOffset("AAA", timedelta(hours=-3))
+ BBB = ZoneOffset("BBB", timedelta(hours=-2), ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2019, 1, 1), AAA, NORMAL),
+ (datetime(2019, 2, 28), AAA, NORMAL),
+ (datetime(2019, 3, 1, 11, 59), AAA, NORMAL),
+ (datetime(2019, 3, 1, 12, fold=0), AAA, GAP),
+ (datetime(2019, 3, 1, 12, fold=1), BBB, GAP),
+ (datetime(2019, 3, 1, 13), BBB, NORMAL),
+ (datetime(2019, 11, 1, 10, 59), BBB, NORMAL),
+ (datetime(2019, 11, 1, 11, fold=0), BBB, FOLD),
+ (datetime(2019, 11, 1, 11, fold=1), AAA, FOLD),
+ (datetime(2019, 11, 1, 12), AAA, NORMAL),
+ (datetime(2019, 12, 31, 23, 59, 59, 999999), AAA, NORMAL),
+ (datetime(2020, 1, 1), AAA, NORMAL),
+ (datetime(2020, 2, 29), AAA, NORMAL),
+ (datetime(2020, 3, 1, 11, 59), AAA, NORMAL),
+ (datetime(2020, 3, 1, 12, fold=0), AAA, GAP),
+ (datetime(2020, 3, 1, 12, fold=1), BBB, GAP),
+ (datetime(2020, 3, 1, 13), BBB, NORMAL),
+ (datetime(2020, 11, 1, 10, 59), BBB, NORMAL),
+ (datetime(2020, 11, 1, 11, fold=0), BBB, FOLD),
+ (datetime(2020, 11, 1, 11, fold=1), AAA, FOLD),
+ (datetime(2020, 11, 1, 12), AAA, NORMAL),
+ (datetime(2020, 12, 31, 23, 59, 59, 999999), AAA, NORMAL),
+ )
+
+ @call
+ def _add():
+ # Taken from America/Godthab, this rule has a transition on the
+ # Saturday before the last Sunday of March and October, at 22:00
+ # and 23:00, respectively. This is encoded with negative start
+ # and end transition times.
+ tzstr = "<-03>3<-02>,M3.5.0/-2,M10.5.0/-1"
+
+ N03 = ZoneOffset("-03", timedelta(hours=-3))
+ N02 = ZoneOffset("-02", timedelta(hours=-2), ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2020, 3, 27), N03, NORMAL),
+ (datetime(2020, 3, 28, 21, 59, 59), N03, NORMAL),
+ (datetime(2020, 3, 28, 22, fold=0), N03, GAP),
+ (datetime(2020, 3, 28, 22, fold=1), N02, GAP),
+ (datetime(2020, 3, 28, 23), N02, NORMAL),
+ (datetime(2020, 10, 24, 21), N02, NORMAL),
+ (datetime(2020, 10, 24, 22, fold=0), N02, FOLD),
+ (datetime(2020, 10, 24, 22, fold=1), N03, FOLD),
+ (datetime(2020, 10, 24, 23), N03, NORMAL),
+ )
+
+ @call
+ def _add():
+ # Transition times with minutes and seconds
+ tzstr = "AAA3BBB,M3.2.0/01:30,M11.1.0/02:15:45"
+
+ AAA = ZoneOffset("AAA", timedelta(hours=-3))
+ BBB = ZoneOffset("BBB", timedelta(hours=-2), ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2012, 3, 11, 1, 0), AAA, NORMAL),
+ (datetime(2012, 3, 11, 1, 30, fold=0), AAA, GAP),
+ (datetime(2012, 3, 11, 1, 30, fold=1), BBB, GAP),
+ (datetime(2012, 3, 11, 2, 30), BBB, NORMAL),
+ (datetime(2012, 11, 4, 1, 15, 44, 999999), BBB, NORMAL),
+ (datetime(2012, 11, 4, 1, 15, 45, fold=0), BBB, FOLD),
+ (datetime(2012, 11, 4, 1, 15, 45, fold=1), AAA, FOLD),
+ (datetime(2012, 11, 4, 2, 15, 45), AAA, NORMAL),
+ )
+
+ cls.test_cases = cases
+
+
+class CTZStrTest(TZStrTest):
+ module = c_zoneinfo
+
+
+class ZoneInfoCacheTest(TzPathUserMixin, ZoneInfoTestBase):
+ module = py_zoneinfo
+
+ def setUp(self):
+ self.klass.clear_cache()
+ super().setUp()
+
+ @property
+ def zoneinfo_data(self):
+ return ZONEINFO_DATA
+
+ @property
+ def tzpath(self):
+ return [self.zoneinfo_data.tzpath]
+
+ def test_ephemeral_zones(self):
+ self.assertIs(
+ self.klass("America/Los_Angeles"), self.klass("America/Los_Angeles")
+ )
+
+ def test_strong_refs(self):
+ tz0 = self.klass("Australia/Sydney")
+ tz1 = self.klass("Australia/Sydney")
+
+ self.assertIs(tz0, tz1)
+
+ def test_no_cache(self):
+
+ tz0 = self.klass("Europe/Lisbon")
+ tz1 = self.klass.no_cache("Europe/Lisbon")
+
+ self.assertIsNot(tz0, tz1)
+
+ def test_cache_reset_tzpath(self):
+ """Test that the cache persists when tzpath has been changed.
+
+ The PEP specifies that as long as a reference exists to one zone
+ with a given key, the primary constructor must continue to return
+ the same object.
+ """
+ zi0 = self.klass("America/Los_Angeles")
+ with self.tzpath_context([]):
+ zi1 = self.klass("America/Los_Angeles")
+
+ self.assertIs(zi0, zi1)
+
+ def test_clear_cache_explicit_none(self):
+ la0 = self.klass("America/Los_Angeles")
+ self.klass.clear_cache(only_keys=None)
+ la1 = self.klass("America/Los_Angeles")
+
+ self.assertIsNot(la0, la1)
+
+ def test_clear_cache_one_key(self):
+ """Tests that you can clear a single key from the cache."""
+ la0 = self.klass("America/Los_Angeles")
+ dub0 = self.klass("Europe/Dublin")
+
+ self.klass.clear_cache(only_keys=["America/Los_Angeles"])
+
+ la1 = self.klass("America/Los_Angeles")
+ dub1 = self.klass("Europe/Dublin")
+
+ self.assertIsNot(la0, la1)
+ self.assertIs(dub0, dub1)
+
+ def test_clear_cache_two_keys(self):
+ la0 = self.klass("America/Los_Angeles")
+ dub0 = self.klass("Europe/Dublin")
+ tok0 = self.klass("Asia/Tokyo")
+
+ self.klass.clear_cache(
+ only_keys=["America/Los_Angeles", "Europe/Dublin"]
+ )
+
+ la1 = self.klass("America/Los_Angeles")
+ dub1 = self.klass("Europe/Dublin")
+ tok1 = self.klass("Asia/Tokyo")
+
+ self.assertIsNot(la0, la1)
+ self.assertIsNot(dub0, dub1)
+ self.assertIs(tok0, tok1)
+
+
+class CZoneInfoCacheTest(ZoneInfoCacheTest):
+ module = c_zoneinfo
+
+
+class ZoneInfoPickleTest(TzPathUserMixin, ZoneInfoTestBase):
+ module = py_zoneinfo
+
+ def setUp(self):
+ self.klass.clear_cache()
+
+ with contextlib.ExitStack() as stack:
+ stack.enter_context(test_support.set_zoneinfo_module(self.module))
+ self.addCleanup(stack.pop_all().close)
+
+ super().setUp()
+
+ @property
+ def zoneinfo_data(self):
+ return ZONEINFO_DATA
+
+ @property
+ def tzpath(self):
+ return [self.zoneinfo_data.tzpath]
+
+ def test_cache_hit(self):
+ zi_in = self.klass("Europe/Dublin")
+ pkl = pickle.dumps(zi_in)
+ zi_rt = pickle.loads(pkl)
+
+ with self.subTest(test="Is non-pickled ZoneInfo"):
+ self.assertIs(zi_in, zi_rt)
+
+ zi_rt2 = pickle.loads(pkl)
+ with self.subTest(test="Is unpickled ZoneInfo"):
+ self.assertIs(zi_rt, zi_rt2)
+
+ def test_cache_miss(self):
+ zi_in = self.klass("Europe/Dublin")
+ pkl = pickle.dumps(zi_in)
+
+ del zi_in
+ self.klass.clear_cache() # Induce a cache miss
+ zi_rt = pickle.loads(pkl)
+ zi_rt2 = pickle.loads(pkl)
+
+ self.assertIs(zi_rt, zi_rt2)
+
+ def test_no_cache(self):
+ zi_no_cache = self.klass.no_cache("Europe/Dublin")
+
+ pkl = pickle.dumps(zi_no_cache)
+ zi_rt = pickle.loads(pkl)
+
+ with self.subTest(test="Not the pickled object"):
+ self.assertIsNot(zi_rt, zi_no_cache)
+
+ zi_rt2 = pickle.loads(pkl)
+ with self.subTest(test="Not a second unpickled object"):
+ self.assertIsNot(zi_rt, zi_rt2)
+
+ zi_cache = self.klass("Europe/Dublin")
+ with self.subTest(test="Not a cached object"):
+ self.assertIsNot(zi_rt, zi_cache)
+
+ def test_from_file(self):
+ key = "Europe/Dublin"
+ with open(self.zoneinfo_data.path_from_key(key), "rb") as f:
+ zi_nokey = self.klass.from_file(f)
+
+ f.seek(0)
+ zi_key = self.klass.from_file(f, key=key)
+
+ test_cases = [
+ (zi_key, "ZoneInfo with key"),
+ (zi_nokey, "ZoneInfo without key"),
+ ]
+
+ for zi, test_name in test_cases:
+ with self.subTest(test_name=test_name):
+ with self.assertRaises(pickle.PicklingError):
+ pickle.dumps(zi)
+
+ def test_pickle_after_from_file(self):
+ # This may be a bit of paranoia, but this test is to ensure that no
+ # global state is maintained in order to handle the pickle cache and
+ # from_file behavior, and that it is possible to interweave the
+ # constructors of each of these and pickling/unpickling without issues.
+ key = "Europe/Dublin"
+ zi = self.klass(key)
+
+ pkl_0 = pickle.dumps(zi)
+ zi_rt_0 = pickle.loads(pkl_0)
+ self.assertIs(zi, zi_rt_0)
+
+ with open(self.zoneinfo_data.path_from_key(key), "rb") as f:
+ zi_ff = self.klass.from_file(f, key=key)
+
+ pkl_1 = pickle.dumps(zi)
+ zi_rt_1 = pickle.loads(pkl_1)
+ self.assertIs(zi, zi_rt_1)
+
+ with self.assertRaises(pickle.PicklingError):
+ pickle.dumps(zi_ff)
+
+ pkl_2 = pickle.dumps(zi)
+ zi_rt_2 = pickle.loads(pkl_2)
+ self.assertIs(zi, zi_rt_2)
+
+
+class CZoneInfoPickleTest(ZoneInfoPickleTest):
+ module = c_zoneinfo
+
+
+class CallingConventionTest(ZoneInfoTestBase):
+ """Tests for functions with restricted calling conventions."""
+
+ module = py_zoneinfo
+
+ @property
+ def zoneinfo_data(self):
+ return ZONEINFO_DATA
+
+ def test_from_file(self):
+ with open(self.zoneinfo_data.path_from_key("UTC"), "rb") as f:
+ with self.assertRaises(TypeError):
+ self.klass.from_file(fobj=f)
+
+ def test_clear_cache(self):
+ with self.assertRaises(TypeError):
+ self.klass.clear_cache(["UTC"])
+
+
+class CCallingConventionTest(CallingConventionTest):
+ module = c_zoneinfo
+
+
+class TzPathTest(TzPathUserMixin, ZoneInfoTestBase):
+ module = py_zoneinfo
+
+ @staticmethod
+ @contextlib.contextmanager
+ def python_tzpath_context(value):
+ path_var = "PYTHONTZPATH"
+ try:
+ with OS_ENV_LOCK:
+ old_env = os.environ.get(path_var, None)
+ os.environ[path_var] = value
+ yield
+ finally:
+ if old_env is None:
+ del os.environ[path_var]
+ else:
+ os.environ[path_var] = old_env # pragma: nocover
+
+ def test_env_variable(self):
+ """Tests that the environment variable works with reset_tzpath."""
+ new_paths = [
+ ("", []),
+ ("/etc/zoneinfo", ["/etc/zoneinfo"]),
+ (f"/a/b/c{os.pathsep}/d/e/f", ["/a/b/c", "/d/e/f"]),
+ ]
+
+ for new_path_var, expected_result in new_paths:
+ with self.python_tzpath_context(new_path_var):
+ with self.subTest(tzpath=new_path_var):
+ self.module.reset_tzpath()
+ tzpath = self.module.TZPATH
+ self.assertSequenceEqual(tzpath, expected_result)
+
+ def test_env_variable_relative_paths(self):
+ test_cases = [
+ [("path/to/somewhere",), ()],
+ [
+ ("/usr/share/zoneinfo", "path/to/somewhere",),
+ ("/usr/share/zoneinfo",),
+ ],
+ [("../relative/path",), ()],
+ [
+ ("/usr/share/zoneinfo", "../relative/path",),
+ ("/usr/share/zoneinfo",),
+ ],
+ [("path/to/somewhere", "../relative/path",), ()],
+ [
+ (
+ "/usr/share/zoneinfo",
+ "path/to/somewhere",
+ "../relative/path",
+ ),
+ ("/usr/share/zoneinfo",),
+ ],
+ ]
+
+ for input_paths, expected_paths in test_cases:
+ path_var = os.pathsep.join(input_paths)
+ with self.python_tzpath_context(path_var):
+ with self.subTest("warning", path_var=path_var):
+ # Note: Per PEP 615 the warning is implementation-defined
+ # behavior, other implementations need not warn.
+ with self.assertWarns(self.module.InvalidTZPathWarning):
+ self.module.reset_tzpath()
+
+ tzpath = self.module.TZPATH
+ with self.subTest("filtered", path_var=path_var):
+ self.assertSequenceEqual(tzpath, expected_paths)
+
+ def test_reset_tzpath_kwarg(self):
+ self.module.reset_tzpath(to=["/a/b/c"])
+
+ self.assertSequenceEqual(self.module.TZPATH, ("/a/b/c",))
+
+ def test_reset_tzpath_relative_paths(self):
+ bad_values = [
+ ("path/to/somewhere",),
+ ("/usr/share/zoneinfo", "path/to/somewhere",),
+ ("../relative/path",),
+ ("/usr/share/zoneinfo", "../relative/path",),
+ ("path/to/somewhere", "../relative/path",),
+ ("/usr/share/zoneinfo", "path/to/somewhere", "../relative/path",),
+ ]
+ for input_paths in bad_values:
+ with self.subTest(input_paths=input_paths):
+ with self.assertRaises(ValueError):
+ self.module.reset_tzpath(to=input_paths)
+
+ def test_tzpath_type_error(self):
+ bad_values = [
+ "/etc/zoneinfo:/usr/share/zoneinfo",
+ b"/etc/zoneinfo:/usr/share/zoneinfo",
+ 0,
+ ]
+
+ for bad_value in bad_values:
+ with self.subTest(value=bad_value):
+ with self.assertRaises(TypeError):
+ self.module.reset_tzpath(bad_value)
+
+ def test_tzpath_attribute(self):
+ tzpath_0 = ["/one", "/two"]
+ tzpath_1 = ["/three"]
+
+ with self.tzpath_context(tzpath_0):
+ query_0 = self.module.TZPATH
+
+ with self.tzpath_context(tzpath_1):
+ query_1 = self.module.TZPATH
+
+ self.assertSequenceEqual(tzpath_0, query_0)
+ self.assertSequenceEqual(tzpath_1, query_1)
+
+
+class CTzPathTest(TzPathTest):
+ module = c_zoneinfo
+
+
+class TestModule(ZoneInfoTestBase):
+ module = py_zoneinfo
+
+ @property
+ def zoneinfo_data(self):
+ return ZONEINFO_DATA
+
+ @cached_property
+ def _UTC_bytes(self):
+ zone_file = self.zoneinfo_data.path_from_key("UTC")
+ with open(zone_file, "rb") as f:
+ return f.read()
+
+ def touch_zone(self, key, tz_root):
+ """Creates a valid TZif file at key under the zoneinfo root tz_root.
+
+ tz_root must exist, but all folders below that will be created.
+ """
+ if not os.path.exists(tz_root):
+ raise FileNotFoundError(f"{tz_root} does not exist.")
+
+ root_dir, *tail = key.rsplit("/", 1)
+ if tail: # If there's no tail, then the first component isn't a dir
+ os.makedirs(os.path.join(tz_root, root_dir), exist_ok=True)
+
+ zonefile_path = os.path.join(tz_root, key)
+ with open(zonefile_path, "wb") as f:
+ f.write(self._UTC_bytes)
+
+ def test_getattr_error(self):
+ with self.assertRaises(AttributeError):
+ self.module.NOATTRIBUTE
+
+ def test_dir_contains_all(self):
+ """dir(self.module) should at least contain everything in __all__."""
+ module_all_set = set(self.module.__all__)
+ module_dir_set = set(dir(self.module))
+
+ difference = module_all_set - module_dir_set
+
+ self.assertFalse(difference)
+
+ def test_dir_unique(self):
+ """Test that there are no duplicates in dir(self.module)"""
+ module_dir = dir(self.module)
+ module_unique = set(module_dir)
+
+ self.assertCountEqual(module_dir, module_unique)
+
+ def test_available_timezones(self):
+ with self.tzpath_context([self.zoneinfo_data.tzpath]):
+ self.assertTrue(self.zoneinfo_data.keys) # Sanity check
+
+ available_keys = self.module.available_timezones()
+ zoneinfo_keys = set(self.zoneinfo_data.keys)
+
+            # If tzdata is not present, zoneinfo_keys == available_keys;
+            # otherwise zoneinfo_keys should be a subset of available_keys.
+            intersection = zoneinfo_keys & available_keys
+            self.assertEqual(zoneinfo_keys, intersection)
+
+ def test_available_timezones_weirdzone(self):
+ with tempfile.TemporaryDirectory() as td:
+ # Make a fictional zone at "Mars/Olympus_Mons"
+ self.touch_zone("Mars/Olympus_Mons", td)
+
+ with self.tzpath_context([td]):
+ available_keys = self.module.available_timezones()
+ self.assertIn("Mars/Olympus_Mons", available_keys)
+
+ def test_folder_exclusions(self):
+ expected = {
+ "America/Los_Angeles",
+ "America/Santiago",
+ "America/Indiana/Indianapolis",
+ "UTC",
+ "Europe/Paris",
+ "Europe/London",
+ "Asia/Tokyo",
+ "Australia/Sydney",
+ }
+
+ base_tree = list(expected)
+ posix_tree = [f"posix/{x}" for x in base_tree]
+ right_tree = [f"right/{x}" for x in base_tree]
+
+ cases = [
+ ("base_tree", base_tree),
+ ("base_and_posix", base_tree + posix_tree),
+ ("base_and_right", base_tree + right_tree),
+ ("all_trees", base_tree + right_tree + posix_tree),
+ ]
+
+ with tempfile.TemporaryDirectory() as td:
+ for case_name, tree in cases:
+ tz_root = os.path.join(td, case_name)
+ os.mkdir(tz_root)
+
+ for key in tree:
+ self.touch_zone(key, tz_root)
+
+ with self.tzpath_context([tz_root]):
+ with self.subTest(case_name):
+ actual = self.module.available_timezones()
+ self.assertEqual(actual, expected)
+
+ def test_exclude_posixrules(self):
+ expected = {
+ "America/New_York",
+ "Europe/London",
+ }
+
+ tree = list(expected) + ["posixrules"]
+
+ with tempfile.TemporaryDirectory() as td:
+ for key in tree:
+ self.touch_zone(key, td)
+
+ with self.tzpath_context([td]):
+ actual = self.module.available_timezones()
+ self.assertEqual(actual, expected)
+
+
+class CTestModule(TestModule):
+ module = c_zoneinfo
+
+
+class ExtensionBuiltTest(unittest.TestCase):
+ """Smoke test to ensure that the C and Python extensions are both tested.
+
+ Because the intention is for the Python and C versions of ZoneInfo to
+ behave identically, these tests necessarily rely on implementation details,
+ so the tests may need to be adjusted if the implementations change. Do not
+ rely on these tests as an indication of stable properties of these classes.
+ """
+
+ def test_cache_location(self):
+ # The pure Python version stores caches on attributes, but the C
+ # extension stores them in C globals (at least for now)
+ self.assertFalse(hasattr(c_zoneinfo.ZoneInfo, "_weak_cache"))
+ self.assertTrue(hasattr(py_zoneinfo.ZoneInfo, "_weak_cache"))
+
+ def test_gc_tracked(self):
+ # The pure Python version is tracked by the GC but (for now) the C
+ # version is not.
+ import gc
+
+ self.assertTrue(gc.is_tracked(py_zoneinfo.ZoneInfo))
+ self.assertFalse(gc.is_tracked(c_zoneinfo.ZoneInfo))
+
+
+@dataclasses.dataclass(frozen=True)
+class ZoneOffset:
+ tzname: str
+ utcoffset: timedelta
+ dst: timedelta = ZERO
+
+
+@dataclasses.dataclass(frozen=True)
+class ZoneTransition:
+ transition: datetime
+ offset_before: ZoneOffset
+ offset_after: ZoneOffset
+
+ @property
+ def transition_utc(self):
+ return (self.transition - self.offset_before.utcoffset).replace(
+ tzinfo=timezone.utc
+ )
+
+ @property
+ def fold(self):
+ """Whether this introduces a fold"""
+ return self.offset_before.utcoffset > self.offset_after.utcoffset
+
+ @property
+ def gap(self):
+ """Whether this introduces a gap"""
+ return self.offset_before.utcoffset < self.offset_after.utcoffset
+
+ @property
+ def delta(self):
+ return self.offset_after.utcoffset - self.offset_before.utcoffset
+
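+    # The "anomaly" is the span of local wall times affected by a transition:
+    # for a gap it runs from the transition up to transition + delta, while
+    # for a fold (delta < 0) it runs from transition + delta up to the
+    # transition itself.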
+ @property
+ def anomaly_start(self):
+ if self.fold:
+ return self.transition + self.delta
+ else:
+ return self.transition
+
+ @property
+ def anomaly_end(self):
+ if not self.fold:
+ return self.transition + self.delta
+ else:
+ return self.transition
+
+
+class ZoneInfoData:
+ def __init__(self, source_json, tzpath, v1=False):
+ self.tzpath = pathlib.Path(tzpath)
+ self.keys = []
+ self.v1 = v1
+ self._populate_tzpath(source_json)
+
+ def path_from_key(self, key):
+ return self.tzpath / key
+
+ def _populate_tzpath(self, source_json):
+ with open(source_json, "rb") as f:
+ zoneinfo_dict = json.load(f)
+
+ zoneinfo_data = zoneinfo_dict["data"]
+
+ for key, value in zoneinfo_data.items():
+ self.keys.append(key)
+ raw_data = self._decode_text(value)
+
+ if self.v1:
+ data = self._convert_to_v1(raw_data)
+ else:
+ data = raw_data
+
+ destination = self.path_from_key(key)
+ destination.parent.mkdir(exist_ok=True, parents=True)
+ with open(destination, "wb") as f:
+ f.write(data)
+
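+    # Zone payloads in zoneinfo_data.json are stored as lists of ASCII
+    # chunks containing base85-encoded, LZMA-compressed TZif data.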
+ def _decode_text(self, contents):
+ raw_data = b"".join(map(str.encode, contents))
+ decoded = base64.b85decode(raw_data)
+
+ return lzma.decompress(decoded)
+
+ def _convert_to_v1(self, contents):
+ assert contents[0:4] == b"TZif", "Invalid TZif data found!"
+ version = int(contents[4:5])
+
+ header_start = 4 + 16
+ header_end = header_start + 24 # 6l == 24 bytes
+ assert version >= 2, "Version 1 file found: no conversion necessary"
+ isutcnt, isstdcnt, leapcnt, timecnt, typecnt, charcnt = struct.unpack(
+ ">6l", contents[header_start:header_end]
+ )
+
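+        # Size of the version 1 data block: each transition costs 5 bytes
+        # (a 4-byte transition time plus a 1-byte type index), each ttinfo
+        # record 6 bytes (4-byte utoff, 1-byte isdst, 1-byte abbreviation
+        # index), each leap-second record 8 bytes, and the standard/wall and
+        # UT/local indicators 1 byte apiece (RFC 8536, section 3.2).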
+ file_size = (
+ timecnt * 5
+ + typecnt * 6
+ + charcnt
+ + leapcnt * 8
+ + isstdcnt
+ + isutcnt
+ )
+ file_size += header_end
+ out = b"TZif" + b"\x00" + contents[5:file_size]
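+        # A NUL version byte marks the file as version 1; the remainder of
+        # the header and the V1 data block are reused from the original file.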
+
+ assert (
+ contents[file_size : (file_size + 4)] == b"TZif"
+ ), "Version 2 file not truncated at Version 2 header"
+
+ return out
+
+
+class ZoneDumpData:
+ @classmethod
+ def transition_keys(cls):
+ return cls._get_zonedump().keys()
+
+ @classmethod
+ def load_transition_examples(cls, key):
+ return cls._get_zonedump()[key]
+
+ @classmethod
+ def fixed_offset_zones(cls):
+ if not cls._FIXED_OFFSET_ZONES:
+ cls._populate_fixed_offsets()
+
+ return cls._FIXED_OFFSET_ZONES.items()
+
+ @classmethod
+ def _get_zonedump(cls):
+ if not cls._ZONEDUMP_DATA:
+ cls._populate_zonedump_data()
+ return cls._ZONEDUMP_DATA
+
+ @classmethod
+ def _populate_fixed_offsets(cls):
+ cls._FIXED_OFFSET_ZONES = {
+ "UTC": ZoneOffset("UTC", ZERO, ZERO),
+ }
+
+ @classmethod
+ def _populate_zonedump_data(cls):
+ def _Africa_Abidjan():
+ LMT = ZoneOffset("LMT", timedelta(seconds=-968))
+ GMT = ZoneOffset("GMT", ZERO)
+
+ return [
+ ZoneTransition(datetime(1912, 1, 1), LMT, GMT),
+ ]
+
+ def _Africa_Casablanca():
+ P00_s = ZoneOffset("+00", ZERO, ZERO)
+ P01_d = ZoneOffset("+01", ONE_H, ONE_H)
+ P00_d = ZoneOffset("+00", ZERO, -ONE_H)
+ P01_s = ZoneOffset("+01", ONE_H, ZERO)
+
+ return [
+ # Morocco sometimes pauses DST during Ramadan
+ ZoneTransition(datetime(2018, 3, 25, 2), P00_s, P01_d),
+ ZoneTransition(datetime(2018, 5, 13, 3), P01_d, P00_s),
+ ZoneTransition(datetime(2018, 6, 17, 2), P00_s, P01_d),
+ # On October 28th Morocco set standard time to +01,
+ # with negative DST only during Ramadan
+ ZoneTransition(datetime(2018, 10, 28, 3), P01_d, P01_s),
+ ZoneTransition(datetime(2019, 5, 5, 3), P01_s, P00_d),
+ ZoneTransition(datetime(2019, 6, 9, 2), P00_d, P01_s),
+ ]
+
+ def _America_Los_Angeles():
+ LMT = ZoneOffset("LMT", timedelta(seconds=-28378), ZERO)
+ PST = ZoneOffset("PST", timedelta(hours=-8), ZERO)
+ PDT = ZoneOffset("PDT", timedelta(hours=-7), ONE_H)
+ PWT = ZoneOffset("PWT", timedelta(hours=-7), ONE_H)
+ PPT = ZoneOffset("PPT", timedelta(hours=-7), ONE_H)
+
+ return [
+ ZoneTransition(datetime(1883, 11, 18, 12, 7, 2), LMT, PST),
+ ZoneTransition(datetime(1918, 3, 31, 2), PST, PDT),
+ ZoneTransition(datetime(1918, 10, 27, 2), PDT, PST),
+ # Transition to Pacific War Time
+ ZoneTransition(datetime(1942, 2, 9, 2), PST, PWT),
+ # Transition from Pacific War Time to Pacific Peace Time
+ ZoneTransition(datetime(1945, 8, 14, 16), PWT, PPT),
+ ZoneTransition(datetime(1945, 9, 30, 2), PPT, PST),
+ ZoneTransition(datetime(2015, 3, 8, 2), PST, PDT),
+ ZoneTransition(datetime(2015, 11, 1, 2), PDT, PST),
+ # After 2038: Rules continue indefinitely
+ ZoneTransition(datetime(2450, 3, 13, 2), PST, PDT),
+ ZoneTransition(datetime(2450, 11, 6, 2), PDT, PST),
+ ]
+
+ def _America_Santiago():
+ LMT = ZoneOffset("LMT", timedelta(seconds=-16966), ZERO)
+ SMT = ZoneOffset("SMT", timedelta(seconds=-16966), ZERO)
+ N05 = ZoneOffset("-05", timedelta(seconds=-18000), ZERO)
+ N04 = ZoneOffset("-04", timedelta(seconds=-14400), ZERO)
+ N03 = ZoneOffset("-03", timedelta(seconds=-10800), ONE_H)
+
+ return [
+ ZoneTransition(datetime(1890, 1, 1), LMT, SMT),
+ ZoneTransition(datetime(1910, 1, 10), SMT, N05),
+ ZoneTransition(datetime(1916, 7, 1), N05, SMT),
+ ZoneTransition(datetime(2008, 3, 30), N03, N04),
+ ZoneTransition(datetime(2008, 10, 12), N04, N03),
+ ZoneTransition(datetime(2040, 4, 8), N03, N04),
+ ZoneTransition(datetime(2040, 9, 2), N04, N03),
+ ]
+
+ def _Asia_Tokyo():
+ JST = ZoneOffset("JST", timedelta(seconds=32400), ZERO)
+ JDT = ZoneOffset("JDT", timedelta(seconds=36000), ONE_H)
+
+ # Japan had DST from 1948 to 1951, and it was unusual in that
+ # the transition from DST to STD occurred at 25:00, and is
+ # denominated as such in the time zone database
+ return [
+ ZoneTransition(datetime(1948, 5, 2), JST, JDT),
+ ZoneTransition(datetime(1948, 9, 12, 1), JDT, JST),
+ ZoneTransition(datetime(1951, 9, 9, 1), JDT, JST),
+ ]
+
+ def _Australia_Sydney():
+ LMT = ZoneOffset("LMT", timedelta(seconds=36292), ZERO)
+ AEST = ZoneOffset("AEST", timedelta(seconds=36000), ZERO)
+ AEDT = ZoneOffset("AEDT", timedelta(seconds=39600), ONE_H)
+
+ return [
+ ZoneTransition(datetime(1895, 2, 1), LMT, AEST),
+ ZoneTransition(datetime(1917, 1, 1, 0, 1), AEST, AEDT),
+ ZoneTransition(datetime(1917, 3, 25, 2), AEDT, AEST),
+ ZoneTransition(datetime(2012, 4, 1, 3), AEDT, AEST),
+ ZoneTransition(datetime(2012, 10, 7, 2), AEST, AEDT),
+ ZoneTransition(datetime(2040, 4, 1, 3), AEDT, AEST),
+ ZoneTransition(datetime(2040, 10, 7, 2), AEST, AEDT),
+ ]
+
+ def _Europe_Dublin():
+ LMT = ZoneOffset("LMT", timedelta(seconds=-1500), ZERO)
+ DMT = ZoneOffset("DMT", timedelta(seconds=-1521), ZERO)
+ IST_0 = ZoneOffset("IST", timedelta(seconds=2079), ONE_H)
+ GMT_0 = ZoneOffset("GMT", ZERO, ZERO)
+ BST = ZoneOffset("BST", ONE_H, ONE_H)
+ GMT_1 = ZoneOffset("GMT", ZERO, -ONE_H)
+ IST_1 = ZoneOffset("IST", ONE_H, ZERO)
+
+ return [
+ ZoneTransition(datetime(1880, 8, 2, 0), LMT, DMT),
+ ZoneTransition(datetime(1916, 5, 21, 2), DMT, IST_0),
+ ZoneTransition(datetime(1916, 10, 1, 3), IST_0, GMT_0),
+ ZoneTransition(datetime(1917, 4, 8, 2), GMT_0, BST),
+ ZoneTransition(datetime(2016, 3, 27, 1), GMT_1, IST_1),
+ ZoneTransition(datetime(2016, 10, 30, 2), IST_1, GMT_1),
+ ZoneTransition(datetime(2487, 3, 30, 1), GMT_1, IST_1),
+ ZoneTransition(datetime(2487, 10, 26, 2), IST_1, GMT_1),
+ ]
+
+ def _Europe_Lisbon():
+ WET = ZoneOffset("WET", ZERO, ZERO)
+ WEST = ZoneOffset("WEST", ONE_H, ONE_H)
+ CET = ZoneOffset("CET", ONE_H, ZERO)
+ CEST = ZoneOffset("CEST", timedelta(seconds=7200), ONE_H)
+
+ return [
+ ZoneTransition(datetime(1992, 3, 29, 1), WET, WEST),
+ ZoneTransition(datetime(1992, 9, 27, 2), WEST, CET),
+ ZoneTransition(datetime(1993, 3, 28, 2), CET, CEST),
+ ZoneTransition(datetime(1993, 9, 26, 3), CEST, CET),
+ ZoneTransition(datetime(1996, 3, 31, 2), CET, WEST),
+ ZoneTransition(datetime(1996, 10, 27, 2), WEST, WET),
+ ]
+
+ def _Europe_London():
+ LMT = ZoneOffset("LMT", timedelta(seconds=-75), ZERO)
+ GMT = ZoneOffset("GMT", ZERO, ZERO)
+ BST = ZoneOffset("BST", ONE_H, ONE_H)
+
+ return [
+ ZoneTransition(datetime(1847, 12, 1), LMT, GMT),
+ ZoneTransition(datetime(2005, 3, 27, 1), GMT, BST),
+ ZoneTransition(datetime(2005, 10, 30, 2), BST, GMT),
+ ZoneTransition(datetime(2043, 3, 29, 1), GMT, BST),
+ ZoneTransition(datetime(2043, 10, 25, 2), BST, GMT),
+ ]
+
+ def _Pacific_Kiritimati():
+ LMT = ZoneOffset("LMT", timedelta(seconds=-37760), ZERO)
+ N1040 = ZoneOffset("-1040", timedelta(seconds=-38400), ZERO)
+ N10 = ZoneOffset("-10", timedelta(seconds=-36000), ZERO)
+ P14 = ZoneOffset("+14", timedelta(seconds=50400), ZERO)
+
+ # This is literally every transition in Christmas Island history
+ return [
+ ZoneTransition(datetime(1901, 1, 1), LMT, N1040),
+ ZoneTransition(datetime(1979, 10, 1), N1040, N10),
+ # They skipped December 31, 1994
+ ZoneTransition(datetime(1994, 12, 31), N10, P14),
+ ]
+
+ cls._ZONEDUMP_DATA = {
+ "Africa/Abidjan": _Africa_Abidjan(),
+ "Africa/Casablanca": _Africa_Casablanca(),
+ "America/Los_Angeles": _America_Los_Angeles(),
+ "America/Santiago": _America_Santiago(),
+ "Australia/Sydney": _Australia_Sydney(),
+ "Asia/Tokyo": _Asia_Tokyo(),
+ "Europe/Dublin": _Europe_Dublin(),
+ "Europe/Lisbon": _Europe_Lisbon(),
+ "Europe/London": _Europe_London(),
+ "Pacific/Kiritimati": _Pacific_Kiritimati(),
+ }
+
+ _ZONEDUMP_DATA = None
+ _FIXED_OFFSET_ZONES = None
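Reviewer note (not part of the patch): the ZoneTransition helper above folds the DST bookkeeping into a few properties. A minimal sketch of how they classify the 2015 US spring-forward change, assuming the ZoneOffset/ZoneTransition dataclasses and the ZERO/ONE_H constants from this test module are importable:

    from datetime import datetime, timedelta, timezone

    PST = ZoneOffset("PST", timedelta(hours=-8), ZERO)
    PDT = ZoneOffset("PDT", timedelta(hours=-7), ONE_H)

    spring = ZoneTransition(datetime(2015, 3, 8, 2), PST, PDT)
    assert spring.gap and not spring.fold       # clocks jump forward, nothing repeats
    assert spring.delta == ONE_H
    assert spring.anomaly_start == datetime(2015, 3, 8, 2)    # start of the skipped hour
    assert spring.anomaly_end == datetime(2015, 3, 8, 3)
    assert spring.transition_utc == datetime(2015, 3, 8, 10, tzinfo=timezone.utc)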
diff --git a/Lib/test/threaded_import_hangers.py b/Lib/test/threaded_import_hangers.py
deleted file mode 100644
index 5484e60a..00000000
--- a/Lib/test/threaded_import_hangers.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# This is a helper module for test_threaded_import. The test imports this
-# module, and this module tries to run various Python library functions in
-# their own thread, as a side effect of being imported. If the spawned
-# thread doesn't complete in TIMEOUT seconds, an "appeared to hang" message
-# is appended to the module-global `errors` list. That list remains empty
-# if (and only if) all functions tested complete.
-
-TIMEOUT = 10
-
-import threading
-
-import tempfile
-import os.path
-
-errors = []
-
-# This class merely runs a function in its own thread T. The thread importing
-# this module holds the import lock, so if the function called by T tries
-# to do its own imports it will block waiting for this module's import
-# to complete.
-class Worker(threading.Thread):
- def __init__(self, function, args):
- threading.Thread.__init__(self)
- self.function = function
- self.args = args
-
- def run(self):
- self.function(*self.args)
-
-for name, func, args in [
- # Bug 147376: TemporaryFile hung on Windows, starting in Python 2.4.
- ("tempfile.TemporaryFile", lambda: tempfile.TemporaryFile().close(), ()),
-
- # The real cause for bug 147376: ntpath.abspath() caused the hang.
- ("os.path.abspath", os.path.abspath, ('.',)),
- ]:
-
- try:
- t = Worker(func, args)
- t.start()
- t.join(TIMEOUT)
- if t.is_alive():
- errors.append("%s appeared to hang" % name)
- finally:
- del t
diff --git a/Lib/threading.py b/Lib/threading.py
index 813dae2a..ab29db77 100644
--- a/Lib/threading.py
+++ b/Lib/threading.py
@@ -3,6 +3,7 @@
import os as _os
import sys as _sys
import _thread
+import functools
from time import monotonic as _time
from _weakrefset import WeakSet
@@ -121,6 +122,11 @@ class _RLock:
hex(id(self))
)
+ def _at_fork_reinit(self):
+ self._block._at_fork_reinit()
+ self._owner = None
+ self._count = 0
+
def acquire(self, blocking=True, timeout=-1):
"""Acquire a lock, blocking or non-blocking.
@@ -243,6 +249,10 @@ class Condition:
pass
self._waiters = _deque()
+ def _at_fork_reinit(self):
+ self._lock._at_fork_reinit()
+ self._waiters.clear()
+
def __enter__(self):
return self._lock.__enter__()
@@ -261,7 +271,7 @@ class Condition:
def _is_owned(self):
# Return True if lock is owned by current_thread.
# This method is called only if _lock doesn't have _is_owned().
- if self._lock.acquire(0):
+ if self._lock.acquire(False):
self._lock.release()
return False
else:
@@ -438,16 +448,19 @@ class Semaphore:
__enter__ = acquire
- def release(self):
- """Release a semaphore, incrementing the internal counter by one.
+ def release(self, n=1):
+ """Release a semaphore, incrementing the internal counter by one or more.
When the counter is zero on entry and another thread is waiting for it
to become larger than zero again, wake up that thread.
"""
+ if n < 1:
+ raise ValueError('n must be one or more')
with self._cond:
- self._value += 1
- self._cond.notify()
+ self._value += n
+ for i in range(n):
+ self._cond.notify()
def __exit__(self, t, v, tb):
self.release()
@@ -474,8 +487,8 @@ class BoundedSemaphore(Semaphore):
Semaphore.__init__(self, value)
self._initial_value = value
- def release(self):
- """Release a semaphore, incrementing the internal counter by one.
+ def release(self, n=1):
+ """Release a semaphore, incrementing the internal counter by one or more.
When the counter is zero on entry and another thread is waiting for it
to become larger than zero again, wake up that thread.
@@ -484,11 +497,14 @@ class BoundedSemaphore(Semaphore):
raise a ValueError.
"""
+ if n < 1:
+ raise ValueError('n must be one or more')
with self._cond:
- if self._value >= self._initial_value:
+ if self._value + n > self._initial_value:
raise ValueError("Semaphore released too many times")
- self._value += 1
- self._cond.notify()
+ self._value += n
+ for i in range(n):
+ self._cond.notify()
class Event:
@@ -506,9 +522,9 @@ class Event:
self._cond = Condition(Lock())
self._flag = False
- def _reset_internal_locks(self):
- # private! called by Thread._reset_internal_locks by _after_fork()
- self._cond.__init__(Lock())
+ def _at_fork_reinit(self):
+ # Private method called by Thread._reset_internal_locks()
+ self._cond._at_fork_reinit()
def is_set(self):
"""Return true if and only if the internal flag is true."""
@@ -808,9 +824,10 @@ class Thread:
def _reset_internal_locks(self, is_alive):
# private! Called by _after_fork() to reset our internal locks as
# they may be in an invalid state leading to a deadlock or crash.
- self._started._reset_internal_locks()
+ self._started._at_fork_reinit()
if is_alive:
- self._set_tstate_lock()
+ self._tstate_lock._at_fork_reinit()
+ self._tstate_lock.acquire()
else:
# The thread isn't alive after fork: it doesn't have a tstate
# anymore.
@@ -846,6 +863,7 @@ class Thread:
if self._started.is_set():
raise RuntimeError("threads can only be started once")
+
with _active_limbo_lock:
_limbo[self] = self
try:
@@ -1082,16 +1100,6 @@ class Thread:
self._wait_for_tstate_lock(False)
return not self._is_stopped
- def isAlive(self):
- """Return whether the thread is alive.
-
- This method is deprecated, use is_alive() instead.
- """
- import warnings
- warnings.warn('isAlive() is deprecated, use is_alive() instead',
- DeprecationWarning, stacklevel=2)
- return self.is_alive()
-
@property
def daemon(self):
"""A boolean value indicating whether this thread is a daemon thread.
@@ -1344,6 +1352,27 @@ def enumerate():
with _active_limbo_lock:
return list(_active.values()) + list(_limbo.values())
+
+_threading_atexits = []
+_SHUTTING_DOWN = False
+
+def _register_atexit(func, *arg, **kwargs):
+ """CPython internal: register *func* to be called before joining threads.
+
+ The registered *func* is called with its arguments just before all
+ non-daemon threads are joined in `_shutdown()`. It provides a similar
+ purpose to `atexit.register()`, but its functions are called prior to
+ threading shutdown instead of interpreter shutdown.
+
+ For similarity to atexit, the registered functions are called in reverse.
+ """
+ if _SHUTTING_DOWN:
+ raise RuntimeError("can't register atexit after shutdown")
+
+ call = functools.partial(func, *arg, **kwargs)
+ _threading_atexits.append(call)
+
+
from _thread import stack_size
# Create the main thread object,
@@ -1365,6 +1394,8 @@ def _shutdown():
# _shutdown() was already called
return
+ global _SHUTTING_DOWN
+ _SHUTTING_DOWN = True
# Main thread
tlock = _main_thread._tstate_lock
# The main thread isn't finished yet, so its thread state lock can't have
@@ -1374,6 +1405,11 @@ def _shutdown():
tlock.release()
_main_thread._stop()
+ # Call registered threading atexit functions before threads are joined.
+ # Order is reversed, similar to atexit.
+ for atexit_call in reversed(_threading_atexits):
+ atexit_call()
+
# Join all non-deamon threads
while True:
with _shutdown_locks_lock:
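Reviewer note (not part of the patch): the new n parameter on Semaphore.release() above lets one call wake several waiters. A small usage sketch with a hypothetical worker function:

    import threading

    sem = threading.Semaphore(0)
    results = []

    def worker(i):
        sem.acquire()            # blocks until released
        results.append(i)

    threads = [threading.Thread(target=worker, args=(i,)) for i in range(3)]
    for t in threads:
        t.start()

    sem.release(n=3)             # 3.9+: wakes all three waiting threads at once
    for t in threads:
        t.join()
    assert sorted(results) == [0, 1, 2]

BoundedSemaphore applies the same n to its over-release check, so a release(n=...) that would push the counter past the initial value still raises ValueError.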
diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py
index 479eb016..1067ab6a 100644
--- a/Lib/tkinter/__init__.py
+++ b/Lib/tkinter/__init__.py
@@ -32,13 +32,13 @@ tk.mainloop()
import enum
import sys
+import types
import _tkinter # If this fails your Python may not be configured for Tk
TclError = _tkinter.TclError
from tkinter.constants import *
import re
-
wantobjects = 1
TkVersion = float(_tkinter.TK_VERSION)
@@ -146,10 +146,10 @@ def _splitdict(tk, v, cut_minus=True, conv=None):
class EventType(str, enum.Enum):
KeyPress = '2'
- Key = KeyPress
+ Key = KeyPress,
KeyRelease = '3'
ButtonPress = '4'
- Button = ButtonPress
+ Button = ButtonPress,
ButtonRelease = '5'
Motion = '6'
Enter = '7'
@@ -180,10 +180,10 @@ class EventType(str, enum.Enum):
Colormap = '32'
ClientMessage = '33' # undocumented
Mapping = '34' # undocumented
- VirtualEvent = '35' # undocumented
- Activate = '36'
- Deactivate = '37'
- MouseWheel = '38'
+ VirtualEvent = '35', # undocumented
+ Activate = '36',
+ Deactivate = '37',
+ MouseWheel = '38',
def __str__(self):
return self.name
@@ -484,6 +484,8 @@ class Variable:
Note: if the Variable's master matters to behavior
also compare self._master == other._master
"""
+ if not isinstance(other, Variable):
+ return NotImplemented
return self.__class__.__name__ == other.__class__.__name__ \
and self._name == other._name
@@ -2239,7 +2241,7 @@ class Tk(Misc, Wm):
_w = '.'
def __init__(self, screenName=None, baseName=None, className='Tk',
- useTk=1, sync=0, use=None):
+ useTk=True, sync=False, use=None):
"""Return a new Toplevel widget on screen SCREENNAME. A new Tcl interpreter will
be created. BASENAME will be used for the identification of the profile file (see
readprofile).
@@ -2257,7 +2259,7 @@ class Tk(Misc, Wm):
baseName, ext = os.path.splitext(baseName)
if ext not in ('.py', '.pyc'):
baseName = baseName + ext
- interactive = 0
+ interactive = False
self.tk = _tkinter.create(screenName, baseName, className, interactive, wantobjects, useTk, sync, use)
if useTk:
self._loadtk()
@@ -2359,7 +2361,7 @@ class Tk(Misc, Wm):
# copied into the Pack, Place or Grid class.
-def Tcl(screenName=None, baseName=None, className='Tk', useTk=0):
+def Tcl(screenName=None, baseName=None, className='Tk', useTk=False):
return Tk(screenName, baseName, className, useTk)
@@ -4569,5 +4571,9 @@ def _test():
root.mainloop()
+__all__ = [name for name, obj in globals().items()
+ if not name.startswith('_') and not isinstance(obj, types.ModuleType)
+ and name not in {'wantobjects'}]
+
if __name__ == '__main__':
_test()
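Reviewer note (not part of the patch): Variable.__eq__ above (and Font, TracebackException and the tracemalloc classes later in this patch) now returns NotImplemented for foreign types instead of answering the comparison itself. Python then falls back to the reflected comparison, which is exactly what the ALWAYS_EQ assertions in the updated tests exercise. A minimal sketch with hypothetical Point/AlwaysEqual classes:

    class Point:
        def __init__(self, x):
            self.x = x

        def __eq__(self, other):
            if not isinstance(other, Point):
                return NotImplemented      # let the other operand answer
            return self.x == other.x

    class AlwaysEqual:
        def __eq__(self, other):
            return True

    assert Point(1) == Point(1)
    assert Point(1) != object()        # both sides defer -> identity comparison
    assert Point(1) == AlwaysEqual()   # reflected AlwaysEqual.__eq__ decides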
diff --git a/Lib/tkinter/colorchooser.py b/Lib/tkinter/colorchooser.py
index 9dc96713..3cfc06f6 100644
--- a/Lib/tkinter/colorchooser.py
+++ b/Lib/tkinter/colorchooser.py
@@ -21,6 +21,8 @@
from tkinter.commondialog import Dialog
+__all__ = ["Chooser", "askcolor"]
+
#
# color chooser class
diff --git a/Lib/tkinter/commondialog.py b/Lib/tkinter/commondialog.py
index c4ec010e..e56b5baf 100644
--- a/Lib/tkinter/commondialog.py
+++ b/Lib/tkinter/commondialog.py
@@ -8,15 +8,17 @@
# written by Fredrik Lundh, May 1997
#
-from tkinter import *
+__all__ = ["Dialog"]
+
+from tkinter import Frame
class Dialog:
-    command  = None
+ command = None
def __init__(self, master=None, **options):
-        self.master  = master
+ self.master = master
self.options = options
if not master and options.get('parent'):
self.master = options['parent']
diff --git a/Lib/tkinter/dialog.py b/Lib/tkinter/dialog.py
index cb463f71..8ae21401 100644
--- a/Lib/tkinter/dialog.py
+++ b/Lib/tkinter/dialog.py
@@ -1,7 +1,8 @@
# dialog.py -- Tkinter interface to the tk_dialog script.
-from tkinter import *
-from tkinter import _cnfmerge
+from tkinter import _cnfmerge, Widget, TclError, Button, Pack
+
+__all__ = ["Dialog"]
DIALOG_ICON = 'questhead'
diff --git a/Lib/tkinter/dnd.py b/Lib/tkinter/dnd.py
index 4de2331c..3120ff34 100644
--- a/Lib/tkinter/dnd.py
+++ b/Lib/tkinter/dnd.py
@@ -99,9 +99,10 @@ active; it will never call dnd_commit().
"""
-
import tkinter
+__all__ = ["dnd_start", "DndHandler"]
+
# The factory function
diff --git a/Lib/tkinter/filedialog.py b/Lib/tkinter/filedialog.py
index 88d23476..3ed93eb8 100644
--- a/Lib/tkinter/filedialog.py
+++ b/Lib/tkinter/filedialog.py
@@ -11,14 +11,20 @@ to the native file dialogues available in Tk 4.2 and newer, and the
directory dialogue available in Tk 8.3 and newer.
These interfaces were written by Fredrik Lundh, May 1997.
"""
+__all__ = ["FileDialog", "LoadFileDialog", "SaveFileDialog",
+ "Open", "SaveAs", "Directory",
+ "askopenfilename", "asksaveasfilename", "askopenfilenames",
+ "askopenfile", "askopenfiles", "asksaveasfile", "askdirectory"]
-from tkinter import *
+import fnmatch
+import os
+from tkinter import (
+ Frame, LEFT, YES, BOTTOM, Entry, TOP, Button, Tk, X,
+ Toplevel, RIGHT, Y, END, Listbox, BOTH, Scrollbar,
+)
from tkinter.dialog import Dialog
from tkinter import commondialog
-import os
-import fnmatch
-
dialogstates = {}
diff --git a/Lib/tkinter/font.py b/Lib/tkinter/font.py
index 13642572..15ad7ab4 100644
--- a/Lib/tkinter/font.py
+++ b/Lib/tkinter/font.py
@@ -3,11 +3,12 @@
# written by Fredrik Lundh, February 1998
#
-__version__ = "0.9"
-
import itertools
import tkinter
+__version__ = "0.9"
+__all__ = ["NORMAL", "ROMAN", "BOLD", "ITALIC",
+ "nametofont", "Font", "families", "names"]
# weight/slant
NORMAL = "normal"
@@ -100,7 +101,9 @@ class Font:
return self.name
def __eq__(self, other):
- return isinstance(other, Font) and self.name == other.name
+ if not isinstance(other, Font):
+ return NotImplemented
+ return self.name == other.name
def __getitem__(self, key):
return self.cget(key)
diff --git a/Lib/tkinter/messagebox.py b/Lib/tkinter/messagebox.py
index 4a711fa6..5f0343b6 100644
--- a/Lib/tkinter/messagebox.py
+++ b/Lib/tkinter/messagebox.py
@@ -24,6 +24,10 @@
from tkinter.commondialog import Dialog
+__all__ = ["showinfo", "showwarning", "showerror",
+ "askquestion", "askokcancel", "askyesno",
+ "askyesnocancel", "askretrycancel"]
+
#
# constants
diff --git a/Lib/tkinter/scrolledtext.py b/Lib/tkinter/scrolledtext.py
index 749a06a6..4f9a8815 100644
--- a/Lib/tkinter/scrolledtext.py
+++ b/Lib/tkinter/scrolledtext.py
@@ -11,11 +11,11 @@ Most methods calls are inherited from the Text widget; Pack, Grid and
Place methods are redirected to the Frame widget however.
"""
-__all__ = ['ScrolledText']
-
from tkinter import Frame, Text, Scrollbar, Pack, Grid, Place
from tkinter.constants import RIGHT, LEFT, Y, BOTH
+__all__ = ['ScrolledText']
+
class ScrolledText(Text):
def __init__(self, master=None, **kw):
diff --git a/Lib/tkinter/test/test_tkinter/test_font.py b/Lib/tkinter/test/test_tkinter/test_font.py
index 97cd87cc..a021ea33 100644
--- a/Lib/tkinter/test/test_tkinter/test_font.py
+++ b/Lib/tkinter/test/test_tkinter/test_font.py
@@ -1,7 +1,7 @@
import unittest
import tkinter
from tkinter import font
-from test.support import requires, run_unittest, gc_collect
+from test.support import requires, run_unittest, gc_collect, ALWAYS_EQ
from tkinter.test.support import AbstractTkTest
requires('gui')
@@ -70,6 +70,7 @@ class FontTest(AbstractTkTest, unittest.TestCase):
self.assertEqual(font1, font2)
self.assertNotEqual(font1, font1.copy())
self.assertNotEqual(font1, 0)
+ self.assertEqual(font1, ALWAYS_EQ)
def test_measure(self):
self.assertIsInstance(self.font.measure('abc'), int)
diff --git a/Lib/tkinter/test/test_tkinter/test_misc.py b/Lib/tkinter/test/test_tkinter/test_misc.py
index 236cae0e..1e089747 100644
--- a/Lib/tkinter/test/test_tkinter/test_misc.py
+++ b/Lib/tkinter/test/test_tkinter/test_misc.py
@@ -7,6 +7,20 @@ support.requires('gui')
class MiscTest(AbstractTkTest, unittest.TestCase):
+ def test_all(self):
+ self.assertIn("Widget", tkinter.__all__)
+ # Check that variables from tkinter.constants are also in tkinter.__all__
+ self.assertIn("CASCADE", tkinter.__all__)
+ self.assertIsNotNone(tkinter.CASCADE)
+ # Check that sys, re, and constants are not in tkinter.__all__
+ self.assertNotIn("re", tkinter.__all__)
+ self.assertNotIn("sys", tkinter.__all__)
+ self.assertNotIn("constants", tkinter.__all__)
+        # Check that an underscored function is not in tkinter.__all__
+ self.assertNotIn("_tkerror", tkinter.__all__)
+ # Check that wantobjects is not in tkinter.__all__
+ self.assertNotIn("wantobjects", tkinter.__all__)
+
def test_repr(self):
t = tkinter.Toplevel(self.root, name='top')
f = tkinter.Frame(t, name='child')
diff --git a/Lib/tkinter/test/test_tkinter/test_variables.py b/Lib/tkinter/test/test_tkinter/test_variables.py
index 2eb1e126..08b7dedc 100644
--- a/Lib/tkinter/test/test_tkinter/test_variables.py
+++ b/Lib/tkinter/test/test_tkinter/test_variables.py
@@ -2,6 +2,7 @@ import unittest
import gc
from tkinter import (Variable, StringVar, IntVar, DoubleVar, BooleanVar, Tcl,
TclError)
+from test.support import ALWAYS_EQ
class Var(Variable):
@@ -59,11 +60,17 @@ class TestVariable(TestBase):
# values doesn't matter, only class and name are checked
v1 = Variable(self.root, name="abc")
v2 = Variable(self.root, name="abc")
+ self.assertIsNot(v1, v2)
self.assertEqual(v1, v2)
- v3 = Variable(self.root, name="abc")
- v4 = StringVar(self.root, name="abc")
- self.assertNotEqual(v3, v4)
+ v3 = StringVar(self.root, name="abc")
+ self.assertNotEqual(v1, v3)
+
+ V = type('Variable', (), {})
+ self.assertNotEqual(v1, V())
+
+ self.assertNotEqual(v1, object())
+ self.assertEqual(v1, ALWAYS_EQ)
def test_invalid_name(self):
with self.assertRaises(TypeError):
diff --git a/Lib/trace.py b/Lib/trace.py
index 89f17d48..c505d8bc 100755
--- a/Lib/trace.py
+++ b/Lib/trace.py
@@ -453,22 +453,7 @@ class Trace:
sys.settrace(None)
threading.settrace(None)
- def runfunc(*args, **kw):
- if len(args) >= 2:
- self, func, *args = args
- elif not args:
- raise TypeError("descriptor 'runfunc' of 'Trace' object "
- "needs an argument")
- elif 'func' in kw:
- func = kw.pop('func')
- self, *args = args
- import warnings
- warnings.warn("Passing 'func' as keyword argument is deprecated",
- DeprecationWarning, stacklevel=2)
- else:
- raise TypeError('runfunc expected at least 1 positional argument, '
- 'got %d' % (len(args)-1))
-
+ def runfunc(self, func, /, *args, **kw):
result = None
if not self.donothing:
sys.settrace(self.globaltrace)
@@ -478,7 +463,6 @@ class Trace:
if not self.donothing:
sys.settrace(None)
return result
- runfunc.__text_signature__ = '($self, func, /, *args, **kw)'
def file_module_function_of(self, frame):
code = frame.f_code
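Reviewer note (not part of the patch): the rewritten Trace.runfunc() above uses a real positional-only signature, so arbitrary keywords, including one literally named func, pass straight through to the traced callable. A sketch with a hypothetical call() helper:

    import trace

    tracer = trace.Trace(count=False, trace=False)   # do-nothing tracer for the demo

    def call(func, repeat=1):
        return [func() for _ in range(repeat)]

    # 'func' below is forwarded to call() as a keyword, not captured by runfunc().
    result = tracer.runfunc(call, func=lambda: 42, repeat=2)
    assert result == [42, 42]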
diff --git a/Lib/traceback.py b/Lib/traceback.py
index 5ef3be74..a19e3871 100644
--- a/Lib/traceback.py
+++ b/Lib/traceback.py
@@ -538,7 +538,9 @@ class TracebackException:
self.__cause__._load_lines()
def __eq__(self, other):
- return self.__dict__ == other.__dict__
+ if isinstance(other, TracebackException):
+ return self.__dict__ == other.__dict__
+ return NotImplemented
def __str__(self):
return self._str
@@ -567,23 +569,30 @@ class TracebackException:
if not issubclass(self.exc_type, SyntaxError):
yield _format_final_exc_line(stype, self._str)
- return
+ else:
+ yield from self._format_syntax_error(stype)
- # It was a syntax error; show exactly where the problem was found.
+ def _format_syntax_error(self, stype):
+ """Format SyntaxError exceptions (internal helper)."""
+ # Show exactly where the problem was found.
filename = self.filename or ""
lineno = str(self.lineno) or '?'
yield ' File "{}", line {}\n'.format(filename, lineno)
- badline = self.text
- offset = self.offset
- if badline is not None:
- yield ' {}\n'.format(badline.strip())
- if offset is not None:
- caretspace = badline.rstrip('\n')
- offset = min(len(caretspace), offset) - 1
- caretspace = caretspace[:offset].lstrip()
+ text = self.text
+ if text is not None:
+            # text  = "   foo\n"
+            # rtext = "   foo"
+            # ltext = "foo"
+ rtext = text.rstrip('\n')
+ ltext = rtext.lstrip(' \n\f')
+ spaces = len(rtext) - len(ltext)
+ yield ' {}\n'.format(ltext)
+ # Convert 1-based column offset to 0-based index into stripped text
+ caret = (self.offset or 0) - 1 - spaces
+ if caret >= 0:
# non-space whitespace (likes tabs) must be kept for alignment
- caretspace = ((c.isspace() and c or ' ') for c in caretspace)
+ caretspace = ((c if c.isspace() else ' ') for c in ltext[:caret])
yield ' {}^\n'.format(''.join(caretspace))
msg = self.msg or ""
yield "{}: {}\n".format(stype, msg)
diff --git a/Lib/tracemalloc.py b/Lib/tracemalloc.py
index 2c1ac3b3..69b4170e 100644
--- a/Lib/tracemalloc.py
+++ b/Lib/tracemalloc.py
@@ -43,6 +43,8 @@ class Statistic:
return hash((self.traceback, self.size, self.count))
def __eq__(self, other):
+ if not isinstance(other, Statistic):
+ return NotImplemented
return (self.traceback == other.traceback
and self.size == other.size
and self.count == other.count)
@@ -84,6 +86,8 @@ class StatisticDiff:
self.count, self.count_diff))
def __eq__(self, other):
+ if not isinstance(other, StatisticDiff):
+ return NotImplemented
return (self.traceback == other.traceback
and self.size == other.size
and self.size_diff == other.size_diff
@@ -153,9 +157,13 @@ class Frame:
return self._frame[1]
def __eq__(self, other):
+ if not isinstance(other, Frame):
+ return NotImplemented
return (self._frame == other._frame)
def __lt__(self, other):
+ if not isinstance(other, Frame):
+ return NotImplemented
return (self._frame < other._frame)
def __hash__(self):
@@ -174,15 +182,20 @@ class Traceback(Sequence):
Sequence of Frame instances sorted from the oldest frame
to the most recent frame.
"""
- __slots__ = ("_frames",)
+ __slots__ = ("_frames", '_total_nframe')
- def __init__(self, frames):
+ def __init__(self, frames, total_nframe=None):
Sequence.__init__(self)
# frames is a tuple of frame tuples: see Frame constructor for the
# format of a frame tuple; it is reversed, because _tracemalloc
# returns frames sorted from most recent to oldest, but the
# Python API expects oldest to most recent
self._frames = tuple(reversed(frames))
+ self._total_nframe = total_nframe
+
+ @property
+ def total_nframe(self):
+ return self._total_nframe
def __len__(self):
return len(self._frames)
@@ -200,16 +213,25 @@ class Traceback(Sequence):
return hash(self._frames)
def __eq__(self, other):
+ if not isinstance(other, Traceback):
+ return NotImplemented
return (self._frames == other._frames)
def __lt__(self, other):
+ if not isinstance(other, Traceback):
+ return NotImplemented
return (self._frames < other._frames)
def __str__(self):
return str(self[0])
def __repr__(self):
-        return "<Traceback %r>" % (tuple(self),)
+        s = "<Traceback %r" % tuple(self)
+        if self._total_nframe is None:
+            s += ">"
+        else:
+            s += f" total_nframe={self.total_nframe}>"
+        return s
def format(self, limit=None, most_recent_first=False):
lines = []
@@ -268,9 +290,11 @@ class Trace:
@property
def traceback(self):
- return Traceback(self._trace[2])
+ return Traceback(*self._trace[2:])
def __eq__(self, other):
+ if not isinstance(other, Trace):
+ return NotImplemented
return (self._trace == other._trace)
def __hash__(self):
@@ -303,6 +327,8 @@ class _Traces(Sequence):
return trace._trace in self._traces
def __eq__(self, other):
+ if not isinstance(other, _Traces):
+ return NotImplemented
return (self._traces == other._traces)
def __repr__(self):
@@ -362,7 +388,7 @@ class Filter(BaseFilter):
return self._match_frame(filename, lineno)
def _match(self, trace):
- domain, size, traceback = trace
+ domain, size, traceback, total_nframe = trace
res = self._match_traceback(traceback)
if self.domain is not None:
if self.inclusive:
@@ -382,7 +408,7 @@ class DomainFilter(BaseFilter):
return self._domain
def _match(self, trace):
- domain, size, traceback = trace
+ domain, size, traceback, total_nframe = trace
return (domain == self.domain) ^ (not self.inclusive)
@@ -459,7 +485,7 @@ class Snapshot:
tracebacks = {}
if not cumulative:
for trace in self.traces._traces:
- domain, size, trace_traceback = trace
+ domain, size, trace_traceback, total_nframe = trace
try:
traceback = tracebacks[trace_traceback]
except KeyError:
@@ -480,7 +506,7 @@ class Snapshot:
else:
# cumulative statistics
for trace in self.traces._traces:
- domain, size, trace_traceback = trace
+ domain, size, trace_traceback, total_nframe = trace
for frame in trace_traceback:
try:
traceback = tracebacks[frame]
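Reviewer note (not part of the patch): the trace tuples gain a fourth element above, surfaced as Traceback.total_nframe: the real depth of the allocating call stack, even when only the first nframe frames were stored. A rough inspection sketch (the printed numbers are illustrative):

    import tracemalloc

    tracemalloc.start(2)                      # keep at most 2 frames per allocation
    data = [bytes(1000) for _ in range(100)]  # something to trace
    snapshot = tracemalloc.take_snapshot()
    tracemalloc.stop()

    trace = snapshot.traces[0]
    print(len(trace.traceback), trace.traceback.total_nframe)
    # len(...) is capped at 2; total_nframe reports how deep the stack really was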
diff --git a/Lib/turtledemo/__main__.py b/Lib/turtledemo/__main__.py
old mode 100644
new mode 100755
diff --git a/Lib/turtledemo/sorting_animate.py b/Lib/turtledemo/sorting_animate.py
old mode 100644
new mode 100755
diff --git a/Lib/turtledemo/two_canvases.py b/Lib/turtledemo/two_canvases.py
old mode 100755
new mode 100644
diff --git a/Lib/types.py b/Lib/types.py
index ea3c0b29..ad2020ec 100644
--- a/Lib/types.py
+++ b/Lib/types.py
@@ -293,4 +293,7 @@ def coroutine(func):
return wrapped
+GenericAlias = type(list[int])
+
+
__all__ = [n for n in globals() if n[:1] != '_']
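Reviewer note (not part of the patch): types.GenericAlias, exported above, is the runtime class behind PEP 585 parametrized builtins such as list[int]; the typing changes below build on it. A minimal sketch:

    from types import GenericAlias

    alias = GenericAlias(dict, (str, int))
    assert alias == dict[str, int]
    assert alias.__origin__ is dict
    assert alias.__args__ == (str, int)
    assert isinstance({"a": 1}, alias.__origin__)   # isinstance checks go through the origin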
diff --git a/Lib/typing.py b/Lib/typing.py
index 4ec538da..39c956dd 100644
--- a/Lib/typing.py
+++ b/Lib/typing.py
@@ -26,11 +26,12 @@ import operator
import re as stdlib_re # Avoid confusion with the re we export.
import sys
import types
-from types import WrapperDescriptorType, MethodWrapperType, MethodDescriptorType
+from types import WrapperDescriptorType, MethodWrapperType, MethodDescriptorType, GenericAlias
# Please keep __all__ alphabetized within each category.
__all__ = [
# Super-special typing primitives.
+ 'Annotated',
'Any',
'Callable',
'ClassVar',
@@ -140,8 +141,9 @@ def _type_check(arg, msg, is_argument=True):
if (isinstance(arg, _GenericAlias) and
arg.__origin__ in invalid_generic_forms):
raise TypeError(f"{arg} is not valid as type argument")
- if (isinstance(arg, _SpecialForm) and arg not in (Any, NoReturn) or
- arg in (Generic, Protocol)):
+ if arg in (Any, NoReturn):
+ return arg
+ if isinstance(arg, _SpecialForm) or arg in (Generic, Protocol):
raise TypeError(f"Plain {arg} is not valid as type argument")
if isinstance(arg, (type, TypeVar, ForwardRef)):
return arg
@@ -179,38 +181,18 @@ def _collect_type_vars(types):
for t in types:
if isinstance(t, TypeVar) and t not in tvars:
tvars.append(t)
- if isinstance(t, _GenericAlias) and not t._special:
+ if isinstance(t, (_GenericAlias, GenericAlias)):
tvars.extend([t for t in t.__parameters__ if t not in tvars])
return tuple(tvars)
-def _subs_tvars(tp, tvars, subs):
- """Substitute type variables 'tvars' with substitutions 'subs'.
- These two must have the same length.
- """
- if not isinstance(tp, _GenericAlias):
- return tp
- new_args = list(tp.__args__)
- for a, arg in enumerate(tp.__args__):
- if isinstance(arg, TypeVar):
- for i, tvar in enumerate(tvars):
- if arg == tvar:
- new_args[a] = subs[i]
- else:
- new_args[a] = _subs_tvars(arg, tvars, subs)
- if tp.__origin__ is Union:
- return Union[tuple(new_args)]
- return tp.copy_with(tuple(new_args))
-
-
-def _check_generic(cls, parameters):
+def _check_generic(cls, parameters, elen):
"""Check correct count for parameters of a generic cls (internal helper).
This gives a nice error message in case of count mismatch.
"""
- if not cls.__parameters__:
+ if not elen:
raise TypeError(f"{cls} is not a generic class")
alen = len(parameters)
- elen = len(cls.__parameters__)
if alen != elen:
raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};"
f" actual {alen}, expected {elen}")
@@ -223,7 +205,7 @@ def _remove_dups_flatten(parameters):
# Flatten out Union[Union[...], ...].
params = []
for p in parameters:
- if isinstance(p, _GenericAlias) and p.__origin__ is Union:
+ if isinstance(p, _UnionGenericAlias):
params.extend(p.__args__)
elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
params.extend(p[1:])
@@ -262,19 +244,22 @@ def _tp_cache(func):
return inner
-def _eval_type(t, globalns, localns):
+def _eval_type(t, globalns, localns, recursive_guard=frozenset()):
"""Evaluate all forward references in the given type t.
For use of globalns and localns see the docstring for get_type_hints().
+    recursive_guard is used to prevent infinite recursion
+ with recursive ForwardRef.
"""
if isinstance(t, ForwardRef):
- return t._evaluate(globalns, localns)
- if isinstance(t, _GenericAlias):
- ev_args = tuple(_eval_type(a, globalns, localns) for a in t.__args__)
+ return t._evaluate(globalns, localns, recursive_guard)
+ if isinstance(t, (_GenericAlias, GenericAlias)):
+ ev_args = tuple(_eval_type(a, globalns, localns, recursive_guard) for a in t.__args__)
if ev_args == t.__args__:
return t
- res = t.copy_with(ev_args)
- res._special = t._special
- return res
+ if isinstance(t, GenericAlias):
+ return GenericAlias(t.__origin__, ev_args)
+ else:
+ return t.copy_with(ev_args)
return t
@@ -289,6 +274,7 @@ class _Final:
class _Immutable:
"""Mixin to indicate that object should not be copied."""
+ __slots__ = ()
def __copy__(self):
return self
@@ -297,37 +283,18 @@ class _Immutable:
return self
-class _SpecialForm(_Final, _Immutable, _root=True):
- """Internal indicator of special typing constructs.
- See _doc instance attribute for specific docs.
- """
-
- __slots__ = ('_name', '_doc')
-
- def __new__(cls, *args, **kwds):
- """Constructor.
+# Internal indicator of special typing constructs.
+# See __doc__ instance attribute for specific docs.
+class _SpecialForm(_Final, _root=True):
+ __slots__ = ('_name', '__doc__', '_getitem')
- This only exists to give a better error message in case
- someone tries to subclass a special typing object (not a good idea).
- """
- if (len(args) == 3 and
- isinstance(args[0], str) and
- isinstance(args[1], tuple)):
- # Close enough.
- raise TypeError(f"Cannot subclass {cls!r}")
- return super().__new__(cls)
-
- def __init__(self, name, doc):
- self._name = name
- self._doc = doc
+ def __init__(self, getitem):
+ self._getitem = getitem
+ self._name = getitem.__name__
+ self.__doc__ = getitem.__doc__
- def __eq__(self, other):
- if not isinstance(other, _SpecialForm):
- return NotImplemented
- return self._name == other._name
-
- def __hash__(self):
- return hash((self._name,))
+ def __mro_entries__(self, bases):
+ raise TypeError(f"Cannot subclass {self!r}")
def __repr__(self):
return 'typing.' + self._name
@@ -346,31 +313,10 @@ class _SpecialForm(_Final, _Immutable, _root=True):
@_tp_cache
def __getitem__(self, parameters):
- if self._name in ('ClassVar', 'Final'):
- item = _type_check(parameters, f'{self._name} accepts only single type.')
- return _GenericAlias(self, (item,))
- if self._name == 'Union':
- if parameters == ():
- raise TypeError("Cannot take a Union of no types.")
- if not isinstance(parameters, tuple):
- parameters = (parameters,)
- msg = "Union[arg, ...]: each arg must be a type."
- parameters = tuple(_type_check(p, msg) for p in parameters)
- parameters = _remove_dups_flatten(parameters)
- if len(parameters) == 1:
- return parameters[0]
- return _GenericAlias(self, parameters)
- if self._name == 'Optional':
- arg = _type_check(parameters, "Optional[t] requires a single type.")
- return Union[arg, type(None)]
- if self._name == 'Literal':
- # There is no '_type_check' call because arguments to Literal[...] are
- # values, not types.
- return _GenericAlias(self, parameters)
- raise TypeError(f"{self} is not subscriptable")
-
-
-Any = _SpecialForm('Any', doc=
+ return self._getitem(self, parameters)
+
+@_SpecialForm
+def Any(self, parameters):
"""Special type indicating an unconstrained type.
- Any is compatible with every type.
@@ -380,9 +326,11 @@ Any = _SpecialForm('Any', doc=
Note that all the above statements are true from the point of view of
static type checkers. At runtime, Any should not be used with instance
or class checks.
- """)
+ """
+ raise TypeError(f"{self} is not subscriptable")
-NoReturn = _SpecialForm('NoReturn', doc=
+@_SpecialForm
+def NoReturn(self, parameters):
"""Special type indicating functions that never return.
Example::
@@ -393,9 +341,11 @@ NoReturn = _SpecialForm('NoReturn', doc=
This type is invalid in other positions, e.g., ``List[NoReturn]``
will fail in static type checkers.
- """)
+ """
+ raise TypeError(f"{self} is not subscriptable")
-ClassVar = _SpecialForm('ClassVar', doc=
+@_SpecialForm
+def ClassVar(self, parameters):
"""Special type construct to mark class variables.
An annotation wrapped in ClassVar indicates that a given
@@ -410,9 +360,12 @@ ClassVar = _SpecialForm('ClassVar', doc=
Note that ClassVar is not a class itself, and should not
be used with isinstance() or issubclass().
- """)
+ """
+ item = _type_check(parameters, f'{self} accepts only single type.')
+ return _GenericAlias(self, (item,))
-Final = _SpecialForm('Final', doc=
+@_SpecialForm
+def Final(self, parameters):
"""Special typing construct to indicate final names to type checkers.
A final name cannot be re-assigned or overridden in a subclass.
@@ -428,9 +381,12 @@ Final = _SpecialForm('Final', doc=
TIMEOUT = 1 # Error reported by type checker
There is no runtime checking of these properties.
- """)
+ """
+ item = _type_check(parameters, f'{self} accepts only single type.')
+ return _GenericAlias(self, (item,))
-Union = _SpecialForm('Union', doc=
+@_SpecialForm
+def Union(self, parameters):
"""Union type; Union[X, Y] means either X or Y.
To define a union, use e.g. Union[int, str]. Details:
@@ -455,15 +411,29 @@ Union = _SpecialForm('Union', doc=
- You cannot subclass or instantiate a union.
- You can use Optional[X] as a shorthand for Union[X, None].
- """)
-
-Optional = _SpecialForm('Optional', doc=
+ """
+ if parameters == ():
+ raise TypeError("Cannot take a Union of no types.")
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ msg = "Union[arg, ...]: each arg must be a type."
+ parameters = tuple(_type_check(p, msg) for p in parameters)
+ parameters = _remove_dups_flatten(parameters)
+ if len(parameters) == 1:
+ return parameters[0]
+ return _UnionGenericAlias(self, parameters)
+
+@_SpecialForm
+def Optional(self, parameters):
"""Optional type.
Optional[X] is equivalent to Union[X, None].
- """)
+ """
+ arg = _type_check(parameters, f"{self} requires a single type.")
+ return Union[arg, type(None)]
-Literal = _SpecialForm('Literal', doc=
+@_SpecialForm
+def Literal(self, parameters):
"""Special typing form to define literal types (a.k.a. value types).
This form can be used to indicate to type checkers that the corresponding
@@ -480,10 +450,13 @@ Literal = _SpecialForm('Literal', doc=
open_helper('/some/path', 'r') # Passes type check
open_helper('/other/path', 'typo') # Error in type checker
- Literal[...] cannot be subclassed. At runtime, an arbitrary value
- is allowed as type argument to Literal[...], but type checkers may
- impose restrictions.
- """)
+ Literal[...] cannot be subclassed. At runtime, an arbitrary value
+ is allowed as type argument to Literal[...], but type checkers may
+ impose restrictions.
+ """
+ # There is no '_type_check' call because arguments to Literal[...] are
+ # values, not types.
+ return _GenericAlias(self, parameters)
class ForwardRef(_Final, _root=True):
@@ -506,7 +479,9 @@ class ForwardRef(_Final, _root=True):
self.__forward_value__ = None
self.__forward_is_argument__ = is_argument
- def _evaluate(self, globalns, localns):
+ def _evaluate(self, globalns, localns, recursive_guard):
+ if self.__forward_arg__ in recursive_guard:
+ return self
if not self.__forward_evaluated__ or localns is not globalns:
if globalns is None and localns is None:
globalns = localns = {}
@@ -514,10 +489,14 @@ class ForwardRef(_Final, _root=True):
globalns = localns
elif localns is None:
localns = globalns
- self.__forward_value__ = _type_check(
+            type_ = _type_check(
eval(self.__forward_code__, globalns, localns),
"Forward references must evaluate to types.",
- is_argument=self.__forward_is_argument__)
+ is_argument=self.__forward_is_argument__,
+ )
+ self.__forward_value__ = _eval_type(
+ type_, globalns, localns, recursive_guard | {self.__forward_arg__}
+ )
self.__forward_evaluated__ = True
return self.__forward_value__
@@ -581,7 +560,7 @@ class TypeVar(_Final, _Immutable, _root=True):
"""
__slots__ = ('__name__', '__bound__', '__constraints__',
- '__covariant__', '__contravariant__')
+ '__covariant__', '__contravariant__', '__dict__')
def __init__(self, name, *constraints, bound=None,
covariant=False, contravariant=False):
@@ -620,34 +599,10 @@ class TypeVar(_Final, _Immutable, _root=True):
return self.__name__
-# Special typing constructs Union, Optional, Generic, Callable and Tuple
-# use three special attributes for internal bookkeeping of generic types:
-# * __parameters__ is a tuple of unique free type parameters of a generic
-# type, for example, Dict[T, T].__parameters__ == (T,);
-# * __origin__ keeps a reference to a type that was subscripted,
-# e.g., Union[T, int].__origin__ == Union, or the non-generic version of
-# the type.
-# * __args__ is a tuple of all arguments used in subscripting,
-# e.g., Dict[T, int].__args__ == (T, int).
-
-
-# Mapping from non-generic type names that have a generic alias in typing
-# but with a different name.
-_normalize_alias = {'list': 'List',
- 'tuple': 'Tuple',
- 'dict': 'Dict',
- 'set': 'Set',
- 'frozenset': 'FrozenSet',
- 'deque': 'Deque',
- 'defaultdict': 'DefaultDict',
- 'type': 'Type',
- 'Set': 'AbstractSet'}
-
def _is_dunder(attr):
return attr.startswith('__') and attr.endswith('__')
-
-class _GenericAlias(_Final, _root=True):
+class _BaseGenericAlias(_Final, _root=True):
"""The central part of internal API.
This represents a generic version of type 'origin' with type arguments 'params'.
@@ -656,24 +611,88 @@ class _GenericAlias(_Final, _root=True):
have 'name' always set. If 'inst' is False, then the alias can't be instantiated,
this is used by e.g. typing.List and typing.Dict.
"""
- def __init__(self, origin, params, *, inst=True, special=False, name=None):
+ def __init__(self, origin, *, inst=True, name=None):
self._inst = inst
- self._special = special
- if special and name is None:
- orig_name = origin.__name__
- name = _normalize_alias.get(orig_name, orig_name)
self._name = name
+ self.__origin__ = origin
+ self.__slots__ = None # This is not documented.
+
+ def __call__(self, *args, **kwargs):
+ if not self._inst:
+ raise TypeError(f"Type {self._name} cannot be instantiated; "
+ f"use {self.__origin__.__name__}() instead")
+ result = self.__origin__(*args, **kwargs)
+ try:
+ result.__orig_class__ = self
+ except AttributeError:
+ pass
+ return result
+
+ def __mro_entries__(self, bases):
+ res = []
+ if self.__origin__ not in bases:
+ res.append(self.__origin__)
+ i = bases.index(self)
+ for b in bases[i+1:]:
+ if isinstance(b, _BaseGenericAlias) or issubclass(b, Generic):
+ break
+ else:
+ res.append(Generic)
+ return tuple(res)
+
+ def __getattr__(self, attr):
+ # We are careful for copy and pickle.
+ # Also for simplicity we just don't relay all dunder names
+ if '__origin__' in self.__dict__ and not _is_dunder(attr):
+ return getattr(self.__origin__, attr)
+ raise AttributeError(attr)
+
+ def __setattr__(self, attr, val):
+ if _is_dunder(attr) or attr in ('_name', '_inst', '_nparams'):
+ super().__setattr__(attr, val)
+ else:
+ setattr(self.__origin__, attr, val)
+
+ def __instancecheck__(self, obj):
+ return self.__subclasscheck__(type(obj))
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Subscripted generics cannot be used with"
+ " class and instance checks")
+
+
+# Special typing constructs Union, Optional, Generic, Callable and Tuple
+# use three special attributes for internal bookkeeping of generic types:
+# * __parameters__ is a tuple of unique free type parameters of a generic
+# type, for example, Dict[T, T].__parameters__ == (T,);
+# * __origin__ keeps a reference to a type that was subscripted,
+# e.g., Union[T, int].__origin__ == Union, or the non-generic version of
+# the type.
+# * __args__ is a tuple of all arguments used in subscripting,
+# e.g., Dict[T, int].__args__ == (T, int).
+
+
+class _GenericAlias(_BaseGenericAlias, _root=True):
+ def __init__(self, origin, params, *, inst=True, name=None):
+ super().__init__(origin, inst=inst, name=name)
if not isinstance(params, tuple):
params = (params,)
- self.__origin__ = origin
self.__args__ = tuple(... if a is _TypingEllipsis else
() if a is _TypingEmpty else
a for a in params)
self.__parameters__ = _collect_type_vars(params)
- self.__slots__ = None # This is not documented.
if not name:
self.__module__ = origin.__module__
+ def __eq__(self, other):
+ if not isinstance(other, _GenericAlias):
+ return NotImplemented
+ return (self.__origin__ == other.__origin__
+ and self.__args__ == other.__args__)
+
+ def __hash__(self):
+ return hash((self.__origin__, self.__args__))
+
@_tp_cache
def __getitem__(self, params):
if self.__origin__ in (Generic, Protocol):
@@ -683,125 +702,119 @@ class _GenericAlias(_Final, _root=True):
params = (params,)
msg = "Parameters to generic types must be types."
params = tuple(_type_check(p, msg) for p in params)
- _check_generic(self, params)
- return _subs_tvars(self, self.__parameters__, params)
+ _check_generic(self, params, len(self.__parameters__))
+
+ subst = dict(zip(self.__parameters__, params))
+ new_args = []
+ for arg in self.__args__:
+ if isinstance(arg, TypeVar):
+ arg = subst[arg]
+ elif isinstance(arg, (_GenericAlias, GenericAlias)):
+ subparams = arg.__parameters__
+ if subparams:
+ subargs = tuple(subst[x] for x in subparams)
+ arg = arg[subargs]
+ new_args.append(arg)
+ return self.copy_with(tuple(new_args))
def copy_with(self, params):
- # We don't copy self._special.
- return _GenericAlias(self.__origin__, params, name=self._name, inst=self._inst)
+ return self.__class__(self.__origin__, params, name=self._name, inst=self._inst)
def __repr__(self):
- if (self._name != 'Callable' or
- len(self.__args__) == 2 and self.__args__[0] is Ellipsis):
- if self._name:
- name = 'typing.' + self._name
- else:
- name = _type_repr(self.__origin__)
- if not self._special:
- args = f'[{", ".join([_type_repr(a) for a in self.__args__])}]'
- else:
- args = ''
- return (f'{name}{args}')
- if self._special:
- return 'typing.Callable'
- return (f'typing.Callable'
- f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], '
- f'{_type_repr(self.__args__[-1])}]')
-
- def __eq__(self, other):
- if not isinstance(other, _GenericAlias):
- return NotImplemented
- if self.__origin__ != other.__origin__:
- return False
- if self.__origin__ is Union and other.__origin__ is Union:
- return frozenset(self.__args__) == frozenset(other.__args__)
- return self.__args__ == other.__args__
-
- def __hash__(self):
- if self.__origin__ is Union:
- return hash((Union, frozenset(self.__args__)))
- return hash((self.__origin__, self.__args__))
+ if self._name:
+ name = 'typing.' + self._name
+ else:
+ name = _type_repr(self.__origin__)
+ args = ", ".join([_type_repr(a) for a in self.__args__])
+ return f'{name}[{args}]'
- def __call__(self, *args, **kwargs):
- if not self._inst:
- raise TypeError(f"Type {self._name} cannot be instantiated; "
- f"use {self._name.lower()}() instead")
- result = self.__origin__(*args, **kwargs)
- try:
- result.__orig_class__ = self
- except AttributeError:
- pass
- return result
+ def __reduce__(self):
+ if self._name:
+ origin = globals()[self._name]
+ else:
+ origin = self.__origin__
+ args = tuple(self.__args__)
+ if len(args) == 1 and not isinstance(args[0], tuple):
+ args, = args
+ return operator.getitem, (origin, args)
def __mro_entries__(self, bases):
if self._name: # generic version of an ABC or built-in class
- res = []
- if self.__origin__ not in bases:
- res.append(self.__origin__)
- i = bases.index(self)
- if not any(isinstance(b, _GenericAlias) or issubclass(b, Generic)
- for b in bases[i+1:]):
- res.append(Generic)
- return tuple(res)
+ return super().__mro_entries__(bases)
if self.__origin__ is Generic:
if Protocol in bases:
return ()
i = bases.index(self)
for b in bases[i+1:]:
- if isinstance(b, _GenericAlias) and b is not self:
+ if isinstance(b, _BaseGenericAlias) and b is not self:
return ()
return (self.__origin__,)
- def __getattr__(self, attr):
- # We are careful for copy and pickle.
- # Also for simplicity we just don't relay all dunder names
- if '__origin__' in self.__dict__ and not _is_dunder(attr):
- return getattr(self.__origin__, attr)
- raise AttributeError(attr)
- def __setattr__(self, attr, val):
- if _is_dunder(attr) or attr in ('_name', '_inst', '_special'):
- super().__setattr__(attr, val)
+# _nparams is the number of accepted parameters, e.g. 0 for Hashable,
+# 1 for List and 2 for Dict. It may be -1 if variable number of
+# parameters are accepted (needs custom __getitem__).
+
+class _SpecialGenericAlias(_BaseGenericAlias, _root=True):
+ def __init__(self, origin, nparams, *, inst=True, name=None):
+ if name is None:
+ name = origin.__name__
+ super().__init__(origin, inst=inst, name=name)
+ self._nparams = nparams
+ if origin.__module__ == 'builtins':
+ self.__doc__ = f'A generic version of {origin.__qualname__}.'
else:
- setattr(self.__origin__, attr, val)
+ self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}.'
- def __instancecheck__(self, obj):
- return self.__subclasscheck__(type(obj))
+ @_tp_cache
+ def __getitem__(self, params):
+ if not isinstance(params, tuple):
+ params = (params,)
+ msg = "Parameters to generic types must be types."
+ params = tuple(_type_check(p, msg) for p in params)
+ _check_generic(self, params, self._nparams)
+ return self.copy_with(params)
+
+ def copy_with(self, params):
+ return _GenericAlias(self.__origin__, params,
+ name=self._name, inst=self._inst)
+
+ def __repr__(self):
+ return 'typing.' + self._name
def __subclasscheck__(self, cls):
- if self._special:
- if not isinstance(cls, _GenericAlias):
- return issubclass(cls, self.__origin__)
- if cls._special:
- return issubclass(cls.__origin__, self.__origin__)
- raise TypeError("Subscripted generics cannot be used with"
- " class and instance checks")
+ if isinstance(cls, _SpecialGenericAlias):
+ return issubclass(cls.__origin__, self.__origin__)
+ if not isinstance(cls, _GenericAlias):
+ return issubclass(cls, self.__origin__)
+ return super().__subclasscheck__(cls)
def __reduce__(self):
- if self._special:
- return self._name
+ return self._name
- if self._name:
- origin = globals()[self._name]
- else:
- origin = self.__origin__
- if (origin is Callable and
- not (len(self.__args__) == 2 and self.__args__[0] is Ellipsis)):
- args = list(self.__args__[:-1]), self.__args__[-1]
- else:
- args = tuple(self.__args__)
- if len(args) == 1 and not isinstance(args[0], tuple):
- args, = args
- return operator.getitem, (origin, args)
+class _CallableGenericAlias(_GenericAlias, _root=True):
+ def __repr__(self):
+ assert self._name == 'Callable'
+ if len(self.__args__) == 2 and self.__args__[0] is Ellipsis:
+ return super().__repr__()
+ return (f'typing.Callable'
+ f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], '
+ f'{_type_repr(self.__args__[-1])}]')
+
+ def __reduce__(self):
+ args = self.__args__
+ if not (len(args) == 2 and args[0] is ...):
+ args = list(args[:-1]), args[-1]
+ return operator.getitem, (Callable, args)
+
+
+class _CallableType(_SpecialGenericAlias, _root=True):
+ def copy_with(self, params):
+ return _CallableGenericAlias(self.__origin__, params,
+ name=self._name, inst=self._inst)
-class _VariadicGenericAlias(_GenericAlias, _root=True):
- """Same as _GenericAlias above but for variadic aliases. Currently,
- this is used only by special internal aliases: Tuple and Callable.
- """
def __getitem__(self, params):
- if self._name != 'Callable' or not self._special:
- return self.__getitem_inner__(params)
if not isinstance(params, tuple) or len(params) != 2:
raise TypeError("Callable must be used as "
"Callable[[arg, ...], result].")
@@ -817,29 +830,53 @@ class _VariadicGenericAlias(_GenericAlias, _root=True):
@_tp_cache
def __getitem_inner__(self, params):
- if self.__origin__ is tuple and self._special:
- if params == ():
- return self.copy_with((_TypingEmpty,))
- if not isinstance(params, tuple):
- params = (params,)
- if len(params) == 2 and params[1] is ...:
- msg = "Tuple[t, ...]: t must be a type."
- p = _type_check(params[0], msg)
- return self.copy_with((p, _TypingEllipsis))
- msg = "Tuple[t0, t1, ...]: each t must be a type."
- params = tuple(_type_check(p, msg) for p in params)
- return self.copy_with(params)
- if self.__origin__ is collections.abc.Callable and self._special:
- args, result = params
- msg = "Callable[args, result]: result must be a type."
- result = _type_check(result, msg)
- if args is Ellipsis:
- return self.copy_with((_TypingEllipsis, result))
- msg = "Callable[[arg, ...], result]: each arg must be a type."
- args = tuple(_type_check(arg, msg) for arg in args)
- params = args + (result,)
- return self.copy_with(params)
- return super().__getitem__(params)
+ args, result = params
+ msg = "Callable[args, result]: result must be a type."
+ result = _type_check(result, msg)
+ if args is Ellipsis:
+ return self.copy_with((_TypingEllipsis, result))
+ msg = "Callable[[arg, ...], result]: each arg must be a type."
+ args = tuple(_type_check(arg, msg) for arg in args)
+ params = args + (result,)
+ return self.copy_with(params)
+
+
+class _TupleType(_SpecialGenericAlias, _root=True):
+ @_tp_cache
+ def __getitem__(self, params):
+ if params == ():
+ return self.copy_with((_TypingEmpty,))
+ if not isinstance(params, tuple):
+ params = (params,)
+ if len(params) == 2 and params[1] is ...:
+ msg = "Tuple[t, ...]: t must be a type."
+ p = _type_check(params[0], msg)
+ return self.copy_with((p, _TypingEllipsis))
+ msg = "Tuple[t0, t1, ...]: each t must be a type."
+ params = tuple(_type_check(p, msg) for p in params)
+ return self.copy_with(params)
+
+
+class _UnionGenericAlias(_GenericAlias, _root=True):
+ def copy_with(self, params):
+ return Union[params]
+
+ def __eq__(self, other):
+ if not isinstance(other, _UnionGenericAlias):
+ return NotImplemented
+ return set(self.__args__) == set(other.__args__)
+
+ def __hash__(self):
+ return hash(frozenset(self.__args__))
+
+ def __repr__(self):
+ args = self.__args__
+ if len(args) == 2:
+ if args[0] is type(None):
+ return f'typing.Optional[{_type_repr(args[1])}]'
+ elif args[1] is type(None):
+ return f'typing.Optional[{_type_repr(args[0])}]'
+ return super().__repr__()
class Generic:
@@ -865,16 +902,6 @@ class Generic:
__slots__ = ()
_is_protocol = False
- def __new__(cls, *args, **kwds):
- if cls in (Generic, Protocol):
- raise TypeError(f"Type {cls.__name__} cannot be instantiated; "
- "it can be used only as a base class")
- if super().__new__ is object.__new__ and cls.__init__ is not object.__init__:
- obj = super().__new__(cls)
- else:
- obj = super().__new__(cls, *args, **kwds)
- return obj
-
@_tp_cache
def __class_getitem__(cls, params):
if not isinstance(params, tuple):
@@ -894,7 +921,7 @@ class Generic:
f"Parameters to {cls.__name__}[...] must all be unique")
else:
# Subscripting a regular Generic subclass.
- _check_generic(cls, params)
+ _check_generic(cls, params, len(cls.__parameters__))
return _GenericAlias(cls, params)
def __init_subclass__(cls, *args, **kwargs):
@@ -949,7 +976,7 @@ _TYPING_INTERNALS = ['__parameters__', '__orig_bases__', '__orig_class__',
_SPECIAL_NAMES = ['__abstractmethods__', '__annotations__', '__dict__', '__doc__',
'__init__', '__module__', '__new__', '__slots__',
- '__subclasshook__', '__weakref__']
+ '__subclasshook__', '__weakref__', '__class_getitem__']
# These special attributes will be not collected as protocol members.
EXCLUDED_ATTRIBUTES = _TYPING_INTERNALS + _SPECIAL_NAMES + ['_MutableMapping__marker']
@@ -983,7 +1010,7 @@ def _no_init(self, *args, **kwargs):
def _allow_reckless_class_cheks():
- """Allow instnance and class checks for special stdlib modules.
+ """Allow instance and class checks for special stdlib modules.
The abc and functools modules indiscriminately call isinstance() and
issubclass() on the whole MRO of a user class, which may contain protocols.
@@ -1121,6 +1148,100 @@ class Protocol(Generic, metaclass=_ProtocolMeta):
cls.__init__ = _no_init
+class _AnnotatedAlias(_GenericAlias, _root=True):
+ """Runtime representation of an annotated type.
+
+ At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
+ with extra annotations. The alias behaves like a normal typing alias:
+ instantiating it is the same as instantiating the underlying type, and
+ binding it to types is also the same.
+ """
+ def __init__(self, origin, metadata):
+ if isinstance(origin, _AnnotatedAlias):
+ metadata = origin.__metadata__ + metadata
+ origin = origin.__origin__
+ super().__init__(origin, origin)
+ self.__metadata__ = metadata
+
+ def copy_with(self, params):
+ assert len(params) == 1
+ new_type = params[0]
+ return _AnnotatedAlias(new_type, self.__metadata__)
+
+ def __repr__(self):
+ return "typing.Annotated[{}, {}]".format(
+ _type_repr(self.__origin__),
+ ", ".join(repr(a) for a in self.__metadata__)
+ )
+
+ def __reduce__(self):
+ return operator.getitem, (
+ Annotated, (self.__origin__,) + self.__metadata__
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, _AnnotatedAlias):
+ return NotImplemented
+ return (self.__origin__ == other.__origin__
+ and self.__metadata__ == other.__metadata__)
+
+ def __hash__(self):
+ return hash((self.__origin__, self.__metadata__))
+
+
+class Annotated:
+ """Add context specific metadata to a type.
+
+ Example: Annotated[int, runtime_check.Unsigned] indicates to the
+ hypothetical runtime_check module that this type is an unsigned int.
+ Every other consumer of this type can ignore this metadata and treat
+ this type as int.
+
+ The first argument to Annotated must be a valid type.
+
+ Details:
+
+ - It's an error to call `Annotated` with less than two arguments.
+ - Nested Annotated are flattened::
+
+ Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
+
+ - Instantiating an annotated type is equivalent to instantiating the
+ underlying type::
+
+ Annotated[C, Ann1](5) == C(5)
+
+ - Annotated can be used as a generic type alias::
+
+ Optimized = Annotated[T, runtime.Optimize()]
+ Optimized[int] == Annotated[int, runtime.Optimize()]
+
+ OptimizedList = Annotated[List[T], runtime.Optimize()]
+ OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwargs):
+ raise TypeError("Type Annotated cannot be instantiated.")
+
+ @_tp_cache
+ def __class_getitem__(cls, params):
+ if not isinstance(params, tuple) or len(params) < 2:
+ raise TypeError("Annotated[...] should be used "
+ "with at least two arguments (a type and an "
+ "annotation).")
+ msg = "Annotated[t, ...]: t must be a type."
+ origin = _type_check(params[0], msg)
+ metadata = tuple(params[1:])
+ return _AnnotatedAlias(origin, metadata)
+
+ def __init_subclass__(cls, *args, **kwargs):
+ raise TypeError(
+ "Cannot subclass {}.Annotated".format(cls.__module__)
+ )
+
+
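A short usage sketch of the behaviour the docstring promises, i.e. flattening of nested Annotated and use as a generic alias (the metadata strings here are purely illustrative):

    >>> from typing import Annotated, List, TypeVar
    >>> T = TypeVar('T')
    >>> Annotated[Annotated[int, 'a'], 'b']
    typing.Annotated[int, 'a', 'b']
    >>> Optimized = Annotated[List[T], 'hint']
    >>> Optimized[str]
    typing.Annotated[typing.List[str], 'hint']
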
def runtime_checkable(cls):
"""Mark a protocol class as a runtime protocol.
@@ -1182,12 +1303,13 @@ _allowed_types = (types.FunctionType, types.BuiltinFunctionType,
WrapperDescriptorType, MethodWrapperType, MethodDescriptorType)
-def get_type_hints(obj, globalns=None, localns=None):
+def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
"""Return type hints for an object.
This is often the same as obj.__annotations__, but it handles
- forward references encoded as string literals, and if necessary
- adds Optional[t] if a default value equal to None is set.
+ forward references encoded as string literals, adds Optional[t] if a
+ default value equal to None is set, and recursively replaces all
+ 'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
The argument may be a module, class, method, or function. The annotations
are returned as a dictionary. For classes, annotations include also
@@ -1231,7 +1353,7 @@ def get_type_hints(obj, globalns=None, localns=None):
value = ForwardRef(value, is_argument=False)
value = _eval_type(value, base_globals, localns)
hints[name] = value
- return hints
+ return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
if globalns is None:
if isinstance(obj, types.ModuleType):
@@ -1265,14 +1387,32 @@ def get_type_hints(obj, globalns=None, localns=None):
if name in defaults and defaults[name] is None:
value = Optional[value]
hints[name] = value
- return hints
+ return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
+
+
+def _strip_annotations(t):
+ """Strips the annotations from a given type.
+ """
+ if isinstance(t, _AnnotatedAlias):
+ return _strip_annotations(t.__origin__)
+ if isinstance(t, _GenericAlias):
+ stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
+ if stripped_args == t.__args__:
+ return t
+ return t.copy_with(stripped_args)
+ if isinstance(t, GenericAlias):
+ stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
+ if stripped_args == t.__args__:
+ return t
+ return GenericAlias(t.__origin__, stripped_args)
+ return t
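How include_extras and _strip_annotations interact, sketched with a hypothetical function f:

    >>> from typing import Annotated, get_type_hints
    >>> def f(x: Annotated[int, 'metres']) -> None:
    ...     pass
    ...
    >>> get_type_hints(f)
    {'x': <class 'int'>, 'return': <class 'NoneType'>}
    >>> get_type_hints(f, include_extras=True)
    {'x': typing.Annotated[int, 'metres'], 'return': <class 'NoneType'>}
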
def get_origin(tp):
"""Get the unsubscripted version of a type.
- This supports generic types, Callable, Tuple, Union, Literal, Final and ClassVar.
- Return None for unsupported types. Examples::
+ This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
+ and Annotated. Return None for unsupported types. Examples::
get_origin(Literal[42]) is Literal
get_origin(int) is None
@@ -1282,7 +1422,9 @@ def get_origin(tp):
get_origin(Union[T, int]) is Union
get_origin(List[Tuple[T, T]][int]) == list
"""
- if isinstance(tp, _GenericAlias):
+ if isinstance(tp, _AnnotatedAlias):
+ return Annotated
+ if isinstance(tp, (_BaseGenericAlias, GenericAlias)):
return tp.__origin__
if tp is Generic:
return Generic
@@ -1300,11 +1442,15 @@ def get_args(tp):
get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
get_args(Callable[[], T][int]) == ([], int)
"""
- if isinstance(tp, _GenericAlias) and not tp._special:
+ if isinstance(tp, _AnnotatedAlias):
+ return (tp.__origin__,) + tp.__metadata__
+ if isinstance(tp, _GenericAlias):
res = tp.__args__
- if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
+ if tp.__origin__ is collections.abc.Callable and res[0] is not Ellipsis:
res = (list(res[:-1]), res[-1])
return res
+ if isinstance(tp, GenericAlias):
+ return tp.__args__
return ()
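Expected behaviour of the two helpers with the new alias classes and with plain types.GenericAlias objects from PEP 585 (a sketch):

    >>> from typing import Annotated, Callable, get_origin, get_args
    >>> get_origin(Annotated[int, 'meta']) is Annotated
    True
    >>> get_args(Annotated[int, 'meta'])
    (<class 'int'>, 'meta')
    >>> get_args(Callable[[int], str])
    ([<class 'int'>], <class 'str'>)
    >>> get_origin(list[int]), get_args(list[int])
    (<class 'list'>, (<class 'int'>,))
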
@@ -1432,21 +1578,20 @@ AnyStr = TypeVar('AnyStr', bytes, str)
# Various ABCs mimicking those in collections.abc.
-def _alias(origin, params, inst=True):
- return _GenericAlias(origin, params, special=True, inst=inst)
-
-Hashable = _alias(collections.abc.Hashable, ()) # Not generic.
-Awaitable = _alias(collections.abc.Awaitable, T_co)
-Coroutine = _alias(collections.abc.Coroutine, (T_co, T_contra, V_co))
-AsyncIterable = _alias(collections.abc.AsyncIterable, T_co)
-AsyncIterator = _alias(collections.abc.AsyncIterator, T_co)
-Iterable = _alias(collections.abc.Iterable, T_co)
-Iterator = _alias(collections.abc.Iterator, T_co)
-Reversible = _alias(collections.abc.Reversible, T_co)
-Sized = _alias(collections.abc.Sized, ()) # Not generic.
-Container = _alias(collections.abc.Container, T_co)
-Collection = _alias(collections.abc.Collection, T_co)
-Callable = _VariadicGenericAlias(collections.abc.Callable, (), special=True)
+_alias = _SpecialGenericAlias
+
+Hashable = _alias(collections.abc.Hashable, 0) # Not generic.
+Awaitable = _alias(collections.abc.Awaitable, 1)
+Coroutine = _alias(collections.abc.Coroutine, 3)
+AsyncIterable = _alias(collections.abc.AsyncIterable, 1)
+AsyncIterator = _alias(collections.abc.AsyncIterator, 1)
+Iterable = _alias(collections.abc.Iterable, 1)
+Iterator = _alias(collections.abc.Iterator, 1)
+Reversible = _alias(collections.abc.Reversible, 1)
+Sized = _alias(collections.abc.Sized, 0) # Not generic.
+Container = _alias(collections.abc.Container, 1)
+Collection = _alias(collections.abc.Collection, 1)
+Callable = _CallableType(collections.abc.Callable, 2)
Callable.__doc__ = \
"""Callable type; Callable[[int], str] is a function of (int) -> str.
@@ -1457,15 +1602,16 @@ Callable.__doc__ = \
There is no syntax to indicate optional or keyword arguments;
such function types are rarely used as callback types.
"""
-AbstractSet = _alias(collections.abc.Set, T_co)
-MutableSet = _alias(collections.abc.MutableSet, T)
+AbstractSet = _alias(collections.abc.Set, 1, name='AbstractSet')
+MutableSet = _alias(collections.abc.MutableSet, 1)
# NOTE: Mapping is only covariant in the value type.
-Mapping = _alias(collections.abc.Mapping, (KT, VT_co))
-MutableMapping = _alias(collections.abc.MutableMapping, (KT, VT))
-Sequence = _alias(collections.abc.Sequence, T_co)
-MutableSequence = _alias(collections.abc.MutableSequence, T)
-ByteString = _alias(collections.abc.ByteString, ()) # Not generic
-Tuple = _VariadicGenericAlias(tuple, (), inst=False, special=True)
+Mapping = _alias(collections.abc.Mapping, 2)
+MutableMapping = _alias(collections.abc.MutableMapping, 2)
+Sequence = _alias(collections.abc.Sequence, 1)
+MutableSequence = _alias(collections.abc.MutableSequence, 1)
+ByteString = _alias(collections.abc.ByteString, 0) # Not generic
+# Tuple accepts a variable number of parameters.
+Tuple = _TupleType(tuple, -1, inst=False, name='Tuple')
Tuple.__doc__ = \
"""Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
@@ -1475,24 +1621,24 @@ Tuple.__doc__ = \
To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
"""
-List = _alias(list, T, inst=False)
-Deque = _alias(collections.deque, T)
-Set = _alias(set, T, inst=False)
-FrozenSet = _alias(frozenset, T_co, inst=False)
-MappingView = _alias(collections.abc.MappingView, T_co)
-KeysView = _alias(collections.abc.KeysView, KT)
-ItemsView = _alias(collections.abc.ItemsView, (KT, VT_co))
-ValuesView = _alias(collections.abc.ValuesView, VT_co)
-ContextManager = _alias(contextlib.AbstractContextManager, T_co)
-AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, T_co)
-Dict = _alias(dict, (KT, VT), inst=False)
-DefaultDict = _alias(collections.defaultdict, (KT, VT))
-OrderedDict = _alias(collections.OrderedDict, (KT, VT))
-Counter = _alias(collections.Counter, T)
-ChainMap = _alias(collections.ChainMap, (KT, VT))
-Generator = _alias(collections.abc.Generator, (T_co, T_contra, V_co))
-AsyncGenerator = _alias(collections.abc.AsyncGenerator, (T_co, T_contra))
-Type = _alias(type, CT_co, inst=False)
+List = _alias(list, 1, inst=False, name='List')
+Deque = _alias(collections.deque, 1, name='Deque')
+Set = _alias(set, 1, inst=False, name='Set')
+FrozenSet = _alias(frozenset, 1, inst=False, name='FrozenSet')
+MappingView = _alias(collections.abc.MappingView, 1)
+KeysView = _alias(collections.abc.KeysView, 1)
+ItemsView = _alias(collections.abc.ItemsView, 2)
+ValuesView = _alias(collections.abc.ValuesView, 1)
+ContextManager = _alias(contextlib.AbstractContextManager, 1, name='ContextManager')
+AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, 1, name='AsyncContextManager')
+Dict = _alias(dict, 2, inst=False, name='Dict')
+DefaultDict = _alias(collections.defaultdict, 2, name='DefaultDict')
+OrderedDict = _alias(collections.OrderedDict, 2)
+Counter = _alias(collections.Counter, 1)
+ChainMap = _alias(collections.ChainMap, 2)
+Generator = _alias(collections.abc.Generator, 3)
+AsyncGenerator = _alias(collections.abc.AsyncGenerator, 2)
+Type = _alias(type, 1, inst=False, name='Type')
Type.__doc__ = \
"""A special construct usable to annotate class objects.
@@ -1588,50 +1734,41 @@ class SupportsRound(Protocol[T_co]):
pass
-def _make_nmtuple(name, types):
- msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
- types = [(n, _type_check(t, msg)) for n, t in types]
- nm_tpl = collections.namedtuple(name, [n for n, t in types])
- # Prior to PEP 526, only _field_types attribute was assigned.
- # Now __annotations__ are used and _field_types is deprecated (remove in 3.9)
- nm_tpl.__annotations__ = nm_tpl._field_types = dict(types)
- try:
- nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__')
- except (AttributeError, ValueError):
- pass
+def _make_nmtuple(name, types, module, defaults = ()):
+ fields = [n for n, t in types]
+ types = {n: _type_check(t, f"field {n} annotation must be a type")
+ for n, t in types}
+ nm_tpl = collections.namedtuple(name, fields,
+ defaults=defaults, module=module)
+ nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = types
return nm_tpl
# attributes prohibited to set in NamedTuple class syntax
-_prohibited = ('__new__', '__init__', '__slots__', '__getnewargs__',
- '_fields', '_field_defaults', '_field_types',
- '_make', '_replace', '_asdict', '_source')
+_prohibited = frozenset({'__new__', '__init__', '__slots__', '__getnewargs__',
+ '_fields', '_field_defaults',
+ '_make', '_replace', '_asdict', '_source'})
-_special = ('__module__', '__name__', '__annotations__')
+_special = frozenset({'__module__', '__name__', '__annotations__'})
class NamedTupleMeta(type):
def __new__(cls, typename, bases, ns):
- if ns.get('_root', False):
- return super().__new__(cls, typename, bases, ns)
+ assert bases[0] is _NamedTuple
types = ns.get('__annotations__', {})
- nm_tpl = _make_nmtuple(typename, types.items())
- defaults = []
- defaults_dict = {}
+ default_names = []
for field_name in types:
if field_name in ns:
- default_value = ns[field_name]
- defaults.append(default_value)
- defaults_dict[field_name] = default_value
- elif defaults:
- raise TypeError("Non-default namedtuple field {field_name} cannot "
- "follow default field(s) {default_names}"
- .format(field_name=field_name,
- default_names=', '.join(defaults_dict.keys())))
- nm_tpl.__new__.__annotations__ = dict(types)
- nm_tpl.__new__.__defaults__ = tuple(defaults)
- nm_tpl._field_defaults = defaults_dict
+ default_names.append(field_name)
+ elif default_names:
+ raise TypeError(f"Non-default namedtuple field {field_name} "
+ f"cannot follow default field"
+ f"{'s' if len(default_names) > 1 else ''} "
+ f"{', '.join(default_names)}")
+ nm_tpl = _make_nmtuple(typename, types.items(),
+ defaults=[ns[n] for n in default_names],
+ module=ns['__module__'])
# update from user namespace without overriding special namedtuple attributes
for key in ns:
if key in _prohibited:
@@ -1641,7 +1778,7 @@ class NamedTupleMeta(type):
return nm_tpl
-class NamedTuple(metaclass=NamedTupleMeta):
+def NamedTuple(typename, fields=None, /, **kwargs):
"""Typed version of namedtuple.
Usage in Python versions >= 3.6::
@@ -1665,99 +1802,81 @@ class NamedTuple(metaclass=NamedTupleMeta):
Employee = NamedTuple('Employee', [('name', str), ('id', int)])
"""
- _root = True
-
- def __new__(*args, **kwargs):
- if not args:
- raise TypeError('NamedTuple.__new__(): not enough arguments')
- cls, *args = args # allow the "cls" keyword be passed
- if args:
- typename, *args = args # allow the "typename" keyword be passed
- elif 'typename' in kwargs:
- typename = kwargs.pop('typename')
- import warnings
- warnings.warn("Passing 'typename' as keyword argument is deprecated",
- DeprecationWarning, stacklevel=2)
- else:
- raise TypeError("NamedTuple.__new__() missing 1 required positional "
- "argument: 'typename'")
- if args:
- try:
- fields, = args # allow the "fields" keyword be passed
- except ValueError:
- raise TypeError(f'NamedTuple.__new__() takes from 2 to 3 '
- f'positional arguments but {len(args) + 2} '
- f'were given') from None
- elif 'fields' in kwargs and len(kwargs) == 1:
- fields = kwargs.pop('fields')
- import warnings
- warnings.warn("Passing 'fields' as keyword argument is deprecated",
- DeprecationWarning, stacklevel=2)
- else:
- fields = None
-
- if fields is None:
- fields = kwargs.items()
- elif kwargs:
- raise TypeError("Either list of fields or keywords"
- " can be provided to NamedTuple, not both")
- return _make_nmtuple(typename, fields)
- __new__.__text_signature__ = '($cls, typename, fields=None, /, **kwargs)'
-
-
-def _dict_new(cls, /, *args, **kwargs):
- return dict(*args, **kwargs)
-
-
-def _typeddict_new(cls, typename, fields=None, /, *, total=True, **kwargs):
if fields is None:
- fields = kwargs
+ fields = kwargs.items()
elif kwargs:
- raise TypeError("TypedDict takes either a dict or keyword arguments,"
- " but not both")
-
- ns = {'__annotations__': dict(fields), '__total__': total}
+ raise TypeError("Either list of fields or keywords"
+ " can be provided to NamedTuple, not both")
try:
- # Setting correct module is necessary to make typed dict classes pickleable.
- ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
+ module = sys._getframe(1).f_globals.get('__name__', '__main__')
except (AttributeError, ValueError):
- pass
+ module = None
+ return _make_nmtuple(typename, fields, module=module)
- return _TypedDictMeta(typename, (), ns)
+_NamedTuple = type.__new__(NamedTupleMeta, 'NamedTuple', (), {})
+def _namedtuple_mro_entries(bases):
+ if len(bases) > 1:
+ raise TypeError("Multiple inheritance with NamedTuple is not supported")
+ assert bases[0] is NamedTuple
+ return (_NamedTuple,)
-def _check_fails(cls, other):
- # Typed dicts are only for static structural subtyping.
- raise TypeError('TypedDict does not support instance and class checks')
+NamedTuple.__mro_entries__ = _namedtuple_mro_entries
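Both spellings keep going through _make_nmtuple after the rewrite; a small sketch with illustrative names:

    >>> from typing import NamedTuple
    >>> class Employee(NamedTuple):
    ...     name: str
    ...     id: int = 3
    ...
    >>> Employee('guido')
    Employee(name='guido', id=3)
    >>> Employee2 = NamedTuple('Employee2', [('name', str), ('id', int)])
    >>> Employee2('guido', 2)._asdict()
    {'name': 'guido', 'id': 2}
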
class _TypedDictMeta(type):
def __new__(cls, name, bases, ns, total=True):
"""Create new typed dict class object.
- This method is called directly when TypedDict is subclassed,
- or via _typeddict_new when TypedDict is instantiated. This way
+ This method is called when TypedDict is subclassed,
+ or when TypedDict is instantiated. This way
TypedDict supports all three syntax forms described in its docstring.
- Subclasses and instances of TypedDict return actual dictionaries
- via _dict_new.
+ Subclasses and instances of TypedDict return actual dictionaries.
"""
- ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new
- tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns)
-
- anns = ns.get('__annotations__', {})
+ for base in bases:
+ if type(base) is not _TypedDictMeta:
+ raise TypeError('cannot inherit from both a TypedDict type '
+ 'and a non-TypedDict base class')
+ tp_dict = type.__new__(_TypedDictMeta, name, (dict,), ns)
+
+ annotations = {}
+ own_annotations = ns.get('__annotations__', {})
+ own_annotation_keys = set(own_annotations.keys())
msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
- anns = {n: _type_check(tp, msg) for n, tp in anns.items()}
+ own_annotations = {
+ n: _type_check(tp, msg) for n, tp in own_annotations.items()
+ }
+ required_keys = set()
+ optional_keys = set()
+
for base in bases:
- anns.update(base.__dict__.get('__annotations__', {}))
- tp_dict.__annotations__ = anns
+ annotations.update(base.__dict__.get('__annotations__', {}))
+ required_keys.update(base.__dict__.get('__required_keys__', ()))
+ optional_keys.update(base.__dict__.get('__optional_keys__', ()))
+
+ annotations.update(own_annotations)
+ if total:
+ required_keys.update(own_annotation_keys)
+ else:
+ optional_keys.update(own_annotation_keys)
+
+ tp_dict.__annotations__ = annotations
+ tp_dict.__required_keys__ = frozenset(required_keys)
+ tp_dict.__optional_keys__ = frozenset(optional_keys)
if not hasattr(tp_dict, '__total__'):
tp_dict.__total__ = total
return tp_dict
- __instancecheck__ = __subclasscheck__ = _check_fails
+ __call__ = dict # static method
+ def __subclasscheck__(cls, other):
+ # Typed dicts are only for static structural subtyping.
+ raise TypeError('TypedDict does not support instance and class checks')
-class TypedDict(dict, metaclass=_TypedDictMeta):
+ __instancecheck__ = __subclasscheck__
+
+
+def TypedDict(typename, fields=None, /, *, total=True, **kwargs):
"""A simple typed namespace. At runtime it is equivalent to a plain dict.
TypedDict creates a dictionary type that expects all of its
@@ -1776,8 +1895,9 @@ class TypedDict(dict, metaclass=_TypedDictMeta):
assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
- The type info can be accessed via Point2D.__annotations__. TypedDict
- supports two additional equivalent forms::
+ The type info can be accessed via the Point2D.__annotations__ dict, and
+ the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
+ TypedDict supports two additional equivalent forms::
Point2D = TypedDict('Point2D', x=int, y=int, label=str)
Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
@@ -1798,6 +1918,23 @@ class TypedDict(dict, metaclass=_TypedDictMeta):
The class syntax is only supported in Python 3.6+, while two other
syntax forms work for Python 2.7 and 3.2+
"""
+ if fields is None:
+ fields = kwargs
+ elif kwargs:
+ raise TypeError("TypedDict takes either a dict or keyword arguments,"
+ " but not both")
+
+ ns = {'__annotations__': dict(fields), '__total__': total}
+ try:
+ # Setting correct module is necessary to make typed dict classes pickleable.
+ ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
+ except (AttributeError, ValueError):
+ pass
+
+ return _TypedDictMeta(typename, (), ns)
+
+_TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
+TypedDict.__mro_entries__ = lambda bases: (_TypedDict,)
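A sketch of the new introspection attributes and of how totality is tracked per class (class names are illustrative):

    >>> from typing import TypedDict
    >>> class Point2D(TypedDict, total=False):
    ...     x: int
    ...     y: int
    ...
    >>> class LabeledPoint2D(Point2D):
    ...     label: str
    ...
    >>> sorted(LabeledPoint2D.__required_keys__)
    ['label']
    >>> sorted(LabeledPoint2D.__optional_keys__)
    ['x', 'y']
    >>> LabeledPoint2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
    True
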
def NewType(name, tp):
@@ -1995,8 +2132,8 @@ class io:
io.__name__ = __name__ + '.io'
sys.modules[io.__name__] = io
-Pattern = _alias(stdlib_re.Pattern, AnyStr)
-Match = _alias(stdlib_re.Match, AnyStr)
+Pattern = _alias(stdlib_re.Pattern, 1)
+Match = _alias(stdlib_re.Match, 1)
class re:
"""Wrapper namespace for re type aliases."""
diff --git a/Lib/unittest/__init__.py b/Lib/unittest/__init__.py
index ace3a6fb..348dc471 100644
--- a/Lib/unittest/__init__.py
+++ b/Lib/unittest/__init__.py
@@ -57,7 +57,6 @@ __all__.extend(['getTestCaseNames', 'makeSuite', 'findTestCases'])
__unittest = True
from .result import TestResult
-from .async_case import IsolatedAsyncioTestCase
from .case import (addModuleCleanup, TestCase, FunctionTestCase, SkipTest, skip,
skipIf, skipUnless, expectedFailure)
from .suite import BaseTestSuite, TestSuite
@@ -66,6 +65,7 @@ from .loader import (TestLoader, defaultTestLoader, makeSuite, getTestCaseNames,
from .main import TestProgram, main
from .runner import TextTestRunner, TextTestResult
from .signals import installHandler, registerResult, removeResult, removeHandler
+# IsolatedAsyncioTestCase will be imported lazily.
# deprecated
_TextTestResult = TextTestResult
@@ -78,3 +78,18 @@ def load_tests(loader, tests, pattern):
# top level directory cached on loader instance
this_dir = os.path.dirname(__file__)
return loader.discover(start_dir=this_dir, pattern=pattern)
+
+
+# Lazy import of IsolatedAsyncioTestCase from .async_case
+# It imports asyncio, which is relatively heavy, but most tests
+# do not need it.
+
+def __dir__():
+ return globals().keys() | {'IsolatedAsyncioTestCase'}
+
+def __getattr__(name):
+ if name == 'IsolatedAsyncioTestCase':
+ global IsolatedAsyncioTestCase
+ from .async_case import IsolatedAsyncioTestCase
+ return IsolatedAsyncioTestCase
+ raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
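The module-level __getattr__/__dir__ pair (PEP 562) means asyncio is only pulled in when the attribute is first touched; a sketch, assuming a fresh interpreter where nothing else has imported asyncio:

    >>> import sys, unittest
    >>> 'asyncio' in sys.modules
    False
    >>> unittest.IsolatedAsyncioTestCase
    <class 'unittest.async_case.IsolatedAsyncioTestCase'>
    >>> 'asyncio' in sys.modules
    True
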
diff --git a/Lib/unittest/_log.py b/Lib/unittest/_log.py
new file mode 100644
index 00000000..94e7e758
--- /dev/null
+++ b/Lib/unittest/_log.py
@@ -0,0 +1,69 @@
+import logging
+import collections
+
+from .case import _BaseTestCaseContext
+
+
+_LoggingWatcher = collections.namedtuple("_LoggingWatcher",
+ ["records", "output"])
+
+class _CapturingHandler(logging.Handler):
+ """
+ A logging handler capturing all (raw and formatted) logging output.
+ """
+
+ def __init__(self):
+ logging.Handler.__init__(self)
+ self.watcher = _LoggingWatcher([], [])
+
+ def flush(self):
+ pass
+
+ def emit(self, record):
+ self.watcher.records.append(record)
+ msg = self.format(record)
+ self.watcher.output.append(msg)
+
+
+class _AssertLogsContext(_BaseTestCaseContext):
+ """A context manager used to implement TestCase.assertLogs()."""
+
+ LOGGING_FORMAT = "%(levelname)s:%(name)s:%(message)s"
+
+ def __init__(self, test_case, logger_name, level):
+ _BaseTestCaseContext.__init__(self, test_case)
+ self.logger_name = logger_name
+ if level:
+ self.level = logging._nameToLevel.get(level, level)
+ else:
+ self.level = logging.INFO
+ self.msg = None
+
+ def __enter__(self):
+ if isinstance(self.logger_name, logging.Logger):
+ logger = self.logger = self.logger_name
+ else:
+ logger = self.logger = logging.getLogger(self.logger_name)
+ formatter = logging.Formatter(self.LOGGING_FORMAT)
+ handler = _CapturingHandler()
+ handler.setFormatter(formatter)
+ self.watcher = handler.watcher
+ self.old_handlers = logger.handlers[:]
+ self.old_level = logger.level
+ self.old_propagate = logger.propagate
+ logger.handlers = [handler]
+ logger.setLevel(self.level)
+ logger.propagate = False
+ return handler.watcher
+
+ def __exit__(self, exc_type, exc_value, tb):
+ self.logger.handlers = self.old_handlers
+ self.logger.propagate = self.old_propagate
+ self.logger.setLevel(self.old_level)
+ if exc_type is not None:
+ # let unexpected exceptions pass through
+ return False
+ if len(self.watcher.records) == 0:
+ self._raiseFailure(
+ "no logs of level {} or higher triggered on {}"
+ .format(logging.getLevelName(self.level), self.logger.name))
diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py
index 3223c0bf..f8bc865e 100644
--- a/Lib/unittest/case.py
+++ b/Lib/unittest/case.py
@@ -3,7 +3,6 @@
import sys
import functools
import difflib
-import logging
import pprint
import re
import warnings
@@ -241,6 +240,8 @@ class _AssertRaisesContext(_AssertRaisesBaseContext):
expected_regex.pattern, str(exc_value)))
return True
+ __class_getitem__ = classmethod(types.GenericAlias)
+
class _AssertWarnsContext(_AssertRaisesBaseContext):
"""A context manager used to implement TestCase.assertWarns* methods."""
@@ -251,7 +252,7 @@ class _AssertWarnsContext(_AssertRaisesBaseContext):
def __enter__(self):
# The __warningregistry__'s need to be in a pristine state for tests
# to work properly.
- for v in list(sys.modules.values()):
+ for v in sys.modules.values():
if getattr(v, '__warningregistry__', None):
v.__warningregistry__ = {}
self.warnings_manager = warnings.catch_warnings(record=True)
@@ -295,73 +296,6 @@ class _AssertWarnsContext(_AssertRaisesBaseContext):
-_LoggingWatcher = collections.namedtuple("_LoggingWatcher",
- ["records", "output"])
-
-
-class _CapturingHandler(logging.Handler):
- """
- A logging handler capturing all (raw and formatted) logging output.
- """
-
- def __init__(self):
- logging.Handler.__init__(self)
- self.watcher = _LoggingWatcher([], [])
-
- def flush(self):
- pass
-
- def emit(self, record):
- self.watcher.records.append(record)
- msg = self.format(record)
- self.watcher.output.append(msg)
-
-
-
-class _AssertLogsContext(_BaseTestCaseContext):
- """A context manager used to implement TestCase.assertLogs()."""
-
- LOGGING_FORMAT = "%(levelname)s:%(name)s:%(message)s"
-
- def __init__(self, test_case, logger_name, level):
- _BaseTestCaseContext.__init__(self, test_case)
- self.logger_name = logger_name
- if level:
- self.level = logging._nameToLevel.get(level, level)
- else:
- self.level = logging.INFO
- self.msg = None
-
- def __enter__(self):
- if isinstance(self.logger_name, logging.Logger):
- logger = self.logger = self.logger_name
- else:
- logger = self.logger = logging.getLogger(self.logger_name)
- formatter = logging.Formatter(self.LOGGING_FORMAT)
- handler = _CapturingHandler()
- handler.setFormatter(formatter)
- self.watcher = handler.watcher
- self.old_handlers = logger.handlers[:]
- self.old_level = logger.level
- self.old_propagate = logger.propagate
- logger.handlers = [handler]
- logger.setLevel(self.level)
- logger.propagate = False
- return handler.watcher
-
- def __exit__(self, exc_type, exc_value, tb):
- self.logger.handlers = self.old_handlers
- self.logger.propagate = self.old_propagate
- self.logger.setLevel(self.old_level)
- if exc_type is not None:
- # let unexpected exceptions pass through
- return False
- if len(self.watcher.records) == 0:
- self._raiseFailure(
- "no logs of level {} or higher triggered on {}"
- .format(logging.getLevelName(self.level), self.logger.name))
-
-
class _OrderedChainMap(collections.ChainMap):
def __iter__(self):
seen = set()
@@ -468,30 +402,13 @@ class TestCase(object):
"""
self._type_equality_funcs[typeobj] = function
- def addCleanup(*args, **kwargs):
+ def addCleanup(self, function, /, *args, **kwargs):
"""Add a function, with arguments, to be called when the test is
completed. Functions added are called on a LIFO basis and are
called after tearDown on test failure or success.
Cleanup items are called even if setUp fails (unlike tearDown)."""
- if len(args) >= 2:
- self, function, *args = args
- elif not args:
- raise TypeError("descriptor 'addCleanup' of 'TestCase' object "
- "needs an argument")
- elif 'function' in kwargs:
- function = kwargs.pop('function')
- self, *args = args
- import warnings
- warnings.warn("Passing 'function' as keyword argument is deprecated",
- DeprecationWarning, stacklevel=2)
- else:
- raise TypeError('addCleanup expected at least 1 positional '
- 'argument, got %d' % (len(args)-1))
- args = tuple(args)
-
self._cleanups.append((function, args, kwargs))
- addCleanup.__text_signature__ = '($self, function, /, *args, **kwargs)'
@classmethod
def addClassCleanup(cls, function, /, *args, **kwargs):
@@ -729,7 +646,7 @@ class TestCase(object):
function, args, kwargs = cls._class_cleanups.pop()
try:
function(*args, **kwargs)
- except Exception as exc:
+ except Exception:
cls.tearDown_exceptions.append(sys.exc_info())
def __call__(self, *args, **kwds):
@@ -869,6 +786,8 @@ class TestCase(object):
self.assertEqual(cm.output, ['INFO:foo:first message',
'ERROR:foo.bar:second message'])
"""
+ # Lazy import to avoid importing logging if it is not needed.
+ from ._log import _AssertLogsContext
return _AssertLogsContext(self, logger, level)
def _getAssertEqualityFunc(self, first, second):
diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py
index 3629cf61..b495a5f6 100644
--- a/Lib/unittest/mock.py
+++ b/Lib/unittest/mock.py
@@ -23,8 +23,6 @@ __all__ = (
)
-__version__ = '1.0'
-
import asyncio
import contextlib
import io
@@ -32,6 +30,7 @@ import inspect
import pprint
import sys
import builtins
+from asyncio import iscoroutinefunction
from types import CodeType, ModuleType, MethodType
from unittest.util import safe_repr
from functools import wraps, partial
@@ -50,12 +49,12 @@ def _is_async_obj(obj):
return False
if hasattr(obj, '__func__'):
obj = getattr(obj, '__func__')
- return asyncio.iscoroutinefunction(obj) or inspect.isawaitable(obj)
+ return iscoroutinefunction(obj) or inspect.isawaitable(obj)
def _is_async_func(func):
if getattr(func, '__code__', None):
- return asyncio.iscoroutinefunction(func)
+ return iscoroutinefunction(func)
else:
return False
@@ -403,18 +402,12 @@ class NonCallableMock(Base):
# so we can create magic methods on the
# class without stomping on other mocks
bases = (cls,)
- if not issubclass(cls, AsyncMock):
+ if not issubclass(cls, AsyncMockMixin):
# Check if spec is an async object or function
- sig = inspect.signature(NonCallableMock.__init__)
- bound_args = sig.bind_partial(cls, *args, **kw).arguments
- spec_arg = [
- arg for arg in bound_args.keys()
- if arg.startswith('spec')
- ]
- if spec_arg:
- # what if spec_set is different than spec?
- if _is_async_obj(bound_args[spec_arg[0]]):
- bases = (AsyncMockMixin, cls,)
+ bound_args = _MOCK_SIG.bind_partial(cls, *args, **kw).arguments
+ spec_arg = bound_args.get('spec_set', bound_args.get('spec'))
+ if spec_arg and _is_async_obj(spec_arg):
+ bases = (AsyncMockMixin, cls)
new = type(cls.__name__, bases, {'__doc__': cls.__doc__})
instance = _safe_super(NonCallableMock, cls).__new__(new)
return instance
@@ -496,7 +489,7 @@ class NonCallableMock(Base):
_spec_asyncs = []
for attr in dir(spec):
- if asyncio.iscoroutinefunction(getattr(spec, attr, None)):
+ if iscoroutinefunction(getattr(spec, attr, None)):
_spec_asyncs.append(attr)
if spec is not None and not _is_list(spec):
@@ -600,7 +593,7 @@ class NonCallableMock(Base):
for child in self._mock_children.values():
if isinstance(child, _SpecState) or child is _deleted:
continue
- child.reset_mock(visited)
+ child.reset_mock(visited, return_value=return_value, side_effect=side_effect)
ret = self._mock_return_value
if _is_instance_mock(ret) and ret is not self:
@@ -857,7 +850,8 @@ class NonCallableMock(Base):
else:
name, args, kwargs = _call
try:
- return name, sig.bind(*args, **kwargs)
+ bound_call = sig.bind(*args, **kwargs)
+ return call(name, bound_call.args, bound_call.kwargs)
except TypeError as e:
return e.with_traceback(None)
else:
@@ -906,9 +900,9 @@ class NonCallableMock(Base):
def _error_message():
msg = self._format_mock_failure_message(args, kwargs)
return msg
- expected = self._call_matcher((args, kwargs))
+ expected = self._call_matcher(_Call((args, kwargs), two=True))
actual = self._call_matcher(self.call_args)
- if expected != actual:
+ if actual != expected:
cause = expected if isinstance(expected, Exception) else None
raise AssertionError(_error_message()) from cause
@@ -976,10 +970,10 @@ class NonCallableMock(Base):
The assert passes if the mock has *ever* been called, unlike
`assert_called_with` and `assert_called_once_with` that only pass if
the call is the most recent one."""
- expected = self._call_matcher((args, kwargs))
+ expected = self._call_matcher(_Call((args, kwargs), two=True))
+ cause = expected if isinstance(expected, Exception) else None
actual = [self._call_matcher(c) for c in self.call_args_list]
- if expected not in actual:
- cause = expected if isinstance(expected, Exception) else None
+ if cause or expected not in _AnyComparer(actual):
expected_string = self._format_mock_call_signature(args, kwargs)
raise AssertionError(
'%s call not found' % expected_string
@@ -1038,6 +1032,24 @@ class NonCallableMock(Base):
return f"\n{prefix}: {safe_repr(self.mock_calls)}."
+_MOCK_SIG = inspect.signature(NonCallableMock.__init__)
+
+
+class _AnyComparer(list):
+ """A list which checks if it contains a call which may have an
+ argument of ANY, flipping the components of item and self from
+ their traditional locations so that ANY is guaranteed to be on
+ the left."""
+ def __contains__(self, item):
+ for _call in self:
+ assert len(item) == len(_call)
+ if all([
+ expected == actual
+ for expected, actual in zip(item, _call)
+ ]):
+ return True
+ return False
+
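Since _Call.__eq__ no longer special-cases ANY (see the change to _Call further down in this diff), assert_any_call depends on _AnyComparer keeping the expected call, and therefore ANY, on the left-hand side of each comparison; a small sketch:

    from unittest.mock import Mock, ANY

    m = Mock()
    m('x', 2)
    # The expected call carries ANY; _AnyComparer compares it component-wise
    # against each recorded call with the expected side on the left, so
    # ANY.__eq__ drives the comparison.
    m.assert_any_call(ANY, 2)   # passes
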
def _try_iter(obj):
if obj is None:
@@ -1696,7 +1708,8 @@ def patch(
"as"; very useful if `patch` is creating a mock object for you.
`patch` takes arbitrary keyword arguments. These will be passed to
- the `Mock` (or `new_callable`) on construction.
+ `AsyncMock` if the patched object is asynchronous, to `MagicMock`
+ otherwise or to `new_callable` if specified.
`patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are
available for alternate use-cases.
@@ -1820,11 +1833,27 @@ class _patch_dict(object):
def __exit__(self, *args):
"""Unpatch the dict."""
- self._unpatch_dict()
+ if self._original is not None:
+ self._unpatch_dict()
return False
- start = __enter__
- stop = __exit__
+
+ def start(self):
+ """Activate a patch, returning any created mock."""
+ result = self.__enter__()
+ _patch._active_patches.append(self)
+ return result
+
+
+ def stop(self):
+ """Stop an active patch."""
+ try:
+ _patch._active_patches.remove(self)
+ except ValueError:
+ # If the patch hasn't been started this will fail
+ return None
+
+ return self.__exit__(None, None, None)
def _clear_dict(in_dict):
@@ -2096,7 +2125,7 @@ class AsyncMockMixin(Base):
def __init__(self, /, *args, **kwargs):
super().__init__(*args, **kwargs)
- # asyncio.iscoroutinefunction() checks _is_coroutine property to say if an
+ # iscoroutinefunction() checks _is_coroutine property to say if an
# object is a coroutine. Without this check it looks to see if it is a
# function/method, which in this case it is not (since it is an
# AsyncMock).
@@ -2111,7 +2140,7 @@ class AsyncMockMixin(Base):
self.__dict__['__code__'] = code_mock
async def _execute_mock_call(self, /, *args, **kwargs):
- # This is nearly just like super(), except for sepcial handling
+ # This is nearly just like super(), except for special handling
# of coroutines
_call = _Call((args, kwargs), two=True)
@@ -2132,7 +2161,7 @@ class AsyncMockMixin(Base):
raise StopAsyncIteration
if _is_exception(result):
raise result
- elif asyncio.iscoroutinefunction(effect):
+ elif iscoroutinefunction(effect):
result = await effect(*args, **kwargs)
else:
result = effect(*args, **kwargs)
@@ -2144,7 +2173,7 @@ class AsyncMockMixin(Base):
return self.return_value
if self._mock_wraps is not None:
- if asyncio.iscoroutinefunction(self._mock_wraps):
+ if iscoroutinefunction(self._mock_wraps):
return await self._mock_wraps(*args, **kwargs)
return self._mock_wraps(*args, **kwargs)
@@ -2179,9 +2208,9 @@ class AsyncMockMixin(Base):
msg = self._format_mock_failure_message(args, kwargs, action='await')
return msg
- expected = self._call_matcher((args, kwargs))
+ expected = self._call_matcher(_Call((args, kwargs), two=True))
actual = self._call_matcher(self.await_args)
- if expected != actual:
+ if actual != expected:
cause = expected if isinstance(expected, Exception) else None
raise AssertionError(_error_message()) from cause
@@ -2200,10 +2229,10 @@ class AsyncMockMixin(Base):
"""
Assert the mock has ever been awaited with the specified arguments.
"""
- expected = self._call_matcher((args, kwargs))
+ expected = self._call_matcher(_Call((args, kwargs), two=True))
+ cause = expected if isinstance(expected, Exception) else None
actual = [self._call_matcher(c) for c in self.await_args_list]
- if expected not in actual:
- cause = expected if isinstance(expected, Exception) else None
+ if cause or expected not in _AnyComparer(actual):
expected_string = self._format_mock_call_signature(args, kwargs)
raise AssertionError(
'%s await not found' % expected_string
@@ -2281,7 +2310,7 @@ class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock):
recognized as an async function, and the result of a call is an awaitable:
>>> mock = AsyncMock()
- >>> asyncio.iscoroutinefunction(mock)
+ >>> iscoroutinefunction(mock)
True
>>> inspect.isawaitable(mock())
True
@@ -2408,12 +2437,10 @@ class _Call(tuple):
def __eq__(self, other):
- if other is ANY:
- return True
try:
len_other = len(other)
except TypeError:
- return False
+ return NotImplemented
self_name = ''
if len(self) == 2:
@@ -2486,12 +2513,6 @@ class _Call(tuple):
return tuple.__getattribute__(self, attr)
- def count(self, /, *args, **kwargs):
- return self.__getattr__('count')(*args, **kwargs)
-
- def index(self, /, *args, **kwargs):
- return self.__getattr__('index')(*args, **kwargs)
-
def _get_call_arguments(self):
if len(self) == 2:
args, kwargs = self
@@ -2656,7 +2677,7 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None,
skipfirst = _must_skip(spec, entry, is_type)
kwargs['_eat_self'] = skipfirst
- if asyncio.iscoroutinefunction(original):
+ if iscoroutinefunction(original):
child_klass = AsyncMock
else:
child_klass = MagicMock
@@ -2862,9 +2883,6 @@ class _AsyncIterator:
code_mock.co_flags = inspect.CO_ITERABLE_COROUTINE
self.__dict__['__code__'] = code_mock
- def __aiter__(self):
- return self
-
async def __anext__(self):
try:
return next(self.iterator)
diff --git a/Lib/unittest/result.py b/Lib/unittest/result.py
index c7e3206d..111317b3 100644
--- a/Lib/unittest/result.py
+++ b/Lib/unittest/result.py
@@ -161,7 +161,7 @@ class TestResult(object):
"""Tells whether or not this result was a success."""
# The hasattr check is for test_result's OldResult test. That
# way this method works on objects that lack the attribute.
- # (where would such result intances come from? old stored pickles?)
+ # (where would such result instances come from? old stored pickles?)
return ((len(self.failures) == len(self.errors) == 0) and
(not hasattr(self, 'unexpectedSuccesses') or
len(self.unexpectedSuccesses) == 0))
diff --git a/Lib/unittest/test/test_case.py b/Lib/unittest/test/test_case.py
index 3dedcbe6..f855c4dc 100644
--- a/Lib/unittest/test/test_case.py
+++ b/Lib/unittest/test/test_case.py
@@ -8,7 +8,6 @@ import logging
import warnings
import weakref
import inspect
-import types
from copy import deepcopy
from test import support
@@ -1351,20 +1350,6 @@ test case
pass
self.assertRaises(TypeError, self.assertWarnsRegex, MyWarn, lambda: True)
- def testAssertWarnsModifySysModules(self):
- # bpo-29620: handle modified sys.modules during iteration
- class Foo(types.ModuleType):
- @property
- def __warningregistry__(self):
- sys.modules['@bar@'] = 'bar'
-
- sys.modules['@foo@'] = Foo('foo')
- try:
- self.assertWarns(UserWarning, warnings.warn, 'expected')
- finally:
- del sys.modules['@foo@']
- del sys.modules['@bar@']
-
def testAssertRaisesRegexMismatch(self):
def Stub():
raise Exception('Unexpected')
diff --git a/Lib/unittest/test/test_program.py b/Lib/unittest/test/test_program.py
index 4a62ae1b..eef82ff9 100644
--- a/Lib/unittest/test/test_program.py
+++ b/Lib/unittest/test/test_program.py
@@ -188,8 +188,6 @@ class TestCommandLineArgs(unittest.TestCase):
program = self.program
for arg, attr in (('buffer', 'buffer'), ('failfast', 'failfast'),
('catch', 'catchbreak')):
- if attr == 'catch' and not hasInstallHandler:
- continue
setattr(program, attr, None)
program.parseArgs([None])
diff --git a/Lib/unittest/test/test_runner.py b/Lib/unittest/test/test_runner.py
index 7d363407..dd9a1b6d 100644
--- a/Lib/unittest/test/test_runner.py
+++ b/Lib/unittest/test/test_runner.py
@@ -592,7 +592,7 @@ class TestModuleCleanUp(unittest.TestCase):
class TestableTest(unittest.TestCase):
def setUp(self2):
self2.addCleanup(cleanup, 1, 2, function=3, self=4)
- with self.assertWarns(DeprecationWarning):
+ with self.assertRaises(TypeError):
self2.addCleanup(function=cleanup, arg='hello')
def testNothing(self):
pass
@@ -603,8 +603,7 @@ class TestModuleCleanUp(unittest.TestCase):
unittest.TestCase.addCleanup(self=TestableTest(), function=cleanup)
runTests(TestableTest)
self.assertEqual(cleanups,
- [((), {'arg': 'hello'}),
- ((1, 2), {'function': 3, 'self': 4})])
+ [((1, 2), {'function': 3, 'self': 4})])
def test_with_errors_in_addClassCleanup(self):
ordering = []
diff --git a/Lib/unittest/test/testmock/testasync.py b/Lib/unittest/test/testmock/testasync.py
index e84c66c0..690ca4f5 100644
--- a/Lib/unittest/test/testmock/testasync.py
+++ b/Lib/unittest/test/testmock/testasync.py
@@ -1,8 +1,12 @@
import asyncio
+import gc
import inspect
import re
import unittest
+from contextlib import contextmanager
+from asyncio import run, iscoroutinefunction
+from unittest import IsolatedAsyncioTestCase
from unittest.mock import (ANY, call, AsyncMock, patch, MagicMock, Mock,
create_autospec, sentinel, _CallList)
@@ -12,49 +16,48 @@ def tearDownModule():
class AsyncClass:
- def __init__(self):
- pass
- async def async_method(self):
- pass
- def normal_method(self):
- pass
+ def __init__(self): pass
+ async def async_method(self): pass
+ def normal_method(self): pass
@classmethod
- async def async_class_method(cls):
- pass
+ async def async_class_method(cls): pass
@staticmethod
- async def async_static_method():
- pass
+ async def async_static_method(): pass
class AwaitableClass:
- def __await__(self):
- yield
+ def __await__(self): yield
-async def async_func():
- pass
+async def async_func(): pass
-async def async_func_args(a, b, *, c):
- pass
+async def async_func_args(a, b, *, c): pass
-def normal_func():
- pass
+def normal_func(): pass
class NormalClass(object):
- def a(self):
- pass
+ def a(self): pass
async_foo_name = f'{__name__}.AsyncClass'
normal_foo_name = f'{__name__}.NormalClass'
+@contextmanager
+def assertNeverAwaited(test):
+ with test.assertWarnsRegex(RuntimeWarning, "was never awaited$"):
+ yield
+ # In non-CPython implementations of Python, this is needed because timely
+ # deallocation is not guaranteed by the garbage collector.
+ gc.collect()
+
+
class AsyncPatchDecoratorTest(unittest.TestCase):
def test_is_coroutine_function_patch(self):
@patch.object(AsyncClass, 'async_method')
def test_async(mock_method):
- self.assertTrue(asyncio.iscoroutinefunction(mock_method))
+ self.assertTrue(iscoroutinefunction(mock_method))
test_async()
def test_is_async_patch(self):
@@ -62,13 +65,13 @@ class AsyncPatchDecoratorTest(unittest.TestCase):
def test_async(mock_method):
m = mock_method()
self.assertTrue(inspect.isawaitable(m))
- asyncio.run(m)
+ run(m)
@patch(f'{async_foo_name}.async_method')
def test_no_parent_attribute(mock_method):
m = mock_method()
self.assertTrue(inspect.isawaitable(m))
- asyncio.run(m)
+ run(m)
test_async()
test_no_parent_attribute()
@@ -107,7 +110,7 @@ class AsyncPatchDecoratorTest(unittest.TestCase):
self.assertEqual(await async_func(), 1)
self.assertEqual(await async_func_args(1, 2, c=3), 2)
- asyncio.run(test_async())
+ run(test_async())
self.assertTrue(inspect.iscoroutinefunction(async_func))
@@ -115,7 +118,7 @@ class AsyncPatchCMTest(unittest.TestCase):
def test_is_async_function_cm(self):
def test_async():
with patch.object(AsyncClass, 'async_method') as mock_method:
- self.assertTrue(asyncio.iscoroutinefunction(mock_method))
+ self.assertTrue(iscoroutinefunction(mock_method))
test_async()
@@ -124,7 +127,7 @@ class AsyncPatchCMTest(unittest.TestCase):
with patch.object(AsyncClass, 'async_method') as mock_method:
m = mock_method()
self.assertTrue(inspect.isawaitable(m))
- asyncio.run(m)
+ run(m)
test_async()
@@ -141,31 +144,31 @@ class AsyncPatchCMTest(unittest.TestCase):
self.assertIsInstance(async_func, AsyncMock)
self.assertTrue(inspect.iscoroutinefunction(async_func))
- asyncio.run(test_async())
+ run(test_async())
class AsyncMockTest(unittest.TestCase):
def test_iscoroutinefunction_default(self):
mock = AsyncMock()
- self.assertTrue(asyncio.iscoroutinefunction(mock))
+ self.assertTrue(iscoroutinefunction(mock))
def test_iscoroutinefunction_function(self):
async def foo(): pass
mock = AsyncMock(foo)
- self.assertTrue(asyncio.iscoroutinefunction(mock))
+ self.assertTrue(iscoroutinefunction(mock))
self.assertTrue(inspect.iscoroutinefunction(mock))
def test_isawaitable(self):
mock = AsyncMock()
m = mock()
self.assertTrue(inspect.isawaitable(m))
- asyncio.run(m)
+ run(m)
self.assertIn('assert_awaited', dir(mock))
def test_iscoroutinefunction_normal_function(self):
def foo(): pass
mock = AsyncMock(foo)
- self.assertTrue(asyncio.iscoroutinefunction(mock))
+ self.assertTrue(iscoroutinefunction(mock))
self.assertTrue(inspect.iscoroutinefunction(mock))
def test_future_isfuture(self):
@@ -211,9 +214,9 @@ class AsyncAutospecTest(unittest.TestCase):
self.assertEqual(spec.await_args_list, [])
spec.assert_not_awaited()
- asyncio.run(main())
+ run(main())
- self.assertTrue(asyncio.iscoroutinefunction(spec))
+ self.assertTrue(iscoroutinefunction(spec))
self.assertTrue(asyncio.iscoroutine(awaitable))
self.assertEqual(spec.await_count, 1)
self.assertEqual(spec.await_args, call(1, 2, c=3))
@@ -223,6 +226,10 @@ class AsyncAutospecTest(unittest.TestCase):
spec.assert_awaited_with(1, 2, c=3)
spec.assert_awaited()
+ with self.assertRaises(AssertionError):
+ spec.assert_any_await(e=1)
+
+
def test_patch_with_autospec(self):
async def test_async():
@@ -230,7 +237,7 @@ class AsyncAutospecTest(unittest.TestCase):
awaitable = mock_method(1, 2, c=3)
self.assertIsInstance(mock_method.mock, AsyncMock)
- self.assertTrue(asyncio.iscoroutinefunction(mock_method))
+ self.assertTrue(iscoroutinefunction(mock_method))
self.assertTrue(asyncio.iscoroutine(awaitable))
self.assertTrue(inspect.isawaitable(awaitable))
@@ -255,7 +262,7 @@ class AsyncAutospecTest(unittest.TestCase):
self.assertIsNone(mock_method.await_args)
self.assertEqual(mock_method.await_args_list, [])
- asyncio.run(test_async())
+ run(test_async())
class AsyncSpecTest(unittest.TestCase):
@@ -278,8 +285,7 @@ class AsyncSpecTest(unittest.TestCase):
def inner_test(mock_type):
async_mock = mock_type(spec=async_func)
self.assertIsInstance(async_mock, mock_type)
- with self.assertWarns(RuntimeWarning):
- # Will raise a warning because never awaited
+ with assertNeverAwaited(self):
self.assertTrue(inspect.isawaitable(async_mock()))
sync_mock = mock_type(spec=normal_func)
@@ -293,8 +299,7 @@ class AsyncSpecTest(unittest.TestCase):
def inner_test(mock_type):
async_mock = mock_type(async_func)
self.assertIsInstance(async_mock, mock_type)
- with self.assertWarns(RuntimeWarning):
- # Will raise a warning because never awaited
+ with assertNeverAwaited(self):
self.assertTrue(inspect.isawaitable(async_mock()))
sync_mock = mock_type(normal_func)
@@ -309,14 +314,14 @@ class AsyncSpecTest(unittest.TestCase):
self.assertIsInstance(mock, AsyncMock)
m = mock()
self.assertTrue(inspect.isawaitable(m))
- asyncio.run(m)
+ run(m)
def test_spec_as_normal_positional_AsyncMock(self):
mock = AsyncMock(normal_func)
self.assertIsInstance(mock, AsyncMock)
m = mock()
self.assertTrue(inspect.isawaitable(m))
- asyncio.run(m)
+ run(m)
def test_spec_async_mock(self):
@patch.object(AsyncClass, 'async_method', spec=True)
@@ -362,16 +367,17 @@ class AsyncSpecSetTest(unittest.TestCase):
@patch.object(AsyncClass, 'async_method', spec_set=True)
def test_async(async_method):
self.assertIsInstance(async_method, AsyncMock)
+ test_async()
def test_is_async_AsyncMock(self):
mock = AsyncMock(spec_set=AsyncClass.async_method)
- self.assertTrue(asyncio.iscoroutinefunction(mock))
+ self.assertTrue(iscoroutinefunction(mock))
self.assertIsInstance(mock, AsyncMock)
def test_is_child_AsyncMock(self):
mock = MagicMock(spec_set=AsyncClass)
- self.assertTrue(asyncio.iscoroutinefunction(mock.async_method))
- self.assertFalse(asyncio.iscoroutinefunction(mock.normal_method))
+ self.assertTrue(iscoroutinefunction(mock.async_method))
+ self.assertFalse(iscoroutinefunction(mock.normal_method))
self.assertIsInstance(mock.async_method, AsyncMock)
self.assertIsInstance(mock.normal_method, MagicMock)
self.assertIsInstance(mock, MagicMock)
@@ -384,10 +390,9 @@ class AsyncSpecSetTest(unittest.TestCase):
self.assertIsInstance(cm, MagicMock)
-class AsyncArguments(unittest.IsolatedAsyncioTestCase):
+class AsyncArguments(IsolatedAsyncioTestCase):
async def test_add_return_value(self):
- async def addition(self, var):
- return var + 1
+ async def addition(self, var): pass
mock = AsyncMock(addition, return_value=10)
output = await mock(5)
@@ -395,8 +400,7 @@ class AsyncArguments(unittest.IsolatedAsyncioTestCase):
self.assertEqual(output, 10)
async def test_add_side_effect_exception(self):
- async def addition(var):
- return var + 1
+ async def addition(var): pass
mock = AsyncMock(addition, side_effect=Exception('err'))
with self.assertRaises(Exception):
await mock(5)
@@ -542,24 +546,20 @@ class AsyncMagicMethods(unittest.TestCase):
self.assertIsInstance(m_mock.__aenter__, AsyncMock)
self.assertIsInstance(m_mock.__aexit__, AsyncMock)
# AsyncMocks are also coroutine functions
- self.assertTrue(asyncio.iscoroutinefunction(m_mock.__aenter__))
- self.assertTrue(asyncio.iscoroutinefunction(m_mock.__aexit__))
+ self.assertTrue(iscoroutinefunction(m_mock.__aenter__))
+ self.assertTrue(iscoroutinefunction(m_mock.__aexit__))
class AsyncContextManagerTest(unittest.TestCase):
+
class WithAsyncContextManager:
- async def __aenter__(self, *args, **kwargs):
- self.entered = True
- return self
+ async def __aenter__(self, *args, **kwargs): pass
- async def __aexit__(self, *args, **kwargs):
- self.exited = True
+ async def __aexit__(self, *args, **kwargs): pass
class WithSyncContextManager:
- def __enter__(self, *args, **kwargs):
- return self
+ def __enter__(self, *args, **kwargs): pass
- def __exit__(self, *args, **kwargs):
- pass
+ def __exit__(self, *args, **kwargs): pass
class ProductionCode:
# Example real-world(ish) code
@@ -580,7 +580,7 @@ class AsyncContextManagerTest(unittest.TestCase):
response.json = AsyncMock(return_value={'json': 123})
cm.__aenter__.return_value = response
pc.session.post.return_value = cm
- result = asyncio.run(pc.main())
+ result = run(pc.main())
self.assertEqual(result, {'json': 123})
for mock_type in [AsyncMock, MagicMock]:
@@ -599,7 +599,7 @@ class AsyncContextManagerTest(unittest.TestCase):
called = True
return result
- cm_result = asyncio.run(use_context_manager())
+ cm_result = run(use_context_manager())
self.assertTrue(called)
self.assertTrue(cm_mock.__aenter__.called)
self.assertTrue(cm_mock.__aexit__.called)
@@ -612,6 +612,7 @@ class AsyncContextManagerTest(unittest.TestCase):
with self.subTest(f"test context manager magics with {mock_type}"):
inner_test(mock_type)
+
def test_mock_customize_async_context_manager(self):
instance = self.WithAsyncContextManager()
mock_instance = MagicMock(instance)
@@ -623,7 +624,7 @@ class AsyncContextManagerTest(unittest.TestCase):
async with mock_instance as result:
return result
- self.assertIs(asyncio.run(use_context_manager()), expected_result)
+ self.assertIs(run(use_context_manager()), expected_result)
def test_mock_customize_async_context_manager_with_coroutine(self):
enter_called = False
@@ -647,7 +648,7 @@ class AsyncContextManagerTest(unittest.TestCase):
async with mock_instance:
pass
- asyncio.run(use_context_manager())
+ run(use_context_manager())
self.assertTrue(enter_called)
self.assertTrue(exit_called)
@@ -659,7 +660,7 @@ class AsyncContextManagerTest(unittest.TestCase):
instance = self.WithAsyncContextManager()
mock_instance = MagicMock(instance)
with self.assertRaises(TypeError):
- asyncio.run(raise_in(mock_instance))
+ run(raise_in(mock_instance))
class AsyncIteratorTest(unittest.TestCase):
@@ -667,23 +668,16 @@ class AsyncIteratorTest(unittest.TestCase):
def __init__(self):
self.items = ["foo", "NormalFoo", "baz"]
- def __aiter__(self):
- return self
-
- async def __anext__(self):
- try:
- return self.items.pop()
- except IndexError:
- pass
+ def __aiter__(self): pass
- raise StopAsyncIteration
+ async def __anext__(self): pass
def test_aiter_set_return_value(self):
mock_iter = AsyncMock(name="tester")
mock_iter.__aiter__.return_value = [1, 2, 3]
async def main():
return [i async for i in mock_iter]
- result = asyncio.run(main())
+ result = run(main())
self.assertEqual(result, [1, 2, 3])
def test_mock_aiter_and_anext_asyncmock(self):
@@ -692,11 +686,11 @@ class AsyncIteratorTest(unittest.TestCase):
mock_instance = mock_type(instance)
# Check that the mock and the real thing behave the same
# __aiter__ is not actually async, so not a coroutinefunction
- self.assertFalse(asyncio.iscoroutinefunction(instance.__aiter__))
- self.assertFalse(asyncio.iscoroutinefunction(mock_instance.__aiter__))
+ self.assertFalse(iscoroutinefunction(instance.__aiter__))
+ self.assertFalse(iscoroutinefunction(mock_instance.__aiter__))
# __anext__ is async
- self.assertTrue(asyncio.iscoroutinefunction(instance.__anext__))
- self.assertTrue(asyncio.iscoroutinefunction(mock_instance.__anext__))
+ self.assertTrue(iscoroutinefunction(instance.__anext__))
+ self.assertTrue(iscoroutinefunction(mock_instance.__anext__))
for mock_type in [AsyncMock, MagicMock]:
with self.subTest(f"test aiter and anext corourtine with {mock_type}"):
@@ -714,18 +708,18 @@ class AsyncIteratorTest(unittest.TestCase):
expected = ["FOO", "BAR", "BAZ"]
def test_default(mock_type):
mock_instance = mock_type(self.WithAsyncIterator())
- self.assertEqual(asyncio.run(iterate(mock_instance)), [])
+ self.assertEqual(run(iterate(mock_instance)), [])
def test_set_return_value(mock_type):
mock_instance = mock_type(self.WithAsyncIterator())
mock_instance.__aiter__.return_value = expected[:]
- self.assertEqual(asyncio.run(iterate(mock_instance)), expected)
+ self.assertEqual(run(iterate(mock_instance)), expected)
def test_set_return_value_iter(mock_type):
mock_instance = mock_type(self.WithAsyncIterator())
mock_instance.__aiter__.return_value = iter(expected[:])
- self.assertEqual(asyncio.run(iterate(mock_instance)), expected)
+ self.assertEqual(run(iterate(mock_instance)), expected)
for mock_type in [AsyncMock, MagicMock]:
with self.subTest(f"default value with {mock_type}"):
@@ -750,10 +744,9 @@ class AsyncMockAssert(unittest.TestCase):
def test_assert_called_but_not_awaited(self):
mock = AsyncMock(AsyncClass)
- with self.assertWarns(RuntimeWarning):
- # Will raise a warning because never awaited
+ with assertNeverAwaited(self):
mock.async_method()
- self.assertTrue(asyncio.iscoroutinefunction(mock.async_method))
+ self.assertTrue(iscoroutinefunction(mock.async_method))
mock.async_method.assert_called()
mock.async_method.assert_called_once()
mock.async_method.assert_called_once_with()
@@ -771,7 +764,7 @@ class AsyncMockAssert(unittest.TestCase):
with self.assertRaises(AssertionError):
mock.async_method.assert_awaited()
- asyncio.run(self._await_coroutine(mock_coroutine))
+ run(self._await_coroutine(mock_coroutine))
# Assert we haven't re-called the function
mock.async_method.assert_called_once()
mock.async_method.assert_awaited()
@@ -785,21 +778,21 @@ class AsyncMockAssert(unittest.TestCase):
with self.assertRaises(AssertionError):
self.mock.assert_called()
- asyncio.run(self._runnable_test())
+ run(self._runnable_test())
self.mock.assert_called_once()
self.mock.assert_awaited_once()
def test_assert_called_twice_and_awaited_once(self):
mock = AsyncMock(AsyncClass)
coroutine = mock.async_method()
- with self.assertWarns(RuntimeWarning):
- # The first call will be awaited so no warning there
- # But this call will never get awaited, so it will warn here
+ # The first call will be awaited so no warning there
+ # But this call will never get awaited, so it will warn here
+ with assertNeverAwaited(self):
mock.async_method()
with self.assertRaises(AssertionError):
mock.async_method.assert_awaited()
mock.async_method.assert_called()
- asyncio.run(self._await_coroutine(coroutine))
+ run(self._await_coroutine(coroutine))
mock.async_method.assert_awaited()
mock.async_method.assert_awaited_once()
@@ -807,10 +800,10 @@ class AsyncMockAssert(unittest.TestCase):
mock = AsyncMock(AsyncClass)
coroutine = mock.async_method()
mock.async_method.assert_called_once()
- asyncio.run(self._await_coroutine(coroutine))
+ run(self._await_coroutine(coroutine))
with self.assertRaises(RuntimeError):
# Cannot reuse already awaited coroutine
- asyncio.run(self._await_coroutine(coroutine))
+ run(self._await_coroutine(coroutine))
mock.async_method.assert_awaited()
def test_assert_awaited_but_not_called(self):
@@ -820,7 +813,7 @@ class AsyncMockAssert(unittest.TestCase):
self.mock.assert_called()
with self.assertRaises(TypeError):
# You cannot await an AsyncMock, it must be a coroutine
- asyncio.run(self._await_coroutine(self.mock))
+ run(self._await_coroutine(self.mock))
with self.assertRaises(AssertionError):
self.mock.assert_awaited()
@@ -829,38 +822,34 @@ class AsyncMockAssert(unittest.TestCase):
def test_assert_has_calls_not_awaits(self):
kalls = [call('foo')]
- with self.assertWarns(RuntimeWarning):
- # Will raise a warning because never awaited
+ with assertNeverAwaited(self):
self.mock('foo')
self.mock.assert_has_calls(kalls)
with self.assertRaises(AssertionError):
self.mock.assert_has_awaits(kalls)
def test_assert_has_mock_calls_on_async_mock_no_spec(self):
- with self.assertWarns(RuntimeWarning):
- # Will raise a warning because never awaited
+ with assertNeverAwaited(self):
self.mock()
kalls_empty = [('', (), {})]
self.assertEqual(self.mock.mock_calls, kalls_empty)
- with self.assertWarns(RuntimeWarning):
- # Will raise a warning because never awaited
+ with assertNeverAwaited(self):
self.mock('foo')
+ with assertNeverAwaited(self):
self.mock('baz')
mock_kalls = ([call(), call('foo'), call('baz')])
self.assertEqual(self.mock.mock_calls, mock_kalls)
def test_assert_has_mock_calls_on_async_mock_with_spec(self):
a_class_mock = AsyncMock(AsyncClass)
- with self.assertWarns(RuntimeWarning):
- # Will raise a warning because never awaited
+ with assertNeverAwaited(self):
a_class_mock.async_method()
kalls_empty = [('', (), {})]
self.assertEqual(a_class_mock.async_method.mock_calls, kalls_empty)
self.assertEqual(a_class_mock.mock_calls, [call.async_method()])
- with self.assertWarns(RuntimeWarning):
- # Will raise a warning because never awaited
+ with assertNeverAwaited(self):
a_class_mock.async_method(1, 2, 3, a=4, b=5)
method_kalls = [call(), call(1, 2, 3, a=4, b=5)]
mock_kalls = [call.async_method(), call.async_method(1, 2, 3, a=4, b=5)]
@@ -868,9 +857,9 @@ class AsyncMockAssert(unittest.TestCase):
self.assertEqual(a_class_mock.mock_calls, mock_kalls)
def test_async_method_calls_recorded(self):
- with self.assertWarns(RuntimeWarning):
- # Will raise warnings because never awaited
+ with assertNeverAwaited(self):
self.mock.something(3, fish=None)
+ with assertNeverAwaited(self):
self.mock.something_else.something(6, cake=sentinel.Cake)
self.assertEqual(self.mock.method_calls, [
@@ -892,19 +881,20 @@ class AsyncMockAssert(unittest.TestCase):
self.assertEqual(attr, [])
assert_attrs(self.mock)
- with self.assertWarns(RuntimeWarning):
- # Will raise warnings because never awaited
+ with assertNeverAwaited(self):
self.mock()
+ with assertNeverAwaited(self):
self.mock(1, 2)
+ with assertNeverAwaited(self):
self.mock(a=3)
self.mock.reset_mock()
assert_attrs(self.mock)
a_mock = AsyncMock(AsyncClass)
- with self.assertWarns(RuntimeWarning):
- # Will raise warnings because never awaited
+ with assertNeverAwaited(self):
a_mock.async_method()
+ with assertNeverAwaited(self):
a_mock.async_method(1, a=3)
a_mock.reset_mock()
@@ -914,17 +904,17 @@ class AsyncMockAssert(unittest.TestCase):
with self.assertRaises(AssertionError):
self.mock.assert_awaited()
- asyncio.run(self._runnable_test())
+ run(self._runnable_test())
self.mock.assert_awaited()
def test_assert_awaited_once(self):
with self.assertRaises(AssertionError):
self.mock.assert_awaited_once()
- asyncio.run(self._runnable_test())
+ run(self._runnable_test())
self.mock.assert_awaited_once()
- asyncio.run(self._runnable_test())
+ run(self._runnable_test())
with self.assertRaises(AssertionError):
self.mock.assert_awaited_once()
@@ -933,15 +923,15 @@ class AsyncMockAssert(unittest.TestCase):
with self.assertRaisesRegex(AssertionError, msg):
self.mock.assert_awaited_with('foo')
- asyncio.run(self._runnable_test())
+ run(self._runnable_test())
msg = 'expected await not found'
with self.assertRaisesRegex(AssertionError, msg):
self.mock.assert_awaited_with('foo')
- asyncio.run(self._runnable_test('foo'))
+ run(self._runnable_test('foo'))
self.mock.assert_awaited_with('foo')
- asyncio.run(self._runnable_test('SomethingElse'))
+ run(self._runnable_test('SomethingElse'))
with self.assertRaises(AssertionError):
self.mock.assert_awaited_with('foo')
@@ -949,10 +939,10 @@ class AsyncMockAssert(unittest.TestCase):
with self.assertRaises(AssertionError):
self.mock.assert_awaited_once_with('foo')
- asyncio.run(self._runnable_test('foo'))
+ run(self._runnable_test('foo'))
self.mock.assert_awaited_once_with('foo')
- asyncio.run(self._runnable_test('foo'))
+ run(self._runnable_test('foo'))
with self.assertRaises(AssertionError):
self.mock.assert_awaited_once_with('foo')
@@ -960,14 +950,14 @@ class AsyncMockAssert(unittest.TestCase):
with self.assertRaises(AssertionError):
self.mock.assert_any_await('foo')
- asyncio.run(self._runnable_test('baz'))
+ run(self._runnable_test('baz'))
with self.assertRaises(AssertionError):
self.mock.assert_any_await('foo')
- asyncio.run(self._runnable_test('foo'))
+ run(self._runnable_test('foo'))
self.mock.assert_any_await('foo')
- asyncio.run(self._runnable_test('SomethingElse'))
+ run(self._runnable_test('SomethingElse'))
self.mock.assert_any_await('foo')
def test_assert_has_awaits_no_order(self):
@@ -977,43 +967,67 @@ class AsyncMockAssert(unittest.TestCase):
self.mock.assert_has_awaits(calls)
self.assertEqual(len(cm.exception.args), 1)
- asyncio.run(self._runnable_test('foo'))
+ run(self._runnable_test('foo'))
with self.assertRaises(AssertionError):
self.mock.assert_has_awaits(calls)
- asyncio.run(self._runnable_test('foo'))
+ run(self._runnable_test('foo'))
with self.assertRaises(AssertionError):
self.mock.assert_has_awaits(calls)
- asyncio.run(self._runnable_test('baz'))
+ run(self._runnable_test('baz'))
self.mock.assert_has_awaits(calls)
- asyncio.run(self._runnable_test('SomethingElse'))
+ run(self._runnable_test('SomethingElse'))
self.mock.assert_has_awaits(calls)
+ def test_awaits_asserts_with_any(self):
+ class Foo:
+ def __eq__(self, other): pass
+
+ run(self._runnable_test(Foo(), 1))
+
+ self.mock.assert_has_awaits([call(ANY, 1)])
+ self.mock.assert_awaited_with(ANY, 1)
+ self.mock.assert_any_await(ANY, 1)
+
+ def test_awaits_asserts_with_spec_and_any(self):
+ class Foo:
+ def __eq__(self, other): pass
+
+ mock_with_spec = AsyncMock(spec=Foo)
+
+ async def _custom_mock_runnable_test(*args):
+ await mock_with_spec(*args)
+
+ run(_custom_mock_runnable_test(Foo(), 1))
+ mock_with_spec.assert_has_awaits([call(ANY, 1)])
+ mock_with_spec.assert_awaited_with(ANY, 1)
+ mock_with_spec.assert_any_await(ANY, 1)
+
def test_assert_has_awaits_ordered(self):
calls = [call('foo'), call('baz')]
with self.assertRaises(AssertionError):
self.mock.assert_has_awaits(calls, any_order=True)
- asyncio.run(self._runnable_test('baz'))
+ run(self._runnable_test('baz'))
with self.assertRaises(AssertionError):
self.mock.assert_has_awaits(calls, any_order=True)
- asyncio.run(self._runnable_test('bamf'))
+ run(self._runnable_test('bamf'))
with self.assertRaises(AssertionError):
self.mock.assert_has_awaits(calls, any_order=True)
- asyncio.run(self._runnable_test('foo'))
+ run(self._runnable_test('foo'))
self.mock.assert_has_awaits(calls, any_order=True)
- asyncio.run(self._runnable_test('qux'))
+ run(self._runnable_test('qux'))
self.mock.assert_has_awaits(calls, any_order=True)
def test_assert_not_awaited(self):
self.mock.assert_not_awaited()
- asyncio.run(self._runnable_test())
+ run(self._runnable_test())
with self.assertRaises(AssertionError):
self.mock.assert_not_awaited()
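
These tests rely on an assertNeverAwaited helper that is defined elsewhere in testasync.py and is not shown in this hunk. A minimal sketch of such a helper, assuming it simply wraps TestCase.assertWarnsRegex, could look like this:

import contextlib
import unittest
from unittest.mock import AsyncMock

@contextlib.contextmanager
def assertNeverAwaited(test):
    # A call whose coroutine is never awaited emits a "... was never awaited"
    # RuntimeWarning when the discarded coroutine object is garbage collected.
    with test.assertWarnsRegex(RuntimeWarning, "was never awaited"):
        yield

class Demo(unittest.TestCase):
    def test_called_but_never_awaited(self):
        mock = AsyncMock()
        with assertNeverAwaited(self):
            mock()                      # called, but the coroutine is dropped
        mock.assert_called_once()
        mock.assert_not_awaited()

if __name__ == "__main__":
    unittest.main()
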
@@ -1021,7 +1035,7 @@ class AsyncMockAssert(unittest.TestCase):
async def f(x=None): pass
self.mock = AsyncMock(spec=f)
- asyncio.run(self._runnable_test(1))
+ run(self._runnable_test(1))
with self.assertRaisesRegex(
AssertionError,
diff --git a/Lib/unittest/test/testmock/testhelpers.py b/Lib/unittest/test/testmock/testhelpers.py
index f3c7acb9..9e7ec5d6 100644
--- a/Lib/unittest/test/testmock/testhelpers.py
+++ b/Lib/unittest/test/testmock/testhelpers.py
@@ -64,7 +64,28 @@ class AnyTest(unittest.TestCase):
self.assertEqual(expected, mock.mock_calls)
self.assertEqual(mock.mock_calls, expected)
+ def test_any_no_spec(self):
+ # This is a regression test for bpo-37555
+ class Foo:
+ def __eq__(self, other): pass
+
+ mock = Mock()
+ mock(Foo(), 1)
+ mock.assert_has_calls([call(ANY, 1)])
+ mock.assert_called_with(ANY, 1)
+ mock.assert_any_call(ANY, 1)
+
+ def test_any_and_spec_set(self):
+ # This is a regression test for bpo-37555
+ class Foo:
+ def __eq__(self, other): pass
+
+ mock = Mock(spec=Foo)
+ mock(Foo(), 1)
+ mock.assert_has_calls([call(ANY, 1)])
+ mock.assert_called_with(ANY, 1)
+ mock.assert_any_call(ANY, 1)
class CallTest(unittest.TestCase):
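
For context, the two regression tests above lean on the fact that unittest.mock.ANY compares equal to any object, so it can stand in for arguments whose own __eq__ is unhelpful:

from unittest.mock import ANY, Mock, call

m = Mock()
m(object(), 1)

assert ANY == object()              # ANY matches anything, in either direction
assert m.call_args == call(ANY, 1)
m.assert_called_once_with(ANY, 1)
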
diff --git a/Lib/unittest/test/testmock/testmagicmethods.py b/Lib/unittest/test/testmock/testmagicmethods.py
index 76b3a560..a4feae7e 100644
--- a/Lib/unittest/test/testmock/testmagicmethods.py
+++ b/Lib/unittest/test/testmock/testmagicmethods.py
@@ -1,8 +1,7 @@
-import asyncio
import math
import unittest
import os
-import sys
+from asyncio import iscoroutinefunction
from unittest.mock import AsyncMock, Mock, MagicMock, _magics
@@ -286,8 +285,8 @@ class TestMockingMagicMethods(unittest.TestCase):
self.assertEqual(math.trunc(mock), mock.__trunc__())
self.assertEqual(math.floor(mock), mock.__floor__())
self.assertEqual(math.ceil(mock), mock.__ceil__())
- self.assertTrue(asyncio.iscoroutinefunction(mock.__aexit__))
- self.assertTrue(asyncio.iscoroutinefunction(mock.__aenter__))
+ self.assertTrue(iscoroutinefunction(mock.__aexit__))
+ self.assertTrue(iscoroutinefunction(mock.__aenter__))
self.assertIsInstance(mock.__aenter__, AsyncMock)
self.assertIsInstance(mock.__aexit__, AsyncMock)
@@ -312,8 +311,8 @@ class TestMockingMagicMethods(unittest.TestCase):
self.assertEqual(math.trunc(mock), mock.__trunc__())
self.assertEqual(math.floor(mock), mock.__floor__())
self.assertEqual(math.ceil(mock), mock.__ceil__())
- self.assertTrue(asyncio.iscoroutinefunction(mock.__aexit__))
- self.assertTrue(asyncio.iscoroutinefunction(mock.__aenter__))
+ self.assertTrue(iscoroutinefunction(mock.__aexit__))
+ self.assertTrue(iscoroutinefunction(mock.__aenter__))
self.assertIsInstance(mock.__aenter__, AsyncMock)
self.assertIsInstance(mock.__aexit__, AsyncMock)
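
As a quick illustration of the assertions above (Python 3.8+), MagicMock pre-configures its async context-manager magics as AsyncMock attributes:

from asyncio import iscoroutinefunction
from unittest.mock import AsyncMock, MagicMock

mock = MagicMock()
assert isinstance(mock.__aenter__, AsyncMock)
assert isinstance(mock.__aexit__, AsyncMock)
assert iscoroutinefunction(mock.__aenter__)
assert iscoroutinefunction(mock.__aexit__)
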
@@ -429,7 +428,6 @@ class TestMockingMagicMethods(unittest.TestCase):
self.assertEqual(dir(mock), ['foo'])
- @unittest.skipIf('PyPy' in sys.version, "This fails differently on pypy")
def test_bound_methods(self):
m = Mock()
diff --git a/Lib/unittest/test/testmock/testmock.py b/Lib/unittest/test/testmock/testmock.py
index 1cde45e9..ce674e71 100644
--- a/Lib/unittest/test/testmock/testmock.py
+++ b/Lib/unittest/test/testmock/testmock.py
@@ -3,6 +3,7 @@ import re
import sys
import tempfile
+from test.support import ALWAYS_EQ
import unittest
from unittest.test.testmock.support import is_instance
from unittest import mock
@@ -262,7 +263,7 @@ class MockTest(unittest.TestCase):
ret_val = mock(sentinel.Arg)
self.assertTrue(mock.called, "called not set")
- self.assertEqual(mock.call_count, 1, "call_count incoreect")
+ self.assertEqual(mock.call_count, 1, "call_count incorrect")
self.assertEqual(mock.call_args, ((sentinel.Arg,), {}),
"call_args not set")
self.assertEqual(mock.call_args.args, (sentinel.Arg,),
@@ -322,6 +323,8 @@ class MockTest(unittest.TestCase):
self.assertFalse(mm != mock.ANY)
self.assertTrue(mock.ANY == mm)
self.assertFalse(mock.ANY != mm)
+ self.assertTrue(mm == ALWAYS_EQ)
+ self.assertFalse(mm != ALWAYS_EQ)
call1 = mock.call(mock.MagicMock())
call2 = mock.call(mock.ANY)
@@ -330,6 +333,11 @@ class MockTest(unittest.TestCase):
self.assertTrue(call2 == call1)
self.assertFalse(call2 != call1)
+ self.assertTrue(call1 == ALWAYS_EQ)
+ self.assertFalse(call1 != ALWAYS_EQ)
+ self.assertFalse(call1 == 1)
+ self.assertTrue(call1 != 1)
+
def test_assert_called_with(self):
mock = Mock()
@@ -707,6 +715,57 @@ class MockTest(unittest.TestCase):
self.assertRaises(StopIteration, mock.method)
+ def test_magic_method_wraps_dict(self):
+ # bpo-25597: MagicMock with wrap doesn't call wrapped object's
+ # method for magic methods with default values.
+ data = {'foo': 'bar'}
+
+ wrapped_dict = MagicMock(wraps=data)
+ self.assertEqual(wrapped_dict.get('foo'), 'bar')
+ # Accessing key gives a MagicMock
+ self.assertIsInstance(wrapped_dict['foo'], MagicMock)
+ # __contains__ method has a default value of False
+ self.assertFalse('foo' in wrapped_dict)
+
+ # return_value is non-sentinel and takes precedence over wrapped value.
+ wrapped_dict.get.return_value = 'return_value'
+ self.assertEqual(wrapped_dict.get('foo'), 'return_value')
+
+ # return_value is sentinel and hence wrapped value is returned.
+ wrapped_dict.get.return_value = sentinel.DEFAULT
+ self.assertEqual(wrapped_dict.get('foo'), 'bar')
+
+ self.assertEqual(wrapped_dict.get('baz'), None)
+ self.assertIsInstance(wrapped_dict['baz'], MagicMock)
+ self.assertFalse('bar' in wrapped_dict)
+
+ data['baz'] = 'spam'
+ self.assertEqual(wrapped_dict.get('baz'), 'spam')
+ self.assertIsInstance(wrapped_dict['baz'], MagicMock)
+ self.assertFalse('bar' in wrapped_dict)
+
+ del data['baz']
+ self.assertEqual(wrapped_dict.get('baz'), None)
+
+
+ def test_magic_method_wraps_class(self):
+
+ class Foo:
+
+ def __getitem__(self, index):
+ return index
+
+ def __custom_method__(self):
+ return "foo"
+
+
+ klass = MagicMock(wraps=Foo)
+ obj = klass()
+ self.assertEqual(obj.__getitem__(2), 2)
+ self.assertEqual(obj[2], 2)
+ self.assertEqual(obj.__custom_method__(), "foo")
+
+
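
A compact sketch of the fallthrough rule the two tests above exercise: an explicit return_value wins, and resetting it to the DEFAULT sentinel restores delegation to the wrapped object (behaviour as of this patch):

from unittest.mock import DEFAULT, MagicMock

data = {'answer': 42}
proxy = MagicMock(wraps=data)

assert proxy.get('answer') == 42            # delegates to dict.get
proxy.get.return_value = 'stubbed'
assert proxy.get('answer') == 'stubbed'     # explicit return_value wins
proxy.get.return_value = DEFAULT
assert proxy.get('answer') == 42            # DEFAULT restores delegation
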
def test_exceptional_side_effect(self):
mock = Mock(side_effect=AttributeError)
self.assertRaises(AttributeError, mock)
@@ -1628,11 +1687,23 @@ class MockTest(unittest.TestCase):
self.assertNotEqual(m.side_effect, None)
def test_reset_sideeffect(self):
- m = Mock(return_value=10, side_effect=[2,3])
+ m = Mock(return_value=10, side_effect=[2, 3])
m.reset_mock(side_effect=True)
self.assertEqual(m.return_value, 10)
self.assertEqual(m.side_effect, None)
+ def test_reset_return_with_children(self):
+ m = MagicMock(f=MagicMock(return_value=1))
+ self.assertEqual(m.f(), 1)
+ m.reset_mock(return_value=True)
+ self.assertNotEqual(m.f(), 1)
+
+ def test_reset_return_with_children_side_effect(self):
+ m = MagicMock(f=MagicMock(side_effect=[2, 3]))
+ self.assertNotEqual(m.f.side_effect, None)
+ m.reset_mock(side_effect=True)
+ self.assertEqual(m.f.side_effect, None)
+
def test_mock_add_spec(self):
class _One(object):
one = 1
@@ -1801,6 +1872,11 @@ class MockTest(unittest.TestCase):
with self.assertRaises(StopIteration):
next(f1)
+ def test_mock_open_next_with_readline_with_return_value(self):
+ mopen = mock.mock_open(read_data='foo\nbarn')
+ mopen.return_value.readline.return_value = 'abc'
+ self.assertEqual('abc', next(mopen()))
+
def test_mock_open_write(self):
# Test exception in file writing write()
mock_namedtemp = mock.mock_open(mock.MagicMock(name='JLV'))
diff --git a/Lib/unittest/test/testmock/testpatch.py b/Lib/unittest/test/testmock/testpatch.py
index e065a2c3..d8c1515f 100644
--- a/Lib/unittest/test/testmock/testpatch.py
+++ b/Lib/unittest/test/testmock/testpatch.py
@@ -4,6 +4,7 @@
import os
import sys
+from collections import OrderedDict
import unittest
from unittest.test.testmock import support
@@ -769,6 +770,14 @@ class PatchTest(unittest.TestCase):
self.assertEqual(d, original)
+ def test_patch_dict_stop_without_start(self):
+ d = {'foo': 'bar'}
+ original = d.copy()
+ patcher = patch.dict(d, [('spam', 'eggs')], clear=True)
+ self.assertFalse(patcher.stop())
+ self.assertEqual(d, original)
+
+
def test_patch_dict_class_decorator(self):
this = self
d = {'spam': 'eggs'}
@@ -1807,6 +1816,56 @@ class PatchTest(unittest.TestCase):
self.assertEqual(stopped, ["three", "two", "one"])
+ def test_patch_dict_stopall(self):
+ dic1 = {}
+ dic2 = {1: 'a'}
+ dic3 = {1: 'A', 2: 'B'}
+ origdic1 = dic1.copy()
+ origdic2 = dic2.copy()
+ origdic3 = dic3.copy()
+ patch.dict(dic1, {1: 'I', 2: 'II'}).start()
+ patch.dict(dic2, {2: 'b'}).start()
+
+ @patch.dict(dic3)
+ def patched():
+ del dic3[1]
+
+ patched()
+ self.assertNotEqual(dic1, origdic1)
+ self.assertNotEqual(dic2, origdic2)
+ self.assertEqual(dic3, origdic3)
+
+ patch.stopall()
+
+ self.assertEqual(dic1, origdic1)
+ self.assertEqual(dic2, origdic2)
+ self.assertEqual(dic3, origdic3)
+
+
+ def test_patch_and_patch_dict_stopall(self):
+ original_unlink = os.unlink
+ original_chdir = os.chdir
+ dic1 = {}
+ dic2 = {1: 'A', 2: 'B'}
+ origdic1 = dic1.copy()
+ origdic2 = dic2.copy()
+
+ patch('os.unlink', something).start()
+ patch('os.chdir', something_else).start()
+ patch.dict(dic1, {1: 'I', 2: 'II'}).start()
+ patch.dict(dic2).start()
+ del dic2[1]
+
+ self.assertIsNot(os.unlink, original_unlink)
+ self.assertIsNot(os.chdir, original_chdir)
+ self.assertNotEqual(dic1, origdic1)
+ self.assertNotEqual(dic2, origdic2)
+ patch.stopall()
+ self.assertIs(os.unlink, original_unlink)
+ self.assertIs(os.chdir, original_chdir)
+ self.assertEqual(dic1, origdic1)
+ self.assertEqual(dic2, origdic2)
+
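
A minimal sketch of the behaviour these new tests pin down: with this change, dictionary patchers started via start() are also undone by a single patch.stopall():

from unittest.mock import patch

settings = {'debug': False}
patch.dict(settings, {'debug': True}).start()
assert settings['debug'] is True

patch.stopall()       # with this change, also restores started dict patchers
assert settings['debug'] is False
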
def test_special_attrs(self):
def foo(x=0):
@@ -1834,6 +1893,25 @@ class PatchTest(unittest.TestCase):
self.assertEqual(foo(), 1)
self.assertEqual(foo(), 0)
+ def test_patch_orderdict(self):
+ foo = OrderedDict()
+ foo['a'] = object()
+ foo['b'] = 'python'
+
+ original = foo.copy()
+ update_values = list(zip('cdefghijklmnopqrstuvwxyz', range(26)))
+ patched_values = list(foo.items()) + update_values
+
+ with patch.dict(foo, OrderedDict(update_values)):
+ self.assertEqual(list(foo.items()), patched_values)
+
+ self.assertEqual(foo, original)
+
+ with patch.dict(foo, update_values):
+ self.assertEqual(list(foo.items()), patched_values)
+
+ self.assertEqual(foo, original)
+
def test_dotted_but_module_not_loaded(self):
# This exercises the AttributeError branch of _dot_lookup.
diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py
index e2b6f133..ea897c30 100644
--- a/Lib/urllib/parse.py
+++ b/Lib/urllib/parse.py
@@ -29,6 +29,7 @@ test_urlparse.py provides a good indicator of parsing behavior.
import re
import sys
+import types
import collections
import warnings
@@ -176,6 +177,8 @@ class _NetlocResultMixinBase(object):
raise ValueError("Port out of range 0-65535")
return port
+ __class_getitem__ = classmethod(types.GenericAlias)
+
class _NetlocResultMixinStr(_NetlocResultMixinBase, _ResultMixinStr):
__slots__ = ()
@@ -366,9 +369,23 @@ del _fix_result_transcoding
def urlparse(url, scheme='', allow_fragments=True):
"""Parse a URL into 6 components:
<scheme>://<netloc>/<path>;<params>?<query>#<fragment>
- Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
- Note that we don't break the components up in smaller bits
- (e.g. netloc is a single string) and we don't expand % escapes."""
+
+ The result is a named 6-tuple with fields corresponding to the
+ above. It is either a ParseResult or ParseResultBytes object,
+ depending on the type of the url parameter.
+
+ The username, password, hostname, and port sub-components of netloc
+ can also be accessed as attributes of the returned object.
+
+ The scheme argument provides the default value of the scheme
+ component when no scheme is found in url.
+
+ If allow_fragments is False, no attempt is made to separate the
+ fragment component from the previous component, which can be either
+ path or query.
+
+ Note that % escapes are not expanded.
+ """
url, scheme, _coerce_result = _coerce_args(url, scheme)
splitresult = urlsplit(url, scheme, allow_fragments)
scheme, netloc, url, query, fragment = splitresult
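
A usage example matching the expanded docstring above:

from urllib.parse import urlparse

parts = urlparse('http://user:pwd@example.com:8042/over/there;p=1?name=ferret#nose')
assert parts.scheme == 'http'
assert parts.netloc == 'user:pwd@example.com:8042'
assert (parts.path, parts.params) == ('/over/there', 'p=1')
assert (parts.query, parts.fragment) == ('name=ferret', 'nose')
assert parts.hostname == 'example.com' and parts.port == 8042
assert urlparse('path#frag', allow_fragments=False).fragment == ''
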
@@ -417,9 +434,24 @@ def _checknetloc(netloc):
def urlsplit(url, scheme='', allow_fragments=True):
"""Parse a URL into 5 components:
<scheme>://<netloc>/<path>?<query>#<fragment>
- Return a 5-tuple: (scheme, netloc, path, query, fragment).
- Note that we don't break the components up in smaller bits
- (e.g. netloc is a single string) and we don't expand % escapes."""
+
+ The result is a named 5-tuple with fields corresponding to the
+ above. It is either a SplitResult or SplitResultBytes object,
+ depending on the type of the url parameter.
+
+ The username, password, hostname, and port sub-components of netloc
+ can also be accessed as attributes of the returned object.
+
+ The scheme argument provides the default value of the scheme
+ component when no scheme is found in url.
+
+ If allow_fragments is False, no attempt is made to separate the
+ fragment component from the previous component, which can be either
+ path or query.
+
+ Note that % escapes are not expanded.
+ """
+
url, scheme, _coerce_result = _coerce_args(url, scheme)
allow_fragments = bool(allow_fragments)
key = url, scheme, allow_fragments, type(url), type(scheme)
@@ -431,31 +463,11 @@ def urlsplit(url, scheme='', allow_fragments=True):
netloc = query = fragment = ''
i = url.find(':')
if i > 0:
- if url[:i] == 'http': # optimize the common case
- url = url[i+1:]
- if url[:2] == '//':
- netloc, url = _splitnetloc(url, 2)
- if (('[' in netloc and ']' not in netloc) or
- (']' in netloc and '[' not in netloc)):
- raise ValueError("Invalid IPv6 URL")
- if allow_fragments and '#' in url:
- url, fragment = url.split('#', 1)
- if '?' in url:
- url, query = url.split('?', 1)
- _checknetloc(netloc)
- v = SplitResult('http', netloc, url, query, fragment)
- _parse_cache[key] = v
- return _coerce_result(v)
for c in url[:i]:
if c not in scheme_chars:
break
else:
- # make sure "url" is not actually a port number (in which case
- # "scheme" is really part of the path)
- rest = url[i+1:]
- if not rest or any(c not in '0123456789' for c in rest):
- # not a port number
- scheme, url = url[:i].lower(), rest
+ scheme, url = url[:i].lower(), url[i+1:]
if url[:2] == '//':
netloc, url = _splitnetloc(url, 2)
@@ -631,6 +643,8 @@ def unquote(string, encoding='utf-8', errors='replace'):
unquote('abc%20def') -> 'abc def'.
"""
+ if isinstance(string, bytes):
+ return unquote_to_bytes(string).decode(encoding, errors)
if '%' not in string:
string.split
return string
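
With the bytes branch added above, unquote() now accepts bytes input directly and returns a decoded str:

from urllib.parse import unquote

assert unquote('abc%20def') == 'abc def'
assert unquote(b'abc%20def') == 'abc def'    # previously raised TypeError
assert unquote(b'%e2%82%ac', encoding='utf-8') == '\u20ac'
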
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index e4407388..2a3d7155 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -163,18 +163,10 @@ def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
The *cadefault* parameter is ignored.
- This function always returns an object which can work as a context
- manager and has methods such as
- * geturl() - return the URL of the resource retrieved, commonly used to
- determine if a redirect was followed
-
- * info() - return the meta-information of the page, such as headers, in the
- form of an email.message_from_string() instance (see Quick Reference to
- HTTP Headers)
-
- * getcode() - return the HTTP status code of the response. Raises URLError
- on errors.
+ This function always returns an object which can work as a
+ context manager and has the properties url, headers, and status.
+ See urllib.response.addinfourl for more detail on these properties.
For HTTP and HTTPS URLs, this function returns a http.client.HTTPResponse
object slightly modified. In addition to the three new methods above, the
@@ -1819,7 +1811,7 @@ class URLopener:
hdrs = fp.info()
fp.close()
return url2pathname(_splithost(url1)[1]), hdrs
- except OSError as msg:
+ except OSError:
pass
fp = self.open(url, data)
try:
diff --git a/Lib/urllib/response.py b/Lib/urllib/response.py
index 4778118d..5a2c3cc7 100644
--- a/Lib/urllib/response.py
+++ b/Lib/urllib/response.py
@@ -73,6 +73,10 @@ class addinfourl(addinfo):
self.url = url
self.code = code
+ @property
+ def status(self):
+ return self.code
+
def getcode(self):
return self.code
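
Example of the status property documented above (requires network access; the URL is a placeholder):

import urllib.request

with urllib.request.urlopen('http://example.com/') as resp:
    print(resp.status)                       # same value as resp.getcode()
    print(resp.url)
    print(resp.headers.get('Content-Type'))
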
diff --git a/Lib/uuid.py b/Lib/uuid.py
index 9540c21e..5ae0a3e5 100644
--- a/Lib/uuid.py
+++ b/Lib/uuid.py
@@ -61,6 +61,12 @@ else:
_AIX = _platform_system == 'AIX'
_LINUX = _platform_system == 'Linux'
+_MAC_DELIM = b':'
+_MAC_OMITS_LEADING_ZEROES = False
+if _AIX:
+ _MAC_DELIM = b'.'
+ _MAC_OMITS_LEADING_ZEROES = True
+
RESERVED_NCS, RFC_4122, RESERVED_MICROSOFT, RESERVED_FUTURE = [
'reserved for NCS compatibility', 'specified in RFC 4122',
'reserved for Microsoft compatibility', 'reserved for future definition']
@@ -349,24 +355,32 @@ class UUID:
if self.variant == RFC_4122:
return int((self.int >> 76) & 0xf)
-def _popen(command, *args):
- import os, shutil, subprocess
- executable = shutil.which(command)
- if executable is None:
- path = os.pathsep.join(('/sbin', '/usr/sbin'))
- executable = shutil.which(command, path=path)
+
+def _get_command_stdout(command, *args):
+ import io, os, shutil, subprocess
+
+ try:
+ path_dirs = os.environ.get('PATH', os.defpath).split(os.pathsep)
+ path_dirs.extend(['/sbin', '/usr/sbin'])
+ executable = shutil.which(command, path=os.pathsep.join(path_dirs))
if executable is None:
return None
- # LC_ALL=C to ensure English output, stderr=DEVNULL to prevent output
- # on stderr (Note: we don't have an example where the words we search
- # for are actually localized, but in theory some system could do so.)
- env = dict(os.environ)
- env['LC_ALL'] = 'C'
- proc = subprocess.Popen((executable,) + args,
- stdout=subprocess.PIPE,
- stderr=subprocess.DEVNULL,
- env=env)
- return proc
+ # LC_ALL=C to ensure English output, stderr=DEVNULL to prevent output
+ # on stderr (Note: we don't have an example where the words we search
+ # for are actually localized, but in theory some system could do so.)
+ env = dict(os.environ)
+ env['LC_ALL'] = 'C'
+ proc = subprocess.Popen((executable,) + args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ env=env)
+ if not proc:
+ return None
+ stdout, stderr = proc.communicate()
+ return io.BytesIO(stdout)
+ except (OSError, subprocess.SubprocessError):
+ return None
+
# For MAC (a.k.a. IEEE 802, or EUI-48) addresses, the second least significant
# bit of the first octet signifies whether the MAC address is universally (0)
@@ -386,40 +400,114 @@ def _popen(command, *args):
def _is_universal(mac):
return not (mac & (1 << 41))
-def _find_mac(command, args, hw_identifiers, get_index):
+
+def _find_mac_near_keyword(command, args, keywords, get_word_index):
+ """Searches a command's output for a MAC address near a keyword.
+
+ Each line of words in the output is case-insensitively searched for
+ any of the given keywords. Upon a match, get_word_index is invoked
+ to pick a word from the line, given the index of the match. For
+ example, lambda i: 0 would get the first word on the line, while
+ lambda i: i - 1 would get the word preceding the keyword.
+ """
+ stdout = _get_command_stdout(command, args)
+ if stdout is None:
+ return None
+
first_local_mac = None
- try:
- proc = _popen(command, *args.split())
- if not proc:
- return None
- with proc:
- for line in proc.stdout:
- words = line.lower().rstrip().split()
- for i in range(len(words)):
- if words[i] in hw_identifiers:
- try:
- word = words[get_index(i)]
- mac = int(word.replace(b':', b''), 16)
- if _is_universal(mac):
- return mac
- first_local_mac = first_local_mac or mac
- except (ValueError, IndexError):
- # Virtual interfaces, such as those provided by
- # VPNs, do not have a colon-delimited MAC address
- # as expected, but a 16-byte HWAddr separated by
- # dashes. These should be ignored in favor of a
- # real MAC address
- pass
- except OSError:
- pass
+ for line in stdout:
+ words = line.lower().rstrip().split()
+ for i in range(len(words)):
+ if words[i] in keywords:
+ try:
+ word = words[get_word_index(i)]
+ mac = int(word.replace(_MAC_DELIM, b''), 16)
+ except (ValueError, IndexError):
+ # Virtual interfaces, such as those provided by
+ # VPNs, do not have a colon-delimited MAC address
+ # as expected, but a 16-byte HWAddr separated by
+ # dashes. These should be ignored in favor of a
+ # real MAC address
+ pass
+ else:
+ if _is_universal(mac):
+ return mac
+ first_local_mac = first_local_mac or mac
return first_local_mac or None
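
An illustration of the keyword/word-index convention described in the docstring above, using a fabricated ifconfig-style line rather than real command output:

line = b'ether 52:54:00:9d:0e:67  txqueuelen 1000  (Ethernet)'
words = line.lower().rstrip().split()

i = words.index(b'ether')        # keyword position
word = words[i + 1]              # get_word_index=lambda i: i+1 takes the next word
mac = int(word.replace(b':', b''), 16)
assert mac == 0x5254009d0e67
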
+
+def _parse_mac(word):
+ # Accept 'HH:HH:HH:HH:HH:HH' MAC address (ex: '52:54:00:9d:0e:67'),
+ # but reject IPv6 address (ex: 'fe80::5054:ff:fe9' or '123:2:3:4:5:6:7:8').
+ #
+ # Virtual interfaces, such as those provided by VPNs, do not have a
+ # colon-delimited MAC address as expected, but a 16-byte HWAddr separated
+ # by dashes. These should be ignored in favor of a real MAC address
+ parts = word.split(_MAC_DELIM)
+ if len(parts) != 6:
+ return
+ if _MAC_OMITS_LEADING_ZEROES:
+ # (Only) on AIX the macaddr value given is not prefixed by 0, e.g.
+ # en0 1500 link#2 fa.bc.de.f7.62.4 110854824 0 160133733 0 0
+ # not
+ # en0 1500 link#2 fa.bc.de.f7.62.04 110854824 0 160133733 0 0
+ if not all(1 <= len(part) <= 2 for part in parts):
+ return
+ hexstr = b''.join(part.rjust(2, b'0') for part in parts)
+ else:
+ if not all(len(part) == 2 for part in parts):
+ return
+ hexstr = b''.join(parts)
+ try:
+ return int(hexstr, 16)
+ except ValueError:
+ return
+
+
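
A short sketch of the two acceptance paths in _parse_mac above: six two-character groups normally, and zero-padding of short groups only when _MAC_OMITS_LEADING_ZEROES is set (AIX, '.'-delimited):

parts = b'52:54:00:9d:0e:67'.split(b':')
assert len(parts) == 6 and all(len(p) == 2 for p in parts)
assert int(b''.join(parts), 16) == 0x5254009d0e67

# AIX-style value with leading zeroes omitted and '.' as the delimiter.
aix_parts = b'fa.bc.de.f7.62.4'.split(b'.')
hexstr = b''.join(p.rjust(2, b'0') for p in aix_parts)
assert int(hexstr, 16) == 0xfabcdef76204

# An IPv6-looking token does not split into six groups, so it is rejected.
assert len(b'fe80::5054:ff:fe9'.split(b':')) != 6
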
+def _find_mac_under_heading(command, args, heading):
+ """Looks for a MAC address under a heading in a command's output.
+
+ The first line of words in the output is searched for the given
+ heading. Words at the same word index as the heading in subsequent
+ lines are then examined to see if they look like MAC addresses.
+ """
+ stdout = _get_command_stdout(command, args)
+ if stdout is None:
+ return None
+
+ keywords = stdout.readline().rstrip().split()
+ try:
+ column_index = keywords.index(heading)
+ except ValueError:
+ return None
+
+ first_local_mac = None
+ for line in stdout:
+ words = line.rstrip().split()
+ try:
+ word = words[column_index]
+ except IndexError:
+ continue
+
+ mac = _parse_mac(word)
+ if mac is None:
+ continue
+ if _is_universal(mac):
+ return mac
+ if first_local_mac is None:
+ first_local_mac = mac
+
+ return first_local_mac
+
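
An illustration of the column lookup described in the docstring above, against fabricated netstat-style output (not real command output):

import io

stdout = io.BytesIO(
    b'Name  Mtu   Network     Address            Ipkts\n'
    b'en0   1500  link#2      52:54:00:9d:0e:67  110854824\n'
)
headings = stdout.readline().rstrip().split()
column = headings.index(b'Address')
word = stdout.readline().rstrip().split()[column]
assert word == b'52:54:00:9d:0e:67'
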
+
+# The following functions call external programs to 'get' a macaddr value to
+# be used as basis for an uuid
def _ifconfig_getnode():
"""Get the hardware address on Unix by running ifconfig."""
# This works on Linux ('' or '-a'), Tru64 ('-av'), but not all Unixes.
keywords = (b'hwaddr', b'ether', b'address:', b'lladdr')
for args in ('', '-a', '-av'):
- mac = _find_mac('ifconfig', args, keywords, lambda i: i+1)
+ mac = _find_mac_near_keyword('ifconfig', args, keywords, lambda i: i+1)
if mac:
return mac
return None
@@ -427,7 +515,7 @@ def _ifconfig_getnode():
def _ip_getnode():
"""Get the hardware address on Unix by running ip."""
# This works on Linux with iproute2.
- mac = _find_mac('ip', 'link', [b'link/ether'], lambda i: i+1)
+ mac = _find_mac_near_keyword('ip', 'link', [b'link/ether'], lambda i: i+1)
if mac:
return mac
return None
@@ -441,17 +529,17 @@ def _arp_getnode():
return None
# Try getting the MAC addr from arp based on our IP address (Solaris).
- mac = _find_mac('arp', '-an', [os.fsencode(ip_addr)], lambda i: -1)
+ mac = _find_mac_near_keyword('arp', '-an', [os.fsencode(ip_addr)], lambda i: -1)
if mac:
return mac
# This works on OpenBSD
- mac = _find_mac('arp', '-an', [os.fsencode(ip_addr)], lambda i: i+1)
+ mac = _find_mac_near_keyword('arp', '-an', [os.fsencode(ip_addr)], lambda i: i+1)
if mac:
return mac
# This works on Linux, FreeBSD and NetBSD
- mac = _find_mac('arp', '-an', [os.fsencode('(%s)' % ip_addr)],
+ mac = _find_mac_near_keyword('arp', '-an', [os.fsencode('(%s)' % ip_addr)],
lambda i: i+2)
# Return None instead of 0.
if mac:
@@ -461,210 +549,52 @@ def _arp_getnode():
def _lanscan_getnode():
"""Get the hardware address on Unix by running lanscan."""
# This might work on HP-UX.
- return _find_mac('lanscan', '-ai', [b'lan0'], lambda i: 0)
+ return _find_mac_near_keyword('lanscan', '-ai', [b'lan0'], lambda i: 0)
def _netstat_getnode():
"""Get the hardware address on Unix by running netstat."""
- # This might work on AIX, Tru64 UNIX.
- first_local_mac = None
- try:
- proc = _popen('netstat', '-ia')
- if not proc:
- return None
- with proc:
- words = proc.stdout.readline().rstrip().split()
- try:
- i = words.index(b'Address')
- except ValueError:
- return None
- for line in proc.stdout:
- try:
- words = line.rstrip().split()
- word = words[i]
- if len(word) == 17 and word.count(b':') == 5:
- mac = int(word.replace(b':', b''), 16)
- if _is_universal(mac):
- return mac
- first_local_mac = first_local_mac or mac
- except (ValueError, IndexError):
- pass
- except OSError:
- pass
- return first_local_mac or None
+ # This works on AIX and might work on Tru64 UNIX.
+ return _find_mac_under_heading('netstat', '-ian', b'Address')
def _ipconfig_getnode():
- """Get the hardware address on Windows by running ipconfig.exe."""
- import os, re, subprocess
- first_local_mac = None
- dirs = ['', r'c:\windows\system32', r'c:\winnt\system32']
- try:
- import ctypes
- buffer = ctypes.create_string_buffer(300)
- ctypes.windll.kernel32.GetSystemDirectoryA(buffer, 300)
- dirs.insert(0, buffer.value.decode('mbcs'))
- except:
- pass
- for dir in dirs:
- try:
- proc = subprocess.Popen([os.path.join(dir, 'ipconfig'), '/all'],
- stdout=subprocess.PIPE,
- encoding="oem")
- except OSError:
- continue
- with proc:
- for line in proc.stdout:
- value = line.split(':')[-1].strip().lower()
- if re.fullmatch('(?:[0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]', value):
- mac = int(value.replace('-', ''), 16)
- if _is_universal(mac):
- return mac
- first_local_mac = first_local_mac or mac
- return first_local_mac or None
+ """[DEPRECATED] Get the hardware address on Windows."""
+ # bpo-40501: UuidCreateSequential() is now the only supported approach
+ return _windll_getnode()
def _netbios_getnode():
- """Get the hardware address on Windows using NetBIOS calls.
- See http://support.microsoft.com/kb/118623 for details."""
- import win32wnet, netbios
- first_local_mac = None
- ncb = netbios.NCB()
- ncb.Command = netbios.NCBENUM
- ncb.Buffer = adapters = netbios.LANA_ENUM()
- adapters._pack()
- if win32wnet.Netbios(ncb) != 0:
- return None
- adapters._unpack()
- for i in range(adapters.length):
- ncb.Reset()
- ncb.Command = netbios.NCBRESET
- ncb.Lana_num = ord(adapters.lana[i])
- if win32wnet.Netbios(ncb) != 0:
- continue
- ncb.Reset()
- ncb.Command = netbios.NCBASTAT
- ncb.Lana_num = ord(adapters.lana[i])
- ncb.Callname = '*'.ljust(16)
- ncb.Buffer = status = netbios.ADAPTER_STATUS()
- if win32wnet.Netbios(ncb) != 0:
- continue
- status._unpack()
- bytes = status.adapter_address[:6]
- if len(bytes) != 6:
- continue
- mac = int.from_bytes(bytes, 'big')
- if _is_universal(mac):
- return mac
- first_local_mac = first_local_mac or mac
- return first_local_mac or None
+ """[DEPRECATED] Get the hardware address on Windows."""
+ # bpo-40501: UuidCreateSequential() is now the only supported approach
+ return _windll_getnode()
-_generate_time_safe = _UuidCreate = None
-_has_uuid_generate_time_safe = None
-
# Import optional C extension at toplevel, to help disabling it when testing
try:
import _uuid
+ _generate_time_safe = getattr(_uuid, "generate_time_safe", None)
+ _UuidCreate = getattr(_uuid, "UuidCreate", None)
+ _has_uuid_generate_time_safe = _uuid.has_uuid_generate_time_safe
except ImportError:
_uuid = None
+ _generate_time_safe = None
+ _UuidCreate = None
+ _has_uuid_generate_time_safe = None
def _load_system_functions():
- """
- Try to load platform-specific functions for generating uuids.
- """
- global _generate_time_safe, _UuidCreate, _has_uuid_generate_time_safe
-
- if _has_uuid_generate_time_safe is not None:
- return
-
- _has_uuid_generate_time_safe = False
-
- if sys.platform == "darwin" and int(os.uname().release.split('.')[0]) < 9:
- # The uuid_generate_* functions are broken on MacOS X 10.5, as noted
- # in issue #8621 the function generates the same sequence of values
- # in the parent process and all children created using fork (unless
- # those children use exec as well).
- #
- # Assume that the uuid_generate functions are broken from 10.5 onward,
- # the test can be adjusted when a later version is fixed.
- pass
- elif _uuid is not None:
- _generate_time_safe = _uuid.generate_time_safe
- _has_uuid_generate_time_safe = _uuid.has_uuid_generate_time_safe
- return
-
- try:
- # If we couldn't find an extension module, try ctypes to find
- # system routines for UUID generation.
- # Thanks to Thomas Heller for ctypes and for his help with its use here.
- import ctypes
- import ctypes.util
-
- # The uuid_generate_* routines are provided by libuuid on at least
- # Linux and FreeBSD, and provided by libc on Mac OS X.
- _libnames = ['uuid']
- if not sys.platform.startswith('win'):
- _libnames.append('c')
- for libname in _libnames:
- try:
- lib = ctypes.CDLL(ctypes.util.find_library(libname))
- except Exception: # pragma: nocover
- continue
- # Try to find the safe variety first.
- if hasattr(lib, 'uuid_generate_time_safe'):
- _uuid_generate_time_safe = lib.uuid_generate_time_safe
- # int uuid_generate_time_safe(uuid_t out);
- def _generate_time_safe():
- _buffer = ctypes.create_string_buffer(16)
- res = _uuid_generate_time_safe(_buffer)
- return bytes(_buffer.raw), res
- _has_uuid_generate_time_safe = True
- break
-
- elif hasattr(lib, 'uuid_generate_time'): # pragma: nocover
- _uuid_generate_time = lib.uuid_generate_time
- # void uuid_generate_time(uuid_t out);
- _uuid_generate_time.restype = None
- def _generate_time_safe():
- _buffer = ctypes.create_string_buffer(16)
- _uuid_generate_time(_buffer)
- return bytes(_buffer.raw), None
- break
-
- # On Windows prior to 2000, UuidCreate gives a UUID containing the
- # hardware address. On Windows 2000 and later, UuidCreate makes a
- # random UUID and UuidCreateSequential gives a UUID containing the
- # hardware address. These routines are provided by the RPC runtime.
- # NOTE: at least on Tim's WinXP Pro SP2 desktop box, while the last
- # 6 bytes returned by UuidCreateSequential are fixed, they don't appear
- # to bear any relationship to the MAC address of any network device
- # on the box.
- try:
- lib = ctypes.windll.rpcrt4
- except:
- lib = None
- _UuidCreate = getattr(lib, 'UuidCreateSequential',
- getattr(lib, 'UuidCreate', None))
-
- except Exception as exc:
- import warnings
- warnings.warn(f"Could not find fallback ctypes uuid functions: {exc}",
- ImportWarning)
+ """[DEPRECATED] Platform-specific functions loaded at import time"""
def _unix_getnode():
- """Get the hardware address on Unix using the _uuid extension module
- or ctypes."""
- _load_system_functions()
- uuid_time, _ = _generate_time_safe()
- return UUID(bytes=uuid_time).node
+ """Get the hardware address on Unix using the _uuid extension module."""
+ if _generate_time_safe:
+ uuid_time, _ = _generate_time_safe()
+ return UUID(bytes=uuid_time).node
def _windll_getnode():
- """Get the hardware address on Windows using ctypes."""
- import ctypes
- _load_system_functions()
- _buffer = ctypes.create_string_buffer(16)
- if _UuidCreate(_buffer) == 0:
- return UUID(bytes=bytes_(_buffer.raw)).node
+ """Get the hardware address on Windows using the _uuid extension module."""
+ if _UuidCreate:
+ uuid_bytes = _UuidCreate()
+ return UUID(bytes_le=uuid_bytes).node
def _random_getnode():
"""Get a random node ID."""
@@ -693,7 +623,8 @@ if _LINUX:
elif sys.platform == 'darwin':
_OS_GETTERS = [_ifconfig_getnode, _arp_getnode, _netstat_getnode]
elif sys.platform == 'win32':
- _OS_GETTERS = [_netbios_getnode, _ipconfig_getnode]
+ # bpo-40201: _windll_getnode will always succeed, so these are not needed
+ _OS_GETTERS = []
elif _AIX:
_OS_GETTERS = [_netstat_getnode]
else:
@@ -708,7 +639,7 @@ else:
_node = None
-def getnode(*, getters=None):
+def getnode():
"""Get the hardware address as a 48-bit positive integer.
The first time this runs, it may launch a separate program, which could
@@ -740,7 +671,6 @@ def uuid1(node=None, clock_seq=None):
# When the system provides a version-1 UUID generator, use it (but don't
# use UuidCreate here because its UUIDs don't conform to RFC 4122).
- _load_system_functions()
if _generate_time_safe is not None and node is clock_seq is None:
uuid_time, safely_generated = _generate_time_safe()
try:
@@ -774,8 +704,11 @@ def uuid1(node=None, clock_seq=None):
def uuid3(namespace, name):
"""Generate a UUID from the MD5 hash of a namespace UUID and a name."""
from hashlib import md5
- hash = md5(namespace.bytes + bytes(name, "utf-8")).digest()
- return UUID(bytes=hash[:16], version=3)
+ digest = md5(
+ namespace.bytes + bytes(name, "utf-8"),
+ usedforsecurity=False
+ ).digest()
+ return UUID(bytes=digest[:16], version=3)
def uuid4():
"""Generate a random UUID."""
diff --git a/Lib/venv/__init__.py b/Lib/venv/__init__.py
index caa7285b..8009deb3 100644
--- a/Lib/venv/__init__.py
+++ b/Lib/venv/__init__.py
@@ -12,6 +12,8 @@ import sys
import sysconfig
import types
+
+CORE_VENV_DEPS = ('pip', 'setuptools')
logger = logging.getLogger(__name__)
@@ -38,16 +40,21 @@ class EnvBuilder:
:param with_pip: If True, ensure pip is installed in the virtual
environment
:param prompt: Alternative terminal prefix for the environment.
+ :param upgrade_deps: Update the base venv modules to the latest on PyPI
"""
def __init__(self, system_site_packages=False, clear=False,
- symlinks=False, upgrade=False, with_pip=False, prompt=None):
+ symlinks=False, upgrade=False, with_pip=False, prompt=None,
+ upgrade_deps=False):
self.system_site_packages = system_site_packages
self.clear = clear
self.symlinks = symlinks
self.upgrade = upgrade
self.with_pip = with_pip
+ if prompt == '.': # see bpo-38901
+ prompt = os.path.basename(os.getcwd())
self.prompt = prompt
+ self.upgrade_deps = upgrade_deps
def create(self, env_dir):
"""
@@ -74,6 +81,8 @@ class EnvBuilder:
# restore it and rewrite the configuration
self.system_site_packages = True
self.create_configuration(context)
+ if self.upgrade_deps:
+ self.upgrade_dependencies(context)
def clear_directory(self, path):
for fn in os.listdir(path):
@@ -234,7 +243,7 @@ class EnvBuilder:
copier(context.executable, path)
if not os.path.islink(path):
os.chmod(path, 0o755)
- for suffix in ('python', 'python3'):
+ for suffix in ('python', 'python3', f'python3.{sys.version_info[1]}'):
path = os.path.join(binpath, suffix)
if not os.path.exists(path):
# Issue 18807: make copies if
@@ -381,13 +390,25 @@ class EnvBuilder:
f.write(data)
shutil.copymode(srcfile, dstfile)
+ def upgrade_dependencies(self, context):
+ logger.debug(
+ f'Upgrading {CORE_VENV_DEPS} packages in {context.bin_path}'
+ )
+ if sys.platform == 'win32':
+ python_exe = os.path.join(context.bin_path, 'python.exe')
+ else:
+ python_exe = os.path.join(context.bin_path, 'python')
+ cmd = [python_exe, '-m', 'pip', 'install', '--upgrade']
+ cmd.extend(CORE_VENV_DEPS)
+ subprocess.check_call(cmd)
+
def create(env_dir, system_site_packages=False, clear=False,
- symlinks=False, with_pip=False, prompt=None):
+ symlinks=False, with_pip=False, prompt=None, upgrade_deps=False):
"""Create a virtual environment in a directory."""
builder = EnvBuilder(system_site_packages=system_site_packages,
clear=clear, symlinks=symlinks, with_pip=with_pip,
- prompt=prompt)
+ prompt=prompt, upgrade_deps=upgrade_deps)
builder.create(env_dir)
def main(args=None):
@@ -450,6 +471,11 @@ def main(args=None):
parser.add_argument('--prompt',
help='Provides an alternative prompt prefix for '
'this environment.')
+ parser.add_argument('--upgrade-deps', default=False, action='store_true',
+ dest='upgrade_deps',
+ help='Upgrade core dependencies: {} to the latest '
+ 'version in PyPI'.format(
+ ' '.join(CORE_VENV_DEPS)))
options = parser.parse_args(args)
if options.upgrade and options.clear:
raise ValueError('you cannot supply --upgrade and --clear together.')
@@ -458,7 +484,8 @@ def main(args=None):
symlinks=options.symlinks,
upgrade=options.upgrade,
with_pip=options.with_pip,
- prompt=options.prompt)
+ prompt=options.prompt,
+ upgrade_deps=options.upgrade_deps)
for d in options.dirs:
builder.create(d)
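
Example use of the new upgrade_deps option (needs network access so pip can reach PyPI); the directory name is a placeholder, and the call is equivalent to running python -m venv --upgrade-deps on that directory:

import venv

# Create the environment and immediately upgrade pip/setuptools from PyPI.
venv.create('demo-env', with_pip=True, upgrade_deps=True)

# prompt='.' now uses the current directory's name as the prompt (bpo-38901).
builder = venv.EnvBuilder(prompt='.')
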
diff --git a/Lib/venv/scripts/common/activate b/Lib/venv/scripts/common/activate
index 5e7ac174..45af3536 100644
--- a/Lib/venv/scripts/common/activate
+++ b/Lib/venv/scripts/common/activate
@@ -18,7 +18,7 @@ deactivate () {
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
- hash -r
+ hash -r 2> /dev/null
fi
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
@@ -54,17 +54,7 @@ fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
- if [ "x__VENV_PROMPT__" != x ] ; then
- PS1="__VENV_PROMPT__${PS1:-}"
- else
- if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
- # special case for Aspen magic directories
- # see https://aspen.io/
- PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
- else
- PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
- fi
- fi
+ PS1="__VENV_PROMPT__${PS1:-}"
export PS1
fi
@@ -72,5 +62,5 @@ fi
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
- hash -r
+ hash -r 2> /dev/null
fi
diff --git a/Lib/venv/scripts/posix/activate.csh b/Lib/venv/scripts/posix/activate.csh
index 0f39ee8c..68a0dc74 100644
--- a/Lib/venv/scripts/posix/activate.csh
+++ b/Lib/venv/scripts/posix/activate.csh
@@ -17,19 +17,7 @@ setenv PATH "$VIRTUAL_ENV/__VENV_BIN_NAME__:$PATH"
set _OLD_VIRTUAL_PROMPT="$prompt"
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
- if ("__VENV_NAME__" != "") then
- set env_name = "__VENV_NAME__"
- else
- if (`basename "VIRTUAL_ENV"` == "__") then
- # special case for Aspen magic directories
- # see https://aspen.io/
- set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
- else
- set env_name = `basename "$VIRTUAL_ENV"`
- endif
- endif
- set prompt = "[$env_name] $prompt"
- unset env_name
+ set prompt = "__VENV_PROMPT__$prompt"
endif
alias pydoc python -m pydoc
diff --git a/Lib/venv/scripts/posix/activate.fish b/Lib/venv/scripts/posix/activate.fish
index 03e893f8..54b9ea56 100644
--- a/Lib/venv/scripts/posix/activate.fish
+++ b/Lib/venv/scripts/posix/activate.fish
@@ -1,7 +1,7 @@
-# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
-# you cannot run it directly
+# This file must be used with "source /bin/activate.fish" *from fish*
+# (https://fishshell.com/); you cannot run it directly.
-function deactivate -d "Exit virtualenv and return to normal shell environment"
+function deactivate -d "Exit virtual environment and return to normal shell environment"
# reset old environment variables
if test -n "$_OLD_VIRTUAL_PATH"
set -gx PATH $_OLD_VIRTUAL_PATH
@@ -21,12 +21,12 @@ function deactivate -d "Exit virtualenv and return to normal shell environment"
set -e VIRTUAL_ENV
if test "$argv[1]" != "nondestructive"
- # Self destruct!
+ # Self-destruct!
functions -e deactivate
end
end
-# unset irrelevant variables
+# Unset irrelevant variables.
deactivate nondestructive
set -gx VIRTUAL_ENV "__VENV_DIR__"
@@ -34,7 +34,7 @@ set -gx VIRTUAL_ENV "__VENV_DIR__"
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/__VENV_BIN_NAME__" $PATH
-# unset PYTHONHOME if set
+# Unset PYTHONHOME if set.
if set -q PYTHONHOME
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
set -e PYTHONHOME
@@ -43,31 +43,20 @@ end
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
# fish uses a function instead of an env var to generate the prompt.
- # save the current fish_prompt function as the function _old_fish_prompt
+ # Save the current fish_prompt function as the function _old_fish_prompt.
functions -c fish_prompt _old_fish_prompt
- # with the original prompt function renamed, we can override with our own.
+ # With the original prompt function renamed, we can override with our own.
function fish_prompt
- # Save the return status of the last command
+ # Save the return status of the last command.
set -l old_status $status
- # Prompt override?
- if test -n "__VENV_PROMPT__"
- printf "%s%s" "__VENV_PROMPT__" (set_color normal)
- else
- # ...Otherwise, prepend env
- set -l _checkbase (basename "$VIRTUAL_ENV")
- if test $_checkbase = "__"
- # special case for Aspen magic directories
- # see https://aspen.io/
- printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal)
- else
- printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
- end
- end
+ # Output the venv prompt; color taken from the blue of the Python logo.
+ printf "%s%s%s" (set_color 4B8BBE) "__VENV_PROMPT__" (set_color normal)
# Restore the return status of the previous command.
echo "exit $old_status" | .
+ # Output the original/"old" prompt.
_old_fish_prompt
end
diff --git a/Lib/wave.py b/Lib/wave.py
index 823f091d..b7071198 100644
--- a/Lib/wave.py
+++ b/Lib/wave.py
@@ -71,9 +71,15 @@ The close() method is called automatically when the class instance
is destroyed.
"""
+from chunk import Chunk
+from collections import namedtuple
+import audioop
import builtins
+import struct
+import sys
+
-__all__ = ["open", "openfp", "Error", "Wave_read", "Wave_write"]
+__all__ = ["open", "Error", "Wave_read", "Wave_write"]
class Error(Exception):
pass
@@ -82,13 +88,6 @@ WAVE_FORMAT_PCM = 0x0001
_array_fmts = None, 'b', 'h', None, 'i'
-import audioop
-import struct
-import sys
-from chunk import Chunk
-from collections import namedtuple
-import warnings
-
_wave_params = namedtuple('_wave_params',
'nchannels sampwidth framerate nframes comptype compname')
@@ -512,8 +511,3 @@ def open(f, mode=None):
return Wave_write(f)
else:
raise Error("mode must be 'r', 'rb', 'w', or 'wb'")
-
-def openfp(f, mode=None):
- warnings.warn("wave.openfp is deprecated since Python 3.7. "
- "Use wave.open instead.", DeprecationWarning, stacklevel=2)
- return open(f, mode=mode)
diff --git a/Lib/weakref.py b/Lib/weakref.py
index 9d700894..5fa851dd 100644
--- a/Lib/weakref.py
+++ b/Lib/weakref.py
@@ -33,6 +33,9 @@ __all__ = ["ref", "proxy", "getweakrefcount", "getweakrefs",
"WeakSet", "WeakMethod", "finalize"]
+_collections_abc.Set.register(WeakSet)
+_collections_abc.MutableSet.register(WeakSet)
+
class WeakMethod(ref):
"""
A custom `weakref.ref` subclass which simulates a weak reference to
@@ -75,14 +78,14 @@ class WeakMethod(ref):
if not self._alive or not other._alive:
return self is other
return ref.__eq__(self, other) and self._func_ref == other._func_ref
- return False
+ return NotImplemented
def __ne__(self, other):
if isinstance(other, WeakMethod):
if not self._alive or not other._alive:
return self is not other
return ref.__ne__(self, other) or self._func_ref != other._func_ref
- return True
+ return NotImplemented
__hash__ = ref.__hash__
@@ -307,6 +310,25 @@ class WeakValueDictionary(_collections_abc.MutableMapping):
self._commit_removals()
return list(self.data.values())
+ def __ior__(self, other):
+ self.update(other)
+ return self
+
+ def __or__(self, other):
+ if isinstance(other, _collections_abc.Mapping):
+ c = self.copy()
+ c.update(other)
+ return c
+ return NotImplemented
+
+ def __ror__(self, other):
+ if isinstance(other, _collections_abc.Mapping):
+ c = self.__class__()
+ c.update(other)
+ c.update(self)
+ return c
+ return NotImplemented
+
class KeyedRef(ref):
"""Specialized reference that includes a key corresponding to the value.
@@ -485,6 +507,25 @@ class WeakKeyDictionary(_collections_abc.MutableMapping):
if len(kwargs):
self.update(kwargs)
+ def __ior__(self, other):
+ self.update(other)
+ return self
+
+ def __or__(self, other):
+ if isinstance(other, _collections_abc.Mapping):
+ c = self.copy()
+ c.update(other)
+ return c
+ return NotImplemented
+
+ def __ror__(self, other):
+ if isinstance(other, _collections_abc.Mapping):
+ c = self.__class__()
+ c.update(other)
+ c.update(self)
+ return c
+ return NotImplemented
+
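
A sketch of the new union operators on the weak dictionaries (dict | parity, Python 3.9+); a throwaway class is used because plain object() instances cannot be weakly referenced:

import weakref

class Obj:
    pass

a, b = Obj(), Obj()
wvd = weakref.WeakValueDictionary({'a': a})

merged = wvd | {'b': b}          # returns a new WeakValueDictionary
assert set(merged) == {'a', 'b'}

wvd |= {'b': b}                  # in-place update via __ior__
assert set(wvd) == {'a', 'b'}
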
class finalize:
"""Class for finalization of weakrefable objects
@@ -514,33 +555,7 @@ class finalize:
class _Info:
__slots__ = ("weakref", "func", "args", "kwargs", "atexit", "index")
- def __init__(*args, **kwargs):
- if len(args) >= 3:
- self, obj, func, *args = args
- elif not args:
- raise TypeError("descriptor '__init__' of 'finalize' object "
- "needs an argument")
- else:
- if 'func' not in kwargs:
- raise TypeError('finalize expected at least 2 positional '
- 'arguments, got %d' % (len(args)-1))
- func = kwargs.pop('func')
- if len(args) >= 2:
- self, obj, *args = args
- import warnings
- warnings.warn("Passing 'func' as keyword argument is deprecated",
- DeprecationWarning, stacklevel=2)
- else:
- if 'obj' not in kwargs:
- raise TypeError('finalize expected at least 2 positional '
- 'arguments, got %d' % (len(args)-1))
- obj = kwargs.pop('obj')
- self, *args = args
- import warnings
- warnings.warn("Passing 'obj' as keyword argument is deprecated",
- DeprecationWarning, stacklevel=2)
- args = tuple(args)
-
+ def __init__(self, obj, func, /, *args, **kwargs):
if not self._registered_with_atexit:
# We may register the exit function more than once because
# of a thread race, but that is harmless
@@ -556,7 +571,6 @@ class finalize:
info.index = next(self._index_iter)
self._registry[self] = info
finalize._dirty = True
- __init__.__text_signature__ = '($self, obj, func, /, *args, **kwargs)'
def __call__(self, _=None):
"""If alive then mark as dead and return func(*args, **kwargs);
diff --git a/Lib/xml/dom/expatbuilder.py b/Lib/xml/dom/expatbuilder.py
index 2bd835b0..199c22d0 100644
--- a/Lib/xml/dom/expatbuilder.py
+++ b/Lib/xml/dom/expatbuilder.py
@@ -204,11 +204,11 @@ class ExpatBuilder:
buffer = file.read(16*1024)
if not buffer:
break
- parser.Parse(buffer, 0)
+ parser.Parse(buffer, False)
if first_buffer and self.document.documentElement:
self._setup_subset(buffer)
first_buffer = False
- parser.Parse("", True)
+ parser.Parse(b"", True)
except ParseEscape:
pass
doc = self.document
@@ -637,7 +637,7 @@ class FragmentBuilder(ExpatBuilder):
nsattrs = self._getNSattrs() # get ns decls from node's ancestors
document = _FRAGMENT_BUILDER_TEMPLATE % (ident, subset, nsattrs)
try:
- parser.Parse(document, 1)
+ parser.Parse(document, True)
except:
self.reset()
raise
@@ -697,7 +697,7 @@ class FragmentBuilder(ExpatBuilder):
self.fragment = self.document.createDocumentFragment()
self.curNode = self.fragment
try:
- parser.Parse(self._source, 1)
+ parser.Parse(self._source, True)
finally:
self.curNode = old_cur_node
self.document = old_document
diff --git a/Lib/xml/dom/minidom.py b/Lib/xml/dom/minidom.py
index 464420b7..d09ef5e7 100644
--- a/Lib/xml/dom/minidom.py
+++ b/Lib/xml/dom/minidom.py
@@ -43,10 +43,11 @@ class Node(xml.dom.Node):
def __bool__(self):
return True
- def toxml(self, encoding=None):
- return self.toprettyxml("", "", encoding)
+ def toxml(self, encoding=None, standalone=None):
+ return self.toprettyxml("", "", encoding, standalone)
- def toprettyxml(self, indent="\t", newl="\n", encoding=None):
+ def toprettyxml(self, indent="\t", newl="\n", encoding=None,
+ standalone=None):
if encoding is None:
writer = io.StringIO()
else:
@@ -56,7 +57,7 @@ class Node(xml.dom.Node):
newline='\n')
if self.nodeType == Node.DOCUMENT_NODE:
# Can pass encoding only to document, to put it into XML header
- self.writexml(writer, "", indent, newl, encoding)
+ self.writexml(writer, "", indent, newl, encoding, standalone)
else:
self.writexml(writer, "", indent, newl)
if encoding is None:
@@ -718,6 +719,14 @@ class Element(Node):
Node.unlink(self)
def getAttribute(self, attname):
+ """Returns the value of the specified attribute.
+
+ Returns the value of the element's attribute named attname as
+ a string. An empty string is returned if the element does not
+ have such an attribute. Note that an empty string may also be
+ returned as an explicitly given attribute value, use the
+ hasAttribute method to distinguish these two cases.
+ """
if self._attrs is None:
return ""
try:
@@ -828,6 +837,11 @@ class Element(Node):
removeAttributeNodeNS = removeAttributeNode
def hasAttribute(self, name):
+ """Checks whether the element has an attribute with the specified name.
+
+ Returns True if the element has an attribute with the specified name.
+ Otherwise, returns False.
+ """
if self._attrs is None:
return False
return name in self._attrs
@@ -838,6 +852,11 @@ class Element(Node):
return (namespaceURI, localName) in self._attrsNS
def getElementsByTagName(self, name):
+ """Returns all descendant elements with the given tag name.
+
+ Returns the list of all descendant elements (not direct children
+ only) with the specified tag name.
+ """
return _get_elements_by_tagName_helper(self, name, NodeList())
def getElementsByTagNameNS(self, namespaceURI, localName):
@@ -848,6 +867,11 @@ class Element(Node):
return "" % (self.tagName, id(self))
def writexml(self, writer, indent="", addindent="", newl=""):
+ """Write an XML element to a file-like object
+
+ Write the element to the writer object that must provide
+ a write method (e.g. a file or StringIO object).
+ """
# indent = current indentation
# addindent = indentation to add to higher levels
# newl = newline string
@@ -1787,12 +1811,17 @@ class Document(Node, DocumentLS):
raise xml.dom.NotSupportedErr("cannot import document type nodes")
return _clone_node(node, deep, self)
- def writexml(self, writer, indent="", addindent="", newl="", encoding=None):
- if encoding is None:
- writer.write('<?xml version="1.0" ?>'+newl)
- else:
- writer.write('<?xml version="1.0" encoding="%s"?>%s' % (
- encoding, newl))
+ def writexml(self, writer, indent="", addindent="", newl="", encoding=None,
+ standalone=None):
+ declarations = []
+
+ if encoding:
+ declarations.append(f'encoding="{encoding}"')
+ if standalone is not None:
+ declarations.append(f'standalone="{"yes" if standalone else "no"}"')
+
+ writer.write(f'<?xml version="1.0" {" ".join(declarations)}?>{newl}')
+
for node in self.childNodes:
node.writexml(writer, indent, addindent, newl)
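
A minimal sketch of how the new standalone argument surfaces through toxml()/toprettyxml(), again assuming a build with this change applied:

from xml.dom.minidom import parseString

doc = parseString("<root><child/></root>")
assert doc.toxml(encoding="utf-8", standalone=True).startswith(
    b'<?xml version="1.0" encoding="utf-8" standalone="yes"?>'
)
assert doc.toxml().startswith('<?xml version="1.0" ?>')  # default unchanged
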
diff --git a/Lib/xml/dom/xmlbuilder.py b/Lib/xml/dom/xmlbuilder.py
index 213ab145..8a200263 100644
--- a/Lib/xml/dom/xmlbuilder.py
+++ b/Lib/xml/dom/xmlbuilder.py
@@ -1,7 +1,6 @@
"""Implementation of the DOM Level 3 'LS-Load' feature."""
import copy
-import warnings
import xml.dom
from xml.dom.NodeFilter import NodeFilter
diff --git a/Lib/xml/etree/ElementInclude.py b/Lib/xml/etree/ElementInclude.py
index 963470e3..53030627 100644
--- a/Lib/xml/etree/ElementInclude.py
+++ b/Lib/xml/etree/ElementInclude.py
@@ -50,18 +50,28 @@
import copy
from . import ElementTree
+from urllib.parse import urljoin
XINCLUDE = "{http://www.w3.org/2001/XInclude}"
XINCLUDE_INCLUDE = XINCLUDE + "include"
XINCLUDE_FALLBACK = XINCLUDE + "fallback"
+# For security reasons, the inclusion depth is limited to this read-only value by default.
+DEFAULT_MAX_INCLUSION_DEPTH = 6
+
+
##
# Fatal include error.
class FatalIncludeError(SyntaxError):
pass
+
+class LimitedRecursiveIncludeError(FatalIncludeError):
+ pass
+
+
##
# Default loader. This loader reads an included resource from disk.
#
@@ -92,13 +102,33 @@ def default_loader(href, parse, encoding=None):
# @param loader Optional resource loader. If omitted, it defaults
# to {@link default_loader}. If given, it should be a callable
# that implements the same interface as default_loader.
+# @param base_url The base URL of the original file, to resolve
+# relative include file references.
+# @param max_depth The maximum number of recursive inclusions.
+# Limited to reduce the risk of malicious content explosion.
+# Pass a negative value to disable the limitation.
+# @throws LimitedRecursiveIncludeError If the {@link max_depth} was exceeded.
# @throws FatalIncludeError If the function fails to include a given
# resource, or if the tree contains malformed XInclude elements.
-# @throws OSError If the function fails to load a given resource.
+# @throws IOError If the function fails to load a given resource.
+# @returns the node or its replacement if it was an XInclude node
-def include(elem, loader=None):
+def include(elem, loader=None, base_url=None,
+ max_depth=DEFAULT_MAX_INCLUSION_DEPTH):
+ if max_depth is None:
+ max_depth = -1
+ elif max_depth < 0:
+ raise ValueError("expected non-negative depth or None for 'max_depth', got %r" % max_depth)
+
+ if hasattr(elem, 'getroot'):
+ elem = elem.getroot()
if loader is None:
loader = default_loader
+
+ _include(elem, loader, base_url, max_depth, set())
+
+
+def _include(elem, loader, base_url, max_depth, _parent_hrefs):
# look for xinclude elements
i = 0
while i < len(elem):
@@ -106,14 +136,24 @@ def include(elem, loader=None):
if e.tag == XINCLUDE_INCLUDE:
# process xinclude directive
href = e.get("href")
+ if base_url:
+ href = urljoin(base_url, href)
parse = e.get("parse", "xml")
if parse == "xml":
+ if href in _parent_hrefs:
+ raise FatalIncludeError("recursive include of %s" % href)
+ if max_depth == 0:
+ raise LimitedRecursiveIncludeError(
+ "maximum xinclude depth reached when including file %s" % href)
+ _parent_hrefs.add(href)
node = loader(href, parse)
if node is None:
raise FatalIncludeError(
"cannot load %r as %r" % (href, parse)
)
- node = copy.copy(node)
+ node = copy.copy(node) # FIXME: this makes little sense with recursive includes
+ _include(node, loader, href, max_depth - 1, _parent_hrefs)
+ _parent_hrefs.remove(href)
if e.tail:
node.tail = (node.tail or "") + e.tail
elem[i] = node
@@ -123,11 +163,13 @@ def include(elem, loader=None):
raise FatalIncludeError(
"cannot load %r as %r" % (href, parse)
)
+ if e.tail:
+ text += e.tail
if i:
node = elem[i-1]
- node.tail = (node.tail or "") + text + (e.tail or "")
+ node.tail = (node.tail or "") + text
else:
- elem.text = (elem.text or "") + text + (e.tail or "")
+ elem.text = (elem.text or "") + text
del elem[i]
continue
else:
@@ -139,5 +181,5 @@ def include(elem, loader=None):
"xi:fallback tag must be child of xi:include (%r)" % e.tag
)
else:
- include(e, loader)
- i = i + 1
+ _include(e, loader, base_url, max_depth, _parent_hrefs)
+ i += 1
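
A small sketch of the new cycle detection; the document and the self.xml href are made up, and the loader simply hands back another copy of the same tree so the include chain never terminates on its own:

from xml.etree import ElementTree, ElementInclude

SRC = ('<doc xmlns:xi="http://www.w3.org/2001/XInclude">'
       '<xi:include href="self.xml"/></doc>')

def loader(href, parse, encoding=None):
    # pretend every href resolves to yet another copy of the same document
    return ElementTree.fromstring(SRC)

doc = ElementTree.fromstring(SRC)
try:
    ElementInclude.include(doc, loader=loader)
except ElementInclude.FatalIncludeError:
    pass  # "recursive include of self.xml" is detected; non-cyclic but very
          # deep chains are cut off by max_depth (LimitedRecursiveIncludeError)
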
diff --git a/Lib/xml/etree/ElementTree.py b/Lib/xml/etree/ElementTree.py
index 645e999a..da2bcad0 100644
--- a/Lib/xml/etree/ElementTree.py
+++ b/Lib/xml/etree/ElementTree.py
@@ -76,7 +76,7 @@ __all__ = [
"dump",
"Element", "ElementTree",
"fromstring", "fromstringlist",
- "iselement", "iterparse",
+ "indent", "iselement", "iterparse",
"parse", "ParseError",
"PI", "ProcessingInstruction",
"QName",
@@ -195,6 +195,13 @@ class Element:
original tree.
"""
+ warnings.warn(
+ "elem.copy() is deprecated. Use copy.copy(elem) instead.",
+ DeprecationWarning
+ )
+ return self.__copy__()
+
+ def __copy__(self):
elem = self.makeelement(self.tag, self.attrib)
elem.text = self.text
elem.tail = self.tail
@@ -273,19 +280,6 @@ class Element:
# assert iselement(element)
self._children.remove(subelement)
- def getchildren(self):
- """(Deprecated) Return all subelements.
-
- Elements are returned in document order.
-
- """
- warnings.warn(
- "This method will be removed in future versions. "
- "Use 'list(elem)' or iteration over elem instead.",
- DeprecationWarning, stacklevel=2
- )
- return self._children
-
def find(self, path, namespaces=None):
"""Find first matching element by tag name or path.
@@ -409,15 +403,6 @@ class Element:
for e in self._children:
yield from e.iter(tag)
- # compatibility
- def getiterator(self, tag=None):
- warnings.warn(
- "This method will be removed in future versions. "
- "Use 'elem.iter()' or 'list(elem.iter())' instead.",
- DeprecationWarning, stacklevel=2
- )
- return list(self.iter(tag))
-
def itertext(self):
"""Create text iterator.
@@ -617,15 +602,6 @@ class ElementTree:
# assert self._root is not None
return self._root.iter(tag)
- # compatibility
- def getiterator(self, tag=None):
- warnings.warn(
- "This method will be removed in future versions. "
- "Use 'tree.iter()' or 'list(tree.iter())' instead.",
- DeprecationWarning, stacklevel=2
- )
- return list(self.iter(tag))
-
def find(self, path, namespaces=None):
"""Find first matching element by tag name or path.
@@ -1081,15 +1057,15 @@ def _escape_attrib(text):
text = text.replace(">", ">")
if "\"" in text:
text = text.replace("\"", """)
- # The following business with carriage returns is to satisfy
- # Section 2.11 of the XML specification, stating that
- # CR or CR LN should be replaced with just LN
+ # Although section 2.11 of the XML specification states that CR or
+ # CR LN should be replaced with just LN, it applies only to EOLNs
+ # which take part of organizing file into lines. Within attributes,
+ # we are replacing these with entity numbers, so they do not count.
# http://www.w3.org/TR/REC-xml/#sec-line-ends
- if "\r\n" in text:
- text = text.replace("\r\n", "\n")
+ # The current solution, contained in following six lines, was
+ # discussed in issue 17582 and 39011.
if "\r" in text:
- text = text.replace("\r", "\n")
- #The following four lines are issue 17582
+ text = text.replace("\r", "
")
if "\n" in text:
text = text.replace("\n", "
")
if "\t" in text:
@@ -1185,6 +1161,57 @@ def dump(elem):
if not tail or tail[-1] != "\n":
sys.stdout.write("\n")
+
+def indent(tree, space="  ", level=0):
+ """Indent an XML document by inserting newlines and indentation space
+ after elements.
+
+ *tree* is the ElementTree or Element to modify. The (root) element
+ itself will not be changed, but the tail text of all elements in its
+ subtree will be adapted.
+
+ *space* is the whitespace to insert for each indentation level, two
+ space characters by default.
+
+ *level* is the initial indentation level. Setting this to a higher
+ value than 0 can be used for indenting subtrees that are more deeply
+ nested inside of a document.
+ """
+ if isinstance(tree, ElementTree):
+ tree = tree.getroot()
+ if level < 0:
+ raise ValueError(f"Initial indentation level must be >= 0, got {level}")
+ if not len(tree):
+ return
+
+ # Reduce the memory consumption by reusing indentation strings.
+ indentations = ["\n" + level * space]
+
+ def _indent_children(elem, level):
+ # Start a new indentation level for the first child.
+ child_level = level + 1
+ try:
+ child_indentation = indentations[child_level]
+ except IndexError:
+ child_indentation = indentations[level] + space
+ indentations.append(child_indentation)
+
+ if not elem.text or not elem.text.strip():
+ elem.text = child_indentation
+
+ for child in elem:
+ if len(child):
+ _indent_children(child, child_level)
+ if not child.tail or not child.tail.strip():
+ child.tail = child_indentation
+
+ # Dedent after the last child by overwriting the previous indentation.
+ if not child.tail.strip():
+ child.tail = indentations[level]
+
+ _indent_children(tree, 0)
+
+
# --------------------------------------------------------------------
# parsing
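
A quick usage sketch of the new indent() helper:

import xml.etree.ElementTree as ET

root = ET.fromstring("<root><child>text</child><child/></root>")
ET.indent(root)  # two-space indentation by default
print(ET.tostring(root, encoding="unicode"))
# <root>
#   <child>text</child>
#   <child />
# </root>
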
@@ -1690,14 +1717,14 @@ class XMLParser:
def feed(self, data):
"""Feed encoded data to parser."""
try:
- self.parser.Parse(data, 0)
+ self.parser.Parse(data, False)
except self._error as v:
self._raiseerror(v)
def close(self):
"""Finish feeding data to parser and return element structure."""
try:
- self.parser.Parse("", 1) # end of data
+ self.parser.Parse(b"", True) # end of data
except self._error as v:
self._raiseerror(v)
try:
diff --git a/Lib/xml/sax/__init__.py b/Lib/xml/sax/__init__.py
index a0f5d40b..17b75879 100644
--- a/Lib/xml/sax/__init__.py
+++ b/Lib/xml/sax/__init__.py
@@ -78,7 +78,7 @@ def make_parser(parser_list=()):
for parser_name in list(parser_list) + default_parser_list:
try:
return _create_parser(parser_name)
- except ImportError as e:
+ except ImportError:
import sys
if parser_name in sys.modules:
# The parser module was found, but importing it
diff --git a/Lib/xml/sax/expatreader.py b/Lib/xml/sax/expatreader.py
index 5066ffc2..e334ac9f 100644
--- a/Lib/xml/sax/expatreader.py
+++ b/Lib/xml/sax/expatreader.py
@@ -93,7 +93,7 @@ class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator):
self._parser = None
self._namespaces = namespaceHandling
self._lex_handler_prop = None
- self._parsing = 0
+ self._parsing = False
self._entity_stack = []
self._external_ges = 0
self._interning = None
@@ -203,10 +203,10 @@ class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator):
# IncrementalParser methods
- def feed(self, data, isFinal = 0):
+ def feed(self, data, isFinal=False):
if not self._parsing:
self.reset()
- self._parsing = 1
+ self._parsing = True
self._cont_handler.startDocument()
try:
@@ -237,13 +237,13 @@ class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator):
# If we are completing an external entity, do nothing here
return
try:
- self.feed("", isFinal = 1)
+ self.feed(b"", isFinal=True)
self._cont_handler.endDocument()
- self._parsing = 0
+ self._parsing = False
# break cycle created by expat handlers pointing to our methods
self._parser = None
finally:
- self._parsing = 0
+ self._parsing = False
if self._parser is not None:
# Keep ErrorColumnNumber and ErrorLineNumber after closing.
parser = _ClosedParser()
@@ -307,7 +307,7 @@ class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator):
self._parser.SetParamEntityParsing(
expat.XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE)
- self._parsing = 0
+ self._parsing = False
self._entity_stack = []
# Locator methods
diff --git a/Lib/xmlrpc/client.py b/Lib/xmlrpc/client.py
index b9875745..d15d60d2 100644
--- a/Lib/xmlrpc/client.py
+++ b/Lib/xmlrpc/client.py
@@ -313,31 +313,38 @@ class DateTime:
s = self.timetuple()
o = other.timetuple()
else:
- otype = (hasattr(other, "__class__")
- and other.__class__.__name__
- or type(other))
- raise TypeError("Can't compare %s and %s" %
- (self.__class__.__name__, otype))
+ s = self
+ o = NotImplemented
return s, o
def __lt__(self, other):
s, o = self.make_comparable(other)
+ if o is NotImplemented:
+ return NotImplemented
return s < o
def __le__(self, other):
s, o = self.make_comparable(other)
+ if o is NotImplemented:
+ return NotImplemented
return s <= o
def __gt__(self, other):
s, o = self.make_comparable(other)
+ if o is NotImplemented:
+ return NotImplemented
return s > o
def __ge__(self, other):
s, o = self.make_comparable(other)
+ if o is NotImplemented:
+ return NotImplemented
return s >= o
def __eq__(self, other):
s, o = self.make_comparable(other)
+ if o is NotImplemented:
+ return NotImplemented
return s == o
def timetuple(self):
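
Roughly what the NotImplemented-based comparisons mean for callers: equality against an unsupported type now degrades to False, while ordering raises Python's usual TypeError rather than an error from make_comparable itself:

from xmlrpc.client import DateTime

d = DateTime("20200101T00:00:00")
assert (d == 42) is False
try:
    d < 42
except TypeError:
    pass
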
@@ -435,7 +442,7 @@ class ExpatParser:
target.xml(encoding, None)
def feed(self, data):
- self._parser.Parse(data, 0)
+ self._parser.Parse(data, False)
def close(self):
try:
@@ -1414,15 +1421,14 @@ class ServerProxy:
# establish a "logical" server connection
# get the url
- type, uri = urllib.parse._splittype(uri)
- if type not in ("http", "https"):
+ p = urllib.parse.urlparse(uri)
+ if p.scheme not in ("http", "https"):
raise OSError("unsupported XML-RPC protocol")
- self.__host, self.__handler = urllib.parse._splithost(uri)
- if not self.__handler:
- self.__handler = "/RPC2"
+ self.__host = p.netloc
+ self.__handler = p.path or "/RPC2"
if transport is None:
- if type == "https":
+ if p.scheme == "https":
handler = SafeTransport
extra_kwargs = {"context": context}
else:
diff --git a/Lib/xmlrpc/server.py b/Lib/xmlrpc/server.py
index 32aba4df..287e3243 100644
--- a/Lib/xmlrpc/server.py
+++ b/Lib/xmlrpc/server.py
@@ -732,7 +732,7 @@ class ServerHTMLDoc(pydoc.HTMLDoc):
# hyperlinking of arbitrary strings being used as method
# names. Only methods with names consisting of word characters
# and '.'s are hyperlinked.
- pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
+ pattern = re.compile(r'\b((http|https|ftp)://\S+[\w/]|'
r'RFC[- ]?(\d+)|'
r'PEP[- ]?(\d+)|'
r'(self\.)?((?:\w|\.)+))\b')
diff --git a/Lib/zipfile.py b/Lib/zipfile.py
index 73e89666..915698f9 100644
--- a/Lib/zipfile.py
+++ b/Lib/zipfile.py
@@ -4,7 +4,6 @@ Read and write ZIP files.
XXX references to utf-8 need further investigation.
"""
import binascii
-import functools
import importlib.util
import io
import itertools
@@ -378,11 +377,11 @@ class ZipInfo (object):
self.volume = 0 # Volume number of file header
self.internal_attr = 0 # Internal attributes
self.external_attr = 0 # External file attributes
+ self.compress_size = 0 # Size of the compressed file
+ self.file_size = 0 # Size of the uncompressed file
# Other attributes are set by class ZipFile:
# header_offset Byte offset to the file header
# CRC CRC-32 of the uncompressed file
- # compress_size Size of the compressed file
- # file_size Size of the uncompressed file
def __repr__(self):
result = ['<%s filename=%r' % (self.__class__.__name__, self.filename)]
@@ -467,44 +466,23 @@ class ZipInfo (object):
if ln+4 > len(extra):
raise BadZipFile("Corrupt extra field %04x (size=%d)" % (tp, ln))
if tp == 0x0001:
- if ln >= 24:
- counts = unpack('<QQQ', extra[4:28])
+ trans_list_utc = struct.unpack(
+ f">{timecnt}{time_type}", fobj.read(timecnt * time_size)
+ )
+ trans_idx = struct.unpack(f">{timecnt}B", fobj.read(timecnt))
+ else:
+ trans_list_utc = ()
+ trans_idx = ()
+
+ # Read the ttinfo struct, (utoff, isdst, abbrind)
+ if typecnt:
+ utcoff, isdst, abbrind = zip(
+ *(struct.unpack(">lbb", fobj.read(6)) for i in range(typecnt))
+ )
+ else:
+ utcoff = ()
+ isdst = ()
+ abbrind = ()
+
+ # Now read the abbreviations. They are null-terminated strings, indexed
+ # not by position in the array but by position in the unsplit
+ # abbreviation string. I suppose this makes more sense in C, which uses
+ # null to terminate the strings, but it's inconvenient here...
+ abbr_vals = {}
+ abbr_chars = fobj.read(charcnt)
+
+ def get_abbr(idx):
+ # Gets a string starting at idx and running until the next \x00
+ #
+ # We cannot pre-populate abbr_vals by splitting on \x00 because there
+ # are some zones that use subsets of longer abbreviations, like so:
+ #
+ # LMT\x00AHST\x00HDT\x00
+ #
+ # Where the idx to abbr mapping should be:
+ #
+ # {0: "LMT", 4: "AHST", 5: "HST", 9: "HDT"}
+ if idx not in abbr_vals:
+ span_end = abbr_chars.find(b"\x00", idx)
+ abbr_vals[idx] = abbr_chars[idx:span_end].decode()
+
+ return abbr_vals[idx]
+
+ abbr = tuple(get_abbr(idx) for idx in abbrind)
+
+ # The remainder of the file consists of leap seconds (currently unused) and
+ # the standard/wall and ut/local indicators, which are metadata we don't need.
+ # In version 2 files, we need to skip the unnecessary data to get at the TZ string:
+ if header.version >= 2:
+ # Each leap second record has size (time_size + 4)
+ skip_bytes = header.isutcnt + header.isstdcnt + header.leapcnt * 12
+ fobj.seek(skip_bytes, 1)
+
+ c = fobj.read(1) # Should be \n
+ assert c == b"\n", c
+
+ tz_bytes = b""
+ while (c := fobj.read(1)) != b"\n":
+ tz_bytes += c
+
+ tz_str = tz_bytes
+ else:
+ tz_str = None
+
+ return trans_idx, trans_list_utc, utcoff, isdst, abbr, tz_str
+
+
+class _TZifHeader:
+ __slots__ = [
+ "version",
+ "isutcnt",
+ "isstdcnt",
+ "leapcnt",
+ "timecnt",
+ "typecnt",
+ "charcnt",
+ ]
+
+ def __init__(self, *args):
+ assert len(self.__slots__) == len(args)
+ for attr, val in zip(self.__slots__, args):
+ setattr(self, attr, val)
+
+ @classmethod
+ def from_file(cls, stream):
+ # The header starts with a 4-byte "magic" value
+ if stream.read(4) != b"TZif":
+ raise ValueError("Invalid TZif file: magic not found")
+
+ _version = stream.read(1)
+ if _version == b"\x00":
+ version = 1
+ else:
+ version = int(_version)
+ stream.read(15)
+
+ args = (version,)
+
+ # Slots are defined in the order that the bytes are arranged
+ args = args + struct.unpack(">6l", stream.read(24))
+
+ return cls(*args)
+
+
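
To make the header layout concrete, here is a hand-built, purely illustrative TZif v2 header (not a real zone) fed through _TZifHeader.from_file:

import io
import struct

# 4-byte magic, version byte, 15 reserved bytes, then six big-endian 32-bit
# counts: isutcnt, isstdcnt, leapcnt, timecnt, typecnt, charcnt.
raw = b"TZif" + b"2" + b"\x00" * 15 + struct.pack(">6l", 0, 0, 0, 0, 1, 4)
header = _TZifHeader.from_file(io.BytesIO(raw))
assert header.version == 2
assert (header.typecnt, header.charcnt) == (1, 4)
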
+class ZoneInfoNotFoundError(KeyError):
+ """Exception raised when a ZoneInfo key is not found."""
diff --git a/Lib/zoneinfo/_tzpath.py b/Lib/zoneinfo/_tzpath.py
new file mode 100644
index 00000000..9513611c
--- /dev/null
+++ b/Lib/zoneinfo/_tzpath.py
@@ -0,0 +1,175 @@
+import os
+import sysconfig
+
+
+def reset_tzpath(to=None):
+ global TZPATH
+
+ tzpaths = to
+ if tzpaths is not None:
+ if isinstance(tzpaths, (str, bytes)):
+ raise TypeError(
+ f"tzpaths must be a list or tuple, "
+ + f"not {type(tzpaths)}: {tzpaths!r}"
+ )
+
+ if not all(map(os.path.isabs, tzpaths)):
+ raise ValueError(_get_invalid_paths_message(tzpaths))
+ base_tzpath = tzpaths
+ else:
+ env_var = os.environ.get("PYTHONTZPATH", None)
+ if env_var is not None:
+ base_tzpath = _parse_python_tzpath(env_var)
+ else:
+ base_tzpath = _parse_python_tzpath(
+ sysconfig.get_config_var("TZPATH")
+ )
+
+ TZPATH = tuple(base_tzpath)
+
+
+def _parse_python_tzpath(env_var):
+ if not env_var:
+ return ()
+
+ raw_tzpath = env_var.split(os.pathsep)
+ new_tzpath = tuple(filter(os.path.isabs, raw_tzpath))
+
+ # If anything has been filtered out, we will warn about it
+ if len(new_tzpath) != len(raw_tzpath):
+ import warnings
+
+ msg = _get_invalid_paths_message(raw_tzpath)
+
+ warnings.warn(
+ "Invalid paths specified in PYTHONTZPATH environment variable."
+ + msg,
+ InvalidTZPathWarning,
+ )
+
+ return new_tzpath
+
+
+def _get_invalid_paths_message(tzpaths):
+ invalid_paths = (path for path in tzpaths if not os.path.isabs(path))
+
+ prefix = "\n "
+ indented_str = prefix + prefix.join(invalid_paths)
+
+ return (
+ "Paths should be absolute but found the following relative paths:"
+ + indented_str
+ )
+
+
+def find_tzfile(key):
+ """Retrieve the path to a TZif file from a key."""
+ _validate_tzfile_path(key)
+ for search_path in TZPATH:
+ filepath = os.path.join(search_path, key)
+ if os.path.isfile(filepath):
+ return filepath
+
+ return None
+
+
+_TEST_PATH = os.path.normpath(os.path.join("_", "_"))[:-1]
+
+
+def _validate_tzfile_path(path, _base=_TEST_PATH):
+ if os.path.isabs(path):
+ raise ValueError(
+ f"ZoneInfo keys may not be absolute paths, got: {path}"
+ )
+
+ # We only care about the kinds of path normalizations that would change the
+ # length of the key - e.g. a/../b -> a/b, or a/b/ -> a/b. On Windows,
+ # normpath will also change from a/b to a\b, but that would still preserve
+ # the length.
+ new_path = os.path.normpath(path)
+ if len(new_path) != len(path):
+ raise ValueError(
+ f"ZoneInfo keys must be normalized relative paths, got: {path}"
+ )
+
+ resolved = os.path.normpath(os.path.join(_base, new_path))
+ if not resolved.startswith(_base):
+ raise ValueError(
+ f"ZoneInfo keys must refer to subdirectories of TZPATH, got: {path}"
+ )
+
+
+del _TEST_PATH
+
+
+def available_timezones():
+ """Returns a set containing all available time zones.
+
+ .. caution::
+
+ This may attempt to open a large number of files, since the best way to
+ determine whether a given file on the time zone search path is a valid
+ time zone is to open it and check for the "magic string" at the beginning.
+ """
+ from importlib import resources
+
+ valid_zones = set()
+
+ # Start with loading from the tzdata package if it exists: this has a
+ # pre-assembled list of zones that only requires opening one file.
+ try:
+ with resources.open_text("tzdata", "zones") as f:
+ for zone in f:
+ zone = zone.strip()
+ if zone:
+ valid_zones.add(zone)
+ except (ImportError, FileNotFoundError):
+ pass
+
+ def valid_key(fpath):
+ try:
+ with open(fpath, "rb") as f:
+ return f.read(4) == b"TZif"
+ except Exception: # pragma: nocover
+ return False
+
+ for tz_root in TZPATH:
+ if not os.path.exists(tz_root):
+ continue
+
+ for root, dirnames, files in os.walk(tz_root):
+ if root == tz_root:
+ # right/ and posix/ are special directories and shouldn't be
+ # included in the output of available zones
+ if "right" in dirnames:
+ dirnames.remove("right")
+ if "posix" in dirnames:
+ dirnames.remove("posix")
+
+ for file in files:
+ fpath = os.path.join(root, file)
+
+ key = os.path.relpath(fpath, start=tz_root)
+ if os.sep != "/": # pragma: nocover
+ key = key.replace(os.sep, "/")
+
+ if not key or key in valid_zones:
+ continue
+
+ if valid_key(fpath):
+ valid_zones.add(key)
+
+ if "posixrules" in valid_zones:
+ # posixrules is a special symlink-only time zone; where it exists, it
+ # should not be included in the output
+ valid_zones.remove("posixrules")
+
+ return valid_zones
+
+
+class InvalidTZPathWarning(RuntimeWarning):
+ """Warning raised if an invalid path is specified in PYTHONTZPATH."""
+
+
+TZPATH = ()
+reset_tzpath()
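
A short sketch of the search-path rules, using the names defined in this module; custom_zoneinfo is a hypothetical directory:

import os

reset_tzpath(to=[os.path.abspath("custom_zoneinfo")])  # entries must be absolute
assert TZPATH == (os.path.abspath("custom_zoneinfo"),)

try:
    reset_tzpath(to="/usr/share/zoneinfo")  # a bare str/bytes is rejected
except TypeError:
    pass

reset_tzpath()  # back to PYTHONTZPATH / compiled-in defaults
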
diff --git a/Lib/zoneinfo/_zoneinfo.py b/Lib/zoneinfo/_zoneinfo.py
new file mode 100644
index 00000000..9810637d
--- /dev/null
+++ b/Lib/zoneinfo/_zoneinfo.py
@@ -0,0 +1,752 @@
+import bisect
+import calendar
+import collections
+import functools
+import re
+import weakref
+from datetime import datetime, timedelta, tzinfo
+
+from . import _common, _tzpath
+
+EPOCH = datetime(1970, 1, 1)
+EPOCHORDINAL = datetime(1970, 1, 1).toordinal()
+
+# It is relatively expensive to construct new timedelta objects, and in most
+# cases we're looking at the same deltas, like integer numbers of hours, etc.
+# To improve speed and memory use, we'll keep a dictionary with references
+# to the ones we've already used so far.
+#
+# Loading every time zone in the 2020a version of the time zone database
+# requires 447 timedeltas, which requires approximately the amount of space
+# that ZoneInfo("America/New_York") with 236 transitions takes up, so we will
+# set the cache size to 512 so that in the common case we always get cache
+# hits, but specifically crafted ZoneInfo objects don't leak arbitrary amounts
+# of memory.
+@functools.lru_cache(maxsize=512)
+def _load_timedelta(seconds):
+ return timedelta(seconds=seconds)
+
+
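
The cache means repeated offsets share a single timedelta object, for example:

from datetime import timedelta

a = _load_timedelta(3600)
b = _load_timedelta(3600)
assert a is b and a == timedelta(hours=1)
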
+class ZoneInfo(tzinfo):
+ _strong_cache_size = 8
+ _strong_cache = collections.OrderedDict()
+ _weak_cache = weakref.WeakValueDictionary()
+ __module__ = "zoneinfo"
+
+ def __init_subclass__(cls):
+ cls._strong_cache = collections.OrderedDict()
+ cls._weak_cache = weakref.WeakValueDictionary()
+
+ def __new__(cls, key):
+ instance = cls._weak_cache.get(key, None)
+ if instance is None:
+ instance = cls._weak_cache.setdefault(key, cls._new_instance(key))
+ instance._from_cache = True
+
+ # Update the "strong" cache
+ cls._strong_cache[key] = cls._strong_cache.pop(key, instance)
+
+ if len(cls._strong_cache) > cls._strong_cache_size:
+ cls._strong_cache.popitem(last=False)
+
+ return instance
+
+ @classmethod
+ def no_cache(cls, key):
+ obj = cls._new_instance(key)
+ obj._from_cache = False
+
+ return obj
+
+ @classmethod
+ def _new_instance(cls, key):
+ obj = super().__new__(cls)
+ obj._key = key
+ obj._file_path = obj._find_tzfile(key)
+
+ if obj._file_path is not None:
+ file_obj = open(obj._file_path, "rb")
+ else:
+ file_obj = _common.load_tzdata(key)
+
+ with file_obj as f:
+ obj._load_file(f)
+
+ return obj
+
+ @classmethod
+ def from_file(cls, fobj, /, key=None):
+ obj = super().__new__(cls)
+ obj._key = key
+ obj._file_path = None
+ obj._load_file(fobj)
+ obj._file_repr = repr(fobj)
+
+ # Disable pickling for objects created from files
+ obj.__reduce__ = obj._file_reduce
+
+ return obj
+
+ @classmethod
+ def clear_cache(cls, *, only_keys=None):
+ if only_keys is not None:
+ for key in only_keys:
+ cls._weak_cache.pop(key, None)
+ cls._strong_cache.pop(key, None)
+
+ else:
+ cls._weak_cache.clear()
+ cls._strong_cache.clear()
+
+ @property
+ def key(self):
+ return self._key
+
+ def utcoffset(self, dt):
+ return self._find_trans(dt).utcoff
+
+ def dst(self, dt):
+ return self._find_trans(dt).dstoff
+
+ def tzname(self, dt):
+ return self._find_trans(dt).tzname
+
+ def fromutc(self, dt):
+ """Convert from datetime in UTC to datetime in local time"""
+
+ if not isinstance(dt, datetime):
+ raise TypeError("fromutc() requires a datetime argument")
+ if dt.tzinfo is not self:
+ raise ValueError("dt.tzinfo is not self")
+
+ timestamp = self._get_local_timestamp(dt)
+ num_trans = len(self._trans_utc)
+
+ if num_trans >= 1 and timestamp < self._trans_utc[0]:
+ tti = self._tti_before
+ fold = 0
+ elif (
+ num_trans == 0 or timestamp > self._trans_utc[-1]
+ ) and not isinstance(self._tz_after, _ttinfo):
+ tti, fold = self._tz_after.get_trans_info_fromutc(
+ timestamp, dt.year
+ )
+ elif num_trans == 0:
+ tti = self._tz_after
+ fold = 0
+ else:
+ idx = bisect.bisect_right(self._trans_utc, timestamp)
+
+ if num_trans > 1 and timestamp >= self._trans_utc[1]:
+ tti_prev, tti = self._ttinfos[idx - 2 : idx]
+ elif timestamp > self._trans_utc[-1]:
+ tti_prev = self._ttinfos[-1]
+ tti = self._tz_after
+ else:
+ tti_prev = self._tti_before
+ tti = self._ttinfos[0]
+
+ # Detect fold
+ shift = tti_prev.utcoff - tti.utcoff
+ fold = shift.total_seconds() > timestamp - self._trans_utc[idx - 1]
+ dt += tti.utcoff
+ if fold:
+ return dt.replace(fold=1)
+ else:
+ return dt
+
+ def _find_trans(self, dt):
+ if dt is None:
+ if self._fixed_offset:
+ return self._tz_after
+ else:
+ return _NO_TTINFO
+
+ ts = self._get_local_timestamp(dt)
+
+ lt = self._trans_local[dt.fold]
+
+ num_trans = len(lt)
+
+ if num_trans and ts < lt[0]:
+ return self._tti_before
+ elif not num_trans or ts > lt[-1]:
+ if isinstance(self._tz_after, _TZStr):
+ return self._tz_after.get_trans_info(ts, dt.year, dt.fold)
+ else:
+ return self._tz_after
+ else:
+ # idx is the transition that occurs after this timestamp, so we
+ # subtract off 1 to get the current ttinfo
+ idx = bisect.bisect_right(lt, ts) - 1
+ assert idx >= 0
+ return self._ttinfos[idx]
+
+ def _get_local_timestamp(self, dt):
+ return (
+ (dt.toordinal() - EPOCHORDINAL) * 86400
+ + dt.hour * 3600
+ + dt.minute * 60
+ + dt.second
+ )
+
+ def __str__(self):
+ if self._key is not None:
+ return f"{self._key}"
+ else:
+ return repr(self)
+
+ def __repr__(self):
+ if self._key is not None:
+ return f"{self.__class__.__name__}(key={self._key!r})"
+ else:
+ return f"{self.__class__.__name__}.from_file({self._file_repr})"
+
+ def __reduce__(self):
+ return (self.__class__._unpickle, (self._key, self._from_cache))
+
+ def _file_reduce(self):
+ import pickle
+
+ raise pickle.PicklingError(
+ "Cannot pickle a ZoneInfo file created from a file stream."
+ )
+
+ @classmethod
+ def _unpickle(cls, key, from_cache, /):
+ if from_cache:
+ return cls(key)
+ else:
+ return cls.no_cache(key)
+
+ def _find_tzfile(self, key):
+ return _tzpath.find_tzfile(key)
+
+ def _load_file(self, fobj):
+ # Retrieve all the data as it exists in the zoneinfo file
+ trans_idx, trans_utc, utcoff, isdst, abbr, tz_str = _common.load_data(
+ fobj
+ )
+
+ # Infer the DST offsets (needed for .dst()) from the data
+ dstoff = self._utcoff_to_dstoff(trans_idx, utcoff, isdst)
+
+ # Convert all the transition times (UTC) into "seconds since 1970-01-01 local time"
+ trans_local = self._ts_to_local(trans_idx, trans_utc, utcoff)
+
+ # Construct `_ttinfo` objects for each transition in the file
+ _ttinfo_list = [
+ _ttinfo(
+ _load_timedelta(utcoffset), _load_timedelta(dstoffset), tzname
+ )
+ for utcoffset, dstoffset, tzname in zip(utcoff, dstoff, abbr)
+ ]
+
+ self._trans_utc = trans_utc
+ self._trans_local = trans_local
+ self._ttinfos = [_ttinfo_list[idx] for idx in trans_idx]
+
+ # Find the first non-DST transition
+ for i in range(len(isdst)):
+ if not isdst[i]:
+ self._tti_before = _ttinfo_list[i]
+ break
+ else:
+ if self._ttinfos:
+ self._tti_before = self._ttinfos[0]
+ else:
+ self._tti_before = None
+
+ # Set the "fallback" time zone
+ if tz_str is not None and tz_str != b"":
+ self._tz_after = _parse_tz_str(tz_str.decode())
+ else:
+ if not self._ttinfos and not _ttinfo_list:
+ raise ValueError("No time zone information found.")
+
+ if self._ttinfos:
+ self._tz_after = self._ttinfos[-1]
+ else:
+ self._tz_after = _ttinfo_list[-1]
+
+ # Determine if this is a "fixed offset" zone, meaning that the output
+ # of the utcoffset, dst and tzname functions does not depend on the
+ # specific datetime passed.
+ #
+ # We make three simplifying assumptions here:
+ #
+ # 1. If _tz_after is not a _ttinfo, it has transitions that might
+ # actually occur (it is possible to construct TZ strings that
+ # specify STD and DST but no transitions ever occur, such as
+ # AAA0BBB,0/0,J365/25).
+ # 2. If _ttinfo_list contains more than one _ttinfo object, the objects
+ # represent different offsets.
+ # 3. _ttinfo_list contains no unused _ttinfos (in which case an
+ # otherwise fixed-offset zone with extra _ttinfos defined may
+ # appear to *not* be a fixed offset zone).
+ #
+ # Violations to these assumptions would be fairly exotic, and exotic
+ # zones should almost certainly not be used with datetime.time (the
+ # only thing that would be affected by this).
+ if len(_ttinfo_list) > 1 or not isinstance(self._tz_after, _ttinfo):
+ self._fixed_offset = False
+ elif not _ttinfo_list:
+ self._fixed_offset = True
+ else:
+ self._fixed_offset = _ttinfo_list[0] == self._tz_after
+
+ @staticmethod
+ def _utcoff_to_dstoff(trans_idx, utcoffsets, isdsts):
+ # Now we must transform our ttis and abbrs into `_ttinfo` objects,
+ # but there is an issue: .dst() must return a timedelta with the
+ # difference between utcoffset() and the "standard" offset, but
+ # the "base offset" and "DST offset" are not encoded in the file;
+ # we can infer what they are from the isdst flag, but it is not
+ # sufficient to just look at the last standard offset, because
+ # occasionally countries will shift both DST offset and base offset.
+
+ typecnt = len(isdsts)
+ dstoffs = [0] * typecnt # Provisionally assign all to 0.
+ dst_cnt = sum(isdsts)
+ dst_found = 0
+
+ for i in range(1, len(trans_idx)):
+ if dst_cnt == dst_found:
+ break
+
+ idx = trans_idx[i]
+
+ dst = isdsts[idx]
+
+ # We're only going to look at daylight saving time
+ if not dst:
+ continue
+
+ # Skip any offsets that have already been assigned
+ if dstoffs[idx] != 0:
+ continue
+
+ dstoff = 0
+ utcoff = utcoffsets[idx]
+
+ comp_idx = trans_idx[i - 1]
+
+ if not isdsts[comp_idx]:
+ dstoff = utcoff - utcoffsets[comp_idx]
+
+ if not dstoff and idx < (typecnt - 1):
+ comp_idx = trans_idx[i + 1]
+
+ # If the following transition is also DST and we couldn't
+ # find the DST offset by this point, we're going to have to
+ # skip it and hope this transition gets assigned later
+ if isdsts[comp_idx]:
+ continue
+
+ dstoff = utcoff - utcoffsets[comp_idx]
+
+ if dstoff:
+ dst_found += 1
+ dstoffs[idx] = dstoff
+ else:
+ # If we didn't find a valid value for a given index, we'll end up
+ # with dstoff = 0 for something where `isdst=1`. This is obviously
+ # wrong - one hour will be a much better guess than 0
+ for idx in range(typecnt):
+ if not dstoffs[idx] and isdsts[idx]:
+ dstoffs[idx] = 3600
+
+ return dstoffs
+
+ @staticmethod
+ def _ts_to_local(trans_idx, trans_list_utc, utcoffsets):
+ """Generate number of seconds since 1970 *in the local time*.
+
+ This is necessary to easily find the transition times in local time"""
+ if not trans_list_utc:
+ return [[], []]
+
+ # Start with the timestamps and modify in-place
+ trans_list_wall = [list(trans_list_utc), list(trans_list_utc)]
+
+ if len(utcoffsets) > 1:
+ offset_0 = utcoffsets[0]
+ offset_1 = utcoffsets[trans_idx[0]]
+ if offset_1 > offset_0:
+ offset_1, offset_0 = offset_0, offset_1
+ else:
+ offset_0 = offset_1 = utcoffsets[0]
+
+ trans_list_wall[0][0] += offset_0
+ trans_list_wall[1][0] += offset_1
+
+ for i in range(1, len(trans_idx)):
+ offset_0 = utcoffsets[trans_idx[i - 1]]
+ offset_1 = utcoffsets[trans_idx[i]]
+
+ if offset_1 > offset_0:
+ offset_1, offset_0 = offset_0, offset_1
+
+ trans_list_wall[0][i] += offset_0
+ trans_list_wall[1][i] += offset_1
+
+ return trans_list_wall
+
+
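
A behavioural sketch of how fold interacts with the lookups above, assuming a system tz database or the tzdata package provides America/New_York (2020-11-01 01:30 occurs twice there):

from datetime import datetime, timedelta

nyc = ZoneInfo("America/New_York")
first = datetime(2020, 11, 1, 1, 30, tzinfo=nyc)
second = datetime(2020, 11, 1, 1, 30, fold=1, tzinfo=nyc)
assert nyc.tzname(first) == "EDT" and nyc.tzname(second) == "EST"
assert nyc.utcoffset(second) - nyc.utcoffset(first) == timedelta(hours=-1)
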
+class _ttinfo:
+ __slots__ = ["utcoff", "dstoff", "tzname"]
+
+ def __init__(self, utcoff, dstoff, tzname):
+ self.utcoff = utcoff
+ self.dstoff = dstoff
+ self.tzname = tzname
+
+ def __eq__(self, other):
+ return (
+ self.utcoff == other.utcoff
+ and self.dstoff == other.dstoff
+ and self.tzname == other.tzname
+ )
+
+ def __repr__(self): # pragma: nocover
+ return (
+ f"{self.__class__.__name__}"
+ + f"({self.utcoff}, {self.dstoff}, {self.tzname})"
+ )
+
+
+_NO_TTINFO = _ttinfo(None, None, None)
+
+
+class _TZStr:
+ __slots__ = (
+ "std",
+ "dst",
+ "start",
+ "end",
+ "get_trans_info",
+ "get_trans_info_fromutc",
+ "dst_diff",
+ )
+
+ def __init__(
+ self, std_abbr, std_offset, dst_abbr, dst_offset, start=None, end=None
+ ):
+ self.dst_diff = dst_offset - std_offset
+ std_offset = _load_timedelta(std_offset)
+ self.std = _ttinfo(
+ utcoff=std_offset, dstoff=_load_timedelta(0), tzname=std_abbr
+ )
+
+ self.start = start
+ self.end = end
+
+ dst_offset = _load_timedelta(dst_offset)
+ delta = _load_timedelta(self.dst_diff)
+ self.dst = _ttinfo(utcoff=dst_offset, dstoff=delta, tzname=dst_abbr)
+
+ # These are assertions because the constructor should only be called
+ # by functions that would fail before passing start or end
+ assert start is not None, "No transition start specified"
+ assert end is not None, "No transition end specified"
+
+ self.get_trans_info = self._get_trans_info
+ self.get_trans_info_fromutc = self._get_trans_info_fromutc
+
+ def transitions(self, year):
+ start = self.start.year_to_epoch(year)
+ end = self.end.year_to_epoch(year)
+ return start, end
+
+ def _get_trans_info(self, ts, year, fold):
+ """Get the information about the current transition - tti"""
+ start, end = self.transitions(year)
+
+ # With fold = 0, the period (denominated in local time) with the
+ # smaller offset starts at the end of the gap and ends at the end of
+ # the fold; with fold = 1, it runs from the start of the gap to the
+ # beginning of the fold.
+ #
+ # So in order to determine the DST boundaries we need to know both
+ # the fold and whether DST is positive or negative (rare), and it
+ # turns out that this boils down to fold XOR is_positive.
+ if fold == (self.dst_diff >= 0):
+ end -= self.dst_diff
+ else:
+ start += self.dst_diff
+
+ if start < end:
+ isdst = start <= ts < end
+ else:
+ isdst = not (end <= ts < start)
+
+ return self.dst if isdst else self.std
+
+ def _get_trans_info_fromutc(self, ts, year):
+ start, end = self.transitions(year)
+ start -= self.std.utcoff.total_seconds()
+ end -= self.dst.utcoff.total_seconds()
+
+ if start < end:
+ isdst = start <= ts < end
+ else:
+ isdst = not (end <= ts < start)
+
+ # For positive DST, the ambiguous period is one dst_diff after the end
+ # of DST; for negative DST, the ambiguous period is one dst_diff before
+ # the start of DST.
+ if self.dst_diff > 0:
+ ambig_start = end
+ ambig_end = end + self.dst_diff
+ else:
+ ambig_start = start
+ ambig_end = start - self.dst_diff
+
+ fold = ambig_start <= ts < ambig_end
+
+ return (self.dst if isdst else self.std, fold)
+
+
+def _post_epoch_days_before_year(year):
+ """Get the number of days between 1970-01-01 and YEAR-01-01"""
+ y = year - 1
+ return y * 365 + y // 4 - y // 100 + y // 400 - EPOCHORDINAL
+
+
+class _DayOffset:
+ __slots__ = ["d", "julian", "hour", "minute", "second"]
+
+ def __init__(self, d, julian, hour=2, minute=0, second=0):
+ if not (0 + julian) <= d <= 365:
+ min_day = 0 + julian
+ raise ValueError(f"d must be in [{min_day}, 365], not: {d}")
+
+ self.d = d
+ self.julian = julian
+ self.hour = hour
+ self.minute = minute
+ self.second = second
+
+ def year_to_epoch(self, year):
+ days_before_year = _post_epoch_days_before_year(year)
+
+ d = self.d
+ if self.julian and d >= 59 and calendar.isleap(year):
+ d += 1
+
+ epoch = (days_before_year + d) * 86400
+ epoch += self.hour * 3600 + self.minute * 60 + self.second
+
+ return epoch
+
+
+class _CalendarOffset:
+ __slots__ = ["m", "w", "d", "hour", "minute", "second"]
+
+ _DAYS_BEFORE_MONTH = (
+ -1,
+ 0,
+ 31,
+ 59,
+ 90,
+ 120,
+ 151,
+ 181,
+ 212,
+ 243,
+ 273,
+ 304,
+ 334,
+ )
+
+ def __init__(self, m, w, d, hour=2, minute=0, second=0):
+ if not 0 < m <= 12:
+ raise ValueError("m must be in (0, 12]")
+
+ if not 0 < w <= 5:
+ raise ValueError("w must be in (0, 5]")
+
+ if not 0 <= d <= 6:
+ raise ValueError("d must be in [0, 6]")
+
+ self.m = m
+ self.w = w
+ self.d = d
+ self.hour = hour
+ self.minute = minute
+ self.second = second
+
+ @classmethod
+ def _ymd2ord(cls, year, month, day):
+ return (
+ _post_epoch_days_before_year(year)
+ + cls._DAYS_BEFORE_MONTH[month]
+ + (month > 2 and calendar.isleap(year))
+ + day
+ )
+
+ # TODO: These are not actually epoch dates as they are expressed in local time
+ def year_to_epoch(self, year):
+ """Calculates the datetime of the occurrence from the year"""
+ # We know year and month, we need to convert w, d into day of month
+ #
+ # Week 1 is the first week in which day `d` (where 0 = Sunday) appears.
+ # Week 5 represents the last occurrence of day `d`, so we need to know
+ # the range of the month.
+ first_day, days_in_month = calendar.monthrange(year, self.m)
+
+ # This equation seems magical, so I'll break it down:
+ # 1. calendar says 0 = Monday, POSIX says 0 = Sunday
+ # so we need first_day + 1 to get 1 = Monday -> 7 = Sunday,
+ # which is still equivalent because this math is mod 7
+ # 2. Get first day - desired day mod 7: -1 % 7 = 6, so we don't need
+ # to do anything to adjust negative numbers.
+ # 3. Add 1 because month days are a 1-based index.
+ month_day = (self.d - (first_day + 1)) % 7 + 1
+
+ # Now use a 0-based index version of `w` to calculate the w-th
+ # occurrence of `d`
+ month_day += (self.w - 1) * 7
+
+ # month_day will only be > days_in_month if w was 5, and `w` means
+ # "last occurrence of `d`", so now we just check if we over-shot the
+ # end of the month and if so knock off 1 week.
+ if month_day > days_in_month:
+ month_day -= 7
+
+ ordinal = self._ymd2ord(year, self.m, month_day)
+ epoch = ordinal * 86400
+ epoch += self.hour * 3600 + self.minute * 60 + self.second
+ return epoch
+
+
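
A worked example of the week arithmetic above: the US DST-start rule M3.2.0/2 (month 3, second Sunday, 02:00 local) lands on 2021-03-14, and year_to_epoch returns that instant as local seconds since the epoch:

from datetime import datetime

us_dst_start = _CalendarOffset(m=3, w=2, d=0, hour=2)
expected = int((datetime(2021, 3, 14, 2) - datetime(1970, 1, 1)).total_seconds())
assert us_dst_start.year_to_epoch(2021) == expected
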
+def _parse_tz_str(tz_str):
+ # The tz string has the format:
+ #
+ # std[offset[dst[offset],start[/time],end[/time]]]
+ #
+ # std and dst must be 3 or more characters long and must not contain
+ # a leading colon, embedded digits, commas, nor a plus or minus signs;
+ # The spaces between "std" and "offset" are only for display and are
+ # not actually present in the string.
+ #
+ # The format of the offset is ``[+|-]hh[:mm[:ss]]``
+
+ offset_str, *start_end_str = tz_str.split(",", 1)
+
+ # fmt: off
+ parser_re = re.compile(
+ r"(?P[^<0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
+ r"((?P[+-]?\d{1,2}(:\d{2}(:\d{2})?)?)" +
+ r"((?P[^0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
+ r"((?P[+-]?\d{1,2}(:\d{2}(:\d{2})?)?))?" +
+ r")?" + # dst
+ r")?$" # stdoff
+ )
+ # fmt: on
+
+ m = parser_re.match(offset_str)
+
+ if m is None:
+ raise ValueError(f"{tz_str} is not a valid TZ string")
+
+ std_abbr = m.group("std")
+ dst_abbr = m.group("dst")
+ dst_offset = None
+
+ std_abbr = std_abbr.strip("<>")
+
+ if dst_abbr:
+ dst_abbr = dst_abbr.strip("<>")
+
+ if std_offset := m.group("stdoff"):
+ try:
+ std_offset = _parse_tz_delta(std_offset)
+ except ValueError as e:
+ raise ValueError(f"Invalid STD offset in {tz_str}") from e
+ else:
+ std_offset = 0
+
+ if dst_abbr is not None:
+ if dst_offset := m.group("dstoff"):
+ try:
+ dst_offset = _parse_tz_delta(dst_offset)
+ except ValueError as e:
+ raise ValueError(f"Invalid DST offset in {tz_str}") from e
+ else:
+ dst_offset = std_offset + 3600
+
+ if not start_end_str:
+ raise ValueError(f"Missing transition rules: {tz_str}")
+
+ start_end_strs = start_end_str[0].split(",", 1)
+ try:
+ start, end = (_parse_dst_start_end(x) for x in start_end_strs)
+ except ValueError as e:
+ raise ValueError(f"Invalid TZ string: {tz_str}") from e
+
+ return _TZStr(std_abbr, std_offset, dst_abbr, dst_offset, start, end)
+ elif start_end_str:
+ raise ValueError(f"Transition rule present without DST: {tz_str}")
+ else:
+ # This is a static ttinfo, don't return _TZStr
+ return _ttinfo(
+ _load_timedelta(std_offset), _load_timedelta(0), std_abbr
+ )
+
+
+def _parse_dst_start_end(dststr):
+ date, *time = dststr.split("/")
+ if date[0] == "M":
+ n_is_julian = False
+ m = re.match(r"M(\d{1,2})\.(\d).(\d)$", date)
+ if m is None:
+ raise ValueError(f"Invalid dst start/end date: {dststr}")
+ date_offset = tuple(map(int, m.groups()))
+ offset = _CalendarOffset(*date_offset)
+ else:
+ if date[0] == "J":
+ n_is_julian = True
+ date = date[1:]
+ else:
+ n_is_julian = False
+
+ doy = int(date)
+ offset = _DayOffset(doy, n_is_julian)
+
+ if time:
+ time_components = list(map(int, time[0].split(":")))
+ n_components = len(time_components)
+ if n_components < 3:
+ time_components.extend([0] * (3 - n_components))
+ offset.hour, offset.minute, offset.second = time_components
+
+ return offset
+
+
+def _parse_tz_delta(tz_delta):
+ match = re.match(
+ r"(?P[+-])?(?P\d{1,2})(:(?P\d{2})(:(?P\d{2}))?)?",
+ tz_delta,
+ )
+ # Anything passed to this function should already have hit an equivalent
+ # regular expression to find the section to parse.
+ assert match is not None, tz_delta
+
+ h, m, s = (
+ int(v) if v is not None else 0
+ for v in map(match.group, ("h", "m", "s"))
+ )
+
+ total = h * 3600 + m * 60 + s
+
+ if not -86400 < total < 86400:
+ raise ValueError(
+ f"Offset must be strictly between -24h and +24h: {tz_delta}"
+ )
+
+ # Yes, +5 maps to an offset of -5h
+ if match.group("sign") != "-":
+ total *= -1
+
+ return total
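
As the final comment notes, POSIX TZ offsets are inverted relative to the ISO convention, e.g.:

assert _parse_tz_delta("5") == -5 * 3600      # five hours west of UTC
assert _parse_tz_delta("-1:30") == 90 * 60    # one and a half hours east
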
diff --git a/Mac/BuildScript/resources/ReadMe.rtf b/Mac/BuildScript/resources/ReadMe.rtf
index b1e972ee..9bc96986 100644
--- a/Mac/BuildScript/resources/ReadMe.rtf
+++ b/Mac/BuildScript/resources/ReadMe.rtf
@@ -20,7 +20,7 @@ This package includes its own private copy of OpenSSL 1.1.1. The trust certifi
\f0\i0 command line utility are not used as defaults by the Python
\f3 ssl
\f0 module. A sample command script is included in
-\f3 /Applications/Python 3.8
+\f3 /Applications/Python 3.9
\f0 to install a curated bundle of default root certificates from the third-party
\f3 certifi
\f0 package ({\field{\*\fldinst{HYPERLINK "https://pypi.org/project/certifi/"}}{\fldrslt https://pypi.org/project/certifi/}}). Double-click on
@@ -48,33 +48,9 @@ Due to new security checks on macOS 10.15 Catalina, when launching IDLE macOS ma
\f0\b0 file dialog windows. Click on the
\f1\b OK
\f0\b0 button to proceed.\
-
-\f1\b \ul \
-macOS 10.15 (Catalina) Gatekeeper Requirements [changed in 3.8.2]\
-
-\f0\b0 \ulnone \
-As of 2020-02-03, Apple has changed how third-party installer packages, like those provided by python.org, are notarized for verification by Gatekeeper and begun enforcing additional requirements such as code signing and use of the hardened runtime. As of 3.8.2, python.org installer packages now meet those additional notarization requirements. The necessary changes in packaging should be transparent to your use of Python but, in the unlikely event that you encounter changes in behavior between 3.8.1 and newer 3.8.x releases in areas like ctypes, importlib, or mmap, please check bugs.python.org for existing reports and, if necessary, open a new issue.\
-
-\f1\b \ul \
-Python 2.7 end-of-life [changed in 3.8.4]\
\
-\f0\b0 \ulnone Python 2.7 has now reached end-of-life. As of Python 3.8.4, the
-\f3 Python Launcher
-\f0 app now has
-\f3 python3
-\f0 factory defaults. Also, the
-\f3 Current
-\f0 link in the
-\f3 /Library/Frameworks/Python.framework/Versions
-\f0 directory is now updated to point to the Python 3 being installed; previously, only Python 2 installs updated
-\f3 Current
-\f0 . This change might affect developers using the framework to embed Python in their applications. If another version is desired for embedding, the
-\f3 Current
-\f0 symlink can be changed manually without affecting 3.8.x behavior.\
-
-\f1\b \ul \
-Other changes\
+\f1\b \ul Other changes\
\f0\b0 \ulnone \
For other changes in this release, see the
diff --git a/Mac/BuildScript/resources/Welcome.rtf b/Mac/BuildScript/resources/Welcome.rtf
index ce53bd78..407c2258 100644
--- a/Mac/BuildScript/resources/Welcome.rtf
+++ b/Mac/BuildScript/resources/Welcome.rtf
@@ -1,5 +1,5 @@
-{\rtf1\ansi\ansicpg1252\cocoartf1671\cocoasubrtf600
-\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;\f2\fmodern\fcharset0 CourierNewPSMT;
+{\rtf1\ansi\ansicpg1252\cocoartf2513
+\cocoascreenfonts1\cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;\f2\fmodern\fcharset0 CourierNewPSMT;
}
{\colortbl;\red255\green255\blue255;}
{\*\expandedcolortbl;;}
diff --git a/Mac/Resources/iconsrc/PythonCompiled.psd b/Mac/Resources/iconsrc/PythonCompiled.psd
old mode 100755
new mode 100644
diff --git a/Mac/Resources/iconsrc/PythonIcon.psd b/Mac/Resources/iconsrc/PythonIcon.psd
old mode 100755
new mode 100644
diff --git a/Mac/Resources/iconsrc/PythonSource.psd b/Mac/Resources/iconsrc/PythonSource.psd
old mode 100755
new mode 100644
diff --git a/Makefile.pre.in b/Makefile.pre.in
index a914a9c7..77f91e72 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -143,11 +143,12 @@ LIBDIR= @libdir@
MANDIR= @mandir@
INCLUDEDIR= @includedir@
CONFINCLUDEDIR= $(exec_prefix)/include
-SCRIPTDIR= $(prefix)/lib
+PLATLIBDIR= @PLATLIBDIR@
+SCRIPTDIR= $(prefix)/$(PLATLIBDIR)
ABIFLAGS= @ABIFLAGS@
# Detailed destination directories
-BINLIBDEST= $(LIBDIR)/python$(VERSION)
+BINLIBDEST= @BINLIBDEST@
LIBDEST= $(SCRIPTDIR)/python$(VERSION)
INCLUDEPY= $(INCLUDEDIR)/python$(LDVERSION)
CONFINCLUDEPY= $(CONFINCLUDEDIR)/python$(LDVERSION)
@@ -160,6 +161,10 @@ BLDSHARED= @BLDSHARED@ $(PY_CORE_LDFLAGS)
LDCXXSHARED= @LDCXXSHARED@
DESTSHARED= $(BINLIBDEST)/lib-dynload
+# List of exported symbols for AIX
+EXPORTSYMS= @EXPORTSYMS@
+EXPORTSFROM= @EXPORTSFROM@
+
# Executable suffix (.exe on Windows and Mac OS X)
EXE= @EXEEXT@
BUILDEXE= @BUILDEXEEXT@
@@ -196,6 +201,9 @@ OPENSSL_INCLUDES=@OPENSSL_INCLUDES@
OPENSSL_LIBS=@OPENSSL_LIBS@
OPENSSL_LDFLAGS=@OPENSSL_LDFLAGS@
+# Default zoneinfo.TZPATH. Added here to expose it in sysconfig.get_config_var
+TZPATH=@TZPATH@
+
# Modes for directories, executables and data files created by the
# install process. Default to user-only-writable for all file types.
DIRMODE= 755
@@ -243,7 +251,7 @@ LIBOBJS= @LIBOBJS@
PYTHON= python$(EXE)
BUILDPYTHON= python$(BUILDEXE)
-PYTHON_FOR_REGEN=@PYTHON_FOR_REGEN@
+PYTHON_FOR_REGEN?=@PYTHON_FOR_REGEN@
UPDATE_FILE=@PYTHON_FOR_REGEN@ $(srcdir)/Tools/scripts/update_file.py
PYTHON_FOR_BUILD=@PYTHON_FOR_BUILD@
_PYTHON_HOST_PLATFORM=@_PYTHON_HOST_PLATFORM@
@@ -294,6 +302,19 @@ LIBFFI_INCLUDEDIR= @LIBFFI_INCLUDEDIR@
##########################################################################
# Parser
+
+PEGEN_OBJS= \
+ Parser/pegen/pegen.o \
+ Parser/pegen/parse.o \
+ Parser/pegen/parse_string.o \
+ Parser/pegen/peg_api.o
+
+
+PEGEN_HEADERS= \
+ $(srcdir)/Include/internal/pegen_interface.h \
+ $(srcdir)/Parser/pegen/pegen.h \
+ $(srcdir)/Parser/pegen/parse_string.h
+
POBJS= \
Parser/acceler.o \
Parser/grammar1.o \
@@ -302,9 +323,10 @@ POBJS= \
Parser/parser.o \
Parser/token.o \
-PARSER_OBJS= $(POBJS) Parser/myreadline.o Parser/parsetok.o Parser/tokenizer.o
+PARSER_OBJS= $(POBJS) $(PEGEN_OBJS) Parser/myreadline.o Parser/parsetok.o Parser/tokenizer.o
PARSER_HEADERS= \
+ $(PEGEN_HEADERS) \
$(srcdir)/Include/grammar.h \
$(srcdir)/Include/parsetok.h \
$(srcdir)/Parser/parser.h \
@@ -336,6 +358,7 @@ PYTHON_OBJS= \
Python/getversion.o \
Python/graminit.o \
Python/hamt.o \
+ Python/hashtable.o \
Python/import.o \
Python/importdl.o \
Python/initconfig.o \
@@ -392,6 +415,7 @@ OBJECT_OBJS= \
Objects/descrobject.o \
Objects/enumobject.o \
Objects/exceptions.o \
+ Objects/genericaliasobject.o \
Objects/genobject.o \
Objects/fileobject.o \
Objects/floatobject.o \
@@ -461,7 +485,7 @@ check-clean-src:
# Profile generation build must start from a clean tree.
profile-clean-stamp:
- $(MAKE) clean profile-removal
+ $(MAKE) clean
touch $@
# Compile with profile generation enabled.
@@ -485,7 +509,7 @@ profile-run-stamp:
$(MAKE) run_profile_task
$(MAKE) build_all_merge_profile
# Remove profile generation binary since we are done with it.
- $(MAKE) clean
+ $(MAKE) clean-retain-profile
# This is an expensive target to build and it does not have proper
# makefile dependency information. So, we create a "stamp" file
# to record its completion and avoid re-running it.
@@ -512,8 +536,8 @@ profile-opt: profile-run-stamp
.PHONY=coverage coverage-lcov coverage-report
coverage:
@echo "Building with support for coverage checking:"
- $(MAKE) clean profile-removal
- $(MAKE) @DEF_MAKE_RULE@ CFLAGS="$(CFLAGS) -O0 -pg -fprofile-arcs -ftest-coverage" LIBS="$(LIBS) -lgcov"
+ $(MAKE) clean
+ $(MAKE) @DEF_MAKE_RULE@ CFLAGS="$(CFLAGS) -O0 -pg --coverage" LIBS="$(LIBS) --coverage"
coverage-lcov:
@echo "Creating Coverage HTML report with LCOV:"
@@ -562,7 +586,7 @@ clinic: check-clean-src $(srcdir)/Modules/_blake2/blake2s_impl.c
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/clinic/clinic.py --make --srcdir $(srcdir)
# Build the interpreter
-$(BUILDPYTHON): Programs/python.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY)
+$(BUILDPYTHON): Programs/python.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY) $(EXPORTSYMS)
$(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS)
platform: $(BUILDPYTHON) pybuilddir.txt
@@ -634,6 +658,10 @@ libpython$(LDVERSION).dylib: $(LIBRARY_OBJS)
libpython$(VERSION).sl: $(LIBRARY_OBJS)
$(LDSHARED) -o $@ $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM)
+# List of exported symbols for AIX
+Modules/python.exp: $(LIBRARY)
+ $(srcdir)/Modules/makexp_aix $@ "$(EXPORTSFROM)" $?
+
# Copy up the gdb python hooks into a position where they can be automatically
# loaded by gdb during Lib/test/test_gdb.py
#
@@ -693,7 +721,7 @@ Makefile Modules/config.c: Makefile.pre \
@echo "The Makefile was updated, you may need to re-run make."
-Programs/_testembed: Programs/_testembed.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY)
+Programs/_testembed: Programs/_testembed.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY) $(EXPORTSYMS)
$(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/_testembed.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS)
############################################################################
@@ -730,7 +758,8 @@ regen-importlib: Programs/_freeze_importlib
# Regenerate all generated files
regen-all: regen-opcode regen-opcode-targets regen-typeslots regen-grammar \
- regen-token regen-keyword regen-symbol regen-ast regen-importlib clinic
+ regen-token regen-keyword regen-symbol regen-ast regen-importlib clinic \
+ regen-pegen-metaparser regen-pegen
############################################################################
# Special rules for object files
@@ -788,6 +817,11 @@ Python/sysmodule.o: $(srcdir)/Python/sysmodule.c Makefile $(srcdir)/Include/pydt
$(MULTIARCH_CPPFLAGS) \
-o $@ $(srcdir)/Python/sysmodule.c
+Python/initconfig.o: $(srcdir)/Python/initconfig.c
+ $(CC) -c $(PY_CORE_CFLAGS) \
+ -DPLATLIBDIR='"$(PLATLIBDIR)"' \
+ -o $@ $(srcdir)/Python/initconfig.c
+
$(IO_OBJS): $(IO_H)
.PHONY: regen-grammar
@@ -802,19 +836,35 @@ regen-grammar: regen-token
$(UPDATE_FILE) $(srcdir)/Include/graminit.h $(srcdir)/Include/graminit.h.new
$(UPDATE_FILE) $(srcdir)/Python/graminit.c $(srcdir)/Python/graminit.c.new
+.PHONY: regen-pegen-metaparser
+regen-pegen-metaparser:
+ @$(MKDIR_P) $(srcdir)/Tools/peg_generator/pegen
+ PYTHONPATH=$(srcdir)/Tools/peg_generator $(PYTHON_FOR_REGEN) -m pegen -q python \
+ $(srcdir)/Tools/peg_generator/pegen/metagrammar.gram \
+ -o $(srcdir)/Tools/peg_generator/pegen/grammar_parser.py.new
+ $(UPDATE_FILE) $(srcdir)/Tools/peg_generator/pegen/grammar_parser.py \
+ $(srcdir)/Tools/peg_generator/pegen/grammar_parser.py.new
+
+.PHONY: regen-pegen
+regen-pegen:
+ @$(MKDIR_P) $(srcdir)/Parser/pegen
+ PYTHONPATH=$(srcdir)/Tools/peg_generator $(PYTHON_FOR_REGEN) -m pegen -q c \
+ $(srcdir)/Grammar/python.gram \
+ $(srcdir)/Grammar/Tokens \
+ -o $(srcdir)/Parser/pegen/parse.new.c
+ $(UPDATE_FILE) $(srcdir)/Parser/pegen/parse.c $(srcdir)/Parser/pegen/parse.new.c
+
.PHONY=regen-ast
regen-ast:
- # Regenerate Include/Python-ast.h using Parser/asdl_c.py -h
+ # Regenerate Include/Python-ast.h and Python/Python-ast.c using Parser/asdl_c.py
$(MKDIR_P) $(srcdir)/Include
- $(PYTHON_FOR_REGEN) $(srcdir)/Parser/asdl_c.py \
- -h $(srcdir)/Include/Python-ast.h.new \
- $(srcdir)/Parser/Python.asdl
- $(UPDATE_FILE) $(srcdir)/Include/Python-ast.h $(srcdir)/Include/Python-ast.h.new
- # Regenerate Python/Python-ast.c using Parser/asdl_c.py -c
$(MKDIR_P) $(srcdir)/Python
$(PYTHON_FOR_REGEN) $(srcdir)/Parser/asdl_c.py \
- -c $(srcdir)/Python/Python-ast.c.new \
- $(srcdir)/Parser/Python.asdl
+ $(srcdir)/Parser/Python.asdl \
+ -H $(srcdir)/Include/Python-ast.h.new \
+ -C $(srcdir)/Python/Python-ast.c.new
+
+ $(UPDATE_FILE) $(srcdir)/Include/Python-ast.h $(srcdir)/Include/Python-ast.h.new
$(UPDATE_FILE) $(srcdir)/Python/Python-ast.c $(srcdir)/Python/Python-ast.c.new
.PHONY: regen-opcode
@@ -851,9 +901,10 @@ regen-token:
.PHONY: regen-keyword
regen-keyword:
- # Regenerate Lib/keyword.py from Grammar/Grammar and Grammar/Tokens
- # using Parser/pgen
- PYTHONPATH=$(srcdir) $(PYTHON_FOR_REGEN) -m Parser.pgen.keywordgen $(srcdir)/Grammar/Grammar \
+ # Regenerate Lib/keyword.py from Grammar/python.gram and Grammar/Tokens
+ # using Tools/peg_generator/pegen
+ PYTHONPATH=$(srcdir)/Tools/peg_generator $(PYTHON_FOR_REGEN) -m pegen.keywordgen \
+ $(srcdir)/Grammar/python.gram \
$(srcdir)/Grammar/Tokens \
$(srcdir)/Lib/keyword.py.new
$(UPDATE_FILE) $(srcdir)/Lib/keyword.py $(srcdir)/Lib/keyword.py.new
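
The regenerated ``Lib/keyword.py`` is what the stdlib ``keyword`` module
exposes at run time. A quick sanity check of the result, assuming a 3.9
interpreter::

    import keyword

    # kwlist is generated from Grammar/python.gram by pegen.keywordgen.
    print(len(keyword.kwlist), "keywords")
    print(keyword.iskeyword("async"))   # True
    print(keyword.iskeyword("match"))   # False; "match" is not a keyword in 3.9
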
@@ -970,7 +1021,6 @@ PYTHON_HEADERS= \
$(srcdir)/Include/bltinmodule.h \
$(srcdir)/Include/boolobject.h \
$(srcdir)/Include/bytearrayobject.h \
- $(srcdir)/Include/bytes_methods.h \
$(srcdir)/Include/bytesobject.h \
$(srcdir)/Include/cellobject.h \
$(srcdir)/Include/ceval.h \
@@ -982,7 +1032,6 @@ PYTHON_HEADERS= \
$(srcdir)/Include/context.h \
$(srcdir)/Include/descrobject.h \
$(srcdir)/Include/dictobject.h \
- $(srcdir)/Include/dtoa.h \
$(srcdir)/Include/dynamic_annotations.h \
$(srcdir)/Include/enumobject.h \
$(srcdir)/Include/errcode.h \
@@ -1022,6 +1071,7 @@ PYTHON_HEADERS= \
$(srcdir)/Include/pydtrace.h \
$(srcdir)/Include/pyerrors.h \
$(srcdir)/Include/pyfpe.h \
+ $(srcdir)/Include/pyframe.h \
$(srcdir)/Include/pyhash.h \
$(srcdir)/Include/pylifecycle.h \
$(srcdir)/Include/pymacconfig.h \
@@ -1057,10 +1107,18 @@ PYTHON_HEADERS= \
$(srcdir)/Include/Python-ast.h \
\
$(srcdir)/Include/cpython/abstract.h \
+ $(srcdir)/Include/cpython/bytearrayobject.h \
+ $(srcdir)/Include/cpython/bytesobject.h \
+ $(srcdir)/Include/cpython/ceval.h \
+ $(srcdir)/Include/cpython/code.h \
$(srcdir)/Include/cpython/dictobject.h \
$(srcdir)/Include/cpython/fileobject.h \
+ $(srcdir)/Include/cpython/fileutils.h \
+ $(srcdir)/Include/cpython/import.h \
$(srcdir)/Include/cpython/initconfig.h \
$(srcdir)/Include/cpython/interpreteridobject.h \
+ $(srcdir)/Include/cpython/listobject.h \
+ $(srcdir)/Include/cpython/methodobject.h \
$(srcdir)/Include/cpython/object.h \
$(srcdir)/Include/cpython/objimpl.h \
$(srcdir)/Include/cpython/pyerrors.h \
@@ -1072,17 +1130,25 @@ PYTHON_HEADERS= \
$(srcdir)/Include/cpython/tupleobject.h \
$(srcdir)/Include/cpython/unicodeobject.h \
\
+ $(srcdir)/Include/internal/pycore_abstract.h \
$(srcdir)/Include/internal/pycore_accu.h \
$(srcdir)/Include/internal/pycore_atomic.h \
+ $(srcdir)/Include/internal/pycore_byteswap.h \
+ $(srcdir)/Include/internal/pycore_bytes_methods.h \
+ $(srcdir)/Include/internal/pycore_call.h \
$(srcdir)/Include/internal/pycore_ceval.h \
$(srcdir)/Include/internal/pycore_code.h \
$(srcdir)/Include/internal/pycore_condvar.h \
$(srcdir)/Include/internal/pycore_context.h \
+ $(srcdir)/Include/internal/pycore_dtoa.h \
$(srcdir)/Include/internal/pycore_fileutils.h \
$(srcdir)/Include/internal/pycore_getopt.h \
$(srcdir)/Include/internal/pycore_gil.h \
$(srcdir)/Include/internal/pycore_hamt.h \
+ $(srcdir)/Include/internal/pycore_hashtable.h \
+ $(srcdir)/Include/internal/pycore_import.h \
$(srcdir)/Include/internal/pycore_initconfig.h \
+ $(srcdir)/Include/internal/pycore_interp.h \
$(srcdir)/Include/internal/pycore_object.h \
$(srcdir)/Include/internal/pycore_pathconfig.h \
$(srcdir)/Include/internal/pycore_pyerrors.h \
@@ -1090,6 +1156,8 @@ PYTHON_HEADERS= \
$(srcdir)/Include/internal/pycore_pylifecycle.h \
$(srcdir)/Include/internal/pycore_pymem.h \
$(srcdir)/Include/internal/pycore_pystate.h \
+ $(srcdir)/Include/internal/pycore_runtime.h \
+ $(srcdir)/Include/internal/pycore_sysmodule.h \
$(srcdir)/Include/internal/pycore_traceback.h \
$(srcdir)/Include/internal/pycore_tupleobject.h \
$(srcdir)/Include/internal/pycore_warnings.h \
@@ -1332,6 +1400,7 @@ LIBSUBDIRS= tkinter tkinter/test tkinter/test/test_tkinter \
test/test_import/data/circular_imports/subpkg \
test/test_import/data/package \
test/test_import/data/package2 \
+ test/test_import/data/unwritable \
importlib \
importlib/metadata \
test/test_importlib \
@@ -1372,6 +1441,7 @@ LIBSUBDIRS= tkinter tkinter/test tkinter/test/test_tkinter \
test/test_importlib/source \
test/test_importlib/zipdata01 \
test/test_importlib/zipdata02 \
+ test/test_zoneinfo test/test_zoneinfo/data \
test/ziptestdata \
asyncio \
test/test_asyncio \
@@ -1387,12 +1457,14 @@ LIBSUBDIRS= tkinter tkinter/test tkinter/test/test_tkinter \
ctypes ctypes/test ctypes/macholib \
idlelib idlelib/Icons idlelib/idle_test \
distutils distutils/command distutils/tests $(XMLLIBSUBDIRS) \
+ test/test_peg_generator \
test/test_tools test/test_warnings test/test_warnings/data \
turtledemo \
multiprocessing multiprocessing/dummy \
unittest unittest/test unittest/test/testmock \
venv venv/scripts venv/scripts/common venv/scripts/posix \
- curses pydoc_data
+ curses pydoc_data \
+ zoneinfo
libinstall: build_all $(srcdir)/Modules/xxmodule.c
@for i in $(SCRIPTDIR) $(LIBDEST); \
do \
@@ -1435,6 +1507,7 @@ libinstall: build_all $(srcdir)/Modules/xxmodule.c
*CVS) ;; \
*.py[co]) ;; \
*.orig) ;; \
+ *wininst-*.exe) ;; \
*~) ;; \
*) \
if test -d $$i; then continue; fi; \
@@ -1753,10 +1826,13 @@ docclean:
-rm -rf Doc/build
-rm -rf Doc/tools/sphinx Doc/tools/pygments Doc/tools/docutils
-clean: pycremoval
+# like the 'clean' target but retain the profile guided optimization (PGO)
+# data. The PGO data is only valid if source code remains unchanged.
+clean-retain-profile: pycremoval
find . -name '*.[oa]' -exec rm -f {} ';'
find . -name '*.s[ol]' -exec rm -f {} ';'
find . -name '*.so.[0-9]*.[0-9]*' -exec rm -f {} ';'
+ find . -name '*.lst' -exec rm -f {} ';'
find build -name 'fficonfig.h' -exec rm -f {} ';' || true
find build -name '*.py' -exec rm -f {} ';' || true
find build -name '*.py[co]' -exec rm -f {} ';' || true
@@ -1775,14 +1851,19 @@ profile-removal:
rm -rf $(COVERAGE_REPORT)
rm -f profile-run-stamp
-clobber: clean profile-removal
+clean: clean-retain-profile
+ @if test @DEF_MAKE_ALL_RULE@ = profile-opt; then \
+ rm -f profile-gen-stamp profile-clean-stamp; \
+ $(MAKE) profile-removal; \
+ fi
+
+clobber: clean
-rm -f $(BUILDPYTHON) $(LIBRARY) $(LDLIBRARY) $(DLLLIBRARY) \
tags TAGS \
config.cache config.log pyconfig.h Modules/config.c
-rm -rf build platform
-rm -rf $(PYTHONFRAMEWORKDIR)
-rm -f python-config.py python-config
- -rm -f profile-gen-stamp profile-clean-stamp
# Make things extra clean, before making a distribution:
# remove all generated files, even Makefile[.pre]
@@ -1856,6 +1937,8 @@ Python/thread.o: @THREADHEADERS@ $(srcdir)/Python/condvar.h
.PHONY: frameworkinstallmaclib frameworkinstallapps frameworkinstallunixtools
.PHONY: frameworkaltinstallunixtools recheck clean clobber distclean
.PHONY: smelly funny patchcheck touch altmaninstall commoninstall
+.PHONY: clean-retain-profile profile-removal run_profile_task
+.PHONY: build_all_generate_profile build_all_merge_profile
.PHONY: gdbhooks
# IF YOU PUT ANYTHING HERE IT WILL GO AWAY
diff --git a/Misc/ACKS b/Misc/ACKS
index 8d355da8..a16f15a7 100644
--- a/Misc/ACKS
+++ b/Misc/ACKS
@@ -12,6 +12,7 @@ PS: In the standard Python distribution, this file is encoded in UTF-8
and the list is in rough alphabetical order by last names.
Aahz
+Erlend Egeberg Aasland
Edison Abahurire
Michael Abbott
Rajiv Abraham
@@ -23,6 +24,7 @@ Eitan Adler
Anton Afanasyev
Ali Afshar
Nitika Agarwal
+Anjani Agrawal
Pablo S. Blum de Aguiar
Jim Ahlstrom
Farhan Ahmad
@@ -45,6 +47,7 @@ Rose Ames
A. Amoroso
Mark Anacker
Shashwat Anand
+Ananthakrishnan
Anders Andersen
Tycho Andersen
John Anderson
@@ -84,8 +87,10 @@ Marcin Bachry
Alfonso Baciero
Dwayne Bailey
Stig Bakken
+Lumír Balhar
Aleksandr Balezin
Greg Ball
+Lewis Ball
Luigi Ballabio
Thomas Ballinger
Jeff Balogh
@@ -232,6 +237,7 @@ Floris Bruynooghe
Matt Bryant
Stan Bubrouski
Brandt Bucher
+Curtis Bucher
Colm Buckley
Erik de Bueger
Jan-Hein Bührman
@@ -239,6 +245,7 @@ Lars Buitinck
Dick Bulterman
Bill Bumgarner
Jimmy Burgett
+Charles Burkland
Edmond Burnett
Tommy Burnette
Roger Burnham
@@ -257,6 +264,7 @@ Ben Caller
Arnaud Calmettes
Daniel Calvelo
Tony Campbell
+Giovanni Cappellotto
Brett Cannon
Tristan Carel
Mike Carlton
@@ -285,6 +293,7 @@ Brad Chapman
Greg Chapman
Mitch Chapman
Matt Chaput
+William Chargin
Yogesh Chaudhari
David Chaum
Nicolas Chauvat
@@ -309,6 +318,7 @@ Gilles Civario
Chris Clark
Diana Clarke
Laurie Clark-Michalek
+Alexander Clarkson
Mike Clarkson
Andrew Clegg
Brad Clements
@@ -324,6 +334,7 @@ Benjamin Collar
Jeffery Collins
Robert Collins
Paul Colomiets
+Samuel Colvin
Christophe Combelles
Geremy Condra
Denver Coneybeare
@@ -363,8 +374,8 @@ Tom Culliton
Raúl Cumplido
Antonio Cuni
Brian Curtin
-Hakan Celik
Jason Curtis
+Hakan Celik
Paul Dagnelie
Lisandro Dalcin
Darren Dale
@@ -449,6 +460,7 @@ Rodolpho Eckhardt
Ulrich Eckhardt
David Edelsohn
John Edmonds
+Benjamin Edwards
Grant Edwards
Zvi Effron
John Ehresman
@@ -467,6 +479,7 @@ Tom Epperly
Gökcen Eraslan
Stoffel Erasmus
Jürgen A. Erhard
+Florian Ernst
Michael Ernst
Ben Escoto
Andy Eskilsson
@@ -508,6 +521,7 @@ Tomer Filiba
Segev Finer
Jeffrey Finkelstein
Russell Finn
+Neal Finne
Dan Finnie
Nils Fischbeck
Frederik Fix
@@ -555,11 +569,13 @@ Riccardo Attilio Galli
Raymund Galvin
Nitin Ganatra
Fred Gansevles
+Paul Ganssle
Lars Marius Garshol
Jake Garver
Dan Gass
Tim Gates
Andrew Gaul
+Lewis Gaul
Matthieu Gautier
Stephen M. Gava
Xavier de Gaye
@@ -643,6 +659,7 @@ Mark Hammond
Harald Hanche-Olsen
Manus Hand
Milton L. Hankins
+Carl Bordum Hansen
Stephen Hansen
Barry Hantman
Lynda Hardman
@@ -653,6 +670,7 @@ David Harrigan
Brian Harring
Jonathan Hartley
Travis B. Hartwell
+Henrik Harutyunyan
Shane Harvey
Larry Hastings
Tim Hatch
@@ -660,6 +678,7 @@ Zac Hatfield-Dodds
Shane Hathaway
Michael Haubenwallner
Janko Hauser
+Flavian Hautbois
Rycharde Hawkes
Ben Hayden
Jochen Hayek
@@ -721,6 +740,7 @@ Rob Hooft
Michiel de Hoon
Brian Hooper
Randall Hopper
+Tim Hopper
Nadav Horesh
Alon Horev
Jan Hosang
@@ -739,6 +759,7 @@ Lawrence Hudson
Michael Hudson
Jim Hugunin
Greg Humphreys
+Chris Hunt
Eric Huss
Nehal Hussain
Taihyun Hwang
@@ -747,6 +768,7 @@ Ludwig Hähne
Gerhard Häring
Fredrik Håård
Florian Höch
+Oleg Höfling
Robert Hölzl
Catalin Iacob
Mihai Ibanescu
@@ -839,6 +861,7 @@ Dmitry Kazakov
Brian Kearns
Sebastien Keim
Ryan Kelly
+Hugo van Kemenade
Dan Kenigsberg
Randall Kern
Robert Kern
@@ -851,6 +874,7 @@ Dhiru Kholia
Artem Khramov
Akshit Khurana
Sanyam Khurana
+Tyler Kieft
Mads Kiilerich
Jason Killen
Jan Kim
@@ -993,6 +1017,7 @@ Robert Li
Xuanji Li
Zekun Li
Zheao Li
+Dan Lidral-Porter
Robert van Liere
Ross Light
Shawn Ligocki
@@ -1101,6 +1126,7 @@ Ezio Melotti
Doug Mennella
Dimitri Merejkowsky
Brian Merrell
+Bruce Merry
Alexis Métaireau
Luke Mewburn
Carl Meyer
@@ -1215,10 +1241,12 @@ Elena Oat
Jon Oberheide
Milan Oberkirch
Pascal Oberndoerfer
+Géry Ogam
Jeffrey Ollie
Adam Olsen
Bryan Olson
Grant Olson
+Furkan Onder
Koray Oner
Ethan Onstott
Piet van Oostrum
@@ -1419,6 +1447,7 @@ Mike Romberg
Armin Ronacher
Case Roole
Timothy Roscoe
+Joel Rosdahl
Erik Rose
Mark Roseman
Josh Rosenberg
@@ -1507,6 +1536,7 @@ Steven Scott
Nick Seidenman
Michael Seifert
Žiga Seilnacht
+Jendrik Seipp
Michael Selik
Yury Selivanov
Fred Sells
@@ -1555,6 +1585,7 @@ Kirill Simonov
Nathan Paul Simons
Guilherme Simões
Adam Simpkins
+Karthikeyan Singaravelan
Mandeep Singh
Ravi Sinha
Janne Sinkkonen
@@ -1565,6 +1596,7 @@ J. Sipprell
Ngalim Siregar
Kragen Sitaker
Kaartic Sivaraam
+Roman Skurikhin
Ville Skyttä
Michael Sloan
Nick Sloan
@@ -1602,6 +1634,7 @@ Tage Stabell-Kulo
Quentin Stafford-Fraser
Frank Stajano
Joel Stanley
+Kyle Stanley
Anthony Starks
David Steele
Oliver Steele
@@ -1638,6 +1671,7 @@ Hisao Suzuki
Kalle Svensson
Andrew Svetlov
Paul Swartz
+Dennis Sweeney
Al Sweigart
Sviatoslav Sydorenko
Thenault Sylvain
@@ -1660,6 +1694,7 @@ William Tanksley
Christian Tanzer
Steven Taschuk
Batuhan Taskaya
+Stefan Tatschner
Amy Taylor
Julian Taylor
Monty Taylor
@@ -1731,6 +1766,7 @@ Roger Upole
Daniel Urban
Michael Urman
Hector Urtubia
+Elizabeth Uselton
Lukas Vacek
Ville Vainio
Yann Vaginay
@@ -1795,6 +1831,7 @@ Steve Weber
Corran Webster
Glyn Webster
Phil Webster
+Antoine Wecxsteen
Stefan Wehr
Zack Weinberg
Bob Weiner
@@ -1806,6 +1843,7 @@ Jeff Wheeler
Christopher White
David White
Mats Wichmann
+Pete Wicken
Marcel Widjaja
Truida Wiedijk
Felix Wiemann
@@ -1829,6 +1867,7 @@ Alex Willmer
David Wilson
Geoff Wilson
Greg V. Wilson
+Huon Wilson
J Derek Wilson
Paul Winkler
Jody Winston
@@ -1849,6 +1888,7 @@ Klaus-Juergen Wolf
Dan Wolfe
Richard Wolff
Adam Woodbeck
+William Woodruff
Steven Work
Gordon Worley
Darren Worrall
@@ -1872,6 +1912,7 @@ EungJun Yi
Bob Yodlowski
Danny Yoo
Wonsup Yoon
+Andrew York
Rory Yorke
George Yoshida
Kazuhiro Yoshida
@@ -1897,5 +1938,6 @@ Jelle Zijlstra
Gennadiy Zlobin
Doug Zongker
Peter Åstrand
+Vlad Emelianov
(Entries should be added in rough alphabetical order by last names)
diff --git a/Misc/HISTORY b/Misc/HISTORY
index fa5a05fd..32b2a378 100644
--- a/Misc/HISTORY
+++ b/Misc/HISTORY
@@ -2113,7 +2113,7 @@ Build
- Issue #21285: Refactor and fix curses configure check to always search
in a ncursesw directory.
-- Issue #15234: For BerkelyDB and Sqlite, only add the found library and
+- Issue #15234: For BerkeleyDB and Sqlite, only add the found library and
include directories if they aren't already being searched. This avoids
an explicit runtime library dependency.
diff --git a/Misc/NEWS b/Misc/NEWS
index 6e94ca3e..81da2bed 100644
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -2,101 +2,102 @@
Python News
+++++++++++
-What's New in Python 3.8.6 final?
+What's New in Python 3.9.0 final?
=================================
-*Release date: 2020-09-23*
-
-Core and Builtins
------------------
-
-- bpo-41525: The output of ``python --help`` contains now only ASCII
- characters.
+*Release date: 2020-10-04*
Library
-------
-- bpo-41817: fix `tkinter.EventType` Enum so all members are strings, and
- none are tuples
-
- bpo-41815: Fix SQLite3 segfault when backing up closed database. Patch
contributed by Peter David McCormick.
-- bpo-41517: fix bug allowing Enums to be extended via multiple inheritance
-
-- bpo-39587: use the correct mix-in data type when constructing Enums
-
-- bpo-41789: Honor `object` overrides in `Enum` class creation
- (specifically, `__str__`, `__repr__`, `__format__`, and `__reduce_ex__`).
-
-- bpo-39651: Fix a race condition in the ``call_soon_threadsafe()`` method
- of ``asyncio.ProactorEventLoop``: do nothing if the self-pipe socket has
- been closed.
+- bpo-41662: No longer override exceptions raised in ``__len__()`` of a
+ sequence of parameters in :mod:`sqlite3` with
+ :exc:`~sqlite3.ProgrammingError`.
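
A minimal sketch of the bpo-41662 behaviour described above: a parameter
sequence whose ``__len__()`` raises now lets the original exception propagate
instead of it being replaced by ``ProgrammingError``. ``BadParams`` is purely
illustrative::

    import sqlite3

    class BadParams:
        """Illustrative sequence whose __len__() raises."""
        def __len__(self):
            raise ValueError("broken length")
        def __getitem__(self, index):
            return 1

    con = sqlite3.connect(":memory:")
    try:
        con.execute("SELECT ?", BadParams())
    except ValueError as exc:
        # The original ValueError is no longer masked by ProgrammingError.
        print("propagated:", exc)
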
-- bpo-41720: Fixed :meth:`turtle.Vec2D.__rmul__` for arguments which are not
- int or float.
+- bpo-41662: Fixed a crash when mutating the list of parameters during
+ iteration in :mod:`sqlite3`.
- bpo-39728: fix default `_missing_` so a duplicate `ValueError` is not set
as the `__context__` of the original `ValueError`
-- bpo-37479: When `Enum.__str__` is overridden in a derived class, the
- override will be used by `Enum.__format__` regardless of whether mixin
- classes are present.
-
-Documentation
--------------
-
-- bpo-35293: Fix RemovedInSphinx40Warning when building the documentation.
- Patch by Dong-hee Na.
-
-- bpo-37149: Change Shipman tkinter doc link from archive.org to TkDocs.
- (The doc has been removed from the NMT server.) The new link responds
- much faster and includes a short explanatory note.
-
Tests
-----
-- bpo-41731: Make test_cmd_line_script pass with option '-vv'.
-
-Windows
--------
-
-- bpo-41744: Fixes automatic import of props file when using the Nuget
- package.
-
-IDLE
-----
+- bpo-41602: Add tests for SIGINT handling in the runpy module.
-- bpo-35764: Rewrite the Calltips doc section.
+Build
+-----
-- bpo-40181: In calltips, stop reminding that '/' marks the end of
- positional-only arguments.
+- bpo-38249: Update :c:macro:`Py_UNREACHABLE` to use __builtin_unreachable()
+ only if the compiler is able to use it. Patch by Dong-hee Na.
-What's New in Python 3.8.6 release candidate 1?
+What's New in Python 3.9.0 release candidate 2?
===============================================
-*Release date: 2020-09-07*
+*Release date: 2020-09-16*
Core and Builtins
-----------------
+- bpo-41780: Fix :meth:`__dir__` of :class:`types.GenericAlias`. Patch by
+ Batuhan Taskaya.
+
+- bpo-41690: Fix a possible stack overflow in the parser when parsing
+ functions and classes with a huge amount of arguments. Patch by Pablo
+ Galindo.
+
+- bpo-41681: Fixes the wrong error description in the error raised when using
+ two `,` in a format string with f-strings and :meth:`str.format`.
+
- bpo-41654: Fix a crash that occurred when destroying subclasses of
:class:`MemoryError`. Patch by Pablo Galindo.
+- bpo-41631: The ``_ast`` module again uses a global state. Using a module
+ state per module instance was causing subtle practical problems. For
+ example, the Mercurial project replaces the ``__import__()`` function to
+ implement lazy import, whereas Python expects that ``import _ast`` always
+ returns a fully initialized ``_ast`` module.
+
- bpo-41533: Free the stack allocated in ``va_build_stack`` if
``do_mkstack`` fails and the stack is not a ``small_stack``.
-- bpo-38156: Handle interrupts that come after EOF correctly in
- ``PyOS_StdioReadline``.
+- bpo-41531: Fix a bug that was dropping keys when compiling dict literals
+ with more than 0xFFFF elements. Patch by Pablo Galindo.
+
+- bpo-41525: The output of ``python --help`` now contains only ASCII
+ characters.
+
+- bpo-29590: Make the stack trace correct after calling
+ :meth:`generator.throw` on a generator that has yielded from a ``yield
+ from``.
Library
-------
+- bpo-41517: fix bug allowing Enums to be extended via multiple inheritance
+
+- bpo-39587: use the correct mix-in data type when constructing Enums
+
+- bpo-41789: Honor `object` overrides in `Enum` class creation
+ (specifically, `__str__`, `__repr__`, `__format__`, and `__reduce_ex__`).
+
+- bpo-39651: Fix a race condition in the ``call_soon_threadsafe()`` method
+ of ``asyncio.ProactorEventLoop``: do nothing if the self-pipe socket has
+ been closed.
+
+- bpo-41720: Fixed :meth:`turtle.Vec2D.__rmul__` for arguments which are not
+ int or float.
+
- bpo-41696: Fix handling of debug mode in :func:`asyncio.run`. This allows
setting ``PYTHONASYNCIODEBUG`` or ``-X dev`` to enable asyncio debug mode
when using :func:`asyncio.run`.
+- bpo-41687: Fix implementation of sendfile to be compatible with Solaris.
+
- bpo-39010: Restarting a ``ProactorEventLoop`` on Windows no longer logs
spurious ``ConnectionResetErrors``.
@@ -120,30 +121,11 @@ Library
- bpo-41503: Fixed a race between setTarget and flush in
logging.handlers.MemoryHandler.
-- bpo-41497: Fix potential UnicodeDecodeError in dis module.
-
-- bpo-41490: Update :mod:`ensurepip` to install pip 20.2.1 and setuptools
- 49.2.1.
-
-- bpo-41467: On Windows, fix asyncio ``recv_into()`` return value when the
- socket/pipe is closed (:exc:`BrokenPipeError`): return ``0`` rather than
- an empty byte string (``b''``).
-
-- bpo-41425: Make tkinter doc example runnable.
-
-- bpo-41384: Raise TclError instead of TypeError when an unknown option is
- passed to tkinter.OptionMenu.
-
-- bpo-38731: Fix :exc:`NameError` in command-line interface of
- :mod:`py_compile`.
-
-- bpo-41364: Reduce import overhead of :mod:`uuid`.
-
- bpo-41344: Prevent creating :class:`shared_memory.SharedMemory` objects
with :code:`size=0`.
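
A quick illustration of the bpo-41344 entry above; the exact error message may
vary, but creating a block with ``size=0`` is rejected::

    from multiprocessing import shared_memory

    try:
        shared_memory.SharedMemory(create=True, size=0)
    except ValueError as exc:
        # Zero-sized shared memory blocks are refused as of this release.
        print("rejected:", exc)
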
-- bpo-40726: Handle cases where the ``end_lineno`` is ``None`` on
- :func:`ast.increment_lineno`.
+- bpo-41025: Fixed an issue preventing the C implementation of
+ :class:`zoneinfo.ZoneInfo` from being subclassed.
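
The bpo-41025 fix above means the C-accelerated ``zoneinfo.ZoneInfo`` can be
subclassed again. A small sketch, assuming the system time zone database (or
the third-party ``tzdata`` package) provides ``Europe/Dublin``; ``TaggedZone``
is illustrative only::

    from datetime import datetime
    from zoneinfo import ZoneInfo

    class TaggedZone(ZoneInfo):
        """Illustrative subclass; previously this failed with the C implementation."""

    dt = datetime(2020, 10, 4, 12, 0, tzinfo=TaggedZone("Europe/Dublin"))
    print(dt.utcoffset())   # 1:00:00 (Irish Standard Time) on this date
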
- bpo-31122: ssl.wrap_socket() now raises ssl.SSLEOFError rather than
OSError when peer closes connection during TLS negotiation
@@ -154,38 +136,39 @@ Library
Documentation
-------------
+- bpo-37149: Change Shipman tkinter doc link from archive.org to TkDocs.
+ (The doc has been removed from the NMT server.) The new link responds
+ much faster and includes a short explanatory note.
+
- bpo-41624: Fix the signature of :class:`typing.Coroutine`.
- bpo-40204: Enable Sphinx 3.2 ``c_allow_pre_v3`` option and disable
``c_warn_on_allowed_pre_v3`` option to make the documentation compatible
with Sphinx 2 and Sphinx 3.
-- bpo-41045: Add documentation for debug feature of f-strings.
-
-- bpo-41314: Changed the release when ``from __future__ import annotations``
- becomes the default from ``4.0`` to ``3.10`` (following a change in PEP
- 563).
+- bpo-40979: Refactored typing.rst, arranging more than 70 classes,
+ functions, and decorators into new sub-sections.
- bpo-39883: Make code, examples, and recipes in the Python documentation be
licensed under the more permissive BSD0 license in addition to the
existing Python 2.0 license.
-Windows
--------
+Tests
+-----
-- bpo-41492: Fixes the description that appears in UAC prompts.
+- bpo-41731: Make test_cmd_line_script pass with option '-vv'.
-- bpo-40741: Update Windows release to include SQLite 3.32.3.
+Build
+-----
-IDLE
-----
+- bpo-41617: Fix ``pycore_byteswap.h`` header file to support old clang
+ versions: ``__builtin_bswap16()`` is not available in LLVM clang 3.0.
-- bpo-41468: Improve IDLE run crash error message (which users should never
- see).
+Windows
+-------
-- bpo-41373: Save files loaded with no line ending, as when blank, or
- different line endings, by setting its line ending to the system default.
- Fix regression in 3.8.4 and 3.9.0b4.
+- bpo-41526: Fixed layout of final page of the installer by removing the
+ special thanks to Mark Hammond (with his permission).
C API
-----
@@ -194,84 +177,121 @@ C API
pointers beyond the end of a string.
-What's New in Python 3.8.5 final?
-=================================
-
-*Release date: 2020-07-20*
-
-Security
---------
-
-- bpo-41304: Fixes `python3x._pth` being ignored on Windows, caused by the
- fix for :issue:`29778` (CVE-2020-15801).
+What's New in Python 3.9.0 release candidate 1?
+===============================================
-- bpo-39603: Prevent http header injection by rejecting control characters
- in http.client.putrequest(...).
+*Release date: 2020-08-11*
Core and Builtins
-----------------
-- bpo-41295: Resolve a regression in CPython 3.8.4 where defining
- "__setattr__" in a multi-inheritance setup and calling up the hierarchy
- chain could fail if builtins/extension types were involved in the base
- types.
+- bpo-38156: Handle interrupts that come after EOF correctly in
+ ``PyOS_StdioReadline``.
Library
-------
-- bpo-41288: Unpickling invalid NEWOBJ_EX opcode with the C implementation
- raises now UnpicklingError instead of crashing.
+- bpo-41497: Fix potential UnicodeDecodeError in dis module.
-- bpo-39017: Avoid infinite loop when reading specially crafted TAR files
- using the tarfile module (CVE-2019-20907).
+- bpo-41490: Update :mod:`ensurepip` to install pip 20.2.1 and setuptools
+ 49.2.1.
+
+- bpo-41467: On Windows, fix asyncio ``recv_into()`` return value when the
+ socket/pipe is closed (:exc:`BrokenPipeError`): return ``0`` rather than
+ an empty byte string (``b''``).
+
+- bpo-41425: Make tkinter doc example runnable.
+
+- bpo-41384: Raise TclError instead of TypeError when an unknown option is
+ passed to tkinter.OptionMenu.
+
+- bpo-38731: Fix :exc:`NameError` in command-line interface of
+ :mod:`py_compile`.
+
+- bpo-41317: Use add_done_callback() in asyncio.loop.sock_accept() to
+ unsubscribe reader early on cancellation.
+
+- bpo-41364: Reduce import overhead of :mod:`uuid`.
+
+- bpo-41341: Recursive evaluation of `typing.ForwardRef` in
+ `get_type_hints`.
+
+- bpo-41182: selector: use DefaultSelector based upon implementation
+
+- bpo-40726: Handle cases where the ``end_lineno`` is ``None`` on
+ :func:`ast.increment_lineno`.
Documentation
-------------
-- bpo-37703: Updated Documentation to comprehensively elaborate on the
- behaviour of gather.cancel()
+- bpo-41045: Add documentation for debug feature of f-strings.
-Build
------
+- bpo-41314: Changed the release when ``from __future__ import annotations``
+ becomes the default from ``4.0`` to ``3.10`` (following a change in PEP
+ 563).
-- bpo-41302: Enable building Python 3.8 with libmpdec-2.5.0 to ease
- maintenance for Linux distributions. Patch by Felix Yan.
+Windows
+-------
-macOS
------
+- bpo-41492: Fixes the description that appears in UAC prompts.
-- bpo-40741: Update macOS installer to use SQLite 3.32.3.
+- bpo-40948: Improve post-install message to direct people to the "py"
+ command.
+
+- bpo-41412: The installer will now fail to install on Windows 7 and Windows
+ 8. Further, the UCRT dependency is now always downloaded on demand.
+
+- bpo-40741: Update Windows release to include SQLite 3.32.3.
IDLE
----
-- bpo-41300: Save files with non-ascii chars. Fix regression released in
- 3.9.0b4 and 3.8.4.
+- bpo-41468: Improve IDLE run crash error message (which users should never
+ see).
+- bpo-41373: Save files loaded with no line ending, as when blank, or
+ different line endings, by setting its line ending to the system default.
+ Fix regression in 3.8.4 and 3.9.0b4.
-What's New in Python 3.8.4 final?
-=================================
-*Release date: 2020-07-13*
+What's New in Python 3.9.0 beta 5?
+==================================
+
+*Release date: 2020-07-20*
Security
--------
+- bpo-41304: Fixes `python3x._pth` being ignored on Windows, caused by the
+ fix for :issue:`29778` (CVE-2020-15801).
+
- bpo-41162: Audit hooks are now cleared later during finalization to avoid
missing events.
- bpo-29778: Ensure :file:`python3.dll` is loaded from correct locations
when Python is embedded (CVE-2020-15523).
+- bpo-39603: Prevent http header injection by rejecting control characters
+ in http.client.putrequest(...).
+
Core and Builtins
-----------------
+- bpo-41295: Resolve a regression in CPython 3.8.4 where defining
+ "__setattr__" in a multi-inheritance setup and calling up the hierarchy
+ chain could fail if builtins/extension types were involved in the base
+ types.
+
- bpo-41247: Always cache the running loop holder when running
``asyncio.set_running_loop``.
- bpo-41252: Fix incorrect refcounting in _ssl.c's
``_servername_callback()``.
+- bpo-41215: Use non-NULL default values in the PEG parser keyword list to
+ overcome a bug that was preventing Python from being properly compiled
+ when using the XLC compiler. Patch by Pablo Galindo.
+
- bpo-41218: Python 3.8.3 had a regression where compiling with
ast.PyCF_ALLOW_TOP_LEVEL_AWAIT would aggressively mark list comprehension
with CO_COROUTINE. Now only list comprehension making use of async/await
@@ -287,34 +307,59 @@ Core and Builtins
Library
-------
+- bpo-41288: Unpickling invalid NEWOBJ_EX opcode with the C implementation
+ raises now UnpicklingError instead of crashing.
+
+- bpo-39017: Avoid infinite loop when reading specially crafted TAR files
+ using the tarfile module (CVE-2019-20907).
+
- bpo-41235: Fix the error handling in
:meth:`ssl.SSLContext.load_dh_params`.
-- bpo-41193: The ``write_history()`` atexit function of the readline
- completer now ignores any :exc:`OSError` to ignore error if the filesystem
- is read-only, instead of only ignoring :exc:`FileNotFoundError` and
- :exc:`PermissionError`.
+- bpo-41207: In distutils.spawn, restore expectation that DistutilsExecError
+ is raised when the command is not found.
-- bpo-41043: Fixed the use of :func:`~glob.glob` in the stdlib: literal part
- of the path is now always correctly escaped.
+- bpo-39168: Remove the ``__new__`` method of :class:`typing.Generic`.
+
+- bpo-41194: Fix a crash in the ``_ast`` module: it can no longer be loaded
+ more than once. It now uses a global state rather than a module state.
- bpo-39384: Fixed email.contentmanager to allow set_content() to set a null
string.
+Documentation
+-------------
+
+- bpo-37703: Updated Documentation to comprehensively elaborate on the
+ behaviour of gather.cancel()
+
+macOS
+-----
+
+- bpo-40741: Update macOS installer to use SQLite 3.32.3.
+
IDLE
----
+- bpo-41300: Save files with non-ascii chars. Fix regression released in
+ 3.9.0b4 and 3.8.4.
+
- bpo-37765: Add keywords to module name completion list. Rewrite
Completions section of IDLE doc.
-- bpo-41152: The encoding of ``stdin``, ``stdout`` and ``stderr`` in IDLE is
- now always UTF-8.
+C API
+-----
+- bpo-40170: Revert :c:func:`PyType_HasFeature` change: it reads again
+ directly the :c:member:`PyTypeObject.tp_flags` member when the limited C
+ API is not used, rather than always calling :c:func:`PyType_GetFlags`
+ which hides implementation details.
-What's New in Python 3.8.4 release candidate 1?
-===============================================
-*Release date: 2020-06-29*
+What's New in Python 3.9.0 beta 4?
+==================================
+
+*Release date: 2020-07-02*
Security
--------
@@ -325,44 +370,78 @@ Security
fix uses hash() to generate hash values for the tuple of (address, mask
length, network address).
-- bpo-39073: Disallow CR or LF in email.headerregistry.Address arguments to
- guard against header injection attacks.
-
Core and Builtins
-----------------
- bpo-41094: Fix decoding errors with audit when open files with non-ASCII
names on non-UTF-8 locale.
+- bpo-41084: Prefix the error message with 'f-string: ', when parsing an
+ f-string expression which throws a :exc:`SyntaxError`.
+
+- bpo-41076: Pre-feed the parser with the location of the f-string
+ expression, not the f-string itself, which allows us to skip the shifting
+ of the AST node locations after the parsing is completed.
+
+- bpo-40939: Deprecate :c:func:`PyNode_Compile`.
+
- bpo-41056: Fixes a reference to deallocated stack space during startup
when constructing sys.path involving a relative symlink when code was
supplied via -c. (discovered via Coverity)
+- bpo-41061: Fix incorrect expressions and asserts in hashtable code and
+ tests.
+
+- bpo-41052: Opt out serialization/deserialization for _random.Random
+
+- bpo-40939: Deprecate :c:func:`PyParser_SimpleParseStringFlags`,
+ :c:func:`PyParser_SimpleParseStringFlagsFilename` and
+ :c:func:`PyParser_SimpleParseFileFlags`.
+
- bpo-35975: Stefan Behnel reported that cf_feature_version is used even
when PyCF_ONLY_AST is not set. This is against the intention and against
the documented behavior, so it's been fixed.
+- bpo-40985: Fix a bug that caused the :exc:`SyntaxError` text to be empty
+ when a file ends with a line ending in a line continuation character (i.e.
+ backslash). The error text should contain the text of the last line.
+
+- bpo-40958: Fix a possible buffer overflow in the PEG parser when gathering
+ information for emitting syntax errors. Patch by Pablo Galindo.
+
- bpo-40957: Fix refleak in _Py_fopen_obj() when PySys_Audit() fails
-- bpo-40870: Raise :exc:`ValueError` when validating custom AST's where the
- constants ``True``, ``False`` and ``None`` are used within a
- :class:`ast.Name` node.
+- bpo-40947: The Python :ref:`Path Configuration ` now
+ takes :c:member:`PyConfig.platlibdir` into account.
-- bpo-40826: Fix GIL usage in :c:func:`PyOS_Readline`: lock the GIL to set
- an exception and pass the Python thread state when checking if there is a
- pending signal.
+- bpo-40847: Fix a bug where a line with only a line continuation character
+ is not considered a blank line at tokenizer level. In such cases, more
+ than a single `NEWLINE` token was emitted. The old parser was working
+ around the issue, but the new parser threw a :exc:`SyntaxError` for valid
+ input due to this. For example, an empty line following a line
+ continuation character was interpreted as a :exc:`SyntaxError`.
- bpo-40824: Unexpected errors in calling the ``__iter__`` method are no
longer masked by ``TypeError`` in the :keyword:`in` operator and functions
:func:`~operator.contains`, :func:`~operator.indexOf` and
:func:`~operator.countOf` of the :mod:`operator` module.
-- bpo-40663: Correctly generate annotations where parentheses are omitted
- but required (e.g: ``Type[(str, int, *other))]``.
+- bpo-19569: Add the private macros ``_Py_COMP_DIAG_PUSH``,
+ ``_Py_COMP_DIAG_IGNORE_DEPR_DECLS``, and ``_Py_COMP_DIAG_POP``.
Library
-------
+- bpo-41193: The ``write_history()`` atexit function of the readline
+ completer now ignores any :exc:`OSError` to ignore error if the filesystem
+ is read-only, instead of only ignoring :exc:`FileNotFoundError` and
+ :exc:`PermissionError`.
+
+- bpo-41161: The decimal module now requires libmpdec-2.5.0. Users of
+ --with-system-libmpdec should update their system library.
+
+- bpo-40874: The decimal module now requires libmpdec-2.5.0.
+
- bpo-41138: Fixed the :mod:`trace` module CLI for Python source files with
non-UTF-8 encoding.
@@ -378,82 +457,46 @@ Library
- bpo-41056: Fix a NULL pointer dereference within the ssl module during a
MemoryError in the keylog callback. (discovered by Coverity)
+- bpo-41056: Fixed an instance where a MemoryError within the zoneinfo
+ module might not be reported, or might be reported away from its source.
+ (found by Coverity)
+
- bpo-41048: :func:`mimetypes.read_mime_types` function reads the rule file
using UTF-8 encoding, not the locale encoding. Patch by Srinivas Reddy
Thatiparthy.
+- bpo-41043: Fixed the use of :func:`~glob.glob` in the stdlib: literal part
+ of the path is now always correctly escaped.
+
- bpo-40448: :mod:`ensurepip` now disables the use of `pip` cache when
installing the bundled versions of `pip` and `setuptools`. Patch by
Krzysztof Konopko.
+- bpo-40967: Removed :meth:`asyncio.Task.current_task` and
+ :meth:`asyncio.Task.all_tasks`. Patch contributed by Rémi Lapeyre.
+
+- bpo-40955: Fix a minor memory leak in :mod:`subprocess` module when
+ extra_groups was specified.
+
- bpo-40855: The standard deviation and variance functions in the statistics
module were ignoring their mu and xbar arguments.
-- bpo-40807: Stop codeop._maybe_compile, used by code.InteractiveInterpreter
- (and IDLE). from from emitting each warning three times.
+- bpo-40924: Removed support for loaders implementing .files and supplying
+ TraversableResources.
+
+- bpo-40939: Use the new PEG parser when generating the stdlib
+ :mod:`keyword` module.
- bpo-40834: Fix truncate when sending str object
with_xxsubinterpreters.channel_send.
-- bpo-38488: Update ensurepip to install pip 20.1.1 and setuptools 47.1.0.
-
-- bpo-40767: :mod:`webbrowser` now properly finds the default browser in
- pure Wayland systems by checking the WAYLAND_DISPLAY environment variable.
- Patch contributed by Jérémy Attali.
-
-- bpo-40795: :mod:`ctypes` module: If ctypes fails to convert the result of
- a callback or if a ctypes callback function raises an exception,
- sys.unraisablehook is now called with an exception set. Previously, the
- error was logged into stderr by :c:func:`PyErr_Print`.
-
-- bpo-30008: Fix :mod:`ssl` code to be compatible with OpenSSL 1.1.x builds
- that use ``no-deprecated`` and ``--api=1.1.0``.
-
-- bpo-40614: :func:`ast.parse` will not parse self documenting expressions
- in f-strings when passed ``feature_version`` is less than ``(3, 8)``.
-
-- bpo-40626: Add h5 file extension as MIME Type application/x-hdf5, as per
- HDF Group recommendation for HDF5 formatted data files. Patch contributed
- by Mark Schwab.
-
-- bpo-25872: :mod:`linecache` could crash with a :exc:`KeyError` when
- accessed from multiple threads. Fix by Michael Graczyk.
-
-- bpo-40597: If text content lines are longer than policy.max_line_length,
- always use a content-encoding to make sure they are wrapped.
-
-- bpo-40515: The :mod:`ssl` and :mod:`hashlib` modules now actively check
- that OpenSSL is build with thread support. Python 3.7.0 made thread
- support mandatory and no longer works safely with a no-thread builds.
-
-- bpo-13097: ``ctypes`` now raises an ``ArgumentError`` when a callback is
- invoked with more than 1024 arguments.
-
-- bpo-40457: The ssl module now support OpenSSL builds without TLS 1.0 and
- 1.1 methods.
-
-- bpo-39830: Add :class:`zipfile.Path` to ``__all__`` in the :mod:`zipfile`
- module.
-
-- bpo-40025: Raise TypeError when _generate_next_value_ is defined after
- members. Patch by Ethan Onstott.
-
-- bpo-39244: Fixed :class:`multiprocessing.context.get_all_start_methods` to
- properly return the default method first on macOS.
-
-- bpo-39040: Fix parsing of invalid mime headers parameters by collapsing
- whitespace between encoded words in a bare-quote-string.
-
-- bpo-35714: :exc:`struct.error` is now raised if there is a null character
- in a :mod:`struct` format string.
+- bpo-26407: Unexpected errors in calling the ``__iter__`` method are no
+ longer masked by ``TypeError`` in :func:`csv.reader`,
+ :func:`csv.writer.writerow` and :meth:`csv.writer.writerows`.
-- bpo-36290: AST nodes are now raising :exc:`TypeError` on conflicting
- keyword arguments. Patch contributed by Rémi Lapeyre.
+- bpo-38488: Update ensurepip to install pip 20.1.1 and setuptools 47.1.0.
-- bpo-29620: :func:`~unittest.TestCase.assertWarns` no longer raises a
- ``RuntimeException`` when accessing a module's ``__warningregistry__``
- causes importation of a new module, or when a new module is imported in
- another thread. Patch by Kernc.
+- bpo-36543: Restored the deprecated :mod:`xml.etree.cElementTree` module.
- bpo-34226: Fix `cgi.parse_multipart` without content_length. Patch by
Roger Duran
@@ -464,6 +507,10 @@ Tests
- bpo-41085: Fix integer overflow in the :meth:`array.array.index` method on
64-bit Windows for index larger than ``2**31``.
+- bpo-41069: :data:`test.support.TESTFN` and the current directory for tests
+ when run via ``test.regrtest`` contain now non-ascii characters if
+ possible.
+
- bpo-38377: On Linux, skip tests using multiprocessing if the current user
cannot create a file in ``/dev/shm/`` directory. Add the
:func:`~test.support.skip_if_broken_multiprocessing_synchronize` function
@@ -479,9 +526,9 @@ Tests
- bpo-40964: Disable remote :mod:`imaplib` tests, host cyrus.andrew.cmu.edu
is blocking incoming connections.
-- bpo-40055: distutils.tests now saves/restores warnings filters to leave
- them unchanged. Importing tests imports docutils which imports
- pkg_resources which adds a warnings filter.
+- bpo-40927: Fix test_binhex when run twice: it now uses
+ import_fresh_module() to ensure that it raises DeprecationWarning each
+ time.
- bpo-34401: Make test_gdb properly run on HP-UX. Patch by Michael Osipov.
@@ -490,9 +537,6 @@ Build
- bpo-40204: Pin Sphinx version to 2.3.1 in ``Doc/Makefile``.
-- bpo-40653: Move _dirnameW out of HAVE_SYMLINK to fix a potential compiling
- issue.
-
Windows
-------
@@ -502,17 +546,8 @@ Windows
- bpo-40164: Updates Windows OpenSSL to 1.1.1g
-- bpo-39631: Changes the registered MIME type for ``.py`` files on Windows
- to ``text/x-python`` instead of ``text/plain``.
-
-- bpo-40677: Manually define IO_REPARSE_TAG_APPEXECLINK in case some old
- Windows SDK doesn't have it.
-
-- bpo-40650: Include winsock2.h in pytime.c for timeval.
-
-- bpo-39148: Add IPv6 support to :mod:`asyncio` datagram endpoints in
- ProactorEventLoop. Change the raised exception for unknown address
- families to ValueError as it's not coming from Windows API.
+- bpo-37556: Extend py.exe help to mention overrides via venv, shebang,
+ environment variables & ini files.
macOS
-----
@@ -529,2412 +564,4307 @@ macOS
- bpo-41005: fixed an XDG settings issue not allowing macOS to open the
  browser in webbrowser.py
-- bpo-40741: Update macOS installer to use SQLite 3.32.2.
-
IDLE
----
+- bpo-41152: The encoding of ``stdin``, ``stdout`` and ``stderr`` in IDLE is
+ now always UTF-8.
+
- bpo-41144: Make Open Module open a special module such as os.path.
-- bpo-39885: Make context menu Cut and Copy work again when right-clicking
- within a selection.
+C API
+-----
-- bpo-40723: Make test_idle pass when run after import.
+- bpo-36346: Mark ``Py_UNICODE_COPY``, ``Py_UNICODE_FILL``,
+ ``PyUnicode_WSTR_LENGTH``, ``PyUnicode_FromUnicode``,
+ ``PyUnicode_AsUnicode``, ``_PyUnicode_AsUnicode``, and
+ ``PyUnicode_AsUnicodeAndSize`` as deprecated in C. Remove
+ ``Py_UNICODE_MATCH`` which was deprecated and broken since Python 3.3.
-Tools/Demos
------------
+- bpo-36020: On Windows, ``#include "pyerrors.h"`` no longer defines
+ ``snprintf`` and ``vsnprintf`` macros.
-- bpo-40479: Update multissltest helper to test with latest OpenSSL 1.0.2,
- 1.1.0, 1.1.1, and 3.0.0-alpha.
+- bpo-40703: The PyType_FromSpec*() functions no longer overwrite the type's
+ "__module__" attribute if it is set via "Py_tp_members" or "Py_tp_getset".
-- bpo-40163: Fix multissltest tool. OpenSSL has changed download URL for old
- releases. The multissltest tool now tries to download from current and old
- download URLs.
+What's New in Python 3.9.0 beta 3?
+==================================
-What's New in Python 3.8.3 final?
-=================================
+*Release date: 2020-06-09*
+
+Library
+-------
+
+- bpo-40924: `importlib.resources`: Reverted ``TraversableResources``
+ implementations from the built-in loaders (SourceFileLoader and
+ ZipImporter) as it was an incompatible change introduced in 3.9.0 beta 2
+ causing, through a chain of events, root TLS certificates to go missing.
+
+Build
+-----
+
+- bpo-40684: ``make install`` now uses the ``PLATLIBDIR`` variable for the
+ destination ``lib-dynload/`` directory when ``./configure
+ --with-platlibdir`` is used.
-*Release date: 2020-05-13*
+
+What's New in Python 3.9.0 beta 2?
+==================================
+
+*Release date: 2020-06-08*
Core and Builtins
-----------------
-- bpo-40527: Fix command line argument parsing: no longer write errors
- multiple times into stderr.
+- bpo-40904: Fix a possible segfault in the new PEG parser when parsing an
+ f-string containing yield statements with no value (:code:`f"{yield}"`).
+ Patch by Pablo Galindo
-- bpo-40417: Fix imp module deprecation warning when PyImport_ReloadModule
- is called. Patch by Robert Rouhani.
+- bpo-40903: Fixed a possible segfault in the new PEG parser when producing
+ error messages for invalid assignments of the form :code:`p=p=`. Patch by
+ Pablo Galindo
-- bpo-39562: The constant values of future flags in the :mod:`__future__`
- module are updated in order to prevent collision with compiler flags.
- Previously ``PyCF_ALLOW_TOP_LEVEL_AWAIT`` was clashing with
- ``CO_FUTURE_DIVISION``.
+- bpo-40880: Fix invalid memory read in the new parser when checking
+ newlines in string literals. Patch by Pablo Galindo.
-Library
--------
+- bpo-40883: Fix a memory leak when parsing f-strings in the new parser.
+ Patch by Pablo Galindo
-- bpo-40559: Fix possible memory leak in the C implementation of
- :class:`asyncio.Task`.
+- bpo-40870: Raise :exc:`ValueError` when validating custom AST's where the
+ constants ``True``, ``False`` and ``None`` are used within a
+ :class:`ast.Name` node.
-- bpo-40355: Improve error reporting in :func:`ast.literal_eval` in the
- presence of malformed :class:`ast.Dict` nodes instead of silently ignoring
- any non-conforming elements. Patch by Curtis Bucher.
+- bpo-40854: Allow overriding :data:`sys.platlibdir` via a new
+ :envvar:`PYTHONPLATLIBDIR` environment variable.
-- bpo-40459: :func:`platform.win32_ver` now produces correct *ptype* strings
- instead of empty strings.
+- bpo-40826: Fix GIL usage in :c:func:`PyOS_Readline`: lock the GIL to set
+ an exception and pass the Python thread state when checking if there is a
+ pending signal.
-- bpo-40398: :func:`typing.get_args` now always returns an empty tuple for
- special generic aliases.
+- bpo-40780: Fix a corner case where g-style string formatting of a float
+ failed to remove trailing zeros.
-Documentation
--------------
+- bpo-38964: When there's a :exc:`SyntaxError` in the expression part of an
+ f-string, the filename attribute of the :exc:`SyntaxError` gets correctly
+ set to the name of the file the f-string resides in.
-- bpo-40561: Provide docstrings for webbrowser open functions.
+- bpo-40750: Support the "-d" debug flag in the new PEG parser. Patch by
+ Pablo Galindo
-- bpo-39435: Fix an incorrect signature for :func:`pickle.loads` in the docs
+- bpo-40217: Instances of types created with
+ :c:func:`PyType_FromSpecWithBases` will no longer automatically visit
+ their class object when traversing references in the garbage collector.
+ The user is expected to manually visit the object's class. Patch by Pablo
+ Galindo.
-Windows
--------
+- bpo-40696: Fix a hang that can arise after :meth:`generator.throw` due to
+ a cycle in the exception context chain.
-- bpo-40458: Increase reserved stack space to prevent overflow crash on
- Windows.
+Library
+-------
-C API
------
+- bpo-39791: Refresh importlib.metadata from importlib_metadata 1.6.1.
-- bpo-40412: Nullify inittab_copy during finalization, preventing future
- interpreter initializations in an embedded situation from crashing. Patch
- by Gregory Szorc.
+- bpo-40807: Stop codeop._maybe_compile, used by code.InteractiveInterpreter
+ (and IDLE), from emitting each warning three times.
+- bpo-39791: Built-in loaders (SourceFileLoader and ZipImporter) now supply
+ ``TraversableResources`` implementations for ``ResourceReader``, and the
+ fallback function has been removed.
-What's New in Python 3.8.3 release candidate 1?
-===============================================
+- bpo-17005: The topological sort functionality that was introduced
+ initially in the :mod:`functools` module has been moved to a new
+ :mod:`graphlib` module to better accommodate the new tools and keep the
+ original scope of the :mod:`functools` module. Patch by Pablo Galindo
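
A minimal sketch of the new :mod:`graphlib` module mentioned in the bpo-17005
entry above, assuming a 3.9 interpreter; the node names are made up for
illustration::

    from graphlib import TopologicalSorter

    # Map each node to the set of nodes it depends on.
    graph = {"build": {"compile"}, "compile": {"configure"}, "test": {"build"}}
    ts = TopologicalSorter(graph)
    print(list(ts.static_order()))   # e.g. ['configure', 'compile', 'build', 'test']
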
-*Release date: 2020-04-29*
+- bpo-40777: Initialize PyDateTime_IsoCalendarDateType.tp_base at run-time
+ to avoid errors on some compilers.
-Security
---------
+- bpo-40767: :mod:`webbrowser` now properly finds the default browser in
+ pure Wayland systems by checking the WAYLAND_DISPLAY environment variable.
+ Patch contributed by Jérémy Attali.
-- bpo-40121: Fixes audit events raised on creating a new socket.
+- bpo-40791: :func:`hashlib.compare_digest` uses OpenSSL's
+ ``CRYPTO_memcmp()`` function when OpenSSL is available.
-- bpo-38576: Disallow control characters in hostnames in http.client,
- addressing CVE-2019-18348. Such potentially malicious header injection
- URLs now cause a InvalidURL to be raised.
+- bpo-40795: :mod:`ctypes` module: If ctypes fails to convert the result of
+ a callback or if a ctypes callback function raises an exception,
+ sys.unraisablehook is now called with an exception set. Previously, the
+ error was logged into stderr by :c:func:`PyErr_Print`.
-- bpo-39503: CVE-2020-8492: The
- :class:`~urllib.request.AbstractBasicAuthHandler` class of the
- :mod:`urllib.request` module uses an inefficient regular expression which
- can be exploited by an attacker to cause a denial of service. Fix the
- regex to prevent the catastrophic backtracking. Vulnerability reported by
- Ben Caller and Matt Schwager.
+- bpo-30008: Fix :mod:`ssl` code to be compatible with OpenSSL 1.1.x builds
+ that use ``no-deprecated`` and ``--api=1.1.0``.
-Core and Builtins
------------------
+- bpo-30064: Fix asyncio ``loop.sock_*`` race condition issue
-- bpo-20526: Fix :c:func:`PyThreadState_Clear()`. ``PyThreadState.frame`` is
- a borrowed reference, not a strong reference: ``PyThreadState_Clear()``
- must not call ``Py_CLEAR(tstate->frame)``.
+- bpo-40759: Deprecate the :mod:`symbol` module.
-- bpo-39965: Correctly raise ``SyntaxError`` if *await* is used inside
- non-async functions and ``PyCF_ALLOW_TOP_LEVEL_AWAIT`` is set (like in the
- asyncio REPL). Patch by Pablo Galindo.
+- bpo-40737: Fix possible reference leak for :mod:`sqlite3` initialization.
-- bpo-39562: Allow executing asynchronous comprehensions on the top level
- when the ``PyCF_ALLOW_TOP_LEVEL_AWAIT`` flag is given. Patch by Batuhan
- Taskaya.
+- bpo-40698: :mod:`distutils` upload creates SHA2-256 and Blake2b-256
+ digests. The MD5 digest is skipped if the platform blocks MD5.
-- bpo-38894: Fix a bug that was causing incomplete results when calling
- ``pathlib.Path.glob`` in the presence of symlinks that point to files
- where the user does not have read access. Patch by Pablo Galindo and Matt
- Wozniski.
+- bpo-40695: :mod:`hashlib` no longer falls back to builtin hash
+ implementations when OpenSSL provides a hash digest and the algorithm is
+ blocked by security policy.
-- bpo-39871: Fix a possible :exc:`SystemError` in
- ``math.{atan2,copysign,remainder}()`` when the first argument cannot be
- converted to a :class:`float`. Patch by Zachary Spytz.
+- bpo-9216: :func:`hashlib.new` now passes ``usedforsecurity`` to the OpenSSL EVP
+ constructor ``_hashlib.new()``. test_hashlib and test_smtplib handle
+ strict security policy better.
-- bpo-39776: Fix race condition where threads created by PyGILState_Ensure()
- could get a duplicate id.
+- bpo-40614: :func:`ast.parse` will not parse self documenting expressions
+ in f-strings when passed ``feature_version`` is less than ``(3, 8)``.
- This affects consumers of tstate->id like the contextvar caching
- machinery, which could return invalid cached objects under heavy thread
- load (observed in embedded scenarios).
+- bpo-40671: Prepare ``_hashlib`` for :pep:`489` and use
+ :c:func:`PyModule_AddType`.
-- bpo-39778: Fixed a crash due to incorrect handling of weak references in
- ``collections.OrderedDict`` classes. Patch by Pablo Galindo.
+- bpo-32309: Added a new :term:`coroutine` :func:`asyncio.to_thread`. It is
+ mainly used for running IO-bound functions in a separate thread to avoid
+ blocking the event loop, and essentially works as a high-level version of
+ :meth:`~asyncio.loop.run_in_executor` that can directly take keyword
+ arguments.
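
A short sketch of the new :func:`asyncio.to_thread` coroutine from the
bpo-32309 entry above; ``slow_read`` is a hypothetical blocking helper used
only for illustration::

    import asyncio
    import time

    def slow_read(path, delay=0.1):
        # Hypothetical IO-bound helper; blocks the worker thread, not the loop.
        time.sleep(delay)
        return f"read {path}"

    async def main():
        # Runs slow_read() in a separate thread; keyword arguments pass through.
        result = await asyncio.to_thread(slow_read, "data.txt", delay=0.2)
        print(result)

    asyncio.run(main())
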
-- bpo-39520: Fix unparsing of ext slices with no items (``foo[:,]``). Patch
- by Batuhan Taskaya.
+- bpo-40630: Added :func:`tracemalloc.reset_peak` to set the peak size of
+ traced memory blocks to the current size, to measure the peak of specific
+ pieces of code.
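
And a sketch of :func:`tracemalloc.reset_peak` from the bpo-40630 entry above,
which lets the peak measurement cover only a specific block of code::

    import tracemalloc

    tracemalloc.start()

    setup = [bytes(1000) for _ in range(1000)]   # allocations we do not care about
    tracemalloc.reset_peak()                     # measure the peak from here on

    workload = [bytes(1000) for _ in range(2000)]
    size, peak = tracemalloc.get_traced_memory()
    print(f"current={size}, peak-since-reset={peak}")
    tracemalloc.stop()
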
-- bpo-22490: Don't leak environment variable ``__PYVENV_LAUNCHER__`` into
- the interpreter session on macOS.
+- bpo-13097: ``ctypes`` now raises an ``ArgumentError`` when a callback is
+ invoked with more than 1024 arguments.
-Library
--------
+- bpo-23082: Updated the error message and docs of PurePath.relative_to() to
+ better reflect the function behaviour.
-- bpo-40138: Fix the Windows implementation of :func:`os.waitpid` for exit
- code larger than ``INT_MAX >> 8``. The exit status is now interpreted as
- an unsigned number.
+- bpo-39244: Fixed :class:`multiprocessing.context.get_all_start_methods` to
+ properly return the default method first on macOS.
-- bpo-39942: Set "__main__" as the default module name when "__name__" is
- missing in :class:`typing.TypeVar`. Patch by Weipeng Hong.
+- bpo-39040: Fix parsing of invalid mime headers parameters by collapsing
+ whitespace between encoded words in a bare-quote-string.
-- bpo-40330: In :meth:`ShareableList.__setitem__`, check the size of a new
- string item after encoding it to utf-8, not before.
+- bpo-35714: :exc:`struct.error` is now raised if there is a null character
+ in a :mod:`struct` format string.
-- bpo-40287: Fixed ``SpooledTemporaryFile.seek()`` to return the position.
+- bpo-36290: AST nodes are now raising :exc:`TypeError` on conflicting
+ keyword arguments. Patch contributed by Rémi Lapeyre.
-- bpo-40260: Ensure :mod:`modulefinder` uses :func:`io.open_code` and
- respects coding comments.
+Tests
+-----
-- bpo-40196: Fix a bug in the :mod:`symtable` module that was causing
- incorrectly report global variables as local. Patch by Pablo Galindo.
+- bpo-17258: Skip some :mod:`multiprocessing` tests when MD5 hash digest is
+ blocked.
-- bpo-40126: Fixed reverting multiple patches in unittest.mock. Patcher's
- ``__exit__()`` is now never called if its ``__enter__()`` is failed.
- Returning true from ``__exit__()`` silences now the exception.
+Build
+-----
-- bpo-40089: Fix threading._after_fork(): if fork was not called by a thread
- spawned by threading.Thread, threading._after_fork() now creates a
- _MainThread instance for _main_thread, instead of a _DummyThread instance.
+- bpo-40514: Remove ``--with-experimental-isolated-subinterpreters``
+ configure option in Python 3.9: the experiment continues in the master
+ branch, but it's no longer needed in 3.9.
-- bpo-39503: :class:`~urllib.request.AbstractBasicAuthHandler` of
- :mod:`urllib.request` now parses all WWW-Authenticate HTTP headers and
- accepts multiple challenges per header: use the realm of the first Basic
- challenge.
+- bpo-40683: Fixed an issue where the :mod:`zoneinfo` module and its tests
+ were not included when Python is installed with ``make``.
-- bpo-40014: Fix ``os.getgrouplist()``: if ``getgrouplist()`` function fails
- because the group list is too small, retry with a larger group list. On
- failure, the glibc implementation of ``getgrouplist()`` sets ``ngroups``
- to the total number of groups. For other implementations, double the group
- list size.
+Windows
+-------
-- bpo-40016: In re docstring, clarify the relationship between inline and
- argument compile flags.
+- bpo-39631: Changes the registered MIME type for ``.py`` files on Windows
+ to ``text/x-python`` instead of ``text/plain``.
-- bpo-39953: Update internal table of OpenSSL error codes in the ``ssl``
- module.
+- bpo-40677: Manually define IO_REPARSE_TAG_APPEXECLINK in case some old
+ Windows SDK doesn't have it.
-- bpo-39360: Ensure all workers exit when finalizing a
- :class:`multiprocessing.Pool` implicitly via the module finalization
- handlers of multiprocessing. This fixes a deadlock situation that can be
- experienced when the Pool is not properly finalized via the context
- manager or a call to ``multiprocessing.Pool.terminate``. Patch by Batuhan
- Taskaya and Pablo Galindo.
+macOS
+-----
-- bpo-39652: The column name found in ``sqlite3.Cursor.description`` is now
- truncated on the first '[' only if the PARSE_COLNAMES option is set.
+- bpo-40741: Update macOS installer to use SQLite 3.32.2.
-- bpo-39915: Ensure :attr:`unittest.mock.AsyncMock.await_args_list` has call
- objects in the order of awaited arguments instead of using
- :attr:`unittest.mock.Mock.call_args` which has the last value of the call.
- Patch by Karthikeyan Singaravelan.
+IDLE
+----
-- bpo-38662: The ``ensurepip`` module now invokes ``pip`` via the ``runpy``
- module. Hence it is no longer tightly coupled with the internal API of the
- bundled ``pip`` version, allowing easier updates to a newer ``pip``
- version both internally and for distributors.
+- bpo-39885: Make context menu Cut and Copy work again when right-clicking
+ within a selection.
-- bpo-39916: More reliable use of ``os.scandir()`` in ``Path.glob()``. It no
- longer emits a ResourceWarning when interrupted.
+- bpo-40723: Make test_idle pass when run after import.
-- bpo-39850: :mod:`multiprocessing` now supports abstract socket addresses
- (if abstract sockets are supported in the running platform). Patch by
- Pablo Galindo.
+C API
+-----
-- bpo-39828: Fix :mod:`json.tool` to catch :exc:`BrokenPipeError`. Patch by
- Dong-hee Na.
+- bpo-40910: Export explicitly the :c:func:`Py_GetArgcArgv` function to the
+ C API and document the function. Previously, it was exported implicitly
+ which no longer works since Python is built with ``-fvisibility=hidden``.
-- bpo-13487: Avoid a possible *"RuntimeError: dictionary changed size during
- iteration"* from :func:`inspect.getmodule` when it tried to loop through
- :attr:`sys.modules`.
+- bpo-40724: Allow defining buffer slots in type specs.
-- bpo-39794: Add --without-decimal-contextvar build option. This enables a
- thread-local rather than a coroutine local context.
+- bpo-40826: :c:func:`PyOS_InterruptOccurred` now fails with a fatal error
+ if it is called with the GIL released.
-- bpo-39769: The :func:`compileall.compile_dir` function's *ddir* parameter
- and the compileall command line flag `-d` no longer write the wrong
- pathname to the generated pyc file for submodules beneath the root of the
- directory tree being compiled. This fixes a regression introduced with
- Python 3.5.
-- bpo-39517: Fix runpy.run_path() when using pathlike objects
+What's New in Python 3.9.0 beta 1?
+==================================
-- bpo-39764: Fix AttributeError when calling get_stack on a PyAsyncGenObject
- Task
+*Release date: 2020-05-19*
-- bpo-30566: Fix :exc:`IndexError` when trying to decode an invalid string
- with punycode codec.
+Security
+--------
-- bpo-39667: Correct performance degradation in ``zipfile.Path`` as found in
- zipp 3.0. While retaining compatibility, this change discourages the use
- of ``zipfile.Path.open`` due to the signature change in Python 3.9. For
- compatibility across Python 3.8 and later versions, consider using
- ``zipp.Path`` on Python 3.8.x and earlier.
+- bpo-40501: :mod:`uuid` no longer uses :mod:`ctypes` to load
+ :file:`libuuid` or :file:`rpcrt4.dll` at runtime.
-- bpo-39548: Fix handling of header in
- :class:`urllib.request.AbstractDigestAuthHandler` when the optional
- ``qop`` parameter is not present.
+Core and Builtins
+-----------------
-- bpo-38971: An open issue on BPO indicated a desire to bring the
- implementation of codecs.open() to parity with io.open(), which uses a
- try/except to ensure the file stream gets closed before an exception is
- raised.
+- bpo-40663: Correctly generate annotations where parentheses are omitted
+ but required (e.g. ``Type[(str, int, *other)]``).
-- bpo-38410: Properly handle :func:`sys.audit` failures in
- :func:`sys.set_asyncgen_hooks`. Based on patch by Zackery Spytz.
+- bpo-40596: Fixed :meth:`str.isidentifier` for non-canonicalized strings
+ containing non-BMP characters on Windows.
-- bpo-36541: lib2to3 now recognizes named assignment expressions (the walrus
- operator, ``:=``)
+- bpo-40593: Improved syntax errors for invalid characters in source code.
-- bpo-31758: Prevent crashes when using an uninitialized
- ``_elementtree.XMLParser`` object. Patch by Oren Milman.
+- bpo-40585: Fixed a bug when using :func:`codeop.compile_command` that was
+ causing exceptions to be swallowed with the new parser. Patch by Pablo
+ Galindo
-Documentation
--------------
+- bpo-40566: Apply :pep:`573` to :mod:`abc`.
-- bpo-27635: The pickle documentation incorrectly claimed that ``__new__``
- isn't called by default when unpickling.
+- bpo-40502: Initialize ``n->n_col_offset``. (Patch by Joannah Nanjekye)
-- bpo-39879: Updated :ref:`datamodel` docs to include :func:`dict` insertion
- order preservation. Patch by Furkan Onder and Samy Lahfa.
+- bpo-40527: Fix command line argument parsing: no longer write errors
+ multiple times into stderr.
-- bpo-39868: Updated the Language Reference for :pep:`572`.
+- bpo-1635741: Port :mod:`errno` to multiphase initialization (:pep:`489`).
-- bpo-13790: Change 'string' to 'specification' in format doc.
+- bpo-40523: Add pass-throughs for :func:`hash` and :func:`reversed` to
+ :class:`weakref.proxy` objects. Patch by Pablo Galindo.
-- bpo-17422: The language reference no longer restricts default class
- namespaces to dicts only.
+- bpo-1635741: Port :mod:`syslog` to multiphase initialization (:pep:`489`).
-- bpo-39530: Fix misleading documentation about mixed-type numeric
- comparisons.
+- bpo-40246: Reporting a specialised error message for invalid string
+ prefixes, which was introduced in :issue:`40246`, is being reverted due to
+ backwards compatibility concerns for strings that immediately follow a
+ reserved keyword without whitespace between them. Constructs like
+ `bg="#d00" if clear else"#fca"` were failing to parse, which is not an
+ acceptable breakage on such short notice.
-- bpo-39718: Update :mod:`token` documentation to reflect additions in
- Python 3.8
+- bpo-40417: Fix imp module deprecation warning when PyImport_ReloadModule
+ is called. Patch by Robert Rouhani.
-- bpo-39677: Changed operand name of **MAKE_FUNCTION** from *argc* to
- *flags* for module :mod:`dis`
+- bpo-40408: Fixed support of nested type variables in GenericAlias (e.g.
+ ``list[list[T]]``).
-- bpo-38387: Document :c:macro:`PyDoc_STRVAR` macro in the C-API reference.
+- bpo-1635741: Port _stat module to multiphase initialization (:pep:`489`).
-Tests
------
+- bpo-29587: Enable implicit exception chaining when calling
+ :meth:`generator.throw`.
-- bpo-40436: test_gdb and test.pythoninfo now check gdb command exit code.
+- bpo-40328: Add tools for generating mappings headers for CJKCodecs.
-- bpo-40162: Update Travis CI configuration to OpenSSL 1.1.1f.
+- bpo-40228: Setting frame.f_lineno is now robust w.r.t. changes in the
+ source-to-bytecode compiler
-- bpo-40146: Update OpenSSL to 1.1.1f in Azure Pipelines.
+- bpo-38880: Added the ability to list interpreters associated with channel
+ ends in the internal subinterpreters module.
-- bpo-40019: test_gdb now skips tests if it detects that gdb failed to read
- debug information because the Python binary is optimized.
+- bpo-37986: Improve performance of :c:func:`PyLong_FromDouble` for values
+ that fit into :c:type:`long`.
-- bpo-27807: ``test_site.test_startup_imports()`` is now skipped if a path
- of :data:`sys.path` contains a ``.pth`` file.
+Library
+-------
-- bpo-39793: Use the same domain when testing ``make_msgid``. Patch by
- Batuhan Taskaya.
+- bpo-40662: Fixed :func:`ast.get_source_segment` for ast nodes that have
+ incomplete location information. Patch by Irit Katriel.
-- bpo-1812: Fix newline handling in doctest.testfile when loading from a
- package whose loader has a get_data method. Patch by Peter Donis.
+- bpo-40665: Convert :mod:`bisect` to use Argument Clinic.
-- bpo-37957: test.regrtest now can receive a list of test patterns to ignore
- (using the -i/--ignore argument) or a file with a list of patterns to
- ignore (using the --ignore-file argument). Patch by Pablo Galindo.
+- bpo-40536: Added the :func:`~zoneinfo.available_timezones` function to the
+ :mod:`zoneinfo` module. Patch by Paul Ganssle.
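+
+ A brief sketch of the new helper (the exact keys returned depend on the
+ system's time zone data)::
+
+     from zoneinfo import available_timezones
+
+     # available_timezones() returns a set of IANA time zone key strings.
+     keys = available_timezones()
+     print(len(keys), sorted(keys)[:3])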
-- bpo-38502: test.regrtest now uses process groups in the multiprocessing
- mode (-jN command line option) if process groups are available: if
- :func:`os.setsid` and :func:`os.killpg` functions are available.
+- bpo-40645: The :class:`hmac.HMAC` exposes internal implementation details.
+ The attributes ``digest_cons``, ``inner``, and ``outer`` are deprecated
+ and will be removed in the future.
-Build
------
+- bpo-40645: The internal module ``_hashlib`` wraps and exposes OpenSSL's
+ HMAC API. The new code will be used in Python 3.10 after the internal
+ implementation details of the pure Python HMAC module are no longer part
+ of the public API.
-- bpo-38360: Support single-argument form of macOS -isysroot flag.
+- bpo-40637: Builtin hash modules can now be disabled or selectively enabled
+ with ``configure --with-builtin-hashlib-hashes=sha3,blake2`` or
+ ``--without-builtin-hashlib-hashes``.
-- bpo-40204: Pin Sphinx version to 1.8.2 in ``Doc/Makefile``.
+- bpo-37630: The :mod:`hashlib` module can now use SHA3 hashes and SHAKE XOF
+ from OpenSSL when available.
-- bpo-40158: Fix CPython MSBuild Properties in NuGet Package
- (build/native/python.props)
+- bpo-40479: The :mod:`hashlib` now compiles with OpenSSL 3.0.0-alpha2.
-- bpo-38527: Fix configure check on Solaris for "float word ordering":
- sometimes, the correct "grep" command was not being used. Patch by Arnon
- Yaari.
+- bpo-40257: Revert changes to :func:`inspect.getdoc`.
-Windows
--------
+- bpo-40607: When cancelling a task due to timeout, :meth:`asyncio.wait_for`
+ will now propagate the exception if an error happens during cancellation.
+ Patch by Roman Skurikhin.
-- bpo-40164: Updates Windows to OpenSSL 1.1.1f
+- bpo-40612: Fix edge cases in SyntaxError formatting. If the offset is <=
+ 0, no caret is printed. If the offset is > line length, the caret is
+ printed pointing just after the last character.
-- bpo-39930: Ensures the required :file:`vcruntime140.dll` is included in
- install packages.
+- bpo-40597: If text content lines are longer than policy.max_line_length,
+ always use a content-encoding to make sure they are wrapped.
-- bpo-39847: Avoid hang when computer is hibernated whilst waiting for a
- mutex (for lock-related objects from :mod:`threading`) around 49-day
- uptime.
+- bpo-40571: Added functools.cache() as a simpler, more discoverable way to
+ access the unbounded cache variant of lru_cache(maxsize=None).
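+
+ A minimal sketch of the new decorator, which behaves like
+ ``lru_cache(maxsize=None)``::
+
+     from functools import cache
+
+     @cache
+     def fib(n):
+         # Each distinct n is computed once and then served from the cache.
+         return n if n < 2 else fib(n - 1) + fib(n - 2)
+
+     print(fib(100))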
-- bpo-38597: :mod:`distutils` will no longer statically link
- :file:`vcruntime140.dll` when a redistributable version is unavailable.
- All future releases of CPython will include a copy of this DLL to ensure
- distributed extensions can continue to load.
+- bpo-40503: :pep:`615`, the :mod:`zoneinfo` module. Adds support for the
+ IANA time zone database.
-- bpo-38380: Update Windows builds to use SQLite 3.31.1
+- bpo-40397: Removed attributes ``__args__`` and ``__parameters__`` from
+ special generic aliases like ``typing.List`` (not subscripted).
-- bpo-39789: Update Windows release build machines to Visual Studio 2019
- (MSVC 14.2).
+- bpo-40549: Convert posixmodule.c ("posix" or "nt" module) to the
+ multiphase initialization (PEP 489).
-- bpo-34803: Package for nuget.org now includes repository reference and
- bundled icon image.
+- bpo-31033: Add a ``msg`` argument to :meth:`Future.cancel` and
+ :meth:`Task.cancel`.
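+
+ A small sketch of passing a cancellation message, which travels with the
+ resulting :exc:`asyncio.CancelledError`::
+
+     import asyncio
+
+     async def main():
+         task = asyncio.create_task(asyncio.sleep(10))
+         await asyncio.sleep(0)            # let the task start
+         task.cancel(msg="timed out")      # new keyword argument
+         try:
+             await task
+         except asyncio.CancelledError as exc:
+             print(exc.args)               # ('timed out',)
+
+     asyncio.run(main())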
-macOS
------
+- bpo-40541: Added an optional *counts* parameter to random.sample().
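+
+ A quick sketch of the new keyword-only parameter, which avoids building a
+ repeated population list by hand::
+
+     import random
+
+     # Equivalent to sampling from ['red', 'red', 'red', 'blue', 'blue'].
+     print(random.sample(['red', 'blue'], counts=[3, 2], k=4))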
-- bpo-40164: Update macOS installer builds to use OpenSSL 1.1.1g.
+- bpo-40515: The :mod:`ssl` and :mod:`hashlib` modules now actively check
+ that OpenSSL is built with thread support. Python 3.7.0 made thread
+ support mandatory and no longer works safely with no-thread builds.
-- bpo-38380: Update macOS builds to use SQLite 3.31.1
+- bpo-31033: When an :class:`asyncio.Task` is cancelled, the exception
+ traceback now chains all the way back to where the task was first
+ interrupted.
-IDLE
-----
+- bpo-40504: :func:`functools.lru_cache` objects can now be the targets of
+ weakrefs.
-- bpo-27115: For 'Go to Line', use a Query box subclass with IDLE standard
- behavior and improved error checking.
+- bpo-40559: Fix possible memory leak in the C implementation of
+ :class:`asyncio.Task`.
-- bpo-39885: Since clicking to get an IDLE context menu moves the cursor,
- any text selection should be and now is cleared.
+- bpo-40480: ``fnmatch.fnmatch()`` could take exponential time in the
+ presence of multiple ``*`` pattern characters. This was repaired by
+ generating more elaborate regular expressions to avoid futile
+ backtracking.
-- bpo-39852: Edit "Go to line" now clears any selection, preventing
- accidental deletion. It also updates Ln and Col on the status bar.
+- bpo-40495: :mod:`compileall` is now able to use hardlinks to prevent
+ duplicates in a case when ``.pyc`` files for different optimization levels
+ have the same content.
-- bpo-39781: Selecting code context lines no longer causes a jump.
+- bpo-40457: The ssl module now supports OpenSSL builds without TLS 1.0 and
+ 1.1 methods.
-- bpo-38439: Add a 256×256 pixel IDLE icon to support more modern
- environments. Created by Andrew Clover. Delete the unused macOS idle.icns
- icon file.
+- bpo-40355: Improve error reporting in :func:`ast.literal_eval` in the
+ presence of malformed :class:`ast.Dict` nodes instead of silently ignoring
+ any non-conforming elements. Patch by Curtis Bucher.
-- bpo-38689: IDLE will no longer freeze when inspect.signature fails when
- fetching a calltip.
+- bpo-40465: Deprecated the optional *random* argument to
+ *random.shuffle()*.
-Tools/Demos
------------
+- bpo-40459: :func:`platform.win32_ver` now produces correct *ptype* strings
+ instead of empty strings.
-- bpo-40179: Fixed translation of ``#elif`` in Argument Clinic.
+- bpo-39435: The first argument of :func:`pickle.loads` is now
+ positional-only.
-- bpo-36184: Port python-gdb.py to FreeBSD. python-gdb.py now checks for
- "take_gil" function name to check if a frame tries to acquire the GIL,
- instead of checking for "pthread_cond_timedwait" which is specific to
- Linux and can be a different condition than the GIL.
+- bpo-39305: Update :mod:`nntplib` to merge :class:`nntplib.NNTP` and
+ :class:`nntplib._NNTPBase`. Patch by Dong-hee Na.
-- bpo-39889: Fixed ``unparse.py`` for extended slices containing a single
- element (e.g. ``a[i:j,]``). Remove redundant tuples when index with a
- tuple (e.g. ``a[i, j]``).
+- bpo-32494: Update :mod:`dbm.gnu` to use gdbm_count if possible when
+ calling :func:`len`. Patch by Dong-hee Na.
-C API
------
+- bpo-40453: Add ``isolated=True`` keyword-only parameter to
+ ``_xxsubinterpreters.create()``. An isolated subinterpreter cannot spawn
+ threads, spawn a child process or call ``os.fork()``.
-- bpo-35370: If :c:func:`PySys_Audit` fails in :c:func:`PyEval_SetProfile`
- or :c:func:`PyEval_SetTrace`, log the error as an unraisable exception.
+- bpo-40286: Remove ``_random.Random.randbytes()``: the C implementation of
+ ``randbytes()``. Implement the method in Python to ease subclassing:
+ ``randbytes()`` now directly reuses ``getrandbits()``.
-- bpo-39884: :c:func:`PyDescr_NewMethod` and :c:func:`PyCFunction_NewEx` now
- include the method name in the SystemError "bad call flags" error message
- to ease debug.
+- bpo-40394: Added default arguments to
+ :meth:`difflib.SequenceMatcher.find_longest_match()`.
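+
+ With the new defaults the index arguments can be omitted entirely; a small
+ sketch::
+
+     from difflib import SequenceMatcher
+
+     sm = SequenceMatcher(None, "abxcd", "abcd")
+     # Same as sm.find_longest_match(0, len("abxcd"), 0, len("abcd")).
+     print(sm.find_longest_match())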
-- bpo-38643: :c:func:`PyNumber_ToBase` now raises a :exc:`SystemError`
- instead of crashing when called with invalid base.
+- bpo-39995: Fix a race condition in concurrent.futures._ThreadWakeup:
+ access to _ThreadWakeup is now protected with the shutdown lock.
-- bpo-38913: Fixed segfault in ``Py_BuildValue()`` called with a format
- containing "#" and undefined PY_SSIZE_T_CLEAN whwn an exception is set.
+- bpo-30966: ``Process.shutdown(wait=True)`` of :mod:`concurrent.futures`
+ now explicitly closes the result queue.
+- bpo-30966: Add a new :meth:`~multiprocessing.SimpleQueue.close` method to
+ the :class:`~multiprocessing.SimpleQueue` class to explicitly close the
+ queue.
-What's New in Python 3.8.2 final?
-=================================
+- bpo-39966: Revert bpo-25597. :class:`unittest.mock.MagicMock` with
+ ``wraps`` set uses default return values for magic methods.
-*Release date: 2020-02-24*
+- bpo-39791: Added ``files()`` function to importlib.resources with support
+ for subdirectories in package data, matching backport in
+ importlib_resources 1.5.
-Core and Builtins
------------------
+- bpo-40375: :meth:`imaplib.IMAP4.unselect` is added. Patch by Dong-hee Na.
-- bpo-39382: Fix a use-after-free in the single inheritance path of
- ``issubclass()``, when the ``__bases__`` of an object has a single
- reference, and so does its first item. Patch by Yonatan Goldschmidt.
+- bpo-40389: ``repr()`` now returns ``typing.Optional[T]`` when called for
+ ``typing.Union`` of two types, one of which is ``NoneType``.
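+
+ For illustration::
+
+     import typing
+
+     # A Union of a type with None now renders as Optional[...] in repr().
+     print(repr(typing.Union[int, None]))    # typing.Optional[int]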
-- bpo-39427: Document all possibilities for the ``-X`` options in the
- command line help section. Patch by Pablo Galindo.
+- bpo-40291: Add support for CAN_J1939 sockets (available on Linux 5.4+)
-Library
--------
+- bpo-40273: :class:`types.MappingProxyType` is now reversible.
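+
+ A tiny sketch::
+
+     from types import MappingProxyType
+
+     proxy = MappingProxyType({"a": 1, "b": 2})
+     # reversed() is now supported on the read-only proxy, mirroring dict.
+     print(list(reversed(proxy)))            # ['b', 'a']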
-- bpo-39649: Remove obsolete check for `__args__` in
- bdb.Bdb.format_stack_entry.
+- bpo-39075: The repr for :class:`types.SimpleNamespace` is now insertion
+ ordered rather than alphabetical.
-- bpo-39681: Fix a regression where the C pickle module wouldn't allow
- unpickling from a file-like object that doesn't expose a readinto()
- method.
+- bpo-40192: On AIX, :func:`~time.thread_time` is now implemented with
+ ``thread_cputime()`` which has nanosecond resolution, rather than
+ ``clock_gettime(CLOCK_THREAD_CPUTIME_ID)`` which has a resolution of 10
+ ms. Patch by Batuhan Taskaya.
-- bpo-39546: Fix a regression in :class:`~argparse.ArgumentParser` where
- ``allow_abbrev=False`` was ignored for long options that used a prefix
- character other than "-".
+- bpo-40025: Raise TypeError when _generate_next_value_ is defined after
+ members. Patch by Ethan Onstott.
-- bpo-39432: Implement PEP-489 algorithm for non-ascii "PyInit\_..." symbol
- names in distutils to make it export the correct init symbol also on
- Windows.
+- bpo-39058: In the argparse module, the repr for Namespace() and other
+ argument holders is now displayed in the order attributes were added.
+ Formerly, it was displayed in alphabetical order even though argument
+ order is preserved in the user-visible parts of the module.
+
+- bpo-24416: The ``isocalendar()`` methods of :class:`datetime.date` and
+ :class:`datetime.datetime` now return a :term:`named tuple` instead of a
+ :class:`tuple`.
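+
+ The fields are now accessible by name as well as by index; for example::
+
+     import datetime
+
+     iso = datetime.date(2020, 5, 19).isocalendar()
+     print(iso.year, iso.week, iso.weekday)   # same values as iso[0:3]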
Documentation
-------------
-- bpo-17422: The language reference now specifies restrictions on class
- namespaces. Adapted from a patch by Ethan Furman.
+- bpo-34790: Document the version in which explicit passing of coros to
+ `asyncio.wait()` will be removed.
-- bpo-39572: Updated documentation of ``total`` flag of TypeDict.
+- bpo-40561: Provide docstrings for webbrowser open functions.
-- bpo-39654: In pyclbr doc, update 'class' to 'module' where appropriate and
- add readmodule comment. Patch by Hakan Çelik.
+- bpo-40499: Mention that :func:`asyncio.wait` requires a non-empty set of
+ awaitables.
-IDLE
-----
+- bpo-39705: Tutorial example for sorted() in the Loop Techniques section is
+ given a better explanation. Also a new example is included to explain
+ sorted()'s basic behavior.
-- bpo-39663: Add tests for pyparse find_good_parse_start().
+- bpo-39435: Fix an incorrect signature for :func:`pickle.loads` in the docs
+Tests
+-----
-What's New in Python 3.8.2 release candidate 2?
-===============================================
+- bpo-40055: distutils.tests now saves/restores warnings filters to leave
+ them unchanged. Importing tests imports docutils which imports
+ pkg_resources which adds a warnings filter.
-*Release date: 2020-02-17*
+- bpo-40436: test_gdb and test.pythoninfo now check gdb command exit code.
-Security
---------
+Build
+-----
-- bpo-39184: Add audit events to functions in `fcntl`, `msvcrt`, `os`,
- `resource`, `shutil`, `signal` and `syslog`.
+- bpo-40653: Move _dirnameW out of HAVE_SYMLINK to fix a potential compiling
+ issue.
-Core and Builtins
------------------
+- bpo-40514: Add ``--with-experimental-isolated-subinterpreters`` build
+ option to ``configure``: better isolate subinterpreters, experimental
+ build mode.
-- bpo-39619: Enable use of :func:`os.chroot` on HP-UX systems.
+Windows
+-------
-- bpo-39606: Fix regression caused by fix for bpo-39386, that prevented
- calling ``aclose`` on an async generator that had already been closed or
- exhausted.
+- bpo-40650: Include winsock2.h in pytime.c for timeval.
-- bpo-39453: Fixed a possible crash in :meth:`list.__contains__` when a list
- is changed during comparing items. Patch by Dong-hee Na.
+- bpo-40458: Increase reserved stack space to prevent overflow crash on
+ Windows.
-- bpo-39219: Syntax errors raised in the tokenizer now always set correct
- "text" and "offset" attributes.
+- bpo-39148: Add IPv6 support to :mod:`asyncio` datagram endpoints in
+ ProactorEventLoop. Change the raised exception for unknown address
+ families to ValueError as it's not coming from Windows API.
-Library
--------
+macOS
+-----
-- bpo-27657: The original fix for bpo-27657, "Fix urlparse() with numeric
- paths" (GH-16839) included in 3.8.1, inadvertently introduced a behavior
- change that broke several third-party packages relying on the original
- undefined parsing behavior. The change is reverted in 3.8.2, restoring the
- behavior of 3.8.0 and earlier releases.
+- bpo-34956: When building Python on macOS from source, ``_tkinter`` now
+ links with non-system Tcl and Tk frameworks if they are installed in
+ ``/Library/Frameworks``, as had been the case on older releases of macOS.
+ If a macOS SDK is explicitly configured, by using
+ ``--enable-universalsdk=`` or ``-isysroot``, only the SDK itself is
+ searched. The default behavior can still be overridden with
+ ``--with-tcltk-includes`` and ``--with-tcltk-libs``.
-- bpo-39474: Fixed starting position of AST for expressions like ``(a)(b)``,
- ``(a)[b]`` and ``(a).b``.
+- bpo-35569: Expose RFC 3542 IPv6 socket options.
-- bpo-21016: The :mod:`pydoc` and :mod:`trace` modules now use the
- :mod:`sysconfig` module to get the path to the Python standard library, to
- support uncommon installation path like ``/usr/lib64/python3.9/`` on
- Fedora. Patch by Jan Matějek.
+Tools/Demos
+-----------
-- bpo-39595: Improved performance of zipfile.Path for files with a large
- number of entries. Also improved performance and fixed minor issue as
- published with `importlib_metadata 1.5
- `_.
+- bpo-40479: Update multissltest helper to test with latest OpenSSL 1.0.2,
+ 1.1.0, 1.1.1, and 3.0.0-alpha.
-IDLE
-----
+- bpo-40431: Fix a syntax typo in ``turtledemo`` that now raises a
+ ``SyntaxError``.
-- bpo-39600: In the font configuration window, remove duplicated font names.
+- bpo-40163: Fix multissltest tool. OpenSSL has changed download URL for old
+ releases. The multissltest tool now tries to download from current and old
+ download URLs.
+C API
+-----
-What's New in Python 3.8.2 release candidate 1?
-===============================================
+- bpo-39465: Remove the ``_PyUnicode_ClearStaticStrings()`` function from
+ the C API.
-*Release date: 2020-02-10*
+- bpo-38787: Add PyCFunction_CheckExact() macro for exact type checks now
+ that we allow subtypes of PyCFunction, as well as PyCMethod_CheckExact()
+ and PyCMethod_Check() for the new PyCMethod subtype.
-Security
---------
+- bpo-40545: Declare ``_PyErr_GetTopmostException()`` with ``PyAPI_FUNC()``
+ to properly export the function in the C API. The function remains private
+ (``_Py`` prefix).
-- bpo-39401: Avoid unsafe load of ``api-ms-win-core-path-l1-1-0.dll`` at
- startup on Windows 7.
+- bpo-40412: Nullify inittab_copy during finalization, preventing future
+ interpreter initializations in an embedded situation from crashing. Patch
+ by Gregory Szorc.
-- bpo-39184: Add audit events to command execution functions in os and pty
- modules.
+- bpo-40429: The :c:func:`PyThreadState_GetFrame` function now returns a
+ strong reference to the frame.
-Core and Builtins
------------------
+- bpo-40428: Remove the following functions from the C API. Call
+ :c:func:`PyGC_Collect` explicitly to free all free lists.
-- bpo-39579: Change the ending column offset of `Attribute` nodes
- constructed in `ast_for_dotted_name` to point at the end of the current
- node and not at the end of the last `NAME` node.
+ * ``PyAsyncGen_ClearFreeLists()``
+ * ``PyContext_ClearFreeList()``
+ * ``PyDict_ClearFreeList()``
+ * ``PyFloat_ClearFreeList()``
+ * ``PyFrame_ClearFreeList()``
+ * ``PyList_ClearFreeList()``
+ * ``PySet_ClearFreeList()``
+ * ``PyTuple_ClearFreeList()``
-- bpo-39510: Fix segfault in ``readinto()`` method on closed BufferedReader.
+- bpo-40421: New :c:func:`PyFrame_GetBack` function: get the frame's next
+ outer frame.
-- bpo-39492: Fix a reference cycle in the C Pickler that was preventing the
- garbage collection of deleted, pickled objects.
+- bpo-40421: New :c:func:`PyFrame_GetCode` function: return a borrowed
+ reference to the frame code.
-- bpo-39421: Fix possible crashes when operating with the functions in the
- :mod:`heapq` module and custom comparison operators.
+- bpo-40217: Ensure that instances of types created with
+ :c:func:`PyType_FromSpecWithBases` will visit their class object when
+ traversing references in the garbage collector (implemented as an
+ extension of the provided :c:member:`~PyTypeObject.tp_traverse`). Patch by
+ Pablo Galindo.
-- bpo-39386: Prevent double awaiting of async iterator.
+- bpo-38787: Module C state is now accessible from C-defined heap type
+ methods (:pep:`573`). Patch by Marcel Plch and Petr Viktorin.
-- bpo-39235: Fix AST end location for lone generator expression in function
- call, e.g. f(i for i in a).
-- bpo-39209: Correctly handle multi-line tokens in interactive mode. Patch
- by Pablo Galindo.
+What's New in Python 3.9.0 alpha 6?
+===================================
-- bpo-39216: Fix constant folding optimization for positional only arguments
- - by Anthony Sottile.
+*Release date: 2020-04-27*
-- bpo-39215: Fix ``SystemError`` when nested function has annotation on
- positional-only argument - by Anthony Sottile.
+Security
+--------
-- bpo-38588: Fix possible crashes in dict and list when calling
- :c:func:`PyObject_RichCompareBool`.
+- bpo-40121: Fixes audit events raised on creating a new socket.
-- bpo-38610: Fix possible crashes in several list methods by holding strong
- references to list elements when calling
- :c:func:`PyObject_RichCompareBool`.
+- bpo-39073: Disallow CR or LF in email.headerregistry.Address arguments to
+ guard against header injection attacks.
-Library
--------
+- bpo-39503: CVE-2020-8492: The
+ :class:`~urllib.request.AbstractBasicAuthHandler` class of the
+ :mod:`urllib.request` module uses an inefficient regular expression which
+ can be exploited by an attacker to cause a denial of service. Fix the
+ regex to prevent the catastrophic backtracking. Vulnerability reported by
+ Ben Caller and Matt Schwager.
-- bpo-39590: Collections.deque now holds strong references during
- deque.__contains__ and deque.count, fixing crashes.
+Core and Builtins
+-----------------
-- bpo-38149: :func:`sys.audit` is now called only once per call of
- :func:`glob.glob` and :func:`glob.iglob`.
+- bpo-40313: Improve the performance of bytes.hex().
-- bpo-39450: Stripped whitespace from docstring before returning it from
- :func:`unittest.case.shortDescription`.
+- bpo-40334: Switch to a new parser, based on PEG. For more details see PEP
+ 617. To temporarily switch back to the old parser, use ``-X oldparser`` or
+ ``PYTHONOLDPARSER=1``. In Python 3.10 we will remove the old parser
+ completely, including the ``parser`` module (already deprecated) and
+ anything that depends on it.
-- bpo-39493: Mark ``typing.IO.closed`` as a property
+- bpo-40267: Fix the tokenizer to display the correct error message, when
+ there is a SyntaxError on the last input character and no newline follows.
+ It used to be `unexpected EOF while parsing`, while it should be `invalid
+ syntax`.
-- bpo-39485: Fix a bug in :func:`unittest.mock.create_autospec` that would
- complain about the wrong number of arguments for custom descriptors
- defined in an extension module returning functions.
+- bpo-39522: Correctly unparse explicit ``u`` prefix for strings when
+ postponed evaluation of annotations is activated. Patch by Batuhan Taskaya.
-- bpo-39082: Allow AsyncMock to correctly patch static/class methods
+- bpo-40246: Report a specialized error message, `invalid string prefix`,
+ when the tokenizer encounters a string with an invalid prefix.
-- bpo-39430: Fixed race condition in lazy imports in :mod:`tarfile`.
+- bpo-40082: Fix the signal handler: it now always uses the main
+ interpreter, rather than trying to get the current Python thread state.
-- bpo-39390: Fixed a regression with the `ignore` callback of
- :func:`shutil.copytree`. The argument types are now str and List[str]
- again.
+- bpo-37388: str.encode() and str.decode() no longer check the encoding and
+ errors in development mode or in debug mode during Python finalization.
+ The codecs machinery can no longer work on very late calls to str.encode()
+ and str.decode().
-- bpo-39389: Write accurate compression level metadata in :mod:`gzip`
- archives, rather than always signaling maximum compression.
+- bpo-40077: Fix possible refleaks in :mod:`_json`, memo of PyScannerObject
+ should be traversed.
-- bpo-39274: ``bool(fraction.Fraction)`` now returns a boolean even if
- (numerator != 0) does not return a boolean (ex: numpy number).
+- bpo-37207: Speed up calls to ``dict()`` by using the :pep:`590`
+ ``vectorcall`` calling convention.
-- bpo-39297: Improved performance of importlib.metadata distribution
- discovery and resilience to inaccessible sys.path entries
- (importlib_metadata v1.4.0).
+- bpo-40141: Add column and line information to ``ast.keyword`` nodes. Patch
+ by Pablo Galindo.
-- bpo-39242: Updated the Gmane domain from news.gmane.org to news.gmane.io
- which is used for examples of :class:`~nntplib.NNTP` news reader server
- and nntplib tests.
+- bpo-1635741: Port :mod:`resource` to multiphase initialization
+ (:pep:`489`).
-- bpo-38907: In http.server script, restore binding to IPv4 on Windows.
+- bpo-1635741: Port :mod:`math` to multiphase initialization (:pep:`489`).
-- bpo-39152: Fix ttk.Scale.configure([name]) to return configuration tuple
- for name or all options. Giovanni Lombardo contributed part of the patch.
+- bpo-1635741: Port _uuid module to multiphase initialization (:pep:`489`).
-- bpo-39198: If an exception were to be thrown in `Logger.isEnabledFor`
- (say, by asyncio timeouts or stopit) , the `logging` global lock may not
- be released appropriately, resulting in deadlock. This change wraps that
- block of code with `try...finally` to ensure the lock is released.
+- bpo-40077: Convert json module to use :c:func:`PyType_FromSpec`.
-- bpo-39191: Perform a check for running loop before starting a new task in
- ``loop.run_until_complete()`` to fail fast; it prevents the side effect of
- new task spawning before exception raising.
+- bpo-40067: Improve the error message for multiple star expressions in an
+ assignment. Patch by Furkan Onder
-- bpo-38871: Correctly parenthesize filter-based statements that contain
- lambda expressions in mod:`lib2to3`. Patch by Dong-hee Na.
+- bpo-1635741: Port _functools module to multiphase initialization (PEP
+ 489). Patch by Paulo Henrique Silva.
-- bpo-39142: A change was made to logging.config.dictConfig to avoid
- converting instances of named tuples to ConvertingTuple. It's assumed that
- named tuples are too specialised to be treated like ordinary tuples; if a
- user of named tuples requires ConvertingTuple functionality, they will
- have to implement that themselves in their named tuple class.
+- bpo-1635741: Port operator module to multiphase initialization (PEP 489).
+ Patch by Paulo Henrique Silva.
-- bpo-39129: Fix import path for ``asyncio.TimeoutError``
+- bpo-20526: Fix :c:func:`PyThreadState_Clear()`. ``PyThreadState.frame`` is
+ a borrowed reference, not a strong reference: ``PyThreadState_Clear()``
+ must not call ``Py_CLEAR(tstate->frame)``.
-- bpo-39057: :func:`urllib.request.proxy_bypass_environment` now ignores
- leading dots and no longer ignores a trailing newline.
+- bpo-1635741: Port time module to multiphase initialization (:pep:`489`).
+ Patch by Paulo Henrique Silva.
-- bpo-39056: Fixed handling invalid warning category in the -W option. No
- longer import the re module if it is not needed.
+- bpo-1635741: Port _weakref extension module to multiphase initialization
+ (:pep:`489`).
-- bpo-39055: :func:`base64.b64decode` with ``validate=True`` raises now a
- binascii.Error if the input ends with a single ``\n``.
+- bpo-40020: Fix a leak and subsequent crash in parsetok.c caused by realloc
+ misuse on a rare codepath.
-- bpo-39033: Fix :exc:`NameError` in :mod:`zipimport`. Patch by Karthikeyan
- Singaravelan.
+- bpo-39939: Added str.removeprefix and str.removesuffix methods and
+ corresponding bytes, bytearray, and collections.UserString methods to
+ remove affixes from a string if present. See :pep:`616` for a full
+ description. Patch by Dennis Sweeney.
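+
+ A short illustration of the new string methods::
+
+     print("test_fixture".removeprefix("test_"))   # 'fixture'
+     print("report.txt".removesuffix(".txt"))      # 'report'
+     # The string is returned unchanged when the affix is not present.
+     print("report.txt".removeprefix("test_"))     # 'report.txt'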
-- bpo-38878: Fixed __subclasshook__ of :class:`os.PathLike` to return a
- correct result upon inheritance. Patch by Bar Harel.
+- bpo-39481: Implement PEP 585. This supports list[int], tuple[str, ...]
+ etc.
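+
+ A minimal example of subscripting builtin collection types directly::
+
+     def first(items: list[int]) -> int:
+         # list[int] is now valid at runtime, without typing.List.
+         return items[0]
+
+     print(first([1, 2, 3]))
+     print(list[int], dict[str, tuple[int, ...]])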
-- bpo-35182: Fixed :func:`Popen.communicate` subsequent call crash when the
- child process has already closed any piped standard stream, but still
- continues to be running. Patch by Andriy Maletsky.
+- bpo-32894: Support unparsing of infinity numbers in postponed annotations.
+ Patch by Batuhan Taşkaya.
-- bpo-38473: Use signature from inner mock for autospecced methods attached
- with :func:`unittest.mock.attach_mock`. Patch by Karthikeyan Singaravelan.
+- bpo-37207: Speed up calls to ``list()`` by using the :pep:`590`
+ ``vectorcall`` calling convention. Patch by Mark Shannon.
-- bpo-38293: Add :func:`copy.copy` and :func:`copy.deepcopy` support to
- :func:`property` objects.
+Library
+-------
-Documentation
--------------
+- bpo-40398: :func:`typing.get_args` now always returns an empty tuple for
+ special generic aliases.
-- bpo-39153: Clarify refcounting semantics for the following functions:
- PyObject_SetItem, PyMapping_SetItemString, PyDict_SetItem, and
- PyDict_SetItemString.
+- bpo-40396: Functions :func:`typing.get_origin`, :func:`typing.get_args`
+ and :func:`typing.get_type_hints` now support generic aliases like
+ ``list[int]``.
-- bpo-39392: Explain that when filling with turtle, overlap regions may be
- left unfilled.
+- bpo-38061: Optimize the :mod:`subprocess` module on FreeBSD using
+ ``closefrom()``. A single ``close(fd)`` syscall is cheap, but when
+ ``sysconf(_SC_OPEN_MAX)`` is high, the loop calling ``close(fd)`` on each
+ file descriptor can take several milliseconds.
-- bpo-39381: Mention in docs that :func:`asyncio.get_event_loop` implicitly
- creates new event loop only if called from the main thread.
+ The workaround on FreeBSD to improve performance was to load and mount the
+ fdescfs kernel module, but this is not enabled by default.
-- bpo-38918: Add an entry for ``__module__`` in the "function" & "method"
- sections of the `inspect docs types and members table
- `_
+ Initial patch by Ed Maste (emaste), Conrad Meyer (cem), Kyle Evans
+ (kevans) and Kubilay Kocak (koobs):
+ https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=242274
-- bpo-3530: In the :mod:`ast` module documentation, fix a misleading
- ``NodeTransformer`` example and add advice on when to use the
- ``fix_missing_locations`` function.
+- bpo-38061: On FreeBSD, ``os.closerange(fd_low, fd_high)`` now calls
+ ``closefrom(fd_low)`` if *fd_high* is greater than or equal to
+ ``sysconf(_SC_OPEN_MAX)``.
-Tests
------
+ Initial patch by Ed Maste (emaste), Conrad Meyer (cem), Kyle Evans
+ (kevans) and Kubilay Kocak (koobs):
+ https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=242274
-- bpo-39502: Skip test_zipfile.test_add_file_after_2107() if
- :func:`time.localtime` fails with :exc:`OverflowError`. It is the case on
- AIX 6.1 for example.
+- bpo-40360: The :mod:`lib2to3` module is pending deprecation due to
+ :pep:`617`.
-- bpo-38546: Fix test_ressources_gced_in_workers() of
- test_concurrent_futures: explicitly stop the manager to prevent leaking a
- child process running in the background after the test completes.
+- bpo-40138: Fix the Windows implementation of :func:`os.waitpid` for exit
+ code larger than ``INT_MAX >> 8``. The exit status is now interpreted as
+ an unsigned number.
-Build
------
+- bpo-39942: Set "__main__" as the default module name when "__name__" is
+ missing in :class:`typing.TypeVar`. Patch by Weipeng Hong.
-- bpo-39144: The ctags and etags build targets both include Modules/_ctypes
- and Python standard library source files.
+- bpo-40275: The :mod:`logging` package is now imported lazily in
+ :mod:`unittest` only when the :meth:`~unittest.TestCase.assertLogs`
+ assertion is used.
-Windows
--------
+- bpo-40275: The :mod:`asyncio` package is now imported lazily in
+ :mod:`unittest` only when the :class:`~unittest.IsolatedAsyncioTestCase`
+ class is used.
-- bpo-39439: Honor the Python path when a virtualenv is active on Windows.
+- bpo-40330: In :meth:`ShareableList.__setitem__`, check the size of a new
+ string item after encoding it to utf-8, not before.
-- bpo-39393: Improve the error message when attempting to load a DLL with
- unresolved dependencies.
+- bpo-40148: Added :meth:`pathlib.Path.with_stem()` to create a new Path
+ with the stem replaced.
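+
+ A small sketch of the new method::
+
+     from pathlib import PurePosixPath
+
+     p = PurePosixPath("build/report.txt")
+     # The stem is replaced while the suffix is preserved.
+     print(p.with_stem("summary"))            # build/summary.txt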
-- bpo-38883: :meth:`~pathlib.Path.home()` and
- :meth:`~pathlib.Path.expanduser()` on Windows now prefer
- :envvar:`USERPROFILE` and no longer use :envvar:`HOME`, which is not
- normally set for regular user accounts. This makes them again behave like
- :func:`os.path.expanduser`, which was changed to ignore :envvar:`HOME` in
- 3.8, see :issue:`36264`.
+- bpo-40325: Deprecated support for set objects in random.sample().
-- bpo-39185: The build.bat script has additional options for very-quiet
- output (-q) and very-verbose output (-vv)
+- bpo-40257: Improved help for the :mod:`typing` module. Docstrings are now
+ shown for all special forms and special generic aliases (like ``Union``
+ and ``List``). Using ``help()`` with generic alias like ``List[int]`` will
+ show the help for the corresponding concrete type (``list`` in this case).
-IDLE
-----
+- bpo-40257: :func:`inspect.getdoc` no longer returns a docstring inherited
+ from the type of the object, or from a parent class if the object is a
+ class, when the docstring is not defined in the object itself. In
+ :mod:`pydoc` the documentation string is now shown not only for classes,
+ functions and methods, but for any object that has its own ``__doc__``
+ attribute.
-- bpo-30780: Add remaining configdialog tests for buttons and highlights and
- keys tabs.
+- bpo-40287: Fixed ``SpooledTemporaryFile.seek()`` to return the position.
-- bpo-39388: IDLE Settings Cancel button now cancels pending changes
+- bpo-40290: Added zscore() to statistics.NormalDist().
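+
+ For illustration::
+
+     from statistics import NormalDist
+
+     iq = NormalDist(mu=100, sigma=15)
+     # zscore() reports how many standard deviations x lies from the mean.
+     print(iq.zscore(130))                    # 2.0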
-- bpo-39050: Make IDLE Settings dialog Help button work again.
+- bpo-40282: Allow ``random.getrandbits(0)`` to succeed and to return 0.
-- bpo-34118: Tag memoryview, range, and tuple as classes, the same as list,
- etcetera, in the library manual built-in functions list.
+- bpo-40286: Add :func:`random.randbytes` function and
+ :meth:`random.Random.randbytes` method to generate random bytes.
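+
+ A quick sketch of both the module-level function and the method::
+
+     import random
+
+     rng = random.Random(12345)      # seeded generator, reproducible output
+     print(rng.randbytes(4))         # 4 random bytes from this generator
+     print(random.randbytes(4))      # module-level convenience function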
-- bpo-38792: Close an IDLE shell calltip if a :exc:`KeyboardInterrupt` or
- shell restart occurs. Patch by Zackery Spytz.
+- bpo-40277: :func:`collections.namedtuple` now provides a human-readable
+ repr for its field accessors.
-- bpo-32989: Add tests for editor newline_and_indent_event method. Remove
- dead code from pyparse find_good_parse_start method.
+- bpo-40270: The included copy of sqlite3 on Windows is now compiled with
+ the json extension. This allows the use of functions such as
+ ``json_object``.
+- bpo-29255: Wait in `KqueueSelector.select` when no fds are registered
-What's New in Python 3.8.1 final?
-=================================
+- bpo-40260: Ensure :mod:`modulefinder` uses :func:`io.open_code` and
+ respects coding comments.
-*Release date: 2019-12-18*
+- bpo-40234: Allow spawning daemon threads in subinterpreters again (revert
+ the change which denied them).
-Core and Builtins
------------------
+- bpo-39207: Workers in :class:`~concurrent.futures.ProcessPoolExecutor` are
+ now spawned on demand, only when there are no available idle workers to
+ reuse. This optimizes startup overhead and reduces the amount of CPU time
+ lost to idle workers. Patch by Kyle Stanley.
-- bpo-39080: Fix the value of *end_col_offset* for Starred Expression AST
- nodes when they are among the elements in the *args* attribute of Call AST
- nodes.
+- bpo-40091: Fix a hang at fork in the logging module: the new private
+ _at_fork_reinit() method is now used to reinitialize locks at fork in the
+ child process.
-- bpo-39031: When parsing an "elif" node, lineno and col_offset of the node
- now point to the "elif" keyword and not to its condition, making it
- consistent with the "if" node. Patch by Lysandros Nikolaou.
+- bpo-40149: Implement traverse and clear slots in _abc._abc_data type.
-- bpo-39008: :c:func:`PySys_Audit` now requires ``Py_ssize_t`` to be used
- for size arguments in the format string, regardless of whether
- ``PY_SSIZE_T_CLEAN`` was defined at include time.
+- bpo-40208: Remove deprecated :meth:`symtable.SymbolTable.has_exec`.
-Library
--------
+- bpo-40196: Fix a bug in the :mod:`symtable` module that caused global
+ variables to be incorrectly reported as local. Patch by Pablo Galindo.
-- bpo-39022: Update importlib.metadata to include improvements from
- importlib_metadata 1.3 including better serialization of EntryPoints and
- improved documentation for custom finders.
+- bpo-40190: Add support for ``_SC_AIX_REALMEM`` to :func:`posix.sysconf`.
-- bpo-38811: Fix an unhandled exception in :mod:`pathlib` when
- :meth:`os.link` is missing. Patch by Toke Høiland-Jørgensen.
+- bpo-40182: Removed the ``_field_types`` attribute of the
+ :class:`typing.NamedTuple` class.
-- bpo-36406: Handle namespace packages in :mod:`doctest`. Patch by
- Karthikeyan Singaravelan.
+- bpo-36517: Multiple inheritance with :class:`typing.NamedTuple` now raises
+ an error instead of silently ignoring other types.
-Tests
------
+- bpo-40126: Fixed reverting multiple patches in unittest.mock. Patcher's
+ ``__exit__()`` is now never called if its ``__enter__()`` failed.
+ Returning true from ``__exit__()`` now silences the exception.
-- bpo-38546: Multiprocessing and concurrent.futures tests now stop the
- resource tracker process when tests complete.
+- bpo-40094: CGIHTTPRequestHandler of http.server now logs the CGI script
+ exit code, rather than the CGI script exit status of os.waitpid(). For
+ example, if the script is killed by signal 11, it now logs: "CGI script
+ exit code -11."
-Windows
--------
+- bpo-40108: Improve the error message when trying to import a module using
+ :mod:`runpy` and incorrectly using the ".py" extension at the end of the
+ module name. Patch by Pablo Galindo.
-- bpo-39007: Add auditing events to functions in :mod:`winreg`.
+- bpo-40094: Add :func:`os.waitstatus_to_exitcode` function: convert a wait
+ status to an exit code.
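+
+ A POSIX-oriented sketch (``os.system()`` returns a raw wait status there)::
+
+     import os
+     import sys
+
+     status = os.system(f'"{sys.executable}" -c "raise SystemExit(3)"')
+     # Convert the raw wait status into the familiar exit-code convention.
+     print(os.waitstatus_to_exitcode(status))   # 3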
-macOS
------
+- bpo-40089: Fix threading._after_fork(): if fork was not called by a thread
+ spawned by threading.Thread, threading._after_fork() now creates a
+ _MainThread instance for _main_thread, instead of a _DummyThread instance.
-- bpo-38295: Prevent failure of test_relative_path in test_py_compile on
- macOS Catalina.
+- bpo-40089: Add a private ``_at_fork_reinit()`` method to
+ :class:`_thread.Lock`, :class:`_thread.RLock`, :class:`threading.RLock`
+ and :class:`threading.Condition` classes: reinitialize the lock at fork in
+ the child process, reset the lock to the unlocked state. Rename also the
+ private ``_reset_internal_locks()`` method of :class:`threading.Event` to
+ ``_at_fork_reinit()``.
-IDLE
-----
+- bpo-25780: Expose :data:`~socket.CAN_RAW_JOIN_FILTERS` in the
+ :mod:`socket` module.
-- bpo-38944: Escape key now closes IDLE completion windows. Patch by Johnny
- Najera.
+- bpo-39503: :class:`~urllib.request.AbstractBasicAuthHandler` of
+ :mod:`urllib.request` now parses all WWW-Authenticate HTTP headers and
+ accepts multiple challenges per header: use the realm of the first Basic
+ challenge.
-- bpo-38943: Fix IDLE autocomplete windows not always appearing on some
- systems. Patch by Johnny Najera.
+- bpo-39812: Removed daemon threads from :mod:`concurrent.futures` by adding
+ an internal `threading._register_atexit()`, which calls registered
+ functions prior to joining all non-daemon threads. This allows for
+ compatibility with subinterpreters, which don't support daemon threads.
+- bpo-40050: Fix ``importlib._bootstrap_external``: avoid creating a new
+ ``winreg`` builtin module if it's already available in
+ :data:`sys.modules`, and remove redundant imports.
-What's New in Python 3.8.1 release candidate 1?
-===============================================
+- bpo-40014: Fix ``os.getgrouplist()``: if ``getgrouplist()`` function fails
+ because the group list is too small, retry with a larger group list. On
+ failure, the glibc implementation of ``getgrouplist()`` sets ``ngroups``
+ to the total number of groups. For other implementations, double the group
+ list size.
-*Release date: 2019-12-09*
+- bpo-40017: Add :data:`time.CLOCK_TAI` constant if the operating system
+ supports it.
-Security
---------
+- bpo-40016: In re docstring, clarify the relationship between inline and
+ argument compile flags.
-- bpo-38945: Newline characters have been escaped when performing uu
- encoding to prevent them from overflowing into the content section of the
- encoded file. This prevents malicious or accidental modification of data
- during the decoding process.
+- bpo-39953: Update internal table of OpenSSL error codes in the ``ssl``
+ module.
-- bpo-37228: Due to significant security concerns, the *reuse_address*
- parameter of :meth:`asyncio.loop.create_datagram_endpoint` is no longer
- supported. This is because of the behavior of ``SO_REUSEADDR`` in UDP. For
- more details, see the documentation for
- ``loop.create_datagram_endpoint()``. (Contributed by Kyle Stanley, Antoine
- Pitrou, and Yury Selivanov in :issue:`37228`.)
+- bpo-36144: Added :pep:`584` operators to
+ :class:`weakref.WeakValueDictionary`.
-- bpo-38722: :mod:`runpy` now uses :meth:`io.open_code` to open code files.
- Patch by Jason Killen.
+- bpo-36144: Added :pep:`584` operators to
+ :class:`weakref.WeakKeyDictionary`.
-- bpo-38804: Fixes a ReDoS vulnerability in :mod:`http.cookiejar`. Patch by
- Ben Caller.
+- bpo-38891: Fix linear runtime behaviour of the `__getitem__` and
+ `__setitem__` methods in
+ :class:`multiprocessing.shared_memory.ShareableList`. This avoids
+ quadratic performance when iterating a `ShareableList`. Patch by Thomas
+ Krennwallner.
-- bpo-38622: Add additional audit events for the :mod:`ctypes` module.
+- bpo-39682: Remove undocumented support for *closing* a `pathlib.Path`
+ object via its context manager. The context manager magic methods remain,
+ but they are now a no-op, making `Path` objects immutable.
-- bpo-38418: Fixes audit event for :func:`os.system` to be named
- ``os.system``.
+- bpo-36144: Added :pep:`584` operators (``|`` and ``|=``) to
+ :class:`collections.ChainMap`.
-Core and Builtins
------------------
+- bpo-39011: Normalization of line endings in ElementTree attributes was
+ removed, as line endings which were replaced by entity numbers should be
+ preserved in original form.
-- bpo-38673: In REPL mode, don't switch to PS2 if the line starts with
- comment or whitespace. Based on work by Batuhan Taşkaya.
+- bpo-38410: Properly handle :func:`sys.audit` failures in
+ :func:`sys.set_asyncgen_hooks`.
-- bpo-38922: Calling ``replace`` on a code object now raises the
- ``code.__new__`` audit event.
+- bpo-36541: lib2to3 now recognizes named assignment expressions (the walrus
+ operator, ``:=``)
-- bpo-38920: Add audit hooks for when :func:`sys.excepthook` and
- :func:`sys.unraisablehook` are invoked
+- bpo-35967: In platform, delay the invocation of 'uname -p' until the
+ processor attribute is requested.
-- bpo-38892: Improve documentation for audit events table and functions.
+- bpo-35113: :meth:`inspect.getsource` now returns correct source code for
+ inner class with same name as module level class. Decorators are also
+ returned as part of source of the class. Patch by Karthikeyan
+ Singaravelan.
-- bpo-38707: ``MainThread.native_id`` is now correctly reset in child
- processes spawned using :class:`multiprocessing.Process`, instead of
- retaining the parent's value.
+- bpo-33262: Deprecate passing None as an argument for
+ :func:`shlex.split()`'s ``s`` parameter. Patch by Zackery Spytz.
-- bpo-38640: Fixed a bug in the compiler that caused it to raise an error in the
- presence of break statements and continue statements inside always false
- while loops. Patch by Pablo Galindo.
+- bpo-31758: Prevent crashes when using an uninitialized
+ ``_elementtree.XMLParser`` object. Patch by Oren Milman.
-- bpo-38535: Fixed line numbers and column offsets for AST nodes for calls
- without arguments in decorators.
+Documentation
+-------------
-- bpo-38525: Fix a segmentation fault when using reverse iterators of empty
- ``dict`` objects. Patch by Dong-hee Na and Inada Naoki.
+- bpo-27635: The pickle documentation incorrectly claimed that ``__new__``
+ isn't called by default when unpickling.
-- bpo-35409: Ignore GeneratorExit exceptions when throwing an exception into
- the aclose coroutine of an asynchronous generator.
+- bpo-39879: Updated :ref:`datamodel` docs to include :func:`dict` insertion
+ order preservation. Patch by Furkan Onder and Samy Lahfa.
-Library
--------
+- bpo-38387: Document :c:macro:`PyDoc_STRVAR` macro in the C-API reference.
-- bpo-39006: Fix asyncio when the ssl module is missing: only check for
- ssl.SSLSocket instance if the ssl module is available.
+- bpo-13743: Some methods within xml.dom.minidom.Element class are now
+ better documented.
-- bpo-38708: Fix a potential IndexError in email parser when parsing an
- empty msg-id.
+Tests
+-----
-- bpo-38698: Add a new ``InvalidMessageID`` token to email parser to
- represent invalid Message-ID headers. Also, add defects when there is
- remaining value after parsing the header.
+- bpo-31904: Set expected default encoding in test_c_locale_coercion.py for
+ VxWorks RTOS.
-- bpo-38979: Return class from ``ContextVar.__class_getitem__`` to simplify
- subclassing.
+- bpo-40162: Update Travis CI configuration to OpenSSL 1.1.1f.
-- bpo-38986: Make repr of C accelerated TaskWakeupMethWrapper the same as of
- pure Python version.
+- bpo-40146: Update OpenSSL to 1.1.1f in Azure Pipelines.
-- bpo-38529: Drop too noisy asyncio warning about deletion of a stream
- without explicit ``.close()`` call.
+- bpo-40094: Add :func:`test.support.wait_process` function.
-- bpo-38634: The :mod:`readline` module now detects if Python is linked to
- libedit at runtime on all platforms. Previously, the check was only done
- on macOS.
+- bpo-40003: ``test.bisect_cmd`` now copies Python command line options like
+ ``-O`` or ``-W``. Moreover, emit a warning if ``test.bisect_cmd`` is used
+ with ``-w``/``--verbose2`` option.
-- bpo-33684: Fix ``json.tool`` failing to read a JSON file with non-ASCII
- characters when locale encoding is not UTF-8.
+- bpo-39380: Add the encoding in :class:`ftplib.FTP` and
+ :class:`ftplib.FTP_TLS` to the constructor as keyword-only and change the
+ default from ``latin-1`` to ``utf-8`` to follow :rfc:`2640`.
-- bpo-38698: Prevent UnboundLocalError from popping up in parse_message_id
+- bpo-39793: Use the same domain when testing ``make_msgid``. Patch by
+ Batuhan Taskaya.
- parse_message_id() was improperly using a token defined inside an
- exception handler, which was raising `UnboundLocalError` on parsing an
- invalid value. Patch by Claudiu Popa.
+- bpo-1812: Fix newline handling in doctest.testfile when loading from a
+ package whose loader has a get_data method. Patch by Peter Donis.
-- bpo-26730: Fix ``SpooledTemporaryFile.rollover()`` might corrupt the file
- when it is in text mode. Patch by Serhiy Storchaka.
+Build
+-----
-- bpo-38668: Calling func:`shutil.copytree` to copy a directory tree from
- one directory to another subdirectory resulted in an endless loop and a
- RecursionError. A fix was added to consume an iterator and create the list
- of the entries to be copied, avoiding the recursion for newly created
- directories. Patch by Bruno P. Kinoshita.
+- bpo-38360: Support single-argument form of macOS -isysroot flag.
-- bpo-37838: :meth:`typing.get_type_hints` properly handles functions
- decorated with :meth:`functools.wraps`.
+- bpo-40158: Fix CPython MSBuild Properties in NuGet Package
+ (build/native/python.props)
-- bpo-38859: AsyncMock now returns StopAsyncIteration on the exhaustion of a
- side_effects iterable. Since PEP-479 it is impossible to raise a
- StopIteration exception from a coroutine.
+- bpo-38527: Fix configure check on Solaris for "float word ordering":
+ sometimes, the correct "grep" command was not being used. Patch by Arnon
+ Yaari.
-- bpo-38857: AsyncMock fix for return values that are awaitable types. This
- also covers side_effect iterable values that happened to be awaitable, and
- wraps callables that return an awaitable type. Before, these awaitables
- were awaited instead of being returned as is.
+Windows
+-------
-- bpo-38821: Fix unhandled exceptions in :mod:`argparse` when
- internationalizing error messages for arguments with ``nargs`` set to
- special (non-integer) values. Patch by Federico Bond.
+- bpo-40164: Updates Windows to OpenSSL 1.1.1f
-- bpo-38820: Make Python compatible with OpenSSL 3.0.0.
- :func:`ssl.SSLSocket.getpeercert` no longer returns IPv6 addresses with a
- trailing new line.
+- bpo-8901: Ignore the Windows registry when the ``-E`` option is used.
-- bpo-38807: Update :exc:`TypeError` messages for :meth:`os.path.join` to
- include :class:`os.PathLike` objects as acceptable input types.
+macOS
+-----
-- bpo-38785: Prevent asyncio from crashing if parent ``__init__`` is not
- called from a constructor of object derived from ``asyncio.Future``.
+- bpo-38329: python.org macOS installers now update the Current version
+ symlink of /Library/Frameworks/Python.framework/Versions for 3.9 installs.
+ Previously, Current was only updated for Python 2.x installs. This should
+ make it easier to embed Python 3 into other macOS applications.
-- bpo-38723: :mod:`pdb` now uses :meth:`io.open_code` to trigger auditing
- events.
+- bpo-40164: Update macOS installer builds to use OpenSSL 1.1.1g.
-- bpo-27805: Allow opening pipes and other non-seekable files in append mode
- with :func:`open`.
+IDLE
+----
-- bpo-38686: Added support for multiple ``qop`` values in
- :class:`urllib.request.AbstractDigestAuthHandler`.
+- bpo-38439: Add a 256×256 pixel IDLE icon to support more modern
+ environments. Created by Andrew Clover. Delete the unused macOS idle.icns
+ icon file.
-- bpo-38334: Fixed seeking backward on an encrypted
- :class:`zipfile.ZipExtFile`.
+- bpo-38689: IDLE will no longer freeze when inspect.signature fails when
+ fetching a calltip.
-- bpo-34679: asyncio.ProactorEventLoop.close() now only calls
- signal.set_wakeup_fd() in the main thread.
+Tools/Demos
+-----------
-- bpo-31202: The result of :func:`pathlib.WindowsPath.glob` now matches the
- case of the pattern for literal parts.
+- bpo-40385: Removed the checkpyc.py tool. Please see compileall without
+ force mode as a potential alternative.
-- bpo-38521: Fixed erroneous equality comparison in statistics.NormalDist().
+- bpo-40179: Fixed translation of ``#elif`` in Argument Clinic.
-- bpo-38478: Fixed a bug in :meth:`inspect.signature.bind` that was causing
- it to fail when handling a keyword argument with same name as
- positional-only parameter. Patch by Pablo Galindo.
+- bpo-40094: Fix ``which.py`` script exit code: it now uses
+ :func:`os.waitstatus_to_exitcode` to convert :func:`os.system` exit status
+ into an exit code.
-- bpo-33604: Fixed `hmac.new` and `hmac.HMAC` to raise TypeError instead of
- ValueError when the digestmod parameter, now required in 3.8, is omitted.
- Also clarified the hmac module documentation and docstrings.
+C API
+-----
-- bpo-38422: Clarify docstrings of pathlib suffix(es)
+- bpo-40241: Move the :c:type:`PyGC_Head` structure to the internal C API.
-- bpo-36993: Improve error reporting for corrupt zip files with bad zip64
- extra data. Patch by Daniel Hillier.
+- bpo-40170: Convert :c:func:`PyObject_IS_GC` macro to a function to hide
+ implementation details.
-- bpo-36820: Break cycle generated when saving an exception in socket.py,
- codeop.py and dyld.py as they keep alive not only the exception but user
- objects through the ``__traceback__`` attribute. Patch by Mario Corchero.
+- bpo-40241: Add the functions :c:func:`PyObject_GC_IsTracked` and
+ :c:func:`PyObject_GC_IsFinalized` to the public API to allow querying
+ whether Python objects are currently being tracked or have already been
+ finalized by the garbage collector, respectively. Patch by Pablo Galindo.
-- bpo-34776: Fix dataclasses to support forward references in type
- annotations
+- bpo-40170: The :c:func:`PyObject_NEW` macro becomes an alias to the
+ :c:func:`PyObject_New` macro, and the :c:func:`PyObject_NEW_VAR` macro
+ becomes an alias to the :c:func:`PyObject_NewVar` macro, to hide
+ implementation details. They no longer access directly the
+ :c:member:`PyTypeObject.tp_basicsize` member.
-- bpo-33348: lib2to3 now recognizes expressions after ``*`` and `**` like in
- ``f(*[] or [])``.
+- bpo-40170: :c:func:`PyType_HasFeature` now always calls
+ :c:func:`PyType_GetFlags` to hide implementation details. Previously, it
+ accessed directly the :c:member:`PyTypeObject.tp_flags` member when the
+ limited C API was not used.
-- bpo-27657: Fix urllib.parse.urlparse() with numeric paths. A string like
- "path:80" is no longer parsed as a path but as a scheme ("path") and a
- path ("80").
+- bpo-40170: Convert the :c:func:`PyObject_GET_WEAKREFS_LISTPTR` macro to a
+ function to hide implementation details: the macro accessed directly
+ the :c:member:`PyTypeObject.tp_weaklistoffset` member.
-Documentation
--------------
+- bpo-40170: Convert :c:func:`PyObject_CheckBuffer` macro to a function to
+ hide implementation details: the macro accessed directly the
+ :c:member:`PyTypeObject.tp_as_buffer` member.
-- bpo-38816: Provides more details about the interaction between
- :c:func:`fork` and CPython's runtime, focusing just on the C-API. This
- includes cautions about where :c:func:`fork` should and shouldn't be
- called.
+- bpo-40170: Always declare :c:func:`PyIndex_Check` as an opaque function to
+ hide implementation details: remove ``PyIndex_Check()`` macro. The macro
+ accessed directly the :c:member:`PyTypeObject.tp_as_number` member.
-- bpo-38351: Modernize :mod:`email` examples from %-formatting to f-strings.
+- bpo-39947: Add :c:func:`PyThreadState_GetID` function: get the unique
+ identifier of a Python thread state.
-- bpo-38778: Document the fact that :exc:`RuntimeError` is raised if
- :meth:`os.fork` is called in a subinterpreter.
-- bpo-38592: Add Brazilian Portuguese to the language switcher at Python
- Documentation website.
+What's New in Python 3.9.0 alpha 5?
+===================================
-Tests
------
+*Release date: 2020-03-23*
-- bpo-38547: Fix test_pty: if the process is the session leader, closing the
- master file descriptor raises a SIGHUP signal: simply ignore SIGHUP when
- running the tests.
+Security
+--------
-- bpo-38992: Fix a test for :func:`math.fsum` that was failing due to
- constant folding.
+- bpo-38576: Disallow control characters in hostnames in http.client,
+ addressing CVE-2019-18348. Such potentially malicious header injection
+ URLs now cause an InvalidURL to be raised.
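+
+ A hedged sketch of the effect (the exact raise point depends on the code
+ path, but no request is sent)::
+
+   import http.client
+
+   try:
+       conn = http.client.HTTPConnection("example.com\r\nx-injected: 1")
+       conn.request("GET", "/")
+   except http.client.InvalidURL as exc:
+       print("rejected:", exc)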
-- bpo-38965: Fix test_faulthandler on GCC 10. Use the "volatile" keyword in
- ``faulthandler._stack_overflow()`` to prevent tail call optimization on
- any compiler, rather than relying on compiler specific pragma.
+Core and Builtins
+-----------------
-- bpo-38875: test_capi: trashcan tests now require the test "cpu" resource.
+- bpo-40010: Optimize pending calls in multithreaded applications. If a
+ thread different than the main thread schedules a pending call
+ (:c:func:`Py_AddPendingCall`), the bytecode evaluation loop is no longer
+ interrupted at each bytecode instruction to check for pending calls which
+ cannot be executed. Only the main thread can execute pending calls.
-- bpo-38841: Skip asyncio test_create_datagram_endpoint_existing_sock_unix
- on platforms lacking a functional bind() for named unix domain sockets.
+ Previously, the bytecode evaluation loop was interrupted at each
+ instruction until the main thread executes pending calls.
-- bpo-38669: Raise :exc:`TypeError` when passing target as a string with
- :meth:`unittest.mock.patch.object`.
+- bpo-1635741: Port _weakref extension module to multiphase initialization
+ (:pep:`489`).
-- bpo-35998: Fix a race condition in test_asyncio.test_start_tls_server_1().
- Previously, there was a race condition between the test main() function
- which replaces the protocol and the test ServerProto protocol which sends
- ANSWER once it gets HELLO. Now, only the test main() function is
- responsible to send data, ServerProto no longer sends data.
+- bpo-1635741: Port _collections module to multiphase initialization
+ (:pep:`489`).
-Build
------
+- bpo-40010: Optimize signal handling in multithreaded applications. If a
+ thread different than the main thread gets a signal, the bytecode
+ evaluation loop is no longer interrupted at each bytecode instruction to
+ check for pending signals which cannot be handled. Only the main thread of
+ the main interpreter can handle signals.
-- bpo-37404: :mod:`asyncio` now raises :exc:`TyperError` when calling
- incompatible methods with an :class:`ssl.SSLSocket` socket. Patch by Ido
- Michael.
+ Previously, the bytecode evaluation loop was interrupted at each
+ instruction until the main thread handles signals.
-- bpo-38809: On Windows, build scripts will now recognize and use python.exe
- from an active virtual env.
+- bpo-39984: If :c:func:`Py_AddPendingCall` is called in a subinterpreter,
+ the function is now scheduled to be called from the subinterpreter, rather
+ than being called from the main interpreter. Each subinterpreter now has
+ its own list of scheduled calls.
-- bpo-38684: Fix _hashlib build when Blake2 is disabled, but OpenSSL
- supports it.
+- bpo-1635741: Port _heapq module to multiphase initialization.
-- bpo-37415: Fix stdatomic.h header check for ICC compiler: the ICC
- implementation lacks atomic_uintptr_t type which is needed by Python.
+- bpo-1635741: Port itertools module to multiphase initialization
+ (:pep:`489`).
-Windows
--------
+- bpo-37207: Speed up calls to ``frozenset()`` by using the :pep:`590`
+ ``vectorcall`` calling convention. Patch by Dong-hee Na.
-- bpo-33125: Add support for building and releasing Windows ARM64 packages.
+- bpo-39984: subinterpreters: Move
+ ``_PyRuntimeState.ceval.tracing_possible`` to
+ ``PyInterpreterState.ceval.tracing_possible``: each interpreter now has
+ its own variable.
-- bpo-38589: Fixes HTML Help shortcut when Windows is not installed to C
- drive
+- bpo-37207: Speed up calls to ``set()`` by using the :pep:`590`
+ ``vectorcall`` calling convention. Patch by Dong-hee Na.
-- bpo-38453: Ensure ntpath.realpath() correctly resolves relative paths.
+- bpo-1635741: Port _statistics module to multiphase initialization
+ (:pep:`489`).
-- bpo-38519: Restores the internal C headers that were missing from the
- nuget.org and Microsoft Store packages.
+- bpo-39968: Use inline function to replace extension modules'
+ get_module_state macros.
-- bpo-38492: Remove ``pythonw.exe`` dependency on the Microsoft C++ runtime.
+- bpo-39965: Correctly raise ``SyntaxError`` if *await* is used inside
+ non-async functions and ``PyCF_ALLOW_TOP_LEVEL_AWAIT`` is set (like in the
+ asyncio REPL). Patch by Pablo Galindo.
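+
+ A small sketch using the existing ``ast.PyCF_ALLOW_TOP_LEVEL_AWAIT``
+ compile flag (``x`` is only compiled, never executed)::
+
+   import ast
+
+   FLAGS = ast.PyCF_ALLOW_TOP_LEVEL_AWAIT
+   compile("await x", "<repl>", "exec", flags=FLAGS)        # accepted
+   try:
+       compile("def f():\n    await x", "<repl>", "exec", flags=FLAGS)
+   except SyntaxError as exc:
+       print(exc)                                           # still rejected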
-macOS
------
+- bpo-39562: Allow executing asynchronous comprehensions on the top level
+ when the ``PyCF_ALLOW_TOP_LEVEL_AWAIT`` flag is given. Patch by Batuhan
+ Taskaya.
-- bpo-37931: Fixed a crash on OSX dynamic builds that occurred when
- re-initializing the posix module after a Py_Finalize if the environment
- had changed since the previous `import posix`. Patch by Benoît Hudson.
+- bpo-37207: Speed up calls to ``tuple()`` by using the :pep:`590`
+ ``vectorcall`` calling convention. Patch by Dong-hee Na.
-IDLE
-----
+- bpo-38373: Changed the list overallocation strategy. It no longer
+ overallocates if the new size is closer to the overallocated size than to
+ the old size, and it adds padding.
-- bpo-38862: 'Strip Trailing Whitespace' on the Format menu removes extra
- newlines at the end of non-shell files.
+- bpo-39926: Update Unicode database to Unicode version 13.0.0.
-- bpo-26353: Stop adding newline when saving an IDLE shell window.
+- bpo-19466: Clear the frames of daemon threads earlier during the Python
+ shutdown to call object destructors. So "unclosed file" resource warnings
+ are now emitted for daemon threads in a more reliable way.
-- bpo-38636: Fix IDLE Format menu tab toggle and file indent width. These
- functions (default shortcuts Alt-T and Alt-U) were mistakenly disabled in
- 3.7.5 and 3.8.0.
+- bpo-38894: Fix a bug that was causing incomplete results when calling
+ ``pathlib.Path.glob`` in the presence of symlinks that point to files
+ where the user does not have read access. Patch by Pablo Galindo and Matt
+ Wozniski.
-- bpo-4630: Add an option to toggle IDLE's cursor blink for shell, editor,
- and output windows. See Settings, General, Window Preferences, Cursor
- Blink. Patch by Zackery Spytz.
+- bpo-39877: Fix :c:func:`PyEval_RestoreThread` random crash at exit with
+ daemon threads. It now accesses the ``_PyRuntime`` variable directly
+ instead of using ``tstate->interp->runtime``, since ``tstate`` can be a
+ dangling pointer after :c:func:`Py_Finalize` has been called. Moreover,
+ the daemon thread now exits before trying to take the GIL.
-- bpo-38598: Do not try to compile IDLE shell or output windows
+- bpo-39871: Fix a possible :exc:`SystemError` in
+ ``math.{atan2,copysign,remainder}()`` when the first argument cannot be
+ converted to a :class:`float`. Patch by Zackery Spytz.
-C API
------
+- bpo-39776: Fix race condition where threads created by PyGILState_Ensure()
+ could get a duplicate id.
-- bpo-37633: Re-export some function compatibility wrappers for macros in
- ``pythonrun.h``.
+ This affects consumers of tstate->id like the contextvar caching
+ machinery, which could return invalid cached objects under heavy thread
+ load (observed in embedded scenarios).
-- bpo-38540: Fixed possible leak in :c:func:`PyArg_Parse` and similar
- functions for format units ``"es#"`` and ``"et#"`` when the macro
- :c:macro:`PY_SSIZE_T_CLEAN` is not defined.
+- bpo-39778: Fixed a crash due to incorrect handling of weak references in
+ ``collections.OrderedDict`` classes. Patch by Pablo Galindo.
-- bpo-36389: The ``_PyObject_CheckConsistency()`` function is now also
- available in release mode. For example, it can be used to debug a crash in
- the ``visit_decref()`` function of the GC.
+- bpo-1635741: Port audioop extension module to multiphase initialization
+ (:pep:`489`).
+- bpo-39702: Relax :term:`decorator` grammar restrictions to allow any valid
+ expression (:pep:`614`).
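+
+ A hypothetical illustration of the relaxed grammar (the registry here is
+ made up for the example)::
+
+   registry = {"ok": (lambda fn: fn)}   # placeholder decorator
+
+   @registry["ok"]                      # a subscript expression as decorator
+   def on_ok():
+       return "clicked"
+
+   print(on_ok())   # clicked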
-What's New in Python 3.8.0 final?
-=================================
+- bpo-38091: Tweak import deadlock detection code to not deadlock itself.
-*Release date: 2019-10-14*
+- bpo-1635741: Port _locale extension module to multiphase initialization
+ (:pep:`489`).
-Core and Builtins
------------------
+- bpo-39087: Optimize :c:func:`PyUnicode_AsUTF8` and
+ :c:func:`PyUnicode_AsUTF8AndSize` slightly when they need to create
+ internal UTF-8 cache.
-- bpo-38469: Fixed a bug where the scope of named expressions was not being
- resolved correctly in the presence of the *global* keyword. Patch by Pablo
- Galindo.
+- bpo-39520: Fix unparsing of ext slices with no items (``foo[:,]``). Patch
+ by Batuhan Taskaya.
-- bpo-38379: When cyclic garbage collection (gc) runs finalizers that
- resurrect unreachable objects, the current gc run ends, without collecting
- any cyclic trash. However, the statistics reported by ``collect()`` and
- ``get_stats()`` claimed that all cyclic trash found was collected, and
- that the resurrected objects were collected. Changed the stats to report
- that none were collected.
+- bpo-39220: Do not optimize annotations if 'from __future__ import
+ annotations' is used. Patch by Pablo Galindo.
+
+- bpo-35712: Using :data:`NotImplemented` in a boolean context has been
+ deprecated. Patch contributed by Josh Rosenberg.
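+
+ A quick way to observe the new warning (no warning is emitted on earlier
+ versions)::
+
+   import warnings
+
+   with warnings.catch_warnings(record=True) as caught:
+       warnings.simplefilter("always")
+       bool(NotImplemented)             # truth-testing NotImplemented
+   print([str(w.message) for w in caught])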
+
+- bpo-22490: Don't leak environment variable ``__PYVENV_LAUNCHER__`` into
+ the interpreter session on macOS.
Library
-------
-- bpo-38449: Revert GH-15522, which introduces a regression in
- :meth:`mimetypes.guess_type` due to improper handling of filenames as
- urls.
+- bpo-39830: Add :class:`zipfile.Path` to ``__all__`` in the :mod:`zipfile`
+ module.
-- bpo-38431: Fix ``__repr__`` method for :class:`dataclasses.InitVar` to
- support typing objects, patch by Samuel Colvin.
+- bpo-40000: Improved error messages for validation of ``ast.Constant``
+ nodes. Patch by Batuhan Taskaya.
-- bpo-38109: Add missing :data:`stat.S_IFDOOR`, :data:`stat.S_IFPORT`,
- :data:`stat.S_IFWHT`, :func:`stat.S_ISDOOR`, :func:`stat.S_ISPORT`, and
- :func:`stat.S_ISWHT` values to the Python implementation of :mod:`stat`.
+- bpo-39999: ``__module__`` of the AST node classes is now set to "ast"
+ instead of "_ast". Added docstrings for dummy AST node classes and
+ deprecated attributes.
-- bpo-38405: Nested subclasses of :class:`typing.NamedTuple` are now
- pickleable.
+- bpo-39991: :func:`uuid.getnode` now skips IPv6 addresses with the same
+ string length as a MAC address (17 characters): only use MAC addresses.
-- bpo-38332: Prevent :exc:`KeyError` thrown by :func:`_encoded_words.decode`
- when given an encoded-word with invalid content-type encoding from
- propagating all the way to :func:`email.message.get`.
+- bpo-39988: Deprecated ``ast.AugLoad`` and ``ast.AugStore`` node classes
+ because they are no longer used.
-- bpo-38341: Add :exc:`smtplib.SMTPNotSupportedError` to the :mod:`smtplib`
- exported names.
+- bpo-39656: Ensure ``bin/python3.#`` is always present in virtual
+ environments on POSIX platforms - by Anthony Sottile.
-- bpo-13153: OS native encoding is now used for converting between Python
- strings and Tcl objects. This allows to display, copy and paste to
- clipboard emoji and other non-BMP characters. Converting strings from Tcl
- to Python and back now never fails (except MemoryError).
+- bpo-39969: Deprecated ``ast.Param`` node class because it's no longer
+ used. Patch by Batuhan Taskaya.
-Documentation
--------------
+- bpo-39360: Ensure all workers exit when finalizing a
+ :class:`multiprocessing.Pool` implicitly via the module finalization
+ handlers of multiprocessing. This fixes a deadlock situation that can be
+ experienced when the Pool is not properly finalized via the context
+ manager or a call to ``multiprocessing.Pool.terminate``. Patch by Batuhan
+ Taskaya and Pablo Galindo.
-- bpo-38294: Add list of no-longer-escaped chars to re.escape documentation.
+- bpo-35370: sys.settrace(), sys.setprofile() and _lsprof.Profiler.enable()
+ now properly report :c:func:`PySys_Audit` error if "sys.setprofile" or
+ "sys.settrace" audit event is denied.
-Tests
------
+- bpo-39936: AIX: Fix _aix_support module when the subprocess is not
+ available, when building Python from scratch. It now uses the new private
+ _bootsubprocess module, rather than having two implementations depending
+ on whether subprocess is available. As a result, _aix_support.aix_platform()
+ now returns the same result whether or not subprocess is available.
-- bpo-37531: On timeout, regrtest no longer attempts to call
- ``popen.communicate()`` again: it can hang until all child processes using
- stdout and stderr pipes completes. Kill the worker process and ignores its
- output. Change also the faulthandler timeout of the main process from 1
- minute to 5 minutes, for Python slowest buildbots.
+- bpo-36144: :class:`collections.OrderedDict` now implements ``|`` and
+ ``|=`` (:pep:`584`).
-Windows
--------
+- bpo-39652: The column name found in ``sqlite3.Cursor.description`` is now
+ truncated on the first '[' only if the PARSE_COLNAMES option is set.
-- bpo-38344: Fix error message in activate.bat.
+- bpo-39915: Ensure :attr:`unittest.mock.AsyncMock.await_args_list` has call
+ objects in the order of awaited arguments instead of using
+ :attr:`unittest.mock.Mock.call_args` which has the last value of the call.
+ Patch by Karthikeyan Singaravelan.
-- bpo-38359: Ensures ``pyw.exe`` launcher reads correct registry key.
+- bpo-36144: Updated :data:`os.environ` and :data:`os.environb` to support
+ :pep:`584`'s merge (``|``) and update (``|=``) operators.
-- bpo-38355: Fixes ``ntpath.realpath`` failing on ``sys.executable``.
+- bpo-38662: The ``ensurepip`` module now invokes ``pip`` via the ``runpy``
+ module. Hence it is no longer tightly coupled with the internal API of the
+ bundled ``pip`` version, allowing easier updates to a newer ``pip``
+ version both internally and for distributors.
-IDLE
-----
+- bpo-38075: Fix the :meth:`random.Random.seed` method when a :class:`bool`
+ is passed as the seed.
-- bpo-36698: IDLE no longer fails when write non-encodable characters to
- stderr. It now escapes them with a backslash, as the regular Python
- interpreter. Added the ``errors`` field to the standard streams.
+- bpo-39916: More reliable use of ``os.scandir()`` in ``Path.glob()``. It no
+ longer emits a ResourceWarning when interrupted.
-Tools/Demos
------------
+- bpo-39850: :mod:`multiprocessing` now supports abstract socket addresses
+ (if abstract sockets are supported in the running platform). When creating
+ arbitrary addresses (like when default-constructing
+ :class:`multiprocessing.connection.Listener` objects) abstract sockets are
+ preferred to avoid the case when the temporary-file-generated address is
+ too large for an AF_UNIX socket address. Patch by Pablo Galindo.
-- bpo-38118: Update Valgrind suppression file to ignore a false alarm in
- :c:func:`PyUnicode_Decode` when using GCC builtin strcmp().
+- bpo-36287: :func:`ast.dump()` no longer outputs optional fields and
+ attributes with default values. The default values for optional fields and
+ attributes of AST nodes are now set as class attributes (e.g.
+ ``Constant.kind`` is set to ``None``).
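+
+ A short sketch of the new output (the 3.8 form is shown for contrast)::
+
+   import ast
+
+   print(ast.dump(ast.parse("'spam'", mode="eval")))
+   # 3.9: Expression(body=Constant(value='spam'))
+   # 3.8: Expression(body=Constant(value='spam', kind=None))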
-- bpo-38347: pathfix.py: Assume all files that end on '.py' are Python
- scripts when working recursively.
+- bpo-39889: Fixed :func:`ast.unparse` for extended slices containing a
+ single element (e.g. ``a[i:j,]``). Remove redundant tuples when indexing with
+ a tuple (e.g. ``a[i, j]``).
-C API
------
+- bpo-39828: Fix :mod:`json.tool` to catch :exc:`BrokenPipeError`. Patch by
+ Dong-hee Na.
-- bpo-38395: Fix a crash in :class:`weakref.proxy` objects due to incorrect
- lifetime management when calling some associated methods that may delete
- the last reference to object being referenced by the proxy. Patch by Pablo
- Galindo.
+- bpo-13487: Avoid a possible *"RuntimeError: dictionary changed size during
+ iteration"* from :func:`inspect.getmodule` when it tried to loop through
+ :attr:`sys.modules`.
+- bpo-39674: Revert "bpo-37330: open() no longer accept 'U' in file mode".
+ The "U" mode of open() is kept in Python 3.9 to ease transition from
+ Python 2.7, but will be removed in Python 3.10.
-What's New in Python 3.8.0 release candidate 1?
-===============================================
+- bpo-28577: The hosts method on 32-bit prefix length IPv4Networks and
+ 128-bit prefix IPv6Networks now returns a list containing the single
+ Address instead of an empty list.
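+
+ A minimal sketch of the new result::
+
+   import ipaddress
+
+   print(list(ipaddress.ip_network("192.0.2.1/32").hosts()))
+   # [IPv4Address('192.0.2.1')]  -- previously an empty list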
-*Release date: 2019-10-01*
+- bpo-39826: Add getConnection method to logging HTTPHandler to enable
+ custom connections.
-Security
---------
+- bpo-39763: Reimplement :func:`distutils.spawn.spawn` function with the
+ :mod:`subprocess` module.
-- bpo-38243: Escape the server title of
- :class:`xmlrpc.server.DocXMLRPCServer` when rendering the document page as
- HTML. (Contributed by Dong-hee Na in :issue:`38243`.)
+- bpo-39794: Add --without-decimal-contextvar build option. This enables a
+ thread-local rather than a coroutine local context.
-- bpo-38174: Update vendorized expat library version to 2.2.8, which
- resolves CVE-2019-15903.
+- bpo-36144: :class:`collections.defaultdict` now implements ``|``
+ (:pep:`584`).
-- bpo-37764: Fixes email._header_value_parser.get_unstructured going into an
- infinite loop for a specific case in which the email header does not have
- trailing whitespace, and the case in which it contains an invalid encoded
- word. Patch by Ashwin Ramaswami.
+- bpo-39517: Fix runpy.run_path() when using path-like objects
-Core and Builtins
------------------
+- bpo-39775: Change ``inspect.Signature.parameters`` back to
+ ``collections.OrderedDict``. This was changed to ``dict`` in Python
+ 3.9.0a4.
-- bpo-38006: Fix a bug due to the interaction of weakrefs and the cyclic
- garbage collector. We must clear any weakrefs in garbage in order to
- prevent their callbacks from executing and causing a crash.
+- bpo-39678: Refactor queue_manager in
+ :class:`concurrent.futures.ProcessPoolExecutor` to make it easier to
+ maintain.
-- bpo-38317: Fix warnings options priority: ``PyConfig.warnoptions`` has the
- highest priority, as stated in the :pep:`587`.
+- bpo-39764: Fix AttributeError when calling get_stack on a PyAsyncGenObject
+ Task
-- bpo-36871: Improve error handling for the assert_has_calls and
- assert_has_awaits methods of mocks. Fixed a bug where any errors
- encountered while binding the expected calls to the mock's spec were
- silently swallowed, leading to misleading error output.
+- bpo-39769: The :func:`compileall.compile_dir` function's *ddir* parameter
+ and the compileall command line flag `-d` no longer write the wrong
+ pathname to the generated pyc file for submodules beneath the root of the
+ directory tree being compiled. This fixes a regression introduced with
+ Python 3.5.
-- bpo-38236: Python now dumps path configuration if it fails to import the
- Python codecs of the filesystem and stdio encodings.
+- bpo-36144: :class:`types.MappingProxyType` objects now support the merge
+ (``|``) operator from :pep:`584`.
-- bpo-38013: Allow to call ``async_generator_athrow().throw(...)`` even for
- non-started async generator helper. It fixes annoying warning at the end
- of :func:`asyncio.run` call.
+- bpo-38691: The :mod:`importlib` module now ignores the
+ :envvar:`PYTHONCASEOK` environment variable when the :option:`-E` or
+ :option:`-I` command line options are being used.
-- bpo-38124: Fix an off-by-one error in PyState_AddModule that could cause
- out-of-bounds memory access.
+- bpo-39719: Remove :meth:`tempfile.SpooledTemporaryFile.softspace` as files
+ no longer have the ``softspace`` attribute in Python 3. Patch by Shantanu.
-- bpo-38005: Fixed comparing and creating of InterpreterID and ChannelID.
+- bpo-39667: Improve pathlib.Path compatibility on zipfile.Path and correct
+ performance degradation as found in zipp 3.0.
-- bpo-37994: Fixed silencing arbitrary errors if an attribute lookup fails
- in several sites. Only AttributeError should be silenced.
+- bpo-39638: Keep ASDL signatures in the docstrings for ``AST`` nodes. Patch
+ by Batuhan Taskaya
-- bpo-37990: Fix elapsed time in gc stats was not printed correctly. This
- bug was a regression in 3.8b4.
+- bpo-39639: Deprecated ``ast.Suite`` node class because it's no longer
+ used. Patch by Batuhan Taskaya.
-- bpo-37966: The implementation of :func:`~unicodedata.is_normalized` has
- been greatly sped up on strings that aren't normalized, by implementing
- the full normalization-quick-check algorithm from the Unicode standard.
+- bpo-39609: Add thread_name_prefix to default asyncio executor
-- bpo-20490: Improve import error message for partially initialized module
- on circular ``from`` imports - by Anthony Sottile.
+- bpo-39548: Fix handling of header in
+ :class:`urllib.request.AbstractDigestAuthHandler` when the optional
+ ``qop`` parameter is not present.
-- bpo-37409: Ensure explicit relative imports from interactive sessions and
- scripts (having no parent package) always raise ImportError, rather than
- treating the current module as the package. Patch by Ben Lewis.
+- bpo-39509: HTTP status codes ``103 EARLY_HINTS`` and ``425 TOO_EARLY`` are
+ added to :class:`http.HTTPStatus`. Patch by Dong-hee Na.
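+
+ A minimal usage sketch::
+
+   from http import HTTPStatus
+
+   print(HTTPStatus.EARLY_HINTS.value, HTTPStatus.TOO_EARLY.value)  # 103 425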
-- bpo-37619: When adding a wrapper descriptor from one class to a different
- class (for example, setting ``__add__ = str.__add__`` on an ``int``
- subclass), an exception is correctly raised when the operator is called.
+- bpo-39507: Adding HTTP status 418 "I'm a Teapot" to HTTPStatus in http
+ library. Patch by Ross Rhodes.
-- bpo-30773: Prohibit parallel running of aclose() / asend() / athrow(). Fix
- ag_running to reflect the actual running status of the AG.
+- bpo-39495: Remove default value from *attrs* parameter of
+ :meth:`xml.etree.ElementTree.TreeBuilder.start` for consistency between
+ Python and C implementations.
-Library
--------
+- bpo-38971: Bring the implementation of codecs.open() to parity with
+ io.open(): a try/except now ensures that the file stream is closed if an
+ exception is raised.
-- bpo-38319: sendfile() used in socket and shutil modules was raising
- OverflowError for files >= 2GiB on 32-bit architectures. (patch by
- Giampaolo Rodola)
+- bpo-38641: Added starred expressions support to ``return`` and ``yield``
+ statements for ``lib2to3``. Patch by Vlad Emelianov.
-- bpo-38242: Revert the new asyncio Streams API
+- bpo-37534: When using the minidom module to generate XML documents, it is
+ now possible to add a Standalone Document Declaration. The changes generate
+ a document in compliance with the Extensible Markup Language (XML) 1.0
+ (Fifth Edition) W3C Recommendation (available here:
+ https://www.w3.org/TR/xml/#sec-prolog-dtd).
-- bpo-38019: Correctly handle pause/resume reading of closed asyncio unix
- pipe.
+- bpo-34788: Add support for scoped IPv6 addresses to :mod:`ipaddress`.
+ Patch by Oleksandr Pavliuk.
-- bpo-38163: Child mocks will now detect their type as either synchronous or
- asynchronous, asynchronous child mocks will be AsyncMocks and synchronous
- child mocks will be either MagicMock or Mock (depending on their parent
- type).
+- bpo-34822: Simplified AST for subscription. Simple indices are now
+ represented by their value, extended slices are represented as tuples.
+ :mod:`ast` classes ``Index`` and ``ExtSlice`` are considered deprecated
+ and will be removed in future Python versions. In the meantime,
+ ``Index(value)`` now returns the ``value`` itself, ``ExtSlice(slices)``
+ returns ``Tuple(slices, Load())``.
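+
+ A short sketch of the simplified tree::
+
+   import ast
+
+   tree = ast.parse("a[1:2, 3]", mode="eval")
+   print(ast.dump(tree.body.slice))
+   # Tuple(elts=[Slice(...), Constant(value=3)], ctx=Load()) -- no Index/ExtSlice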
-- bpo-38161: Removes _AwaitEvent from AsyncMock.
+Documentation
+-------------
-- bpo-38216: Allow the rare code that wants to send invalid http requests
- from the `http.client` library a way to do so. The fixes for bpo-30458
- led to breakage for some projects that were relying on this ability to
- test their own behavior in the face of bad requests.
+- bpo-39868: Updated the Language Reference for :pep:`572`.
-- bpo-38108: Any synchronous magic methods on an AsyncMock now return a
- MagicMock. Any asynchronous magic methods on a MagicMock now return an
- AsyncMock.
+- bpo-13790: Change 'string' to 'specification' in format doc.
-- bpo-38248: asyncio: Fix inconsistent immediate Task cancellation
+- bpo-17422: The language reference no longer restricts default class
+ namespaces to dicts only.
-- bpo-38237: The arguments for the builtin pow function are more
- descriptive. They can now also be passed in as keywords.
+- bpo-39530: Fix misleading documentation about mixed-type numeric
+ comparisons.
-- bpo-38191: Constructors of :class:`~typing.NamedTuple` and
- :class:`~typing.TypedDict` types now accept arbitrary keyword argument
- names, including "cls", "self", "typename", "_typename", "fields" and
- "_fields". Passing positional arguments by keyword is deprecated.
+- bpo-39718: Update :mod:`token` documentation to reflect additions in
+ Python 3.8
-- bpo-38185: Fixed case-insensitive string comparison in
- :class:`sqlite3.Row` indexing.
+- bpo-39677: Changed operand name of **MAKE_FUNCTION** from *argc* to
+ *flags* for module :mod:`dis`
-- bpo-38136: Changes AsyncMock call count and await count to be two
- different counters. Now await count only counts when a coroutine has been
- awaited, not when it has been called, and vice-versa. Update the
- documentation around this.
+Tests
+-----
-- bpo-37828: Fix default mock name in
- :meth:`unittest.mock.Mock.assert_called` exceptions. Patch by Abraham
- Toriz Cruz.
+- bpo-40019: test_gdb now skips tests if it detects that gdb failed to read
+ debug information because the Python binary is optimized.
-- bpo-38175: Fix a memory leak in comparison of :class:`sqlite3.Row`
- objects.
+- bpo-27807: ``test_site.test_startup_imports()`` is now skipped if a path
+ of :data:`sys.path` contains a ``.pth`` file.
-- bpo-33936: _hashlib no longer calls obsolete OpenSSL initialization
- function with OpenSSL 1.1.0+.
+- bpo-26067: Do not fail test_shutil test_chown test when uid or gid of user
+ cannot be resolved to a name.
-- bpo-34706: Preserve subclassing in inspect.Signature.from_callable.
+- bpo-39855: test_subprocess.test_user() now skips the test for a user name
+ if the user name doesn't exist. For example, skip the test if the user
+ "nobody" doesn't exist on Linux.
-- bpo-38153: Names of hashing algorithms frome OpenSSL are now normalized to
- follow Python's naming conventions. For example OpenSSL uses sha3-512
- instead of sha3_512 or blake2b512 instead of blake2b.
+Build
+-----
-- bpo-38115: Fix a bug in dis.findlinestarts() where it would return invalid
- bytecode offsets. Document that a code object's co_lnotab can contain
- invalid bytecode offsets.
+- bpo-39761: Fix build with DTrace but without additional DFLAGS.
-- bpo-38148: Add slots to :mod:`asyncio` transport classes, which can reduce
- memory usage.
+- bpo-39763: setup.py now uses a basic implementation of the
+ :mod:`subprocess` module if the :mod:`subprocess` module is not yet
+ available, which is the case before the required C extension modules are
+ built.
-- bpo-36991: Fixes a potential incorrect AttributeError exception escaping
- ZipFile.extract() in some unsupported input error situations.
+- bpo-1294959: Add ``--with-platlibdir`` option to the configure script:
+ name of the platform-specific library directory, stored in the new
+ :attr:`sys.platlibdir` attribute. It is used to build the path of
+ platform-specific extension modules and the path of the standard library.
+ It is equal to ``"lib"`` on most platforms. On Fedora and SuSE, it is
+ equal to ``"lib64"`` on 64-bit platforms. Patch by Jan MatÄjek, MatÄj
+ Cepl, Charalampos Stratakis and Victor Stinner.
-- bpo-38134: Remove obsolete copy of PBKDF2_HMAC_fast. All supported OpenSSL
- versions contain a fast implementation.
+Windows
+-------
-- bpo-38132: The OpenSSL hashlib wrapper uses a simpler implementation.
- Several Macros and pointless caches are gone. The hash name now comes from
- OpenSSL's EVP. The algorithm name stays the same, except it is now always
- lower case.
+- bpo-39930: Ensures the required :file:`vcruntime140.dll` is included in
+ install packages.
-- bpo-38008: Fix parent class check in protocols to correctly identify the
- module that provides a builtin protocol, instead of assuming they all come
- from the :mod:`collections.abc` module
+- bpo-39847: Avoid hang when computer is hibernated whilst waiting for a
+ mutex (for lock-related objects from :mod:`threading`) around 49-day
+ uptime.
-- bpo-37405: Fixed regression bug for socket.getsockname() for non-CAN_ISOTP
- AF_CAN address family sockets by returning a 1-tuple instead of string.
+- bpo-38597: :mod:`distutils` will no longer statically link
+ :file:`vcruntime140.dll` when a redistributable version is unavailable.
+ All future releases of CPython will include a copy of this DLL to ensure
+ distributed extensions can continue to load.
-- bpo-38121: Update parameter names on functions in importlib.metadata
- matching the changes in the 0.22 release of importlib_metadata.
+- bpo-38380: Update Windows builds to use SQLite 3.31.1
-- bpo-38110: The os.closewalk() implementation now uses the libc fdwalk()
- API on platforms where it is available.
+- bpo-39789: Update Windows release build machines to Visual Studio 2019
+ (MSVC 14.2).
-- bpo-38093: Fixes AsyncMock so it doesn't crash when used with
- AsyncContextManagers or AsyncIterators.
+- bpo-34803: Package for nuget.org now includes repository reference and
+ bundled icon image.
-- bpo-37488: Add warning to :meth:`datetime.utctimetuple`,
- :meth:`datetime.utcnow` and :meth:`datetime.utcfromtimestamp` .
+macOS
+-----
-- bpo-38086: Update importlib.metadata with changes from `importlib_metadata
- 0.21
- `_.
+- bpo-38380: Update macOS builds to use SQLite 3.31.1
-- bpo-37251: Remove `__code__` check in AsyncMock that incorrectly evaluated
- function specs as async objects but failed to evaluate classes with
- `__await__` but no `__code__` attribute defined as async objects.
+IDLE
+----
-- bpo-38037: Fix reference counters in the :mod:`signal` module.
+- bpo-27115: For 'Go to Line', use a Query box subclass with IDLE standard
+ behavior and improved error checking.
-- bpo-38066: Hide internal asyncio.Stream methods: feed_eof(), feed_data(),
- set_exception() and set_transport().
+- bpo-39885: Since clicking to get an IDLE context menu moves the cursor,
+ any text selection should be and now is cleared.
-- bpo-38059: inspect.py now uses sys.exit() instead of exit()
+- bpo-39852: Edit "Go to line" now clears any selection, preventing
+ accidental deletion. It also updates Ln and Col on the status bar.
-- bpo-37953: In :mod:`typing`, improved the ``__hash__`` and ``__eq__``
- methods for :class:`ForwardReferences`.
+- bpo-39781: Selecting code context lines no longer causes a jump.
-- bpo-38026: Fixed :func:`inspect.getattr_static` used ``isinstance`` while
- it should avoid dynamic lookup.
+Tools/Demos
+-----------
-- bpo-38010: In ``importlib.metadata`` sync with ``importlib_metadata``
- 0.20, clarifying behavior of ``files()`` and fixing issue where only one
- requirement was returned for ``requires()`` on ``dist-info`` packages.
+- bpo-36184: Port python-gdb.py to FreeBSD. python-gdb.py now checks for
+ "take_gil" function name to check if a frame tries to acquire the GIL,
+ instead of checking for "pthread_cond_timedwait" which is specific to
+ Linux and can be a different condition than the GIL.
-- bpo-38006: weakref.WeakValueDictionary defines a local remove() function
- used as callback for weak references. This function was created with a
- closure. Modify the implementation to avoid the closure.
+- bpo-38080: Added support to fix ``getproxies`` in the
+ :mod:`lib2to3.fixes.fix_urllib` module. Patch by José Roberto Meza
+ Cabrera.
-- bpo-34410: Fixed a crash in the :func:`tee` iterator when re-enter it.
- RuntimeError is now raised in this case.
+C API
+-----
-- bpo-37140: Fix a ctypes regression of Python 3.8. When a ctypes.Structure
- is passed by copy to a function, ctypes internals created a temporary
- object which had the side effect of calling the structure finalizer
- (__del__) twice. The Python semantics requires a finalizer to be called
- exactly once. Fix ctypes internals to no longer call the finalizer twice.
+- bpo-40024: Add :c:func:`PyModule_AddType` helper function: add a type to a
+ module. Patch by Dong-hee Na.
-- bpo-37972: Subscripts to the `unittest.mock.call` objects now receive the
- same chaining mechanism as any other custom attributes, so that the
- following usage no longer raises a `TypeError`:
+- bpo-39946: Remove ``_PyRuntime.getframe`` hook and remove
+ ``_PyThreadState_GetFrame`` macro which was an alias to
+ ``_PyRuntime.getframe``. They were only exposed by the internal C API.
+ Remove also ``PyThreadFrameGetter`` type.
- call().foo().__getitem__('bar')
+- bpo-39947: Add :c:func:`PyThreadState_GetFrame` function: get the current
+ frame of a Python thread state.
- Patch by blhsing
+- bpo-37207: Add _PyArg_NoKwnames helper function. Patch by Dong-hee Na.
-- bpo-22347: Update mimetypes.guess_type to allow proper parsing of URLs
- with only a host name. Patch by Dong-hee Na.
+- bpo-39947: Add :c:func:`PyThreadState_GetInterpreter`: get the interpreter
+ of a Python thread state.
-- bpo-37885: venv: Don't generate unset variable warning on deactivate.
+- bpo-39947: Add :c:func:`PyInterpreterState_Get` function to the limited C
+ API.
-- bpo-37785: Fix xgettext warnings in :mod:`argparse`.
+- bpo-35370: If :c:func:`PySys_Audit` fails in :c:func:`PyEval_SetProfile`
+ or :c:func:`PyEval_SetTrace`, log the error as an unraisable exception.
-- bpo-11953: Completing WSA* error codes in :mod:`socket`.
+- bpo-39947: Move the static inline function flavor of
+ Py_EnterRecursiveCall() and Py_LeaveRecursiveCall() to the internal C API:
+ they access PyThreadState attributes. The limited C API provides regular
+ functions which hide implementation details.
-- bpo-37424: Fixes a possible hang when using a timeout on
- `subprocess.run()` while capturing output. If the child process spawned
- its own children or otherwise connected its stdout or stderr handles with
- another process, we could hang after the timeout was reached and our child
- was killed when attempting to read final output from the pipes.
+- bpo-39947: The Py_TRASHCAN_BEGIN_CONDITION and Py_TRASHCAN_END macros no longer
+ access PyThreadState attributes, but call new private _PyTrash_begin() and
+ _PyTrash_end() functions which hide implementation details.
-- bpo-37212: :func:`unittest.mock.call` now preserves the order of keyword
- arguments in repr output. Patch by Karthikeyan Singaravelan.
+- bpo-39884: :c:func:`PyDescr_NewMethod` and :c:func:`PyCFunction_NewEx` now
+ include the method name in the SystemError "bad call flags" error message
+ to ease debug.
-- bpo-37305: Add .webmanifest -> application/manifest+json to list of
- recognized file types and content type headers
+- bpo-39877: Deprecated :c:func:`PyEval_InitThreads` and
+ :c:func:`PyEval_ThreadsInitialized`. Calling :c:func:`PyEval_InitThreads`
+ now does nothing.
-- bpo-21872: Fix :mod:`lzma`: module decompresses data incompletely. When
- decompressing a FORMAT_ALONE format file, and it doesn't have the end
- marker, sometimes the last one to dozens bytes can't be output. Patch by
- Ma Lin.
+- bpo-38249: :c:macro:`Py_UNREACHABLE` is now implemented with
+ ``__builtin_unreachable()`` and analogs in release mode.
-- bpo-37206: Default values which cannot be represented as Python objects no
- longer improperly represented as ``None`` in function signatures.
+- bpo-38643: :c:func:`PyNumber_ToBase` now raises a :exc:`SystemError`
+ instead of crashing when called with invalid base.
-- bpo-12144: Ensure cookies with ``expires`` attribute are handled in
- :meth:`CookieJar.make_cookies`.
+- bpo-39882: The :c:func:`Py_FatalError` function is replaced with a macro
+ which logs automatically the name of the current function, unless the
+ ``Py_LIMITED_API`` macro is defined.
-- bpo-31163: pathlib.Path instance's rename and replace methods now return
- the new Path instance.
+- bpo-39824: Extension modules: :c:member:`~PyModuleDef.m_traverse`,
+ :c:member:`~PyModuleDef.m_clear` and :c:member:`~PyModuleDef.m_free`
+ functions of :c:type:`PyModuleDef` are no longer called if the module
+ state was requested but is not allocated yet. This is the case immediately
+ after the module is created and before the module is executed
+ (:c:data:`Py_mod_exec` function). More precisely, these functions are not
+ called if :c:member:`~PyModuleDef.m_size` is greater than 0 and the module
+ state (as returned by :c:func:`PyModule_GetState`) is ``NULL``.
-- bpo-25068: :class:`urllib.request.ProxyHandler` now lowercases the keys of
- the passed dictionary.
+ Extension modules without module state (``m_size <= 0``) are not affected.
-- bpo-21315: Email headers containing RFC2047 encoded words are parsed
- despite the missing whitespace, and a defect registered. Also missing
- trailing whitespace after encoded words is now registered as a defect.
+- bpo-38913: Fixed segfault in ``Py_BuildValue()`` called with a format
+ containing "#" and undefined PY_SSIZE_T_CLEAN whwn an exception is set.
-- bpo-36250: Ignore ``ValueError`` from ``signal`` with ``interaction`` in
- non-main thread.
+- bpo-38500: Add a private API to get and set the frame evaluation function:
+ add :c:func:`_PyInterpreterState_GetEvalFrameFunc` and
+ :c:func:`_PyInterpreterState_SetEvalFrameFunc` C functions. The
+ :c:type:`_PyFrameEvalFunction` function type now takes a *tstate*
+ parameter.
-- bpo-35168: :attr:`shlex.shlex.punctuation_chars` is now a read-only
- property.
-- bpo-20504: Fixes a bug in :mod:`cgi` module when a multipart/form-data
- request has no `Content-Length` header.
+What's New in Python 3.9.0 alpha 4?
+===================================
-- bpo-34519: Add additional aliases for HP Roman 8. Patch by Michael Osipov.
+*Release date: 2020-02-25*
-Documentation
--------------
+Security
+--------
-- bpo-26868: Fix example usage of :c:func:`PyModule_AddObject` to properly
- handle errors.
+- bpo-39184: Add audit events to functions in `fcntl`, `msvcrt`, `os`,
+ `resource`, `shutil`, `signal` and `syslog`.
-- bpo-36797: Fix a dead link in the distutils API Reference.
+- bpo-39401: Avoid unsafe DLL load at startup on Windows 7 and earlier.
-- bpo-37977: Warn more strongly and clearly about pickle insecurity
+- bpo-39184: Add audit events to command execution functions in os and pty
+ modules.
-- bpo-37937: Mention ``frame.f_trace`` in :func:`sys.settrace` docs.
+Core and Builtins
+-----------------
-- bpo-36260: Add decompression pitfalls to zipfile module documentation.
+- bpo-39382: Fix a use-after-free in the single inheritance path of
+ ``issubclass()``, when the ``__bases__`` of an object has a single
+ reference, and so does its first item. Patch by Yonatan Goldschmidt.
-- bpo-36960: Restructured the :mod:`datetime` docs in the interest of making
- them more user-friendly and improving readability. Patch by Brad Solomon.
+- bpo-39573: Update clinic tool to use :c:func:`Py_IS_TYPE`. Patch by
+ Dong-hee Na.
-- bpo-23460: The documentation for decimal string formatting using the `:g`
- specifier has been updated to reflect the correct exponential notation
- cutoff point. Original patch contributed by Tuomas Suutari.
+- bpo-39619: Enable use of :func:`os.chroot` on HP-UX systems.
-- bpo-35803: Document and test that ``tempfile`` functions may accept a
- :term:`path-like object` for the ``dir`` argument. Patch by Anthony
- Sottile.
+- bpo-39573: Add :c:func:`Py_IS_TYPE` static inline function to check
+ whether the object *o* type is *type*.
-- bpo-33944: Added a note about the intended use of code in .pth files.
+- bpo-39606: Fix regression caused by fix for bpo-39386, that prevented
+ calling ``aclose`` on an async generator that had already been closed or
+ exhausted.
-- bpo-34293: Fix the Doc/Makefile regarding PAPER environment variable and
- PDF builds
+- bpo-39579: Change the ending column offset of `Attribute` nodes
+ constructed in `ast_for_dotted_name` to point at the end of the current
+ node and not at the end of the last `NAME` node.
-Tests
------
+- bpo-1635741: Port _crypt extension module to multiphase initialization
+ (:pep:`489`).
-- bpo-38239: Fix test_gdb for Link Time Optimization (LTO) builds.
+- bpo-1635741: Port _contextvars extension module to multiphase
+ initialization (:pep:`489`).
-- bpo-38275: test_ssl now handles disabled TLS/SSL versions better.
- OpenSSL's crypto policy and run-time settings are recognized and tests for
- disabled versions are skipped. Tests also accept more TLS minimum_versions
- for platforms that override OpenSSL's default with strict settings.
+- bpo-39510: Fix segfault in ``readinto()`` method on closed BufferedReader.
-- bpo-38271: The private keys for test_ssl were encrypted with 3DES in
- traditional PKCS#5 format. 3DES and the digest algorithm of PKCS#5 are
- blocked by some strict crypto policies. Use PKCS#8 format with AES256
- encryption instead.
+- bpo-39502: Fix :func:`time.localtime` on 64-bit AIX to support years
+ before 1902 and after 2038. Patch by M Felt.
-- bpo-38270: test.support now has a helper function to check for
- availibility of a hash digest function. Several tests are refactored avoid
- MD5 and use SHA256 instead. Other tests are marked to use MD5 and skipped
- when MD5 is disabled.
+- bpo-39492: Fix a reference cycle in the C Pickler that was preventing the
+ garbage collection of deleted, pickled objects.
-- bpo-37123: Multiprocessing test test_mymanager() now also expects
- -SIGTERM, not only exitcode 0. BaseManager._finalize_manager() sends
- SIGTERM to the manager process if it takes longer than 1 second to stop,
- which happens on slow buildbots.
+- bpo-39453: Fixed a possible crash in :meth:`list.__contains__` when a list
+ is changed during comparing items. Patch by Dong-hee Na.
-- bpo-38212: Multiprocessing tests: increase
- test_queue_feeder_donot_stop_onexc() timeout from 1 to 60 seconds.
+- bpo-39434: :term:`floor division` of floats now has better performance.
+ Also, the message of :exc:`ZeroDivisionError` for this operation is
+ updated. Patch by Dong-hee Na.
-- bpo-38117: Test with OpenSSL 1.1.1d
+- bpo-1635741: Port _codecs extension module to multiphase initialization
+ (:pep:`489`).
-- bpo-37531: Enhance regrtest multiprocess timeout: write a message when
- killing a worker process, catch popen.kill() and popen.wait() exceptions,
- put a timeout on the second call to popen.communicate().
+- bpo-1635741: Port _bz2 extension module to multiphase initialization
+ (:pep:`489`).
-- bpo-37876: Add tests for ROT-13 codec.
+- bpo-1635741: Port _abc extension module to multiphase initialization
+ (:pep:`489`).
-- bpo-37252: Fix assertions in ``test_close`` and
- ``test_events_mask_overflow`` devpoll tests.
+- bpo-39320: Replace two complex bytecodes for building dicts with two
+ simpler ones. The new bytecodes ``DICT_MERGE`` and ``DICT_UPDATE`` have
+ been added. The old bytecodes ``BUILD_MAP_UNPACK`` and
+ ``BUILD_MAP_UNPACK_WITH_CALL`` have been removed.
-- bpo-34001: Make test_ssl pass with LibreSSL. LibreSSL handles minimum and
- maximum TLS version differently than OpenSSL.
+- bpo-39219: Syntax errors raised in the tokenizer now always set correct
+ "text" and "offset" attributes.
-- bpo-36919: Make ``test_source_encoding.test_issue2301`` implementation
- independent. The test will work now for both CPython and IronPython.
+- bpo-36051: Drop the GIL during large ``bytes.join`` operations. Patch by
+ Bruce Merry.
-- bpo-34596: Fallback to a default reason when :func:`unittest.skip` is
- uncalled. Patch by Naitree Zhu.
+- bpo-38960: Fix DTrace build issues on FreeBSD. Patch by David Carlier.
-Build
------
+- bpo-37207: Speed up calls to ``range()`` by about 30%, by using the PEP
+ 590 ``vectorcall`` calling convention. Patch by Mark Shannon.
-- bpo-38301: In Solaris family, we must be sure to use ``-D_REENTRANT``.
- Patch by Jesús Cea Avión.
+- bpo-36144: :class:`dict` (and :class:`collections.UserDict`) objects now
+ support PEP 584's merge (``|``) and update (``|=``) operators. Patch by
+ Brandt Bucher.
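+
+ A minimal sketch of both operators::
+
+   defaults = {"colour": "red", "size": 10}
+   overrides = {"size": 12}
+
+   merged = defaults | overrides    # {'colour': 'red', 'size': 12}
+   defaults |= overrides            # in-place update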
-- bpo-36210: Update optional extension module detection for AIX. ossaudiodev
- and spwd are not applicable for AIX, and are no longer reported as
- missing. 3rd-party packaging of ncurses (with ASIS support) conflicts with
- officially supported AIX curses library, so configure AIX to use
- libcurses.a. However, skip trying to build _curses_panel.
+- bpo-32856: Optimized the idiom for assigning a temporary variable in
+ comprehensions. Now ``for y in [expr]`` in comprehensions is as fast as a
+ simple assignment ``y = expr``.
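+
+ The idiom in question, with made-up ``f`` and ``data``::
+
+   data = range(5)
+   f = lambda x: x * x
+   result = [y + 1 for x in data for y in [f(x)]]   # y = f(x), once per item
+   print(result)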
- patch by M Felt
+Library
+-------
-- bpo-36002: Locate ``llvm-profdata`` and ``llvm-ar`` binaries using
- ``AC_PATH_TOOL`` rather than ``AC_PATH_TARGET_TOOL``.
+- bpo-30566: Fix :exc:`IndexError` when trying to decode an invalid string
+ with punycode codec.
-- bpo-37936: The :file:`.gitignore` file systematically keeps "rooted", with
- a non-trailing slash, all the rules that are meant to apply to files in a
- specific place in the repo. Previously, when the intended file to ignore
- happened to be at the root of the repo, we'd most often accidentally also
- ignore files and directories with the same name anywhere in the tree.
+- bpo-39649: Remove obsolete check for `__args__` in
+ bdb.Bdb.format_stack_entry.
-- bpo-37936: The :file:`.gitignore` file no longer applies to any files that
- are in fact tracked in the Git repository. Patch by Greg Price.
+- bpo-39648: Expanded :func:`math.gcd` and :func:`math.lcm` to handle
+ multiple arguments.
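+
+ A minimal sketch of the variadic forms::
+
+   import math
+
+   print(math.gcd(12, 18, 24))   # 6
+   print(math.lcm(4, 6, 10))     # 60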
-Windows
--------
+- bpo-39681: Fix a regression where the C pickle module wouldn't allow
+ unpickling from a file-like object that doesn't expose a readinto()
+ method.
-- bpo-38117: Update bundled OpenSSL to 1.1.1d
+- bpo-35950: Raise :exc:`io.UnsupportedOperation` in
+ :meth:`io.BufferedReader.truncate` when it is called on a read-only
+ :class:`io.BufferedReader` instance.
-- bpo-38092: Reduce overhead when using multiprocessing in a Windows virtual
- environment.
+- bpo-39479: Add :func:`math.lcm` function: least common multiple.
-- bpo-38133: Allow py.exe launcher to locate installations from the
- Microsoft Store and improve display of active virtual environments.
+- bpo-39674: Revert "Do not expose abstract collection classes in the
+ collections module" change (bpo-25988). Aliases to ABC like
+ collections.Mapping are kept in Python 3.9 to ease transition from Python
+ 2.7, but will be removed in Python 3.10.
-- bpo-38114: The ``pip.ini`` is no longer included in the Nuget package.
+- bpo-39104: Fix hanging ProcessPoolExecutor on ``shutdown(wait=False)`` when
+ a task has failed pickling.
-- bpo-36634: :func:`os.cpu_count` now returns active processors rather than
- maximum processors.
+- bpo-39627: Fixed TypedDict totality check for inherited keys.
-- bpo-36634: venv activate.bat now works when the existing variables contain
- double quote characters.
+- bpo-39474: Fixed starting position of AST for expressions like ``(a)(b)``,
+ ``(a)[b]`` and ``(a).b``.
-- bpo-38081: Prevent error calling :func:`os.path.realpath` on ``'NUL'``.
+- bpo-21016: The :mod:`pydoc` and :mod:`trace` modules now use the
+ :mod:`sysconfig` module to get the path to the Python standard library, to
+ support uncommon installation path like ``/usr/lib64/python3.9/`` on
+ Fedora. Patch by Jan Matějek.
-- bpo-38087: Fix case sensitivity in test_pathlib and test_ntpath.
+- bpo-39590: collections.deque now holds strong references during
+ deque.__contains__ and deque.count, fixing crashes.
-- bpo-38088: Fixes distutils not finding vcruntime140.dll with only the v142
- toolset installed.
+- bpo-39586: The distutils ``bdist_msi`` command is deprecated in Python
+ 3.9, use ``bdist_wheel`` (wheel packages) instead.
-- bpo-37283: Ensure command-line and unattend.xml setting override
- previously detected states in Windows installer.
-
-- bpo-38030: Fixes :func:`os.stat` failing for block devices on Windows
+- bpo-39595: Improved performance of zipfile.Path for files with a large
+ number of entries. Also improved performance and fixed minor issue as
+ published with `importlib_metadata 1.5
+ `_.
-- bpo-38020: Fixes potential crash when calling :func:`os.readlink` (or
- indirectly through :func:`~os.path.realpath`) on a file that is not a
- supported link.
+- bpo-39350: Fix regression in :class:`fractions.Fraction` if the numerator
+ and/or the denominator is an :class:`int` subclass. The :func:`math.gcd`
+ function is now used to normalize the *numerator* and *denominator*.
+ :func:`math.gcd` always returns an :class:`int` type. Previously, the GCD
+ type depended on *numerator* and *denominator*.
-- bpo-37705: Improve the implementation of ``winerror_to_errno()``.
+- bpo-39567: Added audit for :func:`os.walk`, :func:`os.fwalk`,
+ :meth:`pathlib.Path.glob` and :meth:`pathlib.Path.rglob`.
-- bpo-37702: Fix memory leak on Windows in creating an SSLContext object or
- running urllib.request.urlopen('https://...').
+- bpo-39559: Remove unused, undocumented argument ``getters`` from
+ :func:`uuid.getnode`
-- bpo-37445: Include the ``FORMAT_MESSAGE_IGNORE_INSERTS`` flag in
- ``FormatMessageW()`` calls.
+- bpo-38149: :func:`sys.audit` is now called only once per call of
+ :func:`glob.glob` and :func:`glob.iglob`.
-- bpo-37380: Don't collect unfinished processes with ``subprocess._active``
- on Windows to cleanup later. Patch by Ruslan Kuprieiev.
+- bpo-39546: Fix a regression in :class:`~argparse.ArgumentParser` where
+ ``allow_abbrev=False`` was ignored for long options that used a prefix
+ character other than "-".
-- bpo-32587: Make :data:`winreg.REG_MULTI_SZ` support zero-length strings.
+- bpo-39450: Stripped whitespace from the docstring before returning it from
+ :func:`unittest.case.shortDescription`.
-macOS
------
+- bpo-12915: A new function ``resolve_name`` has been added to the
+ ``pkgutil`` module. This resolves a string of the form ``'a.b.c.d'`` or
+ ``'a.b:c.d'`` to an object. In the example, ``a.b`` is a package/module
+ and ``c.d`` is an object within that package/module reached via recursive
+ attribute access.
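+
+ A minimal usage sketch::
+
+   import pkgutil
+
+   join = pkgutil.resolve_name("os.path:join")   # same as "os.path.join"
+   print(join("a", "b"))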
-- bpo-38117: Updated OpenSSL to 1.1.1d in macOS installer.
+- bpo-39353: The :func:`binascii.crc_hqx` function is no longer deprecated.
-- bpo-38089: Move Azure Pipelines to latest VM versions and make macOS tests
- optional
+- bpo-39493: Mark ``typing.IO.closed`` as a property
-IDLE
-----
+- bpo-39491: Add :data:`typing.Annotated` and ``include_extras`` parameter
+ to :func:`typing.get_type_hints` as part of :pep:`593`. Patch by Till
+ Varoquaux, documentation by Till Varoquaux and Konstantin Kashin.
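+
+ A minimal sketch (the ``"metres"`` metadata is only illustrative)::
+
+   from typing import Annotated, get_type_hints
+
+   def scale(x: Annotated[int, "metres"]) -> int:
+       return 2 * x
+
+   print(get_type_hints(scale))                       # plain hints
+   print(get_type_hints(scale, include_extras=True))  # keeps Annotated metadata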
-- bpo-35379: When exiting IDLE, catch any AttributeError. One happens when
- EditorWindow.close is called twice. Printing a traceback, when IDLE is
- run from a terminal, is useless and annoying.
+- bpo-39485: Fix a bug in :func:`unittest.mock.create_autospec` that would
+ complain about the wrong number of arguments for custom descriptors
+ defined in an extension module returning functions.
-- bpo-38183: To avoid problems, test_idle ignores the user config directory.
- It no longer tries to create or access .idlerc or any files within. Users
- must run IDLE to discover problems with saving settings.
+- bpo-38932: Mock fully resets child objects on reset_mock(). Patch by
+ Vegard Stikbakke
-- bpo-38077: IDLE no longer adds 'argv' to the user namespace when
- initializing it. This bug only affected 3.7.4 and 3.8.0b2 to 3.8.0b4.
+- bpo-39082: Allow AsyncMock to correctly patch static/class methods
-- bpo-38041: Shell restart lines now fill the window width, always start
- with '=', and avoid wrapping unnecessarily. The line will still wrap if
- the included file name is long relative to the width.
+- bpo-39432: Implement the PEP 489 algorithm for non-ASCII "PyInit\_..." symbol
+ names in distutils to make it export the correct init symbol also on
+ Windows.
-- bpo-35771: To avoid occasional spurious test_idle failures on slower
- machines, increase the ``hover_delay`` in test_tooltip.
+- bpo-18819: Omit ``devmajor`` and ``devminor`` fields for non-device files
+ in :mod:`tarfile` archives, enabling bit-for-bit compatibility with GNU
+ ``tar(1)``.
-- bpo-37902: Add mousewheel scrolling for IDLE module, path, and stack
- browsers. Patch by George Zhang.
+- bpo-39349: Added a new *cancel_futures* parameter to
+ :meth:`concurrent.futures.Executor.shutdown` that cancels all pending
+ futures which have not started running, instead of waiting for them to
+ complete before shutting down the executor.
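+
+ A minimal sketch of the new parameter::
+
+   import time
+   from concurrent.futures import ThreadPoolExecutor
+
+   pool = ThreadPoolExecutor(max_workers=1)
+   futures = [pool.submit(time.sleep, 0.5) for _ in range(8)]
+   pool.shutdown(wait=True, cancel_futures=True)   # unstarted futures are cancelled
+   print(sum(f.cancelled() for f in futures))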
-Tools/Demos
------------
+- bpo-39274: ``bool(fractions.Fraction)`` now returns a boolean even if
+ ``numerator != 0`` does not return a boolean (e.g. a numpy number).
-- bpo-37803: pdb's ``--help`` and ``--version`` long options now work.
+- bpo-34793: Remove support for ``with (await asyncio.lock):`` and ``with
+ (yield from asyncio.lock):``. The same is correct for
+ ``asyncio.Condition`` and ``asyncio.Semaphore``.
-- bpo-37064: Add option -k to pathscript.py script: preserve shebang flags.
- Add option -a to pathscript.py script: add flags.
+- bpo-25597: Ensure, if ``wraps`` is supplied to
+ :class:`unittest.mock.MagicMock`, it is used to calculate return values
+ for the magic methods instead of using the default return values. Patch by
+ Karthikeyan Singaravelan.
-C API
------
+- bpo-36350: `inspect.Signature.parameters` and
+ `inspect.BoundArguments.arguments` are now dicts instead of OrderedDicts.
+ Patch contributed by Rémi Lapeyre.
-- bpo-38234: :c:func:`Py_SetPath` now sets :data:`sys.executable` to the
- program full path (:c:func:`Py_GetProgramFullPath`) rather than to the
- program name (:c:func:`Py_GetProgramName`).
+- bpo-35727: Fix sys.exit() and sys.exit(None) exit code propagation when
+ used in multiprocessing.Process.
-- bpo-38234: Python ignored arguments passed to :c:func:`Py_SetPath`,
- :c:func:`Py_SetPythonHome` and :c:func:`Py_SetProgramName`: fix Python
- initialization to use specified arguments.
+- bpo-32173: Add `lazycache` function to `__all__`; use `dict.clear` to
+ clear the cache; refactor the `getline` and `checkcache` functions.
-- bpo-38205: The :c:func:`Py_UNREACHABLE` macro now calls
- :c:func:`Py_FatalError`.
+Documentation
+-------------
-- bpo-37879: Fix subtype_dealloc to suppress the type decref when the base
- type is a C heap type
+- bpo-17422: The language reference now specifies restrictions on class
+ namespaces. Adapted from a patch by Ethan Furman.
+- bpo-39572: Updated documentation of the ``total`` flag of TypedDict.
-What's New in Python 3.8.0 beta 4?
-==================================
+- bpo-39654: In pyclbr doc, update 'class' to 'module' where appropriate and
+ add readmodule comment. Patch by Hakan Çelik.
-*Release date: 2019-08-29*
+- bpo-39153: Clarify refcounting semantics for the following functions:
+ PyObject_SetItem, PyMapping_SetItemString, PyDict_SetItem and
+ PyDict_SetItemString.
-Security
---------
+- bpo-39392: Explain that when filling with turtle, overlap regions may be
+ left unfilled.
-- bpo-34155: Fix parsing of invalid email addresses with more than one ``@``
- (e.g. a@b@c.com.) to not return the part before 2nd ``@`` as valid email
- address. Patch by maxking & jpic.
+- bpo-39369: Update mmap readline method description. The fact that the
+ readline method does update the file position should not be omitted, since
+ omitting it might give the programmer the impression that it doesn't.
-Core and Builtins
------------------
+- bpo-9056: Include subsection in TOC for PDF version of docs.
-- bpo-37947: Adjust correctly the recursion level in the symtable generation
- for named expressions. Patch by Pablo Galindo.
+Tests
+-----
-- bpo-37830: Fixed compilation of :keyword:`break` and :keyword:`continue`
- in the :keyword:`finally` block when the corresponding :keyword:`try`
- block contains :keyword:`return` with a non-constant value.
+- bpo-38325: Skip tests on non-BMP characters of test_winconsoleio.
-- bpo-32912: Reverted :issue:`32912`: emitting :exc:`SyntaxWarning` instead
- of :exc:`DeprecationWarning` for invalid escape sequences in string and
- bytes literals.
+- bpo-39502: Skip test_zipfile.test_add_file_after_2107() if
+ :func:`time.localtime` fails with :exc:`OverflowError`. It is the case on
+ AIX 6.1 for example.
-- bpo-37757: :pep:`572`: As described in the PEP, assignment expressions now
- raise :exc:`SyntaxError` when their interaction with comprehension scoping
- results in an ambiguous target scope.
+Build
+-----
- The ``TargetScopeError`` subclass originally proposed by the PEP has been
- removed in favour of just raising regular syntax errors for the disallowed
- cases.
+- bpo-39489: Remove ``COUNT_ALLOCS`` special build.
-- bpo-36311: Decoding bytes objects larger than 2GiB is faster and no longer
- fails when a multibyte characters spans a chunk boundary.
+Windows
+-------
-- bpo-37433: Fix ``SyntaxError`` indicator printing too many spaces for
- multi-line strings - by Anthony Sottile.
+- bpo-39553: Delete unused code related to SxS manifests.
-- bpo-20523: ``pdb.Pdb`` supports ~/.pdbrc in Windows 7. Patch by Tim Hopper
- and Dan Lidral-Porter.
+- bpo-39439: Honor the Python path when a virtualenv is active on Windows.
-Library
--------
+- bpo-39393: Improve the error message when attempting to load a DLL with
+ unresolved dependencies.
-- bpo-37834: Prevent shutil.rmtree exception when built on non-Windows
- system without fd system call support, like older versions of macOS.
+- bpo-38883: :meth:`~pathlib.Path.home()` and
+ :meth:`~pathlib.Path.expanduser()` on Windows now prefer
+ :envvar:`USERPROFILE` and no longer use :envvar:`HOME`, which is not
+ normally set for regular user accounts. This makes them again behave like
+ :func:`os.path.expanduser`, which was changed to ignore :envvar:`HOME` in
+ 3.8, see :issue:`36264`.
-- bpo-37965: Fix C compiler warning caused by
- distutils.ccompiler.CCompiler.has_function.
+- bpo-39185: The build.bat script has additional options for very-quiet
+ output (-q) and very-verbose output (-vv).
-- bpo-37960: ``repr()`` of buffered and text streams now silences only
- expected exceptions when get the value of "name" and "mode" attributes.
+IDLE
+----
-- bpo-37951: Most features of the subprocess module now work again in
- subinterpreters. Only *preexec_fn* is restricted in subinterpreters.
+- bpo-39663: Add tests for pyparse find_good_parse_start().
-- bpo-36205: Fix the rusage implementation of time.process_time() to
- correctly report the sum of the system and user CPU time.
+- bpo-39600: In the font configuration window, remove duplicated font names.
-- bpo-37950: Fix :func:`ast.dump` when call with incompletely initialized
- node.
+- bpo-30780: Add remaining configdialog tests for buttons and highlights and
+ keys tabs.
-- bpo-34679: Restores instantiation of Windows IOCP event loops from the
- non-main thread.
+- bpo-39388: IDLE Settings Cancel button now cancels pending changes.
-- bpo-36917: Add default implementation of the
- :meth:`ast.NodeVisitor.visit_Constant` method which emits a deprecation
- warning and calls corresponding methody ``visit_Num()``, ``visit_Str()``,
- etc.
+- bpo-38792: Close an IDLE shell calltip if a :exc:`KeyboardInterrupt` or
+ shell restart occurs. Patch by Zackery Spytz.
-- bpo-37798: Update test_statistics.py to verify that the statistics module
- works well for both C and Python implementations. Patch by Dong-hee Na
+C API
+-----
-- bpo-26589: Added a new status code to the http module: 451
- UNAVAILABLE_FOR_LEGAL_REASONS
+- bpo-35081: Move the ``bytes_methods.h`` header file to the internal C API
+ as ``pycore_bytes_methods.h``: it only contains private symbols (prefixed
+ by ``_Py``), except for the ``PyDoc_STRVAR_shared()`` macro.
-- bpo-37915: Fix a segmentation fault that appeared when comparing instances
- of ``datetime.timezone`` and ``datetime.tzinfo`` objects. Patch by Pablo
- Galindo.
+- bpo-35081: Move the ``dtoa.h`` header file to the internal C API as
+ ``pycore_dtoa.h``: it only contains private functions (prefixed by
+ ``_Py``). The :mod:`math` and :mod:`cmath` modules must now be compiled
+ with the ``Py_BUILD_CORE`` macro defined.
-- bpo-37868: Fix dataclasses.is_dataclass when given an instance that never
- raises AttributeError in __getattr__. That is, an object that returns
- something for __dataclass_fields__ even if it's not a dataclass.
+- bpo-39573: Add :c:func:`Py_SET_SIZE` function to set the size of an
+ object.
-- bpo-37811: Fix ``socket`` module's ``socket.connect(address)`` function
- being unable to establish connection in case of interrupted system call.
- The problem was observed on all OSes which ``poll(2)`` system call can
- take only non-negative integers and -1 as a timeout value.
+- bpo-39500: :c:func:`PyUnicode_IsIdentifier` does not call
+ :c:func:`Py_FatalError` anymore if the string is not ready.
-- bpo-21131: Fix ``faulthandler.register(chain=True)`` stack. faulthandler
- now allocates a dedicated stack of ``SIGSTKSZ*2`` bytes, instead of just
- ``SIGSTKSZ`` bytes. Calling the previous signal handler in faulthandler
- signal handler uses more than ``SIGSTKSZ`` bytes of stack memory on some
- platforms.
+- bpo-39573: Add :c:func:`Py_SET_TYPE` function to set the type of an
+ object.
-- bpo-37798: Add C fastpath for statistics.NormalDist.inv_cdf() Patch by
- Dong-hee Na
+- bpo-39573: Add a :c:func:`Py_SET_REFCNT` function to set the reference
+ counter of an object.
-- bpo-37819: Add Fraction.as_integer_ratio() to match the corresponding
- methods in bool, int, float, and decimal.
+- bpo-39542: Convert :c:func:`PyType_HasFeature`, :c:func:`PyType_Check` and
+ :c:func:`PyType_CheckExact` macros to static inline functions.
-- bpo-37810: Fix :mod:`difflib` ``?`` hint in diff output when dealing with
- tabs. Patch by Anthony Sottile.
+- bpo-39542: In the limited C API, ``PyObject_INIT()`` and
+ ``PyObject_INIT_VAR()`` are now defined as aliases to
+ :c:func:`PyObject_Init` and :c:func:`PyObject_InitVar` to make their
+ implementation opaque. This avoids leaking implementation details in the
+ limited C API. Exclude the following functions from the limited C API:
+ ``_Py_NewReference()``, ``_Py_ForgetReference()``,
+ ``_PyTraceMalloc_NewReference()`` and ``_Py_GetRefTotal()``.
-- bpo-37772: In ``zipfile.Path``, when adding implicit dirs, ensure that
- ancestral directories are added and that duplicates are excluded.
+- bpo-39542: Exclude trashcan mechanism from the limited C API: it requires
+ access to PyTypeObject and PyThreadState structure fields, whereas these
+ structures are opaque in the limited C API.
-- bpo-28292: Mark calendar.py helper functions as being private. The
- follows PEP 8 guidance to maintain the style conventions in the module and
- it addresses a known case of user confusion.
+- bpo-39511: The :c:func:`PyThreadState_Clear` function now calls the
+ :c:member:`PyThreadState.on_delete` callback. Previously, that happened in
+ :c:func:`PyThreadState_Delete`.
-- bpo-18049: Add definition of THREAD_STACK_SIZE for AIX in
- Python/thread_pthread.h The default thread stacksize caused crashes with
- the default recursion limit Patch by M Felt
+- bpo-38076: Clear the interpreter state only after clearing module globals
+ to guarantee that C extensions can access module state during runtime
+ destruction.
-- bpo-37738: Fix the implementation of curses ``addch(str, color_pair)``:
- pass the color pair to ``setcchar()``, instead of always passing 0 as the
- color pair.
+- bpo-39245: The Vectorcall API (PEP 590) was made public, adding the
+ functions ``PyObject_Vectorcall``, ``PyObject_VectorcallMethod``,
+ ``PyVectorcall_Function``, ``PyObject_CallOneArg``,
+ ``PyObject_CallMethodNoArgs``, ``PyObject_CallMethodOneArg``,
+ ``PyObject_FastCallDict``, and the flag ``Py_TPFLAGS_HAVE_VECTORCALL``.
-- bpo-37723: Fix performance regression on regular expression parsing with
- huge character sets. Patch by Yann Vaginay.
-- bpo-32178: Fix IndexError in :mod:`email` package when trying to parse
- invalid address fields starting with ``:``.
+What's New in Python 3.9.0 alpha 3?
+===================================
-- bpo-37685: Fixed comparisons of :class:`datetime.timedelta` and
- :class:`datetime.timezone`.
+*Release date: 2020-01-24*
-- bpo-37695: Correct :func:`curses.unget_wch` error message. Patch by
- Anthony Sottile.
+Core and Builtins
+-----------------
-- bpo-37354: Make Activate.ps1 Powershell script static to allow for signing
- it.
+- bpo-39427: Document all possibilities for the ``-X`` options in the
+ command line help section. Patch by Pablo Galindo.
-- bpo-37664: Update wheels bundled with ensurepip (pip 19.2.3 and setuptools
- 41.2.0)
+- bpo-39421: Fix possible crashes when operating with the functions in the
+ :mod:`heapq` module and custom comparison operators.
-- bpo-37642: Allowed the pure Python implementation of
- :class:`datetime.timezone` to represent sub-minute offsets close to
- minimum and maximum boundaries, specifically in the ranges (23:59, 24:00)
- and (-23:59, 24:00). Patch by Ngalim Siregar
+- bpo-39386: Prevent double awaiting of async iterator.
-- bpo-16970: Adding a value error when an invalid value in passed to nargs
- Patch by Robert Leenders
+- bpo-17005: Add :class:`functools.TopologicalSorter` to the
+ :mod:`functools` module to offer functionality to perform topological
+ sorting of graphs. Patch by Pablo Galindo, Tim Peters and Larry Hastings.
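+
+ A minimal sketch using the import location described in this entry (in
+ later releases the class lives in the ``graphlib`` module)::
+
+     from functools import TopologicalSorter
+
+     # Mapping of node -> set of predecessors that must come first.
+     sorter = TopologicalSorter({"b": {"a"}, "c": {"a", "b"}})
+     print(list(sorter.static_order()))   # ['a', 'b', 'c']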
-- bpo-37587: Make json.loads faster for long strings. (Patch by Marco
- Paolini)
+- bpo-39320: Replace four complex bytecodes for building sequences with
+ three simpler ones.
-- bpo-18378: Recognize "UTF-8" as a valid value for LC_CTYPE in
- locale._parse_localename.
+ The following four bytecodes have been removed:
-- bpo-37531: "python3 -m test -jN --timeout=TIMEOUT" now kills a worker
- process if it runs longer than *TIMEOUT* seconds.
+ * BUILD_LIST_UNPACK
+ * BUILD_TUPLE_UNPACK
+ * BUILD_SET_UNPACK
+ * BUILD_TUPLE_UNPACK_WITH_CALL
-- bpo-37482: Fix serialization of display name in originator or destination
- address fields with both encoded words and special chars.
+ The following three bytecodes have been added:
-- bpo-37372: Fix error unpickling datetime.time objects from Python 2 with
- seconds>=24. Patch by Justin Blanchard.
+ * LIST_TO_TUPLE
+ * LIST_EXTEND
+ * SET_UPDATE
-- bpo-37085: Add the optional Linux SocketCAN Broadcast Manager constants,
- used as flags to configure the BCM behaviour, in the socket module. Patch
- by Karl Ding.
+- bpo-39336: Import loaders which publish immutable module objects can now
+ publish immutable packages in addition to individual modules.
-- bpo-36871: Ensure method signature is used instead of constructor
- signature of a class while asserting mock object against method calls.
- Patch by Karthikeyan Singaravelan.
+- bpo-39322: Added a new function :func:`gc.is_finalized` to check if an
+ object has been finalized by the garbage collector. Patch by Pablo
+ Galindo.
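+
+ A small sketch mirroring the resurrection pattern from the :mod:`gc` docs
+ (the class name is arbitrary)::
+
+     import gc
+
+     survivor = None
+
+     class Lazarus:
+         def __del__(self):
+             global survivor
+             survivor = self          # resurrect the object while finalizing
+
+     obj = Lazarus()
+     print(gc.is_finalized(obj))      # False: __del__ has not run yet
+     del obj
+     gc.collect()
+     print(gc.is_finalized(survivor)) # True: the finalizer already ran once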
-- bpo-36582: Fix ``UserString.encode()`` to correctly return ``bytes``
- rather than a ``UserString`` instance.
+- bpo-39048: Improve the displayed error message when incorrect types are
+ passed to ``async with`` statements by looking up the :meth:`__aenter__`
+ special method before the :meth:`__aexit__` special method when entering
+ an asynchronous context manager. Patch by Géry Ogam.
-- bpo-34775: Division handling of PurePath now returns NotImplemented
- instead of raising a TypeError when passed something other than an
- instance of str or PurePath. Patch by Roger Aiudi.
+- bpo-39235: Fix AST end location for lone generator expression in function
+ call, e.g. f(i for i in a).
-Documentation
--------------
-
-- bpo-37979: Added a link to dateutil.parser.isoparse in the
- datetime.fromisoformat documentation. Patch by Paul Ganssle
+- bpo-39209: Correctly handle multi-line tokens in interactive mode. Patch
+ by Pablo Galindo.
-- bpo-37759: Beginning edits to Whatsnew 3.8
+- bpo-1635741: Port _json extension module to multiphase initialization
+ (:pep:`489`).
-- bpo-37726: Stop recommending getopt in the tutorial for command line
- argument parsing and promote argparse.
+- bpo-39216: Fix constant folding optimization for positional only arguments
+ - by Anthony Sottile.
-- bpo-37256: Fix wording of arguments for :class:`Request` in
- :mod:`urllib.request`
+- bpo-39215: Fix ``SystemError`` when nested function has annotation on
+ positional-only argument - by Anthony Sottile.
-- bpo-37004: In the documentation for difflib, a note was added explicitly
- warning that the results of SequenceMatcher's ratio method may depend on
- the order of the input strings.
+- bpo-39200: Correct the error message when calling the :func:`min` or
+ :func:`max` with no arguments. Patch by Dong-hee Na.
-- bpo-36487: Make C-API docs clear about what the "main" interpreter is.
+- bpo-39200: Correct the error message when trying to construct
+ :class:`range` objects with no arguments. Patch by Pablo Galindo.
-Tests
------
+- bpo-39166: Fix incorrect line execution reporting in trace functions when
+ tracing the last iteration of asynchronous for loops. Patch by Pablo
+ Galindo.
-- bpo-37805: Add tests for json.dump(..., skipkeys=True). Patch by Dong-hee
- Na.
+- bpo-39114: Fix incorrect line execution reporting in trace functions when
+ tracing exception handlers with name binding. Patch by Pablo Galindo.
-Build
------
+- bpo-39156: Split the COMPARE_OP bytecode instruction into four distinct
+ instructions.
-- bpo-37707: Mark some individual tests to skip when --pgo is used. The
- tests marked increase the PGO task time significantly and likely don't
- help improve optimization of the final executable.
+ * COMPARE_OP for rich comparisons
+ * IS_OP for 'is' and 'is not' tests
+ * CONTAINS_OP for 'in' and 'not in' tests
+ * JUMP_IF_NOT_EXC_MATCH for checking exceptions in 'try-except' statements.
-Windows
--------
+ This improves the clarity of the interpreter and should provide a modest
+ speedup.
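+
+ The split is visible in the disassembly of ordinary comparison
+ expressions, for example::
+
+     import dis
+
+     dis.dis("a < b")        # uses COMPARE_OP
+     dis.dis("a is not b")   # uses IS_OP
+     dis.dis("a not in b")   # uses CONTAINS_OP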
-- bpo-37549: :func:`os.dup` no longer fails for standard streams on Windows
- 7.
+- bpo-38588: Fix possible crashes in dict and list when calling
+ :c:func:`PyObject_RichCompareBool`.
-- bpo-1311: The ``nul`` file on Windows now returns True from
- :func:`~os.path.exists` and a valid result from :func:`os.stat` with
- ``S_IFCHR`` set.
+- bpo-13601: By default, ``sys.stderr`` is now line-buffered, even if
+ ``stderr`` is redirected to a file. You can still make ``sys.stderr``
+ unbuffered by passing the :option:`-u` command-line option or setting the
+ :envvar:`PYTHONUNBUFFERED` environment variable.
-- bpo-9949: Enable support for following symlinks in :func:`os.realpath`.
+ (Contributed by Jendrik Seipp in bpo-13601.)
-- bpo-37834: Treat all name surrogate reparse points on Windows in
- :func:`os.lstat` and other reparse points as regular files in
- :func:`os.stat`.
+- bpo-38610: Fix possible crashes in several list methods by holding strong
+ references to list elements when calling
+ :c:func:`PyObject_RichCompareBool`.
-- bpo-36266: Add the module name in the formatted error message when DLL
- load fail happens during module import in
- ``_PyImport_FindSharedFuncptrWindows()``. Patch by Srinivas Nyayapati.
+- bpo-32021: Include brotli .br encoding in mimetypes encodings_map.
-- bpo-25172: Trying to import the :mod:`crypt` module on Windows will result
- in an :exc:`ImportError` with a message explaining that the module isn't
- supported on Windows. On other platforms, if the underlying ``_crypt``
- module is not available, the ImportError will include a message explaining
- the problem.
+Library
+-------
-- bpo-37778: Fixes the icons used for file associations to the Microsoft
- Store package.
+- bpo-39430: Fixed race condition in lazy imports in :mod:`tarfile`.
-- bpo-37734: Fix use of registry values to launch Python from Microsoft
- Store app.
+- bpo-39413: The :func:`os.unsetenv` function is now also available on
+ Windows.
-- bpo-28269: Replace use of :c:func:`strcasecmp` for the system function
- :c:func:`_stricmp`. Patch by Minmin Gong.
+- bpo-39390: Fixed a regression with the `ignore` callback of
+ :func:`shutil.copytree`. The argument types are now str and List[str]
+ again.
-macOS
------
+- bpo-39395: The :func:`os.putenv` and :func:`os.unsetenv` functions are now
+ always available.
-- bpo-18049: Increase the default stack size of threads from 5MB to 16MB on
- macOS, to match the stack size of the main thread. This avoids crashes on
- deep recursion in threads.
+- bpo-39406: If ``setenv()`` C function is available, :func:`os.putenv` is
+ now implemented with ``setenv()`` instead of ``putenv()``, so Python
+ doesn't have to handle the environment variable memory.
-IDLE
-----
+- bpo-39396: Fix ``math.nextafter(-0.0, +0.0)`` on AIX 7.1.
-- bpo-37824: Properly handle user input warnings in IDLE shell. Cease
- turning SyntaxWarnings into SyntaxErrors.
+- bpo-29435: Allow :func:`tarfile.is_tarfile` to be used with file and
+ file-like objects, like :func:`zipfile.is_zipfile`. Patch by William
+ Woodruff.
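+
+ A short sketch with an in-memory archive (the member name is arbitrary)::
+
+     import io
+     import tarfile
+
+     buf = io.BytesIO()
+     with tarfile.open(fileobj=buf, mode="w") as archive:
+         archive.addfile(tarfile.TarInfo("empty.txt"))   # zero-byte member
+
+     buf.seek(0)
+     print(tarfile.is_tarfile(buf))   # True: file-like objects are now accepted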
-- bpo-37929: IDLE Settings dialog now closes properly when there is no shell
- window.
+- bpo-39377: Removed ``encoding`` option from :func:`json.loads`. It has
+ been deprecated since Python 3.1.
-- bpo-37849: Fixed completions list appearing too high or low when shown
- above the current line.
+- bpo-39389: Write accurate compression level metadata in :mod:`gzip`
+ archives, rather than always signaling maximum compression.
-- bpo-36419: Refactor IDLE autocomplete and improve testing.
+- bpo-39366: The previously deprecated ``xpath()`` and ``xgtitle()`` methods
+ of :class:`nntplib.NNTP` have been removed.
-- bpo-37748: Reorder the Run menu. Put the most common choice, Run Module,
- at the top.
+- bpo-39357: Remove the *buffering* parameter of :class:`bz2.BZ2File`. Since
+ Python 3.0, it was ignored and using it was emitting
+ :exc:`DeprecationWarning`. Pass an open file object, to control how the
+ file is opened. The *compresslevel* parameter becomes keyword-only.
-Tools/Demos
------------
+- bpo-39353: Deprecate binhex4 and hexbin4 standards. Deprecate the
+ :mod:`binhex` module and the following :mod:`binascii` functions:
+ :func:`~binascii.b2a_hqx`, :func:`~binascii.a2b_hqx`,
+ :func:`~binascii.rlecode_hqx`, :func:`~binascii.rledecode_hqx`,
+ :func:`~binascii.crc_hqx`.
-- bpo-37942: Improve ArgumentClinic converter for floats.
+- bpo-39351: Remove ``base64.encodestring()`` and ``base64.decodestring()``,
+ aliases deprecated since Python 3.1: use :func:`base64.encodebytes` and
+ :func:`base64.decodebytes` instead.
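+
+ The suggested replacements in use::
+
+     import base64
+
+     encoded = base64.encodebytes(b"hello")    # was base64.encodestring()
+     print(encoded)                            # b'aGVsbG8=\n'
+     print(base64.decodebytes(encoded))        # was base64.decodestring(); b'hello'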
-- bpo-37034: Argument Clinic now uses the argument name on errors with
- keyword-only argument instead of their position. Patch contributed by Rémi
- Lapeyre.
+- bpo-39350: Remove ``fractions.gcd()`` function, deprecated since Python
+ 3.5 (:issue:`22486`): use :func:`math.gcd` instead.
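+
+ The suggested replacement::
+
+     import math
+
+     print(math.gcd(12, 18))   # 6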
-C API
------
+- bpo-39329: :class:`~smtplib.LMTP` constructor now has an optional
+ *timeout* parameter. Patch by Dong-hee Na.
-- bpo-36763: Options added by ``PySys_AddXOption()`` are now handled the
- same way than ``PyConfig.xoptions`` and command line ``-X`` options.
+- bpo-39313: Add a new ``exec_function`` option (*--exec-function* in the
+ CLI) to ``RefactoringTool`` for making ``exec`` a function. Patch by
+ Batuhan Taskaya.
-- bpo-37926: Fix a crash in ``PySys_SetArgvEx(0, NULL, 0)``.
+- bpo-39259: :class:`~ftplib.FTP` and :class:`~ftplib.FTP_TLS` now raise
+ a :class:`ValueError` if the given timeout for their constructor is zero
+ to prevent the creation of a non-blocking socket. Patch by Dong-hee Na.
+- bpo-39259: :class:`~smtplib.SMTP` and :class:`~smtplib.SMTP_SSL` now raise
+ a :class:`ValueError` if the given timeout for their constructor is zero
+ to prevent the creation of a non-blocking socket. Patch by Dong-hee Na.
-What's New in Python 3.8.0 beta 3?
-==================================
+- bpo-39310: Add :func:`math.ulp`: return the value of the least significant
+ bit of a float.
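+
+ For example, on IEEE 754 doubles::
+
+     import math
+
+     print(math.ulp(1.0))               # 2.220446049250313e-16
+     print(1.0 + math.ulp(1.0) > 1.0)   # True: the smallest step up from 1.0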
-*Release date: 2019-07-29*
+- bpo-39297: Improved performance of importlib.metadata distribution
+ discovery and resilience to inaccessible sys.path entries
+ (importlib_metadata v1.4.0).
-Security
---------
+- bpo-39259: :class:`~nntplib.NNTP` and :class:`~nntplib.NNTP_SSL` now raise
+ a :class:`ValueError` if the given timeout for their constructor is zero
+ to prevent the creation of a non-blocking socket. Patch by Dong-hee Na.
-- bpo-37461: Fix an infinite loop when parsing specially crafted email
- headers. Patch by Abhilash Raj.
+- bpo-38901: When you specify prompt='.' (or, equivalently, python -m venv
+ --prompt . ...), the basename of the current directory is used to set the
+ created venv's prompt when it's activated.
-Core and Builtins
------------------
+- bpo-39288: Add :func:`math.nextafter`: return the next floating-point
+ value after *x* towards *y*.
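+
+ For example::
+
+     import math
+
+     print(math.nextafter(1.0, 2.0))   # 1.0000000000000002 (towards 2.0)
+     print(math.nextafter(1.0, 0.0))   # 0.9999999999999999 (towards 0.0)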
-- bpo-37593: Swap the positions of the *posonlyargs* and *args* parameters
- in the constructor of :class:`ast.parameters` nodes.
+- bpo-39259: :class:`~poplib.POP3` and :class:`~poplib.POP3_SSL` now raise a
+ :class:`ValueError` if the given timeout for their constructor is zero to
+ prevent the creation of a non-blocking socket. Patch by Dong-hee Na.
-- bpo-36974: Implemented separate vectorcall functions for every calling
- convention of builtin functions and methods. This improves performance for
- calls.
+- bpo-39242: Updated the Gmane domain from news.gmane.org to news.gmane.io
+ which is used for examples of :class:`~nntplib.NNTP` news reader server
+ and nntplib tests.
-Library
--------
+- bpo-35292: Proxy `SimpleHTTPRequestHandler.guess_type` to
+ `mimetypes.guess_type` so that `mimetypes.init` is called lazily, avoiding
+ unnecessary costs when the :mod:`http.server` module is imported.
-- bpo-37697: Syncronize ``importlib.metadata`` with `importlib_metadata 0.19
- `_,
- improving handling of EGG-INFO files and fixing a crash when entry point
- names contained colons.
+- bpo-39239: The :meth:`select.epoll.unregister` method no longer ignores
+ the :data:`~errno.EBADF` error.
-- bpo-37691: Let math.dist() accept coordinates as sequences (or iterables)
- rather than just tuples.
+- bpo-38907: In http.server script, restore binding to IPv4 on Windows.
-- bpo-37664: Update wheels bundled with ensurepip (pip 19.2.1 and setuptools
- 41.0.1)
+- bpo-39152: Fix ttk.Scale.configure([name]) to return configuration tuple
+ for name or all options. Giovanni Lombardo contributed part of the patch.
-- bpo-36324: Make internal attributes for statistics.NormalDist() private.
+- bpo-39198: If an exception were to be thrown in `Logger.isEnabledFor`
+ (say, by asyncio timeouts or stopit), the `logging` global lock may not be
+ released appropriately, resulting in deadlock. This change wraps that
+ block of code with `try...finally` to ensure the lock is released.
-- bpo-37491: Fix ``IndexError`` when parsing email headers with unexpectedly
- ending bare-quoted string value. Patch by Abhilash Raj.
+- bpo-39191: Perform a check for a running loop before starting a new task
+ in ``loop.run_until_complete()`` to fail fast; this prevents the side
+ effect of spawning a new task before the exception is raised.
-- bpo-37579: Return :exc:`NotImplemented` in Python implementation of
- ``__eq__`` for :class:`~datetime.timedelta` and :class:`~datetime.time`
- when the other object being compared is not of the same type to match C
- implementation. Patch by Karthikeyan Singaravelan.
+- bpo-38871: Correctly parenthesize filter-based statements that contain
+ lambda expressions in :mod:`lib2to3`. Patch by Dong-hee Na.
-- bpo-21478: Record calls to parent when autospecced object is attached to a
- mock using :func:`unittest.mock.attach_mock`. Patch by Karthikeyan
- Singaravelan.
+- bpo-39142: A change was made to logging.config.dictConfig to avoid
+ converting instances of named tuples to ConvertingTuple. It's assumed that
+ named tuples are too specialised to be treated like ordinary tuples; if a
+ user of named tuples requires ConvertingTuple functionality, they will
+ have to implement that themselves in their named tuple class.
-- bpo-37502: pickle.loads() no longer raises TypeError when the buffers
- argument is set to None
+- bpo-39158: ast.literal_eval() now supports empty sets.
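+
+ For example::
+
+     import ast
+
+     print(ast.literal_eval("set()"))       # set(): now accepted
+     print(ast.literal_eval("{1, 2, 3}"))   # non-empty sets already worked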
-- bpo-37520: Correct behavior for zipfile.Path.parent when the path object
- identifies a subdirectory.
+- bpo-39129: Fix import path for ``asyncio.TimeoutError``
-- bpo-18374: Fix the ``.col_offset`` attribute of nested :class:`ast.BinOp`
- instances which had a too large value in some situations.
+- bpo-39057: :func:`urllib.request.proxy_bypass_environment` now ignores
+ leading dots and no longer ignores a trailing newline.
-- bpo-37421: Fix :func:`multiprocessing.util.get_temp_dir` finalizer: clear
- also the 'tempdir' configuration of the current process, so next call to
- ``get_temp_dir()`` will create a new temporary directory, rather than
- reusing the removed temporary directory.
+- bpo-39056: Fixed handling of an invalid warning category in the -W
+ option. The re module is no longer imported if it is not needed.
-- bpo-37481: The distutils ``bdist_wininst`` command is deprecated in Python
- 3.8, use ``bdist_wheel`` (wheel packages) instead.
+- bpo-39055: :func:`base64.b64decode` with ``validate=True`` now raises a
+ binascii.Error if the input ends with a single ``\n``.
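+
+ A short illustration of the stricter behaviour::
+
+     import base64
+     import binascii
+
+     try:
+         base64.b64decode(b"aGVsbG8=\n", validate=True)
+     except binascii.Error as exc:
+         print("rejected:", exc)              # trailing newline now rejected
+
+     print(base64.b64decode(b"aGVsbG8=\n"))   # b'hello' without validate=True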
-- bpo-26967: An :class:`~argparse.ArgumentParser` with
- ``allow_abbrev=False`` no longer disables grouping of short flags, such as
- ``-vv``, but only disables abbreviation of long flags as documented. Patch
- by Zac Hatfield-Dodds.
+- bpo-21600: Fix :func:`mock.patch.stopall` to stop active patches that were
+ created with :func:`mock.patch.dict`.
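+
+ A small sketch (``DEMO_FLAG`` is a made-up environment variable)::
+
+     import os
+     from unittest import mock
+
+     mock.patch.dict(os.environ, {"DEMO_FLAG": "1"}).start()
+     print(os.environ.get("DEMO_FLAG"))   # '1' while the patch is active
+
+     mock.patch.stopall()                 # now also stops patch.dict() patches
+     print(os.environ.get("DEMO_FLAG"))   # None again (assuming it was unset before)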
-- bpo-37347: :meth:`sqlite3.Connection.create_aggregate`,
- :meth:`sqlite3.Connection.create_function`,
- :meth:`sqlite3.Connection.set_authorizer`,
- :meth:`sqlite3.Connection.set_progress_handler`
- :meth:`sqlite3.Connection.set_trace_callback` methods lead to segfaults if
- some of these methods are called twice with an equal object but not the
- same. Now callbacks are stored more carefully. Patch by Aleksandr Balezin.
+- bpo-39019: Implement dummy ``__class_getitem__`` for
+ :class:`tempfile.SpooledTemporaryFile`.
-- bpo-36564: Fix infinite loop in email header folding logic that would be
- triggered when an email policy's max_line_length is not long enough to
- include the required markup and any values in the message. Patch by Paul
- Ganssle
+- bpo-39019: Implement dummy ``__class_getitem__`` for ``subprocess.Popen``
+ and ``subprocess.CompletedProcess``.
-Documentation
--------------
+- bpo-38914: Adjusted the wording of the warning issued by distutils'
+ ``check`` command when the ``author`` and ``maintainer`` fields are
+ supplied but no corresponding e-mail field (``author_email`` or
+ ``maintainer_email``) is found. The wording now reflects the fact that
+ these fields are suggested, but not required. Patch by Juergen Gmach.
-- bpo-32910: Remove implementation-specific behaviour of how venv's
- Deactivate works.
+- bpo-38878: Fixed __subclasshook__ of :class:`os.PathLike` to return a
+ correct result upon inheritance. Patch by Bar Harel.
-- bpo-37284: Add a brief note to indicate that any new
- ``sys.implementation`` required attributes must go through the PEP
- process.
+- bpo-38615: :class:`~imaplib.IMAP4` and :class:`~imaplib.IMAP4_SSL` now
+ have an optional *timeout* parameter for their constructors. Also, the
+ :meth:`~imaplib.IMAP4.open` method now has an optional *timeout* parameter
+ with this change. The overridden methods of :class:`~imaplib.IMAP4_SSL`
+ and :class:`~imaplib.IMAP4_stream` were updated accordingly. Patch by
+ Dong-hee Na.
-- bpo-30088: Documented that :class:`mailbox.Maildir` constructor doesn't
- attempt to verify the maildir folder layout correctness. Patch by
- Sviatoslav Sydorenko.
+- bpo-35182: Fixed a crash in subsequent calls to :func:`Popen.communicate`
+ when the child process has already closed any piped standard stream but is
+ still running. Patch by Andriy Maletsky.
-- bpo-37521: Fix `importlib` examples to insert any newly created modules
- via importlib.util.module_from_spec() immediately into sys.modules instead
- of after calling loader.exec_module().
+- bpo-38630: On Unix, :meth:`subprocess.Popen.send_signal` now polls the
+ process status. Polling reduces the risk of sending a signal to the wrong
+ process if the process completed, the :attr:`subprocess.Popen.returncode`
+ attribute is still ``None``, and the pid has been reassigned (recycled) to
+ a new different process.
- Thanks to Benjamin Mintz for finding the bug.
+- bpo-38536: Removes the trailing space in currency formatted with
+ `international=True` and a locale whose symbol follows the value. E.g.
+ `locale.currency(12.34, international=True)` returned `'12,34 EUR '`
+ instead of `'12,34 EUR'`.
-- bpo-37456: Slash ('/') is now part of syntax.
+- bpo-38473: Use signature from inner mock for autospecced methods attached
+ with :func:`unittest.mock.attach_mock`. Patch by Karthikeyan Singaravelan.
-- bpo-37487: Fix PyList_GetItem index description to include 0.
+- bpo-38361: Fixed an issue where ``ident`` could include a leading path
+ separator when :func:`syslog.openlog` was called without arguments.
-- bpo-37149: Replace the dead link to the Tkinter 8.5 reference by John
- Shipman, New Mexico Tech, with a link to the archive.org copy.
+- bpo-38293: Add :func:`copy.copy` and :func:`copy.deepcopy` support to
+ :func:`property` objects.
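+
+ A minimal sketch (the getter and docstring are arbitrary)::
+
+     import copy
+
+     prop = property(lambda obj: 42, doc="answer")
+     clone = copy.deepcopy(prop)      # now supported; copy.copy() works too
+     print(clone.__doc__)             # 'answer'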
-- bpo-37478: Added possible exceptions to the description of os.chdir().
+- bpo-37958: Added the pstats.Stats.get_profile_dict() method to return the
+ profile data as a StatsProfile instance.
-Tests
------
+- bpo-28367: Add termios magic constants for the following baud rates:
+ B500000, B576000, B921600, B1000000, B1152000, B1500000, B2000000,
+ B2500000, B3000000, B3500000 and B4000000. Patch by Andrey Smirnov.
-- bpo-37558: Fix test_shared_memory_cleaned_after_process_termination name
- handling
+Documentation
+-------------
-- bpo-37526: Add :func:`test.support.catch_threading_exception`: context
- manager catching :class:`threading.Thread` exception using
- :func:`threading.excepthook`.
+- bpo-39381: Mention in the docs that :func:`asyncio.get_event_loop`
+ implicitly creates a new event loop only if called from the main thread.
-- bpo-37421: test_concurrent_futures now explicitly stops the ForkServer
- instance if it's running.
+- bpo-38918: Add an entry for ``__module__`` in the "function" & "method"
+ sections of the `inspect docs types and members table
+ `_
-- bpo-37421: multiprocessing tests now stop the ForkServer instance if it's
- running: close the "alive" file descriptor to ask the server to stop and
- then remove its UNIX address.
+- bpo-3530: In the :mod:`ast` module documentation, fix a misleading
+ ``NodeTransformer`` example and add advice on when to use the
+ ``fix_missing_locations`` function.
Build
-----
-- bpo-36044: Reduce the number of unit tests run for the PGO generation
- task. This speeds up the task by a factor of about 15x. Running the full
- unit test suite is slow. This change may result in a slightly less
- optimized build since not as many code branches will be executed. If you
- are willing to wait for the much slower build, the old behavior can be
- restored using './configure [..] PROFILE_TASK="-m test --pgo-extended"'.
- We make no guarantees as to which PGO task set produces a faster build.
- Users who care should run their own relevant benchmarks as results can
- depend on the environment, workload, and compiler tool chain.
+- bpo-39395: On non-Windows platforms, the :c:func:`setenv` and
+ :c:func:`unsetenv` functions are now required to build Python.
-Windows
--------
+- bpo-39160: Updated the documentation in `./configure --help` to show
+ default values, reference documentation where required and add additional
+ explanation where needed.
-- bpo-37672: Switch Windows Store package's pip to use bundled
- :file:`pip.ini` instead of :envvar:`PIP_USER` variable.
+- bpo-39144: The ctags and etags build targets both include Modules/_ctypes
+ and Python standard library source files.
IDLE
----
-- bpo-37692: Improve highlight config sample with example shell interaction
- and better labels for shell elements.
+- bpo-39050: Make IDLE Settings dialog Help button work again.
-- bpo-37628: Settings dialog no longer expands with font size.
+- bpo-34118: Tag memoryview, range, and tuple as classes, the same as list,
+ etcetera, in the library manual built-in functions list.
-- bpo-37627: Initialize the Customize Run dialog with the command line
- arguments most recently entered before. The user can optionally edit
- before submitting them.
+- bpo-32989: Add tests for editor newline_and_indent_event method. Remove
+ dead code from pyparse find_good_parse_start method.
-- bpo-33610: Fix code context not showing the correct context when first
- toggled on.
+C API
+-----
-- bpo-37530: Optimize code context to reduce unneeded background activity.
- Font and highlight changes now occur along with text changes instead of
- after a random delay.
+- bpo-39372: Clean header files of interfaces defined but with no
+ implementation. The public API symbols being removed are:
+ ``_PyBytes_InsertThousandsGroupingLocale``,
+ ``_PyBytes_InsertThousandsGrouping``, ``_Py_InitializeFromArgs``,
+ ``_Py_InitializeFromWideArgs``, ``_PyFloat_Repr``, ``_PyFloat_Digits``,
+ ``_PyFloat_DigitsInit``, ``PyFrame_ExtendStack``,
+ ``_PyAIterWrapper_Type``, ``PyNullImporter_Type``, ``PyCmpWrapper_Type``,
+ ``PySortWrapper_Type``, ``PyNoArgsFunction``.
-- bpo-27452: Cleanup ``config.py`` by inlining ``RemoveFile`` and
- simplifying the handling of ``file`` in ``CreateConfigHandlers``.
+- bpo-39164: Add a private ``_PyErr_GetExcInfo()`` function to retrieve
+ exception information of the specified Python thread state.
-- bpo-17535: Add optional line numbers for IDLE editor windows. Windows
- open without line numbers unless set otherwise in the General tab of the
- configuration dialog.
-- bpo-26806: To compensate for stack frames added by IDLE and avoid possible
- problems with low recursion limits, add 30 to limits in the user code
- execution process. Subtract 30 when reporting recursion limits to make
- this addition mostly transparent.
+What's New in Python 3.9.0 alpha 2?
+===================================
-- bpo-36390: Gather Format menu functions into format.py. Combine
- paragraph.py, rstrip.py, and format methods from editor.py.
+*Release date: 2019-12-18*
-Tools/Demos
------------
+Security
+--------
-- bpo-37675: 2to3 now works when run from a zipped standard library.
+- bpo-38945: Newline characters have been escaped when performing uu
+ encoding to prevent them from overflowing into the content section of the
+ encoded file. This prevents malicious or accidental modification of data
+ during the decoding process.
+- bpo-37228: Due to significant security concerns, the *reuse_address*
+ parameter of :meth:`asyncio.loop.create_datagram_endpoint` is no longer
+ supported. This is because of the behavior of ``SO_REUSEADDR`` in UDP. For
+ more details, see the documentation for
+ ``loop.create_datagram_endpoint()``. (Contributed by Kyle Stanley, Antoine
+ Pitrou, and Yury Selivanov in :issue:`37228`.)
-What's New in Python 3.8.0 beta 2?
-==================================
+- bpo-38804: Fixes a ReDoS vulnerability in :mod:`http.cookiejar`. Patch by
+ Ben Caller.
-*Release date: 2019-07-04*
+Core and Builtins
+-----------------
-Security
---------
+- bpo-39028: Slightly improve the speed of keyword argument parsing with
+ many kwargs by strengthening the assumption that kwargs are interned
+ strings.
-- bpo-37363: Adds audit events for the range of supported run commands (see
- :ref:`using-on-general`).
+- bpo-39080: Fix the value of *end_col_offset* for Starred Expression AST
+ nodes when they are among the elements in the *args* attribute of Call AST
+ nodes.
-- bpo-37463: ssl.match_hostname() no longer accepts IPv4 addresses with
- additional text after the address and only quad-dotted notation without
- trailing whitespaces. Some inet_aton() implementations ignore whitespace
- and all data after whitespace, e.g. '127.0.0.1 whatever'.
+- bpo-39031: When parsing an "elif" node, lineno and col_offset of the node
+ now point to the "elif" keyword and not to its condition, making it
+ consistent with the "if" node. Patch by Lysandros Nikolaou.
-- bpo-37363: Adds audit events for :mod:`ensurepip`, :mod:`ftplib`,
- :mod:`glob`, :mod:`imaplib`, :mod:`nntplib`, :mod:`pdb`, :mod:`poplib`,
- :mod:`shutil`, :mod:`smtplib`, :mod:`sqlite3`, :mod:`subprocess`,
- :mod:`telnetlib`, :mod:`tempfile` and :mod:`webbrowser`, as well as
- :func:`os.listdir`, :func:`os.scandir` and :func:`breakpoint`.
+- bpo-20443: In Python 3.9.0a1, sys.argv[0] was made an absolute path if a
+ filename was specified on the command line. Revert this change, since most
+ users expect sys.argv to be unmodified.
-- bpo-37364: :func:`io.open_code` is now used when reading :file:`.pth`
- files.
+- bpo-39008: :c:func:`PySys_Audit` now requires ``Py_ssize_t`` to be used
+ for size arguments in the format string, regardless of whether
+ ``PY_SSIZE_T_CLEAN`` was defined at include time.
-- bpo-34631: Updated OpenSSL to 1.1.1c in Windows installer
+- bpo-38673: In REPL mode, don't switch to PS2 if the line starts with a
+ comment or whitespace. Based on work by Batuhan Taşkaya.
-Core and Builtins
------------------
+- bpo-38922: Calling ``replace`` on a code object now raises the
+ ``code.__new__`` audit event.
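+
+ A sketch of observing the event with an audit hook (hook and function
+ names are arbitrary)::
+
+     import sys
+
+     def audit_hook(event, args):
+         if event == "code.__new__":
+             print("audit:", event)
+
+     sys.addaudithook(audit_hook)
+
+     def greet():
+         return "hi"
+
+     greet.__code__.replace(co_name="salute")   # fires code.__new__ per this entry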
-- bpo-37467: Fix :func:`sys.excepthook` and :c:func:`PyErr_Display` if a
- filename is a bytes string. For example, for a SyntaxError exception where
- the filename attribute is a bytes string.
+- bpo-38920: Add audit hooks for when :func:`sys.excepthook` and
+ :func:`sys.unraisablehook` are invoked.
-- bpo-37417: :meth:`bytearray.extend` now correctly handles errors that
- arise during iteration. Patch by Brandt Bucher.
+- bpo-38892: Improve documentation for audit events table and functions.
-- bpo-24214: Improved support of the surrogatepass error handler in the
- UTF-8 and UTF-16 incremental decoders.
+- bpo-38852: Set the thread stack size to 8 MB for debug builds on Android
+ platforms.
-- bpo-35224: Reverse evaluation order of key: value in dict comprehensions
- as proposed in PEP 572. I.e. in ``{k: v for ...}``, ``k`` will be
- evaluated before ``v``.
+- bpo-38858: Each Python subinterpreter now has its own "small integer
+ singletons": numbers in [-5; 257] range. It is no longer possible to
+ change the number of small integers at build time by overriding
+ ``NSMALLNEGINTS`` and ``NSMALLPOSINTS`` macros: macros should now be
+ modified manually in ``pycore_pystate.h`` header file.
-- bpo-37316: Fix the :c:func:`PySys_Audit` call in :class:`mmap.mmap`.
+- bpo-36854: The garbage collector state becomes per interpreter
+ (``PyInterpreterState.gc``), rather than being global
+ (``_PyRuntimeState.gc``).
-- bpo-37269: Fix a bug in the peephole optimizer that was not treating
- correctly constant conditions with binary operators. Patch by Pablo
- Galindo.
+- bpo-38835: The ``PyFPE_START_PROTECT()`` and ``PyFPE_END_PROTECT()``
+ macros are empty: they have been doing nothing for the last year, so stop
+ using them.
-- bpo-37213: Handle correctly negative line offsets in the peephole
- optimizer. Patch by Pablo Galindo.
+- bpo-38328: Sped up the creation time of constant :class:`list` and
+ :class:`set` displays. Patch by Brandt Bucher.
-- bpo-37219: Remove errorneous optimization for empty set differences.
+- bpo-38707: ``MainThread.native_id`` is now correctly reset in child
+ processes spawned using :class:`multiprocessing.Process`, instead of
+ retaining the parent's value.
-- bpo-36922: Slot functions optimize any callable with
- ``Py_TPFLAGS_METHOD_DESCRIPTOR`` instead of only instances of
- ``function``.
+- bpo-38629: Added ``__floor__`` and ``__ceil__`` methods to the float
+ object. Patch by Batuhan Taşkaya.
-- bpo-36974: The slot ``tp_vectorcall_offset`` is inherited unconditionally
- to support ``super().__call__()`` when the base class uses vectorcall.
+- bpo-27145: int + int and int - int operators can now return small integer
+ singletons. Patch by hongweipeng.
-- bpo-37160: :func:`threading.get_native_id` now also supports NetBSD.
+- bpo-38021: Provide a platform tag for AIX that is sufficient for PEP425
+ binary distribution identification. Patch by Michael Felt.
-- bpo-37077: Add :func:`threading.get_native_id` support for AIX. Patch by
- M. Felt
+- bpo-35409: Ignore GeneratorExit exceptions when throwing an exception into
+ the aclose coroutine of an asynchronous generator.
+
+- bpo-33387: Removed WITH_CLEANUP_START, WITH_CLEANUP_FINISH, BEGIN_FINALLY,
+ END_FINALLY, CALL_FINALLY and POP_FINALLY bytecodes. Replaced with RERAISE
+ and WITH_EXCEPT_START bytecodes. The compiler now generates different
+ code for exceptional and non-exceptional branches for 'with' and
+ 'try-except' statements. For 'try-finally' statements the 'finally' block
+ is replicated for each exit from the 'try' body.
Library
-------
-- bpo-37440: http.client now enables TLS 1.3 post-handshake authentication
- for default context or if a cert_file is passed to HTTPSConnection.
+- bpo-39033: Fix :exc:`NameError` in :mod:`zipimport`. Patch by Karthikeyan
+ Singaravelan.
-- bpo-37437: Update vendorized expat version to 2.2.7.
+- bpo-39022: Update importlib.metadata to include improvements from
+ importlib_metadata 1.3 including better serialization of EntryPoints and
+ improved documentation for custom finders.
-- bpo-37428: SSLContext.post_handshake_auth = True no longer sets
- SSL_VERIFY_POST_HANDSHAKE verify flag for client connections. Although the
- option is documented as ignored for clients, OpenSSL implicitly enables
- cert chain validation when the flag is set.
+- bpo-39006: Fix asyncio when the ssl module is missing: only check for
+ ssl.SSLSocket instance if the ssl module is available.
-- bpo-37420: :func:`os.sched_setaffinity` now correctly handles errors that
- arise during iteration over its ``mask`` argument. Patch by Brandt Bucher.
+- bpo-38708: Fix a potential IndexError in email parser when parsing an
+ empty msg-id.
-- bpo-37412: The :func:`os.getcwdb` function now uses the UTF-8 encoding on
- Windows, rather than the ANSI code page: see :pep:`529` for the rationale.
- The function is no longer deprecated on Windows.
+- bpo-38698: Add a new ``InvalidMessageID`` token to email parser to
+ represent invalid Message-ID headers. Also, add defects when there is
+ remaining value after parsing the header.
-- bpo-29412: Fix IndexError in parsing a header value ending unexpectedly.
- Patch by Abhilash Raj.
+- bpo-38994: Implement ``__class_getitem__`` for ``os.PathLike``,
+ ``pathlib.Path``.
-- bpo-36546: The *dist* argument for statistics.quantiles() is now
- positional only. The current name doesn't reflect that the argument can be
- either a dataset or a distribution. Marking the parameter as positional
- avoids confusion and makes it possible to change the name later.
+- bpo-38979: Return class from ``ContextVar.__class_getitem__`` to simplify
+ subclassing.
-- bpo-37394: Fix a bug that was causing the :mod:`queue` module to fail if
- the accelerator module was not available. Patch by Pablo Galindo.
+- bpo-38978: Implement ``__class_getitem__`` on asyncio objects (Future,
+ Task, Queue). Patch by Batuhan Taskaya.
-- bpo-33972: Email with single part but content-type set to ``multipart/*``
- doesn't raise AttributeError anymore.
+- bpo-38916: :class:`array.array`: Remove ``tostring()`` and
+ ``fromstring()`` methods. They were aliases to ``tobytes()`` and
+ ``frombytes()``, deprecated since Python 3.2.
-- bpo-37280: Use threadpool for reading from file for sendfile fallback
- mode.
+- bpo-38986: Make the repr of the C accelerated TaskWakeupMethWrapper the
+ same as that of the pure Python version.
-- bpo-37279: Fix asyncio sendfile support when sendfile sends extra data in
- fallback mode.
+- bpo-38982: Fix asyncio ``PidfdChildWatcher``: handle ``waitpid()`` error.
+ If ``waitpid()`` is called elsewhere, ``waitpid()`` call fails with
+ :exc:`ChildProcessError`: use return code 255 in this case, and log a
+ warning. It ensures that the pidfd file descriptor is closed if this error
+ occurs.
-- bpo-19865: :func:`ctypes.create_unicode_buffer()` now also supports
- non-BMP characters on platforms with 16-bit :c:type:`wchar_t` (for
- example, Windows and AIX).
+- bpo-38529: Drop too noisy asyncio warning about deletion of a stream
+ without explicit ``.close()`` call.
-- bpo-37210: Allow pure Python implementation of :mod:`pickle` to work even
- when the C :mod:`_pickle` module is unavailable.
+- bpo-27413: Added ability to pass through ``ensure_ascii`` options to
+ json.dumps in the ``json.tool`` command-line interface.
-- bpo-35922: Fix :meth:`RobotFileParser.crawl_delay` and
- :meth:`RobotFileParser.request_rate` to return ``None`` rather than raise
- :exc:`AttributeError` when no relevant rule is defined in the robots.txt
- file. Patch by Rémi Lapeyre.
+- bpo-38634: The :mod:`readline` module now detects if Python is linked to
+ libedit at runtime on all platforms. Previously, the check was only done
+ on macOS.
-- bpo-35766: Change the format of feature_version to be a (major, minor)
- tuple.
+- bpo-33684: Fix ``json.tool`` failing to read a JSON file with non-ASCII
+ characters when the locale encoding is not UTF-8.
-- bpo-36607: Eliminate :exc:`RuntimeError` raised by
- :func:`asyncio.all_tasks()` if internal tasks weak set is changed by
- another thread during iteration.
+- bpo-38698: Prevent UnboundLocalError from popping up in parse_message_id.
-- bpo-18748: :class:`_pyio.IOBase` destructor now does nothing if getting
- the ``closed`` attribute fails to better mimick :class:`_io.IOBase`
- finalizer.
+ parse_message_id() was improperly using a token defined inside an
+ exception handler, which was raising `UnboundLocalError` on parsing an
+ invalid value. Patch by Claudiu Popa.
-- bpo-36402: Fix a race condition at Python shutdown when waiting for
- threads. Wait until the Python thread state of all non-daemon threads get
- deleted (join all non-daemon threads), rather than just wait until
- non-daemon Python threads complete.
+- bpo-38927: Use ``python -m pip`` instead of ``pip`` to upgrade
+ dependencies in venv.
-- bpo-34886: Fix an unintended ValueError from :func:`subprocess.run` when
- checking for conflicting `input` and `stdin` or `capture_output` and
- `stdout` or `stderr` args when they were explicitly provided but with
- `None` values within a passed in `**kwargs` dict rather than as passed
- directly by name. Patch contributed by Rémi Lapeyre.
+- bpo-26730: Fix ``SpooledTemporaryFile.rollover()`` possibly corrupting
+ the file when it is in text mode. Patch by Serhiy Storchaka.
-- bpo-37173: The exception message for ``inspect.getfile()`` now correctly
- reports the passed class rather than the builtins module.
+- bpo-38881: random.choices() now raises a ValueError when all the weights
+ are zero.
-- bpo-37178: Give math.perm() a one argument form that means the same as
- math.factorial().
+- bpo-38876: Raise pickle.UnpicklingError when loading an item from memo for
+ invalid input.
-- bpo-37178: For math.perm(n, k), let k default to n, giving the same result
- as factorial.
+ The previous code was raising a `KeyError` for both the Python and C
+ implementation. This was caused by the specified index of an invalid input
+ which did not exist in the memo structure, where the pickle stores what
+ objects it has seen. The malformed input would have caused either a
+ `BINGET` or `LONG_BINGET` load from the memo, leading to a `KeyError` as
+ the determined index was bogus. Patch by Claudiu Popa.
-- bpo-37163: Deprecated passing ``obj`` argument of
- :func:`dataclasses.replace` as keyword argument.
+- bpo-38688: Calling :func:`shutil.copytree` to copy a directory tree from
+ one directory to another subdirectory resulted in an endless loop and a
+ RecursionError. A fix was added to consume an iterator and create the list
+ of the entries to be copied, avoiding the recursion for newly created
+ directories. Patch by Bruno P. Kinoshita.
-- bpo-37165: Converted _collections._count_elements to use the Argument
- Clinic.
+- bpo-38863: Improve the :func:`is_cgi` function in :mod:`http.server`,
+ which enables processing of the case in which the CGI directory is a child
+ of a directory other than the root.
-- bpo-34767: Do not always create a :class:`collections.deque` in
- :class:`asyncio.Lock`.
+- bpo-37838: :meth:`typing.get_type_hints` properly handles functions
+ decorated with :meth:`functools.wraps`.
-- bpo-37158: Speed-up statistics.fmean() by switching from a function to a
- generator.
+- bpo-38870: Expose :func:`ast.unparse` as a function of the :mod:`ast`
+ module that can be used to unparse an :class:`ast.AST` object and produce
+ a string with code that would produce an equivalent :class:`ast.AST`
+ object when parsed. Patch by Pablo Galindo and Batuhan Taskaya.
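+
+ A small round-trip sketch::
+
+     import ast
+
+     tree = ast.parse("answer = 6 * 7")
+     tree.body[0].value = ast.Constant(42)   # swap the expression for a constant
+     print(ast.unparse(tree))                # answer = 42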
-- bpo-37150: `argparse._ActionsContainer.add_argument` now throws error, if
- someone accidentally pass FileType class object instead of instance of
- FileType as `type` argument
+- bpo-38859: AsyncMock now returns StopAsyncIteration on the exhaustion of
+ a side_effects iterable. Since PEP 479 it is impossible to raise a
+ StopIteration exception from a coroutine.
-- bpo-35621: Support running asyncio subprocesses when execution event loop
- in a thread on UNIX.
+- bpo-38857: AsyncMock fix for return values that are awaitable types. This
+ also covers side_effect iterable values that happen to be awaitable, and
+ wraps callables that return an awaitable type. Previously these awaitables
+ were awaited instead of being returned as is.
-- bpo-36520: Lengthy email headers with UTF-8 characters are now properly
- encoded when they are folded. Patch by Jeffrey Kintscher.
+- bpo-38834: :class:`typing.TypedDict` subclasses now track which keys are
+ optional using the ``__required_keys__`` and ``__optional_keys__``
+ attributes, to enable runtime validation by downstream projects. Patch by
+ Zac Hatfield-Dodds.
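+
+ A minimal sketch (class names are arbitrary)::
+
+     from typing import TypedDict
+
+     class Point(TypedDict):
+         x: int
+         y: int
+
+     class LabeledPoint(Point, total=False):
+         label: str
+
+     print(LabeledPoint.__required_keys__)   # frozenset({'x', 'y'})
+     print(LabeledPoint.__optional_keys__)   # frozenset({'label'})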
-- bpo-30835: Fixed a bug in email parsing where a message with invalid bytes
- in content-transfer-encoding of a multipart message can cause an
- AttributeError. Patch by Andrew Donnellan.
+- bpo-38821: Fix unhandled exceptions in :mod:`argparse` when
+ internationalizing error messages for arguments with ``nargs`` set to
+ special (non-integer) values. Patch by Federico Bond.
-- bpo-35805: Add parser for Message-ID header and add it to default
- HeaderRegistry. This should prevent folding of Message-ID using RFC 2048
- encoded words.
+- bpo-38820: Make Python compatible with OpenSSL 3.0.0.
+ :func:`ssl.SSLSocket.getpeercert` no longer returns IPv6 addresses with a
+ trailing newline.
-- bpo-35070: posix.getgrouplist() now works correctly when the user belongs
- to NGROUPS_MAX supplemental groups. Patch by Jeffrey Kintscher.
+- bpo-38811: Fix an unhandled exception in :mod:`pathlib` when
+ :meth:`os.link` is missing. Patch by Toke Høiland-Jørgensen.
-- bpo-32627: Fix compile error when ``_uuid`` headers conflicting included.
+- bpo-38686: Added support for multiple ``qop`` values in
+ :class:`urllib.request.AbstractDigestAuthHandler`.
-- bpo-11122: Distutils won't check for rpmbuild in specified paths only.
+- bpo-38712: Add the Linux-specific :func:`signal.pidfd_send_signal`
+ function, which allows sending a signal to a process identified by a file
+ descriptor rather than a pid.
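+
+ A Linux-only sketch (requires a kernel with ``pidfd_open()`` support)::
+
+     import os
+     import signal
+     import subprocess
+
+     proc = subprocess.Popen(["sleep", "60"])
+     pidfd = os.pidfd_open(proc.pid)
+     signal.pidfd_send_signal(pidfd, signal.SIGTERM)
+     os.close(pidfd)
+     print(proc.wait())   # -15: terminated by SIGTERM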
-- bpo-4963: Fixed non-deterministic behavior related to mimetypes extension
- mapping and module reinitialization.
+- bpo-38348: Add ``-i`` and ``--indent`` (indentation level), and
+ ``--no-type-comments`` (type comments) command line options to ast parsing
+ tool.
-Documentation
--------------
+- bpo-37523: Change :class:`zipfile.ZipExtFile` to raise ``ValueError`` when
+ trying to access the underlying file object after it has been closed. This
+ new behavior is consistent with how accessing closed files is handled in
+ other parts of Python.
-- bpo-34903: Documented that in :meth:`datetime.datetime.strptime()`, the
- leading zero in some two-digit formats is optional. Patch by Mike Gleen.
+- bpo-38045: Improve the performance of :func:`enum._decompose` in
+ :mod:`enum`. Patch by hongweipeng.
-Tests
------
+- bpo-36820: Break the reference cycle generated when saving an exception
+ in socket.py, codeop.py and dyld.py, as it keeps alive not only the
+ exception but also user objects through the ``__traceback__`` attribute.
+ Patch by Mario Corchero.
+
+- bpo-36406: Handle namespace packages in :mod:`doctest`. Patch by
+ Karthikeyan Singaravelan.
+
+- bpo-34776: Fix dataclasses to support forward references in type
+ annotations.
+
+- bpo-20928: ElementTree supports recursive XInclude processing. Patch by
+ Stefan Behnel.
+
+- bpo-29636: Add whitespace options for formatting JSON with the
+ ``json.tool`` CLI. The following mutually exclusive options are now
+ supported: ``--indent`` for setting the indent level in spaces; ``--tab``
+ for indenting with tabs; ``--no-indent`` for suppressing newlines; and
+ ``--compact`` for suppressing all whitespace. The default behavior remains
+ the same as ``--indent=4``.
+
+Documentation
+-------------
+
+- bpo-38928: Correct the documentation about when venv's
+ ``upgrade_dependencies()`` and ``--upgrade-deps`` were added.
+
+- bpo-38899: Update documentation to state that to activate virtual
+ environments under fish one should use `source`, not `.` as documented at
+ https://fishshell.com/docs/current/commands.html#source.
+
+- bpo-22377: Improves documentation of the values that
+ :meth:`datetime.datetime.strptime` accepts for ``%Z``. Patch by Karl
+ Dubost.
+
+Tests
+-----
+
+- bpo-38546: Fix test_ressources_gced_in_workers() of
+ test_concurrent_futures: explicitly stop the manager to prevent leaking a
+ child process running in the background after the test completes.
+
+- bpo-38546: Multiprocessing and concurrent.futures tests now stop the
+ resource tracker process when tests complete.
+
+- bpo-38614: Replace hardcoded timeout constants in tests with new
+ :mod:`test.support` constants: :data:`~test.support.LOOPBACK_TIMEOUT`,
+ :data:`~test.support.INTERNET_TIMEOUT`,
+ :data:`~test.support.SHORT_TIMEOUT` and
+ :data:`~test.support.LONG_TIMEOUT`. It becomes easier to adjust these four
+ timeout constants for all tests at once, rather than having to adjust
+ every single test file.
+
+- bpo-38547: Fix test_pty: if the process is the session leader, closing the
+ master file descriptor raises a SIGHUP signal: simply ignore SIGHUP when
+ running the tests.
+
+- bpo-38992: Fix a test for :func:`math.fsum` that was failing due to
+ constant folding.
+
+- bpo-38991: :mod:`test.support`:
+ :func:`~test.support.run_python_until_end`,
+ :func:`~test.support.assert_python_ok` and
+ :func:`~test.support.assert_python_failure` functions no longer strip
+ whitespaces from stderr. Remove ``test.support.strip_python_stderr()``
+ function.
+
+- bpo-38965: Fix test_faulthandler on GCC 10. Use the "volatile" keyword in
+ ``faulthandler._stack_overflow()`` to prevent tail call optimization on
+ any compiler, rather than relying on compiler specific pragma.
+
+- bpo-38875: test_capi: trashcan tests now require the test "cpu" resource.
+
+- bpo-38841: Skip asyncio test_create_datagram_endpoint_existing_sock_unix
+ on platforms lacking a functional bind() for named unix domain sockets.
+
+- bpo-38692: Skip the test_posix.test_pidfd_open() test if
+ ``os.pidfd_open()`` fails with a :exc:`PermissionError`. This situation
+ can happen in a Linux sandbox using a syscall whitelist which doesn't
+ allow the ``pidfd_open()`` syscall yet.
+
+- bpo-38839: Fix some unused functions in tests. Patch by Adam Johnson.
+
+- bpo-38669: Raise :exc:`TypeError` when passing target as a string with
+ :meth:`unittest.mock.patch.object`.
+
+- bpo-37957: test.regrtest now can receive a list of test patterns to ignore
+ (using the -i/--ignore argument) or a file with a list of patterns to
+ ignore (using the --ignore-file argument). Patch by Pablo Galindo.
+
+Build
+-----
+
+- bpo-37404: :mod:`asyncio` now raises :exc:`TypeError` when calling
+ incompatible methods with an :class:`ssl.SSLSocket` socket. Patch by Ido
+ Michael.
+
+- bpo-36500: Added an optional "regen" project to the Visual Studio solution
+ that will regenerate all grammar, tokens, and opcodes.
+
+Windows
+-------
+
+- bpo-39007: Add auditing events to functions in :mod:`winreg`.
+
+- bpo-33125: Add support for building and releasing Windows ARM64 packages.
+
+macOS
+-----
+
+- bpo-37931: Fixed a crash on OSX dynamic builds that occurred when
+ re-initializing the posix module after a Py_Finalize if the environment
+ had changed since the previous `import posix`. Patch by Benoît Hudson.
+
+IDLE
+----
+
+- bpo-38944: Escape key now closes IDLE completion windows. Patch by Johnny
+ Najera.
+
+- bpo-38943: Fix IDLE autocomplete windows not always appearing on some
+ systems. Patch by Johnny Najera.
+
+- bpo-38862: 'Strip Trailing Whitespace' on the Format menu removes extra
+ newlines at the end of non-shell files.
+
+- bpo-38636: Fix IDLE Format menu tab toggle and file indent width. These
+ functions (default shortcuts Alt-T and Alt-U) were mistakenly disabled in
+ 3.7.5 and 3.8.0.
+
+C API
+-----
+
+- bpo-38896: Remove ``PyUnicode_ClearFreeList()`` function: the Unicode free
+ list has been removed in Python 3.3.
+
+- bpo-37340: Remove ``PyMethod_ClearFreeList()`` and
+ ``PyCFunction_ClearFreeList()`` functions: the free lists of bound method
+ objects have been removed.
+
+- bpo-38835: Exclude ``PyFPE_START_PROTECT()`` and ``PyFPE_END_PROTECT()``
+ macros of ``pyfpe.h`` from ``Py_LIMITED_API`` (stable API).
+
+
+What's New in Python 3.9.0 alpha 1?
+===================================
+
+*Release date: 2019-11-19*
+
+Security
+--------
+
+- bpo-38722: :mod:`runpy` now uses :meth:`io.open_code` to open code files.
+ Patch by Jason Killen.
+
+- bpo-38622: Add additional audit events for the :mod:`ctypes` module.
+
+- bpo-38418: Fixes audit event for :func:`os.system` to be named
+ ``os.system``.
+
+- bpo-38243: Escape the server title of
+ :class:`xmlrpc.server.DocXMLRPCServer` when rendering the document page as
+ HTML. (Contributed by Dong-hee Na in :issue:`38243`.)
+
+- bpo-38174: Update vendorized expat library version to 2.2.8, which
+ resolves CVE-2019-15903.
+
+- bpo-37764: Fixes email._header_value_parser.get_unstructured going into an
+ infinite loop for a specific case in which the email header does not have
+ trailing whitespace, and the case in which it contains an invalid encoded
+ word. Patch by Ashwin Ramaswami.
+
+- bpo-37461: Fix an infinite loop when parsing specially crafted email
+ headers. Patch by Abhilash Raj.
+
+- bpo-37363: Adds audit events for the range of supported run commands (see
+ :ref:`using-on-general`).
+
+- bpo-37463: ssl.match_hostname() no longer accepts IPv4 addresses with
+ additional text after the address and only quad-dotted notation without
+ trailing whitespaces. Some inet_aton() implementations ignore whitespace
+ and all data after whitespace, e.g. '127.0.0.1 whatever'.
+
+- bpo-37363: Adds audit events for :mod:`ensurepip`, :mod:`ftplib`,
+ :mod:`glob`, :mod:`imaplib`, :mod:`nntplib`, :mod:`pdb`, :mod:`poplib`,
+ :mod:`shutil`, :mod:`smtplib`, :mod:`sqlite3`, :mod:`subprocess`,
+ :mod:`telnetlib`, :mod:`tempfile` and :mod:`webbrowser`, as well as
+ :func:`os.listdir`, :func:`os.scandir` and :func:`breakpoint`.
+
+- bpo-37364: :func:`io.open_code` is now used when reading :file:`.pth`
+ files.
+
+- bpo-34631: Updated OpenSSL to 1.1.1c in Windows installer
+
+- bpo-34155: Fix parsing of invalid email addresses with more than one ``@``
+ (e.g. a@b@c.com.) to not return the part before 2nd ``@`` as valid email
+ address. Patch by maxking & jpic.
+
+Core and Builtins
+-----------------
+
+- bpo-38631: Replace ``Py_FatalError()`` call with a regular
+ :exc:`RuntimeError` exception in :meth:`float.__getformat__`.
+
+- bpo-38639: Optimized :func:`math.floor()`, :func:`math.ceil()` and
+ :func:`math.trunc()` for floats.
+
+- bpo-38640: Fixed a bug in the compiler that caused an exception to be
+  raised in the presence of break and continue statements inside always
+  false while loops. Patch by Pablo Galindo.
+
+- bpo-38613: Optimized some set operations (e.g. ``|``, ``^``, and ``-``) of
+  ``dict_keys``. ``d.keys() | other`` used to be slower than ``set(d) |
+  other``; they now have almost the same performance.
+
+- bpo-28029: ``"".replace("", s, n)`` now returns ``s`` instead of an empty
+ string for all non-zero ``n``. There are similar changes for
+ :class:`bytes` and :class:`bytearray` objects.
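+
+  For example, after this change::
+
+      print("".replace("", "x", 1))       # "x" rather than ""
+      print(b"".replace(b"", b"y", 1))    # b"y" rather than b""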
+
+- bpo-38535: Fixed line numbers and column offsets for AST nodes for calls
+ without arguments in decorators.
+
+- bpo-38525: Fix a segmentation fault when using reverse iterators of empty
+ ``dict`` objects. Patch by Dong-hee Na and Inada Naoki.
+
+- bpo-38465: :class:`bytearray`, :class:`~array.array` and
+  :class:`~mmap.mmap` objects now allow exporting more than 2**31 buffers at
+  a time.
+
+- bpo-38469: Fixed a bug where the scope of named expressions was not being
+ resolved correctly in the presence of the *global* keyword. Patch by Pablo
+ Galindo.
+
+- bpo-38437: Activate the ``GC_DEBUG`` macro for debug builds of the
+ interpreter (when ``Py_DEBUG`` is set). Patch by Pablo Galindo.
+
+- bpo-38379: When the garbage collector makes a collection in which some
+ objects resurrect (they are reachable from outside the isolated cycles
+ after the finalizers have been executed), do not block the collection of
+ all objects that are still unreachable. Patch by Pablo Galindo and Tim
+ Peters.
+
+- bpo-38379: When cyclic garbage collection (gc) runs finalizers that
+ resurrect unreachable objects, the current gc run ends, without collecting
+ any cyclic trash. However, the statistics reported by ``collect()`` and
+ ``get_stats()`` claimed that all cyclic trash found was collected, and
+ that the resurrected objects were collected. Changed the stats to report
+ that none were collected.
+
+- bpo-38392: In debug mode, :c:func:`PyObject_GC_Track` now calls
+ ``tp_traverse()`` of the object type to ensure that the object is valid:
+ test that objects visited by ``tp_traverse()`` are valid.
+
+- bpo-38210: Remove unnecessary intersection and update set operation in
+  dictview with empty set. (Contributed by Dong-hee Na in :issue:`38210`.)
+
+- bpo-38402: Check the error from the system's underlying ``crypt`` or
+ ``crypt_r``.
+
+- bpo-37474: On FreeBSD, Python no longer calls ``fedisableexcept()`` at
+ startup to control the floating point control mode. The call became
+ useless since FreeBSD 6: it became the default mode.
+
+- bpo-38006: Fix a bug due to the interaction of weakrefs and the cyclic
+ garbage collector. We must clear any weakrefs in garbage in order to
+ prevent their callbacks from executing and causing a crash.
+
+- bpo-38317: Fix warnings options priority: ``PyConfig.warnoptions`` has the
+ highest priority, as stated in the :pep:`587`.
+
+- bpo-38310: Predict ``BUILD_MAP_UNPACK_WITH_CALL`` -> ``CALL_FUNCTION_EX``
+ opcode pairs in the main interpreter loop. Patch by Brandt Bucher.
+
+- bpo-36871: Improve error handling for the assert_has_calls and
+ assert_has_awaits methods of mocks. Fixed a bug where any errors
+ encountered while binding the expected calls to the mock's spec were
+ silently swallowed, leading to misleading error output.
+
+- bpo-11410: Better control over symbol visibility is provided through use
+ of the visibility attributes available in gcc >= 4.0, provided in a
+ uniform way across POSIX and Windows. The POSIX build files have been
+ updated to compile with -fvisibility=hidden, minimising exported symbols.
+
+- bpo-38219: Optimized the :class:`dict` constructor and the
+ :meth:`~dict.update` method for the case when the argument is a dict.
+
+- bpo-38236: Python now dumps path configuration if it fails to import the
+ Python codecs of the filesystem and stdio encodings.
+
+- bpo-38013: Allow calling ``async_generator_athrow().throw(...)`` even on a
+  not-yet-started async generator helper. This fixes an annoying warning at
+  the end of an :func:`asyncio.run` call.
+
+- bpo-38124: Fix an off-by-one error in PyState_AddModule that could cause
+ out-of-bounds memory access.
+
+- bpo-38116: The select module is now PEP-384 compliant and no longer has
+  static state.
+
+- bpo-38113: The ast module is now PEP-384 compliant and all static state
+  has been removed.
+
+- bpo-38076: The struct module is now PEP-384 compatible.
+
+- bpo-38075: The random module is now PEP-384 compatible.
+
+- bpo-38074: The zlib module is now PEP-384 compatible.
+
+- bpo-38073: The pwd extension module is now PEP-384 compatible.
+
+- bpo-38072: The grp module is now PEP-384 compatible.
+
+- bpo-38069: The _posixsubprocess module is now PEP-384 compatible.
+
+- bpo-38071: The termios extension module is now PEP-384 compatible.
+
+- bpo-38005: Fixed comparing and creating of InterpreterID and ChannelID.
+
+- bpo-36946: Fix possible signed integer overflow when handling slices.
+ Patch by hongweipeng.
+
+- bpo-37994: Fixed silencing arbitrary errors if an attribute lookup fails
+ in several sites. Only AttributeError should be silenced.
+
+- bpo-8425: Optimize set difference_update for the case when the other set
+ is much larger than the base set. (Suggested by Evgeny Kapun with code
+ contributed by Michele Orrù).
+
+- bpo-37966: The implementation of :func:`~unicodedata.is_normalized` has
+ been greatly sped up on strings that aren't normalized, by implementing
+ the full normalization-quick-check algorithm from the Unicode standard.
+
+- bpo-37947: Correctly adjust the recursion level in the symtable generation
+  for named expressions. Patch by Pablo Galindo.
+
+- bpo-37812: The ``CHECK_SMALL_INT`` macro used inside
+ :file:`Object/longobject.c` has been replaced with an explicit ``return``
+ at each call site.
+
+- bpo-37751: Fix :func:`codecs.lookup` to normalize the encoding name the
+  same way as :func:`encodings.normalize_encoding`, except that
+  :func:`codecs.lookup` also converts the name to lower case.
+
+- bpo-37830: Fixed compilation of :keyword:`break` and :keyword:`continue`
+ in the :keyword:`finally` block when the corresponding :keyword:`try`
+ block contains :keyword:`return` with a non-constant value.
+
+- bpo-20490: Improve import error message for partially initialized module
+ on circular ``from`` imports - by Anthony Sottile.
+
+- bpo-37840: Fix handling of negative indices in
+ :c:member:`~PySequenceMethods.sq_item` of :class:`bytearray`. Patch by
+ Sergey Fedoseev.
+
+- bpo-37802: Slightly improve performance of
+ :c:func:`PyLong_FromUnsignedLong`, :c:func:`PyLong_FromUnsignedLongLong`
+ and :c:func:`PyLong_FromSize_t`. Patch by Sergey Fedoseev.
+
+- bpo-37409: Ensure explicit relative imports from interactive sessions and
+ scripts (having no parent package) always raise ImportError, rather than
+ treating the current module as the package. Patch by Ben Lewis.
+
+- bpo-32912: Reverted :issue:`32912`: emitting :exc:`SyntaxWarning` instead
+ of :exc:`DeprecationWarning` for invalid escape sequences in string and
+ bytes literals.
+
+- bpo-37757: :pep:`572`: As described in the PEP, assignment expressions now
+ raise :exc:`SyntaxError` when their interaction with comprehension scoping
+ results in an ambiguous target scope.
+
+ The ``TargetScopeError`` subclass originally proposed by the PEP has been
+ removed in favour of just raising regular syntax errors for the disallowed
+ cases.
+
+- bpo-36279: Fix potential use of uninitialized memory in :func:`os.wait3`.
+
+- bpo-36311: Decoding bytes objects larger than 2GiB is faster and no longer
+  fails when a multibyte character spans a chunk boundary.
+
+- bpo-34880: The :keyword:`assert` statement now works properly if the
+ :exc:`AssertionError` exception is being shadowed. Patch by Zackery Spytz.
+
+- bpo-37340: Removed object cache (``free_list``) for bound method objects.
+ Temporary bound method objects are less used than before thanks to the
+ ``LOAD_METHOD`` opcode and the ``_PyObject_VectorcallMethod`` C API.
+
+- bpo-37648: Fixed minor inconsistency in :meth:`list.__contains__`,
+ :meth:`tuple.__contains__` and a few other places. The collection's item
+ is now always at the left and the needle is on the right of ``==``.
+
+- bpo-37444: Update differing exception between :meth:`builtins.__import__`
+ and :meth:`importlib.__import__`.
+
+- bpo-37619: When adding a wrapper descriptor from one class to a different
+ class (for example, setting ``__add__ = str.__add__`` on an ``int``
+ subclass), an exception is correctly raised when the operator is called.
+
+- bpo-37593: Swap the positions of the *posonlyargs* and *args* parameters
+  in the constructor of :class:`ast.arguments` nodes.
+
+- bpo-37543: Optimized pymalloc for non PGO build.
+
+- bpo-37537: Compute allocated pymalloc blocks inside
+ _Py_GetAllocatedBlocks(). This slows down _Py_GetAllocatedBlocks() but
+ gives a small speedup to _PyObject_Malloc() and _PyObject_Free().
+
+- bpo-37467: Fix :func:`sys.excepthook` and :c:func:`PyErr_Display` if a
+ filename is a bytes string. For example, for a SyntaxError exception where
+ the filename attribute is a bytes string.
+
+- bpo-37433: Fix ``SyntaxError`` indicator printing too many spaces for
+ multi-line strings - by Anthony Sottile.
+
+- bpo-37417: :meth:`bytearray.extend` now correctly handles errors that
+ arise during iteration. Patch by Brandt Bucher.
+
+- bpo-37414: The undocumented ``sys.callstats()`` function has been removed.
+ Since Python 3.7, it was deprecated and always returned ``None``. It
+ required a special build option ``CALL_PROFILE`` which was already removed
+ in Python 3.7.
+
+- bpo-37392: Remove ``sys.getcheckinterval()`` and
+ ``sys.setcheckinterval()`` functions. They were deprecated since Python
+ 3.2. Use :func:`sys.getswitchinterval` and :func:`sys.setswitchinterval`
+ instead. Remove also ``check_interval`` field of the
+ ``PyInterpreterState`` structure.
+
+- bpo-37388: In development mode and in debug build, *encoding* and *errors*
+  arguments are now checked on string encoding and decoding operations.
+  Examples: :func:`open`, :meth:`str.encode` and :meth:`bytes.decode`.
+
+  By default, for best performance, the *errors* argument is only checked
+  at the first encoding/decoding error, and the *encoding* argument is
+  sometimes ignored for empty strings.
+
+- bpo-37348: Optimized decoding of short ASCII strings with the UTF-8 and
+  ascii codecs. ``b"foo".decode()`` is about 15% faster. Patch by Inada
+  Naoki.
+
+- bpo-24214: Improved support of the surrogatepass error handler in the
+ UTF-8 and UTF-16 incremental decoders.
+
+- bpo-37330: :func:`open`, :func:`io.open`, :func:`codecs.open` and
+ :class:`fileinput.FileInput` no longer accept ``'U'`` ("universal
+ newline") in the file mode. This flag was deprecated since Python 3.3.
+
+- bpo-35224: Reverse evaluation order of key: value in dict comprehensions
+ as proposed in PEP 572. I.e. in ``{k: v for ...}``, ``k`` will be
+ evaluated before ``v``.
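+
+  A small sketch of the new evaluation order; the ``key`` and ``value``
+  helpers are illustrative::
+
+      order = []
+
+      def key(k):
+          order.append("key")
+          return k
+
+      def value(v):
+          order.append("value")
+          return v
+
+      {key(k): value(v) for k, v in [(1, 2)]}
+      print(order)   # ['key', 'value']: the key is evaluated first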
+
+- bpo-37316: Fix the :c:func:`PySys_Audit` call in :class:`mmap.mmap`.
+
+- bpo-37300: Remove an unnecessary Py_XINCREF in classobject.c.
+
+- bpo-37269: Fix a bug in the peephole optimizer that was not treating
+ correctly constant conditions with binary operators. Patch by Pablo
+ Galindo.
+
+- bpo-20443: Python now gets the absolute path of the script filename
+ specified on the command line (ex: "python3 script.py"): the __file__
+ attribute of the __main__ module and sys.path[0] become an absolute path,
+ rather than a relative path.
+
+- bpo-37257: Python's small object allocator (``obmalloc.c``) now allows (no
+ more than) one empty arena to remain available for immediate reuse,
+ without returning it to the OS. This prevents thrashing in simple loops
+ where an arena could be created and destroyed anew on each iteration.
+
+- bpo-37231: The dispatching of type slots to special methods (for example
+ calling ``__mul__`` when doing ``x * y``) has been made faster.
+
+- bpo-36974: Implemented separate vectorcall functions for every calling
+ convention of builtin functions and methods. This improves performance for
+ calls.
+
+- bpo-37213: Handle correctly negative line offsets in the peephole
+ optimizer. Patch by Pablo Galindo.
+
+- bpo-37219: Remove erroneous optimization for empty set differences.
+
+- bpo-15913: Implement :c:func:`PyBuffer_SizeFromFormat()` function
+ (previously documented but not implemented): call :func:`struct.calcsize`.
+ Patch by Joannah Nanjekye.
+
+- bpo-36922: Slot functions optimize any callable with
+ ``Py_TPFLAGS_METHOD_DESCRIPTOR`` instead of only instances of
+ ``function``.
+
+- bpo-36974: The slot ``tp_vectorcall_offset`` is inherited unconditionally
+ to support ``super().__call__()`` when the base class uses vectorcall.
+
+- bpo-37160: :func:`threading.get_native_id` now also supports NetBSD.
+
+- bpo-37077: Add :func:`threading.get_native_id` support for AIX. Patch by
+ M. Felt
+
+- bpo-36781: :func:`sum` has been optimized for boolean values.
+
+- bpo-34556: Add ``--upgrade-deps`` to venv module. Patch by Cooper Ry Lees
+
+- bpo-20523: ``pdb.Pdb`` supports ~/.pdbrc in Windows 7. Patch by Tim Hopper
+ and Dan Lidral-Porter.
+
+- bpo-35551: Updated encodings: removed the "tis260" encoding, which was an
+  alias for the nonexistent "tactis" codec, and added "mac_centeuro" as an
+  alias for the mac_latin2 encoding.
+
+- bpo-19072: The :class:`classmethod` decorator can now wrap other
+ descriptors such as property objects. Adapted from a patch written by
+ Graham Dumpleton.
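+
+  A minimal sketch of the pattern this enables; the ``Circle`` class is just
+  an example::
+
+      class Circle:
+          _radius = 1
+
+          @classmethod
+          @property
+          def radius(cls):
+              return cls._radius
+
+      print(Circle.radius)   # 1, resolved through the wrapped property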
+
+- bpo-27575: Improve speed of dictview intersection by directly using set
+ intersection logic. Patch by David Su.
+
+- bpo-30773: Prohibit parallel running of aclose() / asend() / athrow(). Fix
+ ag_running to reflect the actual running status of the AG.
+
+Library
+-------
+
+- bpo-36589: The :func:`curses.update_lines_cols` function now returns
+ ``None`` instead of ``1`` on success.
+
+- bpo-38807: Update :exc:`TypeError` messages for :meth:`os.path.join` to
+ include :class:`os.PathLike` objects as acceptable input types.
+
+- bpo-38724: Add a repr for ``subprocess.Popen`` objects. Patch by Andrey
+ Doroschenko.
+
+- bpo-38786: pydoc now recognizes and parses HTTPS URLs. Patch by python273.
+
+- bpo-38785: Prevent asyncio from crashing if parent ``__init__`` is not
+ called from a constructor of object derived from ``asyncio.Future``.
+
+- bpo-38723: :mod:`pdb` now uses :meth:`io.open_code` to trigger auditing
+ events.
+
+- bpo-27805: Allow opening pipes and other non-seekable files in append mode
+ with :func:`open`.
+
+- bpo-38438: Simplify the :mod:`argparse` usage message for ``nargs="*"``.
+
+- bpo-38761: WeakSet is now registered as a collections.abc.MutableSet.
+
+- bpo-38716: logging: change RotatingHandler namer and rotator to
+ class-level attributes. This stops __init__ from setting them to None in
+ the case where a subclass defines them with eponymous methods.
+
+- bpo-38713: Add :data:`os.P_PIDFD` constant, which may be passed to
+ :func:`os.waitid` to wait on a Linux process file descriptor.
+
+- bpo-38692: Add :class:`asyncio.PidfdChildWatcher`, a Linux-specific child
+ watcher implementation that polls process file descriptors.
+
+- bpo-38692: Expose the Linux ``pidfd_open`` syscall as
+ :func:`os.pidfd_open`.
+
+- bpo-38602: Added constants :data:`~fcntl.F_OFD_GETLK`,
+ :data:`~fcntl.F_OFD_SETLK` and :data:`~fcntl.F_OFD_SETLKW` to the
+ :mod:`fcntl` module. Patch by Dong-hee Na.
+
+- bpo-38334: Fixed seeking backward on an encrypted
+ :class:`zipfile.ZipExtFile`.
+
+- bpo-38312: Add :func:`curses.get_escdelay`, :func:`curses.set_escdelay`,
+ :func:`curses.get_tabsize`, and :func:`curses.set_tabsize` functions - by
+ Anthony Sottile.
+
+- bpo-38586: Now :func:`~logging.config.fileConfig` correctly sets the .name
+  of handlers loaded.
+
+- bpo-38565: Add new cache_parameters() method for functools.lru_cache() to
+ better support pickling.
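+
+  A quick illustration of the new method::
+
+      import functools
+
+      @functools.lru_cache(maxsize=32, typed=False)
+      def square(x):
+          return x * x
+
+      print(square.cache_parameters())   # {'maxsize': 32, 'typed': False}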
+
+- bpo-34679: asyncio.ProactorEventLoop.close() now only calls
+  signal.set_wakeup_fd() in the main thread.
+
+- bpo-31202: The case of the result of :func:`pathlib.WindowsPath.glob` now
+  matches the case of the pattern for literal parts.
+
+- bpo-36321: Remove misspelled attribute. The 3.8 changelog noted that this
+ would be removed in 3.9.
+
+- bpo-38521: Fixed erroneous equality comparison in statistics.NormalDist().
+
+- bpo-38493: Added :data:`~os.CLD_KILLED` and :data:`~os.CLD_STOPPED` for
+ :attr:`si_code`. Patch by Dong-hee Na.
+
+- bpo-38478: Fixed a bug in :meth:`inspect.signature.bind` that was causing
+  it to fail when handling a keyword argument with the same name as a
+  positional-only parameter. Patch by Pablo Galindo.
+
+- bpo-33604: Fixed `hmac.new` and `hmac.HMAC` to raise TypeError instead of
+ ValueError when the digestmod parameter, now required in 3.8, is omitted.
+ Also clarified the hmac module documentation and docstrings.
+
+- bpo-38378: Parameters *out* and *in* of :func:`os.sendfile` were renamed
+  to *out_fd* and *in_fd*.
+
+- bpo-38417: Added support for setting the umask in the child process to the
+ subprocess module on POSIX systems.
+
+- bpo-38449: Revert GH-15522, which introduces a regression in
+ :meth:`mimetypes.guess_type` due to improper handling of filenames as
+ urls.
+
+- bpo-38431: Fix ``__repr__`` method for :class:`dataclasses.InitVar` to
+ support typing objects, patch by Samuel Colvin.
+
+- bpo-38109: Add missing :data:`stat.S_IFDOOR`, :data:`stat.S_IFPORT`,
+ :data:`stat.S_IFWHT`, :func:`stat.S_ISDOOR`, :func:`stat.S_ISPORT`, and
+ :func:`stat.S_ISWHT` values to the Python implementation of :mod:`stat`.
+
+- bpo-38422: Clarify docstrings of pathlib suffix(es)
+
+- bpo-38405: Nested subclasses of :class:`typing.NamedTuple` are now
+ pickleable.
+
+- bpo-38332: Prevent :exc:`KeyError` thrown by :func:`_encoded_words.decode`
+ when given an encoded-word with invalid content-type encoding from
+ propagating all the way to :func:`email.message.get`.
+
+- bpo-38371: Deprecated the ``split()`` method in
+  :class:`_tkinter.TkappType` in favour of the ``splitlist()`` method which
+  has more consistent and predictable behavior.
+
+- bpo-38341: Add :exc:`smtplib.SMTPNotSupportedError` to the :mod:`smtplib`
+ exported names.
+
+- bpo-38319: sendfile() used in socket and shutil modules was raising
+ OverflowError for files >= 2GiB on 32-bit architectures. (patch by
+ Giampaolo Rodola)
+
+- bpo-38242: Revert the new asyncio Streams API
+
+- bpo-13153: The OS native encoding is now used for converting between
+  Python strings and Tcl objects. This allows displaying, copying and
+  pasting to the clipboard emoji and other non-BMP characters. Converting
+  strings from Tcl to Python and back now never fails (except for
+  MemoryError).
+
+- bpo-38019: Correctly handle pause/resume reading of closed asyncio unix
+ pipe.
+
+- bpo-38163: Child mocks will now detect their type as either synchronous or
+ asynchronous, asynchronous child mocks will be AsyncMocks and synchronous
+ child mocks will be either MagicMock or Mock (depending on their parent
+ type).
+
+- bpo-38161: Removes _AwaitEvent from AsyncMock.
+
+- bpo-38216: Allow the rare code that wants to send invalid http requests
+ from the `http.client` library a way to do so. The fixes for bpo-30458
+ led to breakage for some projects that were relying on this ability to
+ test their own behavior in the face of bad requests.
+
+- bpo-28286: Deprecate opening :class:`~gzip.GzipFile` for writing
+ implicitly. Always specify the *mode* argument for writing.
+
+- bpo-38108: Any synchronous magic methods on an AsyncMock now return a
+ MagicMock. Any asynchronous magic methods on a MagicMock now return an
+ AsyncMock.
+
+- bpo-38265: Update the *length* parameter of :func:`os.pread` to accept
+ :c:type:`Py_ssize_t` instead of :c:type:`int`.
+
+- bpo-38112: :mod:`compileall` has a higher default recursion limit and new
+ command-line arguments for path manipulation, symlinks handling, and
+ multiple optimization levels.
+
+- bpo-38248: asyncio: Fix inconsistent immediate Task cancellation
+
+- bpo-38237: The arguments for the builtin pow function are more
+ descriptive. They can now also be passed in as keywords.
+
+- bpo-34002: Improve efficiency in parts of email package by changing
+ while-pop to a for loop, using isdisjoint instead of set intersections.
+
+- bpo-38191: Constructors of :class:`~typing.NamedTuple` and
+ :class:`~typing.TypedDict` types now accept arbitrary keyword argument
+ names, including "cls", "self", "typename", "_typename", "fields" and
+ "_fields".
+
+- bpo-38155: Add ``__all__`` to :mod:`datetime`. Patch by Tahia Khan.
+
+- bpo-38185: Fixed case-insensitive string comparison in
+ :class:`sqlite3.Row` indexing.
+
+- bpo-38136: Changes AsyncMock call count and await count to be two
+ different counters. Now await count only counts when a coroutine has been
+ awaited, not when it has been called, and vice-versa. Update the
+ documentation around this.
+
+- bpo-37828: Fix default mock name in
+ :meth:`unittest.mock.Mock.assert_called` exceptions. Patch by Abraham
+ Toriz Cruz.
+
+- bpo-38175: Fix a memory leak in comparison of :class:`sqlite3.Row`
+ objects.
+
+- bpo-33936: _hashlib no longer calls obsolete OpenSSL initialization
+ function with OpenSSL 1.1.0+.
+
+- bpo-34706: Preserve subclassing in inspect.Signature.from_callable.
+
+- bpo-38153: Names of hashing algorithms from OpenSSL are now normalized to
+  follow Python's naming conventions. For example, OpenSSL uses sha3-512 and
+  blake2b512 where Python uses sha3_512 and blake2b.
+
+- bpo-38115: Fix a bug in dis.findlinestarts() where it would return invalid
+ bytecode offsets. Document that a code object's co_lnotab can contain
+ invalid bytecode offsets.
+
+- bpo-38148: Add slots to :mod:`asyncio` transport classes, which can reduce
+ memory usage.
+
+- bpo-38142: The _hashlib OpenSSL wrapper extension module is now PEP-384
+ compliant.
+
+- bpo-9216: hashlib constructors now support usedforsecurity flag to signal
+ that a hashing algorithm is not used in a security context.
+
+- bpo-36991: Fixes a potential incorrect AttributeError exception escaping
+ ZipFile.extract() in some unsupported input error situations.
+
+- bpo-38134: Remove obsolete copy of PBKDF2_HMAC_fast. All supported OpenSSL
+ versions contain a fast implementation.
+
+- bpo-38132: The OpenSSL hashlib wrapper uses a simpler implementation.
+ Several Macros and pointless caches are gone. The hash name now comes from
+ OpenSSL's EVP. The algorithm name stays the same, except it is now always
+ lower case.
+
+- bpo-38008: Fix parent class check in protocols to correctly identify the
+ module that provides a builtin protocol, instead of assuming they all come
+ from the :mod:`collections.abc` module
+
+- bpo-34037: For :mod:`asyncio`, add a new coroutine
+ :meth:`loop.shutdown_default_executor`. The new coroutine provides an API
+ to schedule an executor shutdown that waits on the threadpool to finish
+ closing. Also, :func:`asyncio.run` has been updated to utilize the new
+ coroutine. Patch by Kyle Stanley.
+
+- bpo-37405: Fixed regression bug for socket.getsockname() for non-CAN_ISOTP
+  AF_CAN address family sockets by returning a 1-tuple instead of a string.
+
+- bpo-38121: Update parameter names on functions in importlib.metadata
+ matching the changes in the 0.22 release of importlib_metadata.
+
+- bpo-38110: The os.closewalk() implementation now uses the libc fdwalk()
+ API on platforms where it is available.
+
+- bpo-38093: Fixes AsyncMock so it doesn't crash when used with
+ AsyncContextManagers or AsyncIterators.
+
+- bpo-37488: Add warning to :meth:`datetime.utctimetuple`,
+  :meth:`datetime.utcnow` and :meth:`datetime.utcfromtimestamp`.
+
+- bpo-35640: Allow passing a :term:`path-like object` as ``directory``
+ argument to the :class:`http.server.SimpleHTTPRequestHandler` class. Patch
+ by Géry Ogam.
+
+- bpo-38086: Update importlib.metadata with changes from `importlib_metadata
+ 0.21
+ `_.
+
+- bpo-37251: Remove `__code__` check in AsyncMock that incorrectly evaluated
+ function specs as async objects but failed to evaluate classes with
+ `__await__` but no `__code__` attribute defined as async objects.
+
+- bpo-38037: Fix reference counters in the :mod:`signal` module.
+
+- bpo-38066: Hide internal asyncio.Stream methods: feed_eof(), feed_data(),
+ set_exception() and set_transport().
+
+- bpo-38059: inspect.py now uses sys.exit() instead of exit()
+
+- bpo-38049: Added command-line interface for the :mod:`ast` module.
+
+- bpo-37953: In :mod:`typing`, improved the ``__hash__`` and ``__eq__``
+ methods for :class:`ForwardReferences`.
+
+- bpo-38026: Fixed :func:`inspect.getattr_static`, which used ``isinstance``
+  even though it should avoid dynamic lookup.
+
+- bpo-35923: Update :class:`importlib.machinery.BuiltinImporter` to use
+ ``loader._ORIGIN`` instead of a hardcoded value. Patch by Dong-hee Na.
+
+- bpo-38010: In ``importlib.metadata`` sync with ``importlib_metadata``
+ 0.20, clarifying behavior of ``files()`` and fixing issue where only one
+ requirement was returned for ``requires()`` on ``dist-info`` packages.
+
+- bpo-38006: weakref.WeakValueDictionary defines a local remove() function
+ used as callback for weak references. This function was created with a
+ closure. Modify the implementation to avoid the closure.
+
+- bpo-37995: Added the *indent* option to :func:`ast.dump` which allows it
+ to produce a multiline indented output.
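+
+  For example::
+
+      import ast
+
+      tree = ast.parse("x = 1 + 2")
+      print(ast.dump(tree, indent=4))   # multi-line, indented output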
+
+- bpo-34410: Fixed a crash in the :func:`tee` iterator when re-entering it.
+  RuntimeError is now raised in this case.
+
+- bpo-37140: Fix a ctypes regression of Python 3.8. When a ctypes.Structure
+ is passed by copy to a function, ctypes internals created a temporary
+ object which had the side effect of calling the structure finalizer
+ (__del__) twice. The Python semantics requires a finalizer to be called
+ exactly once. Fix ctypes internals to no longer call the finalizer twice.
+
+- bpo-37587: ``_json.scanstring`` is now up to 3x faster when there are many
+ backslash escaped characters in the JSON string.
+
+- bpo-37834: Prevent shutil.rmtree exception when built on non-Windows
+ system without fd system call support, like older versions of macOS.
+
+- bpo-10978: Semaphores and BoundedSemaphores can now release more than one
+ waiting thread at a time.
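+
+  A minimal sketch of the new *n* argument (no waiting threads are actually
+  started here)::
+
+      import threading
+
+      sem = threading.Semaphore(0)
+      sem.release(n=2)   # increments the counter by 2, waking up to two waiters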
+
+- bpo-37972: Subscripts to the `unittest.mock.call` objects now receive the
+  same chaining mechanism as any other custom attributes, so that the
+  following usage no longer raises a `TypeError`::
+
+      call().foo().__getitem__('bar')
+
+  Patch by blhsing.
+
+- bpo-37965: Fix C compiler warning caused by
+ distutils.ccompiler.CCompiler.has_function.
+
+- bpo-37964: Add ``F_GETPATH`` command to :mod:`fcntl`.
+
+- bpo-37960: ``repr()`` of buffered and text streams now silences only
+  expected exceptions when getting the value of the "name" and "mode"
+  attributes.
+
+- bpo-37961: Add a ``total_nframe`` field to the traces collected by the
+ tracemalloc module. This field indicates the original number of frames
+ before it was truncated.
+
+- bpo-37951: Most features of the subprocess module now work again in
+ subinterpreters. Only *preexec_fn* is restricted in subinterpreters.
+
+- bpo-36205: Fix the rusage implementation of time.process_time() to
+ correctly report the sum of the system and user CPU time.
+
+- bpo-37950: Fix :func:`ast.dump` when called with an incompletely
+  initialized node.
+
+- bpo-34679: Restores instantiation of Windows IOCP event loops from the
+ non-main thread.
+
+- bpo-36917: Add default implementation of the
+  :meth:`ast.NodeVisitor.visit_Constant` method which emits a deprecation
+  warning and calls the corresponding method ``visit_Num()``,
+  ``visit_Str()``, etc.
+
+- bpo-37798: Update test_statistics.py to verify that the statistics module
+ works well for both C and Python implementations. Patch by Dong-hee Na
+
+- bpo-26589: Added a new status code to the http module: 451
+ UNAVAILABLE_FOR_LEGAL_REASONS
+
+- bpo-37915: Fix a segmentation fault that appeared when comparing instances
+ of ``datetime.timezone`` and ``datetime.tzinfo`` objects. Patch by Pablo
+ Galindo.
+
+- bpo-32554: Deprecate having random.seed() call hash on arbitrary types.
+
+- bpo-9938: Add optional keyword argument ``exit_on_error`` for
+ :class:`ArgumentParser`.
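+
+  A hedged sketch of the new keyword argument; the ``--count`` option is
+  illustrative::
+
+      import argparse
+
+      parser = argparse.ArgumentParser(exit_on_error=False)
+      parser.add_argument("--count", type=int)
+      try:
+          parser.parse_args(["--count", "not-an-int"])
+      except argparse.ArgumentError as exc:
+          print("caught:", exc)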
+
+- bpo-37851: The :mod:`faulthandler` module no longer allocates its
+ alternative stack at Python startup. Now the stack is only allocated at
+ the first faulthandler usage.
+
+- bpo-32793: Fix a duplicated debug message when
+ :meth:`smtplib.SMTP.connect` is called.
+
+- bpo-37885: venv: Don't generate unset variable warning on deactivate.
+
+- bpo-37868: Fix dataclasses.is_dataclass when given an instance that never
+ raises AttributeError in __getattr__. That is, an object that returns
+ something for __dataclass_fields__ even if it's not a dataclass.
+
+- bpo-37811: Fix ``socket`` module's ``socket.connect(address)`` function
+  being unable to establish a connection in case of an interrupted system
+  call. The problem was observed on all OSes whose ``poll(2)`` system call
+  can take only non-negative integers and -1 as a timeout value.
+
+- bpo-37863: Optimizations for Fraction.__hash__ suggested by Tim Peters.
+
+- bpo-21131: Fix ``faulthandler.register(chain=True)`` stack. faulthandler
+ now allocates a dedicated stack of ``SIGSTKSZ*2`` bytes, instead of just
+ ``SIGSTKSZ`` bytes. Calling the previous signal handler in faulthandler
+ signal handler uses more than ``SIGSTKSZ`` bytes of stack memory on some
+ platforms.
+
+- bpo-37798: Add C fastpath for statistics.NormalDist.inv_cdf(). Patch by
+  Dong-hee Na.
+
+- bpo-37804: Remove the deprecated method `threading.Thread.isAlive()`.
+ Patch by Dong-hee Na.
+
+- bpo-37819: Add Fraction.as_integer_ratio() to match the corresponding
+ methods in bool, int, float, and decimal.
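+
+  For example::
+
+      from fractions import Fraction
+
+      print(Fraction(3, 4).as_integer_ratio())   # (3, 4)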
+
+- bpo-14465: Add an xml.etree.ElementTree.indent() function for
+ pretty-printing XML trees. Contributed by Stefan Behnel.
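+
+  A small sketch of the new function::
+
+      import xml.etree.ElementTree as ET
+
+      root = ET.fromstring("<root><child>text</child></root>")
+      ET.indent(root)
+      print(ET.tostring(root, encoding="unicode"))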
+
+- bpo-37810: Fix :mod:`difflib` ``?`` hint in diff output when dealing with
+ tabs. Patch by Anthony Sottile.
+
+- bpo-37772: In ``zipfile.Path``, when adding implicit dirs, ensure that
+ ancestral directories are added and that duplicates are excluded.
+
+- bpo-18578: Renamed and documented `test.bytecode_helper` as
+ `test.support.bytecode_helper`. Patch by Joannah Nanjekye.
+
+- bpo-37785: Fix xgettext warnings in :mod:`argparse`.
+
+- bpo-34488: :meth:`writelines` method of :class:`io.BytesIO` is now
+ slightly faster when many small lines are passed. Patch by Sergey
+ Fedoseev.
+
+- bpo-37449: `ensurepip` now uses `importlib.resources.read_binary()` to
+ read data instead of `pkgutil.get_data()`. Patch by Joannah Nanjekye.
+
+- bpo-28292: Mark calendar.py helper functions as being private. This
+  follows PEP 8 guidance to maintain the style conventions in the module and
+  it addresses a known case of user confusion.
+
+- bpo-18049: Add a definition of THREAD_STACK_SIZE for AIX in
+  Python/thread_pthread.h. The default thread stack size caused crashes with
+  the default recursion limit. Patch by M Felt.
+
+- bpo-37742: The logging.getLogger() API now returns the root logger when
+ passed the name 'root', whereas previously it returned a non-root logger
+ named 'root'. This could affect cases where user code explicitly wants a
+ non-root logger named 'root', or instantiates a logger using
+ logging.getLogger(__name__) in some top-level module called 'root.py'.
+
+- bpo-37738: Fix the implementation of curses ``addch(str, color_pair)``:
+ pass the color pair to ``setcchar()``, instead of always passing 0 as the
+ color pair.
+
+- bpo-37723: Fix performance regression on regular expression parsing with
+ huge character sets. Patch by Yann Vaginay.
+
+- bpo-35943: The function :c:func:`PyImport_GetModule` now ensures any
+ module it returns is fully initialized. Patch by Joannah Nanjekye.
+
+- bpo-32178: Fix IndexError in :mod:`email` package when trying to parse
+ invalid address fields starting with ``:``.
+
+- bpo-37268: The :mod:`parser` module is deprecated and will be removed in
+ future versions of Python.
+
+- bpo-11953: Completing WSA* error codes in :mod:`socket`.
+
+- bpo-37685: Fixed comparisons of :class:`datetime.timedelta` and
+ :class:`datetime.timezone`.
+
+- bpo-37697: Synchronize ``importlib.metadata`` with `importlib_metadata 0.19
+ `_,
+ improving handling of EGG-INFO files and fixing a crash when entry point
+ names contained colons.
+
+- bpo-37695: Correct :func:`curses.unget_wch` error message. Patch by
+ Anthony Sottile.
+
+- bpo-37689: Add :meth:`is_relative_to` in :class:`PurePath` to determine
+ whether or not one path is relative to another.
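+
+  For example::
+
+      from pathlib import PurePath
+
+      p = PurePath("/etc/passwd")
+      print(p.is_relative_to("/etc"))   # True
+      print(p.is_relative_to("/usr"))   # False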
+
+- bpo-29553: Fixed :meth:`argparse.ArgumentParser.format_usage` for mutually
+ exclusive groups. Patch by Andrew Nester.
+
+- bpo-37691: Let math.dist() accept coordinates as sequences (or iterables)
+ rather than just tuples.
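+
+  For example, lists now work as coordinates::
+
+      import math
+
+      print(math.dist([0, 0], [3, 4]))   # 5.0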
+
+- bpo-37685: Fixed ``__eq__``, ``__lt__`` etc. implementations in some
+  classes. They now return :data:`NotImplemented` when the other operand is
+  of an unsupported type. This allows the other operand to play a role (for
+  example the equality comparison with :data:`~unittest.mock.ANY` will
+  return ``True``).
+
+- bpo-37354: Make Activate.ps1 Powershell script static to allow for signing
+ it.
+
+- bpo-37664: Update wheels bundled with ensurepip (pip 19.2.3 and setuptools
+ 41.2.0)
+
+- bpo-37663: Bring consistency to venv shell activation scripts by always
+ using __VENV_PROMPT__.
+
+- bpo-37642: Allowed the pure Python implementation of
+  :class:`datetime.timezone` to represent sub-minute offsets close to
+  minimum and maximum boundaries, specifically in the ranges (23:59, 24:00)
+  and (-24:00, -23:59). Patch by Ngalim Siregar.
+
+- bpo-36161: In :mod:`posix`, use ``ttyname_r`` instead of ``ttyname`` for
+ thread safety.
+
+- bpo-36324: Make internal attributes for statistics.NormalDist() private.
+
+- bpo-37555: Fix `NonCallableMock._call_matcher` returning tuple instead of
+ `_Call` object when `self._spec_signature` exists. Patch by Elizabeth
+ Uselton
+
+- bpo-29446: Make `from tkinter import *` import only the expected objects.
+
+- bpo-16970: Raise a ``ValueError`` when an invalid value is passed as
+  *nargs* in :mod:`argparse`. Patch by Robert Leenders.
+
+- bpo-34443: Exceptions from :mod:`enum` now use the ``__qualname__`` of the
+  enum class in the exception message instead of the ``__name__``.
+
+- bpo-37491: Fix ``IndexError`` when parsing email headers with unexpectedly
+ ending bare-quoted string value. Patch by Abhilash Raj.
+
+- bpo-37587: Make json.loads faster for long strings. (Patch by Marco
+ Paolini)
+
+- bpo-18378: Recognize "UTF-8" as a valid value for LC_CTYPE in
+ locale._parse_localename.
+
+- bpo-37579: Return :exc:`NotImplemented` in Python implementation of
+ ``__eq__`` for :class:`~datetime.timedelta` and :class:`~datetime.time`
+ when the other object being compared is not of the same type to match C
+ implementation. Patch by Karthikeyan Singaravelan.
+
+- bpo-21478: Record calls to parent when autospecced object is attached to a
+ mock using :func:`unittest.mock.attach_mock`. Patch by Karthikeyan
+ Singaravelan.
+
+- bpo-37531: "python3 -m test -jN --timeout=TIMEOUT" now kills a worker
+ process if it runs longer than *TIMEOUT* seconds.
+
+- bpo-37482: Fix serialization of display name in originator or destination
+ address fields with both encoded words and special chars.
+
+- bpo-36993: Improve error reporting for corrupt zip files with bad zip64
+ extra data. Patch by Daniel Hillier.
+
+- bpo-37502: pickle.loads() no longer raises TypeError when the buffers
+ argument is set to None
+
+- bpo-37520: Correct behavior for zipfile.Path.parent when the path object
+ identifies a subdirectory.
+
+- bpo-18374: Fix the ``.col_offset`` attribute of nested :class:`ast.BinOp`
+ instances which had a too large value in some situations.
+
+- bpo-37424: Fixes a possible hang when using a timeout on
+ `subprocess.run()` while capturing output. If the child process spawned
+ its own children or otherwise connected its stdout or stderr handles with
+ another process, we could hang after the timeout was reached and our child
+ was killed when attempting to read final output from the pipes.
+
+- bpo-37421: Fix :func:`multiprocessing.util.get_temp_dir` finalizer: clear
+ also the 'tempdir' configuration of the current process, so next call to
+ ``get_temp_dir()`` will create a new temporary directory, rather than
+ reusing the removed temporary directory.
+
+- bpo-37481: The distutils ``bdist_wininst`` command is deprecated in Python
+ 3.8, use ``bdist_wheel`` (wheel packages) instead.
+
+- bpo-37479: When `Enum.__str__` is overridden in a derived class, the
+ override will be used by `Enum.__format__` regardless of whether mixin
+ classes are present.
+
+- bpo-37440: http.client now enables TLS 1.3 post-handshake authentication
+ for default context or if a cert_file is passed to HTTPSConnection.
+
+- bpo-37437: Update vendorized expat version to 2.2.7.
+
+- bpo-37428: SSLContext.post_handshake_auth = True no longer sets
+ SSL_VERIFY_POST_HANDSHAKE verify flag for client connections. Although the
+ option is documented as ignored for clients, OpenSSL implicitly enables
+ cert chain validation when the flag is set.
+
+- bpo-37420: :func:`os.sched_setaffinity` now correctly handles errors that
+ arise during iteration over its ``mask`` argument. Patch by Brandt Bucher.
+
+- bpo-37412: The :func:`os.getcwdb` function now uses the UTF-8 encoding on
+ Windows, rather than the ANSI code page: see :pep:`529` for the rationale.
+ The function is no longer deprecated on Windows.
+
+- bpo-37406: The sqlite3 module now raises TypeError, rather than
+  ValueError, if the operation argument type is not str: in execute(),
+  executemany() and when calling a connection.
+
+- bpo-29412: Fix IndexError in parsing a header value ending unexpectedly.
+ Patch by Abhilash Raj.
+
+- bpo-36546: The *dist* argument for statistics.quantiles() is now
+ positional only. The current name doesn't reflect that the argument can be
+ either a dataset or a distribution. Marking the parameter as positional
+ avoids confusion and makes it possible to change the name later.
+
+- bpo-37394: Fix a bug that was causing the :mod:`queue` module to fail if
+ the accelerator module was not available. Patch by Pablo Galindo.
+
+- bpo-37376: :mod:`pprint` now has support for
+ :class:`types.SimpleNamespace`. Patch by Carl Bordum Hansen.
+
+- bpo-26967: An :class:`~argparse.ArgumentParser` with
+ ``allow_abbrev=False`` no longer disables grouping of short flags, such as
+ ``-vv``, but only disables abbreviation of long flags as documented. Patch
+ by Zac Hatfield-Dodds.
+
+- bpo-37212: :func:`unittest.mock.call` now preserves the order of keyword
+ arguments in repr output. Patch by Karthikeyan Singaravelan.
+
+- bpo-37372: Fix error unpickling datetime.time objects from Python 2 with
+ seconds>=24. Patch by Justin Blanchard.
+
+- bpo-37345: Add formal support for UDPLITE sockets. Support was present
+ before, but it is now easier to detect support with ``hasattr(socket,
+ 'IPPROTO_UDPLITE')`` and there are constants defined for each of the
+ values needed: :py:obj:`socket.IPPROTO_UDPLITE`,
+ :py:obj:`UDPLITE_SEND_CSCOV`, and :py:obj:`UDPLITE_RECV_CSCOV`. Patch by
+ Gabe Appleton.
+
+- bpo-37358: Optimized ``functools.partial`` by using vectorcall.
+
+- bpo-37347: :meth:`sqlite3.Connection.create_aggregate`,
+  :meth:`sqlite3.Connection.create_function`,
+  :meth:`sqlite3.Connection.set_authorizer`,
+  :meth:`sqlite3.Connection.set_progress_handler` and
+  :meth:`sqlite3.Connection.set_trace_callback` methods led to segfaults if
+  some of these methods were called twice with objects that are equal but
+  not identical. Now callbacks are stored more carefully. Patch by Aleksandr
+  Balezin.
+
+- bpo-37163: The *obj* argument of :func:`dataclasses.replace` is
+ positional-only now.
+
+- bpo-37085: Add the optional Linux SocketCAN Broadcast Manager constants,
+ used as flags to configure the BCM behaviour, in the socket module. Patch
+ by Karl Ding.
+
+- bpo-37328: ``HTMLParser.unescape`` is removed. It was undocumented and
+ deprecated since Python 3.4.
+
+- bpo-37305: Add .webmanifest -> application/manifest+json to list of
+ recognized file types and content type headers
+
+- bpo-37320: ``aifc.openfp()`` alias to ``aifc.open()``, ``sunau.openfp()``
+ alias to ``sunau.open()``, and ``wave.openfp()`` alias to ``wave.open()``
+ have been removed. They were deprecated since Python 3.7.
+
+- bpo-37315: Deprecated accepting floats with integral value (like ``5.0``)
+ in :func:`math.factorial`.
+
+- bpo-37312: ``_dummy_thread`` and ``dummy_threading`` modules have been
+ removed. These modules were deprecated since Python 3.7 which requires
+ threading support.
+
+- bpo-33972: Email with single part but content-type set to ``multipart/*``
+ doesn't raise AttributeError anymore.
+
+- bpo-37280: Use threadpool for reading from file for sendfile fallback
+ mode.
+
+- bpo-37279: Fix asyncio sendfile support when sendfile sends extra data in
+ fallback mode.
+
+- bpo-19865: :func:`ctypes.create_unicode_buffer()` now also supports
+ non-BMP characters on platforms with 16-bit :c:type:`wchar_t` (for
+ example, Windows and AIX).
+
+- bpo-37266: In a subinterpreter, spawning a daemon thread now raises an
+  exception. Daemon threads were never supported in subinterpreters.
+  Previously, the subinterpreter finalization crashed with a Python fatal
+  error if a daemon thread was still running.
+
+- bpo-37210: Allow pure Python implementation of :mod:`pickle` to work even
+ when the C :mod:`_pickle` module is unavailable.
+
+- bpo-21872: Fix :mod:`lzma`: module decompresses data incompletely. When
+  decompressing a FORMAT_ALONE format file, and it doesn't have the end
+  marker, sometimes the last few bytes (from one up to several dozen) can't
+  be output. Patch by Ma Lin.
+
+- bpo-35922: Fix :meth:`RobotFileParser.crawl_delay` and
+ :meth:`RobotFileParser.request_rate` to return ``None`` rather than raise
+ :exc:`AttributeError` when no relevant rule is defined in the robots.txt
+ file. Patch by Rémi Lapeyre.
+
+- bpo-35766: Change the format of feature_version to be a (major, minor)
+ tuple.
+
+- bpo-36607: Eliminate :exc:`RuntimeError` raised by
+ :func:`asyncio.all_tasks()` if internal tasks weak set is changed by
+ another thread during iteration.
+
+- bpo-18748: :class:`_pyio.IOBase` destructor now does nothing if getting
+  the ``closed`` attribute fails to better mimic :class:`_io.IOBase`
+  finalizer.
+
+- bpo-36402: Fix a race condition at Python shutdown when waiting for
+  threads. Wait until the Python thread state of all non-daemon threads gets
+  deleted (join all non-daemon threads), rather than just waiting until
+  non-daemon Python threads complete.
+
+- bpo-37206: Default values which cannot be represented as Python objects
+  are no longer improperly represented as ``None`` in function signatures.
+
+- bpo-37111: Added ``encoding`` and ``errors`` keyword parameters to
+ ``logging.basicConfig``.
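+
+  A minimal sketch of the new parameters; ``app.log`` is a placeholder file
+  name::
+
+      import logging
+
+      logging.basicConfig(filename="app.log", encoding="utf-8",
+                          level=logging.DEBUG)
+      logging.debug("café")   # written to app.log as UTF-8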
+
+- bpo-12144: Ensure cookies with ``expires`` attribute are handled in
+ :meth:`CookieJar.make_cookies`.
+
+- bpo-34886: Fix an unintended ValueError from :func:`subprocess.run` when
+ checking for conflicting `input` and `stdin` or `capture_output` and
+ `stdout` or `stderr` args when they were explicitly provided but with
+ `None` values within a passed in `**kwargs` dict rather than as passed
+ directly by name. Patch contributed by Rémi Lapeyre.
+
+- bpo-37173: The exception message for ``inspect.getfile()`` now correctly
+ reports the passed class rather than the builtins module.
+
+- bpo-37178: Give math.perm() a one argument form that means the same as
+ math.factorial().
+
+- bpo-37178: For math.perm(n, k), let k default to n, giving the same result
+ as factorial.
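+
+  For example::
+
+      import math
+
+      print(math.perm(5))      # 120, same as math.factorial(5)
+      print(math.perm(5, 2))   # 20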
+
+- bpo-37165: Converted _collections._count_elements to use the Argument
+ Clinic.
+
+- bpo-34767: Do not always create a :class:`collections.deque` in
+ :class:`asyncio.Lock`.
+
+- bpo-37158: Speed-up statistics.fmean() by switching from a function to a
+ generator.
+
+- bpo-34282: Remove ``Enum._convert`` method, deprecated in 3.8.
+
+- bpo-37150: `argparse._ActionsContainer.add_argument` now raises an error
+  if someone accidentally passes the FileType class object instead of an
+  instance of FileType as the `type` argument.
+
+- bpo-28724: The socket module now has the :func:`socket.send_fds` and
+  :func:`socket.recv_fds` functions. Contributed by Joannah Nanjekye, Shinya
+  Okano and Victor Stinner.
+
+- bpo-35621: Support running asyncio subprocesses when the event loop is
+  executed in a thread on UNIX.
+
+- bpo-36520: Lengthy email headers with UTF-8 characters are now properly
+ encoded when they are folded. Patch by Jeffrey Kintscher.
+
+- bpo-30835: Fixed a bug in email parsing where a message with invalid bytes
+ in content-transfer-encoding of a multipart message can cause an
+ AttributeError. Patch by Andrew Donnellan.
+
+- bpo-31163: pathlib.Path instance's rename and replace methods now return
+ the new Path instance.
+
+- bpo-25068: :class:`urllib.request.ProxyHandler` now lowercases the keys of
+ the passed dictionary.
+
+- bpo-26185: Fix :func:`repr` on empty :class:`ZipInfo` object. Patch by
+ Mickaël Schoentgen.
+
+- bpo-21315: Email headers containing RFC2047 encoded words are parsed
+ despite the missing whitespace, and a defect registered. Also missing
+ trailing whitespace after encoded words is now registered as a defect.
+
+- bpo-31904: Port test_datetime to VxWorks: skip zoneinfo tests on VxWorks
+
+- bpo-35805: Add parser for Message-ID header and add it to default
+  HeaderRegistry. This should prevent folding of Message-ID using RFC 2047
+  encoded words.
+
+- bpo-36871: Ensure method signature is used instead of constructor
+ signature of a class while asserting mock object against method calls.
+ Patch by Karthikeyan Singaravelan.
+
+- bpo-35070: posix.getgrouplist() now works correctly when the user belongs
+ to NGROUPS_MAX supplemental groups. Patch by Jeffrey Kintscher.
+
+- bpo-31783: Fix race condition in ThreadPoolExecutor when worker threads
+ are created during interpreter shutdown.
+
+- bpo-36582: Fix ``UserString.encode()`` to correctly return ``bytes``
+ rather than a ``UserString`` instance.
+
+- bpo-32424: Deprecate xml.etree.ElementTree.Element.copy() in favor of
+ copy.copy().
+
+ Patch by Gordon P. Hemsley
+
+- bpo-36564: Fix infinite loop in email header folding logic that would be
+ triggered when an email policy's max_line_length is not long enough to
+ include the required markup and any values in the message. Patch by Paul
+ Ganssle
+
+- bpo-36543: Removed methods Element.getchildren(), Element.getiterator()
+ and ElementTree.getiterator() and the xml.etree.cElementTree module.
+
+- bpo-36409: Remove the old plistlib API deprecated in Python 3.4
+
+- bpo-36302: distutils sorts source file lists so that Extension .so files
+ build more reproducibly by default
+
+- bpo-36250: Ignore ``ValueError`` from ``signal`` with ``interaction`` in
+ non-main thread.
+
+- bpo-36046: Added ``user``, ``group`` and ``extra_groups`` parameters to
+ the subprocess.Popen constructor. Patch by Patrick McLean.
+
+- bpo-32627: Fix compile error when conflicting ``_uuid`` headers are
+  included.
+
+- bpo-35800: Deprecate ``smtpd.MailmanProxy`` ready for future removal.
+
+- bpo-35168: :attr:`shlex.shlex.punctuation_chars` is now a read-only
+ property.
+
+- bpo-8538: Add support for boolean actions like ``--foo`` and ``--no-foo``
+ to argparse. Patch contributed by Rémi Lapeyre.
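+
+  A minimal sketch using the ``BooleanOptionalAction`` action added by this
+  change; the ``--foo`` flag name is illustrative::
+
+      import argparse
+
+      parser = argparse.ArgumentParser()
+      parser.add_argument("--foo", action=argparse.BooleanOptionalAction)
+      print(parser.parse_args(["--no-foo"]))   # Namespace(foo=False)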
+
+- bpo-20504: Fixes a bug in :mod:`cgi` module when a multipart/form-data
+ request has no `Content-Length` header.
+
+- bpo-25988: The abstract base classes in :mod:`collections.abc` no longer
+ are exposed in the regular :mod:`collections` module.
+
+- bpo-11122: Distutils won't check for rpmbuild in specified paths only.
+
+- bpo-34775: Division handling of PurePath now returns NotImplemented
+ instead of raising a TypeError when passed something other than an
+ instance of str or PurePath. Patch by Roger Aiudi.
+
+- bpo-34749: :func:`binascii.a2b_base64` is now up to 2 times faster. Patch
+ by Sergey Fedoseev.
+
+- bpo-34519: Add additional aliases for HP Roman 8. Patch by Michael Osipov.
+
+- bpo-28009: Fix uuid.getnode() on platforms with '.' as MAC Addr delimiter,
+  as well as a fix for the MAC Addr format that omits a leading 0 in MAC
+  Addr values. Currently, AIX is the only known platform with these
+  settings. Patch by Michael Felt.
+
+- bpo-30618: Add :meth:`~pathlib.Path.readlink`. Patch by Girts Folkmanis.
+
+- bpo-32498: Made :func:`urllib.parse.unquote()` accept bytes in addition to
+ strings. Patch by Stein Karlsen.
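+
+  For example::
+
+      from urllib.parse import unquote
+
+      print(unquote(b"a%20b"))   # 'a b'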
+
+- bpo-33348: lib2to3 now recognizes expressions after ``*`` and ``**`` like
+  in ``f(*[] or [])``.
+
+- bpo-32689: Update :func:`shutil.move` function to allow for Path objects
+ to be used as source argument. Patch by Emily Morehouse and Maxwell
+ "5.13b" McKinnon.
+
+- bpo-32820: Added __format__ to IPv4 and IPv6 classes. Always outputs a
+  fully zero-padded string. Supports b/x/n modifiers (bin/hex/native
+  format). Native format for IPv4 is bin, native format for IPv6 is hex.
+  Also supports '#' and '_' modifiers.
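+
+  For example::
+
+      import ipaddress
+
+      addr = ipaddress.IPv4Address("192.168.0.1")
+      print(f"{addr:#x}")   # 0xc0a80001
+      print(f"{addr:b}")    # 32-digit zero-padded binary string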
+
+- bpo-27657: Fix urllib.parse.urlparse() with numeric paths. A string like
+ "path:80" is no longer parsed as a path but as a scheme ("path") and a
+ path ("80").
+
+- bpo-4963: Fixed non-deterministic behavior related to mimetypes extension
+ mapping and module reinitialization.
+
+Documentation
+-------------
+
+- bpo-21767: Explicitly mention abc support in functools.singledispatch
+
+- bpo-38816: Provides more details about the interaction between
+ :c:func:`fork` and CPython's runtime, focusing just on the C-API. This
+ includes cautions about where :c:func:`fork` should and shouldn't be
+ called.
+
+- bpo-38351: Modernize :mod:`email` examples from %-formatting to f-strings.
+
+- bpo-38778: Document the fact that :exc:`RuntimeError` is raised if
+ :meth:`os.fork` is called in a subinterpreter.
+
+- bpo-38592: Add Brazilian Portuguese to the language switcher at Python
+ Documentation website.
+
+- bpo-38294: Add list of no-longer-escaped chars to re.escape documentation
+
+- bpo-38053: Modernized the plistlib documentation
+
+- bpo-26868: Fix example usage of :c:func:`PyModule_AddObject` to properly
+ handle errors.
+
+- bpo-36797: Fix a dead link in the distutils API Reference.
+
+- bpo-37977: Warn more strongly and clearly about pickle insecurity
+
+- bpo-37979: Added a link to dateutil.parser.isoparse in the
+ datetime.fromisoformat documentation. Patch by Paul Ganssle
+
+- bpo-12707: Deprecate info(), geturl(), getcode() methods in favor of the
+ headers, url, and status properties, respectively, for HTTPResponse and
+ addinfourl. Also deprecate the code attribute of addinfourl in favor of
+ the status attribute. Patch by Ashwin Ramaswami
+
+- bpo-37937: Mention ``frame.f_trace`` in :func:`sys.settrace` docs.
+
+- bpo-37878: Make :c:func:`PyThreadState_DeleteCurrent` Internal.
+
+- bpo-37759: Beginning edits to Whatsnew 3.8
+
+- bpo-37726: Stop recommending getopt in the tutorial for command line
+ argument parsing and promote argparse.
+
+- bpo-32910: Remove implementation-specific behaviour of how venv's
+ Deactivate works.
+
+- bpo-37256: Fix wording of arguments for :class:`Request` in
+ :mod:`urllib.request`
+
+- bpo-37284: Add a brief note to indicate that any new
+ ``sys.implementation`` required attributes must go through the PEP
+ process.
+
+- bpo-30088: Documented that :class:`mailbox.Maildir` constructor doesn't
+ attempt to verify the maildir folder layout correctness. Patch by
+ Sviatoslav Sydorenko.
+
+- bpo-37521: Fix `importlib` examples to insert any newly created modules
+ via importlib.util.module_from_spec() immediately into sys.modules instead
+ of after calling loader.exec_module().
+
+ Thanks to Benjamin Mintz for finding the bug.
+
+- bpo-37456: Slash ('/') is now part of syntax.
+
+- bpo-37487: Fix PyList_GetItem index description to include 0.
+
+- bpo-37149: Replace the dead link to the Tkinter 8.5 reference by John
+ Shipman, New Mexico Tech, with a link to the archive.org copy.
+
+- bpo-37478: Added possible exceptions to the description of os.chdir().
+
+- bpo-34903: Documented that in :meth:`datetime.datetime.strptime()`, the
+ leading zero in some two-digit formats is optional. Patch by Mike Gleen.
+
+- bpo-36260: Add decompression pitfalls to zipfile module documentation.
+
+- bpo-37004: In the documentation for difflib, a note was added explicitly
+ warning that the results of SequenceMatcher's ratio method may depend on
+ the order of the input strings.
+
+- bpo-36960: Restructured the :mod:`datetime` docs in the interest of making
+ them more user-friendly and improving readability. Patch by Brad Solomon.
+
+- bpo-36487: Make C-API docs clear about what the "main" interpreter is.
+
+- bpo-23460: The documentation for decimal string formatting using the `:g`
+ specifier has been updated to reflect the correct exponential notation
+ cutoff point. Original patch contributed by Tuomas Suutari.
+
+- bpo-35803: Document and test that ``tempfile`` functions may accept a
+ :term:`path-like object` for the ``dir`` argument. Patch by Anthony
+ Sottile.
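+
+  A short illustrative sketch (any existing directory works as ``dir``)::
+
+      import pathlib
+      import tempfile
+
+      target = pathlib.Path(tempfile.gettempdir())    # any existing directory
+      with tempfile.TemporaryFile(dir=target) as fp:  # dir accepts path-like
+          fp.write(b"data")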
+
+- bpo-33944: Added a note about the intended use of code in .pth files.
+
+- bpo-34293: Fix the Doc/Makefile regarding PAPER environment variable and
+ PDF builds
+
+- bpo-25237: Add documentation for tkinter modules
+
+Tests
+-----
+
+- bpo-38614: Fix test_communicate() of test_asyncio.test_subprocess: use
+ ``support.LONG_TIMEOUT`` (5 minutes), instead of just 1 minute.
+
+- bpo-38614: Add timeout constants to :mod:`test.support`:
+ :data:`~test.support.LOOPBACK_TIMEOUT`,
+ :data:`~test.support.INTERNET_TIMEOUT`,
+ :data:`~test.support.SHORT_TIMEOUT` and
+ :data:`~test.support.LONG_TIMEOUT`.
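+
+  For instance, a test can use the shared constant instead of a hard-coded
+  value (a minimal sketch)::
+
+      import socket
+      from test import support
+
+      srv = socket.create_server(("127.0.0.1", 0))
+      srv.settimeout(support.LOOPBACK_TIMEOUT)   # not a hard-coded 5 or 30
+      srv.close()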
+
+- bpo-38502: test.regrtest now uses process groups in multiprocessing mode
+  (-jN command line option) when process groups are available, i.e. when
+  the :func:`os.setsid` and :func:`os.killpg` functions are available.
+
+- bpo-35998: Fix a race condition in test_asyncio.test_start_tls_server_1().
+  Previously, there was a race condition between the test main() function,
+  which replaces the protocol, and the test ServerProto protocol, which
+  sends ANSWER once it gets HELLO. Now, only the test main() function is
+  responsible for sending data; ServerProto no longer sends data.
+
+- bpo-38470: Fix ``test_compileall.test_compile_dir_maxlevels()`` on Windows
+ without long path support: only create 3 subdirectories instead of between
+ 20 and 100 subdirectories.
+
+- bpo-37531: On timeout, regrtest no longer attempts to call
+  ``popen.communicate()`` again: it can hang until all child processes
+  using stdout and stderr pipes complete. Kill the worker process and
+  ignore its output. Also change the faulthandler timeout of the main
+  process from 1 minute to 5 minutes, for Python's slowest buildbots.
+
+- bpo-38239: Fix test_gdb for Link Time Optimization (LTO) builds.
+
+- bpo-38275: test_ssl now handles disabled TLS/SSL versions better.
+ OpenSSL's crypto policy and run-time settings are recognized and tests for
+ disabled versions are skipped. Tests also accept more TLS minimum_versions
+ for platforms that override OpenSSL's default with strict settings.
+
+- bpo-38271: The private keys for test_ssl were encrypted with 3DES in
+ traditional PKCS#5 format. 3DES and the digest algorithm of PKCS#5 are
+ blocked by some strict crypto policies. Use PKCS#8 format with AES256
+ encryption instead.
+
+- bpo-38270: test.support now has a helper function to check for
+  availability of a hash digest function. Several tests are refactored to
+  avoid MD5 and use SHA256 instead. Other tests are marked to use MD5 and
+  skipped when MD5 is disabled.
+
+- bpo-37123: Multiprocessing test test_mymanager() now also expects
+ -SIGTERM, not only exitcode 0. BaseManager._finalize_manager() sends
+ SIGTERM to the manager process if it takes longer than 1 second to stop,
+ which happens on slow buildbots.
+
+- bpo-38212: Multiprocessing tests: increase
+ test_queue_feeder_donot_stop_onexc() timeout from 1 to 60 seconds.
+
+- bpo-38117: Test with OpenSSL 1.1.1d
+
+- bpo-38018: Increase code coverage for multiprocessing.shared_memory.
+
+- bpo-37805: Add tests for json.dump(..., skipkeys=True). Patch by Dong-hee
+ Na.
+
+- bpo-37531: Enhance regrtest multiprocess timeout: write a message when
+ killing a worker process, catch popen.kill() and popen.wait() exceptions,
+ put a timeout on the second call to popen.communicate().
+
+- bpo-37876: Add tests for ROT-13 codec.
+
+- bpo-36833: Added tests for PyDateTime_xxx_GET_xxx() macros of the C API of
+ the :mod:`datetime` module. Patch by Joannah Nanjekye.
+
+- bpo-37558: Fix test_shared_memory_cleaned_after_process_termination name
+ handling
+
+- bpo-37526: Add :func:`test.support.catch_threading_exception`: context
+ manager catching :class:`threading.Thread` exception using
+ :func:`threading.excepthook`.
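+
+  Typical usage looks roughly like this (a minimal sketch)::
+
+      import threading
+      from test import support
+
+      with support.catch_threading_exception() as cm:
+          t = threading.Thread(target=lambda: 1 / 0)
+          t.start()
+          t.join()
+          assert cm.exc_type is ZeroDivisionError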
+
+- bpo-37421: test_concurrent_futures now explicitly stops the ForkServer
+ instance if it's running.
+
+- bpo-37421: multiprocessing tests now stop the ForkServer instance if it's
+ running: close the "alive" file descriptor to ask the server to stop and
+ then remove its UNIX address.
+
+- bpo-37421: test_distutils.test_build_ext() is now able to remove the
+  temporary directory on Windows: don't import the newly built C extension
+  ("xx") in the current process, but test it in a separate process.
+
+- bpo-37421: test_concurrent_futures now cleans up multiprocessing to
+  immediately remove temporary directories created by
+  multiprocessing.util.get_temp_dir().
+
+- bpo-37421: test_winconsoleio doesn't leak a temporary file anymore: use
+ tempfile.TemporaryFile() to remove it when the test completes.
+
+- bpo-37421: multiprocessing tests now explicitly call ``_run_finalizers()``
+ to immediately remove temporary directories created by tests.
+
+- bpo-37421: urllib.request tests now call
+  :func:`~urllib.request.urlcleanup` to remove temporary files created by
+  ``urlretrieve()`` tests and to clear the ``_opener`` global variable set
+  by ``urlopen()`` and functions that indirectly call ``urlopen()``.
+
+- bpo-37472: Remove ``Lib/test/outstanding_bugs.py``.
+
+- bpo-37199: Fix test failures when IPv6 is unavailable or disabled.
+
+- bpo-19696: Replace deprecated method "random.choose" with "random.choice"
+ in "test_pkg_import.py".
+
+- bpo-37335: Remove no longer necessary code from c locale coercion tests
+
+- bpo-37421: Fix test_shutil to no longer leak temporary files.
+
+- bpo-37411: Fix test_wsgiref.testEnviron() to no longer depend on the
+ environment variables (don't fail if "X" variable is set).
+
+- bpo-37400: Fix test_os.test_chown(): use os.getgroups() rather than
+  grp.getgrall() to get groups. Also rename the test to test_chown_gid().
+
+- bpo-37359: Add --cleanup option to python3 -m test to remove
+ ``test_python_*`` directories of previous failed jobs. Add "make
+ cleantest" to run ``python3 -m test --cleanup``.
+
+- bpo-37362: test_gdb no longer fails if it gets an "unexpected" message on
+ stderr: it now ignores stderr. The purpose of test_gdb is to test that
+ python-gdb.py commands work as expected, not to test gdb.
+
+- bpo-35998: Avoid TimeoutError in test_asyncio: test_start_tls_server_1()
+
+- bpo-37278: Fix test_asyncio ProactorLoopCtrlC: join the thread to prevent
+ leaking a running thread and leaking a reference.
+
+- bpo-37261: Fix :func:`test.support.catch_unraisable_exception`: its
+ __exit__() method now ignores unraisable exception raised when clearing
+ its ``unraisable`` attribute.
+
+- bpo-37069: regrtest now uses :func:`sys.unraisablehook` to mark a test as
+ "environment altered" (ENV_CHANGED) if it emits an "unraisable exception".
+ Moreover, regrtest logs a warning in this case.
+
+ Use ``python3 -m test --fail-env-changed`` to catch unraisable exceptions
+ in tests.
+
+- bpo-37252: Fix assertions in ``test_close`` and
+ ``test_events_mask_overflow`` devpoll tests.
+
+- bpo-37169: Rewrite ``_PyObject_IsFreed()`` unit tests.
+
+- bpo-37153: ``test_venv.test_multiprocessing()`` now explicitly calls
+ ``pool.terminate()`` to wait until the pool completes.
+
+- bpo-34001: Make test_ssl pass with LibreSSL. LibreSSL handles minimum and
+ maximum TLS version differently than OpenSSL.
+
+- bpo-36919: Make ``test_source_encoding.test_issue2301`` implementation
+ independent. The test will work now for both CPython and IronPython.
+
+- bpo-30202: Update ``test.test_importlib.test_abc`` to test
+ ``find_spec()``.
+
+- bpo-28009: Modify the test_uuid logic to test when a program is available
+  AND can be used to obtain a MACADDR as the basis for a UUID. Patch by
+  M. Felt.
+
+- bpo-34596: Fall back to a default reason when :func:`unittest.skip` is
+  used as a bare decorator (without being called). Patch by Naitree Zhu.
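+
+  A minimal sketch of the bare-decorator form this covers (test names are
+  illustrative)::
+
+      import unittest
+
+      class TestTodo(unittest.TestCase):
+          @unittest.skip          # bare decorator; a default reason is used
+          def test_not_ready(self):
+              self.fail("should have been skipped")
+
+      if __name__ == "__main__":
+          unittest.main()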
+
+Build
+-----
+
+- bpo-38809: On Windows, build scripts will now recognize and use python.exe
+ from an active virtual env.
+
+- bpo-38684: Fix _hashlib build when Blake2 is disabled, but OpenSSL
+ supports it.
+
+- bpo-38468: Misc/python-config.in now uses ``getvar()`` for all remaining
+  ``sysconfig.get_config_var()`` calls. Patch by Joannah Nanjekye.
+
+- bpo-37415: Fix stdatomic.h header check for ICC compiler: the ICC
+ implementation lacks atomic_uintptr_t type which is needed by Python.
+
+- bpo-38301: In Solaris family, we must be sure to use ``-D_REENTRANT``.
+ Patch by Jesús Cea Avión.
+
+- bpo-36002: Locate ``llvm-profdata`` and ``llvm-ar`` binaries using
+ ``AC_PATH_TOOL`` rather than ``AC_PATH_TARGET_TOOL``.
+
+- bpo-37936: The :file:`.gitignore` file now keeps all rules that are meant
+  to apply to files in a specific place in the repo "rooted" with a
+  non-trailing slash. Previously, when the intended file to ignore happened
+  to be at the root of the repo, we'd most often accidentally also ignore
+  files and directories with the same name anywhere in the tree.
+
+- bpo-37760: The :file:`Tools/unicode/makeunicodedata.py` script, which is
+ used for converting information from the Unicode Character Database into
+ generated code and data used by the methods of :class:`str` and by the
+ :mod:`unicodedata` module, now handles each character's data as a
+ ``dataclass`` with named attributes, rather than a length-18 list of
+ different fields.
+
+- bpo-37936: The :file:`.gitignore` file no longer applies to any files that
+ are in fact tracked in the Git repository. Patch by Greg Price.
+
+- bpo-37725: Change "clean" makefile target to also clean the profile
+  guided optimization (PGO) data. Previously you would have to use "make
+  clean" and "make profile-removal", or "make clobber".
+
+- bpo-37707: Mark some individual tests to skip when --pgo is used. The
+ tests marked increase the PGO task time significantly and likely don't
+ help improve optimization of the final executable.
+
+- bpo-36044: Reduce the number of unit tests run for the PGO generation
+ task. This speeds up the task by a factor of about 15x. Running the full
+ unit test suite is slow. This change may result in a slightly less
+ optimized build since not as many code branches will be executed. If you
+ are willing to wait for the much slower build, the old behavior can be
+ restored using './configure [..] PROFILE_TASK="-m test --pgo-extended"'.
+ We make no guarantees as to which PGO task set produces a faster build.
+ Users who care should run their own relevant benchmarks as results can
+ depend on the environment, workload, and compiler tool chain.
+
+- bpo-37468: ``make install`` no longer installs ``wininst-*.exe`` files
+ used by distutils bdist_wininst: bdist_wininst only works on Windows.
+
+- bpo-37189: Many ``PyRun_XXX()`` functions like :c:func:`PyRun_String` were
+ no longer exported in ``libpython38.dll`` by mistake. Export them again to
+ fix the ABI compatibility.
+
+- bpo-25361: Enables use of SSE2 instructions in Windows 32-bit build.
+
+- bpo-36210: Update optional extension module detection for AIX. ossaudiodev
+  and spwd are not applicable for AIX, and are no longer reported as
+  missing. 3rd-party packaging of ncurses (with ASIS support) conflicts with
+  the officially supported AIX curses library, so configure AIX to use
+  libcurses.a. However, skip trying to build _curses_panel.
+
+  Patch by M. Felt.
+
+Windows
+-------
+
+- bpo-38589: Fixes HTML Help shortcut when Windows is not installed to the
+  C drive.
+
+- bpo-38453: Ensure ntpath.realpath() correctly resolves relative paths.
+
+- bpo-38519: Restores the internal C headers that were missing from the
+ nuget.org and Microsoft Store packages.
+
+- bpo-38492: Remove ``pythonw.exe`` dependency on the Microsoft C++ runtime.
+
+- bpo-38344: Fix error message in activate.bat
+
+- bpo-38359: Ensures ``pyw.exe`` launcher reads correct registry key.
+
+- bpo-38355: Fixes ``ntpath.realpath`` failing on ``sys.executable``.
+
+- bpo-38117: Update bundled OpenSSL to 1.1.1d
+
+- bpo-38092: Reduce overhead when using multiprocessing in a Windows virtual
+ environment.
+
+- bpo-38133: Allow py.exe launcher to locate installations from the
+ Microsoft Store and improve display of active virtual environments.
-- bpo-37421: test_distutils.test_build_ext() is now able to remove the
- temporary directory on Windows: don't import the newly built C extension
- ("xx") in the current process, but test it in a separated process.
+- bpo-38114: The ``pip.ini`` is no longer included in the Nuget package.
-- bpo-37421: test_concurrent_futures now cleans up multiprocessing to remove
- immediately temporary directories created by
- multiprocessing.util.get_temp_dir().
+- bpo-32592: Set Windows 8 as the minimum required version for API support
-- bpo-37421: test_winconsoleio doesn't leak a temporary file anymore: use
- tempfile.TemporaryFile() to remove it when the test completes.
+- bpo-36634: :func:`os.cpu_count` now returns active processors rather than
+ maximum processors.
-- bpo-37421: multiprocessing tests now explicitly call ``_run_finalizers()``
- to immediately remove temporary directories created by tests.
+- bpo-36634: venv activate.bat now works when the existing variables contain
+ double quote characters.
-- bpo-37199: Fix test failures when IPv6 is unavailable or disabled.
+- bpo-38081: Prevent error calling :func:`os.path.realpath` on ``'NUL'``.
-- bpo-37335: Remove no longer necessary code from c locale coercion tests
+- bpo-38087: Fix case sensitivity in test_pathlib and test_ntpath.
-- bpo-37421: Fix test_shutil to no longer leak temporary files.
+- bpo-38088: Fixes distutils not finding vcruntime140.dll with only the v142
+ toolset installed.
-- bpo-37411: Fix test_wsgiref.testEnviron() to no longer depend on the
- environment variables (don't fail if "X" variable is set).
+- bpo-37283: Ensure command-line and unattend.xml settings override
+  previously detected states in the Windows installer.
-- bpo-37400: Fix test_os.test_chown(): use os.getgroups() rather than
- grp.getgrall() to get groups. Rename also the test to test_chown_gid().
+- bpo-38030: Fixes :func:`os.stat` failing for block devices on Windows
-- bpo-37359: Add --cleanup option to python3 -m test to remove
- ``test_python_*`` directories of previous failed jobs. Add "make
- cleantest" to run ``python3 -m test --cleanup``.
+- bpo-38020: Fixes potential crash when calling :func:`os.readlink` (or
+ indirectly through :func:`~os.path.realpath`) on a file that is not a
+ supported link.
-- bpo-37362: test_gdb no longer fails if it gets an "unexpected" message on
- stderr: it now ignores stderr. The purpose of test_gdb is to test that
- python-gdb.py commands work as expected, not to test gdb.
+- bpo-37705: Improve the implementation of ``winerror_to_errno()``.
-- bpo-35998: Avoid TimeoutError in test_asyncio: test_start_tls_server_1()
+- bpo-37549: :func:`os.dup` no longer fails for standard streams on Windows
+ 7.
-- bpo-37278: Fix test_asyncio ProactorLoopCtrlC: join the thread to prevent
- leaking a running thread and leaking a reference.
+- bpo-1311: The ``nul`` file on Windows now returns True from
+ :func:`~os.path.exists` and a valid result from :func:`os.stat` with
+ ``S_IFCHR`` set.
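+
+  On Windows this means, for example (a small sketch)::
+
+      import os
+      import stat
+
+      os.path.exists("nul")                   # now True
+      stat.S_ISCHR(os.stat("nul").st_mode)    # now True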
-- bpo-37261: Fix :func:`test.support.catch_unraisable_exception`: its
- __exit__() method now ignores unraisable exception raised when clearing
- its ``unraisable`` attribute.
+- bpo-9949: Enable support for following symlinks in :func:`os.realpath`.
-- bpo-37169: Rewrite ``_PyObject_IsFreed()`` unit tests.
+- bpo-37834: Treat all name surrogate reparse points on Windows in
+ :func:`os.lstat` and other reparse points as regular files in
+ :func:`os.stat`.
-- bpo-37153: ``test_venv.test_mutiprocessing()`` now explicitly calls
- ``pool.terminate()`` to wait until the pool completes.
+- bpo-36266: Add the module name in the formatted error message when a DLL
+  load failure happens during module import in
+  ``_PyImport_FindSharedFuncptrWindows()``. Patch by Srinivas Nyayapati.
-- bpo-28009: Modify the test_uuid logic to test when a program is available
- AND can be used to obtain a MACADDR as basis for an UUID. Patch by M. Felt
+- bpo-25172: Trying to import the :mod:`crypt` module on Windows will result
+ in an :exc:`ImportError` with a message explaining that the module isn't
+ supported on Windows. On other platforms, if the underlying ``_crypt``
+ module is not available, the ImportError will include a message explaining
+ the problem.
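+
+  Roughly what callers now see (a small sketch)::
+
+      try:
+          import crypt
+      except ImportError as exc:
+          # On Windows (or when the underlying _crypt module is missing),
+          # the message now explains why the module is unavailable.
+          print(exc)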
-Build
------
+- bpo-37778: Fixes the icons used for file associations to the Microsoft
+ Store package.
-- bpo-37189: Many ``PyRun_XXX()`` functions like :c:func:`PyRun_String` were
- no longer exported in ``libpython38.dll`` by mistake. Export them again to
- fix the ABI compatibiliy.
+- bpo-37734: Fix use of registry values to launch Python from Microsoft
+ Store app.
-Windows
--------
+- bpo-37702: Fix memory leak on Windows in creating an SSLContext object or
+ running urllib.request.urlopen('https://...').
+
+- bpo-37672: Switch Windows Store package's pip to use bundled
+ :file:`pip.ini` instead of :envvar:`PIP_USER` variable.
- bpo-10945: Officially drop support for creating bdist_wininst installers
on non-Windows systems.
+- bpo-37445: Include the ``FORMAT_MESSAGE_IGNORE_INSERTS`` flag in
+ ``FormatMessageW()`` calls.
+
- bpo-37369: Fixes path for :data:`sys.executable` when running from the
Microsoft Store.
+- bpo-37380: Don't collect unfinished processes with ``subprocess._active``
+ on Windows to cleanup later. Patch by Ruslan Kuprieiev.
+
- bpo-37351: Removes libpython38.a from standard Windows distribution.
- bpo-35360: Update Windows builds to use SQLite 3.28.0.
@@ -2945,9 +4875,25 @@ Windows
- bpo-36779: Ensure ``time.tzname`` is correct on Windows when the active
code page is set to CP_UTF7 or CP_UTF8.
+- bpo-32587: Make :data:`winreg.REG_MULTI_SZ` support zero-length strings.
+
+- bpo-28269: Replace use of :c:func:`strcasecmp` with the system function
+  :c:func:`_stricmp`. Patch by Minmin Gong.
+
+- bpo-36590: Add native Bluetooth RFCOMM support to socket module.
+
macOS
-----
+- bpo-38117: Updated OpenSSL to 1.1.1d in macOS installer.
+
+- bpo-38089: Move Azure Pipelines to latest VM versions and make macOS tests
+ optional
+
+- bpo-18049: Increase the default stack size of threads from 5MB to 16MB on
+ macOS, to match the stack size of the main thread. This avoids crashes on
+ deep recursion in threads.
+
- bpo-34602: Avoid test suite failures on macOS by no longer calling
resource.setrlimit to increase the process stack size limit at runtime.
The runtime change is no longer needed since the interpreter is being
@@ -2960,17 +4906,92 @@ macOS
IDLE
----
+- bpo-26353: Stop adding newline when saving an IDLE shell window.
+
+- bpo-4630: Add an option to toggle IDLE's cursor blink for shell, editor,
+ and output windows. See Settings, General, Window Preferences, Cursor
+ Blink. Patch by Zackery Spytz.
+
+- bpo-38598: Do not try to compile IDLE shell or output windows
+
+- bpo-36698: IDLE no longer fails when writing non-encodable characters to
+  stderr. It now escapes them with a backslash, as the regular Python
+  interpreter does. Added the ``errors`` field to the standard streams.
+
+- bpo-35379: When exiting IDLE, catch any AttributeError. One happens when
+ EditorWindow.close is called twice. Printing a traceback, when IDLE is
+ run from a terminal, is useless and annoying.
+
+- bpo-38183: To avoid problems, test_idle ignores the user config directory.
+ It no longer tries to create or access .idlerc or any files within. Users
+ must run IDLE to discover problems with saving settings.
+
+- bpo-38077: IDLE no longer adds 'argv' to the user namespace when
+ initializing it. This bug only affected 3.7.4 and 3.8.0b2 to 3.8.0b4.
+
+- bpo-38041: Shell restart lines now fill the window width, always start
+ with '=', and avoid wrapping unnecessarily. The line will still wrap if
+ the included file name is long relative to the width.
+
+- bpo-35771: To avoid occasional spurious test_idle failures on slower
+ machines, increase the ``hover_delay`` in test_tooltip.
+
+- bpo-37824: Properly handle user input warnings in IDLE shell. Cease
+ turning SyntaxWarnings into SyntaxErrors.
+
+- bpo-37929: IDLE Settings dialog now closes properly when there is no shell
+ window.
+
+- bpo-37902: Add mousewheel scrolling for IDLE module, path, and stack
+ browsers. Patch by George Zhang.
+
+- bpo-37849: Fixed completions list appearing too high or low when shown
+ above the current line.
+
+- bpo-36419: Refactor IDLE autocomplete and improve testing.
+
+- bpo-37748: Reorder the Run menu. Put the most common choice, Run Module,
+ at the top.
+
+- bpo-37692: Improve highlight config sample with example shell interaction
+ and better labels for shell elements.
+
+- bpo-37628: Settings dialog no longer expands with font size.
+
+- bpo-37627: Initialize the Customize Run dialog with the most recently
+  entered command line arguments. The user can optionally edit them before
+  submitting.
+
+- bpo-33610: Fix code context not showing the correct context when first
+ toggled on.
+
+- bpo-37530: Optimize code context to reduce unneeded background activity.
+ Font and highlight changes now occur along with text changes instead of
+ after a random delay.
+
+- bpo-27452: Cleanup ``config.py`` by inlining ``RemoveFile`` and
+ simplifying the handling of ``file`` in ``CreateConfigHandlers``.
+
- bpo-37325: Fix tab focus traversal order for help source and custom run
dialogs.
- bpo-37321: Both subprocess connection error messages now refer to the
'Startup failure' section of the IDLE doc.
+- bpo-17535: Add optional line numbers for IDLE editor windows. Windows
+ open without line numbers unless set otherwise in the General tab of the
+ configuration dialog.
+
+- bpo-26806: To compensate for stack frames added by IDLE and avoid possible
+ problems with low recursion limits, add 30 to limits in the user code
+ execution process. Subtract 30 when reporting recursion limits to make
+ this addition mostly transparent.
+
- bpo-37177: Properly 'attach' search dialogs to their main window so that
they behave like other dialogs and do not get hidden behind their main
window.
-- bpo-37039: Adjust "Zoom Height" to individual screens by momemtarily
+- bpo-37039: Adjust "Zoom Height" to individual screens by momentarily
maximizing the window on first use with a particular screen. Changing
screen settings may invalidate the saved height. While a window is
maximized, "Zoom Height" has no effect.
@@ -2982,19 +5003,126 @@ IDLE
customized settings. Any 'command line arguments' entered are added to
sys.argv. One can suppress the normal Shell main module restart.
+- bpo-36390: Gather Format menu functions into format.py. Combine
+ paragraph.py, rstrip.py, and format methods from editor.py.
+
+Tools/Demos
+-----------
+
+- bpo-38118: Update Valgrind suppression file to ignore a false alarm in
+ :c:func:`PyUnicode_Decode` when using GCC builtin strcmp().
+
+- bpo-38347: pathfix.py: Assume all files that end with '.py' are Python
+  scripts when working recursively.
+
+- bpo-37803: pdb's ``--help`` and ``--version`` long options now work.
+
+- bpo-37942: Improve ArgumentClinic converter for floats.
+
+- bpo-37704: Remove ``Tools/scripts/h2py.py``: use cffi to access a C API in
+ Python.
+
+- bpo-37675: 2to3 now works when run from a zipped standard library.
+
+- bpo-37034: Argument Clinic now uses the argument name in errors about
+  keyword-only arguments, instead of their position. Patch contributed by
+  Rémi Lapeyre.
+
+- bpo-37064: Add option -k to pathscript.py script: preserve shebang flags.
+ Add option -a to pathscript.py script: add flags.
+
C API
-----
+- bpo-37633: Re-export some function compatibility wrappers for macros in
+ ``pythonrun.h``.
+
+- bpo-38644: Provide :c:func:`Py_EnterRecursiveCall` and
+  :c:func:`Py_LeaveRecursiveCall` as regular functions for the limited API.
+  Previously, they were defined as macros, but these macros didn't work
+  with the limited API, which cannot access the
+  ``PyThreadState.recursion_depth`` field. Remove
+  ``_Py_CheckRecursionLimit`` from the stable ABI.
+
+- bpo-38650: The global variable :c:data:`PyStructSequence_UnnamedField` is
+ now a constant and refers to a constant string.
+
+- bpo-38540: Fixed possible leak in :c:func:`PyArg_Parse` and similar
+ functions for format units ``"es#"`` and ``"et#"`` when the macro
+ :c:macro:`PY_SSIZE_T_CLEAN` is not defined.
+
+- bpo-38395: Fix a crash in :class:`weakref.proxy` objects due to incorrect
+  lifetime management when calling some associated methods that may delete
+  the last reference to the object being referenced by the proxy. Patch by
+  Pablo Galindo.
+
+- bpo-36389: The ``_PyObject_CheckConsistency()`` function is now also
+ available in release mode. For example, it can be used to debug a crash in
+ the ``visit_decref()`` function of the GC.
+
+- bpo-38266: Revert the removal of PyThreadState_DeleteCurrent() with
+ documentation.
+
+- bpo-38303: Update audioop extension module to use the stable ABI
+ (PEP-384). Patch by Tyler Kieft.
+
+- bpo-38234: :c:func:`Py_SetPath` now sets :data:`sys.executable` to the
+ program full path (:c:func:`Py_GetProgramFullPath`) rather than to the
+ program name (:c:func:`Py_GetProgramName`).
+
+- bpo-38234: Python ignored arguments passed to :c:func:`Py_SetPath`,
+ :c:func:`Py_SetPythonHome` and :c:func:`Py_SetProgramName`: fix Python
+ initialization to use specified arguments.
+
+- bpo-38205: The :c:func:`Py_UNREACHABLE` macro now calls
+ :c:func:`Py_FatalError`.
+
+- bpo-38140: Make dict and weakref offsets opaque for C heap types by
+ passing the offsets through PyMemberDef
+
+- bpo-15088: The C function ``PyGen_NeedsFinalizing`` has been removed. It
+  was not documented, tested or used anywhere within CPython after the
+  implementation of :pep:`442`. Patch by Joannah Nanjekye.
+
+- bpo-36763: Options added by ``PySys_AddXOption()`` are now handled the
+  same way as ``PyConfig.xoptions`` and command line ``-X`` options.
+
+- bpo-37926: Fix a crash in ``PySys_SetArgvEx(0, NULL, 0)``.
+
+- bpo-37879: Fix subtype_dealloc to suppress the type decref when the base
+ type is a C heap type
+
+- bpo-37645: Add :c:func:`_PyObject_FunctionStr` to get a user-friendly
+ string representation of a function-like object. Patch by Jeroen Demeyer.
+
+- bpo-29548: The functions ``PyEval_CallObject``, ``PyEval_CallFunction``,
+ ``PyEval_CallMethod`` and ``PyEval_CallObjectWithKeywords`` are
+ deprecated. Use :c:func:`PyObject_Call` and its variants instead.
+
+- bpo-37151: ``PyCFunction_Call`` is now a deprecated alias of
+ :c:func:`PyObject_Call`.
+
+- bpo-37540: The vectorcall protocol now requires that the caller passes
+ only strings as keyword names.
+
+- bpo-37207: The vectorcall protocol is now enabled for ``type`` objects:
+ set ``tp_vectorcall`` to a vectorcall function to be used instead of
+ ``tp_new`` and ``tp_init`` when calling the class itself.
+
+- bpo-21120: Exclude Python-ast.h, ast.h and asdl.h from the limited API.
+
+- bpo-37483: Add new function ``_PyObject_CallOneArg`` for calling an object
+ with one positional argument.
+
- bpo-36763: Add :func:`PyConfig_SetWideStringList` function.
+- bpo-37337: Add fast functions for calling methods:
+ :c:func:`_PyObject_VectorcallMethod`, :c:func:`_PyObject_CallMethodNoArgs`
+ and :c:func:`_PyObject_CallMethodOneArg`.
+
- bpo-28805: The :const:`METH_FASTCALL` calling convention has been
documented.
-- bpo-37221: ``tp_print`` is put back at the end of the ``PyTypeObject``
- structure to restore support for old code (in particular generated by
- Cython) setting ``tp_print = 0``. Note that ``tp_print`` will be removed
- entirely in Python 3.9.
-
- bpo-37221: The new function :c:func:`PyCode_NewWithPosOnlyArgs` allows to
create code objects like :c:func:`PyCode_New`, but with an extra
*posonlyargcount* parameter for indicating the number of positonal-only
@@ -3002,14 +5130,22 @@ C API
- bpo-37215: Fix dtrace issue introduce by bpo-36842
-- bpo-37191: Python.h does not need compiler support for intermingled
- declarations (GCC's ``-Wdeclaration-after-statement``), which were added
- in 3.8.0 Beta 1. Note that in Python 3.9, intermingled declarations will
- be needed again.
+- bpo-37194: Add a new public :c:func:`PyObject_CallNoArgs` function to the
+ C API: call a callable Python object without any arguments. It is the most
+ efficient way to call a callback without any argument. On x86-64, for
+ example, ``PyObject_CallFunctionObjArgs(func, NULL)`` allocates 960 bytes
+ on the stack per call, whereas ``PyObject_CallNoArgs(func)`` only
+ allocates 624 bytes per call.
- bpo-37170: Fix the cast on error in
:c:func:`PyLong_AsUnsignedLongLongMask()`.
+- bpo-35381: Convert posixmodule.c statically allocated types
+ ``DirEntryType`` and ``ScandirIteratorType`` to heap-allocated types.
+
+- bpo-34331: Use singular/plural noun in error message when instantiating an
+  abstract class with non-overridden abstract method(s).
+
What's New in Python 3.8.0 beta 1?
==================================
@@ -4094,7 +6230,7 @@ Tests
with ``-jN/--multiprocess N``. ``--findleaks`` becomes a deprecated alias
to ``--fail-env-changed``.
-- bpo-36725: When using mulitprocessing mode (-jN), regrtest now better
+- bpo-36725: When using multiprocessing mode (-jN), regrtest now better
reports errors if a worker process fails, and it exits immediately on a
worker thread failure or when interrupted.
@@ -5549,7 +7685,7 @@ Library
- bpo-35585: Speed-up building enums by value, e.g. http.HTTPStatus(200).
- bpo-30561: random.gammavariate(1.0, beta) now computes the same result as
- random.expovariate(1.0 / beta). This synchonizes the two algorithms and
+ random.expovariate(1.0 / beta). This synchronizes the two algorithms and
eliminates some idiosyncrasies in the old implementation. It does however
produce a difference stream of random variables than it used to.
@@ -11330,7 +13466,7 @@ Library
- bpo-29204: Element.getiterator() and the html parameter of XMLParser()
were deprecated only in the documentation (since Python 3.2 and 3.4
- correspondintly). Now using them emits a deprecation warning.
+ correspondingly). Now using them emits a deprecation warning.
- bpo-27863: Fixed multiple crashes in ElementTree caused by race conditions
and wrong types.
@@ -23173,7 +25309,7 @@ Build
- bpo-21285: Refactor and fix curses configure check to always search in a
ncursesw directory.
-- bpo-15234: For BerkelyDB and Sqlite, only add the found library and
+- bpo-15234: For BerkeleyDB and Sqlite, only add the found library and
include directories if they aren't already being searched. This avoids an
explicit runtime library dependency.
diff --git a/Misc/SpecialBuilds.txt b/Misc/SpecialBuilds.txt
index d1a03216..27369abf 100644
--- a/Misc/SpecialBuilds.txt
+++ b/Misc/SpecialBuilds.txt
@@ -46,9 +46,7 @@ Build option: ``./configure --with-trace-refs``.
Turn on heavy reference debugging. This is major surgery. Every PyObject grows
two more pointers, to maintain a doubly-linked list of all live heap-allocated
objects. Most built-in type objects are not in this list, as they're statically
-allocated. Starting in Python 2.3, if COUNT_ALLOCS (see below) is also defined,
-a static type object T does appear in this list if at least one object of type T
-has been created.
+allocated.
Note that because the fundamental PyObject layout changes, Python modules
compiled with Py_TRACE_REFS are incompatible with modules compiled without it.
@@ -165,55 +163,6 @@ by not defining NDEBUG), and some routines do additional sanity checks inside
"#ifdef Py_DEBUG" blocks.
-COUNT_ALLOCS
-------------
-
-Each type object grows three new members:
-
- /* Number of times an object of this type was allocated. */
- int tp_allocs;
-
- /* Number of times an object of this type was deallocated. */
- int tp_frees;
-
- /* Highwater mark: the maximum value of tp_allocs - tp_frees so
- * far; or, IOW, the largest number of objects of this type alive at
- * the same time.
- */
- int tp_maxalloc;
-
-Allocation and deallocation code keeps these counts up to date. Py_FinalizeEx()
-displays a summary of the info returned by sys.getcounts() (see below), along
-with assorted other special allocation counts (like the number of tuple
-allocations satisfied by a tuple free-list, the number of 1-character strings
-allocated, etc).
-
-Before Python 2.2, type objects were immortal, and the COUNT_ALLOCS
-implementation relies on that. As of Python 2.2, heap-allocated type/ class
-objects can go away. COUNT_ALLOCS can blow up in 2.2 and 2.2.1 because of this;
-this was fixed in 2.2.2. Use of COUNT_ALLOCS makes all heap-allocated type
-objects immortal, except for those for which no object of that type is ever
-allocated.
-
-Starting with Python 2.3, If Py_TRACE_REFS is also defined, COUNT_ALLOCS
-arranges to ensure that the type object for each allocated object appears in the
-doubly-linked list of all objects maintained by Py_TRACE_REFS.
-
-Special gimmicks:
-
-sys.getcounts()
- Return a list of 4-tuples, one entry for each type object for which at least
- one object of that type was allocated. Each tuple is of the form:
-
- (tp_name, tp_allocs, tp_frees, tp_maxalloc)
-
- Each distinct type object gets a distinct entry in this list, even if two or
- more type objects have the same tp_name (in which case there's no way to
- distinguish them by looking at this list). The list is ordered by time of
- first object allocation: the type object for which the first allocation of
- an object of that type occurred most recently is at the front of the list.
-
-
LLTRACE
-------
diff --git a/Misc/python-config.in b/Misc/python-config.in
index 727c4a86..ebd99daa 100644
--- a/Misc/python-config.in
+++ b/Misc/python-config.in
@@ -25,8 +25,8 @@ except getopt.error:
if not opts:
exit_with_usage()
-pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var
+pyver = getvar('VERSION')
opt_flags = [flag for (flag, val) in opts]
@@ -35,10 +35,10 @@ if '--help' in opt_flags:
for opt in opt_flags:
if opt == '--prefix':
- print(sysconfig.get_config_var('prefix'))
+ print(getvar('prefix'))
elif opt == '--exec-prefix':
- print(sysconfig.get_config_var('exec_prefix'))
+ print(getvar('exec_prefix'))
elif opt in ('--includes', '--cflags'):
flags = ['-I' + sysconfig.get_path('include'),
@@ -65,10 +65,10 @@ for opt in opt_flags:
print(' '.join(libs))
elif opt == '--extension-suffix':
- print(sysconfig.get_config_var('EXT_SUFFIX'))
+ print(getvar('EXT_SUFFIX'))
elif opt == '--abiflags':
print(sys.abiflags)
elif opt == '--configdir':
- print(sysconfig.get_config_var('LIBPL'))
+ print(getvar('LIBPL'))
diff --git a/Misc/python-wing3.wpr b/Misc/python-wing3.wpr
old mode 100644
new mode 100755
diff --git a/Misc/python-wing4.wpr b/Misc/python-wing4.wpr
old mode 100644
new mode 100755
diff --git a/Misc/python-wing5.wpr b/Misc/python-wing5.wpr
old mode 100644
new mode 100755
diff --git a/Misc/python.man b/Misc/python.man
index fa5d7999..22537657 100644
--- a/Misc/python.man
+++ b/Misc/python.man
@@ -286,10 +286,6 @@ Set implementation specific option. The following options are available:
traceback of a trace. Use -X tracemalloc=NFRAME to start tracing with a
traceback limit of NFRAME frames
- -X showalloccount: output the total count of allocated objects for each
- type when the program finishes. This only works when Python was built with
- COUNT_ALLOCS defined
-
-X importtime: show how long each import takes. It shows module name,
cumulative time (including nested imports) and self time (excluding
nested imports). Note that its output may be broken in multi-threaded
@@ -417,6 +413,8 @@ inserted in the path in front of $PYTHONPATH.
The search path can be manipulated from within a Python program as the
variable
.IR sys.path .
+.IP PYTHONPLATLIBDIR
+Override sys.platlibdir.
.IP PYTHONSTARTUP
If this is the name of a readable file, the Python commands in that
file are executed before the first prompt is displayed in interactive
diff --git a/Misc/requirements-test.txt b/Misc/requirements-test.txt
new file mode 100644
index 00000000..6e46c12e
--- /dev/null
+++ b/Misc/requirements-test.txt
@@ -0,0 +1 @@
+tzdata==2020.1rc0
diff --git a/Modules/Setup b/Modules/Setup
index 983fa014..02cfb675 100644
--- a/Modules/Setup
+++ b/Modules/Setup
@@ -132,7 +132,10 @@ faulthandler faulthandler.c
#
# bpo-35053: The module must be builtin since _Py_NewReference()
# can call _PyTraceMalloc_NewReference().
-_tracemalloc _tracemalloc.c hashtable.c
+_tracemalloc _tracemalloc.c
+
+# PEG-based parser module -- slated to be *the* parser
+_peg_parser _peg_parser.c
# The rest of the modules listed in this file are all commented out by
# default. Usually they can be detected and built as dynamically
@@ -167,17 +170,18 @@ _symtable symtablemodule.c
# Modules that should always be present (non UNIX dependent):
#array arraymodule.c # array objects
-#cmath cmathmodule.c _math.c # -lm # complex math library functions
-#math mathmodule.c _math.c # -lm # math library functions, e.g. sin()
+#cmath cmathmodule.c _math.c -DPy_BUILD_CORE_MODULE # -lm # complex math library functions
+#math mathmodule.c _math.c -DPy_BUILD_CORE_MODULE # -lm # math library functions, e.g. sin()
#_contextvars _contextvarsmodule.c # Context Variables
#_struct _struct.c # binary structure packing/unpacking
#_weakref _weakref.c # basic weak reference support
#_testcapi _testcapimodule.c # Python C API test module
#_testinternalcapi _testinternalcapi.c -I$(srcdir)/Include/internal -DPy_BUILD_CORE_MODULE # Python internal C API test module
-#_random _randommodule.c # Random number generator
+#_random _randommodule.c -DPy_BUILD_CORE_MODULE # Random number generator
#_elementtree -I$(srcdir)/Modules/expat -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI _elementtree.c # elementtree accelerator
#_pickle _pickle.c # pickle accelerator
#_datetime _datetimemodule.c # datetime accelerator
+#_zoneinfo _zoneinfo.c # zoneinfo accelerator
#_bisect _bisectmodule.c # Bisection algorithms
#_heapq _heapqmodule.c # Heap queue algorithm
#_asyncio _asynciomodule.c # Fast asyncio Future
@@ -247,8 +251,8 @@ _symtable symtablemodule.c
# The _sha module implements the SHA checksum algorithms.
# (NIST's Secure Hash Algorithms.)
#_sha1 sha1module.c
-#_sha256 sha256module.c
-#_sha512 sha512module.c
+#_sha256 sha256module.c -DPy_BUILD_CORE_BUILTIN
+#_sha512 sha512module.c -DPy_BUILD_CORE_BUILTIN
#_sha3 _sha3/sha3module.c
# _blake module
diff --git a/Modules/_abc.c b/Modules/_abc.c
index de938dd0..709b52ff 100644
--- a/Modules/_abc.c
+++ b/Modules/_abc.c
@@ -1,7 +1,6 @@
/* ABCMeta implementation */
#include "Python.h"
-#include "structmember.h"
#include "clinic/_abc.c.h"
/*[clinic input]
@@ -20,12 +19,18 @@ _Py_IDENTIFIER(_abc_impl);
_Py_IDENTIFIER(__subclasscheck__);
_Py_IDENTIFIER(__subclasshook__);
-/* A global counter that is incremented each time a class is
- registered as a virtual subclass of anything. It forces the
- negative cache to be cleared before its next use.
- Note: this counter is private. Use `abc.get_cache_token()` for
- external code. */
-static unsigned long long abc_invalidation_counter = 0;
+typedef struct {
+ PyTypeObject *_abc_data_type;
+ unsigned long long abc_invalidation_counter;
+} _abcmodule_state;
+
+static inline _abcmodule_state*
+get_abc_state(PyObject *module)
+{
+ void *state = PyModule_GetState(module);
+ assert(state != NULL);
+ return (_abcmodule_state *)state;
+}
/* This object stores internal state for ABCs.
Note that we can use normal sets for caches,
@@ -38,51 +43,84 @@ typedef struct {
unsigned long long _abc_negative_cache_version;
} _abc_data;
+static int
+abc_data_traverse(_abc_data *self, visitproc visit, void *arg)
+{
+ Py_VISIT(Py_TYPE(self));
+ Py_VISIT(self->_abc_registry);
+ Py_VISIT(self->_abc_cache);
+ Py_VISIT(self->_abc_negative_cache);
+ return 0;
+}
+
+static int
+abc_data_clear(_abc_data *self)
+{
+ Py_CLEAR(self->_abc_registry);
+ Py_CLEAR(self->_abc_cache);
+ Py_CLEAR(self->_abc_negative_cache);
+ return 0;
+}
+
static void
abc_data_dealloc(_abc_data *self)
{
- Py_XDECREF(self->_abc_registry);
- Py_XDECREF(self->_abc_cache);
- Py_XDECREF(self->_abc_negative_cache);
- Py_TYPE(self)->tp_free(self);
+ PyTypeObject *tp = Py_TYPE(self);
+ (void)abc_data_clear(self);
+ tp->tp_free(self);
+ Py_DECREF(tp);
}
static PyObject *
abc_data_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
_abc_data *self = (_abc_data *) type->tp_alloc(type, 0);
+ _abcmodule_state *state = NULL;
if (self == NULL) {
return NULL;
}
+ state = PyType_GetModuleState(type);
+ if (state == NULL) {
+ Py_DECREF(self);
+ return NULL;
+ }
+
self->_abc_registry = NULL;
self->_abc_cache = NULL;
self->_abc_negative_cache = NULL;
- self->_abc_negative_cache_version = abc_invalidation_counter;
+ self->_abc_negative_cache_version = state->abc_invalidation_counter;
return (PyObject *) self;
}
PyDoc_STRVAR(abc_data_doc,
"Internal state held by ABC machinery.");
-static PyTypeObject _abc_data_type = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "_abc_data", /*tp_name*/
- sizeof(_abc_data), /*tp_basicsize*/
- .tp_dealloc = (destructor)abc_data_dealloc,
- .tp_flags = Py_TPFLAGS_DEFAULT,
- .tp_alloc = PyType_GenericAlloc,
- .tp_new = abc_data_new,
+static PyType_Slot _abc_data_type_spec_slots[] = {
+ {Py_tp_doc, (void *)abc_data_doc},
+ {Py_tp_new, abc_data_new},
+ {Py_tp_dealloc, abc_data_dealloc},
+ {Py_tp_traverse, abc_data_traverse},
+ {Py_tp_clear, abc_data_clear},
+ {0, 0}
+};
+
+static PyType_Spec _abc_data_type_spec = {
+ .name = "_abc._abc_data",
+ .basicsize = sizeof(_abc_data),
+ .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
+ .slots = _abc_data_type_spec_slots,
};
static _abc_data *
-_get_impl(PyObject *self)
+_get_impl(PyObject *module, PyObject *self)
{
+ _abcmodule_state *state = get_abc_state(module);
PyObject *impl = _PyObject_GetAttrId(self, &PyId__abc_impl);
if (impl == NULL) {
return NULL;
}
- if (Py_TYPE(impl) != &_abc_data_type) {
+ if (!Py_IS_TYPE(impl, state->_abc_data_type)) {
PyErr_SetString(PyExc_TypeError, "_abc_impl is set to a wrong type");
Py_DECREF(impl);
return NULL;
@@ -179,7 +217,7 @@ static PyObject *
_abc__reset_registry(PyObject *module, PyObject *self)
/*[clinic end generated code: output=92d591a43566cc10 input=12a0b7eb339ac35c]*/
{
- _abc_data *impl = _get_impl(self);
+ _abc_data *impl = _get_impl(module, self);
if (impl == NULL) {
return NULL;
}
@@ -206,7 +244,7 @@ static PyObject *
_abc__reset_caches(PyObject *module, PyObject *self)
/*[clinic end generated code: output=f296f0d5c513f80c input=c0ac616fd8acfb6f]*/
{
- _abc_data *impl = _get_impl(self);
+ _abc_data *impl = _get_impl(module, self);
if (impl == NULL) {
return NULL;
}
@@ -241,7 +279,7 @@ static PyObject *
_abc__get_dump(PyObject *module, PyObject *self)
/*[clinic end generated code: output=9d9569a8e2c1c443 input=2c5deb1bfe9e3c79]*/
{
- _abc_data *impl = _get_impl(self);
+ _abc_data *impl = _get_impl(module, self);
if (impl == NULL) {
return NULL;
}
@@ -391,13 +429,14 @@ static PyObject *
_abc__abc_init(PyObject *module, PyObject *self)
/*[clinic end generated code: output=594757375714cda1 input=8d7fe470ff77f029]*/
{
+ _abcmodule_state *state = get_abc_state(module);
PyObject *data;
if (compute_abstract_methods(self) < 0) {
return NULL;
}
/* Set up inheritance registry. */
- data = abc_data_new(&_abc_data_type, NULL, NULL);
+ data = abc_data_new(state->_abc_data_type, NULL, NULL);
if (data == NULL) {
return NULL;
}
@@ -446,7 +485,7 @@ _abc__abc_register_impl(PyObject *module, PyObject *self, PyObject *subclass)
if (result < 0) {
return NULL;
}
- _abc_data *impl = _get_impl(self);
+ _abc_data *impl = _get_impl(module, self);
if (impl == NULL) {
return NULL;
}
@@ -457,7 +496,7 @@ _abc__abc_register_impl(PyObject *module, PyObject *self, PyObject *subclass)
Py_DECREF(impl);
/* Invalidate negative cache */
- abc_invalidation_counter++;
+ get_abc_state(module)->abc_invalidation_counter++;
Py_INCREF(subclass);
return subclass;
@@ -480,7 +519,7 @@ _abc__abc_instancecheck_impl(PyObject *module, PyObject *self,
/*[clinic end generated code: output=b8b5148f63b6b56f input=a4f4525679261084]*/
{
PyObject *subtype, *result = NULL, *subclass = NULL;
- _abc_data *impl = _get_impl(self);
+ _abc_data *impl = _get_impl(module, self);
if (impl == NULL) {
return NULL;
}
@@ -502,7 +541,7 @@ _abc__abc_instancecheck_impl(PyObject *module, PyObject *self,
}
subtype = (PyObject *)Py_TYPE(instance);
if (subtype == subclass) {
- if (impl->_abc_negative_cache_version == abc_invalidation_counter) {
+ if (impl->_abc_negative_cache_version == get_abc_state(module)->abc_invalidation_counter) {
incache = _in_weak_set(impl->_abc_negative_cache, subclass);
if (incache < 0) {
goto end;
@@ -514,12 +553,12 @@ _abc__abc_instancecheck_impl(PyObject *module, PyObject *self,
}
}
/* Fall back to the subclass check. */
- result = _PyObject_CallMethodIdObjArgs(self, &PyId___subclasscheck__,
- subclass, NULL);
+ result = _PyObject_CallMethodIdOneArg(self, &PyId___subclasscheck__,
+ subclass);
goto end;
}
- result = _PyObject_CallMethodIdObjArgs(self, &PyId___subclasscheck__,
- subclass, NULL);
+ result = _PyObject_CallMethodIdOneArg(self, &PyId___subclasscheck__,
+ subclass);
if (result == NULL) {
goto end;
}
@@ -531,8 +570,8 @@ _abc__abc_instancecheck_impl(PyObject *module, PyObject *self,
break;
case 0:
Py_DECREF(result);
- result = _PyObject_CallMethodIdObjArgs(self, &PyId___subclasscheck__,
- subtype, NULL);
+ result = _PyObject_CallMethodIdOneArg(self, &PyId___subclasscheck__,
+ subtype);
break;
case 1: // Nothing to do.
break;
@@ -574,9 +613,10 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self,
}
PyObject *ok, *subclasses = NULL, *result = NULL;
+ _abcmodule_state *state = NULL;
Py_ssize_t pos;
int incache;
- _abc_data *impl = _get_impl(self);
+ _abc_data *impl = _get_impl(module, self);
if (impl == NULL) {
return NULL;
}
@@ -591,15 +631,16 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self,
goto end;
}
+ state = get_abc_state(module);
/* 2. Check negative cache; may have to invalidate. */
- if (impl->_abc_negative_cache_version < abc_invalidation_counter) {
+ if (impl->_abc_negative_cache_version < state->abc_invalidation_counter) {
/* Invalidate the negative cache. */
if (impl->_abc_negative_cache != NULL &&
PySet_Clear(impl->_abc_negative_cache) < 0)
{
goto end;
}
- impl->_abc_negative_cache_version = abc_invalidation_counter;
+ impl->_abc_negative_cache_version = state->abc_invalidation_counter;
}
else {
incache = _in_weak_set(impl->_abc_negative_cache, subclass);
@@ -613,8 +654,8 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self,
}
/* 3. Check the subclass hook. */
- ok = _PyObject_CallMethodIdObjArgs((PyObject *)self, &PyId___subclasshook__,
- subclass, NULL);
+ ok = _PyObject_CallMethodIdOneArg((PyObject *)self, &PyId___subclasshook__,
+ subclass);
if (ok == NULL) {
goto end;
}
@@ -792,10 +833,11 @@ static PyObject *
_abc_get_cache_token_impl(PyObject *module)
/*[clinic end generated code: output=c7d87841e033dacc input=70413d1c423ad9f9]*/
{
- return PyLong_FromUnsignedLongLong(abc_invalidation_counter);
+ _abcmodule_state *state = get_abc_state(module);
+ return PyLong_FromUnsignedLongLong(state->abc_invalidation_counter);
}
-static struct PyMethodDef module_functions[] = {
+static struct PyMethodDef _abcmodule_methods[] = {
_ABC_GET_CACHE_TOKEN_METHODDEF
_ABC__ABC_INIT_METHODDEF
_ABC__RESET_REGISTRY_METHODDEF
@@ -807,26 +849,60 @@ static struct PyMethodDef module_functions[] = {
{NULL, NULL} /* sentinel */
};
+static int
+_abcmodule_exec(PyObject *module)
+{
+ _abcmodule_state *state = get_abc_state(module);
+ state->abc_invalidation_counter = 0;
+ state->_abc_data_type = (PyTypeObject *)PyType_FromModuleAndSpec(module, &_abc_data_type_spec, NULL);
+ if (state->_abc_data_type == NULL) {
+ return -1;
+ }
+
+ return 0;
+}
+
+static int
+_abcmodule_traverse(PyObject *module, visitproc visit, void *arg)
+{
+ _abcmodule_state *state = get_abc_state(module);
+ Py_VISIT(state->_abc_data_type);
+ return 0;
+}
+
+static int
+_abcmodule_clear(PyObject *module)
+{
+ _abcmodule_state *state = get_abc_state(module);
+ Py_CLEAR(state->_abc_data_type);
+ return 0;
+}
+
+static void
+_abcmodule_free(void *module)
+{
+ _abcmodule_clear((PyObject *)module);
+}
+
+static PyModuleDef_Slot _abcmodule_slots[] = {
+ {Py_mod_exec, _abcmodule_exec},
+ {0, NULL}
+};
+
static struct PyModuleDef _abcmodule = {
PyModuleDef_HEAD_INIT,
"_abc",
_abc__doc__,
- -1,
- module_functions,
- NULL,
- NULL,
- NULL,
- NULL
+ sizeof(_abcmodule_state),
+ _abcmodule_methods,
+ _abcmodule_slots,
+ _abcmodule_traverse,
+ _abcmodule_clear,
+ _abcmodule_free,
};
-
PyMODINIT_FUNC
PyInit__abc(void)
{
- if (PyType_Ready(&_abc_data_type) < 0) {
- return NULL;
- }
- _abc_data_type.tp_doc = abc_data_doc;
-
- return PyModule_Create(&_abcmodule);
+ return PyModuleDef_Init(&_abcmodule);
}
diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c
index 4ed2af55..b615c48c 100644
--- a/Modules/_asynciomodule.c
+++ b/Modules/_asynciomodule.c
@@ -1,5 +1,6 @@
#include "Python.h"
-#include "structmember.h"
+#include "pycore_pyerrors.h" // _PyErr_ClearExcState()
+#include <stddef.h> // offsetof()
/*[clinic input]
@@ -12,10 +13,8 @@ module _asyncio
_Py_IDENTIFIER(__asyncio_running_event_loop__);
_Py_IDENTIFIER(_asyncio_future_blocking);
_Py_IDENTIFIER(add_done_callback);
-_Py_IDENTIFIER(_all_tasks_compat);
_Py_IDENTIFIER(call_soon);
_Py_IDENTIFIER(cancel);
-_Py_IDENTIFIER(current_task);
_Py_IDENTIFIER(get_event_loop);
_Py_IDENTIFIER(send);
_Py_IDENTIFIER(throw);
@@ -67,11 +66,13 @@ typedef enum {
PyObject *prefix##_exception; \
PyObject *prefix##_result; \
PyObject *prefix##_source_tb; \
+ PyObject *prefix##_cancel_msg; \
fut_state prefix##_state; \
int prefix##_log_tb; \
int prefix##_blocking; \
PyObject *dict; \
- PyObject *prefix##_weakreflist;
+ PyObject *prefix##_weakreflist; \
+ _PyErr_StackItem prefix##_cancelled_exc_state;
typedef struct {
FutureObj_HEAD(fut)
@@ -112,8 +113,8 @@ static PyTypeObject TaskType;
static PyTypeObject PyRunningLoopHolder_Type;
-#define Future_CheckExact(obj) (Py_TYPE(obj) == &FutureType)
-#define Task_CheckExact(obj) (Py_TYPE(obj) == &TaskType)
+#define Future_CheckExact(obj) Py_IS_TYPE(obj, &FutureType)
+#define Task_CheckExact(obj) Py_IS_TYPE(obj, &TaskType)
#define Future_Check(obj) PyObject_TypeCheck(obj, &FutureType)
#define Task_Check(obj) PyObject_TypeCheck(obj, &TaskType)
@@ -142,8 +143,7 @@ _is_coroutine(PyObject *coro)
Do this check after 'future_init()'; in case we need to raise
an error, __del__ needs a properly initialized object.
*/
- PyObject *res = PyObject_CallFunctionObjArgs(
- asyncio_iscoroutine_func, coro, NULL);
+ PyObject *res = PyObject_CallOneArg(asyncio_iscoroutine_func, coro);
if (res == NULL) {
return -1;
}
@@ -217,7 +217,7 @@ get_future_loop(PyObject *fut)
return NULL;
}
if (getloop != NULL) {
- PyObject *res = _PyObject_CallNoArg(getloop);
+ PyObject *res = PyObject_CallNoArgs(getloop);
Py_DECREF(getloop);
return res;
}
@@ -232,17 +232,19 @@ get_running_loop(PyObject **loop)
PyObject *rl;
PyThreadState *ts = PyThreadState_Get();
- if (ts->id == cached_running_holder_tsid && cached_running_holder != NULL) {
+ uint64_t ts_id = PyThreadState_GetID(ts);
+ if (ts_id == cached_running_holder_tsid && cached_running_holder != NULL) {
// Fast path, check the cache.
rl = cached_running_holder; // borrowed
}
else {
- if (ts->dict == NULL) {
+ PyObject *ts_dict = _PyThreadState_GetDict(ts); // borrowed
+ if (ts_dict == NULL) {
goto not_found;
}
rl = _PyDict_GetItemIdWithError(
- ts->dict, &PyId___asyncio_running_event_loop__); // borrowed
+ ts_dict, &PyId___asyncio_running_event_loop__); // borrowed
if (rl == NULL) {
if (PyErr_Occurred()) {
goto error;
@@ -253,10 +255,10 @@ get_running_loop(PyObject **loop)
}
cached_running_holder = rl; // borrowed
- cached_running_holder_tsid = ts->id;
+ cached_running_holder_tsid = ts_id;
}
- assert(Py_TYPE(rl) == &PyRunningLoopHolder_Type);
+ assert(Py_IS_TYPE(rl, &PyRunningLoopHolder_Type));
PyObject *running_loop = ((PyRunningLoopHolder *)rl)->rl_loop;
if (running_loop == Py_None) {
@@ -289,7 +291,13 @@ error:
static int
set_running_loop(PyObject *loop)
{
- PyObject *ts_dict = PyThreadState_GetDict(); // borrowed
+ PyObject *ts_dict = NULL;
+
+ PyThreadState *tstate = PyThreadState_Get();
+ if (tstate != NULL) {
+ ts_dict = _PyThreadState_GetDict(tstate); // borrowed
+ }
+
if (ts_dict == NULL) {
PyErr_SetString(
PyExc_RuntimeError, "thread-local storage is not available");
@@ -310,10 +318,7 @@ set_running_loop(PyObject *loop)
Py_DECREF(rl);
cached_running_holder = (PyObject *)rl;
-
- /* safe to assume state is not NULL as the call to PyThreadState_GetDict()
- above already checks if state is NULL */
- cached_running_holder_tsid = PyThreadState_Get()->id;
+ cached_running_holder_tsid = PyThreadState_GetID(tstate);
return 0;
}
@@ -332,12 +337,12 @@ get_event_loop(void)
return loop;
}
- policy = _PyObject_CallNoArg(asyncio_get_event_loop_policy);
+ policy = PyObject_CallNoArgs(asyncio_get_event_loop_policy);
if (policy == NULL) {
return NULL;
}
- loop = _PyObject_CallMethodId(policy, &PyId_get_event_loop, NULL);
+ loop = _PyObject_CallMethodIdNoArgs(policy, &PyId_get_event_loop);
Py_DECREF(policy);
return loop;
}
@@ -371,7 +376,7 @@ call_soon(PyObject *loop, PyObject *func, PyObject *arg, PyObject *ctx)
}
stack[nargs] = (PyObject *)ctx;
- handle = _PyObject_Vectorcall(callable, stack, nargs, context_kwname);
+ handle = PyObject_Vectorcall(callable, stack, nargs, context_kwname);
Py_DECREF(callable);
}
@@ -482,6 +487,8 @@ future_init(FutureObj *fut, PyObject *loop)
Py_CLEAR(fut->fut_result);
Py_CLEAR(fut->fut_exception);
Py_CLEAR(fut->fut_source_tb);
+ Py_CLEAR(fut->fut_cancel_msg);
+ _PyErr_ClearExcState(&fut->fut_cancelled_exc_state);
fut->fut_state = STATE_PENDING;
fut->fut_log_tb = 0;
@@ -498,7 +505,7 @@ future_init(FutureObj *fut, PyObject *loop)
}
fut->fut_loop = loop;
- res = _PyObject_CallMethodId(fut->fut_loop, &PyId_get_debug, NULL);
+ res = _PyObject_CallMethodIdNoArgs(fut->fut_loop, &PyId_get_debug);
if (res == NULL) {
return -1;
}
@@ -514,7 +521,7 @@ future_init(FutureObj *fut, PyObject *loop)
method, which is called during the interpreter shutdown and the
traceback module is already unloaded.
*/
- fut->fut_source_tb = _PyObject_CallNoArg(traceback_extract_stack);
+ fut->fut_source_tb = PyObject_CallNoArgs(traceback_extract_stack);
if (fut->fut_source_tb == NULL) {
return -1;
}
@@ -557,7 +564,7 @@ future_set_exception(FutureObj *fut, PyObject *exc)
}
if (PyExceptionClass_Check(exc)) {
- exc_val = _PyObject_CallNoArg(exc);
+ exc_val = PyObject_CallNoArgs(exc);
if (exc_val == NULL) {
return NULL;
}
@@ -576,7 +583,7 @@ future_set_exception(FutureObj *fut, PyObject *exc)
PyErr_SetString(PyExc_TypeError, "invalid exception object");
return NULL;
}
- if ((PyObject*)Py_TYPE(exc_val) == PyExc_StopIteration) {
+ if (Py_IS_TYPE(exc_val, (PyTypeObject *)PyExc_StopIteration)) {
Py_DECREF(exc_val);
PyErr_SetString(PyExc_TypeError,
"StopIteration interacts badly with generators "
@@ -596,11 +603,33 @@ future_set_exception(FutureObj *fut, PyObject *exc)
Py_RETURN_NONE;
}
+static PyObject *
+create_cancelled_error(PyObject *msg)
+{
+ PyObject *exc;
+ if (msg == NULL || msg == Py_None) {
+ exc = PyObject_CallNoArgs(asyncio_CancelledError);
+ } else {
+ exc = PyObject_CallOneArg(asyncio_CancelledError, msg);
+ }
+ return exc;
+}
+
+static void
+future_set_cancelled_error(FutureObj *fut)
+{
+ PyObject *exc = create_cancelled_error(fut->fut_cancel_msg);
+ PyErr_SetObject(asyncio_CancelledError, exc);
+ Py_DECREF(exc);
+
+ _PyErr_ChainStackItem(&fut->fut_cancelled_exc_state);
+}
+
static int
future_get_result(FutureObj *fut, PyObject **result)
{
if (fut->fut_state == STATE_CANCELLED) {
- PyErr_SetNone(asyncio_CancelledError);
+ future_set_cancelled_error(fut);
return -1;
}
@@ -697,7 +726,7 @@ future_add_done_callback(FutureObj *fut, PyObject *arg, PyObject *ctx)
}
static PyObject *
-future_cancel(FutureObj *fut)
+future_cancel(FutureObj *fut, PyObject *msg)
{
fut->fut_log_tb = 0;
@@ -706,6 +735,9 @@ future_cancel(FutureObj *fut)
}
fut->fut_state = STATE_CANCELLED;
+ Py_XINCREF(msg);
+ Py_XSETREF(fut->fut_cancel_msg, msg);
+
if (future_schedule_callbacks(fut) == -1) {
return NULL;
}
@@ -751,6 +783,8 @@ FutureObj_clear(FutureObj *fut)
Py_CLEAR(fut->fut_result);
Py_CLEAR(fut->fut_exception);
Py_CLEAR(fut->fut_source_tb);
+ Py_CLEAR(fut->fut_cancel_msg);
+ _PyErr_ClearExcState(&fut->fut_cancelled_exc_state);
Py_CLEAR(fut->dict);
return 0;
}
@@ -765,7 +799,14 @@ FutureObj_traverse(FutureObj *fut, visitproc visit, void *arg)
Py_VISIT(fut->fut_result);
Py_VISIT(fut->fut_exception);
Py_VISIT(fut->fut_source_tb);
+ Py_VISIT(fut->fut_cancel_msg);
Py_VISIT(fut->dict);
+
+ _PyErr_StackItem *exc_state = &fut->fut_cancelled_exc_state;
+ Py_VISIT(exc_state->exc_type);
+ Py_VISIT(exc_state->exc_value);
+ Py_VISIT(exc_state->exc_traceback);
+
return 0;
}
@@ -830,7 +871,7 @@ _asyncio_Future_exception_impl(FutureObj *self)
}
if (self->fut_state == STATE_CANCELLED) {
- PyErr_SetNone(asyncio_CancelledError);
+ future_set_cancelled_error(self);
return NULL;
}
@@ -942,7 +983,7 @@ _asyncio_Future_remove_done_callback(FutureObj *self, PyObject *fn)
ENSURE_FUTURE_ALIVE(self)
if (self->fut_callback0 != NULL) {
- int cmp = PyObject_RichCompareBool(fn, self->fut_callback0, Py_EQ);
+ int cmp = PyObject_RichCompareBool(self->fut_callback0, fn, Py_EQ);
if (cmp == -1) {
return NULL;
}
@@ -967,7 +1008,7 @@ _asyncio_Future_remove_done_callback(FutureObj *self, PyObject *fn)
if (len == 1) {
PyObject *cb_tup = PyList_GET_ITEM(self->fut_callbacks, 0);
int cmp = PyObject_RichCompareBool(
- fn, PyTuple_GET_ITEM(cb_tup, 0), Py_EQ);
+ PyTuple_GET_ITEM(cb_tup, 0), fn, Py_EQ);
if (cmp == -1) {
return NULL;
}
@@ -989,7 +1030,7 @@ _asyncio_Future_remove_done_callback(FutureObj *self, PyObject *fn)
int ret;
PyObject *item = PyList_GET_ITEM(self->fut_callbacks, i);
Py_INCREF(item);
- ret = PyObject_RichCompareBool(fn, PyTuple_GET_ITEM(item, 0), Py_EQ);
+ ret = PyObject_RichCompareBool(PyTuple_GET_ITEM(item, 0), fn, Py_EQ);
if (ret == 0) {
if (j < len) {
PyList_SET_ITEM(newlist, j, item);
@@ -1011,7 +1052,7 @@ _asyncio_Future_remove_done_callback(FutureObj *self, PyObject *fn)
}
if (j < len) {
- Py_SIZE(newlist) = j;
+ Py_SET_SIZE(newlist, j);
}
j = PyList_GET_SIZE(newlist);
len = PyList_GET_SIZE(self->fut_callbacks);
@@ -1031,6 +1072,8 @@ fail:
/*[clinic input]
_asyncio.Future.cancel
+ msg: object = None
+
Cancel the future and schedule callbacks.
If the future is already done or cancelled, return False. Otherwise,
@@ -1039,11 +1082,11 @@ return True.
[clinic start generated code]*/
static PyObject *
-_asyncio_Future_cancel_impl(FutureObj *self)
-/*[clinic end generated code: output=e45b932ba8bd68a1 input=515709a127995109]*/
+_asyncio_Future_cancel_impl(FutureObj *self, PyObject *msg)
+/*[clinic end generated code: output=3edebbc668e5aba3 input=925eb545251f2c5a]*/
{
ENSURE_FUTURE_ALIVE(self)
- return future_cancel(self);
+ return future_cancel(self, msg);
}
/*[clinic input]
@@ -1256,6 +1299,29 @@ FutureObj_get_source_traceback(FutureObj *fut, void *Py_UNUSED(ignored))
return fut->fut_source_tb;
}
+static PyObject *
+FutureObj_get_cancel_message(FutureObj *fut, void *Py_UNUSED(ignored))
+{
+ if (fut->fut_cancel_msg == NULL) {
+ Py_RETURN_NONE;
+ }
+ Py_INCREF(fut->fut_cancel_msg);
+ return fut->fut_cancel_msg;
+}
+
+static int
+FutureObj_set_cancel_message(FutureObj *fut, PyObject *msg,
+ void *Py_UNUSED(ignored))
+{
+ if (msg == NULL) {
+ PyErr_SetString(PyExc_AttributeError, "cannot delete attribute");
+ return -1;
+ }
+ Py_INCREF(msg);
+ Py_XSETREF(fut->fut_cancel_msg, msg);
+ return 0;
+}
+
static PyObject *
FutureObj_get_state(FutureObj *fut, void *Py_UNUSED(ignored))
{
@@ -1283,6 +1349,29 @@ FutureObj_get_state(FutureObj *fut, void *Py_UNUSED(ignored))
return ret;
}
+/*[clinic input]
+_asyncio.Future._make_cancelled_error
+
+Create the CancelledError to raise if the Future is cancelled.
+
+This should only be called once when handling a cancellation since
+it erases the context exception value.
+[clinic start generated code]*/
+
+static PyObject *
+_asyncio_Future__make_cancelled_error_impl(FutureObj *self)
+/*[clinic end generated code: output=a5df276f6c1213de input=ac6effe4ba795ecc]*/
+{
+ PyObject *exc = create_cancelled_error(self->fut_cancel_msg);
+ _PyErr_StackItem *exc_state = &self->fut_cancelled_exc_state;
+ /* Transfer ownership of exc_value from exc_state to exc since we are
+ done with it. */
+ PyException_SetContext(exc, exc_state->exc_value);
+ exc_state->exc_value = NULL;
+
+ return exc;
+}
+
/*[clinic input]
_asyncio.Future._repr_info
[clinic start generated code]*/
@@ -1291,8 +1380,7 @@ static PyObject *
_asyncio_Future__repr_info_impl(FutureObj *self)
/*[clinic end generated code: output=fa69e901bd176cfb input=f21504d8e2ae1ca2]*/
{
- return PyObject_CallFunctionObjArgs(
- asyncio_future_repr_info_func, self, NULL);
+ return PyObject_CallOneArg(asyncio_future_repr_info_func, (PyObject *)self);
}
static PyObject *
@@ -1302,9 +1390,8 @@ FutureObj_repr(FutureObj *fut)
ENSURE_FUTURE_ALIVE(fut)
- PyObject *rinfo = _PyObject_CallMethodIdObjArgs((PyObject*)fut,
- &PyId__repr_info,
- NULL);
+ PyObject *rinfo = _PyObject_CallMethodIdNoArgs((PyObject*)fut,
+ &PyId__repr_info);
if (rinfo == NULL) {
return NULL;
}
@@ -1369,7 +1456,7 @@ FutureObj_finalize(FutureObj *fut)
func = _PyObject_GetAttrId(fut->fut_loop, &PyId_call_exception_handler);
if (func != NULL) {
- PyObject *res = PyObject_CallFunctionObjArgs(func, context, NULL);
+ PyObject *res = PyObject_CallOneArg(func, context);
if (res == NULL) {
PyErr_WriteUnraisable(func);
}
@@ -1387,6 +1474,12 @@ finally:
PyErr_Restore(error_type, error_value, error_traceback);
}
+static PyObject *
+future_cls_getitem(PyObject *cls, PyObject *type)
+{
+ Py_INCREF(cls);
+ return cls;
+}
static PyAsyncMethods FutureType_as_async = {
(unaryfunc)future_new_iter, /* am_await */
@@ -1405,7 +1498,9 @@ static PyMethodDef FutureType_methods[] = {
_ASYNCIO_FUTURE_CANCELLED_METHODDEF
_ASYNCIO_FUTURE_DONE_METHODDEF
_ASYNCIO_FUTURE_GET_LOOP_METHODDEF
+ _ASYNCIO_FUTURE__MAKE_CANCELLED_ERROR_METHODDEF
_ASYNCIO_FUTURE__REPR_INFO_METHODDEF
+ {"__class_getitem__", future_cls_getitem, METH_O|METH_CLASS, NULL},
{NULL, NULL} /* Sentinel */
};
@@ -1419,7 +1514,10 @@ static PyMethodDef FutureType_methods[] = {
{"_exception", (getter)FutureObj_get_exception, NULL, NULL}, \
{"_log_traceback", (getter)FutureObj_get_log_traceback, \
(setter)FutureObj_set_log_traceback, NULL}, \
- {"_source_traceback", (getter)FutureObj_get_source_traceback, NULL, NULL},
+ {"_source_traceback", (getter)FutureObj_get_source_traceback, \
+ NULL, NULL}, \
+ {"_cancel_message", (getter)FutureObj_get_cancel_message, \
+ (setter)FutureObj_set_cancel_message, NULL},
static PyGetSetDef FutureType_getsetlist[] = {
FUTURE_COMMON_GETSETLIST
@@ -1866,8 +1964,8 @@ register_task(PyObject *task)
{
_Py_IDENTIFIER(add);
- PyObject *res = _PyObject_CallMethodIdObjArgs(
- all_tasks, &PyId_add, task, NULL);
+ PyObject *res = _PyObject_CallMethodIdOneArg(all_tasks,
+ &PyId_add, task);
if (res == NULL) {
return -1;
}
@@ -1881,8 +1979,8 @@ unregister_task(PyObject *task)
{
_Py_IDENTIFIER(discard);
- PyObject *res = _PyObject_CallMethodIdObjArgs(
- all_tasks, &PyId_discard, task, NULL);
+ PyObject *res = _PyObject_CallMethodIdOneArg(all_tasks,
+ &PyId_discard, task);
if (res == NULL) {
return -1;
}
@@ -2088,89 +2186,22 @@ TaskObj_get_fut_waiter(TaskObj *task, void *Py_UNUSED(ignored))
}
/*[clinic input]
-@classmethod
-_asyncio.Task.current_task
+_asyncio.Task._make_cancelled_error
- loop: object = None
+Create the CancelledError to raise if the Task is cancelled.
-Return the currently running task in an event loop or None.
-
-By default the current task for the current event loop is returned.
-
-None is returned when called not in the context of a Task.
+This should only be called once when handling a cancellation since
+it erases the context exception value.
[clinic start generated code]*/
static PyObject *
-_asyncio_Task_current_task_impl(PyTypeObject *type, PyObject *loop)
-/*[clinic end generated code: output=99fbe7332c516e03 input=cd14770c5b79c7eb]*/
+_asyncio_Task__make_cancelled_error_impl(TaskObj *self)
+/*[clinic end generated code: output=55a819e8b4276fab input=52c0e32de8e2f840]*/
{
- PyObject *ret;
- PyObject *current_task_func;
-
- if (PyErr_WarnEx(PyExc_DeprecationWarning,
- "Task.current_task() is deprecated, " \
- "use asyncio.current_task() instead",
- 1) < 0) {
- return NULL;
- }
-
- current_task_func = _PyObject_GetAttrId(asyncio_mod, &PyId_current_task);
- if (current_task_func == NULL) {
- return NULL;
- }
-
- if (loop == Py_None) {
- loop = get_event_loop();
- if (loop == NULL) {
- Py_DECREF(current_task_func);
- return NULL;
- }
- ret = PyObject_CallFunctionObjArgs(current_task_func, loop, NULL);
- Py_DECREF(current_task_func);
- Py_DECREF(loop);
- return ret;
- }
- else {
- ret = PyObject_CallFunctionObjArgs(current_task_func, loop, NULL);
- Py_DECREF(current_task_func);
- return ret;
- }
+ FutureObj *fut = (FutureObj*)self;
+ return _asyncio_Future__make_cancelled_error_impl(fut);
}
-/*[clinic input]
-@classmethod
-_asyncio.Task.all_tasks
-
- loop: object = None
-
-Return a set of all tasks for an event loop.
-
-By default all tasks for the current event loop are returned.
-[clinic start generated code]*/
-
-static PyObject *
-_asyncio_Task_all_tasks_impl(PyTypeObject *type, PyObject *loop)
-/*[clinic end generated code: output=11f9b20749ccca5d input=497f80bc9ce726b5]*/
-{
- PyObject *res;
- PyObject *all_tasks_func;
-
- if (PyErr_WarnEx(PyExc_DeprecationWarning,
- "Task.all_tasks() is deprecated, " \
- "use asyncio.all_tasks() instead",
- 1) < 0) {
- return NULL;
- }
-
- all_tasks_func = _PyObject_GetAttrId(asyncio_mod, &PyId__all_tasks_compat);
- if (all_tasks_func == NULL) {
- return NULL;
- }
-
- res = PyObject_CallFunctionObjArgs(all_tasks_func, loop, NULL);
- Py_DECREF(all_tasks_func);
- return res;
-}
/*[clinic input]
_asyncio.Task._repr_info
@@ -2180,13 +2211,14 @@ static PyObject *
_asyncio_Task__repr_info_impl(TaskObj *self)
/*[clinic end generated code: output=6a490eb66d5ba34b input=3c6d051ed3ddec8b]*/
{
- return PyObject_CallFunctionObjArgs(
- asyncio_task_repr_info_func, self, NULL);
+ return PyObject_CallOneArg(asyncio_task_repr_info_func, (PyObject *)self);
}
/*[clinic input]
_asyncio.Task.cancel
+ msg: object = None
+
Request that this task cancel itself.
This arranges for a CancelledError to be thrown into the
@@ -2208,8 +2240,8 @@ was not called).
[clinic start generated code]*/
static PyObject *
-_asyncio_Task_cancel_impl(TaskObj *self)
-/*[clinic end generated code: output=6bfc0479da9d5757 input=13f9bf496695cb52]*/
+_asyncio_Task_cancel_impl(TaskObj *self, PyObject *msg)
+/*[clinic end generated code: output=c66b60d41c74f9f1 input=f4ff8e8ffc5f1c00]*/
{
self->task_log_tb = 0;
@@ -2221,8 +2253,8 @@ _asyncio_Task_cancel_impl(TaskObj *self)
PyObject *res;
int is_true;
- res = _PyObject_CallMethodId(
- self->task_fut_waiter, &PyId_cancel, NULL);
+ res = _PyObject_CallMethodIdOneArg(self->task_fut_waiter,
+ &PyId_cancel, msg);
if (res == NULL) {
return NULL;
}
@@ -2239,6 +2271,8 @@ _asyncio_Task_cancel_impl(TaskObj *self)
}
self->task_must_cancel = 1;
+ Py_XINCREF(msg);
+ Py_XSETREF(self->task_cancel_msg, msg);
Py_RETURN_TRUE;
}
@@ -2432,7 +2466,7 @@ TaskObj_finalize(TaskObj *task)
func = _PyObject_GetAttrId(task->task_loop, &PyId_call_exception_handler);
if (func != NULL) {
- PyObject *res = PyObject_CallFunctionObjArgs(func, context, NULL);
+ PyObject *res = PyObject_CallOneArg(func, context);
if (res == NULL) {
PyErr_WriteUnraisable(func);
}
@@ -2453,6 +2487,13 @@ done:
FutureObj_finalize((FutureObj*)task);
}
+static PyObject *
+task_cls_getitem(PyObject *cls, PyObject *type)
+{
+ Py_INCREF(cls);
+ return cls;
+}
+
static void TaskObj_dealloc(PyObject *); /* Needs Task_CheckExact */
static PyMethodDef TaskType_methods[] = {
@@ -2464,15 +2505,15 @@ static PyMethodDef TaskType_methods[] = {
_ASYNCIO_FUTURE_DONE_METHODDEF
_ASYNCIO_TASK_SET_RESULT_METHODDEF
_ASYNCIO_TASK_SET_EXCEPTION_METHODDEF
- _ASYNCIO_TASK_CURRENT_TASK_METHODDEF
- _ASYNCIO_TASK_ALL_TASKS_METHODDEF
_ASYNCIO_TASK_CANCEL_METHODDEF
_ASYNCIO_TASK_GET_STACK_METHODDEF
_ASYNCIO_TASK_PRINT_STACK_METHODDEF
+ _ASYNCIO_TASK__MAKE_CANCELLED_ERROR_METHODDEF
_ASYNCIO_TASK__REPR_INFO_METHODDEF
_ASYNCIO_TASK_GET_NAME_METHODDEF
_ASYNCIO_TASK_SET_NAME_METHODDEF
_ASYNCIO_TASK_GET_CORO_METHODDEF
+ {"__class_getitem__", task_cls_getitem, METH_O|METH_CLASS, NULL},
{NULL, NULL} /* Sentinel */
};
@@ -2564,7 +2605,7 @@ task_set_error_soon(TaskObj *task, PyObject *et, const char *format, ...)
return NULL;
}
- PyObject *e = PyObject_CallFunctionObjArgs(et, msg, NULL);
+ PyObject *e = PyObject_CallOneArg(et, msg);
Py_DECREF(msg);
if (e == NULL) {
return NULL;
@@ -2614,7 +2655,8 @@ task_step_impl(TaskObj *task, PyObject *exc)
if (!exc) {
/* exc was not a CancelledError */
- exc = _PyObject_CallNoArg(asyncio_CancelledError);
+ exc = create_cancelled_error(task->task_cancel_msg);
+
if (!exc) {
goto fail;
}
@@ -2641,13 +2683,11 @@ task_step_impl(TaskObj *task, PyObject *exc)
result = _PyGen_Send((PyGenObject*)coro, Py_None);
}
else {
- result = _PyObject_CallMethodIdObjArgs(coro, &PyId_send,
- Py_None, NULL);
+ result = _PyObject_CallMethodIdOneArg(coro, &PyId_send, Py_None);
}
}
else {
- result = _PyObject_CallMethodIdObjArgs(coro, &PyId_throw,
- exc, NULL);
+ result = _PyObject_CallMethodIdOneArg(coro, &PyId_throw, exc);
if (clear_exc) {
/* We created 'exc' during this call */
Py_DECREF(exc);
@@ -2665,7 +2705,7 @@ task_step_impl(TaskObj *task, PyObject *exc)
if (task->task_must_cancel) {
// Task is cancelled right before coro stops.
task->task_must_cancel = 0;
- res = future_cancel((FutureObj*)task);
+ res = future_cancel((FutureObj*)task, task->task_cancel_msg);
}
else {
res = future_set_result((FutureObj*)task, o);
@@ -2682,8 +2722,15 @@ task_step_impl(TaskObj *task, PyObject *exc)
if (PyErr_ExceptionMatches(asyncio_CancelledError)) {
/* CancelledError */
- PyErr_Clear();
- return future_cancel((FutureObj*)task);
+ PyErr_Fetch(&et, &ev, &tb);
+
+ FutureObj *fut = (FutureObj*)task;
+ _PyErr_StackItem *exc_state = &fut->fut_cancelled_exc_state;
+ exc_state->exc_type = et;
+ exc_state->exc_value = ev;
+ exc_state->exc_traceback = tb;
+
+ return future_cancel(fut, NULL);
}
/* Some other exception; pop it and call Task.set_exception() */
@@ -2763,7 +2810,8 @@ task_step_impl(TaskObj *task, PyObject *exc)
if (task->task_must_cancel) {
PyObject *r;
int is_true;
- r = _PyObject_CallMethodId(result, &PyId_cancel, NULL);
+ r = _PyObject_CallMethodIdOneArg(result, &PyId_cancel,
+ task->task_cancel_msg);
if (r == NULL) {
return NULL;
}
@@ -2840,7 +2888,7 @@ task_step_impl(TaskObj *task, PyObject *exc)
PyObject *stack[2];
stack[0] = wrapper;
stack[1] = (PyObject *)task->task_context;
- res = _PyObject_Vectorcall(add_cb, stack, 1, context_kwname);
+ res = PyObject_Vectorcall(add_cb, stack, 1, context_kwname);
Py_DECREF(add_cb);
Py_DECREF(wrapper);
if (res == NULL) {
@@ -2854,7 +2902,8 @@ task_step_impl(TaskObj *task, PyObject *exc)
if (task->task_must_cancel) {
PyObject *r;
int is_true;
- r = _PyObject_CallMethodId(result, &PyId_cancel, NULL);
+ r = _PyObject_CallMethodIdOneArg(result, &PyId_cancel,
+ task->task_cancel_msg);
if (r == NULL) {
return NULL;
}
@@ -3341,7 +3390,7 @@ module_init(void)
PyObject *weak_set;
WITH_MOD("weakref")
GET_MOD_ATTR(weak_set, "WeakSet");
- all_tasks = _PyObject_CallNoArg(weak_set);
+ all_tasks = PyObject_CallNoArgs(weak_set);
Py_CLEAR(weak_set);
if (all_tasks == NULL) {
goto fail;
@@ -3392,9 +3441,6 @@ PyInit__asyncio(void)
if (module_init() < 0) {
return NULL;
}
- if (PyType_Ready(&FutureType) < 0) {
- return NULL;
- }
if (PyType_Ready(&FutureIterType) < 0) {
return NULL;
}
@@ -3404,9 +3450,6 @@ PyInit__asyncio(void)
if (PyType_Ready(&TaskWakeupMethWrapper_Type) < 0) {
return NULL;
}
- if (PyType_Ready(&TaskType) < 0) {
- return NULL;
- }
if (PyType_Ready(&PyRunningLoopHolder_Type) < 0) {
return NULL;
}
@@ -3416,16 +3459,13 @@ PyInit__asyncio(void)
return NULL;
}
- Py_INCREF(&FutureType);
- if (PyModule_AddObject(m, "Future", (PyObject *)&FutureType) < 0) {
- Py_DECREF(&FutureType);
+ /* FutureType and TaskType are made ready by PyModule_AddType() calls below. */
+ if (PyModule_AddType(m, &FutureType) < 0) {
Py_DECREF(m);
return NULL;
}
- Py_INCREF(&TaskType);
- if (PyModule_AddObject(m, "Task", (PyObject *)&TaskType) < 0) {
- Py_DECREF(&TaskType);
+ if (PyModule_AddType(m, &TaskType) < 0) {
Py_DECREF(m);
return NULL;
}
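
The call_soon() and callback-scheduling hunks above move from the private _PyObject_Vectorcall and _PyObject_CallMethodId helpers to the public PyObject_Vectorcall / ...NoArgs / ...OneArg spellings. As a reading aid, here is a minimal sketch (not part of the patch; cb, arg and ctx are hypothetical) of how a call of the form cb(arg, context=ctx) is expressed through the vectorcall protocol, mirroring the context_kwname usage in this file:

    /* Sketch only: one positional argument followed by one keyword value.
     * `kwnames` names the trailing entries of the argument array; the third
     * parameter counts positional arguments only. */
    static PyObject *
    call_with_context(PyObject *cb, PyObject *arg, PyObject *ctx)
    {
        PyObject *kwnames = Py_BuildValue("(s)", "context");   /* ("context",) */
        if (kwnames == NULL) {
            return NULL;
        }
        PyObject *stack[2] = {arg, ctx};
        PyObject *res = PyObject_Vectorcall(cb, stack, 1, kwnames);
        Py_DECREF(kwnames);
        return res;
    }
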
diff --git a/Modules/_bisectmodule.c b/Modules/_bisectmodule.c
index 461a11f5..82d800d9 100644
--- a/Modules/_bisectmodule.c
+++ b/Modules/_bisectmodule.c
@@ -6,6 +6,13 @@ Converted to C by Dmitry Vasiliev (dima at hlabs.spb.ru).
#define PY_SSIZE_T_CLEAN
#include "Python.h"
+/*[clinic input]
+module _bisect
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=4d56a2b2033b462b]*/
+
+#include "clinic/_bisectmodule.c.h"
+
_Py_IDENTIFIER(insert);
static inline Py_ssize_t
@@ -44,69 +51,63 @@ internal_bisect_right(PyObject *list, PyObject *item, Py_ssize_t lo, Py_ssize_t
return lo;
}
-static PyObject *
-bisect_right(PyObject *self, PyObject *args, PyObject *kw)
+/*[clinic input]
+_bisect.bisect_right -> Py_ssize_t
+
+ a: object
+ x: object
+ lo: Py_ssize_t = 0
+ hi: Py_ssize_t(c_default='-1', accept={int, NoneType}) = None
+
+Return the index where to insert item x in list a, assuming a is sorted.
+
+The return value i is such that all e in a[:i] have e <= x, and all e in
+a[i:] have e > x. So if x already appears in the list, i points just
+beyond the rightmost x already there
+
+Optional args lo (default 0) and hi (default len(a)) bound the
+slice of a to be searched.
+[clinic start generated code]*/
+
+static Py_ssize_t
+_bisect_bisect_right_impl(PyObject *module, PyObject *a, PyObject *x,
+ Py_ssize_t lo, Py_ssize_t hi)
+/*[clinic end generated code: output=419e150cf1d2a235 input=e72212b282c83375]*/
{
- PyObject *list, *item;
- Py_ssize_t lo = 0;
- Py_ssize_t hi = -1;
- Py_ssize_t index;
- static char *keywords[] = {"a", "x", "lo", "hi", NULL};
-
- if (kw == NULL && PyTuple_GET_SIZE(args) == 2) {
- list = PyTuple_GET_ITEM(args, 0);
- item = PyTuple_GET_ITEM(args, 1);
- }
- else {
- if (!PyArg_ParseTupleAndKeywords(args, kw, "OO|nn:bisect_right",
- keywords, &list, &item, &lo, &hi))
- return NULL;
- }
- index = internal_bisect_right(list, item, lo, hi);
- if (index < 0)
- return NULL;
- return PyLong_FromSsize_t(index);
+ return internal_bisect_right(a, x, lo, hi);
}
-PyDoc_STRVAR(bisect_right_doc,
-"bisect_right(a, x[, lo[, hi]]) -> index\n\
-\n\
-Return the index where to insert item x in list a, assuming a is sorted.\n\
-\n\
-The return value i is such that all e in a[:i] have e <= x, and all e in\n\
-a[i:] have e > x. So if x already appears in the list, i points just\n\
-beyond the rightmost x already there\n\
-\n\
-Optional args lo (default 0) and hi (default len(a)) bound the\n\
-slice of a to be searched.\n");
+/*[clinic input]
+_bisect.insort_right
+
+ a: object
+ x: object
+ lo: Py_ssize_t = 0
+ hi: Py_ssize_t(c_default='-1', accept={int, NoneType}) = None
+
+Insert item x in list a, and keep it sorted assuming a is sorted.
+
+If x is already in a, insert it to the right of the rightmost x.
+
+Optional args lo (default 0) and hi (default len(a)) bound the
+slice of a to be searched.
+[clinic start generated code]*/
static PyObject *
-insort_right(PyObject *self, PyObject *args, PyObject *kw)
+_bisect_insort_right_impl(PyObject *module, PyObject *a, PyObject *x,
+ Py_ssize_t lo, Py_ssize_t hi)
+/*[clinic end generated code: output=c2caa3d4cd02035a input=d1c45bfa68182669]*/
{
- PyObject *list, *item, *result;
- Py_ssize_t lo = 0;
- Py_ssize_t hi = -1;
- Py_ssize_t index;
- static char *keywords[] = {"a", "x", "lo", "hi", NULL};
-
- if (kw == NULL && PyTuple_GET_SIZE(args) == 2) {
- list = PyTuple_GET_ITEM(args, 0);
- item = PyTuple_GET_ITEM(args, 1);
- }
- else {
- if (!PyArg_ParseTupleAndKeywords(args, kw, "OO|nn:insort_right",
- keywords, &list, &item, &lo, &hi))
- return NULL;
- }
- index = internal_bisect_right(list, item, lo, hi);
+ PyObject *result;
+ Py_ssize_t index = internal_bisect_right(a, x, lo, hi);
if (index < 0)
return NULL;
- if (PyList_CheckExact(list)) {
- if (PyList_Insert(list, index, item) < 0)
+ if (PyList_CheckExact(a)) {
+ if (PyList_Insert(a, index, x) < 0)
return NULL;
}
else {
- result = _PyObject_CallMethodId(list, &PyId_insert, "nO", index, item);
+ result = _PyObject_CallMethodId(a, &PyId_insert, "nO", index, x);
if (result == NULL)
return NULL;
Py_DECREF(result);
@@ -115,16 +116,6 @@ insort_right(PyObject *self, PyObject *args, PyObject *kw)
Py_RETURN_NONE;
}
-PyDoc_STRVAR(insort_right_doc,
-"insort_right(a, x[, lo[, hi]])\n\
-\n\
-Insert item x in list a, and keep it sorted assuming a is sorted.\n\
-\n\
-If x is already in a, insert it to the right of the rightmost x.\n\
-\n\
-Optional args lo (default 0) and hi (default len(a)) bound the\n\
-slice of a to be searched.\n");
-
static inline Py_ssize_t
internal_bisect_left(PyObject *list, PyObject *item, Py_ssize_t lo, Py_ssize_t hi)
{
@@ -161,67 +152,64 @@ internal_bisect_left(PyObject *list, PyObject *item, Py_ssize_t lo, Py_ssize_t h
return lo;
}
-static PyObject *
-bisect_left(PyObject *self, PyObject *args, PyObject *kw)
+
+/*[clinic input]
+_bisect.bisect_left -> Py_ssize_t
+
+ a: object
+ x: object
+ lo: Py_ssize_t = 0
+ hi: Py_ssize_t(c_default='-1', accept={int, NoneType}) = None
+
+Return the index where to insert item x in list a, assuming a is sorted.
+
+The return value i is such that all e in a[:i] have e < x, and all e in
+a[i:] have e >= x. So if x already appears in the list, i points just
+before the leftmost x already there.
+
+Optional args lo (default 0) and hi (default len(a)) bound the
+slice of a to be searched.
+[clinic start generated code]*/
+
+static Py_ssize_t
+_bisect_bisect_left_impl(PyObject *module, PyObject *a, PyObject *x,
+ Py_ssize_t lo, Py_ssize_t hi)
+/*[clinic end generated code: output=af82168bc2856f24 input=2bd90f34afe5609f]*/
{
- PyObject *list, *item;
- Py_ssize_t lo = 0;
- Py_ssize_t hi = -1;
- Py_ssize_t index;
- static char *keywords[] = {"a", "x", "lo", "hi", NULL};
-
- if (kw == NULL && PyTuple_GET_SIZE(args) == 2) {
- list = PyTuple_GET_ITEM(args, 0);
- item = PyTuple_GET_ITEM(args, 1);
- }
- else {
- if (!PyArg_ParseTupleAndKeywords(args, kw, "OO|nn:bisect_left",
- keywords, &list, &item, &lo, &hi))
- return NULL;
- }
- index = internal_bisect_left(list, item, lo, hi);
- if (index < 0)
- return NULL;
- return PyLong_FromSsize_t(index);
+ return internal_bisect_left(a, x, lo, hi);
}
-PyDoc_STRVAR(bisect_left_doc,
-"bisect_left(a, x[, lo[, hi]]) -> index\n\
-\n\
-Return the index where to insert item x in list a, assuming a is sorted.\n\
-\n\
-The return value i is such that all e in a[:i] have e < x, and all e in\n\
-a[i:] have e >= x. So if x already appears in the list, i points just\n\
-before the leftmost x already there.\n\
-\n\
-Optional args lo (default 0) and hi (default len(a)) bound the\n\
-slice of a to be searched.\n");
+
+/*[clinic input]
+_bisect.insort_left
+
+ a: object
+ x: object
+ lo: Py_ssize_t = 0
+ hi: Py_ssize_t(c_default='-1', accept={int, NoneType}) = None
+
+Insert item x in list a, and keep it sorted assuming a is sorted.
+
+If x is already in a, insert it to the left of the leftmost x.
+
+Optional args lo (default 0) and hi (default len(a)) bound the
+slice of a to be searched.
+[clinic start generated code]*/
static PyObject *
-insort_left(PyObject *self, PyObject *args, PyObject *kw)
+_bisect_insort_left_impl(PyObject *module, PyObject *a, PyObject *x,
+ Py_ssize_t lo, Py_ssize_t hi)
+/*[clinic end generated code: output=9e8356c0844a182b input=bc4583308bce00cc]*/
{
- PyObject *list, *item, *result;
- Py_ssize_t lo = 0;
- Py_ssize_t hi = -1;
- Py_ssize_t index;
- static char *keywords[] = {"a", "x", "lo", "hi", NULL};
-
- if (kw == NULL && PyTuple_GET_SIZE(args) == 2) {
- list = PyTuple_GET_ITEM(args, 0);
- item = PyTuple_GET_ITEM(args, 1);
- } else {
- if (!PyArg_ParseTupleAndKeywords(args, kw, "OO|nn:insort_left",
- keywords, &list, &item, &lo, &hi))
- return NULL;
- }
- index = internal_bisect_left(list, item, lo, hi);
+ PyObject *result;
+ Py_ssize_t index = internal_bisect_left(a, x, lo, hi);
if (index < 0)
return NULL;
- if (PyList_CheckExact(list)) {
- if (PyList_Insert(list, index, item) < 0)
+ if (PyList_CheckExact(a)) {
+ if (PyList_Insert(a, index, x) < 0)
return NULL;
} else {
- result = _PyObject_CallMethodId(list, &PyId_insert, "nO", index, item);
+ result = _PyObject_CallMethodId(a, &PyId_insert, "nO", index, x);
if (result == NULL)
return NULL;
Py_DECREF(result);
@@ -230,25 +218,11 @@ insort_left(PyObject *self, PyObject *args, PyObject *kw)
Py_RETURN_NONE;
}
-PyDoc_STRVAR(insort_left_doc,
-"insort_left(a, x[, lo[, hi]])\n\
-\n\
-Insert item x in list a, and keep it sorted assuming a is sorted.\n\
-\n\
-If x is already in a, insert it to the left of the leftmost x.\n\
-\n\
-Optional args lo (default 0) and hi (default len(a)) bound the\n\
-slice of a to be searched.\n");
-
static PyMethodDef bisect_methods[] = {
- {"bisect_right", (PyCFunction)(void(*)(void))bisect_right,
- METH_VARARGS|METH_KEYWORDS, bisect_right_doc},
- {"insort_right", (PyCFunction)(void(*)(void))insort_right,
- METH_VARARGS|METH_KEYWORDS, insort_right_doc},
- {"bisect_left", (PyCFunction)(void(*)(void))bisect_left,
- METH_VARARGS|METH_KEYWORDS, bisect_left_doc},
- {"insort_left", (PyCFunction)(void(*)(void))insort_left,
- METH_VARARGS|METH_KEYWORDS, insort_left_doc},
+ _BISECT_BISECT_RIGHT_METHODDEF
+ _BISECT_INSORT_RIGHT_METHODDEF
+ _BISECT_BISECT_LEFT_METHODDEF
+ _BISECT_INSORT_LEFT_METHODDEF
{NULL, NULL} /* sentinel */
};
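
The clinic docstrings above state the bisect_left/bisect_right invariants in prose; in the C code the only difference between the two loops is the comparison operator. A small standalone sketch of the same invariant, using plain C longs instead of PyObject comparisons (purely illustrative, not code from this patch):

    /* Sketch only: bisection over a sorted C array.  With `<` this is
     * bisect_left (insertion point before equal elements); with `<=` it
     * becomes bisect_right (insertion point after them). */
    static size_t
    bisect_left_long(const long *a, size_t n, long x)
    {
        size_t lo = 0, hi = n;
        while (lo < hi) {
            size_t mid = lo + (hi - lo) / 2;
            if (a[mid] < x) {
                lo = mid + 1;
            }
            else {
                hi = mid;
            }
        }
        return lo;   /* every a[i] with i < lo is < x; every a[i] with i >= lo is >= x */
    }
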
diff --git a/Modules/_blake2/blake2b_impl.c b/Modules/_blake2/blake2b_impl.c
index edab31ea..7fb1296f 100644
--- a/Modules/_blake2/blake2b_impl.c
+++ b/Modules/_blake2/blake2b_impl.c
@@ -15,7 +15,6 @@
#include "Python.h"
#include "pystrhex.h"
-#include "pythread.h"
#include "../hashlib.h"
#include "blake2ns.h"
@@ -81,6 +80,7 @@ _blake2.blake2b.__new__ as py_blake2b_new
node_depth: int = 0
inner_size: int = 0
last_node: bool = False
+ usedforsecurity: bool = True
Return a new BLAKE2b hash object.
[clinic start generated code]*/
@@ -90,8 +90,8 @@ py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size,
Py_buffer *key, Py_buffer *salt, Py_buffer *person,
int fanout, int depth, unsigned long leaf_size,
unsigned long long node_offset, int node_depth,
- int inner_size, int last_node)
-/*[clinic end generated code: output=65e732c66c2297a0 input=82be35a4e6a9daa2]*/
+ int inner_size, int last_node, int usedforsecurity)
+/*[clinic end generated code: output=32bfd8f043c6896f input=b947312abff46977]*/
{
BLAKE2bObject *self = NULL;
Py_buffer buf;
diff --git a/Modules/_blake2/blake2module.c b/Modules/_blake2/blake2module.c
index e2a3d420..ff142c9f 100644
--- a/Modules/_blake2/blake2module.c
+++ b/Modules/_blake2/blake2module.c
@@ -62,14 +62,11 @@ PyInit__blake2(void)
return NULL;
/* BLAKE2b */
- Py_TYPE(&PyBlake2_BLAKE2bType) = &PyType_Type;
- if (PyType_Ready(&PyBlake2_BLAKE2bType) < 0) {
+ Py_SET_TYPE(&PyBlake2_BLAKE2bType, &PyType_Type);
+ if (PyModule_AddType(m, &PyBlake2_BLAKE2bType) < 0) {
return NULL;
}
- Py_INCREF(&PyBlake2_BLAKE2bType);
- PyModule_AddObject(m, "blake2b", (PyObject *)&PyBlake2_BLAKE2bType);
-
d = PyBlake2_BLAKE2bType.tp_dict;
ADD_INT(d, "SALT_SIZE", BLAKE2B_SALTBYTES);
ADD_INT(d, "PERSON_SIZE", BLAKE2B_PERSONALBYTES);
@@ -82,14 +79,11 @@ PyInit__blake2(void)
PyModule_AddIntConstant(m, "BLAKE2B_MAX_DIGEST_SIZE", BLAKE2B_OUTBYTES);
/* BLAKE2s */
- Py_TYPE(&PyBlake2_BLAKE2sType) = &PyType_Type;
- if (PyType_Ready(&PyBlake2_BLAKE2sType) < 0) {
+ Py_SET_TYPE(&PyBlake2_BLAKE2sType, &PyType_Type);
+ if (PyModule_AddType(m, &PyBlake2_BLAKE2sType) < 0) {
return NULL;
}
- Py_INCREF(&PyBlake2_BLAKE2sType);
- PyModule_AddObject(m, "blake2s", (PyObject *)&PyBlake2_BLAKE2sType);
-
d = PyBlake2_BLAKE2sType.tp_dict;
ADD_INT(d, "SALT_SIZE", BLAKE2S_SALTBYTES);
ADD_INT(d, "PERSON_SIZE", BLAKE2S_PERSONALBYTES);
diff --git a/Modules/_blake2/blake2s_impl.c b/Modules/_blake2/blake2s_impl.c
index ef2f7e19..e3e90d05 100644
--- a/Modules/_blake2/blake2s_impl.c
+++ b/Modules/_blake2/blake2s_impl.c
@@ -15,7 +15,6 @@
#include "Python.h"
#include "pystrhex.h"
-#include "pythread.h"
#include "../hashlib.h"
#include "blake2ns.h"
@@ -81,6 +80,7 @@ _blake2.blake2s.__new__ as py_blake2s_new
node_depth: int = 0
inner_size: int = 0
last_node: bool = False
+ usedforsecurity: bool = True
Return a new BLAKE2s hash object.
[clinic start generated code]*/
@@ -90,8 +90,8 @@ py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size,
Py_buffer *key, Py_buffer *salt, Py_buffer *person,
int fanout, int depth, unsigned long leaf_size,
unsigned long long node_offset, int node_depth,
- int inner_size, int last_node)
-/*[clinic end generated code: output=b95806be0514dcf7 input=641c0509debf714d]*/
+ int inner_size, int last_node, int usedforsecurity)
+/*[clinic end generated code: output=556181f73905c686 input=4dda87723f23abb0]*/
{
BLAKE2sObject *self = NULL;
Py_buffer buf;
diff --git a/Modules/_blake2/clinic/blake2b_impl.c.h b/Modules/_blake2/clinic/blake2b_impl.c.h
index cd329c07..07258c31 100644
--- a/Modules/_blake2/clinic/blake2b_impl.c.h
+++ b/Modules/_blake2/clinic/blake2b_impl.c.h
@@ -5,7 +5,8 @@ preserve
PyDoc_STRVAR(py_blake2b_new__doc__,
"blake2b(data=b\'\', /, *, digest_size=_blake2.blake2b.MAX_DIGEST_SIZE,\n"
" key=b\'\', salt=b\'\', person=b\'\', fanout=1, depth=1, leaf_size=0,\n"
-" node_offset=0, node_depth=0, inner_size=0, last_node=False)\n"
+" node_offset=0, node_depth=0, inner_size=0, last_node=False,\n"
+" usedforsecurity=True)\n"
"--\n"
"\n"
"Return a new BLAKE2b hash object.");
@@ -15,15 +16,15 @@ py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size,
Py_buffer *key, Py_buffer *salt, Py_buffer *person,
int fanout, int depth, unsigned long leaf_size,
unsigned long long node_offset, int node_depth,
- int inner_size, int last_node);
+ int inner_size, int last_node, int usedforsecurity);
static PyObject *
py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", NULL};
+ static const char * const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", "usedforsecurity", NULL};
static _PyArg_Parser _parser = {NULL, _keywords, "blake2b", 0};
- PyObject *argsbuf[12];
+ PyObject *argsbuf[13];
PyObject * const *fastargs;
Py_ssize_t nargs = PyTuple_GET_SIZE(args);
Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 0;
@@ -39,6 +40,7 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
int node_depth = 0;
int inner_size = 0;
int last_node = 0;
+ int usedforsecurity = 1;
fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 0, 1, 0, argsbuf);
if (!fastargs) {
@@ -175,12 +177,21 @@ skip_optional_posonly:
goto skip_optional_kwonly;
}
}
- last_node = PyObject_IsTrue(fastargs[11]);
- if (last_node < 0) {
+ if (fastargs[11]) {
+ last_node = PyObject_IsTrue(fastargs[11]);
+ if (last_node < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
+ }
+ usedforsecurity = PyObject_IsTrue(fastargs[12]);
+ if (usedforsecurity < 0) {
goto exit;
}
skip_optional_kwonly:
- return_value = py_blake2b_new_impl(type, data, digest_size, &key, &salt, &person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node);
+ return_value = py_blake2b_new_impl(type, data, digest_size, &key, &salt, &person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity);
exit:
/* Cleanup for key */
@@ -261,4 +272,4 @@ _blake2_blake2b_hexdigest(BLAKE2bObject *self, PyObject *Py_UNUSED(ignored))
{
return _blake2_blake2b_hexdigest_impl(self);
}
-/*[clinic end generated code: output=cbb625d7f60c288c input=a9049054013a1b77]*/
+/*[clinic end generated code: output=2d6d0fe9aa42a42a input=a9049054013a1b77]*/
diff --git a/Modules/_blake2/clinic/blake2s_impl.c.h b/Modules/_blake2/clinic/blake2s_impl.c.h
index 560bd681..71c5706f 100644
--- a/Modules/_blake2/clinic/blake2s_impl.c.h
+++ b/Modules/_blake2/clinic/blake2s_impl.c.h
@@ -5,7 +5,8 @@ preserve
PyDoc_STRVAR(py_blake2s_new__doc__,
"blake2s(data=b\'\', /, *, digest_size=_blake2.blake2s.MAX_DIGEST_SIZE,\n"
" key=b\'\', salt=b\'\', person=b\'\', fanout=1, depth=1, leaf_size=0,\n"
-" node_offset=0, node_depth=0, inner_size=0, last_node=False)\n"
+" node_offset=0, node_depth=0, inner_size=0, last_node=False,\n"
+" usedforsecurity=True)\n"
"--\n"
"\n"
"Return a new BLAKE2s hash object.");
@@ -15,15 +16,15 @@ py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size,
Py_buffer *key, Py_buffer *salt, Py_buffer *person,
int fanout, int depth, unsigned long leaf_size,
unsigned long long node_offset, int node_depth,
- int inner_size, int last_node);
+ int inner_size, int last_node, int usedforsecurity);
static PyObject *
py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", NULL};
+ static const char * const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", "usedforsecurity", NULL};
static _PyArg_Parser _parser = {NULL, _keywords, "blake2s", 0};
- PyObject *argsbuf[12];
+ PyObject *argsbuf[13];
PyObject * const *fastargs;
Py_ssize_t nargs = PyTuple_GET_SIZE(args);
Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 0;
@@ -39,6 +40,7 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
int node_depth = 0;
int inner_size = 0;
int last_node = 0;
+ int usedforsecurity = 1;
fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 0, 1, 0, argsbuf);
if (!fastargs) {
@@ -175,12 +177,21 @@ skip_optional_posonly:
goto skip_optional_kwonly;
}
}
- last_node = PyObject_IsTrue(fastargs[11]);
- if (last_node < 0) {
+ if (fastargs[11]) {
+ last_node = PyObject_IsTrue(fastargs[11]);
+ if (last_node < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
+ }
+ usedforsecurity = PyObject_IsTrue(fastargs[12]);
+ if (usedforsecurity < 0) {
goto exit;
}
skip_optional_kwonly:
- return_value = py_blake2s_new_impl(type, data, digest_size, &key, &salt, &person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node);
+ return_value = py_blake2s_new_impl(type, data, digest_size, &key, &salt, &person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity);
exit:
/* Cleanup for key */
@@ -261,4 +272,4 @@ _blake2_blake2s_hexdigest(BLAKE2sObject *self, PyObject *Py_UNUSED(ignored))
{
return _blake2_blake2s_hexdigest_impl(self);
}
-/*[clinic end generated code: output=39af5a74c8805b36 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=c80d8d06ce40a192 input=a9049054013a1b77]*/
diff --git a/Modules/_bz2module.c b/Modules/_bz2module.c
index 31bbf661..880632c6 100644
--- a/Modules/_bz2module.c
+++ b/Modules/_bz2module.c
@@ -3,9 +3,7 @@
#define PY_SSIZE_T_CLEAN
#include "Python.h"
-#include "structmember.h"
-
-#include "pythread.h"
+#include "structmember.h" // PyMemberDef
#include <bzlib.h>
#include <stdio.h>
@@ -728,13 +726,32 @@ static PyTypeObject BZ2Decompressor_Type = {
/* Module initialization. */
+static int
+_bz2_exec(PyObject *module)
+{
+ if (PyModule_AddType(module, &BZ2Compressor_Type) < 0) {
+ return -1;
+ }
+
+ if (PyModule_AddType(module, &BZ2Decompressor_Type) < 0) {
+ return -1;
+ }
+
+ return 0;
+}
+
+static struct PyModuleDef_Slot _bz2_slots[] = {
+ {Py_mod_exec, _bz2_exec},
+ {0, NULL}
+};
+
static struct PyModuleDef _bz2module = {
PyModuleDef_HEAD_INIT,
"_bz2",
NULL,
- -1,
- NULL,
+ 0,
NULL,
+ _bz2_slots,
NULL,
NULL,
NULL
@@ -743,23 +760,5 @@ static struct PyModuleDef _bz2module = {
PyMODINIT_FUNC
PyInit__bz2(void)
{
- PyObject *m;
-
- if (PyType_Ready(&BZ2Compressor_Type) < 0)
- return NULL;
- if (PyType_Ready(&BZ2Decompressor_Type) < 0)
- return NULL;
-
- m = PyModule_Create(&_bz2module);
- if (m == NULL)
- return NULL;
-
- Py_INCREF(&BZ2Compressor_Type);
- PyModule_AddObject(m, "BZ2Compressor", (PyObject *)&BZ2Compressor_Type);
-
- Py_INCREF(&BZ2Decompressor_Type);
- PyModule_AddObject(m, "BZ2Decompressor",
- (PyObject *)&BZ2Decompressor_Type);
-
- return m;
+ return PyModuleDef_Init(&_bz2module);
}
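
The _bz2 change above is a mechanical conversion to multi-phase initialization (PEP 489): PyInit__bz2() now only returns the module definition, and type registration moves into a Py_mod_exec slot. A minimal self-contained sketch of that pattern (the module name "example" and its contents are hypothetical):

    #include "Python.h"

    /* Sketch only: the multi-phase init skeleton that _bz2 adopts in this patch. */
    static int
    example_exec(PyObject *module)
    {
        /* Per-module setup; returning -1 aborts the import with an exception set. */
        return PyModule_AddIntConstant(module, "ANSWER", 42);
    }

    static PyModuleDef_Slot example_slots[] = {
        {Py_mod_exec, example_exec},
        {0, NULL}
    };

    static struct PyModuleDef examplemodule = {
        PyModuleDef_HEAD_INIT,
        "example",       /* m_name */
        NULL,            /* m_doc */
        0,               /* m_size: no per-module state */
        NULL,            /* m_methods */
        example_slots,
        NULL, NULL, NULL
    };

    PyMODINIT_FUNC
    PyInit_example(void)
    {
        /* With slots, module creation and execution happen later, driven by the
         * import machinery; this function no longer builds the module itself. */
        return PyModuleDef_Init(&examplemodule);
    }
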
diff --git a/Modules/_codecsmodule.c b/Modules/_codecsmodule.c
index a8ffb699..95207210 100644
--- a/Modules/_codecsmodule.c
+++ b/Modules/_codecsmodule.c
@@ -1039,13 +1039,17 @@ static PyMethodDef _codecs_functions[] = {
{NULL, NULL} /* sentinel */
};
+static PyModuleDef_Slot _codecs_slots[] = {
+ {0, NULL}
+};
+
static struct PyModuleDef codecsmodule = {
PyModuleDef_HEAD_INIT,
"_codecs",
NULL,
- -1,
+ 0,
_codecs_functions,
- NULL,
+ _codecs_slots,
NULL,
NULL,
NULL
@@ -1054,5 +1058,5 @@ static struct PyModuleDef codecsmodule = {
PyMODINIT_FUNC
PyInit__codecs(void)
{
- return PyModule_Create(&codecsmodule);
+ return PyModuleDef_Init(&codecsmodule);
}
diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c
index cc2b90ea..7120e4dd 100644
--- a/Modules/_collectionsmodule.c
+++ b/Modules/_collectionsmodule.c
@@ -1,10 +1,10 @@
#include "Python.h"
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
#ifdef STDC_HEADERS
#include <stddef.h>
#else
-#include <sys/types.h>          /* For size_t */
+#include <sys/types.h>          // size_t
#endif
/*[clinic input]
@@ -172,7 +172,7 @@ deque_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
MARK_END(b->rightlink);
assert(BLOCKLEN >= 2);
- Py_SIZE(deque) = 0;
+ Py_SET_SIZE(deque, 0);
deque->leftblock = b;
deque->rightblock = b;
deque->leftindex = CENTER + 1;
@@ -196,7 +196,7 @@ deque_pop(dequeobject *deque, PyObject *unused)
}
item = deque->rightblock->data[deque->rightindex];
deque->rightindex--;
- Py_SIZE(deque)--;
+ Py_SET_SIZE(deque, Py_SIZE(deque) - 1);
deque->state++;
if (deque->rightindex < 0) {
@@ -234,7 +234,7 @@ deque_popleft(dequeobject *deque, PyObject *unused)
assert(deque->leftblock != NULL);
item = deque->leftblock->data[deque->leftindex];
deque->leftindex++;
- Py_SIZE(deque)--;
+ Py_SET_SIZE(deque, Py_SIZE(deque) - 1);
deque->state++;
if (deque->leftindex == BLOCKLEN) {
@@ -287,7 +287,7 @@ deque_append_internal(dequeobject *deque, PyObject *item, Py_ssize_t maxlen)
MARK_END(b->rightlink);
deque->rightindex = -1;
}
- Py_SIZE(deque)++;
+ Py_SET_SIZE(deque, Py_SIZE(deque) + 1);
deque->rightindex++;
deque->rightblock->data[deque->rightindex] = item;
if (NEEDS_TRIM(deque, maxlen)) {
@@ -324,7 +324,7 @@ deque_appendleft_internal(dequeobject *deque, PyObject *item, Py_ssize_t maxlen)
MARK_END(b->leftlink);
deque->leftindex = BLOCKLEN;
}
- Py_SIZE(deque)++;
+ Py_SET_SIZE(deque, Py_SIZE(deque) + 1);
deque->leftindex--;
deque->leftblock->data[deque->leftindex] = item;
if (NEEDS_TRIM(deque, deque->maxlen)) {
@@ -489,7 +489,7 @@ deque_copy(PyObject *deque, PyObject *Py_UNUSED(ignored))
{
PyObject *result;
dequeobject *old_deque = (dequeobject *)deque;
- if (Py_TYPE(deque) == &deque_type) {
+ if (Py_IS_TYPE(deque, &deque_type)) {
dequeobject *new_deque;
PyObject *rv;
@@ -512,8 +512,7 @@ deque_copy(PyObject *deque, PyObject *Py_UNUSED(ignored))
return NULL;
}
if (old_deque->maxlen < 0)
- result = PyObject_CallFunctionObjArgs((PyObject *)(Py_TYPE(deque)),
- deque, NULL);
+ result = PyObject_CallOneArg((PyObject *)(Py_TYPE(deque)), deque);
else
result = PyObject_CallFunction((PyObject *)(Py_TYPE(deque)), "Oi",
deque, old_deque->maxlen, NULL);
@@ -540,7 +539,7 @@ deque_concat(dequeobject *deque, PyObject *other)
if (rv == 0) {
PyErr_Format(PyExc_TypeError,
"can only concatenate deque (not \"%.200s\") to deque",
- other->ob_type->tp_name);
+ Py_TYPE(other)->tp_name);
}
return NULL;
}
@@ -598,7 +597,7 @@ deque_clear(dequeobject *deque)
/* Set the deque to be empty using the newly allocated block */
MARK_END(b->leftlink);
MARK_END(b->rightlink);
- Py_SIZE(deque) = 0;
+ Py_SET_SIZE(deque, 0);
deque->leftblock = b;
deque->rightblock = b;
deque->leftindex = CENTER + 1;
@@ -681,7 +680,7 @@ deque_inplace_repeat(dequeobject *deque, Py_ssize_t n)
if (deque->rightindex == BLOCKLEN - 1) {
block *b = newblock();
if (b == NULL) {
- Py_SIZE(deque) += i;
+ Py_SET_SIZE(deque, Py_SIZE(deque) + i);
return NULL;
}
b->leftlink = deque->rightblock;
@@ -701,7 +700,7 @@ deque_inplace_repeat(dequeobject *deque, Py_ssize_t n)
deque->rightblock->data[deque->rightindex] = item;
}
}
- Py_SIZE(deque) += i;
+ Py_SET_SIZE(deque, Py_SIZE(deque) + i);
Py_INCREF(deque);
return (PyObject *)deque;
}
@@ -1610,6 +1609,8 @@ static PyMethodDef deque_methods[] = {
METH_FASTCALL, rotate_doc},
{"__sizeof__", (PyCFunction)deque_sizeof,
METH_NOARGS, sizeof_doc},
+ {"__class_getitem__", (PyCFunction)Py_GenericAlias,
+ METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
{NULL, NULL} /* sentinel */
};
@@ -1981,7 +1982,7 @@ defdict_missing(defdictobject *dd, PyObject *key)
Py_DECREF(tup);
return NULL;
}
- value = PyEval_CallObject(factory, NULL);
+ value = _PyObject_CallNoArg(factory);
if (value == NULL)
return value;
if (PyObject_SetItem((PyObject *)dd, key, value) < 0) {
@@ -1991,6 +1992,13 @@ defdict_missing(defdictobject *dd, PyObject *key)
return value;
}
+static inline PyObject*
+new_defdict(defdictobject *dd, PyObject *arg)
+{
+ return PyObject_CallFunctionObjArgs((PyObject*)Py_TYPE(dd),
+ dd->default_factory ? dd->default_factory : Py_None, arg, NULL);
+}
+
PyDoc_STRVAR(defdict_copy_doc, "D.copy() -> a shallow copy of D.");
static PyObject *
@@ -2000,11 +2008,7 @@ defdict_copy(defdictobject *dd, PyObject *Py_UNUSED(ignored))
whose class constructor has the same signature. Subclasses that
define a different constructor signature must override copy().
*/
-
- if (dd->default_factory == NULL)
- return PyObject_CallFunctionObjArgs((PyObject*)Py_TYPE(dd), Py_None, dd, NULL);
- return PyObject_CallFunctionObjArgs((PyObject*)Py_TYPE(dd),
- dd->default_factory, dd, NULL);
+ return new_defdict(dd, (PyObject*)dd);
}
static PyObject *
@@ -2044,7 +2048,7 @@ defdict_reduce(defdictobject *dd, PyObject *Py_UNUSED(ignored))
args = PyTuple_Pack(1, dd->default_factory);
if (args == NULL)
return NULL;
- items = _PyObject_CallMethodId((PyObject *)dd, &PyId_items, NULL);
+ items = _PyObject_CallMethodIdNoArgs((PyObject *)dd, &PyId_items);
if (items == NULL) {
Py_DECREF(args);
return NULL;
@@ -2072,6 +2076,8 @@ static PyMethodDef defdict_methods[] = {
defdict_copy_doc},
{"__reduce__", (PyCFunction)defdict_reduce, METH_NOARGS,
reduce_doc},
+ {"__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O|METH_CLASS,
+ PyDoc_STR("See PEP 585")},
{NULL}
};
@@ -2128,6 +2134,38 @@ defdict_repr(defdictobject *dd)
return result;
}
+static PyObject*
+defdict_or(PyObject* left, PyObject* right)
+{
+ PyObject *self, *other;
+ if (PyObject_TypeCheck(left, &defdict_type)) {
+ self = left;
+ other = right;
+ }
+ else {
+ self = right;
+ other = left;
+ }
+ if (!PyDict_Check(other)) {
+ Py_RETURN_NOTIMPLEMENTED;
+ }
+ // Like copy(), this calls the object's class.
+ // Override __or__/__ror__ for subclasses with different constructors.
+ PyObject *new = new_defdict((defdictobject*)self, left);
+ if (!new) {
+ return NULL;
+ }
+ if (PyDict_Update(new, right)) {
+ Py_DECREF(new);
+ return NULL;
+ }
+ return new;
+}
+
+static PyNumberMethods defdict_as_number = {
+ .nb_or = defdict_or,
+};
+
static int
defdict_traverse(PyObject *self, visitproc visit, void *arg)
{
@@ -2199,7 +2237,7 @@ static PyTypeObject defdict_type = {
0, /* tp_setattr */
0, /* tp_as_async */
(reprfunc)defdict_repr, /* tp_repr */
- 0, /* tp_as_number */
+ &defdict_as_number, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
0, /* tp_hash */
@@ -2400,7 +2438,7 @@ tuplegetter_descr_get(PyObject *self, PyObject *obj, PyObject *type)
"descriptor for index '%zd' for tuple subclasses "
"doesn't apply to '%s' object",
index,
- obj->ob_type->tp_name);
+ Py_TYPE(obj)->tp_name);
return NULL;
}
@@ -2455,6 +2493,14 @@ tuplegetter_reduce(_tuplegetterobject *self, PyObject *Py_UNUSED(ignored))
return Py_BuildValue("(O(nO))", (PyObject*) Py_TYPE(self), self->index, self->doc);
}
+static PyObject*
+tuplegetter_repr(_tuplegetterobject *self)
+{
+ return PyUnicode_FromFormat("%s(%zd, %R)",
+ _PyType_Name(Py_TYPE(self)),
+ self->index, self->doc);
+}
+
static PyMemberDef tuplegetter_members[] = {
{"__doc__", T_OBJECT, offsetof(_tuplegetterobject, doc), 0},
@@ -2477,7 +2523,7 @@ static PyTypeObject tuplegetter_type = {
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_as_async */
- 0, /* tp_repr */
+ (reprfunc)tuplegetter_repr, /* tp_repr */
0, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
@@ -2512,24 +2558,51 @@ static PyTypeObject tuplegetter_type = {
/* module level code ********************************************************/
-PyDoc_STRVAR(module_doc,
+PyDoc_STRVAR(collections_doc,
"High performance data structures.\n\
- deque: ordered collection accessible from endpoints only\n\
- defaultdict: dict subclass with a default value factory\n\
");
-static struct PyMethodDef module_functions[] = {
+static struct PyMethodDef collections_methods[] = {
_COLLECTIONS__COUNT_ELEMENTS_METHODDEF
{NULL, NULL} /* sentinel */
};
+static int
+collections_exec(PyObject *module) {
+ PyTypeObject *typelist[] = {
+ &deque_type,
+ &defdict_type,
+ &PyODict_Type,
+ &dequeiter_type,
+ &dequereviter_type,
+ &tuplegetter_type
+ };
+
+ defdict_type.tp_base = &PyDict_Type;
+
+ for (size_t i = 0; i < Py_ARRAY_LENGTH(typelist); i++) {
+ if (PyModule_AddType(module, typelist[i]) < 0) {
+ return -1;
+ }
+ }
+
+ return 0;
+}
+
+static struct PyModuleDef_Slot collections_slots[] = {
+ {Py_mod_exec, collections_exec},
+ {0, NULL}
+};
+
static struct PyModuleDef _collectionsmodule = {
PyModuleDef_HEAD_INIT,
"_collections",
- module_doc,
- -1,
- module_functions,
- NULL,
+ collections_doc,
+ 0,
+ collections_methods,
+ collections_slots,
NULL,
NULL,
NULL
@@ -2538,40 +2611,5 @@ static struct PyModuleDef _collectionsmodule = {
PyMODINIT_FUNC
PyInit__collections(void)
{
- PyObject *m;
-
- m = PyModule_Create(&_collectionsmodule);
- if (m == NULL)
- return NULL;
-
- if (PyType_Ready(&deque_type) < 0)
- return NULL;
- Py_INCREF(&deque_type);
- PyModule_AddObject(m, "deque", (PyObject *)&deque_type);
-
- defdict_type.tp_base = &PyDict_Type;
- if (PyType_Ready(&defdict_type) < 0)
- return NULL;
- Py_INCREF(&defdict_type);
- PyModule_AddObject(m, "defaultdict", (PyObject *)&defdict_type);
-
- Py_INCREF(&PyODict_Type);
- PyModule_AddObject(m, "OrderedDict", (PyObject *)&PyODict_Type);
-
- if (PyType_Ready(&dequeiter_type) < 0)
- return NULL;
- Py_INCREF(&dequeiter_type);
- PyModule_AddObject(m, "_deque_iterator", (PyObject *)&dequeiter_type);
-
- if (PyType_Ready(&dequereviter_type) < 0)
- return NULL;
- Py_INCREF(&dequereviter_type);
- PyModule_AddObject(m, "_deque_reverse_iterator", (PyObject *)&dequereviter_type);
-
- if (PyType_Ready(&tuplegetter_type) < 0)
- return NULL;
- Py_INCREF(&tuplegetter_type);
- PyModule_AddObject(m, "_tuplegetter", (PyObject *)&tuplegetter_type);
-
- return m;
+ return PyModuleDef_Init(&_collectionsmodule);
}
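
Several deque hunks above replace direct writes through the Py_SIZE()/Py_TYPE() lvalues with the Py_SET_SIZE()/Py_SET_TYPE() setters, and Py_TYPE(x) == y checks with Py_IS_TYPE(). A brief sketch of the before/after idiom (illustrative only, not code from this patch):

    #include "Python.h"

    /* Sketch only: the accessor style these hunks adopt. */
    static void
    bump_size(PyVarObject *op)
    {
        /* formerly: Py_SIZE(op)++;  (writing through the macro as an lvalue) */
        Py_SET_SIZE(op, Py_SIZE(op) + 1);
    }

    static int
    is_exact_list(PyObject *op)
    {
        /* formerly: Py_TYPE(op) == &PyList_Type */
        return Py_IS_TYPE(op, &PyList_Type);
    }
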
diff --git a/Modules/_contextvarsmodule.c b/Modules/_contextvarsmodule.c
index 1abcdbfa..d6d7f375 100644
--- a/Modules/_contextvarsmodule.c
+++ b/Modules/_contextvarsmodule.c
@@ -27,33 +27,15 @@ static PyMethodDef _contextvars_methods[] = {
{NULL, NULL}
};
-static struct PyModuleDef _contextvarsmodule = {
- PyModuleDef_HEAD_INIT, /* m_base */
- "_contextvars", /* m_name */
- module_doc, /* m_doc */
- -1, /* m_size */
- _contextvars_methods, /* m_methods */
- NULL, /* m_slots */
- NULL, /* m_traverse */
- NULL, /* m_clear */
- NULL, /* m_free */
-};
-
-PyMODINIT_FUNC
-PyInit__contextvars(void)
+static int
+_contextvars_exec(PyObject *m)
{
- PyObject *m = PyModule_Create(&_contextvarsmodule);
- if (m == NULL) {
- return NULL;
- }
-
Py_INCREF(&PyContext_Type);
if (PyModule_AddObject(m, "Context",
(PyObject *)&PyContext_Type) < 0)
{
Py_DECREF(&PyContext_Type);
- Py_DECREF(m);
- return NULL;
+ return -1;
}
Py_INCREF(&PyContextVar_Type);
@@ -61,8 +43,7 @@ PyInit__contextvars(void)
(PyObject *)&PyContextVar_Type) < 0)
{
Py_DECREF(&PyContextVar_Type);
- Py_DECREF(m);
- return NULL;
+ return -1;
}
Py_INCREF(&PyContextToken_Type);
@@ -70,9 +51,31 @@ PyInit__contextvars(void)
(PyObject *)&PyContextToken_Type) < 0)
{
Py_DECREF(&PyContextToken_Type);
- Py_DECREF(m);
- return NULL;
+ return -1;
}
- return m;
+ return 0;
+}
+
+static struct PyModuleDef_Slot _contextvars_slots[] = {
+ {Py_mod_exec, _contextvars_exec},
+ {0, NULL}
+};
+
+static struct PyModuleDef _contextvarsmodule = {
+ PyModuleDef_HEAD_INIT, /* m_base */
+ "_contextvars", /* m_name */
+ module_doc, /* m_doc */
+ 0, /* m_size */
+ _contextvars_methods, /* m_methods */
+ _contextvars_slots, /* m_slots */
+ NULL, /* m_traverse */
+ NULL, /* m_clear */
+ NULL, /* m_free */
+};
+
+PyMODINIT_FUNC
+PyInit__contextvars(void)
+{
+ return PyModuleDef_Init(&_contextvarsmodule);
}
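
_contextvars keeps the manual Py_INCREF / PyModule_AddObject / Py_DECREF-on-failure dance rather than switching to PyModule_AddType(), because PyModule_AddObject() steals a reference only on success. A sketch of that convention (SomeType is a hypothetical static type, not one from this patch):

    #include "Python.h"

    extern PyTypeObject SomeType;   /* hypothetical, defined elsewhere */

    static int
    add_some_type(PyObject *module)
    {
        /* PyModule_AddObject() steals the reference only when it succeeds, so
         * the caller holds one going in and drops it on the error path.
         * PyModule_AddType() (used for _bz2 and _collections above) bundles
         * PyType_Ready() plus this reference handling. */
        Py_INCREF(&SomeType);
        if (PyModule_AddObject(module, "Some", (PyObject *)&SomeType) < 0) {
            Py_DECREF(&SomeType);
            return -1;
        }
        return 0;
    }
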
diff --git a/Modules/_cryptmodule.c b/Modules/_cryptmodule.c
index 5d03f45f..a95f55a6 100644
--- a/Modules/_cryptmodule.c
+++ b/Modules/_cryptmodule.c
@@ -42,6 +42,9 @@ crypt_crypt_impl(PyObject *module, const char *word, const char *salt)
#else
crypt_result = crypt(word, salt);
#endif
+ if (crypt_result == NULL) {
+ return PyErr_SetFromErrno(PyExc_OSError);
+ }
return Py_BuildValue("s", crypt_result);
}
@@ -51,14 +54,17 @@ static PyMethodDef crypt_methods[] = {
{NULL, NULL} /* sentinel */
};
+static PyModuleDef_Slot _crypt_slots[] = {
+ {0, NULL}
+};
static struct PyModuleDef cryptmodule = {
PyModuleDef_HEAD_INIT,
"_crypt",
NULL,
- -1,
+ 0,
crypt_methods,
- NULL,
+ _crypt_slots,
NULL,
NULL,
NULL
@@ -67,5 +73,5 @@ static struct PyModuleDef cryptmodule = {
PyMODINIT_FUNC
PyInit__crypt(void)
{
- return PyModule_Create(&cryptmodule);
+ return PyModuleDef_Init(&cryptmodule);
}
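
The _crypt hunk adds a NULL check so a failing crypt(3) call surfaces as OSError instead of Py_BuildValue("s", NULL) silently returning None. As a general illustration of that error-reporting pattern, here is a sketch using realpath(3); it is not code from this patch:

    #include "Python.h"
    #include <stdlib.h>

    /* Sketch only: report a failing libc call as OSError carrying errno. */
    static PyObject *
    resolve_path(const char *path)
    {
        char *resolved = realpath(path, NULL);   /* NULL + errno on failure */
        if (resolved == NULL) {
            /* Sets OSError from errno and returns NULL so the error propagates. */
            return PyErr_SetFromErrno(PyExc_OSError);
        }
        PyObject *res = PyUnicode_DecodeFSDefault(resolved);
        free(resolved);
        return res;
    }
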
diff --git a/Modules/_csv.c b/Modules/_csv.c
index 46d41438..59109b01 100644
--- a/Modules/_csv.c
+++ b/Modules/_csv.c
@@ -11,7 +11,7 @@ module instead.
#define MODULE_VERSION "1.0"
#include "Python.h"
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
#include
@@ -21,21 +21,27 @@ typedef struct {
long field_limit; /* max parsed field size */
} _csvstate;
-#define _csvstate(o) ((_csvstate *)PyModule_GetState(o))
+static inline _csvstate*
+get_csv_state(PyObject *module)
+{
+ void *state = PyModule_GetState(module);
+ assert(state != NULL);
+ return (_csvstate *)state;
+}
static int
_csv_clear(PyObject *m)
{
- Py_CLEAR(_csvstate(m)->error_obj);
- Py_CLEAR(_csvstate(m)->dialects);
+ Py_CLEAR(get_csv_state(m)->error_obj);
+ Py_CLEAR(get_csv_state(m)->dialects);
return 0;
}
static int
_csv_traverse(PyObject *m, visitproc visit, void *arg)
{
- Py_VISIT(_csvstate(m)->error_obj);
- Py_VISIT(_csvstate(m)->dialects);
+ Py_VISIT(get_csv_state(m)->error_obj);
+ Py_VISIT(get_csv_state(m)->dialects);
return 0;
}
@@ -106,7 +112,7 @@ typedef struct {
static PyTypeObject Reader_Type;
-#define ReaderObject_Check(v) (Py_TYPE(v) == &Reader_Type)
+#define ReaderObject_Check(v) Py_IS_TYPE(v, &Reader_Type)
typedef struct {
PyObject_HEAD
@@ -236,7 +242,7 @@ _set_char(const char *name, Py_UCS4 *target, PyObject *src, Py_UCS4 dflt)
if (!PyUnicode_Check(src)) {
PyErr_Format(PyExc_TypeError,
"\"%s\" must be string, not %.200s", name,
- src->ob_type->tp_name);
+ Py_TYPE(src)->tp_name);
return -1;
}
len = PyUnicode_GetLength(src);
@@ -514,10 +520,10 @@ _call_dialect(PyObject *dialect_inst, PyObject *kwargs)
{
PyObject *type = (PyObject *)&Dialect_Type;
if (dialect_inst) {
- return _PyObject_FastCallDict(type, &dialect_inst, 1, kwargs);
+ return PyObject_VectorcallDict(type, &dialect_inst, 1, kwargs);
}
else {
- return _PyObject_FastCallDict(type, NULL, 0, kwargs);
+ return PyObject_VectorcallDict(type, NULL, 0, kwargs);
}
}
@@ -783,7 +789,7 @@ Reader_iternext(ReaderObj *self)
Py_UCS4 c;
Py_ssize_t pos, linelen;
unsigned int kind;
- void *data;
+ const void *data;
PyObject *lineobj;
if (parse_reset(self) < 0)
@@ -807,7 +813,7 @@ Reader_iternext(ReaderObj *self)
"iterator should return strings, "
"not %.200s "
"(did you open the file in text mode?)",
- lineobj->ob_type->tp_name
+ Py_TYPE(lineobj)->tp_name
);
Py_DECREF(lineobj);
return NULL;
@@ -958,8 +964,6 @@ csv_reader(PyObject *module, PyObject *args, PyObject *keyword_args)
}
self->input_iter = PyObject_GetIter(iterator);
if (self->input_iter == NULL) {
- PyErr_SetString(PyExc_TypeError,
- "argument 1 must be an iterator");
Py_DECREF(self);
return NULL;
}
@@ -990,7 +994,7 @@ join_reset(WriterObj *self)
* record length.
*/
static Py_ssize_t
-join_append_data(WriterObj *self, unsigned int field_kind, void *field_data,
+join_append_data(WriterObj *self, unsigned int field_kind, const void *field_data,
Py_ssize_t field_len, int *quoted,
int copy_phase)
{
@@ -1101,7 +1105,7 @@ static int
join_append(WriterObj *self, PyObject *field, int quoted)
{
unsigned int field_kind = -1;
- void *field_data = NULL;
+ const void *field_data = NULL;
Py_ssize_t field_len = 0;
Py_ssize_t rec_len;
@@ -1133,7 +1137,7 @@ join_append_lineterminator(WriterObj *self)
{
Py_ssize_t terminator_len, i;
unsigned int term_kind;
- void *term_data;
+ const void *term_data;
terminator_len = PyUnicode_GET_LENGTH(self->dialect->lineterminator);
if (terminator_len == -1)
@@ -1165,10 +1169,14 @@ csv_writerow(WriterObj *self, PyObject *seq)
PyObject *iter, *field, *line, *result;
iter = PyObject_GetIter(seq);
- if (iter == NULL)
- return PyErr_Format(_csvstate_global->error_obj,
- "iterable expected, not %.200s",
- seq->ob_type->tp_name);
+ if (iter == NULL) {
+ if (PyErr_ExceptionMatches(PyExc_TypeError)) {
+ PyErr_Format(_csvstate_global->error_obj,
+ "iterable expected, not %.200s",
+ Py_TYPE(seq)->tp_name);
+ }
+ return NULL;
+ }
/* Join all fields in internal buffer.
*/
@@ -1240,7 +1248,7 @@ csv_writerow(WriterObj *self, PyObject *seq)
if (line == NULL) {
return NULL;
}
- result = PyObject_CallFunctionObjArgs(self->write, line, NULL);
+ result = PyObject_CallOneArg(self->write, line);
Py_DECREF(line);
return result;
}
@@ -1258,8 +1266,6 @@ csv_writerows(WriterObj *self, PyObject *seqseq)
row_iter = PyObject_GetIter(seqseq);
if (row_iter == NULL) {
- PyErr_SetString(PyExc_TypeError,
- "writerows() argument must be iterable");
return NULL;
}
while ((row_obj = PyIter_Next(row_iter))) {
@@ -1627,9 +1633,6 @@ PyInit__csv(void)
PyObject *module;
const StyleDesc *style;
- if (PyType_Ready(&Dialect_Type) < 0)
- return NULL;
-
if (PyType_Ready(&Reader_Type) < 0)
return NULL;
@@ -1647,15 +1650,15 @@ PyInit__csv(void)
return NULL;
/* Set the field limit */
- _csvstate(module)->field_limit = 128 * 1024;
+ get_csv_state(module)->field_limit = 128 * 1024;
/* Do I still need to add this var to the Module Dict? */
/* Add _dialects dictionary */
- _csvstate(module)->dialects = PyDict_New();
- if (_csvstate(module)->dialects == NULL)
+ get_csv_state(module)->dialects = PyDict_New();
+ if (get_csv_state(module)->dialects == NULL)
return NULL;
- Py_INCREF(_csvstate(module)->dialects);
- if (PyModule_AddObject(module, "_dialects", _csvstate(module)->dialects))
+ Py_INCREF(get_csv_state(module)->dialects);
+ if (PyModule_AddObject(module, "_dialects", get_csv_state(module)->dialects))
return NULL;
/* Add quote styles into dictionary */
@@ -1665,16 +1668,15 @@ PyInit__csv(void)
return NULL;
}
- /* Add the Dialect type */
- Py_INCREF(&Dialect_Type);
- if (PyModule_AddObject(module, "Dialect", (PyObject *)&Dialect_Type))
+ if (PyModule_AddType(module, &Dialect_Type)) {
return NULL;
+ }
/* Add the CSV exception object to the module. */
- _csvstate(module)->error_obj = PyErr_NewException("_csv.Error", NULL, NULL);
- if (_csvstate(module)->error_obj == NULL)
+ get_csv_state(module)->error_obj = PyErr_NewException("_csv.Error", NULL, NULL);
+ if (get_csv_state(module)->error_obj == NULL)
return NULL;
- Py_INCREF(_csvstate(module)->error_obj);
- PyModule_AddObject(module, "Error", _csvstate(module)->error_obj);
+ Py_INCREF(get_csv_state(module)->error_obj);
+ PyModule_AddObject(module, "Error", get_csv_state(module)->error_obj);
return module;
}
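
The get_csv_state() helper above wraps PyModule_GetState() with an assert; the struct it returns lives in memory reserved by the module definition's m_size field. A minimal sketch of that per-module-state pattern (example_state and the module definition are hypothetical):

    #include "Python.h"
    #include <assert.h>

    /* Sketch only: per-module state accessed through PyModule_GetState(). */
    typedef struct {
        PyObject *error_obj;
    } example_state;

    static inline example_state *
    get_example_state(PyObject *module)
    {
        void *state = PyModule_GetState(module);
        assert(state != NULL);              /* non-NULL when m_size > 0 */
        return (example_state *)state;
    }

    static struct PyModuleDef examplemodule = {
        PyModuleDef_HEAD_INIT,
        "example",
        NULL,
        sizeof(example_state),              /* m_size reserves the state block */
        NULL, NULL, NULL, NULL, NULL
    };

A full module would also wire m_traverse/m_clear to visit and clear the state's object fields, as the _csv_traverse/_csv_clear functions above do.
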
diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c
index b10b8672..ceae67eb 100644
--- a/Modules/_ctypes/_ctypes.c
+++ b/Modules/_ctypes/_ctypes.c
@@ -102,7 +102,7 @@ bytes(cdata)
#define PY_SSIZE_T_CLEAN
#include "Python.h"
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
#include
#ifdef MS_WIN32
@@ -1060,8 +1060,8 @@ PyCPointerType_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
stgdict items size, align, length contain info about pointers itself,
stgdict->proto has info about the pointed to type!
*/
- stgdict = (StgDictObject *)PyObject_CallObject(
- (PyObject *)&PyCStgDict_Type, NULL);
+ stgdict = (StgDictObject *)_PyObject_CallNoArg(
+ (PyObject *)&PyCStgDict_Type);
if (!stgdict)
return NULL;
stgdict->size = sizeof(void *);
@@ -1310,7 +1310,7 @@ CharArray_get_value(CDataObject *self, void *Py_UNUSED(ignored))
static int
CharArray_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored))
{
- char *ptr;
+ const char *ptr;
Py_ssize_t size;
if (value == NULL) {
@@ -1552,8 +1552,8 @@ PyCArrayType_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
goto error;
}
- stgdict = (StgDictObject *)PyObject_CallObject(
- (PyObject *)&PyCStgDict_Type, NULL);
+ stgdict = (StgDictObject *)_PyObject_CallNoArg(
+ (PyObject *)&PyCStgDict_Type);
if (!stgdict)
goto error;
@@ -2009,8 +2009,8 @@ static PyObject *CreateSwappedType(PyTypeObject *type, PyObject *args, PyObject
if (result == NULL)
return NULL;
- stgdict = (StgDictObject *)PyObject_CallObject(
- (PyObject *)&PyCStgDict_Type, NULL);
+ stgdict = (StgDictObject *)_PyObject_CallNoArg(
+ (PyObject *)&PyCStgDict_Type);
if (!stgdict) {
Py_DECREF(result);
return NULL;
@@ -2123,8 +2123,8 @@ PyCSimpleType_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
goto error;
}
- stgdict = (StgDictObject *)PyObject_CallObject(
- (PyObject *)&PyCStgDict_Type, NULL);
+ stgdict = (StgDictObject *)_PyObject_CallNoArg(
+ (PyObject *)&PyCStgDict_Type);
if (!stgdict)
goto error;
@@ -2563,8 +2563,8 @@ PyCFuncPtrType_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
PyTypeObject *result;
StgDictObject *stgdict;
- stgdict = (StgDictObject *)PyObject_CallObject(
- (PyObject *)&PyCStgDict_Type, NULL);
+ stgdict = (StgDictObject *)_PyObject_CallNoArg(
+ (PyObject *)&PyCStgDict_Type);
if (!stgdict)
return NULL;
@@ -4085,7 +4085,7 @@ _build_result(PyObject *result, PyObject *callargs,
_Py_IDENTIFIER(__ctypes_from_outparam__);
v = PyTuple_GET_ITEM(callargs, i);
- v = _PyObject_CallMethodId(v, &PyId___ctypes_from_outparam__, NULL);
+ v = _PyObject_CallMethodIdNoArgs(v, &PyId___ctypes_from_outparam__);
if (v == NULL || numretvals == 1) {
Py_DECREF(callargs);
return v;
@@ -4798,6 +4798,12 @@ Array_length(PyObject *myself)
return self->b_length;
}
+static PyMethodDef Array_methods[] = {
+ {"__class_getitem__", (PyCFunction)Py_GenericAlias,
+ METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
+ { NULL, NULL }
+};
+
static PySequenceMethods Array_as_sequence = {
Array_length, /* sq_length; */
0, /* sq_concat; */
@@ -4846,7 +4852,7 @@ PyTypeObject PyCArray_Type = {
0, /* tp_weaklistoffset */
0, /* tp_iter */
0, /* tp_iternext */
- 0, /* tp_methods */
+ Array_methods, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
@@ -5265,7 +5271,8 @@ Pointer_subscript(PyObject *myself, PyObject *item)
PyObject *np;
StgDictObject *stgdict, *itemdict;
PyObject *proto;
- Py_ssize_t i, len, cur;
+ Py_ssize_t i, len;
+ size_t cur;
/* Since pointers have no length, and we want to apply
different semantics to negative indices than normal
@@ -5694,7 +5701,6 @@ PyInit__ctypes(void)
ob_type is the metatype (the 'type'), defaults to PyType_Type,
tp_base is the base type, defaults to 'object' aka PyBaseObject_Type.
*/
- PyEval_InitThreads();
m = PyModule_Create(&_ctypesmodule);
if (!m)
return NULL;
@@ -5757,42 +5763,42 @@ PyInit__ctypes(void)
if (PyType_Ready(&PyCData_Type) < 0)
return NULL;
- Py_TYPE(&Struct_Type) = &PyCStructType_Type;
+ Py_SET_TYPE(&Struct_Type, &PyCStructType_Type);
Struct_Type.tp_base = &PyCData_Type;
if (PyType_Ready(&Struct_Type) < 0)
return NULL;
Py_INCREF(&Struct_Type);
PyModule_AddObject(m, "Structure", (PyObject *)&Struct_Type);
- Py_TYPE(&Union_Type) = &UnionType_Type;
+ Py_SET_TYPE(&Union_Type, &UnionType_Type);
Union_Type.tp_base = &PyCData_Type;
if (PyType_Ready(&Union_Type) < 0)
return NULL;
Py_INCREF(&Union_Type);
PyModule_AddObject(m, "Union", (PyObject *)&Union_Type);
- Py_TYPE(&PyCPointer_Type) = &PyCPointerType_Type;
+ Py_SET_TYPE(&PyCPointer_Type, &PyCPointerType_Type);
PyCPointer_Type.tp_base = &PyCData_Type;
if (PyType_Ready(&PyCPointer_Type) < 0)
return NULL;
Py_INCREF(&PyCPointer_Type);
PyModule_AddObject(m, "_Pointer", (PyObject *)&PyCPointer_Type);
- Py_TYPE(&PyCArray_Type) = &PyCArrayType_Type;
+ Py_SET_TYPE(&PyCArray_Type, &PyCArrayType_Type);
PyCArray_Type.tp_base = &PyCData_Type;
if (PyType_Ready(&PyCArray_Type) < 0)
return NULL;
Py_INCREF(&PyCArray_Type);
PyModule_AddObject(m, "Array", (PyObject *)&PyCArray_Type);
- Py_TYPE(&Simple_Type) = &PyCSimpleType_Type;
+ Py_SET_TYPE(&Simple_Type, &PyCSimpleType_Type);
Simple_Type.tp_base = &PyCData_Type;
if (PyType_Ready(&Simple_Type) < 0)
return NULL;
Py_INCREF(&Simple_Type);
PyModule_AddObject(m, "_SimpleCData", (PyObject *)&Simple_Type);
- Py_TYPE(&PyCFuncPtr_Type) = &PyCFuncPtrType_Type;
+ Py_SET_TYPE(&PyCFuncPtr_Type, &PyCFuncPtrType_Type);
PyCFuncPtr_Type.tp_base = &PyCData_Type;
if (PyType_Ready(&PyCFuncPtr_Type) < 0)
return NULL;
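The block of Py_SET_TYPE() calls above replaces direct assignments through Py_TYPE(), which newer headers reject as an lvalue. A minimal sketch of the same pattern with a hypothetical type (in the patch the metatype is a custom one such as PyCStructType_Type; PyType_Type is used here only to keep the example self-contained):

#include "Python.h"

static PyTypeObject Example_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    .tp_name = "demo.Example",
    .tp_basicsize = sizeof(PyObject),
    .tp_flags = Py_TPFLAGS_DEFAULT,
};

static int
example_ready(void)
{
    /* Old spelling: Py_TYPE(&Example_Type) = &PyType_Type;  (no longer compiles) */
    Py_SET_TYPE(&Example_Type, &PyType_Type);
    return PyType_Ready(&Example_Type);
}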
diff --git a/Modules/_ctypes/_ctypes_test.c b/Modules/_ctypes/_ctypes_test.c
index 33922082..1ccad8e0 100644
--- a/Modules/_ctypes/_ctypes_test.c
+++ b/Modules/_ctypes/_ctypes_test.c
@@ -4,11 +4,7 @@
#include <windows.h>
#endif
-#if defined(MS_WIN32) || defined(__CYGWIN__)
-#define EXPORT(x) __declspec(dllexport) x
-#else
-#define EXPORT(x) x
-#endif
+#define EXPORT(x) Py_EXPORTED_SYMBOL x
/* some functions handy for testing */
@@ -598,30 +594,6 @@ struct BITS {
#endif
};
-EXPORT(void) set_bitfields(struct BITS *bits, char name, int value)
-{
- switch (name) {
- case 'A': bits->A = value; break;
- case 'B': bits->B = value; break;
- case 'C': bits->C = value; break;
- case 'D': bits->D = value; break;
- case 'E': bits->E = value; break;
- case 'F': bits->F = value; break;
- case 'G': bits->G = value; break;
- case 'H': bits->H = value; break;
- case 'I': bits->I = value; break;
-#ifdef SIGNED_SHORT_BITFIELDS
- case 'M': bits->M = value; break;
- case 'N': bits->N = value; break;
- case 'O': bits->O = value; break;
- case 'P': bits->P = value; break;
- case 'Q': bits->Q = value; break;
- case 'R': bits->R = value; break;
- case 'S': bits->S = value; break;
-#endif
- }
-}
-
EXPORT(int) unpack_bitfields(struct BITS *bits, char name)
{
switch (name) {
@@ -1060,14 +1032,17 @@ EXPORT (HRESULT) KeepObject(IUnknown *punk)
#endif
+static struct PyModuleDef_Slot _ctypes_test_slots[] = {
+ {0, NULL}
+};
static struct PyModuleDef _ctypes_testmodule = {
PyModuleDef_HEAD_INIT,
"_ctypes_test",
NULL,
- -1,
+ 0,
module_methods,
- NULL,
+ _ctypes_test_slots,
NULL,
NULL,
NULL
@@ -1076,5 +1051,5 @@ static struct PyModuleDef _ctypes_testmodule = {
PyMODINIT_FUNC
PyInit__ctypes_test(void)
{
- return PyModule_Create(&_ctypes_testmodule);
+ return PyModuleDef_Init(&_ctypes_testmodule);
}
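The _ctypes_test hunk above converts the module to multi-phase initialization (PEP 489): m_size becomes 0, an empty slot table is supplied, and the init function returns the def through PyModuleDef_Init() instead of creating the module itself. A condensed standalone sketch of that shape, with an illustrative module name:

#include "Python.h"

static PyMethodDef demo_methods[] = {
    {NULL, NULL}
};

/* An empty slot array is enough to opt in to multi-phase init. */
static PyModuleDef_Slot demo_slots[] = {
    {0, NULL}
};

static struct PyModuleDef demomodule = {
    PyModuleDef_HEAD_INIT,
    "demo",
    NULL,
    0,                  /* m_size: no per-module state */
    demo_methods,
    demo_slots,
    NULL, NULL, NULL
};

PyMODINIT_FUNC
PyInit_demo(void)
{
    /* Return the def; the interpreter creates the module object later,
     * once per (sub)interpreter. */
    return PyModuleDef_Init(&demomodule);
}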
diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c
index 2a364d6c..2abfa67c 100644
--- a/Modules/_ctypes/callbacks.c
+++ b/Modules/_ctypes/callbacks.c
@@ -84,7 +84,7 @@ PrintError(const char *msg, ...)
va_list marker;
va_start(marker, msg);
- vsnprintf(buf, sizeof(buf), msg, marker);
+ PyOS_vsnprintf(buf, sizeof(buf), msg, marker);
va_end(marker);
if (f != NULL && f != Py_None)
PyFile_WriteString(buf, f);
@@ -438,7 +438,6 @@ static void LoadPython(void)
{
if (!Py_IsInitialized()) {
Py_Initialize();
- PyEval_InitThreads();
}
}
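The callbacks.c hunk swaps plain vsnprintf() for PyOS_vsnprintf(), whose truncation and NUL-termination behaviour is consistent across platforms. A small sketch of a varargs formatting helper in that style (the function name is illustrative):

#include "Python.h"
#include <stdarg.h>

/* Format a message into a fixed buffer via the portable wrapper;
 * PyOS_vsnprintf() always NUL-terminates when bufsize > 0. */
static void
format_error(char *buf, size_t bufsize, const char *msg, ...)
{
    va_list marker;
    va_start(marker, msg);
    PyOS_vsnprintf(buf, bufsize, msg, marker);
    va_end(marker);
}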
diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c
index a9b8675c..6030cc3d 100644
--- a/Modules/_ctypes/callproc.c
+++ b/Modules/_ctypes/callproc.c
@@ -55,7 +55,7 @@
*/
#include "Python.h"
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
#ifdef MS_WIN32
#include <windows.h>
@@ -156,10 +156,9 @@ _ctypes_get_errobj(int **pspace)
Py_INCREF(errobj);
}
else if (!PyErr_Occurred()) {
- void *space = PyMem_Malloc(sizeof(int) * 2);
+ void *space = PyMem_Calloc(2, sizeof(int));
if (space == NULL)
return NULL;
- memset(space, 0, sizeof(int) * 2);
errobj = PyCapsule_New(space, CTYPES_CAPSULE_NAME_PYMEM, pymem_destructor);
if (errobj == NULL) {
PyMem_Free(space);
@@ -752,7 +751,7 @@ static int ConvParam(PyObject *obj, Py_ssize_t index, struct argument *pa)
#if defined(MS_WIN32) && !defined(_WIN32_WCE)
/*
Per: https://msdn.microsoft.com/en-us/library/7572ztz4.aspx
-To be returned by value in RAX, user-defined types must have a length
+To be returned by value in RAX, user-defined types must have a length
of 1, 2, 4, 8, 16, 32, or 64 bits
*/
int can_return_struct_as_int(size_t s)
@@ -945,7 +944,7 @@ static PyObject *GetResult(PyObject *restype, void *result, PyObject *checker)
if (!checker || !retval)
return retval;
- v = PyObject_CallFunctionObjArgs(checker, retval, NULL);
+ v = PyObject_CallOneArg(checker, retval);
if (v == NULL)
_PyTraceback_Add("GetResult", "_ctypes/callproc.c", __LINE__-2);
Py_DECREF(retval);
@@ -1153,7 +1152,7 @@ PyObject *_ctypes_callproc(PPROC pProc,
if (argtypes && argtype_count > i) {
PyObject *v;
converter = PyTuple_GET_ITEM(argtypes, i);
- v = PyObject_CallFunctionObjArgs(converter, arg, NULL);
+ v = PyObject_CallOneArg(converter, arg);
if (v == NULL) {
_ctypes_extend_error(PyExc_ArgError, "argument %zd: ", i+1);
goto cleanup;
@@ -1402,7 +1401,7 @@ copy_com_pointer(PyObject *self, PyObject *args)
static PyObject *py_dl_open(PyObject *self, PyObject *args)
{
PyObject *name, *name2;
- char *name_str;
+ const char *name_str;
void * handle;
#if HAVE_DECL_RTLD_LOCAL
int mode = RTLD_NOW | RTLD_LOCAL;
@@ -1726,10 +1725,9 @@ resize(PyObject *self, PyObject *args)
if (!_CDataObject_HasExternalBuffer(obj)) {
/* We are currently using the objects default buffer, but it
isn't large enough any more. */
- void *ptr = PyMem_Malloc(size);
+ void *ptr = PyMem_Calloc(1, size);
if (ptr == NULL)
return PyErr_NoMemory();
- memset(ptr, 0, size);
memmove(ptr, obj->b_ptr, obj->b_size);
obj->b_ptr = ptr;
obj->b_size = size;
@@ -1753,7 +1751,7 @@ unpickle(PyObject *self, PyObject *args)
if (!PyArg_ParseTuple(args, "OO!", &typ, &PyTuple_Type, &state))
return NULL;
- obj = _PyObject_CallMethodIdObjArgs(typ, &PyId___new__, typ, NULL);
+ obj = _PyObject_CallMethodIdOneArg(typ, &PyId___new__, typ);
if (obj == NULL)
return NULL;
@@ -1849,7 +1847,7 @@ pointer(PyObject *self, PyObject *arg)
typ = PyDict_GetItemWithError(_ctypes_ptrtype_cache, (PyObject *)Py_TYPE(arg));
if (typ) {
- return PyObject_CallFunctionObjArgs(typ, arg, NULL);
+ return PyObject_CallOneArg(typ, arg);
}
else if (PyErr_Occurred()) {
return NULL;
@@ -1857,7 +1855,7 @@ pointer(PyObject *self, PyObject *arg)
typ = POINTER(NULL, (PyObject *)Py_TYPE(arg));
if (typ == NULL)
return NULL;
- result = PyObject_CallFunctionObjArgs(typ, arg, NULL);
+ result = PyObject_CallOneArg(typ, arg);
Py_DECREF(typ);
return result;
}
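The calls above replace PyObject_CallFunctionObjArgs(f, x, NULL) and friends with the specialized helpers that skip building an argument tuple. A short sketch using the public spellings (the patch itself also uses internal variants such as _PyObject_CallNoArg and _PyObject_CallMethodIdOneArg; `callable` and `arg` are placeholders):

#include "Python.h"

/* Call `callable(arg)` and then `callable()` with the fast-path helpers. */
static PyObject *
call_examples(PyObject *callable, PyObject *arg)
{
    PyObject *one = PyObject_CallOneArg(callable, arg);    /* callable(arg) */
    if (one == NULL) {
        return NULL;
    }
    Py_DECREF(one);
    return PyObject_CallNoArgs(callable);                   /* callable() */
}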
diff --git a/Modules/_ctypes/cfield.c b/Modules/_ctypes/cfield.c
index 95367d50..a72682d7 100644
--- a/Modules/_ctypes/cfield.c
+++ b/Modules/_ctypes/cfield.c
@@ -1,4 +1,5 @@
#include "Python.h"
+#include "pycore_byteswap.h" // _Py_bswap32()
#include <ffi.h>
#ifdef MS_WIN32
@@ -60,8 +61,7 @@ PyCField_FromDesc(PyObject *desc, Py_ssize_t index,
#define CONT_BITFIELD 2
#define EXPAND_BITFIELD 3
- self = (CFieldObject *)PyObject_CallObject((PyObject *)&PyCField_Type,
- NULL);
+ self = (CFieldObject *)_PyObject_CallNoArg((PyObject *)&PyCField_Type);
if (self == NULL)
return NULL;
dict = PyType_stgdict(desc);
@@ -275,7 +275,7 @@ static void
PyCField_dealloc(PyObject *self)
{
PyCField_clear((CFieldObject *)self);
- self->ob_type->tp_free((PyObject *)self);
+ Py_TYPE(self)->tp_free((PyObject *)self);
}
static PyObject *
@@ -449,46 +449,32 @@ get_ulonglong(PyObject *v, unsigned long long *p)
( ( (type)x & ~(BIT_MASK(type, size) << LOW_BIT(size)) ) | ( ((type)v & BIT_MASK(type, size)) << LOW_BIT(size) ) ) \
: (type)v)
-/* byte swapping macros */
-#define SWAP_2(v) \
- ( ( (v >> 8) & 0x00FF) | \
- ( (v << 8) & 0xFF00) )
-
-#define SWAP_4(v) \
- ( ( (v & 0x000000FF) << 24 ) | \
- ( (v & 0x0000FF00) << 8 ) | \
- ( (v & 0x00FF0000) >> 8 ) | \
- ( ((v >> 24) & 0xFF)) )
-
-#ifdef _MSC_VER
-#define SWAP_8(v) \
- ( ( (v & 0x00000000000000FFL) << 56 ) | \
- ( (v & 0x000000000000FF00L) << 40 ) | \
- ( (v & 0x0000000000FF0000L) << 24 ) | \
- ( (v & 0x00000000FF000000L) << 8 ) | \
- ( (v & 0x000000FF00000000L) >> 8 ) | \
- ( (v & 0x0000FF0000000000L) >> 24 ) | \
- ( (v & 0x00FF000000000000L) >> 40 ) | \
- ( ((v >> 56) & 0xFF)) )
+#if SIZEOF_SHORT == 2
+# define SWAP_SHORT _Py_bswap16
#else
-#define SWAP_8(v) \
- ( ( (v & 0x00000000000000FFLL) << 56 ) | \
- ( (v & 0x000000000000FF00LL) << 40 ) | \
- ( (v & 0x0000000000FF0000LL) << 24 ) | \
- ( (v & 0x00000000FF000000LL) << 8 ) | \
- ( (v & 0x000000FF00000000LL) >> 8 ) | \
- ( (v & 0x0000FF0000000000LL) >> 24 ) | \
- ( (v & 0x00FF000000000000LL) >> 40 ) | \
- ( ((v >> 56) & 0xFF)) )
+# error "unsupported short size"
#endif
-#define SWAP_INT SWAP_4
+#if SIZEOF_INT == 4
+# define SWAP_INT _Py_bswap32
+#else
+# error "unsupported int size"
+#endif
#if SIZEOF_LONG == 4
-# define SWAP_LONG SWAP_4
+# define SWAP_LONG _Py_bswap32
#elif SIZEOF_LONG == 8
-# define SWAP_LONG SWAP_8
+# define SWAP_LONG _Py_bswap64
+#else
+# error "unsupported long size"
+#endif
+
+#if SIZEOF_LONG_LONG == 8
+# define SWAP_LONG_LONG _Py_bswap64
+#else
+# error "unsupported long long size"
#endif
+
/*****************************************************************
* The setter methods return an object which must be kept alive, to keep the
* data valid which has been stored in the memory block. The ctypes object
@@ -570,12 +556,13 @@ h_set_sw(void *ptr, PyObject *value, Py_ssize_t size)
{
long val;
short field;
- if (get_long(value, &val) < 0)
+ if (get_long(value, &val) < 0) {
return NULL;
+ }
memcpy(&field, ptr, sizeof(field));
- field = SWAP_2(field);
+ field = SWAP_SHORT(field);
field = SET(short, field, val, size);
- field = SWAP_2(field);
+ field = SWAP_SHORT(field);
memcpy(ptr, &field, sizeof(field));
_RET(value);
}
@@ -594,7 +581,7 @@ h_get_sw(void *ptr, Py_ssize_t size)
{
short val;
memcpy(&val, ptr, sizeof(val));
- val = SWAP_2(val);
+ val = SWAP_SHORT(val);
GET_BITFIELD(val, size);
return PyLong_FromLong(val);
}
@@ -617,12 +604,13 @@ H_set_sw(void *ptr, PyObject *value, Py_ssize_t size)
{
unsigned long val;
unsigned short field;
- if (get_ulong(value, &val) < 0)
+ if (get_ulong(value, &val) < 0) {
return NULL;
+ }
memcpy(&field, ptr, sizeof(field));
- field = SWAP_2(field);
+ field = SWAP_SHORT(field);
field = SET(unsigned short, field, val, size);
- field = SWAP_2(field);
+ field = SWAP_SHORT(field);
memcpy(ptr, &field, sizeof(field));
_RET(value);
}
@@ -642,7 +630,7 @@ H_get_sw(void *ptr, Py_ssize_t size)
{
unsigned short val;
memcpy(&val, ptr, sizeof(val));
- val = SWAP_2(val);
+ val = SWAP_SHORT(val);
GET_BITFIELD(val, size);
return PyLong_FromLong(val);
}
@@ -665,8 +653,9 @@ i_set_sw(void *ptr, PyObject *value, Py_ssize_t size)
{
long val;
int field;
- if (get_long(value, &val) < 0)
+ if (get_long(value, &val) < 0) {
return NULL;
+ }
memcpy(&field, ptr, sizeof(field));
field = SWAP_INT(field);
field = SET(int, field, val, size);
@@ -758,8 +747,9 @@ I_set_sw(void *ptr, PyObject *value, Py_ssize_t size)
{
unsigned long val;
unsigned int field;
- if (get_ulong(value, &val) < 0)
+ if (get_ulong(value, &val) < 0) {
return NULL;
+ }
memcpy(&field, ptr, sizeof(field));
field = SWAP_INT(field);
field = SET(unsigned int, field, (unsigned int)val, size);
@@ -806,8 +796,9 @@ l_set_sw(void *ptr, PyObject *value, Py_ssize_t size)
{
long val;
long field;
- if (get_long(value, &val) < 0)
+ if (get_long(value, &val) < 0) {
return NULL;
+ }
memcpy(&field, ptr, sizeof(field));
field = SWAP_LONG(field);
field = SET(long, field, val, size);
@@ -854,8 +845,9 @@ L_set_sw(void *ptr, PyObject *value, Py_ssize_t size)
{
unsigned long val;
unsigned long field;
- if (get_ulong(value, &val) < 0)
+ if (get_ulong(value, &val) < 0) {
return NULL;
+ }
memcpy(&field, ptr, sizeof(field));
field = SWAP_LONG(field);
field = SET(unsigned long, field, val, size);
@@ -902,12 +894,13 @@ q_set_sw(void *ptr, PyObject *value, Py_ssize_t size)
{
long long val;
long long field;
- if (get_longlong(value, &val) < 0)
+ if (get_longlong(value, &val) < 0) {
return NULL;
+ }
memcpy(&field, ptr, sizeof(field));
- field = SWAP_8(field);
+ field = SWAP_LONG_LONG(field);
field = SET(long long, field, val, size);
- field = SWAP_8(field);
+ field = SWAP_LONG_LONG(field);
memcpy(ptr, &field, sizeof(field));
_RET(value);
}
@@ -926,7 +919,7 @@ q_get_sw(void *ptr, Py_ssize_t size)
{
long long val;
memcpy(&val, ptr, sizeof(val));
- val = SWAP_8(val);
+ val = SWAP_LONG_LONG(val);
GET_BITFIELD(val, size);
return PyLong_FromLongLong(val);
}
@@ -949,12 +942,13 @@ Q_set_sw(void *ptr, PyObject *value, Py_ssize_t size)
{
unsigned long long val;
unsigned long long field;
- if (get_ulonglong(value, &val) < 0)
+ if (get_ulonglong(value, &val) < 0) {
return NULL;
+ }
memcpy(&field, ptr, sizeof(field));
- field = SWAP_8(field);
+ field = SWAP_LONG_LONG(field);
field = SET(unsigned long long, field, val, size);
- field = SWAP_8(field);
+ field = SWAP_LONG_LONG(field);
memcpy(ptr, &field, sizeof(field));
_RET(value);
}
@@ -973,7 +967,7 @@ Q_get_sw(void *ptr, Py_ssize_t size)
{
unsigned long long val;
memcpy(&val, ptr, sizeof(val));
- val = SWAP_8(val);
+ val = SWAP_LONG_LONG(val);
GET_BITFIELD(val, size);
return PyLong_FromUnsignedLongLong(val);
}
@@ -1176,7 +1170,7 @@ u_set(void *ptr, PyObject *value, Py_ssize_t size)
if (!PyUnicode_Check(value)) {
PyErr_Format(PyExc_TypeError,
"unicode string expected instead of %s instance",
- value->ob_type->tp_name);
+ Py_TYPE(value)->tp_name);
return NULL;
} else
Py_INCREF(value);
@@ -1235,7 +1229,7 @@ U_set(void *ptr, PyObject *value, Py_ssize_t length)
if (!PyUnicode_Check(value)) {
PyErr_Format(PyExc_TypeError,
"unicode string expected instead of %s instance",
- value->ob_type->tp_name);
+ Py_TYPE(value)->tp_name);
return NULL;
}
@@ -1284,13 +1278,13 @@ s_get(void *ptr, Py_ssize_t size)
static PyObject *
s_set(void *ptr, PyObject *value, Py_ssize_t length)
{
- char *data;
+ const char *data;
Py_ssize_t size;
if(!PyBytes_Check(value)) {
PyErr_Format(PyExc_TypeError,
"expected bytes, %s found",
- value->ob_type->tp_name);
+ Py_TYPE(value)->tp_name);
return NULL;
}
@@ -1322,7 +1316,7 @@ z_set(void *ptr, PyObject *value, Py_ssize_t size)
return value;
}
if (PyBytes_Check(value)) {
- *(char **)ptr = PyBytes_AsString(value);
+ *(const char **)ptr = PyBytes_AsString(value);
Py_INCREF(value);
return value;
} else if (PyLong_Check(value)) {
@@ -1335,7 +1329,7 @@ z_set(void *ptr, PyObject *value, Py_ssize_t size)
}
PyErr_Format(PyExc_TypeError,
"bytes or integer address expected instead of %s instance",
- value->ob_type->tp_name);
+ Py_TYPE(value)->tp_name);
return NULL;
}
@@ -1374,7 +1368,7 @@ Z_set(void *ptr, PyObject *value, Py_ssize_t size)
if (!PyUnicode_Check(value)) {
PyErr_Format(PyExc_TypeError,
"unicode string or integer address expected instead of %s instance",
- value->ob_type->tp_name);
+ Py_TYPE(value)->tp_name);
return NULL;
}
@@ -1417,7 +1411,7 @@ BSTR_set(void *ptr, PyObject *value, Py_ssize_t size)
} else if (!PyUnicode_Check(value)) {
PyErr_Format(PyExc_TypeError,
"unicode string expected instead of %s instance",
- value->ob_type->tp_name);
+ Py_TYPE(value)->tp_name);
return NULL;
}
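The cfield.c hunks drop the hand-rolled SWAP_2/SWAP_4/SWAP_8 macros in favour of the internal _Py_bswap16/32/64 helpers from pycore_byteswap.h. For readers without access to that internal header, a portable approximation of what the 32-bit swap does (plain C, not the CPython implementation):

#include <stdint.h>
#include <stdio.h>

/* Portable equivalent of a 32-bit byte swap such as _Py_bswap32(). */
static uint32_t
bswap32(uint32_t v)
{
    return ((v & 0x000000FFu) << 24) |
           ((v & 0x0000FF00u) << 8)  |
           ((v & 0x00FF0000u) >> 8)  |
           ((v & 0xFF000000u) >> 24);
}

int main(void)
{
    printf("%08x\n", bswap32(0x11223344u));   /* prints 44332211 */
    return 0;
}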
diff --git a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h
index e58f8523..1effccf9 100644
--- a/Modules/_ctypes/ctypes.h
+++ b/Modules/_ctypes/ctypes.h
@@ -68,7 +68,7 @@ typedef struct {
ffi_type *atypes[1];
} CThunkObject;
extern PyTypeObject PyCThunk_Type;
-#define CThunk_CheckExact(v) ((v)->ob_type == &PyCThunk_Type)
+#define CThunk_CheckExact(v) Py_IS_TYPE(v, &PyCThunk_Type)
typedef struct {
/* First part identical to tagCDataObject */
@@ -102,7 +102,7 @@ typedef struct {
} PyCFuncPtrObject;
extern PyTypeObject PyCStgDict_Type;
-#define PyCStgDict_CheckExact(v) ((v)->ob_type == &PyCStgDict_Type)
+#define PyCStgDict_CheckExact(v) Py_IS_TYPE(v, &PyCStgDict_Type)
#define PyCStgDict_Check(v) PyObject_TypeCheck(v, &PyCStgDict_Type)
extern int PyCStructUnionType_update_stgdict(PyObject *fields, PyObject *type, int isStruct);
@@ -112,12 +112,12 @@ extern int PyObject_stginfo(PyObject *self, Py_ssize_t *psize, Py_ssize_t *palig
extern PyTypeObject PyCData_Type;
-#define CDataObject_CheckExact(v) ((v)->ob_type == &PyCData_Type)
+#define CDataObject_CheckExact(v) Py_IS_TYPE(v, &PyCData_Type)
#define CDataObject_Check(v) PyObject_TypeCheck(v, &PyCData_Type)
#define _CDataObject_HasExternalBuffer(v) ((v)->b_ptr != (char *)&(v)->b_value)
extern PyTypeObject PyCSimpleType_Type;
-#define PyCSimpleTypeObject_CheckExact(v) ((v)->ob_type == &PyCSimpleType_Type)
+#define PyCSimpleTypeObject_CheckExact(v) Py_IS_TYPE(v, &PyCSimpleType_Type)
#define PyCSimpleTypeObject_Check(v) PyObject_TypeCheck(v, &PyCSimpleType_Type)
extern PyTypeObject PyCField_Type;
@@ -314,7 +314,7 @@ struct tagPyCArgObject {
};
extern PyTypeObject PyCArg_Type;
-#define PyCArg_CheckExact(v) ((v)->ob_type == &PyCArg_Type)
+#define PyCArg_CheckExact(v) Py_IS_TYPE(v, &PyCArg_Type)
extern PyCArgObject *PyCArgObject_new(void);
extern PyObject *
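The ctypes.h changes above rewrite every `(v)->ob_type == &SomeType` comparison as Py_IS_TYPE(). A tiny sketch of the same idiom with a stand-in type (PyLong_Type is used only so the example compiles on its own):

#include "Python.h"

/* Exact-type check in the new spelling; equivalent to comparing ob_type
 * directly, but through the accessor macro. */
#define MyInt_CheckExact(v)  Py_IS_TYPE((v), &PyLong_Type)

static int
is_exact_int(PyObject *v)
{
    return MyInt_CheckExact(v);
}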
diff --git a/Modules/_ctypes/libffi_osx/x86/x86-ffi64.c b/Modules/_ctypes/libffi_osx/x86/x86-ffi64.c
index f2610c16..8e7d0164 100644
--- a/Modules/_ctypes/libffi_osx/x86/x86-ffi64.c
+++ b/Modules/_ctypes/libffi_osx/x86/x86-ffi64.c
@@ -57,7 +57,7 @@ ffi_call_unix64(
of SSESF, SSEDF classes, that are basically SSE class, just gcc will
use SF or DFmode move instead of DImode to avoid reformating penalties.
- Similary we play games with INTEGERSI_CLASS to use cheaper SImode moves
+ Similarly we play games with INTEGERSI_CLASS to use cheaper SImode moves
whenever possible (upper half does contain padding). */
enum x86_64_reg_class
{
diff --git a/Modules/_ctypes/stgdict.c b/Modules/_ctypes/stgdict.c
index 1d45ade5..443951a6 100644
--- a/Modules/_ctypes/stgdict.c
+++ b/Modules/_ctypes/stgdict.c
@@ -190,7 +190,7 @@ PyType_stgdict(PyObject *obj)
StgDictObject *
PyObject_stgdict(PyObject *self)
{
- PyTypeObject *type = self->ob_type;
+ PyTypeObject *type = Py_TYPE(self);
if (!type->tp_dict || !PyCStgDict_CheckExact(type->tp_dict))
return NULL;
return (StgDictObject *)type->tp_dict;
@@ -231,7 +231,7 @@ MakeFields(PyObject *type, CFieldObject *descr,
Py_DECREF(fieldlist);
return -1;
}
- if (Py_TYPE(fdescr) != &PyCField_Type) {
+ if (!Py_IS_TYPE(fdescr, &PyCField_Type)) {
PyErr_SetString(PyExc_TypeError, "unexpected type");
Py_DECREF(fdescr);
Py_DECREF(fieldlist);
@@ -254,7 +254,7 @@ MakeFields(PyObject *type, CFieldObject *descr,
Py_DECREF(fieldlist);
return -1;
}
- assert(Py_TYPE(new_descr) == &PyCField_Type);
+ assert(Py_IS_TYPE(new_descr, &PyCField_Type));
new_descr->size = fdescr->size;
new_descr->offset = fdescr->offset + offset;
new_descr->index = fdescr->index + index;
@@ -304,7 +304,7 @@ MakeAnonFields(PyObject *type)
Py_DECREF(anon_names);
return -1;
}
- if (Py_TYPE(descr) != &PyCField_Type) {
+ if (!Py_IS_TYPE(descr, &PyCField_Type)) {
PyErr_Format(PyExc_AttributeError,
"'%U' is specified in _anonymous_ but not in "
"_fields_",
diff --git a/Modules/_curses_panel.c b/Modules/_curses_panel.c
index 53849e3a..f1248034 100644
--- a/Modules/_curses_panel.c
+++ b/Modules/_curses_panel.c
@@ -21,19 +21,26 @@ typedef struct {
PyObject *PyCursesPanel_Type;
} _curses_panelstate;
-#define _curses_panelstate(o) ((_curses_panelstate *)PyModule_GetState(o))
+static inline _curses_panelstate*
+get_curses_panelstate(PyObject *module)
+{
+ void *state = PyModule_GetState(module);
+ assert(state != NULL);
+ return (_curses_panelstate *)state;
+}
static int
_curses_panel_clear(PyObject *m)
{
- Py_CLEAR(_curses_panelstate(m)->PyCursesError);
+ Py_CLEAR(get_curses_panelstate(m)->PyCursesError);
return 0;
}
static int
_curses_panel_traverse(PyObject *m, visitproc visit, void *arg)
{
- Py_VISIT(_curses_panelstate(m)->PyCursesError);
+ Py_VISIT(Py_TYPE(m));
+ Py_VISIT(get_curses_panelstate(m)->PyCursesError);
return 0;
}
@@ -83,7 +90,7 @@ typedef struct {
} PyCursesPanelObject;
#define PyCursesPanel_Check(v) \
- (Py_TYPE(v) == _curses_panelstate_global->PyCursesPanel_Type)
+ Py_IS_TYPE(v, _curses_panelstate_global->PyCursesPanel_Type)
/* Some helper functions. The problem is that there's always a window
associated with a panel. To ensure that Python's GC doesn't pull
@@ -233,7 +240,7 @@ PyCursesPanel_New(PANEL *pan, PyCursesWindowObject *wo)
{
PyCursesPanelObject *po;
- po = PyObject_NEW(PyCursesPanelObject,
+ po = PyObject_New(PyCursesPanelObject,
(PyTypeObject *)(_curses_panelstate_global)->PyCursesPanel_Type);
if (po == NULL) return NULL;
po->pan = pan;
@@ -645,15 +652,15 @@ PyInit__curses_panel(void)
if (v == NULL)
goto fail;
((PyTypeObject *)v)->tp_new = NULL;
- _curses_panelstate(m)->PyCursesPanel_Type = v;
+ get_curses_panelstate(m)->PyCursesPanel_Type = v;
import_curses();
if (PyErr_Occurred())
goto fail;
/* For exception _curses_panel.error */
- _curses_panelstate(m)->PyCursesError = PyErr_NewException("_curses_panel.error", NULL, NULL);
- PyDict_SetItemString(d, "error", _curses_panelstate(m)->PyCursesError);
+ get_curses_panelstate(m)->PyCursesError = PyErr_NewException("_curses_panel.error", NULL, NULL);
+ PyDict_SetItemString(d, "error", get_curses_panelstate(m)->PyCursesError);
/* Make the version available */
v = PyUnicode_FromString(PyCursesVersion);
@@ -661,8 +668,9 @@ PyInit__curses_panel(void)
PyDict_SetItemString(d, "__version__", v);
Py_DECREF(v);
- Py_INCREF(_curses_panelstate(m)->PyCursesPanel_Type);
- PyModule_AddObject(m, "panel", (PyObject *)_curses_panelstate(m)->PyCursesPanel_Type);
+ Py_INCREF(get_curses_panelstate(m)->PyCursesPanel_Type);
+ PyModule_AddObject(m, "panel",
+ (PyObject *)get_curses_panelstate(m)->PyCursesPanel_Type);
return m;
fail:
Py_XDECREF(m);
diff --git a/Modules/_cursesmodule.c b/Modules/_cursesmodule.c
index b2b1117f..c70b0e2a 100644
--- a/Modules/_cursesmodule.c
+++ b/Modules/_cursesmodule.c
@@ -547,7 +547,7 @@ PyCursesWindow_New(WINDOW *win, const char *encoding)
encoding = "utf-8";
}
- wo = PyObject_NEW(PyCursesWindowObject, &PyCursesWindow_Type);
+ wo = PyObject_New(PyCursesWindowObject, &PyCursesWindow_Type);
if (wo == NULL) return NULL;
wo->win = win;
wo->encoding = _PyMem_Strdup(encoding);
@@ -709,7 +709,7 @@ _curses_window_addstr_impl(PyCursesWindowObject *self, int group_left_1,
else
#endif
{
- char *str = PyBytes_AS_STRING(bytesobj);
+ const char *str = PyBytes_AS_STRING(bytesobj);
funcname = "addstr";
if (use_xy)
rtn = mvwaddstr(self->win,y,x,str);
@@ -792,7 +792,7 @@ _curses_window_addnstr_impl(PyCursesWindowObject *self, int group_left_1,
else
#endif
{
- char *str = PyBytes_AS_STRING(bytesobj);
+ const char *str = PyBytes_AS_STRING(bytesobj);
funcname = "addnstr";
if (use_xy)
rtn = mvwaddnstr(self->win,y,x,str,n);
@@ -1710,7 +1710,7 @@ _curses_window_insstr_impl(PyCursesWindowObject *self, int group_left_1,
else
#endif
{
- char *str = PyBytes_AS_STRING(bytesobj);
+ const char *str = PyBytes_AS_STRING(bytesobj);
funcname = "insstr";
if (use_xy)
rtn = mvwinsstr(self->win,y,x,str);
@@ -1795,7 +1795,7 @@ _curses_window_insnstr_impl(PyCursesWindowObject *self, int group_left_1,
else
#endif
{
- char *str = PyBytes_AS_STRING(bytesobj);
+ const char *str = PyBytes_AS_STRING(bytesobj);
funcname = "insnstr";
if (use_xy)
rtn = mvwinsnstr(self->win,y,x,str,n);
@@ -2918,13 +2918,13 @@ _curses_getwin(PyObject *module, PyObject *file)
if (_Py_set_inheritable(fileno(fp), 0, NULL) < 0)
goto error;
- data = _PyObject_CallMethodId(file, &PyId_read, NULL);
+ data = _PyObject_CallMethodIdNoArgs(file, &PyId_read);
if (data == NULL)
goto error;
if (!PyBytes_Check(data)) {
PyErr_Format(PyExc_TypeError,
"f.read() returned %.100s instead of bytes",
- data->ob_type->tp_name);
+ Py_TYPE(data)->tp_name);
Py_DECREF(data);
goto error;
}
@@ -3255,6 +3255,90 @@ _curses_setupterm_impl(PyObject *module, const char *term, int fd)
Py_RETURN_NONE;
}
+#if defined(NCURSES_EXT_FUNCS) && NCURSES_EXT_FUNCS >= 20081102
+// https://invisible-island.net/ncurses/NEWS.html#index-t20080119
+
+/*[clinic input]
+_curses.get_escdelay
+
+Gets the curses ESCDELAY setting.
+
+Gets the number of milliseconds to wait after reading an escape character,
+to distinguish between an individual escape character entered on the
+keyboard from escape sequences sent by cursor and function keys.
+[clinic start generated code]*/
+
+static PyObject *
+_curses_get_escdelay_impl(PyObject *module)
+/*[clinic end generated code: output=222fa1a822555d60 input=be2d5b3dd974d0a4]*/
+{
+ return PyLong_FromLong(ESCDELAY);
+}
+/*[clinic input]
+_curses.set_escdelay
+ ms: int
+ length of the delay in milliseconds.
+ /
+
+Sets the curses ESCDELAY setting.
+
+Sets the number of milliseconds to wait after reading an escape character,
+to distinguish between an individual escape character entered on the
+keyboard from escape sequences sent by cursor and function keys.
+[clinic start generated code]*/
+
+static PyObject *
+_curses_set_escdelay_impl(PyObject *module, int ms)
+/*[clinic end generated code: output=43818efbf7980ac4 input=7796fe19f111e250]*/
+{
+ if (ms <= 0) {
+ PyErr_SetString(PyExc_ValueError, "ms must be > 0");
+ return NULL;
+ }
+
+ return PyCursesCheckERR(set_escdelay(ms), "set_escdelay");
+}
+
+/*[clinic input]
+_curses.get_tabsize
+
+Gets the curses TABSIZE setting.
+
+Gets the number of columns used by the curses library when converting a tab
+character to spaces as it adds the tab to a window.
+[clinic start generated code]*/
+
+static PyObject *
+_curses_get_tabsize_impl(PyObject *module)
+/*[clinic end generated code: output=7e9e51fb6126fbdf input=74af86bf6c9f5d7e]*/
+{
+ return PyLong_FromLong(TABSIZE);
+}
+/*[clinic input]
+_curses.set_tabsize
+ size: int
+ rendered cell width of a tab character.
+ /
+
+Sets the curses TABSIZE setting.
+
+Sets the number of columns used by the curses library when converting a tab
+character to spaces as it adds the tab to a window.
+[clinic start generated code]*/
+
+static PyObject *
+_curses_set_tabsize_impl(PyObject *module, int size)
+/*[clinic end generated code: output=c1de5a76c0daab1e input=78cba6a3021ad061]*/
+{
+ if (size <= 0) {
+ PyErr_SetString(PyExc_ValueError, "size must be > 0");
+ return NULL;
+ }
+
+ return PyCursesCheckERR(set_tabsize(size), "set_tabsize");
+}
+#endif
+
/*[clinic input]
_curses.intrflush
@@ -3730,7 +3814,7 @@ update_lines_cols(void)
return 0;
}
/* PyId_LINES.object will be initialized here. */
- if (PyDict_SetItem(ModDict, PyId_LINES.object, o)) {
+ if (PyDict_SetItem(ModDict, _PyUnicode_FromId(&PyId_LINES), o)) {
Py_DECREF(m);
Py_DECREF(o);
return 0;
@@ -3746,7 +3830,7 @@ update_lines_cols(void)
Py_DECREF(o);
return 0;
}
- if (PyDict_SetItem(ModDict, PyId_COLS.object, o)) {
+ if (PyDict_SetItem(ModDict, _PyUnicode_FromId(&PyId_COLS), o)) {
Py_DECREF(m);
Py_DECREF(o);
return 0;
@@ -3757,15 +3841,18 @@ update_lines_cols(void)
}
/*[clinic input]
-_curses.update_lines_cols -> int
+_curses.update_lines_cols
[clinic start generated code]*/
-static int
+static PyObject *
_curses_update_lines_cols_impl(PyObject *module)
-/*[clinic end generated code: output=0345e7f072ea711a input=3a87760f7d5197f0]*/
+/*[clinic end generated code: output=423f2b1e63ed0f75 input=5f065ab7a28a5d90]*/
{
- return update_lines_cols();
+ if (!update_lines_cols()) {
+ return NULL;
+ }
+ Py_RETURN_NONE;
}
#endif
@@ -3849,8 +3936,10 @@ _curses_resizeterm_impl(PyObject *module, int nlines, int ncols)
result = PyCursesCheckERR(resizeterm(nlines, ncols), "resizeterm");
if (!result)
return NULL;
- if (!update_lines_cols())
+ if (!update_lines_cols()) {
+ Py_DECREF(result);
return NULL;
+ }
return result;
}
@@ -3886,8 +3975,10 @@ _curses_resize_term_impl(PyObject *module, int nlines, int ncols)
result = PyCursesCheckERR(resize_term(nlines, ncols), "resize_term");
if (!result)
return NULL;
- if (!update_lines_cols())
+ if (!update_lines_cols()) {
+ Py_DECREF(result);
return NULL;
+ }
return result;
}
#endif /* HAVE_CURSES_RESIZE_TERM */
@@ -3958,12 +4049,18 @@ _curses_start_color_impl(PyObject *module)
c = PyLong_FromLong((long) COLORS);
if (c == NULL)
return NULL;
- PyDict_SetItemString(ModDict, "COLORS", c);
+ if (PyDict_SetItemString(ModDict, "COLORS", c) < 0) {
+ Py_DECREF(c);
+ return NULL;
+ }
Py_DECREF(c);
cp = PyLong_FromLong((long) COLOR_PAIRS);
if (cp == NULL)
return NULL;
- PyDict_SetItemString(ModDict, "COLOR_PAIRS", cp);
+ if (PyDict_SetItemString(ModDict, "COLOR_PAIRS", cp) < 0) {
+ Py_DECREF(cp);
+ return NULL;
+ }
Py_DECREF(cp);
Py_RETURN_NONE;
} else {
@@ -4415,6 +4512,12 @@ static PyMethodDef PyCurses_methods[] = {
_CURSES_RESIZETERM_METHODDEF
_CURSES_RESIZE_TERM_METHODDEF
_CURSES_SAVETTY_METHODDEF
+#if defined(NCURSES_EXT_FUNCS) && NCURSES_EXT_FUNCS >= 20081102
+ _CURSES_GET_ESCDELAY_METHODDEF
+ _CURSES_SET_ESCDELAY_METHODDEF
+#endif
+ _CURSES_GET_TABSIZE_METHODDEF
+ _CURSES_SET_TABSIZE_METHODDEF
_CURSES_SETSYX_METHODDEF
_CURSES_SETUPTERM_METHODDEF
_CURSES_START_COLOR_METHODDEF
@@ -4637,7 +4740,8 @@ PyInit__curses(void)
SetDictInt("KEY_MAX", KEY_MAX);
}
- Py_INCREF(&PyCursesWindow_Type);
- PyModule_AddObject(m, "window", (PyObject *)&PyCursesWindow_Type);
+ if (PyModule_AddType(m, &PyCursesWindow_Type) < 0) {
+ return NULL;
+ }
return m;
}
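Several _cursesmodule hunks above are pure reference-count fixes: an already-created result is now released when a later step fails (resizeterm(), resize_term(), the COLORS/COLOR_PAIRS dict inserts). A generic sketch of that decref-on-error pattern, unrelated to curses itself:

#include "Python.h"

/* Build a 2-tuple from two freshly created ints, releasing the first
 * reference if creating the second object fails. */
static PyObject *
make_pair(long a, long b)
{
    PyObject *first = PyLong_FromLong(a);
    if (first == NULL) {
        return NULL;
    }
    PyObject *second = PyLong_FromLong(b);
    if (second == NULL) {
        Py_DECREF(first);              /* don't leak the first reference */
        return NULL;
    }
    PyObject *pair = PyTuple_Pack(2, first, second);  /* takes its own refs */
    Py_DECREF(first);
    Py_DECREF(second);
    return pair;                       /* NULL if PyTuple_Pack failed */
}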
diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c
index 41c3f342..4e0c3783 100644
--- a/Modules/_datetimemodule.c
+++ b/Modules/_datetimemodule.c
@@ -9,7 +9,7 @@
#include "Python.h"
#include "datetime.h"
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
#include <time.h>
@@ -18,19 +18,19 @@
#endif
#define PyDate_Check(op) PyObject_TypeCheck(op, &PyDateTime_DateType)
-#define PyDate_CheckExact(op) (Py_TYPE(op) == &PyDateTime_DateType)
+#define PyDate_CheckExact(op) Py_IS_TYPE(op, &PyDateTime_DateType)
#define PyDateTime_Check(op) PyObject_TypeCheck(op, &PyDateTime_DateTimeType)
-#define PyDateTime_CheckExact(op) (Py_TYPE(op) == &PyDateTime_DateTimeType)
+#define PyDateTime_CheckExact(op) Py_IS_TYPE(op, &PyDateTime_DateTimeType)
#define PyTime_Check(op) PyObject_TypeCheck(op, &PyDateTime_TimeType)
-#define PyTime_CheckExact(op) (Py_TYPE(op) == &PyDateTime_TimeType)
+#define PyTime_CheckExact(op) Py_IS_TYPE(op, &PyDateTime_TimeType)
#define PyDelta_Check(op) PyObject_TypeCheck(op, &PyDateTime_DeltaType)
-#define PyDelta_CheckExact(op) (Py_TYPE(op) == &PyDateTime_DeltaType)
+#define PyDelta_CheckExact(op) Py_IS_TYPE(op, &PyDateTime_DeltaType)
#define PyTZInfo_Check(op) PyObject_TypeCheck(op, &PyDateTime_TZInfoType)
-#define PyTZInfo_CheckExact(op) (Py_TYPE(op) == &PyDateTime_TZInfoType)
+#define PyTZInfo_CheckExact(op) Py_IS_TYPE(op, &PyDateTime_TZInfoType)
#define PyTimezone_Check(op) PyObject_TypeCheck(op, &PyDateTime_TimeZoneType)
@@ -38,8 +38,9 @@
module datetime
class datetime.datetime "PyDateTime_DateTime *" "&PyDateTime_DateTimeType"
class datetime.date "PyDateTime_Date *" "&PyDateTime_DateType"
+class datetime.IsoCalendarDate "PyDateTime_IsoCalendarDate *" "&PyDateTime_IsoCalendarDateType"
[clinic start generated code]*/
-/*[clinic end generated code: output=da39a3ee5e6b4b0d input=25138ad6a696b785]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=81bec0fa19837f63]*/
#include "clinic/_datetimemodule.c.h"
@@ -131,6 +132,7 @@ class datetime.date "PyDateTime_Date *" "&PyDateTime_DateType"
static PyTypeObject PyDateTime_DateType;
static PyTypeObject PyDateTime_DateTimeType;
static PyTypeObject PyDateTime_DeltaType;
+static PyTypeObject PyDateTime_IsoCalendarDateType;
static PyTypeObject PyDateTime_TimeType;
static PyTypeObject PyDateTime_TZInfoType;
static PyTypeObject PyDateTime_TimeZoneType;
@@ -1242,8 +1244,7 @@ call_tzname(PyObject *tzinfo, PyObject *tzinfoarg)
if (tzinfo == Py_None)
Py_RETURN_NONE;
- result = _PyObject_CallMethodIdObjArgs(tzinfo, &PyId_tzname,
- tzinfoarg, NULL);
+ result = _PyObject_CallMethodIdOneArg(tzinfo, &PyId_tzname, tzinfoarg);
if (result == NULL || result == Py_None)
return result;
@@ -1533,8 +1534,8 @@ wrap_strftime(PyObject *object, PyObject *format, PyObject *timetuple,
ntoappend = 1;
}
else if ((ch = *pin++) == '\0') {
- /* Null byte follows %, copy only '%'.
- *
+ /* Null byte follows %, copy only '%'.
+ *
* Back the pin up one char so that we catch the null check
* the next time through the loop.*/
pin--;
@@ -1624,7 +1625,7 @@ wrap_strftime(PyObject *object, PyObject *format, PyObject *timetuple,
usednew += ntoappend;
assert(usednew <= totalnew);
} /* end while() */
-
+
if (_PyBytes_Resize(&newfmt, usednew) < 0)
goto Done;
{
@@ -1664,7 +1665,7 @@ time_time(void)
if (time != NULL) {
_Py_IDENTIFIER(time);
- result = _PyObject_CallMethodId(time, &PyId_time, NULL);
+ result = _PyObject_CallMethodIdNoArgs(time, &PyId_time);
Py_DECREF(time);
}
return result;
@@ -1698,8 +1699,7 @@ build_struct_time(int y, int m, int d, int hh, int mm, int ss, int dstflag)
return NULL;
}
- result = _PyObject_CallMethodIdObjArgs(time, &PyId_struct_time,
- args, NULL);
+ result = _PyObject_CallMethodIdOneArg(time, &PyId_struct_time, args);
Py_DECREF(time);
Py_DECREF(args);
return result;
@@ -1812,7 +1812,7 @@ checked_divmod(PyObject *a, PyObject *b)
if (!PyTuple_Check(result)) {
PyErr_Format(PyExc_TypeError,
"divmod() returned non-tuple (type %.200s)",
- result->ob_type->tp_name);
+ Py_TYPE(result)->tp_name);
Py_DECREF(result);
return NULL;
}
@@ -1923,7 +1923,7 @@ get_float_as_integer_ratio(PyObject *floatobj)
PyObject *ratio;
assert(floatobj && PyFloat_Check(floatobj));
- ratio = _PyObject_CallMethodId(floatobj, &PyId_as_integer_ratio, NULL);
+ ratio = _PyObject_CallMethodIdNoArgs(floatobj, &PyId_as_integer_ratio);
if (ratio == NULL) {
return NULL;
}
@@ -2491,7 +2491,6 @@ delta_new(PyTypeObject *type, PyObject *args, PyObject *kw)
int x_is_odd;
PyObject *temp;
- whole_us = round(leftover_us);
if (fabs(whole_us - leftover_us) == 0.5) {
/* We're exactly halfway between two integers. In order
* to do round-half-to-even, we must determine whether x
@@ -2899,8 +2898,7 @@ date_today(PyObject *cls, PyObject *dummy)
* time.time() delivers; if someone were gonzo about optimization,
* date.today() could get away with plain C time().
*/
- result = _PyObject_CallMethodIdObjArgs(cls, &PyId_fromtimestamp,
- time, NULL);
+ result = _PyObject_CallMethodIdOneArg(cls, &PyId_fromtimestamp, time);
Py_DECREF(time);
return result;
}
@@ -3167,7 +3165,7 @@ date_isoformat(PyDateTime_Date *self, PyObject *Py_UNUSED(ignored))
static PyObject *
date_str(PyDateTime_Date *self)
{
- return _PyObject_CallMethodId((PyObject *)self, &PyId_isoformat, NULL);
+ return _PyObject_CallMethodIdNoArgs((PyObject *)self, &PyId_isoformat);
}
@@ -3193,7 +3191,7 @@ date_strftime(PyDateTime_Date *self, PyObject *args, PyObject *kw)
&format))
return NULL;
- tuple = _PyObject_CallMethodId((PyObject *)self, &PyId_timetuple, NULL);
+ tuple = _PyObject_CallMethodIdNoArgs((PyObject *)self, &PyId_timetuple);
if (tuple == NULL)
return NULL;
result = wrap_strftime((PyObject *)self, format, tuple,
@@ -3214,8 +3212,8 @@ date_format(PyDateTime_Date *self, PyObject *args)
if (PyUnicode_GetLength(format) == 0)
return PyObject_Str((PyObject *)self);
- return _PyObject_CallMethodIdObjArgs((PyObject *)self, &PyId_strftime,
- format, NULL);
+ return _PyObject_CallMethodIdOneArg((PyObject *)self, &PyId_strftime,
+ format);
}
/* ISO methods. */
@@ -3228,6 +3226,136 @@ date_isoweekday(PyDateTime_Date *self, PyObject *Py_UNUSED(ignored))
return PyLong_FromLong(dow + 1);
}
+PyDoc_STRVAR(iso_calendar_date__doc__,
+"The result of date.isocalendar() or datetime.isocalendar()\n\n\
+This object may be accessed either as a tuple of\n\
+ ((year, week, weekday)\n\
+or via the object attributes as named in the above tuple.");
+
+typedef struct {
+ PyTupleObject tuple;
+} PyDateTime_IsoCalendarDate;
+
+static PyObject *
+iso_calendar_date_repr(PyDateTime_IsoCalendarDate *self)
+{
+ PyObject* year = PyTuple_GetItem((PyObject *)self, 0);
+ if (year == NULL) {
+ return NULL;
+ }
+ PyObject* week = PyTuple_GetItem((PyObject *)self, 1);
+ if (week == NULL) {
+ return NULL;
+ }
+ PyObject* weekday = PyTuple_GetItem((PyObject *)self, 2);
+ if (weekday == NULL) {
+ return NULL;
+ }
+
+ return PyUnicode_FromFormat("%.200s(year=%S, week=%S, weekday=%S)",
+ Py_TYPE(self)->tp_name, year, week, weekday);
+}
+
+static PyObject *
+iso_calendar_date_reduce(PyObject *self, PyObject *Py_UNUSED(ignored))
+{
+ // Construct the tuple that this reduces to
+ PyObject * reduce_tuple = Py_BuildValue(
+ "O((OOO))", &PyTuple_Type,
+ PyTuple_GET_ITEM(self, 0),
+ PyTuple_GET_ITEM(self, 1),
+ PyTuple_GET_ITEM(self, 2)
+ );
+
+ return reduce_tuple;
+}
+
+static PyObject *
+iso_calendar_date_year(PyDateTime_IsoCalendarDate *self, void *unused)
+{
+ PyObject *year = PyTuple_GetItem((PyObject *)self, 0);
+ if (year == NULL) {
+ return NULL;
+ }
+ Py_INCREF(year);
+ return year;
+}
+
+static PyObject *
+iso_calendar_date_week(PyDateTime_IsoCalendarDate *self, void *unused)
+{
+ PyObject *week = PyTuple_GetItem((PyObject *)self, 1);
+ if (week == NULL) {
+ return NULL;
+ }
+ Py_INCREF(week);
+ return week;
+}
+
+static PyObject *
+iso_calendar_date_weekday(PyDateTime_IsoCalendarDate *self, void *unused)
+{
+ PyObject *weekday = PyTuple_GetItem((PyObject *)self, 2);
+ if (weekday == NULL) {
+ return NULL;
+ }
+ Py_INCREF(weekday);
+ return weekday;
+}
+
+static PyGetSetDef iso_calendar_date_getset[] = {
+ {"year", (getter)iso_calendar_date_year},
+ {"week", (getter)iso_calendar_date_week},
+ {"weekday", (getter)iso_calendar_date_weekday},
+ {NULL}
+};
+
+static PyMethodDef iso_calendar_date_methods[] = {
+ {"__reduce__", (PyCFunction)iso_calendar_date_reduce, METH_NOARGS,
+ PyDoc_STR("__reduce__() -> (cls, state)")},
+ {NULL, NULL},
+};
+
+static PyTypeObject PyDateTime_IsoCalendarDateType = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ .tp_name = "datetime.IsoCalendarDate",
+ .tp_basicsize = sizeof(PyDateTime_IsoCalendarDate),
+ .tp_repr = (reprfunc) iso_calendar_date_repr,
+ .tp_flags = Py_TPFLAGS_DEFAULT,
+ .tp_doc = iso_calendar_date__doc__,
+ .tp_methods = iso_calendar_date_methods,
+ .tp_getset = iso_calendar_date_getset,
+ // .tp_base = &PyTuple_Type, // filled in PyInit__datetime
+ .tp_new = iso_calendar_date_new,
+};
+
+/*[clinic input]
+@classmethod
+datetime.IsoCalendarDate.__new__ as iso_calendar_date_new
+ year: int
+ week: int
+ weekday: int
+[clinic start generated code]*/
+
+static PyObject *
+iso_calendar_date_new_impl(PyTypeObject *type, int year, int week,
+ int weekday)
+/*[clinic end generated code: output=383d33d8dc7183a2 input=4f2c663c9d19c4ee]*/
+
+{
+ PyDateTime_IsoCalendarDate *self;
+ self = (PyDateTime_IsoCalendarDate *) type->tp_alloc(type, 3);
+ if (self == NULL) {
+ return NULL;
+ }
+
+ PyTuple_SET_ITEM(self, 0, PyLong_FromLong(year));
+ PyTuple_SET_ITEM(self, 1, PyLong_FromLong(week));
+ PyTuple_SET_ITEM(self, 2, PyLong_FromLong(weekday));
+
+ return (PyObject *)self;
+}
+
static PyObject *
date_isocalendar(PyDateTime_Date *self, PyObject *Py_UNUSED(ignored))
{
@@ -3247,7 +3375,13 @@ date_isocalendar(PyDateTime_Date *self, PyObject *Py_UNUSED(ignored))
++year;
week = 0;
}
- return Py_BuildValue("iii", year, week + 1, day + 1);
+
+ PyObject* v = iso_calendar_date_new_impl(&PyDateTime_IsoCalendarDateType,
+ year, week + 1, day + 1);
+ if (v == NULL) {
+ return NULL;
+ }
+ return v;
}
/* Miscellaneous methods. */
@@ -3386,7 +3520,7 @@ static PyMethodDef date_methods[] = {
PyDoc_STR("Return time tuple, compatible with time.localtime().")},
{"isocalendar", (PyCFunction)date_isocalendar, METH_NOARGS,
- PyDoc_STR("Return a 3-tuple containing ISO year, week number, and "
+ PyDoc_STR("Return a named tuple containing ISO year, week number, and "
"weekday.")},
{"isoformat", (PyCFunction)date_isoformat, METH_NOARGS,
@@ -3614,7 +3748,7 @@ tzinfo_reduce(PyObject *self, PyObject *Py_UNUSED(ignored))
return NULL;
}
if (getinitargs != NULL) {
- args = _PyObject_CallNoArg(getinitargs);
+ args = PyObject_CallNoArgs(getinitargs);
Py_DECREF(getinitargs);
}
else {
@@ -3629,7 +3763,7 @@ tzinfo_reduce(PyObject *self, PyObject *Py_UNUSED(ignored))
return NULL;
}
if (getstate != NULL) {
- state = _PyObject_CallNoArg(getstate);
+ state = PyObject_CallNoArgs(getstate);
Py_DECREF(getstate);
if (state == NULL) {
Py_DECREF(args);
@@ -3945,7 +4079,7 @@ static PyTypeObject PyDateTime_TimeZoneType = {
timezone_methods, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
- &PyDateTime_TZInfoType, /* tp_base */
+ 0, /* tp_base; filled in PyInit__datetime */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
@@ -4176,18 +4310,18 @@ time_repr(PyDateTime_Time *self)
static PyObject *
time_str(PyDateTime_Time *self)
{
- return _PyObject_CallMethodId((PyObject *)self, &PyId_isoformat, NULL);
+ return _PyObject_CallMethodIdNoArgs((PyObject *)self, &PyId_isoformat);
}
static PyObject *
time_isoformat(PyDateTime_Time *self, PyObject *args, PyObject *kw)
{
char buf[100];
- char *timespec = NULL;
+ const char *timespec = NULL;
static char *keywords[] = {"timespec", NULL};
PyObject *result;
int us = TIME_GET_MICROSECOND(self);
- static char *specs[][2] = {
+ static const char *specs[][2] = {
{"hours", "%02d"},
{"minutes", "%02d:%02d"},
{"seconds", "%02d:%02d:%02d"},
@@ -4539,7 +4673,10 @@ static PyMethodDef time_methods[] = {
{"isoformat", (PyCFunction)(void(*)(void))time_isoformat, METH_VARARGS | METH_KEYWORDS,
PyDoc_STR("Return string in ISO 8601 format, [HH[:MM[:SS[.mmm[uuu]]]]]"
"[+HH:MM].\n\n"
- "timespec specifies what components of the time to include.\n")},
+ "The optional argument timespec specifies the number "
+ "of additional terms\nof the time to include. Valid "
+ "options are 'auto', 'hours', 'minutes',\n'seconds', "
+ "'milliseconds' and 'microseconds'.\n")},
{"strftime", (PyCFunction)(void(*)(void))time_strftime, METH_VARARGS | METH_KEYWORDS,
PyDoc_STR("format -> strftime() style string.")},
@@ -5419,7 +5556,7 @@ datetime_isoformat(PyDateTime_DateTime *self, PyObject *args, PyObject *kw)
char buffer[100];
PyObject *result = NULL;
int us = DATE_GET_MICROSECOND(self);
- static char *specs[][2] = {
+ static const char *specs[][2] = {
{"hours", "%04d-%02d-%02d%c%02d"},
{"minutes", "%04d-%02d-%02d%c%02d:%02d"},
{"seconds", "%04d-%02d-%02d%c%02d:%02d:%02d"},
@@ -5961,7 +6098,7 @@ datetime_astimezone(PyDateTime_DateTime *self, PyObject *args, PyObject *kw)
temp = (PyObject *)result;
result = (PyDateTime_DateTime *)
- _PyObject_CallMethodIdObjArgs(tzinfo, &PyId_fromutc, temp, NULL);
+ _PyObject_CallMethodIdOneArg(tzinfo, &PyId_fromutc, temp);
Py_DECREF(temp);
return result;
@@ -6246,9 +6383,10 @@ static PyMethodDef datetime_methods[] = {
"YYYY-MM-DDT[HH[:MM[:SS[.mmm[uuu]]]]][+HH:MM].\n"
"sep is used to separate the year from the time, and "
"defaults to 'T'.\n"
- "timespec specifies what components of the time to include"
- " (allowed values are 'auto', 'hours', 'minutes', 'seconds',"
- " 'milliseconds', and 'microseconds').\n")},
+ "The optional argument timespec specifies the number "
+ "of additional terms\nof the time to include. Valid "
+ "options are 'auto', 'hours', 'minutes',\n'seconds', "
+ "'milliseconds' and 'microseconds'.\n")},
{"utcoffset", (PyCFunction)datetime_utcoffset, METH_NOARGS,
PyDoc_STR("Return self.tzinfo.utcoffset(self).")},
@@ -6324,7 +6462,8 @@ static PyTypeObject PyDateTime_DateTimeType = {
datetime_methods, /* tp_methods */
0, /* tp_members */
datetime_getset, /* tp_getset */
- &PyDateTime_DateType, /* tp_base */
+ 0, /* tp_base; filled in
+ PyInit__datetime */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
@@ -6390,18 +6529,32 @@ PyInit__datetime(void)
if (m == NULL)
return NULL;
- if (PyType_Ready(&PyDateTime_DateType) < 0)
- return NULL;
- if (PyType_Ready(&PyDateTime_DateTimeType) < 0)
- return NULL;
- if (PyType_Ready(&PyDateTime_DeltaType) < 0)
- return NULL;
- if (PyType_Ready(&PyDateTime_TimeType) < 0)
- return NULL;
- if (PyType_Ready(&PyDateTime_TZInfoType) < 0)
- return NULL;
- if (PyType_Ready(&PyDateTime_TimeZoneType) < 0)
+ // `&...` is not a constant expression according to a strict reading
+ // of C standards. Fill tp_base at run-time rather than statically.
+ // See https://bugs.python.org/issue40777
+ PyDateTime_IsoCalendarDateType.tp_base = &PyTuple_Type;
+ PyDateTime_TimeZoneType.tp_base = &PyDateTime_TZInfoType;
+ PyDateTime_DateTimeType.tp_base = &PyDateTime_DateType;
+
+ PyTypeObject *types[] = {
+ &PyDateTime_DateType,
+ &PyDateTime_DateTimeType,
+ &PyDateTime_TimeType,
+ &PyDateTime_DeltaType,
+ &PyDateTime_TZInfoType,
+ &PyDateTime_TimeZoneType,
+ };
+
+ for (size_t i = 0; i < Py_ARRAY_LENGTH(types); i++) {
+ if (PyModule_AddType(m, types[i]) < 0) {
+ return NULL;
+ }
+ }
+
+ if (PyType_Ready(&PyDateTime_IsoCalendarDateType) < 0) {
return NULL;
+ }
+ Py_INCREF(&PyDateTime_IsoCalendarDateType);
/* timedelta values */
d = PyDateTime_DeltaType.tp_dict;
@@ -6519,25 +6672,6 @@ PyInit__datetime(void)
PyModule_AddIntMacro(m, MINYEAR);
PyModule_AddIntMacro(m, MAXYEAR);
- Py_INCREF(&PyDateTime_DateType);
- PyModule_AddObject(m, "date", (PyObject *) &PyDateTime_DateType);
-
- Py_INCREF(&PyDateTime_DateTimeType);
- PyModule_AddObject(m, "datetime",
- (PyObject *)&PyDateTime_DateTimeType);
-
- Py_INCREF(&PyDateTime_TimeType);
- PyModule_AddObject(m, "time", (PyObject *) &PyDateTime_TimeType);
-
- Py_INCREF(&PyDateTime_DeltaType);
- PyModule_AddObject(m, "timedelta", (PyObject *) &PyDateTime_DeltaType);
-
- Py_INCREF(&PyDateTime_TZInfoType);
- PyModule_AddObject(m, "tzinfo", (PyObject *) &PyDateTime_TZInfoType);
-
- Py_INCREF(&PyDateTime_TimeZoneType);
- PyModule_AddObject(m, "timezone", (PyObject *) &PyDateTime_TimeZoneType);
-
x = PyCapsule_New(&CAPI, PyDateTime_CAPSULE_NAME, NULL);
if (x == NULL)
return NULL;
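The rewritten PyInit__datetime fills tp_base at runtime and registers the types through PyModule_AddType() in a loop instead of repeated Py_INCREF/PyModule_AddObject pairs. A condensed sketch of that shape with hypothetical Base/Derived types standing in for date/datetime:

#include "Python.h"

static PyTypeObject Base_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    .tp_name = "demo.Base",
    .tp_basicsize = sizeof(PyObject),
    .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
};
static PyTypeObject Derived_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    .tp_name = "demo.Derived",
    .tp_basicsize = sizeof(PyObject),
    .tp_flags = Py_TPFLAGS_DEFAULT,
};

static int
add_types(PyObject *m)
{
    /* `&Base_Type` is not a constant expression under a strict reading of
     * the C standard, so tp_base is assigned here, not statically. */
    Derived_Type.tp_base = &Base_Type;

    PyTypeObject *types[] = { &Base_Type, &Derived_Type };
    for (size_t i = 0; i < Py_ARRAY_LENGTH(types); i++) {
        /* PyModule_AddType() readies the type, INCREFs it and exposes it
         * under the last component of tp_name. */
        if (PyModule_AddType(m, types[i]) < 0) {
            return -1;
        }
    }
    return 0;
}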
diff --git a/Modules/_dbmmodule.c b/Modules/_dbmmodule.c
index ea0a9d6f..80a05036 100644
--- a/Modules/_dbmmodule.c
+++ b/Modules/_dbmmodule.c
@@ -45,7 +45,7 @@ typedef struct {
static PyTypeObject Dbmtype;
-#define is_dbmobject(v) (Py_TYPE(v) == &Dbmtype)
+#define is_dbmobject(v) Py_IS_TYPE(v, &Dbmtype)
#define check_dbmobject_open(v) if ((v)->di_dbm == NULL) \
{ PyErr_SetString(DbmError, "DBM object has already been closed"); \
return NULL; }
@@ -255,7 +255,7 @@ dbm_contains(PyObject *self, PyObject *arg)
else if (!PyBytes_Check(arg)) {
PyErr_Format(PyExc_TypeError,
"dbm key must be bytes or string, not %.100s",
- arg->ob_type->tp_name);
+ Py_TYPE(arg)->tp_name);
return -1;
}
else {
@@ -370,7 +370,7 @@ static PyObject *
dbm__exit__(PyObject *self, PyObject *args)
{
_Py_IDENTIFIER(close);
- return _PyObject_CallMethodId(self, &PyId_close, NULL);
+ return _PyObject_CallMethodIdNoArgs(self, &PyId_close);
}
diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c
index eb1f1a01..fb4e020f 100644
--- a/Modules/_decimal/_decimal.c
+++ b/Modules/_decimal/_decimal.c
@@ -28,8 +28,6 @@
#include <Python.h>
#include "longintrepr.h"
-#include "pythread.h"
-#include "structmember.h"
#include "complexobject.h"
#include "mpdecimal.h"
@@ -38,8 +36,8 @@
#include "docstrings.h"
-#if !defined(MPD_VERSION_HEX) || MPD_VERSION_HEX < 0x02040100
- #error "libmpdec version >= 2.4.1 required"
+#if !defined(MPD_VERSION_HEX) || MPD_VERSION_HEX < 0x02050000
+ #error "libmpdec version >= 2.5.0 required"
#endif
@@ -58,13 +56,11 @@
#define BOUNDS_CHECK(x, MIN, MAX) x = (x < MIN || MAX < x) ? MAX : x
-#ifndef UNUSED
#if defined(__GNUC__) && !defined(__INTEL_COMPILER)
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
-#endif
/* _Py_DEC_MINALLOC >= MPD_MINALLOC */
#define _Py_DEC_MINALLOC 4
@@ -103,9 +99,9 @@ static PyTypeObject PyDec_Type;
static PyTypeObject *PyDecSignalDict_Type;
static PyTypeObject PyDecContext_Type;
static PyTypeObject PyDecContextManager_Type;
-#define PyDec_CheckExact(v) (Py_TYPE(v) == &PyDec_Type)
+#define PyDec_CheckExact(v) Py_IS_TYPE(v, &PyDec_Type)
#define PyDec_Check(v) PyObject_TypeCheck(v, &PyDec_Type)
-#define PyDecSignalDict_Check(v) (Py_TYPE(v) == PyDecSignalDict_Type)
+#define PyDecSignalDict_Check(v) Py_IS_TYPE(v, PyDecSignalDict_Type)
#define PyDecContext_Check(v) PyObject_TypeCheck(v, &PyDecContext_Type)
#define MPD(v) (&((PyDecObject *)v)->dec)
#define SdFlagAddr(v) (((PyDecSignalDictObject *)v)->flags)
@@ -1885,7 +1881,7 @@ dec_dealloc(PyObject *dec)
/******************************************************************************/
Py_LOCAL_INLINE(int)
-is_space(enum PyUnicode_Kind kind, void *data, Py_ssize_t pos)
+is_space(enum PyUnicode_Kind kind, const void *data, Py_ssize_t pos)
{
Py_UCS4 ch = PyUnicode_READ(kind, data, pos);
return Py_UNICODE_ISSPACE(ch);
@@ -1903,7 +1899,7 @@ static char *
numeric_as_ascii(const PyObject *u, int strip_ws, int ignore_underscores)
{
enum PyUnicode_Kind kind;
- void *data;
+ const void *data;
Py_UCS4 ch;
char *res, *cp;
Py_ssize_t j, len;
@@ -2733,7 +2729,7 @@ PyDecType_FromObjectExact(PyTypeObject *type, PyObject *v, PyObject *context)
else {
PyErr_Format(PyExc_TypeError,
"conversion from %s to Decimal is not supported",
- v->ob_type->tp_name);
+ Py_TYPE(v)->tp_name);
return NULL;
}
}
@@ -2782,7 +2778,7 @@ PyDec_FromObject(PyObject *v, PyObject *context)
else {
PyErr_Format(PyExc_TypeError,
"conversion from %s to Decimal is not supported",
- v->ob_type->tp_name);
+ Py_TYPE(v)->tp_name);
return NULL;
}
}
@@ -2845,7 +2841,7 @@ convert_op(int type_err, PyObject **conv, PyObject *v, PyObject *context)
if (type_err) {
PyErr_Format(PyExc_TypeError,
"conversion from %s to Decimal is not supported",
- v->ob_type->tp_name);
+ Py_TYPE(v)->tp_name);
}
else {
Py_INCREF(Py_NotImplemented);
@@ -3402,9 +3398,9 @@ dec_as_long(PyObject *dec, PyObject *context, int round)
i--;
}
- Py_SIZE(pylong) = i;
+ Py_SET_SIZE(pylong, i);
if (mpd_isnegative(x) && !mpd_iszero(x)) {
- Py_SIZE(pylong) = -i;
+ Py_SET_SIZE(pylong, -i);
}
mpd_del(x);
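Py_SET_SIZE() used above is the companion setter to Py_SET_TYPE(): direct `Py_SIZE(obj) = n` assignments no longer compile against newer headers. A trivial sketch of the accessor:

#include "Python.h"

/* Adjust the logical length of a freshly created, not-yet-shared
 * variable-size object. */
static void
set_length(PyVarObject *obj, Py_ssize_t n)
{
    Py_SET_SIZE(obj, n);
}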
diff --git a/Modules/_decimal/libmpdec/README.txt b/Modules/_decimal/libmpdec/README.txt
index 96b72232..dc97820a 100644
--- a/Modules/_decimal/libmpdec/README.txt
+++ b/Modules/_decimal/libmpdec/README.txt
@@ -20,7 +20,7 @@ Files required for the Python _decimal module
context.c -> Context functions.
io.{c,h} -> Conversions between mpd_t and ASCII strings,
mpd_t formatting (allows UTF-8 fill character).
- memory.{c,h} -> Allocation handlers with overflow detection
+ mpalloc.{c,h} -> Allocation handlers with overflow detection
and functions for switching between static
and dynamic mpd_t.
mpdecimal.{c,h} -> All (quiet) functions of the specification.
@@ -30,7 +30,6 @@ Files required for the Python _decimal module
Visual Studio only:
~~~~~~~~~~~~~~~~~~~
vccompat.h -> snprintf <==> sprintf_s and similar things.
- vcstdint.h -> stdint.h (included in VS 2010 but not in VS 2008).
vcdiv64.asm -> Double word division used in typearith.h. VS 2008 does
not allow inline asm for x64. Also, it does not provide
an intrinsic for double word division.
diff --git a/Modules/_decimal/libmpdec/basearith.c b/Modules/_decimal/libmpdec/basearith.c
index dfe15239..85c608fa 100644
--- a/Modules/_decimal/libmpdec/basearith.c
+++ b/Modules/_decimal/libmpdec/basearith.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -27,13 +27,13 @@
#include "mpdecimal.h"
-#include
-#include
-#include
+
#include
+#include
+
+#include "basearith.h"
#include "constants.h"
#include "typearith.h"
-#include "basearith.h"
/*********************************************************************/
@@ -337,6 +337,7 @@ _mpd_basedivmod(mpd_uint_t *q, mpd_uint_t *r,
/* D2: loop */
for (j=m; j != MPD_SIZE_MAX; j--) {
+ assert(2 <= j+n && j+n <= nplusm); /* annotation for scan-build */
/* D3: calculate qhat and rhat */
rhat = _mpd_shortdiv(w2, u+j+n-1, 2, v[n-1]);
@@ -652,6 +653,3 @@ _mpd_shortdiv_b(mpd_uint_t *w, const mpd_uint_t *u, mpd_size_t n,
return rem;
}
-
-
-
diff --git a/Modules/_decimal/libmpdec/basearith.h b/Modules/_decimal/libmpdec/basearith.h
index 976358a1..d35925aa 100644
--- a/Modules/_decimal/libmpdec/basearith.h
+++ b/Modules/_decimal/libmpdec/basearith.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,12 +26,11 @@
*/
-#ifndef BASEARITH_H
-#define BASEARITH_H
+#ifndef LIBMPDEC_BASEARITH_H_
+#define LIBMPDEC_BASEARITH_H_
#include "mpdecimal.h"
-#include
#include "typearith.h"
@@ -216,7 +215,4 @@ _mpd_isallnine(const mpd_uint_t *data, mpd_ssize_t len)
MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */
-#endif /* BASEARITH_H */
-
-
-
+#endif /* LIBMPDEC_BASEARITH_H_ */
diff --git a/Modules/_decimal/libmpdec/bits.h b/Modules/_decimal/libmpdec/bits.h
index b5eaa249..aa9c3e77 100644
--- a/Modules/_decimal/libmpdec/bits.h
+++ b/Modules/_decimal/libmpdec/bits.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,12 +26,11 @@
*/
-#ifndef BITS_H
-#define BITS_H
+#ifndef LIBMPDEC_BITS_H_
+#define LIBMPDEC_BITS_H_
#include "mpdecimal.h"
-#include
/* Check if n is a power of 2. */
@@ -186,7 +185,4 @@ mpd_bsf(mpd_size_t a)
#endif /* BSR/BSF */
-#endif /* BITS_H */
-
-
-
+#endif /* LIBMPDEC_BITS_H_ */
diff --git a/Modules/_decimal/libmpdec/constants.c b/Modules/_decimal/libmpdec/constants.c
index 2c2d5ea4..4c4de622 100644
--- a/Modules/_decimal/libmpdec/constants.c
+++ b/Modules/_decimal/libmpdec/constants.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -27,7 +27,6 @@
#include "mpdecimal.h"
-#include
#include "constants.h"
@@ -128,5 +127,3 @@ const char *mpd_clamp_string[MPD_CLAMP_GUARD] = {
"CLAMP_DEFAULT",
"CLAMP_IEEE_754"
};
-
-
diff --git a/Modules/_decimal/libmpdec/constants.h b/Modules/_decimal/libmpdec/constants.h
index c0febfc8..7c1db839 100644
--- a/Modules/_decimal/libmpdec/constants.h
+++ b/Modules/_decimal/libmpdec/constants.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,12 +26,14 @@
*/
-#ifndef CONSTANTS_H
-#define CONSTANTS_H
+#ifndef LIBMPDEC_CONSTANTS_H_
+#define LIBMPDEC_CONSTANTS_H_
#include "mpdecimal.h"
+#include
+
/* Internal header file: all symbols have local scope in the DSO */
MPD_PRAGMA(MPD_HIDE_SYMBOLS_START)
@@ -84,7 +86,4 @@ extern const mpd_uint_t UH_P1P2;
MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */
-#endif /* CONSTANTS_H */
-
-
-
+#endif /* LIBMPDEC_CONSTANTS_H_ */
diff --git a/Modules/_decimal/libmpdec/context.c b/Modules/_decimal/libmpdec/context.c
index 24c7b890..9cbc2050 100644
--- a/Modules/_decimal/libmpdec/context.c
+++ b/Modules/_decimal/libmpdec/context.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -27,14 +27,16 @@
#include "mpdecimal.h"
+
+#include
#include
#include
-#include
void
-mpd_dflt_traphandler(mpd_context_t *ctx UNUSED)
+mpd_dflt_traphandler(mpd_context_t *ctx)
{
+ (void)ctx;
raise(SIGFPE);
}
@@ -282,5 +284,3 @@ mpd_addstatus_raise(mpd_context_t *ctx, uint32_t flags)
mpd_traphandler(ctx);
}
}
-
-
diff --git a/Modules/_decimal/libmpdec/convolute.c b/Modules/_decimal/libmpdec/convolute.c
index 4c62e8bd..4bc8e8b5 100644
--- a/Modules/_decimal/libmpdec/convolute.c
+++ b/Modules/_decimal/libmpdec/convolute.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -27,15 +27,14 @@
#include "mpdecimal.h"
-#include
#include "bits.h"
#include "constants.h"
+#include "convolute.h"
#include "fnt.h"
#include "fourstep.h"
#include "numbertheory.h"
#include "sixstep.h"
#include "umodarith.h"
-#include "convolute.h"
/* Bignum: Fast convolution using the Number Theoretic Transform. Used for
@@ -170,5 +169,3 @@ fnt_autoconvolute(mpd_uint_t *c1, mpd_size_t n, int modnum)
return 1;
}
-
-
diff --git a/Modules/_decimal/libmpdec/convolute.h b/Modules/_decimal/libmpdec/convolute.h
index f30a177a..62edb3e4 100644
--- a/Modules/_decimal/libmpdec/convolute.h
+++ b/Modules/_decimal/libmpdec/convolute.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,12 +26,11 @@
*/
-#ifndef CONVOLUTE_H
-#define CONVOLUTE_H
+#ifndef LIBMPDEC_CONVOLUTE_H_
+#define LIBMPDEC_CONVOLUTE_H_
#include "mpdecimal.h"
-#include
/* Internal header file: all symbols have local scope in the DSO */
@@ -47,4 +46,4 @@ int fnt_autoconvolute(mpd_uint_t *c1, mpd_size_t n, int modnum);
MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */
-#endif
+#endif /* LIBMPDEC_CONVOLUTE_H_ */
diff --git a/Modules/_decimal/libmpdec/crt.c b/Modules/_decimal/libmpdec/crt.c
index 4a1e80a2..613274ee 100644
--- a/Modules/_decimal/libmpdec/crt.c
+++ b/Modules/_decimal/libmpdec/crt.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -27,11 +27,14 @@
#include "mpdecimal.h"
-#include
+
#include
+
+#include "constants.h"
+#include "crt.h"
#include "numbertheory.h"
#include "umodarith.h"
-#include "crt.h"
+#include "typearith.h"
/* Bignum: Chinese Remainder Theorem, extends the maximum transform length. */
@@ -175,5 +178,3 @@ crt3(mpd_uint_t *x1, mpd_uint_t *x2, mpd_uint_t *x3, mpd_size_t rsize)
assert(carry[0] == 0 && carry[1] == 0 && carry[2] == 0);
}
-
-
diff --git a/Modules/_decimal/libmpdec/crt.h b/Modules/_decimal/libmpdec/crt.h
index f61e7729..15a347d4 100644
--- a/Modules/_decimal/libmpdec/crt.h
+++ b/Modules/_decimal/libmpdec/crt.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,12 +26,11 @@
*/
-#ifndef CRT_H
-#define CRT_H
+#ifndef LIBMPDEC_CRT_H_
+#define LIBMPDEC_CRT_H_
#include "mpdecimal.h"
-#include
/* Internal header file: all symbols have local scope in the DSO */
@@ -44,4 +43,4 @@ void crt3(mpd_uint_t *x1, mpd_uint_t *x2, mpd_uint_t *x3, mpd_size_t nmemb);
MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */
-#endif
+#endif /* LIBMPDEC_CRT_H_ */
diff --git a/Modules/_decimal/libmpdec/difradix2.c b/Modules/_decimal/libmpdec/difradix2.c
index 06e5ab5e..049ecff6 100644
--- a/Modules/_decimal/libmpdec/difradix2.c
+++ b/Modules/_decimal/libmpdec/difradix2.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -27,12 +27,14 @@
#include "mpdecimal.h"
-#include
+
#include
+
#include "bits.h"
+#include "constants.h"
+#include "difradix2.h"
#include "numbertheory.h"
#include "umodarith.h"
-#include "difradix2.h"
/* Bignum: The actual transform routine (decimation in frequency). */
@@ -169,5 +171,3 @@ fnt_dif2(mpd_uint_t a[], mpd_size_t n, struct fnt_params *tparams)
bitreverse_permute(a, n);
}
-
-
diff --git a/Modules/_decimal/libmpdec/difradix2.h b/Modules/_decimal/libmpdec/difradix2.h
index 5e22bcf3..cdcbcf9a 100644
--- a/Modules/_decimal/libmpdec/difradix2.h
+++ b/Modules/_decimal/libmpdec/difradix2.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,12 +26,11 @@
*/
-#ifndef DIF_RADIX2_H
-#define DIF_RADIX2_H
+#ifndef LIBMPDEC_DIFRADIX2_H_
+#define LIBMPDEC_DIFRADIX2_H_
#include "mpdecimal.h"
-#include
#include "numbertheory.h"
@@ -45,4 +44,4 @@ void fnt_dif2(mpd_uint_t a[], mpd_size_t n, struct fnt_params *tparams);
MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */
-#endif
+#endif /* LIBMPDEC_DIFRADIX2_H_ */
diff --git a/Modules/_decimal/libmpdec/fnt.c b/Modules/_decimal/libmpdec/fnt.c
index 7e924c85..0dbe98fc 100644
--- a/Modules/_decimal/libmpdec/fnt.c
+++ b/Modules/_decimal/libmpdec/fnt.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -27,13 +27,14 @@
#include "mpdecimal.h"
-#include
-#include
+
#include
+#include
+
#include "bits.h"
#include "difradix2.h"
-#include "numbertheory.h"
#include "fnt.h"
+#include "numbertheory.h"
/* Bignum: Fast transform for medium-sized coefficients. */
@@ -76,6 +77,3 @@ std_inv_fnt(mpd_uint_t *a, mpd_size_t n, int modnum)
mpd_free(tparams);
return 1;
}
-
-
-
diff --git a/Modules/_decimal/libmpdec/fnt.h b/Modules/_decimal/libmpdec/fnt.h
index fa2154a7..5222c476 100644
--- a/Modules/_decimal/libmpdec/fnt.h
+++ b/Modules/_decimal/libmpdec/fnt.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,12 +26,11 @@
*/
-#ifndef FNT_H
-#define FNT_H
+#ifndef LIBMPDEC_FNT_H_
+#define LIBMPDEC_FNT_H_
#include "mpdecimal.h"
-#include
/* Internal header file: all symbols have local scope in the DSO */
@@ -45,5 +44,4 @@ int std_inv_fnt(mpd_uint_t a[], mpd_size_t n, int modnum);
MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */
-#endif
-
+#endif /* LIBMPDEC_FNT_H_ */
diff --git a/Modules/_decimal/libmpdec/fourstep.c b/Modules/_decimal/libmpdec/fourstep.c
index 21d3e748..fb173ed5 100644
--- a/Modules/_decimal/libmpdec/fourstep.c
+++ b/Modules/_decimal/libmpdec/fourstep.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -27,12 +27,14 @@
#include "mpdecimal.h"
+
#include
+
+#include "constants.h"
+#include "fourstep.h"
#include "numbertheory.h"
#include "sixstep.h"
-#include "transpose.h"
#include "umodarith.h"
-#include "fourstep.h"
/* Bignum: Cache efficient Matrix Fourier Transform for arrays of the
@@ -187,6 +189,7 @@ four_step_fnt(mpd_uint_t *a, mpd_size_t n, int modnum)
#if 0
/* An unordered transform is sufficient for convolution. */
/* Transpose the matrix. */
+ #include "transpose.h"
transpose_3xpow2(a, R, C);
#endif
@@ -217,6 +220,7 @@ inv_four_step_fnt(mpd_uint_t *a, mpd_size_t n, int modnum)
#if 0
/* An unordered transform is sufficient for convolution. */
/* Transpose the matrix, producing an R*C matrix. */
+ #include "transpose.h"
transpose_3xpow2(a, C, R);
#endif
@@ -253,5 +257,3 @@ inv_four_step_fnt(mpd_uint_t *a, mpd_size_t n, int modnum)
return 1;
}
-
-
diff --git a/Modules/_decimal/libmpdec/fourstep.h b/Modules/_decimal/libmpdec/fourstep.h
index 80dcd4be..5ffb6fcc 100644
--- a/Modules/_decimal/libmpdec/fourstep.h
+++ b/Modules/_decimal/libmpdec/fourstep.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,12 +26,11 @@
*/
-#ifndef FOUR_STEP_H
-#define FOUR_STEP_H
+#ifndef LIBMPDEC_FOURSTEP_H_
+#define LIBMPDEC_FOURSTEP_H_
#include "mpdecimal.h"
-#include
/* Internal header file: all symbols have local scope in the DSO */
@@ -45,4 +44,4 @@ int inv_four_step_fnt(mpd_uint_t *a, mpd_size_t n, int modnum);
MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */
-#endif
+#endif /* LIBMPDEC_FOURSTEP_H_ */
diff --git a/Modules/_decimal/libmpdec/io.c b/Modules/_decimal/libmpdec/io.c
index f45e558f..9513a68e 100644
--- a/Modules/_decimal/libmpdec/io.c
+++ b/Modules/_decimal/libmpdec/io.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -27,16 +27,16 @@
#include "mpdecimal.h"
-#include
-#include
-#include
-#include
-#include
+
#include
+#include
#include
+#include
#include
-#include "bits.h"
-#include "constants.h"
+#include
+#include
+#include
+
#include "typearith.h"
#include "io.h"
@@ -277,7 +277,7 @@ mpd_qset_string(mpd_t *dec, const char *s, const mpd_context_t *ctx,
}
}
- digits = end - coeff;
+ digits = end - coeff;
if (dpoint) {
size_t fracdigits = end-dpoint-1;
if (dpoint > coeff) digits--;
@@ -326,6 +326,22 @@ conversion_error:
mpd_seterror(dec, MPD_Conversion_syntax, status);
}
+/* convert a character string to a decimal, use a maxcontext for conversion */
+void
+mpd_qset_string_exact(mpd_t *dec, const char *s, uint32_t *status)
+{
+ mpd_context_t maxcontext;
+
+ mpd_maxcontext(&maxcontext);
+ mpd_qset_string(dec, s, &maxcontext, status);
+
+ if (*status & (MPD_Inexact|MPD_Rounded|MPD_Clamped)) {
+ /* we want exact results */
+ mpd_seterror(dec, MPD_Invalid_operation, status);
+ }
+ *status &= MPD_Errors;
+}
+
/* Print word x with n decimal digits to string s. dot is either NULL
or the location of a decimal point. */
#define EXTRACT_DIGIT(s, x, d, dot) \
@@ -539,8 +555,8 @@ _mpd_to_string(char **result, const mpd_t *dec, int flags, mpd_ssize_t dplace)
dplace = -1 + mod_mpd_ssize_t(dec->exp+2, 3);
}
else { /* ldigits-1 is the adjusted exponent, which
- * should be divisible by three. If not, move
- * dplace one or two places to the right. */
+ * should be divisible by three. If not, move
+ * dplace one or two places to the right. */
dplace += mod_mpd_ssize_t(ldigits-1, 3);
}
}
@@ -1247,7 +1263,7 @@ mpd_qformat_spec(const mpd_t *dec, const mpd_spec_t *spec,
}
if (isupper((uchar)type)) {
- type = tolower((uchar)type);
+ type = (char)tolower((uchar)type);
flags |= MPD_FMT_UPPER;
}
if (spec->sign == ' ') {
@@ -1265,6 +1281,7 @@ mpd_qformat_spec(const mpd_t *dec, const mpd_spec_t *spec,
stackspec.align = '>';
spec = &stackspec;
}
+ assert(strlen(spec->fill) == 1); /* annotation for scan-build */
if (type == '%') {
flags |= MPD_FMT_PERCENT;
}
@@ -1579,5 +1596,3 @@ mpd_print(const mpd_t *dec)
fputs("mpd_fprint: output error\n", stderr); /* GCOV_NOT_REACHED */
}
}
-
-
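
For reference, a minimal usage sketch of the new mpd_qset_string_exact() added above: the string is converted under a maxcontext and any rounding is reported as MPD_Invalid_operation. The sketch is not part of the patch and only uses standard libmpdec calls (mpd_maxcontext, mpd_new, mpd_print, mpd_del); the helper name is ours.

    #include "mpdecimal.h"

    /* Sketch only: set a decimal from a literal exactly, or fail. */
    static int
    set_exact_example(void)
    {
        mpd_context_t ctx;
        mpd_t *x;
        uint32_t status = 0;

        mpd_maxcontext(&ctx);      /* context is only used by mpd_new for error reporting */
        x = mpd_new(&ctx);
        if (x == NULL) {
            return -1;
        }

        /* No rounding to a context precision: an inexact conversion
           raises MPD_Invalid_operation instead. */
        mpd_qset_string_exact(x, "1.23456789E+2000", &status);
        if (status & MPD_Errors) {
            mpd_del(x);
            return -1;
        }

        mpd_print(x);
        mpd_del(x);
        return 0;
    }
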
diff --git a/Modules/_decimal/libmpdec/io.h b/Modules/_decimal/libmpdec/io.h
index de5486a0..79d7c05c 100644
--- a/Modules/_decimal/libmpdec/io.h
+++ b/Modules/_decimal/libmpdec/io.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,17 +26,20 @@
*/
-#ifndef IO_H
-#define IO_H
+#ifndef LIBMPDEC_IO_H_
+#define LIBMPDEC_IO_H_
-#include
#include "mpdecimal.h"
+#include
+
#if SIZE_MAX == MPD_SIZE_MAX
#define mpd_strtossize _mpd_strtossize
#else
+#include
+
static inline mpd_ssize_t
mpd_strtossize(const char *s, char **end, int base)
{
@@ -56,4 +59,4 @@ mpd_strtossize(const char *s, char **end, int base)
#endif
-#endif
+#endif /* LIBMPDEC_IO_H_ */
diff --git a/Modules/_decimal/libmpdec/literature/fnt.py b/Modules/_decimal/libmpdec/literature/fnt.py
index 6363536d..c1285a56 100644
--- a/Modules/_decimal/libmpdec/literature/fnt.py
+++ b/Modules/_decimal/libmpdec/literature/fnt.py
@@ -1,5 +1,5 @@
#
-# Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+# Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
diff --git a/Modules/_decimal/libmpdec/literature/matrix-transform.txt b/Modules/_decimal/libmpdec/literature/matrix-transform.txt
index 701d85d6..6e7ad742 100644
--- a/Modules/_decimal/libmpdec/literature/matrix-transform.txt
+++ b/Modules/_decimal/libmpdec/literature/matrix-transform.txt
@@ -1,6 +1,6 @@
-(* Copyright (c) 2011 Stefan Krah. All rights reserved. *)
+(* Copyright (c) 2011-2020 Stefan Krah. All rights reserved. *)
The Matrix Fourier Transform:
diff --git a/Modules/_decimal/libmpdec/literature/mulmod-64.txt b/Modules/_decimal/libmpdec/literature/mulmod-64.txt
index 029b8de3..fa967bf9 100644
--- a/Modules/_decimal/libmpdec/literature/mulmod-64.txt
+++ b/Modules/_decimal/libmpdec/literature/mulmod-64.txt
@@ -1,6 +1,6 @@
-(* Copyright (c) 2011 Stefan Krah. All rights reserved. *)
+(* Copyright (c) 2011-2020 Stefan Krah. All rights reserved. *)
==========================================================================
diff --git a/Modules/_decimal/libmpdec/literature/mulmod-ppro.txt b/Modules/_decimal/libmpdec/literature/mulmod-ppro.txt
index 4d17a928..ba804e4b 100644
--- a/Modules/_decimal/libmpdec/literature/mulmod-ppro.txt
+++ b/Modules/_decimal/libmpdec/literature/mulmod-ppro.txt
@@ -1,6 +1,6 @@
-(* Copyright (c) 2011 Stefan Krah. All rights reserved. *)
+(* Copyright (c) 2011-2020 Stefan Krah. All rights reserved. *)
========================================================================
diff --git a/Modules/_decimal/libmpdec/literature/six-step.txt b/Modules/_decimal/libmpdec/literature/six-step.txt
index 8e45f487..852d5b0d 100644
--- a/Modules/_decimal/libmpdec/literature/six-step.txt
+++ b/Modules/_decimal/libmpdec/literature/six-step.txt
@@ -1,6 +1,6 @@
-(* Copyright (c) 2011 Stefan Krah. All rights reserved. *)
+(* Copyright (c) 2011-2020 Stefan Krah. All rights reserved. *)
The Six Step Transform:
diff --git a/Modules/_decimal/libmpdec/literature/umodarith.lisp b/Modules/_decimal/libmpdec/literature/umodarith.lisp
index 99d71c37..d71f074a 100644
--- a/Modules/_decimal/libmpdec/literature/umodarith.lisp
+++ b/Modules/_decimal/libmpdec/literature/umodarith.lisp
@@ -1,5 +1,5 @@
;
-; Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+; Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
;
; Redistribution and use in source and binary forms, with or without
; modification, are permitted provided that the following conditions
@@ -149,7 +149,7 @@
(defthmd addmod-correct
(implies (and (< 0 m) (< m base)
- (< a m) (<= b m)
+ (< a m) (<= b m)
(natp m) (natp base)
(natp a) (natp b))
(equal (addmod a b m base)
@@ -179,7 +179,7 @@
(defthmd submod-correct
(implies (and (< 0 m) (< m base)
- (< a m) (<= b m)
+ (< a m) (<= b m)
(natp m) (natp base)
(natp a) (natp b))
(equal (submod a b m base)
@@ -200,7 +200,7 @@
(defthm submod-2-correct
(implies (and (< 0 m) (< m base)
- (< a m) (<= b m)
+ (< a m) (<= b m)
(natp m) (natp base)
(natp a) (natp b))
(equal (submod-2 a b m base)
@@ -231,7 +231,7 @@
(defthmd ext-submod-ext-submod-2-equal
(implies (and (< 0 m) (< m base)
- (< a (* 2 m)) (< b (* 2 m))
+ (< a (* 2 m)) (< b (* 2 m))
(natp m) (natp base)
(natp a) (natp b))
(equal (ext-submod a b m base)
@@ -239,7 +239,7 @@
(defthmd ext-submod-2-correct
(implies (and (< 0 m) (< m base)
- (< a (* 2 m)) (< b (* 2 m))
+ (< a (* 2 m)) (< b (* 2 m))
(natp m) (natp base)
(natp a) (natp b))
(equal (ext-submod-2 a b m base)
@@ -257,7 +257,7 @@
(defthmd dw-reduce-correct
(implies (and (< 0 m) (< m base)
- (< hi base) (< lo base)
+ (< hi base) (< lo base)
(natp m) (natp base)
(natp hi) (natp lo))
(equal (dw-reduce hi lo m base)
@@ -322,7 +322,7 @@
(defthmd dw-submod-correct
(implies (and (< 0 m) (< m base)
(natp a) (< a m)
- (< hi base) (< lo base)
+ (< hi base) (< lo base)
(natp m) (natp base)
(natp hi) (natp lo))
(equal (dw-submod a hi lo m base)
diff --git a/Modules/_decimal/libmpdec/memory.c b/Modules/_decimal/libmpdec/memory.c
deleted file mode 100644
index a854e099..00000000
--- a/Modules/_decimal/libmpdec/memory.c
+++ /dev/null
@@ -1,297 +0,0 @@
-/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
- * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
- * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
- * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
- * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
- * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
- * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
- * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
- * SUCH DAMAGE.
- */
-
-
-#include "mpdecimal.h"
-#include
-#include
-#include "typearith.h"
-#include "mpalloc.h"
-
-
-#if defined(_MSC_VER)
- #pragma warning(disable : 4232)
-#endif
-
-
-/* Guaranteed minimum allocation for a coefficient. May be changed once
- at program start using mpd_setminalloc(). */
-mpd_ssize_t MPD_MINALLOC = MPD_MINALLOC_MIN;
-
-/* Custom allocation and free functions */
-void *(* mpd_mallocfunc)(size_t size) = malloc;
-void *(* mpd_reallocfunc)(void *ptr, size_t size) = realloc;
-void *(* mpd_callocfunc)(size_t nmemb, size_t size) = calloc;
-void (* mpd_free)(void *ptr) = free;
-
-
-/* emulate calloc if it is not available */
-void *
-mpd_callocfunc_em(size_t nmemb, size_t size)
-{
- void *ptr;
- size_t req;
- mpd_size_t overflow;
-
-#if MPD_SIZE_MAX < SIZE_MAX
- /* full_coverage test only */
- if (nmemb > MPD_SIZE_MAX || size > MPD_SIZE_MAX) {
- return NULL;
- }
-#endif
-
- req = mul_size_t_overflow((mpd_size_t)nmemb, (mpd_size_t)size,
- &overflow);
- if (overflow) {
- return NULL;
- }
-
- ptr = mpd_mallocfunc(req);
- if (ptr == NULL) {
- return NULL;
- }
- /* used on uint32_t or uint64_t */
- memset(ptr, 0, req);
-
- return ptr;
-}
-
-
-/* malloc with overflow checking */
-void *
-mpd_alloc(mpd_size_t nmemb, mpd_size_t size)
-{
- mpd_size_t req, overflow;
-
- req = mul_size_t_overflow(nmemb, size, &overflow);
- if (overflow) {
- return NULL;
- }
-
- return mpd_mallocfunc(req);
-}
-
-/* calloc with overflow checking */
-void *
-mpd_calloc(mpd_size_t nmemb, mpd_size_t size)
-{
- mpd_size_t overflow;
-
- (void)mul_size_t_overflow(nmemb, size, &overflow);
- if (overflow) {
- return NULL;
- }
-
- return mpd_callocfunc(nmemb, size);
-}
-
-/* realloc with overflow checking */
-void *
-mpd_realloc(void *ptr, mpd_size_t nmemb, mpd_size_t size, uint8_t *err)
-{
- void *new;
- mpd_size_t req, overflow;
-
- req = mul_size_t_overflow(nmemb, size, &overflow);
- if (overflow) {
- *err = 1;
- return ptr;
- }
-
- new = mpd_reallocfunc(ptr, req);
- if (new == NULL) {
- *err = 1;
- return ptr;
- }
-
- return new;
-}
-
-/* struct hack malloc with overflow checking */
-void *
-mpd_sh_alloc(mpd_size_t struct_size, mpd_size_t nmemb, mpd_size_t size)
-{
- mpd_size_t req, overflow;
-
- req = mul_size_t_overflow(nmemb, size, &overflow);
- if (overflow) {
- return NULL;
- }
-
- req = add_size_t_overflow(req, struct_size, &overflow);
- if (overflow) {
- return NULL;
- }
-
- return mpd_mallocfunc(req);
-}
-
-
-/* Allocate a new decimal with a coefficient of length 'nwords'. In case
- of an error the return value is NULL. */
-mpd_t *
-mpd_qnew_size(mpd_ssize_t nwords)
-{
- mpd_t *result;
-
- nwords = (nwords < MPD_MINALLOC) ? MPD_MINALLOC : nwords;
-
- result = mpd_alloc(1, sizeof *result);
- if (result == NULL) {
- return NULL;
- }
-
- result->data = mpd_alloc(nwords, sizeof *result->data);
- if (result->data == NULL) {
- mpd_free(result);
- return NULL;
- }
-
- result->flags = 0;
- result->exp = 0;
- result->digits = 0;
- result->len = 0;
- result->alloc = nwords;
-
- return result;
-}
-
-/* Allocate a new decimal with a coefficient of length MPD_MINALLOC.
- In case of an error the return value is NULL. */
-mpd_t *
-mpd_qnew(void)
-{
- return mpd_qnew_size(MPD_MINALLOC);
-}
-
-/* Allocate new decimal. Caller can check for NULL or MPD_Malloc_error.
- Raises on error. */
-mpd_t *
-mpd_new(mpd_context_t *ctx)
-{
- mpd_t *result;
-
- result = mpd_qnew();
- if (result == NULL) {
- mpd_addstatus_raise(ctx, MPD_Malloc_error);
- }
- return result;
-}
-
-/*
- * Input: 'result' is a static mpd_t with a static coefficient.
- * Assumption: 'nwords' >= result->alloc.
- *
- * Resize the static coefficient to a larger dynamic one and copy the
- * existing data. If successful, the value of 'result' is unchanged.
- * Otherwise, set 'result' to NaN and update 'status' with MPD_Malloc_error.
- */
-int
-mpd_switch_to_dyn(mpd_t *result, mpd_ssize_t nwords, uint32_t *status)
-{
- mpd_uint_t *p = result->data;
-
- assert(nwords >= result->alloc);
-
- result->data = mpd_alloc(nwords, sizeof *result->data);
- if (result->data == NULL) {
- result->data = p;
- mpd_set_qnan(result);
- mpd_set_positive(result);
- result->exp = result->digits = result->len = 0;
- *status |= MPD_Malloc_error;
- return 0;
- }
-
- memcpy(result->data, p, result->alloc * (sizeof *result->data));
- result->alloc = nwords;
- mpd_set_dynamic_data(result);
- return 1;
-}
-
-/*
- * Input: 'result' is a static mpd_t with a static coefficient.
- *
- * Convert the coefficient to a dynamic one that is initialized to zero. If
- * malloc fails, set 'result' to NaN and update 'status' with MPD_Malloc_error.
- */
-int
-mpd_switch_to_dyn_zero(mpd_t *result, mpd_ssize_t nwords, uint32_t *status)
-{
- mpd_uint_t *p = result->data;
-
- result->data = mpd_calloc(nwords, sizeof *result->data);
- if (result->data == NULL) {
- result->data = p;
- mpd_set_qnan(result);
- mpd_set_positive(result);
- result->exp = result->digits = result->len = 0;
- *status |= MPD_Malloc_error;
- return 0;
- }
-
- result->alloc = nwords;
- mpd_set_dynamic_data(result);
-
- return 1;
-}
-
-/*
- * Input: 'result' is a static or a dynamic mpd_t with a dynamic coefficient.
- * Resize the coefficient to length 'nwords':
- * Case nwords > result->alloc:
- * If realloc is successful:
- * 'result' has a larger coefficient but the same value. Return 1.
- * Otherwise:
- * Set 'result' to NaN, update status with MPD_Malloc_error and return 0.
- * Case nwords < result->alloc:
- * If realloc is successful:
- * 'result' has a smaller coefficient. result->len is undefined. Return 1.
- * Otherwise (unlikely):
- * 'result' is unchanged. Reuse the now oversized coefficient. Return 1.
- */
-int
-mpd_realloc_dyn(mpd_t *result, mpd_ssize_t nwords, uint32_t *status)
-{
- uint8_t err = 0;
-
- result->data = mpd_realloc(result->data, nwords, sizeof *result->data, &err);
- if (!err) {
- result->alloc = nwords;
- }
- else if (nwords > result->alloc) {
- mpd_set_qnan(result);
- mpd_set_positive(result);
- result->exp = result->digits = result->len = 0;
- *status |= MPD_Malloc_error;
- return 0;
- }
-
- return 1;
-}
-
-
diff --git a/Modules/_decimal/libmpdec/mpalloc.c b/Modules/_decimal/libmpdec/mpalloc.c
new file mode 100644
index 00000000..eb5ee7a8
--- /dev/null
+++ b/Modules/_decimal/libmpdec/mpalloc.c
@@ -0,0 +1,356 @@
+/*
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+
+#include "mpdecimal.h"
+
+#include
+#include
+#include
+#include
+
+#include "mpalloc.h"
+#include "typearith.h"
+
+
+#if defined(_MSC_VER)
+ #pragma warning(disable : 4232)
+#endif
+
+
+/* Guaranteed minimum allocation for a coefficient. May be changed once
+ at program start using mpd_setminalloc(). */
+mpd_ssize_t MPD_MINALLOC = MPD_MINALLOC_MIN;
+
+/* Custom allocation and free functions */
+void *(* mpd_mallocfunc)(size_t size) = malloc;
+void *(* mpd_reallocfunc)(void *ptr, size_t size) = realloc;
+void *(* mpd_callocfunc)(size_t nmemb, size_t size) = calloc;
+void (* mpd_free)(void *ptr) = free;
+
+
+/* emulate calloc if it is not available */
+void *
+mpd_callocfunc_em(size_t nmemb, size_t size)
+{
+ void *ptr;
+ size_t req;
+ mpd_size_t overflow;
+
+#if MPD_SIZE_MAX < SIZE_MAX
+ /* full_coverage test only */
+ if (nmemb > MPD_SIZE_MAX || size > MPD_SIZE_MAX) {
+ return NULL;
+ }
+#endif
+
+ req = mul_size_t_overflow((mpd_size_t)nmemb, (mpd_size_t)size,
+ &overflow);
+ if (overflow) {
+ return NULL;
+ }
+
+ ptr = mpd_mallocfunc(req);
+ if (ptr == NULL) {
+ return NULL;
+ }
+ /* used on uint32_t or uint64_t */
+ memset(ptr, 0, req);
+
+ return ptr;
+}
+
+
+/* malloc with overflow checking */
+void *
+mpd_alloc(mpd_size_t nmemb, mpd_size_t size)
+{
+ mpd_size_t req, overflow;
+
+ req = mul_size_t_overflow(nmemb, size, &overflow);
+ if (overflow) {
+ return NULL;
+ }
+
+ return mpd_mallocfunc(req);
+}
+
+/* calloc with overflow checking */
+void *
+mpd_calloc(mpd_size_t nmemb, mpd_size_t size)
+{
+ mpd_size_t overflow;
+
+ (void)mul_size_t_overflow(nmemb, size, &overflow);
+ if (overflow) {
+ return NULL;
+ }
+
+ return mpd_callocfunc(nmemb, size);
+}
+
+/* realloc with overflow checking */
+void *
+mpd_realloc(void *ptr, mpd_size_t nmemb, mpd_size_t size, uint8_t *err)
+{
+ void *new;
+ mpd_size_t req, overflow;
+
+ req = mul_size_t_overflow(nmemb, size, &overflow);
+ if (overflow) {
+ *err = 1;
+ return ptr;
+ }
+
+ new = mpd_reallocfunc(ptr, req);
+ if (new == NULL) {
+ *err = 1;
+ return ptr;
+ }
+
+ return new;
+}
+
+/* struct hack malloc with overflow checking */
+void *
+mpd_sh_alloc(mpd_size_t struct_size, mpd_size_t nmemb, mpd_size_t size)
+{
+ mpd_size_t req, overflow;
+
+ req = mul_size_t_overflow(nmemb, size, &overflow);
+ if (overflow) {
+ return NULL;
+ }
+
+ req = add_size_t_overflow(req, struct_size, &overflow);
+ if (overflow) {
+ return NULL;
+ }
+
+ return mpd_mallocfunc(req);
+}
+
+
+/* Allocate a new decimal with a coefficient of length 'nwords'. In case
+ of an error the return value is NULL. */
+mpd_t *
+mpd_qnew_size(mpd_ssize_t nwords)
+{
+ mpd_t *result;
+
+ nwords = (nwords < MPD_MINALLOC) ? MPD_MINALLOC : nwords;
+
+ result = mpd_alloc(1, sizeof *result);
+ if (result == NULL) {
+ return NULL;
+ }
+
+ result->data = mpd_alloc(nwords, sizeof *result->data);
+ if (result->data == NULL) {
+ mpd_free(result);
+ return NULL;
+ }
+
+ result->flags = 0;
+ result->exp = 0;
+ result->digits = 0;
+ result->len = 0;
+ result->alloc = nwords;
+
+ return result;
+}
+
+/* Allocate a new decimal with a coefficient of length MPD_MINALLOC.
+ In case of an error the return value is NULL. */
+mpd_t *
+mpd_qnew(void)
+{
+ return mpd_qnew_size(MPD_MINALLOC);
+}
+
+/* Allocate new decimal. Caller can check for NULL or MPD_Malloc_error.
+ Raises on error. */
+mpd_t *
+mpd_new(mpd_context_t *ctx)
+{
+ mpd_t *result;
+
+ result = mpd_qnew();
+ if (result == NULL) {
+ mpd_addstatus_raise(ctx, MPD_Malloc_error);
+ }
+ return result;
+}
+
+/*
+ * Input: 'result' is a static mpd_t with a static coefficient.
+ * Assumption: 'nwords' >= result->alloc.
+ *
+ * Resize the static coefficient to a larger dynamic one and copy the
+ * existing data. If successful, the value of 'result' is unchanged.
+ * Otherwise, set 'result' to NaN and update 'status' with MPD_Malloc_error.
+ */
+int
+mpd_switch_to_dyn(mpd_t *result, mpd_ssize_t nwords, uint32_t *status)
+{
+ mpd_uint_t *p = result->data;
+
+ assert(nwords >= result->alloc);
+
+ result->data = mpd_alloc(nwords, sizeof *result->data);
+ if (result->data == NULL) {
+ result->data = p;
+ mpd_set_qnan(result);
+ mpd_set_positive(result);
+ result->exp = result->digits = result->len = 0;
+ *status |= MPD_Malloc_error;
+ return 0;
+ }
+
+ memcpy(result->data, p, result->alloc * (sizeof *result->data));
+ result->alloc = nwords;
+ mpd_set_dynamic_data(result);
+ return 1;
+}
+
+/*
+ * Input: 'result' is a static mpd_t with a static coefficient.
+ *
+ * Convert the coefficient to a dynamic one that is initialized to zero. If
+ * malloc fails, set 'result' to NaN and update 'status' with MPD_Malloc_error.
+ */
+int
+mpd_switch_to_dyn_zero(mpd_t *result, mpd_ssize_t nwords, uint32_t *status)
+{
+ mpd_uint_t *p = result->data;
+
+ result->data = mpd_calloc(nwords, sizeof *result->data);
+ if (result->data == NULL) {
+ result->data = p;
+ mpd_set_qnan(result);
+ mpd_set_positive(result);
+ result->exp = result->digits = result->len = 0;
+ *status |= MPD_Malloc_error;
+ return 0;
+ }
+
+ result->alloc = nwords;
+ mpd_set_dynamic_data(result);
+
+ return 1;
+}
+
+/*
+ * Input: 'result' is a static or a dynamic mpd_t with a dynamic coefficient.
+ * Resize the coefficient to length 'nwords':
+ * Case nwords > result->alloc:
+ * If realloc is successful:
+ * 'result' has a larger coefficient but the same value. Return 1.
+ * Otherwise:
+ * Set 'result' to NaN, update status with MPD_Malloc_error and return 0.
+ * Case nwords < result->alloc:
+ * If realloc is successful:
+ * 'result' has a smaller coefficient. result->len is undefined. Return 1.
+ * Otherwise (unlikely):
+ * 'result' is unchanged. Reuse the now oversized coefficient. Return 1.
+ */
+int
+mpd_realloc_dyn(mpd_t *result, mpd_ssize_t nwords, uint32_t *status)
+{
+ uint8_t err = 0;
+
+ result->data = mpd_realloc(result->data, nwords, sizeof *result->data, &err);
+ if (!err) {
+ result->alloc = nwords;
+ }
+ else if (nwords > result->alloc) {
+ mpd_set_qnan(result);
+ mpd_set_positive(result);
+ result->exp = result->digits = result->len = 0;
+ *status |= MPD_Malloc_error;
+ return 0;
+ }
+
+ return 1;
+}
+
+/*
+ * Input: 'result' is a static mpd_t with a static coefficient.
+ * Assumption: 'nwords' >= result->alloc.
+ *
+ * Resize the static coefficient to a larger dynamic one and copy the
+ * existing data.
+ *
+ * On failure the value of 'result' is unchanged.
+ */
+int
+mpd_switch_to_dyn_cxx(mpd_t *result, mpd_ssize_t nwords)
+{
+ assert(nwords >= result->alloc);
+
+ mpd_uint_t *data = mpd_alloc(nwords, sizeof *result->data);
+ if (data == NULL) {
+ return 0;
+ }
+
+ memcpy(data, result->data, result->alloc * (sizeof *result->data));
+ result->data = data;
+ result->alloc = nwords;
+ mpd_set_dynamic_data(result);
+ return 1;
+}
+
+/*
+ * Input: 'result' is a static or a dynamic mpd_t with a dynamic coefficient.
+ * Resize the coefficient to length 'nwords':
+ * Case nwords > result->alloc:
+ * If realloc is successful:
+ * 'result' has a larger coefficient but the same value. Return 1.
+ * Otherwise:
+ * 'result' has the same coefficient. Return 0.
+ * Case nwords < result->alloc:
+ * If realloc is successful:
+ * 'result' has a smaller coefficient. result->len is undefined. Return 1.
+ * Otherwise (unlikely):
+ * 'result' is unchanged. Reuse the now oversized coefficient. Return 1.
+ */
+int
+mpd_realloc_dyn_cxx(mpd_t *result, mpd_ssize_t nwords)
+{
+ uint8_t err = 0;
+
+ mpd_uint_t *p = mpd_realloc(result->data, nwords, sizeof *result->data, &err);
+ if (!err) {
+ result->data = p;
+ result->alloc = nwords;
+ }
+ else if (nwords > result->alloc) {
+ return 0;
+ }
+
+ return 1;
+}
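
The allocation hooks defined in the new mpalloc.c (mpd_mallocfunc, mpd_reallocfunc, mpd_callocfunc, mpd_free) remain plain global function pointers, so an embedder can still swap them out before the first allocation. A small sketch under that assumption; the tracing wrappers and the installer name are hypothetical:

    #include <stdio.h>
    #include <stdlib.h>

    #include "mpdecimal.h"

    static void *
    trace_malloc(size_t size)
    {
        fprintf(stderr, "mpd malloc: %zu bytes\n", size);
        return malloc(size);
    }

    static void
    trace_free(void *ptr)
    {
        fprintf(stderr, "mpd free: %p\n", ptr);
        free(ptr);
    }

    /* Must run before the first mpd_t is created. */
    void
    install_trace_allocators(void)
    {
        mpd_mallocfunc = trace_malloc;
        mpd_free = trace_free;
        /* mpd_reallocfunc and mpd_callocfunc keep their defaults here. */
        mpd_setminalloc(MPD_MINALLOC_MIN);  /* optional; may be called once at startup */
    }
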
diff --git a/Modules/_decimal/libmpdec/mpalloc.h b/Modules/_decimal/libmpdec/mpalloc.h
index efd71195..18680845 100644
--- a/Modules/_decimal/libmpdec/mpalloc.h
+++ b/Modules/_decimal/libmpdec/mpalloc.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,12 +26,14 @@
*/
-#ifndef MPALLOC_H
-#define MPALLOC_H
+#ifndef LIBMPDEC_MPALLOC_H_
+#define LIBMPDEC_MPALLOC_H_
#include "mpdecimal.h"
+#include
+
/* Internal header file: all symbols have local scope in the DSO */
MPD_PRAGMA(MPD_HIDE_SYMBOLS_START)
@@ -41,11 +43,11 @@ int mpd_switch_to_dyn(mpd_t *result, mpd_ssize_t size, uint32_t *status);
int mpd_switch_to_dyn_zero(mpd_t *result, mpd_ssize_t size, uint32_t *status);
int mpd_realloc_dyn(mpd_t *result, mpd_ssize_t size, uint32_t *status);
-
-MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */
+int mpd_switch_to_dyn_cxx(mpd_t *result, mpd_ssize_t size);
+int mpd_realloc_dyn_cxx(mpd_t *result, mpd_ssize_t size);
-#endif
-
+MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */
+#endif /* LIBMPDEC_MPALLOC_H_ */
diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c
index bfa8bb34..28b639cc 100644
--- a/Modules/_decimal/libmpdec/mpdecimal.c
+++ b/Modules/_decimal/libmpdec/mpdecimal.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -27,18 +27,21 @@
#include "mpdecimal.h"
+
+#include
+#include
+#include
#include
#include
#include
-#include
-#include
+
#include "basearith.h"
#include "bits.h"
+#include "constants.h"
#include "convolute.h"
#include "crt.h"
#include "mpalloc.h"
#include "typearith.h"
-#include "umodarith.h"
#ifdef PPRO
#if defined(_MSC_VER)
@@ -61,7 +64,7 @@
#if defined(_MSC_VER)
#define ALWAYS_INLINE __forceinline
-#elif defined(LEGACY_COMPILER)
+#elif defined(__IBMC__) || defined(LEGACY_COMPILER)
#define ALWAYS_INLINE
#undef inline
#define inline
@@ -241,7 +244,7 @@ mpd_lsd(mpd_uint_t word)
}
/* Coefficient size needed to store 'digits' */
-ALWAYS_INLINE mpd_ssize_t
+mpd_ssize_t
mpd_digits_to_size(mpd_ssize_t digits)
{
mpd_ssize_t q, r;
@@ -260,8 +263,9 @@ mpd_exp_digits(mpd_ssize_t exp)
/* Canonical */
ALWAYS_INLINE int
-mpd_iscanonical(const mpd_t *dec UNUSED)
+mpd_iscanonical(const mpd_t *dec)
{
+ (void)dec;
return 1;
}
@@ -512,6 +516,28 @@ mpd_qresize(mpd_t *result, mpd_ssize_t nwords, uint32_t *status)
return mpd_realloc_dyn(result, nwords, status);
}
+/* Same as mpd_qresize, but do not set the result to NaN on failure. */
+static ALWAYS_INLINE int
+mpd_qresize_cxx(mpd_t *result, mpd_ssize_t nwords)
+{
+ assert(!mpd_isconst_data(result)); /* illegal operation for a const */
+ assert(!mpd_isshared_data(result)); /* illegal operation for a shared */
+ assert(MPD_MINALLOC <= result->alloc);
+
+ nwords = (nwords <= MPD_MINALLOC) ? MPD_MINALLOC : nwords;
+ if (nwords == result->alloc) {
+ return 1;
+ }
+ if (mpd_isstatic_data(result)) {
+ if (nwords > result->alloc) {
+ return mpd_switch_to_dyn_cxx(result, nwords);
+ }
+ return 1;
+ }
+
+ return mpd_realloc_dyn_cxx(result, nwords);
+}
+
/* Same as mpd_qresize, but the complete coefficient (including the old
* memory area!) is initialized to zero. */
ALWAYS_INLINE int
@@ -1192,7 +1218,7 @@ _c32setu64(mpd_t *result, uint64_t u, uint8_t sign, uint32_t *status)
result->data[i] = w[i];
}
- mpd_set_sign(result, sign);
+ mpd_set_flags(result, sign);
result->exp = 0;
result->len = len;
mpd_setdigits(result);
@@ -1244,6 +1270,26 @@ mpd_qset_i64(mpd_t *result, int64_t a, const mpd_context_t *ctx,
#endif
}
+/* quietly set a decimal from an int64_t, use a maxcontext for conversion */
+void
+mpd_qset_i64_exact(mpd_t *result, int64_t a, uint32_t *status)
+{
+ mpd_context_t maxcontext;
+
+ mpd_maxcontext(&maxcontext);
+#ifdef CONFIG_64
+ mpd_qset_ssize(result, a, &maxcontext, status);
+#else
+ _c32_qset_i64(result, a, &maxcontext, status);
+#endif
+
+ if (*status & (MPD_Inexact|MPD_Rounded|MPD_Clamped)) {
+ /* we want exact results */
+ mpd_seterror(result, MPD_Invalid_operation, status);
+ }
+ *status &= MPD_Errors;
+}
+
/* quietly set a decimal from a uint64_t */
void
mpd_qset_u64(mpd_t *result, uint64_t a, const mpd_context_t *ctx,
@@ -1255,8 +1301,27 @@ mpd_qset_u64(mpd_t *result, uint64_t a, const mpd_context_t *ctx,
_c32_qset_u64(result, a, ctx, status);
#endif
}
-#endif /* !LEGACY_COMPILER */
+/* quietly set a decimal from a uint64_t, use a maxcontext for conversion */
+void
+mpd_qset_u64_exact(mpd_t *result, uint64_t a, uint32_t *status)
+{
+ mpd_context_t maxcontext;
+
+ mpd_maxcontext(&maxcontext);
+#ifdef CONFIG_64
+ mpd_qset_uint(result, a, &maxcontext, status);
+#else
+ _c32_qset_u64(result, a, &maxcontext, status);
+#endif
+
+ if (*status & (MPD_Inexact|MPD_Rounded|MPD_Clamped)) {
+ /* we want exact results */
+ mpd_seterror(result, MPD_Invalid_operation, status);
+ }
+ *status &= MPD_Errors;
+}
+#endif /* !LEGACY_COMPILER */
/*
* Quietly get an mpd_uint_t from a decimal. Assumes
@@ -1345,11 +1410,13 @@ mpd_qabs_uint(const mpd_t *a, uint32_t *status)
mpd_ssize_t
mpd_qget_ssize(const mpd_t *a, uint32_t *status)
{
+ uint32_t workstatus = 0;
mpd_uint_t u;
int isneg;
- u = mpd_qabs_uint(a, status);
- if (*status&MPD_Invalid_operation) {
+ u = mpd_qabs_uint(a, &workstatus);
+ if (workstatus&MPD_Invalid_operation) {
+ *status |= workstatus;
return MPD_SSIZE_MAX;
}
@@ -1469,9 +1536,11 @@ mpd_qget_i64(const mpd_t *a, uint32_t *status)
uint32_t
mpd_qget_u32(const mpd_t *a, uint32_t *status)
{
- uint64_t x = mpd_qget_uint(a, status);
+ uint32_t workstatus = 0;
+ uint64_t x = mpd_qget_uint(a, &workstatus);
- if (*status&MPD_Invalid_operation) {
+ if (workstatus&MPD_Invalid_operation) {
+ *status |= workstatus;
return UINT32_MAX;
}
if (x > UINT32_MAX) {
@@ -1486,9 +1555,11 @@ mpd_qget_u32(const mpd_t *a, uint32_t *status)
int32_t
mpd_qget_i32(const mpd_t *a, uint32_t *status)
{
- int64_t x = mpd_qget_ssize(a, status);
+ uint32_t workstatus = 0;
+ int64_t x = mpd_qget_ssize(a, &workstatus);
- if (*status&MPD_Invalid_operation) {
+ if (workstatus&MPD_Invalid_operation) {
+ *status |= workstatus;
return INT32_MAX;
}
if (x < INT32_MIN || x > INT32_MAX) {
@@ -1504,14 +1575,20 @@ mpd_qget_i32(const mpd_t *a, uint32_t *status)
uint64_t
mpd_qget_u64(const mpd_t *a, uint32_t *status)
{
- return _c32_qget_u64(1, a, status);
+ uint32_t workstatus = 0;
+ uint64_t x = _c32_qget_u64(1, a, &workstatus);
+ *status |= workstatus;
+ return x;
}
/* quietly get an int64_t from a decimal */
int64_t
mpd_qget_i64(const mpd_t *a, uint32_t *status)
{
- return _c32_qget_i64(a, status);
+ uint32_t workstatus = 0;
+ int64_t x = _c32_qget_i64(a, &workstatus);
+ *status |= workstatus;
+ return x;
}
#endif
@@ -1937,6 +2014,25 @@ mpd_qcopy(mpd_t *result, const mpd_t *a, uint32_t *status)
return 1;
}
+/* Same as mpd_qcopy, but do not set the result to NaN on failure. */
+int
+mpd_qcopy_cxx(mpd_t *result, const mpd_t *a)
+{
+ if (result == a) return 1;
+
+ if (!mpd_qresize_cxx(result, a->len)) {
+ return 0;
+ }
+
+ mpd_copy_flags(result, a);
+ result->exp = a->exp;
+ result->digits = a->digits;
+ result->len = a->len;
+ memcpy(result->data, a->data, a->len * (sizeof *result->data));
+
+ return 1;
+}
+
/*
* Copy to a decimal with a static buffer. The caller has to make sure that
* the buffer is big enough. Cannot fail.
@@ -3780,7 +3876,72 @@ void
mpd_qdiv(mpd_t *q, const mpd_t *a, const mpd_t *b,
const mpd_context_t *ctx, uint32_t *status)
{
- _mpd_qdiv(SET_IDEAL_EXP, q, a, b, ctx, status);
+ MPD_NEW_STATIC(aa,0,0,0,0);
+ MPD_NEW_STATIC(bb,0,0,0,0);
+ uint32_t xstatus = 0;
+
+ if (q == a) {
+ if (!mpd_qcopy(&aa, a, status)) {
+ mpd_seterror(q, MPD_Malloc_error, status);
+ goto out;
+ }
+ a = &aa;
+ }
+
+ if (q == b) {
+ if (!mpd_qcopy(&bb, b, status)) {
+ mpd_seterror(q, MPD_Malloc_error, status);
+ goto out;
+ }
+ b = &bb;
+ }
+
+ _mpd_qdiv(SET_IDEAL_EXP, q, a, b, ctx, &xstatus);
+
+ if (xstatus & (MPD_Malloc_error|MPD_Division_impossible)) {
+ /* Inexact quotients (the usual case) fill the entire context precision,
+ * which can lead to the above errors for very high precisions. Retry
+ * the operation with a lower precision in case the result is exact.
+ *
+ * We need an upper bound for the number of digits of a_coeff / b_coeff
+ * when the result is exact. If a_coeff' * 1 / b_coeff' is in lowest
+ * terms, then maxdigits(a_coeff') + maxdigits(1 / b_coeff') is a suitable
+ * bound.
+ *
+ * 1 / b_coeff' is exact iff b_coeff' exclusively has prime factors 2 or 5.
+ * The largest number of digits is generated if b_coeff' is a power of 2 or
+ * a power of 5; in either case maxdigits(1 / b_coeff') is bounded by
+ * log2(b_coeff').
+ *
+ * We arrive at a total upper bound:
+ *
+ * maxdigits(a_coeff') + maxdigits(1 / b_coeff') <=
+ * log10(a_coeff) + log2(b_coeff) =
+ * log10(a_coeff) + log10(b_coeff) / log10(2) <=
+ * a->digits + b->digits * 4;
+ */
+ mpd_context_t workctx = *ctx;
+ uint32_t ystatus = 0;
+
+ workctx.prec = a->digits + b->digits * 4;
+ if (workctx.prec >= ctx->prec) {
+ *status |= (xstatus&MPD_Errors);
+ goto out; /* No point in retrying, keep the original error. */
+ }
+
+ _mpd_qdiv(SET_IDEAL_EXP, q, a, b, &workctx, &ystatus);
+ if (ystatus != 0) {
+ ystatus = *status | ((ystatus|xstatus)&MPD_Errors);
+ mpd_seterror(q, ystatus, status);
+ }
+ }
+ else {
+ *status |= xstatus;
+ }
+
+
+out:
+ mpd_del(&aa);
+ mpd_del(&bb);
}
/* Internal function. */
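
A quick numeric check of the retry bound used above (illustrative values, not taken from the patch):

    a = 1,  b = 512 = 2**9
    a/b = 0.001953125                          -> 7 coefficient digits
    a->digits + b->digits * 4 = 1 + 3*4 = 13  >= 7

So whenever the quotient is exact, it fits into the reduced retry precision.
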
@@ -3870,6 +4031,7 @@ _mpd_qdivmod(mpd_t *q, mpd_t *r, const mpd_t *a, const mpd_t *b,
}
if (b->len == 1) {
+ assert(b->data[0] != 0); /* annotation for scan-build */
if (a->len == 1) {
_mpd_div_word(&q->data[0], &r->data[0], a->data[0], b->data[0]);
}
@@ -6214,9 +6376,11 @@ _mpd_qpow_int(mpd_t *result, const mpd_t *base, const mpd_t *exp,
workctx.round = MPD_ROUND_HALF_EVEN;
workctx.clamp = 0;
if (mpd_isnegative(exp)) {
+ uint32_t workstatus = 0;
workctx.prec += 1;
- mpd_qdiv(&tbase, &one, base, &workctx, status);
- if (*status&MPD_Errors) {
+ mpd_qdiv(&tbase, &one, base, &workctx, &workstatus);
+ *status |= workstatus;
+ if (workstatus&MPD_Errors) {
mpd_setspecial(result, MPD_POS, MPD_NAN);
goto finish;
}
@@ -6951,6 +7115,8 @@ mpd_qrem_near(mpd_t *r, const mpd_t *a, const mpd_t *b,
mpd_ssize_t expdiff, qdigits;
int cmp, isodd, allnine;
+ assert(r != NULL); /* annotation for scan-build */
+
if (mpd_isspecial(a) || mpd_isspecial(b)) {
if (mpd_qcheck_nans(r, a, b, ctx, status)) {
return;
@@ -7181,6 +7347,11 @@ void
mpd_qtrunc(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx,
uint32_t *status)
{
+ if (mpd_isspecial(a)) {
+ mpd_seterror(result, MPD_Invalid_operation, status);
+ return;
+ }
+
(void)_mpd_qround_to_integral(TO_INT_TRUNC, result, a, ctx, status);
}
@@ -7189,6 +7360,12 @@ mpd_qfloor(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx,
uint32_t *status)
{
mpd_context_t workctx = *ctx;
+
+ if (mpd_isspecial(a)) {
+ mpd_seterror(result, MPD_Invalid_operation, status);
+ return;
+ }
+
workctx.round = MPD_ROUND_FLOOR;
(void)_mpd_qround_to_integral(TO_INT_SILENT, result, a,
&workctx, status);
@@ -7199,6 +7376,12 @@ mpd_qceil(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx,
uint32_t *status)
{
mpd_context_t workctx = *ctx;
+
+ if (mpd_isspecial(a)) {
+ mpd_seterror(result, MPD_Invalid_operation, status);
+ return;
+ }
+
workctx.round = MPD_ROUND_CEILING;
(void)_mpd_qround_to_integral(TO_INT_SILENT, result, a,
&workctx, status);
@@ -7702,9 +7885,9 @@ mpd_qinvroot(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx,
/* END LIBMPDEC_ONLY */
/* Algorithm from decimal.py */
-void
-mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx,
- uint32_t *status)
+static void
+_mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx,
+ uint32_t *status)
{
mpd_context_t maxcontext;
MPD_NEW_STATIC(c,0,0,0,0);
@@ -7836,6 +8019,56 @@ malloc_error:
goto out;
}
+void
+mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx,
+ uint32_t *status)
+{
+ MPD_NEW_STATIC(aa,0,0,0,0);
+ uint32_t xstatus = 0;
+
+ if (result == a) {
+ if (!mpd_qcopy(&aa, a, status)) {
+ mpd_seterror(result, MPD_Malloc_error, status);
+ goto out;
+ }
+ a = &aa;
+ }
+
+ _mpd_qsqrt(result, a, ctx, &xstatus);
+
+ if (xstatus & (MPD_Malloc_error|MPD_Division_impossible)) {
+ /* The above conditions can occur at very high context precisions
+ * if intermediate values get too large. Retry the operation with
+ * a lower context precision in case the result is exact.
+ *
+ * If the result is exact, an upper bound for the number of digits
+ * is the number of digits in the input.
+ *
+ * NOTE: sqrt(40e9) = 2.0e+5 /\ digits(40e9) = digits(2.0e+5) = 2
+ */
+ uint32_t ystatus = 0;
+ mpd_context_t workctx = *ctx;
+
+ workctx.prec = a->digits;
+ if (workctx.prec >= ctx->prec) {
+ *status |= (xstatus&MPD_Errors);
+ goto out; /* No point in repeating this, keep the original error. */
+ }
+
+ _mpd_qsqrt(result, a, &workctx, &ystatus);
+ if (ystatus != 0) {
+ ystatus = *status | ((xstatus|ystatus)&MPD_Errors);
+ mpd_seterror(result, ystatus, status);
+ }
+ }
+ else {
+ *status |= xstatus;
+ }
+
+out:
+ mpd_del(&aa);
+}
+
/******************************************************************************/
/* Base conversions */
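
The same retry idea as in mpd_qdiv applies here with a simpler bound: if the square root is exact, squaring its coefficient reproduces the input coefficient, so the result cannot have more digits than the input. Illustrative values (not from the patch):

    a       = 15241.383936    -> 11 coefficient digits
    sqrt(a) = 123.456         ->  6 coefficient digits <= 11
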
@@ -7847,6 +8080,7 @@ mpd_sizeinbase(const mpd_t *a, uint32_t base)
{
double x;
size_t digits;
+ double upper_bound;
assert(mpd_isinteger(a));
assert(base >= 2);
@@ -7863,10 +8097,14 @@ mpd_sizeinbase(const mpd_t *a, uint32_t base)
if (digits > 2711437152599294ULL) {
return SIZE_MAX;
}
+
+ upper_bound = (double)((1ULL<<53)-1);
+#else
+ upper_bound = (double)(SIZE_MAX-1);
#endif
x = (double)digits / log10(base);
- return (x > SIZE_MAX-1) ? SIZE_MAX : (size_t)x + 1;
+ return (x > upper_bound) ? SIZE_MAX : (size_t)x + 1;
}
/* Space needed to import a base 'base' integer of length 'srclen'. */
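
The new upper_bound only changes the overflow guard before the double-to-size_t cast; the size estimate itself is unchanged. Illustrative values:

    digits = 100, base = 16
    x = 100 / log10(16) ~= 83.05
    return value: (size_t)x + 1 = 84 digits in base 16
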
@@ -7874,6 +8112,7 @@ static mpd_ssize_t
_mpd_importsize(size_t srclen, uint32_t base)
{
double x;
+ double upper_bound;
assert(srclen > 0);
assert(base >= 2);
@@ -7882,10 +8121,15 @@ _mpd_importsize(size_t srclen, uint32_t base)
if (srclen > (1ULL<<53)) {
return MPD_SSIZE_MAX;
}
+
+ assert((1ULL<<53) <= MPD_MAXIMPORT);
+ upper_bound = (double)((1ULL<<53)-1);
+#else
+ upper_bound = MPD_MAXIMPORT-1;
#endif
x = (double)srclen * (log10(base)/MPD_RDIGITS);
- return (x >= MPD_MAXIMPORT) ? MPD_SSIZE_MAX : (mpd_ssize_t)x + 1;
+ return (x > upper_bound) ? MPD_SSIZE_MAX : (mpd_ssize_t)x + 1;
}
static uint8_t
@@ -8412,6 +8656,3 @@ mpd_qimport_u32(mpd_t *result,
mpd_qresize(result, result->len, status);
mpd_qfinalize(result, ctx, status);
}
-
-
-
diff --git a/Modules/_decimal/libmpdec/mpdecimal.h b/Modules/_decimal/libmpdec/mpdecimal.h
index a67dd9bc..2815a8cd 100644
--- a/Modules/_decimal/libmpdec/mpdecimal.h
+++ b/Modules/_decimal/libmpdec/mpdecimal.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,59 +26,45 @@
*/
-#ifndef MPDECIMAL_H
-#define MPDECIMAL_H
-
-
-#ifdef __cplusplus
-extern "C" {
- #ifndef __STDC_LIMIT_MACROS
- #define __STDC_LIMIT_MACROS
- #define MPD_CLEAR_STDC_LIMIT_MACROS
- #endif
-#endif
+#ifndef LIBMPDEC_MPDECIMAL_H_
+#define LIBMPDEC_MPDECIMAL_H_
#ifndef _MSC_VER
#include "pyconfig.h"
#endif
-#include
-#include
-#include
-#include
-#include
-#include
-#include
+#ifdef __cplusplus
+ #include
+ #include
+ #include
+ #include
+ #include
+extern "C" {
+#else
+ #include
+ #include
+ #include
+ #include
+ #include
+#endif
-#ifdef _MSC_VER
- #include "vccompat.h"
- #ifndef UNUSED
- #define UNUSED
- #endif
+
+#if (defined(__linux__) || defined(__FreeBSD__) || defined(__APPLE__)) && \
+ defined(__GNUC__) && __GNUC__ >= 4 && !defined(__INTEL_COMPILER)
+ #define MPD_PRAGMA(x) _Pragma(x)
+ #define MPD_HIDE_SYMBOLS_START "GCC visibility push(hidden)"
+ #define MPD_HIDE_SYMBOLS_END "GCC visibility pop"
+#else
#define MPD_PRAGMA(x)
#define MPD_HIDE_SYMBOLS_START
#define MPD_HIDE_SYMBOLS_END
+#endif
+
+#if defined(_MSC_VER)
+ #include "vccompat.h"
#define EXTINLINE extern inline
#else
- #ifndef __GNUC_STDC_INLINE__
- #define __GNUC_STDC_INLINE__ 1
- #endif
- #if defined(__GNUC__) && !defined(__INTEL_COMPILER)
- #define UNUSED __attribute__((unused))
- #else
- #define UNUSED
- #endif
- #if (defined(__linux__) || defined(__FreeBSD__) || defined(__APPLE__)) && \
- defined(__GNUC__) && __GNUC__ >= 4 && !defined(__INTEL_COMPILER)
- #define MPD_PRAGMA(x) _Pragma(x)
- #define MPD_HIDE_SYMBOLS_START "GCC visibility push(hidden)"
- #define MPD_HIDE_SYMBOLS_END "GCC visibility pop"
- #else
- #define MPD_PRAGMA(x)
- #define MPD_HIDE_SYMBOLS_START
- #define MPD_HIDE_SYMBOLS_END
- #endif
#define EXTINLINE
#endif
@@ -103,10 +89,10 @@ MPD_PRAGMA(MPD_HIDE_SYMBOLS_START)
/******************************************************************************/
#define MPD_MAJOR_VERSION 2
-#define MPD_MINOR_VERSION 4
-#define MPD_MICRO_VERSION 2
+#define MPD_MINOR_VERSION 5
+#define MPD_MICRO_VERSION 0
-#define MPD_VERSION "2.4.2"
+#define MPD_VERSION "2.5.0"
#define MPD_VERSION_HEX ((MPD_MAJOR_VERSION << 24) | \
(MPD_MINOR_VERSION << 16) | \
@@ -423,6 +409,7 @@ void mpd_print(const mpd_t *dec);
/* assignment from a string */
void mpd_qset_string(mpd_t *dec, const char *s, const mpd_context_t *ctx, uint32_t *status);
+void mpd_qset_string_exact(mpd_t *dec, const char *s, uint32_t *status);
/* set to NaN with error flags */
void mpd_seterror(mpd_t *result, uint32_t flags, uint32_t *status);
@@ -440,6 +427,8 @@ void mpd_qset_u32(mpd_t *result, uint32_t a, const mpd_context_t *ctx, uint32_t
#ifndef LEGACY_COMPILER
void mpd_qset_i64(mpd_t *result, int64_t a, const mpd_context_t *ctx, uint32_t *status);
void mpd_qset_u64(mpd_t *result, uint64_t a, const mpd_context_t *ctx, uint32_t *status);
+void mpd_qset_i64_exact(mpd_t *result, int64_t a, uint32_t *status);
+void mpd_qset_u64_exact(mpd_t *result, uint64_t a, uint32_t *status);
#endif
/* quietly assign a C integer type to an mpd_t with a static coefficient */
@@ -467,7 +456,8 @@ void mpd_qfinalize(mpd_t *result, const mpd_context_t *ctx, uint32_t *status);
const char *mpd_class(const mpd_t *a, const mpd_context_t *ctx);
-int mpd_qcopy(mpd_t *result, const mpd_t *a, uint32_t *status);
+int mpd_qcopy(mpd_t *result, const mpd_t *a, uint32_t *status);
+int mpd_qcopy_cxx(mpd_t *result, const mpd_t *a);
mpd_t *mpd_qncopy(const mpd_t *a);
int mpd_qcopy_abs(mpd_t *result, const mpd_t *a, uint32_t *status);
int mpd_qcopy_negate(mpd_t *result, const mpd_t *a, uint32_t *status);
@@ -721,7 +711,7 @@ EXTINLINE mpd_uint_t mpd_lsd(mpd_uint_t word);
EXTINLINE mpd_ssize_t mpd_digits_to_size(mpd_ssize_t digits);
/* number of digits in the exponent, undefined for MPD_SSIZE_MIN */
EXTINLINE int mpd_exp_digits(mpd_ssize_t exp);
-EXTINLINE int mpd_iscanonical(const mpd_t *dec UNUSED);
+EXTINLINE int mpd_iscanonical(const mpd_t *dec);
EXTINLINE int mpd_isfinite(const mpd_t *dec);
EXTINLINE int mpd_isinfinite(const mpd_t *dec);
EXTINLINE int mpd_isinteger(const mpd_t *dec);
@@ -833,15 +823,8 @@ MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */
#ifdef __cplusplus
- #ifdef MPD_CLEAR_STDC_LIMIT_MACROS
- #undef MPD_CLEAR_STDC_LIMIT_MACROS
- #undef __STDC_LIMIT_MACROS
- #endif
} /* END extern "C" */
#endif
-#endif /* MPDECIMAL_H */
-
-
-
+#endif /* LIBMPDEC_MPDECIMAL_H_ */
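
Since the version bump to 2.5.0 also adds new API (the *_exact setters and the *_cxx helpers), downstream code that needs it can guard on the version macros defined above. A minimal compile-time check (the error text is ours):

    #include "mpdecimal.h"

    #if MPD_VERSION_HEX < 0x02050000
      #error "libmpdec >= 2.5.0 required (mpd_qset_string_exact, mpd_qcopy_cxx, ...)"
    #endif
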
diff --git a/Modules/_decimal/libmpdec/numbertheory.c b/Modules/_decimal/libmpdec/numbertheory.c
index 4e035477..210e0deb 100644
--- a/Modules/_decimal/libmpdec/numbertheory.c
+++ b/Modules/_decimal/libmpdec/numbertheory.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -27,11 +27,13 @@
#include "mpdecimal.h"
-#include
+
#include
+#include
+
#include "bits.h"
-#include "umodarith.h"
#include "numbertheory.h"
+#include "umodarith.h"
/* Bignum: Initialize the Number Theoretic Transform. */
@@ -128,5 +130,3 @@ _mpd_init_w3table(mpd_uint_t w3table[3], int sign, int modnum)
w3table[1] = kernel;
w3table[2] = POWMOD(kernel, 2);
}
-
-
diff --git a/Modules/_decimal/libmpdec/numbertheory.h b/Modules/_decimal/libmpdec/numbertheory.h
index e94c1579..47b7753b 100644
--- a/Modules/_decimal/libmpdec/numbertheory.h
+++ b/Modules/_decimal/libmpdec/numbertheory.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,12 +26,12 @@
*/
-#ifndef NUMBER_THEORY_H
-#define NUMBER_THEORY_H
+#ifndef LIBMPDEC_NUMBERTHEORY_H_
+#define LIBMPDEC_NUMBERTHEORY_H_
-#include "constants.h"
#include "mpdecimal.h"
+#include "constants.h"
/* Internal header file: all symbols have local scope in the DSO */
@@ -73,6 +73,4 @@ std_setmodulus(int modnum, mpd_uint_t *umod)
MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */
-#endif
-
-
+#endif /* LIBMPDEC_NUMBERTHEORY_H_ */
diff --git a/Modules/_decimal/libmpdec/sixstep.c b/Modules/_decimal/libmpdec/sixstep.c
index 92d513eb..a4d1dbed 100644
--- a/Modules/_decimal/libmpdec/sixstep.c
+++ b/Modules/_decimal/libmpdec/sixstep.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -27,15 +27,17 @@
#include "mpdecimal.h"
-#include
-#include
+
#include
+#include
+
#include "bits.h"
+#include "constants.h"
#include "difradix2.h"
#include "numbertheory.h"
+#include "sixstep.h"
#include "transpose.h"
#include "umodarith.h"
-#include "sixstep.h"
/* Bignum: Cache efficient Matrix Fourier Transform for arrays of the
@@ -210,5 +212,3 @@ inv_six_step_fnt(mpd_uint_t *a, mpd_size_t n, int modnum)
return 1;
}
-
-
diff --git a/Modules/_decimal/libmpdec/sixstep.h b/Modules/_decimal/libmpdec/sixstep.h
index 4a8b015e..89b4a33a 100644
--- a/Modules/_decimal/libmpdec/sixstep.h
+++ b/Modules/_decimal/libmpdec/sixstep.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,12 +26,11 @@
*/
-#ifndef SIX_STEP_H
-#define SIX_STEP_H
+#ifndef LIBMPDEC_SIXSTEP_H_
+#define LIBMPDEC_SIXSTEP_H_
#include "mpdecimal.h"
-#include
/* Internal header file: all symbols have local scope in the DSO */
@@ -45,4 +44,4 @@ int inv_six_step_fnt(mpd_uint_t *a, mpd_size_t n, int modnum);
MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */
-#endif
+#endif /* LIBMPDEC_SIXSTEP_H_ */
diff --git a/Modules/_decimal/libmpdec/transpose.c b/Modules/_decimal/libmpdec/transpose.c
index 55d6d899..56321b5f 100644
--- a/Modules/_decimal/libmpdec/transpose.c
+++ b/Modules/_decimal/libmpdec/transpose.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -27,15 +27,17 @@
#include "mpdecimal.h"
+
+#include
+#include
#include
#include
#include
-#include
-#include
+
#include "bits.h"
#include "constants.h"
-#include "typearith.h"
#include "transpose.h"
+#include "typearith.h"
#define BUFSIZE 4096
@@ -272,5 +274,3 @@ transpose_pow2(mpd_uint_t *matrix, mpd_size_t rows, mpd_size_t cols)
return 1;
}
-
-
diff --git a/Modules/_decimal/libmpdec/transpose.h b/Modules/_decimal/libmpdec/transpose.h
index e1cd1fa1..e91c18d7 100644
--- a/Modules/_decimal/libmpdec/transpose.h
+++ b/Modules/_decimal/libmpdec/transpose.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,12 +26,11 @@
*/
-#ifndef TRANSPOSE_H
-#define TRANSPOSE_H
+#ifndef LIBMPDEC_TRANSPOSE_H_
+#define LIBMPDEC_TRANSPOSE_H_
#include "mpdecimal.h"
-#include
/* Internal header file: all symbols have local scope in the DSO */
@@ -59,4 +58,4 @@ static inline void pointerswap(mpd_uint_t **a, mpd_uint_t **b)
MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */
-#endif
+#endif /* LIBMPDEC_TRANSPOSE_H_ */
diff --git a/Modules/_decimal/libmpdec/typearith.h b/Modules/_decimal/libmpdec/typearith.h
index 405237da..47961788 100644
--- a/Modules/_decimal/libmpdec/typearith.h
+++ b/Modules/_decimal/libmpdec/typearith.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,12 +26,14 @@
*/
-#ifndef TYPEARITH_H
-#define TYPEARITH_H
+#ifndef LIBMPDEC_TYPEARITH_H_
+#define LIBMPDEC_TYPEARITH_H_
#include "mpdecimal.h"
+#include
+
/*****************************************************************************/
/* Low level native arithmetic on basic types */
@@ -663,7 +665,4 @@ mulmod_size_t(mpd_size_t a, mpd_size_t b, mpd_size_t m)
}
-#endif /* TYPEARITH_H */
-
-
-
+#endif /* LIBMPDEC_TYPEARITH_H_ */
diff --git a/Modules/_decimal/libmpdec/umodarith.h b/Modules/_decimal/libmpdec/umodarith.h
index 68d15188..d7dbbbe6 100644
--- a/Modules/_decimal/libmpdec/umodarith.h
+++ b/Modules/_decimal/libmpdec/umodarith.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,12 +26,13 @@
*/
-#ifndef UMODARITH_H
-#define UMODARITH_H
+#ifndef LIBMPDEC_UMODARITH_H_
+#define LIBMPDEC_UMODARITH_H_
-#include "constants.h"
#include "mpdecimal.h"
+
+#include "constants.h"
#include "typearith.h"
@@ -644,7 +645,4 @@ ppro_powmod(mpd_uint_t base, mpd_uint_t exp, double *dmod, uint32_t *dinvmod)
#endif /* CONFIG_32 */
-#endif /* UMODARITH_H */
-
-
-
+#endif /* LIBMPDEC_UMODARITH_H_ */
diff --git a/Modules/_decimal/libmpdec/vccompat.h b/Modules/_decimal/libmpdec/vccompat.h
index dd131d8d..e2e1c42c 100644
--- a/Modules/_decimal/libmpdec/vccompat.h
+++ b/Modules/_decimal/libmpdec/vccompat.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,14 +26,16 @@
*/
-#ifndef VCCOMPAT_H
-#define VCCOMPAT_H
+#ifndef LIBMPDEC_VCCOMPAT_H_
+#define LIBMPDEC_VCCOMPAT_H_
-/* Visual C fixes: no stdint.h, no snprintf ... */
+/* Visual C fixes: no snprintf ... */
#ifdef _MSC_VER
- #undef inline
- #define inline __inline
+ #ifndef __cplusplus
+ #undef inline
+ #define inline __inline
+ #endif
#undef random
#define random rand
#undef srandom
@@ -51,7 +53,4 @@
#endif
-#endif /* VCCOMPAT_H */
-
-
-
+#endif /* LIBMPDEC_VCCOMPAT_H_ */
diff --git a/Modules/_decimal/libmpdec/vcdiv64.asm b/Modules/_decimal/libmpdec/vcdiv64.asm
index 6b664567..597e9ba9 100644
--- a/Modules/_decimal/libmpdec/vcdiv64.asm
+++ b/Modules/_decimal/libmpdec/vcdiv64.asm
@@ -1,5 +1,5 @@
;
-; Copyright (c) 2008-2016 Stefan Krah. All rights reserved.
+; Copyright (c) 2008-2020 Stefan Krah. All rights reserved.
;
; Redistribution and use in source and binary forms, with or without
; modification, are permitted provided that the following conditions
@@ -44,5 +44,3 @@ _mpd_div_words PROC
_mpd_div_words ENDP
_TEXT ENDS
END
-
-
diff --git a/Modules/_decimal/libmpdec/vcstdint.h b/Modules/_decimal/libmpdec/vcstdint.h
deleted file mode 100644
index 17dcad45..00000000
--- a/Modules/_decimal/libmpdec/vcstdint.h
+++ /dev/null
@@ -1,232 +0,0 @@
-// ISO C9x compliant stdint.h for Microsoft Visual Studio
-// Based on ISO/IEC 9899:TC2 Committee draft (May 6, 2005) WG14/N1124
-//
-// Copyright (c) 2006-2008 Alexander Chemeris
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are met:
-//
-// 1. Redistributions of source code must retain the above copyright notice,
-// this list of conditions and the following disclaimer.
-//
-// 2. Redistributions in binary form must reproduce the above copyright
-// notice, this list of conditions and the following disclaimer in the
-// documentation and/or other materials provided with the distribution.
-//
-// 3. The name of the author may be used to endorse or promote products
-// derived from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
-// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
-// EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
-// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
-// OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-///////////////////////////////////////////////////////////////////////////////
-
-#ifndef _MSC_VER // [
-#error "Use this header only with Microsoft Visual C++ compilers!"
-#endif // _MSC_VER ]
-
-#ifndef _MSC_STDINT_H_ // [
-#define _MSC_STDINT_H_
-
-#if _MSC_VER > 1000
-#pragma once
-#endif
-
-#include
-
-// For Visual Studio 6 in C++ mode wrap <wchar.h> include with 'extern "C++" {}'
-// or compiler give many errors like this:
-// error C2733: second C linkage of overloaded function 'wmemchr' not allowed
-#if (_MSC_VER < 1300) && defined(__cplusplus)
- extern "C++" {
-#endif
-# include <wchar.h>
-#if (_MSC_VER < 1300) && defined(__cplusplus)
- }
-#endif
-
-// Define _W64 macros to mark types changing their size, like intptr_t.
-#ifndef _W64
-# if !defined(__midl) && (defined(_X86_) || defined(_M_IX86)) && _MSC_VER >= 1300
-# define _W64 __w64
-# else
-# define _W64
-# endif
-#endif
-
-
-// 7.18.1 Integer types
-
-// 7.18.1.1 Exact-width integer types
-typedef __int8 int8_t;
-typedef __int16 int16_t;
-typedef __int32 int32_t;
-typedef __int64 int64_t;
-typedef unsigned __int8 uint8_t;
-typedef unsigned __int16 uint16_t;
-typedef unsigned __int32 uint32_t;
-typedef unsigned __int64 uint64_t;
-
-// 7.18.1.2 Minimum-width integer types
-typedef int8_t int_least8_t;
-typedef int16_t int_least16_t;
-typedef int32_t int_least32_t;
-typedef int64_t int_least64_t;
-typedef uint8_t uint_least8_t;
-typedef uint16_t uint_least16_t;
-typedef uint32_t uint_least32_t;
-typedef uint64_t uint_least64_t;
-
-// 7.18.1.3 Fastest minimum-width integer types
-typedef int8_t int_fast8_t;
-typedef int16_t int_fast16_t;
-typedef int32_t int_fast32_t;
-typedef int64_t int_fast64_t;
-typedef uint8_t uint_fast8_t;
-typedef uint16_t uint_fast16_t;
-typedef uint32_t uint_fast32_t;
-typedef uint64_t uint_fast64_t;
-
-// 7.18.1.4 Integer types capable of holding object pointers
-#ifdef _WIN64 // [
- typedef __int64 intptr_t;
- typedef unsigned __int64 uintptr_t;
-#else // _WIN64 ][
- typedef _W64 int intptr_t;
- typedef _W64 unsigned int uintptr_t;
-#endif // _WIN64 ]
-
-// 7.18.1.5 Greatest-width integer types
-typedef int64_t intmax_t;
-typedef uint64_t uintmax_t;
-
-
-// 7.18.2 Limits of specified-width integer types
-
-#if !defined(__cplusplus) || defined(__STDC_LIMIT_MACROS) // [ See footnote 220 at page 257 and footnote 221 at page 259
-
-// 7.18.2.1 Limits of exact-width integer types
-#define INT8_MIN ((int8_t)_I8_MIN)
-#define INT8_MAX _I8_MAX
-#define INT16_MIN ((int16_t)_I16_MIN)
-#define INT16_MAX _I16_MAX
-#define INT32_MIN ((int32_t)_I32_MIN)
-#define INT32_MAX _I32_MAX
-#define INT64_MIN ((int64_t)_I64_MIN)
-#define INT64_MAX _I64_MAX
-#define UINT8_MAX _UI8_MAX
-#define UINT16_MAX _UI16_MAX
-#define UINT32_MAX _UI32_MAX
-#define UINT64_MAX _UI64_MAX
-
-// 7.18.2.2 Limits of minimum-width integer types
-#define INT_LEAST8_MIN INT8_MIN
-#define INT_LEAST8_MAX INT8_MAX
-#define INT_LEAST16_MIN INT16_MIN
-#define INT_LEAST16_MAX INT16_MAX
-#define INT_LEAST32_MIN INT32_MIN
-#define INT_LEAST32_MAX INT32_MAX
-#define INT_LEAST64_MIN INT64_MIN
-#define INT_LEAST64_MAX INT64_MAX
-#define UINT_LEAST8_MAX UINT8_MAX
-#define UINT_LEAST16_MAX UINT16_MAX
-#define UINT_LEAST32_MAX UINT32_MAX
-#define UINT_LEAST64_MAX UINT64_MAX
-
-// 7.18.2.3 Limits of fastest minimum-width integer types
-#define INT_FAST8_MIN INT8_MIN
-#define INT_FAST8_MAX INT8_MAX
-#define INT_FAST16_MIN INT16_MIN
-#define INT_FAST16_MAX INT16_MAX
-#define INT_FAST32_MIN INT32_MIN
-#define INT_FAST32_MAX INT32_MAX
-#define INT_FAST64_MIN INT64_MIN
-#define INT_FAST64_MAX INT64_MAX
-#define UINT_FAST8_MAX UINT8_MAX
-#define UINT_FAST16_MAX UINT16_MAX
-#define UINT_FAST32_MAX UINT32_MAX
-#define UINT_FAST64_MAX UINT64_MAX
-
-// 7.18.2.4 Limits of integer types capable of holding object pointers
-#ifdef _WIN64 // [
-# define INTPTR_MIN INT64_MIN
-# define INTPTR_MAX INT64_MAX
-# define UINTPTR_MAX UINT64_MAX
-#else // _WIN64 ][
-# define INTPTR_MIN INT32_MIN
-# define INTPTR_MAX INT32_MAX
-# define UINTPTR_MAX UINT32_MAX
-#endif // _WIN64 ]
-
-// 7.18.2.5 Limits of greatest-width integer types
-#define INTMAX_MIN INT64_MIN
-#define INTMAX_MAX INT64_MAX
-#define UINTMAX_MAX UINT64_MAX
-
-// 7.18.3 Limits of other integer types
-
-#ifdef _WIN64 // [
-# define PTRDIFF_MIN _I64_MIN
-# define PTRDIFF_MAX _I64_MAX
-#else // _WIN64 ][
-# define PTRDIFF_MIN _I32_MIN
-# define PTRDIFF_MAX _I32_MAX
-#endif // _WIN64 ]
-
-#define SIG_ATOMIC_MIN INT_MIN
-#define SIG_ATOMIC_MAX INT_MAX
-
-#ifndef SIZE_MAX // [
-# ifdef _WIN64 // [
-# define SIZE_MAX _UI64_MAX
-# else // _WIN64 ][
-# define SIZE_MAX _UI32_MAX
-# endif // _WIN64 ]
-#endif // SIZE_MAX ]
-
-// WCHAR_MIN and WCHAR_MAX are also defined in <wchar.h>
-#ifndef WCHAR_MIN // [
-# define WCHAR_MIN 0
-#endif // WCHAR_MIN ]
-#ifndef WCHAR_MAX // [
-# define WCHAR_MAX _UI16_MAX
-#endif // WCHAR_MAX ]
-
-#define WINT_MIN 0
-#define WINT_MAX _UI16_MAX
-
-#endif // __STDC_LIMIT_MACROS ]
-
-
-// 7.18.4 Limits of other integer types
-
-#if !defined(__cplusplus) || defined(__STDC_CONSTANT_MACROS) // [ See footnote 224 at page 260
-
-// 7.18.4.1 Macros for minimum-width integer constants
-
-#define INT8_C(val) val##i8
-#define INT16_C(val) val##i16
-#define INT32_C(val) val##i32
-#define INT64_C(val) val##i64
-
-#define UINT8_C(val) val##ui8
-#define UINT16_C(val) val##ui16
-#define UINT32_C(val) val##ui32
-#define UINT64_C(val) val##ui64
-
-// 7.18.4.2 Macros for greatest-width integer constants
-#define INTMAX_C INT64_C
-#define UINTMAX_C UINT64_C
-
-#endif // __STDC_CONSTANT_MACROS ]
-
-
-#endif // _MSC_STDINT_H_ ]
diff --git a/Modules/_decimal/tests/deccheck.py b/Modules/_decimal/tests/deccheck.py
index f907531e..5d9179e6 100644
--- a/Modules/_decimal/tests/deccheck.py
+++ b/Modules/_decimal/tests/deccheck.py
@@ -29,9 +29,20 @@
# Usage: python deccheck.py [--short|--medium|--long|--all]
#
-import sys, random
+
+import sys
+import os
+import time
+import random
from copy import copy
from collections import defaultdict
+
+import argparse
+import subprocess
+from subprocess import PIPE, STDOUT
+from queue import Queue, Empty
+from threading import Thread, Event, Lock
+
from test.support import import_fresh_module
from randdec import randfloat, all_unary, all_binary, all_ternary
from randdec import unary_optarg, binary_optarg, ternary_optarg
@@ -125,6 +136,12 @@ ContextFunctions = {
'special': ('context.__reduce_ex__', 'context.create_decimal_from_float')
}
+# Functions that set no context flags but whose result can differ depending
+# on prec, Emin and Emax.
+MaxContextSkip = ['is_normal', 'is_subnormal', 'logical_invert', 'next_minus',
+ 'next_plus', 'number_class', 'logical_and', 'logical_or',
+ 'logical_xor', 'next_toward', 'rotate', 'shift']
+
# Functions that require a restricted exponent range for reasonable runtimes.
UnaryRestricted = [
'__ceil__', '__floor__', '__int__', '__trunc__',
@@ -344,6 +361,20 @@ class TestSet(object):
self.pex = RestrictedList() # Python exceptions for P.Decimal
self.presults = RestrictedList() # P.Decimal results
+ # If the above results are exact, unrounded and not clamped, repeat
+ # the operation with a maxcontext to ensure that huge intermediate
+ # values do not cause a MemoryError.
+ self.with_maxcontext = False
+ self.maxcontext = context.c.copy()
+ self.maxcontext.prec = C.MAX_PREC
+ self.maxcontext.Emax = C.MAX_EMAX
+ self.maxcontext.Emin = C.MIN_EMIN
+ self.maxcontext.clear_flags()
+
+ self.maxop = RestrictedList() # converted C.Decimal operands
+ self.maxex = RestrictedList() # Python exceptions for C.Decimal
+ self.maxresults = RestrictedList() # C.Decimal results
+
# ======================================================================
# SkipHandler: skip known discrepancies
@@ -545,13 +576,17 @@ def function_as_string(t):
if t.contextfunc:
cargs = t.cop
pargs = t.pop
+ maxargs = t.maxop
cfunc = "c_func: %s(" % t.funcname
pfunc = "p_func: %s(" % t.funcname
+ maxfunc = "max_func: %s(" % t.funcname
else:
cself, cargs = t.cop[0], t.cop[1:]
pself, pargs = t.pop[0], t.pop[1:]
+ maxself, maxargs = t.maxop[0], t.maxop[1:]
cfunc = "c_func: %s.%s(" % (repr(cself), t.funcname)
pfunc = "p_func: %s.%s(" % (repr(pself), t.funcname)
+ maxfunc = "max_func: %s.%s(" % (repr(maxself), t.funcname)
err = cfunc
for arg in cargs:
@@ -565,6 +600,14 @@ def function_as_string(t):
err = err.rstrip(", ")
err += ")"
+ if t.with_maxcontext:
+ err += "\n"
+ err += maxfunc
+ for arg in maxargs:
+ err += "%s, " % repr(arg)
+ err = err.rstrip(", ")
+ err += ")"
+
return err
def raise_error(t):
@@ -577,9 +620,24 @@ def raise_error(t):
err = "Error in %s:\n\n" % t.funcname
err += "input operands: %s\n\n" % (t.op,)
err += function_as_string(t)
- err += "\n\nc_result: %s\np_result: %s\n\n" % (t.cresults, t.presults)
- err += "c_exceptions: %s\np_exceptions: %s\n\n" % (t.cex, t.pex)
- err += "%s\n\n" % str(t.context)
+
+ err += "\n\nc_result: %s\np_result: %s\n" % (t.cresults, t.presults)
+ if t.with_maxcontext:
+ err += "max_result: %s\n\n" % (t.maxresults)
+ else:
+ err += "\n"
+
+ err += "c_exceptions: %s\np_exceptions: %s\n" % (t.cex, t.pex)
+ if t.with_maxcontext:
+ err += "max_exceptions: %s\n\n" % t.maxex
+ else:
+ err += "\n"
+
+ err += "%s\n" % str(t.context)
+ if t.with_maxcontext:
+ err += "%s\n" % str(t.maxcontext)
+ else:
+ err += "\n"
raise VerifyError(err)
@@ -603,6 +661,13 @@ def raise_error(t):
# are printed to stdout.
# ======================================================================
+def all_nan(a):
+ if isinstance(a, C.Decimal):
+ return a.is_nan()
+ elif isinstance(a, tuple):
+ return all(all_nan(v) for v in a)
+ return False
+
def convert(t, convstr=True):
""" t is the testset. At this stage the testset contains a tuple of
operands t.op of various types. For decimal methods the first
@@ -617,10 +682,12 @@ def convert(t, convstr=True):
for i, op in enumerate(t.op):
context.clear_status()
+ t.maxcontext.clear_flags()
if op in RoundModes:
t.cop.append(op)
t.pop.append(op)
+ t.maxop.append(op)
elif not t.contextfunc and i == 0 or \
convstr and isinstance(op, str):
@@ -638,11 +705,25 @@ def convert(t, convstr=True):
p = None
pex = e.__class__
+ try:
+ C.setcontext(t.maxcontext)
+ maxop = C.Decimal(op)
+ maxex = None
+ except (TypeError, ValueError, OverflowError) as e:
+ maxop = None
+ maxex = e.__class__
+ finally:
+ C.setcontext(context.c)
+
t.cop.append(c)
t.cex.append(cex)
+
t.pop.append(p)
t.pex.append(pex)
+ t.maxop.append(maxop)
+ t.maxex.append(maxex)
+
if cex is pex:
if str(c) != str(p) or not context.assert_eq_status():
raise_error(t)
@@ -652,14 +733,21 @@ def convert(t, convstr=True):
else:
raise_error(t)
+ # The exceptions in the maxcontext operation can legitimately
+ # differ, only test that maxex implies cex:
+ if maxex is not None and cex is not maxex:
+ raise_error(t)
+
elif isinstance(op, Context):
t.context = op
t.cop.append(op.c)
t.pop.append(op.p)
+ t.maxop.append(t.maxcontext)
else:
t.cop.append(op)
t.pop.append(op)
+ t.maxop.append(op)
return 1
@@ -673,6 +761,7 @@ def callfuncs(t):
t.rc and t.rp are the results of the operation.
"""
context.clear_status()
+ t.maxcontext.clear_flags()
try:
if t.contextfunc:
@@ -700,6 +789,35 @@ def callfuncs(t):
t.rp = None
t.pex.append(e.__class__)
+ # If the above results are exact, unrounded, normal etc., repeat the
+ # operation with a maxcontext to ensure that huge intermediate values
+ # do not cause a MemoryError.
+ if (t.funcname not in MaxContextSkip and
+ not context.c.flags[C.InvalidOperation] and
+ not context.c.flags[C.Inexact] and
+ not context.c.flags[C.Rounded] and
+ not context.c.flags[C.Subnormal] and
+ not context.c.flags[C.Clamped] and
+ not context.clamp and # results are padded to context.prec if context.clamp==1.
+ not any(isinstance(v, C.Context) for v in t.cop)): # another context is used.
+ t.with_maxcontext = True
+ try:
+ if t.contextfunc:
+ maxargs = t.maxop
+ t.rmax = getattr(t.maxcontext, t.funcname)(*maxargs)
+ else:
+ maxself = t.maxop[0]
+ maxargs = t.maxop[1:]
+ try:
+ C.setcontext(t.maxcontext)
+ t.rmax = getattr(maxself, t.funcname)(*maxargs)
+ finally:
+ C.setcontext(context.c)
+ t.maxex.append(None)
+ except (TypeError, ValueError, OverflowError, MemoryError) as e:
+ t.rmax = None
+ t.maxex.append(e.__class__)
+
def verify(t, stat):
""" t is the testset. At this stage the testset contains the following
tuples:
@@ -714,6 +832,9 @@ def verify(t, stat):
"""
t.cresults.append(str(t.rc))
t.presults.append(str(t.rp))
+ if t.with_maxcontext:
+ t.maxresults.append(str(t.rmax))
+
if isinstance(t.rc, C.Decimal) and isinstance(t.rp, P.Decimal):
# General case: both results are Decimals.
t.cresults.append(t.rc.to_eng_string())
@@ -725,6 +846,12 @@ def verify(t, stat):
t.presults.append(str(t.rp.imag))
t.presults.append(str(t.rp.real))
+ if t.with_maxcontext and isinstance(t.rmax, C.Decimal):
+ t.maxresults.append(t.rmax.to_eng_string())
+ t.maxresults.append(t.rmax.as_tuple())
+ t.maxresults.append(str(t.rmax.imag))
+ t.maxresults.append(str(t.rmax.real))
+
nc = t.rc.number_class().lstrip('+-s')
stat[nc] += 1
else:
@@ -732,6 +859,9 @@ def verify(t, stat):
if not isinstance(t.rc, tuple) and not isinstance(t.rp, tuple):
if t.rc != t.rp:
raise_error(t)
+ if t.with_maxcontext and not isinstance(t.rmax, tuple):
+ if t.rmax != t.rc:
+ raise_error(t)
stat[type(t.rc).__name__] += 1
# The return value lists must be equal.
@@ -744,6 +874,20 @@ def verify(t, stat):
if not t.context.assert_eq_status():
raise_error(t)
+ if t.with_maxcontext:
+ # NaN payloads etc. depend on precision and clamp.
+ if all_nan(t.rc) and all_nan(t.rmax):
+ return
+ # The return value lists must be equal.
+ if t.maxresults != t.cresults:
+ raise_error(t)
+ # The Python exception lists (TypeError, etc.) must be equal.
+ if t.maxex != t.cex:
+ raise_error(t)
+ # The context flags must be equal.
+ if t.maxcontext.flags != t.context.c.flags:
+ raise_error(t)
+
# ======================================================================
# Main test loops
@@ -991,18 +1135,35 @@ def check_untested(funcdict, c_cls, p_cls):
funcdict['untested'] = tuple(sorted(intersect-tested))
- #for key in ('untested', 'c_only', 'p_only'):
- # s = 'Context' if c_cls == C.Context else 'Decimal'
- # print("\n%s %s:\n%s" % (s, key, funcdict[key]))
+ # for key in ('untested', 'c_only', 'p_only'):
+ # s = 'Context' if c_cls == C.Context else 'Decimal'
+ # print("\n%s %s:\n%s" % (s, key, funcdict[key]))
if __name__ == '__main__':
- import time
+ parser = argparse.ArgumentParser(prog="deccheck.py")
+
+ group = parser.add_mutually_exclusive_group()
+ group.add_argument('--short', dest='time', action="store_const", const='short', default='short', help="short test (default)")
+ group.add_argument('--medium', dest='time', action="store_const", const='medium', default='short', help="medium test (reasonable run time)")
+ group.add_argument('--long', dest='time', action="store_const", const='long', default='short', help="long test (long run time)")
+ group.add_argument('--all', dest='time', action="store_const", const='all', default='short', help="all tests (excessive run time)")
+
+ group = parser.add_mutually_exclusive_group()
+ group.add_argument('--single', dest='single', nargs=1, default=False, metavar="TEST", help="run a single test")
+ group.add_argument('--multicore', dest='multicore', action="store_true", default=False, help="use all available cores")
+
+ args = parser.parse_args()
+ assert args.single is False or args.multicore is False
+ if args.single:
+ args.single = args.single[0]
+
randseed = int(time.time())
random.seed(randseed)
+
# Set up the testspecs list. A testspec is simply a dictionary
# that determines the amount of different contexts that 'test_method'
# will generate.
@@ -1035,17 +1196,17 @@ if __name__ == '__main__':
{'prec': [34], 'expts': [(-6143, 6144)], 'clamp': 1, 'iter': None}
]
- if '--medium' in sys.argv:
+ if args.time == 'medium':
base['expts'].append(('rand', 'rand'))
# 5 random precisions
base['samples'] = 5
testspecs = [small] + ieee + [base]
- if '--long' in sys.argv:
+ elif args.time == 'long':
base['expts'].append(('rand', 'rand'))
# 10 random precisions
base['samples'] = 10
testspecs = [small] + ieee + [base]
- elif '--all' in sys.argv:
+ elif args.time == 'all':
base['expts'].append(('rand', 'rand'))
# All precisions in [1, 100]
base['samples'] = 100
@@ -1062,39 +1223,100 @@ if __name__ == '__main__':
small['expts'] = [(-prec, prec)]
testspecs = [small, rand_ieee, base]
+
check_untested(Functions, C.Decimal, P.Decimal)
check_untested(ContextFunctions, C.Context, P.Context)
- log("\n\nRandom seed: %d\n\n", randseed)
+ if args.multicore:
+ q = Queue()
+ elif args.single:
+ log("Random seed: %d", randseed)
+ else:
+ log("\n\nRandom seed: %d\n\n", randseed)
+
+
+ FOUND_METHOD = False
+ def do_single(method, f):
+ global FOUND_METHOD
+ if args.multicore:
+ q.put(method)
+ elif not args.single or args.single == method:
+ FOUND_METHOD = True
+ f()
# Decimal methods:
for method in Functions['unary'] + Functions['unary_ctx'] + \
Functions['unary_rnd_ctx']:
- test_method(method, testspecs, test_unary)
+ do_single(method, lambda: test_method(method, testspecs, test_unary))
for method in Functions['binary'] + Functions['binary_ctx']:
- test_method(method, testspecs, test_binary)
+ do_single(method, lambda: test_method(method, testspecs, test_binary))
for method in Functions['ternary'] + Functions['ternary_ctx']:
- test_method(method, testspecs, test_ternary)
+ name = '__powmod__' if method == '__pow__' else method
+ do_single(name, lambda: test_method(method, testspecs, test_ternary))
- test_method('__format__', testspecs, test_format)
- test_method('__round__', testspecs, test_round)
- test_method('from_float', testspecs, test_from_float)
- test_method('quantize', testspecs, test_quantize_api)
+ do_single('__format__', lambda: test_method('__format__', testspecs, test_format))
+ do_single('__round__', lambda: test_method('__round__', testspecs, test_round))
+ do_single('from_float', lambda: test_method('from_float', testspecs, test_from_float))
+ do_single('quantize_api', lambda: test_method('quantize', testspecs, test_quantize_api))
# Context methods:
for method in ContextFunctions['unary']:
- test_method(method, testspecs, test_unary)
+ do_single(method, lambda: test_method(method, testspecs, test_unary))
for method in ContextFunctions['binary']:
- test_method(method, testspecs, test_binary)
+ do_single(method, lambda: test_method(method, testspecs, test_binary))
for method in ContextFunctions['ternary']:
- test_method(method, testspecs, test_ternary)
-
- test_method('context.create_decimal_from_float', testspecs, test_from_float)
-
-
- sys.exit(EXIT_STATUS)
+ name = 'context.powmod' if method == 'context.power' else method
+ do_single(name, lambda: test_method(method, testspecs, test_ternary))
+
+ do_single('context.create_decimal_from_float',
+ lambda: test_method('context.create_decimal_from_float',
+ testspecs, test_from_float))
+
+ if args.multicore:
+ error = Event()
+ write_lock = Lock()
+
+ def write_output(out, returncode):
+ if returncode != 0:
+ error.set()
+
+ with write_lock:
+ sys.stdout.buffer.write(out + b"\n")
+ sys.stdout.buffer.flush()
+
+ def tfunc():
+ while not error.is_set():
+ try:
+ test = q.get(block=False, timeout=-1)
+ except Empty:
+ return
+
+ cmd = [sys.executable, "deccheck.py", "--%s" % args.time, "--single", test]
+ p = subprocess.Popen(cmd, stdout=PIPE, stderr=STDOUT)
+ out, _ = p.communicate()
+ write_output(out, p.returncode)
+
+ N = os.cpu_count()
+ t = N * [None]
+
+ for i in range(N):
+ t[i] = Thread(target=tfunc)
+ t[i].start()
+
+ for i in range(N):
+ t[i].join()
+
+ sys.exit(1 if error.is_set() else 0)
+
+ elif args.single:
+ if not FOUND_METHOD:
+ log("\nerror: cannot find method \"%s\"" % args.single)
+ EXIT_STATUS = 1
+ sys.exit(EXIT_STATUS)
+ else:
+ sys.exit(EXIT_STATUS)
diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c
index a96e3f43..2c92a8ae 100644
--- a/Modules/_elementtree.c
+++ b/Modules/_elementtree.c
@@ -14,7 +14,7 @@
#define PY_SSIZE_T_CLEAN
#include "Python.h"
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
/* -------------------------------------------------------------------- */
/* configuration */
@@ -101,7 +101,13 @@ static struct PyModuleDef elementtreemodule;
/* Given a module object (assumed to be _elementtree), get its per-module
* state.
*/
-#define ET_STATE(mod) ((elementtreestate *) PyModule_GetState(mod))
+static inline elementtreestate*
+get_elementtree_state(PyObject *module)
+{
+ void *state = PyModule_GetState(module);
+ assert(state != NULL);
+ return (elementtreestate *)state;
+}
/* Find the module instance imported in the currently running sub-interpreter
* and get its state.
@@ -112,7 +118,7 @@ static struct PyModuleDef elementtreemodule;
static int
elementtree_clear(PyObject *m)
{
- elementtreestate *st = ET_STATE(m);
+ elementtreestate *st = get_elementtree_state(m);
Py_CLEAR(st->parseerror_obj);
Py_CLEAR(st->deepcopy_obj);
Py_CLEAR(st->elementpath_obj);
@@ -124,7 +130,7 @@ elementtree_clear(PyObject *m)
static int
elementtree_traverse(PyObject *m, visitproc visit, void *arg)
{
- elementtreestate *st = ET_STATE(m);
+ elementtreestate *st = get_elementtree_state(m);
Py_VISIT(st->parseerror_obj);
Py_VISIT(st->deepcopy_obj);
Py_VISIT(st->elementpath_obj);
@@ -170,7 +176,7 @@ is_empty_dict(PyObject *obj)
typedef struct {
- /* attributes (a dictionary object), or None if no attributes */
+ /* attributes (a dictionary object), or NULL if no attributes */
PyObject* attrib;
/* child elements */
@@ -209,7 +215,7 @@ typedef struct {
} ElementObject;
-#define Element_CheckExact(op) (Py_TYPE(op) == &Element_Type)
+#define Element_CheckExact(op) Py_IS_TYPE(op, &Element_Type)
#define Element_Check(op) PyObject_TypeCheck(op, &Element_Type)
@@ -225,10 +231,7 @@ create_extra(ElementObject* self, PyObject* attrib)
return -1;
}
- if (!attrib)
- attrib = Py_None;
-
- Py_INCREF(attrib);
+ Py_XINCREF(attrib);
self->extra->attrib = attrib;
self->extra->length = 0;
@@ -246,7 +249,7 @@ dealloc_extra(ElementObjectExtra *extra)
if (!extra)
return;
- Py_DECREF(extra->attrib);
+ Py_XDECREF(extra->attrib);
for (i = 0; i < extra->length; i++)
Py_DECREF(extra->children[i]);
@@ -300,7 +303,7 @@ create_new_element(PyObject* tag, PyObject* attrib)
ALLOC(sizeof(ElementObject), "create element");
PyObject_GC_Track(self);
- if (attrib != Py_None && !is_empty_dict(attrib)) {
+ if (attrib != NULL && !is_empty_dict(attrib)) {
if (create_extra(self, attrib) < 0) {
Py_DECREF(self);
return NULL;
@@ -530,13 +533,9 @@ element_get_attrib(ElementObject* self)
PyObject* res = self->extra->attrib;
- if (res == Py_None) {
+ if (!res) {
/* create missing dictionary */
- res = PyDict_New();
- if (!res)
- return NULL;
- Py_DECREF(Py_None);
- self->extra->attrib = res;
+ res = self->extra->attrib = PyDict_New();
}
return res;
@@ -616,12 +615,10 @@ subelement(PyObject *self, PyObject *args, PyObject *kwds)
return NULL;
} else {
/* no attrib arg, no kwds, so no attribute */
- Py_INCREF(Py_None);
- attrib = Py_None;
}
elem = create_new_element(tag, attrib);
- Py_DECREF(attrib);
+ Py_XDECREF(attrib);
if (elem == NULL)
return NULL;
@@ -736,7 +733,7 @@ _elementtree_Element___copy___impl(ElementObject *self)
ElementObject* element;
element = (ElementObject*) create_new_element(
- self->tag, (self->extra) ? self->extra->attrib : Py_None);
+ self->tag, self->extra ? self->extra->attrib : NULL);
if (!element)
return NULL;
@@ -792,21 +789,20 @@ _elementtree_Element___deepcopy___impl(ElementObject *self, PyObject *memo)
if (!tag)
return NULL;
- if (self->extra) {
+ if (self->extra && self->extra->attrib) {
attrib = deepcopy(self->extra->attrib, memo);
if (!attrib) {
Py_DECREF(tag);
return NULL;
}
} else {
- Py_INCREF(Py_None);
- attrib = Py_None;
+ attrib = NULL;
}
element = (ElementObject*) create_new_element(tag, attrib);
Py_DECREF(tag);
- Py_DECREF(attrib);
+ Py_XDECREF(attrib);
if (!element)
return NULL;
@@ -963,7 +959,7 @@ _elementtree_Element___getstate___impl(ElementObject *self)
PyList_SET_ITEM(children, i, child);
}
- if (self->extra && self->extra->attrib != Py_None) {
+ if (self->extra && self->extra->attrib) {
attrib = self->extra->attrib;
Py_INCREF(attrib);
}
@@ -1037,9 +1033,9 @@ element_setstate_from_attributes(ElementObject *self,
assert(self->extra);
assert(self->extra->allocated >= nchildren);
if (oldextra) {
- assert(self->extra->attrib == Py_None);
+ assert(self->extra->attrib == NULL);
self->extra->attrib = oldextra->attrib;
- oldextra->attrib = Py_None;
+ oldextra->attrib = NULL;
}
/* Copy children */
@@ -1065,10 +1061,8 @@ element_setstate_from_attributes(ElementObject *self,
}
/* Stash attrib. */
- if (attrib) {
- Py_INCREF(attrib);
- Py_XSETREF(self->extra->attrib, attrib);
- }
+ Py_XINCREF(attrib);
+ Py_XSETREF(self->extra->attrib, attrib);
dealloc_extra(oldextra);
Py_RETURN_NONE;
@@ -1138,7 +1132,7 @@ checkpath(PyObject* tag)
if (PyUnicode_Check(tag)) {
const Py_ssize_t len = PyUnicode_GET_LENGTH(tag);
- void *data = PyUnicode_DATA(tag);
+ const void *data = PyUnicode_DATA(tag);
unsigned int kind = PyUnicode_KIND(tag);
if (len >= 3 && PyUnicode_READ(kind, data, 0) == '{' && (
PyUnicode_READ(kind, data, 1) == '}' || (
@@ -1159,7 +1153,7 @@ checkpath(PyObject* tag)
return 0;
}
if (PyBytes_Check(tag)) {
- char *p = PyBytes_AS_STRING(tag);
+ const char *p = PyBytes_AS_STRING(tag);
const Py_ssize_t len = PyBytes_GET_SIZE(tag);
if (len >= 3 && p[0] == '{' && (
p[1] == '}' || (p[1] == '*' && p[2] == '}'))) {
@@ -1401,7 +1395,7 @@ _elementtree_Element_get_impl(ElementObject *self, PyObject *key,
{
PyObject* value;
- if (!self->extra || self->extra->attrib == Py_None)
+ if (!self->extra || !self->extra->attrib)
value = default_value;
else {
value = PyDict_GetItemWithError(self->extra->attrib, key);
@@ -1417,42 +1411,6 @@ _elementtree_Element_get_impl(ElementObject *self, PyObject *key,
return value;
}
-/*[clinic input]
-_elementtree.Element.getchildren
-
-[clinic start generated code]*/
-
-static PyObject *
-_elementtree_Element_getchildren_impl(ElementObject *self)
-/*[clinic end generated code: output=e50ffe118637b14f input=0f754dfded150d5f]*/
-{
- Py_ssize_t i;
- PyObject* list;
-
- if (PyErr_WarnEx(PyExc_DeprecationWarning,
- "This method will be removed in future versions. "
- "Use 'list(elem)' or iteration over elem instead.",
- 1) < 0) {
- return NULL;
- }
-
- if (!self->extra)
- return PyList_New(0);
-
- list = PyList_New(self->extra->length);
- if (!list)
- return NULL;
-
- for (i = 0; i < self->extra->length; i++) {
- PyObject* item = self->extra->children[i];
- Py_INCREF(item);
- PyList_SET_ITEM(list, i, item);
- }
-
- return list;
-}
-
-
static PyObject *
create_elementiter(ElementObject *self, PyObject *tag, int gettext);
@@ -1483,27 +1441,6 @@ _elementtree_Element_iter_impl(ElementObject *self, PyObject *tag)
}
-/*[clinic input]
-_elementtree.Element.getiterator
-
- tag: object = None
-
-[clinic start generated code]*/
-
-static PyObject *
-_elementtree_Element_getiterator_impl(ElementObject *self, PyObject *tag)
-/*[clinic end generated code: output=cb69ff4a3742dfa1 input=500da1a03f7b9e28]*/
-{
- if (PyErr_WarnEx(PyExc_DeprecationWarning,
- "This method will be removed in future versions. "
- "Use 'tree.iter()' or 'list(tree.iter())' instead.",
- 1) < 0) {
- return NULL;
- }
- return _elementtree_Element_iter_impl(self, tag);
-}
-
-
/*[clinic input]
_elementtree.Element.itertext
@@ -1586,7 +1523,7 @@ static PyObject *
_elementtree_Element_items_impl(ElementObject *self)
/*[clinic end generated code: output=6db2c778ce3f5a4d input=adbe09aaea474447]*/
{
- if (!self->extra || self->extra->attrib == Py_None)
+ if (!self->extra || !self->extra->attrib)
return PyList_New(0);
return PyDict_Items(self->extra->attrib);
@@ -1601,7 +1538,7 @@ static PyObject *
_elementtree_Element_keys_impl(ElementObject *self)
/*[clinic end generated code: output=bc5bfabbf20eeb3c input=f02caf5b496b5b0b]*/
{
- if (!self->extra || self->extra->attrib == Py_None)
+ if (!self->extra || !self->extra->attrib)
return PyList_New(0);
return PyDict_Keys(self->extra->attrib);
@@ -1620,7 +1557,7 @@ element_length(ElementObject* self)
_elementtree.Element.makeelement
tag: object
- attrib: object
+ attrib: object(subclass_of='&PyDict_Type')
/
[clinic start generated code]*/
@@ -1628,7 +1565,7 @@ _elementtree.Element.makeelement
static PyObject *
_elementtree_Element_makeelement_impl(ElementObject *self, PyObject *tag,
PyObject *attrib)
-/*[clinic end generated code: output=4109832d5bb789ef input=9480d1d2e3e68235]*/
+/*[clinic end generated code: output=4109832d5bb789ef input=2279d974529c3861]*/
{
PyObject* elem;
@@ -2100,12 +2037,18 @@ static int
element_attrib_setter(ElementObject *self, PyObject *value, void *closure)
{
_VALIDATE_ATTR_VALUE(value);
+ if (!PyDict_Check(value)) {
+ PyErr_Format(PyExc_TypeError,
+ "attrib must be dict, not %.200s",
+ value->ob_type->tp_name);
+ return -1;
+ }
if (!self->extra) {
if (create_extra(self, NULL) < 0)
return -1;
}
Py_INCREF(value);
- Py_SETREF(self->extra->attrib, value);
+ Py_XSETREF(self->extra->attrib, value);
return 0;
}
@@ -2365,8 +2308,6 @@ create_elementiter(ElementObject *self, PyObject *tag, int gettext)
Py_INCREF(self);
it->root_element = self;
- PyObject_GC_Track(it);
-
it->parent_stack = PyMem_New(ParentLocator, INIT_PARENT_STACK_SIZE);
if (it->parent_stack == NULL) {
Py_DECREF(it);
@@ -2376,6 +2317,8 @@ create_elementiter(ElementObject *self, PyObject *tag, int gettext)
it->parent_stack_used = 0;
it->parent_stack_size = INIT_PARENT_STACK_SIZE;
+ PyObject_GC_Track(it);
+
return (PyObject *)it;
}
@@ -2414,7 +2357,7 @@ typedef struct {
char insert_pis;
} TreeBuilderObject;
-#define TreeBuilder_CheckExact(op) (Py_TYPE(op) == &TreeBuilder_Type)
+#define TreeBuilder_CheckExact(op) Py_IS_TYPE((op), &TreeBuilder_Type)
/* -------------------------------------------------------------------- */
/* constructor and destructor */
@@ -2702,7 +2645,7 @@ treebuilder_add_subelement(PyObject *element, PyObject *child)
}
else {
PyObject *res;
- res = _PyObject_CallMethodIdObjArgs(element, &PyId_append, child, NULL);
+ res = _PyObject_CallMethodIdOneArg(element, &PyId_append, child);
if (res == NULL)
return -1;
Py_DECREF(res);
@@ -2719,7 +2662,7 @@ treebuilder_append_event(TreeBuilderObject *self, PyObject *action,
PyObject *event = PyTuple_Pack(2, action, node);
if (event == NULL)
return -1;
- res = _PyObject_FastCall(self->events_append, &event, 1);
+ res = PyObject_CallOneArg(self->events_append, event);
Py_DECREF(event);
if (res == NULL)
return -1;
@@ -2745,7 +2688,7 @@ treebuilder_handle_start(TreeBuilderObject* self, PyObject* tag,
if (!self->element_factory) {
node = create_new_element(tag, attrib);
- } else if (attrib == Py_None) {
+ } else if (attrib == NULL) {
attrib = PyDict_New();
if (!attrib)
return NULL;
@@ -2885,7 +2828,7 @@ treebuilder_handle_comment(TreeBuilderObject* self, PyObject* text)
}
if (self->comment_factory) {
- comment = _PyObject_FastCall(self->comment_factory, &text, 1);
+ comment = PyObject_CallOneArg(self->comment_factory, text);
if (!comment)
return NULL;
@@ -3086,7 +3029,7 @@ _elementtree_TreeBuilder_close_impl(TreeBuilderObject *self)
_elementtree.TreeBuilder.start
tag: object
- attrs: object = None
+ attrs: object(subclass_of='&PyDict_Type')
/
[clinic start generated code]*/
@@ -3094,7 +3037,7 @@ _elementtree.TreeBuilder.start
static PyObject *
_elementtree_TreeBuilder_start_impl(TreeBuilderObject *self, PyObject *tag,
PyObject *attrs)
-/*[clinic end generated code: output=e7e9dc2861349411 input=95fc1758dd042c65]*/
+/*[clinic end generated code: output=e7e9dc2861349411 input=7288e9e38e63b2b6]*/
{
return treebuilder_handle_start(self, tag, attrs);
}
@@ -3227,7 +3170,7 @@ expat_set_error(enum XML_Error error_code, Py_ssize_t line, Py_ssize_t column,
if (errmsg == NULL)
return;
- error = _PyObject_FastCall(st->parseerror_obj, &errmsg, 1);
+ error = PyObject_CallOneArg(st->parseerror_obj, errmsg);
Py_DECREF(errmsg);
if (!error)
return;
@@ -3290,7 +3233,7 @@ expat_default_handler(XMLParserObject* self, const XML_Char* data_in,
(TreeBuilderObject*) self->target, value
);
else if (self->handle_data)
- res = _PyObject_FastCall(self->handle_data, &value, 1);
+ res = PyObject_CallOneArg(self->handle_data, value);
else
res = NULL;
Py_XDECREF(res);
@@ -3354,8 +3297,7 @@ expat_start_handler(XMLParserObject* self, const XML_Char* tag_in,
attrib_in += 2;
}
} else {
- Py_INCREF(Py_None);
- attrib = Py_None;
+ attrib = NULL;
}
if (TreeBuilder_CheckExact(self->target)) {
@@ -3364,8 +3306,7 @@ expat_start_handler(XMLParserObject* self, const XML_Char* tag_in,
tag, attrib);
}
else if (self->handle_start) {
- if (attrib == Py_None) {
- Py_DECREF(attrib);
+ if (attrib == NULL) {
attrib = PyDict_New();
if (!attrib) {
Py_DECREF(tag);
@@ -3378,7 +3319,7 @@ expat_start_handler(XMLParserObject* self, const XML_Char* tag_in,
res = NULL;
Py_DECREF(tag);
- Py_DECREF(attrib);
+ Py_XDECREF(attrib);
Py_XDECREF(res);
}
@@ -3401,7 +3342,7 @@ expat_data_handler(XMLParserObject* self, const XML_Char* data_in,
/* shortcut */
res = treebuilder_handle_data((TreeBuilderObject*) self->target, data);
else if (self->handle_data)
- res = _PyObject_FastCall(self->handle_data, &data, 1);
+ res = PyObject_CallOneArg(self->handle_data, data);
else
res = NULL;
@@ -3428,7 +3369,7 @@ expat_end_handler(XMLParserObject* self, const XML_Char* tag_in)
else if (self->handle_end) {
tag = makeuniversal(self, tag_in);
if (tag) {
- res = _PyObject_FastCall(self->handle_end, &tag, 1);
+ res = PyObject_CallOneArg(self->handle_end, tag);
Py_DECREF(tag);
}
}
@@ -3515,7 +3456,7 @@ expat_end_ns_handler(XMLParserObject* self, const XML_Char* prefix_in)
if (!prefix)
return;
- res = _PyObject_FastCall(self->handle_end_ns, &prefix, 1);
+ res = PyObject_CallOneArg(self->handle_end_ns, prefix);
Py_DECREF(prefix);
}
@@ -3547,7 +3488,7 @@ expat_comment_handler(XMLParserObject* self, const XML_Char* comment_in)
if (!comment)
return;
- res = _PyObject_FastCall(self->handle_comment, &comment, 1);
+ res = PyObject_CallOneArg(self->handle_comment, comment);
Py_XDECREF(res);
Py_DECREF(comment);
}
@@ -3938,7 +3879,7 @@ _elementtree_XMLParser_close_impl(XMLParserObject *self)
}
else if (self->handle_close) {
Py_DECREF(res);
- return _PyObject_CallNoArg(self->handle_close);
+ return PyObject_CallNoArgs(self->handle_close);
}
else {
return res;
@@ -4235,9 +4176,6 @@ static PyMethodDef element_methods[] = {
_ELEMENTTREE_ELEMENT_ITERTEXT_METHODDEF
_ELEMENTTREE_ELEMENT_ITERFIND_METHODDEF
- _ELEMENTTREE_ELEMENT_GETITERATOR_METHODDEF
- _ELEMENTTREE_ELEMENT_GETCHILDREN_METHODDEF
-
_ELEMENTTREE_ELEMENT_ITEMS_METHODDEF
_ELEMENTTREE_ELEMENT_KEYS_METHODDEF
@@ -4469,7 +4407,7 @@ PyInit__elementtree(void)
m = PyModule_Create(&elementtreemodule);
if (!m)
return NULL;
- st = ET_STATE(m);
+ st = get_elementtree_state(m);
if (!(temp = PyImport_ImportModule("copy")))
return NULL;
@@ -4505,16 +4443,22 @@ PyInit__elementtree(void)
"xml.etree.ElementTree.ParseError", PyExc_SyntaxError, NULL
);
Py_INCREF(st->parseerror_obj);
- PyModule_AddObject(m, "ParseError", st->parseerror_obj);
-
- Py_INCREF((PyObject *)&Element_Type);
- PyModule_AddObject(m, "Element", (PyObject *)&Element_Type);
+ if (PyModule_AddObject(m, "ParseError", st->parseerror_obj) < 0) {
+ Py_DECREF(st->parseerror_obj);
+ return NULL;
+ }
- Py_INCREF((PyObject *)&TreeBuilder_Type);
- PyModule_AddObject(m, "TreeBuilder", (PyObject *)&TreeBuilder_Type);
+ PyTypeObject *types[] = {
+ &Element_Type,
+ &TreeBuilder_Type,
+ &XMLParser_Type
+ };
- Py_INCREF((PyObject *)&XMLParser_Type);
- PyModule_AddObject(m, "XMLParser", (PyObject *)&XMLParser_Type);
+ for (size_t i = 0; i < Py_ARRAY_LENGTH(types); i++) {
+ if (PyModule_AddType(m, types[i]) < 0) {
+ return NULL;
+ }
+ }
return m;
}
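Note: the _elementtree hunks above replace the ET_STATE() macro with a static inline accessor that asserts the per-module state pointer is non-NULL before casting it. Below is a minimal sketch of that pattern for a hypothetical extension module; the names exstate, get_ex_state and cached_obj are illustrative and not taken from the patch.

#include <Python.h>
#include <assert.h>

typedef struct {
    PyObject *cached_obj;              /* example per-module object */
} exstate;

static inline exstate *
get_ex_state(PyObject *module)
{
    void *state = PyModule_GetState(module);
    assert(state != NULL);             /* m_size below reserves the space */
    return (exstate *)state;
}

static int
ex_traverse(PyObject *module, visitproc visit, void *arg)
{
    Py_VISIT(get_ex_state(module)->cached_obj);
    return 0;
}

static int
ex_clear(PyObject *module)
{
    Py_CLEAR(get_ex_state(module)->cached_obj);
    return 0;
}

static struct PyModuleDef ex_module = {
    PyModuleDef_HEAD_INIT,
    "ex",                              /* hypothetical module name */
    NULL,                              /* no docstring */
    sizeof(exstate),                   /* m_size: per-module state */
    NULL,                              /* no methods */
    NULL,                              /* no slots */
    ex_traverse,
    ex_clear,
    NULL                               /* m_free */
};

PyMODINIT_FUNC
PyInit_ex(void)
{
    return PyModule_Create(&ex_module);
}

The typed accessor keeps the cast in one place, and the assert catches calls made before the module object exists, which is the same reasoning as the get_elementtree_state() change above.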
diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c
index a101363b..d158d3ba 100644
--- a/Modules/_functoolsmodule.c
+++ b/Modules/_functoolsmodule.c
@@ -1,8 +1,7 @@
#include "Python.h"
-#include "pycore_pymem.h"
-#include "pycore_pystate.h"
+#include "pycore_pystate.h" // _PyThreadState_GET()
#include "pycore_tupleobject.h"
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
/* _functools module written and maintained
by Hye-Shik Chang
@@ -18,13 +17,15 @@ typedef struct {
PyObject *fn;
PyObject *args;
PyObject *kw;
- PyObject *dict;
+ PyObject *dict; /* __dict__ */
PyObject *weakreflist; /* List of weak references */
- int use_fastcall;
+ vectorcallfunc vectorcall;
} partialobject;
static PyTypeObject partial_type;
+static void partial_setvectorcall(partialobject *pto);
+
static PyObject *
partial_new(PyTypeObject *type, PyObject *args, PyObject *kw)
{
@@ -39,7 +40,7 @@ partial_new(PyTypeObject *type, PyObject *args, PyObject *kw)
pargs = pkw = NULL;
func = PyTuple_GET_ITEM(args, 0);
- if (Py_TYPE(func) == &partial_type && type == &partial_type) {
+ if (Py_IS_TYPE(func, &partial_type) && type == &partial_type) {
partialobject *part = (partialobject *)func;
if (part->dict == NULL) {
pargs = part->args;
@@ -107,8 +108,7 @@ partial_new(PyTypeObject *type, PyObject *args, PyObject *kw)
return NULL;
}
- pto->use_fastcall = (_PyVectorcall_Function(func) != NULL);
-
+ partial_setvectorcall(pto);
return (PyObject *)pto;
}
@@ -126,77 +126,114 @@ partial_dealloc(partialobject *pto)
Py_TYPE(pto)->tp_free(pto);
}
+
+/* Merging keyword arguments using the vectorcall convention is messy, so
+ * if we would need to do that, we stop using vectorcall and fall back
+ * to using partial_call() instead. */
+_Py_NO_INLINE static PyObject *
+partial_vectorcall_fallback(PyThreadState *tstate, partialobject *pto,
+ PyObject *const *args, size_t nargsf,
+ PyObject *kwnames)
+{
+ pto->vectorcall = NULL;
+ Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
+ return _PyObject_MakeTpCall(tstate, (PyObject *)pto,
+ args, nargs, kwnames);
+}
+
static PyObject *
-partial_fastcall(partialobject *pto, PyObject **args, Py_ssize_t nargs,
- PyObject *kwargs)
+partial_vectorcall(partialobject *pto, PyObject *const *args,
+ size_t nargsf, PyObject *kwnames)
{
- PyObject *small_stack[_PY_FASTCALL_SMALL_STACK];
- PyObject *ret;
- PyObject **stack, **stack_buf = NULL;
- Py_ssize_t nargs2, pto_nargs;
+ PyThreadState *tstate = _PyThreadState_GET();
- pto_nargs = PyTuple_GET_SIZE(pto->args);
- nargs2 = pto_nargs + nargs;
+ /* pto->kw is mutable, so need to check every time */
+ if (PyDict_GET_SIZE(pto->kw)) {
+ return partial_vectorcall_fallback(tstate, pto, args, nargsf, kwnames);
+ }
- if (pto_nargs == 0) {
- stack = args;
+ Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
+ Py_ssize_t nargs_total = nargs;
+ if (kwnames != NULL) {
+ nargs_total += PyTuple_GET_SIZE(kwnames);
}
- else if (nargs == 0) {
- stack = _PyTuple_ITEMS(pto->args);
+
+ PyObject **pto_args = _PyTuple_ITEMS(pto->args);
+ Py_ssize_t pto_nargs = PyTuple_GET_SIZE(pto->args);
+
+ /* Fast path if we're called without arguments */
+ if (nargs_total == 0) {
+ return _PyObject_VectorcallTstate(tstate, pto->fn,
+ pto_args, pto_nargs, NULL);
+ }
+
+ /* Fast path using PY_VECTORCALL_ARGUMENTS_OFFSET to prepend a single
+ * positional argument */
+ if (pto_nargs == 1 && (nargsf & PY_VECTORCALL_ARGUMENTS_OFFSET)) {
+ PyObject **newargs = (PyObject **)args - 1;
+ PyObject *tmp = newargs[0];
+ newargs[0] = pto_args[0];
+ PyObject *ret = _PyObject_VectorcallTstate(tstate, pto->fn,
+ newargs, nargs + 1, kwnames);
+ newargs[0] = tmp;
+ return ret;
+ }
+
+ Py_ssize_t newnargs_total = pto_nargs + nargs_total;
+
+ PyObject *small_stack[_PY_FASTCALL_SMALL_STACK];
+ PyObject *ret;
+ PyObject **stack;
+
+ if (newnargs_total <= (Py_ssize_t)Py_ARRAY_LENGTH(small_stack)) {
+ stack = small_stack;
}
else {
- if (nargs2 <= (Py_ssize_t)Py_ARRAY_LENGTH(small_stack)) {
- stack = small_stack;
- }
- else {
- stack_buf = PyMem_Malloc(nargs2 * sizeof(PyObject *));
- if (stack_buf == NULL) {
- PyErr_NoMemory();
- return NULL;
- }
- stack = stack_buf;
+ stack = PyMem_Malloc(newnargs_total * sizeof(PyObject *));
+ if (stack == NULL) {
+ PyErr_NoMemory();
+ return NULL;
}
-
- /* use borrowed references */
- memcpy(stack,
- _PyTuple_ITEMS(pto->args),
- pto_nargs * sizeof(PyObject*));
- memcpy(&stack[pto_nargs],
- args,
- nargs * sizeof(PyObject*));
}
- ret = _PyObject_FastCallDict(pto->fn, stack, nargs2, kwargs);
- PyMem_Free(stack_buf);
+ /* Copy to new stack, using borrowed references */
+ memcpy(stack, pto_args, pto_nargs * sizeof(PyObject*));
+ memcpy(stack + pto_nargs, args, nargs_total * sizeof(PyObject*));
+
+ ret = _PyObject_VectorcallTstate(tstate, pto->fn,
+ stack, pto_nargs + nargs, kwnames);
+ if (stack != small_stack) {
+ PyMem_Free(stack);
+ }
return ret;
}
-static PyObject *
-partial_call_impl(partialobject *pto, PyObject *args, PyObject *kwargs)
+/* Set pto->vectorcall depending on the parameters of the partial object */
+static void
+partial_setvectorcall(partialobject *pto)
{
- PyObject *ret, *args2;
-
- /* Note: tupleconcat() is optimized for empty tuples */
- args2 = PySequence_Concat(pto->args, args);
- if (args2 == NULL) {
- return NULL;
+ if (PyVectorcall_Function(pto->fn) == NULL) {
+ /* Don't use vectorcall if the underlying function doesn't support it */
+ pto->vectorcall = NULL;
+ }
+ /* We could have a special case if there are no arguments,
+ * but that is unlikely (why use partial without arguments?),
+ * so we don't optimize that */
+ else {
+ pto->vectorcall = (vectorcallfunc)partial_vectorcall;
}
- assert(PyTuple_Check(args2));
-
- ret = PyObject_Call(pto->fn, args2, kwargs);
- Py_DECREF(args2);
- return ret;
}
+
static PyObject *
partial_call(partialobject *pto, PyObject *args, PyObject *kwargs)
{
- PyObject *kwargs2, *res;
-
- assert (PyCallable_Check(pto->fn));
- assert (PyTuple_Check(pto->args));
- assert (PyDict_Check(pto->kw));
+ assert(PyCallable_Check(pto->fn));
+ assert(PyTuple_Check(pto->args));
+ assert(PyDict_Check(pto->kw));
+ /* Merge keywords */
+ PyObject *kwargs2;
if (PyDict_GET_SIZE(pto->kw) == 0) {
/* kwargs can be NULL */
kwargs2 = kwargs;
@@ -219,16 +256,16 @@ partial_call(partialobject *pto, PyObject *args, PyObject *kwargs)
}
}
-
- if (pto->use_fastcall) {
- res = partial_fastcall(pto,
- _PyTuple_ITEMS(args),
- PyTuple_GET_SIZE(args),
- kwargs2);
- }
- else {
- res = partial_call_impl(pto, args, kwargs2);
+ /* Merge positional arguments */
+ /* Note: tupleconcat() is optimized for empty tuples */
+ PyObject *args2 = PySequence_Concat(pto->args, args);
+ if (args2 == NULL) {
+ Py_XDECREF(kwargs2);
+ return NULL;
}
+
+ PyObject *res = PyObject_Call(pto->fn, args2, kwargs2);
+ Py_DECREF(args2);
Py_XDECREF(kwargs2);
return res;
}
@@ -365,17 +402,19 @@ partial_setstate(partialobject *pto, PyObject *state)
Py_INCREF(dict);
Py_INCREF(fn);
- pto->use_fastcall = (_PyVectorcall_Function(fn) != NULL);
Py_SETREF(pto->fn, fn);
Py_SETREF(pto->args, fnargs);
Py_SETREF(pto->kw, kw);
Py_XSETREF(pto->dict, dict);
+ partial_setvectorcall(pto);
Py_RETURN_NONE;
}
static PyMethodDef partial_methods[] = {
{"__reduce__", (PyCFunction)partial_reduce, METH_NOARGS},
{"__setstate__", (PyCFunction)partial_setstate, METH_O},
+ {"__class_getitem__", (PyCFunction)Py_GenericAlias,
+ METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
{NULL, NULL} /* sentinel */
};
@@ -386,7 +425,7 @@ static PyTypeObject partial_type = {
0, /* tp_itemsize */
/* methods */
(destructor)partial_dealloc, /* tp_dealloc */
- 0, /* tp_vectorcall_offset */
+ offsetof(partialobject, vectorcall),/* tp_vectorcall_offset */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_as_async */
@@ -401,7 +440,8 @@ static PyTypeObject partial_type = {
PyObject_GenericSetAttr, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
- Py_TPFLAGS_BASETYPE, /* tp_flags */
+ Py_TPFLAGS_BASETYPE |
+ Py_TPFLAGS_HAVE_VECTORCALL, /* tp_flags */
partial_doc, /* tp_doc */
(traverseproc)partial_traverse, /* tp_traverse */
0, /* tp_clear */
@@ -534,7 +574,7 @@ keyobject_richcompare(PyObject *ko, PyObject *other, int op)
PyObject *answer;
PyObject* stack[2];
- if (Py_TYPE(other) != &keyobject_type){
+ if (!Py_IS_TYPE(other, &keyobject_type)) {
PyErr_Format(PyExc_TypeError, "other argument must be K instance");
return NULL;
}
@@ -610,7 +650,7 @@ functools_reduce(PyObject *self, PyObject *args)
for (;;) {
PyObject *op2;
- if (args->ob_refcnt > 1) {
+ if (Py_REFCNT(args) > 1) {
Py_DECREF(args);
if ((args = PyTuple_New(2)) == NULL)
goto Fail;
@@ -627,7 +667,7 @@ functools_reduce(PyObject *self, PyObject *args)
result = op2;
else {
/* Update the args tuple in-place */
- assert(args->ob_refcnt == 1);
+ assert(Py_REFCNT(args) == 1);
Py_XSETREF(_PyTuple_ITEMS(args)[0], result);
Py_XSETREF(_PyTuple_ITEMS(args)[1], op2);
if ((result = PyObject_Call(func, args, NULL)) == NULL) {
@@ -743,6 +783,7 @@ typedef struct lru_cache_object {
Py_ssize_t misses;
PyObject *cache_info_type;
PyObject *dict;
+ PyObject *weakreflist;
} lru_cache_object;
static PyTypeObject lru_cache_type;
@@ -1155,6 +1196,8 @@ lru_cache_new(PyTypeObject *type, PyObject *args, PyObject *kw)
obj->maxsize = maxsize;
Py_INCREF(cache_info_type);
obj->cache_info_type = cache_info_type;
+ obj->dict = NULL;
+ obj->weakreflist = NULL;
return (PyObject *)obj;
}
@@ -1186,6 +1229,8 @@ lru_cache_dealloc(lru_cache_object *obj)
lru_list_elem *list;
/* bpo-31095: UnTrack is needed before calling any callbacks */
PyObject_GC_UnTrack(obj);
+ if (obj->weakreflist != NULL)
+ PyObject_ClearWeakRefs((PyObject*)obj);
list = lru_cache_unlink_list(obj);
Py_XDECREF(obj->cache);
@@ -1343,7 +1388,8 @@ static PyTypeObject lru_cache_type = {
(traverseproc)lru_cache_tp_traverse,/* tp_traverse */
(inquiry)lru_cache_tp_clear, /* tp_clear */
0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
+ offsetof(lru_cache_object, weakreflist),
+ /* tp_weaklistoffset */
0, /* tp_iter */
0, /* tp_iternext */
lru_cache_methods, /* tp_methods */
@@ -1361,10 +1407,10 @@ static PyTypeObject lru_cache_type = {
/* module level code ********************************************************/
-PyDoc_STRVAR(module_doc,
+PyDoc_STRVAR(_functools_doc,
"Tools that operate on functions.");
-static PyMethodDef module_methods[] = {
+static PyMethodDef _functools_methods[] = {
{"reduce", functools_reduce, METH_VARARGS, functools_reduce_doc},
{"cmp_to_key", (PyCFunction)(void(*)(void))functools_cmp_to_key,
METH_VARARGS | METH_KEYWORDS, functools_cmp_to_key_doc},
@@ -1372,53 +1418,56 @@ static PyMethodDef module_methods[] = {
};
static void
-module_free(void *m)
+_functools_free(void *m)
{
- Py_CLEAR(kwd_mark);
+ // FIXME: Do not clear kwd_mark, to avoid a NULL pointer dereference if other
+ // module instances still use it. This can be fixed once PEP 573 lands and
+ // kwd_mark can move to per-module state.
+ // Py_CLEAR(kwd_mark);
}
-static struct PyModuleDef _functoolsmodule = {
- PyModuleDef_HEAD_INIT,
- "_functools",
- module_doc,
- -1,
- module_methods,
- NULL,
- NULL,
- NULL,
- module_free,
-};
-
-PyMODINIT_FUNC
-PyInit__functools(void)
+static int
+_functools_exec(PyObject *module)
{
- int i;
- PyObject *m;
- const char *name;
PyTypeObject *typelist[] = {
&partial_type,
- &lru_cache_type,
- NULL
+ &lru_cache_type
};
- m = PyModule_Create(&_functoolsmodule);
- if (m == NULL)
- return NULL;
-
- kwd_mark = _PyObject_CallNoArg((PyObject *)&PyBaseObject_Type);
if (!kwd_mark) {
- Py_DECREF(m);
- return NULL;
+ kwd_mark = _PyObject_CallNoArg((PyObject *)&PyBaseObject_Type);
+ if (!kwd_mark) {
+ return -1;
+ }
}
- for (i=0 ; typelist[i] != NULL ; i++) {
- if (PyType_Ready(typelist[i]) < 0) {
- Py_DECREF(m);
- return NULL;
+ for (size_t i = 0; i < Py_ARRAY_LENGTH(typelist); i++) {
+ if (PyModule_AddType(module, typelist[i]) < 0) {
+ return -1;
}
- name = _PyType_Name(typelist[i]);
- Py_INCREF(typelist[i]);
- PyModule_AddObject(m, name, (PyObject *)typelist[i]);
}
- return m;
+ return 0;
+}
+
+static struct PyModuleDef_Slot _functools_slots[] = {
+ {Py_mod_exec, _functools_exec},
+ {0, NULL}
+};
+
+static struct PyModuleDef _functools_module = {
+ PyModuleDef_HEAD_INIT,
+ "_functools",
+ _functools_doc,
+ 0,
+ _functools_methods,
+ _functools_slots,
+ NULL,
+ NULL,
+ _functools_free,
+};
+
+PyMODINIT_FUNC
+PyInit__functools(void)
+{
+ return PyModuleDef_Init(&_functools_module);
}
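The _functools hunks above convert the module to multi-phase initialization (PEP 489): PyInit__functools() now only calls PyModuleDef_Init(), and the real setup runs from a Py_mod_exec slot. What follows is a minimal sketch of that same pattern for a hypothetical "_example" extension; every name in it is illustrative and not part of the patch.

/* Sketch only: a hypothetical "_example" extension using the multi-phase
 * initialization pattern (PEP 489) adopted by _functools above. */
#include "Python.h"

static int
_example_exec(PyObject *module)
{
    /* Per-module setup; return -1 (with an exception set) on error. */
    if (PyModule_AddIntConstant(module, "ANSWER", 42) < 0) {
        return -1;
    }
    return 0;
}

static PyModuleDef_Slot _example_slots[] = {
    {Py_mod_exec, _example_exec},
    {0, NULL}
};

static struct PyModuleDef _example_module = {
    PyModuleDef_HEAD_INIT,
    "_example",                 /* m_name */
    "Illustrative module.",     /* m_doc */
    0,                          /* m_size: no per-module state here */
    NULL,                       /* m_methods */
    _example_slots,             /* m_slots */
    NULL, NULL, NULL            /* m_traverse, m_clear, m_free */
};

PyMODINIT_FUNC
PyInit__example(void)
{
    /* With slots present, the init function just hands the definition to
     * the import machinery; execution happens later via Py_mod_exec. */
    return PyModuleDef_Init(&_example_module);
}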
diff --git a/Modules/_gdbmmodule.c b/Modules/_gdbmmodule.c
index 77e78875..dd4c6b16 100644
--- a/Modules/_gdbmmodule.c
+++ b/Modules/_gdbmmodule.c
@@ -36,7 +36,7 @@ values() methods are not supported.");
typedef struct {
PyObject_HEAD
- int di_size; /* -1 means recompute */
+ Py_ssize_t di_size; /* -1 means recompute */
GDBM_FILE di_dbm;
} dbmobject;
@@ -44,7 +44,7 @@ static PyTypeObject Dbmtype;
#include "clinic/_gdbmmodule.c.h"
-#define is_dbmobject(v) (Py_TYPE(v) == &Dbmtype)
+#define is_dbmobject(v) Py_IS_TYPE(v, &Dbmtype)
#define check_dbmobject_open(v) if ((v)->di_dbm == NULL) \
{ PyErr_SetString(DbmError, "GDBM object has already been closed"); \
return NULL; }
@@ -102,19 +102,39 @@ dbm_length(dbmobject *dp)
return -1;
}
if (dp->di_size < 0) {
+#if GDBM_VERSION_MAJOR >= 1 && GDBM_VERSION_MINOR >= 11
+ errno = 0;
+ gdbm_count_t count;
+ if (gdbm_count(dp->di_dbm, &count) == -1) {
+ if (errno != 0) {
+ PyErr_SetFromErrno(DbmError);
+ }
+ else {
+ PyErr_SetString(DbmError, gdbm_strerror(gdbm_errno));
+ }
+ return -1;
+ }
+ if (count > PY_SSIZE_T_MAX) {
+ PyErr_SetString(PyExc_OverflowError, "count exceeds PY_SSIZE_T_MAX");
+ return -1;
+ }
+ dp->di_size = count;
+#else
datum key,okey;
- int size;
okey.dsize=0;
okey.dptr=NULL;
- size = 0;
- for (key=gdbm_firstkey(dp->di_dbm); key.dptr;
+ Py_ssize_t size = 0;
+ for (key = gdbm_firstkey(dp->di_dbm); key.dptr;
key = gdbm_nextkey(dp->di_dbm,okey)) {
size++;
- if(okey.dsize) free(okey.dptr);
+ if (okey.dsize) {
+ free(okey.dptr);
+ }
okey=key;
}
dp->di_size = size;
+#endif
}
return dp->di_size;
}
@@ -349,7 +369,7 @@ dbm_contains(PyObject *self, PyObject *arg)
else if (!PyBytes_Check(arg)) {
PyErr_Format(PyExc_TypeError,
"gdbm key must be bytes or string, not %.100s",
- arg->ob_type->tp_name);
+ Py_TYPE(arg)->tp_name);
return -1;
}
else {
@@ -497,7 +517,7 @@ static PyObject *
dbm__exit__(PyObject *self, PyObject *args)
{
_Py_IDENTIFIER(close);
- return _PyObject_CallMethodId(self, &PyId_close, NULL);
+ return _PyObject_CallMethodIdNoArgs(self, &PyId_close);
}
static PyMethodDef dbm_methods[] = {
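A recurring change in the hunks above and below is replacing direct field access (arg->ob_type, args->ob_refcnt, Py_TYPE(v) == &Dbmtype) with the accessor macros Py_TYPE(), Py_REFCNT() and Py_IS_TYPE(). A small sketch of the idiom; the helper names and the spam_type parameter are hypothetical.

/* Sketch only: the accessor idiom used throughout this patch instead of
 * reading PyObject fields directly. */
#include "Python.h"
#include <stdio.h>

static int
is_spam(PyObject *op, PyTypeObject *spam_type)
{
    /* Py_IS_TYPE(op, tp) is the preferred spelling of Py_TYPE(op) == tp. */
    return Py_IS_TYPE(op, spam_type);
}

static void
describe(PyObject *op)
{
    /* Py_TYPE() and Py_REFCNT() replace op->ob_type and op->ob_refcnt. */
    printf("%s object, refcount %zd\n",
           Py_TYPE(op)->tp_name, Py_REFCNT(op));
}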
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c
index edadbcb3..adc86537 100644
--- a/Modules/_hashopenssl.c
+++ b/Modules/_hashopenssl.c
@@ -14,7 +14,6 @@
#define PY_SSIZE_T_CLEAN
#include "Python.h"
-#include "structmember.h"
#include "hashlib.h"
#include "pystrhex.h"
@@ -22,10 +21,13 @@
/* EVP is the preferred interface to hashing in OpenSSL */
#include <openssl/evp.h>
#include <openssl/hmac.h>
+#include <openssl/crypto.h>
/* We use the object interface to discover what hashes OpenSSL supports. */
#include <openssl/objects.h>
#include "openssl/err.h"
+#include <openssl/crypto.h>   // FIPS_mode()
+
#ifndef OPENSSL_THREADS
# error "OPENSSL_THREADS is not defined, Python requires thread-safe OpenSSL"
#endif
@@ -34,6 +36,32 @@
/* OpenSSL < 1.1.0 */
#define EVP_MD_CTX_new EVP_MD_CTX_create
#define EVP_MD_CTX_free EVP_MD_CTX_destroy
+
+HMAC_CTX *
+HMAC_CTX_new(void)
+{
+ HMAC_CTX *ctx = OPENSSL_malloc(sizeof(HMAC_CTX));
+ if (ctx != NULL) {
+ memset(ctx, 0, sizeof(HMAC_CTX));
+ HMAC_CTX_init(ctx);
+ }
+ return ctx;
+}
+
+void
+HMAC_CTX_free(HMAC_CTX *ctx)
+{
+ if (ctx != NULL) {
+ HMAC_CTX_cleanup(ctx);
+ OPENSSL_free(ctx);
+ }
+}
+
+const EVP_MD *
+HMAC_CTX_get_md(const HMAC_CTX *ctx)
+{
+ return ctx->md;
+}
#endif
#define MUNCH_SIZE INT_MAX
@@ -50,21 +78,44 @@
#define PY_OPENSSL_HAS_BLAKE2 1
#endif
+static PyModuleDef _hashlibmodule;
+
+typedef struct {
+ PyTypeObject *EVPtype;
+ PyTypeObject *HMACtype;
+#ifdef PY_OPENSSL_HAS_SHAKE
+ PyTypeObject *EVPXOFtype;
+#endif
+} _hashlibstate;
+
+static inline _hashlibstate*
+get_hashlib_state(PyObject *module)
+{
+ void *state = PyModule_GetState(module);
+ assert(state != NULL);
+ return (_hashlibstate *)state;
+}
+
typedef struct {
PyObject_HEAD
EVP_MD_CTX *ctx; /* OpenSSL message digest context */
PyThread_type_lock lock; /* OpenSSL context lock */
} EVPobject;
-
-static PyTypeObject EVPtype;
+typedef struct {
+ PyObject_HEAD
+ HMAC_CTX *ctx; /* OpenSSL hmac context */
+ PyThread_type_lock lock; /* HMAC context lock */
+} HMACobject;
#include "clinic/_hashopenssl.c.h"
/*[clinic input]
module _hashlib
-class _hashlib.HASH "EVPobject *" "&EVPtype"
+class _hashlib.HASH "EVPobject *" "((_hashlibstate *)PyModule_GetState(module))->EVPtype"
+class _hashlib.HASHXOF "EVPobject *" "((_hashlibstate *)PyModule_GetState(module))->EVPXOFtype"
+class _hashlib.HMAC "HMACobject *" "((_hashlibstate *)PyModule_GetState(module))->HMACtype"
[clinic start generated code]*/
-/*[clinic end generated code: output=da39a3ee5e6b4b0d input=a881a5092eecad28]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=7df1bcf6f75cb8ef]*/
/* LCOV_EXCL_START */
@@ -98,6 +149,15 @@ _setException(PyObject *exc)
}
/* LCOV_EXCL_STOP */
+/* {Py_tp_new, NULL} doesn't block __new__ */
+static PyObject *
+_disabled_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
+{
+ PyErr_Format(PyExc_TypeError,
+ "cannot create '%.100s' instances", _PyType_Name(type));
+ return NULL;
+}
+
static PyObject*
py_digest_name(const EVP_MD *md)
{
@@ -233,9 +293,9 @@ py_digest_by_name(const char *name)
}
static EVPobject *
-newEVPobject(void)
+newEVPobject(PyTypeObject *type)
{
- EVPobject *retval = (EVPobject *)PyObject_New(EVPobject, &EVPtype);
+ EVPobject *retval = (EVPobject *)PyObject_New(EVPobject, type);
if (retval == NULL) {
return NULL;
}
@@ -277,10 +337,12 @@ EVP_hash(EVPobject *self, const void *vp, Py_ssize_t len)
static void
EVP_dealloc(EVPobject *self)
{
+ PyTypeObject *tp = Py_TYPE(self);
if (self->lock != NULL)
PyThread_free_lock(self->lock);
EVP_MD_CTX_free(self->ctx);
PyObject_Del(self);
+ Py_DECREF(tp);
}
static int
@@ -307,7 +369,7 @@ EVP_copy_impl(EVPobject *self)
{
EVPobject *newobj;
- if ( (newobj = newEVPobject())==NULL)
+ if ((newobj = newEVPobject(Py_TYPE(self))) == NULL)
return NULL;
if (!locked_EVP_MD_CTX_copy(newobj->ctx, self)) {
@@ -482,7 +544,8 @@ EVP_repr(EVPobject *self)
if (!name_obj) {
return NULL;
}
- repr = PyUnicode_FromFormat("<%U HASH object @ %p>", name_obj, self);
+ repr = PyUnicode_FromFormat("<%U %s object @ %p>",
+ name_obj, Py_TYPE(self)->tp_name, self);
Py_DECREF(name_obj);
return repr;
}
@@ -505,61 +568,206 @@ PyDoc_STRVAR(hashtype_doc,
"name -- the hash algorithm being used by this object\n"
"digest_size -- number of bytes in this hashes output");
-static PyTypeObject EVPtype = {
- PyVarObject_HEAD_INIT(NULL, 0)
+static PyType_Slot EVPtype_slots[] = {
+ {Py_tp_dealloc, EVP_dealloc},
+ {Py_tp_repr, EVP_repr},
+ {Py_tp_doc, (char *)hashtype_doc},
+ {Py_tp_methods, EVP_methods},
+ {Py_tp_getset, EVP_getseters},
+ {Py_tp_new, _disabled_new},
+ {0, 0},
+};
+
+static PyType_Spec EVPtype_spec = {
"_hashlib.HASH", /*tp_name*/
sizeof(EVPobject), /*tp_basicsize*/
0, /*tp_itemsize*/
- /* methods */
- (destructor)EVP_dealloc, /*tp_dealloc*/
- 0, /*tp_vectorcall_offset*/
- 0, /*tp_getattr*/
- 0, /*tp_setattr*/
- 0, /*tp_as_async*/
- (reprfunc)EVP_repr, /*tp_repr*/
- 0, /*tp_as_number*/
- 0, /*tp_as_sequence*/
- 0, /*tp_as_mapping*/
- 0, /*tp_hash*/
- 0, /*tp_call*/
- 0, /*tp_str*/
- 0, /*tp_getattro*/
- 0, /*tp_setattro*/
- 0, /*tp_as_buffer*/
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/
- hashtype_doc, /*tp_doc*/
- 0, /*tp_traverse*/
- 0, /*tp_clear*/
- 0, /*tp_richcompare*/
- 0, /*tp_weaklistoffset*/
- 0, /*tp_iter*/
- 0, /*tp_iternext*/
- EVP_methods, /* tp_methods */
- NULL, /* tp_members */
- EVP_getseters, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+ EVPtype_slots
+};
+
+#ifdef PY_OPENSSL_HAS_SHAKE
+
+/*[clinic input]
+_hashlib.HASHXOF.digest as EVPXOF_digest
+
+ length: Py_ssize_t
+
+Return the digest value as a bytes object.
+[clinic start generated code]*/
+
+static PyObject *
+EVPXOF_digest_impl(EVPobject *self, Py_ssize_t length)
+/*[clinic end generated code: output=ef9320c23280efad input=816a6537cea3d1db]*/
+{
+ EVP_MD_CTX *temp_ctx;
+ PyObject *retval = PyBytes_FromStringAndSize(NULL, length);
+
+ if (retval == NULL) {
+ return NULL;
+ }
+
+ temp_ctx = EVP_MD_CTX_new();
+ if (temp_ctx == NULL) {
+ Py_DECREF(retval);
+ PyErr_NoMemory();
+ return NULL;
+ }
+
+ if (!locked_EVP_MD_CTX_copy(temp_ctx, self)) {
+ Py_DECREF(retval);
+ EVP_MD_CTX_free(temp_ctx);
+ return _setException(PyExc_ValueError);
+ }
+ if (!EVP_DigestFinalXOF(temp_ctx,
+ (unsigned char*)PyBytes_AS_STRING(retval),
+ length)) {
+ Py_DECREF(retval);
+ EVP_MD_CTX_free(temp_ctx);
+ _setException(PyExc_ValueError);
+ return NULL;
+ }
+
+ EVP_MD_CTX_free(temp_ctx);
+ return retval;
+}
+
+/*[clinic input]
+_hashlib.HASHXOF.hexdigest as EVPXOF_hexdigest
+
+ length: Py_ssize_t
+
+Return the digest value as a string of hexadecimal digits.
+[clinic start generated code]*/
+
+static PyObject *
+EVPXOF_hexdigest_impl(EVPobject *self, Py_ssize_t length)
+/*[clinic end generated code: output=eb3e6ee7788bf5b2 input=5f9d6a8f269e34df]*/
+{
+ unsigned char *digest;
+ EVP_MD_CTX *temp_ctx;
+ PyObject *retval;
+
+ digest = (unsigned char*)PyMem_Malloc(length);
+ if (digest == NULL) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+
+ temp_ctx = EVP_MD_CTX_new();
+ if (temp_ctx == NULL) {
+ PyMem_Free(digest);
+ PyErr_NoMemory();
+ return NULL;
+ }
+
+ /* Get the raw (binary) digest value */
+ if (!locked_EVP_MD_CTX_copy(temp_ctx, self)) {
+ PyMem_Free(digest);
+ EVP_MD_CTX_free(temp_ctx);
+ return _setException(PyExc_ValueError);
+ }
+ if (!EVP_DigestFinalXOF(temp_ctx, digest, length)) {
+ PyMem_Free(digest);
+ EVP_MD_CTX_free(temp_ctx);
+ _setException(PyExc_ValueError);
+ return NULL;
+ }
+
+ EVP_MD_CTX_free(temp_ctx);
+
+ retval = _Py_strhex((const char *)digest, length);
+ PyMem_Free(digest);
+ return retval;
+}
+
+static PyMethodDef EVPXOF_methods[] = {
+ EVPXOF_DIGEST_METHODDEF
+ EVPXOF_HEXDIGEST_METHODDEF
+ {NULL, NULL} /* sentinel */
+};
+
+
+static PyObject *
+EVPXOF_get_digest_size(EVPobject *self, void *closure)
+{
+ return PyLong_FromLong(0);
+}
+
+static PyGetSetDef EVPXOF_getseters[] = {
+ {"digest_size",
+ (getter)EVPXOF_get_digest_size, NULL,
+ NULL,
+ NULL},
+ {NULL} /* Sentinel */
+};
+
+PyDoc_STRVAR(hashxoftype_doc,
+"HASHXOF(name, string=b\'\')\n"
+"--\n"
+"\n"
+"A hash is an object used to calculate a checksum of a string of information.\n"
+"\n"
+"Methods:\n"
+"\n"
+"update() -- updates the current digest with an additional string\n"
+"digest(length) -- return the current digest value\n"
+"hexdigest(length) -- return the current digest as a string of hexadecimal digits\n"
+"copy() -- return a copy of the current hash object\n"
+"\n"
+"Attributes:\n"
+"\n"
+"name -- the hash algorithm being used by this object\n"
+"digest_size -- number of bytes in this hashes output");
+
+static PyType_Slot EVPXOFtype_slots[] = {
+ {Py_tp_doc, (char *)hashxoftype_doc},
+ {Py_tp_methods, EVPXOF_methods},
+ {Py_tp_getset, EVPXOF_getseters},
+ {Py_tp_new, _disabled_new},
+ {0, 0},
+};
+
+static PyType_Spec EVPXOFtype_spec = {
+ "_hashlib.HASHXOF", /*tp_name*/
+ sizeof(EVPobject), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+ EVPXOFtype_slots
};
-\
+
+#endif
+
static PyObject *
-EVPnew(const EVP_MD *digest,
- const unsigned char *cp, Py_ssize_t len)
+EVPnew(PyObject *module, const EVP_MD *digest,
+ const unsigned char *cp, Py_ssize_t len, int usedforsecurity)
{
int result = 0;
EVPobject *self;
+ PyTypeObject *type = get_hashlib_state(module)->EVPtype;
if (!digest) {
PyErr_SetString(PyExc_ValueError, "unsupported hash type");
return NULL;
}
- if ((self = newEVPobject()) == NULL)
+#ifdef PY_OPENSSL_HAS_SHAKE
+ if ((EVP_MD_flags(digest) & EVP_MD_FLAG_XOF) == EVP_MD_FLAG_XOF) {
+ type = get_hashlib_state(module)->EVPXOFtype;
+ }
+#endif
+
+ if ((self = newEVPobject(type)) == NULL)
return NULL;
+ if (!usedforsecurity) {
+#ifdef EVP_MD_CTX_FLAG_NON_FIPS_ALLOW
+ EVP_MD_CTX_set_flags(self->ctx, EVP_MD_CTX_FLAG_NON_FIPS_ALLOW);
+#endif
+ }
+
+
if (!EVP_DigestInit_ex(self->ctx, digest, NULL)) {
_setException(PyExc_ValueError);
Py_DECREF(self);
@@ -591,6 +799,8 @@ _hashlib.new as EVP_new
name as name_obj: object
string as data_obj: object(c_default="NULL") = b''
+ *
+ usedforsecurity: bool = True
Return a new hash object using the named algorithm.
@@ -601,13 +811,14 @@ The MD5 and SHA1 algorithms are always supported.
[clinic start generated code]*/
static PyObject *
-EVP_new_impl(PyObject *module, PyObject *name_obj, PyObject *data_obj)
-/*[clinic end generated code: output=9e7cf664e04b0226 input=7eb79bf30058bd02]*/
+EVP_new_impl(PyObject *module, PyObject *name_obj, PyObject *data_obj,
+ int usedforsecurity)
+/*[clinic end generated code: output=ddd5053f92dffe90 input=c24554d0337be1b0]*/
{
Py_buffer view = { 0 };
PyObject *ret_obj;
char *name;
- const EVP_MD *digest;
+ const EVP_MD *digest = NULL;
if (!PyArg_Parse(name_obj, "s", &name)) {
PyErr_SetString(PyExc_TypeError, "name must be a string");
@@ -619,7 +830,9 @@ EVP_new_impl(PyObject *module, PyObject *name_obj, PyObject *data_obj)
digest = py_digest_by_name(name);
- ret_obj = EVPnew(digest, (unsigned char*)view.buf, view.len);
+ ret_obj = EVPnew(module, digest,
+ (unsigned char*)view.buf, view.len,
+ usedforsecurity);
if (data_obj)
PyBuffer_Release(&view);
@@ -627,7 +840,8 @@ EVP_new_impl(PyObject *module, PyObject *name_obj, PyObject *data_obj)
}
static PyObject*
-EVP_fast_new(PyObject *module, PyObject *data_obj, const EVP_MD *digest)
+EVP_fast_new(PyObject *module, PyObject *data_obj, const EVP_MD *digest,
+ int usedforsecurity)
{
Py_buffer view = { 0 };
PyObject *ret_obj;
@@ -635,7 +849,9 @@ EVP_fast_new(PyObject *module, PyObject *data_obj, const EVP_MD *digest)
if (data_obj)
GET_BUFFER_VIEW_OR_ERROUT(data_obj, &view);
- ret_obj = EVPnew(digest, (unsigned char*)view.buf, view.len);
+ ret_obj = EVPnew(module, digest,
+ (unsigned char*)view.buf, view.len,
+ usedforsecurity);
if (data_obj)
PyBuffer_Release(&view);
@@ -647,16 +863,19 @@ EVP_fast_new(PyObject *module, PyObject *data_obj, const EVP_MD *digest)
_hashlib.openssl_md5
string as data_obj: object(py_default="b''") = NULL
+ *
+ usedforsecurity: bool = True
Returns a md5 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_md5_impl(PyObject *module, PyObject *data_obj)
-/*[clinic end generated code: output=6caae75b73e22c3f input=52010d3869e1b1a7]*/
+_hashlib_openssl_md5_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity)
+/*[clinic end generated code: output=87b0186440a44f8c input=990e36d5e689b16e]*/
{
- return EVP_fast_new(module, data_obj, EVP_md5());
+ return EVP_fast_new(module, data_obj, EVP_md5(), usedforsecurity);
}
@@ -664,16 +883,19 @@ _hashlib_openssl_md5_impl(PyObject *module, PyObject *data_obj)
_hashlib.openssl_sha1
string as data_obj: object(py_default="b''") = NULL
+ *
+ usedforsecurity: bool = True
Returns a sha1 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha1_impl(PyObject *module, PyObject *data_obj)
-/*[clinic end generated code: output=07606d8f75153e61 input=16807d30e4aa8ae9]*/
+_hashlib_openssl_sha1_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity)
+/*[clinic end generated code: output=6813024cf690670d input=948f2f4b6deabc10]*/
{
- return EVP_fast_new(module, data_obj, EVP_sha1());
+ return EVP_fast_new(module, data_obj, EVP_sha1(), usedforsecurity);
}
@@ -681,16 +903,19 @@ _hashlib_openssl_sha1_impl(PyObject *module, PyObject *data_obj)
_hashlib.openssl_sha224
string as data_obj: object(py_default="b''") = NULL
+ *
+ usedforsecurity: bool = True
Returns a sha224 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha224_impl(PyObject *module, PyObject *data_obj)
-/*[clinic end generated code: output=55e848761bcef0c9 input=5dbc2f1d84eb459b]*/
+_hashlib_openssl_sha224_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity)
+/*[clinic end generated code: output=a2dfe7cc4eb14ebb input=f9272821fadca505]*/
{
- return EVP_fast_new(module, data_obj, EVP_sha224());
+ return EVP_fast_new(module, data_obj, EVP_sha224(), usedforsecurity);
}
@@ -698,16 +923,19 @@ _hashlib_openssl_sha224_impl(PyObject *module, PyObject *data_obj)
_hashlib.openssl_sha256
string as data_obj: object(py_default="b''") = NULL
+ *
+ usedforsecurity: bool = True
Returns a sha256 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha256_impl(PyObject *module, PyObject *data_obj)
-/*[clinic end generated code: output=05851d7cce34ac65 input=a68a5d21cda5a80f]*/
+_hashlib_openssl_sha256_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity)
+/*[clinic end generated code: output=1f874a34870f0a68 input=549fad9d2930d4c5]*/
{
- return EVP_fast_new(module, data_obj, EVP_sha256());
+ return EVP_fast_new(module, data_obj, EVP_sha256(), usedforsecurity);
}
@@ -715,16 +943,19 @@ _hashlib_openssl_sha256_impl(PyObject *module, PyObject *data_obj)
_hashlib.openssl_sha384
string as data_obj: object(py_default="b''") = NULL
+ *
+ usedforsecurity: bool = True
Returns a sha384 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha384_impl(PyObject *module, PyObject *data_obj)
-/*[clinic end generated code: output=5101a4704a932c2f input=6bdfa006622b64ea]*/
+_hashlib_openssl_sha384_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity)
+/*[clinic end generated code: output=58529eff9ca457b2 input=48601a6e3bf14ad7]*/
{
- return EVP_fast_new(module, data_obj, EVP_sha384());
+ return EVP_fast_new(module, data_obj, EVP_sha384(), usedforsecurity);
}
@@ -732,18 +963,140 @@ _hashlib_openssl_sha384_impl(PyObject *module, PyObject *data_obj)
_hashlib.openssl_sha512
string as data_obj: object(py_default="b''") = NULL
+ *
+ usedforsecurity: bool = True
Returns a sha512 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha512_impl(PyObject *module, PyObject *data_obj)
-/*[clinic end generated code: output=20c8e63ee560a5cb input=ece50182ad4b76a6]*/
+_hashlib_openssl_sha512_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity)
+/*[clinic end generated code: output=2c744c9e4a40d5f6 input=c5c46a2a817aa98f]*/
+{
+ return EVP_fast_new(module, data_obj, EVP_sha512(), usedforsecurity);
+}
+
+
+#ifdef PY_OPENSSL_HAS_SHA3
+
+/*[clinic input]
+_hashlib.openssl_sha3_224
+
+ string as data_obj: object(py_default="b''") = NULL
+ *
+ usedforsecurity: bool = True
+
+Returns a sha3-224 hash object; optionally initialized with a string
+
+[clinic start generated code]*/
+
+static PyObject *
+_hashlib_openssl_sha3_224_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity)
+/*[clinic end generated code: output=144641c1d144b974 input=e3a01b2888916157]*/
+{
+ return EVP_fast_new(module, data_obj, EVP_sha3_224(), usedforsecurity);
+}
+
+/*[clinic input]
+_hashlib.openssl_sha3_256
+
+ string as data_obj: object(py_default="b''") = NULL
+ *
+ usedforsecurity: bool = True
+
+Returns a sha3-256 hash object; optionally initialized with a string
+
+[clinic start generated code]*/
+
+static PyObject *
+_hashlib_openssl_sha3_256_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity)
+/*[clinic end generated code: output=c61f1ab772d06668 input=e2908126c1b6deed]*/
+{
+ return EVP_fast_new(module, data_obj, EVP_sha3_256(), usedforsecurity);
+}
+
+/*[clinic input]
+_hashlib.openssl_sha3_384
+
+ string as data_obj: object(py_default="b''") = NULL
+ *
+ usedforsecurity: bool = True
+
+Returns a sha3-384 hash object; optionally initialized with a string
+
+[clinic start generated code]*/
+
+static PyObject *
+_hashlib_openssl_sha3_384_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity)
+/*[clinic end generated code: output=f68e4846858cf0ee input=ec0edf5c792f8252]*/
+{
+ return EVP_fast_new(module, data_obj, EVP_sha3_384(), usedforsecurity);
+}
+
+/*[clinic input]
+_hashlib.openssl_sha3_512
+
+ string as data_obj: object(py_default="b''") = NULL
+ *
+ usedforsecurity: bool = True
+
+Returns a sha3-512 hash object; optionally initialized with a string
+
+[clinic start generated code]*/
+
+static PyObject *
+_hashlib_openssl_sha3_512_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity)
+/*[clinic end generated code: output=2eede478c159354a input=64e2cc0c094d56f4]*/
+{
+ return EVP_fast_new(module, data_obj, EVP_sha3_512(), usedforsecurity);
+}
+#endif /* PY_OPENSSL_HAS_SHA3 */
+
+#ifdef PY_OPENSSL_HAS_SHAKE
+/*[clinic input]
+_hashlib.openssl_shake_128
+
+ string as data_obj: object(py_default="b''") = NULL
+ *
+ usedforsecurity: bool = True
+
+Returns a shake-128 variable hash object; optionally initialized with a string
+
+[clinic start generated code]*/
+
+static PyObject *
+_hashlib_openssl_shake_128_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity)
+/*[clinic end generated code: output=bc49cdd8ada1fa97 input=6c9d67440eb33ec8]*/
{
- return EVP_fast_new(module, data_obj, EVP_sha512());
+ return EVP_fast_new(module, data_obj, EVP_shake128(), usedforsecurity);
}
+/*[clinic input]
+_hashlib.openssl_shake_256
+
+ string as data_obj: object(py_default="b''") = NULL
+ *
+ usedforsecurity: bool = True
+
+Returns a shake-256 variable hash object; optionally initialized with a string
+
+[clinic start generated code]*/
+
+static PyObject *
+_hashlib_openssl_shake_256_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity)
+/*[clinic end generated code: output=358d213be8852df7 input=479cbe9fefd4a9f8]*/
+{
+ return EVP_fast_new(module, data_obj, EVP_shake256(), usedforsecurity);
+}
+#endif /* PY_OPENSSL_HAS_SHAKE */
/*[clinic input]
_hashlib.pbkdf2_hmac as pbkdf2_hmac
@@ -769,7 +1122,7 @@ pbkdf2_hmac_impl(PyObject *module, const char *hash_name,
int retval;
const EVP_MD *digest;
- digest = EVP_get_digestbyname(hash_name);
+ digest = py_digest_by_name(hash_name);
if (digest == NULL) {
PyErr_SetString(PyExc_ValueError, "unsupported hash type");
goto end;
@@ -971,7 +1324,7 @@ _hashlib_scrypt_impl(PyObject *module, Py_buffer *password, Py_buffer *salt,
*/
/*[clinic input]
-_hashlib.hmac_digest
+_hashlib.hmac_digest as _hashlib_hmac_singleshot
key: Py_buffer
msg: Py_buffer
@@ -981,16 +1334,16 @@ Single-shot HMAC.
[clinic start generated code]*/
static PyObject *
-_hashlib_hmac_digest_impl(PyObject *module, Py_buffer *key, Py_buffer *msg,
- const char *digest)
-/*[clinic end generated code: output=75630e684cdd8762 input=562d2f4249511bd3]*/
+_hashlib_hmac_singleshot_impl(PyObject *module, Py_buffer *key,
+ Py_buffer *msg, const char *digest)
+/*[clinic end generated code: output=15658ede5ab98185 input=019dffc571909a46]*/
{
unsigned char md[EVP_MAX_MD_SIZE] = {0};
unsigned int md_len = 0;
unsigned char *result;
const EVP_MD *evp;
- evp = EVP_get_digestbyname(digest);
+ evp = py_digest_by_name(digest);
if (evp == NULL) {
PyErr_SetString(PyExc_ValueError, "unsupported hash type");
return NULL;
@@ -1022,54 +1375,577 @@ _hashlib_hmac_digest_impl(PyObject *module, Py_buffer *key, Py_buffer *msg,
return PyBytes_FromStringAndSize((const char*)md, md_len);
}
-/* State for our callback function so that it can accumulate a result. */
-typedef struct _internal_name_mapper_state {
- PyObject *set;
- int error;
-} _InternalNameMapperState;
+/* OpenSSL-based HMAC implementation
+ */
+static int _hmac_update(HMACobject*, PyObject*);
-/* A callback function to pass to OpenSSL's OBJ_NAME_do_all(...) */
-static void
-_openssl_hash_name_mapper(const EVP_MD *md, const char *from,
- const char *to, void *arg)
-{
- _InternalNameMapperState *state = (_InternalNameMapperState *)arg;
- PyObject *py_name;
+/*[clinic input]
+_hashlib.hmac_new
- assert(state != NULL);
- if (md == NULL)
- return;
+ key: Py_buffer
+ msg as msg_obj: object(c_default="NULL") = b''
+ digestmod: str(c_default="NULL") = None
- py_name = py_digest_name(md);
- if (py_name == NULL) {
- state->error = 1;
- } else {
- if (PySet_Add(state->set, py_name) != 0) {
- state->error = 1;
- }
- Py_DECREF(py_name);
+Return a new hmac object.
+[clinic start generated code]*/
+
+static PyObject *
+_hashlib_hmac_new_impl(PyObject *module, Py_buffer *key, PyObject *msg_obj,
+ const char *digestmod)
+/*[clinic end generated code: output=9a35673be0cbea1b input=a0878868eb190134]*/
+{
+ PyTypeObject *type = get_hashlib_state(module)->HMACtype;
+ const EVP_MD *digest;
+ HMAC_CTX *ctx = NULL;
+ HMACobject *self = NULL;
+ int r;
+
+ if (key->len > INT_MAX) {
+ PyErr_SetString(PyExc_OverflowError,
+ "key is too long.");
+ return NULL;
+ }
+
+ if ((digestmod == NULL) || !strlen(digestmod)) {
+ PyErr_SetString(
+ PyExc_TypeError, "Missing required parameter 'digestmod'.");
+ return NULL;
+ }
+
+ digest = py_digest_by_name(digestmod);
+ if (!digest) {
+ PyErr_SetString(PyExc_ValueError, "unknown hash function");
+ return NULL;
+ }
+
+ ctx = HMAC_CTX_new();
+ if (ctx == NULL) {
+ _setException(PyExc_ValueError);
+ goto error;
+ }
+
+ r = HMAC_Init_ex(
+ ctx,
+ (const char*)key->buf,
+ (int)key->len,
+ digest,
+ NULL /*impl*/);
+ if (r == 0) {
+ _setException(PyExc_ValueError);
+ goto error;
}
+
+ self = (HMACobject *)PyObject_New(HMACobject, type);
+ if (self == NULL) {
+ goto error;
+ }
+
+ self->ctx = ctx;
+ self->lock = NULL;
+
+ if ((msg_obj != NULL) && (msg_obj != Py_None)) {
+ if (!_hmac_update(self, msg_obj))
+ goto error;
+ }
+
+ return (PyObject*)self;
+
+error:
+ if (ctx) HMAC_CTX_free(ctx);
+ if (self) PyObject_Del(self);
+ return NULL;
}
+/* helper functions */
+static int
+locked_HMAC_CTX_copy(HMAC_CTX *new_ctx_p, HMACobject *self)
+{
+ int result;
+ ENTER_HASHLIB(self);
+ result = HMAC_CTX_copy(new_ctx_p, self->ctx);
+ LEAVE_HASHLIB(self);
+ return result;
+}
-/* Ask OpenSSL for a list of supported ciphers, filling in a Python set. */
-static PyObject*
-generate_hash_name_list(void)
+static unsigned int
+_hmac_digest_size(HMACobject *self)
{
- _InternalNameMapperState state;
- state.set = PyFrozenSet_New(NULL);
- if (state.set == NULL)
+ unsigned int digest_size = EVP_MD_size(HMAC_CTX_get_md(self->ctx));
+ assert(digest_size <= EVP_MAX_MD_SIZE);
+ return digest_size;
+}
+
+static int
+_hmac_update(HMACobject *self, PyObject *obj)
+{
+ int r;
+ Py_buffer view = {0};
+
+ GET_BUFFER_VIEW_OR_ERROR(obj, &view, return 0);
+
+ if (self->lock == NULL && view.len >= HASHLIB_GIL_MINSIZE) {
+ self->lock = PyThread_allocate_lock();
+ /* fail? lock = NULL and we fail over to non-threaded code. */
+ }
+
+ if (self->lock != NULL) {
+ ENTER_HASHLIB(self);
+ r = HMAC_Update(self->ctx, (const unsigned char*)view.buf, view.len);
+ LEAVE_HASHLIB(self);
+ } else {
+ r = HMAC_Update(self->ctx, (const unsigned char*)view.buf, view.len);
+ }
+
+ PyBuffer_Release(&view);
+
+ if (r == 0) {
+ _setException(PyExc_ValueError);
+ return 0;
+ }
+ return 1;
+}
+
+/*[clinic input]
+_hashlib.HMAC.copy
+
+Return a copy ("clone") of the HMAC object.
+[clinic start generated code]*/
+
+static PyObject *
+_hashlib_HMAC_copy_impl(HMACobject *self)
+/*[clinic end generated code: output=29aa28b452833127 input=e2fa6a05db61a4d6]*/
+{
+ HMACobject *retval;
+
+ HMAC_CTX *ctx = HMAC_CTX_new();
+ if (ctx == NULL) {
+ return _setException(PyExc_ValueError);
+ }
+ if (!locked_HMAC_CTX_copy(ctx, self)) {
+ HMAC_CTX_free(ctx);
+ return _setException(PyExc_ValueError);
+ }
+
+ retval = (HMACobject *)PyObject_New(HMACobject, Py_TYPE(self));
+ if (retval == NULL) {
+ HMAC_CTX_free(ctx);
return NULL;
- state.error = 0;
+ }
+ retval->ctx = ctx;
+ retval->lock = NULL;
+
+ return (PyObject *)retval;
+}
+
+static void
+_hmac_dealloc(HMACobject *self)
+{
+ PyTypeObject *tp = Py_TYPE(self);
+ if (self->lock != NULL) {
+ PyThread_free_lock(self->lock);
+ }
+ HMAC_CTX_free(self->ctx);
+ PyObject_Del(self);
+ Py_DECREF(tp);
+}
+
+static PyObject *
+_hmac_repr(HMACobject *self)
+{
+ PyObject *digest_name = py_digest_name(HMAC_CTX_get_md(self->ctx));
+ if (digest_name == NULL) {
+ return NULL;
+ }
+ PyObject *repr = PyUnicode_FromFormat(
+ "<%U HMAC object @ %p>", digest_name, self
+ );
+ Py_DECREF(digest_name);
+ return repr;
+}
+
+/*[clinic input]
+_hashlib.HMAC.update
+ msg: object
+
+Update the HMAC object with msg.
+[clinic start generated code]*/
+
+static PyObject *
+_hashlib_HMAC_update_impl(HMACobject *self, PyObject *msg)
+/*[clinic end generated code: output=f31f0ace8c625b00 input=1829173bb3cfd4e6]*/
+{
+ if (!_hmac_update(self, msg)) {
+ return NULL;
+ }
+ Py_RETURN_NONE;
+}
+
+static int
+_hmac_digest(HMACobject *self, unsigned char *buf, unsigned int len)
+{
+ HMAC_CTX *temp_ctx = HMAC_CTX_new();
+ if (temp_ctx == NULL) {
+ PyErr_NoMemory();
+ return 0;
+ }
+ if (!locked_HMAC_CTX_copy(temp_ctx, self)) {
+ _setException(PyExc_ValueError);
+ return 0;
+ }
+ int r = HMAC_Final(temp_ctx, buf, &len);
+ HMAC_CTX_free(temp_ctx);
+ if (r == 0) {
+ _setException(PyExc_ValueError);
+ return 0;
+ }
+ return 1;
+}
+
+/*[clinic input]
+_hashlib.HMAC.digest
+Return the digest of the bytes passed to the update() method so far.
+[clinic start generated code]*/
+
+static PyObject *
+_hashlib_HMAC_digest_impl(HMACobject *self)
+/*[clinic end generated code: output=1b1424355af7a41e input=bff07f74da318fb4]*/
+{
+ unsigned char digest[EVP_MAX_MD_SIZE];
+ unsigned int digest_size = _hmac_digest_size(self);
+ if (digest_size == 0) {
+ return _setException(PyExc_ValueError);
+ }
+ int r = _hmac_digest(self, digest, digest_size);
+ if (r == 0) {
+ return NULL;
+ }
+ return PyBytes_FromStringAndSize((const char *)digest, digest_size);
+}
+
+/*[clinic input]
+_hashlib.HMAC.hexdigest
+
+Return hexadecimal digest of the bytes passed to the update() method so far.
+
+This may be used to exchange the value safely in email or other non-binary
+environments.
+[clinic start generated code]*/
+
+static PyObject *
+_hashlib_HMAC_hexdigest_impl(HMACobject *self)
+/*[clinic end generated code: output=80d825be1eaae6a7 input=5abc42702874ddcf]*/
+{
+ unsigned char digest[EVP_MAX_MD_SIZE];
+ unsigned int digest_size = _hmac_digest_size(self);
+ if (digest_size == 0) {
+ return _setException(PyExc_ValueError);
+ }
+ int r = _hmac_digest(self, digest, digest_size);
+ if (r == 0) {
+ return NULL;
+ }
+ return _Py_strhex((const char *)digest, digest_size);
+}
+
+static PyObject *
+_hashlib_hmac_get_digest_size(HMACobject *self, void *closure)
+{
+ unsigned int digest_size = _hmac_digest_size(self);
+ if (digest_size == 0) {
+ return _setException(PyExc_ValueError);
+ }
+ return PyLong_FromLong(digest_size);
+}
+
+static PyObject *
+_hashlib_hmac_get_block_size(HMACobject *self, void *closure)
+{
+ const EVP_MD *md = HMAC_CTX_get_md(self->ctx);
+ if (md == NULL) {
+ return _setException(PyExc_ValueError);
+ }
+ return PyLong_FromLong(EVP_MD_block_size(md));
+}
+
+static PyObject *
+_hashlib_hmac_get_name(HMACobject *self, void *closure)
+{
+ PyObject *digest_name = py_digest_name(HMAC_CTX_get_md(self->ctx));
+ if (digest_name == NULL) {
+ return NULL;
+ }
+ PyObject *name = PyUnicode_FromFormat("hmac-%U", digest_name);
+ Py_DECREF(digest_name);
+ return name;
+}
+
+static PyMethodDef HMAC_methods[] = {
+ _HASHLIB_HMAC_UPDATE_METHODDEF
+ _HASHLIB_HMAC_DIGEST_METHODDEF
+ _HASHLIB_HMAC_HEXDIGEST_METHODDEF
+ _HASHLIB_HMAC_COPY_METHODDEF
+ {NULL, NULL} /* sentinel */
+};
+
+static PyGetSetDef HMAC_getset[] = {
+ {"digest_size", (getter)_hashlib_hmac_get_digest_size, NULL, NULL, NULL},
+ {"block_size", (getter)_hashlib_hmac_get_block_size, NULL, NULL, NULL},
+ {"name", (getter)_hashlib_hmac_get_name, NULL, NULL, NULL},
+ {NULL} /* Sentinel */
+};
+
+
+PyDoc_STRVAR(hmactype_doc,
+"The object used to calculate HMAC of a message.\n\
+\n\
+Methods:\n\
+\n\
+update() -- updates the current digest with an additional string\n\
+digest() -- return the current digest value\n\
+hexdigest() -- return the current digest as a string of hexadecimal digits\n\
+copy() -- return a copy of the current hash object\n\
+\n\
+Attributes:\n\
+\n\
+name -- the name, including the hash algorithm used by this object\n\
+digest_size -- number of bytes in digest() output\n");
+
+static PyType_Slot HMACtype_slots[] = {
+ {Py_tp_doc, (char *)hmactype_doc},
+ {Py_tp_repr, (reprfunc)_hmac_repr},
+ {Py_tp_dealloc,(destructor)_hmac_dealloc},
+ {Py_tp_methods, HMAC_methods},
+ {Py_tp_getset, HMAC_getset},
+ {Py_tp_new, _disabled_new},
+ {0, NULL}
+};
+
+PyType_Spec HMACtype_spec = {
+ "_hashlib.HMAC", /* name */
+ sizeof(HMACobject), /* basicsize */
+ .flags = Py_TPFLAGS_DEFAULT,
+ .slots = HMACtype_slots,
+};
+
+
+/* State for our callback function so that it can accumulate a result. */
+typedef struct _internal_name_mapper_state {
+ PyObject *set;
+ int error;
+} _InternalNameMapperState;
+
+
+/* A callback function to pass to OpenSSL's OBJ_NAME_do_all(...) */
+static void
+_openssl_hash_name_mapper(const EVP_MD *md, const char *from,
+ const char *to, void *arg)
+{
+ _InternalNameMapperState *state = (_InternalNameMapperState *)arg;
+ PyObject *py_name;
+
+ assert(state != NULL);
+ if (md == NULL)
+ return;
+
+ py_name = py_digest_name(md);
+ if (py_name == NULL) {
+ state->error = 1;
+ } else {
+ if (PySet_Add(state->set, py_name) != 0) {
+ state->error = 1;
+ }
+ Py_DECREF(py_name);
+ }
+}
+
+
+/* Ask OpenSSL for a list of supported ciphers, filling in a Python set. */
+static int
+hashlib_md_meth_names(PyObject *module)
+{
+ _InternalNameMapperState state = {
+ .set = PyFrozenSet_New(NULL),
+ .error = 0
+ };
+ if (state.set == NULL) {
+ return -1;
+ }
EVP_MD_do_all(&_openssl_hash_name_mapper, &state);
if (state.error) {
Py_DECREF(state.set);
- return NULL;
+ return -1;
+ }
+
+ if (PyModule_AddObject(module, "openssl_md_meth_names", state.set) < 0) {
+ Py_DECREF(state.set);
+ return -1;
+ }
+
+ return 0;
+}
+
+/* LibreSSL doesn't support FIPS:
+ https://marc.info/?l=openbsd-misc&m=139819485423701&w=2
+
+ Ted Unangst wrote: "I figured I should mention our current libressl policy
+ wrt FIPS mode. It's gone and it's not coming back." */
+#ifndef LIBRESSL_VERSION_NUMBER
+/*[clinic input]
+_hashlib.get_fips_mode -> int
+
+Determine the OpenSSL FIPS mode of operation.
+
+For OpenSSL 3.0.0 and newer it returns the state of the default provider
+in the default OSSL context. It's not quite the same as FIPS_mode() but good
+enough for unittests.
+
+Effectively any non-zero return value indicates FIPS mode;
+values other than 1 may have additional significance.
+[clinic start generated code]*/
+
+static int
+_hashlib_get_fips_mode_impl(PyObject *module)
+/*[clinic end generated code: output=87eece1bab4d3fa9 input=2db61538c41c6fef]*/
+
+{
+ int result;
+#if OPENSSL_VERSION_NUMBER >= 0x30000000L
+ result = EVP_default_properties_is_fips_enabled(NULL);
+#else
+ ERR_clear_error();
+ result = FIPS_mode();
+ if (result == 0) {
+ // "If the library was built without support of the FIPS Object Module,
+ // then the function will return 0 with an error code of
+ // CRYPTO_R_FIPS_MODE_NOT_SUPPORTED (0x0f06d065)."
+ // But 0 is also a valid result value.
+ unsigned long errcode = ERR_peek_last_error();
+ if (errcode) {
+ _setException(PyExc_ValueError);
+ return -1;
+ }
+ }
+ return result;
+#endif
+}
+#endif // !LIBRESSL_VERSION_NUMBER
+
+
+static int
+_tscmp(const unsigned char *a, const unsigned char *b,
+ Py_ssize_t len_a, Py_ssize_t len_b)
+{
+ /* loop count depends on length of b. Might leak very little timing
+ * information if sizes are different.
+ */
+ Py_ssize_t length = len_b;
+ const void *left = a;
+ const void *right = b;
+ int result = 0;
+
+ if (len_a != length) {
+ left = b;
+ result = 1;
+ }
+
+ result |= CRYPTO_memcmp(left, right, length);
+
+ return (result == 0);
+}
+
+/* NOTE: Keep in sync with _operator.c implementation. */
+
+/*[clinic input]
+_hashlib.compare_digest
+
+ a: object
+ b: object
+ /
+
+Return 'a == b'.
+
+This function uses an approach designed to prevent
+timing analysis, making it appropriate for cryptography.
+
+a and b must both be of the same type: either str (ASCII only),
+or any bytes-like object.
+
+Note: If a and b are of different lengths, or if an error occurs,
+a timing attack could theoretically reveal information about the
+types and lengths of a and b--but not their values.
+[clinic start generated code]*/
+
+static PyObject *
+_hashlib_compare_digest_impl(PyObject *module, PyObject *a, PyObject *b)
+/*[clinic end generated code: output=6f1c13927480aed9 input=9c40c6e566ca12f5]*/
+{
+ int rc;
+
+ /* ASCII unicode string */
+ if(PyUnicode_Check(a) && PyUnicode_Check(b)) {
+ if (PyUnicode_READY(a) == -1 || PyUnicode_READY(b) == -1) {
+ return NULL;
+ }
+ if (!PyUnicode_IS_ASCII(a) || !PyUnicode_IS_ASCII(b)) {
+ PyErr_SetString(PyExc_TypeError,
+ "comparing strings with non-ASCII characters is "
+ "not supported");
+ return NULL;
+ }
+
+ rc = _tscmp(PyUnicode_DATA(a),
+ PyUnicode_DATA(b),
+ PyUnicode_GET_LENGTH(a),
+ PyUnicode_GET_LENGTH(b));
+ }
+ /* fallback to buffer interface for bytes, bytesarray and other */
+ else {
+ Py_buffer view_a;
+ Py_buffer view_b;
+
+ if (PyObject_CheckBuffer(a) == 0 && PyObject_CheckBuffer(b) == 0) {
+ PyErr_Format(PyExc_TypeError,
+ "unsupported operand types(s) or combination of types: "
+ "'%.100s' and '%.100s'",
+ Py_TYPE(a)->tp_name, Py_TYPE(b)->tp_name);
+ return NULL;
+ }
+
+ if (PyObject_GetBuffer(a, &view_a, PyBUF_SIMPLE) == -1) {
+ return NULL;
+ }
+ if (view_a.ndim > 1) {
+ PyErr_SetString(PyExc_BufferError,
+ "Buffer must be single dimension");
+ PyBuffer_Release(&view_a);
+ return NULL;
+ }
+
+ if (PyObject_GetBuffer(b, &view_b, PyBUF_SIMPLE) == -1) {
+ PyBuffer_Release(&view_a);
+ return NULL;
+ }
+ if (view_b.ndim > 1) {
+ PyErr_SetString(PyExc_BufferError,
+ "Buffer must be single dimension");
+ PyBuffer_Release(&view_a);
+ PyBuffer_Release(&view_b);
+ return NULL;
+ }
+
+ rc = _tscmp((const unsigned char*)view_a.buf,
+ (const unsigned char*)view_b.buf,
+ view_a.len,
+ view_b.len);
+
+ PyBuffer_Release(&view_a);
+ PyBuffer_Release(&view_b);
}
- return state.set;
+
+ return PyBool_FromLong(rc);
}
/* List of functions exported by this module */
@@ -1078,68 +1954,188 @@ static struct PyMethodDef EVP_functions[] = {
EVP_NEW_METHODDEF
PBKDF2_HMAC_METHODDEF
_HASHLIB_SCRYPT_METHODDEF
- _HASHLIB_HMAC_DIGEST_METHODDEF
+ _HASHLIB_GET_FIPS_MODE_METHODDEF
+ _HASHLIB_COMPARE_DIGEST_METHODDEF
+ _HASHLIB_HMAC_SINGLESHOT_METHODDEF
+ _HASHLIB_HMAC_NEW_METHODDEF
_HASHLIB_OPENSSL_MD5_METHODDEF
_HASHLIB_OPENSSL_SHA1_METHODDEF
_HASHLIB_OPENSSL_SHA224_METHODDEF
_HASHLIB_OPENSSL_SHA256_METHODDEF
_HASHLIB_OPENSSL_SHA384_METHODDEF
_HASHLIB_OPENSSL_SHA512_METHODDEF
+ _HASHLIB_OPENSSL_SHA3_224_METHODDEF
+ _HASHLIB_OPENSSL_SHA3_256_METHODDEF
+ _HASHLIB_OPENSSL_SHA3_384_METHODDEF
+ _HASHLIB_OPENSSL_SHA3_512_METHODDEF
+ _HASHLIB_OPENSSL_SHAKE_128_METHODDEF
+ _HASHLIB_OPENSSL_SHAKE_256_METHODDEF
{NULL, NULL} /* Sentinel */
};
/* Initialize this module. */
+static int
+hashlib_traverse(PyObject *m, visitproc visit, void *arg)
+{
+ _hashlibstate *state = get_hashlib_state(m);
+ Py_VISIT(state->EVPtype);
+ Py_VISIT(state->HMACtype);
+#ifdef PY_OPENSSL_HAS_SHAKE
+ Py_VISIT(state->EVPXOFtype);
+#endif
+ return 0;
+}
-static struct PyModuleDef _hashlibmodule = {
- PyModuleDef_HEAD_INIT,
- "_hashlib",
- NULL,
- -1,
- EVP_functions,
- NULL,
- NULL,
- NULL,
- NULL
-};
+static int
+hashlib_clear(PyObject *m)
+{
+ _hashlibstate *state = get_hashlib_state(m);
+ Py_CLEAR(state->EVPtype);
+ Py_CLEAR(state->HMACtype);
+#ifdef PY_OPENSSL_HAS_SHAKE
+ Py_CLEAR(state->EVPXOFtype);
+#endif
+ return 0;
+}
-PyMODINIT_FUNC
-PyInit__hashlib(void)
+static void
+hashlib_free(void *m)
{
- PyObject *m, *openssl_md_meth_names;
+ hashlib_clear((PyObject *)m);
+}
+/* Py_mod_exec functions */
+static int
+hashlib_openssl_legacy_init(PyObject *module)
+{
#if (OPENSSL_VERSION_NUMBER < 0x10100000L) || defined(LIBRESSL_VERSION_NUMBER)
/* Load all digest algorithms and initialize cpuid */
OPENSSL_add_all_algorithms_noconf();
ERR_load_crypto_strings();
#endif
+ return 0;
+}
- /* TODO build EVP_functions openssl_* entries dynamically based
- * on what hashes are supported rather than listing many
- * but having some be unsupported. Only init appropriate
- * constants. */
+static int
+hashlib_init_evptype(PyObject *module)
+{
+ _hashlibstate *state = get_hashlib_state(module);
- Py_TYPE(&EVPtype) = &PyType_Type;
- if (PyType_Ready(&EVPtype) < 0)
- return NULL;
+ state->EVPtype = (PyTypeObject *)PyType_FromSpec(&EVPtype_spec);
+ if (state->EVPtype == NULL) {
+ return -1;
+ }
+ if (PyModule_AddType(module, state->EVPtype) < 0) {
+ return -1;
+ }
+ return 0;
+}
+
+static int
+hashlib_init_evpxoftype(PyObject *module)
+{
+#ifdef PY_OPENSSL_HAS_SHAKE
+ _hashlibstate *state = get_hashlib_state(module);
+ PyObject *bases;
+
+ if (state->EVPtype == NULL) {
+ return -1;
+ }
+
+ bases = PyTuple_Pack(1, state->EVPtype);
+ if (bases == NULL) {
+ return -1;
+ }
+
+ state->EVPXOFtype = (PyTypeObject *)PyType_FromSpecWithBases(
+ &EVPXOFtype_spec, bases
+ );
+ Py_DECREF(bases);
+ if (state->EVPXOFtype == NULL) {
+ return -1;
+ }
+ if (PyModule_AddType(module, state->EVPXOFtype) < 0) {
+ return -1;
+ }
+#endif
+ return 0;
+}
+
+static int
+hashlib_init_hmactype(PyObject *module)
+{
+ _hashlibstate *state = get_hashlib_state(module);
+
+ state->HMACtype = (PyTypeObject *)PyType_FromSpec(&HMACtype_spec);
+ if (state->HMACtype == NULL) {
+ return -1;
+ }
+ if (PyModule_AddType(module, state->HMACtype) < 0) {
+ return -1;
+ }
+ return 0;
+}
+
+#if 0
+static PyModuleDef_Slot hashlib_slots[] = {
+ /* OpenSSL 1.0.2 and LibreSSL */
+ {Py_mod_exec, hashlib_openssl_legacy_init},
+ {Py_mod_exec, hashlib_init_evptype},
+ {Py_mod_exec, hashlib_init_evpxoftype},
+ {Py_mod_exec, hashlib_init_hmactype},
+ {Py_mod_exec, hashlib_md_meth_names},
+ {0, NULL}
+};
+#endif
+
+static struct PyModuleDef _hashlibmodule = {
+ PyModuleDef_HEAD_INIT,
+ .m_name = "_hashlib",
+ .m_doc = "OpenSSL interface for hashlib module",
+ .m_size = sizeof(_hashlibstate),
+ .m_methods = EVP_functions,
+ .m_slots = NULL,
+ .m_traverse = hashlib_traverse,
+ .m_clear = hashlib_clear,
+ .m_free = hashlib_free
+};
+
+PyMODINIT_FUNC
+PyInit__hashlib(void)
+{
+ PyObject *m = PyState_FindModule(&_hashlibmodule);
+ if (m != NULL) {
+ Py_INCREF(m);
+ return m;
+ }
m = PyModule_Create(&_hashlibmodule);
- if (m == NULL)
+ if (m == NULL) {
return NULL;
+ }
- openssl_md_meth_names = generate_hash_name_list();
- if (openssl_md_meth_names == NULL) {
+ if (hashlib_openssl_legacy_init(m) < 0) {
Py_DECREF(m);
return NULL;
}
- if (PyModule_AddObject(m, "openssl_md_meth_names", openssl_md_meth_names)) {
+ if (hashlib_init_evptype(m) < 0) {
+ Py_DECREF(m);
+ return NULL;
+ }
+ if (hashlib_init_evpxoftype(m) < 0) {
+ Py_DECREF(m);
+ return NULL;
+ }
+ if (hashlib_init_hmactype(m) < 0) {
+ Py_DECREF(m);
+ return NULL;
+ }
+ if (hashlib_md_meth_names(m) == -1) {
Py_DECREF(m);
return NULL;
}
-
- Py_INCREF((PyObject *)&EVPtype);
- PyModule_AddObject(m, "HASH", (PyObject *)&EVPtype);
return m;
}
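The _hashlib hunks above replace static PyTypeObject definitions with PyType_Spec/PyType_Slot heap types that live in per-module state and are registered through PyModule_AddType(). A condensed sketch of that pattern with hypothetical names, assuming the owning PyModuleDef sets m_size to sizeof(examplestate) so PyModule_GetState() returns usable storage.

/* Sketch only: a hypothetical "Widget" heap type created from a spec and
 * kept in per-module state, mirroring the EVPtype/HMACtype conversion. */
#include "Python.h"

typedef struct {
    PyObject_HEAD
} WidgetObject;

typedef struct {
    PyTypeObject *WidgetType;   /* owned reference, cleared in m_clear/m_free */
} examplestate;

static PyType_Slot Widget_slots[] = {
    {Py_tp_doc, (char *)"A do-nothing widget."},
    {0, NULL}
};

static PyType_Spec Widget_spec = {
    .name = "_example.Widget",
    .basicsize = sizeof(WidgetObject),
    .flags = Py_TPFLAGS_DEFAULT,
    .slots = Widget_slots,
};

static int
example_exec(PyObject *module)
{
    examplestate *state = (examplestate *)PyModule_GetState(module);

    state->WidgetType = (PyTypeObject *)PyType_FromSpec(&Widget_spec);
    if (state->WidgetType == NULL) {
        return -1;
    }
    /* PyModule_AddType() readies the type, takes its own reference and
     * exposes it under its short name ("Widget"). */
    if (PyModule_AddType(module, state->WidgetType) < 0) {
        return -1;
    }
    return 0;
}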
diff --git a/Modules/_heapqmodule.c b/Modules/_heapqmodule.c
index 6bc18b5f..4e85e046 100644
--- a/Modules/_heapqmodule.c
+++ b/Modules/_heapqmodule.c
@@ -555,7 +555,6 @@ _heapq__heapify_max(PyObject *module, PyObject *heap)
return heapify_internal(heap, siftup_max);
}
-
static PyMethodDef heapq_methods[] = {
_HEAPQ_HEAPPUSH_METHODDEF
_HEAPQ_HEAPPUSHPOP_METHODDEF
@@ -694,13 +693,29 @@ Believe me, real good tape sorts were quite spectacular to watch!\n\
From all times, sorting has always been a Great Art! :-)\n");
+static int
+heapq_exec(PyObject *m)
+{
+ PyObject *about = PyUnicode_FromString(__about__);
+ if (PyModule_AddObject(m, "__about__", about) < 0) {
+ Py_DECREF(about);
+ return -1;
+ }
+ return 0;
+}
+
+static struct PyModuleDef_Slot heapq_slots[] = {
+ {Py_mod_exec, heapq_exec},
+ {0, NULL}
+};
+
static struct PyModuleDef _heapqmodule = {
PyModuleDef_HEAD_INIT,
"_heapq",
module_doc,
- -1,
+ 0,
heapq_methods,
- NULL,
+ heapq_slots,
NULL,
NULL,
NULL
@@ -709,13 +724,5 @@ static struct PyModuleDef _heapqmodule = {
PyMODINIT_FUNC
PyInit__heapq(void)
{
- PyObject *m, *about;
-
- m = PyModule_Create(&_heapqmodule);
- if (m == NULL)
- return NULL;
- about = PyUnicode_DecodeUTF8(__about__, strlen(__about__), NULL);
- PyModule_AddObject(m, "__about__", about);
- return m;
+ return PyModuleDef_Init(&_heapqmodule);
}
-
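The _io hunks below swap PyObject_CallMethodObjArgs(obj, name, NULL) and _PyObject_CallMethodId(obj, &id, NULL) for the specialized no-argument and one-argument call helpers. A small sketch of the idiom; the close_it() wrapper is hypothetical.

/* Sketch only: calling a zero-argument method with the helper favoured by
 * the _io changes below. */
#include "Python.h"

static int
close_it(PyObject *obj)
{
    PyObject *name = PyUnicode_InternFromString("close");
    if (name == NULL) {
        return -1;
    }
    /* Equivalent to PyObject_CallMethodObjArgs(obj, name, NULL), but it
     * avoids walking a va_list just to find there are no arguments. */
    PyObject *res = PyObject_CallMethodNoArgs(obj, name);
    Py_DECREF(name);
    if (res == NULL) {
        return -1;
    }
    Py_DECREF(res);
    return 0;
}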
diff --git a/Modules/_io/_iomodule.c b/Modules/_io/_iomodule.c
index 49ed2cb0..d7cadace 100644
--- a/Modules/_io/_iomodule.c
+++ b/Modules/_io/_iomodule.c
@@ -9,8 +9,6 @@
#define PY_SSIZE_T_CLEAN
#include "Python.h"
-#include "pycore_pystate.h" /* _PyInterpreterState_GET_UNSAFE() */
-#include "structmember.h"
#include "_iomodule.h"
#ifdef HAVE_SYS_TYPES_H
@@ -377,14 +375,16 @@ _io_open_impl(PyObject *module, PyObject *file, const char *mode,
{
PyObject *RawIO_class = (PyObject *)&PyFileIO_Type;
#ifdef MS_WINDOWS
- PyConfig *config = &_PyInterpreterState_GET_UNSAFE()->config;
+ const PyConfig *config = _Py_GetConfig();
if (!config->legacy_windows_stdio && _PyIO_get_console_type(path_or_fd) != '\0') {
RawIO_class = (PyObject *)&PyWindowsConsoleIO_Type;
encoding = "utf-8";
}
#endif
- raw = PyObject_CallFunction(RawIO_class,
- "OsiO", path_or_fd, rawmode, closefd, opener);
+ raw = PyObject_CallFunction(RawIO_class, "OsOO",
+ path_or_fd, rawmode,
+ closefd ? Py_True : Py_False,
+ opener);
}
if (raw == NULL)
@@ -400,7 +400,7 @@ _io_open_impl(PyObject *module, PyObject *file, const char *mode,
/* buffering */
if (buffering < 0) {
- PyObject *res = _PyObject_CallMethodId(raw, &PyId_isatty, NULL);
+ PyObject *res = _PyObject_CallMethodIdNoArgs(raw, &PyId_isatty);
if (res == NULL)
goto error;
isatty = PyLong_AsLong(res);
@@ -476,10 +476,10 @@ _io_open_impl(PyObject *module, PyObject *file, const char *mode,
/* wraps into a TextIOWrapper */
wrapper = PyObject_CallFunction((PyObject *)&PyTextIOWrapper_Type,
- "Osssi",
+ "OsssO",
buffer,
encoding, errors, newline,
- line_buffering);
+ line_buffering ? Py_True : Py_False);
if (wrapper == NULL)
goto error;
result = wrapper;
@@ -494,7 +494,7 @@ _io_open_impl(PyObject *module, PyObject *file, const char *mode,
if (result != NULL) {
PyObject *exc, *val, *tb, *close_result;
PyErr_Fetch(&exc, &val, &tb);
- close_result = _PyObject_CallMethodId(result, &PyId_close, NULL);
+ close_result = _PyObject_CallMethodIdNoArgs(result, &PyId_close);
_PyErr_ChainExceptions(exc, val, tb);
Py_XDECREF(close_result);
Py_DECREF(result);
@@ -563,7 +563,7 @@ PyNumber_AsOff_t(PyObject *item, PyObject *err)
/* Otherwise replace the error with caller's error object. */
PyErr_Format(err,
"cannot fit '%.200s' into an offset-sized integer",
- item->ob_type->tp_name);
+ Py_TYPE(item)->tp_name);
}
finish:
@@ -571,13 +571,20 @@ PyNumber_AsOff_t(PyObject *item, PyObject *err)
return result;
}
+static inline _PyIO_State*
+get_io_state(PyObject *module)
+{
+ void *state = PyModule_GetState(module);
+ assert(state != NULL);
+ return (_PyIO_State *)state;
+}
_PyIO_State *
_PyIO_get_module_state(void)
{
PyObject *mod = PyState_FindModule(&_PyIO_Module);
_PyIO_State *state;
- if (mod == NULL || (state = IO_MOD_STATE(mod)) == NULL) {
+ if (mod == NULL || (state = get_io_state(mod)) == NULL) {
PyErr_SetString(PyExc_RuntimeError,
"could not find io module state "
"(interpreter shutdown?)");
@@ -613,7 +620,7 @@ _PyIO_get_locale_module(_PyIO_State *state)
static int
iomodule_traverse(PyObject *mod, visitproc visit, void *arg) {
- _PyIO_State *state = IO_MOD_STATE(mod);
+ _PyIO_State *state = get_io_state(mod);
if (!state->initialized)
return 0;
if (state->locale_module != NULL) {
@@ -626,7 +633,7 @@ iomodule_traverse(PyObject *mod, visitproc visit, void *arg) {
static int
iomodule_clear(PyObject *mod) {
- _PyIO_State *state = IO_MOD_STATE(mod);
+ _PyIO_State *state = get_io_state(mod);
if (!state->initialized)
return 0;
if (state->locale_module != NULL)
@@ -672,15 +679,11 @@ PyInit__io(void)
_PyIO_State *state = NULL;
if (m == NULL)
return NULL;
- state = IO_MOD_STATE(m);
+ state = get_io_state(m);
state->initialized = 0;
-#define ADD_TYPE(type, name) \
- if (PyType_Ready(type) < 0) \
- goto fail; \
- Py_INCREF(type); \
- if (PyModule_AddObject(m, name, (PyObject *)type) < 0) { \
- Py_DECREF(type); \
+#define ADD_TYPE(type) \
+ if (PyModule_AddType(m, type) < 0) { \
goto fail; \
}
@@ -708,54 +711,54 @@ PyInit__io(void)
/* Concrete base types of the IO ABCs.
(the ABCs themselves are declared through inheritance in io.py)
*/
- ADD_TYPE(&PyIOBase_Type, "_IOBase");
- ADD_TYPE(&PyRawIOBase_Type, "_RawIOBase");
- ADD_TYPE(&PyBufferedIOBase_Type, "_BufferedIOBase");
- ADD_TYPE(&PyTextIOBase_Type, "_TextIOBase");
+ ADD_TYPE(&PyIOBase_Type);
+ ADD_TYPE(&PyRawIOBase_Type);
+ ADD_TYPE(&PyBufferedIOBase_Type);
+ ADD_TYPE(&PyTextIOBase_Type);
/* Implementation of concrete IO objects. */
/* FileIO */
PyFileIO_Type.tp_base = &PyRawIOBase_Type;
- ADD_TYPE(&PyFileIO_Type, "FileIO");
+ ADD_TYPE(&PyFileIO_Type);
/* BytesIO */
PyBytesIO_Type.tp_base = &PyBufferedIOBase_Type;
- ADD_TYPE(&PyBytesIO_Type, "BytesIO");
+ ADD_TYPE(&PyBytesIO_Type);
if (PyType_Ready(&_PyBytesIOBuffer_Type) < 0)
goto fail;
/* StringIO */
PyStringIO_Type.tp_base = &PyTextIOBase_Type;
- ADD_TYPE(&PyStringIO_Type, "StringIO");
+ ADD_TYPE(&PyStringIO_Type);
#ifdef MS_WINDOWS
/* WindowsConsoleIO */
PyWindowsConsoleIO_Type.tp_base = &PyRawIOBase_Type;
- ADD_TYPE(&PyWindowsConsoleIO_Type, "_WindowsConsoleIO");
+ ADD_TYPE(&PyWindowsConsoleIO_Type);
#endif
/* BufferedReader */
PyBufferedReader_Type.tp_base = &PyBufferedIOBase_Type;
- ADD_TYPE(&PyBufferedReader_Type, "BufferedReader");
+ ADD_TYPE(&PyBufferedReader_Type);
/* BufferedWriter */
PyBufferedWriter_Type.tp_base = &PyBufferedIOBase_Type;
- ADD_TYPE(&PyBufferedWriter_Type, "BufferedWriter");
+ ADD_TYPE(&PyBufferedWriter_Type);
/* BufferedRWPair */
PyBufferedRWPair_Type.tp_base = &PyBufferedIOBase_Type;
- ADD_TYPE(&PyBufferedRWPair_Type, "BufferedRWPair");
+ ADD_TYPE(&PyBufferedRWPair_Type);
/* BufferedRandom */
PyBufferedRandom_Type.tp_base = &PyBufferedIOBase_Type;
- ADD_TYPE(&PyBufferedRandom_Type, "BufferedRandom");
+ ADD_TYPE(&PyBufferedRandom_Type);
/* TextIOWrapper */
PyTextIOWrapper_Type.tp_base = &PyTextIOBase_Type;
- ADD_TYPE(&PyTextIOWrapper_Type, "TextIOWrapper");
+ ADD_TYPE(&PyTextIOWrapper_Type);
/* IncrementalNewlineDecoder */
- ADD_TYPE(&PyIncrementalNewlineDecoder_Type, "IncrementalNewlineDecoder");
+ ADD_TYPE(&PyIncrementalNewlineDecoder_Type);
/* Interned strings */
#define ADD_INTERNED(name) \
diff --git a/Modules/_io/_iomodule.h b/Modules/_io/_iomodule.h
index 4d318acd..a8f3951e 100644
--- a/Modules/_io/_iomodule.h
+++ b/Modules/_io/_iomodule.h
@@ -2,6 +2,8 @@
* Declarations shared between the different parts of the io module
*/
+#include "exports.h"
+
/* ABCs */
extern PyTypeObject PyIOBase_Type;
extern PyTypeObject PyRawIOBase_Type;
@@ -183,4 +185,4 @@ extern PyObject *_PyIO_str_write;
extern PyObject *_PyIO_empty_str;
extern PyObject *_PyIO_empty_bytes;
-extern PyTypeObject _PyBytesIOBuffer_Type;
+extern Py_EXPORTED_SYMBOL PyTypeObject _PyBytesIOBuffer_Type;
diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c
index ad7a8c9d..f8e21f20 100644
--- a/Modules/_io/bufferedio.c
+++ b/Modules/_io/bufferedio.c
@@ -10,9 +10,7 @@
#define PY_SSIZE_T_CLEAN
#include "Python.h"
#include "pycore_object.h"
-#include "pycore_pystate.h"
-#include "structmember.h"
-#include "pythread.h"
+#include "structmember.h" // PyMemberDef
#include "_iomodule.h"
/*[clinic input]
@@ -292,12 +290,11 @@ _enter_buffered_busy(buffered *self)
}
Py_END_ALLOW_THREADS
if (relax_locking && st != PY_LOCK_ACQUIRED) {
- PyObject *msgobj = PyUnicode_FromFormat(
- "could not acquire lock for %A at interpreter "
+ PyObject *ascii = PyObject_ASCII((PyObject*)self);
+ _Py_FatalErrorFormat(__func__,
+ "could not acquire lock for %s at interpreter "
"shutdown, possibly due to daemon threads",
- (PyObject *) self);
- const char *msg = PyUnicode_AsUTF8(msgobj);
- Py_FatalError(msg);
+ ascii ? PyUnicode_AsUTF8(ascii) : "");
}
return 1;
}
@@ -439,8 +436,8 @@ buffered_dealloc_warn(buffered *self, PyObject *source)
{
if (self->ok && self->raw) {
PyObject *r;
- r = _PyObject_CallMethodIdObjArgs(self->raw, &PyId__dealloc_warn,
- source, NULL);
+ r = _PyObject_CallMethodIdOneArg(self->raw, &PyId__dealloc_warn,
+ source);
if (r)
Py_DECREF(r);
else
@@ -461,7 +458,7 @@ static PyObject *
buffered_simple_flush(buffered *self, PyObject *args)
{
CHECK_INITIALIZED(self)
- return PyObject_CallMethodObjArgs(self->raw, _PyIO_str_flush, NULL);
+ return PyObject_CallMethodNoArgs(self->raw, _PyIO_str_flush);
}
static int
@@ -513,7 +510,7 @@ buffered_close(buffered *self, PyObject *args)
}
/* flush() will most probably re-take the lock, so drop it first */
LEAVE_BUFFERED(self)
- res = PyObject_CallMethodObjArgs((PyObject *)self, _PyIO_str_flush, NULL);
+ res = PyObject_CallMethodNoArgs((PyObject *)self, _PyIO_str_flush);
if (!ENTER_BUFFERED(self))
return NULL;
if (res == NULL)
@@ -521,7 +518,7 @@ buffered_close(buffered *self, PyObject *args)
else
Py_DECREF(res);
- res = PyObject_CallMethodObjArgs(self->raw, _PyIO_str_close, NULL);
+ res = PyObject_CallMethodNoArgs(self->raw, _PyIO_str_close);
if (self->buffer) {
PyMem_Free(self->buffer);
@@ -545,7 +542,7 @@ buffered_detach(buffered *self, PyObject *Py_UNUSED(ignored))
{
PyObject *raw, *res;
CHECK_INITIALIZED(self)
- res = PyObject_CallMethodObjArgs((PyObject *)self, _PyIO_str_flush, NULL);
+ res = PyObject_CallMethodNoArgs((PyObject *)self, _PyIO_str_flush);
if (res == NULL)
return NULL;
Py_DECREF(res);
@@ -562,21 +559,21 @@ static PyObject *
buffered_seekable(buffered *self, PyObject *Py_UNUSED(ignored))
{
CHECK_INITIALIZED(self)
- return PyObject_CallMethodObjArgs(self->raw, _PyIO_str_seekable, NULL);
+ return PyObject_CallMethodNoArgs(self->raw, _PyIO_str_seekable);
}
static PyObject *
buffered_readable(buffered *self, PyObject *Py_UNUSED(ignored))
{
CHECK_INITIALIZED(self)
- return PyObject_CallMethodObjArgs(self->raw, _PyIO_str_readable, NULL);
+ return PyObject_CallMethodNoArgs(self->raw, _PyIO_str_readable);
}
static PyObject *
buffered_writable(buffered *self, PyObject *Py_UNUSED(ignored))
{
CHECK_INITIALIZED(self)
- return PyObject_CallMethodObjArgs(self->raw, _PyIO_str_writable, NULL);
+ return PyObject_CallMethodNoArgs(self->raw, _PyIO_str_writable);
}
static PyObject *
@@ -599,14 +596,14 @@ static PyObject *
buffered_fileno(buffered *self, PyObject *Py_UNUSED(ignored))
{
CHECK_INITIALIZED(self)
- return PyObject_CallMethodObjArgs(self->raw, _PyIO_str_fileno, NULL);
+ return PyObject_CallMethodNoArgs(self->raw, _PyIO_str_fileno);
}
static PyObject *
buffered_isatty(buffered *self, PyObject *Py_UNUSED(ignored))
{
CHECK_INITIALIZED(self)
- return PyObject_CallMethodObjArgs(self->raw, _PyIO_str_isatty, NULL);
+ return PyObject_CallMethodNoArgs(self->raw, _PyIO_str_isatty);
}
/* Forward decls */
@@ -670,7 +667,7 @@ _buffered_raw_tell(buffered *self)
{
Py_off_t n;
PyObject *res;
- res = PyObject_CallMethodObjArgs(self->raw, _PyIO_str_tell, NULL);
+ res = PyObject_CallMethodNoArgs(self->raw, _PyIO_str_tell);
if (res == NULL)
return -1;
n = PyNumber_AsOff_t(res, PyExc_ValueError);
@@ -1315,16 +1312,20 @@ _io__Buffered_truncate_impl(buffered *self, PyObject *pos)
PyObject *res = NULL;
CHECK_INITIALIZED(self)
+ CHECK_CLOSED(self, "truncate of closed file")
+ if (!self->writable) {
+ return bufferediobase_unsupported("truncate");
+ }
if (!ENTER_BUFFERED(self))
return NULL;
- if (self->writable) {
- res = buffered_flush_and_rewind_unlocked(self);
- if (res == NULL)
- goto end;
- Py_CLEAR(res);
+ res = buffered_flush_and_rewind_unlocked(self);
+ if (res == NULL) {
+ goto end;
}
- res = PyObject_CallMethodObjArgs(self->raw, _PyIO_str_truncate, pos, NULL);
+ Py_CLEAR(res);
+
+ res = PyObject_CallMethodOneArg(self->raw, _PyIO_str_truncate, pos);
if (res == NULL)
goto end;
/* Reset cached position */
@@ -1351,8 +1352,8 @@ buffered_iternext(buffered *self)
line = _buffered_readline(self, -1);
}
else {
- line = PyObject_CallMethodObjArgs((PyObject *)self,
- _PyIO_str_readline, NULL);
+ line = PyObject_CallMethodNoArgs((PyObject *)self,
+ _PyIO_str_readline);
if (line && !PyBytes_Check(line)) {
PyErr_Format(PyExc_OSError,
"readline() should have returned a bytes object, "
@@ -1445,8 +1446,8 @@ _io_BufferedReader___init___impl(buffered *self, PyObject *raw,
return -1;
_bufferedreader_reset_buf(self);
- self->fast_closed_checks = (Py_TYPE(self) == &PyBufferedReader_Type &&
- Py_TYPE(raw) == &PyFileIO_Type);
+ self->fast_closed_checks = (Py_IS_TYPE(self, &PyBufferedReader_Type) &&
+ Py_IS_TYPE(raw, &PyFileIO_Type));
self->ok = 1;
return 0;
@@ -1470,7 +1471,7 @@ _bufferedreader_raw_read(buffered *self, char *start, Py_ssize_t len)
raised (see issue #10956).
*/
do {
- res = PyObject_CallMethodObjArgs(self->raw, _PyIO_str_readinto, memobj, NULL);
+ res = PyObject_CallMethodOneArg(self->raw, _PyIO_str_readinto, memobj);
} while (res == NULL && _PyIO_trap_eintr());
Py_DECREF(memobj);
if (res == NULL)
@@ -1569,7 +1570,7 @@ _bufferedreader_read_all(buffered *self)
}
/* Read until EOF or until read() would block. */
- data = PyObject_CallMethodObjArgs(self->raw, _PyIO_str_read, NULL);
+ data = PyObject_CallMethodNoArgs(self->raw, _PyIO_str_read);
if (data == NULL)
goto cleanup;
if (data != Py_None && !PyBytes_Check(data)) {
@@ -1791,8 +1792,8 @@ _io_BufferedWriter___init___impl(buffered *self, PyObject *raw,
_bufferedwriter_reset_buf(self);
self->pos = 0;
- self->fast_closed_checks = (Py_TYPE(self) == &PyBufferedWriter_Type &&
- Py_TYPE(raw) == &PyFileIO_Type);
+ self->fast_closed_checks = (Py_IS_TYPE(self, &PyBufferedWriter_Type) &&
+ Py_IS_TYPE(raw, &PyFileIO_Type));
self->ok = 1;
return 0;
@@ -1818,7 +1819,7 @@ _bufferedwriter_raw_write(buffered *self, char *start, Py_ssize_t len)
*/
do {
errno = 0;
- res = PyObject_CallMethodObjArgs(self->raw, _PyIO_str_write, memobj, NULL);
+ res = PyObject_CallMethodOneArg(self->raw, _PyIO_str_write, memobj);
errnum = errno;
} while (res == NULL && _PyIO_trap_eintr());
Py_DECREF(memobj);
@@ -2305,8 +2306,8 @@ _io_BufferedRandom___init___impl(buffered *self, PyObject *raw,
_bufferedwriter_reset_buf(self);
self->pos = 0;
- self->fast_closed_checks = (Py_TYPE(self) == &PyBufferedRandom_Type &&
- Py_TYPE(raw) == &PyFileIO_Type);
+ self->fast_closed_checks = (Py_IS_TYPE(self, &PyBufferedRandom_Type) &&
+ Py_IS_TYPE(raw, &PyFileIO_Type));
self->ok = 1;
return 0;
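
The bufferedio.c hunks above follow one pattern: varargs calls through PyObject_CallMethodObjArgs(obj, name, ..., NULL) become the fixed-arity helpers PyObject_CallMethodNoArgs()/PyObject_CallMethodOneArg(), and exact-type tests move to Py_IS_TYPE(). A minimal sketch of the equivalence, assuming only the public 3.9 C API (the call_flush helper and the interned name are illustrative, not part of the patch):

    #include "Python.h"

    /* Hypothetical helper: obj.flush() called the old way, with the new
       form shown alongside for comparison. */
    static PyObject *
    call_flush(PyObject *obj)
    {
        PyObject *name = PyUnicode_InternFromString("flush");
        if (name == NULL) {
            return NULL;
        }
        /* Old form: arguments packed from varargs, NULL sentinel required. */
        PyObject *res = PyObject_CallMethodObjArgs(obj, name, NULL);
        /* New form, same result without the packing:
           PyObject *res = PyObject_CallMethodNoArgs(obj, name); */
        Py_DECREF(name);
        return res;
    }

The one-argument counterpart works the same way: PyObject_CallMethodOneArg(obj, name, arg) replaces PyObject_CallMethodObjArgs(obj, name, arg, NULL).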
diff --git a/Modules/_io/bytesio.c b/Modules/_io/bytesio.c
index 3cf6402e..2468f45f 100644
--- a/Modules/_io/bytesio.c
+++ b/Modules/_io/bytesio.c
@@ -1,6 +1,6 @@
#include "Python.h"
#include "pycore_object.h"
-#include "structmember.h" /* for offsetof() */
+#include <stddef.h>               // offsetof()
#include "_iomodule.h"
/*[clinic input]
@@ -31,17 +31,34 @@ typedef struct {
* exports > 0. Py_REFCNT(buf) == 1, any modifications are forbidden.
*/
+static int
+check_closed(bytesio *self)
+{
+ if (self->buf == NULL) {
+ PyErr_SetString(PyExc_ValueError, "I/O operation on closed file.");
+ return 1;
+ }
+ return 0;
+}
+
+static int
+check_exports(bytesio *self)
+{
+ if (self->exports > 0) {
+ PyErr_SetString(PyExc_BufferError,
+ "Existing exports of data: object cannot be re-sized");
+ return 1;
+ }
+ return 0;
+}
+
#define CHECK_CLOSED(self) \
- if ((self)->buf == NULL) { \
- PyErr_SetString(PyExc_ValueError, \
- "I/O operation on closed file."); \
+ if (check_closed(self)) { \
return NULL; \
}
#define CHECK_EXPORTS(self) \
- if ((self)->exports > 0) { \
- PyErr_SetString(PyExc_BufferError, \
- "Existing exports of data: object cannot be re-sized"); \
+ if (check_exports(self)) { \
return NULL; \
}
@@ -156,23 +173,41 @@ resize_buffer(bytesio *self, size_t size)
}
/* Internal routine for writing a string of bytes to the buffer of a BytesIO
- object. Returns the number of bytes written, or -1 on error. */
-static Py_ssize_t
-write_bytes(bytesio *self, const char *bytes, Py_ssize_t len)
+ object. Returns the number of bytes written, or -1 on error.
+ Inlining is disabled because it significantly decreases the performance
+ of writelines() in a PGO build. */
+_Py_NO_INLINE static Py_ssize_t
+write_bytes(bytesio *self, PyObject *b)
{
- size_t endpos;
- assert(self->buf != NULL);
- assert(self->pos >= 0);
- assert(len >= 0);
+ if (check_closed(self)) {
+ return -1;
+ }
+ if (check_exports(self)) {
+ return -1;
+ }
- endpos = (size_t)self->pos + len;
+ Py_buffer buf;
+ if (PyObject_GetBuffer(b, &buf, PyBUF_CONTIG_RO) < 0) {
+ return -1;
+ }
+ Py_ssize_t len = buf.len;
+ if (len == 0) {
+ goto done;
+ }
+
+ assert(self->pos >= 0);
+ size_t endpos = (size_t)self->pos + len;
if (endpos > (size_t)PyBytes_GET_SIZE(self->buf)) {
- if (resize_buffer(self, endpos) < 0)
- return -1;
+ if (resize_buffer(self, endpos) < 0) {
+ len = -1;
+ goto done;
+ }
}
else if (SHARED_BUF(self)) {
- if (unshare_buffer(self, Py_MAX(endpos, (size_t)self->string_size)) < 0)
- return -1;
+ if (unshare_buffer(self, Py_MAX(endpos, (size_t)self->string_size)) < 0) {
+ len = -1;
+ goto done;
+ }
}
if (self->pos > self->string_size) {
@@ -190,7 +225,7 @@ write_bytes(bytesio *self, const char *bytes, Py_ssize_t len)
/* Copy the data to the internal buffer, overwriting some of the existing
data if self->pos < self->string_size. */
- memcpy(PyBytes_AS_STRING(self->buf) + self->pos, bytes, len);
+ memcpy(PyBytes_AS_STRING(self->buf) + self->pos, buf.buf, len);
self->pos = endpos;
/* Set the new length of the internal string if it has changed. */
@@ -198,6 +233,8 @@ write_bytes(bytesio *self, const char *bytes, Py_ssize_t len)
self->string_size = endpos;
}
+ done:
+ PyBuffer_Release(&buf);
return len;
}
@@ -356,7 +393,7 @@ _io_BytesIO_tell_impl(bytesio *self)
static PyObject *
read_bytes(bytesio *self, Py_ssize_t size)
{
- char *output;
+ const char *output;
assert(self->buf != NULL);
assert(size <= self->string_size);
@@ -465,7 +502,7 @@ _io_BytesIO_readlines_impl(bytesio *self, PyObject *arg)
{
Py_ssize_t maxsize, size, n;
PyObject *result, *line;
- char *output;
+ const char *output;
CHECK_CLOSED(self);
@@ -669,19 +706,7 @@ static PyObject *
_io_BytesIO_write(bytesio *self, PyObject *b)
/*[clinic end generated code: output=53316d99800a0b95 input=f5ec7c8c64ed720a]*/
{
- Py_ssize_t n = 0;
- Py_buffer buf;
-
- CHECK_CLOSED(self);
- CHECK_EXPORTS(self);
-
- if (PyObject_GetBuffer(b, &buf, PyBUF_CONTIG_RO) < 0)
- return NULL;
-
- if (buf.len != 0)
- n = write_bytes(self, buf.buf, buf.len);
-
- PyBuffer_Release(&buf);
+ Py_ssize_t n = write_bytes(self, b);
return n >= 0 ? PyLong_FromSsize_t(n) : NULL;
}
@@ -702,7 +727,6 @@ _io_BytesIO_writelines(bytesio *self, PyObject *lines)
/*[clinic end generated code: output=7f33aa3271c91752 input=e972539176fc8fc1]*/
{
PyObject *it, *item;
- PyObject *ret;
CHECK_CLOSED(self);
@@ -711,13 +735,12 @@ _io_BytesIO_writelines(bytesio *self, PyObject *lines)
return NULL;
while ((item = PyIter_Next(it)) != NULL) {
- ret = _io_BytesIO_write(self, item);
+ Py_ssize_t ret = write_bytes(self, item);
Py_DECREF(item);
- if (ret == NULL) {
+ if (ret < 0) {
Py_DECREF(it);
return NULL;
}
- Py_DECREF(ret);
}
Py_DECREF(it);
@@ -1101,7 +1124,7 @@ static PyBufferProcs bytesiobuf_as_buffer = {
(releasebufferproc) bytesiobuf_releasebuffer,
};
-PyTypeObject _PyBytesIOBuffer_Type = {
+Py_EXPORTED_SYMBOL PyTypeObject _PyBytesIOBuffer_Type = {
PyVarObject_HEAD_INIT(NULL, 0)
"_io._BytesIOBuffer", /*tp_name*/
sizeof(bytesiobuf), /*tp_basicsize*/
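
The write_bytes() rewrite above moves buffer handling out of _io_BytesIO_write() and into the helper itself, so writelines() can bypass the Python-level write(). The core of it is the standard buffer-protocol dance; a small sketch under that assumption (copy_from is an illustrative name, not from the patch):

    #include "Python.h"
    #include <string.h>

    /* Acquire a read-only contiguous view of any bytes-like object,
       copy at most dst_size bytes out of it, and always release the view. */
    static Py_ssize_t
    copy_from(PyObject *b, char *dst, Py_ssize_t dst_size)
    {
        Py_buffer view;
        if (PyObject_GetBuffer(b, &view, PyBUF_CONTIG_RO) < 0) {
            return -1;
        }
        Py_ssize_t n = (view.len < dst_size) ? view.len : dst_size;
        memcpy(dst, view.buf, (size_t)n);
        PyBuffer_Release(&view);
        return n;
    }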
diff --git a/Modules/_io/clinic/bufferedio.c.h b/Modules/_io/clinic/bufferedio.c.h
index 72841fcb..56d6332a 100644
--- a/Modules/_io/clinic/bufferedio.c.h
+++ b/Modules/_io/clinic/bufferedio.c.h
@@ -578,7 +578,7 @@ _io_BufferedRWPair___init__(PyObject *self, PyObject *args, PyObject *kwargs)
PyObject *writer;
Py_ssize_t buffer_size = DEFAULT_BUFFER_SIZE;
- if ((Py_TYPE(self) == &PyBufferedRWPair_Type) &&
+ if (Py_IS_TYPE(self, &PyBufferedRWPair_Type) &&
!_PyArg_NoKeywords("BufferedRWPair", kwargs)) {
goto exit;
}
@@ -672,4 +672,4 @@ skip_optional_pos:
exit:
return return_value;
}
-/*[clinic end generated code: output=7246104f6c7d3167 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=7d9ad40c95bdd808 input=a9049054013a1b77]*/
diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c
index 482d08f9..caf91dfd 100644
--- a/Modules/_io/fileio.c
+++ b/Modules/_io/fileio.c
@@ -3,7 +3,7 @@
#define PY_SSIZE_T_CLEAN
#include "Python.h"
#include "pycore_object.h"
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
#include
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
@@ -146,8 +146,8 @@ _io_FileIO_close_impl(fileio *self)
PyObject *exc, *val, *tb;
int rc;
_Py_IDENTIFIER(close);
- res = _PyObject_CallMethodIdObjArgs((PyObject*)&PyRawIOBase_Type,
- &PyId_close, self, NULL);
+ res = _PyObject_CallMethodIdOneArg((PyObject*)&PyRawIOBase_Type,
+ &PyId_close, (PyObject *)self);
if (!self->closefd) {
self->fd = -1;
return res;
diff --git a/Modules/_io/iobase.c b/Modules/_io/iobase.c
index fab45097..a8e55c34 100644
--- a/Modules/_io/iobase.c
+++ b/Modules/_io/iobase.c
@@ -11,7 +11,7 @@
#define PY_SSIZE_T_CLEAN
#include "Python.h"
#include "pycore_object.h"
-#include "structmember.h"
+#include <stddef.h>               // offsetof()
#include "_iomodule.h"
/*[clinic input]
@@ -235,7 +235,7 @@ _io__IOBase_close_impl(PyObject *self)
Py_RETURN_NONE;
}
- res = PyObject_CallMethodObjArgs(self, _PyIO_str_flush, NULL);
+ res = PyObject_CallMethodNoArgs(self, _PyIO_str_flush);
PyErr_Fetch(&exc, &val, &tb);
rc = _PyObject_SetAttrId(self, &PyId___IOBase_closed, Py_True);
@@ -281,15 +281,13 @@ iobase_finalize(PyObject *self)
finalization process. */
if (_PyObject_SetAttrId(self, &PyId__finalizing, Py_True))
PyErr_Clear();
- res = PyObject_CallMethodObjArgs((PyObject *) self, _PyIO_str_close,
- NULL);
+ res = PyObject_CallMethodNoArgs((PyObject *)self, _PyIO_str_close);
/* Silencing I/O errors is bad, but printing spurious tracebacks is
equally as bad, and potentially more frequent (because of
shutdown issues). */
if (res == NULL) {
#ifndef Py_DEBUG
- const PyConfig *config = &_PyInterpreterState_GET_UNSAFE()->config;
- if (config->dev_mode) {
+ if (_Py_GetConfig()->dev_mode) {
PyErr_WriteUnraisable(self);
}
else {
@@ -383,7 +381,7 @@ _io__IOBase_seekable_impl(PyObject *self)
PyObject *
_PyIOBase_check_seekable(PyObject *self, PyObject *args)
{
- PyObject *res = PyObject_CallMethodObjArgs(self, _PyIO_str_seekable, NULL);
+ PyObject *res = PyObject_CallMethodNoArgs(self, _PyIO_str_seekable);
if (res == NULL)
return NULL;
if (res != Py_True) {
@@ -416,7 +414,7 @@ _io__IOBase_readable_impl(PyObject *self)
PyObject *
_PyIOBase_check_readable(PyObject *self, PyObject *args)
{
- PyObject *res = PyObject_CallMethodObjArgs(self, _PyIO_str_readable, NULL);
+ PyObject *res = PyObject_CallMethodNoArgs(self, _PyIO_str_readable);
if (res == NULL)
return NULL;
if (res != Py_True) {
@@ -449,7 +447,7 @@ _io__IOBase_writable_impl(PyObject *self)
PyObject *
_PyIOBase_check_writable(PyObject *self, PyObject *args)
{
- PyObject *res = PyObject_CallMethodObjArgs(self, _PyIO_str_writable, NULL);
+ PyObject *res = PyObject_CallMethodNoArgs(self, _PyIO_str_writable);
if (res == NULL)
return NULL;
if (res != Py_True) {
@@ -478,7 +476,7 @@ iobase_enter(PyObject *self, PyObject *args)
static PyObject *
iobase_exit(PyObject *self, PyObject *args)
{
- return PyObject_CallMethodObjArgs(self, _PyIO_str_close, NULL);
+ return PyObject_CallMethodNoArgs(self, _PyIO_str_close);
}
/* Lower-level APIs */
@@ -557,7 +555,7 @@ _io__IOBase_readline_impl(PyObject *self, Py_ssize_t limit)
PyObject *b;
if (peek != NULL) {
- PyObject *readahead = PyObject_CallFunctionObjArgs(peek, _PyLong_One, NULL);
+ PyObject *readahead = PyObject_CallOneArg(peek, _PyLong_One);
if (readahead == NULL) {
/* NOTE: PyErr_SetFromErrno() calls PyErr_CheckSignals()
when EINTR occurs so we needn't do it ourselves. */
@@ -656,7 +654,7 @@ iobase_iter(PyObject *self)
static PyObject *
iobase_iternext(PyObject *self)
{
- PyObject *line = PyObject_CallMethodObjArgs(self, _PyIO_str_readline, NULL);
+ PyObject *line = PyObject_CallMethodNoArgs(self, _PyIO_str_readline);
if (line == NULL)
return NULL;
@@ -921,7 +919,7 @@ _io__RawIOBase_read_impl(PyObject *self, Py_ssize_t n)
if (n < 0) {
_Py_IDENTIFIER(readall);
- return _PyObject_CallMethodId(self, &PyId_readall, NULL);
+ return _PyObject_CallMethodIdNoArgs(self, &PyId_readall);
}
/* TODO: allocate a bytes object directly instead and manually construct
diff --git a/Modules/_io/stringio.c b/Modules/_io/stringio.c
index 8b5fa7a3..e76152e6 100644
--- a/Modules/_io/stringio.c
+++ b/Modules/_io/stringio.c
@@ -1,6 +1,6 @@
#define PY_SSIZE_T_CLEAN
#include "Python.h"
-#include "structmember.h"
+#include <stddef.h>               // offsetof()
#include "pycore_accu.h"
#include "pycore_object.h"
#include "_iomodule.h"
@@ -402,14 +402,14 @@ stringio_iternext(stringio *self)
CHECK_CLOSED(self);
ENSURE_REALIZED(self);
- if (Py_TYPE(self) == &PyStringIO_Type) {
+ if (Py_IS_TYPE(self, &PyStringIO_Type)) {
/* Skip method call overhead for speed */
line = _stringio_readline(self, -1);
}
else {
/* XXX is subclassing StringIO really supported? */
- line = PyObject_CallMethodObjArgs((PyObject *)self,
- _PyIO_str_readline, NULL);
+ line = PyObject_CallMethodNoArgs((PyObject *)self,
+ _PyIO_str_readline);
if (line && !PyUnicode_Check(line)) {
PyErr_Format(PyExc_OSError,
"readline() should have returned a str object, "
@@ -714,9 +714,9 @@ _io_StringIO___init___impl(stringio *self, PyObject *value,
}
if (self->readuniversal) {
- self->decoder = PyObject_CallFunction(
+ self->decoder = PyObject_CallFunctionObjArgs(
(PyObject *)&PyIncrementalNewlineDecoder_Type,
- "Oi", Py_None, (int) self->readtranslate);
+ Py_None, self->readtranslate ? Py_True : Py_False, NULL);
if (self->decoder == NULL)
return -1;
}
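
The stringio.c __init__ change above drops a format-string call ("Oi") that converted the translate flag to a C int; passing Py_True/Py_False directly keeps the argument a real bool object. A sketch of the same idiom with an illustrative factory and flag:

    #include "Python.h"

    /* Illustrative wrapper: call factory(None, bool(flag)) without a format
       string, mirroring the PyIncrementalNewlineDecoder call above. */
    static PyObject *
    make_decoder(PyObject *factory, int flag)
    {
        return PyObject_CallFunctionObjArgs(
            factory, Py_None, flag ? Py_True : Py_False, NULL);
    }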
diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c
index a4bf7cd1..f2c72ebd 100644
--- a/Modules/_io/textio.c
+++ b/Modules/_io/textio.c
@@ -8,8 +8,10 @@
#define PY_SSIZE_T_CLEAN
#include "Python.h"
+#include "pycore_interp.h" // PyInterpreterState.fs_codec
#include "pycore_object.h"
-#include "structmember.h"
+#include "pycore_pystate.h" // _PyInterpreterState_GET()
+#include "structmember.h" // PyMemberDef
#include "_iomodule.h"
/*[clinic input]
@@ -340,7 +342,7 @@ _PyIncrementalNewlineDecoder_decode(PyObject *myself,
goto error;
kind = PyUnicode_KIND(modified);
out = PyUnicode_DATA(modified);
- PyUnicode_WRITE(kind, PyUnicode_DATA(modified), 0, '\r');
+ PyUnicode_WRITE(kind, out, 0, '\r');
memcpy(out + kind, PyUnicode_DATA(output), kind * output_len);
Py_DECREF(output);
output = modified; /* output remains ready */
@@ -367,7 +369,7 @@ _PyIncrementalNewlineDecoder_decode(PyObject *myself,
/* Record which newlines are read and do newline translation if desired,
all in one pass. */
{
- void *in_str;
+ const void *in_str;
Py_ssize_t len;
int seennl = self->seennl;
int only_lf = 0;
@@ -447,7 +449,7 @@ _PyIncrementalNewlineDecoder_decode(PyObject *myself,
else {
void *translated;
int kind = PyUnicode_KIND(output);
- void *in_str = PyUnicode_DATA(output);
+ const void *in_str = PyUnicode_DATA(output);
Py_ssize_t in, out;
/* XXX: Previous in-place translation here is disabled as
resizing is not possible anymore */
@@ -527,8 +529,8 @@ _io_IncrementalNewlineDecoder_getstate_impl(nldecoder_object *self)
unsigned long long flag;
if (self->decoder != Py_None) {
- PyObject *state = PyObject_CallMethodObjArgs(self->decoder,
- _PyIO_str_getstate, NULL);
+ PyObject *state = PyObject_CallMethodNoArgs(self->decoder,
+ _PyIO_str_getstate);
if (state == NULL)
return NULL;
if (!PyTuple_Check(state)) {
@@ -601,7 +603,7 @@ _io_IncrementalNewlineDecoder_reset_impl(nldecoder_object *self)
self->seennl = 0;
self->pendingcr = 0;
if (self->decoder != Py_None)
- return PyObject_CallMethodObjArgs(self->decoder, _PyIO_str_reset, NULL);
+ return PyObject_CallMethodNoArgs(self->decoder, _PyIO_str_reset);
else
Py_RETURN_NONE;
}
@@ -862,7 +864,7 @@ _textiowrapper_set_decoder(textio *self, PyObject *codec_info,
PyObject *res;
int r;
- res = _PyObject_CallMethodId(self->buffer, &PyId_readable, NULL);
+ res = _PyObject_CallMethodIdNoArgs(self->buffer, &PyId_readable);
if (res == NULL)
return -1;
@@ -880,9 +882,9 @@ _textiowrapper_set_decoder(textio *self, PyObject *codec_info,
return -1;
if (self->readuniversal) {
- PyObject *incrementalDecoder = PyObject_CallFunction(
+ PyObject *incrementalDecoder = PyObject_CallFunctionObjArgs(
(PyObject *)&PyIncrementalNewlineDecoder_Type,
- "Oi", self->decoder, (int)self->readtranslate);
+ self->decoder, self->readtranslate ? Py_True : Py_False, NULL);
if (incrementalDecoder == NULL)
return -1;
Py_CLEAR(self->decoder);
@@ -897,7 +899,7 @@ _textiowrapper_decode(PyObject *decoder, PyObject *bytes, int eof)
{
PyObject *chars;
- if (Py_TYPE(decoder) == &PyIncrementalNewlineDecoder_Type)
+ if (Py_IS_TYPE(decoder, &PyIncrementalNewlineDecoder_Type))
chars = _PyIncrementalNewlineDecoder_decode(decoder, bytes, eof);
else
chars = PyObject_CallMethodObjArgs(decoder, _PyIO_str_decode, bytes,
@@ -917,7 +919,7 @@ _textiowrapper_set_encoder(textio *self, PyObject *codec_info,
PyObject *res;
int r;
- res = _PyObject_CallMethodId(self->buffer, &PyId_writable, NULL);
+ res = _PyObject_CallMethodIdNoArgs(self->buffer, &PyId_writable);
if (res == NULL)
return -1;
@@ -963,8 +965,8 @@ _textiowrapper_fix_encoder_state(textio *self)
self->encoding_start_of_stream = 1;
- PyObject *cookieObj = PyObject_CallMethodObjArgs(
- self->buffer, _PyIO_str_tell, NULL);
+ PyObject *cookieObj = PyObject_CallMethodNoArgs(
+ self->buffer, _PyIO_str_tell);
if (cookieObj == NULL) {
return -1;
}
@@ -977,8 +979,8 @@ _textiowrapper_fix_encoder_state(textio *self)
if (cmp == 0) {
self->encoding_start_of_stream = 0;
- PyObject *res = PyObject_CallMethodObjArgs(
- self->encoder, _PyIO_str_setstate, _PyLong_Zero, NULL);
+ PyObject *res = PyObject_CallMethodOneArg(
+ self->encoder, _PyIO_str_setstate, _PyLong_Zero);
if (res == NULL) {
return -1;
}
@@ -988,6 +990,46 @@ _textiowrapper_fix_encoder_state(textio *self)
return 0;
}
+static int
+io_check_errors(PyObject *errors)
+{
+ assert(errors != NULL && errors != Py_None);
+
+ PyInterpreterState *interp = _PyInterpreterState_GET();
+#ifndef Py_DEBUG
+ /* In release mode, only check in development mode (-X dev) */
+ if (!_PyInterpreterState_GetConfig(interp)->dev_mode) {
+ return 0;
+ }
+#else
+ /* Always check in debug mode */
+#endif
+
+ /* Avoid calling PyCodec_LookupError() before the codec registry is ready:
+ before _PyUnicode_InitEncodings() is called. */
+ if (!interp->unicode.fs_codec.encoding) {
+ return 0;
+ }
+
+ Py_ssize_t name_length;
+ const char *name = PyUnicode_AsUTF8AndSize(errors, &name_length);
+ if (name == NULL) {
+ return -1;
+ }
+ if (strlen(name) != (size_t)name_length) {
+ PyErr_SetString(PyExc_ValueError, "embedded null character in errors");
+ return -1;
+ }
+ PyObject *handler = PyCodec_LookupError(name);
+ if (handler != NULL) {
+ Py_DECREF(handler);
+ return 0;
+ }
+ return -1;
+}
+
+
+
/*[clinic input]
_io.TextIOWrapper.__init__
buffer: object
@@ -1054,7 +1096,10 @@ _io_TextIOWrapper___init___impl(textio *self, PyObject *buffer,
PyErr_Format(
PyExc_TypeError,
"TextIOWrapper() argument 'errors' must be str or None, not %.50s",
- errors->ob_type->tp_name);
+ Py_TYPE(errors)->tp_name);
+ return -1;
+ }
+ else if (io_check_errors(errors)) {
return -1;
}
@@ -1083,7 +1128,7 @@ _io_TextIOWrapper___init___impl(textio *self, PyObject *buffer,
state = IO_STATE();
if (state == NULL)
goto error;
- fileno = _PyObject_CallMethodId(buffer, &PyId_fileno, NULL);
+ fileno = _PyObject_CallMethodIdNoArgs(buffer, &PyId_fileno);
/* Ignore only AttributeError and UnsupportedOperation */
if (fileno == NULL) {
if (PyErr_ExceptionMatches(PyExc_AttributeError) ||
@@ -1112,8 +1157,8 @@ _io_TextIOWrapper___init___impl(textio *self, PyObject *buffer,
PyObject *locale_module = _PyIO_get_locale_module(state);
if (locale_module == NULL)
goto catch_ImportError;
- self->encoding = _PyObject_CallMethodIdObjArgs(
- locale_module, &PyId_getpreferredencoding, Py_False, NULL);
+ self->encoding = _PyObject_CallMethodIdOneArg(
+ locale_module, &PyId_getpreferredencoding, Py_False);
Py_DECREF(locale_module);
if (self->encoding == NULL) {
catch_ImportError:
@@ -1183,22 +1228,22 @@ _io_TextIOWrapper___init___impl(textio *self, PyObject *buffer,
/* Finished sorting out the codec details */
Py_CLEAR(codec_info);
- if (Py_TYPE(buffer) == &PyBufferedReader_Type ||
- Py_TYPE(buffer) == &PyBufferedWriter_Type ||
- Py_TYPE(buffer) == &PyBufferedRandom_Type)
+ if (Py_IS_TYPE(buffer, &PyBufferedReader_Type) ||
+ Py_IS_TYPE(buffer, &PyBufferedWriter_Type) ||
+ Py_IS_TYPE(buffer, &PyBufferedRandom_Type))
{
if (_PyObject_LookupAttrId(buffer, &PyId_raw, &raw) < 0)
goto error;
/* Cache the raw FileIO object to speed up 'closed' checks */
if (raw != NULL) {
- if (Py_TYPE(raw) == &PyFileIO_Type)
+ if (Py_IS_TYPE(raw, &PyFileIO_Type))
self->raw = raw;
else
Py_DECREF(raw);
}
}
- res = _PyObject_CallMethodId(buffer, &PyId_seekable, NULL);
+ res = _PyObject_CallMethodIdNoArgs(buffer, &PyId_seekable);
if (res == NULL)
goto error;
r = PyObject_IsTrue(res);
@@ -1343,7 +1388,7 @@ _io_TextIOWrapper_reconfigure_impl(textio *self, PyObject *encoding,
return NULL;
}
- PyObject *res = PyObject_CallMethodObjArgs((PyObject *)self, _PyIO_str_flush, NULL);
+ PyObject *res = PyObject_CallMethodNoArgs((PyObject *)self, _PyIO_str_flush);
if (res == NULL) {
return NULL;
}
@@ -1423,7 +1468,7 @@ textiowrapper_closed_get(textio *self, void *context);
do { \
int r; \
PyObject *_res; \
- if (Py_TYPE(self) == &PyTextIOWrapper_Type) { \
+ if (Py_IS_TYPE(self, &PyTextIOWrapper_Type)) { \
if (self->raw != NULL) \
r = _PyFileIO_closed(self->raw); \
else { \
@@ -1482,7 +1527,7 @@ _io_TextIOWrapper_detach_impl(textio *self)
{
PyObject *buffer, *res;
CHECK_ATTACHED(self);
- res = PyObject_CallMethodObjArgs((PyObject *)self, _PyIO_str_flush, NULL);
+ res = PyObject_CallMethodNoArgs((PyObject *)self, _PyIO_str_flush);
if (res == NULL)
return NULL;
Py_DECREF(res);
@@ -1554,8 +1599,7 @@ _textiowrapper_writeflush(textio *self)
PyObject *ret;
do {
- ret = PyObject_CallMethodObjArgs(self->buffer,
- _PyIO_str_write, b, NULL);
+ ret = PyObject_CallMethodOneArg(self->buffer, _PyIO_str_write, b);
} while (ret == NULL && _PyIO_trap_eintr());
Py_DECREF(b);
if (ret == NULL)
@@ -1625,8 +1669,7 @@ _io_TextIOWrapper_write_impl(textio *self, PyObject *text)
self->encoding_start_of_stream = 0;
}
else
- b = PyObject_CallMethodObjArgs(self->encoder,
- _PyIO_str_encode, text, NULL);
+ b = PyObject_CallMethodOneArg(self->encoder, _PyIO_str_encode, text);
Py_DECREF(text);
if (b == NULL)
@@ -1677,7 +1720,7 @@ _io_TextIOWrapper_write_impl(textio *self, PyObject *text)
}
if (needflush) {
- ret = PyObject_CallMethodObjArgs(self->buffer, _PyIO_str_flush, NULL);
+ ret = PyObject_CallMethodNoArgs(self->buffer, _PyIO_str_flush);
if (ret == NULL)
return NULL;
Py_DECREF(ret);
@@ -1687,7 +1730,7 @@ _io_TextIOWrapper_write_impl(textio *self, PyObject *text)
Py_CLEAR(self->snapshot);
if (self->decoder) {
- ret = _PyObject_CallMethodId(self->decoder, &PyId_reset, NULL);
+ ret = _PyObject_CallMethodIdNoArgs(self->decoder, &PyId_reset);
if (ret == NULL)
return NULL;
Py_DECREF(ret);
@@ -1767,9 +1810,8 @@ textiowrapper_read_chunk(textio *self, Py_ssize_t size_hint)
/* To prepare for tell(), we need to snapshot a point in the file
* where the decoder's input buffer is empty.
*/
-
- PyObject *state = PyObject_CallMethodObjArgs(self->decoder,
- _PyIO_str_getstate, NULL);
+ PyObject *state = PyObject_CallMethodNoArgs(self->decoder,
+ _PyIO_str_getstate);
if (state == NULL)
return -1;
/* Given this, we know there was a valid snapshot point
@@ -1809,9 +1851,9 @@ textiowrapper_read_chunk(textio *self, Py_ssize_t size_hint)
if (chunk_size == NULL)
goto fail;
- input_chunk = PyObject_CallMethodObjArgs(self->buffer,
+ input_chunk = PyObject_CallMethodOneArg(self->buffer,
(self->has_read1 ? _PyIO_str_read1: _PyIO_str_read),
- chunk_size, NULL);
+ chunk_size);
Py_DECREF(chunk_size);
if (input_chunk == NULL)
goto fail;
@@ -1892,12 +1934,12 @@ _io_TextIOWrapper_read_impl(textio *self, Py_ssize_t n)
if (n < 0) {
/* Read everything */
- PyObject *bytes = _PyObject_CallMethodId(self->buffer, &PyId_read, NULL);
+ PyObject *bytes = _PyObject_CallMethodIdNoArgs(self->buffer, &PyId_read);
PyObject *decoded;
if (bytes == NULL)
goto fail;
- if (Py_TYPE(self->decoder) == &PyIncrementalNewlineDecoder_Type)
+ if (Py_IS_TYPE(self->decoder, &PyIncrementalNewlineDecoder_Type))
decoded = _PyIncrementalNewlineDecoder_decode(self->decoder,
bytes, 1);
else
@@ -2045,7 +2087,7 @@ _PyIO_find_line_ending(
else {
/* Non-universal mode. */
Py_ssize_t readnl_len = PyUnicode_GET_LENGTH(readnl);
- Py_UCS1 *nl = PyUnicode_1BYTE_DATA(readnl);
+ const Py_UCS1 *nl = PyUnicode_1BYTE_DATA(readnl);
/* Assume that readnl is an ASCII character. */
assert(PyUnicode_KIND(readnl) == PyUnicode_1BYTE_KIND);
if (readnl_len == 1) {
@@ -2099,7 +2141,7 @@ _textiowrapper_readline(textio *self, Py_ssize_t limit)
chunked = 0;
while (1) {
- char *ptr;
+ const char *ptr;
Py_ssize_t line_len;
int kind;
Py_ssize_t consumed = 0;
@@ -2353,7 +2395,7 @@ _textiowrapper_decoder_setstate(textio *self, cookie_type *cookie)
utf-16, that we are expecting a BOM).
*/
if (cookie->start_pos == 0 && cookie->dec_flags == 0)
- res = PyObject_CallMethodObjArgs(self->decoder, _PyIO_str_reset, NULL);
+ res = PyObject_CallMethodNoArgs(self->decoder, _PyIO_str_reset);
else
res = _PyObject_CallMethodId(self->decoder, &PyId_setstate,
"((yi))", "", cookie->dec_flags);
@@ -2368,12 +2410,12 @@ _textiowrapper_encoder_reset(textio *self, int start_of_stream)
{
PyObject *res;
if (start_of_stream) {
- res = PyObject_CallMethodObjArgs(self->encoder, _PyIO_str_reset, NULL);
+ res = PyObject_CallMethodNoArgs(self->encoder, _PyIO_str_reset);
self->encoding_start_of_stream = 1;
}
else {
- res = PyObject_CallMethodObjArgs(self->encoder, _PyIO_str_setstate,
- _PyLong_Zero, NULL);
+ res = PyObject_CallMethodOneArg(self->encoder, _PyIO_str_setstate,
+ _PyLong_Zero);
self->encoding_start_of_stream = 0;
}
if (res == NULL)
@@ -2433,7 +2475,7 @@ _io_TextIOWrapper_seek_impl(textio *self, PyObject *cookieObj, int whence)
* sync the underlying buffer with the current position.
*/
Py_DECREF(cookieObj);
- cookieObj = _PyObject_CallMethodId((PyObject *)self, &PyId_tell, NULL);
+ cookieObj = _PyObject_CallMethodIdNoArgs((PyObject *)self, &PyId_tell);
if (cookieObj == NULL)
goto fail;
break;
@@ -2449,7 +2491,7 @@ _io_TextIOWrapper_seek_impl(textio *self, PyObject *cookieObj, int whence)
goto fail;
}
- res = _PyObject_CallMethodId((PyObject *)self, &PyId_flush, NULL);
+ res = _PyObject_CallMethodIdNoArgs((PyObject *)self, &PyId_flush);
if (res == NULL)
goto fail;
Py_DECREF(res);
@@ -2457,7 +2499,7 @@ _io_TextIOWrapper_seek_impl(textio *self, PyObject *cookieObj, int whence)
textiowrapper_set_decoded_chars(self, NULL);
Py_CLEAR(self->snapshot);
if (self->decoder) {
- res = _PyObject_CallMethodId(self->decoder, &PyId_reset, NULL);
+ res = _PyObject_CallMethodIdNoArgs(self->decoder, &PyId_reset);
if (res == NULL)
goto fail;
Py_DECREF(res);
@@ -2497,7 +2539,7 @@ _io_TextIOWrapper_seek_impl(textio *self, PyObject *cookieObj, int whence)
goto fail;
}
- res = PyObject_CallMethodObjArgs((PyObject *)self, _PyIO_str_flush, NULL);
+ res = PyObject_CallMethodNoArgs((PyObject *)self, _PyIO_str_flush);
if (res == NULL)
goto fail;
Py_DECREF(res);
@@ -2512,8 +2554,7 @@ _io_TextIOWrapper_seek_impl(textio *self, PyObject *cookieObj, int whence)
posobj = PyLong_FromOff_t(cookie.start_pos);
if (posobj == NULL)
goto fail;
- res = PyObject_CallMethodObjArgs(self->buffer,
- _PyIO_str_seek, posobj, NULL);
+ res = PyObject_CallMethodOneArg(self->buffer, _PyIO_str_seek, posobj);
Py_DECREF(posobj);
if (res == NULL)
goto fail;
@@ -2552,8 +2593,8 @@ _io_TextIOWrapper_seek_impl(textio *self, PyObject *cookieObj, int whence)
}
Py_XSETREF(self->snapshot, snapshot);
- decoded = _PyObject_CallMethodId(self->decoder, &PyId_decode,
- "Oi", input_chunk, (int)cookie.need_eof);
+ decoded = _PyObject_CallMethodIdObjArgs(self->decoder, &PyId_decode,
+ input_chunk, cookie.need_eof ? Py_True : Py_False, NULL);
if (check_decoded(decoded) < 0)
goto fail;
@@ -2601,7 +2642,7 @@ _io_TextIOWrapper_tell_impl(textio *self)
Py_ssize_t chars_to_skip, chars_decoded;
Py_ssize_t skip_bytes, skip_back;
PyObject *saved_state = NULL;
- char *input, *input_end;
+ const char *input, *input_end;
Py_ssize_t dec_buffer_len;
int dec_flags;
@@ -2620,12 +2661,12 @@ _io_TextIOWrapper_tell_impl(textio *self)
if (_textiowrapper_writeflush(self) < 0)
return NULL;
- res = _PyObject_CallMethodId((PyObject *)self, &PyId_flush, NULL);
+ res = _PyObject_CallMethodIdNoArgs((PyObject *)self, &PyId_flush);
if (res == NULL)
goto fail;
Py_DECREF(res);
- posobj = _PyObject_CallMethodId(self->buffer, &PyId_tell, NULL);
+ posobj = _PyObject_CallMethodIdNoArgs(self->buffer, &PyId_tell);
if (posobj == NULL)
goto fail;
@@ -2661,15 +2702,15 @@ _io_TextIOWrapper_tell_impl(textio *self)
chars_to_skip = self->decoded_chars_used;
/* Decoder state will be restored at the end */
- saved_state = PyObject_CallMethodObjArgs(self->decoder,
- _PyIO_str_getstate, NULL);
+ saved_state = PyObject_CallMethodNoArgs(self->decoder,
+ _PyIO_str_getstate);
if (saved_state == NULL)
goto fail;
#define DECODER_GETSTATE() do { \
PyObject *dec_buffer; \
- PyObject *_state = PyObject_CallMethodObjArgs(self->decoder, \
- _PyIO_str_getstate, NULL); \
+ PyObject *_state = PyObject_CallMethodNoArgs(self->decoder, \
+ _PyIO_str_getstate); \
if (_state == NULL) \
goto fail; \
if (!PyTuple_Check(_state)) { \
@@ -2780,7 +2821,7 @@ _io_TextIOWrapper_tell_impl(textio *self)
if (input == input_end) {
/* We didn't get enough decoded data; signal EOF to get more. */
PyObject *decoded = _PyObject_CallMethodId(
- self->decoder, &PyId_decode, "yi", "", /* final = */ 1);
+ self->decoder, &PyId_decode, "yO", "", /* final = */ Py_True);
if (check_decoded(decoded) < 0)
goto fail;
chars_decoded += PyUnicode_GET_LENGTH(decoded);
@@ -2795,7 +2836,7 @@ _io_TextIOWrapper_tell_impl(textio *self)
}
finally:
- res = _PyObject_CallMethodIdObjArgs(self->decoder, &PyId_setstate, saved_state, NULL);
+ res = _PyObject_CallMethodIdOneArg(self->decoder, &PyId_setstate, saved_state);
Py_DECREF(saved_state);
if (res == NULL)
return NULL;
@@ -2809,7 +2850,7 @@ fail:
if (saved_state) {
PyObject *type, *value, *traceback;
PyErr_Fetch(&type, &value, &traceback);
- res = _PyObject_CallMethodIdObjArgs(self->decoder, &PyId_setstate, saved_state, NULL);
+ res = _PyObject_CallMethodIdOneArg(self->decoder, &PyId_setstate, saved_state);
_PyErr_ChainExceptions(type, value, traceback);
Py_DECREF(saved_state);
Py_XDECREF(res);
@@ -2831,12 +2872,12 @@ _io_TextIOWrapper_truncate_impl(textio *self, PyObject *pos)
CHECK_ATTACHED(self)
- res = PyObject_CallMethodObjArgs((PyObject *) self, _PyIO_str_flush, NULL);
+ res = PyObject_CallMethodNoArgs((PyObject *)self, _PyIO_str_flush);
if (res == NULL)
return NULL;
Py_DECREF(res);
- return PyObject_CallMethodObjArgs(self->buffer, _PyIO_str_truncate, pos, NULL);
+ return PyObject_CallMethodOneArg(self->buffer, _PyIO_str_truncate, pos);
}
static PyObject *
@@ -2916,7 +2957,7 @@ _io_TextIOWrapper_fileno_impl(textio *self)
/*[clinic end generated code: output=21490a4c3da13e6c input=c488ca83d0069f9b]*/
{
CHECK_ATTACHED(self);
- return _PyObject_CallMethodId(self->buffer, &PyId_fileno, NULL);
+ return _PyObject_CallMethodIdNoArgs(self->buffer, &PyId_fileno);
}
/*[clinic input]
@@ -2928,7 +2969,7 @@ _io_TextIOWrapper_seekable_impl(textio *self)
/*[clinic end generated code: output=ab223dbbcffc0f00 input=8b005ca06e1fca13]*/
{
CHECK_ATTACHED(self);
- return _PyObject_CallMethodId(self->buffer, &PyId_seekable, NULL);
+ return _PyObject_CallMethodIdNoArgs(self->buffer, &PyId_seekable);
}
/*[clinic input]
@@ -2940,7 +2981,7 @@ _io_TextIOWrapper_readable_impl(textio *self)
/*[clinic end generated code: output=72ff7ba289a8a91b input=0704ea7e01b0d3eb]*/
{
CHECK_ATTACHED(self);
- return _PyObject_CallMethodId(self->buffer, &PyId_readable, NULL);
+ return _PyObject_CallMethodIdNoArgs(self->buffer, &PyId_readable);
}
/*[clinic input]
@@ -2952,7 +2993,7 @@ _io_TextIOWrapper_writable_impl(textio *self)
/*[clinic end generated code: output=a728c71790d03200 input=c41740bc9d8636e8]*/
{
CHECK_ATTACHED(self);
- return _PyObject_CallMethodId(self->buffer, &PyId_writable, NULL);
+ return _PyObject_CallMethodIdNoArgs(self->buffer, &PyId_writable);
}
/*[clinic input]
@@ -2964,7 +3005,7 @@ _io_TextIOWrapper_isatty_impl(textio *self)
/*[clinic end generated code: output=12be1a35bace882e input=fb68d9f2c99bbfff]*/
{
CHECK_ATTACHED(self);
- return _PyObject_CallMethodId(self->buffer, &PyId_isatty, NULL);
+ return _PyObject_CallMethodIdNoArgs(self->buffer, &PyId_isatty);
}
/*[clinic input]
@@ -2980,7 +3021,7 @@ _io_TextIOWrapper_flush_impl(textio *self)
self->telling = self->seekable;
if (_textiowrapper_writeflush(self) < 0)
return NULL;
- return _PyObject_CallMethodId(self->buffer, &PyId_flush, NULL);
+ return _PyObject_CallMethodIdNoArgs(self->buffer, &PyId_flush);
}
/*[clinic input]
@@ -3009,21 +3050,21 @@ _io_TextIOWrapper_close_impl(textio *self)
else {
PyObject *exc = NULL, *val, *tb;
if (self->finalizing) {
- res = _PyObject_CallMethodIdObjArgs(self->buffer,
- &PyId__dealloc_warn,
- self, NULL);
+ res = _PyObject_CallMethodIdOneArg(self->buffer,
+ &PyId__dealloc_warn,
+ (PyObject *)self);
if (res)
Py_DECREF(res);
else
PyErr_Clear();
}
- res = _PyObject_CallMethodId((PyObject *)self, &PyId_flush, NULL);
+ res = _PyObject_CallMethodIdNoArgs((PyObject *)self, &PyId_flush);
if (res == NULL)
PyErr_Fetch(&exc, &val, &tb);
else
Py_DECREF(res);
- res = _PyObject_CallMethodId(self->buffer, &PyId_close, NULL);
+ res = _PyObject_CallMethodIdNoArgs(self->buffer, &PyId_close);
if (exc != NULL) {
_PyErr_ChainExceptions(exc, val, tb);
Py_CLEAR(res);
@@ -3040,13 +3081,13 @@ textiowrapper_iternext(textio *self)
CHECK_ATTACHED(self);
self->telling = 0;
- if (Py_TYPE(self) == &PyTextIOWrapper_Type) {
+ if (Py_IS_TYPE(self, &PyTextIOWrapper_Type)) {
/* Skip method call overhead for speed */
line = _textiowrapper_readline(self, -1);
}
else {
- line = PyObject_CallMethodObjArgs((PyObject *)self,
- _PyIO_str_readline, NULL);
+ line = PyObject_CallMethodNoArgs((PyObject *)self,
+ _PyIO_str_readline);
if (line && !PyUnicode_Check(line)) {
PyErr_Format(PyExc_OSError,
"readline() should have returned a str object, "
diff --git a/Modules/_io/winconsoleio.c b/Modules/_io/winconsoleio.c
index ea5d24f9..a83ef37a 100644
--- a/Modules/_io/winconsoleio.c
+++ b/Modules/_io/winconsoleio.c
@@ -12,7 +12,7 @@
#ifdef MS_WINDOWS
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
#endif
@@ -204,8 +204,8 @@ _io__WindowsConsoleIO_close_impl(winconsoleio *self)
PyObject *exc, *val, *tb;
int rc;
_Py_IDENTIFIER(close);
- res = _PyObject_CallMethodIdObjArgs((PyObject*)&PyRawIOBase_Type,
- &PyId_close, self, NULL);
+ res = _PyObject_CallMethodIdOneArg((PyObject*)&PyRawIOBase_Type,
+ &PyId_close, (PyObject*)self);
if (!self->closehandle) {
self->handle = INVALID_HANDLE_VALUE;
return res;
diff --git a/Modules/_json.c b/Modules/_json.c
index 048a9654..faa3944e 100644
--- a/Modules/_json.c
+++ b/Modules/_json.c
@@ -9,22 +9,22 @@
#endif
#include "Python.h"
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
#include "pycore_accu.h"
-#ifdef __GNUC__
-#define UNUSED __attribute__((__unused__))
-#else
-#define UNUSED
-#endif
+typedef struct {
+ PyObject *PyScannerType;
+ PyObject *PyEncoderType;
+} _jsonmodulestate;
-#define PyScanner_Check(op) PyObject_TypeCheck(op, &PyScannerType)
-#define PyScanner_CheckExact(op) (Py_TYPE(op) == &PyScannerType)
-#define PyEncoder_Check(op) PyObject_TypeCheck(op, &PyEncoderType)
-#define PyEncoder_CheckExact(op) (Py_TYPE(op) == &PyEncoderType)
+static inline _jsonmodulestate*
+get_json_state(PyObject *module)
+{
+ void *state = PyModule_GetState(module);
+ assert(state != NULL);
+ return (_jsonmodulestate *)state;
+}
-static PyTypeObject PyScannerType;
-static PyTypeObject PyEncoderType;
typedef struct _PyScannerObject {
PyObject_HEAD
@@ -73,25 +73,12 @@ static PyMemberDef encoder_members[] = {
{NULL}
};
-static PyObject *
-join_list_unicode(PyObject *lst)
-{
- /* return u''.join(lst) */
- static PyObject *sep = NULL;
- if (sep == NULL) {
- sep = PyUnicode_FromStringAndSize("", 0);
- if (sep == NULL)
- return NULL;
- }
- return PyUnicode_Join(sep, lst);
-}
-
/* Forward decls */
static PyObject *
ascii_escape_unicode(PyObject *pystr);
static PyObject *
-py_encode_basestring_ascii(PyObject* self UNUSED, PyObject *pystr);
+py_encode_basestring_ascii(PyObject* Py_UNUSED(self), PyObject *pystr);
void init_json(void);
static PyObject *
scan_once_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr);
@@ -102,13 +89,13 @@ scanner_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
static void
scanner_dealloc(PyObject *self);
static int
-scanner_clear(PyObject *self);
+scanner_clear(PyScannerObject *self);
static PyObject *
encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
static void
encoder_dealloc(PyObject *self);
static int
-encoder_clear(PyObject *self);
+encoder_clear(PyEncoderObject *self);
static int
encoder_listencode_list(PyEncoderObject *s, _PyAccu *acc, PyObject *seq, Py_ssize_t indent_level);
static int
@@ -172,8 +159,8 @@ ascii_escape_unicode(PyObject *pystr)
Py_ssize_t output_size;
Py_ssize_t chars;
PyObject *rval;
- void *input;
- unsigned char *output;
+ const void *input;
+ Py_UCS1 *output;
int kind;
if (PyUnicode_READY(pystr) == -1)
@@ -238,7 +225,7 @@ escape_unicode(PyObject *pystr)
Py_ssize_t output_size;
Py_ssize_t chars;
PyObject *rval;
- void *input;
+ const void *input;
int kind;
Py_UCS4 maxchar;
@@ -385,21 +372,6 @@ _build_rval_index_tuple(PyObject *rval, Py_ssize_t idx) {
return tpl;
}
-#define APPEND_OLD_CHUNK \
- if (chunk != NULL) { \
- if (chunks == NULL) { \
- chunks = PyList_New(0); \
- if (chunks == NULL) { \
- goto bail; \
- } \
- } \
- if (PyList_Append(chunks, chunk)) { \
- Py_CLEAR(chunk); \
- goto bail; \
- } \
- Py_CLEAR(chunk); \
- }
-
static PyObject *
scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next_end_ptr)
{
@@ -417,12 +389,14 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next
Py_ssize_t next /* = begin */;
const void *buf;
int kind;
- PyObject *chunks = NULL;
- PyObject *chunk = NULL;
if (PyUnicode_READY(pystr) == -1)
return 0;
+ _PyUnicodeWriter writer;
+ _PyUnicodeWriter_Init(&writer);
+ writer.overallocate = 1;
+
len = PyUnicode_GET_LENGTH(pystr);
buf = PyUnicode_DATA(pystr);
kind = PyUnicode_KIND(pystr);
@@ -433,29 +407,42 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next
}
while (1) {
/* Find the end of the string or the next escape */
- Py_UCS4 c = 0;
- for (next = end; next < len; next++) {
- c = PyUnicode_READ(kind, buf, next);
- if (c == '"' || c == '\\') {
- break;
+ Py_UCS4 c;
+ {
+ // Use a tightly scoped variable to help register allocation.
+ Py_UCS4 d = 0;
+ for (next = end; next < len; next++) {
+ d = PyUnicode_READ(kind, buf, next);
+ if (d == '"' || d == '\\') {
+ break;
+ }
+ if (d <= 0x1f && strict) {
+ raise_errmsg("Invalid control character at", pystr, next);
+ goto bail;
+ }
}
- else if (c <= 0x1f && strict) {
- raise_errmsg("Invalid control character at", pystr, next);
- goto bail;
+ c = d;
+ }
+
+ if (c == '"') {
+ // Fast path for simple case.
+ if (writer.buffer == NULL) {
+ PyObject *ret = PyUnicode_Substring(pystr, end, next);
+ if (ret == NULL) {
+ goto bail;
+ }
+ *next_end_ptr = next + 1;;
+ return ret;
}
}
- if (!(c == '"' || c == '\\')) {
+ else if (c != '\\') {
raise_errmsg("Unterminated string starting at", pystr, begin);
goto bail;
}
+
/* Pick up this chunk if it's not zero length */
if (next != end) {
- APPEND_OLD_CHUNK
- chunk = PyUnicode_FromKindAndData(
- kind,
- (char*)buf + kind * end,
- next - end);
- if (chunk == NULL) {
+ if (_PyUnicodeWriter_WriteSubstring(&writer, pystr, end, next) < 0) {
goto bail;
}
}
@@ -546,34 +533,18 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next
end -= 6;
}
}
- APPEND_OLD_CHUNK
- chunk = PyUnicode_FromKindAndData(PyUnicode_4BYTE_KIND, &c, 1);
- if (chunk == NULL) {
+ if (_PyUnicodeWriter_WriteChar(&writer, c) < 0) {
goto bail;
}
}
- if (chunks == NULL) {
- if (chunk != NULL)
- rval = chunk;
- else
- rval = PyUnicode_FromStringAndSize("", 0);
- }
- else {
- APPEND_OLD_CHUNK
- rval = join_list_unicode(chunks);
- if (rval == NULL) {
- goto bail;
- }
- Py_CLEAR(chunks);
- }
-
+ rval = _PyUnicodeWriter_Finish(&writer);
*next_end_ptr = end;
return rval;
+
bail:
*next_end_ptr = -1;
- Py_XDECREF(chunks);
- Py_XDECREF(chunk);
+ _PyUnicodeWriter_Dealloc(&writer);
return NULL;
}
@@ -591,7 +562,7 @@ PyDoc_STRVAR(pydoc_scanstring,
);
static PyObject *
-py_scanstring(PyObject* self UNUSED, PyObject *args)
+py_scanstring(PyObject* Py_UNUSED(self), PyObject *args)
{
PyObject *pystr;
PyObject *rval;
@@ -620,7 +591,7 @@ PyDoc_STRVAR(pydoc_encode_basestring_ascii,
);
static PyObject *
-py_encode_basestring_ascii(PyObject* self UNUSED, PyObject *pystr)
+py_encode_basestring_ascii(PyObject* Py_UNUSED(self), PyObject *pystr)
{
PyObject *rval;
/* Return an ASCII-only JSON representation of a Python string */
@@ -645,7 +616,7 @@ PyDoc_STRVAR(pydoc_encode_basestring,
);
static PyObject *
-py_encode_basestring(PyObject* self UNUSED, PyObject *pystr)
+py_encode_basestring(PyObject* Py_UNUSED(self), PyObject *pystr)
{
PyObject *rval;
/* Return a JSON representation of a Python string */
@@ -665,38 +636,36 @@ py_encode_basestring(PyObject* self UNUSED, PyObject *pystr)
static void
scanner_dealloc(PyObject *self)
{
+ PyTypeObject *tp = Py_TYPE(self);
/* bpo-31095: UnTrack is needed before calling any callbacks */
PyObject_GC_UnTrack(self);
- scanner_clear(self);
- Py_TYPE(self)->tp_free(self);
+ scanner_clear((PyScannerObject *)self);
+ tp->tp_free(self);
+ Py_DECREF(tp);
}
static int
-scanner_traverse(PyObject *self, visitproc visit, void *arg)
+scanner_traverse(PyScannerObject *self, visitproc visit, void *arg)
{
- PyScannerObject *s;
- assert(PyScanner_Check(self));
- s = (PyScannerObject *)self;
- Py_VISIT(s->object_hook);
- Py_VISIT(s->object_pairs_hook);
- Py_VISIT(s->parse_float);
- Py_VISIT(s->parse_int);
- Py_VISIT(s->parse_constant);
+ Py_VISIT(Py_TYPE(self));
+ Py_VISIT(self->object_hook);
+ Py_VISIT(self->object_pairs_hook);
+ Py_VISIT(self->parse_float);
+ Py_VISIT(self->parse_int);
+ Py_VISIT(self->parse_constant);
+ Py_VISIT(self->memo);
return 0;
}
static int
-scanner_clear(PyObject *self)
+scanner_clear(PyScannerObject *self)
{
- PyScannerObject *s;
- assert(PyScanner_Check(self));
- s = (PyScannerObject *)self;
- Py_CLEAR(s->object_hook);
- Py_CLEAR(s->object_pairs_hook);
- Py_CLEAR(s->parse_float);
- Py_CLEAR(s->parse_int);
- Py_CLEAR(s->parse_constant);
- Py_CLEAR(s->memo);
+ Py_CLEAR(self->object_hook);
+ Py_CLEAR(self->object_pairs_hook);
+ Py_CLEAR(self->parse_float);
+ Py_CLEAR(self->parse_int);
+ Py_CLEAR(self->parse_constant);
+ Py_CLEAR(self->memo);
return 0;
}
@@ -710,7 +679,7 @@ _parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ss
Returns a new PyObject (usually a dict, but object_hook can change that)
*/
- void *str;
+ const void *str;
int kind;
Py_ssize_t end_idx;
PyObject *val = NULL;
@@ -749,19 +718,13 @@ _parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ss
key = scanstring_unicode(pystr, idx + 1, s->strict, &next_idx);
if (key == NULL)
goto bail;
- memokey = PyDict_GetItemWithError(s->memo, key);
- if (memokey != NULL) {
- Py_INCREF(memokey);
- Py_DECREF(key);
- key = memokey;
- }
- else if (PyErr_Occurred()) {
+ memokey = PyDict_SetDefault(s->memo, key, key);
+ if (memokey == NULL) {
goto bail;
}
- else {
- if (PyDict_SetItem(s->memo, key, key) < 0)
- goto bail;
- }
+ Py_INCREF(memokey);
+ Py_DECREF(key);
+ key = memokey;
idx = next_idx;
/* skip whitespace between key and : delimiter, read :, skip whitespace */
@@ -818,14 +781,14 @@ _parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ss
*next_idx_ptr = idx + 1;
if (has_pairs_hook) {
- val = PyObject_CallFunctionObjArgs(s->object_pairs_hook, rval, NULL);
+ val = PyObject_CallOneArg(s->object_pairs_hook, rval);
Py_DECREF(rval);
return val;
}
/* if object_hook is not None: rval = object_hook(rval) */
if (s->object_hook != Py_None) {
- val = PyObject_CallFunctionObjArgs(s->object_hook, rval, NULL);
+ val = PyObject_CallOneArg(s->object_hook, rval);
Py_DECREF(rval);
return val;
}
@@ -846,7 +809,7 @@ _parse_array_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssi
Returns a new PyList
*/
- void *str;
+ const void *str;
int kind;
Py_ssize_t end_idx;
PyObject *val = NULL;
@@ -931,7 +894,7 @@ _parse_constant(PyScannerObject *s, const char *constant, Py_ssize_t idx, Py_ssi
return NULL;
/* rval = parse_constant(constant) */
- rval = PyObject_CallFunctionObjArgs(s->parse_constant, cstr, NULL);
+ rval = PyObject_CallOneArg(s->parse_constant, cstr);
idx += PyUnicode_GET_LENGTH(cstr);
Py_DECREF(cstr);
*next_idx_ptr = idx;
@@ -949,7 +912,7 @@ _match_number_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_
PyLong, or PyFloat.
May return other types if parse_int or parse_float are set
*/
- void *str;
+ const void *str;
int kind;
Py_ssize_t end_idx;
Py_ssize_t idx = start;
@@ -1030,7 +993,7 @@ _match_number_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_
idx - start);
if (numstr == NULL)
return NULL;
- rval = PyObject_CallFunctionObjArgs(custom_func, numstr, NULL);
+ rval = PyObject_CallOneArg(custom_func, numstr);
}
else {
Py_ssize_t i, n;
@@ -1066,7 +1029,7 @@ scan_once_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_
Returns a new PyObject representation of the term.
*/
PyObject *res;
- void *str;
+ const void *str;
int kind;
Py_ssize_t length;
@@ -1168,7 +1131,7 @@ scan_once_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_
}
static PyObject *
-scanner_call(PyObject *self, PyObject *args, PyObject *kwds)
+scanner_call(PyScannerObject *self, PyObject *args, PyObject *kwds)
{
/* Python callable interface to scan_once_{str,unicode} */
PyObject *pystr;
@@ -1176,14 +1139,11 @@ scanner_call(PyObject *self, PyObject *args, PyObject *kwds)
Py_ssize_t idx;
Py_ssize_t next_idx = -1;
static char *kwlist[] = {"string", "idx", NULL};
- PyScannerObject *s;
- assert(PyScanner_Check(self));
- s = (PyScannerObject *)self;
if (!PyArg_ParseTupleAndKeywords(args, kwds, "On:scan_once", kwlist, &pystr, &idx))
return NULL;
if (PyUnicode_Check(pystr)) {
- rval = scan_once_unicode(s, pystr, idx, &next_idx);
+ rval = scan_once_unicode(self, pystr, idx, &next_idx);
}
else {
PyErr_Format(PyExc_TypeError,
@@ -1191,7 +1151,7 @@ scanner_call(PyObject *self, PyObject *args, PyObject *kwds)
Py_TYPE(pystr)->tp_name);
return NULL;
}
- PyDict_Clear(s->memo);
+ PyDict_Clear(self->memo);
if (rval == NULL)
return NULL;
return _build_rval_index_tuple(rval, next_idx);
@@ -1250,47 +1210,23 @@ bail:
PyDoc_STRVAR(scanner_doc, "JSON scanner object");
-static
-PyTypeObject PyScannerType = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "_json.Scanner", /* tp_name */
- sizeof(PyScannerObject), /* tp_basicsize */
- 0, /* tp_itemsize */
- scanner_dealloc, /* tp_dealloc */
- 0, /* tp_vectorcall_offset */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_as_async */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- scanner_call, /* tp_call */
- 0, /* tp_str */
- 0,/* PyObject_GenericGetAttr, */ /* tp_getattro */
- 0,/* PyObject_GenericSetAttr, */ /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
- scanner_doc, /* tp_doc */
- scanner_traverse, /* tp_traverse */
- scanner_clear, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- scanner_members, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- 0, /* tp_init */
- 0,/* PyType_GenericAlloc, */ /* tp_alloc */
- scanner_new, /* tp_new */
- 0,/* PyObject_GC_Del, */ /* tp_free */
+static PyType_Slot PyScannerType_slots[] = {
+ {Py_tp_doc, (void *)scanner_doc},
+ {Py_tp_dealloc, scanner_dealloc},
+ {Py_tp_call, scanner_call},
+ {Py_tp_traverse, scanner_traverse},
+ {Py_tp_clear, scanner_clear},
+ {Py_tp_members, scanner_members},
+ {Py_tp_new, scanner_new},
+ {0, 0}
+};
+
+static PyType_Spec PyScannerType_spec = {
+ .name = "_json.Scanner",
+ .basicsize = sizeof(PyScannerObject),
+ .itemsize = 0,
+ .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
+ .slots = PyScannerType_slots,
};
static PyObject *
@@ -1348,23 +1284,19 @@ encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
}
static PyObject *
-encoder_call(PyObject *self, PyObject *args, PyObject *kwds)
+encoder_call(PyEncoderObject *self, PyObject *args, PyObject *kwds)
{
/* Python callable interface to encode_listencode_obj */
static char *kwlist[] = {"obj", "_current_indent_level", NULL};
PyObject *obj;
Py_ssize_t indent_level;
- PyEncoderObject *s;
_PyAccu acc;
-
- assert(PyEncoder_Check(self));
- s = (PyEncoderObject *)self;
if (!PyArg_ParseTupleAndKeywords(args, kwds, "On:_iterencode", kwlist,
&obj, &indent_level))
return NULL;
if (_PyAccu_Init(&acc))
return NULL;
- if (encoder_listencode_obj(s, &acc, obj, indent_level)) {
+ if (encoder_listencode_obj(self, &acc, obj, indent_level)) {
_PyAccu_Destroy(&acc);
return NULL;
}
@@ -1440,7 +1372,7 @@ encoder_encode_string(PyEncoderObject *s, PyObject *obj)
if (s->fast_encode) {
return s->fast_encode(NULL, obj);
}
- encoded = PyObject_CallFunctionObjArgs(s->encoder, obj, NULL);
+ encoded = PyObject_CallOneArg(s->encoder, obj);
if (encoded != NULL && !PyUnicode_Check(encoded)) {
PyErr_Format(PyExc_TypeError,
"encoder() must return a string, not %.80s",
@@ -1526,7 +1458,7 @@ encoder_listencode_obj(PyEncoderObject *s, _PyAccu *acc,
return -1;
}
}
- newobj = PyObject_CallFunctionObjArgs(s->defaultfn, obj, NULL);
+ newobj = PyObject_CallOneArg(s->defaultfn, obj);
if (newobj == NULL) {
Py_XDECREF(ident);
return -1;
@@ -1658,7 +1590,7 @@ encoder_listencode_dict(PyEncoderObject *s, _PyAccu *acc,
else {
PyErr_Format(PyExc_TypeError,
"keys must be str, int, float, bool or None, "
- "not %.100s", key->ob_type->tp_name);
+ "not %.100s", Py_TYPE(key)->tp_name);
goto bail;
}
@@ -1803,86 +1735,59 @@ bail:
static void
encoder_dealloc(PyObject *self)
{
+ PyTypeObject *tp = Py_TYPE(self);
/* bpo-31095: UnTrack is needed before calling any callbacks */
PyObject_GC_UnTrack(self);
- encoder_clear(self);
- Py_TYPE(self)->tp_free(self);
+ encoder_clear((PyEncoderObject *)self);
+ tp->tp_free(self);
+ Py_DECREF(tp);
}
static int
-encoder_traverse(PyObject *self, visitproc visit, void *arg)
+encoder_traverse(PyEncoderObject *self, visitproc visit, void *arg)
{
- PyEncoderObject *s;
- assert(PyEncoder_Check(self));
- s = (PyEncoderObject *)self;
- Py_VISIT(s->markers);
- Py_VISIT(s->defaultfn);
- Py_VISIT(s->encoder);
- Py_VISIT(s->indent);
- Py_VISIT(s->key_separator);
- Py_VISIT(s->item_separator);
+ Py_VISIT(Py_TYPE(self));
+ Py_VISIT(self->markers);
+ Py_VISIT(self->defaultfn);
+ Py_VISIT(self->encoder);
+ Py_VISIT(self->indent);
+ Py_VISIT(self->key_separator);
+ Py_VISIT(self->item_separator);
return 0;
}
static int
-encoder_clear(PyObject *self)
+encoder_clear(PyEncoderObject *self)
{
/* Deallocate Encoder */
- PyEncoderObject *s;
- assert(PyEncoder_Check(self));
- s = (PyEncoderObject *)self;
- Py_CLEAR(s->markers);
- Py_CLEAR(s->defaultfn);
- Py_CLEAR(s->encoder);
- Py_CLEAR(s->indent);
- Py_CLEAR(s->key_separator);
- Py_CLEAR(s->item_separator);
+ Py_CLEAR(self->markers);
+ Py_CLEAR(self->defaultfn);
+ Py_CLEAR(self->encoder);
+ Py_CLEAR(self->indent);
+ Py_CLEAR(self->key_separator);
+ Py_CLEAR(self->item_separator);
return 0;
}
PyDoc_STRVAR(encoder_doc, "_iterencode(obj, _current_indent_level) -> iterable");
-static
-PyTypeObject PyEncoderType = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "_json.Encoder", /* tp_name */
- sizeof(PyEncoderObject), /* tp_basicsize */
- 0, /* tp_itemsize */
- encoder_dealloc, /* tp_dealloc */
- 0, /* tp_vectorcall_offset */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_as_async */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- encoder_call, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
- encoder_doc, /* tp_doc */
- encoder_traverse, /* tp_traverse */
- encoder_clear, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- encoder_members, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- 0, /* tp_init */
- 0, /* tp_alloc */
- encoder_new, /* tp_new */
- 0, /* tp_free */
+static PyType_Slot PyEncoderType_slots[] = {
+ {Py_tp_doc, (void *)encoder_doc},
+ {Py_tp_dealloc, encoder_dealloc},
+ {Py_tp_call, encoder_call},
+ {Py_tp_traverse, encoder_traverse},
+ {Py_tp_clear, encoder_clear},
+ {Py_tp_members, encoder_members},
+ {Py_tp_new, encoder_new},
+ {0, 0}
+};
+
+static PyType_Spec PyEncoderType_spec = {
+ .name = "_json.Encoder",
+ .basicsize = sizeof(PyEncoderObject),
+ .itemsize = 0,
+ .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
+ .slots = PyEncoderType_slots
};
static PyMethodDef speedups_methods[] = {
@@ -1904,40 +1809,77 @@ static PyMethodDef speedups_methods[] = {
PyDoc_STRVAR(module_doc,
"json speedups\n");
+static int
+_json_exec(PyObject *module)
+{
+ _jsonmodulestate *state = get_json_state(module);
+
+ state->PyScannerType = PyType_FromSpec(&PyScannerType_spec);
+ if (state->PyScannerType == NULL) {
+ return -1;
+ }
+ Py_INCREF(state->PyScannerType);
+ if (PyModule_AddObject(module, "make_scanner", state->PyScannerType) < 0) {
+ Py_DECREF(state->PyScannerType);
+ return -1;
+ }
+
+ state->PyEncoderType = PyType_FromSpec(&PyEncoderType_spec);
+ if (state->PyEncoderType == NULL) {
+ return -1;
+ }
+ Py_INCREF(state->PyEncoderType);
+ if (PyModule_AddObject(module, "make_encoder", state->PyEncoderType) < 0) {
+ Py_DECREF(state->PyEncoderType);
+ return -1;
+ }
+
+ return 0;
+}
+
+static int
+_jsonmodule_traverse(PyObject *module, visitproc visit, void *arg)
+{
+ _jsonmodulestate *state = get_json_state(module);
+ Py_VISIT(state->PyScannerType);
+ Py_VISIT(state->PyEncoderType);
+ return 0;
+}
+
+static int
+_jsonmodule_clear(PyObject *module)
+{
+ _jsonmodulestate *state = get_json_state(module);
+ Py_CLEAR(state->PyScannerType);
+ Py_CLEAR(state->PyEncoderType);
+ return 0;
+}
+
+static void
+_jsonmodule_free(void *module)
+{
+ _jsonmodule_clear((PyObject *)module);
+}
+
+static PyModuleDef_Slot _json_slots[] = {
+ {Py_mod_exec, _json_exec},
+ {0, NULL}
+};
+
static struct PyModuleDef jsonmodule = {
PyModuleDef_HEAD_INIT,
"_json",
module_doc,
- -1,
+ sizeof(_jsonmodulestate),
speedups_methods,
- NULL,
- NULL,
- NULL,
- NULL
+ _json_slots,
+ _jsonmodule_traverse,
+ _jsonmodule_clear,
+ _jsonmodule_free,
};
PyMODINIT_FUNC
PyInit__json(void)
{
- PyObject *m = PyModule_Create(&jsonmodule);
- if (!m)
- return NULL;
- if (PyType_Ready(&PyScannerType) < 0)
- goto fail;
- if (PyType_Ready(&PyEncoderType) < 0)
- goto fail;
- Py_INCREF((PyObject*)&PyScannerType);
- if (PyModule_AddObject(m, "make_scanner", (PyObject*)&PyScannerType) < 0) {
- Py_DECREF((PyObject*)&PyScannerType);
- goto fail;
- }
- Py_INCREF((PyObject*)&PyEncoderType);
- if (PyModule_AddObject(m, "make_encoder", (PyObject*)&PyEncoderType) < 0) {
- Py_DECREF((PyObject*)&PyEncoderType);
- goto fail;
- }
- return m;
- fail:
- Py_DECREF(m);
- return NULL;
+ return PyModuleDef_Init(&jsonmodule);
}
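
The _json conversion above replaces static type objects and module-level globals with heap types (PyType_FromSpec) plus per-module state and multi-phase initialization (PEP 489). A stripped-down sketch of that module skeleton, with all names illustrative (the real patch also wires up m_traverse/m_clear/m_free for the objects stored in the state):

    #include "Python.h"

    typedef struct {
        PyObject *error;            /* per-module state instead of a C global */
    } examplestate;

    static int
    example_exec(PyObject *module)
    {
        examplestate *state = PyModule_GetState(module);
        state->error = PyErr_NewException("_example.Error", NULL, NULL);
        if (state->error == NULL) {
            return -1;
        }
        Py_INCREF(state->error);
        if (PyModule_AddObject(module, "Error", state->error) < 0) {
            Py_DECREF(state->error);
            return -1;
        }
        return 0;
    }

    static PyModuleDef_Slot example_slots[] = {
        {Py_mod_exec, example_exec},
        {0, NULL}
    };

    static struct PyModuleDef examplemodule = {
        PyModuleDef_HEAD_INIT, "_example", NULL,
        sizeof(examplestate),       /* m_size: state allocated per module */
        NULL, example_slots, NULL, NULL, NULL
    };

    PyMODINIT_FUNC
    PyInit__example(void)
    {
        /* Multi-phase init: return the def and let import run the slots. */
        return PyModuleDef_Init(&examplemodule);
    }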
diff --git a/Modules/_localemodule.c b/Modules/_localemodule.c
index 036bdb30..0819d0e1 100644
--- a/Modules/_localemodule.c
+++ b/Modules/_localemodule.c
@@ -41,7 +41,17 @@ This software comes with no warranty. Use at your own risk.
PyDoc_STRVAR(locale__doc__, "Support for POSIX locales.");
-static PyObject *Error;
+typedef struct _locale_state {
+ PyObject *Error;
+} _locale_state;
+
+static inline _locale_state*
+get_locale_state(PyObject *m)
+{
+ void *state = PyModule_GetState(m);
+ assert(state != NULL);
+ return (_locale_state *)state;
+}
/* support functions for formatting floating point numbers */
@@ -94,7 +104,8 @@ PyLocale_setlocale(PyObject* self, PyObject* args)
#if defined(MS_WINDOWS)
if (category < LC_MIN || category > LC_MAX)
{
- PyErr_SetString(Error, "invalid locale category");
+ PyErr_SetString(get_locale_state(self)->Error,
+ "invalid locale category");
return NULL;
}
#endif
@@ -104,7 +115,8 @@ PyLocale_setlocale(PyObject* self, PyObject* args)
result = setlocale(category, locale);
if (!result) {
/* operation failed, no setting was changed */
- PyErr_SetString(Error, "unsupported locale setting");
+ PyErr_SetString(get_locale_state(self)->Error,
+ "unsupported locale setting");
return NULL;
}
result_object = PyUnicode_DecodeLocale(result, NULL);
@@ -114,7 +126,8 @@ PyLocale_setlocale(PyObject* self, PyObject* args)
/* get locale */
result = setlocale(category, NULL);
if (!result) {
- PyErr_SetString(Error, "locale query failed");
+ PyErr_SetString(get_locale_state(self)->Error,
+ "locale query failed");
return NULL;
}
result_object = PyUnicode_DecodeLocale(result, NULL);
@@ -622,14 +635,16 @@ PyDoc_STRVAR(bindtextdomain__doc__,
"Bind the C library's domain to dir.");
static PyObject*
-PyIntl_bindtextdomain(PyObject* self,PyObject*args)
+PyIntl_bindtextdomain(PyObject* self, PyObject*args)
{
- char *domain, *dirname, *current_dirname;
+ const char *domain, *dirname, *current_dirname;
PyObject *dirname_obj, *dirname_bytes = NULL, *result;
+
if (!PyArg_ParseTuple(args, "sO", &domain, &dirname_obj))
return 0;
if (!strlen(domain)) {
- PyErr_SetString(Error, "domain must be a non-empty string");
+ PyErr_SetString(get_locale_state(self)->Error,
+ "domain must be a non-empty string");
return 0;
}
if (dirname_obj != Py_None) {
@@ -710,63 +725,104 @@ static struct PyMethodDef PyLocale_Methods[] = {
{NULL, NULL}
};
-
-static struct PyModuleDef _localemodule = {
- PyModuleDef_HEAD_INIT,
- "_locale",
- locale__doc__,
- -1,
- PyLocale_Methods,
- NULL,
- NULL,
- NULL,
- NULL
-};
-
-PyMODINIT_FUNC
-PyInit__locale(void)
+static int
+_locale_exec(PyObject *module)
{
- PyObject *m;
#ifdef HAVE_LANGINFO_H
int i;
#endif
+#define ADD_INT(module, value) \
+ do { \
+ if (PyModule_AddIntConstant(module, #value, value) < 0) { \
+ return -1; \
+ } \
+ } while (0)
- m = PyModule_Create(&_localemodule);
- if (m == NULL)
- return NULL;
-
- PyModule_AddIntMacro(m, LC_CTYPE);
- PyModule_AddIntMacro(m, LC_TIME);
- PyModule_AddIntMacro(m, LC_COLLATE);
- PyModule_AddIntMacro(m, LC_MONETARY);
+ ADD_INT(module, LC_CTYPE);
+ ADD_INT(module, LC_TIME);
+ ADD_INT(module, LC_COLLATE);
+ ADD_INT(module, LC_MONETARY);
#ifdef LC_MESSAGES
- PyModule_AddIntMacro(m, LC_MESSAGES);
+ ADD_INT(module, LC_MESSAGES);
#endif /* LC_MESSAGES */
- PyModule_AddIntMacro(m, LC_NUMERIC);
- PyModule_AddIntMacro(m, LC_ALL);
- PyModule_AddIntMacro(m, CHAR_MAX);
+ ADD_INT(module, LC_NUMERIC);
+ ADD_INT(module, LC_ALL);
+ ADD_INT(module, CHAR_MAX);
- Error = PyErr_NewException("locale.Error", NULL, NULL);
- if (Error == NULL) {
- Py_DECREF(m);
- return NULL;
+ _locale_state *state = get_locale_state(module);
+ state->Error = PyErr_NewException("locale.Error", NULL, NULL);
+ if (state->Error == NULL) {
+ return -1;
+ }
+ Py_INCREF(get_locale_state(module)->Error);
+ if (PyModule_AddObject(module, "Error", get_locale_state(module)->Error) < 0) {
+ Py_DECREF(get_locale_state(module)->Error);
+ return -1;
}
- PyModule_AddObject(m, "Error", Error);
#ifdef HAVE_LANGINFO_H
for (i = 0; langinfo_constants[i].name; i++) {
- PyModule_AddIntConstant(m, langinfo_constants[i].name,
- langinfo_constants[i].value);
+ if (PyModule_AddIntConstant(module,
+ langinfo_constants[i].name,
+ langinfo_constants[i].value) < 0) {
+ return -1;
+ }
}
#endif
if (PyErr_Occurred()) {
- Py_DECREF(m);
- return NULL;
+ return -1;
}
- return m;
+ return 0;
+
+#undef ADD_INT
+}
+
+static struct PyModuleDef_Slot _locale_slots[] = {
+ {Py_mod_exec, _locale_exec},
+ {0, NULL}
+};
+
+static int
+locale_traverse(PyObject *module, visitproc visit, void *arg)
+{
+ _locale_state *state = get_locale_state(module);
+ Py_VISIT(state->Error);
+ return 0;
+}
+
+static int
+locale_clear(PyObject *module)
+{
+ _locale_state *state = get_locale_state(module);
+ Py_CLEAR(state->Error);
+ return 0;
+}
+
+static void
+locale_free(PyObject *module)
+{
+ locale_clear(module);
+}
+
+static struct PyModuleDef _localemodule = {
+ PyModuleDef_HEAD_INIT,
+ "_locale",
+ locale__doc__,
+ sizeof(_locale_state),
+ PyLocale_Methods,
+ _locale_slots,
+ locale_traverse,
+ locale_clear,
+ (freefunc)locale_free,
+};
+
+PyMODINIT_FUNC
+PyInit__locale(void)
+{
+ return PyModuleDef_Init(&_localemodule);
}
/*
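
Both the _json and _locale exec functions above wrap PyModule_AddObject() in an extra Py_INCREF/Py_DECREF pair. That is deliberate: PyModule_AddObject() steals a reference only on success, so code that also keeps the object in module state must own two references going in and give one back on failure. A minimal sketch of the idiom follows; add_error_type and the `slot` parameter are hypothetical and merely stand in for a field of the module's state struct.

static int
add_error_type(PyObject *module, PyObject **slot)
{
    *slot = PyErr_NewException("demo.Error", NULL, NULL);
    if (*slot == NULL) {
        return -1;
    }
    /* Two owners from here on: *slot and the module attribute. */
    Py_INCREF(*slot);
    if (PyModule_AddObject(module, "Error", *slot) < 0) {
        /* On failure PyModule_AddObject() does NOT steal the reference,
           so the extra one taken above must be dropped here. */
        Py_DECREF(*slot);
        return -1;
    }
    return 0;
}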
diff --git a/Modules/_lsprof.c b/Modules/_lsprof.c
index c5a6f444..5e53d839 100644
--- a/Modules/_lsprof.c
+++ b/Modules/_lsprof.c
@@ -1,5 +1,4 @@
#include "Python.h"
-#include "frameobject.h"
#include "rotatingtree.h"
/************************************************************/
@@ -54,7 +53,7 @@ typedef struct {
static PyTypeObject PyProfiler_Type;
#define PyProfiler_Check(op) PyObject_TypeCheck(op, &PyProfiler_Type)
-#define PyProfiler_CheckExact(op) (Py_TYPE(op) == &PyProfiler_Type)
+#define PyProfiler_CheckExact(op) Py_IS_TYPE(op, &PyProfiler_Type)
/*** External Timers ***/
@@ -388,15 +387,22 @@ profiler_callback(PyObject *self, PyFrameObject *frame, int what,
/* the 'frame' of a called function is about to start its execution */
case PyTrace_CALL:
- ptrace_enter_call(self, (void *)frame->f_code,
- (PyObject *)frame->f_code);
+ {
+ PyCodeObject *code = PyFrame_GetCode(frame);
+ ptrace_enter_call(self, (void *)code, (PyObject *)code);
+ Py_DECREF(code);
break;
+ }
/* the 'frame' of a called function is about to finish
(either normally or with an exception) */
case PyTrace_RETURN:
- ptrace_leave_call(self, (void *)frame->f_code);
+ {
+ PyCodeObject *code = PyFrame_GetCode(frame);
+ ptrace_leave_call(self, (void *)code);
+ Py_DECREF(code);
break;
+ }
/* case PyTrace_EXCEPTION:
If the exception results in the function exiting, a
@@ -578,8 +584,9 @@ static PyObject*
profiler_getstats(ProfilerObject *pObj, PyObject* noarg)
{
statscollector_t collect;
- if (pending_exception(pObj))
+ if (pending_exception(pObj)) {
return NULL;
+ }
if (!pObj->externalTimer || pObj->externalTimerUnit == 0.0) {
_PyTime_t onesec = _PyTime_FromSeconds(1);
collect.factor = (double)1 / onesec;
@@ -639,9 +646,15 @@ profiler_enable(ProfilerObject *self, PyObject *args, PyObject *kwds)
if (!PyArg_ParseTupleAndKeywords(args, kwds, "|ii:enable",
kwlist, &subcalls, &builtins))
return NULL;
- if (setSubcalls(self, subcalls) < 0 || setBuiltins(self, builtins) < 0)
+ if (setSubcalls(self, subcalls) < 0 || setBuiltins(self, builtins) < 0) {
+ return NULL;
+ }
+
+ PyThreadState *tstate = PyThreadState_GET();
+ if (_PyEval_SetProfile(tstate, profiler_callback, (PyObject*)self) < 0) {
return NULL;
- PyEval_SetProfile(profiler_callback, (PyObject*)self);
+ }
+
self->flags |= POF_ENABLED;
Py_RETURN_NONE;
}
@@ -671,11 +684,16 @@ Stop collecting profiling information.\n\
static PyObject*
profiler_disable(ProfilerObject *self, PyObject* noarg)
{
+ PyThreadState *tstate = PyThreadState_GET();
+ if (_PyEval_SetProfile(tstate, NULL, NULL) < 0) {
+ return NULL;
+ }
self->flags &= ~POF_ENABLED;
- PyEval_SetProfile(NULL, NULL);
+
flush_unmatched(self);
- if (pending_exception(self))
+ if (pending_exception(self)) {
return NULL;
+ }
Py_RETURN_NONE;
}
@@ -695,8 +713,13 @@ profiler_clear(ProfilerObject *pObj, PyObject* noarg)
static void
profiler_dealloc(ProfilerObject *op)
{
- if (op->flags & POF_ENABLED)
- PyEval_SetProfile(NULL, NULL);
+ if (op->flags & POF_ENABLED) {
+ PyThreadState *tstate = PyThreadState_GET();
+ if (_PyEval_SetProfile(tstate, NULL, NULL) < 0) {
+ PyErr_WriteUnraisable((PyObject *)op);
+ }
+ }
+
flush_unmatched(op);
clearEntries(op);
Py_XDECREF(op->externalTimer);
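
The _lsprof change above is not just cosmetic: frame->f_code used to be read as a borrowed pointer, while PyFrame_GetCode() (new in 3.9) returns a strong reference, which is why each call site now pairs it with Py_DECREF. A small sketch of the ownership rule, assuming the 3.9 frame API exposed through Python.h; use_code is a hypothetical helper.

#include "Python.h"

static void
use_code(PyFrameObject *frame)
{
    /* PyFrame_GetCode() cannot return NULL and the caller owns the result. */
    PyCodeObject *code = PyFrame_GetCode(frame);
    /* ... inspect or record the code object here ... */
    Py_DECREF(code);    /* release the strong reference when done */
}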
diff --git a/Modules/_lzmamodule.c b/Modules/_lzmamodule.c
index 1ab67f30..2a62a683 100644
--- a/Modules/_lzmamodule.c
+++ b/Modules/_lzmamodule.c
@@ -8,8 +8,7 @@
#define PY_SSIZE_T_CLEAN
#include "Python.h"
-#include "structmember.h"
-#include "pythread.h"
+#include "structmember.h" // PyMemberDef
#include <stdarg.h>
#include <string.h>
@@ -212,10 +211,9 @@ parse_filter_spec_lzma(PyObject *spec)
return NULL;
}
- options = (lzma_options_lzma *)PyMem_Malloc(sizeof *options);
+ options = (lzma_options_lzma *)PyMem_Calloc(1, sizeof *options);
if (options == NULL)
return PyErr_NoMemory();
- memset(options, 0, sizeof *options);
if (lzma_lzma_preset(options, preset)) {
PyMem_Free(options);
@@ -257,10 +255,9 @@ parse_filter_spec_delta(PyObject *spec)
return NULL;
}
- options = (lzma_options_delta *)PyMem_Malloc(sizeof *options);
+ options = (lzma_options_delta *)PyMem_Calloc(1, sizeof *options);
if (options == NULL)
return PyErr_NoMemory();
- memset(options, 0, sizeof *options);
options->type = LZMA_DELTA_TYPE_BYTE;
options->dist = dist;
return options;
@@ -281,10 +278,9 @@ parse_filter_spec_bcj(PyObject *spec)
return NULL;
}
- options = (lzma_options_bcj *)PyMem_Malloc(sizeof *options);
+ options = (lzma_options_bcj *)PyMem_Calloc(1, sizeof *options);
if (options == NULL)
return PyErr_NoMemory();
- memset(options, 0, sizeof *options);
options->start_offset = start_offset;
return options;
}
@@ -1486,19 +1482,13 @@ PyInit__lzma(void)
if (PyModule_AddObject(m, "LZMAError", Error) == -1)
return NULL;
- if (PyType_Ready(&Compressor_type) == -1)
- return NULL;
- Py_INCREF(&Compressor_type);
- if (PyModule_AddObject(m, "LZMACompressor",
- (PyObject *)&Compressor_type) == -1)
+ if (PyModule_AddType(m, &Compressor_type) < 0) {
return NULL;
+ }
- if (PyType_Ready(&Decompressor_type) == -1)
- return NULL;
- Py_INCREF(&Decompressor_type);
- if (PyModule_AddObject(m, "LZMADecompressor",
- (PyObject *)&Decompressor_type) == -1)
+ if (PyModule_AddType(m, &Decompressor_type) < 0) {
return NULL;
+ }
return m;
}
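
PyModule_AddType() (new in 3.9) collapses the ready/incref/add/decref-on-error dance that the removed lines spelled out by hand. Roughly, and only as a sketch of what the helper saves you (this is not the actual CPython implementation; the explicit `name` parameter is a simplification, since PyModule_AddType() derives the attribute name from tp_name itself):

static int
add_type_by_hand(PyObject *module, const char *name, PyTypeObject *type)
{
    if (PyType_Ready(type) < 0) {
        return -1;
    }
    Py_INCREF(type);
    if (PyModule_AddObject(module, name, (PyObject *)type) < 0) {
        Py_DECREF(type);
        return -1;
    }
    return 0;
}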
diff --git a/Modules/_multiprocessing/multiprocessing.h b/Modules/_multiprocessing/multiprocessing.h
index 512bc17f..fe78135d 100644
--- a/Modules/_multiprocessing/multiprocessing.h
+++ b/Modules/_multiprocessing/multiprocessing.h
@@ -27,14 +27,6 @@
# include <semaphore.h>
typedef sem_t *SEM_HANDLE;
# endif
-# define HANDLE int
-# define SOCKET int
-# define BOOL int
-# define UINT32 uint32_t
-# define INT32 int32_t
-# define TRUE 1
-# define FALSE 0
-# define INVALID_HANDLE_VALUE (-1)
#endif
/*
@@ -72,8 +64,6 @@
# define T_HANDLE T_POINTER
# define F_SEM_HANDLE F_HANDLE
# define T_SEM_HANDLE T_HANDLE
-# define F_DWORD "k"
-# define T_DWORD T_ULONG
#else
# define F_HANDLE "i"
# define T_HANDLE T_INT
diff --git a/Modules/_multiprocessing/posixshmem.c b/Modules/_multiprocessing/posixshmem.c
index 2049dbbc..436ac6d6 100644
--- a/Modules/_multiprocessing/posixshmem.c
+++ b/Modules/_multiprocessing/posixshmem.c
@@ -5,7 +5,6 @@ posixshmem - A Python extension that provides shm_open() and shm_unlink()
#define PY_SSIZE_T_CLEAN
#include <Python.h>
-#include "structmember.h"
// for shm_open() and shm_unlink()
#ifdef HAVE_SYS_MMAN_H
diff --git a/Modules/_multiprocessing/semaphore.c b/Modules/_multiprocessing/semaphore.c
index 4be2deae..ee490256 100644
--- a/Modules/_multiprocessing/semaphore.c
+++ b/Modules/_multiprocessing/semaphore.c
@@ -268,11 +268,8 @@ static PyObject *
semlock_acquire(SemLockObject *self, PyObject *args, PyObject *kwds)
{
int blocking = 1, res, err = 0;
- double timeout;
PyObject *timeout_obj = Py_None;
struct timespec deadline = {0};
- struct timeval now;
- long sec, nsec;
static char *kwlist[] = {"block", "timeout", NULL};
@@ -285,19 +282,23 @@ semlock_acquire(SemLockObject *self, PyObject *args, PyObject *kwds)
Py_RETURN_TRUE;
}
- if (timeout_obj != Py_None) {
- timeout = PyFloat_AsDouble(timeout_obj);
- if (PyErr_Occurred())
+ int use_deadline = (timeout_obj != Py_None);
+ if (use_deadline) {
+ double timeout = PyFloat_AsDouble(timeout_obj);
+ if (PyErr_Occurred()) {
return NULL;
- if (timeout < 0.0)
+ }
+ if (timeout < 0.0) {
timeout = 0.0;
+ }
+ struct timeval now;
if (gettimeofday(&now, NULL) < 0) {
PyErr_SetFromErrno(PyExc_OSError);
return NULL;
}
- sec = (long) timeout;
- nsec = (long) (1e9 * (timeout - sec) + 0.5);
+ long sec = (long) timeout;
+ long nsec = (long) (1e9 * (timeout - sec) + 0.5);
deadline.tv_sec = now.tv_sec + sec;
deadline.tv_nsec = now.tv_usec * 1000 + nsec;
deadline.tv_sec += (deadline.tv_nsec / 1000000000);
@@ -315,7 +316,7 @@ semlock_acquire(SemLockObject *self, PyObject *args, PyObject *kwds)
/* Couldn't acquire immediately, need to block */
do {
Py_BEGIN_ALLOW_THREADS
- if (timeout_obj == Py_None) {
+ if (!use_deadline) {
res = sem_wait(self->handle);
}
else {
diff --git a/Modules/_operator.c b/Modules/_operator.c
index 5aa229fa..8a54829e 100644
--- a/Modules/_operator.c
+++ b/Modules/_operator.c
@@ -785,6 +785,8 @@ _operator_length_hint_impl(PyObject *module, PyObject *obj,
return PyObject_LengthHint(obj, default_value);
}
+/* NOTE: Keep in sync with _hashopenssl.c implementation. */
+
/*[clinic input]
_operator._compare_digest = _operator.eq
@@ -1170,7 +1172,7 @@ attrgetter_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
for (idx = 0; idx < nattrs; ++idx) {
PyObject *item = PyTuple_GET_ITEM(args, idx);
Py_ssize_t item_len;
- void *data;
+ const void *data;
unsigned int kind;
int dot_count;
@@ -1682,7 +1684,7 @@ methodcaller_reduce(methodcallerobject *mc, PyObject *Py_UNUSED(ignored))
newargs[0] = (PyObject *)Py_TYPE(mc);
newargs[1] = mc->name;
- constructor = _PyObject_FastCallDict(partial, newargs, 2, mc->kwds);
+ constructor = PyObject_VectorcallDict(partial, newargs, 2, mc->kwds);
Py_DECREF(partial);
return Py_BuildValue("NO", constructor, mc->args);
@@ -1746,16 +1748,38 @@ static PyTypeObject methodcaller_type = {
};
-/* Initialization function for the module (*must* be called PyInit__operator) */
+static int
+operator_exec(PyObject *module)
+{
+ PyTypeObject *types[] = {
+ &itemgetter_type,
+ &attrgetter_type,
+ &methodcaller_type
+ };
+
+ for (size_t i = 0; i < Py_ARRAY_LENGTH(types); i++) {
+ if (PyModule_AddType(module, types[i]) < 0) {
+ return -1;
+ }
+ }
+
+ return 0;
+}
+
+
+static struct PyModuleDef_Slot operator_slots[] = {
+ {Py_mod_exec, operator_exec},
+ {0, NULL}
+};
static struct PyModuleDef operatormodule = {
PyModuleDef_HEAD_INIT,
"_operator",
operator_doc,
- -1,
+ 0,
operator_methods,
- NULL,
+ operator_slots,
NULL,
NULL,
NULL
@@ -1764,26 +1788,5 @@ static struct PyModuleDef operatormodule = {
PyMODINIT_FUNC
PyInit__operator(void)
{
- PyObject *m;
-
- /* Create the module and add the functions */
- m = PyModule_Create(&operatormodule);
- if (m == NULL)
- return NULL;
-
- if (PyType_Ready(&itemgetter_type) < 0)
- return NULL;
- Py_INCREF(&itemgetter_type);
- PyModule_AddObject(m, "itemgetter", (PyObject *)&itemgetter_type);
-
- if (PyType_Ready(&attrgetter_type) < 0)
- return NULL;
- Py_INCREF(&attrgetter_type);
- PyModule_AddObject(m, "attrgetter", (PyObject *)&attrgetter_type);
-
- if (PyType_Ready(&methodcaller_type) < 0)
- return NULL;
- Py_INCREF(&methodcaller_type);
- PyModule_AddObject(m, "methodcaller", (PyObject *)&methodcaller_type);
- return m;
+ return PyModuleDef_Init(&operatormodule);
}
diff --git a/Modules/_peg_parser.c b/Modules/_peg_parser.c
new file mode 100644
index 00000000..ca2a3cf7
--- /dev/null
+++ b/Modules/_peg_parser.c
@@ -0,0 +1,153 @@
+#include <Python.h>
+#include "pegen_interface.h"
+
+static int
+_mode_str_to_int(char *mode_str)
+{
+ int mode;
+ if (strcmp(mode_str, "exec") == 0) {
+ mode = Py_file_input;
+ }
+ else if (strcmp(mode_str, "eval") == 0) {
+ mode = Py_eval_input;
+ }
+ else if (strcmp(mode_str, "single") == 0) {
+ mode = Py_single_input;
+ }
+ else {
+ mode = -1;
+ }
+ return mode;
+}
+
+static mod_ty
+_run_parser(char *str, char *filename, int mode, PyCompilerFlags *flags, PyArena *arena, int oldparser)
+{
+ mod_ty mod;
+ if (!oldparser) {
+ mod = PyPegen_ASTFromString(str, filename, mode, flags, arena);
+ }
+ else {
+ mod = PyParser_ASTFromString(str, filename, mode, flags, arena);
+ }
+ return mod;
+}
+
+PyObject *
+_Py_compile_string(PyObject *self, PyObject *args, PyObject *kwds)
+{
+ static char *keywords[] = {"string", "filename", "mode", "oldparser", NULL};
+ char *the_string;
+ char *filename = "";
+ char *mode_str = "exec";
+ int oldparser = 0;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|ssp", keywords,
+ &the_string, &filename, &mode_str, &oldparser)) {
+ return NULL;
+ }
+
+ int mode = _mode_str_to_int(mode_str);
+ if (mode == -1) {
+ return PyErr_Format(PyExc_ValueError, "mode must be either 'exec' or 'eval' or 'single'");
+ }
+
+ PyCompilerFlags flags = _PyCompilerFlags_INIT;
+ flags.cf_flags = PyCF_IGNORE_COOKIE;
+
+ PyArena *arena = PyArena_New();
+ if (arena == NULL) {
+ return NULL;
+ }
+
+ mod_ty mod = _run_parser(the_string, filename, mode, &flags, arena, oldparser);
+ if (mod == NULL) {
+ PyArena_Free(arena);
+ return NULL;
+ }
+
+ PyObject *filename_ob = PyUnicode_DecodeFSDefault(filename);
+ if (filename_ob == NULL) {
+ PyArena_Free(arena);
+ return NULL;
+ }
+ PyCodeObject *result = PyAST_CompileObject(mod, filename_ob, &flags, -1, arena);
+ Py_XDECREF(filename_ob);
+ PyArena_Free(arena);
+ return (PyObject *)result;
+}
+
+PyObject *
+_Py_parse_string(PyObject *self, PyObject *args, PyObject *kwds)
+{
+ static char *keywords[] = {"string", "filename", "mode", "oldparser", "ast", NULL};
+ char *the_string;
+ char *filename = "";
+ char *mode_str = "exec";
+ int oldparser = 0;
+ int ast = 1;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|sspp", keywords,
+ &the_string, &filename, &mode_str, &oldparser, &ast)) {
+ return NULL;
+ }
+
+ int mode = _mode_str_to_int(mode_str);
+ if (mode == -1) {
+ return PyErr_Format(PyExc_ValueError, "mode must be either 'exec' or 'eval' or 'single'");
+ }
+
+ PyCompilerFlags flags = _PyCompilerFlags_INIT;
+ flags.cf_flags = PyCF_IGNORE_COOKIE;
+
+ PyArena *arena = PyArena_New();
+ if (arena == NULL) {
+ return NULL;
+ }
+
+ mod_ty mod = _run_parser(the_string, filename, mode, &flags, arena, oldparser);
+ if (mod == NULL) {
+ PyArena_Free(arena);
+ return NULL;
+ }
+
+ PyObject *result;
+ if (ast) {
+ result = PyAST_mod2obj(mod);
+ }
+ else {
+ Py_INCREF(Py_None);
+ result = Py_None;
+ }
+ PyArena_Free(arena);
+ return result;
+}
+
+static PyMethodDef ParseMethods[] = {
+ {
+ "parse_string",
+ (PyCFunction)(void (*)(void))_Py_parse_string,
+ METH_VARARGS|METH_KEYWORDS,
+ "Parse a string, return an AST."
+ },
+ {
+ "compile_string",
+ (PyCFunction)(void (*)(void))_Py_compile_string,
+ METH_VARARGS|METH_KEYWORDS,
+ "Compile a string, return a code object."
+ },
+ {NULL, NULL, 0, NULL} /* Sentinel */
+};
+
+static struct PyModuleDef parsemodule = {
+ PyModuleDef_HEAD_INIT,
+ .m_name = "peg_parser",
+ .m_doc = "A parser.",
+ .m_methods = ParseMethods,
+};
+
+PyMODINIT_FUNC
+PyInit__peg_parser(void)
+{
+ return PyModule_Create(&parsemodule);
+}
diff --git a/Modules/_pickle.c b/Modules/_pickle.c
index 42ce62fc..8dea2c6e 100644
--- a/Modules/_pickle.c
+++ b/Modules/_pickle.c
@@ -9,7 +9,7 @@
#endif
#include "Python.h"
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
PyDoc_STRVAR(pickle_module_doc,
"Optimized C implementation for the Python pickle module.");
@@ -359,7 +359,7 @@ _Pickle_FastCall(PyObject *func, PyObject *obj)
{
PyObject *result;
- result = PyObject_CallFunctionObjArgs(func, obj, NULL);
+ result = PyObject_CallOneArg(func, obj);
Py_DECREF(obj);
return result;
}
@@ -420,7 +420,7 @@ call_method(PyObject *func, PyObject *self, PyObject *obj)
return PyObject_CallFunctionObjArgs(func, self, obj, NULL);
}
else {
- return PyObject_CallFunctionObjArgs(func, obj, NULL);
+ return PyObject_CallOneArg(func, obj);
}
}
@@ -461,7 +461,7 @@ Pdata_New(void)
if (!(self = PyObject_New(Pdata, &Pdata_Type)))
return NULL;
- Py_SIZE(self) = 0;
+ Py_SET_SIZE(self, 0);
self->mark_set = 0;
self->fence = 0;
self->allocated = 8;
@@ -488,7 +488,7 @@ Pdata_clear(Pdata *self, Py_ssize_t clearto)
while (--i >= clearto) {
Py_CLEAR(self->data[i]);
}
- Py_SIZE(self) = clearto;
+ Py_SET_SIZE(self, clearto);
return 0;
}
@@ -539,7 +539,8 @@ Pdata_pop(Pdata *self)
Pdata_stack_underflow(self);
return NULL;
}
- return self->data[--Py_SIZE(self)];
+ Py_SET_SIZE(self, Py_SIZE(self) - 1);
+ return self->data[Py_SIZE(self)];
}
#define PDATA_POP(D, V) do { (V) = Pdata_pop((D)); } while (0)
@@ -549,7 +550,8 @@ Pdata_push(Pdata *self, PyObject *obj)
if (Py_SIZE(self) == self->allocated && Pdata_grow(self) < 0) {
return -1;
}
- self->data[Py_SIZE(self)++] = obj;
+ self->data[Py_SIZE(self)] = obj;
+ Py_SET_SIZE(self, Py_SIZE(self) + 1);
return 0;
}
@@ -579,7 +581,7 @@ Pdata_poptuple(Pdata *self, Py_ssize_t start)
for (i = start, j = 0; j < len; i++, j++)
PyTuple_SET_ITEM(tuple, j, self->data[i]);
- Py_SIZE(self) = start;
+ Py_SET_SIZE(self, start);
return tuple;
}
@@ -596,7 +598,7 @@ Pdata_poplist(Pdata *self, Py_ssize_t start)
for (i = start, j = 0; j < len; i++, j++)
PyList_SET_ITEM(list, j, self->data[i]);
- Py_SIZE(self) = start;
+ Py_SET_SIZE(self, start);
return list;
}
@@ -1274,7 +1276,7 @@ _Unpickler_ReadFromFile(UnpicklerObject *self, Py_ssize_t n)
return -1;
if (n == READ_WHOLE_LINE) {
- data = _PyObject_CallNoArg(self->readline);
+ data = PyObject_CallNoArgs(self->readline);
}
else {
PyObject *len;
@@ -2007,7 +2009,7 @@ fast_save_enter(PicklerObject *self, PyObject *obj)
PyErr_Format(PyExc_ValueError,
"fast mode: can't pickle cyclic objects "
"including object type %.200s at %p",
- obj->ob_type->tp_name, obj);
+ Py_TYPE(obj)->tp_name, obj);
self->fast_nesting = -1;
return 0;
}
@@ -2327,7 +2329,7 @@ _Pickler_write_bytes(PicklerObject *self,
return -1;
}
}
- result = PyObject_CallFunctionObjArgs(self->write, payload, NULL);
+ result = PyObject_CallOneArg(self->write, payload);
Py_XDECREF(mem);
if (result == NULL) {
return -1;
@@ -2535,8 +2537,7 @@ save_picklebuffer(PicklerObject *self, PyObject *obj)
}
int in_band = 1;
if (self->buffer_callback != NULL) {
- PyObject *ret = PyObject_CallFunctionObjArgs(self->buffer_callback,
- obj, NULL);
+ PyObject *ret = PyObject_CallOneArg(self->buffer_callback, obj);
if (ret == NULL) {
return -1;
}
@@ -2580,7 +2581,7 @@ raw_unicode_escape(PyObject *obj)
{
char *p;
Py_ssize_t i, size;
- void *data;
+ const void *data;
unsigned int kind;
_PyBytesWriter writer;
@@ -3337,7 +3338,7 @@ save_dict(PicklerObject *self, PyObject *obj)
} else {
_Py_IDENTIFIER(items);
- items = _PyObject_CallMethodId(obj, &PyId_items, NULL);
+ items = _PyObject_CallMethodIdNoArgs(obj, &PyId_items);
if (items == NULL)
goto error;
iter = PyObject_GetIter(items);
@@ -4353,8 +4354,7 @@ save(PicklerObject *self, PyObject *obj, int pers_save)
* regular reduction mechanism.
*/
if (self->reducer_override != NULL) {
- reduce_value = PyObject_CallFunctionObjArgs(self->reducer_override,
- obj, NULL);
+ reduce_value = PyObject_CallOneArg(self->reducer_override, obj);
if (reduce_value == NULL) {
goto error;
}
@@ -4441,7 +4441,7 @@ save(PicklerObject *self, PyObject *obj, int pers_save)
goto error;
}
if (reduce_func != NULL) {
- reduce_value = _PyObject_CallNoArg(reduce_func);
+ reduce_value = PyObject_CallNoArgs(reduce_func);
}
else {
PickleState *st = _Pickle_GetGlobalState();
@@ -4974,7 +4974,7 @@ Pickler_set_memo(PicklerObject *self, PyObject *obj, void *Py_UNUSED(ignored))
return -1;
}
- if (Py_TYPE(obj) == &PicklerMemoProxyType) {
+ if (Py_IS_TYPE(obj, &PicklerMemoProxyType)) {
PicklerObject *pickler =
((PicklerMemoProxyObject *)obj)->pickler;
@@ -5821,7 +5821,7 @@ instantiate(PyObject *cls, PyObject *args)
return NULL;
}
if (func == NULL) {
- return _PyObject_CallMethodIdObjArgs(cls, &PyId___new__, cls, NULL);
+ return _PyObject_CallMethodIdOneArg(cls, &PyId___new__, cls);
}
Py_DECREF(func);
}
@@ -6180,7 +6180,7 @@ load_pop(UnpicklerObject *self)
else {
len--;
Py_DECREF(self->stack->data[len]);
- Py_SIZE(self->stack) = len;
+ Py_SET_SIZE(self->stack, len);
}
return 0;
}
@@ -6235,8 +6235,10 @@ load_get(UnpicklerObject *self)
value = _Unpickler_MemoGet(self, idx);
if (value == NULL) {
- if (!PyErr_Occurred())
- PyErr_SetObject(PyExc_KeyError, key);
+ if (!PyErr_Occurred()) {
+ PickleState *st = _Pickle_GetGlobalState();
+ PyErr_Format(st->UnpicklingError, "Memo value not found at index %ld", idx);
+ }
Py_DECREF(key);
return -1;
}
@@ -6262,7 +6264,8 @@ load_binget(UnpicklerObject *self)
if (value == NULL) {
PyObject *key = PyLong_FromSsize_t(idx);
if (key != NULL) {
- PyErr_SetObject(PyExc_KeyError, key);
+ PickleState *st = _Pickle_GetGlobalState();
+ PyErr_Format(st->UnpicklingError, "Memo value not found at index %ld", idx);
Py_DECREF(key);
}
return -1;
@@ -6288,7 +6291,8 @@ load_long_binget(UnpicklerObject *self)
if (value == NULL) {
PyObject *key = PyLong_FromSsize_t(idx);
if (key != NULL) {
- PyErr_SetObject(PyExc_KeyError, key);
+ PickleState *st = _Pickle_GetGlobalState();
+ PyErr_Format(st->UnpicklingError, "Memo value not found at index %ld", idx);
Py_DECREF(key);
}
return -1;
@@ -6537,13 +6541,13 @@ do_append(UnpicklerObject *self, Py_ssize_t x)
result = _Pickle_FastCall(append_func, value);
if (result == NULL) {
Pdata_clear(self->stack, i + 1);
- Py_SIZE(self->stack) = x;
+ Py_SET_SIZE(self->stack, x);
Py_DECREF(append_func);
return -1;
}
Py_DECREF(result);
}
- Py_SIZE(self->stack) = x;
+ Py_SET_SIZE(self->stack, x);
Py_DECREF(append_func);
}
}
@@ -6665,12 +6669,12 @@ load_additems(UnpicklerObject *self)
result = _Pickle_FastCall(add_func, item);
if (result == NULL) {
Pdata_clear(self->stack, i + 1);
- Py_SIZE(self->stack) = mark;
+ Py_SET_SIZE(self->stack, mark);
return -1;
}
Py_DECREF(result);
}
- Py_SIZE(self->stack) = mark;
+ Py_SET_SIZE(self->stack, mark);
}
return 0;
@@ -7528,7 +7532,7 @@ Unpickler_set_memo(UnpicklerObject *self, PyObject *obj, void *Py_UNUSED(ignored
return -1;
}
- if (Py_TYPE(obj) == &UnpicklerMemoProxyType) {
+ if (Py_IS_TYPE(obj, &UnpicklerMemoProxyType)) {
UnpicklerObject *unpickler =
((UnpicklerMemoProxyObject *)obj)->unpickler;
@@ -7882,6 +7886,7 @@ _pickle_load_impl(PyObject *module, PyObject *file, int fix_imports,
_pickle.loads
data: object
+ /
*
fix_imports: bool = True
encoding: str = 'ASCII'
@@ -7908,7 +7913,7 @@ static PyObject *
_pickle_loads_impl(PyObject *module, PyObject *data, int fix_imports,
const char *encoding, const char *errors,
PyObject *buffers)
-/*[clinic end generated code: output=82ac1e6b588e6d02 input=9c2ab6a0960185ea]*/
+/*[clinic end generated code: output=82ac1e6b588e6d02 input=b3615540d0535087]*/
{
PyObject *result;
UnpicklerObject *unpickler = _Unpickler_New();
@@ -8002,10 +8007,6 @@ PyInit__pickle(void)
return m;
}
- if (PyType_Ready(&Unpickler_Type) < 0)
- return NULL;
- if (PyType_Ready(&Pickler_Type) < 0)
- return NULL;
if (PyType_Ready(&Pdata_Type) < 0)
return NULL;
if (PyType_Ready(&PicklerMemoProxyType) < 0)
@@ -8019,16 +8020,15 @@ PyInit__pickle(void)
return NULL;
/* Add types */
- Py_INCREF(&Pickler_Type);
- if (PyModule_AddObject(m, "Pickler", (PyObject *)&Pickler_Type) < 0)
+ if (PyModule_AddType(m, &Pickler_Type) < 0) {
return NULL;
- Py_INCREF(&Unpickler_Type);
- if (PyModule_AddObject(m, "Unpickler", (PyObject *)&Unpickler_Type) < 0)
+ }
+ if (PyModule_AddType(m, &Unpickler_Type) < 0) {
return NULL;
- Py_INCREF(&PyPickleBuffer_Type);
- if (PyModule_AddObject(m, "PickleBuffer",
- (PyObject *)&PyPickleBuffer_Type) < 0)
+ }
+ if (PyModule_AddType(m, &PyPickleBuffer_Type) < 0) {
return NULL;
+ }
st = _Pickle_GetState(m);
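
Many of the _pickle hunks are mechanical migrations to the accessor macros added in 3.9: writes such as `Py_SIZE(obj) = n` become `Py_SET_SIZE(obj, n)`, and exact-type checks use `Py_IS_TYPE()`. Behaviour is unchanged today, but the setters keep extensions compiling once the object header fields stop being usable as lvalues. A short illustration; shrink_stack and is_exactly are hypothetical helpers, not code from the patch.

static void
shrink_stack(PyVarObject *stack, Py_ssize_t newsize)
{
    assert(newsize <= Py_SIZE(stack));       /* reading ob_size is unchanged */
    Py_SET_SIZE(stack, newsize);             /* writing goes through the setter */
}

static int
is_exactly(PyObject *obj, PyTypeObject *tp)
{
    return Py_IS_TYPE(obj, tp);              /* replaces Py_TYPE(obj) == tp */
}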
diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c
index e693e532..5d1691ac 100644
--- a/Modules/_posixsubprocess.c
+++ b/Modules/_posixsubprocess.c
@@ -8,7 +8,7 @@
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
#endif
-#if defined(HAVE_SYS_STAT_H) && defined(__FreeBSD__)
+#if defined(HAVE_SYS_STAT_H)
#include <sys/stat.h>
#endif
#ifdef HAVE_SYS_SYSCALL_H
@@ -20,6 +20,11 @@
#ifdef HAVE_DIRENT_H
#include <dirent.h>
#endif
+#ifdef HAVE_GRP_H
+#include <grp.h>
+#endif /* HAVE_GRP_H */
+
+#include "posixmodule.h"
#ifdef _Py_MEMORY_SANITIZER
# include <sanitizer/msan_interface.h>
@@ -47,20 +52,43 @@
# define FD_DIR "/proc/self/fd"
#endif
+#ifdef NGROUPS_MAX
+#define MAX_GROUPS NGROUPS_MAX
+#else
+#define MAX_GROUPS 64
+#endif
+
#define POSIX_CALL(call) do { if ((call) == -1) goto error; } while (0)
+typedef struct {
+ PyObject* disable;
+ PyObject* enable;
+ PyObject* isenabled;
+} _posixsubprocessstate;
+
+static struct PyModuleDef _posixsubprocessmodule;
+
+static inline _posixsubprocessstate*
+get_posixsubprocess_state(PyObject *module)
+{
+ void *state = PyModule_GetState(module);
+ assert(state != NULL);
+ return (_posixsubprocessstate *)state;
+}
+
+#define _posixsubprocessstate_global get_posixsubprocess_state(PyState_FindModule(&_posixsubprocessmodule))
/* If gc was disabled, call gc.enable(). Return 0 on success. */
static int
_enable_gc(int need_to_reenable_gc, PyObject *gc_module)
{
PyObject *result;
- _Py_IDENTIFIER(enable);
PyObject *exctype, *val, *tb;
if (need_to_reenable_gc) {
PyErr_Fetch(&exctype, &val, &tb);
- result = _PyObject_CallMethodId(gc_module, &PyId_enable, NULL);
+ result = PyObject_CallMethodNoArgs(
+ gc_module, _posixsubprocessstate_global->enable);
if (exctype != NULL) {
PyErr_Restore(exctype, val, tb);
}
@@ -236,9 +264,15 @@ _close_fds_by_brute_force(long start_fd, PyObject *py_fds_to_keep)
start_fd = keep_fd + 1;
}
if (start_fd <= end_fd) {
+#if defined(__FreeBSD__)
+ /* Any errors encountered while closing file descriptors are ignored */
+ closefrom(start_fd);
+#else
for (fd_num = start_fd; fd_num < end_fd; ++fd_num) {
- close(fd_num);
+ /* Ignore errors */
+ (void)close(fd_num);
}
+#endif
}
}
@@ -405,6 +439,9 @@ child_exec(char *const exec_array[],
int errpipe_read, int errpipe_write,
int close_fds, int restore_signals,
int call_setsid,
+ int call_setgid, gid_t gid,
+ int call_setgroups, size_t groups_size, const gid_t *groups,
+ int call_setuid, uid_t uid, int child_umask,
PyObject *py_fds_to_keep,
PyObject *preexec_fn,
PyObject *preexec_fn_args_tuple)
@@ -474,6 +511,9 @@ child_exec(char *const exec_array[],
if (cwd)
POSIX_CALL(chdir(cwd));
+ if (child_umask >= 0)
+ umask(child_umask); /* umask() always succeeds. */
+
if (restore_signals)
_Py_RestoreSignals();
@@ -482,6 +522,22 @@ child_exec(char *const exec_array[],
POSIX_CALL(setsid());
#endif
+#ifdef HAVE_SETGROUPS
+ if (call_setgroups)
+ POSIX_CALL(setgroups(groups_size, groups));
+#endif /* HAVE_SETGROUPS */
+
+#ifdef HAVE_SETREGID
+ if (call_setgid)
+ POSIX_CALL(setregid(gid, gid));
+#endif /* HAVE_SETREGID */
+
+#ifdef HAVE_SETREUID
+ if (call_setuid)
+ POSIX_CALL(setreuid(uid, uid));
+#endif /* HAVE_SETREUID */
+
+
reached_preexec = 1;
if (preexec_fn != Py_None && preexec_fn_args_tuple) {
/* This is where the user has asked us to deadlock their program. */
@@ -561,30 +617,38 @@ subprocess_fork_exec(PyObject* self, PyObject *args)
PyObject *env_list, *preexec_fn;
PyObject *process_args, *converted_args = NULL, *fast_args = NULL;
PyObject *preexec_fn_args_tuple = NULL;
+ PyObject *groups_list;
+ PyObject *uid_object, *gid_object;
int p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite;
int errpipe_read, errpipe_write, close_fds, restore_signals;
int call_setsid;
+ int call_setgid = 0, call_setgroups = 0, call_setuid = 0;
+ uid_t uid;
+ gid_t gid, *groups = NULL;
+ int child_umask;
PyObject *cwd_obj, *cwd_obj2;
const char *cwd;
pid_t pid;
int need_to_reenable_gc = 0;
char *const *exec_array, *const *argv = NULL, *const *envp = NULL;
- Py_ssize_t arg_num;
+ Py_ssize_t arg_num, num_groups = 0;
int need_after_fork = 0;
int saved_errno = 0;
if (!PyArg_ParseTuple(
- args, "OOpO!OOiiiiiiiiiiO:fork_exec",
+ args, "OOpO!OOiiiiiiiiiiOOOiO:fork_exec",
&process_args, &executable_list,
&close_fds, &PyTuple_Type, &py_fds_to_keep,
&cwd_obj, &env_list,
&p2cread, &p2cwrite, &c2pread, &c2pwrite,
&errread, &errwrite, &errpipe_read, &errpipe_write,
- &restore_signals, &call_setsid, &preexec_fn))
+ &restore_signals, &call_setsid,
+ &gid_object, &groups_list, &uid_object, &child_umask,
+ &preexec_fn))
return NULL;
if ((preexec_fn != Py_None) &&
- (_PyInterpreterState_Get() != PyInterpreterState_Main())) {
+ (PyInterpreterState_Get() != PyInterpreterState_Main())) {
PyErr_SetString(PyExc_RuntimeError,
"preexec_fn not supported within subinterpreters");
return NULL;
@@ -599,16 +663,23 @@ subprocess_fork_exec(PyObject* self, PyObject *args)
return NULL;
}
+ PyInterpreterState *interp = PyInterpreterState_Get();
+ const PyConfig *config = _PyInterpreterState_GetConfig(interp);
+ if (config->_isolated_interpreter) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "subprocess not supported for isolated subinterpreters");
+ return NULL;
+ }
+
/* We need to call gc.disable() when we'll be calling preexec_fn */
if (preexec_fn != Py_None) {
PyObject *result;
- _Py_IDENTIFIER(isenabled);
- _Py_IDENTIFIER(disable);
gc_module = PyImport_ImportModule("gc");
if (gc_module == NULL)
return NULL;
- result = _PyObject_CallMethodId(gc_module, &PyId_isenabled, NULL);
+ result = PyObject_CallMethodNoArgs(
+ gc_module, _posixsubprocessstate_global->isenabled);
if (result == NULL) {
Py_DECREF(gc_module);
return NULL;
@@ -619,7 +690,8 @@ subprocess_fork_exec(PyObject* self, PyObject *args)
Py_DECREF(gc_module);
return NULL;
}
- result = _PyObject_CallMethodId(gc_module, &PyId_disable, NULL);
+ result = PyObject_CallMethodNoArgs(
+ gc_module, _posixsubprocessstate_global->disable);
if (result == NULL) {
Py_DECREF(gc_module);
return NULL;
@@ -679,6 +751,90 @@ subprocess_fork_exec(PyObject* self, PyObject *args)
cwd_obj2 = NULL;
}
+ if (groups_list != Py_None) {
+#ifdef HAVE_SETGROUPS
+ Py_ssize_t i;
+ unsigned long gid;
+
+ if (!PyList_Check(groups_list)) {
+ PyErr_SetString(PyExc_TypeError,
+ "setgroups argument must be a list");
+ goto cleanup;
+ }
+ num_groups = PySequence_Size(groups_list);
+
+ if (num_groups < 0)
+ goto cleanup;
+
+ if (num_groups > MAX_GROUPS) {
+ PyErr_SetString(PyExc_ValueError, "too many groups");
+ goto cleanup;
+ }
+
+ if ((groups = PyMem_RawMalloc(num_groups * sizeof(gid_t))) == NULL) {
+ PyErr_SetString(PyExc_MemoryError,
+ "failed to allocate memory for group list");
+ goto cleanup;
+ }
+
+ for (i = 0; i < num_groups; i++) {
+ PyObject *elem;
+ elem = PySequence_GetItem(groups_list, i);
+ if (!elem)
+ goto cleanup;
+ if (!PyLong_Check(elem)) {
+ PyErr_SetString(PyExc_TypeError,
+ "groups must be integers");
+ Py_DECREF(elem);
+ goto cleanup;
+ } else {
+ /* In posixmodule.c UnsignedLong is used as a fallback value
+ * if the value provided does not fit in a Long. Since we are
+ * already doing the bounds checking on the Python side, we
+ * can go directly to an UnsignedLong here. */
+ if (!_Py_Gid_Converter(elem, &gid)) {
+ Py_DECREF(elem);
+ PyErr_SetString(PyExc_ValueError, "invalid group id");
+ goto cleanup;
+ }
+ groups[i] = gid;
+ }
+ Py_DECREF(elem);
+ }
+ call_setgroups = 1;
+
+#else /* HAVE_SETGROUPS */
+ PyErr_BadInternalCall();
+ goto cleanup;
+#endif /* HAVE_SETGROUPS */
+ }
+
+ if (gid_object != Py_None) {
+#ifdef HAVE_SETREGID
+ if (!_Py_Gid_Converter(gid_object, &gid))
+ goto cleanup;
+
+ call_setgid = 1;
+
+#else /* HAVE_SETREGID */
+ PyErr_BadInternalCall();
+ goto cleanup;
+#endif /* HAVE_SETREUID */
+ }
+
+ if (uid_object != Py_None) {
+#ifdef HAVE_SETREUID
+ if (!_Py_Uid_Converter(uid_object, &uid))
+ goto cleanup;
+
+ call_setuid = 1;
+
+#else /* HAVE_SETREUID */
+ PyErr_BadInternalCall();
+ goto cleanup;
+#endif /* HAVE_SETREUID */
+ }
+
/* This must be the last thing done before fork() because we do not
* want to call PyOS_BeforeFork() if there is any chance of another
* error leading to the cleanup: code without calling fork(). */
@@ -711,6 +867,8 @@ subprocess_fork_exec(PyObject* self, PyObject *args)
p2cread, p2cwrite, c2pread, c2pwrite,
errread, errwrite, errpipe_read, errpipe_write,
close_fds, restore_signals, call_setsid,
+ call_setgid, gid, call_setgroups, num_groups, groups,
+ call_setuid, uid, child_umask,
py_fds_to_keep, preexec_fn, preexec_fn_args_tuple);
_exit(255);
return NULL; /* Dead code to avoid a potential compiler warning. */
@@ -735,6 +893,7 @@ subprocess_fork_exec(PyObject* self, PyObject *args)
if (_enable_gc(need_to_reenable_gc, gc_module)) {
pid = -1;
}
+ PyMem_RawFree(groups);
Py_XDECREF(preexec_fn_args_tuple);
Py_XDECREF(gc_module);
@@ -755,6 +914,8 @@ cleanup:
_Py_FreeCharPArray(argv);
if (exec_array)
_Py_FreeCharPArray(exec_array);
+
+ PyMem_RawFree(groups);
Py_XDECREF(converted_args);
Py_XDECREF(fast_args);
Py_XDECREF(preexec_fn_args_tuple);
@@ -765,16 +926,22 @@ cleanup:
PyDoc_STRVAR(subprocess_fork_exec_doc,
-"fork_exec(args, executable_list, close_fds, cwd, env,\n\
+"fork_exec(args, executable_list, close_fds, pass_fds, cwd, env,\n\
p2cread, p2cwrite, c2pread, c2pwrite,\n\
errread, errwrite, errpipe_read, errpipe_write,\n\
- restore_signals, call_setsid, preexec_fn)\n\
+ restore_signals, call_setsid,\n\
+ gid, groups_list, uid,\n\
+ preexec_fn)\n\
\n\
Forks a child process, closes parent file descriptors as appropriate in the\n\
child and dups the few that are needed before calling exec() in the child\n\
process.\n\
\n\
-The preexec_fn, if supplied, will be called immediately before exec.\n\
+If close_fds is true, close file descriptors 3 and higher, except those listed\n\
+in the sorted tuple pass_fds.\n\
+\n\
+The preexec_fn, if supplied, will be called immediately before closing file\n\
+descriptors and exec.\n\
WARNING: preexec_fn is NOT SAFE if your application uses threads.\n\
It may trigger infrequent, difficult to debug deadlocks.\n\
\n\
@@ -798,16 +965,56 @@ static PyMethodDef module_methods[] = {
};
+static int _posixsubprocess_traverse(PyObject *m, visitproc visit, void *arg) {
+ Py_VISIT(get_posixsubprocess_state(m)->disable);
+ Py_VISIT(get_posixsubprocess_state(m)->enable);
+ Py_VISIT(get_posixsubprocess_state(m)->isenabled);
+ return 0;
+}
+
+static int _posixsubprocess_clear(PyObject *m) {
+ Py_CLEAR(get_posixsubprocess_state(m)->disable);
+ Py_CLEAR(get_posixsubprocess_state(m)->enable);
+ Py_CLEAR(get_posixsubprocess_state(m)->isenabled);
+ return 0;
+}
+
+static void _posixsubprocess_free(void *m) {
+ _posixsubprocess_clear((PyObject *)m);
+}
+
static struct PyModuleDef _posixsubprocessmodule = {
PyModuleDef_HEAD_INIT,
"_posixsubprocess",
module_doc,
- -1, /* No memory is needed. */
+ sizeof(_posixsubprocessstate),
module_methods,
+ NULL,
+ _posixsubprocess_traverse,
+ _posixsubprocess_clear,
+ _posixsubprocess_free,
};
PyMODINIT_FUNC
PyInit__posixsubprocess(void)
{
- return PyModule_Create(&_posixsubprocessmodule);
+ PyObject* m;
+
+ m = PyState_FindModule(&_posixsubprocessmodule);
+ if (m != NULL) {
+ Py_INCREF(m);
+ return m;
+ }
+
+ m = PyModule_Create(&_posixsubprocessmodule);
+ if (m == NULL) {
+ return NULL;
+ }
+
+ get_posixsubprocess_state(m)->disable = PyUnicode_InternFromString("disable");
+ get_posixsubprocess_state(m)->enable = PyUnicode_InternFromString("enable");
+ get_posixsubprocess_state(m)->isenabled = PyUnicode_InternFromString("isenabled");
+
+ PyState_AddModule(m, &_posixsubprocessmodule);
+ return m;
}
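
_posixsubprocess keeps single-phase init here but still gains per-module state; because helpers such as _enable_gc() never see the module object, the code registers the module with PyState_AddModule() and fetches it back through PyState_FindModule(). A compressed sketch of that arrangement follows; all `subdemo` names are made up for illustration.

typedef struct {
    PyObject *method_name;                  /* e.g. an interned "isenabled" */
} subdemostate;

static struct PyModuleDef subdemomodule;    /* forward declaration for the macro */

#define subdemostate_global \
    ((subdemostate *)PyModule_GetState(PyState_FindModule(&subdemomodule)))

static struct PyModuleDef subdemomodule = {
    PyModuleDef_HEAD_INIT,
    "_subdemo",
    NULL,                                   /* m_doc */
    sizeof(subdemostate),                   /* m_size */
    NULL,                                   /* m_methods */
};

PyMODINIT_FUNC
PyInit__subdemo(void)
{
    /* Single-phase: reuse the module if this interpreter already has one. */
    PyObject *m = PyState_FindModule(&subdemomodule);
    if (m != NULL) {
        Py_INCREF(m);
        return m;
    }
    m = PyModule_Create(&subdemomodule);
    if (m == NULL) {
        return NULL;
    }
    subdemostate *state = (subdemostate *)PyModule_GetState(m);
    state->method_name = PyUnicode_InternFromString("isenabled");
    if (state->method_name == NULL) {
        Py_DECREF(m);
        return NULL;
    }
    PyState_AddModule(m, &subdemomodule);   /* make FindModule work later */
    return m;
}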
diff --git a/Modules/_queuemodule.c b/Modules/_queuemodule.c
index e033da50..b155ea94 100644
--- a/Modules/_queuemodule.c
+++ b/Modules/_queuemodule.c
@@ -1,6 +1,5 @@
#include "Python.h"
-#include "structmember.h" /* offsetof */
-#include "pythread.h"
+#include <stddef.h>               // offsetof()
/*[clinic input]
module _queue
@@ -302,6 +301,8 @@ static PyMethodDef simplequeue_methods[] = {
_QUEUE_SIMPLEQUEUE_PUT_METHODDEF
_QUEUE_SIMPLEQUEUE_PUT_NOWAIT_METHODDEF
_QUEUE_SIMPLEQUEUE_QSIZE_METHODDEF
+ {"__class_getitem__", (PyCFunction)Py_GenericAlias,
+ METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
{NULL, NULL} /* sentinel */
};
@@ -390,11 +391,9 @@ PyInit__queue(void)
if (PyModule_AddObject(m, "Empty", EmptyError) < 0)
return NULL;
- if (PyType_Ready(&PySimpleQueueType) < 0)
- return NULL;
- Py_INCREF(&PySimpleQueueType);
- if (PyModule_AddObject(m, "SimpleQueue", (PyObject *)&PySimpleQueueType) < 0)
+ if (PyModule_AddType(m, &PySimpleQueueType) < 0) {
return NULL;
+ }
return m;
}
diff --git a/Modules/_randommodule.c b/Modules/_randommodule.c
index 4e9ac407..1b01491b 100644
--- a/Modules/_randommodule.c
+++ b/Modules/_randommodule.c
@@ -11,7 +11,7 @@
* renamed genrand_res53() to random_random() and wrapped
in python calling/return code.
- * genrand_int32() and the helper functions, init_genrand()
+ * genrand_uint32() and the helper functions, init_genrand()
and init_by_array(), were declared static, wrapped in
Python calling/return code. also, their global data
references were replaced with structure references.
@@ -67,9 +67,9 @@
/* ---------------------------------------------------------------*/
#include "Python.h"
-#include <time.h>               /* for seeding to current time */
+#include "pycore_byteswap.h" // _Py_bswap32()
#ifdef HAVE_PROCESS_H
-# include <process.h>          /* needed for getpid() */
+# include <process.h>            // getpid()
#endif
/* Period parameters -- These are all magic. Don't change. */
@@ -79,15 +79,29 @@
#define UPPER_MASK 0x80000000U /* most significant w-r bits */
#define LOWER_MASK 0x7fffffffU /* least significant r bits */
+typedef struct {
+ PyObject *Random_Type;
+ PyObject *Long___abs__;
+} _randomstate;
+
+static inline _randomstate*
+get_random_state(PyObject *module)
+{
+ void *state = PyModule_GetState(module);
+ assert(state != NULL);
+ return (_randomstate *)state;
+}
+
+static struct PyModuleDef _randommodule;
+
+#define _randomstate_global get_random_state(PyState_FindModule(&_randommodule))
+
typedef struct {
PyObject_HEAD
int index;
uint32_t state[N];
} RandomObject;
-static PyTypeObject Random_Type;
-
-#define RandomObject_Check(v) (Py_TYPE(v) == &Random_Type)
#include "clinic/_randommodule.c.h"
@@ -102,7 +116,7 @@ class _random.Random "RandomObject *" "&Random_Type"
/* generates a random number on [0,0xffffffff]-interval */
static uint32_t
-genrand_int32(RandomObject *self)
+genrand_uint32(RandomObject *self)
{
uint32_t y;
static const uint32_t mag01[2] = {0x0U, MATRIX_A};
@@ -157,7 +171,7 @@ static PyObject *
_random_Random_random_impl(RandomObject *self)
/*[clinic end generated code: output=117ff99ee53d755c input=afb2a59cbbb00349]*/
{
- uint32_t a=genrand_int32(self)>>5, b=genrand_int32(self)>>6;
+ uint32_t a=genrand_uint32(self)>>5, b=genrand_uint32(self)>>6;
return PyFloat_FromDouble((a*67108864.0+b)*(1.0/9007199254740992.0));
}
@@ -220,7 +234,7 @@ init_by_array(RandomObject *self, uint32_t init_key[], size_t key_length)
static int
random_seed_urandom(RandomObject *self)
{
- PY_UINT32_T key[N];
+ uint32_t key[N];
if (_PyOS_URandomNonblock(key, sizeof(key)) < 0) {
return -1;
@@ -236,14 +250,14 @@ random_seed_time_pid(RandomObject *self)
uint32_t key[5];
now = _PyTime_GetSystemClock();
- key[0] = (PY_UINT32_T)(now & 0xffffffffU);
- key[1] = (PY_UINT32_T)(now >> 32);
+ key[0] = (uint32_t)(now & 0xffffffffU);
+ key[1] = (uint32_t)(now >> 32);
- key[2] = (PY_UINT32_T)getpid();
+ key[2] = (uint32_t)getpid();
now = _PyTime_GetMonotonicClock();
- key[3] = (PY_UINT32_T)(now & 0xffffffffU);
- key[4] = (PY_UINT32_T)(now >> 32);
+ key[3] = (uint32_t)(now & 0xffffffffU);
+ key[4] = (uint32_t)(now >> 32);
init_by_array(self, key, Py_ARRAY_LENGTH(key));
}
@@ -272,10 +286,12 @@ random_seed(RandomObject *self, PyObject *arg)
* So: if the arg is a PyLong, use its absolute value.
* Otherwise use its hash value, cast to unsigned.
*/
- if (PyLong_Check(arg)) {
+ if (PyLong_CheckExact(arg)) {
+ n = PyNumber_Absolute(arg);
+ } else if (PyLong_Check(arg)) {
/* Calling int.__abs__() prevents calling arg.__abs__(), which might
return an invalid value. See issue #31478. */
- n = PyLong_Type.tp_as_number->nb_absolute(arg);
+ n = PyObject_CallOneArg(_randomstate_global->Long___abs__, arg);
}
else {
Py_hash_t hash = PyObject_Hash(arg);
@@ -458,14 +474,17 @@ _random_Random_getrandbits_impl(RandomObject *self, int k)
uint32_t *wordarray;
PyObject *result;
- if (k <= 0) {
+ if (k < 0) {
PyErr_SetString(PyExc_ValueError,
- "number of bits must be greater than zero");
+ "number of bits must be non-negative");
return NULL;
}
+ if (k == 0)
+ return PyLong_FromLong(0);
+
if (k <= 32) /* Fast path */
- return PyLong_FromUnsignedLong(genrand_int32(self) >> (32 - k));
+ return PyLong_FromUnsignedLong(genrand_uint32(self) >> (32 - k));
words = (k - 1) / 32 + 1;
wordarray = (uint32_t *)PyMem_Malloc(words * 4);
@@ -482,7 +501,7 @@ _random_Random_getrandbits_impl(RandomObject *self, int k)
for (i = words - 1; i >= 0; i--, k -= 32)
#endif
{
- r = genrand_int32(self);
+ r = genrand_uint32(self);
if (k < 32)
r >>= (32 - k); /* Drop least significant bits */
wordarray[i] = r;
@@ -500,10 +519,12 @@ random_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
RandomObject *self;
PyObject *tmp;
- if (type == &Random_Type && !_PyArg_NoKeywords("Random", kwds))
+ if (type == (PyTypeObject*)_randomstate_global->Random_Type &&
+ !_PyArg_NoKeywords("Random()", kwds)) {
return NULL;
+ }
- self = (RandomObject *)type->tp_alloc(type, 0);
+ self = (RandomObject *)PyType_GenericAlloc(type, 0);
if (self == NULL)
return NULL;
tmp = random_seed(self, args);
@@ -515,76 +536,86 @@ random_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
return (PyObject *)self;
}
+
+/*[clinic input]
+
+_random.Random.__reduce__
+
+[clinic start generated code]*/
+
+static PyObject *
+_random_Random___reduce___impl(RandomObject *self)
+/*[clinic end generated code: output=ddea0dcdb60ffd6d input=bd38ec35fd157e0f]*/
+{
+ PyErr_Format(PyExc_TypeError,
+ "cannot pickle %s object",
+ Py_TYPE(self)->tp_name);
+ return NULL;
+}
+
static PyMethodDef random_methods[] = {
_RANDOM_RANDOM_RANDOM_METHODDEF
_RANDOM_RANDOM_SEED_METHODDEF
_RANDOM_RANDOM_GETSTATE_METHODDEF
_RANDOM_RANDOM_SETSTATE_METHODDEF
_RANDOM_RANDOM_GETRANDBITS_METHODDEF
+ _RANDOM_RANDOM___REDUCE___METHODDEF
{NULL, NULL} /* sentinel */
};
PyDoc_STRVAR(random_doc,
"Random() -> create a random number generator with its own internal state.");
-static PyTypeObject Random_Type = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "_random.Random", /*tp_name*/
- sizeof(RandomObject), /*tp_basicsize*/
- 0, /*tp_itemsize*/
- /* methods */
- 0, /*tp_dealloc*/
- 0, /*tp_vectorcall_offset*/
- 0, /*tp_getattr*/
- 0, /*tp_setattr*/
- 0, /*tp_as_async*/
- 0, /*tp_repr*/
- 0, /*tp_as_number*/
- 0, /*tp_as_sequence*/
- 0, /*tp_as_mapping*/
- 0, /*tp_hash*/
- 0, /*tp_call*/
- 0, /*tp_str*/
- PyObject_GenericGetAttr, /*tp_getattro*/
- 0, /*tp_setattro*/
- 0, /*tp_as_buffer*/
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/
- random_doc, /*tp_doc*/
- 0, /*tp_traverse*/
- 0, /*tp_clear*/
- 0, /*tp_richcompare*/
- 0, /*tp_weaklistoffset*/
- 0, /*tp_iter*/
- 0, /*tp_iternext*/
- random_methods, /*tp_methods*/
- 0, /*tp_members*/
- 0, /*tp_getset*/
- 0, /*tp_base*/
- 0, /*tp_dict*/
- 0, /*tp_descr_get*/
- 0, /*tp_descr_set*/
- 0, /*tp_dictoffset*/
- 0, /*tp_init*/
- 0, /*tp_alloc*/
- random_new, /*tp_new*/
- PyObject_Free, /*tp_free*/
- 0, /*tp_is_gc*/
+static PyType_Slot Random_Type_slots[] = {
+ {Py_tp_doc, (void *)random_doc},
+ {Py_tp_methods, random_methods},
+ {Py_tp_new, random_new},
+ {Py_tp_free, PyObject_Free},
+ {0, 0},
+};
+
+static PyType_Spec Random_Type_spec = {
+ "_random.Random",
+ sizeof(RandomObject),
+ 0,
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+ Random_Type_slots
};
PyDoc_STRVAR(module_doc,
"Module implements the Mersenne Twister random number generator.");
+static int
+_random_traverse(PyObject *module, visitproc visit, void *arg)
+{
+ Py_VISIT(get_random_state(module)->Random_Type);
+ return 0;
+}
+
+static int
+_random_clear(PyObject *module)
+{
+ Py_CLEAR(get_random_state(module)->Random_Type);
+ Py_CLEAR(get_random_state(module)->Long___abs__);
+ return 0;
+}
+
+static void
+_random_free(void *module)
+{
+ _random_clear((PyObject *)module);
+}
static struct PyModuleDef _randommodule = {
PyModuleDef_HEAD_INIT,
"_random",
module_doc,
- -1,
- NULL,
+ sizeof(_randomstate),
NULL,
NULL,
- NULL,
- NULL
+ _random_traverse,
+ _random_clear,
+ _random_free,
};
PyMODINIT_FUNC
@@ -592,12 +623,41 @@ PyInit__random(void)
{
PyObject *m;
- if (PyType_Ready(&Random_Type) < 0)
+ PyObject *Random_Type = PyType_FromSpec(&Random_Type_spec);
+ if (Random_Type == NULL) {
return NULL;
+ }
+
m = PyModule_Create(&_randommodule);
- if (m == NULL)
+ if (m == NULL) {
+ Py_DECREF(Random_Type);
return NULL;
- Py_INCREF(&Random_Type);
- PyModule_AddObject(m, "Random", (PyObject *)&Random_Type);
+ }
+ get_random_state(m)->Random_Type = Random_Type;
+
+ Py_INCREF(Random_Type);
+ PyModule_AddObject(m, "Random", Random_Type);
+
+ /* Look up and save int.__abs__, which is needed in random_seed(). */
+ PyObject *longval = NULL, *longtype = NULL;
+ longval = PyLong_FromLong(0);
+ if (longval == NULL) goto fail;
+
+ longtype = PyObject_Type(longval);
+ if (longtype == NULL) goto fail;
+
+ PyObject *abs = PyObject_GetAttrString(longtype, "__abs__");
+ if (abs == NULL) goto fail;
+
+ Py_DECREF(longtype);
+ Py_DECREF(longval);
+ get_random_state(m)->Long___abs__ = abs;
+
return m;
+
+fail:
+ Py_XDECREF(longtype);
+ Py_XDECREF(longval);
+ Py_DECREF(m);
+ return NULL;
}
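
_random (like _pickle and _sqlite below) also swaps the varargs call helpers for the fixed-arity functions made public in 3.9: PyObject_CallNoArgs(), PyObject_CallOneArg() and PyObject_CallMethodNoArgs(). They avoid packing an argument tuple and make the argument count explicit. A minimal before/after sketch; call_abs and call_plain are hypothetical, and `abs_method` merely stands in for the cached int.__abs__ stored in module state above.

static PyObject *
call_abs(PyObject *abs_method, PyObject *arg)
{
    /* Before: PyObject_CallFunctionObjArgs(abs_method, arg, NULL);
       after: the fixed-arity form, no tuple packing, no NULL sentinel. */
    return PyObject_CallOneArg(abs_method, arg);
}

static PyObject *
call_plain(PyObject *callable)
{
    /* Before: _PyObject_CallNoArg(callable), a private spelling. */
    return PyObject_CallNoArgs(callable);
}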
diff --git a/Modules/_sha3/clinic/sha3module.c.h b/Modules/_sha3/clinic/sha3module.c.h
index 554442df..1c79c269 100644
--- a/Modules/_sha3/clinic/sha3module.c.h
+++ b/Modules/_sha3/clinic/sha3module.c.h
@@ -2,6 +2,52 @@
preserve
[clinic start generated code]*/
+PyDoc_STRVAR(py_sha3_new__doc__,
+"sha3_224(data=b\'\', /, *, usedforsecurity=True)\n"
+"--\n"
+"\n"
+"Return a new BLAKE2b hash object.");
+
+static PyObject *
+py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity);
+
+static PyObject *
+py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"", "usedforsecurity", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "sha3_224", 0};
+ PyObject *argsbuf[2];
+ PyObject * const *fastargs;
+ Py_ssize_t nargs = PyTuple_GET_SIZE(args);
+ Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 0;
+ PyObject *data = NULL;
+ int usedforsecurity = 1;
+
+ fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 0, 1, 0, argsbuf);
+ if (!fastargs) {
+ goto exit;
+ }
+ if (nargs < 1) {
+ goto skip_optional_posonly;
+ }
+ noptargs--;
+ data = fastargs[0];
+skip_optional_posonly:
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(fastargs[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = py_sha3_new_impl(type, data, usedforsecurity);
+
+exit:
+ return return_value;
+}
+
PyDoc_STRVAR(_sha3_sha3_224_copy__doc__,
"copy($self, /)\n"
"--\n"
@@ -118,4 +164,4 @@ _sha3_shake_128_hexdigest(SHA3object *self, PyObject *arg)
exit:
return return_value;
}
-/*[clinic end generated code: output=5b3e99b9a96471e8 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=c8a97b34e80def62 input=a9049054013a1b77]*/
diff --git a/Modules/_sha3/sha3module.c b/Modules/_sha3/sha3module.c
index c1fb6185..c826b42d 100644
--- a/Modules/_sha3/sha3module.c
+++ b/Modules/_sha3/sha3module.c
@@ -40,7 +40,7 @@
#elif PY_BIG_ENDIAN
/* opt64 is not yet supported on big endian platforms */
#define KeccakOpt 32
-#elif SIZEOF_VOID_P == 8 && defined(PY_UINT64_T)
+#elif SIZEOF_VOID_P == 8
/* opt64 works only on little-endian 64bit platforms with unsigned int64 */
#define KeccakOpt 64
#else
@@ -48,9 +48,9 @@
#define KeccakOpt 32
#endif
-#if KeccakOpt == 64 && defined(PY_UINT64_T)
+#if KeccakOpt == 64
/* 64bit platforms with unsigned int64 */
- typedef PY_UINT64_T UINT64;
+ typedef uint64_t UINT64;
typedef unsigned char UINT8;
#endif
@@ -169,21 +169,24 @@ newSHA3object(PyTypeObject *type)
return newobj;
}
+/*[clinic input]
+@classmethod
+_sha3.sha3_224.__new__ as py_sha3_new
+ data: object(c_default="NULL") = b''
+ /
+ *
+ usedforsecurity: bool = True
+
+Return a new SHA3 hash object.
+[clinic start generated code]*/
static PyObject *
-py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
+py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity)
+/*[clinic end generated code: output=90409addc5d5e8b0 input=bcfcdf2e4368347a]*/
{
SHA3object *self = NULL;
Py_buffer buf = {NULL, NULL};
HashReturn res;
- PyObject *data = NULL;
-
- if (!_PyArg_NoKeywords(_PyType_Name(type), kwargs)) {
- return NULL;
- }
- if (!PyArg_UnpackTuple(args, _PyType_Name(type), 0, 1, &data)) {
- return NULL;
- }
self = newSHA3object(type);
if (self == NULL) {
@@ -529,22 +532,22 @@ static PyGetSetDef SHA3_getseters[] = {
}
PyDoc_STRVAR(sha3_224__doc__,
-"sha3_224([data]) -> SHA3 object\n\
+"sha3_224([data], *, usedforsecurity=True) -> SHA3 object\n\
\n\
Return a new SHA3 hash object with a hashbit length of 28 bytes.");
PyDoc_STRVAR(sha3_256__doc__,
-"sha3_256([data]) -> SHA3 object\n\
+"sha3_256([data], *, usedforsecurity=True) -> SHA3 object\n\
\n\
Return a new SHA3 hash object with a hashbit length of 32 bytes.");
PyDoc_STRVAR(sha3_384__doc__,
-"sha3_384([data]) -> SHA3 object\n\
+"sha3_384([data], *, usedforsecurity=True) -> SHA3 object\n\
\n\
Return a new SHA3 hash object with a hashbit length of 48 bytes.");
PyDoc_STRVAR(sha3_512__doc__,
-"sha3_512([data]) -> SHA3 object\n\
+"sha3_512([data], *, usedforsecurity=True) -> SHA3 object\n\
\n\
Return a new SHA3 hash object with a hashbit length of 64 bytes.");
@@ -555,22 +558,22 @@ SHA3_TYPE(SHA3_512type, "_sha3.sha3_512", sha3_512__doc__, SHA3_methods);
#ifdef PY_WITH_KECCAK
PyDoc_STRVAR(keccak_224__doc__,
-"keccak_224([data]) -> Keccak object\n\
+"keccak_224([data], *, usedforsecurity=True) -> Keccak object\n\
\n\
Return a new Keccak hash object with a hashbit length of 28 bytes.");
PyDoc_STRVAR(keccak_256__doc__,
-"keccak_256([data]) -> Keccak object\n\
+"keccak_256([data], *, usedforsecurity=True) -> Keccak object\n\
\n\
Return a new Keccak hash object with a hashbit length of 32 bytes.");
PyDoc_STRVAR(keccak_384__doc__,
-"keccak_384([data]) -> Keccak object\n\
+"keccak_384([data], *, usedforsecurity=True) -> Keccak object\n\
\n\
Return a new Keccak hash object with a hashbit length of 48 bytes.");
PyDoc_STRVAR(keccak_512__doc__,
-"keccak_512([data]) -> Keccak object\n\
+"keccak_512([data], *, usedforsecurity=True) -> Keccak object\n\
\n\
Return a new Keccak hash object with a hashbit length of 64 bytes.");
@@ -672,12 +675,12 @@ static PyMethodDef SHAKE_methods[] = {
};
PyDoc_STRVAR(shake_128__doc__,
-"shake_128([data]) -> SHAKE object\n\
+"shake_128([data], *, usedforsecurity=True) -> SHAKE object\n\
\n\
Return a new SHAKE hash object.");
PyDoc_STRVAR(shake_256__doc__,
-"shake_256([data]) -> SHAKE object\n\
+"shake_256([data], *, usedforsecurity=True) -> SHAKE object\n\
\n\
Return a new SHAKE hash object.");
@@ -710,7 +713,7 @@ PyInit__sha3(void)
#define init_sha3type(name, type) \
do { \
- Py_TYPE(type) = &PyType_Type; \
+ Py_SET_TYPE(type, &PyType_Type); \
if (PyType_Ready(type) < 0) { \
goto error; \
} \
diff --git a/Modules/_sqlite/cache.c b/Modules/_sqlite/cache.c
index 4d418042..758fc022 100644
--- a/Modules/_sqlite/cache.c
+++ b/Modules/_sqlite/cache.c
@@ -112,9 +112,8 @@ void pysqlite_cache_dealloc(pysqlite_Cache* self)
Py_TYPE(self)->tp_free((PyObject*)self);
}
-PyObject* pysqlite_cache_get(pysqlite_Cache* self, PyObject* args)
+PyObject* pysqlite_cache_get(pysqlite_Cache* self, PyObject* key)
{
- PyObject* key = args;
pysqlite_Node* node;
pysqlite_Node* ptr;
PyObject* data;
@@ -184,6 +183,9 @@ PyObject* pysqlite_cache_get(pysqlite_Cache* self, PyObject* args)
}
}
+ /* We cannot replace this by PyObject_CallOneArg() since
+ * PyObject_CallFunction() has a special case when using a
+ * single tuple as argument. */
data = PyObject_CallFunction(self->factory, "O", key);
if (!data) {
diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c
index b6188a36..b8003734 100644
--- a/Modules/_sqlite/connection.c
+++ b/Modules/_sqlite/connection.c
@@ -23,15 +23,13 @@
#include "cache.h"
#include "module.h"
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
#include "connection.h"
#include "statement.h"
#include "cursor.h"
#include "prepare_protocol.h"
#include "util.h"
-#include "pythread.h"
-
#define ACTION_FINALIZE 1
#define ACTION_RESET 2
@@ -79,7 +77,7 @@ int pysqlite_connection_init(pysqlite_Connection* self, PyObject* args, PyObject
NULL
};
- char* database;
+ const char* database;
PyObject* database_obj;
int detect_types = 0;
PyObject* isolation_level = NULL;
@@ -308,7 +306,7 @@ PyObject* pysqlite_connection_cursor(pysqlite_Connection* self, PyObject* args,
factory = (PyObject*)&pysqlite_CursorType;
}
- cursor = PyObject_CallFunctionObjArgs(factory, (PyObject *)self, NULL);
+ cursor = PyObject_CallOneArg(factory, (PyObject *)self);
if (cursor == NULL)
return NULL;
if (!PyObject_TypeCheck(cursor, &pysqlite_CursorType)) {
@@ -550,7 +548,7 @@ PyObject* _pysqlite_build_py_params(sqlite3_context *context, int argc, sqlite3_
cur_value = argv[i];
switch (sqlite3_value_type(argv[i])) {
case SQLITE_INTEGER:
- cur_py_value = _pysqlite_long_from_int64(sqlite3_value_int64(cur_value));
+ cur_py_value = PyLong_FromLongLong(sqlite3_value_int64(cur_value));
break;
case SQLITE_FLOAT:
cur_py_value = PyFloat_FromDouble(sqlite3_value_double(cur_value));
@@ -709,7 +707,7 @@ void _pysqlite_final_callback(sqlite3_context* context)
PyErr_Fetch(&exception, &value, &tb);
restore = 1;
- function_result = _PyObject_CallMethodId(*aggregate_instance, &PyId_finalize, NULL);
+ function_result = _PyObject_CallMethodIdNoArgs(*aggregate_instance, &PyId_finalize);
Py_DECREF(*aggregate_instance);
@@ -975,7 +973,7 @@ static void _trace_callback(void* user_arg, const char* statement_string)
py_statement = PyUnicode_DecodeUTF8(statement_string,
strlen(statement_string), "replace");
if (py_statement) {
- ret = PyObject_CallFunctionObjArgs((PyObject*)user_arg, py_statement, NULL);
+ ret = PyObject_CallOneArg((PyObject*)user_arg, py_statement);
Py_DECREF(py_statement);
}
@@ -1192,9 +1190,9 @@ pysqlite_connection_set_isolation_level(pysqlite_Connection* self, PyObject* iso
return -1;
}
- uppercase_level = _PyObject_CallMethodIdObjArgs(
+ uppercase_level = _PyObject_CallMethodIdOneArg(
(PyObject *)&PyUnicode_Type, &PyId_upper,
- isolation_level, NULL);
+ isolation_level);
if (!uppercase_level) {
return -1;
}
@@ -1230,7 +1228,7 @@ PyObject* pysqlite_connection_call(pysqlite_Connection* self, PyObject* args, Py
if (!_PyArg_NoKeywords(MODULE_NAME ".Connection", kwargs))
return NULL;
- if (!PyArg_ParseTuple(args, "O", &sql))
+ if (!PyArg_ParseTuple(args, "U", &sql))
return NULL;
_pysqlite_drop_unused_statement_references(self);
@@ -1282,7 +1280,7 @@ PyObject* pysqlite_connection_execute(pysqlite_Connection* self, PyObject* args)
PyObject* result = 0;
PyObject* method = 0;
- cursor = _PyObject_CallMethodId((PyObject*)self, &PyId_cursor, NULL);
+ cursor = _PyObject_CallMethodIdNoArgs((PyObject*)self, &PyId_cursor);
if (!cursor) {
goto error;
}
@@ -1311,7 +1309,7 @@ PyObject* pysqlite_connection_executemany(pysqlite_Connection* self, PyObject* a
PyObject* result = 0;
PyObject* method = 0;
- cursor = _PyObject_CallMethodId((PyObject*)self, &PyId_cursor, NULL);
+ cursor = _PyObject_CallMethodIdNoArgs((PyObject*)self, &PyId_cursor);
if (!cursor) {
goto error;
}
@@ -1340,7 +1338,7 @@ PyObject* pysqlite_connection_executescript(pysqlite_Connection* self, PyObject*
PyObject* result = 0;
PyObject* method = 0;
- cursor = _PyObject_CallMethodId((PyObject*)self, &PyId_cursor, NULL);
+ cursor = _PyObject_CallMethodIdNoArgs((PyObject*)self, &PyId_cursor);
if (!cursor) {
goto error;
}
@@ -1472,16 +1470,9 @@ pysqlite_connection_iterdump(pysqlite_Connection* self, PyObject* args)
goto finally;
}
- args = PyTuple_New(1);
- if (!args) {
- goto finally;
- }
- Py_INCREF(self);
- PyTuple_SetItem(args, 0, (PyObject*)self);
- retval = PyObject_CallObject(pyfn_iterdump, args);
+ retval = PyObject_CallOneArg(pyfn_iterdump, (PyObject *)self);
finally:
- Py_XDECREF(args);
Py_XDECREF(module);
return retval;
}
@@ -1655,7 +1646,7 @@ pysqlite_connection_create_collation(pysqlite_Connection* self, PyObject* args)
const char *uppercase_name_str;
int rc;
unsigned int kind;
- void *data;
+ const void *data;
if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) {
goto finally;
@@ -1666,8 +1657,8 @@ pysqlite_connection_create_collation(pysqlite_Connection* self, PyObject* args)
goto finally;
}
- uppercase_name = _PyObject_CallMethodIdObjArgs((PyObject *)&PyUnicode_Type,
- &PyId_upper, name, NULL);
+ uppercase_name = _PyObject_CallMethodIdOneArg((PyObject *)&PyUnicode_Type,
+ &PyId_upper, name);
if (!uppercase_name) {
goto finally;
}
diff --git a/Modules/_sqlite/cursor.c b/Modules/_sqlite/cursor.c
index 8cfa6e50..5cfb4b97 100644
--- a/Modules/_sqlite/cursor.c
+++ b/Modules/_sqlite/cursor.c
@@ -106,7 +106,7 @@ _pysqlite_get_converter(const char *keystr, Py_ssize_t keylen)
if (!key) {
return NULL;
}
- upcase_key = _PyObject_CallMethodId(key, &PyId_upper, NULL);
+ upcase_key = _PyObject_CallMethodIdNoArgs(key, &PyId_upper);
Py_DECREF(key);
if (!upcase_key) {
return NULL;
@@ -274,7 +274,7 @@ _pysqlite_fetch_one_row(pysqlite_Cursor* self)
item = PyBytes_FromStringAndSize(val_str, nbytes);
if (!item)
goto error;
- converted = PyObject_CallFunction(converter, "O", item);
+ converted = PyObject_CallOneArg(converter, item);
Py_DECREF(item);
}
} else {
@@ -285,7 +285,7 @@ _pysqlite_fetch_one_row(pysqlite_Cursor* self)
Py_INCREF(Py_None);
converted = Py_None;
} else if (coltype == SQLITE_INTEGER) {
- converted = _pysqlite_long_from_int64(sqlite3_column_int64(self->statement->st, i));
+ converted = PyLong_FromLongLong(sqlite3_column_int64(self->statement->st, i));
} else if (coltype == SQLITE_FLOAT) {
converted = PyFloat_FromDouble(sqlite3_column_double(self->statement->st, i));
} else if (coltype == SQLITE_TEXT) {
@@ -393,12 +393,7 @@ _pysqlite_query_execute(pysqlite_Cursor* self, int multiple, PyObject* args)
if (multiple) {
/* executemany() */
- if (!PyArg_ParseTuple(args, "OO", &operation, &second_argument)) {
- goto error;
- }
-
- if (!PyUnicode_Check(operation)) {
- PyErr_SetString(PyExc_ValueError, "operation parameter must be str");
+ if (!PyArg_ParseTuple(args, "UO", &operation, &second_argument)) {
goto error;
}
@@ -415,12 +410,7 @@ _pysqlite_query_execute(pysqlite_Cursor* self, int multiple, PyObject* args)
}
} else {
/* execute() */
- if (!PyArg_ParseTuple(args, "O|O", &operation, &second_argument)) {
- goto error;
- }
-
- if (!PyUnicode_Check(operation)) {
- PyErr_SetString(PyExc_ValueError, "operation parameter must be str");
+ if (!PyArg_ParseTuple(args, "U|O", &operation, &second_argument)) {
goto error;
}
@@ -583,7 +573,7 @@ _pysqlite_query_execute(pysqlite_Cursor* self, int multiple, PyObject* args)
Py_BEGIN_ALLOW_THREADS
lastrowid = sqlite3_last_insert_rowid(self->connection->db);
Py_END_ALLOW_THREADS
- self->lastrowid = _pysqlite_long_from_int64(lastrowid);
+ self->lastrowid = PyLong_FromLongLong(lastrowid);
}
if (rc == SQLITE_ROW) {
@@ -636,7 +626,6 @@ static PyObject *
pysqlite_cursor_executescript(pysqlite_Cursor* self, PyObject* args)
{
PyObject* script_obj;
- PyObject* script_str = NULL;
const char* script_cstr;
sqlite3_stmt* statement;
int rc;
@@ -710,8 +699,6 @@ pysqlite_cursor_executescript(pysqlite_Cursor* self, PyObject* args)
}
error:
- Py_XDECREF(script_str);
-
if (PyErr_Occurred()) {
return NULL;
} else {
@@ -798,7 +785,7 @@ PyObject* pysqlite_cursor_fetchone(pysqlite_Cursor* self, PyObject* args)
PyObject* pysqlite_cursor_fetchmany(pysqlite_Cursor* self, PyObject* args, PyObject* kwargs)
{
- static char *kwlist[] = {"size", NULL, NULL};
+ static char *kwlist[] = {"size", NULL};
PyObject* row;
PyObject* list;
@@ -814,17 +801,9 @@ PyObject* pysqlite_cursor_fetchmany(pysqlite_Cursor* self, PyObject* args, PyObj
return NULL;
}
- /* just make sure we enter the loop */
- row = Py_None;
-
- while (row) {
- row = pysqlite_cursor_iternext(self);
- if (row) {
- PyList_Append(list, row);
- Py_DECREF(row);
- } else {
- break;
- }
+ while ((row = pysqlite_cursor_iternext(self))) {
+ PyList_Append(list, row);
+ Py_XDECREF(row);
if (++counter == maxrows) {
break;
@@ -849,15 +828,9 @@ PyObject* pysqlite_cursor_fetchall(pysqlite_Cursor* self, PyObject* args)
return NULL;
}
- /* just make sure we enter the loop */
- row = (PyObject*)Py_None;
-
- while (row) {
- row = pysqlite_cursor_iternext(self);
- if (row) {
- PyList_Append(list, row);
- Py_DECREF(row);
- }
+ while ((row = pysqlite_cursor_iternext(self))) {
+ PyList_Append(list, row);
+ Py_XDECREF(row);
}
if (PyErr_Occurred()) {
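
The rewritten fetchmany()/fetchall() loops lean on the usual C-API convention that an iternext-style call returns NULL both at exhaustion and on error, with PyErr_Occurred() distinguishing the two afterwards. A generic sketch of that pattern using a plain iterator (illustrative only, not part of the patch):

#include "Python.h"

/* Illustrative sketch: collect every item of `iterable` into a list using
 * the NULL-then-PyErr_Occurred() convention. */
static PyObject *
collect(PyObject *iterable)
{
    PyObject *it = PyObject_GetIter(iterable);
    if (it == NULL) {
        return NULL;
    }
    PyObject *list = PyList_New(0);
    if (list == NULL) {
        Py_DECREF(it);
        return NULL;
    }
    PyObject *item;
    while ((item = PyIter_Next(it)) != NULL) {
        int rc = PyList_Append(list, item);
        Py_DECREF(item);
        if (rc < 0) {
            break;
        }
    }
    Py_DECREF(it);
    /* NULL from PyIter_Next() means either exhaustion or error; only an
     * error leaves an exception set. */
    if (PyErr_Occurred()) {
        Py_DECREF(list);
        return NULL;
    }
    return list;
}
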
diff --git a/Modules/_sqlite/microprotocols.c b/Modules/_sqlite/microprotocols.c
index c23b09f5..3b2d7f42 100644
--- a/Modules/_sqlite/microprotocols.c
+++ b/Modules/_sqlite/microprotocols.c
@@ -24,7 +24,6 @@
*/
#include <Python.h>
-#include <structmember.h>
#include "cursor.h"
#include "microprotocols.h"
@@ -84,7 +83,7 @@ pysqlite_microprotocols_adapt(PyObject *obj, PyObject *proto, PyObject *alt)
way to get a quotable object to be its instance */
/* look for an adapter in the registry */
- key = Py_BuildValue("(OO)", (PyObject*)obj->ob_type, proto);
+ key = Py_BuildValue("(OO)", (PyObject*)Py_TYPE(obj), proto);
if (!key) {
return NULL;
}
@@ -92,7 +91,7 @@ pysqlite_microprotocols_adapt(PyObject *obj, PyObject *proto, PyObject *alt)
Py_DECREF(key);
if (adapter) {
Py_INCREF(adapter);
- adapted = PyObject_CallFunctionObjArgs(adapter, obj, NULL);
+ adapted = PyObject_CallOneArg(adapter, obj);
Py_DECREF(adapter);
return adapted;
}
@@ -105,7 +104,7 @@ pysqlite_microprotocols_adapt(PyObject *obj, PyObject *proto, PyObject *alt)
return NULL;
}
if (adapter) {
- adapted = PyObject_CallFunctionObjArgs(adapter, obj, NULL);
+ adapted = PyObject_CallOneArg(adapter, obj);
Py_DECREF(adapter);
if (adapted == Py_None) {
@@ -124,7 +123,7 @@ pysqlite_microprotocols_adapt(PyObject *obj, PyObject *proto, PyObject *alt)
return NULL;
}
if (adapter) {
- adapted = PyObject_CallFunctionObjArgs(adapter, proto, NULL);
+ adapted = PyObject_CallOneArg(adapter, proto);
Py_DECREF(adapter);
if (adapted == Py_None) {
diff --git a/Modules/_sqlite/module.c b/Modules/_sqlite/module.c
index 9fe0dc95..71d951ee 100644
--- a/Modules/_sqlite/module.c
+++ b/Modules/_sqlite/module.c
@@ -105,7 +105,7 @@ RAM instead of on disk.");
static PyObject* module_complete(PyObject* self, PyObject* args, PyObject*
kwargs)
{
- static char *kwlist[] = {"statement", NULL, NULL};
+ static char *kwlist[] = {"statement", NULL};
char* statement;
PyObject* result;
@@ -135,7 +135,7 @@ Checks if a string contains a complete SQL statement. Non-standard.");
static PyObject* module_enable_shared_cache(PyObject* self, PyObject* args, PyObject*
kwargs)
{
- static char *kwlist[] = {"do_enable", NULL, NULL};
+ static char *kwlist[] = {"do_enable", NULL};
int do_enable;
int rc;
@@ -203,7 +203,7 @@ static PyObject* module_register_converter(PyObject* self, PyObject* args)
}
/* convert the name to upper case */
- name = _PyObject_CallMethodId(orig_name, &PyId_upper, NULL);
+ name = _PyObject_CallMethodIdNoArgs(orig_name, &PyId_upper);
if (!name) {
goto error;
}
@@ -346,6 +346,14 @@ static struct PyModuleDef _sqlite3module = {
NULL
};
+#define ADD_TYPE(module, type) \
+do { \
+ if (PyModule_AddType(module, &type) < 0) { \
+ Py_DECREF(module); \
+ return NULL; \
+ } \
+} while (0)
+
PyMODINIT_FUNC PyInit__sqlite3(void)
{
PyObject *module, *dict;
@@ -366,14 +374,10 @@ PyMODINIT_FUNC PyInit__sqlite3(void)
return NULL;
}
- Py_INCREF(&pysqlite_ConnectionType);
- PyModule_AddObject(module, "Connection", (PyObject*) &pysqlite_ConnectionType);
- Py_INCREF(&pysqlite_CursorType);
- PyModule_AddObject(module, "Cursor", (PyObject*) &pysqlite_CursorType);
- Py_INCREF(&pysqlite_PrepareProtocolType);
- PyModule_AddObject(module, "PrepareProtocol", (PyObject*) &pysqlite_PrepareProtocolType);
- Py_INCREF(&pysqlite_RowType);
- PyModule_AddObject(module, "Row", (PyObject*) &pysqlite_RowType);
+ ADD_TYPE(module, pysqlite_ConnectionType);
+ ADD_TYPE(module, pysqlite_CursorType);
+ ADD_TYPE(module, pysqlite_PrepareProtocolType);
+ ADD_TYPE(module, pysqlite_RowType);
if (!(dict = PyModule_GetDict(module))) {
goto error;
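
The new ADD_TYPE macro above is a thin wrapper around PyModule_AddType(), which readies the type if necessary, takes its own reference, and exposes it under the last dotted component of tp_name, so the old Py_INCREF() plus PyModule_AddObject() pairs collapse into one call. A minimal sketch with placeholder names (not part of the patch):

#include "Python.h"

/* Illustrative sketch only: publish an extension type on a module.
 * PyModule_AddType() calls PyType_Ready() if needed and handles the
 * reference that the module keeps. */
static int
add_types(PyObject *module, PyTypeObject *type)
{
    if (PyModule_AddType(module, type) < 0) {
        return -1;
    }
    return 0;
}
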
diff --git a/Modules/_sqlite/prepare_protocol.c b/Modules/_sqlite/prepare_protocol.c
index 181c7edf..05a2ca5a 100644
--- a/Modules/_sqlite/prepare_protocol.c
+++ b/Modules/_sqlite/prepare_protocol.c
@@ -78,6 +78,6 @@ PyTypeObject pysqlite_PrepareProtocolType= {
extern int pysqlite_prepare_protocol_setup_types(void)
{
pysqlite_PrepareProtocolType.tp_new = PyType_GenericNew;
- Py_TYPE(&pysqlite_PrepareProtocolType)= &PyType_Type;
+ Py_SET_TYPE(&pysqlite_PrepareProtocolType, &PyType_Type);
return PyType_Ready(&pysqlite_PrepareProtocolType);
}
diff --git a/Modules/_sqlite/statement.c b/Modules/_sqlite/statement.c
index 491294b0..23c204e7 100644
--- a/Modules/_sqlite/statement.c
+++ b/Modules/_sqlite/statement.c
@@ -59,6 +59,8 @@ int pysqlite_statement_create(pysqlite_Statement* self, pysqlite_Connection* con
self->st = NULL;
self->in_use = 0;
+ assert(PyUnicode_Check(sql));
+
sql_cstr = PyUnicode_AsUTF8AndSize(sql, &sql_cstr_len);
if (sql_cstr == NULL) {
rc = PYSQLITE_SQL_WRONG_TYPE;
@@ -225,6 +227,9 @@ void pysqlite_statement_bind_parameters(pysqlite_Statement* self, PyObject* para
num_params = PyList_GET_SIZE(parameters);
} else {
num_params = PySequence_Size(parameters);
+ if (num_params == -1) {
+ return;
+ }
}
if (num_params != num_params_needed) {
PyErr_Format(pysqlite_ProgrammingError,
@@ -236,9 +241,9 @@ void pysqlite_statement_bind_parameters(pysqlite_Statement* self, PyObject* para
for (i = 0; i < num_params; i++) {
if (PyTuple_CheckExact(parameters)) {
current_param = PyTuple_GET_ITEM(parameters, i);
- Py_XINCREF(current_param);
+ Py_INCREF(current_param);
} else if (PyList_CheckExact(parameters)) {
- current_param = PyList_GET_ITEM(parameters, i);
+ current_param = PyList_GetItem(parameters, i);
Py_XINCREF(current_param);
} else {
current_param = PySequence_GetItem(parameters, i);
diff --git a/Modules/_sqlite/util.c b/Modules/_sqlite/util.c
index 3fa671d0..1dbabcdd 100644
--- a/Modules/_sqlite/util.c
+++ b/Modules/_sqlite/util.c
@@ -103,22 +103,6 @@ int _pysqlite_seterror(sqlite3* db, sqlite3_stmt* st)
# define IS_LITTLE_ENDIAN 1
#endif
-PyObject *
-_pysqlite_long_from_int64(sqlite_int64 value)
-{
-# if SIZEOF_LONG_LONG < 8
- if (value > PY_LLONG_MAX || value < PY_LLONG_MIN) {
- return _PyLong_FromByteArray(&value, sizeof(value),
- IS_LITTLE_ENDIAN, 1 /* signed */);
- }
-# endif
-# if SIZEOF_LONG < SIZEOF_LONG_LONG
- if (value > LONG_MAX || value < LONG_MIN)
- return PyLong_FromLongLong(value);
-# endif
- return PyLong_FromLong(Py_SAFE_DOWNCAST(value, sqlite_int64, long));
-}
-
sqlite_int64
_pysqlite_long_as_int64(PyObject * py_val)
{
diff --git a/Modules/_sqlite/util.h b/Modules/_sqlite/util.h
index 62619111..c5a220e9 100644
--- a/Modules/_sqlite/util.h
+++ b/Modules/_sqlite/util.h
@@ -37,7 +37,6 @@ int pysqlite_step(sqlite3_stmt* statement, pysqlite_Connection* connection);
*/
int _pysqlite_seterror(sqlite3* db, sqlite3_stmt* st);
-PyObject * _pysqlite_long_from_int64(sqlite_int64 value);
sqlite_int64 _pysqlite_long_as_int64(PyObject * value);
#if SQLITE_VERSION_NUMBER >= 3007014
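
The deleted _pysqlite_long_from_int64() helper only guarded against platforms where long long might be narrower than sqlite3's 64-bit integers; since C99 guarantees long long is at least 64 bits wide, PyLong_FromLongLong() covers the whole sqlite3_int64 range on its own. A trivial sketch of the replacement conversion (illustrative only, not part of the patch):

#include "Python.h"
#include <stdint.h>

/* Illustrative sketch: a 64-bit sqlite3 rowid or integer column value
 * converts directly to a Python int. */
static PyObject *
int64_to_pylong(int64_t value)
{
    return PyLong_FromLongLong((long long)value);
}
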
diff --git a/Modules/_sre.c b/Modules/_sre.c
index d4fe588c..244e4f1f 100644
--- a/Modules/_sre.c
+++ b/Modules/_sre.c
@@ -41,7 +41,7 @@ static const char copyright[] =
#define PY_SSIZE_T_CLEAN
#include "Python.h"
-#include "structmember.h" /* offsetof */
+#include "structmember.h" // PyMemberDef
#include "sre.h"
@@ -351,7 +351,7 @@ state_reset(SRE_STATE* state)
data_stack_dealloc(state);
}
-static void*
+static const void*
getstring(PyObject* string, Py_ssize_t* p_length,
int* p_isbytes, int* p_charsize,
Py_buffer *view)
@@ -398,11 +398,11 @@ state_init(SRE_STATE* state, PatternObject* pattern, PyObject* string,
Py_ssize_t length;
int isbytes, charsize;
- void* ptr;
+ const void* ptr;
memset(state, 0, sizeof(SRE_STATE));
- state->mark = PyMem_New(void *, pattern->groups * 2);
+ state->mark = PyMem_New(const void *, pattern->groups * 2);
if (!state->mark) {
PyErr_NoMemory();
goto err;
@@ -891,7 +891,7 @@ _sre_SRE_Pattern_split_impl(PatternObject *self, PyObject *string,
Py_ssize_t status;
Py_ssize_t n;
Py_ssize_t i;
- void* last;
+ const void* last;
assert(self->codesize != 0);
@@ -984,7 +984,7 @@ pattern_subx(PatternObject* self, PyObject* ptemplate, PyObject* string,
PyObject* item;
PyObject* filter;
PyObject* match;
- void* ptr;
+ const void* ptr;
Py_ssize_t status;
Py_ssize_t n;
Py_ssize_t i, b, e;
@@ -1002,7 +1002,6 @@ pattern_subx(PatternObject* self, PyObject* ptemplate, PyObject* string,
int literal;
view.buf = NULL;
ptr = getstring(ptemplate, &n, &isbytes, &charsize, &view);
- b = charsize;
if (ptr) {
if (charsize == 1)
literal = memchr(ptr, '\\', n) == NULL;
@@ -1082,7 +1081,7 @@ pattern_subx(PatternObject* self, PyObject* ptemplate, PyObject* string,
match = pattern_new_match(self, &state, 1);
if (!match)
goto error;
- item = PyObject_CallFunctionObjArgs(filter, match, NULL);
+ item = PyObject_CallOneArg(filter, match);
Py_DECREF(match);
if (!item)
goto error;
@@ -1339,7 +1338,7 @@ _sre_compile_impl(PyObject *module, PyObject *pattern, int flags,
n = PyList_GET_SIZE(code);
/* coverity[ampersand_in_size] */
- self = PyObject_NEW_VAR(PatternObject, &Pattern_Type, n);
+ self = PyObject_NewVar(PatternObject, &Pattern_Type, n);
if (!self)
return NULL;
self->weakreflist = NULL;
@@ -1896,7 +1895,7 @@ match_getslice_by_index(MatchObject* self, Py_ssize_t index, PyObject* def)
int isbytes, charsize;
Py_buffer view;
PyObject *result;
- void* ptr;
+ const void* ptr;
Py_ssize_t i, j;
assert(0 <= index && index < self->groups);
@@ -2328,8 +2327,8 @@ pattern_new_match(PatternObject* pattern, SRE_STATE* state, Py_ssize_t status)
/* create match object (with room for extra group marks) */
/* coverity[ampersand_in_size] */
- match = PyObject_NEW_VAR(MatchObject, &Match_Type,
- 2*(pattern->groups+1));
+ match = PyObject_NewVar(MatchObject, &Match_Type,
+ 2*(pattern->groups+1));
if (!match)
return NULL;
@@ -2469,7 +2468,7 @@ pattern_scanner(PatternObject *self, PyObject *string, Py_ssize_t pos, Py_ssize_
ScannerObject* scanner;
/* create scanner object */
- scanner = PyObject_NEW(ScannerObject, &Scanner_Type);
+ scanner = PyObject_New(ScannerObject, &Scanner_Type);
if (!scanner)
return NULL;
scanner->pattern = NULL;
@@ -2519,7 +2518,7 @@ pattern_richcompare(PyObject *lefto, PyObject *righto, int op)
Py_RETURN_NOTIMPLEMENTED;
}
- if (Py_TYPE(lefto) != &Pattern_Type || Py_TYPE(righto) != &Pattern_Type) {
+ if (!Py_IS_TYPE(lefto, &Pattern_Type) || !Py_IS_TYPE(righto, &Pattern_Type)) {
Py_RETURN_NOTIMPLEMENTED;
}
@@ -2569,6 +2568,8 @@ static PyMethodDef pattern_methods[] = {
_SRE_SRE_PATTERN_SCANNER_METHODDEF
_SRE_SRE_PATTERN___COPY___METHODDEF
_SRE_SRE_PATTERN___DEEPCOPY___METHODDEF
+ {"__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O|METH_CLASS,
+ PyDoc_STR("See PEP 585")},
{NULL, NULL}
};
@@ -2639,6 +2640,8 @@ static PyMethodDef match_methods[] = {
_SRE_SRE_MATCH_EXPAND_METHODDEF
_SRE_SRE_MATCH___COPY___METHODDEF
_SRE_SRE_MATCH___DEEPCOPY___METHODDEF
+ {"__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O|METH_CLASS,
+ PyDoc_STR("See PEP 585")},
{NULL, NULL}
};
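
The two method-table entries added above route __class_getitem__ to Py_GenericAlias, which is what makes PEP 585 style subscription such as re.Pattern[str] work for these C types. A sketch of the same registration on an arbitrary extension type, with placeholder names (not part of the patch):

#include "Python.h"

/* Illustrative sketch only: expose PEP 585 subscription on an extension
 * type by installing Py_GenericAlias as a classmethod.  MyMethods would be
 * referenced from the type's tp_methods. */
static PyMethodDef MyMethods[] = {
    {"__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O | METH_CLASS,
     PyDoc_STR("See PEP 585")},
    {NULL, NULL}
};
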
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
index e6dda298..28796b37 100644
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -18,8 +18,6 @@
#include "Python.h"
-#include "pythread.h"
-
/* Redefined below for Windows debug builds after important #includes */
#define _PySSL_FIX_ERRNO
@@ -523,9 +521,9 @@ static int PySSL_select(PySocketSockObject *s, int writing, _PyTime_t timeout);
static int PySSL_set_owner(PySSLSocket *, PyObject *, void *);
static int PySSL_set_session(PySSLSocket *, PyObject *, void *);
-#define PySSLSocket_Check(v) (Py_TYPE(v) == &PySSLSocket_Type)
-#define PySSLMemoryBIO_Check(v) (Py_TYPE(v) == &PySSLMemoryBIO_Type)
-#define PySSLSession_Check(v) (Py_TYPE(v) == &PySSLSession_Type)
+#define PySSLSocket_Check(v) Py_IS_TYPE(v, &PySSLSocket_Type)
+#define PySSLMemoryBIO_Check(v) Py_IS_TYPE(v, &PySSLMemoryBIO_Type)
+#define PySSLSession_Check(v) Py_IS_TYPE(v, &PySSLSession_Type)
typedef enum {
SOCKET_IS_NONBLOCKING,
@@ -4069,7 +4067,7 @@ error:
/* internal helper function, returns -1 on error
*/
static int
-_add_ca_certs(PySSLContext *self, void *data, Py_ssize_t len,
+_add_ca_certs(PySSLContext *self, const void *data, Py_ssize_t len,
int filetype)
{
BIO *biobuf = NULL;
@@ -4255,7 +4253,6 @@ _ssl__SSLContext_load_verify_locations_impl(PySSLContext *self,
r = SSL_CTX_load_verify_locations(self->ctx, cafile_buf, capath_buf);
PySSL_END_ALLOW_THREADS
if (r != 1) {
- ok = 0;
if (errno != 0) {
ERR_clear_error();
PyErr_SetFromErrno(PyExc_OSError);
diff --git a/Modules/_stat.c b/Modules/_stat.c
index 7a799af0..546e6a5f 100644
--- a/Modules/_stat.c
+++ b/Modules/_stat.c
@@ -496,113 +496,140 @@ ST_CTIME\n\
");
+static int
+stat_exec(PyObject *module)
+{
+#define ADD_INT_MACRO(module, macro) \
+ do { \
+ if (PyModule_AddIntConstant(module, #macro, macro) < 0) { \
+ return -1; \
+ } \
+ } while (0)
+
+ ADD_INT_MACRO(module, S_IFDIR);
+ ADD_INT_MACRO(module, S_IFCHR);
+ ADD_INT_MACRO(module, S_IFBLK);
+ ADD_INT_MACRO(module, S_IFREG);
+ ADD_INT_MACRO(module, S_IFIFO);
+ ADD_INT_MACRO(module, S_IFLNK);
+ ADD_INT_MACRO(module, S_IFSOCK);
+ ADD_INT_MACRO(module, S_IFDOOR);
+ ADD_INT_MACRO(module, S_IFPORT);
+ ADD_INT_MACRO(module, S_IFWHT);
+
+ ADD_INT_MACRO(module, S_ISUID);
+ ADD_INT_MACRO(module, S_ISGID);
+ ADD_INT_MACRO(module, S_ISVTX);
+ ADD_INT_MACRO(module, S_ENFMT);
+
+ ADD_INT_MACRO(module, S_IREAD);
+ ADD_INT_MACRO(module, S_IWRITE);
+ ADD_INT_MACRO(module, S_IEXEC);
+
+ ADD_INT_MACRO(module, S_IRWXU);
+ ADD_INT_MACRO(module, S_IRUSR);
+ ADD_INT_MACRO(module, S_IWUSR);
+ ADD_INT_MACRO(module, S_IXUSR);
+
+ ADD_INT_MACRO(module, S_IRWXG);
+ ADD_INT_MACRO(module, S_IRGRP);
+ ADD_INT_MACRO(module, S_IWGRP);
+ ADD_INT_MACRO(module, S_IXGRP);
+
+ ADD_INT_MACRO(module, S_IRWXO);
+ ADD_INT_MACRO(module, S_IROTH);
+ ADD_INT_MACRO(module, S_IWOTH);
+ ADD_INT_MACRO(module, S_IXOTH);
+
+ ADD_INT_MACRO(module, UF_NODUMP);
+ ADD_INT_MACRO(module, UF_IMMUTABLE);
+ ADD_INT_MACRO(module, UF_APPEND);
+ ADD_INT_MACRO(module, UF_OPAQUE);
+ ADD_INT_MACRO(module, UF_NOUNLINK);
+ ADD_INT_MACRO(module, UF_COMPRESSED);
+ ADD_INT_MACRO(module, UF_HIDDEN);
+ ADD_INT_MACRO(module, SF_ARCHIVED);
+ ADD_INT_MACRO(module, SF_IMMUTABLE);
+ ADD_INT_MACRO(module, SF_APPEND);
+ ADD_INT_MACRO(module, SF_NOUNLINK);
+ ADD_INT_MACRO(module, SF_SNAPSHOT);
+
+ const char* st_constants[] = {
+ "ST_MODE",
+ "ST_INO",
+ "ST_DEV",
+ "ST_NLINK",
+ "ST_UID",
+ "ST_GID",
+ "ST_SIZE",
+ "ST_ATIME",
+ "ST_MTIME",
+ "ST_CTIME"
+ };
+
+ for (int i = 0; i < (int)Py_ARRAY_LENGTH(st_constants); i++) {
+ if (PyModule_AddIntConstant(module, st_constants[i], i) < 0) {
+ return -1;
+ }
+ }
+
+#ifdef MS_WINDOWS
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_ARCHIVE);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_COMPRESSED);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_DEVICE);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_DIRECTORY);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_ENCRYPTED);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_HIDDEN);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_INTEGRITY_STREAM);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_NORMAL);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_NOT_CONTENT_INDEXED);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_NO_SCRUB_DATA);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_OFFLINE);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_READONLY);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_REPARSE_POINT);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_SPARSE_FILE);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_SYSTEM);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_TEMPORARY);
+ ADD_INT_MACRO(module, FILE_ATTRIBUTE_VIRTUAL);
+
+ if (PyModule_AddObject(module, "IO_REPARSE_TAG_SYMLINK",
+ PyLong_FromUnsignedLong(IO_REPARSE_TAG_SYMLINK)) < 0) {
+ return -1;
+ }
+ if (PyModule_AddObject(module, "IO_REPARSE_TAG_MOUNT_POINT",
+ PyLong_FromUnsignedLong(IO_REPARSE_TAG_MOUNT_POINT)) < 0) {
+ return -1;
+ }
+ if (PyModule_AddObject(module, "IO_REPARSE_TAG_APPEXECLINK",
+ PyLong_FromUnsignedLong(IO_REPARSE_TAG_APPEXECLINK)) < 0) {
+ return -1;
+ }
+#endif
+
+ return 0;
+}
+
+
+static PyModuleDef_Slot stat_slots[] = {
+ {Py_mod_exec, stat_exec},
+ {0, NULL}
+};
+
+
static struct PyModuleDef statmodule = {
PyModuleDef_HEAD_INIT,
- "_stat",
- module_doc,
- -1,
- stat_methods,
- NULL,
- NULL,
- NULL,
- NULL
+ .m_name = "_stat",
+ .m_doc = module_doc,
+ .m_size = 0,
+ .m_methods = stat_methods,
+ .m_slots = stat_slots,
};
+
PyMODINIT_FUNC
PyInit__stat(void)
{
- PyObject *m;
- m = PyModule_Create(&statmodule);
- if (m == NULL)
- return NULL;
-
- if (PyModule_AddIntMacro(m, S_IFDIR)) return NULL;
- if (PyModule_AddIntMacro(m, S_IFCHR)) return NULL;
- if (PyModule_AddIntMacro(m, S_IFBLK)) return NULL;
- if (PyModule_AddIntMacro(m, S_IFREG)) return NULL;
- if (PyModule_AddIntMacro(m, S_IFIFO)) return NULL;
- if (PyModule_AddIntMacro(m, S_IFLNK)) return NULL;
- if (PyModule_AddIntMacro(m, S_IFSOCK)) return NULL;
- if (PyModule_AddIntMacro(m, S_IFDOOR)) return NULL;
- if (PyModule_AddIntMacro(m, S_IFPORT)) return NULL;
- if (PyModule_AddIntMacro(m, S_IFWHT)) return NULL;
-
- if (PyModule_AddIntMacro(m, S_ISUID)) return NULL;
- if (PyModule_AddIntMacro(m, S_ISGID)) return NULL;
- if (PyModule_AddIntMacro(m, S_ISVTX)) return NULL;
- if (PyModule_AddIntMacro(m, S_ENFMT)) return NULL;
-
- if (PyModule_AddIntMacro(m, S_IREAD)) return NULL;
- if (PyModule_AddIntMacro(m, S_IWRITE)) return NULL;
- if (PyModule_AddIntMacro(m, S_IEXEC)) return NULL;
-
- if (PyModule_AddIntMacro(m, S_IRWXU)) return NULL;
- if (PyModule_AddIntMacro(m, S_IRUSR)) return NULL;
- if (PyModule_AddIntMacro(m, S_IWUSR)) return NULL;
- if (PyModule_AddIntMacro(m, S_IXUSR)) return NULL;
-
- if (PyModule_AddIntMacro(m, S_IRWXG)) return NULL;
- if (PyModule_AddIntMacro(m, S_IRGRP)) return NULL;
- if (PyModule_AddIntMacro(m, S_IWGRP)) return NULL;
- if (PyModule_AddIntMacro(m, S_IXGRP)) return NULL;
-
- if (PyModule_AddIntMacro(m, S_IRWXO)) return NULL;
- if (PyModule_AddIntMacro(m, S_IROTH)) return NULL;
- if (PyModule_AddIntMacro(m, S_IWOTH)) return NULL;
- if (PyModule_AddIntMacro(m, S_IXOTH)) return NULL;
-
- if (PyModule_AddIntMacro(m, UF_NODUMP)) return NULL;
- if (PyModule_AddIntMacro(m, UF_IMMUTABLE)) return NULL;
- if (PyModule_AddIntMacro(m, UF_APPEND)) return NULL;
- if (PyModule_AddIntMacro(m, UF_OPAQUE)) return NULL;
- if (PyModule_AddIntMacro(m, UF_NOUNLINK)) return NULL;
- if (PyModule_AddIntMacro(m, UF_COMPRESSED)) return NULL;
- if (PyModule_AddIntMacro(m, UF_HIDDEN)) return NULL;
- if (PyModule_AddIntMacro(m, SF_ARCHIVED)) return NULL;
- if (PyModule_AddIntMacro(m, SF_IMMUTABLE)) return NULL;
- if (PyModule_AddIntMacro(m, SF_APPEND)) return NULL;
- if (PyModule_AddIntMacro(m, SF_NOUNLINK)) return NULL;
- if (PyModule_AddIntMacro(m, SF_SNAPSHOT)) return NULL;
-
- if (PyModule_AddIntConstant(m, "ST_MODE", 0)) return NULL;
- if (PyModule_AddIntConstant(m, "ST_INO", 1)) return NULL;
- if (PyModule_AddIntConstant(m, "ST_DEV", 2)) return NULL;
- if (PyModule_AddIntConstant(m, "ST_NLINK", 3)) return NULL;
- if (PyModule_AddIntConstant(m, "ST_UID", 4)) return NULL;
- if (PyModule_AddIntConstant(m, "ST_GID", 5)) return NULL;
- if (PyModule_AddIntConstant(m, "ST_SIZE", 6)) return NULL;
- if (PyModule_AddIntConstant(m, "ST_ATIME", 7)) return NULL;
- if (PyModule_AddIntConstant(m, "ST_MTIME", 8)) return NULL;
- if (PyModule_AddIntConstant(m, "ST_CTIME", 9)) return NULL;
-
-#ifdef MS_WINDOWS
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_ARCHIVE)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_COMPRESSED)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_DEVICE)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_DIRECTORY)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_ENCRYPTED)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_HIDDEN)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_INTEGRITY_STREAM)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_NORMAL)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_NOT_CONTENT_INDEXED)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_NO_SCRUB_DATA)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_OFFLINE)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_READONLY)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_REPARSE_POINT)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_SPARSE_FILE)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_SYSTEM)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_TEMPORARY)) return NULL;
- if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_VIRTUAL)) return NULL;
-
- if (PyModule_AddObject(m, "IO_REPARSE_TAG_SYMLINK",
- PyLong_FromUnsignedLong(IO_REPARSE_TAG_SYMLINK))) return NULL;
- if (PyModule_AddObject(m, "IO_REPARSE_TAG_MOUNT_POINT",
- PyLong_FromUnsignedLong(IO_REPARSE_TAG_MOUNT_POINT))) return NULL;
- if (PyModule_AddObject(m, "IO_REPARSE_TAG_APPEXECLINK",
- PyLong_FromUnsignedLong(IO_REPARSE_TAG_APPEXECLINK))) return NULL;
-#endif
-
- return m;
+ return PyModuleDef_Init(&statmodule);
}
#ifdef __cplusplus
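
_stat now does its setup in a Py_mod_exec slot and returns PyModuleDef_Init() from PyInit__stat, i.e. PEP 489 multi-phase initialization; _statistics below gets the same treatment. A reduced sketch of that module shape, assuming a hypothetical module named example (illustrative only, not part of the patch):

#include "Python.h"

/* Illustrative sketch of the multi-phase pattern: the exec slot runs every
 * time the import machinery creates a fresh module object. */
static int
example_exec(PyObject *module)
{
    if (PyModule_AddIntConstant(module, "ANSWER", 42) < 0) {
        return -1;
    }
    return 0;
}

static PyModuleDef_Slot example_slots[] = {
    {Py_mod_exec, example_exec},
    {0, NULL}
};

static struct PyModuleDef examplemodule = {
    PyModuleDef_HEAD_INIT,
    .m_name = "example",
    .m_size = 0,                 /* no per-module state in this sketch */
    .m_slots = example_slots,
};

PyMODINIT_FUNC
PyInit_example(void)
{
    /* Return only the def; creation and execution happen later. */
    return PyModuleDef_Init(&examplemodule);
}
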
diff --git a/Modules/_statisticsmodule.c b/Modules/_statisticsmodule.c
index a646e96d..78c0676a 100644
--- a/Modules/_statisticsmodule.c
+++ b/Modules/_statisticsmodule.c
@@ -1,7 +1,6 @@
/* statistics accelerator C extension: _statistics module. */
#include "Python.h"
-#include "structmember.h"
#include "clinic/_statisticsmodule.c.h"
/*[clinic input]
@@ -129,13 +128,17 @@ static PyMethodDef statistics_methods[] = {
PyDoc_STRVAR(statistics_doc,
"Accelerators for the statistics module.\n");
+static struct PyModuleDef_Slot _statisticsmodule_slots[] = {
+ {0, NULL}
+};
+
static struct PyModuleDef statisticsmodule = {
PyModuleDef_HEAD_INIT,
"_statistics",
statistics_doc,
- -1,
+ 0,
statistics_methods,
- NULL,
+ _statisticsmodule_slots,
NULL,
NULL,
NULL
@@ -144,7 +147,5 @@ static struct PyModuleDef statisticsmodule = {
PyMODINIT_FUNC
PyInit__statistics(void)
{
- PyObject *m = PyModule_Create(&statisticsmodule);
- if (!m) return NULL;
- return m;
+ return PyModuleDef_Init(&statisticsmodule);
}
diff --git a/Modules/_struct.c b/Modules/_struct.c
index 64a9827e..f759f0b1 100644
--- a/Modules/_struct.c
+++ b/Modules/_struct.c
@@ -6,7 +6,7 @@
#define PY_SSIZE_T_CLEAN
#include "Python.h"
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
#include <ctype.h>
/*[clinic input]
@@ -14,7 +14,23 @@ class Struct "PyStructObject *" "&PyStructType"
[clinic start generated code]*/
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=9b032058a83ed7c3]*/
-static PyTypeObject PyStructType;
+typedef struct {
+ PyObject *PyStructType;
+ PyObject *unpackiter_type;
+ PyObject *StructError;
+} _structmodulestate;
+
+static inline _structmodulestate*
+get_struct_state(PyObject *module)
+{
+ void *state = PyModule_GetState(module);
+ assert(state != NULL);
+ return (_structmodulestate *)state;
+}
+
+static struct PyModuleDef _structmodule;
+
+#define _structmodulestate_global get_struct_state(PyState_FindModule(&_structmodule))
/* The translation function for each format character is table driven */
typedef struct _formatdef {
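
The new _structmodulestate struct replaces C static globals with state stored in the module object itself; get_struct_state() reads it through PyModule_GetState(), and _structmodulestate_global falls back to PyState_FindModule() when no module pointer is in hand. A hedged sketch of that lookup pattern with placeholder names (not part of the patch):

#include "Python.h"

/* Illustrative sketch only: per-module state kept in module memory rather
 * than in C globals. */
typedef struct {
    PyObject *error;     /* e.g. the module's exception type */
} examplestate;

/* Tentative definition; the real def would set m_size = sizeof(examplestate). */
static struct PyModuleDef examplemodule;

static inline examplestate *
get_state(PyObject *module)
{
    void *state = PyModule_GetState(module);
    assert(state != NULL);
    return (examplestate *)state;
}

/* Without a module argument in hand, legacy-style code can still locate the
 * module registered for this interpreter, as _structmodulestate_global does. */
static examplestate *
get_global_state(void)
{
    return get_state(PyState_FindModule(&examplemodule));
}
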
@@ -46,13 +62,8 @@ typedef struct {
} PyStructObject;
-#define PyStruct_Check(op) PyObject_TypeCheck(op, &PyStructType)
-#define PyStruct_CheckExact(op) (Py_TYPE(op) == &PyStructType)
-
-
-/* Exception */
-
-static PyObject *StructError;
+#define PyStruct_Check(op) PyObject_TypeCheck(op, (PyTypeObject *)_structmodulestate_global->PyStructType)
+#define PyStruct_CheckExact(op) Py_IS_TYPE(op, (PyTypeObject *)_structmodulestate_global->PyStructType)
/* Define various structs to figure out the alignments of types */
@@ -115,7 +126,7 @@ get_pylong(PyObject *v)
return NULL;
}
else {
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"required argument is not an integer");
return NULL;
}
@@ -143,7 +154,7 @@ get_long(PyObject *v, long *p)
Py_DECREF(v);
if (x == (long)-1 && PyErr_Occurred()) {
if (PyErr_ExceptionMatches(PyExc_OverflowError))
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"argument out of range");
return -1;
}
@@ -167,7 +178,7 @@ get_ulong(PyObject *v, unsigned long *p)
Py_DECREF(v);
if (x == (unsigned long)-1 && PyErr_Occurred()) {
if (PyErr_ExceptionMatches(PyExc_OverflowError))
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"argument out of range");
return -1;
}
@@ -190,7 +201,7 @@ get_longlong(PyObject *v, long long *p)
Py_DECREF(v);
if (x == (long long)-1 && PyErr_Occurred()) {
if (PyErr_ExceptionMatches(PyExc_OverflowError))
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"argument out of range");
return -1;
}
@@ -213,7 +224,7 @@ get_ulonglong(PyObject *v, unsigned long long *p)
Py_DECREF(v);
if (x == (unsigned long long)-1 && PyErr_Occurred()) {
if (PyErr_ExceptionMatches(PyExc_OverflowError))
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"argument out of range");
return -1;
}
@@ -236,7 +247,7 @@ get_ssize_t(PyObject *v, Py_ssize_t *p)
Py_DECREF(v);
if (x == (Py_ssize_t)-1 && PyErr_Occurred()) {
if (PyErr_ExceptionMatches(PyExc_OverflowError))
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"argument out of range");
return -1;
}
@@ -259,7 +270,7 @@ get_size_t(PyObject *v, size_t *p)
Py_DECREF(v);
if (x == (size_t)-1 && PyErr_Occurred()) {
if (PyErr_ExceptionMatches(PyExc_OverflowError))
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"argument out of range");
return -1;
}
@@ -293,7 +304,7 @@ pack_halffloat(char *p, /* start of 2-byte string */
{
double x = PyFloat_AsDouble(v);
if (x == -1.0 && PyErr_Occurred()) {
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"required argument is not a float");
return -1;
}
@@ -339,13 +350,13 @@ _range_error(const formatdef *f, int is_unsigned)
const size_t ulargest = (size_t)-1 >> ((SIZEOF_SIZE_T - f->size)*8);
assert(f->size >= 1 && f->size <= SIZEOF_SIZE_T);
if (is_unsigned)
- PyErr_Format(StructError,
+ PyErr_Format(_structmodulestate_global->StructError,
"'%c' format requires 0 <= number <= %zu",
f->format,
ulargest);
else {
const Py_ssize_t largest = (Py_ssize_t)(ulargest >> 1);
- PyErr_Format(StructError,
+ PyErr_Format(_structmodulestate_global->StructError,
"'%c' format requires %zd <= number <= %zd",
f->format,
~ largest,
@@ -361,8 +372,8 @@ _range_error(const formatdef *f, int is_unsigned)
[bln][up]_TYPE
- [bln] distiguishes among big-endian, little-endian and native.
- [pu] distiguishes between pack (to struct) and unpack (from struct).
+ [bln] distinguishes among big-endian, little-endian and native.
+ [pu] distinguishes between pack (to struct) and unpack (from struct).
TYPE is one of char, byte, ubyte, etc.
*/
@@ -524,7 +535,7 @@ np_byte(char *p, PyObject *v, const formatdef *f)
if (get_long(v, &x) < 0)
return -1;
if (x < -128 || x > 127) {
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"byte format requires -128 <= number <= 127");
return -1;
}
@@ -539,7 +550,7 @@ np_ubyte(char *p, PyObject *v, const formatdef *f)
if (get_long(v, &x) < 0)
return -1;
if (x < 0 || x > 255) {
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"ubyte format requires 0 <= number <= 255");
return -1;
}
@@ -550,8 +561,8 @@ np_ubyte(char *p, PyObject *v, const formatdef *f)
static int
np_char(char *p, PyObject *v, const formatdef *f)
{
- if (!PyBytes_Check(v) || PyBytes_GET_SIZE(v) != 1) {
- PyErr_SetString(StructError,
+ if (!PyBytes_Check(v) || PyBytes_Size(v) != 1) {
+ PyErr_SetString(_structmodulestate_global->StructError,
"char format requires a bytes object of length 1");
return -1;
}
@@ -567,7 +578,7 @@ np_short(char *p, PyObject *v, const formatdef *f)
if (get_long(v, &x) < 0)
return -1;
if (x < SHRT_MIN || x > SHRT_MAX) {
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"short format requires " Py_STRINGIFY(SHRT_MIN)
" <= number <= " Py_STRINGIFY(SHRT_MAX));
return -1;
@@ -585,7 +596,7 @@ np_ushort(char *p, PyObject *v, const formatdef *f)
if (get_long(v, &x) < 0)
return -1;
if (x < 0 || x > USHRT_MAX) {
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"ushort format requires 0 <= number <= "
Py_STRINGIFY(USHRT_MAX));
return -1;
@@ -716,7 +727,7 @@ np_float(char *p, PyObject *v, const formatdef *f)
{
float x = (float)PyFloat_AsDouble(v);
if (x == -1 && PyErr_Occurred()) {
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"required argument is not a float");
return -1;
}
@@ -729,7 +740,7 @@ np_double(char *p, PyObject *v, const formatdef *f)
{
double x = PyFloat_AsDouble(v);
if (x == -1 && PyErr_Occurred()) {
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"required argument is not a float");
return -1;
}
@@ -948,7 +959,7 @@ bp_float(char *p, PyObject *v, const formatdef *f)
{
double x = PyFloat_AsDouble(v);
if (x == -1 && PyErr_Occurred()) {
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"required argument is not a float");
return -1;
}
@@ -960,7 +971,7 @@ bp_double(char *p, PyObject *v, const formatdef *f)
{
double x = PyFloat_AsDouble(v);
if (x == -1 && PyErr_Occurred()) {
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"required argument is not a float");
return -1;
}
@@ -1163,7 +1174,7 @@ lp_float(char *p, PyObject *v, const formatdef *f)
{
double x = PyFloat_AsDouble(v);
if (x == -1 && PyErr_Occurred()) {
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"required argument is not a float");
return -1;
}
@@ -1175,7 +1186,7 @@ lp_double(char *p, PyObject *v, const formatdef *f)
{
double x = PyFloat_AsDouble(v);
if (x == -1 && PyErr_Occurred()) {
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"required argument is not a float");
return -1;
}
@@ -1242,7 +1253,7 @@ getentry(int c, const formatdef *f)
return f;
}
}
- PyErr_SetString(StructError, "bad char in struct format");
+ PyErr_SetString(_structmodulestate_global->StructError, "bad char in struct format");
return NULL;
}
@@ -1286,7 +1297,8 @@ prepare_s(PyStructObject *self)
fmt = PyBytes_AS_STRING(self->s_format);
if (strlen(fmt) != (size_t)PyBytes_GET_SIZE(self->s_format)) {
- PyErr_SetString(StructError, "embedded null character");
+ PyErr_SetString(_structmodulestate_global->StructError,
+ "embedded null character");
return -1;
}
@@ -1297,7 +1309,7 @@ prepare_s(PyStructObject *self)
len = 0;
ncodes = 0;
while ((c = *s++) != '\0') {
- if (Py_ISSPACE(Py_CHARMASK(c)))
+ if (Py_ISSPACE(c))
continue;
if ('0' <= c && c <= '9') {
num = c - '0';
@@ -1311,7 +1323,7 @@ prepare_s(PyStructObject *self)
num = num*10 + (c - '0');
}
if (c == '\0') {
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"repeat count given without format specifier");
return -1;
}
@@ -1362,7 +1374,7 @@ prepare_s(PyStructObject *self)
s = fmt;
size = 0;
while ((c = *s++) != '\0') {
- if (Py_ISSPACE(Py_CHARMASK(c)))
+ if (Py_ISSPACE(c))
continue;
if ('0' <= c && c <= '9') {
num = c - '0';
@@ -1401,7 +1413,7 @@ prepare_s(PyStructObject *self)
return 0;
overflow:
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"total struct size too long");
return -1;
}
@@ -1411,9 +1423,11 @@ s_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
PyObject *self;
- assert(type != NULL && type->tp_alloc != NULL);
+ assert(type != NULL);
+ allocfunc alloc_func = PyType_GetSlot(type, Py_tp_alloc);
+ assert(alloc_func != NULL);
- self = type->tp_alloc(type, 0);
+ self = alloc_func(type, 0);
if (self != NULL) {
PyStructObject *s = (PyStructObject*)self;
Py_INCREF(Py_None);
@@ -1459,7 +1473,7 @@ Struct___init___impl(PyStructObject *self, PyObject *format)
PyErr_Format(PyExc_TypeError,
"Struct() argument 1 must be a str or bytes object, "
"not %.200s",
- Py_TYPE(format)->tp_name);
+ _PyType_Name(Py_TYPE(format)));
return -1;
}
@@ -1472,13 +1486,16 @@ Struct___init___impl(PyStructObject *self, PyObject *format)
static void
s_dealloc(PyStructObject *s)
{
+ PyTypeObject *tp = Py_TYPE(s);
if (s->weakreflist != NULL)
PyObject_ClearWeakRefs((PyObject *)s);
if (s->s_codes != NULL) {
PyMem_FREE(s->s_codes);
}
- Py_DECREF(s->s_format);
- Py_TYPE(s)->tp_free((PyObject *)s);
+ Py_XDECREF(s->s_format);
+ freefunc free_func = PyType_GetSlot(Py_TYPE(s), Py_tp_free);
+ free_func(s);
+ Py_DECREF(tp);
}
static PyObject *
@@ -1539,7 +1556,7 @@ Struct_unpack_impl(PyStructObject *self, Py_buffer *buffer)
{
assert(self->s_codes != NULL);
if (buffer->len != self->s_size) {
- PyErr_Format(StructError,
+ PyErr_Format(_structmodulestate_global->StructError,
"unpack requires a buffer of %zd bytes",
self->s_size);
return NULL;
@@ -1572,7 +1589,7 @@ Struct_unpack_from_impl(PyStructObject *self, Py_buffer *buffer,
if (offset < 0) {
if (offset + self->s_size > 0) {
- PyErr_Format(StructError,
+ PyErr_Format(_structmodulestate_global->StructError,
"not enough data to unpack %zd bytes at offset %zd",
self->s_size,
offset);
@@ -1580,7 +1597,7 @@ Struct_unpack_from_impl(PyStructObject *self, Py_buffer *buffer,
}
if (offset + buffer->len < 0) {
- PyErr_Format(StructError,
+ PyErr_Format(_structmodulestate_global->StructError,
"offset %zd out of range for %zd-byte buffer",
offset,
buffer->len);
@@ -1590,7 +1607,7 @@ Struct_unpack_from_impl(PyStructObject *self, Py_buffer *buffer,
}
if ((buffer->len - offset) < self->s_size) {
- PyErr_Format(StructError,
+ PyErr_Format(_structmodulestate_global->StructError,
"unpack_from requires a buffer of at least %zu bytes for "
"unpacking %zd bytes at offset %zd "
"(actual buffer size is %zd)",
@@ -1618,15 +1635,18 @@ static void
unpackiter_dealloc(unpackiterobject *self)
{
/* bpo-31095: UnTrack is needed before calling any callbacks */
+ PyTypeObject *tp = Py_TYPE(self);
PyObject_GC_UnTrack(self);
Py_XDECREF(self->so);
PyBuffer_Release(&self->buf);
PyObject_GC_Del(self);
+ Py_DECREF(tp);
}
static int
unpackiter_traverse(unpackiterobject *self, visitproc visit, void *arg)
{
+ Py_VISIT(Py_TYPE(self));
Py_VISIT(self->so);
Py_VISIT(self->buf.obj);
return 0;
@@ -1667,35 +1687,28 @@ unpackiter_iternext(unpackiterobject *self)
return result;
}
-static PyTypeObject unpackiter_type = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "unpack_iterator", /* tp_name */
- sizeof(unpackiterobject), /* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)unpackiter_dealloc, /* tp_dealloc */
- 0, /* tp_vectorcall_offset */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_as_async */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- PyObject_GenericGetAttr, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
- 0, /* tp_doc */
- (traverseproc)unpackiter_traverse, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- PyObject_SelfIter, /* tp_iter */
- (iternextfunc)unpackiter_iternext, /* tp_iternext */
- unpackiter_methods /* tp_methods */
+PyObject *unpackiter_new(PyTypeObject *type, PyObject *args, PyObject *kwds) {
+ PyErr_Format(PyExc_TypeError, "Cannot create '%.200s objects", _PyType_Name(type));
+ return NULL;
+}
+
+static PyType_Slot unpackiter_type_slots[] = {
+ {Py_tp_dealloc, unpackiter_dealloc},
+ {Py_tp_getattro, PyObject_GenericGetAttr},
+ {Py_tp_traverse, unpackiter_traverse},
+ {Py_tp_iter, PyObject_SelfIter},
+ {Py_tp_iternext, unpackiter_iternext},
+ {Py_tp_methods, unpackiter_methods},
+ {Py_tp_new, unpackiter_new},
+ {0, 0},
+};
+
+static PyType_Spec unpackiter_type_spec = {
+ "_struct.unpack_iterator",
+ sizeof(unpackiterobject),
+ 0,
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
+ unpackiter_type_slots
};
/*[clinic input]
@@ -1721,12 +1734,12 @@ Struct_iter_unpack(PyStructObject *self, PyObject *buffer)
assert(self->s_codes != NULL);
if (self->s_size == 0) {
- PyErr_Format(StructError,
+ PyErr_Format(_structmodulestate_global->StructError,
"cannot iteratively unpack with a struct of length 0");
return NULL;
}
- iter = (unpackiterobject *) PyType_GenericAlloc(&unpackiter_type, 0);
+ iter = (unpackiterobject *) PyType_GenericAlloc((PyTypeObject *)_structmodulestate_global->unpackiter_type, 0);
if (iter == NULL)
return NULL;
@@ -1735,7 +1748,7 @@ Struct_iter_unpack(PyStructObject *self, PyObject *buffer)
return NULL;
}
if (iter->buf.len % self->s_size != 0) {
- PyErr_Format(StructError,
+ PyErr_Format(_structmodulestate_global->StructError,
"iterative unpacking requires a buffer of "
"a multiple of %zd bytes",
self->s_size);
@@ -1778,10 +1791,10 @@ s_pack_internal(PyStructObject *soself, PyObject *const *args, int offset, char*
if (e->format == 's') {
Py_ssize_t n;
int isstring;
- void *p;
+ const void *p;
isstring = PyBytes_Check(v);
if (!isstring && !PyByteArray_Check(v)) {
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"argument for 's' must be a bytes object");
return -1;
}
@@ -1800,10 +1813,10 @@ s_pack_internal(PyStructObject *soself, PyObject *const *args, int offset, char*
} else if (e->format == 'p') {
Py_ssize_t n;
int isstring;
- void *p;
+ const void *p;
isstring = PyBytes_Check(v);
if (!isstring && !PyByteArray_Check(v)) {
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"argument for 'p' must be a bytes object");
return -1;
}
@@ -1825,7 +1838,7 @@ s_pack_internal(PyStructObject *soself, PyObject *const *args, int offset, char*
} else {
if (e->pack(res, v, e) < 0) {
if (PyLong_Check(v) && PyErr_ExceptionMatches(PyExc_OverflowError))
- PyErr_SetString(StructError,
+ PyErr_SetString(_structmodulestate_global->StructError,
"int too large to convert");
return -1;
}
@@ -1849,8 +1862,8 @@ strings.");
static PyObject *
s_pack(PyObject *self, PyObject *const *args, Py_ssize_t nargs)
{
+ char *buf;
PyStructObject *soself;
- PyObject *result;
/* Validate arguments. */
soself = (PyStructObject *)self;
@@ -1858,23 +1871,27 @@ s_pack(PyObject *self, PyObject *const *args, Py_ssize_t nargs)
assert(soself->s_codes != NULL);
if (nargs != soself->s_len)
{
- PyErr_Format(StructError,
+ PyErr_Format(_structmodulestate_global->StructError,
"pack expected %zd items for packing (got %zd)", soself->s_len, nargs);
return NULL;
}
- /* Allocate a new buffer */
- result = PyBytes_FromStringAndSize((char *)NULL, soself->s_size);
- if (result == NULL)
+ /* Allocate a new string */
+ _PyBytesWriter writer;
+ _PyBytesWriter_Init(&writer);
+ buf = _PyBytesWriter_Alloc(&writer, soself->s_size);
+ if (buf == NULL) {
+ _PyBytesWriter_Dealloc(&writer);
return NULL;
+ }
/* Call the guts */
- if ( s_pack_internal(soself, args, 0, PyBytes_AS_STRING(result)) != 0 ) {
- Py_DECREF(result);
+ if ( s_pack_internal(soself, args, 0, buf) != 0 ) {
+ _PyBytesWriter_Dealloc(&writer);
return NULL;
}
- return result;
+ return _PyBytesWriter_Finish(&writer, buf + soself->s_size);
}
PyDoc_STRVAR(s_pack_into__doc__,
@@ -1899,15 +1916,15 @@ s_pack_into(PyObject *self, PyObject *const *args, Py_ssize_t nargs)
if (nargs != (soself->s_len + 2))
{
if (nargs == 0) {
- PyErr_Format(StructError,
+ PyErr_Format(_structmodulestate_global->StructError,
"pack_into expected buffer argument");
}
else if (nargs == 1) {
- PyErr_Format(StructError,
+ PyErr_Format(_structmodulestate_global->StructError,
"pack_into expected offset argument");
}
else {
- PyErr_Format(StructError,
+ PyErr_Format(_structmodulestate_global->StructError,
"pack_into expected %zd items for packing (got %zd)",
soself->s_len, (nargs - 2));
}
@@ -1930,7 +1947,7 @@ s_pack_into(PyObject *self, PyObject *const *args, Py_ssize_t nargs)
if (offset < 0) {
/* Check that negative offset is low enough to fit data */
if (offset + soself->s_size > 0) {
- PyErr_Format(StructError,
+ PyErr_Format(_structmodulestate_global->StructError,
"no space to pack %zd bytes at offset %zd",
soself->s_size,
offset);
@@ -1940,7 +1957,7 @@ s_pack_into(PyObject *self, PyObject *const *args, Py_ssize_t nargs)
/* Check that negative offset is not crossing buffer boundary */
if (offset + buffer.len < 0) {
- PyErr_Format(StructError,
+ PyErr_Format(_structmodulestate_global->StructError,
"offset %zd out of range for %zd-byte buffer",
offset,
buffer.len);
@@ -1956,7 +1973,7 @@ s_pack_into(PyObject *self, PyObject *const *args, Py_ssize_t nargs)
assert(offset >= 0);
assert(soself->s_size >= 0);
- PyErr_Format(StructError,
+ PyErr_Format(_structmodulestate_global->StructError,
"pack_into requires a buffer of at least %zu bytes for "
"packing %zd bytes at offset %zd "
"(actual buffer size is %zd)",
@@ -2018,6 +2035,11 @@ static struct PyMethodDef s_methods[] = {
{NULL, NULL} /* sentinel */
};
+static PyMemberDef s_members[] = {
+ {"__weaklistoffset__", T_PYSSIZET, offsetof(PyStructObject, weakreflist), READONLY},
+ {NULL} /* sentinel */
+};
+
#define OFF(x) offsetof(PyStructObject, x)
static PyGetSetDef s_getsetlist[] = {
@@ -2026,47 +2048,32 @@ static PyGetSetDef s_getsetlist[] = {
{NULL} /* sentinel */
};
-static
-PyTypeObject PyStructType = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "Struct",
+PyDoc_STRVAR(s__doc__,
+"Struct(fmt) --> compiled struct object\n"
+"\n"
+);
+
+static PyType_Slot PyStructType_slots[] = {
+ {Py_tp_dealloc, s_dealloc},
+ {Py_tp_getattro, PyObject_GenericGetAttr},
+ {Py_tp_setattro, PyObject_GenericSetAttr},
+ {Py_tp_doc, (void*)s__doc__},
+ {Py_tp_methods, s_methods},
+ {Py_tp_members, s_members},
+ {Py_tp_getset, s_getsetlist},
+ {Py_tp_init, Struct___init__},
+ {Py_tp_alloc, PyType_GenericAlloc},
+ {Py_tp_new, s_new},
+ {Py_tp_free, PyObject_Del},
+ {0, 0},
+};
+
+static PyType_Spec PyStructType_spec = {
+ "_struct.Struct",
sizeof(PyStructObject),
0,
- (destructor)s_dealloc, /* tp_dealloc */
- 0, /* tp_vectorcall_offset */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_as_async */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- PyObject_GenericGetAttr, /* tp_getattro */
- PyObject_GenericSetAttr, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
- Struct___init____doc__, /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- offsetof(PyStructObject, weakreflist), /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- s_methods, /* tp_methods */
- NULL, /* tp_members */
- s_getsetlist, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- Struct___init__, /* tp_init */
- PyType_GenericAlloc, /* tp_alloc */
- s_new, /* tp_new */
- PyObject_Del, /* tp_free */
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+ PyStructType_slots
};
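
Struct and the unpack iterator are now heap types assembled from PyType_Slot/PyType_Spec tables and created with PyType_FromSpec(); as the reworked s_dealloc() above shows, instances of a heap type must also drop the reference they hold on their type. A sketch of the creation side under placeholder names (illustrative only, not part of the patch):

#include "Python.h"

/* Illustrative sketch only: build a minimal heap type from a spec and
 * publish it on a module. */
typedef struct {
    PyObject_HEAD
} exampleobject;

static void
example_dealloc(PyObject *self)
{
    PyTypeObject *tp = Py_TYPE(self);
    freefunc tp_free = (freefunc)PyType_GetSlot(tp, Py_tp_free);
    tp_free(self);
    Py_DECREF(tp);        /* heap type: each instance owns a reference to it */
}

static PyType_Slot example_slots[] = {
    {Py_tp_dealloc, example_dealloc},
    {Py_tp_new, PyType_GenericNew},
    {0, 0},
};

static PyType_Spec example_spec = {
    .name = "example.Example",
    .basicsize = sizeof(exampleobject),
    .flags = Py_TPFLAGS_DEFAULT,
    .slots = example_slots,
};

static int
add_example_type(PyObject *module)
{
    PyObject *tp = PyType_FromSpec(&example_spec);
    if (tp == NULL) {
        return -1;
    }
    if (PyModule_AddObject(module, "Example", tp) < 0) {
        Py_DECREF(tp);
        return -1;
    }
    return 0;   /* PyModule_AddObject stole the reference on success */
}
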
@@ -2102,7 +2109,7 @@ cache_struct_converter(PyObject *fmt, PyStructObject **ptr)
return 0;
}
- s_object = PyObject_CallFunctionObjArgs((PyObject *)(&PyStructType), fmt, NULL);
+ s_object = PyObject_CallOneArg(_structmodulestate_global->PyStructType, fmt);
if (s_object != NULL) {
if (PyDict_GET_SIZE(cache) >= MAXCACHE)
PyDict_Clear(cache);
@@ -2312,16 +2319,46 @@ Whitespace between formats is ignored.\n\
The variable struct.error is an exception raised on errors.\n");
+static int
+_structmodule_traverse(PyObject *module, visitproc visit, void *arg)
+{
+ _structmodulestate *state = (_structmodulestate *)PyModule_GetState(module);
+ if (state) {
+ Py_VISIT(state->PyStructType);
+ Py_VISIT(state->unpackiter_type);
+ Py_VISIT(state->StructError);
+ }
+ return 0;
+}
+
+static int
+_structmodule_clear(PyObject *module)
+{
+ _structmodulestate *state = (_structmodulestate *)PyModule_GetState(module);
+ if (state) {
+ Py_CLEAR(state->PyStructType);
+ Py_CLEAR(state->unpackiter_type);
+ Py_CLEAR(state->StructError);
+ }
+ return 0;
+}
+
+static void
+_structmodule_free(void *module)
+{
+ _structmodule_clear((PyObject *)module);
+}
+
static struct PyModuleDef _structmodule = {
PyModuleDef_HEAD_INIT,
"_struct",
module_doc,
- -1,
+ sizeof(_structmodulestate),
module_functions,
NULL,
- NULL,
- NULL,
- NULL
+ _structmodule_traverse,
+ _structmodule_clear,
+ _structmodule_free,
};
PyMODINIT_FUNC
@@ -2333,12 +2370,19 @@ PyInit__struct(void)
if (m == NULL)
return NULL;
- Py_TYPE(&PyStructType) = &PyType_Type;
- if (PyType_Ready(&PyStructType) < 0)
+ PyObject *PyStructType = PyType_FromSpec(&PyStructType_spec);
+ if (PyStructType == NULL) {
return NULL;
+ }
+ Py_INCREF(PyStructType);
+ PyModule_AddObject(m, "Struct", PyStructType);
+ get_struct_state(m)->PyStructType = PyStructType;
- if (PyType_Ready(&unpackiter_type) < 0)
+ PyObject *unpackiter_type = PyType_FromSpec(&unpackiter_type_spec);
+ if (unpackiter_type == NULL) {
return NULL;
+ }
+ get_struct_state(m)->unpackiter_type = unpackiter_type;
/* Check endian and swap in faster functions */
{
@@ -2383,17 +2427,12 @@ PyInit__struct(void)
}
/* Add some symbolic constants to the module */
- if (StructError == NULL) {
- StructError = PyErr_NewException("struct.error", NULL, NULL);
- if (StructError == NULL)
- return NULL;
- }
-
+ PyObject *StructError = PyErr_NewException("struct.error", NULL, NULL);
+ if (StructError == NULL)
+ return NULL;
Py_INCREF(StructError);
PyModule_AddObject(m, "error", StructError);
-
- Py_INCREF((PyObject*)&PyStructType);
- PyModule_AddObject(m, "Struct", (PyObject*)&PyStructType);
+ get_struct_state(m)->StructError = StructError;
return m;
}
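
With Struct now a heap type, s_new() above looks the allocation slot up at run time through PyType_GetSlot() instead of touching type->tp_alloc directly. A small sketch of that lookup, assuming a heap type created with PyType_FromSpec() (illustrative only, not part of the patch):

#include "Python.h"

/* Illustrative sketch only: allocate an instance via the tp_alloc slot
 * fetched by number, which works for heap types. */
static PyObject *
alloc_instance(PyTypeObject *type)
{
    allocfunc tp_alloc = (allocfunc)PyType_GetSlot(type, Py_tp_alloc);
    if (tp_alloc == NULL) {
        if (!PyErr_Occurred()) {
            PyErr_SetString(PyExc_TypeError, "type has no tp_alloc slot");
        }
        return NULL;
    }
    return tp_alloc(type, 0);
}
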
diff --git a/Modules/_testbuffer.c b/Modules/_testbuffer.c
index d7d3cc8d..d8321768 100644
--- a/Modules/_testbuffer.c
+++ b/Modules/_testbuffer.c
@@ -24,7 +24,7 @@ static PyObject *simple_format = NULL;
/**************************************************************************/
static PyTypeObject NDArray_Type;
-#define NDArray_Check(v) (Py_TYPE(v) == &NDArray_Type)
+#define NDArray_Check(v) Py_IS_TYPE(v, &NDArray_Type)
#define CHECK_LIST_OR_TUPLE(v) \
if (!PyList_Check(v) && !PyTuple_Check(v)) { \
@@ -1854,7 +1854,7 @@ ndarray_subscript(NDArrayObject *self, PyObject *key)
type_error:
PyErr_Format(PyExc_TypeError,
"cannot index memory using \"%.200s\"",
- key->ob_type->tp_name);
+ Py_TYPE(key)->tp_name);
err_occurred:
Py_DECREF(nd);
return NULL;
@@ -2050,7 +2050,7 @@ static PyObject *
ndarray_get_format(NDArrayObject *self, void *closure)
{
Py_buffer *base = &self->head->base;
- char *fmt = base->format ? base->format : "";
+ const char *fmt = base->format ? base->format : "";
return PyUnicode_FromString(fmt);
}
@@ -2835,11 +2835,11 @@ PyInit__testbuffer(void)
if (m == NULL)
return NULL;
- Py_TYPE(&NDArray_Type) = &PyType_Type;
+ Py_SET_TYPE(&NDArray_Type, &PyType_Type);
Py_INCREF(&NDArray_Type);
PyModule_AddObject(m, "ndarray", (PyObject *)&NDArray_Type);
- Py_TYPE(&StaticArray_Type) = &PyType_Type;
+ Py_SET_TYPE(&StaticArray_Type, &PyType_Type);
Py_INCREF(&StaticArray_Type);
PyModule_AddObject(m, "staticarray", (PyObject *)&StaticArray_Type);
diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c
index af28af50..b2d070cf 100644
--- a/Modules/_testcapimodule.c
+++ b/Modules/_testcapimodule.c
@@ -5,18 +5,22 @@
* standard Python regression test, via Lib/test/test_capi.py.
*/
-/* The Visual Studio projects builds _testcapi with Py_BUILD_CORE_MODULE
- define, but we only want to test the public C API, not the internal
- C API. */
+/* This module tests the public (Include/ and Include/cpython/) C API.
+ The internal C API must not be used here: use _testinternalcapi for that.
+
+ The Visual Studio projects builds _testcapi with Py_BUILD_CORE_MODULE
+ macro defined, but only the public C API must be tested here. */
+
#undef Py_BUILD_CORE_MODULE
+/* Always enable assertions */
+#undef NDEBUG
#define PY_SSIZE_T_CLEAN
#include "Python.h"
#include "datetime.h"
#include "marshal.h"
-#include "pythread.h"
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
#include <float.h>
#include <signal.h>
@@ -274,7 +278,7 @@ dict_hassplittable(PyObject *self, PyObject *arg)
if (!PyDict_Check(arg)) {
PyErr_Format(PyExc_TypeError,
"dict_hassplittable() argument must be dict, not '%s'",
- arg->ob_type->tp_name);
+ Py_TYPE(arg)->tp_name);
return NULL;
}
@@ -639,7 +643,7 @@ test_long_long_and_overflow(PyObject *self, PyObject *Py_UNUSED(ignored))
int overflow;
/* Test that overflow is set properly for a large value. */
- /* num is a number larger than PY_LLONG_MAX on a typical machine. */
+ /* num is a number larger than LLONG_MAX on a typical machine. */
num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16);
if (num == NULL)
return NULL;
@@ -655,8 +659,8 @@ test_long_long_and_overflow(PyObject *self, PyObject *Py_UNUSED(ignored))
return raiseTestError("test_long_long_and_overflow",
"overflow was not set to 1");
- /* Same again, with num = PY_LLONG_MAX + 1 */
- num = PyLong_FromLongLong(PY_LLONG_MAX);
+ /* Same again, with num = LLONG_MAX + 1 */
+ num = PyLong_FromLongLong(LLONG_MAX);
if (num == NULL)
return NULL;
one = PyLong_FromLong(1L);
@@ -683,7 +687,7 @@ test_long_long_and_overflow(PyObject *self, PyObject *Py_UNUSED(ignored))
"overflow was not set to 1");
/* Test that overflow is set properly for a large negative value. */
- /* num is a number smaller than PY_LLONG_MIN on a typical platform */
+ /* num is a number smaller than LLONG_MIN on a typical platform */
num = PyLong_FromString("-FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16);
if (num == NULL)
return NULL;
@@ -699,8 +703,8 @@ test_long_long_and_overflow(PyObject *self, PyObject *Py_UNUSED(ignored))
return raiseTestError("test_long_long_and_overflow",
"overflow was not set to -1");
- /* Same again, with num = PY_LLONG_MIN - 1 */
- num = PyLong_FromLongLong(PY_LLONG_MIN);
+ /* Same again, with num = LLONG_MIN - 1 */
+ num = PyLong_FromLongLong(LLONG_MIN);
if (num == NULL)
return NULL;
one = PyLong_FromLong(1L);
@@ -757,7 +761,7 @@ test_long_long_and_overflow(PyObject *self, PyObject *Py_UNUSED(ignored))
return raiseTestError("test_long_long_and_overflow",
"overflow was set incorrectly");
- num = PyLong_FromLongLong(PY_LLONG_MAX);
+ num = PyLong_FromLongLong(LLONG_MAX);
if (num == NULL)
return NULL;
overflow = 1234;
@@ -765,14 +769,14 @@ test_long_long_and_overflow(PyObject *self, PyObject *Py_UNUSED(ignored))
Py_DECREF(num);
if (value == -1 && PyErr_Occurred())
return NULL;
- if (value != PY_LLONG_MAX)
+ if (value != LLONG_MAX)
return raiseTestError("test_long_long_and_overflow",
- "expected return value PY_LLONG_MAX");
+ "expected return value LLONG_MAX");
if (overflow != 0)
return raiseTestError("test_long_long_and_overflow",
"overflow was not cleared");
- num = PyLong_FromLongLong(PY_LLONG_MIN);
+ num = PyLong_FromLongLong(LLONG_MIN);
if (num == NULL)
return NULL;
overflow = 0;
@@ -780,9 +784,9 @@ test_long_long_and_overflow(PyObject *self, PyObject *Py_UNUSED(ignored))
Py_DECREF(num);
if (value == -1 && PyErr_Occurred())
return NULL;
- if (value != PY_LLONG_MIN)
+ if (value != LLONG_MIN)
return raiseTestError("test_long_long_and_overflow",
- "expected return value PY_LLONG_MIN");
+ "expected return value LLONG_MIN");
if (overflow != 0)
return raiseTestError("test_long_long_and_overflow",
"overflow was not cleared");
@@ -1664,6 +1668,10 @@ exit:
static volatile int x;
+/* Ignore use of deprecated APIs */
+_Py_COMP_DIAG_PUSH
+_Py_COMP_DIAG_IGNORE_DEPR_DECLS
+
/* Test the u and u# codes for PyArg_ParseTuple. May leak memory in case
of an error.
*/
@@ -1840,6 +1848,7 @@ test_widechar(PyObject *self, PyObject *Py_UNUSED(ignored))
Py_RETURN_NONE;
}
+_Py_COMP_DIAG_POP
static PyObject *
unicode_aswidechar(PyObject *self, PyObject *args)
@@ -1923,6 +1932,48 @@ unicode_asucs4(PyObject *self, PyObject *args)
return result;
}
+static PyObject *
+unicode_asutf8(PyObject *self, PyObject *args)
+{
+ PyObject *unicode;
+ const char *buffer;
+
+ if (!PyArg_ParseTuple(args, "U", &unicode)) {
+ return NULL;
+ }
+
+ buffer = PyUnicode_AsUTF8(unicode);
+ if (buffer == NULL) {
+ return NULL;
+ }
+
+ return PyBytes_FromString(buffer);
+}
+
+static PyObject *
+unicode_asutf8andsize(PyObject *self, PyObject *args)
+{
+ PyObject *unicode, *result;
+ const char *buffer;
+ Py_ssize_t utf8_len;
+
+ if(!PyArg_ParseTuple(args, "U", &unicode)) {
+ return NULL;
+ }
+
+ buffer = PyUnicode_AsUTF8AndSize(unicode, &utf8_len);
+ if (buffer == NULL) {
+ return NULL;
+ }
+
+ result = PyBytes_FromString(buffer);
+ if (result == NULL) {
+ return NULL;
+ }
+
+ return Py_BuildValue("(Nn)", result, utf8_len);
+}
+
static PyObject *
unicode_findchar(PyObject *self, PyObject *args)
{
@@ -1973,6 +2024,10 @@ unicode_copycharacters(PyObject *self, PyObject *args)
return Py_BuildValue("(Nn)", to_copy, copied);
}
+/* Ignore use of deprecated APIs */
+_Py_COMP_DIAG_PUSH
+_Py_COMP_DIAG_IGNORE_DEPR_DECLS
+
static PyObject *
unicode_encodedecimal(PyObject *self, PyObject *args)
{
@@ -2040,6 +2095,7 @@ unicode_legacy_string(PyObject *self, PyObject *args)
return u;
}
+_Py_COMP_DIAG_POP
static PyObject *
getargs_w_star(PyObject *self, PyObject *args)
@@ -2589,6 +2645,55 @@ get_datetime_fromtimestamp(PyObject* self, PyObject *args)
return rv;
}
+static PyObject *
+test_PyDateTime_GET(PyObject *self, PyObject *obj)
+{
+ int year, month, day;
+
+ year = PyDateTime_GET_YEAR(obj);
+ month = PyDateTime_GET_MONTH(obj);
+ day = PyDateTime_GET_DAY(obj);
+
+ return Py_BuildValue("(lll)", year, month, day);
+}
+
+static PyObject *
+test_PyDateTime_DATE_GET(PyObject *self, PyObject *obj)
+{
+ int hour, minute, second, microsecond;
+
+ hour = PyDateTime_DATE_GET_HOUR(obj);
+ minute = PyDateTime_DATE_GET_MINUTE(obj);
+ second = PyDateTime_DATE_GET_SECOND(obj);
+ microsecond = PyDateTime_DATE_GET_MICROSECOND(obj);
+
+ return Py_BuildValue("(llll)", hour, minute, second, microsecond);
+}
+
+static PyObject *
+test_PyDateTime_TIME_GET(PyObject *self, PyObject *obj)
+{
+ int hour, minute, second, microsecond;
+
+ hour = PyDateTime_TIME_GET_HOUR(obj);
+ minute = PyDateTime_TIME_GET_MINUTE(obj);
+ second = PyDateTime_TIME_GET_SECOND(obj);
+ microsecond = PyDateTime_TIME_GET_MICROSECOND(obj);
+
+ return Py_BuildValue("(llll)", hour, minute, second, microsecond);
+}
+
+static PyObject *
+test_PyDateTime_DELTA_GET(PyObject *self, PyObject *obj)
+{
+ int days, seconds, microseconds;
+
+ days = PyDateTime_DELTA_GET_DAYS(obj);
+ seconds = PyDateTime_DELTA_GET_SECONDS(obj);
+ microseconds = PyDateTime_DELTA_GET_MICROSECONDS(obj);
+
+ return Py_BuildValue("(lll)", days, seconds, microseconds);
+}
/* test_thread_state spawns a thread of its own, and that thread releases
* `thread_done` when it's finished. The driver code has to know when the
@@ -2633,12 +2738,10 @@ test_thread_state(PyObject *self, PyObject *args)
if (!PyCallable_Check(fn)) {
PyErr_Format(PyExc_TypeError, "'%s' object is not callable",
- fn->ob_type->tp_name);
+ Py_TYPE(fn)->tp_name);
return NULL;
}
- /* Ensure Python is set up for threading */
- PyEval_InitThreads();
thread_done = PyThread_allocate_lock();
if (thread_done == NULL)
return PyErr_NoMemory();
@@ -3323,6 +3426,26 @@ getbuffer_with_null_view(PyObject* self, PyObject *obj)
Py_RETURN_NONE;
}
+/* PyBuffer_SizeFromFormat() */
+static PyObject *
+test_PyBuffer_SizeFromFormat(PyObject *self, PyObject *args)
+{
+ const char *format;
+ Py_ssize_t result;
+
+ if (!PyArg_ParseTuple(args, "s:test_PyBuffer_SizeFromFormat",
+ &format)) {
+ return NULL;
+ }
+
+ result = PyBuffer_SizeFromFormat(format);
+ if (result == -1) {
+ return NULL;
+ }
+
+ return PyLong_FromSsize_t(result);
+}
+
/* Test that the fatal error from not having a current thread doesn't
cause an infinite loop. Run via Lib/test/test_capi.py */
static PyObject *
@@ -3441,8 +3564,8 @@ slot_tp_del(PyObject *self)
PyObject *error_type, *error_value, *error_traceback;
/* Temporarily resurrect the object. */
- assert(self->ob_refcnt == 0);
- self->ob_refcnt = 1;
+ assert(Py_REFCNT(self) == 0);
+ Py_SET_REFCNT(self, 1);
/* Save the current exception, if any. */
PyErr_Fetch(&error_type, &error_value, &error_traceback);
@@ -3464,31 +3587,26 @@ slot_tp_del(PyObject *self)
/* Undo the temporary resurrection; can't use DECREF here, it would
* cause a recursive call.
*/
- assert(self->ob_refcnt > 0);
- if (--self->ob_refcnt == 0)
- return; /* this is the normal path out */
+ assert(Py_REFCNT(self) > 0);
+ Py_SET_REFCNT(self, Py_REFCNT(self) - 1);
+ if (Py_REFCNT(self) == 0) {
+ /* this is the normal path out */
+ return;
+ }
/* __del__ resurrected it! Make it look like the original Py_DECREF
* never happened.
*/
{
- Py_ssize_t refcnt = self->ob_refcnt;
+ Py_ssize_t refcnt = Py_REFCNT(self);
_Py_NewReference(self);
- self->ob_refcnt = refcnt;
- }
- assert(!PyType_IS_GC(Py_TYPE(self)) || _PyObject_GC_IS_TRACKED(self));
- /* If Py_REF_DEBUG, _Py_NewReference bumped _Py_RefTotal, so
- * we need to undo that. */
- _Py_DEC_REFTOTAL;
- /* If Py_TRACE_REFS, _Py_NewReference re-added self to the object
- * chain, so no more to do there.
- * If COUNT_ALLOCS, the original decref bumped tp_frees, and
- * _Py_NewReference bumped tp_allocs: both of those need to be
- * undone.
- */
-#ifdef COUNT_ALLOCS
- --Py_TYPE(self)->tp_frees;
- --Py_TYPE(self)->tp_allocs;
+ Py_SET_REFCNT(self, refcnt);
+ }
+ assert(!PyType_IS_GC(Py_TYPE(self)) || PyObject_GC_IsTracked(self));
+ /* If Py_REF_DEBUG macro is defined, _Py_NewReference() increased
+ _Py_RefTotal, so we need to undo that. */
+#ifdef Py_REF_DEBUG
+ _Py_RefTotal--;
#endif
}
@@ -4071,8 +4189,6 @@ call_in_temporary_c_thread(PyObject *self, PyObject *callback)
test_c_thread_t test_c_thread;
long thread;
- PyEval_InitThreads();
-
test_c_thread.start_event = PyThread_allocate_lock();
test_c_thread.exit_event = PyThread_allocate_lock();
test_c_thread.callback = NULL;
@@ -4121,15 +4237,15 @@ static PyObject*
pymarshal_write_long_to_file(PyObject* self, PyObject *args)
{
long value;
- char *filename;
+ PyObject *filename;
int version;
FILE *fp;
- if (!PyArg_ParseTuple(args, "lsi:pymarshal_write_long_to_file",
+ if (!PyArg_ParseTuple(args, "lOi:pymarshal_write_long_to_file",
&value, &filename, &version))
return NULL;
- fp = fopen(filename, "wb");
+ fp = _Py_fopen_obj(filename, "wb");
if (fp == NULL) {
PyErr_SetFromErrno(PyExc_OSError);
return NULL;
@@ -4147,15 +4263,15 @@ static PyObject*
pymarshal_write_object_to_file(PyObject* self, PyObject *args)
{
PyObject *obj;
- char *filename;
+ PyObject *filename;
int version;
FILE *fp;
- if (!PyArg_ParseTuple(args, "Osi:pymarshal_write_object_to_file",
+ if (!PyArg_ParseTuple(args, "OOi:pymarshal_write_object_to_file",
&obj, &filename, &version))
return NULL;
- fp = fopen(filename, "wb");
+ fp = _Py_fopen_obj(filename, "wb");
if (fp == NULL) {
PyErr_SetFromErrno(PyExc_OSError);
return NULL;
@@ -4174,13 +4290,13 @@ pymarshal_read_short_from_file(PyObject* self, PyObject *args)
{
int value;
long pos;
- char *filename;
+ PyObject *filename;
FILE *fp;
- if (!PyArg_ParseTuple(args, "s:pymarshal_read_short_from_file", &filename))
+ if (!PyArg_ParseTuple(args, "O:pymarshal_read_short_from_file", &filename))
return NULL;
- fp = fopen(filename, "rb");
+ fp = _Py_fopen_obj(filename, "rb");
if (fp == NULL) {
PyErr_SetFromErrno(PyExc_OSError);
return NULL;
@@ -4199,13 +4315,13 @@ static PyObject*
pymarshal_read_long_from_file(PyObject* self, PyObject *args)
{
long value, pos;
- char *filename;
+ PyObject *filename;
FILE *fp;
- if (!PyArg_ParseTuple(args, "s:pymarshal_read_long_from_file", &filename))
+ if (!PyArg_ParseTuple(args, "O:pymarshal_read_long_from_file", &filename))
return NULL;
- fp = fopen(filename, "rb");
+ fp = _Py_fopen_obj(filename, "rb");
if (fp == NULL) {
PyErr_SetFromErrno(PyExc_OSError);
return NULL;
@@ -4225,13 +4341,13 @@ pymarshal_read_last_object_from_file(PyObject* self, PyObject *args)
{
PyObject *obj;
long pos;
- char *filename;
+ PyObject *filename;
FILE *fp;
- if (!PyArg_ParseTuple(args, "s:pymarshal_read_last_object_from_file", &filename))
+ if (!PyArg_ParseTuple(args, "O:pymarshal_read_last_object_from_file", &filename))
return NULL;
- fp = fopen(filename, "rb");
+ fp = _Py_fopen_obj(filename, "rb");
if (fp == NULL) {
PyErr_SetFromErrno(PyExc_OSError);
return NULL;
@@ -4249,13 +4365,13 @@ pymarshal_read_object_from_file(PyObject* self, PyObject *args)
{
PyObject *obj;
long pos;
- char *filename;
+ PyObject *filename;
FILE *fp;
- if (!PyArg_ParseTuple(args, "s:pymarshal_read_object_from_file", &filename))
+ if (!PyArg_ParseTuple(args, "O:pymarshal_read_object_from_file", &filename))
return NULL;
- fp = fopen(filename, "rb");
+ fp = _Py_fopen_obj(filename, "rb");
if (fp == NULL) {
PyErr_SetFromErrno(PyExc_OSError);
return NULL;
@@ -4423,15 +4539,6 @@ test_PyTime_AsMicroseconds(PyObject *self, PyObject *args)
return _PyTime_AsNanosecondsObject(ms);
}
-static PyObject*
-get_recursion_depth(PyObject *self, PyObject *args)
-{
- PyThreadState *tstate = PyThreadState_Get();
-
- /* subtract one to ignore the frame of the get_recursion_depth() call */
- return PyLong_FromLong(tstate->recursion_depth - 1);
-}
-
static PyObject*
pymem_buffer_overflow(PyObject *self, PyObject *args)
{
@@ -4518,7 +4625,7 @@ check_pyobject_uninitialized_is_freed(PyObject *self, PyObject *Py_UNUSED(args))
return NULL;
}
/* Initialize reference count to avoid early crash in ceval or GC */
- Py_REFCNT(op) = 1;
+ Py_SET_REFCNT(op, 1);
/* object fields like ob_type are uninitialized! */
return test_pyobject_is_freed("check_pyobject_uninitialized_is_freed", op);
}
@@ -4533,7 +4640,7 @@ check_pyobject_forbidden_bytes_is_freed(PyObject *self, PyObject *Py_UNUSED(args
return NULL;
}
/* Initialize reference count to avoid early crash in ceval or GC */
- Py_REFCNT(op) = 1;
+ Py_SET_REFCNT(op, 1);
/* ob_type field is after the memory block: part of "forbidden bytes"
when using debug hooks on memory allocators! */
return test_pyobject_is_freed("check_pyobject_forbidden_bytes_is_freed", op);
@@ -4549,7 +4656,7 @@ check_pyobject_freed_is_freed(PyObject *self, PyObject *Py_UNUSED(args))
}
Py_TYPE(op)->tp_dealloc(op);
/* Reset reference count to avoid early crash in ceval or GC */
- Py_REFCNT(op) = 1;
+ Py_SET_REFCNT(op, 1);
/* object memory is freed! */
return test_pyobject_is_freed("check_pyobject_freed_is_freed", op);
}
@@ -4654,8 +4761,8 @@ dict_get_version(PyObject *self, PyObject *args)
version = dict->ma_version_tag;
- Py_BUILD_ASSERT(sizeof(unsigned PY_LONG_LONG) >= sizeof(version));
- return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG)version);
+ Py_BUILD_ASSERT(sizeof(unsigned long long) >= sizeof(version));
+ return PyLong_FromUnsignedLongLong((unsigned long long)version);
}
@@ -4742,7 +4849,7 @@ test_pyobject_fastcalldict(PyObject *self, PyObject *args)
return NULL;
}
- return _PyObject_FastCallDict(func, stack, nargs, kwargs);
+ return PyObject_VectorcallDict(func, stack, nargs, kwargs);
}
@@ -4776,7 +4883,7 @@ test_pyobject_vectorcall(PyObject *self, PyObject *args)
PyErr_SetString(PyExc_TypeError, "kwnames must be None or a tuple");
return NULL;
}
- return _PyObject_Vectorcall(func, stack, nargs, kwnames);
+ return PyObject_Vectorcall(func, stack, nargs, kwnames);
}
@@ -4919,7 +5026,7 @@ bad_get(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
return NULL;
}
- PyObject *res = PyObject_CallObject(cls, NULL);
+ PyObject *res = _PyObject_CallNoArg(cls);
if (res == NULL) {
return NULL;
}
@@ -5031,7 +5138,7 @@ negative_refcount(PyObject *self, PyObject *Py_UNUSED(args))
}
assert(Py_REFCNT(obj) == 1);
- Py_REFCNT(obj) = 0;
+ Py_SET_REFCNT(obj, 0);
/* Py_DECREF() must call _Py_NegativeRefcount() and abort Python */
Py_DECREF(obj);
@@ -5065,6 +5172,96 @@ test_write_unraisable_exc(PyObject *self, PyObject *args)
}
+static PyObject *
+sequence_getitem(PyObject *self, PyObject *args)
+{
+ PyObject *seq;
+ Py_ssize_t i;
+ if (!PyArg_ParseTuple(args, "On", &seq, &i)) {
+ return NULL;
+ }
+ return PySequence_GetItem(seq, i);
+}
+
+
+/* Functions for testing C calling conventions (METH_*) are named meth_*,
+ * e.g. "meth_varargs" for METH_VARARGS.
+ *
+ * They all return a tuple of their C-level arguments, with None instead
+ * of NULL and Python tuples instead of C arrays.
+ */
+
+
+static PyObject*
+_null_to_none(PyObject* obj)
+{
+ if (obj == NULL) {
+ Py_RETURN_NONE;
+ }
+ Py_INCREF(obj);
+ return obj;
+}
+
+static PyObject*
+meth_varargs(PyObject* self, PyObject* args)
+{
+ return Py_BuildValue("NO", _null_to_none(self), args);
+}
+
+static PyObject*
+meth_varargs_keywords(PyObject* self, PyObject* args, PyObject* kwargs)
+{
+ return Py_BuildValue("NON", _null_to_none(self), args, _null_to_none(kwargs));
+}
+
+static PyObject*
+meth_o(PyObject* self, PyObject* obj)
+{
+ return Py_BuildValue("NO", _null_to_none(self), obj);
+}
+
+static PyObject*
+meth_noargs(PyObject* self, PyObject* ignored)
+{
+ return _null_to_none(self);
+}
+
+static PyObject*
+_fastcall_to_tuple(PyObject* const* args, Py_ssize_t nargs)
+{
+ PyObject *tuple = PyTuple_New(nargs);
+ if (tuple == NULL) {
+ return NULL;
+ }
+ for (Py_ssize_t i=0; i < nargs; i++) {
+ Py_INCREF(args[i]);
+ PyTuple_SET_ITEM(tuple, i, args[i]);
+ }
+ return tuple;
+}
+
+static PyObject*
+meth_fastcall(PyObject* self, PyObject* const* args, Py_ssize_t nargs)
+{
+ return Py_BuildValue(
+ "NN", _null_to_none(self), _fastcall_to_tuple(args, nargs)
+ );
+}
+
+static PyObject*
+meth_fastcall_keywords(PyObject* self, PyObject* const* args,
+ Py_ssize_t nargs, PyObject* kwargs)
+{
+ PyObject *pyargs = _fastcall_to_tuple(args, nargs);
+ if (pyargs == NULL) {
+ return NULL;
+ }
+ PyObject *pykwargs = PyObject_Vectorcall((PyObject*)&PyDict_Type,
+ args + nargs, 0, kwargs);
+ return Py_BuildValue("NNN", _null_to_none(self), pyargs, pykwargs);
+}
+
+
static PyObject*
pynumber_tobase(PyObject *module, PyObject *args)
{
@@ -5103,6 +5300,10 @@ static PyMethodDef TestMethods[] = {
{"get_delta_fromdsu", get_delta_fromdsu, METH_VARARGS},
{"get_date_fromtimestamp", get_date_fromtimestamp, METH_VARARGS},
{"get_datetime_fromtimestamp", get_datetime_fromtimestamp, METH_VARARGS},
+ {"PyDateTime_GET", test_PyDateTime_GET, METH_O},
+ {"PyDateTime_DATE_GET", test_PyDateTime_DATE_GET, METH_O},
+ {"PyDateTime_TIME_GET", test_PyDateTime_TIME_GET, METH_O},
+ {"PyDateTime_DELTA_GET", test_PyDateTime_DELTA_GET, METH_O},
{"test_list_api", test_list_api, METH_NOARGS},
{"test_dict_iteration", test_dict_iteration, METH_NOARGS},
{"dict_getitem_knownhash", dict_getitem_knownhash, METH_VARARGS},
@@ -5138,6 +5339,7 @@ static PyMethodDef TestMethods[] = {
{"test_pep3118_obsolete_write_locks", (PyCFunction)test_pep3118_obsolete_write_locks, METH_NOARGS},
#endif
{"getbuffer_with_null_view", getbuffer_with_null_view, METH_O},
+ {"PyBuffer_SizeFromFormat", test_PyBuffer_SizeFromFormat, METH_VARARGS},
{"test_buildvalue_N", test_buildvalue_N, METH_NOARGS},
{"test_buildvalue_issue38913", test_buildvalue_issue38913, METH_NOARGS},
{"get_args", get_args, METH_VARARGS},
@@ -5202,6 +5404,8 @@ static PyMethodDef TestMethods[] = {
{"unicode_aswidechar", unicode_aswidechar, METH_VARARGS},
{"unicode_aswidecharstring",unicode_aswidecharstring, METH_VARARGS},
{"unicode_asucs4", unicode_asucs4, METH_VARARGS},
+ {"unicode_asutf8", unicode_asutf8, METH_VARARGS},
+ {"unicode_asutf8andsize", unicode_asutf8andsize, METH_VARARGS},
{"unicode_findchar", unicode_findchar, METH_VARARGS},
{"unicode_copycharacters", unicode_copycharacters, METH_VARARGS},
{"unicode_encodedecimal", unicode_encodedecimal, METH_VARARGS},
@@ -5289,7 +5493,6 @@ static PyMethodDef TestMethods[] = {
#endif
{"PyTime_AsMilliseconds", test_PyTime_AsMilliseconds, METH_VARARGS},
{"PyTime_AsMicroseconds", test_PyTime_AsMicroseconds, METH_VARARGS},
- {"get_recursion_depth", get_recursion_depth, METH_NOARGS},
{"pymem_buffer_overflow", pymem_buffer_overflow, METH_NOARGS},
{"pymem_api_misuse", pymem_api_misuse, METH_NOARGS},
{"pymem_malloc_without_gil", pymem_malloc_without_gil, METH_NOARGS},
@@ -5324,6 +5527,13 @@ static PyMethodDef TestMethods[] = {
{"negative_refcount", negative_refcount, METH_NOARGS},
#endif
{"write_unraisable_exc", test_write_unraisable_exc, METH_VARARGS},
+ {"sequence_getitem", sequence_getitem, METH_VARARGS},
+ {"meth_varargs", meth_varargs, METH_VARARGS},
+ {"meth_varargs_keywords", (PyCFunction)(void(*)(void))meth_varargs_keywords, METH_VARARGS|METH_KEYWORDS},
+ {"meth_o", meth_o, METH_O},
+ {"meth_noargs", meth_noargs, METH_NOARGS},
+ {"meth_fastcall", (PyCFunction)(void(*)(void))meth_fastcall, METH_FASTCALL},
+ {"meth_fastcall_keywords", (PyCFunction)(void(*)(void))meth_fastcall_keywords, METH_FASTCALL|METH_KEYWORDS},
{"pynumber_tobase", pynumber_tobase, METH_VARARGS},
{NULL, NULL} /* sentinel */
};
@@ -5943,7 +6153,7 @@ static PyTypeObject MethodDescriptorBase_Type = {
.tp_call = PyVectorcall_Call,
.tp_vectorcall_offset = offsetof(MethodDescriptorObject, vectorcall),
.tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE |
- Py_TPFLAGS_METHOD_DESCRIPTOR | _Py_TPFLAGS_HAVE_VECTORCALL,
+ Py_TPFLAGS_METHOD_DESCRIPTOR | Py_TPFLAGS_HAVE_VECTORCALL,
.tp_descr_get = func_descr_get,
};
@@ -5982,7 +6192,7 @@ static PyTypeObject MethodDescriptor2_Type = {
.tp_new = MethodDescriptor2_new,
.tp_call = PyVectorcall_Call,
.tp_vectorcall_offset = offsetof(MethodDescriptor2Object, vectorcall),
- .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | _Py_TPFLAGS_HAVE_VECTORCALL,
+ .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_VECTORCALL,
};
PyDoc_STRVAR(heapgctype__doc__,
@@ -6100,6 +6310,47 @@ static PyType_Spec HeapCTypeSubclass_spec = {
HeapCTypeSubclass_slots
};
+PyDoc_STRVAR(heapctypewithbuffer__doc__,
+"Heap type with buffer support.\n\n"
+"The buffer is set to [b'1', b'2', b'3', b'4']");
+
+typedef struct {
+ HeapCTypeObject base;
+ char buffer[4];
+} HeapCTypeWithBufferObject;
+
+static int
+heapctypewithbuffer_getbuffer(HeapCTypeWithBufferObject *self, Py_buffer *view, int flags)
+{
+ self->buffer[0] = '1';
+ self->buffer[1] = '2';
+ self->buffer[2] = '3';
+ self->buffer[3] = '4';
+ return PyBuffer_FillInfo(
+ view, (PyObject*)self, (void *)self->buffer, 4, 1, flags);
+}
+
+static void
+heapctypewithbuffer_releasebuffer(HeapCTypeWithBufferObject *self, Py_buffer *view)
+{
+ assert(view->obj == (void*) self);
+}
+
+static PyType_Slot HeapCTypeWithBuffer_slots[] = {
+ {Py_bf_getbuffer, heapctypewithbuffer_getbuffer},
+ {Py_bf_releasebuffer, heapctypewithbuffer_releasebuffer},
+ {Py_tp_doc, (char*)heapctypewithbuffer__doc__},
+ {0, 0},
+};
+
+static PyType_Spec HeapCTypeWithBuffer_spec = {
+ "_testcapi.HeapCTypeWithBuffer",
+ sizeof(HeapCTypeWithBufferObject),
+ 0,
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+ HeapCTypeWithBuffer_slots
+};
+
PyDoc_STRVAR(heapctypesubclasswithfinalizer__doc__,
"Subclass of HeapCType with a finalizer that reassigns __class__.\n\n"
"__class__ is set to plain HeapCTypeSubclass during finalization.\n"
@@ -6177,6 +6428,106 @@ static PyType_Spec HeapCTypeSubclassWithFinalizer_spec = {
HeapCTypeSubclassWithFinalizer_slots
};
+typedef struct {
+ PyObject_HEAD
+ PyObject *dict;
+} HeapCTypeWithDictObject;
+
+static void
+heapctypewithdict_dealloc(HeapCTypeWithDictObject* self)
+{
+
+ PyTypeObject *tp = Py_TYPE(self);
+ Py_XDECREF(self->dict);
+ PyObject_DEL(self);
+ Py_DECREF(tp);
+}
+
+static PyGetSetDef heapctypewithdict_getsetlist[] = {
+ {"__dict__", PyObject_GenericGetDict, PyObject_GenericSetDict},
+ {NULL} /* Sentinel */
+};
+
+static struct PyMemberDef heapctypewithdict_members[] = {
+ {"dictobj", T_OBJECT, offsetof(HeapCTypeWithDictObject, dict)},
+ {"__dictoffset__", T_PYSSIZET, offsetof(HeapCTypeWithDictObject, dict), READONLY},
+ {NULL} /* Sentinel */
+};
+
+static PyType_Slot HeapCTypeWithDict_slots[] = {
+ {Py_tp_members, heapctypewithdict_members},
+ {Py_tp_getset, heapctypewithdict_getsetlist},
+ {Py_tp_dealloc, heapctypewithdict_dealloc},
+ {0, 0},
+};
+
+static PyType_Spec HeapCTypeWithDict_spec = {
+ "_testcapi.HeapCTypeWithDict",
+ sizeof(HeapCTypeWithDictObject),
+ 0,
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+ HeapCTypeWithDict_slots
+};
+
+static struct PyMemberDef heapctypewithnegativedict_members[] = {
+ {"dictobj", T_OBJECT, offsetof(HeapCTypeWithDictObject, dict)},
+ {"__dictoffset__", T_PYSSIZET, -(Py_ssize_t)sizeof(void*), READONLY},
+ {NULL} /* Sentinel */
+};
+
+static PyType_Slot HeapCTypeWithNegativeDict_slots[] = {
+ {Py_tp_members, heapctypewithnegativedict_members},
+ {Py_tp_getset, heapctypewithdict_getsetlist},
+ {Py_tp_dealloc, heapctypewithdict_dealloc},
+ {0, 0},
+};
+
+static PyType_Spec HeapCTypeWithNegativeDict_spec = {
+ "_testcapi.HeapCTypeWithNegativeDict",
+ sizeof(HeapCTypeWithDictObject),
+ 0,
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+ HeapCTypeWithNegativeDict_slots
+};
+
+typedef struct {
+ PyObject_HEAD
+ PyObject *weakreflist;
+} HeapCTypeWithWeakrefObject;
+
+static struct PyMemberDef heapctypewithweakref_members[] = {
+ {"weakreflist", T_OBJECT, offsetof(HeapCTypeWithWeakrefObject, weakreflist)},
+ {"__weaklistoffset__", T_PYSSIZET,
+ offsetof(HeapCTypeWithWeakrefObject, weakreflist), READONLY},
+ {NULL} /* Sentinel */
+};
+
+static void
+heapctypewithweakref_dealloc(HeapCTypeWithWeakrefObject* self)
+{
+
+ PyTypeObject *tp = Py_TYPE(self);
+ if (self->weakreflist != NULL)
+ PyObject_ClearWeakRefs((PyObject *) self);
+ Py_XDECREF(self->weakreflist);
+ PyObject_DEL(self);
+ Py_DECREF(tp);
+}
+
+static PyType_Slot HeapCTypeWithWeakref_slots[] = {
+ {Py_tp_members, heapctypewithweakref_members},
+ {Py_tp_dealloc, heapctypewithweakref_dealloc},
+ {0, 0},
+};
+
+static PyType_Spec HeapCTypeWithWeakref_spec = {
+ "_testcapi.HeapCTypeWithWeakref",
+ sizeof(HeapCTypeWithWeakrefObject),
+ 0,
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+ HeapCTypeWithWeakref_slots
+};
+
PyDoc_STRVAR(heapctypesetattr__doc__,
"A heap type without GC, but with overridden __setattr__.\n\n"
"The 'value' attribute is set to 10 in __init__ and updated via attribute setting.");
@@ -6251,6 +6602,120 @@ static PyType_Spec HeapCTypeSetattr_spec = {
HeapCTypeSetattr_slots
};
+static PyMethodDef meth_instance_methods[] = {
+ {"meth_varargs", meth_varargs, METH_VARARGS},
+ {"meth_varargs_keywords", (PyCFunction)(void(*)(void))meth_varargs_keywords, METH_VARARGS|METH_KEYWORDS},
+ {"meth_o", meth_o, METH_O},
+ {"meth_noargs", meth_noargs, METH_NOARGS},
+ {"meth_fastcall", (PyCFunction)(void(*)(void))meth_fastcall, METH_FASTCALL},
+ {"meth_fastcall_keywords", (PyCFunction)(void(*)(void))meth_fastcall_keywords, METH_FASTCALL|METH_KEYWORDS},
+ {NULL, NULL} /* sentinel */
+};
+
+
+static PyTypeObject MethInstance_Type = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "MethInstance",
+ sizeof(PyObject),
+ .tp_new = PyType_GenericNew,
+ .tp_flags = Py_TPFLAGS_DEFAULT,
+ .tp_methods = meth_instance_methods,
+ .tp_doc = (char*)PyDoc_STR(
+ "Class with normal (instance) methods to test calling conventions"),
+};
+
+static PyMethodDef meth_class_methods[] = {
+ {"meth_varargs", meth_varargs, METH_VARARGS|METH_CLASS},
+ {"meth_varargs_keywords", (PyCFunction)(void(*)(void))meth_varargs_keywords, METH_VARARGS|METH_KEYWORDS|METH_CLASS},
+ {"meth_o", meth_o, METH_O|METH_CLASS},
+ {"meth_noargs", meth_noargs, METH_NOARGS|METH_CLASS},
+ {"meth_fastcall", (PyCFunction)(void(*)(void))meth_fastcall, METH_FASTCALL|METH_CLASS},
+ {"meth_fastcall_keywords", (PyCFunction)(void(*)(void))meth_fastcall_keywords, METH_FASTCALL|METH_KEYWORDS|METH_CLASS},
+ {NULL, NULL} /* sentinel */
+};
+
+
+static PyTypeObject MethClass_Type = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "MethClass",
+ sizeof(PyObject),
+ .tp_new = PyType_GenericNew,
+ .tp_flags = Py_TPFLAGS_DEFAULT,
+ .tp_methods = meth_class_methods,
+ .tp_doc = PyDoc_STR(
+ "Class with class methods to test calling conventions"),
+};
+
+static PyMethodDef meth_static_methods[] = {
+ {"meth_varargs", meth_varargs, METH_VARARGS|METH_STATIC},
+ {"meth_varargs_keywords", (PyCFunction)(void(*)(void))meth_varargs_keywords, METH_VARARGS|METH_KEYWORDS|METH_STATIC},
+ {"meth_o", meth_o, METH_O|METH_STATIC},
+ {"meth_noargs", meth_noargs, METH_NOARGS|METH_STATIC},
+ {"meth_fastcall", (PyCFunction)(void(*)(void))meth_fastcall, METH_FASTCALL|METH_STATIC},
+ {"meth_fastcall_keywords", (PyCFunction)(void(*)(void))meth_fastcall_keywords, METH_FASTCALL|METH_KEYWORDS|METH_STATIC},
+ {NULL, NULL} /* sentinel */
+};
+
+
+static PyTypeObject MethStatic_Type = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "MethStatic",
+ sizeof(PyObject),
+ .tp_new = PyType_GenericNew,
+ .tp_flags = Py_TPFLAGS_DEFAULT,
+ .tp_methods = meth_static_methods,
+ .tp_doc = PyDoc_STR(
+ "Class with static methods to test calling conventions"),
+};
+
+/* ContainerNoGC -- a simple container without GC methods */
+
+typedef struct {
+ PyObject_HEAD
+ PyObject *value;
+} ContainerNoGCobject;
+
+static PyObject *
+ContainerNoGC_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
+{
+ PyObject *value;
+ char *names[] = {"value", NULL};
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O", names, &value)) {
+ return NULL;
+ }
+ PyObject *self = type->tp_alloc(type, 0);
+ if (self == NULL) {
+ return NULL;
+ }
+ Py_INCREF(value);
+ ((ContainerNoGCobject *)self)->value = value;
+ return self;
+}
+
+static void
+ContainerNoGC_dealloc(ContainerNoGCobject *self)
+{
+ Py_DECREF(self->value);
+ Py_TYPE(self)->tp_free((PyObject *)self);
+}
+
+static PyMemberDef ContainerNoGC_members[] = {
+ {"value", T_OBJECT, offsetof(ContainerNoGCobject, value), READONLY,
+ PyDoc_STR("a container value for test purposes")},
+ {0}
+};
+
+static PyTypeObject ContainerNoGC_type = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "_testcapi.ContainerNoGC",
+ sizeof(ContainerNoGCobject),
+ .tp_dealloc = (destructor)ContainerNoGC_dealloc,
+ .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+ .tp_members = ContainerNoGC_members,
+ .tp_new = ContainerNoGC_new,
+};
+
+
static struct PyModuleDef _testcapimodule = {
PyModuleDef_HEAD_INIT,
"_testcapi",
@@ -6275,9 +6740,9 @@ PyInit__testcapi(void)
if (m == NULL)
return NULL;
- Py_TYPE(&_HashInheritanceTester_Type)=&PyType_Type;
+ Py_SET_TYPE(&_HashInheritanceTester_Type, &PyType_Type);
- Py_TYPE(&test_structmembersType)=&PyType_Type;
+ Py_SET_TYPE(&test_structmembersType, &PyType_Type);
Py_INCREF(&test_structmembersType);
/* don't use a name starting with "test", since we don't want
test_capi to automatically call this */
@@ -6303,10 +6768,6 @@ PyInit__testcapi(void)
Py_INCREF(&MyList_Type);
PyModule_AddObject(m, "MyList", (PyObject *)&MyList_Type);
- /* bpo-37250: old Cython code sets tp_print to 0, we check that
- * this doesn't break anything. */
- MyList_Type.tp_print = 0;
-
if (PyType_Ready(&MethodDescriptorBase_Type) < 0)
return NULL;
Py_INCREF(&MethodDescriptorBase_Type);
@@ -6340,6 +6801,21 @@ PyInit__testcapi(void)
Py_INCREF(&Generic_Type);
PyModule_AddObject(m, "Generic", (PyObject *)&Generic_Type);
+ if (PyType_Ready(&MethInstance_Type) < 0)
+ return NULL;
+ Py_INCREF(&MethInstance_Type);
+ PyModule_AddObject(m, "MethInstance", (PyObject *)&MethInstance_Type);
+
+ if (PyType_Ready(&MethClass_Type) < 0)
+ return NULL;
+ Py_INCREF(&MethClass_Type);
+ PyModule_AddObject(m, "MethClass", (PyObject *)&MethClass_Type);
+
+ if (PyType_Ready(&MethStatic_Type) < 0)
+ return NULL;
+ Py_INCREF(&MethStatic_Type);
+ PyModule_AddObject(m, "MethStatic", (PyObject *)&MethStatic_Type);
+
PyRecursingInfinitelyError_Type.tp_base = (PyTypeObject *)PyExc_Exception;
if (PyType_Ready(&PyRecursingInfinitelyError_Type) < 0) {
return NULL;
@@ -6364,12 +6840,11 @@ PyInit__testcapi(void)
PyModule_AddObject(m, "FLT_MIN", PyFloat_FromDouble(FLT_MIN));
PyModule_AddObject(m, "DBL_MAX", PyFloat_FromDouble(DBL_MAX));
PyModule_AddObject(m, "DBL_MIN", PyFloat_FromDouble(DBL_MIN));
- PyModule_AddObject(m, "LLONG_MAX", PyLong_FromLongLong(PY_LLONG_MAX));
- PyModule_AddObject(m, "LLONG_MIN", PyLong_FromLongLong(PY_LLONG_MIN));
- PyModule_AddObject(m, "ULLONG_MAX", PyLong_FromUnsignedLongLong(PY_ULLONG_MAX));
+ PyModule_AddObject(m, "LLONG_MAX", PyLong_FromLongLong(LLONG_MAX));
+ PyModule_AddObject(m, "LLONG_MIN", PyLong_FromLongLong(LLONG_MIN));
+ PyModule_AddObject(m, "ULLONG_MAX", PyLong_FromUnsignedLongLong(ULLONG_MAX));
PyModule_AddObject(m, "PY_SSIZE_T_MAX", PyLong_FromSsize_t(PY_SSIZE_T_MAX));
PyModule_AddObject(m, "PY_SSIZE_T_MIN", PyLong_FromSsize_t(PY_SSIZE_T_MIN));
- PyModule_AddObject(m, "SIZEOF_PYGC_HEAD", PyLong_FromSsize_t(sizeof(PyGC_Head)));
PyModule_AddObject(m, "SIZEOF_TIME_T", PyLong_FromSsize_t(sizeof(time_t)));
Py_INCREF(&PyInstanceMethod_Type);
PyModule_AddObject(m, "instancemethod", (PyObject *)&PyInstanceMethod_Type);
@@ -6409,6 +6884,30 @@ PyInit__testcapi(void)
Py_DECREF(subclass_bases);
PyModule_AddObject(m, "HeapCTypeSubclass", HeapCTypeSubclass);
+ PyObject *HeapCTypeWithDict = PyType_FromSpec(&HeapCTypeWithDict_spec);
+ if (HeapCTypeWithDict == NULL) {
+ return NULL;
+ }
+ PyModule_AddObject(m, "HeapCTypeWithDict", HeapCTypeWithDict);
+
+ PyObject *HeapCTypeWithNegativeDict = PyType_FromSpec(&HeapCTypeWithNegativeDict_spec);
+ if (HeapCTypeWithNegativeDict == NULL) {
+ return NULL;
+ }
+ PyModule_AddObject(m, "HeapCTypeWithNegativeDict", HeapCTypeWithNegativeDict);
+
+ PyObject *HeapCTypeWithWeakref = PyType_FromSpec(&HeapCTypeWithWeakref_spec);
+ if (HeapCTypeWithWeakref == NULL) {
+ return NULL;
+ }
+ PyModule_AddObject(m, "HeapCTypeWithWeakref", HeapCTypeWithWeakref);
+
+ PyObject *HeapCTypeWithBuffer = PyType_FromSpec(&HeapCTypeWithBuffer_spec);
+ if (HeapCTypeWithBuffer == NULL) {
+ return NULL;
+ }
+ PyModule_AddObject(m, "HeapCTypeWithBuffer", HeapCTypeWithBuffer);
+
PyObject *HeapCTypeSetattr = PyType_FromSpec(&HeapCTypeSetattr_spec);
if (HeapCTypeSetattr == NULL) {
return NULL;
@@ -6427,6 +6926,14 @@ PyInit__testcapi(void)
Py_DECREF(subclass_with_finalizer_bases);
PyModule_AddObject(m, "HeapCTypeSubclassWithFinalizer", HeapCTypeSubclassWithFinalizer);
+ if (PyType_Ready(&ContainerNoGC_type) < 0) {
+ return NULL;
+ }
+ Py_INCREF(&ContainerNoGC_type);
+ if (PyModule_AddObject(m, "ContainerNoGC",
+ (PyObject *) &ContainerNoGC_type) < 0)
+ return NULL;
+
PyState_AddModule(m, &_testcapimodule);
return m;
}
diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c
index 3a931cae..4445d2e5 100644
--- a/Modules/_testinternalcapi.c
+++ b/Modules/_testinternalcapi.c
@@ -6,10 +6,16 @@
# error "Py_BUILD_CORE_BUILTIN or Py_BUILD_CORE_MODULE must be defined"
#endif
+/* Always enable assertions */
+#undef NDEBUG
+
#define PY_SSIZE_T_CLEAN
#include "Python.h"
-#include "pycore_initconfig.h"
+#include "pycore_byteswap.h" // _Py_bswap32()
+#include "pycore_initconfig.h" // _Py_GetConfigsAsDict()
+#include "pycore_hashtable.h" // _Py_hashtable_new()
+#include "pycore_gc.h" // PyGC_Head
#ifdef MS_WINDOWS
@@ -62,8 +68,139 @@ get_configs(PyObject *self, PyObject *Py_UNUSED(args))
}
+static PyObject*
+get_recursion_depth(PyObject *self, PyObject *Py_UNUSED(args))
+{
+ PyThreadState *tstate = PyThreadState_Get();
+
+ /* subtract one to ignore the frame of the get_recursion_depth() call */
+ return PyLong_FromLong(tstate->recursion_depth - 1);
+}
+
+
+static PyObject*
+test_bswap(PyObject *self, PyObject *Py_UNUSED(args))
+{
+ uint16_t u16 = _Py_bswap16(UINT16_C(0x3412));
+ if (u16 != UINT16_C(0x1234)) {
+ PyErr_Format(PyExc_AssertionError,
+ "_Py_bswap16(0x3412) returns %u", u16);
+ return NULL;
+ }
+
+ uint32_t u32 = _Py_bswap32(UINT32_C(0x78563412));
+ if (u32 != UINT32_C(0x12345678)) {
+ PyErr_Format(PyExc_AssertionError,
+ "_Py_bswap32(0x78563412) returns %lu", u32);
+ return NULL;
+ }
+
+ uint64_t u64 = _Py_bswap64(UINT64_C(0xEFCDAB9078563412));
+ if (u64 != UINT64_C(0x1234567890ABCDEF)) {
+ PyErr_Format(PyExc_AssertionError,
+ "_Py_bswap64(0xEFCDAB9078563412) returns %llu", u64);
+ return NULL;
+ }
+
+ Py_RETURN_NONE;
+}
+
+
+#define TO_PTR(ch) ((void*)(uintptr_t)ch)
+#define FROM_PTR(ptr) ((uintptr_t)ptr)
+#define VALUE(key) (1 + ((int)(key) - 'a'))
+
+static Py_uhash_t
+hash_char(const void *key)
+{
+ char ch = (char)FROM_PTR(key);
+ return ch;
+}
+
+
+static int
+hashtable_cb(_Py_hashtable_t *table,
+ const void *key_ptr, const void *value_ptr,
+ void *user_data)
+{
+ int *count = (int *)user_data;
+ char key = (char)FROM_PTR(key_ptr);
+ int value = (int)FROM_PTR(value_ptr);
+ assert(value == VALUE(key));
+ *count += 1;
+ return 0;
+}
+
+
+static PyObject*
+test_hashtable(PyObject *self, PyObject *Py_UNUSED(args))
+{
+ _Py_hashtable_t *table = _Py_hashtable_new(hash_char,
+ _Py_hashtable_compare_direct);
+ if (table == NULL) {
+ return PyErr_NoMemory();
+ }
+
+ // Using a newly allocated table must not crash
+ assert(table->nentries == 0);
+ assert(table->nbuckets > 0);
+ assert(_Py_hashtable_get(table, TO_PTR('x')) == NULL);
+
+ // Test _Py_hashtable_set()
+ char key;
+ for (key='a'; key <= 'z'; key++) {
+ int value = VALUE(key);
+ if (_Py_hashtable_set(table, TO_PTR(key), TO_PTR(value)) < 0) {
+ _Py_hashtable_destroy(table);
+ return PyErr_NoMemory();
+ }
+ }
+ assert(table->nentries == 26);
+ assert(table->nbuckets > table->nentries);
+
+ // Test _Py_hashtable_get_entry()
+ for (key='a'; key <= 'z'; key++) {
+ _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry(table, TO_PTR(key));
+ assert(entry != NULL);
+ assert(entry->key == TO_PTR(key));
+ assert(entry->value == TO_PTR(VALUE(key)));
+ }
+
+ // Test _Py_hashtable_get()
+ for (key='a'; key <= 'z'; key++) {
+ void *value_ptr = _Py_hashtable_get(table, TO_PTR(key));
+ assert((int)FROM_PTR(value_ptr) == VALUE(key));
+ }
+
+ // Test _Py_hashtable_steal()
+ key = 'p';
+ void *value_ptr = _Py_hashtable_steal(table, TO_PTR(key));
+ assert((int)FROM_PTR(value_ptr) == VALUE(key));
+ assert(table->nentries == 25);
+ assert(_Py_hashtable_get_entry(table, TO_PTR(key)) == NULL);
+
+ // Test _Py_hashtable_foreach()
+ int count = 0;
+ int res = _Py_hashtable_foreach(table, hashtable_cb, &count);
+ assert(res == 0);
+ assert(count == 25);
+
+ // Test _Py_hashtable_clear()
+ _Py_hashtable_clear(table);
+ assert(table->nentries == 0);
+ assert(table->nbuckets > 0);
+ assert(_Py_hashtable_get(table, TO_PTR('x')) == NULL);
+
+ _Py_hashtable_destroy(table);
+ Py_RETURN_NONE;
+}
+
+
static PyMethodDef TestMethods[] = {
{"get_configs", get_configs, METH_NOARGS},
+ {"get_recursion_depth", get_recursion_depth, METH_NOARGS},
+ {"test_bswap", test_bswap, METH_NOARGS},
+ {"test_hashtable", test_hashtable, METH_NOARGS},
{NULL, NULL} /* sentinel */
};
@@ -84,5 +221,19 @@ static struct PyModuleDef _testcapimodule = {
PyMODINIT_FUNC
PyInit__testinternalcapi(void)
{
- return PyModule_Create(&_testcapimodule);
+ PyObject *module = PyModule_Create(&_testcapimodule);
+ if (module == NULL) {
+ return NULL;
+ }
+
+ if (PyModule_AddObject(module, "SIZEOF_PYGC_HEAD",
+ PyLong_FromSsize_t(sizeof(PyGC_Head))) < 0) {
+ goto error;
+ }
+
+ return module;
+
+error:
+ Py_DECREF(module);
+ return NULL;
}
diff --git a/Modules/_testmultiphase.c b/Modules/_testmultiphase.c
index 4933abba..d69ae628 100644
--- a/Modules/_testmultiphase.c
+++ b/Modules/_testmultiphase.c
@@ -4,6 +4,19 @@
#include "Python.h"
+/* State for testing module state access from methods */
+
+typedef struct {
+ int counter;
+} meth_state;
+
+/*[clinic input]
+module _testmultiphase
+
+class _testmultiphase.StateAccessType "StateAccessTypeObject *" "!StateAccessType"
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=bab9f2fe3bd312ff]*/
+
/* Example objects */
typedef struct {
PyObject_HEAD
@@ -14,6 +27,10 @@ typedef struct {
PyObject *integer;
} testmultiphase_state;
+typedef struct {
+ PyObject_HEAD
+} StateAccessTypeObject;
+
/* Example methods */
static int
@@ -42,6 +59,7 @@ Example_demo(ExampleObject *self, PyObject *args)
Py_RETURN_NONE;
}
+#include "clinic/_testmultiphase.c.h"
static PyMethodDef Example_methods[] = {
{"demo", (PyCFunction)Example_demo, METH_VARARGS,
@@ -102,6 +120,150 @@ static PyType_Spec Example_Type_spec = {
Example_Type_slots
};
+
+/*[clinic input]
+_testmultiphase.StateAccessType.get_defining_module
+
+ cls: defining_class
+
+Return the module of the defining class.
+[clinic start generated code]*/
+
+static PyObject *
+_testmultiphase_StateAccessType_get_defining_module_impl(StateAccessTypeObject *self,
+ PyTypeObject *cls)
+/*[clinic end generated code: output=ba2a14284a5d0921 input=946149f91cf72c0d]*/
+{
+ PyObject *retval;
+ retval = PyType_GetModule(cls);
+ if (retval == NULL) {
+ return NULL;
+ }
+ Py_INCREF(retval);
+ return retval;
+}
+
+/*[clinic input]
+_testmultiphase.StateAccessType.increment_count_clinic
+
+ cls: defining_class
+ /
+ n: int = 1
+ *
+ twice: bool = False
+
+Add 'n' from the module-state counter.
+
+Pass 'twice' to double that amount.
+
+This tests Argument Clinic support for defining_class.
+[clinic start generated code]*/
+
+static PyObject *
+_testmultiphase_StateAccessType_increment_count_clinic_impl(StateAccessTypeObject *self,
+ PyTypeObject *cls,
+ int n, int twice)
+/*[clinic end generated code: output=3b34f86bc5473204 input=551d482e1fe0b8f5]*/
+{
+ meth_state *m_state = PyType_GetModuleState(cls);
+ if (twice) {
+ n *= 2;
+ }
+ m_state->counter += n;
+
+ Py_RETURN_NONE;
+}
+
+PyDoc_STRVAR(_StateAccessType_decrement_count__doc__,
+"decrement_count($self, /, n=1, *, twice=None)\n"
+"--\n"
+"\n"
+"Add 'n' from the module-state counter.\n"
+"Pass 'twice' to double that amount.\n"
+"(This is to test both positional and keyword arguments.");
+
+// Intentionally does not use Argument Clinic
+static PyObject *
+_StateAccessType_increment_count_noclinic(StateAccessTypeObject *self,
+ PyTypeObject *defining_class,
+ PyObject *const *args,
+ Py_ssize_t nargs,
+ PyObject *kwnames)
+{
+ if (!_PyArg_CheckPositional("StateAccessTypeObject.decrement_count", nargs, 0, 1)) {
+ return NULL;
+ }
+ long n = 1;
+ if (nargs) {
+ n = PyLong_AsLong(args[0]);
+ if (PyErr_Occurred()) {
+ return NULL;
+ }
+ }
+ if (kwnames && PyTuple_Check(kwnames)) {
+ if (PyTuple_GET_SIZE(kwnames) > 1 ||
+ PyUnicode_CompareWithASCIIString(
+ PyTuple_GET_ITEM(kwnames, 0),
+ "twice"
+ )) {
+ PyErr_SetString(
+ PyExc_TypeError,
+ "decrement_count only takes 'twice' keyword argument"
+ );
+ return NULL;
+ }
+ n *= 2;
+ }
+ meth_state *m_state = PyType_GetModuleState(defining_class);
+ m_state->counter += n;
+
+ Py_RETURN_NONE;
+}
+
+/*[clinic input]
+_testmultiphase.StateAccessType.get_count
+
+ cls: defining_class
+
+Return the value of the module-state counter.
+[clinic start generated code]*/
+
+static PyObject *
+_testmultiphase_StateAccessType_get_count_impl(StateAccessTypeObject *self,
+ PyTypeObject *cls)
+/*[clinic end generated code: output=64600f95b499a319 input=d5d181f12384849f]*/
+{
+ meth_state *m_state = PyType_GetModuleState(cls);
+ return PyLong_FromLong(m_state->counter);
+}
+
+static PyMethodDef StateAccessType_methods[] = {
+ _TESTMULTIPHASE_STATEACCESSTYPE_GET_DEFINING_MODULE_METHODDEF
+ _TESTMULTIPHASE_STATEACCESSTYPE_GET_COUNT_METHODDEF
+ _TESTMULTIPHASE_STATEACCESSTYPE_INCREMENT_COUNT_CLINIC_METHODDEF
+ {
+ "increment_count_noclinic",
+ (PyCFunction)(void(*)(void))_StateAccessType_increment_count_noclinic,
+ METH_METHOD|METH_FASTCALL|METH_KEYWORDS,
+ _StateAccessType_decrement_count__doc__
+ },
+ {NULL, NULL} /* sentinel */
+};
+
+static PyType_Slot StateAccessType_Type_slots[] = {
+ {Py_tp_doc, "Type for testing per-module state access from methods."},
+ {Py_tp_methods, StateAccessType_methods},
+ {0, NULL}
+};
+
+static PyType_Spec StateAccessType_spec = {
+ "_testimportexec.StateAccessType",
+ sizeof(StateAccessTypeObject),
+ 0,
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_FINALIZE | Py_TPFLAGS_BASETYPE,
+ StateAccessType_Type_slots
+};
+
/* Function of two integers returning integer */
PyDoc_STRVAR(testexport_foo_doc,
@@ -193,30 +355,39 @@ static int execfunc(PyObject *m)
/* Add a custom type */
temp = PyType_FromSpec(&Example_Type_spec);
- if (temp == NULL)
+ if (temp == NULL) {
goto fail;
- if (PyModule_AddObject(m, "Example", temp) != 0)
+ }
+ if (PyModule_AddObject(m, "Example", temp) != 0) {
goto fail;
+ }
+
/* Add an exception type */
temp = PyErr_NewException("_testimportexec.error", NULL, NULL);
- if (temp == NULL)
+ if (temp == NULL) {
goto fail;
- if (PyModule_AddObject(m, "error", temp) != 0)
+ }
+ if (PyModule_AddObject(m, "error", temp) != 0) {
goto fail;
+ }
/* Add Str */
temp = PyType_FromSpec(&Str_Type_spec);
- if (temp == NULL)
+ if (temp == NULL) {
goto fail;
- if (PyModule_AddObject(m, "Str", temp) != 0)
+ }
+ if (PyModule_AddObject(m, "Str", temp) != 0) {
goto fail;
+ }
- if (PyModule_AddIntConstant(m, "int_const", 1969) != 0)
+ if (PyModule_AddIntConstant(m, "int_const", 1969) != 0) {
goto fail;
+ }
- if (PyModule_AddStringConstant(m, "str_const", "something different") != 0)
+ if (PyModule_AddStringConstant(m, "str_const", "something different") != 0) {
goto fail;
+ }
return 0;
fail:
@@ -225,20 +396,18 @@ static int execfunc(PyObject *m)
/* Helper for module definitions; there'll be a lot of them */
-#define TEST_MODULE_DEF_EX(name, slots, methods, statesize, traversefunc) { \
+#define TEST_MODULE_DEF(name, slots, methods) { \
PyModuleDef_HEAD_INIT, /* m_base */ \
name, /* m_name */ \
PyDoc_STR("Test module " name), /* m_doc */ \
- statesize, /* m_size */ \
+ 0, /* m_size */ \
methods, /* m_methods */ \
slots, /* m_slots */ \
- traversefunc, /* m_traverse */ \
+ NULL, /* m_traverse */ \
NULL, /* m_clear */ \
NULL, /* m_free */ \
}
-#define TEST_MODULE_DEF(name, slots, methods) TEST_MODULE_DEF_EX(name, slots, methods, 0, NULL)
-
static PyModuleDef_Slot main_slots[] = {
{Py_mod_exec, execfunc},
{0, NULL},
@@ -623,51 +792,49 @@ PyInit__testmultiphase_exec_unreported_exception(PyObject *spec)
}
static int
-bad_traverse(PyObject *self, visitproc visit, void *arg) {
- testmultiphase_state *m_state;
-
- m_state = PyModule_GetState(self);
-
- /* The following assertion mimics any traversal function that doesn't correctly handle
- * the case during module creation where the module state hasn't been created yet.
- *
- * The check that it is used to test only runs in debug mode, so it is OK that the
- * assert() will get compiled out in fully optimised release builds.
- */
- assert(m_state != NULL);
- Py_VISIT(m_state->integer);
- return 0;
-}
-
-static int
-execfunc_with_bad_traverse(PyObject *mod) {
- testmultiphase_state *m_state;
+meth_state_access_exec(PyObject *m)
+{
+ PyObject *temp;
+ meth_state *m_state;
- m_state = PyModule_GetState(mod);
+ m_state = PyModule_GetState(m);
if (m_state == NULL) {
return -1;
}
- m_state->integer = PyLong_FromLong(0x7fffffff);
- Py_INCREF(m_state->integer);
+ temp = PyType_FromModuleAndSpec(m, &StateAccessType_spec, NULL);
+ if (temp == NULL) {
+ return -1;
+ }
+ if (PyModule_AddObject(m, "StateAccessType", temp) != 0) {
+ return -1;
+ }
+
return 0;
}
-static PyModuleDef_Slot slots_with_bad_traverse[] = {
- {Py_mod_exec, execfunc_with_bad_traverse},
+static PyModuleDef_Slot meth_state_access_slots[] = {
+ {Py_mod_exec, meth_state_access_exec},
{0, NULL}
};
-static PyModuleDef def_with_bad_traverse = TEST_MODULE_DEF_EX(
- "_testmultiphase_with_bad_traverse", slots_with_bad_traverse, NULL,
- sizeof(testmultiphase_state), bad_traverse);
+static PyModuleDef def_meth_state_access = {
+ PyModuleDef_HEAD_INIT,
+ .m_name = "_testmultiphase_meth_state_access",
+ .m_doc = PyDoc_STR("Module testing access"
+ " to state from methods."),
+ .m_size = sizeof(meth_state),
+ .m_slots = meth_state_access_slots,
+};
PyMODINIT_FUNC
-PyInit__testmultiphase_with_bad_traverse(PyObject *spec) {
- return PyModuleDef_Init(&def_with_bad_traverse);
+PyInit__testmultiphase_meth_state_access(PyObject *spec)
+{
+ return PyModuleDef_Init(&def_meth_state_access);
}
+
/*** Helper for imp test ***/
static PyModuleDef imp_dummy_def = TEST_MODULE_DEF("imp_dummy", main_slots, testexport_methods);
diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c
index fadf57aa..77baba48 100644
--- a/Modules/_threadmodule.c
+++ b/Modules/_threadmodule.c
@@ -4,9 +4,9 @@
#include "Python.h"
#include "pycore_pylifecycle.h"
-#include "pycore_pystate.h"
-#include "structmember.h" /* offsetof */
-#include "pythread.h"
+#include "pycore_interp.h" // _PyInterpreterState.num_threads
+#include "pycore_pystate.h" // _PyThreadState_Init()
+#include <stddef.h> // offsetof()
static PyObject *ThreadError;
static PyObject *str_dict;
@@ -205,6 +205,22 @@ lock_repr(lockobject *self)
self->locked ? "locked" : "unlocked", Py_TYPE(self)->tp_name, self);
}
+#ifdef HAVE_FORK
+static PyObject *
+lock__at_fork_reinit(lockobject *self, PyObject *Py_UNUSED(args))
+{
+ if (_PyThread_at_fork_reinit(&self->lock_lock) < 0) {
+ PyErr_SetString(ThreadError, "failed to reinitialize lock at fork");
+ return NULL;
+ }
+
+ self->locked = 0;
+
+ Py_RETURN_NONE;
+}
+#endif /* HAVE_FORK */
+
+
static PyMethodDef lock_methods[] = {
{"acquire_lock", (PyCFunction)(void(*)(void))lock_PyThread_acquire_lock,
METH_VARARGS | METH_KEYWORDS, acquire_doc},
@@ -222,6 +238,10 @@ static PyMethodDef lock_methods[] = {
METH_VARARGS | METH_KEYWORDS, acquire_doc},
{"__exit__", (PyCFunction)lock_PyThread_release_lock,
METH_VARARGS, release_doc},
+#ifdef HAVE_FORK
+ {"_at_fork_reinit", (PyCFunction)lock__at_fork_reinit,
+ METH_NOARGS, NULL},
+#endif
{NULL, NULL} /* sentinel */
};
@@ -438,22 +458,20 @@ For internal use by `threading.Condition`.");
static PyObject *
rlock_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
- rlockobject *self;
-
- self = (rlockobject *) type->tp_alloc(type, 0);
- if (self != NULL) {
- self->in_weakreflist = NULL;
- self->rlock_owner = 0;
- self->rlock_count = 0;
-
- self->rlock_lock = PyThread_allocate_lock();
- if (self->rlock_lock == NULL) {
- Py_DECREF(self);
- PyErr_SetString(ThreadError, "can't allocate lock");
- return NULL;
- }
+ rlockobject *self = (rlockobject *) type->tp_alloc(type, 0);
+ if (self == NULL) {
+ return NULL;
}
+ self->in_weakreflist = NULL;
+ self->rlock_owner = 0;
+ self->rlock_count = 0;
+ self->rlock_lock = PyThread_allocate_lock();
+ if (self->rlock_lock == NULL) {
+ Py_DECREF(self);
+ PyErr_SetString(ThreadError, "can't allocate lock");
+ return NULL;
+ }
return (PyObject *) self;
}
@@ -467,6 +485,23 @@ rlock_repr(rlockobject *self)
}
+#ifdef HAVE_FORK
+static PyObject *
+rlock__at_fork_reinit(rlockobject *self, PyObject *Py_UNUSED(args))
+{
+ if (_PyThread_at_fork_reinit(&self->rlock_lock) < 0) {
+ PyErr_SetString(ThreadError, "failed to reinitialize lock at fork");
+ return NULL;
+ }
+
+ self->rlock_owner = 0;
+ self->rlock_count = 0;
+
+ Py_RETURN_NONE;
+}
+#endif /* HAVE_FORK */
+
+
static PyMethodDef rlock_methods[] = {
{"acquire", (PyCFunction)(void(*)(void))rlock_acquire,
METH_VARARGS | METH_KEYWORDS, rlock_acquire_doc},
@@ -482,6 +517,10 @@ static PyMethodDef rlock_methods[] = {
METH_VARARGS | METH_KEYWORDS, rlock_acquire_doc},
{"__exit__", (PyCFunction)rlock_release,
METH_VARARGS, rlock_release_doc},
+#ifdef HAVE_FORK
+ {"_at_fork_reinit", (PyCFunction)rlock__at_fork_reinit,
+ METH_NOARGS, NULL},
+#endif
{NULL, NULL} /* sentinel */
};
@@ -548,8 +587,6 @@ newlockobject(void)
/* Thread-local objects */
-#include "structmember.h"
-
/* Quick overview:
We need to be able to reclaim reference cycles as soon as possible
@@ -836,7 +873,7 @@ _ldict(localobject *self)
}
}
else {
- assert(Py_TYPE(dummy) == &localdummytype);
+ assert(Py_IS_TYPE(dummy, &localdummytype));
ldict = ((localdummyobject *) dummy)->localdict;
}
@@ -930,7 +967,7 @@ local_getattro(localobject *self, PyObject *name)
if (r == -1)
return NULL;
- if (Py_TYPE(self) != &localtype)
+ if (!Py_IS_TYPE(self, &localtype))
/* use generic lookup for subtypes */
return _PyObject_GenericGetAttrWithDict(
(PyObject *)self, name, ldict, 0);
@@ -985,6 +1022,7 @@ struct bootstate {
PyObject *args;
PyObject *keyw;
PyThreadState *tstate;
+ _PyRuntimeState *runtime;
};
static void
@@ -996,7 +1034,7 @@ t_bootstrap(void *boot_raw)
tstate = boot->tstate;
tstate->thread_id = PyThread_get_thread_ident();
- _PyThreadState_Init(&_PyRuntime, tstate);
+ _PyThreadState_Init(tstate);
PyEval_AcquireThread(tstate);
tstate->interp->num_threads++;
res = PyObject_Call(boot->func, boot->args, boot->keyw);
@@ -1017,13 +1055,14 @@ t_bootstrap(void *boot_raw)
PyMem_DEL(boot_raw);
tstate->interp->num_threads--;
PyThreadState_Clear(tstate);
- PyThreadState_DeleteCurrent();
+ _PyThreadState_DeleteCurrent(tstate);
PyThread_exit_thread();
}
static PyObject *
thread_PyThread_start_new_thread(PyObject *self, PyObject *fargs)
{
+ _PyRuntimeState *runtime = &_PyRuntime;
PyObject *func, *args, *keyw = NULL;
struct bootstate *boot;
unsigned long ident;
@@ -1046,14 +1085,23 @@ thread_PyThread_start_new_thread(PyObject *self, PyObject *fargs)
"optional 3rd arg must be a dictionary");
return NULL;
}
+
+ PyInterpreterState *interp = _PyInterpreterState_GET();
+ if (interp->config._isolated_interpreter) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "thread is not supported for isolated subinterpreters");
+ return NULL;
+ }
+
boot = PyMem_NEW(struct bootstate, 1);
if (boot == NULL)
return PyErr_NoMemory();
- boot->interp = _PyInterpreterState_Get();
+ boot->interp = _PyInterpreterState_GET();
boot->func = func;
boot->args = args;
boot->keyw = keyw;
boot->tstate = _PyThreadState_Prealloc(boot->interp);
+ boot->runtime = runtime;
if (boot->tstate == NULL) {
PyMem_DEL(boot);
return PyErr_NoMemory();
@@ -1061,7 +1109,7 @@ thread_PyThread_start_new_thread(PyObject *self, PyObject *fargs)
Py_INCREF(func);
Py_INCREF(args);
Py_XINCREF(keyw);
- PyEval_InitThreads(); /* Start the interpreter's thread-awareness */
+
ident = PyThread_start_new_thread(t_bootstrap, (void*) boot);
if (ident == PYTHREAD_INVALID_THREAD_ID) {
PyErr_SetString(ThreadError, "can't start new thread");
@@ -1170,7 +1218,7 @@ particular thread within a system.");
static PyObject *
thread__count(PyObject *self, PyObject *Py_UNUSED(ignored))
{
- PyInterpreterState *interp = _PyInterpreterState_Get();
+ PyInterpreterState *interp = _PyInterpreterState_GET();
return PyLong_FromLong(interp->num_threads);
}
@@ -1196,7 +1244,7 @@ release_sentinel(void *wr_raw)
PyObject *obj = PyWeakref_GET_OBJECT(wr);
lockobject *lock;
if (obj != Py_None) {
- assert(Py_TYPE(obj) == &Locktype);
+ assert(Py_IS_TYPE(obj, &Locktype));
lock = (lockobject *) obj;
if (lock->locked) {
PyThread_release_lock(lock->lock_lock);
@@ -1351,7 +1399,7 @@ thread_excepthook_file(PyObject *file, PyObject *exc_type, PyObject *exc_value,
_PyErr_Display(file, exc_type, exc_value, exc_traceback);
/* Call file.flush() */
- PyObject *res = _PyObject_CallMethodId(file, &PyId_flush, NULL);
+ PyObject *res = _PyObject_CallMethodIdNoArgs(file, &PyId_flush);
if (!res) {
return -1;
}
@@ -1387,7 +1435,7 @@ static PyStructSequence_Desc ExceptHookArgs_desc = {
static PyObject *
thread_excepthook(PyObject *self, PyObject *args)
{
- if (Py_TYPE(args) != &ExceptHookArgsType) {
+ if (!Py_IS_TYPE(args, &ExceptHookArgsType)) {
PyErr_SetString(PyExc_TypeError,
"_thread.excepthook argument type "
"must be ExceptHookArgs");
@@ -1513,7 +1561,7 @@ PyInit__thread(void)
PyObject *m, *d, *v;
double time_max;
double timeout_max;
- PyInterpreterState *interp = _PyInterpreterState_Get();
+ PyInterpreterState *interp = _PyInterpreterState_GET();
/* Initialize types: */
if (PyType_Ready(&localdummytype) < 0)
diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c
index a1071e5a..793c5e71 100644
--- a/Modules/_tkinter.c
+++ b/Modules/_tkinter.c
@@ -26,8 +26,6 @@ Copyright (C) 1994 Steen Lumholt.
#include "Python.h"
#include <ctype.h>
-#include "pythread.h"
-
#ifdef MS_WINDOWS
#include <windows.h>
#endif
@@ -56,7 +54,7 @@ Copyright (C) 1994 Steen Lumholt.
#if TK_HEX_VERSION >= 0x08050208 && TK_HEX_VERSION < 0x08060000 || \
TK_HEX_VERSION >= 0x08060200
-#define HAVE_LIBTOMMAMTH
+#define HAVE_LIBTOMMATH
#include <tclTomMath.h>
#endif
@@ -574,9 +572,9 @@ SplitObj(PyObject *arg)
else if (PyBytes_Check(arg)) {
int argc;
const char **argv;
- char *list = PyBytes_AS_STRING(arg);
+ const char *list = PyBytes_AS_STRING(arg);
- if (Tcl_SplitList((Tcl_Interp *)NULL, list, &argc, &argv) != TCL_OK) {
+ if (Tcl_SplitList((Tcl_Interp *)NULL, (char *)list, &argc, &argv) != TCL_OK) {
Py_INCREF(arg);
return arg;
}
@@ -712,8 +710,8 @@ Tkapp_New(const char *screenName, const char *className,
}
strcpy(argv0, className);
- if (Py_ISUPPER(Py_CHARMASK(argv0[0])))
- argv0[0] = Py_TOLOWER(Py_CHARMASK(argv0[0]));
+ if (Py_ISUPPER(argv0[0]))
+ argv0[0] = Py_TOLOWER(argv0[0]);
Tcl_SetVar(v->interp, "argv0", argv0, TCL_GLOBAL_ONLY);
PyMem_Free(argv0);
@@ -833,7 +831,7 @@ typedef struct {
} PyTclObject;
static PyObject *PyTclObject_Type;
-#define PyTclObject_Check(v) ((v)->ob_type == (PyTypeObject *) PyTclObject_Type)
+#define PyTclObject_Check(v) Py_IS_TYPE(v, (PyTypeObject *) PyTclObject_Type)
static PyObject *
newPyTclObject(Tcl_Obj *arg)
@@ -967,7 +965,7 @@ static PyType_Spec PyTclObject_Type_spec = {
#define CHECK_STRING_LENGTH(s)
#endif
-#ifdef HAVE_LIBTOMMAMTH
+#ifdef HAVE_LIBTOMMATH
static Tcl_Obj*
asBignumObj(PyObject *value)
{
@@ -1047,7 +1045,7 @@ AsObj(PyObject *value)
#endif
/* If there is an overflow in the wideInt conversion,
fall through to bignum handling. */
-#ifdef HAVE_LIBTOMMAMTH
+#ifdef HAVE_LIBTOMMATH
return asBignumObj(value);
#endif
/* If there is no wideInt or bignum support,
@@ -1169,7 +1167,7 @@ fromWideIntObj(TkappObject *tkapp, Tcl_Obj *value)
return NULL;
}
-#ifdef HAVE_LIBTOMMAMTH
+#ifdef HAVE_LIBTOMMATH
static PyObject*
fromBignumObj(TkappObject *tkapp, Tcl_Obj *value)
{
@@ -1249,7 +1247,7 @@ FromObj(TkappObject *tkapp, Tcl_Obj *value)
fall through to bignum handling. */
}
-#ifdef HAVE_LIBTOMMAMTH
+#ifdef HAVE_LIBTOMMATH
if (value->typePtr == tkapp->IntType ||
value->typePtr == tkapp->WideIntType ||
value->typePtr == tkapp->BignumType) {
@@ -1302,7 +1300,7 @@ FromObj(TkappObject *tkapp, Tcl_Obj *value)
}
#endif
-#ifdef HAVE_LIBTOMMAMTH
+#ifdef HAVE_LIBTOMMATH
if (tkapp->BignumType == NULL &&
strcmp(value->typePtr->name, "bignum") == 0) {
/* bignum type is not registered in Tcl */
@@ -1734,7 +1732,7 @@ varname_converter(PyObject *in, void *_out)
}
PyErr_Format(PyExc_TypeError,
"must be str, bytes or Tcl_Obj, not %.50s",
- in->ob_type->tp_name);
+ Py_TYPE(in)->tp_name);
return 0;
}
@@ -2003,7 +2001,7 @@ _tkinter_tkapp_getint(TkappObject *self, PyObject *arg)
Prefer bignum because Tcl_GetWideIntFromObj returns ambiguous result for
value in ranges -2**64..-2**63-1 and 2**63..2**64-1 (on 32-bit platform).
*/
-#ifdef HAVE_LIBTOMMAMTH
+#ifdef HAVE_LIBTOMMATH
result = fromBignumObj(self, value);
#else
result = fromWideIntObj(self, value);
@@ -2166,11 +2164,9 @@ _tkinter_tkapp_exprdouble_impl(TkappObject *self, const char *s)
CHECK_STRING_LENGTH(s);
CHECK_TCL_APPARTMENT;
- PyFPE_START_PROTECT("Tkapp_ExprDouble", return 0)
ENTER_TCL
retval = Tcl_ExprDouble(Tkapp_Interp(self), s, &v);
ENTER_OVERLAP
- PyFPE_END_PROTECT(retval)
if (retval == TCL_ERROR)
res = Tkinter_Error(self);
else
@@ -2304,6 +2300,12 @@ _tkinter_tkapp_split(TkappObject *self, PyObject *arg)
PyObject *v;
char *list;
+ if (PyErr_WarnEx(PyExc_DeprecationWarning,
+ "split() is deprecated; consider using splitlist() instead", 1))
+ {
+ return NULL;
+ }
+
if (PyTclObject_Check(arg)) {
Tcl_Obj *value = ((PyTclObject*)arg)->value;
int objc;
@@ -2798,7 +2800,7 @@ TimerHandler(ClientData clientData)
ENTER_PYTHON
- res = _PyObject_CallNoArg(func);
+ res = PyObject_CallNoArgs(func);
Py_DECREF(func);
Py_DECREF(v); /* See Tktt_New() */
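
The _tkinter hunks above repeatedly swap direct ob_type access for the Py_TYPE()/Py_IS_TYPE() accessors and replace the private _PyObject_CallNoArg() with the public PyObject_CallNoArgs(). A minimal sketch of the target idiom, with hypothetical names that are not part of the patch:

    #include "Python.h"

    /* Sketch only: the accessor idioms the patch migrates to (Python 3.9+). */
    static int
    is_expected_type(PyObject *obj, PyTypeObject *expected)
    {
        /* Preferred over `obj->ob_type == expected`. */
        return Py_IS_TYPE(obj, expected);
    }

    static PyObject *
    call_callback(PyObject *callback)
    {
        /* Public replacement for the private _PyObject_CallNoArg(); error
           messages likewise use Py_TYPE(obj)->tp_name rather than
           obj->ob_type->tp_name. */
        return PyObject_CallNoArgs(callback);
    }
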
diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c
index cbcf55f8..fc91622d 100644
--- a/Modules/_tracemalloc.c
+++ b/Modules/_tracemalloc.c
@@ -1,9 +1,9 @@
#include "Python.h"
+#include "pycore_gc.h" // PyGC_Head
+#include "pycore_pymem.h" // _Py_tracemalloc_config
#include "pycore_traceback.h"
-#include "hashtable.h"
-#include "frameobject.h"
-#include "pythread.h"
-#include "osdefs.h"
+#include "pycore_hashtable.h"
+#include "frameobject.h" // PyFrame_GetBack()
#include "clinic/_tracemalloc.c.h"
/*[clinic input]
@@ -23,6 +23,9 @@ static void raw_free(void *ptr);
# define TRACE_DEBUG
#endif
+#define TO_PTR(key) ((const void *)(uintptr_t)(key))
+#define FROM_PTR(key) ((uintptr_t)(key))
+
/* Protected by the GIL */
static struct {
PyMemAllocatorEx mem;
@@ -34,7 +37,7 @@ static struct {
#if defined(TRACE_RAW_MALLOC)
/* This lock is needed because tracemalloc_free() is called without
the GIL held from PyMem_RawFree(). It cannot acquire the lock because it
- would introduce a deadlock in PyThreadState_DeleteCurrent(). */
+ would introduce a deadlock in _PyThreadState_DeleteCurrent(). */
static PyThread_type_lock tables_lock;
# define TABLES_LOCK() PyThread_acquire_lock(tables_lock, 1)
# define TABLES_UNLOCK() PyThread_release_lock(tables_lock)
@@ -47,16 +50,6 @@ static PyThread_type_lock tables_lock;
#define DEFAULT_DOMAIN 0
-/* Pack the frame_t structure to reduce the memory footprint. */
-typedef struct
-#ifdef __GNUC__
-__attribute__((packed))
-#endif
-{
- uintptr_t ptr;
- unsigned int domain;
-} pointer_t;
-
/* Pack the frame_t structure to reduce the memory footprint on 64-bit
architectures: 12 bytes instead of 16. */
typedef struct
@@ -78,15 +71,20 @@ __attribute__((packed))
typedef struct {
Py_uhash_t hash;
- int nframe;
+ /* Number of frames stored */
+ uint16_t nframe;
+ /* Total number of frames the traceback had */
+ uint16_t total_nframe;
frame_t frames[1];
} traceback_t;
#define TRACEBACK_SIZE(NFRAME) \
(sizeof(traceback_t) + sizeof(frame_t) * (NFRAME - 1))
-#define MAX_NFRAME \
- ((INT_MAX - (int)sizeof(traceback_t)) / (int)sizeof(frame_t) + 1)
+/* The maximum number of frames is either:
+ - The maximum number of frames we can store in `traceback_t.nframe`
+ - The maximum memory size_t we can allocate */
+static const unsigned long MAX_NFRAME = Py_MIN(UINT16_MAX, ((SIZE_MAX - sizeof(traceback_t)) / sizeof(frame_t) + 1));
static PyObject *unknown_filename = NULL;
@@ -124,10 +122,14 @@ static traceback_t *tracemalloc_traceback = NULL;
Protected by the GIL */
static _Py_hashtable_t *tracemalloc_tracebacks = NULL;
-/* pointer (void*) => trace (trace_t).
+/* pointer (void*) => trace (trace_t*).
Protected by TABLES_LOCK(). */
static _Py_hashtable_t *tracemalloc_traces = NULL;
+/* domain (unsigned int) => traces (_Py_hashtable_t).
+ Protected by TABLES_LOCK(). */
+static _Py_hashtable_t *tracemalloc_domains = NULL;
+
#ifdef TRACE_DEBUG
static void
@@ -204,69 +206,44 @@ set_reentrant(int reentrant)
static Py_uhash_t
-hashtable_hash_pyobject(_Py_hashtable_t *ht, const void *pkey)
+hashtable_hash_pyobject(const void *key)
{
- PyObject *obj;
-
- _Py_HASHTABLE_READ_KEY(ht, pkey, obj);
+ PyObject *obj = (PyObject *)key;
return PyObject_Hash(obj);
}
static int
-hashtable_compare_unicode(_Py_hashtable_t *ht, const void *pkey,
- const _Py_hashtable_entry_t *entry)
+hashtable_compare_unicode(const void *key1, const void *key2)
{
- PyObject *key1, *key2;
-
- _Py_HASHTABLE_READ_KEY(ht, pkey, key1);
- _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, key2);
-
- if (key1 != NULL && key2 != NULL)
- return (PyUnicode_Compare(key1, key2) == 0);
- else
- return key1 == key2;
+ PyObject *obj1 = (PyObject *)key1;
+ PyObject *obj2 = (PyObject *)key2;
+ if (obj1 != NULL && obj2 != NULL) {
+ return (PyUnicode_Compare(obj1, obj2) == 0);
+ }
+ else {
+ return obj1 == obj2;
+ }
}
static Py_uhash_t
-hashtable_hash_pointer_t(_Py_hashtable_t *ht, const void *pkey)
+hashtable_hash_uint(const void *key_raw)
{
- pointer_t ptr;
- Py_uhash_t hash;
-
- _Py_HASHTABLE_READ_KEY(ht, pkey, ptr);
-
- hash = (Py_uhash_t)_Py_HashPointer((void*)ptr.ptr);
- hash ^= ptr.domain;
- return hash;
-}
-
-
-static int
-hashtable_compare_pointer_t(_Py_hashtable_t *ht, const void *pkey,
- const _Py_hashtable_entry_t *entry)
-{
- pointer_t ptr1, ptr2;
-
- _Py_HASHTABLE_READ_KEY(ht, pkey, ptr1);
- _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, ptr2);
-
- /* compare pointer before domain, because pointer is more likely to be
- different */
- return (ptr1.ptr == ptr2.ptr && ptr1.domain == ptr2.domain);
-
+ unsigned int key = (unsigned int)FROM_PTR(key_raw);
+ return (Py_uhash_t)key;
}
static _Py_hashtable_t *
-hashtable_new(size_t key_size, size_t data_size,
- _Py_hashtable_hash_func hash_func,
- _Py_hashtable_compare_func compare_func)
+hashtable_new(_Py_hashtable_hash_func hash_func,
+ _Py_hashtable_compare_func compare_func,
+ _Py_hashtable_destroy_func key_destroy_func,
+ _Py_hashtable_destroy_func value_destroy_func)
{
_Py_hashtable_allocator_t hashtable_alloc = {malloc, free};
- return _Py_hashtable_new_full(key_size, data_size, 0,
- hash_func, compare_func,
+ return _Py_hashtable_new_full(hash_func, compare_func,
+ key_destroy_func, value_destroy_func,
&hashtable_alloc);
}
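
The hunks above port _tracemalloc from the old hashtable.h API, where callbacks received a table pointer and entry pointers, to the internal pycore_hashtable.h API, where the hash callback receives the key itself, the compare callback receives two keys, and key/value destroy functions are passed to _Py_hashtable_new_full(). An illustrative pointer-keyed table mirroring the calls used in this file (core-only API; the value handling below is hypothetical):

    #include "Python.h"
    #include "pycore_hashtable.h"   /* internal API, requires Py_BUILD_CORE */
    #include <stdlib.h>

    static _Py_hashtable_t *
    make_ptr_table(void)
    {
        _Py_hashtable_allocator_t alloc = {malloc, free};
        return _Py_hashtable_new_full(_Py_hashtable_hash_ptr,
                                      _Py_hashtable_compare_direct,
                                      NULL,   /* keys are borrowed */
                                      free,   /* values were malloc'ed */
                                      &alloc);
    }

    static int
    remember_pointer(_Py_hashtable_t *table, void *key)
    {
        int *value = malloc(sizeof(int));
        if (value == NULL) {
            return -1;
        }
        *value = 1;
        if (_Py_hashtable_set(table, key, value) < 0) {
            free(value);
            return -1;
        }
        /* _Py_hashtable_get() returns NULL when the key is absent;
           _Py_hashtable_destroy() frees the remaining values via `free`. */
        return 0;
    }
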
@@ -285,36 +262,33 @@ raw_free(void *ptr)
static Py_uhash_t
-hashtable_hash_traceback(_Py_hashtable_t *ht, const void *pkey)
+hashtable_hash_traceback(const void *key)
{
- traceback_t *traceback;
-
- _Py_HASHTABLE_READ_KEY(ht, pkey, traceback);
+ const traceback_t *traceback = (const traceback_t *)key;
return traceback->hash;
}
static int
-hashtable_compare_traceback(_Py_hashtable_t *ht, const void *pkey,
- const _Py_hashtable_entry_t *entry)
+hashtable_compare_traceback(const void *key1, const void *key2)
{
- traceback_t *traceback1, *traceback2;
- const frame_t *frame1, *frame2;
- int i;
-
- _Py_HASHTABLE_READ_KEY(ht, pkey, traceback1);
- _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, traceback2);
+ const traceback_t *traceback1 = (const traceback_t *)key1;
+ const traceback_t *traceback2 = (const traceback_t *)key2;
- if (traceback1->nframe != traceback2->nframe)
+ if (traceback1->nframe != traceback2->nframe) {
+ return 0;
+ }
+ if (traceback1->total_nframe != traceback2->total_nframe) {
return 0;
+ }
- for (i=0; i < traceback1->nframe; i++) {
- frame1 = &traceback1->frames[i];
- frame2 = &traceback2->frames[i];
+ for (int i=0; i < traceback1->nframe; i++) {
+ const frame_t *frame1 = &traceback1->frames[i];
+ const frame_t *frame2 = &traceback2->frames[i];
- if (frame1->lineno != frame2->lineno)
+ if (frame1->lineno != frame2->lineno) {
return 0;
-
+ }
if (frame1->filename != frame2->filename) {
assert(PyUnicode_Compare(frame1->filename, frame2->filename) != 0);
return 0;
@@ -327,37 +301,24 @@ hashtable_compare_traceback(_Py_hashtable_t *ht, const void *pkey,
static void
tracemalloc_get_frame(PyFrameObject *pyframe, frame_t *frame)
{
- PyCodeObject *code;
- PyObject *filename;
- _Py_hashtable_entry_t *entry;
- int lineno;
-
frame->filename = unknown_filename;
- lineno = PyFrame_GetLineNumber(pyframe);
- if (lineno < 0)
+ int lineno = PyFrame_GetLineNumber(pyframe);
+ if (lineno < 0) {
lineno = 0;
+ }
frame->lineno = (unsigned int)lineno;
- code = pyframe->f_code;
- if (code == NULL) {
-#ifdef TRACE_DEBUG
- tracemalloc_error("failed to get the code object of the frame");
-#endif
- return;
- }
+ PyCodeObject *code = PyFrame_GetCode(pyframe);
+ PyObject *filename = code->co_filename;
+ Py_DECREF(code);
- if (code->co_filename == NULL) {
+ if (filename == NULL) {
#ifdef TRACE_DEBUG
tracemalloc_error("failed to get the filename of the code object");
#endif
return;
}
- filename = code->co_filename;
- assert(filename != NULL);
- if (filename == NULL)
- return;
-
if (!PyUnicode_Check(filename)) {
#ifdef TRACE_DEBUG
tracemalloc_error("filename is not a unicode string");
@@ -374,15 +335,16 @@ tracemalloc_get_frame(PyFrameObject *pyframe, frame_t *frame)
}
/* intern the filename */
- entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_filenames, filename);
+ _Py_hashtable_entry_t *entry;
+ entry = _Py_hashtable_get_entry(tracemalloc_filenames, filename);
if (entry != NULL) {
- _Py_HASHTABLE_ENTRY_READ_KEY(tracemalloc_filenames, entry, filename);
+ filename = (PyObject *)entry->key;
}
else {
/* tracemalloc_filenames is responsible to keep a reference
to the filename */
Py_INCREF(filename);
- if (_Py_HASHTABLE_SET_NODATA(tracemalloc_filenames, filename) < 0) {
+ if (_Py_hashtable_set(tracemalloc_filenames, filename, NULL) < 0) {
Py_DECREF(filename);
#ifdef TRACE_DEBUG
tracemalloc_error("failed to intern the filename");
@@ -416,6 +378,7 @@ traceback_hash(traceback_t *traceback)
/* the cast might truncate len; that doesn't change hash stability */
mult += (Py_uhash_t)(82520UL + len + len);
}
+ x ^= traceback->total_nframe;
x += 97531UL;
return x;
}
@@ -424,10 +387,7 @@ traceback_hash(traceback_t *traceback)
static void
traceback_get_frames(traceback_t *traceback)
{
- PyThreadState *tstate;
- PyFrameObject *pyframe;
-
- tstate = PyGILState_GetThisThreadState();
+ PyThreadState *tstate = PyGILState_GetThisThreadState();
if (tstate == NULL) {
#ifdef TRACE_DEBUG
tracemalloc_error("failed to get the current thread state");
@@ -435,12 +395,20 @@ traceback_get_frames(traceback_t *traceback)
return;
}
- for (pyframe = tstate->frame; pyframe != NULL; pyframe = pyframe->f_back) {
- tracemalloc_get_frame(pyframe, &traceback->frames[traceback->nframe]);
- assert(traceback->frames[traceback->nframe].filename != NULL);
- traceback->nframe++;
- if (traceback->nframe == _Py_tracemalloc_config.max_nframe)
- break;
+ PyFrameObject *pyframe = PyThreadState_GetFrame(tstate);
+ for (; pyframe != NULL;) {
+ if (traceback->nframe < _Py_tracemalloc_config.max_nframe) {
+ tracemalloc_get_frame(pyframe, &traceback->frames[traceback->nframe]);
+ assert(traceback->frames[traceback->nframe].filename != NULL);
+ traceback->nframe++;
+ }
+ if (traceback->total_nframe < UINT16_MAX) {
+ traceback->total_nframe++;
+ }
+
+ PyFrameObject *back = PyFrame_GetBack(pyframe);
+ Py_DECREF(pyframe);
+ pyframe = back;
}
}
@@ -456,15 +424,16 @@ traceback_new(void)
/* get frames */
traceback = tracemalloc_traceback;
traceback->nframe = 0;
+ traceback->total_nframe = 0;
traceback_get_frames(traceback);
if (traceback->nframe == 0)
return &tracemalloc_empty_traceback;
traceback->hash = traceback_hash(traceback);
/* intern the traceback */
- entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_tracebacks, traceback);
+ entry = _Py_hashtable_get_entry(tracemalloc_tracebacks, traceback);
if (entry != NULL) {
- _Py_HASHTABLE_ENTRY_READ_KEY(tracemalloc_tracebacks, entry, traceback);
+ traceback = (traceback_t *)entry->key;
}
else {
traceback_t *copy;
@@ -481,7 +450,7 @@ traceback_new(void)
}
memcpy(copy, traceback, traceback_size);
- if (_Py_HASHTABLE_SET_NODATA(tracemalloc_tracebacks, copy) < 0) {
+ if (_Py_hashtable_set(tracemalloc_tracebacks, copy, NULL) < 0) {
raw_free(copy);
#ifdef TRACE_DEBUG
tracemalloc_error("failed to intern the traceback: putdata failed");
@@ -494,79 +463,54 @@ traceback_new(void)
}
-static int
-tracemalloc_use_domain_cb(_Py_hashtable_t *old_traces,
- _Py_hashtable_entry_t *entry, void *user_data)
+static _Py_hashtable_t*
+tracemalloc_create_traces_table(void)
{
- uintptr_t ptr;
- pointer_t key;
- _Py_hashtable_t *new_traces = (_Py_hashtable_t *)user_data;
- const void *pdata = _Py_HASHTABLE_ENTRY_PDATA(old_traces, entry);
-
- _Py_HASHTABLE_ENTRY_READ_KEY(old_traces, entry, ptr);
- key.ptr = ptr;
- key.domain = DEFAULT_DOMAIN;
-
- return _Py_hashtable_set(new_traces,
- sizeof(key), &key,
- old_traces->data_size, pdata);
+ return hashtable_new(_Py_hashtable_hash_ptr,
+ _Py_hashtable_compare_direct,
+ NULL, raw_free);
}
-/* Convert tracemalloc_traces from compact key (uintptr_t) to pointer_t key.
- * Return 0 on success, -1 on error. */
-static int
-tracemalloc_use_domain(void)
+static _Py_hashtable_t*
+tracemalloc_create_domains_table(void)
{
- _Py_hashtable_t *new_traces = NULL;
+ return hashtable_new(hashtable_hash_uint,
+ _Py_hashtable_compare_direct,
+ NULL,
+ (_Py_hashtable_destroy_func)_Py_hashtable_destroy);
+}
- assert(!_Py_tracemalloc_config.use_domain);
- new_traces = hashtable_new(sizeof(pointer_t),
- sizeof(trace_t),
- hashtable_hash_pointer_t,
- hashtable_compare_pointer_t);
- if (new_traces == NULL) {
- return -1;
+static _Py_hashtable_t*
+tracemalloc_get_traces_table(unsigned int domain)
+{
+ if (domain == DEFAULT_DOMAIN) {
+ return tracemalloc_traces;
}
-
- if (_Py_hashtable_foreach(tracemalloc_traces, tracemalloc_use_domain_cb,
- new_traces) < 0)
- {
- _Py_hashtable_destroy(new_traces);
- return -1;
+ else {
+ return _Py_hashtable_get(tracemalloc_domains, TO_PTR(domain));
}
-
- _Py_hashtable_destroy(tracemalloc_traces);
- tracemalloc_traces = new_traces;
-
- _Py_tracemalloc_config.use_domain = 1;
-
- return 0;
}
static void
tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr)
{
- trace_t trace;
- int removed;
-
assert(_Py_tracemalloc_config.tracing);
- if (_Py_tracemalloc_config.use_domain) {
- pointer_t key = {ptr, domain};
- removed = _Py_HASHTABLE_POP(tracemalloc_traces, key, trace);
- }
- else {
- removed = _Py_HASHTABLE_POP(tracemalloc_traces, ptr, trace);
- }
- if (!removed) {
+ _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain);
+ if (!traces) {
return;
}
- assert(tracemalloc_traced_memory >= trace.size);
- tracemalloc_traced_memory -= trace.size;
+ trace_t *trace = _Py_hashtable_steal(traces, TO_PTR(ptr));
+ if (!trace) {
+ return;
+ }
+ assert(tracemalloc_traced_memory >= trace->size);
+ tracemalloc_traced_memory -= trace->size;
+ raw_free(trace);
}
#define REMOVE_TRACE(ptr) \
@@ -577,63 +521,55 @@ static int
tracemalloc_add_trace(unsigned int domain, uintptr_t ptr,
size_t size)
{
- pointer_t key = {ptr, domain};
- traceback_t *traceback;
- trace_t trace;
- _Py_hashtable_entry_t* entry;
- int res;
-
assert(_Py_tracemalloc_config.tracing);
- traceback = traceback_new();
+ traceback_t *traceback = traceback_new();
if (traceback == NULL) {
return -1;
}
- if (!_Py_tracemalloc_config.use_domain && domain != DEFAULT_DOMAIN) {
- /* first trace using a non-zero domain whereas traces use compact
- (uintptr_t) keys: switch to pointer_t keys. */
- if (tracemalloc_use_domain() < 0) {
+ _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain);
+ if (traces == NULL) {
+ traces = tracemalloc_create_traces_table();
+ if (traces == NULL) {
return -1;
}
- }
- if (_Py_tracemalloc_config.use_domain) {
- entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, key);
- }
- else {
- entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, ptr);
+ if (_Py_hashtable_set(tracemalloc_domains, TO_PTR(domain), traces) < 0) {
+ _Py_hashtable_destroy(traces);
+ return -1;
+ }
}
- if (entry != NULL) {
+ trace_t *trace = _Py_hashtable_get(traces, TO_PTR(ptr));
+ if (trace != NULL) {
/* the memory block is already tracked */
- _Py_HASHTABLE_ENTRY_READ_DATA(tracemalloc_traces, entry, trace);
- assert(tracemalloc_traced_memory >= trace.size);
- tracemalloc_traced_memory -= trace.size;
+ assert(tracemalloc_traced_memory >= trace->size);
+ tracemalloc_traced_memory -= trace->size;
- trace.size = size;
- trace.traceback = traceback;
- _Py_HASHTABLE_ENTRY_WRITE_DATA(tracemalloc_traces, entry, trace);
+ trace->size = size;
+ trace->traceback = traceback;
}
else {
- trace.size = size;
- trace.traceback = traceback;
-
- if (_Py_tracemalloc_config.use_domain) {
- res = _Py_HASHTABLE_SET(tracemalloc_traces, key, trace);
- }
- else {
- res = _Py_HASHTABLE_SET(tracemalloc_traces, ptr, trace);
+ trace = raw_malloc(sizeof(trace_t));
+ if (trace == NULL) {
+ return -1;
}
+ trace->size = size;
+ trace->traceback = traceback;
+
+ int res = _Py_hashtable_set(traces, TO_PTR(ptr), trace);
if (res != 0) {
+ raw_free(trace);
return res;
}
}
assert(tracemalloc_traced_memory <= SIZE_MAX - size);
tracemalloc_traced_memory += size;
- if (tracemalloc_traced_memory > tracemalloc_peak_traced_memory)
+ if (tracemalloc_traced_memory > tracemalloc_peak_traced_memory) {
tracemalloc_peak_traced_memory = tracemalloc_traced_memory;
+ }
return 0;
}
@@ -684,7 +620,7 @@ tracemalloc_realloc(void *ctx, void *ptr, size_t new_size)
TABLES_LOCK();
/* tracemalloc_add_trace() updates the trace if there is already
- a trace at address (domain, ptr2) */
+ a trace at address ptr2 */
if (ptr2 != ptr) {
REMOVE_TRACE(ptr);
}
@@ -699,7 +635,7 @@ tracemalloc_realloc(void *ctx, void *ptr, size_t new_size)
The GIL and the table lock ensures that only one thread is
allocating memory. */
- Py_UNREACHABLE();
+ Py_FatalError("tracemalloc_realloc() failed to allocate a trace");
}
TABLES_UNLOCK();
}
@@ -728,7 +664,7 @@ tracemalloc_free(void *ctx, void *ptr)
return;
/* GIL cannot be locked in PyMem_RawFree() because it would introduce
- a deadlock in PyThreadState_DeleteCurrent(). */
+ a deadlock in _PyThreadState_DeleteCurrent(). */
alloc->free(alloc->ctx, ptr);
@@ -888,27 +824,11 @@ tracemalloc_raw_realloc(void *ctx, void *ptr, size_t new_size)
#endif /* TRACE_RAW_MALLOC */
-static int
-tracemalloc_clear_filename(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry,
- void *user_data)
+static void
+tracemalloc_clear_filename(void *value)
{
- PyObject *filename;
-
- _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, filename);
+ PyObject *filename = (PyObject *)value;
Py_DECREF(filename);
- return 0;
-}
-
-
-static int
-traceback_free_traceback(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry,
- void *user_data)
-{
- traceback_t *traceback;
-
- _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, traceback);
- raw_free(traceback);
- return 0;
}
@@ -921,14 +841,13 @@ tracemalloc_clear_traces(void)
TABLES_LOCK();
_Py_hashtable_clear(tracemalloc_traces);
+ _Py_hashtable_clear(tracemalloc_domains);
tracemalloc_traced_memory = 0;
tracemalloc_peak_traced_memory = 0;
TABLES_UNLOCK();
- _Py_hashtable_foreach(tracemalloc_tracebacks, traceback_free_traceback, NULL);
_Py_hashtable_clear(tracemalloc_tracebacks);
- _Py_hashtable_foreach(tracemalloc_filenames, tracemalloc_clear_filename, NULL);
_Py_hashtable_clear(tracemalloc_filenames);
}
@@ -968,29 +887,19 @@ tracemalloc_init(void)
}
#endif
- tracemalloc_filenames = hashtable_new(sizeof(PyObject *), 0,
- hashtable_hash_pyobject,
- hashtable_compare_unicode);
+ tracemalloc_filenames = hashtable_new(hashtable_hash_pyobject,
+ hashtable_compare_unicode,
+ tracemalloc_clear_filename, NULL);
- tracemalloc_tracebacks = hashtable_new(sizeof(traceback_t *), 0,
- hashtable_hash_traceback,
- hashtable_compare_traceback);
+ tracemalloc_tracebacks = hashtable_new(hashtable_hash_traceback,
+ hashtable_compare_traceback,
+ NULL, raw_free);
- if (_Py_tracemalloc_config.use_domain) {
- tracemalloc_traces = hashtable_new(sizeof(pointer_t),
- sizeof(trace_t),
- hashtable_hash_pointer_t,
- hashtable_compare_pointer_t);
- }
- else {
- tracemalloc_traces = hashtable_new(sizeof(uintptr_t),
- sizeof(trace_t),
- _Py_hashtable_hash_ptr,
- _Py_hashtable_compare_direct);
- }
+ tracemalloc_traces = tracemalloc_create_traces_table();
+ tracemalloc_domains = tracemalloc_create_domains_table();
if (tracemalloc_filenames == NULL || tracemalloc_tracebacks == NULL
- || tracemalloc_traces == NULL) {
+ || tracemalloc_traces == NULL || tracemalloc_domains == NULL) {
PyErr_NoMemory();
return -1;
}
@@ -1001,6 +910,7 @@ tracemalloc_init(void)
PyUnicode_InternInPlace(&unknown_filename);
tracemalloc_empty_traceback.nframe = 1;
+ tracemalloc_empty_traceback.total_nframe = 1;
/* borrowed reference */
tracemalloc_empty_traceback.frames[0].filename = unknown_filename;
tracemalloc_empty_traceback.frames[0].lineno = 0;
@@ -1021,9 +931,10 @@ tracemalloc_deinit(void)
tracemalloc_stop();
/* destroy hash tables */
+ _Py_hashtable_destroy(tracemalloc_domains);
+ _Py_hashtable_destroy(tracemalloc_traces);
_Py_hashtable_destroy(tracemalloc_tracebacks);
_Py_hashtable_destroy(tracemalloc_filenames);
- _Py_hashtable_destroy(tracemalloc_traces);
#if defined(TRACE_RAW_MALLOC)
if (tables_lock != NULL) {
@@ -1046,10 +957,10 @@ tracemalloc_start(int max_nframe)
PyMemAllocatorEx alloc;
size_t size;
- if (max_nframe < 1 || max_nframe > MAX_NFRAME) {
+ if (max_nframe < 1 || (unsigned long) max_nframe > MAX_NFRAME) {
PyErr_Format(PyExc_ValueError,
- "the number of frames must be in range [1; %i]",
- (int)MAX_NFRAME);
+ "the number of frames must be in range [1; %lu]",
+ MAX_NFRAME);
return -1;
}
@@ -1062,7 +973,6 @@ tracemalloc_start(int max_nframe)
return 0;
}
- assert(1 <= max_nframe && max_nframe <= MAX_NFRAME);
_Py_tracemalloc_config.max_nframe = max_nframe;
/* allocate a buffer to store a new traceback */
@@ -1191,11 +1101,11 @@ frame_to_pyobject(frame_t *frame)
static PyObject*
traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table)
{
- int i;
- PyObject *frames, *frame;
+ PyObject *frames;
if (intern_table != NULL) {
- if (_Py_HASHTABLE_GET(intern_table, traceback, frames)) {
+ frames = _Py_hashtable_get(intern_table, (const void *)traceback);
+ if (frames) {
Py_INCREF(frames);
return frames;
}
@@ -1205,8 +1115,8 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table)
if (frames == NULL)
return NULL;
- for (i=0; i < traceback->nframe; i++) {
- frame = frame_to_pyobject(&traceback->frames[i]);
+ for (int i=0; i < traceback->nframe; i++) {
+ PyObject *frame = frame_to_pyobject(&traceback->frames[i]);
if (frame == NULL) {
Py_DECREF(frames);
return NULL;
@@ -1215,7 +1125,7 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table)
}
if (intern_table != NULL) {
- if (_Py_HASHTABLE_SET(intern_table, traceback, frames) < 0) {
+ if (_Py_hashtable_set(intern_table, traceback, frames) < 0) {
Py_DECREF(frames);
PyErr_NoMemory();
return NULL;
@@ -1228,13 +1138,13 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table)
static PyObject*
-trace_to_pyobject(unsigned int domain, trace_t *trace,
+trace_to_pyobject(unsigned int domain, const trace_t *trace,
_Py_hashtable_t *intern_tracebacks)
{
PyObject *trace_obj = NULL;
PyObject *obj;
- trace_obj = PyTuple_New(3);
+ trace_obj = PyTuple_New(4);
if (trace_obj == NULL)
return NULL;
@@ -1259,58 +1169,152 @@ trace_to_pyobject(unsigned int domain, trace_t *trace,
}
PyTuple_SET_ITEM(trace_obj, 2, obj);
+ obj = PyLong_FromUnsignedLong(trace->traceback->total_nframe);
+ if (obj == NULL) {
+ Py_DECREF(trace_obj);
+ return NULL;
+ }
+ PyTuple_SET_ITEM(trace_obj, 3, obj);
+
return trace_obj;
}
typedef struct {
_Py_hashtable_t *traces;
+ _Py_hashtable_t *domains;
_Py_hashtable_t *tracebacks;
PyObject *list;
+ unsigned int domain;
} get_traces_t;
+
static int
-tracemalloc_get_traces_fill(_Py_hashtable_t *traces, _Py_hashtable_entry_t *entry,
- void *user_data)
+tracemalloc_copy_trace(_Py_hashtable_t *traces,
+ const void *key, const void *value,
+ void *user_data)
{
- get_traces_t *get_traces = user_data;
- unsigned int domain;
- trace_t trace;
- PyObject *tracemalloc_obj;
- int res;
+ _Py_hashtable_t *traces2 = (_Py_hashtable_t *)user_data;
+
+ trace_t *trace = (trace_t *)value;
- if (_Py_tracemalloc_config.use_domain) {
- pointer_t key;
- _Py_HASHTABLE_ENTRY_READ_KEY(traces, entry, key);
- domain = key.domain;
+ trace_t *trace2 = raw_malloc(sizeof(trace_t));
+ if (trace2 == NULL) {
+ return -1;
}
- else {
- domain = DEFAULT_DOMAIN;
+ *trace2 = *trace;
+ if (_Py_hashtable_set(traces2, key, trace2) < 0) {
+ raw_free(trace2);
+ return -1;
+ }
+ return 0;
+}
+
+
+static _Py_hashtable_t*
+tracemalloc_copy_traces(_Py_hashtable_t *traces)
+{
+ _Py_hashtable_t *traces2 = tracemalloc_create_traces_table();
+ if (traces2 == NULL) {
+ return NULL;
+ }
+
+ int err = _Py_hashtable_foreach(traces,
+ tracemalloc_copy_trace,
+ traces2);
+ if (err) {
+ _Py_hashtable_destroy(traces2);
+ return NULL;
+ }
+ return traces2;
+}
+
+
+static int
+tracemalloc_copy_domain(_Py_hashtable_t *domains,
+ const void *key, const void *value,
+ void *user_data)
+{
+ _Py_hashtable_t *domains2 = (_Py_hashtable_t *)user_data;
+
+ unsigned int domain = (unsigned int)FROM_PTR(key);
+ _Py_hashtable_t *traces = (_Py_hashtable_t *)value;
+
+ _Py_hashtable_t *traces2 = tracemalloc_copy_traces(traces);
+ if (_Py_hashtable_set(domains2, TO_PTR(domain), traces2) < 0) {
+ _Py_hashtable_destroy(traces2);
+ return -1;
}
- _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace);
+ return 0;
+}
+
- tracemalloc_obj = trace_to_pyobject(domain, &trace, get_traces->tracebacks);
- if (tracemalloc_obj == NULL)
+static _Py_hashtable_t*
+tracemalloc_copy_domains(_Py_hashtable_t *domains)
+{
+ _Py_hashtable_t *domains2 = tracemalloc_create_domains_table();
+ if (domains2 == NULL) {
+ return NULL;
+ }
+
+ int err = _Py_hashtable_foreach(domains,
+ tracemalloc_copy_domain,
+ domains2);
+ if (err) {
+ _Py_hashtable_destroy(domains2);
+ return NULL;
+ }
+ return domains2;
+}
+
+
+static int
+tracemalloc_get_traces_fill(_Py_hashtable_t *traces,
+ const void *key, const void *value,
+ void *user_data)
+{
+ get_traces_t *get_traces = user_data;
+
+ const trace_t *trace = (const trace_t *)value;
+
+ PyObject *tuple = trace_to_pyobject(get_traces->domain, trace,
+ get_traces->tracebacks);
+ if (tuple == NULL) {
return 1;
+ }
- res = PyList_Append(get_traces->list, tracemalloc_obj);
- Py_DECREF(tracemalloc_obj);
- if (res < 0)
+ int res = PyList_Append(get_traces->list, tuple);
+ Py_DECREF(tuple);
+ if (res < 0) {
return 1;
+ }
return 0;
}
static int
-tracemalloc_pyobject_decref_cb(_Py_hashtable_t *tracebacks,
- _Py_hashtable_entry_t *entry,
- void *user_data)
+tracemalloc_get_traces_domain(_Py_hashtable_t *domains,
+ const void *key, const void *value,
+ void *user_data)
{
- PyObject *obj;
- _Py_HASHTABLE_ENTRY_READ_DATA(tracebacks, entry, obj);
+ get_traces_t *get_traces = user_data;
+
+ unsigned int domain = (unsigned int)FROM_PTR(key);
+ _Py_hashtable_t *traces = (_Py_hashtable_t *)value;
+
+ get_traces->domain = domain;
+ return _Py_hashtable_foreach(traces,
+ tracemalloc_get_traces_fill,
+ get_traces);
+}
+
+
+static void
+tracemalloc_pyobject_decref(void *value)
+{
+ PyObject *obj = (PyObject *)value;
Py_DECREF(obj);
- return 0;
}
@@ -1331,9 +1335,9 @@ _tracemalloc__get_traces_impl(PyObject *module)
/*[clinic end generated code: output=e9929876ced4b5cc input=6c7d2230b24255aa]*/
{
get_traces_t get_traces;
- int err;
-
+ get_traces.domain = DEFAULT_DOMAIN;
get_traces.traces = NULL;
+ get_traces.domains = NULL;
get_traces.tracebacks = NULL;
get_traces.list = PyList_New(0);
if (get_traces.list == NULL)
@@ -1344,45 +1348,65 @@ _tracemalloc__get_traces_impl(PyObject *module)
/* the traceback hash table is used temporarily to intern traceback tuple
of (filename, lineno) tuples */
- get_traces.tracebacks = hashtable_new(sizeof(traceback_t *),
- sizeof(PyObject *),
- _Py_hashtable_hash_ptr,
- _Py_hashtable_compare_direct);
+ get_traces.tracebacks = hashtable_new(_Py_hashtable_hash_ptr,
+ _Py_hashtable_compare_direct,
+ NULL, tracemalloc_pyobject_decref);
if (get_traces.tracebacks == NULL) {
- PyErr_NoMemory();
- goto error;
+ goto no_memory;
}
+ // Copy all traces so that tracemalloc_get_traces_fill() doesn't have to
+ // temporarily disable tracemalloc, which would impact other threads and
+ // miss allocations made while get_traces() is called.
TABLES_LOCK();
- get_traces.traces = _Py_hashtable_copy(tracemalloc_traces);
+ get_traces.traces = tracemalloc_copy_traces(tracemalloc_traces);
TABLES_UNLOCK();
if (get_traces.traces == NULL) {
- PyErr_NoMemory();
- goto error;
+ goto no_memory;
+ }
+
+ TABLES_LOCK();
+ get_traces.domains = tracemalloc_copy_domains(tracemalloc_domains);
+ TABLES_UNLOCK();
+
+ if (get_traces.domains == NULL) {
+ goto no_memory;
}
+ // Convert traces to a list of tuples
set_reentrant(1);
- err = _Py_hashtable_foreach(get_traces.traces,
- tracemalloc_get_traces_fill, &get_traces);
+ int err = _Py_hashtable_foreach(get_traces.traces,
+ tracemalloc_get_traces_fill,
+ &get_traces);
+ if (!err) {
+ err = _Py_hashtable_foreach(get_traces.domains,
+ tracemalloc_get_traces_domain,
+ &get_traces);
+ }
set_reentrant(0);
- if (err)
+ if (err) {
goto error;
+ }
goto finally;
+no_memory:
+ PyErr_NoMemory();
+
error:
Py_CLEAR(get_traces.list);
finally:
if (get_traces.tracebacks != NULL) {
- _Py_hashtable_foreach(get_traces.tracebacks,
- tracemalloc_pyobject_decref_cb, NULL);
_Py_hashtable_destroy(get_traces.tracebacks);
}
if (get_traces.traces != NULL) {
_Py_hashtable_destroy(get_traces.traces);
}
+ if (get_traces.domains != NULL) {
+ _Py_hashtable_destroy(get_traces.domains);
+ }
return get_traces.list;
}
@@ -1391,26 +1415,26 @@ finally:
static traceback_t*
tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr)
{
- trace_t trace;
- int found;
if (!_Py_tracemalloc_config.tracing)
return NULL;
+ trace_t *trace;
TABLES_LOCK();
- if (_Py_tracemalloc_config.use_domain) {
- pointer_t key = {ptr, domain};
- found = _Py_HASHTABLE_GET(tracemalloc_traces, key, trace);
+ _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain);
+ if (traces) {
+ trace = _Py_hashtable_get(traces, TO_PTR(ptr));
}
else {
- found = _Py_HASHTABLE_GET(tracemalloc_traces, ptr, trace);
+ trace = NULL;
}
TABLES_UNLOCK();
- if (!found)
+ if (!trace) {
return NULL;
+ }
- return trace.traceback;
+ return trace->traceback;
}
@@ -1550,6 +1574,17 @@ _tracemalloc_get_traceback_limit_impl(PyObject *module)
}
+static int
+tracemalloc_get_tracemalloc_memory_cb(_Py_hashtable_t *domains,
+ const void *key, const void *value,
+ void *user_data)
+{
+ const _Py_hashtable_t *traces = value;
+ size_t *size = (size_t*)user_data;
+ *size += _Py_hashtable_size(traces);
+ return 0;
+}
+
/*[clinic input]
_tracemalloc.get_tracemalloc_memory
@@ -1570,6 +1605,8 @@ _tracemalloc_get_tracemalloc_memory_impl(PyObject *module)
TABLES_LOCK();
size += _Py_hashtable_size(tracemalloc_traces);
+ _Py_hashtable_foreach(tracemalloc_domains,
+ tracemalloc_get_tracemalloc_memory_cb, &size);
TABLES_UNLOCK();
return PyLong_FromSize_t(size);
@@ -1602,6 +1639,30 @@ _tracemalloc_get_traced_memory_impl(PyObject *module)
return Py_BuildValue("nn", size, peak_size);
}
+/*[clinic input]
+_tracemalloc.reset_peak
+
+Set the peak size of memory blocks traced by tracemalloc to the current size.
+
+Do nothing if the tracemalloc module is not tracing memory allocations.
+
+[clinic start generated code]*/
+
+static PyObject *
+_tracemalloc_reset_peak_impl(PyObject *module)
+/*[clinic end generated code: output=140c2870f691dbb2 input=18afd0635066e9ce]*/
+{
+ if (!_Py_tracemalloc_config.tracing) {
+ Py_RETURN_NONE;
+ }
+
+ TABLES_LOCK();
+ tracemalloc_peak_traced_memory = tracemalloc_traced_memory;
+ TABLES_UNLOCK();
+
+ Py_RETURN_NONE;
+}
+
static PyMethodDef module_methods[] = {
_TRACEMALLOC_IS_TRACING_METHODDEF
@@ -1613,6 +1674,7 @@ static PyMethodDef module_methods[] = {
_TRACEMALLOC_GET_TRACEBACK_LIMIT_METHODDEF
_TRACEMALLOC_GET_TRACEMALLOC_MEMORY_METHODDEF
_TRACEMALLOC_GET_TRACED_MEMORY_METHODDEF
+ _TRACEMALLOC_RESET_PEAK_METHODDEF
/* sentinel */
{NULL, NULL}
};
@@ -1727,26 +1789,15 @@ _PyTraceMalloc_NewReference(PyObject *op)
ptr = (uintptr_t)op;
}
- _Py_hashtable_entry_t* entry;
int res = -1;
TABLES_LOCK();
- if (_Py_tracemalloc_config.use_domain) {
- pointer_t key = {ptr, DEFAULT_DOMAIN};
- entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, key);
- }
- else {
- entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, ptr);
- }
-
- if (entry != NULL) {
+ trace_t *trace = _Py_hashtable_get(tracemalloc_traces, TO_PTR(ptr));
+ if (trace != NULL) {
/* update the traceback of the memory block */
traceback_t *traceback = traceback_new();
if (traceback != NULL) {
- trace_t trace;
- _Py_HASHTABLE_ENTRY_READ_DATA(tracemalloc_traces, entry, trace);
- trace.traceback = traceback;
- _Py_HASHTABLE_ENTRY_WRITE_DATA(tracemalloc_traces, entry, trace);
+ trace->traceback = traceback;
res = 0;
}
}
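
Beyond the hashtable port, the _tracemalloc changes above split traces into per-domain tables, record total_nframe in every traceback (and in the 4-tuples returned by _get_traces()), and add a reset_peak() function. A hedged sketch of driving the new function from embedding code; the wrapper below is illustrative, not part of the module:

    #include "Python.h"

    /* Reset the traced-memory peak to the current traced size. */
    static int
    reset_tracemalloc_peak(void)
    {
        PyObject *mod = PyImport_ImportModule("_tracemalloc");
        if (mod == NULL) {
            return -1;
        }
        PyObject *res = PyObject_CallMethod(mod, "reset_peak", NULL);
        Py_DECREF(mod);
        if (res == NULL) {
            return -1;
        }
        Py_DECREF(res);
        return 0;
    }
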
diff --git a/Modules/_uuidmodule.c b/Modules/_uuidmodule.c
index 0b7aa72e..3f33e22a 100644
--- a/Modules/_uuidmodule.c
+++ b/Modules/_uuidmodule.c
@@ -1,5 +1,5 @@
/*
- * Python UUID module that wraps libuuid -
+ * Python UUID module that wraps libuuid or Windows rpcrt4.dll.
* DCE compatible Universally Unique Identifier library.
*/
@@ -12,6 +12,12 @@
#include <uuid.h>
#endif
+#ifdef MS_WINDOWS
+#include <rpc.h>
+#endif
+
+#ifndef MS_WINDOWS
+
static PyObject *
py_uuid_generate_time_safe(PyObject *Py_UNUSED(context),
PyObject *Py_UNUSED(ignored))
@@ -31,45 +37,86 @@ py_uuid_generate_time_safe(PyObject *Py_UNUSED(context),
return Py_BuildValue("y#i", buf, sizeof(uuid), (int) status);
# else
return Py_BuildValue("y#i", (const char *) &uuid, sizeof(uuid), (int) status);
-# endif
-#else
+# endif /* HAVE_UUID_CREATE */
+#else /* HAVE_UUID_GENERATE_TIME_SAFE */
uuid_generate_time(uuid);
return Py_BuildValue("y#O", (const char *) uuid, sizeof(uuid), Py_None);
-#endif
+#endif /* HAVE_UUID_GENERATE_TIME_SAFE */
}
+#else /* MS_WINDOWS */
+
+static PyObject *
+py_UuidCreate(PyObject *Py_UNUSED(context),
+ PyObject *Py_UNUSED(ignored))
+{
+ UUID uuid;
+ RPC_STATUS res;
+
+ Py_BEGIN_ALLOW_THREADS
+ res = UuidCreateSequential(&uuid);
+ Py_END_ALLOW_THREADS
+
+ switch (res) {
+ case RPC_S_OK:
+ case RPC_S_UUID_LOCAL_ONLY:
+ case RPC_S_UUID_NO_ADDRESS:
+ /*
+ All success codes, but the latter two indicate that the UUID is random
+ rather than based on the MAC address. If the OS can't figure this out,
+ neither can we, so we'll take it anyway.
+ */
+ return Py_BuildValue("y#", (const char *)&uuid, sizeof(uuid));
+ }
+ PyErr_SetFromWindowsErr(res);
+ return NULL;
+}
+
+#endif /* MS_WINDOWS */
+
+
+static int
+uuid_exec(PyObject *module) {
+ assert(sizeof(uuid_t) == 16);
+#if defined(MS_WINDOWS)
+ int has_uuid_generate_time_safe = 0;
+#elif defined(HAVE_UUID_GENERATE_TIME_SAFE)
+ int has_uuid_generate_time_safe = 1;
+#else
+ int has_uuid_generate_time_safe = 0;
+#endif
+ if (PyModule_AddIntConstant(module, "has_uuid_generate_time_safe",
+ has_uuid_generate_time_safe) < 0) {
+ return -1;
+ }
+ return 0;
+}
static PyMethodDef uuid_methods[] = {
+#if defined(HAVE_UUID_UUID_H) || defined(HAVE_UUID_H)
{"generate_time_safe", py_uuid_generate_time_safe, METH_NOARGS, NULL},
+#endif
+#if defined(MS_WINDOWS)
+ {"UuidCreate", py_UuidCreate, METH_NOARGS, NULL},
+#endif
{NULL, NULL, 0, NULL} /* sentinel */
};
+static PyModuleDef_Slot uuid_slots[] = {
+ {Py_mod_exec, uuid_exec},
+ {0, NULL}
+};
+
static struct PyModuleDef uuidmodule = {
PyModuleDef_HEAD_INIT,
.m_name = "_uuid",
- .m_size = -1,
+ .m_size = 0,
.m_methods = uuid_methods,
+ .m_slots = uuid_slots,
};
PyMODINIT_FUNC
PyInit__uuid(void)
{
- PyObject *mod;
- assert(sizeof(uuid_t) == 16);
-#ifdef HAVE_UUID_GENERATE_TIME_SAFE
- int has_uuid_generate_time_safe = 1;
-#else
- int has_uuid_generate_time_safe = 0;
-#endif
- mod = PyModule_Create(&uuidmodule);
- if (mod == NULL) {
- return NULL;
- }
- if (PyModule_AddIntConstant(mod, "has_uuid_generate_time_safe",
- has_uuid_generate_time_safe) < 0) {
- Py_DECREF(mod);
- return NULL;
- }
-
- return mod;
+ return PyModuleDef_Init(&uuidmodule);
}
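
The _uuid rewrite above moves the module to multi-phase initialization (PEP 489): PyInit__uuid() now only returns the definition via PyModuleDef_Init(), and uuid_exec() runs when the module object is actually created. A minimal sketch of the same pattern for a hypothetical module:

    #include "Python.h"

    static int
    example_exec(PyObject *module)
    {
        /* Runs for every new module object, like uuid_exec() above. */
        return PyModule_AddIntConstant(module, "answer", 42);
    }

    static PyModuleDef_Slot example_slots[] = {
        {Py_mod_exec, example_exec},
        {0, NULL}
    };

    static struct PyModuleDef examplemodule = {
        PyModuleDef_HEAD_INIT,
        .m_name = "_example",
        .m_size = 0,
        .m_methods = NULL,
        .m_slots = example_slots,
    };

    PyMODINIT_FUNC
    PyInit__example(void)
    {
        return PyModuleDef_Init(&examplemodule);
    }
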
diff --git a/Modules/_weakref.c b/Modules/_weakref.c
index c1238e00..e33cba2a 100644
--- a/Modules/_weakref.c
+++ b/Modules/_weakref.c
@@ -1,8 +1,9 @@
#include "Python.h"
+#include "pycore_object.h" // _PyObject_GET_WEAKREFS_LISTPTR
#define GET_WEAKREFS_LISTPTR(o) \
- ((PyWeakReference **) PyObject_GET_WEAKREFS_LISTPTR(o))
+ ((PyWeakReference **) _PyObject_GET_WEAKREFS_LISTPTR(o))
/*[clinic input]
module _weakref
@@ -136,14 +137,48 @@ weakref_functions[] = {
{NULL, NULL, 0, NULL}
};
+static int
+weakref_exec(PyObject *module)
+{
+ Py_INCREF(&_PyWeakref_RefType);
+ if (PyModule_AddObject(module, "ref", (PyObject *) &_PyWeakref_RefType) < 0) {
+ Py_DECREF(&_PyWeakref_RefType);
+ return -1;
+ }
+ Py_INCREF(&_PyWeakref_RefType);
+ if (PyModule_AddObject(module, "ReferenceType",
+ (PyObject *) &_PyWeakref_RefType) < 0) {
+ Py_DECREF(&_PyWeakref_RefType);
+ return -1;
+ }
+ Py_INCREF(&_PyWeakref_ProxyType);
+ if (PyModule_AddObject(module, "ProxyType",
+ (PyObject *) &_PyWeakref_ProxyType) < 0) {
+ Py_DECREF(&_PyWeakref_ProxyType);
+ return -1;
+ }
+ Py_INCREF(&_PyWeakref_CallableProxyType);
+ if (PyModule_AddObject(module, "CallableProxyType",
+ (PyObject *) &_PyWeakref_CallableProxyType) < 0) {
+ Py_DECREF(&_PyWeakref_CallableProxyType);
+ return -1;
+ }
+
+ return 0;
+}
+
+static struct PyModuleDef_Slot weakref_slots[] = {
+ {Py_mod_exec, weakref_exec},
+ {0, NULL}
+};
static struct PyModuleDef weakrefmodule = {
PyModuleDef_HEAD_INIT,
"_weakref",
"Weak-reference support module.",
- -1,
+ 0,
weakref_functions,
- NULL,
+ weakref_slots,
NULL,
NULL,
NULL
@@ -152,23 +187,5 @@ static struct PyModuleDef weakrefmodule = {
PyMODINIT_FUNC
PyInit__weakref(void)
{
- PyObject *m;
-
- m = PyModule_Create(&weakrefmodule);
-
- if (m != NULL) {
- Py_INCREF(&_PyWeakref_RefType);
- PyModule_AddObject(m, "ref",
- (PyObject *) &_PyWeakref_RefType);
- Py_INCREF(&_PyWeakref_RefType);
- PyModule_AddObject(m, "ReferenceType",
- (PyObject *) &_PyWeakref_RefType);
- Py_INCREF(&_PyWeakref_ProxyType);
- PyModule_AddObject(m, "ProxyType",
- (PyObject *) &_PyWeakref_ProxyType);
- Py_INCREF(&_PyWeakref_CallableProxyType);
- PyModule_AddObject(m, "CallableProxyType",
- (PyObject *) &_PyWeakref_CallableProxyType);
- }
- return m;
+ return PyModuleDef_Init(&weakrefmodule);
}
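
The new weakref_exec() above also demonstrates the error-checked PyModule_AddObject() idiom: the reference is incremented before the call and decremented only on failure, because PyModule_AddObject() steals the reference when it succeeds. A short sketch with a hypothetical type:

    /* Add a static type to a module without leaking or double-decrefing. */
    static int
    add_type(PyObject *module, const char *name, PyTypeObject *type)
    {
        Py_INCREF(type);
        if (PyModule_AddObject(module, name, (PyObject *)type) < 0) {
            Py_DECREF(type);   /* only on failure; success steals the ref */
            return -1;
        }
        return 0;
    }
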
diff --git a/Modules/_winapi.c b/Modules/_winapi.c
index 647075cd..e1672c47 100644
--- a/Modules/_winapi.c
+++ b/Modules/_winapi.c
@@ -35,7 +35,7 @@
/* See http://www.python.org/2.4/license for licensing details. */
#include "Python.h"
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
#define WINDOWS_LEAN_AND_MEAN
#include "windows.h"
@@ -603,11 +603,10 @@ _winapi_CreateJunction_impl(PyObject *module, LPWSTR src_path,
sizeof(rdb->MountPointReparseBuffer.PathBuffer) +
/* Two +1's for NUL terminators. */
(prefix_len + print_len + 1 + print_len + 1) * sizeof(WCHAR);
- rdb = (_Py_PREPARSE_DATA_BUFFER)PyMem_RawMalloc(rdb_size);
+ rdb = (_Py_PREPARSE_DATA_BUFFER)PyMem_RawCalloc(1, rdb_size);
if (rdb == NULL)
goto cleanup;
- memset(rdb, 0, rdb_size);
rdb->ReparseTag = IO_REPARSE_TAG_MOUNT_POINT;
rdb->ReparseDataLength = rdb_size - _Py_REPARSE_DATA_BUFFER_HEADER_SIZE;
rdb->MountPointReparseBuffer.SubstituteNameOffset = 0;
@@ -1081,6 +1080,14 @@ _winapi_CreateProcess_impl(PyObject *module,
return NULL;
}
+ PyInterpreterState *interp = PyInterpreterState_Get();
+ const PyConfig *config = _PyInterpreterState_GetConfig(interp);
+ if (config->_isolated_interpreter) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "subprocess not supported for isolated subinterpreters");
+ return NULL;
+ }
+
ZeroMemory(&si, sizeof(si));
si.StartupInfo.cb = sizeof(si);
@@ -1111,8 +1118,8 @@ _winapi_CreateProcess_impl(PyObject *module,
}
}
else if (command_line != Py_None) {
- PyErr_Format(PyExc_TypeError,
- "CreateProcess() argument 2 must be str or None, not %s",
+ PyErr_Format(PyExc_TypeError,
+ "CreateProcess() argument 2 must be str or None, not %s",
Py_TYPE(command_line)->tp_name);
goto cleanup;
}
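
In the CreateJunction hunk above, a PyMem_RawMalloc() plus memset() pair becomes a single zero-initialized allocation. A tiny sketch of the replacement; the helper name is hypothetical:

    #include "Python.h"

    /* Returns zeroed memory, or NULL on failure; free with PyMem_RawFree(). */
    static void *
    alloc_zeroed_raw(size_t size)
    {
        return PyMem_RawCalloc(1, size);
    }
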
diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c
index db1116ac..de11c090 100644
--- a/Modules/_xxsubinterpretersmodule.c
+++ b/Modules/_xxsubinterpretersmodule.c
@@ -26,9 +26,9 @@ _copy_raw_string(PyObject *strobj)
static PyInterpreterState *
_get_current(void)
{
- // _PyInterpreterState_Get() aborts if lookup fails, so don't need
+ // PyInterpreterState_Get() aborts if lookup fails, so don't need
// to check the result for NULL.
- return _PyInterpreterState_Get();
+ return PyInterpreterState_Get();
}
@@ -538,7 +538,7 @@ _channelend_find(_channelend *first, int64_t interp, _channelend **pprev)
typedef struct _channelassociations {
// Note that the list entries are never removed for interpreter
- // for which the channel is closed. This should be a problem in
+ // for which the channel is closed. This should not be a problem in
// practice. Also, a channel isn't automatically closed when an
// interpreter is destroyed.
int64_t numsendopen;
@@ -1179,11 +1179,6 @@ _channels_list_all(_channels *channels, int64_t *count)
{
int64_t *cids = NULL;
PyThread_acquire_lock(channels->mutex, WAIT_LOCK);
- int64_t numopen = channels->numopen;
- if (numopen >= PY_SSIZE_T_MAX) {
- PyErr_SetString(PyExc_RuntimeError, "too many channels open");
- goto done;
- }
int64_t *ids = PyMem_NEW(int64_t, (Py_ssize_t)(channels->numopen));
if (ids == NULL) {
goto done;
@@ -1350,19 +1345,16 @@ _channel_recv(_channels *channels, int64_t id)
_PyCrossInterpreterData *data = _channel_next(chan, PyInterpreterState_GetID(interp));
PyThread_release_lock(mutex);
if (data == NULL) {
- if (!PyErr_Occurred()) {
- PyErr_Format(ChannelEmptyError, "channel %" PRId64 " is empty", id);
- }
return NULL;
}
// Convert the data back to an object.
PyObject *obj = _PyCrossInterpreterData_NewObject(data);
+ _PyCrossInterpreterData_Release(data);
+ PyMem_Free(data);
if (obj == NULL) {
return NULL;
}
- _PyCrossInterpreterData_Release(data);
- PyMem_Free(data);
return obj;
}
@@ -1395,6 +1387,24 @@ _channel_close(_channels *channels, int64_t id, int end, int force)
return _channels_close(channels, id, NULL, end, force);
}
+static int
+_channel_is_associated(_channels *channels, int64_t cid, int64_t interp,
+ int send)
+{
+ _PyChannelState *chan = _channels_lookup(channels, cid, NULL);
+ if (chan == NULL) {
+ return -1;
+ } else if (send && chan->closing != NULL) {
+ PyErr_Format(ChannelClosedError, "channel %" PRId64 " closed", cid);
+ return -1;
+ }
+
+ _channelend *end = _channelend_find(send ? chan->ends->send : chan->ends->recv,
+ interp, NULL);
+
+ return (end != NULL && end->open);
+}
+
/* ChannelID class */
static PyTypeObject ChannelIDtype;
@@ -1428,7 +1438,7 @@ channel_id_converter(PyObject *arg, void *ptr)
else {
PyErr_Format(PyExc_TypeError,
"channel ID must be an int, got %.100s",
- arg->ob_type->tp_name);
+ Py_TYPE(arg)->tp_name);
return 0;
}
*(int64_t *)ptr = cid;
@@ -1830,14 +1840,17 @@ _is_running(PyInterpreterState *interp)
"interpreter has more than one thread");
return -1;
}
- PyFrameObject *frame = tstate->frame;
+
+ assert(!PyErr_Occurred());
+ PyFrameObject *frame = PyThreadState_GetFrame(tstate);
if (frame == NULL) {
- if (PyErr_Occurred() != NULL) {
- return -1;
- }
return 0;
}
- return (int)(frame->f_executing);
+
+ int executing = (int)(frame->f_executing);
+ Py_DECREF(frame);
+
+ return executing;
}
static int
@@ -1928,7 +1941,7 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr,
// Switch to interpreter.
PyThreadState *save_tstate = NULL;
- if (interp != _PyInterpreterState_Get()) {
+ if (interp != PyInterpreterState_Get()) {
// XXX Using the "head" thread isn't strictly correct.
PyThreadState *tstate = PyInterpreterState_ThreadHead(interp);
// XXX Possible GILState issues?
@@ -1986,16 +1999,20 @@ _global_channels(void) {
}
static PyObject *
-interp_create(PyObject *self, PyObject *args)
+interp_create(PyObject *self, PyObject *args, PyObject *kwds)
{
- if (!PyArg_UnpackTuple(args, "create", 0, 0)) {
+
+ static char *kwlist[] = {"isolated", NULL};
+ int isolated = 1;
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "|$i:create", kwlist,
+ &isolated)) {
return NULL;
}
// Create and initialize the new interpreter.
PyThreadState *save_tstate = PyThreadState_Swap(NULL);
// XXX Possible GILState issues?
- PyThreadState *tstate = Py_NewInterpreter();
+ PyThreadState *tstate = _Py_NewInterpreter(isolated);
PyThreadState_Swap(save_tstate);
if (tstate == NULL) {
/* Since no new thread state was created, there is no exception to
@@ -2004,7 +2021,8 @@ interp_create(PyObject *self, PyObject *args)
PyErr_SetString(PyExc_RuntimeError, "interpreter creation failed");
return NULL;
}
- PyObject *idobj = _PyInterpreterState_GetIDObject(tstate->interp);
+ PyInterpreterState *interp = PyThreadState_GetInterpreter(tstate);
+ PyObject *idobj = _PyInterpreterState_GetIDObject(interp);
if (idobj == NULL) {
// XXX Possible GILState issues?
save_tstate = PyThreadState_Swap(tstate);
@@ -2012,7 +2030,7 @@ interp_create(PyObject *self, PyObject *args)
PyThreadState_Swap(save_tstate);
return NULL;
}
- _PyInterpreterState_RequireIDRef(tstate->interp, 1);
+ _PyInterpreterState_RequireIDRef(interp, 1);
return idobj;
}
@@ -2058,7 +2076,6 @@ interp_destroy(PyObject *self, PyObject *args, PyObject *kwds)
}
// Destroy the interpreter.
- //PyInterpreterState_Delete(interp);
PyThreadState *tstate = PyInterpreterState_ThreadHead(interp);
// XXX Possible GILState issues?
PyThreadState *save_tstate = PyThreadState_Swap(tstate);
@@ -2135,7 +2152,7 @@ static PyObject *
interp_get_main(PyObject *self, PyObject *Py_UNUSED(ignored))
{
// Currently, 0 is always the main interpreter.
- PY_INT64_T id = 0;
+ int64_t id = 0;
return _PyInterpreterID_New(id);
}
@@ -2326,6 +2343,68 @@ PyDoc_STRVAR(channel_list_all_doc,
\n\
Return the list of all IDs for active channels.");
+static PyObject *
+channel_list_interpreters(PyObject *self, PyObject *args, PyObject *kwds)
+{
+ static char *kwlist[] = {"cid", "send", NULL};
+ int64_t cid; /* Channel ID */
+ int send = 0; /* Send or receive end? */
+ int64_t id;
+ PyObject *ids, *id_obj;
+ PyInterpreterState *interp;
+
+ if (!PyArg_ParseTupleAndKeywords(
+ args, kwds, "O&$p:channel_list_interpreters",
+ kwlist, channel_id_converter, &cid, &send)) {
+ return NULL;
+ }
+
+ ids = PyList_New(0);
+ if (ids == NULL) {
+ goto except;
+ }
+
+ interp = PyInterpreterState_Head();
+ while (interp != NULL) {
+ id = PyInterpreterState_GetID(interp);
+ assert(id >= 0);
+ int res = _channel_is_associated(&_globals.channels, cid, id, send);
+ if (res < 0) {
+ goto except;
+ }
+ if (res) {
+ id_obj = _PyInterpreterState_GetIDObject(interp);
+ if (id_obj == NULL) {
+ goto except;
+ }
+ res = PyList_Insert(ids, 0, id_obj);
+ Py_DECREF(id_obj);
+ if (res < 0) {
+ goto except;
+ }
+ }
+ interp = PyInterpreterState_Next(interp);
+ }
+
+ goto finally;
+
+except:
+ Py_XDECREF(ids);
+ ids = NULL;
+
+finally:
+ return ids;
+}
+
+PyDoc_STRVAR(channel_list_interpreters_doc,
+"channel_list_interpreters(cid, *, send) -> [id]\n\
+\n\
+Return the list of all interpreter IDs associated with an end of the channel.\n\
+\n\
+The 'send' argument should be a boolean indicating whether to use the send or\n\
+receive end.");
+
+
static PyObject *
channel_send(PyObject *self, PyObject *args, PyObject *kwds)
{
@@ -2351,20 +2430,37 @@ Add the object's data to the channel's queue.");
static PyObject *
channel_recv(PyObject *self, PyObject *args, PyObject *kwds)
{
- static char *kwlist[] = {"cid", NULL};
+ static char *kwlist[] = {"cid", "default", NULL};
int64_t cid;
- if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&:channel_recv", kwlist,
- channel_id_converter, &cid)) {
+ PyObject *dflt = NULL;
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&|O:channel_recv", kwlist,
+ channel_id_converter, &cid, &dflt)) {
return NULL;
}
+ Py_XINCREF(dflt);
- return _channel_recv(&_globals.channels, cid);
+ PyObject *obj = _channel_recv(&_globals.channels, cid);
+ if (obj != NULL) {
+ Py_XDECREF(dflt);
+ return obj;
+ } else if (PyErr_Occurred()) {
+ Py_XDECREF(dflt);
+ return NULL;
+ } else if (dflt != NULL) {
+ return dflt;
+ } else {
+ PyErr_Format(ChannelEmptyError, "channel %" PRId64 " is empty", cid);
+ return NULL;
+ }
}
PyDoc_STRVAR(channel_recv_doc,
-"channel_recv(cid) -> obj\n\
+"channel_recv(cid, [default]) -> obj\n\
+\n\
+Return a new object from the data at the front of the channel's queue.\n\
\n\
-Return a new object from the data at the from of the channel's queue.");
+If there is nothing to receive then raise ChannelEmptyError, unless\n\
+a default value is provided. In that case return it.");
static PyObject *
channel_close(PyObject *self, PyObject *args, PyObject *kwds)
@@ -2455,8 +2551,8 @@ channel__channel_id(PyObject *self, PyObject *args, PyObject *kwds)
}
static PyMethodDef module_functions[] = {
- {"create", (PyCFunction)interp_create,
- METH_VARARGS, create_doc},
+ {"create", (PyCFunction)(void(*)(void))interp_create,
+ METH_VARARGS | METH_KEYWORDS, create_doc},
{"destroy", (PyCFunction)(void(*)(void))interp_destroy,
METH_VARARGS | METH_KEYWORDS, destroy_doc},
{"list_all", interp_list_all,
@@ -2479,6 +2575,8 @@ static PyMethodDef module_functions[] = {
METH_VARARGS | METH_KEYWORDS, channel_destroy_doc},
{"channel_list_all", channel_list_all,
METH_NOARGS, channel_list_all_doc},
+ {"channel_list_interpreters", (PyCFunction)(void(*)(void))channel_list_interpreters,
+ METH_VARARGS | METH_KEYWORDS, channel_list_interpreters_doc},
{"channel_send", (PyCFunction)(void(*)(void))channel_send,
METH_VARARGS | METH_KEYWORDS, channel_send_doc},
{"channel_recv", (PyCFunction)(void(*)(void))channel_recv,
diff --git a/Modules/_xxtestfuzz/_xxtestfuzz.c b/Modules/_xxtestfuzz/_xxtestfuzz.c
index 781dd235..e0694de6 100644
--- a/Modules/_xxtestfuzz/_xxtestfuzz.c
+++ b/Modules/_xxtestfuzz/_xxtestfuzz.c
@@ -44,10 +44,5 @@ static struct PyModuleDef _fuzzmodule = {
PyMODINIT_FUNC
PyInit__xxtestfuzz(void)
{
- PyObject *m = NULL;
-
- if ((m = PyModule_Create(&_fuzzmodule)) == NULL) {
- return NULL;
- }
- return m;
+ return PyModule_Create(&_fuzzmodule);
}
diff --git a/Modules/_xxtestfuzz/fuzz_struct_unpack_corpus/hello_string b/Modules/_xxtestfuzz/fuzz_struct_unpack_corpus/hello_string
new file mode 100644
index 00000000..92d47cd3
Binary files /dev/null and b/Modules/_xxtestfuzz/fuzz_struct_unpack_corpus/hello_string differ
diff --git a/Modules/_xxtestfuzz/fuzz_struct_unpack_corpus/long_zero b/Modules/_xxtestfuzz/fuzz_struct_unpack_corpus/long_zero
new file mode 100644
index 00000000..d952225c
Binary files /dev/null and b/Modules/_xxtestfuzz/fuzz_struct_unpack_corpus/long_zero differ
diff --git a/Modules/_xxtestfuzz/fuzz_struct_unpack_corpus/varied_format_string b/Modules/_xxtestfuzz/fuzz_struct_unpack_corpus/varied_format_string
new file mode 100644
index 00000000..a150dc08
Binary files /dev/null and b/Modules/_xxtestfuzz/fuzz_struct_unpack_corpus/varied_format_string differ
diff --git a/Modules/_xxtestfuzz/fuzz_tests.txt b/Modules/_xxtestfuzz/fuzz_tests.txt
index 9d330a66..053b77b4 100644
--- a/Modules/_xxtestfuzz/fuzz_tests.txt
+++ b/Modules/_xxtestfuzz/fuzz_tests.txt
@@ -5,3 +5,4 @@ fuzz_json_loads
fuzz_sre_compile
fuzz_sre_match
fuzz_csv_reader
+fuzz_struct_unpack
diff --git a/Modules/_xxtestfuzz/fuzzer.c b/Modules/_xxtestfuzz/fuzzer.c
index 1821eb2a..6bd2c3ae 100644
--- a/Modules/_xxtestfuzz/fuzzer.c
+++ b/Modules/_xxtestfuzz/fuzzer.c
@@ -79,6 +79,69 @@ static int fuzz_builtin_unicode(const char* data, size_t size) {
return 0;
}
+
+PyObject* struct_unpack_method = NULL;
+PyObject* struct_error = NULL;
+/* Called by LLVMFuzzerTestOneInput for initialization */
+static int init_struct_unpack() {
+ /* Import struct.unpack */
+ PyObject* struct_module = PyImport_ImportModule("struct");
+ if (struct_module == NULL) {
+ return 0;
+ }
+ struct_error = PyObject_GetAttrString(struct_module, "error");
+ if (struct_error == NULL) {
+ return 0;
+ }
+ struct_unpack_method = PyObject_GetAttrString(struct_module, "unpack");
+ return struct_unpack_method != NULL;
+}
+/* Fuzz struct.unpack(x, y) */
+static int fuzz_struct_unpack(const char* data, size_t size) {
+ /* Everything up to the first null byte is considered the
+ format. Everything after is the buffer */
+ const char* first_null = memchr(data, '\0', size);
+ if (first_null == NULL) {
+ return 0;
+ }
+
+ size_t format_length = first_null - data;
+ size_t buffer_length = size - format_length - 1;
+
+ PyObject* pattern = PyBytes_FromStringAndSize(data, format_length);
+ if (pattern == NULL) {
+ return 0;
+ }
+ PyObject* buffer = PyBytes_FromStringAndSize(first_null + 1, buffer_length);
+ if (buffer == NULL) {
+ Py_DECREF(pattern);
+ return 0;
+ }
+
+ PyObject* unpacked = PyObject_CallFunctionObjArgs(
+ struct_unpack_method, pattern, buffer, NULL);
+ /* Ignore any overflow errors, these are easily triggered accidentally */
+ if (unpacked == NULL && PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ PyErr_Clear();
+ }
+ /* The pascal format ('p') can fail with a SystemError about a negative size
+ when passed a zero count, e.g. struct.unpack('0p', b'') */
+ if (unpacked == NULL && PyErr_ExceptionMatches(PyExc_SystemError)) {
+ PyErr_Clear();
+ }
+ /* Ignore any struct.error exceptions, these can be caused by invalid
+ formats or incomplete buffers both of which are common. */
+ if (unpacked == NULL && PyErr_ExceptionMatches(struct_error)) {
+ PyErr_Clear();
+ }
+
+ Py_XDECREF(unpacked);
+ Py_DECREF(pattern);
+ Py_DECREF(buffer);
+ return 0;
+}
+
+
#define MAX_JSON_TEST_SIZE 0x10000
PyObject* json_loads_method = NULL;
@@ -104,7 +167,7 @@ static int fuzz_json_loads(const char* data, size_t size) {
if (input_bytes == NULL) {
return 0;
}
- PyObject* parsed = PyObject_CallFunctionObjArgs(json_loads_method, input_bytes, NULL);
+ PyObject* parsed = PyObject_CallOneArg(json_loads_method, input_bytes);
if (parsed == NULL) {
/* Ignore ValueError as the fuzzer will more than likely
generate some invalid json and values */
@@ -190,9 +253,10 @@ static int fuzz_sre_compile(const char* data, size_t size) {
PyErr_Clear();
}
/* Ignore some common errors thrown by sre_parse:
- Overflow, Assertion and Index */
+ Overflow, Assertion, Recursion and Index */
if (compiled == NULL && (PyErr_ExceptionMatches(PyExc_OverflowError) ||
PyErr_ExceptionMatches(PyExc_AssertionError) ||
+ PyErr_ExceptionMatches(PyExc_RecursionError) ||
PyErr_ExceptionMatches(PyExc_IndexError))
) {
PyErr_Clear();
@@ -263,7 +327,7 @@ static int fuzz_sre_match(const char* data, size_t size) {
PyObject* pattern = compiled_patterns[idx];
PyObject* match_callable = PyObject_GetAttrString(pattern, "match");
- PyObject* matches = PyObject_CallFunctionObjArgs(match_callable, to_match, NULL);
+ PyObject* matches = PyObject_CallOneArg(match_callable, to_match);
Py_XDECREF(matches);
Py_DECREF(match_callable);
@@ -378,6 +442,16 @@ int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
#if !defined(_Py_FUZZ_ONE) || defined(_Py_FUZZ_fuzz_builtin_unicode)
rv |= _run_fuzz(data, size, fuzz_builtin_unicode);
#endif
+#if !defined(_Py_FUZZ_ONE) || defined(_Py_FUZZ_fuzz_struct_unpack)
+ static int STRUCT_UNPACK_INITIALIZED = 0;
+ if (!STRUCT_UNPACK_INITIALIZED && !init_struct_unpack()) {
+ PyErr_Print();
+ abort();
+ } else {
+ STRUCT_UNPACK_INITIALIZED = 1;
+ }
+ rv |= _run_fuzz(data, size, fuzz_struct_unpack);
+#endif
#if !defined(_Py_FUZZ_ONE) || defined(_Py_FUZZ_fuzz_json_loads)
static int JSON_LOADS_INITIALIZED = 0;
if (!JSON_LOADS_INITIALIZED && !init_json_loads()) {
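
The fuzzer hunks above replace single-argument PyObject_CallFunctionObjArgs(func, arg, NULL) calls with PyObject_CallOneArg(func, arg), added in Python 3.9. A one-function sketch of the substitution with placeholder names:

    #include "Python.h"

    static PyObject *
    call_one(PyObject *func, PyObject *arg)
    {
        /* Same result as PyObject_CallFunctionObjArgs(func, arg, NULL),
           but a simpler, faster single-argument call. */
        return PyObject_CallOneArg(func, arg);
    }
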
diff --git a/Modules/_zoneinfo.c b/Modules/_zoneinfo.c
new file mode 100644
index 00000000..2cee65fa
--- /dev/null
+++ b/Modules/_zoneinfo.c
@@ -0,0 +1,2700 @@
+#include "Python.h"
+#include "structmember.h"
+
+#include <ctype.h>
+#include <stddef.h>
+#include <stdint.h>
+
+#include "datetime.h"
+
+// Imports
+static PyObject *io_open = NULL;
+static PyObject *_tzpath_find_tzfile = NULL;
+static PyObject *_common_mod = NULL;
+
+typedef struct TransitionRuleType TransitionRuleType;
+typedef struct StrongCacheNode StrongCacheNode;
+
+typedef struct {
+ PyObject *utcoff;
+ PyObject *dstoff;
+ PyObject *tzname;
+ long utcoff_seconds;
+} _ttinfo;
+
+typedef struct {
+ _ttinfo std;
+ _ttinfo dst;
+ int dst_diff;
+ TransitionRuleType *start;
+ TransitionRuleType *end;
+ unsigned char std_only;
+} _tzrule;
+
+typedef struct {
+ PyDateTime_TZInfo base;
+ PyObject *key;
+ PyObject *file_repr;
+ PyObject *weakreflist;
+ size_t num_transitions;
+ size_t num_ttinfos;
+ int64_t *trans_list_utc;
+ int64_t *trans_list_wall[2];
+ _ttinfo **trans_ttinfos; // References to the ttinfo for each transition
+ _ttinfo *ttinfo_before;
+ _tzrule tzrule_after;
+ _ttinfo *_ttinfos; // Unique array of ttinfos for ease of deallocation
+ unsigned char fixed_offset;
+ unsigned char source;
+} PyZoneInfo_ZoneInfo;
+
+struct TransitionRuleType {
+ int64_t (*year_to_timestamp)(TransitionRuleType *, int);
+};
+
+typedef struct {
+ TransitionRuleType base;
+ uint8_t month;
+ uint8_t week;
+ uint8_t day;
+ int8_t hour;
+ int8_t minute;
+ int8_t second;
+} CalendarRule;
+
+typedef struct {
+ TransitionRuleType base;
+ uint8_t julian;
+ unsigned int day;
+ int8_t hour;
+ int8_t minute;
+ int8_t second;
+} DayRule;
+
+struct StrongCacheNode {
+ StrongCacheNode *next;
+ StrongCacheNode *prev;
+ PyObject *key;
+ PyObject *zone;
+};
+
+static PyTypeObject PyZoneInfo_ZoneInfoType;
+
+// Globals
+static PyObject *TIMEDELTA_CACHE = NULL;
+static PyObject *ZONEINFO_WEAK_CACHE = NULL;
+static StrongCacheNode *ZONEINFO_STRONG_CACHE = NULL;
+static size_t ZONEINFO_STRONG_CACHE_MAX_SIZE = 8;
+
+static _ttinfo NO_TTINFO = {NULL, NULL, NULL, 0};
+
+// Constants
+static const int EPOCHORDINAL = 719163;
+static int DAYS_IN_MONTH[] = {
+ -1, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31,
+};
+
+static int DAYS_BEFORE_MONTH[] = {
+ -1, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334,
+};
+
+static const int SOURCE_NOCACHE = 0;
+static const int SOURCE_CACHE = 1;
+static const int SOURCE_FILE = 2;
+
+// Forward declarations
+static int
+load_data(PyZoneInfo_ZoneInfo *self, PyObject *file_obj);
+static void
+utcoff_to_dstoff(size_t *trans_idx, long *utcoffs, long *dstoffs,
+ unsigned char *isdsts, size_t num_transitions,
+ size_t num_ttinfos);
+static int
+ts_to_local(size_t *trans_idx, int64_t *trans_utc, long *utcoff,
+ int64_t *trans_local[2], size_t num_ttinfos,
+ size_t num_transitions);
+
+static int
+parse_tz_str(PyObject *tz_str_obj, _tzrule *out);
+
+static Py_ssize_t
+parse_abbr(const char *const p, PyObject **abbr);
+static Py_ssize_t
+parse_tz_delta(const char *const p, long *total_seconds);
+static Py_ssize_t
+parse_transition_time(const char *const p, int8_t *hour, int8_t *minute,
+ int8_t *second);
+static Py_ssize_t
+parse_transition_rule(const char *const p, TransitionRuleType **out);
+
+static _ttinfo *
+find_tzrule_ttinfo(_tzrule *rule, int64_t ts, unsigned char fold, int year);
+static _ttinfo *
+find_tzrule_ttinfo_fromutc(_tzrule *rule, int64_t ts, int year,
+ unsigned char *fold);
+
+static int
+build_ttinfo(long utcoffset, long dstoffset, PyObject *tzname, _ttinfo *out);
+static void
+xdecref_ttinfo(_ttinfo *ttinfo);
+static int
+ttinfo_eq(const _ttinfo *const tti0, const _ttinfo *const tti1);
+
+static int
+build_tzrule(PyObject *std_abbr, PyObject *dst_abbr, long std_offset,
+ long dst_offset, TransitionRuleType *start,
+ TransitionRuleType *end, _tzrule *out);
+static void
+free_tzrule(_tzrule *tzrule);
+
+static PyObject *
+load_timedelta(long seconds);
+
+static int
+get_local_timestamp(PyObject *dt, int64_t *local_ts);
+static _ttinfo *
+find_ttinfo(PyZoneInfo_ZoneInfo *self, PyObject *dt);
+
+static int
+ymd_to_ord(int y, int m, int d);
+static int
+is_leap_year(int year);
+
+static size_t
+_bisect(const int64_t value, const int64_t *arr, size_t size);
+
+static void
+eject_from_strong_cache(const PyTypeObject *const type, PyObject *key);
+static void
+clear_strong_cache(const PyTypeObject *const type);
+static void
+update_strong_cache(const PyTypeObject *const type, PyObject *key,
+ PyObject *zone);
+static PyObject *
+zone_from_strong_cache(const PyTypeObject *const type, PyObject *key);
+
+static PyObject *
+zoneinfo_new_instance(PyTypeObject *type, PyObject *key)
+{
+ PyObject *file_obj = NULL;
+ PyObject *file_path = NULL;
+
+ file_path = PyObject_CallFunctionObjArgs(_tzpath_find_tzfile, key, NULL);
+ if (file_path == NULL) {
+ return NULL;
+ }
+ else if (file_path == Py_None) {
+ file_obj = PyObject_CallMethod(_common_mod, "load_tzdata", "O", key);
+ if (file_obj == NULL) {
+ Py_DECREF(file_path);
+ return NULL;
+ }
+ }
+
+ PyObject *self = (PyObject *)(type->tp_alloc(type, 0));
+ if (self == NULL) {
+ goto error;
+ }
+
+ if (file_obj == NULL) {
+ file_obj = PyObject_CallFunction(io_open, "Os", file_path, "rb");
+ if (file_obj == NULL) {
+ goto error;
+ }
+ }
+
+ if (load_data((PyZoneInfo_ZoneInfo *)self, file_obj)) {
+ goto error;
+ }
+
+ PyObject *rv = PyObject_CallMethod(file_obj, "close", NULL);
+ Py_DECREF(file_obj);
+ file_obj = NULL;
+ if (rv == NULL) {
+ goto error;
+ }
+ Py_DECREF(rv);
+
+ ((PyZoneInfo_ZoneInfo *)self)->key = key;
+ Py_INCREF(key);
+
+ goto cleanup;
+error:
+ Py_XDECREF(self);
+ self = NULL;
+cleanup:
+ if (file_obj != NULL) {
+ PyObject *exc, *val, *tb;
+ PyErr_Fetch(&exc, &val, &tb);
+ PyObject *tmp = PyObject_CallMethod(file_obj, "close", NULL);
+ _PyErr_ChainExceptions(exc, val, tb);
+ if (tmp == NULL) {
+ Py_CLEAR(self);
+ }
+ Py_XDECREF(tmp);
+ Py_DECREF(file_obj);
+ }
+ Py_DECREF(file_path);
+ return self;
+}
+
+static PyObject *
+get_weak_cache(PyTypeObject *type)
+{
+ if (type == &PyZoneInfo_ZoneInfoType) {
+ return ZONEINFO_WEAK_CACHE;
+ }
+ else {
+ PyObject *cache =
+ PyObject_GetAttrString((PyObject *)type, "_weak_cache");
+ // We are assuming that the type lives at least as long as the function
+ // that calls get_weak_cache, and that it holds a reference to the
+ // cache, so we'll return a "borrowed reference".
+ Py_XDECREF(cache);
+ return cache;
+ }
+}
+
+static PyObject *
+zoneinfo_new(PyTypeObject *type, PyObject *args, PyObject *kw)
+{
+ PyObject *key = NULL;
+ static char *kwlist[] = {"key", NULL};
+ if (PyArg_ParseTupleAndKeywords(args, kw, "O", kwlist, &key) == 0) {
+ return NULL;
+ }
+
+ PyObject *instance = zone_from_strong_cache(type, key);
+ if (instance != NULL) {
+ return instance;
+ }
+
+ PyObject *weak_cache = get_weak_cache(type);
+ instance = PyObject_CallMethod(weak_cache, "get", "O", key, Py_None);
+ if (instance == NULL) {
+ return NULL;
+ }
+
+ if (instance == Py_None) {
+ Py_DECREF(instance);
+ PyObject *tmp = zoneinfo_new_instance(type, key);
+ if (tmp == NULL) {
+ return NULL;
+ }
+
+ instance =
+ PyObject_CallMethod(weak_cache, "setdefault", "OO", key, tmp);
+ Py_DECREF(tmp);
+ if (instance == NULL) {
+ return NULL;
+ }
+ ((PyZoneInfo_ZoneInfo *)instance)->source = SOURCE_CACHE;
+ }
+
+ update_strong_cache(type, key, instance);
+ return instance;
+}
+
+static void
+zoneinfo_dealloc(PyObject *obj_self)
+{
+ PyZoneInfo_ZoneInfo *self = (PyZoneInfo_ZoneInfo *)obj_self;
+
+ if (self->weakreflist != NULL) {
+ PyObject_ClearWeakRefs(obj_self);
+ }
+
+ if (self->trans_list_utc != NULL) {
+ PyMem_Free(self->trans_list_utc);
+ }
+
+ for (size_t i = 0; i < 2; i++) {
+ if (self->trans_list_wall[i] != NULL) {
+ PyMem_Free(self->trans_list_wall[i]);
+ }
+ }
+
+ if (self->_ttinfos != NULL) {
+ for (size_t i = 0; i < self->num_ttinfos; ++i) {
+ xdecref_ttinfo(&(self->_ttinfos[i]));
+ }
+ PyMem_Free(self->_ttinfos);
+ }
+
+ if (self->trans_ttinfos != NULL) {
+ PyMem_Free(self->trans_ttinfos);
+ }
+
+ free_tzrule(&(self->tzrule_after));
+
+ Py_XDECREF(self->key);
+ Py_XDECREF(self->file_repr);
+
+ Py_TYPE(self)->tp_free((PyObject *)self);
+}
+
+static PyObject *
+zoneinfo_from_file(PyTypeObject *type, PyObject *args, PyObject *kwargs)
+{
+ PyObject *file_obj = NULL;
+ PyObject *file_repr = NULL;
+ PyObject *key = Py_None;
+ PyZoneInfo_ZoneInfo *self = NULL;
+
+ static char *kwlist[] = {"", "key", NULL};
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|O", kwlist, &file_obj,
+ &key)) {
+ return NULL;
+ }
+
+ PyObject *obj_self = (PyObject *)(type->tp_alloc(type, 0));
+ self = (PyZoneInfo_ZoneInfo *)obj_self;
+ if (self == NULL) {
+ return NULL;
+ }
+
+ file_repr = PyUnicode_FromFormat("%R", file_obj);
+ if (file_repr == NULL) {
+ goto error;
+ }
+
+ if (load_data(self, file_obj)) {
+ goto error;
+ }
+
+ self->source = SOURCE_FILE;
+ self->file_repr = file_repr;
+ self->key = key;
+ Py_INCREF(key);
+
+ return obj_self;
+error:
+ Py_XDECREF(file_repr);
+ Py_XDECREF(self);
+ return NULL;
+}
+
+static PyObject *
+zoneinfo_no_cache(PyTypeObject *cls, PyObject *args, PyObject *kwargs)
+{
+ static char *kwlist[] = {"key", NULL};
+ PyObject *key = NULL;
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O", kwlist, &key)) {
+ return NULL;
+ }
+
+ PyObject *out = zoneinfo_new_instance(cls, key);
+ if (out != NULL) {
+ ((PyZoneInfo_ZoneInfo *)out)->source = SOURCE_NOCACHE;
+ }
+
+ return out;
+}
+
+static PyObject *
+zoneinfo_clear_cache(PyObject *cls, PyObject *args, PyObject *kwargs)
+{
+ PyObject *only_keys = NULL;
+ static char *kwlist[] = {"only_keys", NULL};
+
+ if (!(PyArg_ParseTupleAndKeywords(args, kwargs, "|$O", kwlist,
+ &only_keys))) {
+ return NULL;
+ }
+
+ PyTypeObject *type = (PyTypeObject *)cls;
+ PyObject *weak_cache = get_weak_cache(type);
+
+ if (only_keys == NULL || only_keys == Py_None) {
+ PyObject *rv = PyObject_CallMethod(weak_cache, "clear", NULL);
+ if (rv != NULL) {
+ Py_DECREF(rv);
+ }
+
+ clear_strong_cache(type);
+ }
+ else {
+ PyObject *item = NULL;
+ PyObject *pop = PyUnicode_FromString("pop");
+ if (pop == NULL) {
+ return NULL;
+ }
+
+ PyObject *iter = PyObject_GetIter(only_keys);
+ if (iter == NULL) {
+ Py_DECREF(pop);
+ return NULL;
+ }
+
+ while ((item = PyIter_Next(iter))) {
+ // Remove from strong cache
+ eject_from_strong_cache(type, item);
+
+ // Remove from weak cache
+ PyObject *tmp = PyObject_CallMethodObjArgs(weak_cache, pop, item,
+ Py_None, NULL);
+
+ Py_DECREF(item);
+ if (tmp == NULL) {
+ break;
+ }
+ Py_DECREF(tmp);
+ }
+ Py_DECREF(iter);
+ Py_DECREF(pop);
+ }
+
+ if (PyErr_Occurred()) {
+ return NULL;
+ }
+
+ Py_RETURN_NONE;
+}
+
+static PyObject *
+zoneinfo_utcoffset(PyObject *self, PyObject *dt)
+{
+ _ttinfo *tti = find_ttinfo((PyZoneInfo_ZoneInfo *)self, dt);
+ if (tti == NULL) {
+ return NULL;
+ }
+ Py_INCREF(tti->utcoff);
+ return tti->utcoff;
+}
+
+static PyObject *
+zoneinfo_dst(PyObject *self, PyObject *dt)
+{
+ _ttinfo *tti = find_ttinfo((PyZoneInfo_ZoneInfo *)self, dt);
+ if (tti == NULL) {
+ return NULL;
+ }
+ Py_INCREF(tti->dstoff);
+ return tti->dstoff;
+}
+
+static PyObject *
+zoneinfo_tzname(PyObject *self, PyObject *dt)
+{
+ _ttinfo *tti = find_ttinfo((PyZoneInfo_ZoneInfo *)self, dt);
+ if (tti == NULL) {
+ return NULL;
+ }
+ Py_INCREF(tti->tzname);
+ return tti->tzname;
+}
+
+#define HASTZINFO(p) (((_PyDateTime_BaseTZInfo *)(p))->hastzinfo)
+#define GET_DT_TZINFO(p) \
+ (HASTZINFO(p) ? ((PyDateTime_DateTime *)(p))->tzinfo : Py_None)
+
+static PyObject *
+zoneinfo_fromutc(PyObject *obj_self, PyObject *dt)
+{
+ if (!PyDateTime_Check(dt)) {
+ PyErr_SetString(PyExc_TypeError,
+ "fromutc: argument must be a datetime");
+ return NULL;
+ }
+ if (GET_DT_TZINFO(dt) != obj_self) {
+ PyErr_SetString(PyExc_ValueError,
+ "fromutc: dt.tzinfo "
+ "is not self");
+ return NULL;
+ }
+
+ PyZoneInfo_ZoneInfo *self = (PyZoneInfo_ZoneInfo *)obj_self;
+
+ int64_t timestamp;
+ if (get_local_timestamp(dt, &timestamp)) {
+ return NULL;
+ }
+ size_t num_trans = self->num_transitions;
+
+ _ttinfo *tti = NULL;
+ unsigned char fold = 0;
+
+ if (num_trans >= 1 && timestamp < self->trans_list_utc[0]) {
+ tti = self->ttinfo_before;
+ }
+ else if (num_trans == 0 ||
+ timestamp > self->trans_list_utc[num_trans - 1]) {
+ tti = find_tzrule_ttinfo_fromutc(&(self->tzrule_after), timestamp,
+ PyDateTime_GET_YEAR(dt), &fold);
+
+ // Immediately after the last manual transition, the fold/gap is
+ // between self->trans_ttinfos[num_transitions - 1] and whatever
+ // ttinfo applies immediately after the last transition, not between
+ // the STD and DST rules in the tzrule_after, so we may need to
+ // adjust the fold value.
+ if (num_trans) {
+ _ttinfo *tti_prev = NULL;
+ if (num_trans == 1) {
+ tti_prev = self->ttinfo_before;
+ }
+ else {
+ tti_prev = self->trans_ttinfos[num_trans - 2];
+ }
+ int64_t diff = tti_prev->utcoff_seconds - tti->utcoff_seconds;
+ if (diff > 0 &&
+ timestamp < (self->trans_list_utc[num_trans - 1] + diff)) {
+ fold = 1;
+ }
+ }
+ }
+ else {
+ size_t idx = _bisect(timestamp, self->trans_list_utc, num_trans);
+ _ttinfo *tti_prev = NULL;
+
+ if (idx >= 2) {
+ tti_prev = self->trans_ttinfos[idx - 2];
+ tti = self->trans_ttinfos[idx - 1];
+ }
+ else {
+ tti_prev = self->ttinfo_before;
+ tti = self->trans_ttinfos[0];
+ }
+
+ // Detect fold
+ int64_t shift =
+ (int64_t)(tti_prev->utcoff_seconds - tti->utcoff_seconds);
+ if (shift > (timestamp - self->trans_list_utc[idx - 1])) {
+ fold = 1;
+ }
+ }
+
+ PyObject *tmp = PyNumber_Add(dt, tti->utcoff);
+ if (tmp == NULL) {
+ return NULL;
+ }
+
+ if (fold) {
+ if (PyDateTime_CheckExact(tmp)) {
+ ((PyDateTime_DateTime *)tmp)->fold = 1;
+ dt = tmp;
+ }
+ else {
+ PyObject *replace = PyObject_GetAttrString(tmp, "replace");
+ PyObject *args = PyTuple_New(0);
+ PyObject *kwargs = PyDict_New();
+
+ Py_DECREF(tmp);
+ if (args == NULL || kwargs == NULL || replace == NULL) {
+ Py_XDECREF(args);
+ Py_XDECREF(kwargs);
+ Py_XDECREF(replace);
+ return NULL;
+ }
+
+ dt = NULL;
+ if (!PyDict_SetItemString(kwargs, "fold", _PyLong_One)) {
+ dt = PyObject_Call(replace, args, kwargs);
+ }
+
+ Py_DECREF(args);
+ Py_DECREF(kwargs);
+ Py_DECREF(replace);
+
+ if (dt == NULL) {
+ return NULL;
+ }
+ }
+ }
+ else {
+ dt = tmp;
+ }
+ return dt;
+}
+
+static PyObject *
+zoneinfo_repr(PyZoneInfo_ZoneInfo *self)
+{
+ PyObject *rv = NULL;
+ const char *type_name = Py_TYPE((PyObject *)self)->tp_name;
+ if (!(self->key == Py_None)) {
+ rv = PyUnicode_FromFormat("%s(key=%R)", type_name, self->key);
+ }
+ else {
+ assert(PyUnicode_Check(self->file_repr));
+ rv = PyUnicode_FromFormat("%s.from_file(%U)", type_name,
+ self->file_repr);
+ }
+
+ return rv;
+}
+
+static PyObject *
+zoneinfo_str(PyZoneInfo_ZoneInfo *self)
+{
+ if (!(self->key == Py_None)) {
+ Py_INCREF(self->key);
+ return self->key;
+ }
+ else {
+ return zoneinfo_repr(self);
+ }
+}
+
+/* Pickles the ZoneInfo object by key and source.
+ *
+ * ZoneInfo objects are pickled by reference to the TZif file that they came
+ * from, which means that the exact transitions may be different or the file
+ * may not un-pickle if the data has changed on disk in the interim.
+ *
+ * It is necessary to include a bit indicating whether or not the object
+ * was constructed from the cache, because from-cache objects will hit the
+ * unpickling process's cache, whereas no-cache objects will bypass it.
+ *
+ * Objects constructed from ZoneInfo.from_file cannot be pickled.
+ */
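+/* For example, an instance constructed through the cache reduces to
+ * (ZoneInfo._unpickle, (key, 1)), while a no-cache instance reduces to
+ * (ZoneInfo._unpickle, (key, 0)). */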
+static PyObject *
+zoneinfo_reduce(PyObject *obj_self, PyObject *unused)
+{
+ PyZoneInfo_ZoneInfo *self = (PyZoneInfo_ZoneInfo *)obj_self;
+ if (self->source == SOURCE_FILE) {
+ // Objects constructed from files cannot be pickled.
+ PyObject *pickle = PyImport_ImportModule("pickle");
+ if (pickle == NULL) {
+ return NULL;
+ }
+
+ PyObject *pickle_error =
+ PyObject_GetAttrString(pickle, "PicklingError");
+ Py_DECREF(pickle);
+ if (pickle_error == NULL) {
+ return NULL;
+ }
+
+ PyErr_Format(pickle_error,
+ "Cannot pickle a ZoneInfo file from a file stream.");
+ Py_DECREF(pickle_error);
+ return NULL;
+ }
+
+ unsigned char from_cache = self->source == SOURCE_CACHE ? 1 : 0;
+ PyObject *constructor = PyObject_GetAttrString(obj_self, "_unpickle");
+
+ if (constructor == NULL) {
+ return NULL;
+ }
+
+ PyObject *rv = Py_BuildValue("O(OB)", constructor, self->key, from_cache);
+ Py_DECREF(constructor);
+ return rv;
+}
+
+static PyObject *
+zoneinfo__unpickle(PyTypeObject *cls, PyObject *args)
+{
+ PyObject *key;
+ unsigned char from_cache;
+ if (!PyArg_ParseTuple(args, "OB", &key, &from_cache)) {
+ return NULL;
+ }
+
+ if (from_cache) {
+ PyObject *val_args = Py_BuildValue("(O)", key);
+ if (val_args == NULL) {
+ return NULL;
+ }
+
+ PyObject *rv = zoneinfo_new(cls, val_args, NULL);
+
+ Py_DECREF(val_args);
+ return rv;
+ }
+ else {
+ return zoneinfo_new_instance(cls, key);
+ }
+}
+
+/* It is relatively expensive to construct new timedelta objects, and in most
+ * cases we're looking at a relatively small number of timedeltas, such as
+ * integer number of hours, etc. We will keep a cache so that we construct
+ * a minimal number of these.
+ *
+ * Possibly this should be replaced with an LRU cache so that it's not possible
+ * for the memory usage to explode from this, but in order for this to be a
+ * serious problem, one would need to deliberately craft a malicious time zone
+ * file with many distinct offsets. As of tzdb 2019c, loading every single zone
+ * fills the cache with ~450 timedeltas for a total size of ~12kB.
+ *
+ * This returns a new reference to the timedelta.
+ */
+static PyObject *
+load_timedelta(long seconds)
+{
+ PyObject *rv = NULL;
+ PyObject *pyoffset = PyLong_FromLong(seconds);
+ if (pyoffset == NULL) {
+ return NULL;
+ }
+ int contains = PyDict_Contains(TIMEDELTA_CACHE, pyoffset);
+ if (contains == -1) {
+ goto error;
+ }
+
+ if (!contains) {
+ PyObject *tmp = PyDateTimeAPI->Delta_FromDelta(
+ 0, seconds, 0, 1, PyDateTimeAPI->DeltaType);
+
+ if (tmp == NULL) {
+ goto error;
+ }
+
+ rv = PyDict_SetDefault(TIMEDELTA_CACHE, pyoffset, tmp);
+ Py_DECREF(tmp);
+ }
+ else {
+ rv = PyDict_GetItem(TIMEDELTA_CACHE, pyoffset);
+ }
+
+ Py_DECREF(pyoffset);
+ Py_INCREF(rv);
+ return rv;
+error:
+ Py_DECREF(pyoffset);
+ return NULL;
+}
+
+/* Constructor for _ttinfo object - this starts by initializing the _ttinfo
+ * to { NULL, NULL, NULL }, so that Py_XDECREF will work on partially
+ * initialized _ttinfo objects.
+ */
+static int
+build_ttinfo(long utcoffset, long dstoffset, PyObject *tzname, _ttinfo *out)
+{
+ out->utcoff = NULL;
+ out->dstoff = NULL;
+ out->tzname = NULL;
+
+ out->utcoff_seconds = utcoffset;
+ out->utcoff = load_timedelta(utcoffset);
+ if (out->utcoff == NULL) {
+ return -1;
+ }
+
+ out->dstoff = load_timedelta(dstoffset);
+ if (out->dstoff == NULL) {
+ return -1;
+ }
+
+ out->tzname = tzname;
+ Py_INCREF(tzname);
+
+ return 0;
+}
+
+/* Decrease reference count on any non-NULL members of a _ttinfo */
+static void
+xdecref_ttinfo(_ttinfo *ttinfo)
+{
+ if (ttinfo != NULL) {
+ Py_XDECREF(ttinfo->utcoff);
+ Py_XDECREF(ttinfo->dstoff);
+ Py_XDECREF(ttinfo->tzname);
+ }
+}
+
+/* Equality function for _ttinfo. */
+static int
+ttinfo_eq(const _ttinfo *const tti0, const _ttinfo *const tti1)
+{
+ int rv;
+ if ((rv = PyObject_RichCompareBool(tti0->utcoff, tti1->utcoff, Py_EQ)) <
+ 1) {
+ goto end;
+ }
+
+ if ((rv = PyObject_RichCompareBool(tti0->dstoff, tti1->dstoff, Py_EQ)) <
+ 1) {
+ goto end;
+ }
+
+ if ((rv = PyObject_RichCompareBool(tti0->tzname, tti1->tzname, Py_EQ)) <
+ 1) {
+ goto end;
+ }
+end:
+ return rv;
+}
+
+/* Given a file-like object, this populates a ZoneInfo object
+ *
+ * The current version calls into a Python function to read the data from
+ * file into Python objects, and this translates those Python objects into
+ * C values and calculates derived values (e.g. dstoff) in C.
+ *
+ * This returns 0 on success and -1 on failure.
+ *
+ * The function will never return while `self` is partially initialized;
+ * the object only needs to be freed / deallocated if this succeeds.
+ */
+static int
+load_data(PyZoneInfo_ZoneInfo *self, PyObject *file_obj)
+{
+ PyObject *data_tuple = NULL;
+
+ long *utcoff = NULL;
+ long *dstoff = NULL;
+ size_t *trans_idx = NULL;
+ unsigned char *isdst = NULL;
+
+ self->trans_list_utc = NULL;
+ self->trans_list_wall[0] = NULL;
+ self->trans_list_wall[1] = NULL;
+ self->trans_ttinfos = NULL;
+ self->_ttinfos = NULL;
+ self->file_repr = NULL;
+
+ size_t ttinfos_allocated = 0;
+
+ data_tuple = PyObject_CallMethod(_common_mod, "load_data", "O", file_obj);
+
+ if (data_tuple == NULL) {
+ goto error;
+ }
+
+ if (!PyTuple_CheckExact(data_tuple)) {
+ PyErr_Format(PyExc_TypeError, "Invalid data result type: %r",
+ data_tuple);
+ goto error;
+ }
+
+ // Unpack the data tuple
+ PyObject *trans_idx_list = PyTuple_GetItem(data_tuple, 0);
+ if (trans_idx_list == NULL) {
+ goto error;
+ }
+
+ PyObject *trans_utc = PyTuple_GetItem(data_tuple, 1);
+ if (trans_utc == NULL) {
+ goto error;
+ }
+
+ PyObject *utcoff_list = PyTuple_GetItem(data_tuple, 2);
+ if (utcoff_list == NULL) {
+ goto error;
+ }
+
+ PyObject *isdst_list = PyTuple_GetItem(data_tuple, 3);
+ if (isdst_list == NULL) {
+ goto error;
+ }
+
+ PyObject *abbr = PyTuple_GetItem(data_tuple, 4);
+ if (abbr == NULL) {
+ goto error;
+ }
+
+ PyObject *tz_str = PyTuple_GetItem(data_tuple, 5);
+ if (tz_str == NULL) {
+ goto error;
+ }
+
+ // Load the relevant sizes
+ Py_ssize_t num_transitions = PyTuple_Size(trans_utc);
+ if (num_transitions < 0) {
+ goto error;
+ }
+
+ Py_ssize_t num_ttinfos = PyTuple_Size(utcoff_list);
+ if (num_ttinfos < 0) {
+ goto error;
+ }
+
+ self->num_transitions = (size_t)num_transitions;
+ self->num_ttinfos = (size_t)num_ttinfos;
+
+ // Load the transition indices and list
+ self->trans_list_utc =
+ PyMem_Malloc(self->num_transitions * sizeof(int64_t));
+ trans_idx = PyMem_Malloc(self->num_transitions * sizeof(Py_ssize_t));
+
+ for (size_t i = 0; i < self->num_transitions; ++i) {
+ PyObject *num = PyTuple_GetItem(trans_utc, i);
+ if (num == NULL) {
+ goto error;
+ }
+ self->trans_list_utc[i] = PyLong_AsLongLong(num);
+ if (self->trans_list_utc[i] == -1 && PyErr_Occurred()) {
+ goto error;
+ }
+
+ num = PyTuple_GetItem(trans_idx_list, i);
+ if (num == NULL) {
+ goto error;
+ }
+
+ Py_ssize_t cur_trans_idx = PyLong_AsSsize_t(num);
+ if (cur_trans_idx == -1) {
+ goto error;
+ }
+
+ trans_idx[i] = (size_t)cur_trans_idx;
+ if (trans_idx[i] > self->num_ttinfos) {
+ PyErr_Format(
+ PyExc_ValueError,
+ "Invalid transition index found while reading TZif: %zd",
+ cur_trans_idx);
+
+ goto error;
+ }
+ }
+
+ // Load UTC offsets and isdst (size num_ttinfos)
+ utcoff = PyMem_Malloc(self->num_ttinfos * sizeof(long));
+ isdst = PyMem_Malloc(self->num_ttinfos * sizeof(unsigned char));
+
+ if (utcoff == NULL || isdst == NULL) {
+ goto error;
+ }
+ for (size_t i = 0; i < self->num_ttinfos; ++i) {
+ PyObject *num = PyTuple_GetItem(utcoff_list, i);
+ if (num == NULL) {
+ goto error;
+ }
+
+ utcoff[i] = PyLong_AsLong(num);
+ if (utcoff[i] == -1 && PyErr_Occurred()) {
+ goto error;
+ }
+
+ num = PyTuple_GetItem(isdst_list, i);
+ if (num == NULL) {
+ goto error;
+ }
+
+ int isdst_with_error = PyObject_IsTrue(num);
+ if (isdst_with_error == -1) {
+ goto error;
+ }
+ else {
+ isdst[i] = (unsigned char)isdst_with_error;
+ }
+ }
+
+ dstoff = PyMem_Calloc(self->num_ttinfos, sizeof(long));
+ if (dstoff == NULL) {
+ goto error;
+ }
+
+ // Derive dstoff and trans_list_wall from the information we've loaded
+ utcoff_to_dstoff(trans_idx, utcoff, dstoff, isdst, self->num_transitions,
+ self->num_ttinfos);
+
+ if (ts_to_local(trans_idx, self->trans_list_utc, utcoff,
+ self->trans_list_wall, self->num_ttinfos,
+ self->num_transitions)) {
+ goto error;
+ }
+
+ // Build _ttinfo objects from utcoff, dstoff and abbr
+ self->_ttinfos = PyMem_Malloc(self->num_ttinfos * sizeof(_ttinfo));
+ for (size_t i = 0; i < self->num_ttinfos; ++i) {
+ PyObject *tzname = PyTuple_GetItem(abbr, i);
+ if (tzname == NULL) {
+ goto error;
+ }
+
+ ttinfos_allocated++;
+ if (build_ttinfo(utcoff[i], dstoff[i], tzname, &(self->_ttinfos[i]))) {
+ goto error;
+ }
+ }
+
+ // Build our mapping from transition to the ttinfo that applies
+ self->trans_ttinfos =
+ PyMem_Calloc(self->num_transitions, sizeof(_ttinfo *));
+ for (size_t i = 0; i < self->num_transitions; ++i) {
+ size_t ttinfo_idx = trans_idx[i];
+ assert(ttinfo_idx < self->num_ttinfos);
+ self->trans_ttinfos[i] = &(self->_ttinfos[ttinfo_idx]);
+ }
+
+ // Set ttinfo_before to the first non-DST transition
+ for (size_t i = 0; i < self->num_ttinfos; ++i) {
+ if (!isdst[i]) {
+ self->ttinfo_before = &(self->_ttinfos[i]);
+ break;
+ }
+ }
+
+ // If there are only DST ttinfos, pick the first one; if there are no
+ // ttinfos at all, set ttinfo_before to NULL
+ if (self->ttinfo_before == NULL && self->num_ttinfos > 0) {
+ self->ttinfo_before = &(self->_ttinfos[0]);
+ }
+
+ if (tz_str != Py_None && PyObject_IsTrue(tz_str)) {
+ if (parse_tz_str(tz_str, &(self->tzrule_after))) {
+ goto error;
+ }
+ }
+ else {
+ if (!self->num_ttinfos) {
+ PyErr_Format(PyExc_ValueError, "No time zone information found.");
+ goto error;
+ }
+
+ size_t idx;
+ if (!self->num_transitions) {
+ idx = self->num_ttinfos - 1;
+ }
+ else {
+ idx = trans_idx[self->num_transitions - 1];
+ }
+
+ _ttinfo *tti = &(self->_ttinfos[idx]);
+ build_tzrule(tti->tzname, NULL, tti->utcoff_seconds, 0, NULL, NULL,
+ &(self->tzrule_after));
+
+ // We've abused the build_tzrule constructor to construct an STD-only
+ // rule mimicking whatever ttinfo we've picked up, but it's possible
+ // that the one we've picked up is a DST zone, so we need to make sure
+ // that the dstoff is set correctly in that case.
+ if (PyObject_IsTrue(tti->dstoff)) {
+ _ttinfo *tti_after = &(self->tzrule_after.std);
+ Py_DECREF(tti_after->dstoff);
+ tti_after->dstoff = tti->dstoff;
+ Py_INCREF(tti_after->dstoff);
+ }
+ }
+
+ // Determine if this is a "fixed offset" zone, meaning that the output of
+ // the utcoffset, dst and tzname functions does not depend on the specific
+ // datetime passed.
+ //
+ // We make three simplifying assumptions here:
+ //
+ // 1. If tzrule_after is not std_only, it has transitions that might occur
+ // (it is possible to construct TZ strings that specify STD and DST but
+ // no transitions ever occur, such as AAA0BBB,0/0,J365/25).
+ // 2. If self->_ttinfos contains more than one _ttinfo object, the objects
+ // represent different offsets.
+ // 3. self->_ttinfos contains no unused _ttinfos (in which case an otherwise
+ // fixed-offset zone with extra _ttinfos defined may appear to *not* be
+ // a fixed offset zone).
+ //
+ // Violations to these assumptions would be fairly exotic, and exotic
+ // zones should almost certainly not be used with datetime.time (the
+ // only thing that would be affected by this).
+ if (self->num_ttinfos > 1 || !self->tzrule_after.std_only) {
+ self->fixed_offset = 0;
+ }
+ else if (self->num_ttinfos == 0) {
+ self->fixed_offset = 1;
+ }
+ else {
+ int constant_offset =
+ ttinfo_eq(&(self->_ttinfos[0]), &self->tzrule_after.std);
+ if (constant_offset < 0) {
+ goto error;
+ }
+ else {
+ self->fixed_offset = constant_offset;
+ }
+ }
+
+ int rv = 0;
+ goto cleanup;
+error:
+ // These resources only need to be freed if we have failed, if we succeed
+ // in initializing a PyZoneInfo_ZoneInfo object, we can rely on its dealloc
+ // method to free the relevant resources.
+ if (self->trans_list_utc != NULL) {
+ PyMem_Free(self->trans_list_utc);
+ self->trans_list_utc = NULL;
+ }
+
+ for (size_t i = 0; i < 2; ++i) {
+ if (self->trans_list_wall[i] != NULL) {
+ PyMem_Free(self->trans_list_wall[i]);
+ self->trans_list_wall[i] = NULL;
+ }
+ }
+
+ if (self->_ttinfos != NULL) {
+ for (size_t i = 0; i < ttinfos_allocated; ++i) {
+ xdecref_ttinfo(&(self->_ttinfos[i]));
+ }
+ PyMem_Free(self->_ttinfos);
+ self->_ttinfos = NULL;
+ }
+
+ if (self->trans_ttinfos != NULL) {
+ PyMem_Free(self->trans_ttinfos);
+ self->trans_ttinfos = NULL;
+ }
+
+ rv = -1;
+cleanup:
+ Py_XDECREF(data_tuple);
+
+ if (utcoff != NULL) {
+ PyMem_Free(utcoff);
+ }
+
+ if (dstoff != NULL) {
+ PyMem_Free(dstoff);
+ }
+
+ if (isdst != NULL) {
+ PyMem_Free(isdst);
+ }
+
+ if (trans_idx != NULL) {
+ PyMem_Free(trans_idx);
+ }
+
+ return rv;
+}
+
+/* Function to calculate the local timestamp of a transition from the year. */
+int64_t
+calendarrule_year_to_timestamp(TransitionRuleType *base_self, int year)
+{
+ CalendarRule *self = (CalendarRule *)base_self;
+
+ // We want (year, month, day of month); we have year and month, but we
+ // need to turn (week, day-of-week) into day-of-month
+ //
+ // Week 1 is the first week in which day `day` (where 0 = Sunday) appears.
+ // Week 5 represents the last occurrence of day `day`, so we need to know
+ // the first weekday of the month and the number of days in the month.
+ int8_t first_day = (ymd_to_ord(year, self->month, 1) + 6) % 7;
+ uint8_t days_in_month = DAYS_IN_MONTH[self->month];
+ if (self->month == 2 && is_leap_year(year)) {
+ days_in_month += 1;
+ }
+
+ // This equation seems magical, so I'll break it down:
+ // 1. calendar says 0 = Monday, POSIX says 0 = Sunday so we need first_day
+ // + 1 to get 1 = Monday -> 7 = Sunday, which is still equivalent
+ // because this math is mod 7
+ // 2. Get first day - desired day mod 7 (adjusting by 7 for negative
+ // numbers so that -1 % 7 = 6).
+ // 3. Add 1 because month days are a 1-based index.
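+ //
+ // For example (worked through): for M11.1.0 (first Sunday in November)
+ // in 2020, November 1st is a Sunday, so first_day = 6 and the formula
+ // below gives month_day = 1, i.e. the transition falls on November 1st.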
+ int8_t month_day = ((int8_t)(self->day) - (first_day + 1)) % 7;
+ if (month_day < 0) {
+ month_day += 7;
+ }
+ month_day += 1;
+
+ // Now use a 0-based index version of `week` to calculate the w-th
+ // occurrence of `day`
+ month_day += ((int8_t)(self->week) - 1) * 7;
+
+ // month_day will only be > days_in_month if w was 5, and `w` means "last
+ // occurrence of `d`", so now we just check if we over-shot the end of the
+ // month and if so knock off 1 week.
+ if (month_day > days_in_month) {
+ month_day -= 7;
+ }
+
+ int64_t ordinal = ymd_to_ord(year, self->month, month_day) - EPOCHORDINAL;
+ return ((ordinal * 86400) + (int64_t)(self->hour * 3600) +
+ (int64_t)(self->minute * 60) + (int64_t)(self->second));
+}
+
+/* Constructor for CalendarRule. */
+int
+calendarrule_new(uint8_t month, uint8_t week, uint8_t day, int8_t hour,
+ int8_t minute, int8_t second, CalendarRule *out)
+{
+ // These bounds come from the POSIX standard, which describes an Mm.n.d
+ // rule as:
+ //
+ // The d'th day (0 <= d <= 6) of week n of month m of the year (1 <= n <=
+ // 5, 1 <= m <= 12, where week 5 means "the last d day in month m" which
+ // may occur in either the fourth or the fifth week). Week 1 is the first
+ // week in which the d'th day occurs. Day zero is Sunday.
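+ //
+ // For example, M3.2.0 means month 3, week 2, day 0: the second Sunday
+ // in March.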
+ if (month <= 0 || month > 12) {
+ PyErr_Format(PyExc_ValueError, "Month must be in (0, 12]");
+ return -1;
+ }
+
+ if (week <= 0 || week > 5) {
+ PyErr_Format(PyExc_ValueError, "Week must be in (0, 5]");
+ return -1;
+ }
+
+ // day is an unsigned integer, so day < 0 should always return false, but
+ // if day's type changes to a signed integer *without* changing this value,
+ // it may create a bug. Considering that the compiler should be able to
+ // optimize out the first comparison if day is an unsigned integer anyway,
+ // we will leave this comparison in place and disable the compiler warning.
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wtype-limits"
+ if (day < 0 || day > 6) {
+#pragma GCC diagnostic pop
+ PyErr_Format(PyExc_ValueError, "Day must be in [0, 6]");
+ return -1;
+ }
+
+ TransitionRuleType base = {&calendarrule_year_to_timestamp};
+
+ CalendarRule new_offset = {
+ .base = base,
+ .month = month,
+ .week = week,
+ .day = day,
+ .hour = hour,
+ .minute = minute,
+ .second = second,
+ };
+
+ *out = new_offset;
+ return 0;
+}
+
+/* Function to calculate the local timestamp of a transition from the year.
+ *
+ * This translates the day of the year into a local timestamp: either a
+ * 1-based Julian day, not including leap days, or the 0-based year-day,
+ * including leap days.
+ * */
+int64_t
+dayrule_year_to_timestamp(TransitionRuleType *base_self, int year)
+{
+ // The function signature requires a TransitionRuleType pointer, but this
+ // function is only applicable to DayRule* objects.
+ DayRule *self = (DayRule *)base_self;
+
+ // ymd_to_ord calculates the number of days since 0001-01-01, but we want
+ // to know the number of days since 1970-01-01, so we must subtract off
+ // the equivalent of ymd_to_ord(1970, 1, 1).
+ //
+ // We subtract off an additional 1 day to account for January 1st (we want
+ // the number of full days *before* the date of the transition - partial
+ // days are accounted for in the hour, minute and second portions).
+ int64_t days_before_year = ymd_to_ord(year, 1, 1) - EPOCHORDINAL - 1;
+
+ // The Julian day specification skips over February 29th in leap years,
+ // from the POSIX standard:
+ //
+ // Leap days shall not be counted. That is, in all years, including leap
+ // years, February 28 is day 59 and March 1 is day 60. It is impossible to
+ // refer explicitly to the occasional February 29.
+ //
+ // This is actually more useful than you'd think: if you want a rule that
+ // always transitions on a given calendar day (other than February 29th),
+ // you would use a Julian day, e.g. J91 always refers to April 1st and J365
+ // always refers to December 31st.
+ unsigned int day = self->day;
+ if (self->julian && day >= 59 && is_leap_year(year)) {
+ day += 1;
+ }
+
+ return ((days_before_year + day) * 86400) + (self->hour * 3600) +
+ (self->minute * 60) + self->second;
+}
+
+/* Constructor for DayRule. */
+static int
+dayrule_new(uint8_t julian, unsigned int day, int8_t hour, int8_t minute,
+ int8_t second, DayRule *out)
+{
+ // The POSIX standard specifies that Julian days must be in the range (1 <=
+ // n <= 365) and that non-Julian (they call it "0-based Julian") days must
+ // be in the range (0 <= n <= 365).
+ if (day < julian || day > 365) {
+ PyErr_Format(PyExc_ValueError, "day must be in [%u, 365], not: %u",
+ julian, day);
+ return -1;
+ }
+
+ TransitionRuleType base = {
+ &dayrule_year_to_timestamp,
+ };
+
+ DayRule tmp = {
+ .base = base,
+ .julian = julian,
+ .day = day,
+ .hour = hour,
+ .minute = minute,
+ .second = second,
+ };
+
+ *out = tmp;
+
+ return 0;
+}
+
+/* Calculate the start and end rules for a _tzrule in the given year. */
+static void
+tzrule_transitions(_tzrule *rule, int year, int64_t *start, int64_t *end)
+{
+ assert(rule->start != NULL);
+ assert(rule->end != NULL);
+ *start = rule->start->year_to_timestamp(rule->start, year);
+ *end = rule->end->year_to_timestamp(rule->end, year);
+}
+
+/* Calculate the _ttinfo that applies at a given local time from a _tzrule.
+ *
+ * This takes a local timestamp and fold for disambiguation purposes; the year
+ * could technically be calculated from the timestamp, but given that the
+ * callers of this function already have the year information accessible from
+ * the datetime struct, it is taken as an additional parameter to reduce
+ * unnecessary calculation.
+ * */
+static _ttinfo *
+find_tzrule_ttinfo(_tzrule *rule, int64_t ts, unsigned char fold, int year)
+{
+ if (rule->std_only) {
+ return &(rule->std);
+ }
+
+ int64_t start, end;
+ uint8_t isdst;
+
+ tzrule_transitions(rule, year, &start, &end);
+
+ // With fold = 0, the period (denominated in local time) with the smaller
+ // offset starts at the end of the gap and ends at the end of the fold;
+ // with fold = 1, it runs from the start of the gap to the beginning of the
+ // fold.
+ //
+ // So in order to determine the DST boundaries we need to know both the
+ // fold and whether DST is positive or negative (rare), and it turns out
+ // that this boils down to fold XOR is_positive.
+ if (fold == (rule->dst_diff >= 0)) {
+ end -= rule->dst_diff;
+ }
+ else {
+ start += rule->dst_diff;
+ }
+
+ if (start < end) {
+ isdst = (ts >= start) && (ts < end);
+ }
+ else {
+ isdst = (ts < end) || (ts >= start);
+ }
+
+ if (isdst) {
+ return &(rule->dst);
+ }
+ else {
+ return &(rule->std);
+ }
+}
+
+/* Calculate the ttinfo and fold that applies for a _tzrule at an epoch time.
+ *
+ * This function can determine the _ttinfo that applies at a given epoch time,
+ * (analogous to trans_list_utc), and whether or not the datetime is in a fold.
+ * This is to be used in the .fromutc() function.
+ *
+ * The year is technically a redundant parameter, because it can be calculated
+ * from the timestamp, but all callers of this function should have the year
+ * in the datetime struct anyway, so taking it as a parameter saves unnecessary
+ * calculation.
+ **/
+static _ttinfo *
+find_tzrule_ttinfo_fromutc(_tzrule *rule, int64_t ts, int year,
+ unsigned char *fold)
+{
+ if (rule->std_only) {
+ *fold = 0;
+ return &(rule->std);
+ }
+
+ int64_t start, end;
+ uint8_t isdst;
+ tzrule_transitions(rule, year, &start, &end);
+ start -= rule->std.utcoff_seconds;
+ end -= rule->dst.utcoff_seconds;
+
+ if (start < end) {
+ isdst = (ts >= start) && (ts < end);
+ }
+ else {
+ isdst = (ts < end) || (ts >= start);
+ }
+
+ // For positive DST, the ambiguous period is one dst_diff after the end of
+ // DST; for negative DST, the ambiguous period is one dst_diff before the
+ // start of DST.
+ int64_t ambig_start, ambig_end;
+ if (rule->dst_diff > 0) {
+ ambig_start = end;
+ ambig_end = end + rule->dst_diff;
+ }
+ else {
+ ambig_start = start;
+ ambig_end = start - rule->dst_diff;
+ }
+
+ *fold = (ts >= ambig_start) && (ts < ambig_end);
+
+ if (isdst) {
+ return &(rule->dst);
+ }
+ else {
+ return &(rule->std);
+ }
+}
+
+/* Parse a TZ string in the format specified by the POSIX standard:
+ *
+ * std offset[dst[offset],start[/time],end[/time]]
+ *
+ * std and dst must be 3 or more characters long and must not contain a
+ * leading colon, embedded digits, commas, nor plus or minus signs; the
+ * spaces between "std" and "offset" are only for display and are not actually
+ * present in the string.
+ *
+ * The format of the offset is ``[+|-]hh[:mm[:ss]]``
+ *
+ * See the POSIX.1 spec: IEEE Std 1003.1-2018 §8.3:
+ *
+ * https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap08.html
+ */
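+/* A common example is "PST8PDT,M3.2.0,M11.1.0": standard time PST at UTC-8,
+ * DST PDT one hour ahead, starting on the second Sunday of March and ending
+ * on the first Sunday of November, both at the default time of 02:00 local.
+ */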
+static int
+parse_tz_str(PyObject *tz_str_obj, _tzrule *out)
+{
+ PyObject *std_abbr = NULL;
+ PyObject *dst_abbr = NULL;
+ TransitionRuleType *start = NULL;
+ TransitionRuleType *end = NULL;
+ // Initialize offsets to invalid value (> 24 hours)
+ long std_offset = 1 << 20;
+ long dst_offset = 1 << 20;
+
+ char *tz_str = PyBytes_AsString(tz_str_obj);
+ if (tz_str == NULL) {
+ return -1;
+ }
+ char *p = tz_str;
+
+ // Read the `std` abbreviation, which must be at least 3 characters long.
+ Py_ssize_t num_chars = parse_abbr(p, &std_abbr);
+ if (num_chars < 1) {
+ PyErr_Format(PyExc_ValueError, "Invalid STD format in %R", tz_str_obj);
+ goto error;
+ }
+
+ p += num_chars;
+
+ // Now read the STD offset, which is required
+ num_chars = parse_tz_delta(p, &std_offset);
+ if (num_chars < 0) {
+ PyErr_Format(PyExc_ValueError, "Invalid STD offset in %R", tz_str_obj);
+ goto error;
+ }
+ p += num_chars;
+
+ // If the string ends here, there is no DST, otherwise we must parse the
+ // DST abbreviation and start and end dates and times.
+ if (*p == '\0') {
+ goto complete;
+ }
+
+ num_chars = parse_abbr(p, &dst_abbr);
+ if (num_chars < 1) {
+ PyErr_Format(PyExc_ValueError, "Invalid DST format in %R", tz_str_obj);
+ goto error;
+ }
+ p += num_chars;
+
+ if (*p == ',') {
+ // From the POSIX standard:
+ //
+ // If no offset follows dst, the alternative time is assumed to be one
+ // hour ahead of standard time.
+ dst_offset = std_offset + 3600;
+ }
+ else {
+ num_chars = parse_tz_delta(p, &dst_offset);
+ if (num_chars < 0) {
+ PyErr_Format(PyExc_ValueError, "Invalid DST offset in %R",
+ tz_str_obj);
+ goto error;
+ }
+
+ p += num_chars;
+ }
+
+ TransitionRuleType **transitions[2] = {&start, &end};
+ for (size_t i = 0; i < 2; ++i) {
+ if (*p != ',') {
+ PyErr_Format(PyExc_ValueError,
+ "Missing transition rules in TZ string: %R",
+ tz_str_obj);
+ goto error;
+ }
+ p++;
+
+ num_chars = parse_transition_rule(p, transitions[i]);
+ if (num_chars < 0) {
+ PyErr_Format(PyExc_ValueError,
+ "Malformed transition rule in TZ string: %R",
+ tz_str_obj);
+ goto error;
+ }
+ p += num_chars;
+ }
+
+ if (*p != '\0') {
+ PyErr_Format(PyExc_ValueError,
+ "Extraneous characters at end of TZ string: %R",
+ tz_str_obj);
+ goto error;
+ }
+
+complete:
+ build_tzrule(std_abbr, dst_abbr, std_offset, dst_offset, start, end, out);
+ Py_DECREF(std_abbr);
+ Py_XDECREF(dst_abbr);
+
+ return 0;
+error:
+ Py_XDECREF(std_abbr);
+ if (dst_abbr != NULL && dst_abbr != Py_None) {
+ Py_DECREF(dst_abbr);
+ }
+
+ if (start != NULL) {
+ PyMem_Free(start);
+ }
+
+ if (end != NULL) {
+ PyMem_Free(end);
+ }
+
+ return -1;
+}
+
+static int
+parse_uint(const char *const p, uint8_t *value)
+{
+ if (!isdigit(*p)) {
+ return -1;
+ }
+
+ *value = (*p) - '0';
+ return 0;
+}
+
+/* Parse the STD and DST abbreviations from a TZ string. */
+static Py_ssize_t
+parse_abbr(const char *const p, PyObject **abbr)
+{
+ const char *ptr = p;
+ char buff = *ptr;
+ const char *str_start;
+ const char *str_end;
+
+ if (*ptr == '<') {
+ ptr++;
+ str_start = ptr;
+ while ((buff = *ptr) != '>') {
+ // From the POSIX standard:
+ //
+ // In the quoted form, the first character shall be the less-than
+ // ( '<' ) character and the last character shall be the
+ // greater-than ( '>' ) character. All characters between these
+ // quoting characters shall be alphanumeric characters from the
+ // portable character set in the current locale, the plus-sign (
+ // '+' ) character, or the minus-sign ( '-' ) character. The std
+ // and dst fields in this case shall not include the quoting
+ // characters.
+ if (!isalpha(buff) && !isdigit(buff) && buff != '+' &&
+ buff != '-') {
+ return -1;
+ }
+ ptr++;
+ }
+ str_end = ptr;
+ ptr++;
+ }
+ else {
+ str_start = p;
+ // From the POSIX standard:
+ //
+ // In the unquoted form, all characters in these fields shall be
+ // alphabetic characters from the portable character set in the
+ // current locale.
+ while (isalpha(*ptr)) {
+ ptr++;
+ }
+ str_end = ptr;
+ }
+
+ *abbr = PyUnicode_FromStringAndSize(str_start, str_end - str_start);
+ if (*abbr == NULL) {
+ return -1;
+ }
+
+ return ptr - p;
+}
+
+/* Parse a UTC offset from a TZ str. */
+static Py_ssize_t
+parse_tz_delta(const char *const p, long *total_seconds)
+{
+ // From the POSIX spec:
+ //
+ // Indicates the value added to the local time to arrive at Coordinated
+ // Universal Time. The offset has the form:
+ //
+ // hh[:mm[:ss]]
+ //
+ // One or more digits may be used; the value is always interpreted as a
+ // decimal number.
+ //
+ // The POSIX spec says that the values for `hour` must be between 0 and 24
+ // hours, but RFC 8536 §3.3.1 specifies that the hours part of the
+ // transition times may be signed and range from -167 to 167.
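+ //
+ // For example, the "8" in "PST8PDT" parses to -28800 seconds (UTC-08:00),
+ // while "-5:30" parses to +19800 seconds (UTC+05:30), since a leading '-'
+ // means east of the Prime Meridian.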
+ long sign = -1;
+ long hours = 0;
+ long minutes = 0;
+ long seconds = 0;
+
+ const char *ptr = p;
+ char buff = *ptr;
+ if (buff == '-' || buff == '+') {
+ // Negative numbers correspond to *positive* offsets, from the spec:
+ //
+ // If preceded by a '-', the timezone shall be east of the Prime
+ // Meridian; otherwise, it shall be west (which may be indicated by
+ // an optional preceding '+' ).
+ if (buff == '-') {
+ sign = 1;
+ }
+
+ ptr++;
+ }
+
+ // The hour can be 1 or 2 numeric characters
+ for (size_t i = 0; i < 2; ++i) {
+ buff = *ptr;
+ if (!isdigit(buff)) {
+ if (i == 0) {
+ return -1;
+ }
+ else {
+ break;
+ }
+ }
+
+ hours *= 10;
+ hours += buff - '0';
+ ptr++;
+ }
+
+ if (hours > 24 || hours < 0) {
+ return -1;
+ }
+
+ // Minutes and seconds always of the format ":dd"
+ long *outputs[2] = {&minutes, &seconds};
+ for (size_t i = 0; i < 2; ++i) {
+ if (*ptr != ':') {
+ goto complete;
+ }
+ ptr++;
+
+ for (size_t j = 0; j < 2; ++j) {
+ buff = *ptr;
+ if (!isdigit(buff)) {
+ return -1;
+ }
+ *(outputs[i]) *= 10;
+ *(outputs[i]) += buff - '0';
+ ptr++;
+ }
+ }
+
+complete:
+ *total_seconds = sign * ((hours * 3600) + (minutes * 60) + seconds);
+
+ return ptr - p;
+}
+
+/* Parse the date portion of a transition rule. */
+static Py_ssize_t
+parse_transition_rule(const char *const p, TransitionRuleType **out)
+{
+ // The full transition rule indicates when to change back and forth between
+ // STD and DST, and has the form:
+ //
+ // date[/time],date[/time]
+ //
+ // This function parses an individual date[/time] section, and returns
+ // the number of characters that contributed to the transition rule. This
+ // does not include the ',' at the end of the first rule.
+ //
+ // The POSIX spec states that if *time* is not given, the default is 02:00.
+ const char *ptr = p;
+ int8_t hour = 2;
+ int8_t minute = 0;
+ int8_t second = 0;
+
+ // Rules come in one of three flavors:
+ //
+ // 1. Jn: Julian day n, with no leap days.
+ // 2. n: Day of year (0-based, with leap days)
+ // 3. Mm.n.d: Specifying by month, week and day-of-week.
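+ //
+ // For example: "J60/2" is Julian day 60 (always March 1st) at 02:00;
+ // "59" is 0-based day 59 (March 1st, or February 29th in leap years);
+ // "M10.5.0/3" is the last Sunday of October at 03:00.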
+
+ if (*ptr == 'M') {
+ uint8_t month, week, day;
+ ptr++;
+ if (parse_uint(ptr, &month)) {
+ return -1;
+ }
+ ptr++;
+ if (*ptr != '.') {
+ uint8_t tmp;
+ if (parse_uint(ptr, &tmp)) {
+ return -1;
+ }
+
+ month *= 10;
+ month += tmp;
+ ptr++;
+ }
+
+ uint8_t *values[2] = {&week, &day};
+ for (size_t i = 0; i < 2; ++i) {
+ if (*ptr != '.') {
+ return -1;
+ }
+ ptr++;
+
+ if (parse_uint(ptr, values[i])) {
+ return -1;
+ }
+ ptr++;
+ }
+
+ if (*ptr == '/') {
+ ptr++;
+ Py_ssize_t num_chars =
+ parse_transition_time(ptr, &hour, &minute, &second);
+ if (num_chars < 0) {
+ return -1;
+ }
+ ptr += num_chars;
+ }
+
+ CalendarRule *rv = PyMem_Calloc(1, sizeof(CalendarRule));
+ if (rv == NULL) {
+ return -1;
+ }
+
+ if (calendarrule_new(month, week, day, hour, minute, second, rv)) {
+ PyMem_Free(rv);
+ return -1;
+ }
+
+ *out = (TransitionRuleType *)rv;
+ }
+ else {
+ uint8_t julian = 0;
+ unsigned int day = 0;
+ if (*ptr == 'J') {
+ julian = 1;
+ ptr++;
+ }
+
+ for (size_t i = 0; i < 3; ++i) {
+ if (!isdigit(*ptr)) {
+ if (i == 0) {
+ return -1;
+ }
+ break;
+ }
+ day *= 10;
+ day += (*ptr) - '0';
+ ptr++;
+ }
+
+ if (*ptr == '/') {
+ ptr++;
+ Py_ssize_t num_chars =
+ parse_transition_time(ptr, &hour, &minute, &second);
+ if (num_chars < 0) {
+ return -1;
+ }
+ ptr += num_chars;
+ }
+
+ DayRule *rv = PyMem_Calloc(1, sizeof(DayRule));
+ if (rv == NULL) {
+ return -1;
+ }
+
+ if (dayrule_new(julian, day, hour, minute, second, rv)) {
+ PyMem_Free(rv);
+ return -1;
+ }
+ *out = (TransitionRuleType *)rv;
+ }
+
+ return ptr - p;
+}
+
+/* Parse the time portion of a transition rule (e.g. following an /) */
+static Py_ssize_t
+parse_transition_time(const char *const p, int8_t *hour, int8_t *minute,
+ int8_t *second)
+{
+ // From the spec:
+ //
+ // The time has the same format as offset except that no leading sign
+ // ( '-' or '+' ) is allowed.
+ //
+ // The format for the offset is:
+ //
+ // h[h][:mm[:ss]]
+ //
+ // RFC 8536 also allows transition times to be signed and to range from
+ // -167 to +167, but the current version only supports [0, 99].
+ //
+ // TODO: Support the full range of transition hours.
+ int8_t *components[3] = {hour, minute, second};
+ const char *ptr = p;
+ int8_t sign = 1;
+
+ if (*ptr == '-' || *ptr == '+') {
+ if (*ptr == '-') {
+ sign = -1;
+ }
+ ptr++;
+ }
+
+ for (size_t i = 0; i < 3; ++i) {
+ if (i > 0) {
+ if (*ptr != ':') {
+ break;
+ }
+ ptr++;
+ }
+
+ uint8_t buff = 0;
+ for (size_t j = 0; j < 2; j++) {
+ if (!isdigit(*ptr)) {
+ if (i == 0 && j > 0) {
+ break;
+ }
+ return -1;
+ }
+
+ buff *= 10;
+ buff += (*ptr) - '0';
+ ptr++;
+ }
+
+ *(components[i]) = sign * buff;
+ }
+
+ return ptr - p;
+}
+
+/* Constructor for a _tzrule.
+ *
+ * If `dst_abbr` is NULL, this will construct an "STD-only" _tzrule, in which
+ * case `dst_offset` will be ignored and `start` and `end` are expected to be
+ * NULL as well.
+ *
+ * Returns 0 on success.
+ */
+static int
+build_tzrule(PyObject *std_abbr, PyObject *dst_abbr, long std_offset,
+ long dst_offset, TransitionRuleType *start,
+ TransitionRuleType *end, _tzrule *out)
+{
+ _tzrule rv = {{0}};
+
+ rv.start = start;
+ rv.end = end;
+
+ if (build_ttinfo(std_offset, 0, std_abbr, &rv.std)) {
+ goto error;
+ }
+
+ if (dst_abbr != NULL) {
+ rv.dst_diff = dst_offset - std_offset;
+ if (build_ttinfo(dst_offset, rv.dst_diff, dst_abbr, &rv.dst)) {
+ goto error;
+ }
+ }
+ else {
+ rv.std_only = 1;
+ }
+
+ *out = rv;
+
+ return 0;
+error:
+ xdecref_ttinfo(&rv.std);
+ xdecref_ttinfo(&rv.dst);
+ return -1;
+}
+
+/* Destructor for _tzrule. */
+static void
+free_tzrule(_tzrule *tzrule)
+{
+ xdecref_ttinfo(&(tzrule->std));
+ if (!tzrule->std_only) {
+ xdecref_ttinfo(&(tzrule->dst));
+ }
+
+ if (tzrule->start != NULL) {
+ PyMem_Free(tzrule->start);
+ }
+
+ if (tzrule->end != NULL) {
+ PyMem_Free(tzrule->end);
+ }
+}
+
+/* Calculate DST offsets from transitions and UTC offsets
+ *
+ * This is necessary because each C `ttinfo` only contains the UTC offset,
+ * time zone abbreviation and an isdst boolean - it does not include the
+ * amount of the DST offset, but we need the amount for the dst() function.
+ *
+ * This function uses heuristics to infer what the offset should be, so it
+ * is not guaranteed that this will work for all zones. If we cannot assign
+ * a value for a given DST offset, we'll assume it's 1H rather than 0H, so
+ * bool(dt.dst()) will always match ttinfo.isdst.
+ */
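+/* For example, a transition from a ttinfo with utcoff = +3600, isdst = 0 into
+ * one with utcoff = +7200, isdst = 1 yields dstoff = 3600 for the DST entry.
+ */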
+static void
+utcoff_to_dstoff(size_t *trans_idx, long *utcoffs, long *dstoffs,
+ unsigned char *isdsts, size_t num_transitions,
+ size_t num_ttinfos)
+{
+ size_t dst_count = 0;
+ size_t dst_found = 0;
+ for (size_t i = 0; i < num_ttinfos; ++i) {
+ dst_count++;
+ }
+
+ for (size_t i = 1; i < num_transitions; ++i) {
+ if (dst_count == dst_found) {
+ break;
+ }
+
+ size_t idx = trans_idx[i];
+ size_t comp_idx = trans_idx[i - 1];
+
+ // Only look at DST offsets that have not been assigned already
+ if (!isdsts[idx] || dstoffs[idx] != 0) {
+ continue;
+ }
+
+ long dstoff = 0;
+ long utcoff = utcoffs[idx];
+
+ if (!isdsts[comp_idx]) {
+ dstoff = utcoff - utcoffs[comp_idx];
+ }
+
+ if (!dstoff && idx < (num_ttinfos - 1)) {
+ comp_idx = trans_idx[i + 1];
+
+ // If the following transition is also DST and we couldn't find
+ // the DST offset by this point, we're going to have to skip it
+ // and hope this transition gets assigned later
+ if (isdsts[comp_idx]) {
+ continue;
+ }
+
+ dstoff = utcoff - utcoffs[comp_idx];
+ }
+
+ if (dstoff) {
+ dst_found++;
+ dstoffs[idx] = dstoff;
+ }
+ }
+
+ if (dst_found < dst_count) {
+ // If there are time zones we didn't find a value for, we'll end up
+ // with dstoff = 0 for something where isdst=1. This is obviously
+ // wrong; one hour will be a much better guess than 0.
+ for (size_t idx = 0; idx < num_ttinfos; ++idx) {
+ if (isdsts[idx] && !dstoffs[idx]) {
+ dstoffs[idx] = 3600;
+ }
+ }
+ }
+}
+
+#define _swap(x, y, buffer) \
+ buffer = x; \
+ x = y; \
+ y = buffer;
+
+/* Calculate transitions in local time from UTC time and offsets.
+ *
+ * We want to know when each transition occurs, denominated in the number of
+ * nominal wall-time seconds between 1970-01-01T00:00:00 and the transition in
+ * *local time* (note: this is *not* equivalent to the output of
+ * datetime.timestamp, which is the total number of seconds actual elapsed
+ * since 1970-01-01T00:00:00Z in UTC).
+ *
+ * This is an ambiguous question because "local time" can be ambiguous, but it
+ * is disambiguated by the `fold` parameter, so we allocate two arrays:
+ *
+ * trans_local[0]: The wall-time transitions for fold=0
+ * trans_local[1]: The wall-time transitions for fold=1
+ *
+ * This returns 0 on success and a negative number of failure. The trans_local
+ * arrays must be freed if they are not NULL.
+ */
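+/* For example, a transition at UTC timestamp t where the offset changes from
+ * UTC-5 to UTC-4 is stored as t - 4*3600 in trans_local[0] and as t - 5*3600
+ * in trans_local[1]; fold=0 always uses the larger of the two offsets.
+ */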
+static int
+ts_to_local(size_t *trans_idx, int64_t *trans_utc, long *utcoff,
+ int64_t *trans_local[2], size_t num_ttinfos,
+ size_t num_transitions)
+{
+ if (num_transitions == 0) {
+ return 0;
+ }
+
+ // Copy the UTC transitions into each array to be modified in place later
+ for (size_t i = 0; i < 2; ++i) {
+ trans_local[i] = PyMem_Malloc(num_transitions * sizeof(int64_t));
+ if (trans_local[i] == NULL) {
+ return -1;
+ }
+
+ memcpy(trans_local[i], trans_utc, num_transitions * sizeof(int64_t));
+ }
+
+ int64_t offset_0, offset_1, buff;
+ if (num_ttinfos > 1) {
+ offset_0 = utcoff[0];
+ offset_1 = utcoff[trans_idx[0]];
+
+ if (offset_1 > offset_0) {
+ _swap(offset_0, offset_1, buff);
+ }
+ }
+ else {
+ offset_0 = utcoff[0];
+ offset_1 = utcoff[0];
+ }
+
+ trans_local[0][0] += offset_0;
+ trans_local[1][0] += offset_1;
+
+ for (size_t i = 1; i < num_transitions; ++i) {
+ offset_0 = utcoff[trans_idx[i - 1]];
+ offset_1 = utcoff[trans_idx[i]];
+
+ if (offset_1 > offset_0) {
+ _swap(offset_1, offset_0, buff);
+ }
+
+ trans_local[0][i] += offset_0;
+ trans_local[1][i] += offset_1;
+ }
+
+ return 0;
+}
+
+/* Simple bisect_right binary search implementation */
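+/* e.g. _bisect(5, {1, 3, 5, 7}, 4) == 3, i.e. the insertion point to the
+ * right of any elements equal to the searched value. */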
+static size_t
+_bisect(const int64_t value, const int64_t *arr, size_t size)
+{
+ size_t lo = 0;
+ size_t hi = size;
+ size_t m;
+
+ while (lo < hi) {
+ m = (lo + hi) / 2;
+ if (arr[m] > value) {
+ hi = m;
+ }
+ else {
+ lo = m + 1;
+ }
+ }
+
+ return hi;
+}
+
+/* Find the ttinfo rules that apply at a given local datetime. */
+static _ttinfo *
+find_ttinfo(PyZoneInfo_ZoneInfo *self, PyObject *dt)
+{
+ // datetime.time has a .tzinfo attribute that passes None as the dt
+ // argument; it only really has meaning for fixed-offset zones.
+ if (dt == Py_None) {
+ if (self->fixed_offset) {
+ return &(self->tzrule_after.std);
+ }
+ else {
+ return &NO_TTINFO;
+ }
+ }
+
+ int64_t ts;
+ if (get_local_timestamp(dt, &ts)) {
+ return NULL;
+ }
+
+ unsigned char fold = PyDateTime_DATE_GET_FOLD(dt);
+ assert(fold < 2);
+ int64_t *local_transitions = self->trans_list_wall[fold];
+ size_t num_trans = self->num_transitions;
+
+ if (num_trans && ts < local_transitions[0]) {
+ return self->ttinfo_before;
+ }
+ else if (!num_trans || ts > local_transitions[self->num_transitions - 1]) {
+ return find_tzrule_ttinfo(&(self->tzrule_after), ts, fold,
+ PyDateTime_GET_YEAR(dt));
+ }
+ else {
+ size_t idx = _bisect(ts, local_transitions, self->num_transitions) - 1;
+ assert(idx < self->num_transitions);
+ return self->trans_ttinfos[idx];
+ }
+}
+
+static int
+is_leap_year(int year)
+{
+ const unsigned int ayear = (unsigned int)year;
+ return ayear % 4 == 0 && (ayear % 100 != 0 || ayear % 400 == 0);
+}
+
+/* Calculates ordinal datetime from year, month and day. */
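+/* e.g. ymd_to_ord(1970, 1, 1) == 719163, which is the value of EPOCHORDINAL. */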
+static int
+ymd_to_ord(int y, int m, int d)
+{
+ y -= 1;
+ int days_before_year = (y * 365) + (y / 4) - (y / 100) + (y / 400);
+ int yearday = DAYS_BEFORE_MONTH[m];
+ if (m > 2 && is_leap_year(y + 1)) {
+ yearday += 1;
+ }
+
+ return days_before_year + yearday + d;
+}
+
+/* Calculate the number of seconds since 1970-01-01 in local time.
+ *
+ * This gets a datetime in the same "units" as self->trans_list_wall so that we
+ * can easily determine which transitions a datetime falls between. See the
+ * comment above ts_to_local for more information.
+ * */
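+/* e.g. 1970-01-02T00:00:00 gives 86400 and 1970-01-01T01:02:03 gives 3723. */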
+static int
+get_local_timestamp(PyObject *dt, int64_t *local_ts)
+{
+ assert(local_ts != NULL);
+
+ int hour, minute, second;
+ int ord;
+ if (PyDateTime_CheckExact(dt)) {
+ int y = PyDateTime_GET_YEAR(dt);
+ int m = PyDateTime_GET_MONTH(dt);
+ int d = PyDateTime_GET_DAY(dt);
+ hour = PyDateTime_DATE_GET_HOUR(dt);
+ minute = PyDateTime_DATE_GET_MINUTE(dt);
+ second = PyDateTime_DATE_GET_SECOND(dt);
+
+ ord = ymd_to_ord(y, m, d);
+ }
+ else {
+ PyObject *num = PyObject_CallMethod(dt, "toordinal", NULL);
+ if (num == NULL) {
+ return -1;
+ }
+
+ ord = PyLong_AsLong(num);
+ Py_DECREF(num);
+ if (ord == -1 && PyErr_Occurred()) {
+ return -1;
+ }
+
+ num = PyObject_GetAttrString(dt, "hour");
+ if (num == NULL) {
+ return -1;
+ }
+ hour = PyLong_AsLong(num);
+ Py_DECREF(num);
+ if (hour == -1) {
+ return -1;
+ }
+
+ num = PyObject_GetAttrString(dt, "minute");
+ if (num == NULL) {
+ return -1;
+ }
+ minute = PyLong_AsLong(num);
+ Py_DECREF(num);
+ if (minute == -1) {
+ return -1;
+ }
+
+ num = PyObject_GetAttrString(dt, "second");
+ if (num == NULL) {
+ return -1;
+ }
+ second = PyLong_AsLong(num);
+ Py_DECREF(num);
+ if (second == -1) {
+ return -1;
+ }
+ }
+
+ *local_ts = (int64_t)(ord - EPOCHORDINAL) * 86400 +
+ (int64_t)(hour * 3600 + minute * 60 + second);
+
+ return 0;
+}
+
+/////
+// Functions for cache handling
+
+/* Constructor for StrongCacheNode */
+static StrongCacheNode *
+strong_cache_node_new(PyObject *key, PyObject *zone)
+{
+ StrongCacheNode *node = PyMem_Malloc(sizeof(StrongCacheNode));
+ if (node == NULL) {
+ return NULL;
+ }
+
+ Py_INCREF(key);
+ Py_INCREF(zone);
+
+ node->next = NULL;
+ node->prev = NULL;
+ node->key = key;
+ node->zone = zone;
+
+ return node;
+}
+
+/* Destructor for StrongCacheNode */
+void
+strong_cache_node_free(StrongCacheNode *node)
+{
+ Py_XDECREF(node->key);
+ Py_XDECREF(node->zone);
+
+ PyMem_Free(node);
+}
+
+/* Frees all nodes at or after a specified root in the strong cache.
+ *
+ * This can be used on the root node to free the entire cache or it can be used
+ * to clear all nodes that have been expired (which, if everything is going
+ * right, will actually only be 1 node at a time).
+ */
+void
+strong_cache_free(StrongCacheNode *root)
+{
+ StrongCacheNode *node = root;
+ StrongCacheNode *next_node;
+ while (node != NULL) {
+ next_node = node->next;
+ strong_cache_node_free(node);
+
+ node = next_node;
+ }
+}
+
+/* Removes a node from the cache and updates its neighbors.
+ *
+ * This is used both when ejecting a node from the cache and when moving it to
+ * the front of the cache.
+ */
+static void
+remove_from_strong_cache(StrongCacheNode *node)
+{
+ if (ZONEINFO_STRONG_CACHE == node) {
+ ZONEINFO_STRONG_CACHE = node->next;
+ }
+
+ if (node->prev != NULL) {
+ node->prev->next = node->next;
+ }
+
+ if (node->next != NULL) {
+ node->next->prev = node->prev;
+ }
+
+ node->next = NULL;
+ node->prev = NULL;
+}
+
+/* Retrieves the node associated with a key, if it exists.
+ *
+ * This traverses the strong cache until it finds a matching key and returns a
+ * pointer to the relevant node if found. Returns NULL if no node is found.
+ *
+ * root may be NULL, indicating an empty cache.
+ */
+static StrongCacheNode *
+find_in_strong_cache(const StrongCacheNode *const root, PyObject *const key)
+{
+ const StrongCacheNode *node = root;
+ while (node != NULL) {
+ if (PyObject_RichCompareBool(key, node->key, Py_EQ)) {
+ return (StrongCacheNode *)node;
+ }
+
+ node = node->next;
+ }
+
+ return NULL;
+}
+
+/* Ejects a given key from the class's strong cache, if applicable.
+ *
+ * This function is used to enable the per-key functionality in clear_cache.
+ */
+static void
+eject_from_strong_cache(const PyTypeObject *const type, PyObject *key)
+{
+ if (type != &PyZoneInfo_ZoneInfoType) {
+ return;
+ }
+
+ StrongCacheNode *node = find_in_strong_cache(ZONEINFO_STRONG_CACHE, key);
+ if (node != NULL) {
+ remove_from_strong_cache(node);
+
+ strong_cache_node_free(node);
+ }
+}
+
+/* Moves a node to the front of the LRU cache.
+ *
+ * The strong cache is an LRU cache, so whenever a given node is accessed, if
+ * it is not at the front of the cache, it needs to be moved there.
+ */
+static void
+move_strong_cache_node_to_front(StrongCacheNode **root, StrongCacheNode *node)
+{
+ StrongCacheNode *root_p = *root;
+ if (root_p == node) {
+ return;
+ }
+
+ remove_from_strong_cache(node);
+
+ node->prev = NULL;
+ node->next = root_p;
+
+ if (root_p != NULL) {
+ root_p->prev = node;
+ }
+
+ *root = node;
+}
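+
+/* Illustration: if the list currently reads  A <-> B <-> C  (front to back)
+ * and C is accessed, remove_from_strong_cache() first detaches C to give
+ * A <-> B, and the relinking above then produces  C <-> A <-> B  with *root
+ * pointing at C.
+ */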
+
+/* Retrieves a ZoneInfo from the strong cache if it's present.
+ *
+ * This function finds the ZoneInfo by key and if found will move the node to
+ * the front of the LRU cache and return a new reference to it. It returns NULL
+ * if the key is not in the cache.
+ *
+ * The strong cache is currently only implemented for the base class, so this
+ * always returns a cache miss for subclasses.
+ */
+static PyObject *
+zone_from_strong_cache(const PyTypeObject *const type, PyObject *const key)
+{
+ if (type != &PyZoneInfo_ZoneInfoType) {
+ return NULL; // Strong cache currently only implemented for base class
+ }
+
+ StrongCacheNode *node = find_in_strong_cache(ZONEINFO_STRONG_CACHE, key);
+
+ if (node != NULL) {
+ move_strong_cache_node_to_front(&ZONEINFO_STRONG_CACHE, node);
+ Py_INCREF(node->zone);
+ return node->zone;
+ }
+
+ return NULL; // Cache miss
+}
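+
+/* Rough sketch of how the two halves of the strong cache are meant to be
+ * used together (the real constructor appears earlier in this file; the code
+ * below is illustrative only):
+ *
+ *     PyObject *zone = zone_from_strong_cache(type, key);
+ *     if (zone == NULL) {                    // miss, or a subclass
+ *         zone = ...;                        // weak cache / fresh instance
+ *         update_strong_cache(type, key, zone);
+ *     }
+ *     return zone;
+ */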
+
+/* Inserts a new key into the strong LRU cache.
+ *
+ * This function is only to be used after a cache miss; it creates a new node
+ * at the front of the cache and ejects any stale entries (keeping the size of
+ * the cache to at most ZONEINFO_STRONG_CACHE_MAX_SIZE).
+ */
+static void
+update_strong_cache(const PyTypeObject *const type, PyObject *key,
+ PyObject *zone)
+{
+ if (type != &PyZoneInfo_ZoneInfoType) {
+ return;
+ }
+
+    StrongCacheNode *new_node = strong_cache_node_new(key, zone);
+    if (new_node == NULL) {
+        // Allocation failed; skip caching rather than dereference NULL below.
+        return;
+    }
+
+ move_strong_cache_node_to_front(&ZONEINFO_STRONG_CACHE, new_node);
+
+ StrongCacheNode *node = new_node->next;
+ for (size_t i = 1; i < ZONEINFO_STRONG_CACHE_MAX_SIZE; ++i) {
+ if (node == NULL) {
+ return;
+ }
+ node = node->next;
+ }
+
+ // Everything beyond this point needs to be freed
+ if (node != NULL) {
+ if (node->prev != NULL) {
+ node->prev->next = NULL;
+ }
+ strong_cache_free(node);
+ }
+}
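+
+/* The eviction walk above starts at the second node and follows at most
+ * ZONEINFO_STRONG_CACHE_MAX_SIZE - 1 links, so `node` lands on the first
+ * over-capacity entry (if any); that entry and everything behind it are
+ * unlinked and freed.  Since insertions add one node at a time, this
+ * normally frees at most a single node per call.
+ */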
+
+/* Clears all entries in a type's strong cache.
+ *
+ * Because the strong cache is not implemented for subclasses, this is a no-op
+ * for everything except the base class.
+ */
+void
+clear_strong_cache(const PyTypeObject *const type)
+{
+ if (type != &PyZoneInfo_ZoneInfoType) {
+ return;
+ }
+
+ strong_cache_free(ZONEINFO_STRONG_CACHE);
+ ZONEINFO_STRONG_CACHE = NULL;
+}
+
+static PyObject *
+new_weak_cache(void)
+{
+ PyObject *weakref_module = PyImport_ImportModule("weakref");
+ if (weakref_module == NULL) {
+ return NULL;
+ }
+
+ PyObject *weak_cache =
+ PyObject_CallMethod(weakref_module, "WeakValueDictionary", "");
+ Py_DECREF(weakref_module);
+ return weak_cache;
+}
+
+static int
+initialize_caches(void)
+{
+ // TODO: Move to a PyModule_GetState / PEP 573 based caching system.
+ if (TIMEDELTA_CACHE == NULL) {
+ TIMEDELTA_CACHE = PyDict_New();
+ }
+ else {
+ Py_INCREF(TIMEDELTA_CACHE);
+ }
+
+ if (TIMEDELTA_CACHE == NULL) {
+ return -1;
+ }
+
+ if (ZONEINFO_WEAK_CACHE == NULL) {
+ ZONEINFO_WEAK_CACHE = new_weak_cache();
+ }
+ else {
+ Py_INCREF(ZONEINFO_WEAK_CACHE);
+ }
+
+ if (ZONEINFO_WEAK_CACHE == NULL) {
+ return -1;
+ }
+
+ return 0;
+}
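+
+/* As the TODO above notes, these caches are C-level globals rather than
+ * per-module state, so a re-executed module simply takes another reference
+ * to the existing objects.  module_free() further down mirrors this: it only
+ * Py_CLEAR()s a cache once its refcount shows that no other module copy
+ * still holds it.
+ */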
+
+static PyObject *
+zoneinfo_init_subclass(PyTypeObject *cls, PyObject *args, PyObject **kwargs)
+{
+ PyObject *weak_cache = new_weak_cache();
+ if (weak_cache == NULL) {
+ return NULL;
+ }
+
+ PyObject_SetAttrString((PyObject *)cls, "_weak_cache", weak_cache);
+ Py_DECREF(weak_cache);
+ Py_RETURN_NONE;
+}
+
+/////
+// Specify the ZoneInfo type
+static PyMethodDef zoneinfo_methods[] = {
+ {"clear_cache", (PyCFunction)(void (*)(void))zoneinfo_clear_cache,
+ METH_VARARGS | METH_KEYWORDS | METH_CLASS,
+ PyDoc_STR("Clear the ZoneInfo cache.")},
+ {"no_cache", (PyCFunction)(void (*)(void))zoneinfo_no_cache,
+ METH_VARARGS | METH_KEYWORDS | METH_CLASS,
+ PyDoc_STR("Get a new instance of ZoneInfo, bypassing the cache.")},
+ {"from_file", (PyCFunction)(void (*)(void))zoneinfo_from_file,
+ METH_VARARGS | METH_KEYWORDS | METH_CLASS,
+     PyDoc_STR("Create a ZoneInfo object from a file object.")},
+ {"utcoffset", (PyCFunction)zoneinfo_utcoffset, METH_O,
+ PyDoc_STR("Retrieve a timedelta representing the UTC offset in a zone at "
+ "the given datetime.")},
+ {"dst", (PyCFunction)zoneinfo_dst, METH_O,
+ PyDoc_STR("Retrieve a timedelta representing the amount of DST applied "
+ "in a zone at the given datetime.")},
+ {"tzname", (PyCFunction)zoneinfo_tzname, METH_O,
+ PyDoc_STR("Retrieve a string containing the abbreviation for the time "
+ "zone that applies in a zone at a given datetime.")},
+ {"fromutc", (PyCFunction)zoneinfo_fromutc, METH_O,
+ PyDoc_STR("Given a datetime with local time in UTC, retrieve an adjusted "
+ "datetime in local time.")},
+ {"__reduce__", (PyCFunction)zoneinfo_reduce, METH_NOARGS,
+ PyDoc_STR("Function for serialization with the pickle protocol.")},
+ {"_unpickle", (PyCFunction)zoneinfo__unpickle, METH_VARARGS | METH_CLASS,
+ PyDoc_STR("Private method used in unpickling.")},
+ {"__init_subclass__", (PyCFunction)(void (*)(void))zoneinfo_init_subclass,
+ METH_VARARGS | METH_KEYWORDS | METH_CLASS,
+ PyDoc_STR("Function to initialize subclasses.")},
+ {NULL} /* Sentinel */
+};
+
+static PyMemberDef zoneinfo_members[] = {
+ {.name = "key",
+ .offset = offsetof(PyZoneInfo_ZoneInfo, key),
+ .type = T_OBJECT_EX,
+ .flags = READONLY,
+ .doc = NULL},
+ {NULL}, /* Sentinel */
+};
+
+static PyTypeObject PyZoneInfo_ZoneInfoType = {
+ PyVarObject_HEAD_INIT(NULL, 0) //
+ .tp_name = "zoneinfo.ZoneInfo",
+ .tp_basicsize = sizeof(PyZoneInfo_ZoneInfo),
+ .tp_weaklistoffset = offsetof(PyZoneInfo_ZoneInfo, weakreflist),
+ .tp_repr = (reprfunc)zoneinfo_repr,
+ .tp_str = (reprfunc)zoneinfo_str,
+ .tp_getattro = PyObject_GenericGetAttr,
+ .tp_flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE),
+ /* .tp_doc = zoneinfo_doc, */
+ .tp_methods = zoneinfo_methods,
+ .tp_members = zoneinfo_members,
+ .tp_new = zoneinfo_new,
+ .tp_dealloc = zoneinfo_dealloc,
+};
+
+/////
+// Specify the _zoneinfo module
+static PyMethodDef module_methods[] = {{NULL, NULL}};
+static void
+module_free()
+{
+ Py_XDECREF(_tzpath_find_tzfile);
+ _tzpath_find_tzfile = NULL;
+
+ Py_XDECREF(_common_mod);
+ _common_mod = NULL;
+
+ Py_XDECREF(io_open);
+ io_open = NULL;
+
+ xdecref_ttinfo(&NO_TTINFO);
+
+ if (TIMEDELTA_CACHE != NULL && Py_REFCNT(TIMEDELTA_CACHE) > 1) {
+ Py_DECREF(TIMEDELTA_CACHE);
+ } else {
+ Py_CLEAR(TIMEDELTA_CACHE);
+ }
+
+ if (ZONEINFO_WEAK_CACHE != NULL && Py_REFCNT(ZONEINFO_WEAK_CACHE) > 1) {
+ Py_DECREF(ZONEINFO_WEAK_CACHE);
+ } else {
+ Py_CLEAR(ZONEINFO_WEAK_CACHE);
+ }
+
+ clear_strong_cache(&PyZoneInfo_ZoneInfoType);
+}
+
+static int
+zoneinfomodule_exec(PyObject *m)
+{
+ PyDateTime_IMPORT;
+ PyZoneInfo_ZoneInfoType.tp_base = PyDateTimeAPI->TZInfoType;
+ if (PyType_Ready(&PyZoneInfo_ZoneInfoType) < 0) {
+ goto error;
+ }
+
+ Py_INCREF(&PyZoneInfo_ZoneInfoType);
+ PyModule_AddObject(m, "ZoneInfo", (PyObject *)&PyZoneInfo_ZoneInfoType);
+
+ /* Populate imports */
+ PyObject *_tzpath_module = PyImport_ImportModule("zoneinfo._tzpath");
+ if (_tzpath_module == NULL) {
+ goto error;
+ }
+
+ _tzpath_find_tzfile =
+ PyObject_GetAttrString(_tzpath_module, "find_tzfile");
+ Py_DECREF(_tzpath_module);
+ if (_tzpath_find_tzfile == NULL) {
+ goto error;
+ }
+
+ PyObject *io_module = PyImport_ImportModule("io");
+ if (io_module == NULL) {
+ goto error;
+ }
+
+ io_open = PyObject_GetAttrString(io_module, "open");
+ Py_DECREF(io_module);
+ if (io_open == NULL) {
+ goto error;
+ }
+
+ _common_mod = PyImport_ImportModule("zoneinfo._common");
+ if (_common_mod == NULL) {
+ goto error;
+ }
+
+ if (NO_TTINFO.utcoff == NULL) {
+ NO_TTINFO.utcoff = Py_None;
+ NO_TTINFO.dstoff = Py_None;
+ NO_TTINFO.tzname = Py_None;
+
+ for (size_t i = 0; i < 3; ++i) {
+ Py_INCREF(Py_None);
+ }
+ }
+
+ if (initialize_caches()) {
+ goto error;
+ }
+
+ return 0;
+
+error:
+ return -1;
+}
+
+static PyModuleDef_Slot zoneinfomodule_slots[] = {
+ {Py_mod_exec, zoneinfomodule_exec}, {0, NULL}};
+
+static struct PyModuleDef zoneinfomodule = {
+ PyModuleDef_HEAD_INIT,
+ .m_name = "_zoneinfo",
+ .m_doc = "C implementation of the zoneinfo module",
+ .m_size = 0,
+ .m_methods = module_methods,
+ .m_slots = zoneinfomodule_slots,
+ .m_free = (freefunc)module_free};
+
+PyMODINIT_FUNC
+PyInit__zoneinfo(void)
+{
+ return PyModuleDef_Init(&zoneinfomodule);
+}
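+
+/* The module uses multi-phase initialization (PEP 489): PyInit__zoneinfo()
+ * only hands back the module definition, and the actual setup runs in
+ * zoneinfomodule_exec() via the Py_mod_exec slot declared above.
+ */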
diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c
index abcdd1e8..724c503e 100644
--- a/Modules/arraymodule.c
+++ b/Modules/arraymodule.c
@@ -5,7 +5,7 @@
#define PY_SSIZE_T_CLEAN
#include "Python.h"
-#include "structmember.h"
+#include <stddef.h>               // offsetof()
#ifdef STDC_HEADERS
 #include <stddef.h>
@@ -43,7 +43,7 @@ typedef struct arrayobject {
Py_ssize_t allocated;
const struct arraydescr *ob_descr;
PyObject *weakreflist; /* List of weak references */
- int ob_exports; /* Number of exported buffers */
+ Py_ssize_t ob_exports; /* Number of exported buffers */
} arrayobject;
static PyTypeObject Arraytype;
@@ -106,7 +106,7 @@ enum machine_format_code {
#include "clinic/arraymodule.c.h"
#define array_Check(op) PyObject_TypeCheck(op, &Arraytype)
-#define array_CheckExact(op) (Py_TYPE(op) == &Arraytype)
+#define array_CheckExact(op) Py_IS_TYPE(op, &Arraytype)
static int
array_resize(arrayobject *self, Py_ssize_t newsize)
@@ -128,14 +128,14 @@ array_resize(arrayobject *self, Py_ssize_t newsize)
if (self->allocated >= newsize &&
Py_SIZE(self) < newsize + 16 &&
self->ob_item != NULL) {
- Py_SIZE(self) = newsize;
+ Py_SET_SIZE(self, newsize);
return 0;
}
if (newsize == 0) {
PyMem_FREE(self->ob_item);
self->ob_item = NULL;
- Py_SIZE(self) = 0;
+ Py_SET_SIZE(self, 0);
self->allocated = 0;
return 0;
}
@@ -165,7 +165,7 @@ array_resize(arrayobject *self, Py_ssize_t newsize)
return -1;
}
self->ob_item = items;
- Py_SIZE(self) = newsize;
+ Py_SET_SIZE(self, newsize);
self->allocated = _new_size;
return 0;
}
@@ -185,9 +185,7 @@ in bounds; that's the responsibility of the caller.
static PyObject *
b_getitem(arrayobject *ap, Py_ssize_t i)
{
- long x = ((char *)ap->ob_item)[i];
- if (x >= 128)
- x -= 256;
+ long x = ((signed char *)ap->ob_item)[i];
return PyLong_FromLong(x);
}
@@ -237,24 +235,31 @@ BB_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v)
static PyObject *
u_getitem(arrayobject *ap, Py_ssize_t i)
{
- return PyUnicode_FromOrdinal(((Py_UNICODE *) ap->ob_item)[i]);
+ return PyUnicode_FromOrdinal(((wchar_t *) ap->ob_item)[i]);
}
static int
u_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v)
{
- Py_UNICODE *p;
- Py_ssize_t len;
-
- if (!PyArg_Parse(v, "u#;array item must be unicode character", &p, &len))
+ PyObject *u;
+ if (!PyArg_Parse(v, "U;array item must be unicode character", &u)) {
return -1;
- if (len != 1) {
+ }
+
+ Py_ssize_t len = PyUnicode_AsWideChar(u, NULL, 0);
+ if (len != 2) {
PyErr_SetString(PyExc_TypeError,
"array item must be unicode character");
return -1;
}
- if (i >= 0)
- ((Py_UNICODE *)ap->ob_item)[i] = p[0];
+
+ wchar_t w;
+ len = PyUnicode_AsWideChar(u, &w, 1);
+ assert(len == 1);
+
+ if (i >= 0) {
+ ((wchar_t *)ap->ob_item)[i] = w;
+ }
return 0;
}
@@ -532,7 +537,7 @@ d_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v)
DEFINE_COMPAREITEMS(b, signed char)
DEFINE_COMPAREITEMS(BB, unsigned char)
-DEFINE_COMPAREITEMS(u, Py_UNICODE)
+DEFINE_COMPAREITEMS(u, wchar_t)
DEFINE_COMPAREITEMS(h, short)
DEFINE_COMPAREITEMS(HH, unsigned short)
DEFINE_COMPAREITEMS(i, int)
@@ -550,7 +555,7 @@ DEFINE_COMPAREITEMS(QQ, unsigned long long)
static const struct arraydescr descriptors[] = {
{'b', 1, b_getitem, b_setitem, b_compareitems, "b", 1, 1},
{'B', 1, BB_getitem, BB_setitem, BB_compareitems, "B", 1, 0},
- {'u', sizeof(Py_UNICODE), u_getitem, u_setitem, u_compareitems, "u", 0, 0},
+ {'u', sizeof(wchar_t), u_getitem, u_setitem, u_compareitems, "u", 0, 0},
{'h', sizeof(short), h_getitem, h_setitem, h_compareitems, "h", 1, 1},
{'H', sizeof(short), HH_getitem, HH_setitem, HH_compareitems, "H", 1, 0},
{'i', sizeof(int), i_getitem, i_setitem, i_compareitems, "i", 1, 1},
@@ -595,7 +600,7 @@ newarrayobject(PyTypeObject *type, Py_ssize_t size, const struct arraydescr *des
op->ob_descr = descr;
op->allocated = size;
op->weakreflist = NULL;
- Py_SIZE(op) = size;
+ Py_SET_SIZE(op, size);
if (size <= 0) {
op->ob_item = NULL;
}
@@ -1507,7 +1512,7 @@ array_array_tofile(arrayobject *self, PyObject *f)
bytes = PyBytes_FromStringAndSize(ptr, size);
if (bytes == NULL)
return NULL;
- res = _PyObject_CallMethodIdObjArgs(f, &PyId_write, bytes, NULL);
+ res = _PyObject_CallMethodIdOneArg(f, &PyId_write, bytes);
Py_DECREF(bytes);
if (res == NULL)
return NULL;
@@ -1625,27 +1630,6 @@ frombytes(arrayobject *self, Py_buffer *buffer)
Py_RETURN_NONE;
}
-/*[clinic input]
-array.array.fromstring
-
- buffer: Py_buffer(accept={str, buffer})
- /
-
-Appends items from the string, interpreting it as an array of machine values, as if it had been read from a file using the fromfile() method).
-
-This method is deprecated. Use frombytes instead.
-[clinic start generated code]*/
-
-static PyObject *
-array_array_fromstring_impl(arrayobject *self, Py_buffer *buffer)
-/*[clinic end generated code: output=31c4baa779df84ce input=a3341a512e11d773]*/
-{
- if (PyErr_WarnEx(PyExc_DeprecationWarning,
- "fromstring() is deprecated. Use frombytes() instead.", 2) != 0)
- return NULL;
- return frombytes(self, buffer);
-}
-
/*[clinic input]
array.array.frombytes
@@ -1680,28 +1664,10 @@ array_array_tobytes_impl(arrayobject *self)
}
}
-/*[clinic input]
-array.array.tostring
-
-Convert the array to an array of machine values and return the bytes representation.
-
-This method is deprecated. Use tobytes instead.
-[clinic start generated code]*/
-
-static PyObject *
-array_array_tostring_impl(arrayobject *self)
-/*[clinic end generated code: output=7d6bd92745a2c8f3 input=b6c0ddee7b30457e]*/
-{
- if (PyErr_WarnEx(PyExc_DeprecationWarning,
- "tostring() is deprecated. Use tobytes() instead.", 2) != 0)
- return NULL;
- return array_array_tobytes_impl(self);
-}
-
/*[clinic input]
array.array.fromunicode
- ustr: Py_UNICODE(zeroes=True)
+ ustr: unicode
/
Extends this array with data from the unicode string ustr.
@@ -1712,25 +1678,28 @@ some other type.
[clinic start generated code]*/
static PyObject *
-array_array_fromunicode_impl(arrayobject *self, const Py_UNICODE *ustr,
- Py_ssize_clean_t ustr_length)
-/*[clinic end generated code: output=cf2f662908e2befc input=150f00566ffbca6e]*/
+array_array_fromunicode_impl(arrayobject *self, PyObject *ustr)
+/*[clinic end generated code: output=24359f5e001a7f2b input=025db1fdade7a4ce]*/
{
- char typecode;
-
- typecode = self->ob_descr->typecode;
- if (typecode != 'u') {
+ if (self->ob_descr->typecode != 'u') {
PyErr_SetString(PyExc_ValueError,
"fromunicode() may only be called on "
"unicode type arrays");
return NULL;
}
- if (ustr_length > 0) {
+
+ Py_ssize_t ustr_length = PyUnicode_AsWideChar(ustr, NULL, 0);
+ assert(ustr_length > 0);
+ if (ustr_length > 1) {
+ ustr_length--; /* trim trailing NUL character */
Py_ssize_t old_size = Py_SIZE(self);
- if (array_resize(self, old_size + ustr_length) == -1)
+ if (array_resize(self, old_size + ustr_length) == -1) {
return NULL;
- memcpy(self->ob_item + old_size * sizeof(Py_UNICODE),
- ustr, ustr_length * sizeof(Py_UNICODE));
+ }
+
+ // must not fail
+ PyUnicode_AsWideChar(
+ ustr, ((wchar_t *)self->ob_item) + old_size, ustr_length);
}
Py_RETURN_NONE;
@@ -1750,14 +1719,12 @@ static PyObject *
array_array_tounicode_impl(arrayobject *self)
/*[clinic end generated code: output=08e442378336e1ef input=127242eebe70b66d]*/
{
- char typecode;
- typecode = self->ob_descr->typecode;
- if (typecode != 'u') {
+ if (self->ob_descr->typecode != 'u') {
PyErr_SetString(PyExc_ValueError,
"tounicode() may only be called on unicode type arrays");
return NULL;
}
- return PyUnicode_FromWideChar((Py_UNICODE *) self->ob_item, Py_SIZE(self));
+ return PyUnicode_FromWideChar((wchar_t *) self->ob_item, Py_SIZE(self));
}
/*[clinic input]
@@ -2285,7 +2252,6 @@ static PyMethodDef array_methods[] = {
ARRAY_ARRAY_EXTEND_METHODDEF
ARRAY_ARRAY_FROMFILE_METHODDEF
ARRAY_ARRAY_FROMLIST_METHODDEF
- ARRAY_ARRAY_FROMSTRING_METHODDEF
ARRAY_ARRAY_FROMBYTES_METHODDEF
ARRAY_ARRAY_FROMUNICODE_METHODDEF
ARRAY_ARRAY_INDEX_METHODDEF
@@ -2296,7 +2262,6 @@ static PyMethodDef array_methods[] = {
ARRAY_ARRAY_REVERSE_METHODDEF
ARRAY_ARRAY_TOFILE_METHODDEF
ARRAY_ARRAY_TOLIST_METHODDEF
- ARRAY_ARRAY_TOSTRING_METHODDEF
ARRAY_ARRAY_TOBYTES_METHODDEF
ARRAY_ARRAY_TOUNICODE_METHODDEF
ARRAY_ARRAY___SIZEOF___METHODDEF
@@ -2718,30 +2683,20 @@ array_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
Py_DECREF(v);
}
else if (initial != NULL && PyUnicode_Check(initial)) {
- Py_UNICODE *ustr;
Py_ssize_t n;
-
- ustr = PyUnicode_AsUnicode(initial);
+ wchar_t *ustr = PyUnicode_AsWideCharString(initial, &n);
if (ustr == NULL) {
- PyErr_NoMemory();
Py_DECREF(a);
return NULL;
}
- n = PyUnicode_GET_DATA_SIZE(initial);
if (n > 0) {
arrayobject *self = (arrayobject *)a;
- char *item = self->ob_item;
- item = (char *)PyMem_Realloc(item, n);
- if (item == NULL) {
- PyErr_NoMemory();
- Py_DECREF(a);
- return NULL;
- }
- self->ob_item = item;
- Py_SIZE(self) = n / sizeof(Py_UNICODE);
- memcpy(item, ustr, n);
- self->allocated = Py_SIZE(self);
+            // self->ob_item may be NULL; PyMem_Free(NULL) is a safe no-op.
+ PyMem_Free(self->ob_item);
+ self->ob_item = (char *)ustr;
+ Py_SET_SIZE(self, n);
+ self->allocated = n;
}
}
else if (initial != NULL && array_Check(initial) && len > 0) {
@@ -3034,12 +2989,11 @@ array_modexec(PyObject *m)
{
char buffer[Py_ARRAY_LENGTH(descriptors)], *p;
PyObject *typecodes;
- Py_ssize_t size = 0;
const struct arraydescr *descr;
if (PyType_Ready(&Arraytype) < 0)
return -1;
- Py_TYPE(&PyArrayIter_Type) = &PyType_Type;
+ Py_SET_TYPE(&PyArrayIter_Type, &PyType_Type);
Py_INCREF((PyObject *)&Arraytype);
if (PyModule_AddObject(m, "ArrayType", (PyObject *)&Arraytype) < 0) {
@@ -3052,10 +3006,6 @@ array_modexec(PyObject *m)
return -1;
}
- for (descr=descriptors; descr->typecode != '\0'; descr++) {
- size++;
- }
-
p = buffer;
for (descr = descriptors; descr->typecode != '\0'; descr++) {
*p++ = (char)descr->typecode;
diff --git a/Modules/atexitmodule.c b/Modules/atexitmodule.c
index 1d6d6e53..8cef64ce 100644
--- a/Modules/atexitmodule.c
+++ b/Modules/atexitmodule.c
@@ -28,7 +28,13 @@ typedef struct {
int callback_len;
} atexitmodule_state;
-#define GET_ATEXIT_STATE(mod) ((atexitmodule_state*)PyModule_GetState(mod))
+static inline atexitmodule_state*
+get_atexit_state(PyObject *module)
+{
+ void *state = PyModule_GetState(module);
+ assert(state != NULL);
+ return (atexitmodule_state *)state;
+}
static void
@@ -72,7 +78,7 @@ atexit_callfuncs(PyObject *module)
if (module == NULL)
return;
- modstate = GET_ATEXIT_STATE(module);
+ modstate = get_atexit_state(module);
if (modstate->ncallbacks == 0)
return;
@@ -130,7 +136,7 @@ atexit_register(PyObject *self, PyObject *args, PyObject *kwargs)
atexit_callback *new_callback;
PyObject *func = NULL;
- modstate = GET_ATEXIT_STATE(self);
+ modstate = get_atexit_state(self);
if (modstate->ncallbacks >= modstate->callback_len) {
atexit_callback **r;
@@ -197,7 +203,7 @@ Clear the list of previously registered exit functions.");
static PyObject *
atexit_clear(PyObject *self, PyObject *unused)
{
- atexit_cleanup(GET_ATEXIT_STATE(self));
+ atexit_cleanup(get_atexit_state(self));
Py_RETURN_NONE;
}
@@ -211,7 +217,7 @@ atexit_ncallbacks(PyObject *self, PyObject *unused)
{
atexitmodule_state *modstate;
- modstate = GET_ATEXIT_STATE(self);
+ modstate = get_atexit_state(self);
return PyLong_FromSsize_t(modstate->ncallbacks);
}
@@ -222,16 +228,15 @@ atexit_m_traverse(PyObject *self, visitproc visit, void *arg)
int i;
atexitmodule_state *modstate;
- modstate = GET_ATEXIT_STATE(self);
- if (modstate != NULL) {
- for (i = 0; i < modstate->ncallbacks; i++) {
- atexit_callback *cb = modstate->atexit_callbacks[i];
- if (cb == NULL)
- continue;
- Py_VISIT(cb->func);
- Py_VISIT(cb->args);
- Py_VISIT(cb->kwargs);
- }
+ modstate = (atexitmodule_state *)PyModule_GetState(self);
+
+ for (i = 0; i < modstate->ncallbacks; i++) {
+ atexit_callback *cb = modstate->atexit_callbacks[i];
+ if (cb == NULL)
+ continue;
+ Py_VISIT(cb->func);
+ Py_VISIT(cb->args);
+ Py_VISIT(cb->kwargs);
}
return 0;
}
@@ -240,10 +245,8 @@ static int
atexit_m_clear(PyObject *self)
{
atexitmodule_state *modstate;
- modstate = GET_ATEXIT_STATE(self);
- if (modstate != NULL) {
- atexit_cleanup(modstate);
- }
+ modstate = (atexitmodule_state *)PyModule_GetState(self);
+ atexit_cleanup(modstate);
return 0;
}
@@ -251,11 +254,9 @@ static void
atexit_free(PyObject *m)
{
atexitmodule_state *modstate;
- modstate = GET_ATEXIT_STATE(m);
- if (modstate != NULL) {
- atexit_cleanup(modstate);
- PyMem_Free(modstate->atexit_callbacks);
- }
+ modstate = (atexitmodule_state *)PyModule_GetState(m);
+ atexit_cleanup(modstate);
+ PyMem_Free(modstate->atexit_callbacks);
}
PyDoc_STRVAR(atexit_unregister__doc__,
@@ -273,7 +274,7 @@ atexit_unregister(PyObject *self, PyObject *func)
atexit_callback *cb;
int i, eq;
- modstate = GET_ATEXIT_STATE(self);
+ modstate = get_atexit_state(self);
for (i = 0; i < modstate->ncallbacks; i++)
{
@@ -318,7 +319,7 @@ static int
atexit_exec(PyObject *m) {
atexitmodule_state *modstate;
- modstate = GET_ATEXIT_STATE(m);
+ modstate = get_atexit_state(m);
modstate->callback_len = 32;
modstate->ncallbacks = 0;
modstate->atexit_callbacks = PyMem_New(atexit_callback*,
diff --git a/Modules/audioop.c b/Modules/audioop.c
index f4fdeb23..3aeb6f04 100644
--- a/Modules/audioop.c
+++ b/Modules/audioop.c
@@ -371,14 +371,26 @@ static const int stepsizeTable[89] = {
SETINT32((cp), (i), (val)); \
} while(0)
+static PyModuleDef audioopmodule;
-static PyObject *AudioopError;
+typedef struct {
+ PyObject *AudioopError;
+} audioop_state;
+
+static inline audioop_state *
+get_audioop_state(PyObject *module)
+{
+ void *state = PyModule_GetState(module);
+ assert(state != NULL);
+ return (audioop_state *)state;
+}
static int
-audioop_check_size(int size)
+audioop_check_size(PyObject *module, int size)
{
if (size < 1 || size > 4) {
- PyErr_SetString(AudioopError, "Size should be 1, 2, 3 or 4");
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
+ "Size should be 1, 2, 3 or 4");
return 0;
}
else
@@ -386,12 +398,13 @@ audioop_check_size(int size)
}
static int
-audioop_check_parameters(Py_ssize_t len, int size)
+audioop_check_parameters(PyObject *module, Py_ssize_t len, int size)
{
- if (!audioop_check_size(size))
+ if (!audioop_check_size(module, size))
return 0;
if (len % size != 0) {
- PyErr_SetString(AudioopError, "not a whole number of frames");
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
+ "not a whole number of frames");
return 0;
}
return 1;
@@ -420,10 +433,11 @@ audioop_getsample_impl(PyObject *module, Py_buffer *fragment, int width,
{
int val;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
if (index < 0 || index >= fragment->len/width) {
- PyErr_SetString(AudioopError, "Index out of range");
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
+ "Index out of range");
return NULL;
}
val = GETRAWSAMPLE(width, fragment->buf, index*width);
@@ -447,7 +461,7 @@ audioop_max_impl(PyObject *module, Py_buffer *fragment, int width)
Py_ssize_t i;
unsigned int absval, max = 0;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
for (i = 0; i < fragment->len; i += width) {
int val = GETRAWSAMPLE(width, fragment->buf, i);
@@ -479,7 +493,7 @@ audioop_minmax_impl(PyObject *module, Py_buffer *fragment, int width)
a warning */
int min = 0x7fffffff, max = -0x7FFFFFFF-1;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
for (i = 0; i < fragment->len; i += width) {
int val = GETRAWSAMPLE(width, fragment->buf, i);
@@ -507,7 +521,7 @@ audioop_avg_impl(PyObject *module, Py_buffer *fragment, int width)
int avg;
double sum = 0.0;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
for (i = 0; i < fragment->len; i += width)
sum += GETRAWSAMPLE(width, fragment->buf, i);
@@ -536,7 +550,7 @@ audioop_rms_impl(PyObject *module, Py_buffer *fragment, int width)
unsigned int res;
double sum_squares = 0.0;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
for (i = 0; i < fragment->len; i += width) {
double val = GETRAWSAMPLE(width, fragment->buf, i);
@@ -614,7 +628,8 @@ audioop_findfit_impl(PyObject *module, Py_buffer *fragment,
double sum_ri_2, sum_aij_2, sum_aij_ri, result, best_result, factor;
if (fragment->len & 1 || reference->len & 1) {
- PyErr_SetString(AudioopError, "Strings should be even-sized");
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
+ "Strings should be even-sized");
return NULL;
}
cp1 = (const int16_t *)fragment->buf;
@@ -623,7 +638,8 @@ audioop_findfit_impl(PyObject *module, Py_buffer *fragment,
len2 = reference->len >> 1;
if (len1 < len2) {
- PyErr_SetString(AudioopError, "First sample should be longer");
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
+ "First sample should be longer");
return NULL;
}
sum_ri_2 = _sum2(cp2, cp2, len2);
@@ -681,11 +697,13 @@ audioop_findfactor_impl(PyObject *module, Py_buffer *fragment,
double sum_ri_2, sum_aij_ri, result;
if (fragment->len & 1 || reference->len & 1) {
- PyErr_SetString(AudioopError, "Strings should be even-sized");
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
+ "Strings should be even-sized");
return NULL;
}
if (fragment->len != reference->len) {
- PyErr_SetString(AudioopError, "Samples should be same size");
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
+ "Samples should be same size");
return NULL;
}
cp1 = (const int16_t *)fragment->buf;
@@ -725,14 +743,16 @@ audioop_findmax_impl(PyObject *module, Py_buffer *fragment,
double result, best_result;
if (fragment->len & 1) {
- PyErr_SetString(AudioopError, "Strings should be even-sized");
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
+ "Strings should be even-sized");
return NULL;
}
cp1 = (const int16_t *)fragment->buf;
len1 = fragment->len >> 1;
if (length < 0 || len1 < length) {
- PyErr_SetString(AudioopError, "Input sample should be longer");
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
+ "Input sample should be longer");
return NULL;
}
@@ -777,7 +797,7 @@ audioop_avgpp_impl(PyObject *module, Py_buffer *fragment, int width)
unsigned int avg;
int diff, prevdiff, nextreme = 0;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
if (fragment->len <= width)
return PyLong_FromLong(0);
@@ -833,7 +853,7 @@ audioop_maxpp_impl(PyObject *module, Py_buffer *fragment, int width)
unsigned int max = 0, extremediff;
int diff, prevdiff;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
if (fragment->len <= width)
return PyLong_FromLong(0);
@@ -885,7 +905,7 @@ audioop_cross_impl(PyObject *module, Py_buffer *fragment, int width)
int prevval;
Py_ssize_t ncross;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
ncross = -1;
prevval = 17; /* Anything <> 0,1 */
@@ -918,7 +938,7 @@ audioop_mul_impl(PyObject *module, Py_buffer *fragment, int width,
double maxval, minval;
PyObject *rv;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
maxval = (double) maxvals[width];
@@ -961,10 +981,11 @@ audioop_tomono_impl(PyObject *module, Py_buffer *fragment, int width,
cp = fragment->buf;
len = fragment->len;
- if (!audioop_check_parameters(len, width))
+ if (!audioop_check_parameters(module, len, width))
return NULL;
if (((len / width) & 1) != 0) {
- PyErr_SetString(AudioopError, "not a whole number of frames");
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
+ "not a whole number of frames");
return NULL;
}
@@ -1008,7 +1029,7 @@ audioop_tostereo_impl(PyObject *module, Py_buffer *fragment, int width,
double maxval, minval;
PyObject *rv;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
maxval = (double) maxvals[width];
@@ -1056,10 +1077,11 @@ audioop_add_impl(PyObject *module, Py_buffer *fragment1,
int minval, maxval, newval;
PyObject *rv;
- if (!audioop_check_parameters(fragment1->len, width))
+ if (!audioop_check_parameters(module, fragment1->len, width))
return NULL;
if (fragment1->len != fragment2->len) {
- PyErr_SetString(AudioopError, "Lengths should be the same");
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
+ "Lengths should be the same");
return NULL;
}
@@ -1114,7 +1136,7 @@ audioop_bias_impl(PyObject *module, Py_buffer *fragment, int width, int bias)
unsigned int val = 0, mask;
PyObject *rv;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
rv = PyBytes_FromStringAndSize(NULL, fragment->len);
@@ -1172,7 +1194,7 @@ audioop_reverse_impl(PyObject *module, Py_buffer *fragment, int width)
Py_ssize_t i;
PyObject *rv;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
rv = PyBytes_FromStringAndSize(NULL, fragment->len);
@@ -1205,7 +1227,7 @@ audioop_byteswap_impl(PyObject *module, Py_buffer *fragment, int width)
Py_ssize_t i;
PyObject *rv;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
rv = PyBytes_FromStringAndSize(NULL, fragment->len);
@@ -1241,9 +1263,9 @@ audioop_lin2lin_impl(PyObject *module, Py_buffer *fragment, int width,
Py_ssize_t i, j;
PyObject *rv;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
- if (!audioop_check_size(newwidth))
+ if (!audioop_check_size(module, newwidth))
return NULL;
if (fragment->len/width > PY_SSIZE_T_MAX/newwidth) {
@@ -1302,10 +1324,11 @@ audioop_ratecv_impl(PyObject *module, Py_buffer *fragment, int width,
PyObject *samps, *str, *rv = NULL, *channel;
int bytes_per_frame;
- if (!audioop_check_size(width))
+ if (!audioop_check_size(module, width))
return NULL;
if (nchannels < 1) {
- PyErr_SetString(AudioopError, "# of channels should be >= 1");
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
+ "# of channels should be >= 1");
return NULL;
}
if (width > INT_MAX / nchannels) {
@@ -1318,17 +1341,19 @@ audioop_ratecv_impl(PyObject *module, Py_buffer *fragment, int width,
}
bytes_per_frame = width * nchannels;
if (weightA < 1 || weightB < 0) {
- PyErr_SetString(AudioopError,
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
"weightA should be >= 1, weightB should be >= 0");
return NULL;
}
assert(fragment->len >= 0);
if (fragment->len % bytes_per_frame != 0) {
- PyErr_SetString(AudioopError, "not a whole number of frames");
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
+ "not a whole number of frames");
return NULL;
}
if (inrate <= 0 || outrate <= 0) {
- PyErr_SetString(AudioopError, "sampling rate not > 0");
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
+ "sampling rate not > 0");
return NULL;
}
/* divide inrate and outrate by their greatest common divisor */
@@ -1369,7 +1394,7 @@ audioop_ratecv_impl(PyObject *module, Py_buffer *fragment, int width,
&d, &PyTuple_Type, &samps))
goto exit;
if (PyTuple_Size(samps) != nchannels) {
- PyErr_SetString(AudioopError,
+ PyErr_SetString(get_audioop_state(module)->AudioopError,
"illegal state argument");
goto exit;
}
@@ -1491,7 +1516,7 @@ audioop_lin2ulaw_impl(PyObject *module, Py_buffer *fragment, int width)
Py_ssize_t i;
PyObject *rv;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
rv = PyBytes_FromStringAndSize(NULL, fragment->len/width);
@@ -1525,7 +1550,7 @@ audioop_ulaw2lin_impl(PyObject *module, Py_buffer *fragment, int width)
Py_ssize_t i;
PyObject *rv;
- if (!audioop_check_size(width))
+ if (!audioop_check_size(module, width))
return NULL;
if (fragment->len > PY_SSIZE_T_MAX/width) {
@@ -1564,7 +1589,7 @@ audioop_lin2alaw_impl(PyObject *module, Py_buffer *fragment, int width)
Py_ssize_t i;
PyObject *rv;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
rv = PyBytes_FromStringAndSize(NULL, fragment->len/width);
@@ -1599,7 +1624,7 @@ audioop_alaw2lin_impl(PyObject *module, Py_buffer *fragment, int width)
int val;
PyObject *rv;
- if (!audioop_check_size(width))
+ if (!audioop_check_size(module, width))
return NULL;
if (fragment->len > PY_SSIZE_T_MAX/width) {
@@ -1643,7 +1668,7 @@ audioop_lin2adpcm_impl(PyObject *module, Py_buffer *fragment, int width,
PyObject *rv = NULL, *str;
int outputbuffer = 0, bufferstep;
- if (!audioop_check_parameters(fragment->len, width))
+ if (!audioop_check_parameters(module, fragment->len, width))
return NULL;
/* Decode state, should have (value, step) */
@@ -1773,7 +1798,7 @@ audioop_adpcm2lin_impl(PyObject *module, Py_buffer *fragment, int width,
PyObject *rv, *str;
int inputbuffer = 0, bufferstep;
- if (!audioop_check_size(width))
+ if (!audioop_check_size(module, width))
return NULL;
/* Decode state, should have (value, step) */
@@ -1897,31 +1922,65 @@ static PyMethodDef audioop_methods[] = {
{ 0, 0 }
};
+static int
+audioop_traverse(PyObject *module, visitproc visit, void *arg)
+{
+ audioop_state *state = get_audioop_state(module);
+ Py_VISIT(state->AudioopError);
+ return 0;
+}
+
+static int
+audioop_clear(PyObject *module)
+{
+ audioop_state *state = get_audioop_state(module);
+ Py_CLEAR(state->AudioopError);
+ return 0;
+}
+
+static void
+audioop_free(void *module) {
+ audioop_clear((PyObject *)module);
+}
+
+static int
+audioop_exec(PyObject* module)
+{
+ audioop_state *state = get_audioop_state(module);
+
+ state->AudioopError = PyErr_NewException("audioop.error", NULL, NULL);
+ if (state->AudioopError == NULL) {
+ return -1;
+ }
+
+ Py_INCREF(state->AudioopError);
+ if (PyModule_AddObject(module, "error", state->AudioopError) < 0) {
+ Py_DECREF(state->AudioopError);
+ return -1;
+ }
+
+ return 0;
+}
+
+static PyModuleDef_Slot audioop_slots[] = {
+ {Py_mod_exec, audioop_exec},
+ {0, NULL}
+};
static struct PyModuleDef audioopmodule = {
PyModuleDef_HEAD_INIT,
"audioop",
NULL,
- -1,
+ sizeof(audioop_state),
audioop_methods,
- NULL,
- NULL,
- NULL,
- NULL
+ audioop_slots,
+ audioop_traverse,
+ audioop_clear,
+ audioop_free
};
PyMODINIT_FUNC
PyInit_audioop(void)
{
- PyObject *m, *d;
- m = PyModule_Create(&audioopmodule);
- if (m == NULL)
- return NULL;
- d = PyModule_GetDict(m);
- if (d == NULL)
- return NULL;
- AudioopError = PyErr_NewException("audioop.error", NULL, NULL);
- if (AudioopError != NULL)
- PyDict_SetItemString(d,"error",AudioopError);
- return m;
+ return PyModuleDef_Init(&audioopmodule);
}
diff --git a/Modules/binascii.c b/Modules/binascii.c
index 1c7dc358..1f3248b6 100644
--- a/Modules/binascii.c
+++ b/Modules/binascii.c
@@ -66,6 +66,12 @@ typedef struct binascii_state {
PyObject *Incomplete;
} binascii_state;
+static binascii_state *
+get_binascii_state(PyObject *module)
+{
+ return (binascii_state *)PyModule_GetState(module);
+}
+
/*
** hqx lookup table, ascii->binary.
*/
@@ -130,7 +136,7 @@ static const unsigned char table_a2b_hqx[256] = {
static const unsigned char table_b2a_hqx[] =
"!\"#$%&'()*+,-012345689@ABCDEFGHIJKLMNPQRSTUVXYZ[`abcdefhijklmpqr";
-static const char table_a2b_base64[] = {
+static const unsigned char table_a2b_base64[] = {
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,62, -1,-1,-1,63,
@@ -138,7 +144,16 @@ static const char table_a2b_base64[] = {
-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10, 11,12,13,14,
15,16,17,18, 19,20,21,22, 23,24,25,-1, -1,-1,-1,-1,
-1,26,27,28, 29,30,31,32, 33,34,35,36, 37,38,39,40,
- 41,42,43,44, 45,46,47,48, 49,50,51,-1, -1,-1,-1,-1
+ 41,42,43,44, 45,46,47,48, 49,50,51,-1, -1,-1,-1,-1,
+
+ -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
+ -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
+ -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
+ -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
+ -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
+ -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
+ -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
+ -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
};
#define BASE64_PAD '='
@@ -413,32 +428,6 @@ binascii_b2a_uu_impl(PyObject *module, Py_buffer *data, int backtick)
return _PyBytesWriter_Finish(&writer, ascii_data);
}
-
-static int
-binascii_find_valid(const unsigned char *s, Py_ssize_t slen, int num)
-{
- /* Finds & returns the (num+1)th
- ** valid character for base64, or -1 if none.
- */
-
- int ret = -1;
- unsigned char c, b64val;
-
- while ((slen > 0) && (ret == -1)) {
- c = *s;
- b64val = table_a2b_base64[c & 0x7f];
- if ( ((c <= 0x7f) && (b64val != (unsigned char)-1)) ) {
- if (num == 0)
- ret = *s;
- num--;
- }
-
- s++;
- slen--;
- }
- return ret;
-}
-
/*[clinic input]
binascii.a2b_base64
@@ -452,88 +441,74 @@ static PyObject *
binascii_a2b_base64_impl(PyObject *module, Py_buffer *data)
/*[clinic end generated code: output=0628223f19fd3f9b input=5872acf6e1cac243]*/
{
- const unsigned char *ascii_data;
- unsigned char *bin_data;
- unsigned char *bin_data_start;
- int leftbits = 0;
- unsigned char this_ch;
- unsigned int leftchar = 0;
- Py_ssize_t ascii_len, bin_len;
- int quad_pos = 0;
- _PyBytesWriter writer;
- binascii_state *state;
-
- ascii_data = data->buf;
- ascii_len = data->len;
-
- assert(ascii_len >= 0);
+ assert(data->len >= 0);
- if (ascii_len > PY_SSIZE_T_MAX - 3)
- return PyErr_NoMemory();
-
- bin_len = ((ascii_len+3)/4)*3; /* Upper bound, corrected later */
-
- _PyBytesWriter_Init(&writer);
+ const unsigned char *ascii_data = data->buf;
+ size_t ascii_len = data->len;
/* Allocate the buffer */
- bin_data = _PyBytesWriter_Alloc(&writer, bin_len);
+ Py_ssize_t bin_len = ((ascii_len+3)/4)*3; /* Upper bound, corrected later */
+ _PyBytesWriter writer;
+ _PyBytesWriter_Init(&writer);
+ unsigned char *bin_data = _PyBytesWriter_Alloc(&writer, bin_len);
if (bin_data == NULL)
return NULL;
- bin_data_start = bin_data;
+ unsigned char *bin_data_start = bin_data;
- for( ; ascii_len > 0; ascii_len--, ascii_data++) {
- this_ch = *ascii_data;
-
- if (this_ch > 0x7f ||
- this_ch == '\r' || this_ch == '\n' || this_ch == ' ')
- continue;
+ int quad_pos = 0;
+ unsigned char leftchar = 0;
+ int pads = 0;
+ for (size_t i = 0; i < ascii_len; i++) {
+ unsigned char this_ch = ascii_data[i];
/* Check for pad sequences and ignore
** the invalid ones.
*/
if (this_ch == BASE64_PAD) {
- if ( (quad_pos < 2) ||
- ((quad_pos == 2) &&
- (binascii_find_valid(ascii_data, ascii_len, 1)
- != BASE64_PAD)) )
- {
- continue;
- }
- else {
+ if (quad_pos >= 2 && quad_pos + ++pads >= 4) {
/* A pad sequence means no more input.
** We've already interpreted the data
** from the quad at this point.
*/
- leftbits = 0;
- break;
+ goto done;
}
+ continue;
}
- this_ch = table_a2b_base64[*ascii_data];
- if ( this_ch == (unsigned char) -1 )
+ this_ch = table_a2b_base64[this_ch];
+ if (this_ch >= 64) {
continue;
+ }
+ pads = 0;
- /*
- ** Shift it in on the low end, and see if there's
- ** a byte ready for output.
- */
- quad_pos = (quad_pos + 1) & 0x03;
- leftchar = (leftchar << 6) | (this_ch);
- leftbits += 6;
-
- if ( leftbits >= 8 ) {
- leftbits -= 8;
- *bin_data++ = (leftchar >> leftbits) & 0xff;
- leftchar &= ((1 << leftbits) - 1);
+ switch (quad_pos) {
+ case 0:
+ quad_pos = 1;
+ leftchar = this_ch;
+ break;
+ case 1:
+ quad_pos = 2;
+ *bin_data++ = (leftchar << 2) | (this_ch >> 4);
+ leftchar = this_ch & 0x0f;
+ break;
+ case 2:
+ quad_pos = 3;
+ *bin_data++ = (leftchar << 4) | (this_ch >> 2);
+ leftchar = this_ch & 0x03;
+ break;
+ case 3:
+ quad_pos = 0;
+ *bin_data++ = (leftchar << 6) | (this_ch);
+ leftchar = 0;
+ break;
}
}
- if (leftbits != 0) {
- state = PyModule_GetState(module);
+ if (quad_pos != 0) {
+ binascii_state *state = PyModule_GetState(module);
if (state == NULL) {
- return NULL;
- }
- if (leftbits == 6) {
+ /* error already set, from PyModule_GetState */
+ } else if (quad_pos == 1) {
/*
** There is exactly one extra valid, non-padding, base64 character.
** This is an invalid length, as there is no possible input that
@@ -551,6 +526,7 @@ binascii_a2b_base64_impl(PyObject *module, Py_buffer *data)
return NULL;
}
+done:
return _PyBytesWriter_Finish(&writer, bin_data);
}
@@ -643,6 +619,11 @@ static PyObject *
binascii_a2b_hqx_impl(PyObject *module, Py_buffer *data)
/*[clinic end generated code: output=4d6d8c54d54ea1c1 input=0d914c680e0eed55]*/
{
+ if (PyErr_WarnEx(PyExc_DeprecationWarning,
+ "binascii.a2b_hqx() is deprecated", 1) < 0) {
+ return NULL;
+ }
+
const unsigned char *ascii_data;
unsigned char *bin_data;
int leftbits = 0;
@@ -731,6 +712,11 @@ static PyObject *
binascii_rlecode_hqx_impl(PyObject *module, Py_buffer *data)
/*[clinic end generated code: output=393d79338f5f5629 input=e1f1712447a82b09]*/
{
+ if (PyErr_WarnEx(PyExc_DeprecationWarning,
+ "binascii.rlecode_hqx() is deprecated", 1) < 0) {
+ return NULL;
+ }
+
const unsigned char *in_data;
unsigned char *out_data;
unsigned char ch;
@@ -793,6 +779,11 @@ static PyObject *
binascii_b2a_hqx_impl(PyObject *module, Py_buffer *data)
/*[clinic end generated code: output=d0aa5a704bc9f7de input=9596ebe019fe12ba]*/
{
+ if (PyErr_WarnEx(PyExc_DeprecationWarning,
+ "binascii.b2a_hqx() is deprecated", 1) < 0) {
+ return NULL;
+ }
+
unsigned char *ascii_data;
const unsigned char *bin_data;
int leftbits = 0;
@@ -848,6 +839,11 @@ static PyObject *
binascii_rledecode_hqx_impl(PyObject *module, Py_buffer *data)
/*[clinic end generated code: output=9826619565de1c6c input=54cdd49fc014402c]*/
{
+ if (PyErr_WarnEx(PyExc_DeprecationWarning,
+ "binascii.rledecode_hqx() is deprecated", 1) < 0) {
+ return NULL;
+ }
+
const unsigned char *in_data;
unsigned char *out_data;
unsigned char in_byte, in_repeat;
@@ -962,7 +958,7 @@ error:
/*[clinic input]
-binascii.crc_hqx -> unsigned_int
+binascii.crc_hqx
data: Py_buffer
crc: unsigned_int(bitwise=True)
@@ -971,9 +967,9 @@ binascii.crc_hqx -> unsigned_int
Compute CRC-CCITT incrementally.
[clinic start generated code]*/
-static unsigned int
+static PyObject *
binascii_crc_hqx_impl(PyObject *module, Py_buffer *data, unsigned int crc)
-/*[clinic end generated code: output=8ec2a78590d19170 input=f18240ff8c705b79]*/
+/*[clinic end generated code: output=2fde213d0f547a98 input=56237755370a951c]*/
{
const unsigned char *bin_data;
Py_ssize_t len;
@@ -986,7 +982,7 @@ binascii_crc_hqx_impl(PyObject *module, Py_buffer *data, unsigned int crc)
crc = ((crc<<8)&0xff00) ^ crctab_hqx[(crc>>8)^*bin_data++];
}
- return crc;
+ return PyLong_FromUnsignedLong(crc);
}
#ifndef USE_ZLIB_CRC32
@@ -1315,15 +1311,12 @@ binascii_a2b_qp_impl(PyObject *module, Py_buffer *data, int header)
datalen = data->len;
/* We allocate the output same size as input, this is overkill.
- * The previous implementation used calloc() so we'll zero out the
- * memory here too, since PyMem_Malloc() does not guarantee that.
*/
- odata = (unsigned char *) PyMem_Malloc(datalen);
+ odata = (unsigned char *) PyMem_Calloc(1, datalen);
if (odata == NULL) {
PyErr_NoMemory();
return NULL;
}
- memset(odata, 0, datalen);
in = out = 0;
while (in < datalen) {
@@ -1503,15 +1496,12 @@ binascii_b2a_qp_impl(PyObject *module, Py_buffer *data, int quotetabs,
}
/* We allocate the output same size as input, this is overkill.
- * The previous implementation used calloc() so we'll zero out the
- * memory here too, since PyMem_Malloc() does not guarantee that.
*/
- odata = (unsigned char *) PyMem_Malloc(odatalen);
+ odata = (unsigned char *) PyMem_Calloc(1, odatalen);
if (odata == NULL) {
PyErr_NoMemory();
return NULL;
}
- memset(odata, 0, odatalen);
in = out = linelen = 0;
while (in < datalen) {
@@ -1616,9 +1606,9 @@ static struct PyMethodDef binascii_module_methods[] = {
PyDoc_STRVAR(doc_binascii, "Conversion between binary data and ASCII");
static int
-binascii_exec(PyObject *m) {
+binascii_exec(PyObject *module) {
int result;
- binascii_state *state = PyModule_GetState(m);
+ binascii_state *state = PyModule_GetState(module);
if (state == NULL) {
return -1;
}
@@ -1627,8 +1617,10 @@ binascii_exec(PyObject *m) {
if (state->Error == NULL) {
return -1;
}
- result = PyModule_AddObject(m, "Error", state->Error);
+ Py_INCREF(state->Error);
+ result = PyModule_AddObject(module, "Error", state->Error);
if (result == -1) {
+ Py_DECREF(state->Error);
return -1;
}
@@ -1636,8 +1628,10 @@ binascii_exec(PyObject *m) {
if (state->Incomplete == NULL) {
return -1;
}
- result = PyModule_AddObject(m, "Incomplete", state->Incomplete);
+ Py_INCREF(state->Incomplete);
+ result = PyModule_AddObject(module, "Incomplete", state->Incomplete);
if (result == -1) {
+ Py_DECREF(state->Incomplete);
return -1;
}
@@ -1649,6 +1643,30 @@ static PyModuleDef_Slot binascii_slots[] = {
{0, NULL}
};
+static int
+binascii_traverse(PyObject *module, visitproc visit, void *arg)
+{
+ binascii_state *state = get_binascii_state(module);
+ Py_VISIT(state->Error);
+ Py_VISIT(state->Incomplete);
+ return 0;
+}
+
+static int
+binascii_clear(PyObject *module)
+{
+ binascii_state *state = get_binascii_state(module);
+ Py_CLEAR(state->Error);
+ Py_CLEAR(state->Incomplete);
+ return 0;
+}
+
+static void
+binascii_free(void *module)
+{
+ binascii_clear((PyObject *)module);
+}
+
static struct PyModuleDef binasciimodule = {
PyModuleDef_HEAD_INIT,
"binascii",
@@ -1656,9 +1674,9 @@ static struct PyModuleDef binasciimodule = {
sizeof(binascii_state),
binascii_module_methods,
binascii_slots,
- NULL,
- NULL,
- NULL
+ binascii_traverse,
+ binascii_clear,
+ binascii_free
};
PyMODINIT_FUNC
diff --git a/Modules/cjkcodecs/README b/Modules/cjkcodecs/README
index b2370bc2..165ae7ad 100644
--- a/Modules/cjkcodecs/README
+++ b/Modules/cjkcodecs/README
@@ -1,12 +1,10 @@
To generate or modify mapping headers
-------------------------------------
-Mapping headers are imported from CJKCodecs as pre-generated form.
-If you need to tweak or add something on it, please look at tools/
-subdirectory of CJKCodecs' distribution.
+Mapping headers are generated from Tools/unicode/genmap_*.py
-Notes on implmentation characteristics of each codecs
+Notes on implementation characteristics of each codecs
-----------------------------------------------------
1) Big5 codec
diff --git a/Modules/cjkcodecs/cjkcodecs.h b/Modules/cjkcodecs/cjkcodecs.h
index b67f3482..e41755b1 100644
--- a/Modules/cjkcodecs/cjkcodecs.h
+++ b/Modules/cjkcodecs/cjkcodecs.h
@@ -72,7 +72,7 @@ static const struct dbcs_map *mapping_list;
#define ENCODER(encoding) \
static Py_ssize_t encoding##_encode( \
MultibyteCodec_State *state, const void *config, \
- int kind, void *data, \
+ int kind, const void *data, \
Py_ssize_t *inpos, Py_ssize_t inlen, \
unsigned char **outbuf, Py_ssize_t outleft, int flags)
#define ENCODER_RESET(encoding) \
@@ -291,7 +291,7 @@ getcodec(PyObject *self, PyObject *encoding)
if (codecobj == NULL)
return NULL;
- r = PyObject_CallFunctionObjArgs(cofunc, codecobj, NULL);
+ r = PyObject_CallOneArg(cofunc, codecobj);
Py_DECREF(codecobj);
return r;
diff --git a/Modules/cjkcodecs/mappings_cn.h b/Modules/cjkcodecs/mappings_cn.h
index 1f8c299d..87ca0de7 100644
--- a/Modules/cjkcodecs/mappings_cn.h
+++ b/Modules/cjkcodecs/mappings_cn.h
@@ -1,3 +1,4 @@
+// AUTO-GENERATED FILE FROM genmap_schinese.py: DO NOT EDIT
static const ucs2_t __gb2312_decmap[7482] = {
12288,12289,12290,12539,713,711,168,12291,12293,8213,65374,8214,8230,8216,
8217,8220,8221,12308,12309,12296,12297,12298,12299,12300,12301,12302,12303,
diff --git a/Modules/cjkcodecs/mappings_jisx0213_pair.h b/Modules/cjkcodecs/mappings_jisx0213_pair.h
index 729e4bcb..c96f2014 100644
--- a/Modules/cjkcodecs/mappings_jisx0213_pair.h
+++ b/Modules/cjkcodecs/mappings_jisx0213_pair.h
@@ -1,3 +1,4 @@
+// AUTO-GENERATED FILE FROM genmap_japanese.py: DO NOT EDIT
#define JISX0213_ENCPAIRS 46
#ifdef EXTERN_JISX0213_PAIR
static const struct widedbcs_index *jisx0213_pair_decmap;
diff --git a/Modules/cjkcodecs/mappings_jp.h b/Modules/cjkcodecs/mappings_jp.h
index c6dae3da..409aeae2 100644
--- a/Modules/cjkcodecs/mappings_jp.h
+++ b/Modules/cjkcodecs/mappings_jp.h
@@ -1,3 +1,4 @@
+// AUTO-GENERATED FILE FROM genmap_japanese.py: DO NOT EDIT
static const ucs2_t __jisx0208_decmap[6956] = {
12288,12289,12290,65292,65294,12539,65306,65307,65311,65281,12443,12444,180,
65344,168,65342,65507,65343,12541,12542,12445,12446,12291,20189,12293,12294,
diff --git a/Modules/cjkcodecs/mappings_kr.h b/Modules/cjkcodecs/mappings_kr.h
index 7e6fdd27..bb59accc 100644
--- a/Modules/cjkcodecs/mappings_kr.h
+++ b/Modules/cjkcodecs/mappings_kr.h
@@ -1,3 +1,4 @@
+// AUTO-GENERATED FILE FROM genmap_korean.py: DO NOT EDIT
static const ucs2_t __ksx1001_decmap[8264] = {
12288,12289,12290,183,8229,8230,168,12291,173,8213,8741,65340,8764,8216,8217,
8220,8221,12308,12309,12296,12297,12298,12299,12300,12301,12302,12303,12304,
@@ -3249,3 +3250,4 @@ __cp949_encmap+31959,0,255},{__cp949_encmap+32215,0,255},{__cp949_encmap+32471
__cp949_encmap+32891,0,11},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{__cp949_encmap+
32903,1,230},
};
+
diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c
index 4a751f7c..86402768 100644
--- a/Modules/cjkcodecs/multibytecodec.c
+++ b/Modules/cjkcodecs/multibytecodec.c
@@ -6,7 +6,7 @@
#define PY_SSIZE_T_CLEAN
#include "Python.h"
-#include "structmember.h"
+#include "structmember.h" // PyMemberDef
#include "multibytecodec.h"
#include "clinic/multibytecodec.c.h"
@@ -81,7 +81,7 @@ internal_error_callback(const char *errors)
static PyObject *
call_error_callback(PyObject *errors, PyObject *exc)
{
- PyObject *args, *cb, *r;
+ PyObject *cb, *r;
const char *str;
assert(PyUnicode_Check(errors));
@@ -92,17 +92,7 @@ call_error_callback(PyObject *errors, PyObject *exc)
if (cb == NULL)
return NULL;
- args = PyTuple_New(1);
- if (args == NULL) {
- Py_DECREF(cb);
- return NULL;
- }
-
- PyTuple_SET_ITEM(args, 0, exc);
- Py_INCREF(exc);
-
- r = PyObject_CallObject(cb, args);
- Py_DECREF(args);
+ r = PyObject_CallOneArg(cb, exc);
Py_DECREF(cb);
return r;
}
@@ -238,7 +228,7 @@ multibytecodec_encerror(MultibyteCodec *codec,
Py_ssize_t r;
Py_ssize_t inpos;
int kind;
- void *data;
+ const void *data;
replchar = PyUnicode_FromOrdinal('?');
if (replchar == NULL)
@@ -467,7 +457,7 @@ multibytecodec_encode(MultibyteCodec *codec,
Py_ssize_t finalsize, r = 0;
Py_ssize_t datalen;
int kind;
- void *data;
+ const void *data;
if (PyUnicode_READY(text) < 0)
return NULL;
@@ -1256,7 +1246,7 @@ _multibytecodec_MultibyteIncrementalDecoder_setstate_impl(MultibyteIncrementalDe
PyObject *buffer;
PyLongObject *statelong;
Py_ssize_t buffersize;
- char *bufferstr;
+ const char *bufferstr;
unsigned char statebytes[8];
if (!PyArg_ParseTuple(state, "SO!;setstate(): illegal state argument",
@@ -1460,7 +1450,7 @@ mbstreamreader_iread(MultibyteStreamReaderObject *self,
PyErr_Format(PyExc_TypeError,
"stream function returned a "
"non-bytes object (%.100s)",
- cres->ob_type->tp_name);
+ Py_TYPE(cres)->tp_name);
goto errorexit;
}
@@ -1786,7 +1776,7 @@ mbstreamwriter_iwrite(MultibyteStreamWriterObject *self,
if (str == NULL)
return -1;
- wr = _PyObject_CallMethodIdObjArgs(self->stream, &PyId_write, str, NULL);
+ wr = _PyObject_CallMethodIdOneArg(self->stream, &PyId_write, str);
Py_DECREF(str);
if (wr == NULL)
return -1;
@@ -1880,7 +1870,7 @@ _multibytecodec_MultibyteStreamWriter_reset_impl(MultibyteStreamWriterObject *se
if (PyBytes_Size(pwrt) > 0) {
PyObject *wr;
- wr = _PyObject_CallMethodIdObjArgs(self->stream, &PyId_write, pwrt);
+ wr = _PyObject_CallMethodIdOneArg(self->stream, &PyId_write, pwrt);
if (wr == NULL) {
Py_DECREF(pwrt);
return NULL;
@@ -2069,14 +2059,12 @@ static struct PyModuleDef _multibytecodecmodule = {
PyMODINIT_FUNC
PyInit__multibytecodec(void)
{
- int i;
PyObject *m;
PyTypeObject *typelist[] = {
&MultibyteIncrementalEncoder_Type,
&MultibyteIncrementalDecoder_Type,
&MultibyteStreamReader_Type,
- &MultibyteStreamWriter_Type,
- NULL
+ &MultibyteStreamWriter_Type
};
if (PyType_Ready(&MultibyteCodec_Type) < 0)
@@ -2086,16 +2074,13 @@ PyInit__multibytecodec(void)
if (m == NULL)
return NULL;
- for (i = 0; typelist[i] != NULL; i++) {
- if (PyType_Ready(typelist[i]) < 0)
+ for (size_t i = 0; i < Py_ARRAY_LENGTH(typelist); i++) {
+ if (PyModule_AddType(m, typelist[i]) < 0) {
return NULL;
- Py_INCREF(typelist[i]);
- PyModule_AddObject(m, typelist[i]->tp_name,
- (PyObject *)typelist[i]);
+ }
}
if (PyErr_Occurred()) {
- Py_FatalError("can't initialize the _multibytecodec module");
Py_DECREF(m);
m = NULL;
}
diff --git a/Modules/cjkcodecs/multibytecodec.h b/Modules/cjkcodecs/multibytecodec.h
index 6d34534e..59468210 100644
--- a/Modules/cjkcodecs/multibytecodec.h
+++ b/Modules/cjkcodecs/multibytecodec.h
@@ -30,7 +30,7 @@ typedef struct {
typedef int (*mbcodec_init)(const void *config);
typedef Py_ssize_t (*mbencode_func)(MultibyteCodec_State *state,
const void *config,
- int kind, void *data,
+ int kind, const void *data,
Py_ssize_t *inpos, Py_ssize_t inlen,
unsigned char **outbuf, Py_ssize_t outleft,
int flags);
@@ -65,7 +65,7 @@ typedef struct {
MultibyteCodec *codec;
} MultibyteCodecObject;
-#define MultibyteCodec_Check(op) ((op)->ob_type == &MultibyteCodec_Type)
+#define MultibyteCodec_Check(op) Py_IS_TYPE((op), &MultibyteCodec_Type)
#define _MultibyteStatefulCodec_HEAD \
PyObject_HEAD \
diff --git a/Modules/clinic/_asynciomodule.c.h b/Modules/clinic/_asynciomodule.c.h
index 17eb7733..a071efc1 100644
--- a/Modules/clinic/_asynciomodule.c.h
+++ b/Modules/clinic/_asynciomodule.c.h
@@ -174,7 +174,7 @@ PyDoc_STRVAR(_asyncio_Future_remove_done_callback__doc__,
{"remove_done_callback", (PyCFunction)_asyncio_Future_remove_done_callback, METH_O, _asyncio_Future_remove_done_callback__doc__},
PyDoc_STRVAR(_asyncio_Future_cancel__doc__,
-"cancel($self, /)\n"
+"cancel($self, /, msg=None)\n"
"--\n"
"\n"
"Cancel the future and schedule callbacks.\n"
@@ -184,15 +184,34 @@ PyDoc_STRVAR(_asyncio_Future_cancel__doc__,
"return True.");
#define _ASYNCIO_FUTURE_CANCEL_METHODDEF \
- {"cancel", (PyCFunction)_asyncio_Future_cancel, METH_NOARGS, _asyncio_Future_cancel__doc__},
+ {"cancel", (PyCFunction)(void(*)(void))_asyncio_Future_cancel, METH_FASTCALL|METH_KEYWORDS, _asyncio_Future_cancel__doc__},
static PyObject *
-_asyncio_Future_cancel_impl(FutureObj *self);
+_asyncio_Future_cancel_impl(FutureObj *self, PyObject *msg);
static PyObject *
-_asyncio_Future_cancel(FutureObj *self, PyObject *Py_UNUSED(ignored))
+_asyncio_Future_cancel(FutureObj *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
- return _asyncio_Future_cancel_impl(self);
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"msg", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "cancel", 0};
+ PyObject *argsbuf[1];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
+ PyObject *msg = Py_None;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ msg = args[0];
+skip_optional_pos:
+ return_value = _asyncio_Future_cancel_impl(self, msg);
+
+exit:
+ return return_value;
}
PyDoc_STRVAR(_asyncio_Future_cancelled__doc__,
@@ -252,6 +271,27 @@ _asyncio_Future_get_loop(FutureObj *self, PyObject *Py_UNUSED(ignored))
return _asyncio_Future_get_loop_impl(self);
}
+PyDoc_STRVAR(_asyncio_Future__make_cancelled_error__doc__,
+"_make_cancelled_error($self, /)\n"
+"--\n"
+"\n"
+"Create the CancelledError to raise if the Future is cancelled.\n"
+"\n"
+"This should only be called once when handling a cancellation since\n"
+"it erases the context exception value.");
+
+#define _ASYNCIO_FUTURE__MAKE_CANCELLED_ERROR_METHODDEF \
+ {"_make_cancelled_error", (PyCFunction)_asyncio_Future__make_cancelled_error, METH_NOARGS, _asyncio_Future__make_cancelled_error__doc__},
+
+static PyObject *
+_asyncio_Future__make_cancelled_error_impl(FutureObj *self);
+
+static PyObject *
+_asyncio_Future__make_cancelled_error(FutureObj *self, PyObject *Py_UNUSED(ignored))
+{
+ return _asyncio_Future__make_cancelled_error_impl(self);
+}
+
PyDoc_STRVAR(_asyncio_Future__repr_info__doc__,
"_repr_info($self, /)\n"
"--\n"
@@ -315,84 +355,25 @@ exit:
return return_value;
}
-PyDoc_STRVAR(_asyncio_Task_current_task__doc__,
-"current_task($type, /, loop=None)\n"
+PyDoc_STRVAR(_asyncio_Task__make_cancelled_error__doc__,
+"_make_cancelled_error($self, /)\n"
"--\n"
"\n"
-"Return the currently running task in an event loop or None.\n"
+"Create the CancelledError to raise if the Task is cancelled.\n"
"\n"
-"By default the current task for the current event loop is returned.\n"
-"\n"
-"None is returned when called not in the context of a Task.");
+"This should only be called once when handling a cancellation since\n"
+"it erases the context exception value.");
-#define _ASYNCIO_TASK_CURRENT_TASK_METHODDEF \
- {"current_task", (PyCFunction)(void(*)(void))_asyncio_Task_current_task, METH_FASTCALL|METH_KEYWORDS|METH_CLASS, _asyncio_Task_current_task__doc__},
+#define _ASYNCIO_TASK__MAKE_CANCELLED_ERROR_METHODDEF \
+ {"_make_cancelled_error", (PyCFunction)_asyncio_Task__make_cancelled_error, METH_NOARGS, _asyncio_Task__make_cancelled_error__doc__},
static PyObject *
-_asyncio_Task_current_task_impl(PyTypeObject *type, PyObject *loop);
+_asyncio_Task__make_cancelled_error_impl(TaskObj *self);
static PyObject *
-_asyncio_Task_current_task(PyTypeObject *type, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+_asyncio_Task__make_cancelled_error(TaskObj *self, PyObject *Py_UNUSED(ignored))
{
- PyObject *return_value = NULL;
- static const char * const _keywords[] = {"loop", NULL};
- static _PyArg_Parser _parser = {NULL, _keywords, "current_task", 0};
- PyObject *argsbuf[1];
- Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *loop = Py_None;
-
- args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
- if (!args) {
- goto exit;
- }
- if (!noptargs) {
- goto skip_optional_pos;
- }
- loop = args[0];
-skip_optional_pos:
- return_value = _asyncio_Task_current_task_impl(type, loop);
-
-exit:
- return return_value;
-}
-
-PyDoc_STRVAR(_asyncio_Task_all_tasks__doc__,
-"all_tasks($type, /, loop=None)\n"
-"--\n"
-"\n"
-"Return a set of all tasks for an event loop.\n"
-"\n"
-"By default all tasks for the current event loop are returned.");
-
-#define _ASYNCIO_TASK_ALL_TASKS_METHODDEF \
- {"all_tasks", (PyCFunction)(void(*)(void))_asyncio_Task_all_tasks, METH_FASTCALL|METH_KEYWORDS|METH_CLASS, _asyncio_Task_all_tasks__doc__},
-
-static PyObject *
-_asyncio_Task_all_tasks_impl(PyTypeObject *type, PyObject *loop);
-
-static PyObject *
-_asyncio_Task_all_tasks(PyTypeObject *type, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
-{
- PyObject *return_value = NULL;
- static const char * const _keywords[] = {"loop", NULL};
- static _PyArg_Parser _parser = {NULL, _keywords, "all_tasks", 0};
- PyObject *argsbuf[1];
- Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *loop = Py_None;
-
- args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
- if (!args) {
- goto exit;
- }
- if (!noptargs) {
- goto skip_optional_pos;
- }
- loop = args[0];
-skip_optional_pos:
- return_value = _asyncio_Task_all_tasks_impl(type, loop);
-
-exit:
- return return_value;
+ return _asyncio_Task__make_cancelled_error_impl(self);
}
PyDoc_STRVAR(_asyncio_Task__repr_info__doc__,
@@ -413,7 +394,7 @@ _asyncio_Task__repr_info(TaskObj *self, PyObject *Py_UNUSED(ignored))
}
PyDoc_STRVAR(_asyncio_Task_cancel__doc__,
-"cancel($self, /)\n"
+"cancel($self, /, msg=None)\n"
"--\n"
"\n"
"Request that this task cancel itself.\n"
@@ -436,15 +417,34 @@ PyDoc_STRVAR(_asyncio_Task_cancel__doc__,
"was not called).");
#define _ASYNCIO_TASK_CANCEL_METHODDEF \
- {"cancel", (PyCFunction)_asyncio_Task_cancel, METH_NOARGS, _asyncio_Task_cancel__doc__},
+ {"cancel", (PyCFunction)(void(*)(void))_asyncio_Task_cancel, METH_FASTCALL|METH_KEYWORDS, _asyncio_Task_cancel__doc__},
static PyObject *
-_asyncio_Task_cancel_impl(TaskObj *self);
+_asyncio_Task_cancel_impl(TaskObj *self, PyObject *msg);
static PyObject *
-_asyncio_Task_cancel(TaskObj *self, PyObject *Py_UNUSED(ignored))
+_asyncio_Task_cancel(TaskObj *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
- return _asyncio_Task_cancel_impl(self);
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"msg", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "cancel", 0};
+ PyObject *argsbuf[1];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
+ PyObject *msg = Py_None;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ msg = args[0];
+skip_optional_pos:
+ return_value = _asyncio_Task_cancel_impl(self, msg);
+
+exit:
+ return return_value;
}
PyDoc_STRVAR(_asyncio_Task_get_stack__doc__,
@@ -832,4 +832,4 @@ _asyncio__leave_task(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
exit:
return return_value;
}
-/*[clinic end generated code: output=585ba1f8de5b4103 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=d0fc522bcbff9d61 input=a9049054013a1b77]*/
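
The regenerated _asyncio wrappers above switch Future.cancel()/Task.cancel() from METH_NOARGS to METH_FASTCALL|METH_KEYWORDS so they can accept an optional msg argument. The real wrappers go through the private _PyArg_UnpackKeywords helper; the hand-rolled sketch below only illustrates the calling convention (positional values in args[0..nargs-1], keyword values following them, names in kwnames) and is not the clinic output:

    /* Hand-rolled METH_FASTCALL|METH_KEYWORDS method accepting an optional
     * "msg" argument; a simplified illustration, not the generated wrapper
     * (it does not reject msg passed both positionally and by keyword). */
    #include <Python.h>

    static PyObject *
    demo_cancel(PyObject *self, PyObject *const *args,
                Py_ssize_t nargs, PyObject *kwnames)
    {
        PyObject *msg = Py_None;

        if (nargs > 1) {
            PyErr_SetString(PyExc_TypeError,
                            "cancel() takes at most 1 positional argument");
            return NULL;
        }
        if (nargs == 1) {
            msg = args[0];
        }
        if (kwnames != NULL) {
            Py_ssize_t nkw = PyTuple_GET_SIZE(kwnames);
            for (Py_ssize_t i = 0; i < nkw; i++) {
                PyObject *name = PyTuple_GET_ITEM(kwnames, i);
                if (PyUnicode_CompareWithASCIIString(name, "msg") != 0) {
                    PyErr_SetString(PyExc_TypeError,
                                    "unexpected keyword argument");
                    return NULL;
                }
                msg = args[nargs + i];  /* keyword values follow positionals */
            }
        }
        /* ... the real method would forward msg to the implementation ... */
        Py_INCREF(msg);
        return msg;
    }

    static PyMethodDef demo_methods[] = {
        {"cancel", (PyCFunction)(void(*)(void))demo_cancel,
         METH_FASTCALL | METH_KEYWORDS, "cancel(msg=None)"},
        {NULL, NULL, 0, NULL}
    };
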
diff --git a/Modules/clinic/_bisectmodule.c.h b/Modules/clinic/_bisectmodule.c.h
new file mode 100644
index 00000000..80ab7048
--- /dev/null
+++ b/Modules/clinic/_bisectmodule.c.h
@@ -0,0 +1,306 @@
+/*[clinic input]
+preserve
+[clinic start generated code]*/
+
+PyDoc_STRVAR(_bisect_bisect_right__doc__,
+"bisect_right($module, /, a, x, lo=0, hi=None)\n"
+"--\n"
+"\n"
+"Return the index where to insert item x in list a, assuming a is sorted.\n"
+"\n"
+"The return value i is such that all e in a[:i] have e <= x, and all e in\n"
+"a[i:] have e > x. So if x already appears in the list, i points just\n"
+"beyond the rightmost x already there\n"
+"\n"
+"Optional args lo (default 0) and hi (default len(a)) bound the\n"
+"slice of a to be searched.");
+
+#define _BISECT_BISECT_RIGHT_METHODDEF \
+ {"bisect_right", (PyCFunction)(void(*)(void))_bisect_bisect_right, METH_FASTCALL|METH_KEYWORDS, _bisect_bisect_right__doc__},
+
+static Py_ssize_t
+_bisect_bisect_right_impl(PyObject *module, PyObject *a, PyObject *x,
+ Py_ssize_t lo, Py_ssize_t hi);
+
+static PyObject *
+_bisect_bisect_right(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"a", "x", "lo", "hi", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "bisect_right", 0};
+ PyObject *argsbuf[4];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2;
+ PyObject *a;
+ PyObject *x;
+ Py_ssize_t lo = 0;
+ Py_ssize_t hi = -1;
+ Py_ssize_t _return_value;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 4, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ a = args[0];
+ x = args[1];
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ if (args[2]) {
+ if (PyFloat_Check(args[2])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ {
+ Py_ssize_t ival = -1;
+ PyObject *iobj = PyNumber_Index(args[2]);
+ if (iobj != NULL) {
+ ival = PyLong_AsSsize_t(iobj);
+ Py_DECREF(iobj);
+ }
+ if (ival == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ lo = ival;
+ }
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+ if (!_Py_convert_optional_to_ssize_t(args[3], &hi)) {
+ goto exit;
+ }
+skip_optional_pos:
+ _return_value = _bisect_bisect_right_impl(module, a, x, lo, hi);
+ if ((_return_value == -1) && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = PyLong_FromSsize_t(_return_value);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(_bisect_insort_right__doc__,
+"insort_right($module, /, a, x, lo=0, hi=None)\n"
+"--\n"
+"\n"
+"Insert item x in list a, and keep it sorted assuming a is sorted.\n"
+"\n"
+"If x is already in a, insert it to the right of the rightmost x.\n"
+"\n"
+"Optional args lo (default 0) and hi (default len(a)) bound the\n"
+"slice of a to be searched.");
+
+#define _BISECT_INSORT_RIGHT_METHODDEF \
+ {"insort_right", (PyCFunction)(void(*)(void))_bisect_insort_right, METH_FASTCALL|METH_KEYWORDS, _bisect_insort_right__doc__},
+
+static PyObject *
+_bisect_insort_right_impl(PyObject *module, PyObject *a, PyObject *x,
+ Py_ssize_t lo, Py_ssize_t hi);
+
+static PyObject *
+_bisect_insort_right(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"a", "x", "lo", "hi", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "insort_right", 0};
+ PyObject *argsbuf[4];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2;
+ PyObject *a;
+ PyObject *x;
+ Py_ssize_t lo = 0;
+ Py_ssize_t hi = -1;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 4, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ a = args[0];
+ x = args[1];
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ if (args[2]) {
+ if (PyFloat_Check(args[2])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ {
+ Py_ssize_t ival = -1;
+ PyObject *iobj = PyNumber_Index(args[2]);
+ if (iobj != NULL) {
+ ival = PyLong_AsSsize_t(iobj);
+ Py_DECREF(iobj);
+ }
+ if (ival == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ lo = ival;
+ }
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+ if (!_Py_convert_optional_to_ssize_t(args[3], &hi)) {
+ goto exit;
+ }
+skip_optional_pos:
+ return_value = _bisect_insort_right_impl(module, a, x, lo, hi);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(_bisect_bisect_left__doc__,
+"bisect_left($module, /, a, x, lo=0, hi=None)\n"
+"--\n"
+"\n"
+"Return the index where to insert item x in list a, assuming a is sorted.\n"
+"\n"
+"The return value i is such that all e in a[:i] have e < x, and all e in\n"
+"a[i:] have e >= x. So if x already appears in the list, i points just\n"
+"before the leftmost x already there.\n"
+"\n"
+"Optional args lo (default 0) and hi (default len(a)) bound the\n"
+"slice of a to be searched.");
+
+#define _BISECT_BISECT_LEFT_METHODDEF \
+ {"bisect_left", (PyCFunction)(void(*)(void))_bisect_bisect_left, METH_FASTCALL|METH_KEYWORDS, _bisect_bisect_left__doc__},
+
+static Py_ssize_t
+_bisect_bisect_left_impl(PyObject *module, PyObject *a, PyObject *x,
+ Py_ssize_t lo, Py_ssize_t hi);
+
+static PyObject *
+_bisect_bisect_left(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"a", "x", "lo", "hi", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "bisect_left", 0};
+ PyObject *argsbuf[4];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2;
+ PyObject *a;
+ PyObject *x;
+ Py_ssize_t lo = 0;
+ Py_ssize_t hi = -1;
+ Py_ssize_t _return_value;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 4, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ a = args[0];
+ x = args[1];
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ if (args[2]) {
+ if (PyFloat_Check(args[2])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ {
+ Py_ssize_t ival = -1;
+ PyObject *iobj = PyNumber_Index(args[2]);
+ if (iobj != NULL) {
+ ival = PyLong_AsSsize_t(iobj);
+ Py_DECREF(iobj);
+ }
+ if (ival == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ lo = ival;
+ }
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+ if (!_Py_convert_optional_to_ssize_t(args[3], &hi)) {
+ goto exit;
+ }
+skip_optional_pos:
+ _return_value = _bisect_bisect_left_impl(module, a, x, lo, hi);
+ if ((_return_value == -1) && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = PyLong_FromSsize_t(_return_value);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(_bisect_insort_left__doc__,
+"insort_left($module, /, a, x, lo=0, hi=None)\n"
+"--\n"
+"\n"
+"Insert item x in list a, and keep it sorted assuming a is sorted.\n"
+"\n"
+"If x is already in a, insert it to the left of the leftmost x.\n"
+"\n"
+"Optional args lo (default 0) and hi (default len(a)) bound the\n"
+"slice of a to be searched.");
+
+#define _BISECT_INSORT_LEFT_METHODDEF \
+ {"insort_left", (PyCFunction)(void(*)(void))_bisect_insort_left, METH_FASTCALL|METH_KEYWORDS, _bisect_insort_left__doc__},
+
+static PyObject *
+_bisect_insort_left_impl(PyObject *module, PyObject *a, PyObject *x,
+ Py_ssize_t lo, Py_ssize_t hi);
+
+static PyObject *
+_bisect_insort_left(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"a", "x", "lo", "hi", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "insort_left", 0};
+ PyObject *argsbuf[4];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2;
+ PyObject *a;
+ PyObject *x;
+ Py_ssize_t lo = 0;
+ Py_ssize_t hi = -1;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 4, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ a = args[0];
+ x = args[1];
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ if (args[2]) {
+ if (PyFloat_Check(args[2])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ {
+ Py_ssize_t ival = -1;
+ PyObject *iobj = PyNumber_Index(args[2]);
+ if (iobj != NULL) {
+ ival = PyLong_AsSsize_t(iobj);
+ Py_DECREF(iobj);
+ }
+ if (ival == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ lo = ival;
+ }
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+ if (!_Py_convert_optional_to_ssize_t(args[3], &hi)) {
+ goto exit;
+ }
+skip_optional_pos:
+ return_value = _bisect_insort_left_impl(module, a, x, lo, hi);
+
+exit:
+ return return_value;
+}
+/*[clinic end generated code: output=bcbd6c77331a08f0 input=a9049054013a1b77]*/
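
The new _bisect wrappers convert lo through PyNumber_Index and hi through the private _Py_convert_optional_to_ssize_t helper, which treats None as "keep the default". A public-API approximation of that converter, for illustration only:

    /* Sketch of the "Py_ssize_t or None" conversion the bisect wrappers
     * perform for hi: None keeps the caller's sentinel (e.g. -1), anything
     * else is converted via __index__. Public-API rewrite of the private
     * helper used above; returns 1 on success, 0 on error like a converter. */
    #include <Python.h>

    static int
    convert_optional_ssize(PyObject *obj, Py_ssize_t *result)
    {
        if (obj == Py_None) {
            return 1;                            /* keep the default */
        }
        PyObject *index = PyNumber_Index(obj);   /* calls __index__ */
        if (index == NULL) {
            return 0;
        }
        Py_ssize_t value = PyLong_AsSsize_t(index);
        Py_DECREF(index);
        if (value == -1 && PyErr_Occurred()) {
            return 0;
        }
        *result = value;
        return 1;
    }
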
diff --git a/Modules/clinic/_bz2module.c.h b/Modules/clinic/_bz2module.c.h
index ac826bd9..0eb6280d 100644
--- a/Modules/clinic/_bz2module.c.h
+++ b/Modules/clinic/_bz2module.c.h
@@ -85,7 +85,7 @@ _bz2_BZ2Compressor___init__(PyObject *self, PyObject *args, PyObject *kwargs)
int return_value = -1;
int compresslevel = 9;
- if ((Py_TYPE(self) == &BZ2Compressor_Type) &&
+ if (Py_IS_TYPE(self, &BZ2Compressor_Type) &&
!_PyArg_NoKeywords("BZ2Compressor", kwargs)) {
goto exit;
}
@@ -207,11 +207,11 @@ _bz2_BZ2Decompressor___init__(PyObject *self, PyObject *args, PyObject *kwargs)
{
int return_value = -1;
- if ((Py_TYPE(self) == &BZ2Decompressor_Type) &&
+ if (Py_IS_TYPE(self, &BZ2Decompressor_Type) &&
!_PyArg_NoPositional("BZ2Decompressor", args)) {
goto exit;
}
- if ((Py_TYPE(self) == &BZ2Decompressor_Type) &&
+ if (Py_IS_TYPE(self, &BZ2Decompressor_Type) &&
!_PyArg_NoKeywords("BZ2Decompressor", kwargs)) {
goto exit;
}
@@ -220,4 +220,4 @@ _bz2_BZ2Decompressor___init__(PyObject *self, PyObject *args, PyObject *kwargs)
exit:
return return_value;
}
-/*[clinic end generated code: output=ec3d1b3652c98823 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=3f3f1e788fe28ee1 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/_cursesmodule.c.h b/Modules/clinic/_cursesmodule.c.h
index ad93e6a0..50d7f213 100644
--- a/Modules/clinic/_cursesmodule.c.h
+++ b/Modules/clinic/_cursesmodule.c.h
@@ -3040,6 +3040,144 @@ exit:
return return_value;
}
+#if (defined(NCURSES_EXT_FUNCS) && NCURSES_EXT_FUNCS >= 20081102)
+
+PyDoc_STRVAR(_curses_get_escdelay__doc__,
+"get_escdelay($module, /)\n"
+"--\n"
+"\n"
+"Gets the curses ESCDELAY setting.\n"
+"\n"
+"Gets the number of milliseconds to wait after reading an escape character,\n"
+"to distinguish between an individual escape character entered on the\n"
+"keyboard from escape sequences sent by cursor and function keys.");
+
+#define _CURSES_GET_ESCDELAY_METHODDEF \
+ {"get_escdelay", (PyCFunction)_curses_get_escdelay, METH_NOARGS, _curses_get_escdelay__doc__},
+
+static PyObject *
+_curses_get_escdelay_impl(PyObject *module);
+
+static PyObject *
+_curses_get_escdelay(PyObject *module, PyObject *Py_UNUSED(ignored))
+{
+ return _curses_get_escdelay_impl(module);
+}
+
+#endif /* (defined(NCURSES_EXT_FUNCS) && NCURSES_EXT_FUNCS >= 20081102) */
+
+#if (defined(NCURSES_EXT_FUNCS) && NCURSES_EXT_FUNCS >= 20081102)
+
+PyDoc_STRVAR(_curses_set_escdelay__doc__,
+"set_escdelay($module, ms, /)\n"
+"--\n"
+"\n"
+"Sets the curses ESCDELAY setting.\n"
+"\n"
+" ms\n"
+" length of the delay in milliseconds.\n"
+"\n"
+"Sets the number of milliseconds to wait after reading an escape character,\n"
+"to distinguish between an individual escape character entered on the\n"
+"keyboard from escape sequences sent by cursor and function keys.");
+
+#define _CURSES_SET_ESCDELAY_METHODDEF \
+ {"set_escdelay", (PyCFunction)_curses_set_escdelay, METH_O, _curses_set_escdelay__doc__},
+
+static PyObject *
+_curses_set_escdelay_impl(PyObject *module, int ms);
+
+static PyObject *
+_curses_set_escdelay(PyObject *module, PyObject *arg)
+{
+ PyObject *return_value = NULL;
+ int ms;
+
+ if (PyFloat_Check(arg)) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ ms = _PyLong_AsInt(arg);
+ if (ms == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = _curses_set_escdelay_impl(module, ms);
+
+exit:
+ return return_value;
+}
+
+#endif /* (defined(NCURSES_EXT_FUNCS) && NCURSES_EXT_FUNCS >= 20081102) */
+
+#if (defined(NCURSES_EXT_FUNCS) && NCURSES_EXT_FUNCS >= 20081102)
+
+PyDoc_STRVAR(_curses_get_tabsize__doc__,
+"get_tabsize($module, /)\n"
+"--\n"
+"\n"
+"Gets the curses TABSIZE setting.\n"
+"\n"
+"Gets the number of columns used by the curses library when converting a tab\n"
+"character to spaces as it adds the tab to a window.");
+
+#define _CURSES_GET_TABSIZE_METHODDEF \
+ {"get_tabsize", (PyCFunction)_curses_get_tabsize, METH_NOARGS, _curses_get_tabsize__doc__},
+
+static PyObject *
+_curses_get_tabsize_impl(PyObject *module);
+
+static PyObject *
+_curses_get_tabsize(PyObject *module, PyObject *Py_UNUSED(ignored))
+{
+ return _curses_get_tabsize_impl(module);
+}
+
+#endif /* (defined(NCURSES_EXT_FUNCS) && NCURSES_EXT_FUNCS >= 20081102) */
+
+#if (defined(NCURSES_EXT_FUNCS) && NCURSES_EXT_FUNCS >= 20081102)
+
+PyDoc_STRVAR(_curses_set_tabsize__doc__,
+"set_tabsize($module, size, /)\n"
+"--\n"
+"\n"
+"Sets the curses TABSIZE setting.\n"
+"\n"
+" size\n"
+" rendered cell width of a tab character.\n"
+"\n"
+"Sets the number of columns used by the curses library when converting a tab\n"
+"character to spaces as it adds the tab to a window.");
+
+#define _CURSES_SET_TABSIZE_METHODDEF \
+ {"set_tabsize", (PyCFunction)_curses_set_tabsize, METH_O, _curses_set_tabsize__doc__},
+
+static PyObject *
+_curses_set_tabsize_impl(PyObject *module, int size);
+
+static PyObject *
+_curses_set_tabsize(PyObject *module, PyObject *arg)
+{
+ PyObject *return_value = NULL;
+ int size;
+
+ if (PyFloat_Check(arg)) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ size = _PyLong_AsInt(arg);
+ if (size == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = _curses_set_tabsize_impl(module, size);
+
+exit:
+ return return_value;
+}
+
+#endif /* (defined(NCURSES_EXT_FUNCS) && NCURSES_EXT_FUNCS >= 20081102) */
+
PyDoc_STRVAR(_curses_intrflush__doc__,
"intrflush($module, flag, /)\n"
"--\n"
@@ -3799,23 +3937,13 @@ PyDoc_STRVAR(_curses_update_lines_cols__doc__,
#define _CURSES_UPDATE_LINES_COLS_METHODDEF \
{"update_lines_cols", (PyCFunction)_curses_update_lines_cols, METH_NOARGS, _curses_update_lines_cols__doc__},
-static int
+static PyObject *
_curses_update_lines_cols_impl(PyObject *module);
static PyObject *
_curses_update_lines_cols(PyObject *module, PyObject *Py_UNUSED(ignored))
{
- PyObject *return_value = NULL;
- int _return_value;
-
- _return_value = _curses_update_lines_cols_impl(module);
- if ((_return_value == -1) && PyErr_Occurred()) {
- goto exit;
- }
- return_value = PyLong_FromLong((long)_return_value);
-
-exit:
- return return_value;
+ return _curses_update_lines_cols_impl(module);
}
#endif /* (defined(HAVE_CURSES_RESIZETERM) || defined(HAVE_CURSES_RESIZE_TERM)) */
@@ -4526,6 +4654,22 @@ _curses_use_default_colors(PyObject *module, PyObject *Py_UNUSED(ignored))
#define _CURSES_HAS_KEY_METHODDEF
#endif /* !defined(_CURSES_HAS_KEY_METHODDEF) */
+#ifndef _CURSES_GET_ESCDELAY_METHODDEF
+ #define _CURSES_GET_ESCDELAY_METHODDEF
+#endif /* !defined(_CURSES_GET_ESCDELAY_METHODDEF) */
+
+#ifndef _CURSES_SET_ESCDELAY_METHODDEF
+ #define _CURSES_SET_ESCDELAY_METHODDEF
+#endif /* !defined(_CURSES_SET_ESCDELAY_METHODDEF) */
+
+#ifndef _CURSES_GET_TABSIZE_METHODDEF
+ #define _CURSES_GET_TABSIZE_METHODDEF
+#endif /* !defined(_CURSES_GET_TABSIZE_METHODDEF) */
+
+#ifndef _CURSES_SET_TABSIZE_METHODDEF
+ #define _CURSES_SET_TABSIZE_METHODDEF
+#endif /* !defined(_CURSES_SET_TABSIZE_METHODDEF) */
+
#ifndef _CURSES_IS_TERM_RESIZED_METHODDEF
#define _CURSES_IS_TERM_RESIZED_METHODDEF
#endif /* !defined(_CURSES_IS_TERM_RESIZED_METHODDEF) */
@@ -4569,4 +4713,4 @@ _curses_use_default_colors(PyObject *module, PyObject *Py_UNUSED(ignored))
#ifndef _CURSES_USE_DEFAULT_COLORS_METHODDEF
#define _CURSES_USE_DEFAULT_COLORS_METHODDEF
#endif /* !defined(_CURSES_USE_DEFAULT_COLORS_METHODDEF) */
-/*[clinic end generated code: output=e5b3502f1d38dff0 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=b53652f8acafd817 input=a9049054013a1b77]*/
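
The curses additions above expose get/set_escdelay() and get/set_tabsize() behind an NCURSES_EXT_FUNCS >= 20081102 guard; the setters are METH_O wrappers that reject floats and convert the argument to a C int via the private _PyLong_AsInt. A sketch of the same METH_O shape using only public API (demo_set_delay is a hypothetical name):

    /* METH_O wrapper taking one integer argument, mirroring the generated
     * set_escdelay()/set_tabsize() wrappers but without private helpers. */
    #include <Python.h>
    #include <limits.h>

    static PyObject *
    demo_set_delay(PyObject *module, PyObject *arg)
    {
        /* Mirror the generated wrapper: reject floats explicitly, then
         * convert through the integer protocol. */
        if (PyFloat_Check(arg)) {
            PyErr_SetString(PyExc_TypeError,
                            "integer argument expected, got float");
            return NULL;
        }
        long value = PyLong_AsLong(arg);
        if (value == -1 && PyErr_Occurred()) {
            return NULL;
        }
        if (value < INT_MIN || value > INT_MAX) {
            PyErr_SetString(PyExc_OverflowError,
                            "value out of range for C int");
            return NULL;
        }
        int ms = (int)value;
        (void)ms;   /* the real function would call set_escdelay(ms) here */
        Py_RETURN_NONE;
    }
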
diff --git a/Modules/clinic/_datetimemodule.c.h b/Modules/clinic/_datetimemodule.c.h
index 447036ca..973a4ea0 100644
--- a/Modules/clinic/_datetimemodule.c.h
+++ b/Modules/clinic/_datetimemodule.c.h
@@ -14,6 +14,60 @@ PyDoc_STRVAR(datetime_date_fromtimestamp__doc__,
#define DATETIME_DATE_FROMTIMESTAMP_METHODDEF \
{"fromtimestamp", (PyCFunction)datetime_date_fromtimestamp, METH_O|METH_CLASS, datetime_date_fromtimestamp__doc__},
+static PyObject *
+iso_calendar_date_new_impl(PyTypeObject *type, int year, int week,
+ int weekday);
+
+static PyObject *
+iso_calendar_date_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"year", "week", "weekday", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "IsoCalendarDate", 0};
+ PyObject *argsbuf[3];
+ PyObject * const *fastargs;
+ Py_ssize_t nargs = PyTuple_GET_SIZE(args);
+ int year;
+ int week;
+ int weekday;
+
+ fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 3, 3, 0, argsbuf);
+ if (!fastargs) {
+ goto exit;
+ }
+ if (PyFloat_Check(fastargs[0])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ year = _PyLong_AsInt(fastargs[0]);
+ if (year == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ if (PyFloat_Check(fastargs[1])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ week = _PyLong_AsInt(fastargs[1]);
+ if (week == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ if (PyFloat_Check(fastargs[2])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ weekday = _PyLong_AsInt(fastargs[2]);
+ if (weekday == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = iso_calendar_date_new_impl(type, year, week, weekday);
+
+exit:
+ return return_value;
+}
+
PyDoc_STRVAR(datetime_datetime_now__doc__,
"now($type, /, tz=None)\n"
"--\n"
@@ -55,4 +109,4 @@ skip_optional_pos:
exit:
return return_value;
}
-/*[clinic end generated code: output=aae916ab728ca85b input=a9049054013a1b77]*/
+/*[clinic end generated code: output=5e17549f29a439a5 input=a9049054013a1b77]*/
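
The new iso_calendar_date_new() wrapper parses three required int arguments (year, week, weekday) from the usual tp_new (args, kwargs) pair via the private fast-path helpers. For orientation, an equivalent hand-written parser using the public PyArg_ParseTupleAndKeywords API (demo_iso_date_new is a hypothetical name):

    /* Public-API parse of three required ints, equivalent in effect to the
     * generated IsoCalendarDate constructor wrapper above. */
    #include <Python.h>

    static PyObject *
    demo_iso_date_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
    {
        static char *keywords[] = {"year", "week", "weekday", NULL};
        int year, week, weekday;

        if (!PyArg_ParseTupleAndKeywords(args, kwargs, "iii:IsoCalendarDate",
                                         keywords, &year, &week, &weekday)) {
            return NULL;
        }
        /* ... the real constructor builds a date-tuple subclass here ... */
        return Py_BuildValue("(iii)", year, week, weekday);
    }
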
diff --git a/Modules/clinic/_elementtree.c.h b/Modules/clinic/_elementtree.c.h
index 0bc4bb5d..825416f4 100644
--- a/Modules/clinic/_elementtree.c.h
+++ b/Modules/clinic/_elementtree.c.h
@@ -355,23 +355,6 @@ exit:
return return_value;
}
-PyDoc_STRVAR(_elementtree_Element_getchildren__doc__,
-"getchildren($self, /)\n"
-"--\n"
-"\n");
-
-#define _ELEMENTTREE_ELEMENT_GETCHILDREN_METHODDEF \
- {"getchildren", (PyCFunction)_elementtree_Element_getchildren, METH_NOARGS, _elementtree_Element_getchildren__doc__},
-
-static PyObject *
-_elementtree_Element_getchildren_impl(ElementObject *self);
-
-static PyObject *
-_elementtree_Element_getchildren(ElementObject *self, PyObject *Py_UNUSED(ignored))
-{
- return _elementtree_Element_getchildren_impl(self);
-}
-
PyDoc_STRVAR(_elementtree_Element_iter__doc__,
"iter($self, /, tag=None)\n"
"--\n"
@@ -408,42 +391,6 @@ exit:
return return_value;
}
-PyDoc_STRVAR(_elementtree_Element_getiterator__doc__,
-"getiterator($self, /, tag=None)\n"
-"--\n"
-"\n");
-
-#define _ELEMENTTREE_ELEMENT_GETITERATOR_METHODDEF \
- {"getiterator", (PyCFunction)(void(*)(void))_elementtree_Element_getiterator, METH_FASTCALL|METH_KEYWORDS, _elementtree_Element_getiterator__doc__},
-
-static PyObject *
-_elementtree_Element_getiterator_impl(ElementObject *self, PyObject *tag);
-
-static PyObject *
-_elementtree_Element_getiterator(ElementObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
-{
- PyObject *return_value = NULL;
- static const char * const _keywords[] = {"tag", NULL};
- static _PyArg_Parser _parser = {NULL, _keywords, "getiterator", 0};
- PyObject *argsbuf[1];
- Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *tag = Py_None;
-
- args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
- if (!args) {
- goto exit;
- }
- if (!noptargs) {
- goto skip_optional_pos;
- }
- tag = args[0];
-skip_optional_pos:
- return_value = _elementtree_Element_getiterator_impl(self, tag);
-
-exit:
- return return_value;
-}
-
PyDoc_STRVAR(_elementtree_Element_itertext__doc__,
"itertext($self, /)\n"
"--\n"
@@ -568,6 +515,10 @@ _elementtree_Element_makeelement(ElementObject *self, PyObject *const *args, Py_
goto exit;
}
tag = args[0];
+ if (!PyDict_Check(args[1])) {
+ _PyArg_BadArgument("makeelement", "argument 2", "dict", args[1]);
+ goto exit;
+ }
attrib = args[1];
return_value = _elementtree_Element_makeelement_impl(self, tag, attrib);
@@ -814,7 +765,7 @@ _elementtree_TreeBuilder_close(TreeBuilderObject *self, PyObject *Py_UNUSED(igno
}
PyDoc_STRVAR(_elementtree_TreeBuilder_start__doc__,
-"start($self, tag, attrs=None, /)\n"
+"start($self, tag, attrs, /)\n"
"--\n"
"\n");
@@ -830,17 +781,17 @@ _elementtree_TreeBuilder_start(TreeBuilderObject *self, PyObject *const *args, P
{
PyObject *return_value = NULL;
PyObject *tag;
- PyObject *attrs = Py_None;
+ PyObject *attrs;
- if (!_PyArg_CheckPositional("start", nargs, 1, 2)) {
+ if (!_PyArg_CheckPositional("start", nargs, 2, 2)) {
goto exit;
}
tag = args[0];
- if (nargs < 2) {
- goto skip_optional;
+ if (!PyDict_Check(args[1])) {
+ _PyArg_BadArgument("start", "argument 2", "dict", args[1]);
+ goto exit;
}
attrs = args[1];
-skip_optional:
return_value = _elementtree_TreeBuilder_start_impl(self, tag, attrs);
exit:
@@ -969,4 +920,4 @@ skip_optional:
exit:
return return_value;
}
-/*[clinic end generated code: output=1443ed7bb9f9e03e input=a9049054013a1b77]*/
+/*[clinic end generated code: output=b7f6a32462fc42a9 input=a9049054013a1b77]*/
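
Besides dropping the deprecated getchildren()/getiterator() wrappers, the _elementtree hunks make makeelement() and TreeBuilder.start() validate that their attrib argument is a dict. A minimal sketch of that check (demo_makeelement is a hypothetical helper):

    /* Internal helper sketch of the dict-type validation the regenerated
     * makeelement()/start() wrappers now perform on attrib. */
    #include <Python.h>

    static PyObject *
    demo_makeelement(PyObject *self, PyObject *tag, PyObject *attrib)
    {
        if (!PyDict_Check(attrib)) {
            PyErr_Format(PyExc_TypeError,
                         "attrib must be dict, not %.200s",
                         Py_TYPE(attrib)->tp_name);
            return NULL;
        }
        /* ... the real helper builds and returns the new element here ... */
        Py_RETURN_NONE;
    }
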
diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h
index 9aaea47e..68aa765e 100644
--- a/Modules/clinic/_hashopenssl.c.h
+++ b/Modules/clinic/_hashopenssl.c.h
@@ -65,8 +65,112 @@ PyDoc_STRVAR(EVP_update__doc__,
#define EVP_UPDATE_METHODDEF \
{"update", (PyCFunction)EVP_update, METH_O, EVP_update__doc__},
+#if defined(PY_OPENSSL_HAS_SHAKE)
+
+PyDoc_STRVAR(EVPXOF_digest__doc__,
+"digest($self, /, length)\n"
+"--\n"
+"\n"
+"Return the digest value as a bytes object.");
+
+#define EVPXOF_DIGEST_METHODDEF \
+ {"digest", (PyCFunction)(void(*)(void))EVPXOF_digest, METH_FASTCALL|METH_KEYWORDS, EVPXOF_digest__doc__},
+
+static PyObject *
+EVPXOF_digest_impl(EVPobject *self, Py_ssize_t length);
+
+static PyObject *
+EVPXOF_digest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"length", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "digest", 0};
+ PyObject *argsbuf[1];
+ Py_ssize_t length;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (PyFloat_Check(args[0])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ {
+ Py_ssize_t ival = -1;
+ PyObject *iobj = PyNumber_Index(args[0]);
+ if (iobj != NULL) {
+ ival = PyLong_AsSsize_t(iobj);
+ Py_DECREF(iobj);
+ }
+ if (ival == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ length = ival;
+ }
+ return_value = EVPXOF_digest_impl(self, length);
+
+exit:
+ return return_value;
+}
+
+#endif /* defined(PY_OPENSSL_HAS_SHAKE) */
+
+#if defined(PY_OPENSSL_HAS_SHAKE)
+
+PyDoc_STRVAR(EVPXOF_hexdigest__doc__,
+"hexdigest($self, /, length)\n"
+"--\n"
+"\n"
+"Return the digest value as a string of hexadecimal digits.");
+
+#define EVPXOF_HEXDIGEST_METHODDEF \
+ {"hexdigest", (PyCFunction)(void(*)(void))EVPXOF_hexdigest, METH_FASTCALL|METH_KEYWORDS, EVPXOF_hexdigest__doc__},
+
+static PyObject *
+EVPXOF_hexdigest_impl(EVPobject *self, Py_ssize_t length);
+
+static PyObject *
+EVPXOF_hexdigest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"length", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "hexdigest", 0};
+ PyObject *argsbuf[1];
+ Py_ssize_t length;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (PyFloat_Check(args[0])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ {
+ Py_ssize_t ival = -1;
+ PyObject *iobj = PyNumber_Index(args[0]);
+ if (iobj != NULL) {
+ ival = PyLong_AsSsize_t(iobj);
+ Py_DECREF(iobj);
+ }
+ if (ival == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ length = ival;
+ }
+ return_value = EVPXOF_hexdigest_impl(self, length);
+
+exit:
+ return return_value;
+}
+
+#endif /* defined(PY_OPENSSL_HAS_SHAKE) */
+
PyDoc_STRVAR(EVP_new__doc__,
-"new($module, /, name, string=b\'\')\n"
+"new($module, /, name, string=b\'\', *, usedforsecurity=True)\n"
"--\n"
"\n"
"Return a new hash object using the named algorithm.\n"
@@ -80,18 +184,20 @@ PyDoc_STRVAR(EVP_new__doc__,
{"new", (PyCFunction)(void(*)(void))EVP_new, METH_FASTCALL|METH_KEYWORDS, EVP_new__doc__},
static PyObject *
-EVP_new_impl(PyObject *module, PyObject *name_obj, PyObject *data_obj);
+EVP_new_impl(PyObject *module, PyObject *name_obj, PyObject *data_obj,
+ int usedforsecurity);
static PyObject *
EVP_new(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"name", "string", NULL};
+ static const char * const _keywords[] = {"name", "string", "usedforsecurity", NULL};
static _PyArg_Parser _parser = {NULL, _keywords, "new", 0};
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1;
PyObject *name_obj;
PyObject *data_obj = NULL;
+ int usedforsecurity = 1;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf);
if (!args) {
@@ -101,16 +207,29 @@ EVP_new(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwn
if (!noptargs) {
goto skip_optional_pos;
}
- data_obj = args[1];
+ if (args[1]) {
+ data_obj = args[1];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
skip_optional_pos:
- return_value = EVP_new_impl(module, name_obj, data_obj);
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[2]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = EVP_new_impl(module, name_obj, data_obj, usedforsecurity);
exit:
return return_value;
}
PyDoc_STRVAR(_hashlib_openssl_md5__doc__,
-"openssl_md5($module, /, string=b\'\')\n"
+"openssl_md5($module, /, string=b\'\', *, usedforsecurity=True)\n"
"--\n"
"\n"
"Returns a md5 hash object; optionally initialized with a string");
@@ -119,54 +238,389 @@ PyDoc_STRVAR(_hashlib_openssl_md5__doc__,
{"openssl_md5", (PyCFunction)(void(*)(void))_hashlib_openssl_md5, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_md5__doc__},
static PyObject *
-_hashlib_openssl_md5_impl(PyObject *module, PyObject *data_obj);
+_hashlib_openssl_md5_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity);
static PyObject *
_hashlib_openssl_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"string", NULL};
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
static _PyArg_Parser _parser = {NULL, _keywords, "openssl_md5", 0};
- PyObject *argsbuf[1];
+ PyObject *argsbuf[2];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
+ PyObject *data_obj = NULL;
+ int usedforsecurity = 1;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ if (args[0]) {
+ data_obj = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+skip_optional_pos:
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _hashlib_openssl_md5_impl(module, data_obj, usedforsecurity);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(_hashlib_openssl_sha1__doc__,
+"openssl_sha1($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"--\n"
+"\n"
+"Returns a sha1 hash object; optionally initialized with a string");
+
+#define _HASHLIB_OPENSSL_SHA1_METHODDEF \
+ {"openssl_sha1", (PyCFunction)(void(*)(void))_hashlib_openssl_sha1, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha1__doc__},
+
+static PyObject *
+_hashlib_openssl_sha1_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity);
+
+static PyObject *
+_hashlib_openssl_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha1", 0};
+ PyObject *argsbuf[2];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
PyObject *data_obj = NULL;
+ int usedforsecurity = 1;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
if (!args) {
goto exit;
}
if (!noptargs) {
- goto skip_optional_pos;
+ goto skip_optional_pos;
+ }
+ if (args[0]) {
+ data_obj = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+skip_optional_pos:
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _hashlib_openssl_sha1_impl(module, data_obj, usedforsecurity);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(_hashlib_openssl_sha224__doc__,
+"openssl_sha224($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"--\n"
+"\n"
+"Returns a sha224 hash object; optionally initialized with a string");
+
+#define _HASHLIB_OPENSSL_SHA224_METHODDEF \
+ {"openssl_sha224", (PyCFunction)(void(*)(void))_hashlib_openssl_sha224, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha224__doc__},
+
+static PyObject *
+_hashlib_openssl_sha224_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity);
+
+static PyObject *
+_hashlib_openssl_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha224", 0};
+ PyObject *argsbuf[2];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
+ PyObject *data_obj = NULL;
+ int usedforsecurity = 1;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ if (args[0]) {
+ data_obj = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+skip_optional_pos:
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _hashlib_openssl_sha224_impl(module, data_obj, usedforsecurity);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(_hashlib_openssl_sha256__doc__,
+"openssl_sha256($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"--\n"
+"\n"
+"Returns a sha256 hash object; optionally initialized with a string");
+
+#define _HASHLIB_OPENSSL_SHA256_METHODDEF \
+ {"openssl_sha256", (PyCFunction)(void(*)(void))_hashlib_openssl_sha256, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha256__doc__},
+
+static PyObject *
+_hashlib_openssl_sha256_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity);
+
+static PyObject *
+_hashlib_openssl_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha256", 0};
+ PyObject *argsbuf[2];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
+ PyObject *data_obj = NULL;
+ int usedforsecurity = 1;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ if (args[0]) {
+ data_obj = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+skip_optional_pos:
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _hashlib_openssl_sha256_impl(module, data_obj, usedforsecurity);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(_hashlib_openssl_sha384__doc__,
+"openssl_sha384($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"--\n"
+"\n"
+"Returns a sha384 hash object; optionally initialized with a string");
+
+#define _HASHLIB_OPENSSL_SHA384_METHODDEF \
+ {"openssl_sha384", (PyCFunction)(void(*)(void))_hashlib_openssl_sha384, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha384__doc__},
+
+static PyObject *
+_hashlib_openssl_sha384_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity);
+
+static PyObject *
+_hashlib_openssl_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha384", 0};
+ PyObject *argsbuf[2];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
+ PyObject *data_obj = NULL;
+ int usedforsecurity = 1;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ if (args[0]) {
+ data_obj = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+skip_optional_pos:
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _hashlib_openssl_sha384_impl(module, data_obj, usedforsecurity);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(_hashlib_openssl_sha512__doc__,
+"openssl_sha512($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"--\n"
+"\n"
+"Returns a sha512 hash object; optionally initialized with a string");
+
+#define _HASHLIB_OPENSSL_SHA512_METHODDEF \
+ {"openssl_sha512", (PyCFunction)(void(*)(void))_hashlib_openssl_sha512, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha512__doc__},
+
+static PyObject *
+_hashlib_openssl_sha512_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity);
+
+static PyObject *
+_hashlib_openssl_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha512", 0};
+ PyObject *argsbuf[2];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
+ PyObject *data_obj = NULL;
+ int usedforsecurity = 1;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ if (args[0]) {
+ data_obj = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+skip_optional_pos:
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _hashlib_openssl_sha512_impl(module, data_obj, usedforsecurity);
+
+exit:
+ return return_value;
+}
+
+#if defined(PY_OPENSSL_HAS_SHA3)
+
+PyDoc_STRVAR(_hashlib_openssl_sha3_224__doc__,
+"openssl_sha3_224($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"--\n"
+"\n"
+"Returns a sha3-224 hash object; optionally initialized with a string");
+
+#define _HASHLIB_OPENSSL_SHA3_224_METHODDEF \
+ {"openssl_sha3_224", (PyCFunction)(void(*)(void))_hashlib_openssl_sha3_224, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_224__doc__},
+
+static PyObject *
+_hashlib_openssl_sha3_224_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity);
+
+static PyObject *
+_hashlib_openssl_sha3_224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha3_224", 0};
+ PyObject *argsbuf[2];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
+ PyObject *data_obj = NULL;
+ int usedforsecurity = 1;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ if (args[0]) {
+ data_obj = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+skip_optional_pos:
+ if (!noptargs) {
+ goto skip_optional_kwonly;
}
- data_obj = args[0];
-skip_optional_pos:
- return_value = _hashlib_openssl_md5_impl(module, data_obj);
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _hashlib_openssl_sha3_224_impl(module, data_obj, usedforsecurity);
exit:
return return_value;
}
-PyDoc_STRVAR(_hashlib_openssl_sha1__doc__,
-"openssl_sha1($module, /, string=b\'\')\n"
+#endif /* defined(PY_OPENSSL_HAS_SHA3) */
+
+#if defined(PY_OPENSSL_HAS_SHA3)
+
+PyDoc_STRVAR(_hashlib_openssl_sha3_256__doc__,
+"openssl_sha3_256($module, /, string=b\'\', *, usedforsecurity=True)\n"
"--\n"
"\n"
-"Returns a sha1 hash object; optionally initialized with a string");
+"Returns a sha3-256 hash object; optionally initialized with a string");
-#define _HASHLIB_OPENSSL_SHA1_METHODDEF \
- {"openssl_sha1", (PyCFunction)(void(*)(void))_hashlib_openssl_sha1, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha1__doc__},
+#define _HASHLIB_OPENSSL_SHA3_256_METHODDEF \
+ {"openssl_sha3_256", (PyCFunction)(void(*)(void))_hashlib_openssl_sha3_256, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_256__doc__},
static PyObject *
-_hashlib_openssl_sha1_impl(PyObject *module, PyObject *data_obj);
+_hashlib_openssl_sha3_256_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity);
static PyObject *
-_hashlib_openssl_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+_hashlib_openssl_sha3_256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"string", NULL};
- static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha1", 0};
- PyObject *argsbuf[1];
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha3_256", 0};
+ PyObject *argsbuf[2];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
PyObject *data_obj = NULL;
+ int usedforsecurity = 1;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
if (!args) {
@@ -175,35 +629,54 @@ _hashlib_openssl_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
if (!noptargs) {
goto skip_optional_pos;
}
- data_obj = args[0];
+ if (args[0]) {
+ data_obj = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
skip_optional_pos:
- return_value = _hashlib_openssl_sha1_impl(module, data_obj);
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _hashlib_openssl_sha3_256_impl(module, data_obj, usedforsecurity);
exit:
return return_value;
}
-PyDoc_STRVAR(_hashlib_openssl_sha224__doc__,
-"openssl_sha224($module, /, string=b\'\')\n"
+#endif /* defined(PY_OPENSSL_HAS_SHA3) */
+
+#if defined(PY_OPENSSL_HAS_SHA3)
+
+PyDoc_STRVAR(_hashlib_openssl_sha3_384__doc__,
+"openssl_sha3_384($module, /, string=b\'\', *, usedforsecurity=True)\n"
"--\n"
"\n"
-"Returns a sha224 hash object; optionally initialized with a string");
+"Returns a sha3-384 hash object; optionally initialized with a string");
-#define _HASHLIB_OPENSSL_SHA224_METHODDEF \
- {"openssl_sha224", (PyCFunction)(void(*)(void))_hashlib_openssl_sha224, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha224__doc__},
+#define _HASHLIB_OPENSSL_SHA3_384_METHODDEF \
+ {"openssl_sha3_384", (PyCFunction)(void(*)(void))_hashlib_openssl_sha3_384, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_384__doc__},
static PyObject *
-_hashlib_openssl_sha224_impl(PyObject *module, PyObject *data_obj);
+_hashlib_openssl_sha3_384_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity);
static PyObject *
-_hashlib_openssl_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+_hashlib_openssl_sha3_384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"string", NULL};
- static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha224", 0};
- PyObject *argsbuf[1];
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha3_384", 0};
+ PyObject *argsbuf[2];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
PyObject *data_obj = NULL;
+ int usedforsecurity = 1;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
if (!args) {
@@ -212,35 +685,54 @@ _hashlib_openssl_sha224(PyObject *module, PyObject *const *args, Py_ssize_t narg
if (!noptargs) {
goto skip_optional_pos;
}
- data_obj = args[0];
+ if (args[0]) {
+ data_obj = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
skip_optional_pos:
- return_value = _hashlib_openssl_sha224_impl(module, data_obj);
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _hashlib_openssl_sha3_384_impl(module, data_obj, usedforsecurity);
exit:
return return_value;
}
-PyDoc_STRVAR(_hashlib_openssl_sha256__doc__,
-"openssl_sha256($module, /, string=b\'\')\n"
+#endif /* defined(PY_OPENSSL_HAS_SHA3) */
+
+#if defined(PY_OPENSSL_HAS_SHA3)
+
+PyDoc_STRVAR(_hashlib_openssl_sha3_512__doc__,
+"openssl_sha3_512($module, /, string=b\'\', *, usedforsecurity=True)\n"
"--\n"
"\n"
-"Returns a sha256 hash object; optionally initialized with a string");
+"Returns a sha3-512 hash object; optionally initialized with a string");
-#define _HASHLIB_OPENSSL_SHA256_METHODDEF \
- {"openssl_sha256", (PyCFunction)(void(*)(void))_hashlib_openssl_sha256, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha256__doc__},
+#define _HASHLIB_OPENSSL_SHA3_512_METHODDEF \
+ {"openssl_sha3_512", (PyCFunction)(void(*)(void))_hashlib_openssl_sha3_512, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_512__doc__},
static PyObject *
-_hashlib_openssl_sha256_impl(PyObject *module, PyObject *data_obj);
+_hashlib_openssl_sha3_512_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity);
static PyObject *
-_hashlib_openssl_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+_hashlib_openssl_sha3_512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"string", NULL};
- static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha256", 0};
- PyObject *argsbuf[1];
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha3_512", 0};
+ PyObject *argsbuf[2];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
PyObject *data_obj = NULL;
+ int usedforsecurity = 1;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
if (!args) {
@@ -249,35 +741,54 @@ _hashlib_openssl_sha256(PyObject *module, PyObject *const *args, Py_ssize_t narg
if (!noptargs) {
goto skip_optional_pos;
}
- data_obj = args[0];
+ if (args[0]) {
+ data_obj = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
skip_optional_pos:
- return_value = _hashlib_openssl_sha256_impl(module, data_obj);
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _hashlib_openssl_sha3_512_impl(module, data_obj, usedforsecurity);
exit:
return return_value;
}
-PyDoc_STRVAR(_hashlib_openssl_sha384__doc__,
-"openssl_sha384($module, /, string=b\'\')\n"
+#endif /* defined(PY_OPENSSL_HAS_SHA3) */
+
+#if defined(PY_OPENSSL_HAS_SHAKE)
+
+PyDoc_STRVAR(_hashlib_openssl_shake_128__doc__,
+"openssl_shake_128($module, /, string=b\'\', *, usedforsecurity=True)\n"
"--\n"
"\n"
-"Returns a sha384 hash object; optionally initialized with a string");
+"Returns a shake-128 variable hash object; optionally initialized with a string");
-#define _HASHLIB_OPENSSL_SHA384_METHODDEF \
- {"openssl_sha384", (PyCFunction)(void(*)(void))_hashlib_openssl_sha384, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha384__doc__},
+#define _HASHLIB_OPENSSL_SHAKE_128_METHODDEF \
+ {"openssl_shake_128", (PyCFunction)(void(*)(void))_hashlib_openssl_shake_128, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_shake_128__doc__},
static PyObject *
-_hashlib_openssl_sha384_impl(PyObject *module, PyObject *data_obj);
+_hashlib_openssl_shake_128_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity);
static PyObject *
-_hashlib_openssl_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+_hashlib_openssl_shake_128(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"string", NULL};
- static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha384", 0};
- PyObject *argsbuf[1];
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "openssl_shake_128", 0};
+ PyObject *argsbuf[2];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
PyObject *data_obj = NULL;
+ int usedforsecurity = 1;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
if (!args) {
@@ -286,35 +797,54 @@ _hashlib_openssl_sha384(PyObject *module, PyObject *const *args, Py_ssize_t narg
if (!noptargs) {
goto skip_optional_pos;
}
- data_obj = args[0];
+ if (args[0]) {
+ data_obj = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
skip_optional_pos:
- return_value = _hashlib_openssl_sha384_impl(module, data_obj);
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _hashlib_openssl_shake_128_impl(module, data_obj, usedforsecurity);
exit:
return return_value;
}
-PyDoc_STRVAR(_hashlib_openssl_sha512__doc__,
-"openssl_sha512($module, /, string=b\'\')\n"
+#endif /* defined(PY_OPENSSL_HAS_SHAKE) */
+
+#if defined(PY_OPENSSL_HAS_SHAKE)
+
+PyDoc_STRVAR(_hashlib_openssl_shake_256__doc__,
+"openssl_shake_256($module, /, string=b\'\', *, usedforsecurity=True)\n"
"--\n"
"\n"
-"Returns a sha512 hash object; optionally initialized with a string");
+"Returns a shake-256 variable hash object; optionally initialized with a string");
-#define _HASHLIB_OPENSSL_SHA512_METHODDEF \
- {"openssl_sha512", (PyCFunction)(void(*)(void))_hashlib_openssl_sha512, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha512__doc__},
+#define _HASHLIB_OPENSSL_SHAKE_256_METHODDEF \
+ {"openssl_shake_256", (PyCFunction)(void(*)(void))_hashlib_openssl_shake_256, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_shake_256__doc__},
static PyObject *
-_hashlib_openssl_sha512_impl(PyObject *module, PyObject *data_obj);
+_hashlib_openssl_shake_256_impl(PyObject *module, PyObject *data_obj,
+ int usedforsecurity);
static PyObject *
-_hashlib_openssl_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+_hashlib_openssl_shake_256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"string", NULL};
- static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha512", 0};
- PyObject *argsbuf[1];
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "openssl_shake_256", 0};
+ PyObject *argsbuf[2];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
PyObject *data_obj = NULL;
+ int usedforsecurity = 1;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
if (!args) {
@@ -323,14 +853,29 @@ _hashlib_openssl_sha512(PyObject *module, PyObject *const *args, Py_ssize_t narg
if (!noptargs) {
goto skip_optional_pos;
}
- data_obj = args[0];
+ if (args[0]) {
+ data_obj = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
skip_optional_pos:
- return_value = _hashlib_openssl_sha512_impl(module, data_obj);
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _hashlib_openssl_shake_256_impl(module, data_obj, usedforsecurity);
exit:
return return_value;
}
+#endif /* defined(PY_OPENSSL_HAS_SHAKE) */
+
PyDoc_STRVAR(pbkdf2_hmac__doc__,
"pbkdf2_hmac($module, /, hash_name, password, salt, iterations,\n"
" dklen=None)\n"
@@ -550,21 +1095,21 @@ exit:
#endif /* (OPENSSL_VERSION_NUMBER > 0x10100000L && !defined(OPENSSL_NO_SCRYPT) && !defined(LIBRESSL_VERSION_NUMBER)) */
-PyDoc_STRVAR(_hashlib_hmac_digest__doc__,
+PyDoc_STRVAR(_hashlib_hmac_singleshot__doc__,
"hmac_digest($module, /, key, msg, digest)\n"
"--\n"
"\n"
"Single-shot HMAC.");
-#define _HASHLIB_HMAC_DIGEST_METHODDEF \
- {"hmac_digest", (PyCFunction)(void(*)(void))_hashlib_hmac_digest, METH_FASTCALL|METH_KEYWORDS, _hashlib_hmac_digest__doc__},
+#define _HASHLIB_HMAC_SINGLESHOT_METHODDEF \
+ {"hmac_digest", (PyCFunction)(void(*)(void))_hashlib_hmac_singleshot, METH_FASTCALL|METH_KEYWORDS, _hashlib_hmac_singleshot__doc__},
static PyObject *
-_hashlib_hmac_digest_impl(PyObject *module, Py_buffer *key, Py_buffer *msg,
- const char *digest);
+_hashlib_hmac_singleshot_impl(PyObject *module, Py_buffer *key,
+ Py_buffer *msg, const char *digest);
static PyObject *
-_hashlib_hmac_digest(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+_hashlib_hmac_singleshot(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
static const char * const _keywords[] = {"key", "msg", "digest", NULL};
@@ -605,7 +1150,7 @@ _hashlib_hmac_digest(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
PyErr_SetString(PyExc_ValueError, "embedded null character");
goto exit;
}
- return_value = _hashlib_hmac_digest_impl(module, &key, &msg, digest);
+ return_value = _hashlib_hmac_singleshot_impl(module, &key, &msg, digest);
exit:
/* Cleanup for key */
@@ -620,7 +1165,281 @@ exit:
return return_value;
}
+PyDoc_STRVAR(_hashlib_hmac_new__doc__,
+"hmac_new($module, /, key, msg=b\'\', digestmod=None)\n"
+"--\n"
+"\n"
+"Return a new hmac object.");
+
+#define _HASHLIB_HMAC_NEW_METHODDEF \
+ {"hmac_new", (PyCFunction)(void(*)(void))_hashlib_hmac_new, METH_FASTCALL|METH_KEYWORDS, _hashlib_hmac_new__doc__},
+
+static PyObject *
+_hashlib_hmac_new_impl(PyObject *module, Py_buffer *key, PyObject *msg_obj,
+ const char *digestmod);
+
+static PyObject *
+_hashlib_hmac_new(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"key", "msg", "digestmod", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "hmac_new", 0};
+ PyObject *argsbuf[3];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1;
+ Py_buffer key = {NULL, NULL};
+ PyObject *msg_obj = NULL;
+ const char *digestmod = NULL;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 3, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (PyObject_GetBuffer(args[0], &key, PyBUF_SIMPLE) != 0) {
+ goto exit;
+ }
+ if (!PyBuffer_IsContiguous(&key, 'C')) {
+ _PyArg_BadArgument("hmac_new", "argument 'key'", "contiguous buffer", args[0]);
+ goto exit;
+ }
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ if (args[1]) {
+ msg_obj = args[1];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+ if (!PyUnicode_Check(args[2])) {
+ _PyArg_BadArgument("hmac_new", "argument 'digestmod'", "str", args[2]);
+ goto exit;
+ }
+ Py_ssize_t digestmod_length;
+ digestmod = PyUnicode_AsUTF8AndSize(args[2], &digestmod_length);
+ if (digestmod == NULL) {
+ goto exit;
+ }
+ if (strlen(digestmod) != (size_t)digestmod_length) {
+ PyErr_SetString(PyExc_ValueError, "embedded null character");
+ goto exit;
+ }
+skip_optional_pos:
+ return_value = _hashlib_hmac_new_impl(module, &key, msg_obj, digestmod);
+
+exit:
+ /* Cleanup for key */
+ if (key.obj) {
+ PyBuffer_Release(&key);
+ }
+
+ return return_value;
+}
+
+PyDoc_STRVAR(_hashlib_HMAC_copy__doc__,
+"copy($self, /)\n"
+"--\n"
+"\n"
+"Return a copy (\"clone\") of the HMAC object.");
+
+#define _HASHLIB_HMAC_COPY_METHODDEF \
+ {"copy", (PyCFunction)_hashlib_HMAC_copy, METH_NOARGS, _hashlib_HMAC_copy__doc__},
+
+static PyObject *
+_hashlib_HMAC_copy_impl(HMACobject *self);
+
+static PyObject *
+_hashlib_HMAC_copy(HMACobject *self, PyObject *Py_UNUSED(ignored))
+{
+ return _hashlib_HMAC_copy_impl(self);
+}
+
+PyDoc_STRVAR(_hashlib_HMAC_update__doc__,
+"update($self, /, msg)\n"
+"--\n"
+"\n"
+"Update the HMAC object with msg.");
+
+#define _HASHLIB_HMAC_UPDATE_METHODDEF \
+ {"update", (PyCFunction)(void(*)(void))_hashlib_HMAC_update, METH_FASTCALL|METH_KEYWORDS, _hashlib_HMAC_update__doc__},
+
+static PyObject *
+_hashlib_HMAC_update_impl(HMACobject *self, PyObject *msg);
+
+static PyObject *
+_hashlib_HMAC_update(HMACobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"msg", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "update", 0};
+ PyObject *argsbuf[1];
+ PyObject *msg;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ msg = args[0];
+ return_value = _hashlib_HMAC_update_impl(self, msg);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(_hashlib_HMAC_digest__doc__,
+"digest($self, /)\n"
+"--\n"
+"\n"
+"Return the digest of the bytes passed to the update() method so far.");
+
+#define _HASHLIB_HMAC_DIGEST_METHODDEF \
+ {"digest", (PyCFunction)_hashlib_HMAC_digest, METH_NOARGS, _hashlib_HMAC_digest__doc__},
+
+static PyObject *
+_hashlib_HMAC_digest_impl(HMACobject *self);
+
+static PyObject *
+_hashlib_HMAC_digest(HMACobject *self, PyObject *Py_UNUSED(ignored))
+{
+ return _hashlib_HMAC_digest_impl(self);
+}
+
+PyDoc_STRVAR(_hashlib_HMAC_hexdigest__doc__,
+"hexdigest($self, /)\n"
+"--\n"
+"\n"
+"Return hexadecimal digest of the bytes passed to the update() method so far.\n"
+"\n"
+"This may be used to exchange the value safely in email or other non-binary\n"
+"environments.");
+
+#define _HASHLIB_HMAC_HEXDIGEST_METHODDEF \
+ {"hexdigest", (PyCFunction)_hashlib_HMAC_hexdigest, METH_NOARGS, _hashlib_HMAC_hexdigest__doc__},
+
+static PyObject *
+_hashlib_HMAC_hexdigest_impl(HMACobject *self);
+
+static PyObject *
+_hashlib_HMAC_hexdigest(HMACobject *self, PyObject *Py_UNUSED(ignored))
+{
+ return _hashlib_HMAC_hexdigest_impl(self);
+}
+
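The `hmac_new` constructor and the `copy`/`update`/`digest`/`hexdigest` methods above provide an OpenSSL-backed HMAC object. A hedged sketch of the equivalent public API (the `hmac` module is expected to delegate to these wrappers when OpenSSL is available):

    import hmac

    mac = hmac.new(b"secret-key", b"payload", digestmod="sha256")
    mac.update(b" and more payload")
    clone = mac.copy()                            # copy() clones the running state
    print(mac.hexdigest() == clone.hexdigest())   # True
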
+#if !defined(LIBRESSL_VERSION_NUMBER)
+
+PyDoc_STRVAR(_hashlib_get_fips_mode__doc__,
+"get_fips_mode($module, /)\n"
+"--\n"
+"\n"
+"Determine the OpenSSL FIPS mode of operation.\n"
+"\n"
+"For OpenSSL 3.0.0 and newer it returns the state of the default provider\n"
+"in the default OSSL context. It\'s not quite the same as FIPS_mode() but good\n"
+"enough for unittests.\n"
+"\n"
+"Effectively any non-zero return value indicates FIPS mode;\n"
+"values other than 1 may have additional significance.");
+
+#define _HASHLIB_GET_FIPS_MODE_METHODDEF \
+ {"get_fips_mode", (PyCFunction)_hashlib_get_fips_mode, METH_NOARGS, _hashlib_get_fips_mode__doc__},
+
+static int
+_hashlib_get_fips_mode_impl(PyObject *module);
+
+static PyObject *
+_hashlib_get_fips_mode(PyObject *module, PyObject *Py_UNUSED(ignored))
+{
+ PyObject *return_value = NULL;
+ int _return_value;
+
+ _return_value = _hashlib_get_fips_mode_impl(module);
+ if ((_return_value == -1) && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = PyLong_FromLong((long)_return_value);
+
+exit:
+ return return_value;
+}
+
+#endif /* !defined(LIBRESSL_VERSION_NUMBER) */
+
+PyDoc_STRVAR(_hashlib_compare_digest__doc__,
+"compare_digest($module, a, b, /)\n"
+"--\n"
+"\n"
+"Return \'a == b\'.\n"
+"\n"
+"This function uses an approach designed to prevent\n"
+"timing analysis, making it appropriate for cryptography.\n"
+"\n"
+"a and b must both be of the same type: either str (ASCII only),\n"
+"or any bytes-like object.\n"
+"\n"
+"Note: If a and b are of different lengths, or if an error occurs,\n"
+"a timing attack could theoretically reveal information about the\n"
+"types and lengths of a and b--but not their values.");
+
+#define _HASHLIB_COMPARE_DIGEST_METHODDEF \
+ {"compare_digest", (PyCFunction)(void(*)(void))_hashlib_compare_digest, METH_FASTCALL, _hashlib_compare_digest__doc__},
+
+static PyObject *
+_hashlib_compare_digest_impl(PyObject *module, PyObject *a, PyObject *b);
+
+static PyObject *
+_hashlib_compare_digest(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *a;
+ PyObject *b;
+
+ if (!_PyArg_CheckPositional("compare_digest", nargs, 2, 2)) {
+ goto exit;
+ }
+ a = args[0];
+ b = args[1];
+ return_value = _hashlib_compare_digest_impl(module, a, b);
+
+exit:
+ return return_value;
+}
+
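`compare_digest` compares the two inputs without bailing out at the first mismatching byte, which is what makes it suitable for checking MACs and tokens. A short sketch via the public `hmac.compare_digest`, which exposes the same behaviour:

    import hashlib, hmac

    a = hashlib.sha256(b"token").hexdigest()
    b = hashlib.sha256(b"token").hexdigest()
    print(hmac.compare_digest(a, b))   # True, compared without an early exit on mismatch
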
+#ifndef EVPXOF_DIGEST_METHODDEF
+ #define EVPXOF_DIGEST_METHODDEF
+#endif /* !defined(EVPXOF_DIGEST_METHODDEF) */
+
+#ifndef EVPXOF_HEXDIGEST_METHODDEF
+ #define EVPXOF_HEXDIGEST_METHODDEF
+#endif /* !defined(EVPXOF_HEXDIGEST_METHODDEF) */
+
+#ifndef _HASHLIB_OPENSSL_SHA3_224_METHODDEF
+ #define _HASHLIB_OPENSSL_SHA3_224_METHODDEF
+#endif /* !defined(_HASHLIB_OPENSSL_SHA3_224_METHODDEF) */
+
+#ifndef _HASHLIB_OPENSSL_SHA3_256_METHODDEF
+ #define _HASHLIB_OPENSSL_SHA3_256_METHODDEF
+#endif /* !defined(_HASHLIB_OPENSSL_SHA3_256_METHODDEF) */
+
+#ifndef _HASHLIB_OPENSSL_SHA3_384_METHODDEF
+ #define _HASHLIB_OPENSSL_SHA3_384_METHODDEF
+#endif /* !defined(_HASHLIB_OPENSSL_SHA3_384_METHODDEF) */
+
+#ifndef _HASHLIB_OPENSSL_SHA3_512_METHODDEF
+ #define _HASHLIB_OPENSSL_SHA3_512_METHODDEF
+#endif /* !defined(_HASHLIB_OPENSSL_SHA3_512_METHODDEF) */
+
+#ifndef _HASHLIB_OPENSSL_SHAKE_128_METHODDEF
+ #define _HASHLIB_OPENSSL_SHAKE_128_METHODDEF
+#endif /* !defined(_HASHLIB_OPENSSL_SHAKE_128_METHODDEF) */
+
+#ifndef _HASHLIB_OPENSSL_SHAKE_256_METHODDEF
+ #define _HASHLIB_OPENSSL_SHAKE_256_METHODDEF
+#endif /* !defined(_HASHLIB_OPENSSL_SHAKE_256_METHODDEF) */
+
#ifndef _HASHLIB_SCRYPT_METHODDEF
#define _HASHLIB_SCRYPT_METHODDEF
#endif /* !defined(_HASHLIB_SCRYPT_METHODDEF) */
-/*[clinic end generated code: output=38c2637f67e9bb79 input=a9049054013a1b77]*/
+
+#ifndef _HASHLIB_GET_FIPS_MODE_METHODDEF
+ #define _HASHLIB_GET_FIPS_MODE_METHODDEF
+#endif /* !defined(_HASHLIB_GET_FIPS_MODE_METHODDEF) */
+/*[clinic end generated code: output=b6b280e46bf0b139 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/_pickle.c.h b/Modules/clinic/_pickle.c.h
index 0457a433..136524b6 100644
--- a/Modules/clinic/_pickle.c.h
+++ b/Modules/clinic/_pickle.c.h
@@ -735,7 +735,7 @@ exit:
}
PyDoc_STRVAR(_pickle_loads__doc__,
-"loads($module, /, data, *, fix_imports=True, encoding=\'ASCII\',\n"
+"loads($module, data, /, *, fix_imports=True, encoding=\'ASCII\',\n"
" errors=\'strict\', buffers=())\n"
"--\n"
"\n"
@@ -766,7 +766,7 @@ static PyObject *
_pickle_loads(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"data", "fix_imports", "encoding", "errors", "buffers", NULL};
+ static const char * const _keywords[] = {"", "fix_imports", "encoding", "errors", "buffers", NULL};
static _PyArg_Parser _parser = {NULL, _keywords, "loads", 0};
PyObject *argsbuf[5];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1;
@@ -836,4 +836,4 @@ skip_optional_kwonly:
exit:
return return_value;
}
-/*[clinic end generated code: output=e2506823be1960c5 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=324aad69644beda2 input=a9049054013a1b77]*/
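The hunk above renames the first keyword to the empty string and moves the `/` marker in the docstring, making `data` positional-only in `pickle.loads()`. A sketch of the visible effect (assuming Python 3.9+ semantics):

    import pickle

    blob = pickle.dumps({"answer": 42})
    print(pickle.loads(blob))          # positional call keeps working
    try:
        pickle.loads(data=blob)        # now rejected: data is positional-only
    except TypeError as exc:
        print("rejected:", exc)
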
diff --git a/Modules/clinic/_randommodule.c.h b/Modules/clinic/_randommodule.c.h
index a467811d..0a642dff 100644
--- a/Modules/clinic/_randommodule.c.h
+++ b/Modules/clinic/_randommodule.c.h
@@ -114,4 +114,21 @@ _random_Random_getrandbits(RandomObject *self, PyObject *arg)
exit:
return return_value;
}
-/*[clinic end generated code: output=a7feb0c9c8d1b627 input=a9049054013a1b77]*/
+
+PyDoc_STRVAR(_random_Random___reduce____doc__,
+"__reduce__($self, /)\n"
+"--\n"
+"\n");
+
+#define _RANDOM_RANDOM___REDUCE___METHODDEF \
+ {"__reduce__", (PyCFunction)_random_Random___reduce__, METH_NOARGS, _random_Random___reduce____doc__},
+
+static PyObject *
+_random_Random___reduce___impl(RandomObject *self);
+
+static PyObject *
+_random_Random___reduce__(RandomObject *self, PyObject *Py_UNUSED(ignored))
+{
+ return _random_Random___reduce___impl(self);
+}
+/*[clinic end generated code: output=d8a99be3f1192219 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/_testmultiphase.c.h b/Modules/clinic/_testmultiphase.c.h
new file mode 100644
index 00000000..0d38c230
--- /dev/null
+++ b/Modules/clinic/_testmultiphase.c.h
@@ -0,0 +1,101 @@
+/*[clinic input]
+preserve
+[clinic start generated code]*/
+
+PyDoc_STRVAR(_testmultiphase_StateAccessType_get_defining_module__doc__,
+"get_defining_module($self, /)\n"
+"--\n"
+"\n"
+"Return the module of the defining class.");
+
+#define _TESTMULTIPHASE_STATEACCESSTYPE_GET_DEFINING_MODULE_METHODDEF \
+ {"get_defining_module", (PyCFunction)(void(*)(void))_testmultiphase_StateAccessType_get_defining_module, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _testmultiphase_StateAccessType_get_defining_module__doc__},
+
+static PyObject *
+_testmultiphase_StateAccessType_get_defining_module_impl(StateAccessTypeObject *self,
+ PyTypeObject *cls);
+
+static PyObject *
+_testmultiphase_StateAccessType_get_defining_module(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = { NULL};
+ static _PyArg_Parser _parser = {":get_defining_module", _keywords, 0};
+
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser
+ )) {
+ goto exit;
+ }
+ return_value = _testmultiphase_StateAccessType_get_defining_module_impl(self, cls);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(_testmultiphase_StateAccessType_increment_count_clinic__doc__,
+"increment_count_clinic($self, /, n=1, *, twice=False)\n"
+"--\n"
+"\n"
+"Add \'n\' from the module-state counter.\n"
+"\n"
+"Pass \'twice\' to double that amount.\n"
+"\n"
+"This tests Argument Clinic support for defining_class.");
+
+#define _TESTMULTIPHASE_STATEACCESSTYPE_INCREMENT_COUNT_CLINIC_METHODDEF \
+ {"increment_count_clinic", (PyCFunction)(void(*)(void))_testmultiphase_StateAccessType_increment_count_clinic, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _testmultiphase_StateAccessType_increment_count_clinic__doc__},
+
+static PyObject *
+_testmultiphase_StateAccessType_increment_count_clinic_impl(StateAccessTypeObject *self,
+ PyTypeObject *cls,
+ int n, int twice);
+
+static PyObject *
+_testmultiphase_StateAccessType_increment_count_clinic(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"n", "twice", NULL};
+ static _PyArg_Parser _parser = {"|i$p:increment_count_clinic", _keywords, 0};
+ int n = 1;
+ int twice = 0;
+
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &n, &twice)) {
+ goto exit;
+ }
+ return_value = _testmultiphase_StateAccessType_increment_count_clinic_impl(self, cls, n, twice);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(_testmultiphase_StateAccessType_get_count__doc__,
+"get_count($self, /)\n"
+"--\n"
+"\n"
+"Return the value of the module-state counter.");
+
+#define _TESTMULTIPHASE_STATEACCESSTYPE_GET_COUNT_METHODDEF \
+ {"get_count", (PyCFunction)(void(*)(void))_testmultiphase_StateAccessType_get_count, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _testmultiphase_StateAccessType_get_count__doc__},
+
+static PyObject *
+_testmultiphase_StateAccessType_get_count_impl(StateAccessTypeObject *self,
+ PyTypeObject *cls);
+
+static PyObject *
+_testmultiphase_StateAccessType_get_count(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = { NULL};
+ static _PyArg_Parser _parser = {":get_count", _keywords, 0};
+
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser
+ )) {
+ goto exit;
+ }
+ return_value = _testmultiphase_StateAccessType_get_count_impl(self, cls);
+
+exit:
+ return return_value;
+}
+/*[clinic end generated code: output=39eea487e94e7f5d input=a9049054013a1b77]*/
diff --git a/Modules/clinic/_tracemalloc.c.h b/Modules/clinic/_tracemalloc.c.h
index 68fafdc3..049cacd8 100644
--- a/Modules/clinic/_tracemalloc.c.h
+++ b/Modules/clinic/_tracemalloc.c.h
@@ -197,4 +197,24 @@ _tracemalloc_get_traced_memory(PyObject *module, PyObject *Py_UNUSED(ignored))
{
return _tracemalloc_get_traced_memory_impl(module);
}
-/*[clinic end generated code: output=1bc96dc569706afa input=a9049054013a1b77]*/
+
+PyDoc_STRVAR(_tracemalloc_reset_peak__doc__,
+"reset_peak($module, /)\n"
+"--\n"
+"\n"
+"Set the peak size of memory blocks traced by tracemalloc to the current size.\n"
+"\n"
+"Do nothing if the tracemalloc module is not tracing memory allocations.");
+
+#define _TRACEMALLOC_RESET_PEAK_METHODDEF \
+ {"reset_peak", (PyCFunction)_tracemalloc_reset_peak, METH_NOARGS, _tracemalloc_reset_peak__doc__},
+
+static PyObject *
+_tracemalloc_reset_peak_impl(PyObject *module);
+
+static PyObject *
+_tracemalloc_reset_peak(PyObject *module, PyObject *Py_UNUSED(ignored))
+{
+ return _tracemalloc_reset_peak_impl(module);
+}
+/*[clinic end generated code: output=a130117b1af821da input=a9049054013a1b77]*/
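`reset_peak()` lets callers measure the peak of a specific phase of a program rather than of the whole run. A minimal sketch:

    import tracemalloc

    tracemalloc.start()
    big = [bytes(1000) for _ in range(10_000)]     # drive the peak up
    del big
    tracemalloc.reset_peak()                       # peak restarts from the current size
    buf = bytes(1000)
    current, peak = tracemalloc.get_traced_memory()
    print(current, peak)                           # peak now reflects only the second phase
    tracemalloc.stop()
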
diff --git a/Modules/clinic/arraymodule.c.h b/Modules/clinic/arraymodule.c.h
index 33f82d4d..b9245ca9 100644
--- a/Modules/clinic/arraymodule.c.h
+++ b/Modules/clinic/arraymodule.c.h
@@ -312,54 +312,6 @@ array_array_tolist(arrayobject *self, PyObject *Py_UNUSED(ignored))
return array_array_tolist_impl(self);
}
-PyDoc_STRVAR(array_array_fromstring__doc__,
-"fromstring($self, buffer, /)\n"
-"--\n"
-"\n"
-"Appends items from the string, interpreting it as an array of machine values, as if it had been read from a file using the fromfile() method).\n"
-"\n"
-"This method is deprecated. Use frombytes instead.");
-
-#define ARRAY_ARRAY_FROMSTRING_METHODDEF \
- {"fromstring", (PyCFunction)array_array_fromstring, METH_O, array_array_fromstring__doc__},
-
-static PyObject *
-array_array_fromstring_impl(arrayobject *self, Py_buffer *buffer);
-
-static PyObject *
-array_array_fromstring(arrayobject *self, PyObject *arg)
-{
- PyObject *return_value = NULL;
- Py_buffer buffer = {NULL, NULL};
-
- if (PyUnicode_Check(arg)) {
- Py_ssize_t len;
- const char *ptr = PyUnicode_AsUTF8AndSize(arg, &len);
- if (ptr == NULL) {
- goto exit;
- }
- PyBuffer_FillInfo(&buffer, arg, (void *)ptr, len, 1, 0);
- }
- else { /* any bytes-like object */
- if (PyObject_GetBuffer(arg, &buffer, PyBUF_SIMPLE) != 0) {
- goto exit;
- }
- if (!PyBuffer_IsContiguous(&buffer, 'C')) {
- _PyArg_BadArgument("fromstring", "argument", "contiguous buffer", arg);
- goto exit;
- }
- }
- return_value = array_array_fromstring_impl(self, &buffer);
-
-exit:
- /* Cleanup for buffer */
- if (buffer.obj) {
- PyBuffer_Release(&buffer);
- }
-
- return return_value;
-}
-
PyDoc_STRVAR(array_array_frombytes__doc__,
"frombytes($self, buffer, /)\n"
"--\n"
@@ -414,26 +366,6 @@ array_array_tobytes(arrayobject *self, PyObject *Py_UNUSED(ignored))
return array_array_tobytes_impl(self);
}
-PyDoc_STRVAR(array_array_tostring__doc__,
-"tostring($self, /)\n"
-"--\n"
-"\n"
-"Convert the array to an array of machine values and return the bytes representation.\n"
-"\n"
-"This method is deprecated. Use tobytes instead.");
-
-#define ARRAY_ARRAY_TOSTRING_METHODDEF \
- {"tostring", (PyCFunction)array_array_tostring, METH_NOARGS, array_array_tostring__doc__},
-
-static PyObject *
-array_array_tostring_impl(arrayobject *self);
-
-static PyObject *
-array_array_tostring(arrayobject *self, PyObject *Py_UNUSED(ignored))
-{
- return array_array_tostring_impl(self);
-}
-
PyDoc_STRVAR(array_array_fromunicode__doc__,
"fromunicode($self, ustr, /)\n"
"--\n"
@@ -448,20 +380,23 @@ PyDoc_STRVAR(array_array_fromunicode__doc__,
{"fromunicode", (PyCFunction)array_array_fromunicode, METH_O, array_array_fromunicode__doc__},
static PyObject *
-array_array_fromunicode_impl(arrayobject *self, const Py_UNICODE *ustr,
- Py_ssize_clean_t ustr_length);
+array_array_fromunicode_impl(arrayobject *self, PyObject *ustr);
static PyObject *
array_array_fromunicode(arrayobject *self, PyObject *arg)
{
PyObject *return_value = NULL;
- const Py_UNICODE *ustr;
- Py_ssize_clean_t ustr_length;
+ PyObject *ustr;
- if (!PyArg_Parse(arg, "u#:fromunicode", &ustr, &ustr_length)) {
+ if (!PyUnicode_Check(arg)) {
+ _PyArg_BadArgument("fromunicode", "argument", "str", arg);
+ goto exit;
+ }
+ if (PyUnicode_READY(arg) == -1) {
goto exit;
}
- return_value = array_array_fromunicode_impl(self, ustr, ustr_length);
+ ustr = arg;
+ return_value = array_array_fromunicode_impl(self, ustr);
exit:
return return_value;
@@ -599,4 +534,4 @@ PyDoc_STRVAR(array_arrayiterator___setstate____doc__,
#define ARRAY_ARRAYITERATOR___SETSTATE___METHODDEF \
{"__setstate__", (PyCFunction)array_arrayiterator___setstate__, METH_O, array_arrayiterator___setstate____doc__},
-/*[clinic end generated code: output=6aa421571e2c0756 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=9f70748dd3bc532f input=a9049054013a1b77]*/
diff --git a/Modules/clinic/binascii.c.h b/Modules/clinic/binascii.c.h
index 82942f08..4d02c72c 100644
--- a/Modules/clinic/binascii.c.h
+++ b/Modules/clinic/binascii.c.h
@@ -328,7 +328,7 @@ PyDoc_STRVAR(binascii_crc_hqx__doc__,
#define BINASCII_CRC_HQX_METHODDEF \
{"crc_hqx", (PyCFunction)(void(*)(void))binascii_crc_hqx, METH_FASTCALL, binascii_crc_hqx__doc__},
-static unsigned int
+static PyObject *
binascii_crc_hqx_impl(PyObject *module, Py_buffer *data, unsigned int crc);
static PyObject *
@@ -337,7 +337,6 @@ binascii_crc_hqx(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
PyObject *return_value = NULL;
Py_buffer data = {NULL, NULL};
unsigned int crc;
- unsigned int _return_value;
if (!_PyArg_CheckPositional("crc_hqx", nargs, 2, 2)) {
goto exit;
@@ -358,11 +357,7 @@ binascii_crc_hqx(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (crc == (unsigned int)-1 && PyErr_Occurred()) {
goto exit;
}
- _return_value = binascii_crc_hqx_impl(module, &data, crc);
- if ((_return_value == (unsigned int)-1) && PyErr_Occurred()) {
- goto exit;
- }
- return_value = PyLong_FromUnsignedLong((unsigned long)_return_value);
+ return_value = binascii_crc_hqx_impl(module, &data, crc);
exit:
/* Cleanup for data */
@@ -801,4 +796,4 @@ exit:
return return_value;
}
-/*[clinic end generated code: output=ec26d03c2007eaac input=a9049054013a1b77]*/
+/*[clinic end generated code: output=a1e878d3963b615e input=a9049054013a1b77]*/
diff --git a/Modules/clinic/cmathmodule.c.h b/Modules/clinic/cmathmodule.c.h
index 81a8437c..4b6653aa 100644
--- a/Modules/clinic/cmathmodule.c.h
+++ b/Modules/clinic/cmathmodule.c.h
@@ -26,9 +26,8 @@ cmath_acos(PyObject *module, PyObject *arg)
goto exit;
}
/* modifications for z */
- errno = 0; PyFPE_START_PROTECT("complex function", goto exit);
+ errno = 0;
_return_value = cmath_acos_impl(module, z);
- PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -69,9 +68,8 @@ cmath_acosh(PyObject *module, PyObject *arg)
goto exit;
}
/* modifications for z */
- errno = 0; PyFPE_START_PROTECT("complex function", goto exit);
+ errno = 0;
_return_value = cmath_acosh_impl(module, z);
- PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -112,9 +110,8 @@ cmath_asin(PyObject *module, PyObject *arg)
goto exit;
}
/* modifications for z */
- errno = 0; PyFPE_START_PROTECT("complex function", goto exit);
+ errno = 0;
_return_value = cmath_asin_impl(module, z);
- PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -155,9 +152,8 @@ cmath_asinh(PyObject *module, PyObject *arg)
goto exit;
}
/* modifications for z */
- errno = 0; PyFPE_START_PROTECT("complex function", goto exit);
+ errno = 0;
_return_value = cmath_asinh_impl(module, z);
- PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -198,9 +194,8 @@ cmath_atan(PyObject *module, PyObject *arg)
goto exit;
}
/* modifications for z */
- errno = 0; PyFPE_START_PROTECT("complex function", goto exit);
+ errno = 0;
_return_value = cmath_atan_impl(module, z);
- PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -241,9 +236,8 @@ cmath_atanh(PyObject *module, PyObject *arg)
goto exit;
}
/* modifications for z */
- errno = 0; PyFPE_START_PROTECT("complex function", goto exit);
+ errno = 0;
_return_value = cmath_atanh_impl(module, z);
- PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -284,9 +278,8 @@ cmath_cos(PyObject *module, PyObject *arg)
goto exit;
}
/* modifications for z */
- errno = 0; PyFPE_START_PROTECT("complex function", goto exit);
+ errno = 0;
_return_value = cmath_cos_impl(module, z);
- PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -327,9 +320,8 @@ cmath_cosh(PyObject *module, PyObject *arg)
goto exit;
}
/* modifications for z */
- errno = 0; PyFPE_START_PROTECT("complex function", goto exit);
+ errno = 0;
_return_value = cmath_cosh_impl(module, z);
- PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -370,9 +362,8 @@ cmath_exp(PyObject *module, PyObject *arg)
goto exit;
}
/* modifications for z */
- errno = 0; PyFPE_START_PROTECT("complex function", goto exit);
+ errno = 0;
_return_value = cmath_exp_impl(module, z);
- PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -413,9 +404,8 @@ cmath_log10(PyObject *module, PyObject *arg)
goto exit;
}
/* modifications for z */
- errno = 0; PyFPE_START_PROTECT("complex function", goto exit);
+ errno = 0;
_return_value = cmath_log10_impl(module, z);
- PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -456,9 +446,8 @@ cmath_sin(PyObject *module, PyObject *arg)
goto exit;
}
/* modifications for z */
- errno = 0; PyFPE_START_PROTECT("complex function", goto exit);
+ errno = 0;
_return_value = cmath_sin_impl(module, z);
- PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -499,9 +488,8 @@ cmath_sinh(PyObject *module, PyObject *arg)
goto exit;
}
/* modifications for z */
- errno = 0; PyFPE_START_PROTECT("complex function", goto exit);
+ errno = 0;
_return_value = cmath_sinh_impl(module, z);
- PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -542,9 +530,8 @@ cmath_sqrt(PyObject *module, PyObject *arg)
goto exit;
}
/* modifications for z */
- errno = 0; PyFPE_START_PROTECT("complex function", goto exit);
+ errno = 0;
_return_value = cmath_sqrt_impl(module, z);
- PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -585,9 +572,8 @@ cmath_tan(PyObject *module, PyObject *arg)
goto exit;
}
/* modifications for z */
- errno = 0; PyFPE_START_PROTECT("complex function", goto exit);
+ errno = 0;
_return_value = cmath_tan_impl(module, z);
- PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -628,9 +614,8 @@ cmath_tanh(PyObject *module, PyObject *arg)
goto exit;
}
/* modifications for z */
- errno = 0; PyFPE_START_PROTECT("complex function", goto exit);
+ errno = 0;
_return_value = cmath_tanh_impl(module, z);
- PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -968,4 +953,4 @@ skip_optional_kwonly:
exit:
return return_value;
}
-/*[clinic end generated code: output=3edc4484b10ae752 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=353347db2e808e0d input=a9049054013a1b77]*/
diff --git a/Modules/clinic/gcmodule.c.h b/Modules/clinic/gcmodule.c.h
index 22d2aa4a..72795c66 100644
--- a/Modules/clinic/gcmodule.c.h
+++ b/Modules/clinic/gcmodule.c.h
@@ -304,6 +304,15 @@ PyDoc_STRVAR(gc_is_tracked__doc__,
#define GC_IS_TRACKED_METHODDEF \
{"is_tracked", (PyCFunction)gc_is_tracked, METH_O, gc_is_tracked__doc__},
+PyDoc_STRVAR(gc_is_finalized__doc__,
+"is_finalized($module, obj, /)\n"
+"--\n"
+"\n"
+"Returns true if the object has been already finalized by the GC.");
+
+#define GC_IS_FINALIZED_METHODDEF \
+ {"is_finalized", (PyCFunction)gc_is_finalized, METH_O, gc_is_finalized__doc__},
+
PyDoc_STRVAR(gc_freeze__doc__,
"freeze($module, /)\n"
"--\n"
@@ -373,4 +382,4 @@ gc_get_freeze_count(PyObject *module, PyObject *Py_UNUSED(ignored))
exit:
return return_value;
}
-/*[clinic end generated code: output=e40d384b1f0d513c input=a9049054013a1b77]*/
+/*[clinic end generated code: output=bd6a8056989e2e69 input=a9049054013a1b77]*/
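`gc.is_finalized()` reports whether an object's finalizer has already run, which matters because finalizers run at most once even if the object is resurrected. A small sketch:

    import gc

    x = None

    class Lazarus:
        def __del__(self):
            global x
            x = self                  # resurrect the instance for later inspection

    obj = Lazarus()
    print(gc.is_finalized(obj))       # False: the finalizer has not run yet
    del obj                           # __del__ runs once and resurrects the object into x
    print(gc.is_finalized(x))         # True: the finalizer will not run again
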
diff --git a/Modules/clinic/mathmodule.c.h b/Modules/clinic/mathmodule.c.h
index 95d68ee5..65f3dd4f 100644
--- a/Modules/clinic/mathmodule.c.h
+++ b/Modules/clinic/mathmodule.c.h
@@ -2,36 +2,6 @@
preserve
[clinic start generated code]*/
-PyDoc_STRVAR(math_gcd__doc__,
-"gcd($module, x, y, /)\n"
-"--\n"
-"\n"
-"greatest common divisor of x and y");
-
-#define MATH_GCD_METHODDEF \
- {"gcd", (PyCFunction)(void(*)(void))math_gcd, METH_FASTCALL, math_gcd__doc__},
-
-static PyObject *
-math_gcd_impl(PyObject *module, PyObject *a, PyObject *b);
-
-static PyObject *
-math_gcd(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
-{
- PyObject *return_value = NULL;
- PyObject *a;
- PyObject *b;
-
- if (!_PyArg_CheckPositional("gcd", nargs, 2, 2)) {
- goto exit;
- }
- a = args[0];
- b = args[1];
- return_value = math_gcd_impl(module, a, b);
-
-exit:
- return return_value;
-}
-
PyDoc_STRVAR(math_ceil__doc__,
"ceil($module, x, /)\n"
"--\n"
@@ -808,4 +778,91 @@ math_comb(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
exit:
return return_value;
}
-/*[clinic end generated code: output=9a2b3dc91eb9aadd input=a9049054013a1b77]*/
+
+PyDoc_STRVAR(math_nextafter__doc__,
+"nextafter($module, x, y, /)\n"
+"--\n"
+"\n"
+"Return the next floating-point value after x towards y.");
+
+#define MATH_NEXTAFTER_METHODDEF \
+ {"nextafter", (PyCFunction)(void(*)(void))math_nextafter, METH_FASTCALL, math_nextafter__doc__},
+
+static PyObject *
+math_nextafter_impl(PyObject *module, double x, double y);
+
+static PyObject *
+math_nextafter(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ double x;
+ double y;
+
+ if (!_PyArg_CheckPositional("nextafter", nargs, 2, 2)) {
+ goto exit;
+ }
+ if (PyFloat_CheckExact(args[0])) {
+ x = PyFloat_AS_DOUBLE(args[0]);
+ }
+ else
+ {
+ x = PyFloat_AsDouble(args[0]);
+ if (x == -1.0 && PyErr_Occurred()) {
+ goto exit;
+ }
+ }
+ if (PyFloat_CheckExact(args[1])) {
+ y = PyFloat_AS_DOUBLE(args[1]);
+ }
+ else
+ {
+ y = PyFloat_AsDouble(args[1]);
+ if (y == -1.0 && PyErr_Occurred()) {
+ goto exit;
+ }
+ }
+ return_value = math_nextafter_impl(module, x, y);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(math_ulp__doc__,
+"ulp($module, x, /)\n"
+"--\n"
+"\n"
+"Return the value of the least significant bit of the float x.");
+
+#define MATH_ULP_METHODDEF \
+ {"ulp", (PyCFunction)math_ulp, METH_O, math_ulp__doc__},
+
+static double
+math_ulp_impl(PyObject *module, double x);
+
+static PyObject *
+math_ulp(PyObject *module, PyObject *arg)
+{
+ PyObject *return_value = NULL;
+ double x;
+ double _return_value;
+
+ if (PyFloat_CheckExact(arg)) {
+ x = PyFloat_AS_DOUBLE(arg);
+ }
+ else
+ {
+ x = PyFloat_AsDouble(arg);
+ if (x == -1.0 && PyErr_Occurred()) {
+ goto exit;
+ }
+ }
+ _return_value = math_ulp_impl(module, x);
+ if ((_return_value == -1.0) && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = PyFloat_FromDouble(_return_value);
+
+exit:
+ return return_value;
+}
+/*[clinic end generated code: output=1eae2b3ef19568fa input=a9049054013a1b77]*/
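`math.nextafter()` and `math.ulp()` are related: the gap between a float and its successor towards larger values is exactly one ulp. A quick sketch:

    import math

    print(math.nextafter(1.0, 2.0))        # smallest float strictly greater than 1.0
    print(math.ulp(1.0))                   # 2**-52, the float spacing just above 1.0
    print(math.nextafter(1.0, 2.0) - 1.0 == math.ulp(1.0))   # True
    print(math.nextafter(0.0, 1.0))        # smallest positive subnormal float
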
diff --git a/Modules/clinic/md5module.c.h b/Modules/clinic/md5module.c.h
index 12484cc0..c109f9ef 100644
--- a/Modules/clinic/md5module.c.h
+++ b/Modules/clinic/md5module.c.h
@@ -66,7 +66,7 @@ PyDoc_STRVAR(MD5Type_update__doc__,
{"update", (PyCFunction)MD5Type_update, METH_O, MD5Type_update__doc__},
PyDoc_STRVAR(_md5_md5__doc__,
-"md5($module, /, string=b\'\')\n"
+"md5($module, /, string=b\'\', *, usedforsecurity=True)\n"
"--\n"
"\n"
"Return a new MD5 hash object; optionally initialized with a string.");
@@ -75,17 +75,18 @@ PyDoc_STRVAR(_md5_md5__doc__,
{"md5", (PyCFunction)(void(*)(void))_md5_md5, METH_FASTCALL|METH_KEYWORDS, _md5_md5__doc__},
static PyObject *
-_md5_md5_impl(PyObject *module, PyObject *string);
+_md5_md5_impl(PyObject *module, PyObject *string, int usedforsecurity);
static PyObject *
_md5_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"string", NULL};
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
static _PyArg_Parser _parser = {NULL, _keywords, "md5", 0};
- PyObject *argsbuf[1];
+ PyObject *argsbuf[2];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
PyObject *string = NULL;
+ int usedforsecurity = 1;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
if (!args) {
@@ -94,11 +95,24 @@ _md5_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw
if (!noptargs) {
goto skip_optional_pos;
}
- string = args[0];
+ if (args[0]) {
+ string = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
skip_optional_pos:
- return_value = _md5_md5_impl(module, string);
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _md5_md5_impl(module, string, usedforsecurity);
exit:
return return_value;
}
-/*[clinic end generated code: output=53133f08cf9095fc input=a9049054013a1b77]*/
+/*[clinic end generated code: output=dbe3abc60086f3ef input=a9049054013a1b77]*/
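As with the `_hashlib` constructors, `md5` gains a keyword-only `usedforsecurity` flag. A hedged sketch of the intended use (on FIPS-restricted builds, MD5 is typically only permitted for non-security purposes such as cache keys or checksums):

    import hashlib

    digest = hashlib.md5(b"cache-key-material", usedforsecurity=False).hexdigest()
    print(digest)
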
diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h
index 09ecdb35..41baa455 100644
--- a/Modules/clinic/posixmodule.c.h
+++ b/Modules/clinic/posixmodule.c.h
@@ -2805,7 +2805,7 @@ PyDoc_STRVAR(os_sched_getscheduler__doc__,
"sched_getscheduler($module, pid, /)\n"
"--\n"
"\n"
-"Get the scheduling policy for the process identifiedy by pid.\n"
+"Get the scheduling policy for the process identified by pid.\n"
"\n"
"Passing 0 for pid returns the scheduling policy for the calling process.");
@@ -2838,7 +2838,7 @@ PyDoc_STRVAR(os_sched_param__doc__,
"sched_param(sched_priority)\n"
"--\n"
"\n"
-"Current has only one field: sched_priority\");\n"
+"Currently has only one field: sched_priority\n"
"\n"
" sched_priority\n"
" A scheduling parameter.");
@@ -2886,7 +2886,7 @@ PyDoc_STRVAR(os_sched_setscheduler__doc__,
static PyObject *
os_sched_setscheduler_impl(PyObject *module, pid_t pid, int policy,
- struct sched_param *param);
+ PyObject *param_obj);
static PyObject *
os_sched_setscheduler(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
@@ -2894,13 +2894,13 @@ os_sched_setscheduler(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
PyObject *return_value = NULL;
pid_t pid;
int policy;
- struct sched_param param;
+ PyObject *param_obj;
- if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID "iO&:sched_setscheduler",
- &pid, &policy, convert_sched_param, &param)) {
+ if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID "iO:sched_setscheduler",
+ &pid, &policy, &param_obj)) {
goto exit;
}
- return_value = os_sched_setscheduler_impl(module, pid, policy, &param);
+ return_value = os_sched_setscheduler_impl(module, pid, policy, param_obj);
exit:
return return_value;
@@ -2957,21 +2957,20 @@ PyDoc_STRVAR(os_sched_setparam__doc__,
{"sched_setparam", (PyCFunction)(void(*)(void))os_sched_setparam, METH_FASTCALL, os_sched_setparam__doc__},
static PyObject *
-os_sched_setparam_impl(PyObject *module, pid_t pid,
- struct sched_param *param);
+os_sched_setparam_impl(PyObject *module, pid_t pid, PyObject *param_obj);
static PyObject *
os_sched_setparam(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
pid_t pid;
- struct sched_param param;
+ PyObject *param_obj;
- if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID "O&:sched_setparam",
- &pid, convert_sched_param, &param)) {
+ if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID "O:sched_setparam",
+ &pid, &param_obj)) {
goto exit;
}
- return_value = os_sched_setparam_impl(module, pid, &param);
+ return_value = os_sched_setparam_impl(module, pid, param_obj);
exit:
return return_value;
@@ -3247,6 +3246,118 @@ os_getpid(PyObject *module, PyObject *Py_UNUSED(ignored))
#endif /* defined(HAVE_GETPID) */
+#if defined(HAVE_GETGROUPLIST) && defined(__APPLE__)
+
+PyDoc_STRVAR(os_getgrouplist__doc__,
+"getgrouplist($module, user, group, /)\n"
+"--\n"
+"\n"
+"Returns a list of groups to which a user belongs.\n"
+"\n"
+" user\n"
+" username to lookup\n"
+" group\n"
+" base group id of the user");
+
+#define OS_GETGROUPLIST_METHODDEF \
+ {"getgrouplist", (PyCFunction)(void(*)(void))os_getgrouplist, METH_FASTCALL, os_getgrouplist__doc__},
+
+static PyObject *
+os_getgrouplist_impl(PyObject *module, const char *user, int basegid);
+
+static PyObject *
+os_getgrouplist(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ const char *user;
+ int basegid;
+
+ if (!_PyArg_CheckPositional("getgrouplist", nargs, 2, 2)) {
+ goto exit;
+ }
+ if (!PyUnicode_Check(args[0])) {
+ _PyArg_BadArgument("getgrouplist", "argument 1", "str", args[0]);
+ goto exit;
+ }
+ Py_ssize_t user_length;
+ user = PyUnicode_AsUTF8AndSize(args[0], &user_length);
+ if (user == NULL) {
+ goto exit;
+ }
+ if (strlen(user) != (size_t)user_length) {
+ PyErr_SetString(PyExc_ValueError, "embedded null character");
+ goto exit;
+ }
+ if (PyFloat_Check(args[1])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ basegid = _PyLong_AsInt(args[1]);
+ if (basegid == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = os_getgrouplist_impl(module, user, basegid);
+
+exit:
+ return return_value;
+}
+
+#endif /* defined(HAVE_GETGROUPLIST) && defined(__APPLE__) */
+
+#if defined(HAVE_GETGROUPLIST) && !defined(__APPLE__)
+
+PyDoc_STRVAR(os_getgrouplist__doc__,
+"getgrouplist($module, user, group, /)\n"
+"--\n"
+"\n"
+"Returns a list of groups to which a user belongs.\n"
+"\n"
+" user\n"
+" username to lookup\n"
+" group\n"
+" base group id of the user");
+
+#define OS_GETGROUPLIST_METHODDEF \
+ {"getgrouplist", (PyCFunction)(void(*)(void))os_getgrouplist, METH_FASTCALL, os_getgrouplist__doc__},
+
+static PyObject *
+os_getgrouplist_impl(PyObject *module, const char *user, gid_t basegid);
+
+static PyObject *
+os_getgrouplist(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ const char *user;
+ gid_t basegid;
+
+ if (!_PyArg_CheckPositional("getgrouplist", nargs, 2, 2)) {
+ goto exit;
+ }
+ if (!PyUnicode_Check(args[0])) {
+ _PyArg_BadArgument("getgrouplist", "argument 1", "str", args[0]);
+ goto exit;
+ }
+ Py_ssize_t user_length;
+ user = PyUnicode_AsUTF8AndSize(args[0], &user_length);
+ if (user == NULL) {
+ goto exit;
+ }
+ if (strlen(user) != (size_t)user_length) {
+ PyErr_SetString(PyExc_ValueError, "embedded null character");
+ goto exit;
+ }
+ if (!_Py_Gid_Converter(args[1], &basegid)) {
+ goto exit;
+ }
+ return_value = os_getgrouplist_impl(module, user, basegid);
+
+exit:
+ return return_value;
+}
+
+#endif /* defined(HAVE_GETGROUPLIST) && !defined(__APPLE__) */
+
#if defined(HAVE_GETGROUPS)
PyDoc_STRVAR(os_getgroups__doc__,
@@ -3269,6 +3380,102 @@ os_getgroups(PyObject *module, PyObject *Py_UNUSED(ignored))
#endif /* defined(HAVE_GETGROUPS) */
+#if defined(HAVE_INITGROUPS) && defined(__APPLE__)
+
+PyDoc_STRVAR(os_initgroups__doc__,
+"initgroups($module, username, gid, /)\n"
+"--\n"
+"\n"
+"Initialize the group access list.\n"
+"\n"
+"Call the system initgroups() to initialize the group access list with all of\n"
+"the groups of which the specified username is a member, plus the specified\n"
+"group id.");
+
+#define OS_INITGROUPS_METHODDEF \
+ {"initgroups", (PyCFunction)(void(*)(void))os_initgroups, METH_FASTCALL, os_initgroups__doc__},
+
+static PyObject *
+os_initgroups_impl(PyObject *module, PyObject *oname, int gid);
+
+static PyObject *
+os_initgroups(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *oname = NULL;
+ int gid;
+
+ if (!_PyArg_CheckPositional("initgroups", nargs, 2, 2)) {
+ goto exit;
+ }
+ if (!PyUnicode_FSConverter(args[0], &oname)) {
+ goto exit;
+ }
+ if (PyFloat_Check(args[1])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ gid = _PyLong_AsInt(args[1]);
+ if (gid == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = os_initgroups_impl(module, oname, gid);
+
+exit:
+ /* Cleanup for oname */
+ Py_XDECREF(oname);
+
+ return return_value;
+}
+
+#endif /* defined(HAVE_INITGROUPS) && defined(__APPLE__) */
+
+#if defined(HAVE_INITGROUPS) && !defined(__APPLE__)
+
+PyDoc_STRVAR(os_initgroups__doc__,
+"initgroups($module, username, gid, /)\n"
+"--\n"
+"\n"
+"Initialize the group access list.\n"
+"\n"
+"Call the system initgroups() to initialize the group access list with all of\n"
+"the groups of which the specified username is a member, plus the specified\n"
+"group id.");
+
+#define OS_INITGROUPS_METHODDEF \
+ {"initgroups", (PyCFunction)(void(*)(void))os_initgroups, METH_FASTCALL, os_initgroups__doc__},
+
+static PyObject *
+os_initgroups_impl(PyObject *module, PyObject *oname, gid_t gid);
+
+static PyObject *
+os_initgroups(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *oname = NULL;
+ gid_t gid;
+
+ if (!_PyArg_CheckPositional("initgroups", nargs, 2, 2)) {
+ goto exit;
+ }
+ if (!PyUnicode_FSConverter(args[0], &oname)) {
+ goto exit;
+ }
+ if (!_Py_Gid_Converter(args[1], &gid)) {
+ goto exit;
+ }
+ return_value = os_initgroups_impl(module, oname, gid);
+
+exit:
+ /* Cleanup for oname */
+ Py_XDECREF(oname);
+
+ return return_value;
+}
+
+#endif /* defined(HAVE_INITGROUPS) && !defined(__APPLE__) */
+
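The two `getgrouplist`/`initgroups` variants above differ only in the group-id converter (plain `int` on macOS, `gid_t` elsewhere); the Python-level API is the same. A POSIX-only sketch, with `initgroups()` left commented out because it normally requires root:

    import os, pwd

    entry = pwd.getpwuid(os.getuid())
    print(os.getgrouplist(entry.pw_name, entry.pw_gid))
    # os.initgroups(entry.pw_name, entry.pw_gid)   # privileged: rebuilds the group access list
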
#if defined(HAVE_GETPGID)
PyDoc_STRVAR(os_getpgid__doc__,
@@ -3963,6 +4170,44 @@ os_wait(PyObject *module, PyObject *Py_UNUSED(ignored))
#endif /* defined(HAVE_WAIT) */
+#if (defined(__linux__) && defined(__NR_pidfd_open))
+
+PyDoc_STRVAR(os_pidfd_open__doc__,
+"pidfd_open($module, /, pid, flags=0)\n"
+"--\n"
+"\n"
+"Return a file descriptor referring to the process *pid*.\n"
+"\n"
+"The descriptor can be used to perform process management without races and\n"
+"signals.");
+
+#define OS_PIDFD_OPEN_METHODDEF \
+ {"pidfd_open", (PyCFunction)(void(*)(void))os_pidfd_open, METH_FASTCALL|METH_KEYWORDS, os_pidfd_open__doc__},
+
+static PyObject *
+os_pidfd_open_impl(PyObject *module, pid_t pid, unsigned int flags);
+
+static PyObject *
+os_pidfd_open(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"pid", "flags", NULL};
+ static _PyArg_Parser _parser = {"" _Py_PARSE_PID "|O&:pidfd_open", _keywords, 0};
+ pid_t pid;
+ unsigned int flags = 0;
+
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &pid, _PyLong_UnsignedInt_Converter, &flags)) {
+ goto exit;
+ }
+ return_value = os_pidfd_open_impl(module, pid, flags);
+
+exit:
+ return return_value;
+}
+
+#endif /* (defined(__linux__) && defined(__NR_pidfd_open)) */
+
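`os.pidfd_open()` returns a file descriptor that refers to a process, so signals can be delivered without pid-reuse races. A Linux-only sketch (kernel 5.3+ for `pidfd_open`, 5.1+ for `signal.pidfd_send_signal`):

    import os, signal, subprocess, sys

    child = subprocess.Popen([sys.executable, "-c", "import time; time.sleep(60)"])
    fd = os.pidfd_open(child.pid)                 # flags defaults to 0
    signal.pidfd_send_signal(fd, signal.SIGTERM)  # addressed by descriptor, not by raw pid
    child.wait()
    os.close(fd)
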
#if (defined(HAVE_READLINK) || defined(MS_WINDOWS))
PyDoc_STRVAR(os_readlink__doc__,
@@ -4799,14 +5044,14 @@ PyDoc_STRVAR(os_pread__doc__,
{"pread", (PyCFunction)(void(*)(void))os_pread, METH_FASTCALL, os_pread__doc__},
static PyObject *
-os_pread_impl(PyObject *module, int fd, int length, Py_off_t offset);
+os_pread_impl(PyObject *module, int fd, Py_ssize_t length, Py_off_t offset);
static PyObject *
os_pread(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
int fd;
- int length;
+ Py_ssize_t length;
Py_off_t offset;
if (!_PyArg_CheckPositional("pread", nargs, 3, 3)) {
@@ -4826,9 +5071,17 @@ os_pread(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
"integer argument expected, got float" );
goto exit;
}
- length = _PyLong_AsInt(args[1]);
- if (length == -1 && PyErr_Occurred()) {
- goto exit;
+ {
+ Py_ssize_t ival = -1;
+ PyObject *iobj = PyNumber_Index(args[1]);
+ if (iobj != NULL) {
+ ival = PyLong_AsSsize_t(iobj);
+ Py_DECREF(iobj);
+ }
+ if (ival == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ length = ival;
}
if (!Py_off_t_converter(args[2], &offset)) {
goto exit;
@@ -4975,10 +5228,287 @@ exit:
return return_value;
}
+#if defined(HAVE_SENDFILE) && defined(__APPLE__)
+
+PyDoc_STRVAR(os_sendfile__doc__,
+"sendfile($module, /, out_fd, in_fd, offset, count, headers=(),\n"
+" trailers=(), flags=0)\n"
+"--\n"
+"\n"
+"Copy count bytes from file descriptor in_fd to file descriptor out_fd.");
+
+#define OS_SENDFILE_METHODDEF \
+ {"sendfile", (PyCFunction)(void(*)(void))os_sendfile, METH_FASTCALL|METH_KEYWORDS, os_sendfile__doc__},
+
+static PyObject *
+os_sendfile_impl(PyObject *module, int out_fd, int in_fd, Py_off_t offset,
+ Py_off_t sbytes, PyObject *headers, PyObject *trailers,
+ int flags);
+
+static PyObject *
+os_sendfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"out_fd", "in_fd", "offset", "count", "headers", "trailers", "flags", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "sendfile", 0};
+ PyObject *argsbuf[7];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 4;
+ int out_fd;
+ int in_fd;
+ Py_off_t offset;
+ Py_off_t sbytes;
+ PyObject *headers = NULL;
+ PyObject *trailers = NULL;
+ int flags = 0;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 4, 7, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (PyFloat_Check(args[0])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ out_fd = _PyLong_AsInt(args[0]);
+ if (out_fd == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ if (PyFloat_Check(args[1])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ in_fd = _PyLong_AsInt(args[1]);
+ if (in_fd == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ if (!Py_off_t_converter(args[2], &offset)) {
+ goto exit;
+ }
+ if (!Py_off_t_converter(args[3], &sbytes)) {
+ goto exit;
+ }
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ if (args[4]) {
+ headers = args[4];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+ if (args[5]) {
+ trailers = args[5];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+ if (PyFloat_Check(args[6])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ flags = _PyLong_AsInt(args[6]);
+ if (flags == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+skip_optional_pos:
+ return_value = os_sendfile_impl(module, out_fd, in_fd, offset, sbytes, headers, trailers, flags);
+
+exit:
+ return return_value;
+}
+
+#endif /* defined(HAVE_SENDFILE) && defined(__APPLE__) */
+
+#if defined(HAVE_SENDFILE) && !defined(__APPLE__) && (defined(__FreeBSD__) || defined(__DragonFly__))
+
+PyDoc_STRVAR(os_sendfile__doc__,
+"sendfile($module, /, out_fd, in_fd, offset, count, headers=(),\n"
+" trailers=(), flags=0)\n"
+"--\n"
+"\n"
+"Copy count bytes from file descriptor in_fd to file descriptor out_fd.");
+
+#define OS_SENDFILE_METHODDEF \
+ {"sendfile", (PyCFunction)(void(*)(void))os_sendfile, METH_FASTCALL|METH_KEYWORDS, os_sendfile__doc__},
+
+static PyObject *
+os_sendfile_impl(PyObject *module, int out_fd, int in_fd, Py_off_t offset,
+ Py_ssize_t count, PyObject *headers, PyObject *trailers,
+ int flags);
+
+static PyObject *
+os_sendfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"out_fd", "in_fd", "offset", "count", "headers", "trailers", "flags", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "sendfile", 0};
+ PyObject *argsbuf[7];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 4;
+ int out_fd;
+ int in_fd;
+ Py_off_t offset;
+ Py_ssize_t count;
+ PyObject *headers = NULL;
+ PyObject *trailers = NULL;
+ int flags = 0;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 4, 7, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (PyFloat_Check(args[0])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ out_fd = _PyLong_AsInt(args[0]);
+ if (out_fd == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ if (PyFloat_Check(args[1])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ in_fd = _PyLong_AsInt(args[1]);
+ if (in_fd == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ if (!Py_off_t_converter(args[2], &offset)) {
+ goto exit;
+ }
+ if (PyFloat_Check(args[3])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ {
+ Py_ssize_t ival = -1;
+ PyObject *iobj = PyNumber_Index(args[3]);
+ if (iobj != NULL) {
+ ival = PyLong_AsSsize_t(iobj);
+ Py_DECREF(iobj);
+ }
+ if (ival == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ count = ival;
+ }
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ if (args[4]) {
+ headers = args[4];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+ if (args[5]) {
+ trailers = args[5];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+ if (PyFloat_Check(args[6])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ flags = _PyLong_AsInt(args[6]);
+ if (flags == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+skip_optional_pos:
+ return_value = os_sendfile_impl(module, out_fd, in_fd, offset, count, headers, trailers, flags);
+
+exit:
+ return return_value;
+}
+
+#endif /* defined(HAVE_SENDFILE) && !defined(__APPLE__) && (defined(__FreeBSD__) || defined(__DragonFly__)) */
+
+#if defined(HAVE_SENDFILE) && !defined(__APPLE__) && !(defined(__FreeBSD__) || defined(__DragonFly__))
+
+PyDoc_STRVAR(os_sendfile__doc__,
+"sendfile($module, /, out_fd, in_fd, offset, count)\n"
+"--\n"
+"\n"
+"Copy count bytes from file descriptor in_fd to file descriptor out_fd.");
+
+#define OS_SENDFILE_METHODDEF \
+ {"sendfile", (PyCFunction)(void(*)(void))os_sendfile, METH_FASTCALL|METH_KEYWORDS, os_sendfile__doc__},
+
+static PyObject *
+os_sendfile_impl(PyObject *module, int out_fd, int in_fd, PyObject *offobj,
+ Py_ssize_t count);
+
+static PyObject *
+os_sendfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"out_fd", "in_fd", "offset", "count", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "sendfile", 0};
+ PyObject *argsbuf[4];
+ int out_fd;
+ int in_fd;
+ PyObject *offobj;
+ Py_ssize_t count;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 4, 4, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (PyFloat_Check(args[0])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ out_fd = _PyLong_AsInt(args[0]);
+ if (out_fd == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ if (PyFloat_Check(args[1])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ in_fd = _PyLong_AsInt(args[1]);
+ if (in_fd == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ offobj = args[2];
+ if (PyFloat_Check(args[3])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ {
+ Py_ssize_t ival = -1;
+ PyObject *iobj = PyNumber_Index(args[3]);
+ if (iobj != NULL) {
+ ival = PyLong_AsSsize_t(iobj);
+ Py_DECREF(iobj);
+ }
+ if (ival == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ count = ival;
+ }
+ return_value = os_sendfile_impl(module, out_fd, in_fd, offobj, count);
+
+exit:
+ return return_value;
+}
+
+#endif /* defined(HAVE_SENDFILE) && !defined(__APPLE__) && !(defined(__FreeBSD__) || defined(__DragonFly__)) */
+
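The three `sendfile` variants reflect genuinely different platform APIs: macOS and FreeBSD/DragonFly accept `headers`/`trailers` and expect a socket destination, while the generic POSIX form takes only `(out_fd, in_fd, offset, count)`. A Linux-oriented sketch copying a regular file through the zero-copy path (file names are placeholders):

    import os

    with open("src.bin", "rb") as src, open("dst.bin", "wb") as dst:
        remaining = os.fstat(src.fileno()).st_size
        offset = 0
        while remaining > 0:
            sent = os.sendfile(dst.fileno(), src.fileno(), offset, remaining)
            if sent == 0:
                break
            offset += sent
            remaining -= sent
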
#if defined(__APPLE__)
PyDoc_STRVAR(os__fcopyfile__doc__,
-"_fcopyfile($module, infd, outfd, flags, /)\n"
+"_fcopyfile($module, in_fd, out_fd, flags, /)\n"
"--\n"
"\n"
"Efficiently copy content or metadata of 2 regular file descriptors (macOS).");
@@ -4987,14 +5517,14 @@ PyDoc_STRVAR(os__fcopyfile__doc__,
{"_fcopyfile", (PyCFunction)(void(*)(void))os__fcopyfile, METH_FASTCALL, os__fcopyfile__doc__},
static PyObject *
-os__fcopyfile_impl(PyObject *module, int infd, int outfd, int flags);
+os__fcopyfile_impl(PyObject *module, int in_fd, int out_fd, int flags);
static PyObject *
os__fcopyfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
- int infd;
- int outfd;
+ int in_fd;
+ int out_fd;
int flags;
if (!_PyArg_CheckPositional("_fcopyfile", nargs, 3, 3)) {
@@ -5005,8 +5535,8 @@ os__fcopyfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
"integer argument expected, got float" );
goto exit;
}
- infd = _PyLong_AsInt(args[0]);
- if (infd == -1 && PyErr_Occurred()) {
+ in_fd = _PyLong_AsInt(args[0]);
+ if (in_fd == -1 && PyErr_Occurred()) {
goto exit;
}
if (PyFloat_Check(args[1])) {
@@ -5014,8 +5544,8 @@ os__fcopyfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
"integer argument expected, got float" );
goto exit;
}
- outfd = _PyLong_AsInt(args[1]);
- if (outfd == -1 && PyErr_Occurred()) {
+ out_fd = _PyLong_AsInt(args[1]);
+ if (out_fd == -1 && PyErr_Occurred()) {
goto exit;
}
if (PyFloat_Check(args[2])) {
@@ -5027,7 +5557,7 @@ os__fcopyfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (flags == -1 && PyErr_Occurred()) {
goto exit;
}
- return_value = os__fcopyfile_impl(module, infd, outfd, flags);
+ return_value = os__fcopyfile_impl(module, in_fd, out_fd, flags);
exit:
return return_value;
@@ -5988,7 +6518,7 @@ exit:
#endif /* (defined(HAVE_POSIX_FADVISE) && !defined(POSIX_FADVISE_AIX_BUG)) */
-#if defined(HAVE_PUTENV) && defined(MS_WINDOWS)
+#if defined(MS_WINDOWS)
PyDoc_STRVAR(os_putenv__doc__,
"putenv($module, name, value, /)\n"
@@ -6034,9 +6564,9 @@ exit:
return return_value;
}
-#endif /* defined(HAVE_PUTENV) && defined(MS_WINDOWS) */
+#endif /* defined(MS_WINDOWS) */
-#if defined(HAVE_PUTENV) && !defined(MS_WINDOWS)
+#if !defined(MS_WINDOWS)
PyDoc_STRVAR(os_putenv__doc__,
"putenv($module, name, value, /)\n"
@@ -6077,9 +6607,45 @@ exit:
return return_value;
}
-#endif /* defined(HAVE_PUTENV) && !defined(MS_WINDOWS) */
+#endif /* !defined(MS_WINDOWS) */
-#if defined(HAVE_UNSETENV)
+#if defined(MS_WINDOWS)
+
+PyDoc_STRVAR(os_unsetenv__doc__,
+"unsetenv($module, name, /)\n"
+"--\n"
+"\n"
+"Delete an environment variable.");
+
+#define OS_UNSETENV_METHODDEF \
+ {"unsetenv", (PyCFunction)os_unsetenv, METH_O, os_unsetenv__doc__},
+
+static PyObject *
+os_unsetenv_impl(PyObject *module, PyObject *name);
+
+static PyObject *
+os_unsetenv(PyObject *module, PyObject *arg)
+{
+ PyObject *return_value = NULL;
+ PyObject *name;
+
+ if (!PyUnicode_Check(arg)) {
+ _PyArg_BadArgument("unsetenv", "argument", "str", arg);
+ goto exit;
+ }
+ if (PyUnicode_READY(arg) == -1) {
+ goto exit;
+ }
+ name = arg;
+ return_value = os_unsetenv_impl(module, name);
+
+exit:
+ return return_value;
+}
+
+#endif /* defined(MS_WINDOWS) */
+
+#if !defined(MS_WINDOWS)
PyDoc_STRVAR(os_unsetenv__doc__,
"unsetenv($module, name, /)\n"
@@ -6111,7 +6677,7 @@ exit:
return return_value;
}
-#endif /* defined(HAVE_UNSETENV) */
+#endif /* !defined(MS_WINDOWS) */
PyDoc_STRVAR(os_strerror__doc__,
"strerror($module, code, /)\n"
@@ -7486,6 +8052,62 @@ exit:
#endif /* defined(HAVE_MEMFD_CREATE) */
+#if (defined(TERMSIZE_USE_CONIO) || defined(TERMSIZE_USE_IOCTL))
+
+PyDoc_STRVAR(os_get_terminal_size__doc__,
+"get_terminal_size($module, fd=, /)\n"
+"--\n"
+"\n"
+"Return the size of the terminal window as (columns, lines).\n"
+"\n"
+"The optional argument fd (default standard output) specifies\n"
+"which file descriptor should be queried.\n"
+"\n"
+"If the file descriptor is not connected to a terminal, an OSError\n"
+"is thrown.\n"
+"\n"
+"This function will only be defined if an implementation is\n"
+"available for this system.\n"
+"\n"
+"shutil.get_terminal_size is the high-level function which should\n"
+"normally be used, os.get_terminal_size is the low-level implementation.");
+
+#define OS_GET_TERMINAL_SIZE_METHODDEF \
+ {"get_terminal_size", (PyCFunction)(void(*)(void))os_get_terminal_size, METH_FASTCALL, os_get_terminal_size__doc__},
+
+static PyObject *
+os_get_terminal_size_impl(PyObject *module, int fd);
+
+static PyObject *
+os_get_terminal_size(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ int fd = fileno(stdout);
+
+ if (!_PyArg_CheckPositional("get_terminal_size", nargs, 0, 1)) {
+ goto exit;
+ }
+ if (nargs < 1) {
+ goto skip_optional;
+ }
+ if (PyFloat_Check(args[0])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ fd = _PyLong_AsInt(args[0]);
+ if (fd == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+skip_optional:
+ return_value = os_get_terminal_size_impl(module, fd);
+
+exit:
+ return return_value;
+}
+
+#endif /* (defined(TERMSIZE_USE_CONIO) || defined(TERMSIZE_USE_IOCTL)) */
+
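/*
 * Editorial aside, not part of the patch: a hypothetical C-API usage sketch of
 * the function documented above, equivalent to calling os.get_terminal_size()
 * with the default fd (standard output). The helper name is made up for
 * illustration; the call either returns a (columns, lines) result or returns
 * NULL with OSError set when stdout is not a terminal, as the docstring says.
 */
#include <Python.h>

static PyObject *
query_terminal_size(void)
{
    PyObject *os_mod = PyImport_ImportModule("os");
    if (os_mod == NULL) {
        return NULL;
    }
    /* No-argument call; NULL format string means "no arguments". */
    PyObject *size = PyObject_CallMethod(os_mod, "get_terminal_size", NULL);
    Py_DECREF(os_mod);
    return size;
}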
PyDoc_STRVAR(os_cpu_count__doc__,
"cpu_count($module, /)\n"
"--\n"
@@ -7766,18 +8388,24 @@ PyDoc_STRVAR(os_DirEntry_is_symlink__doc__,
"Return True if the entry is a symbolic link; cached per entry.");
#define OS_DIRENTRY_IS_SYMLINK_METHODDEF \
- {"is_symlink", (PyCFunction)os_DirEntry_is_symlink, METH_NOARGS, os_DirEntry_is_symlink__doc__},
+ {"is_symlink", (PyCFunction)(void(*)(void))os_DirEntry_is_symlink, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_symlink__doc__},
static int
-os_DirEntry_is_symlink_impl(DirEntry *self);
+os_DirEntry_is_symlink_impl(DirEntry *self, PyTypeObject *defining_class);
static PyObject *
-os_DirEntry_is_symlink(DirEntry *self, PyObject *Py_UNUSED(ignored))
+os_DirEntry_is_symlink(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
+ static const char * const _keywords[] = { NULL};
+ static _PyArg_Parser _parser = {":is_symlink", _keywords, 0};
int _return_value;
- _return_value = os_DirEntry_is_symlink_impl(self);
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser
+ )) {
+ goto exit;
+ }
+ _return_value = os_DirEntry_is_symlink_impl(self, defining_class);
if ((_return_value == -1) && PyErr_Occurred()) {
goto exit;
}
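/*
 * Editorial aside, not part of the patch: the DirEntry hunks in this file
 * switch is_symlink/stat/is_dir/is_file to the METH_METHOD|METH_FASTCALL|
 * METH_KEYWORDS calling convention, so each impl now also receives the
 * defining class. A minimal, hypothetical sketch of how such an impl can use
 * that extra parameter, assuming PyType_GetModule (available since 3.9):
 */
static PyObject *
example_method_impl(PyObject *self, PyTypeObject *defining_class)
{
    /* Borrowed reference to the module that created defining_class;
       typically used to reach per-module state instead of C globals. */
    PyObject *module = PyType_GetModule(defining_class);
    if (module == NULL) {
        return NULL;
    }
    /* ... look up per-module state hanging off `module` here ... */
    Py_RETURN_NONE;
}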
@@ -7794,34 +8422,25 @@ PyDoc_STRVAR(os_DirEntry_stat__doc__,
"Return stat_result object for the entry; cached per entry.");
#define OS_DIRENTRY_STAT_METHODDEF \
- {"stat", (PyCFunction)(void(*)(void))os_DirEntry_stat, METH_FASTCALL|METH_KEYWORDS, os_DirEntry_stat__doc__},
+ {"stat", (PyCFunction)(void(*)(void))os_DirEntry_stat, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_stat__doc__},
static PyObject *
-os_DirEntry_stat_impl(DirEntry *self, int follow_symlinks);
+os_DirEntry_stat_impl(DirEntry *self, PyTypeObject *defining_class,
+ int follow_symlinks);
static PyObject *
-os_DirEntry_stat(DirEntry *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+os_DirEntry_stat(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
static const char * const _keywords[] = {"follow_symlinks", NULL};
- static _PyArg_Parser _parser = {NULL, _keywords, "stat", 0};
- PyObject *argsbuf[1];
- Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
+ static _PyArg_Parser _parser = {"|$p:stat", _keywords, 0};
int follow_symlinks = 1;
- args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf);
- if (!args) {
- goto exit;
- }
- if (!noptargs) {
- goto skip_optional_kwonly;
- }
- follow_symlinks = PyObject_IsTrue(args[0]);
- if (follow_symlinks < 0) {
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &follow_symlinks)) {
goto exit;
}
-skip_optional_kwonly:
- return_value = os_DirEntry_stat_impl(self, follow_symlinks);
+ return_value = os_DirEntry_stat_impl(self, defining_class, follow_symlinks);
exit:
return return_value;
@@ -7834,35 +8453,26 @@ PyDoc_STRVAR(os_DirEntry_is_dir__doc__,
"Return True if the entry is a directory; cached per entry.");
#define OS_DIRENTRY_IS_DIR_METHODDEF \
- {"is_dir", (PyCFunction)(void(*)(void))os_DirEntry_is_dir, METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_dir__doc__},
+ {"is_dir", (PyCFunction)(void(*)(void))os_DirEntry_is_dir, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_dir__doc__},
static int
-os_DirEntry_is_dir_impl(DirEntry *self, int follow_symlinks);
+os_DirEntry_is_dir_impl(DirEntry *self, PyTypeObject *defining_class,
+ int follow_symlinks);
static PyObject *
-os_DirEntry_is_dir(DirEntry *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+os_DirEntry_is_dir(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
static const char * const _keywords[] = {"follow_symlinks", NULL};
- static _PyArg_Parser _parser = {NULL, _keywords, "is_dir", 0};
- PyObject *argsbuf[1];
- Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
+ static _PyArg_Parser _parser = {"|$p:is_dir", _keywords, 0};
int follow_symlinks = 1;
int _return_value;
- args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf);
- if (!args) {
- goto exit;
- }
- if (!noptargs) {
- goto skip_optional_kwonly;
- }
- follow_symlinks = PyObject_IsTrue(args[0]);
- if (follow_symlinks < 0) {
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &follow_symlinks)) {
goto exit;
}
-skip_optional_kwonly:
- _return_value = os_DirEntry_is_dir_impl(self, follow_symlinks);
+ _return_value = os_DirEntry_is_dir_impl(self, defining_class, follow_symlinks);
if ((_return_value == -1) && PyErr_Occurred()) {
goto exit;
}
@@ -7879,35 +8489,26 @@ PyDoc_STRVAR(os_DirEntry_is_file__doc__,
"Return True if the entry is a file; cached per entry.");
#define OS_DIRENTRY_IS_FILE_METHODDEF \
- {"is_file", (PyCFunction)(void(*)(void))os_DirEntry_is_file, METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_file__doc__},
+ {"is_file", (PyCFunction)(void(*)(void))os_DirEntry_is_file, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_file__doc__},
static int
-os_DirEntry_is_file_impl(DirEntry *self, int follow_symlinks);
+os_DirEntry_is_file_impl(DirEntry *self, PyTypeObject *defining_class,
+ int follow_symlinks);
static PyObject *
-os_DirEntry_is_file(DirEntry *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+os_DirEntry_is_file(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
static const char * const _keywords[] = {"follow_symlinks", NULL};
- static _PyArg_Parser _parser = {NULL, _keywords, "is_file", 0};
- PyObject *argsbuf[1];
- Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
+ static _PyArg_Parser _parser = {"|$p:is_file", _keywords, 0};
int follow_symlinks = 1;
int _return_value;
- args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf);
- if (!args) {
- goto exit;
- }
- if (!noptargs) {
- goto skip_optional_kwonly;
- }
- follow_symlinks = PyObject_IsTrue(args[0]);
- if (follow_symlinks < 0) {
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &follow_symlinks)) {
goto exit;
}
-skip_optional_kwonly:
- _return_value = os_DirEntry_is_file_impl(self, follow_symlinks);
+ _return_value = os_DirEntry_is_file_impl(self, defining_class, follow_symlinks);
if ((_return_value == -1) && PyErr_Occurred()) {
goto exit;
}
@@ -8192,6 +8793,54 @@ exit:
#endif /* defined(MS_WINDOWS) */
+#if (defined(WIFEXITED) || defined(MS_WINDOWS))
+
+PyDoc_STRVAR(os_waitstatus_to_exitcode__doc__,
+"waitstatus_to_exitcode($module, /, status)\n"
+"--\n"
+"\n"
+"Convert a wait status to an exit code.\n"
+"\n"
+"On Unix:\n"
+"\n"
+"* If WIFEXITED(status) is true, return WEXITSTATUS(status).\n"
+"* If WIFSIGNALED(status) is true, return -WTERMSIG(status).\n"
+"* Otherwise, raise a ValueError.\n"
+"\n"
+"On Windows, return status shifted right by 8 bits.\n"
+"\n"
+"On Unix, if the process is being traced or if waitpid() was called with\n"
+"WUNTRACED option, the caller must first check if WIFSTOPPED(status) is true.\n"
+"This function must not be called if WIFSTOPPED(status) is true.");
+
+#define OS_WAITSTATUS_TO_EXITCODE_METHODDEF \
+ {"waitstatus_to_exitcode", (PyCFunction)(void(*)(void))os_waitstatus_to_exitcode, METH_FASTCALL|METH_KEYWORDS, os_waitstatus_to_exitcode__doc__},
+
+static PyObject *
+os_waitstatus_to_exitcode_impl(PyObject *module, PyObject *status_obj);
+
+static PyObject *
+os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ static const char * const _keywords[] = {"status", NULL};
+ static _PyArg_Parser _parser = {NULL, _keywords, "waitstatus_to_exitcode", 0};
+ PyObject *argsbuf[1];
+ PyObject *status_obj;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ status_obj = args[0];
+ return_value = os_waitstatus_to_exitcode_impl(module, status_obj);
+
+exit:
+ return return_value;
+}
+
+#endif /* (defined(WIFEXITED) || defined(MS_WINDOWS)) */
+
#ifndef OS_TTYNAME_METHODDEF
#define OS_TTYNAME_METHODDEF
#endif /* !defined(OS_TTYNAME_METHODDEF) */
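/*
 * Editorial aside, not part of the patch: the conversion described in the
 * os.waitstatus_to_exitcode docstring above, written out as a stand-alone
 * sketch for the Unix case (assumes <sys/wait.h>; the helper name is
 * hypothetical). On Windows the docstring's rule is simply `status >> 8`.
 */
#include <sys/wait.h>

static int
waitstatus_to_exitcode_sketch(int status, int *exitcode)
{
    if (WIFEXITED(status)) {
        *exitcode = WEXITSTATUS(status);   /* normal exit: the exit status */
        return 0;
    }
    if (WIFSIGNALED(status)) {
        *exitcode = -WTERMSIG(status);     /* killed by a signal: negated signal number */
        return 0;
    }
    /* Anything else (e.g. WIFSTOPPED) is rejected, mirroring the ValueError case. */
    return -1;
}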
@@ -8384,10 +9033,18 @@ exit:
#define OS_GETPID_METHODDEF
#endif /* !defined(OS_GETPID_METHODDEF) */
+#ifndef OS_GETGROUPLIST_METHODDEF
+ #define OS_GETGROUPLIST_METHODDEF
+#endif /* !defined(OS_GETGROUPLIST_METHODDEF) */
+
#ifndef OS_GETGROUPS_METHODDEF
#define OS_GETGROUPS_METHODDEF
#endif /* !defined(OS_GETGROUPS_METHODDEF) */
+#ifndef OS_INITGROUPS_METHODDEF
+ #define OS_INITGROUPS_METHODDEF
+#endif /* !defined(OS_INITGROUPS_METHODDEF) */
+
#ifndef OS_GETPGID_METHODDEF
#define OS_GETPGID_METHODDEF
#endif /* !defined(OS_GETPGID_METHODDEF) */
@@ -8472,6 +9129,10 @@ exit:
#define OS_WAIT_METHODDEF
#endif /* !defined(OS_WAIT_METHODDEF) */
+#ifndef OS_PIDFD_OPEN_METHODDEF
+ #define OS_PIDFD_OPEN_METHODDEF
+#endif /* !defined(OS_PIDFD_OPEN_METHODDEF) */
+
#ifndef OS_READLINK_METHODDEF
#define OS_READLINK_METHODDEF
#endif /* !defined(OS_READLINK_METHODDEF) */
@@ -8520,6 +9181,10 @@ exit:
#define OS_PREADV_METHODDEF
#endif /* !defined(OS_PREADV_METHODDEF) */
+#ifndef OS_SENDFILE_METHODDEF
+ #define OS_SENDFILE_METHODDEF
+#endif /* !defined(OS_SENDFILE_METHODDEF) */
+
#ifndef OS__FCOPYFILE_METHODDEF
#define OS__FCOPYFILE_METHODDEF
#endif /* !defined(OS__FCOPYFILE_METHODDEF) */
@@ -8696,6 +9361,10 @@ exit:
#define OS_MEMFD_CREATE_METHODDEF
#endif /* !defined(OS_MEMFD_CREATE_METHODDEF) */
+#ifndef OS_GET_TERMINAL_SIZE_METHODDEF
+ #define OS_GET_TERMINAL_SIZE_METHODDEF
+#endif /* !defined(OS_GET_TERMINAL_SIZE_METHODDEF) */
+
#ifndef OS_GET_HANDLE_INHERITABLE_METHODDEF
#define OS_GET_HANDLE_INHERITABLE_METHODDEF
#endif /* !defined(OS_GET_HANDLE_INHERITABLE_METHODDEF) */
@@ -8723,4 +9392,8 @@ exit:
#ifndef OS__REMOVE_DLL_DIRECTORY_METHODDEF
#define OS__REMOVE_DLL_DIRECTORY_METHODDEF
#endif /* !defined(OS__REMOVE_DLL_DIRECTORY_METHODDEF) */
-/*[clinic end generated code: output=edb5a840b51fcaa8 input=a9049054013a1b77]*/
+
+#ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF
+ #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF
+#endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */
+/*[clinic end generated code: output=005919eaaef3f8e6 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/selectmodule.c.h b/Modules/clinic/selectmodule.c.h
index a9e14840..c1072e6e 100644
--- a/Modules/clinic/selectmodule.c.h
+++ b/Modules/clinic/selectmodule.c.h
@@ -963,11 +963,11 @@ select_kqueue(PyTypeObject *type, PyObject *args, PyObject *kwargs)
{
PyObject *return_value = NULL;
- if ((type == &kqueue_queue_Type) &&
+ if ((type == _selectstate_global->kqueue_queue_Type) &&
!_PyArg_NoPositional("kqueue", args)) {
goto exit;
}
- if ((type == &kqueue_queue_Type) &&
+ if ((type == _selectstate_global->kqueue_queue_Type) &&
!_PyArg_NoKeywords("kqueue", kwargs)) {
goto exit;
}
@@ -1219,4 +1219,4 @@ exit:
#ifndef SELECT_KQUEUE_CONTROL_METHODDEF
#define SELECT_KQUEUE_CONTROL_METHODDEF
#endif /* !defined(SELECT_KQUEUE_CONTROL_METHODDEF) */
-/*[clinic end generated code: output=86010dde10ca89c6 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=ef42c3485a8fe3a0 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/sha1module.c.h b/Modules/clinic/sha1module.c.h
index 001c6af7..fc37b1ab 100644
--- a/Modules/clinic/sha1module.c.h
+++ b/Modules/clinic/sha1module.c.h
@@ -66,7 +66,7 @@ PyDoc_STRVAR(SHA1Type_update__doc__,
{"update", (PyCFunction)SHA1Type_update, METH_O, SHA1Type_update__doc__},
PyDoc_STRVAR(_sha1_sha1__doc__,
-"sha1($module, /, string=b\'\')\n"
+"sha1($module, /, string=b\'\', *, usedforsecurity=True)\n"
"--\n"
"\n"
"Return a new SHA1 hash object; optionally initialized with a string.");
@@ -75,17 +75,18 @@ PyDoc_STRVAR(_sha1_sha1__doc__,
{"sha1", (PyCFunction)(void(*)(void))_sha1_sha1, METH_FASTCALL|METH_KEYWORDS, _sha1_sha1__doc__},
static PyObject *
-_sha1_sha1_impl(PyObject *module, PyObject *string);
+_sha1_sha1_impl(PyObject *module, PyObject *string, int usedforsecurity);
static PyObject *
_sha1_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"string", NULL};
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
static _PyArg_Parser _parser = {NULL, _keywords, "sha1", 0};
- PyObject *argsbuf[1];
+ PyObject *argsbuf[2];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
PyObject *string = NULL;
+ int usedforsecurity = 1;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
if (!args) {
@@ -94,11 +95,24 @@ _sha1_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *
if (!noptargs) {
goto skip_optional_pos;
}
- string = args[0];
+ if (args[0]) {
+ string = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
skip_optional_pos:
- return_value = _sha1_sha1_impl(module, string);
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _sha1_sha1_impl(module, string, usedforsecurity);
exit:
return return_value;
}
-/*[clinic end generated code: output=1ae7e73ec84a27d5 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=3ddd637ae17e14b3 input=a9049054013a1b77]*/
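/*
 * Editorial aside, not part of the patch: the sha1/sha256/sha512 hunks add a
 * keyword-only `usedforsecurity` parameter. A hypothetical C-API usage sketch
 * of the new keyword, equivalent to `_sha1.sha1(usedforsecurity=False)`;
 * the helper name is made up for illustration.
 */
#include <Python.h>

static PyObject *
new_non_security_sha1(void)
{
    PyObject *mod = PyImport_ImportModule("_sha1");
    if (mod == NULL) {
        return NULL;
    }
    PyObject *func = PyObject_GetAttrString(mod, "sha1");
    Py_DECREF(mod);
    if (func == NULL) {
        return NULL;
    }
    PyObject *args = PyTuple_New(0);
    PyObject *kwargs = Py_BuildValue("{s:O}", "usedforsecurity", Py_False);
    PyObject *hash_obj = NULL;
    if (args != NULL && kwargs != NULL) {
        /* Positional tuple is empty; the flag is passed as a keyword only. */
        hash_obj = PyObject_Call(func, args, kwargs);
    }
    Py_XDECREF(args);
    Py_XDECREF(kwargs);
    Py_DECREF(func);
    return hash_obj;
}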
diff --git a/Modules/clinic/sha256module.c.h b/Modules/clinic/sha256module.c.h
index 658abb15..2a788ea9 100644
--- a/Modules/clinic/sha256module.c.h
+++ b/Modules/clinic/sha256module.c.h
@@ -66,7 +66,7 @@ PyDoc_STRVAR(SHA256Type_update__doc__,
{"update", (PyCFunction)SHA256Type_update, METH_O, SHA256Type_update__doc__},
PyDoc_STRVAR(_sha256_sha256__doc__,
-"sha256($module, /, string=b\'\')\n"
+"sha256($module, /, string=b\'\', *, usedforsecurity=True)\n"
"--\n"
"\n"
"Return a new SHA-256 hash object; optionally initialized with a string.");
@@ -75,17 +75,18 @@ PyDoc_STRVAR(_sha256_sha256__doc__,
{"sha256", (PyCFunction)(void(*)(void))_sha256_sha256, METH_FASTCALL|METH_KEYWORDS, _sha256_sha256__doc__},
static PyObject *
-_sha256_sha256_impl(PyObject *module, PyObject *string);
+_sha256_sha256_impl(PyObject *module, PyObject *string, int usedforsecurity);
static PyObject *
_sha256_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"string", NULL};
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
static _PyArg_Parser _parser = {NULL, _keywords, "sha256", 0};
- PyObject *argsbuf[1];
+ PyObject *argsbuf[2];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
PyObject *string = NULL;
+ int usedforsecurity = 1;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
if (!args) {
@@ -94,16 +95,29 @@ _sha256_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje
if (!noptargs) {
goto skip_optional_pos;
}
- string = args[0];
+ if (args[0]) {
+ string = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
skip_optional_pos:
- return_value = _sha256_sha256_impl(module, string);
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _sha256_sha256_impl(module, string, usedforsecurity);
exit:
return return_value;
}
PyDoc_STRVAR(_sha256_sha224__doc__,
-"sha224($module, /, string=b\'\')\n"
+"sha224($module, /, string=b\'\', *, usedforsecurity=True)\n"
"--\n"
"\n"
"Return a new SHA-224 hash object; optionally initialized with a string.");
@@ -112,17 +126,18 @@ PyDoc_STRVAR(_sha256_sha224__doc__,
{"sha224", (PyCFunction)(void(*)(void))_sha256_sha224, METH_FASTCALL|METH_KEYWORDS, _sha256_sha224__doc__},
static PyObject *
-_sha256_sha224_impl(PyObject *module, PyObject *string);
+_sha256_sha224_impl(PyObject *module, PyObject *string, int usedforsecurity);
static PyObject *
_sha256_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"string", NULL};
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
static _PyArg_Parser _parser = {NULL, _keywords, "sha224", 0};
- PyObject *argsbuf[1];
+ PyObject *argsbuf[2];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
PyObject *string = NULL;
+ int usedforsecurity = 1;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
if (!args) {
@@ -131,11 +146,24 @@ _sha256_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje
if (!noptargs) {
goto skip_optional_pos;
}
- string = args[0];
+ if (args[0]) {
+ string = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
skip_optional_pos:
- return_value = _sha256_sha224_impl(module, string);
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _sha256_sha224_impl(module, string, usedforsecurity);
exit:
return return_value;
}
-/*[clinic end generated code: output=c54d0956ec88409d input=a9049054013a1b77]*/
+/*[clinic end generated code: output=c8cca8adbe72ec9a input=a9049054013a1b77]*/
diff --git a/Modules/clinic/sha512module.c.h b/Modules/clinic/sha512module.c.h
index 459a9341..b8185b62 100644
--- a/Modules/clinic/sha512module.c.h
+++ b/Modules/clinic/sha512module.c.h
@@ -66,7 +66,7 @@ PyDoc_STRVAR(SHA512Type_update__doc__,
{"update", (PyCFunction)SHA512Type_update, METH_O, SHA512Type_update__doc__},
PyDoc_STRVAR(_sha512_sha512__doc__,
-"sha512($module, /, string=b\'\')\n"
+"sha512($module, /, string=b\'\', *, usedforsecurity=True)\n"
"--\n"
"\n"
"Return a new SHA-512 hash object; optionally initialized with a string.");
@@ -75,17 +75,18 @@ PyDoc_STRVAR(_sha512_sha512__doc__,
{"sha512", (PyCFunction)(void(*)(void))_sha512_sha512, METH_FASTCALL|METH_KEYWORDS, _sha512_sha512__doc__},
static PyObject *
-_sha512_sha512_impl(PyObject *module, PyObject *string);
+_sha512_sha512_impl(PyObject *module, PyObject *string, int usedforsecurity);
static PyObject *
_sha512_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"string", NULL};
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
static _PyArg_Parser _parser = {NULL, _keywords, "sha512", 0};
- PyObject *argsbuf[1];
+ PyObject *argsbuf[2];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
PyObject *string = NULL;
+ int usedforsecurity = 1;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
if (!args) {
@@ -94,16 +95,29 @@ _sha512_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje
if (!noptargs) {
goto skip_optional_pos;
}
- string = args[0];
+ if (args[0]) {
+ string = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
skip_optional_pos:
- return_value = _sha512_sha512_impl(module, string);
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _sha512_sha512_impl(module, string, usedforsecurity);
exit:
return return_value;
}
PyDoc_STRVAR(_sha512_sha384__doc__,
-"sha384($module, /, string=b\'\')\n"
+"sha384($module, /, string=b\'\', *, usedforsecurity=True)\n"
"--\n"
"\n"
"Return a new SHA-384 hash object; optionally initialized with a string.");
@@ -112,17 +126,18 @@ PyDoc_STRVAR(_sha512_sha384__doc__,
{"sha384", (PyCFunction)(void(*)(void))_sha512_sha384, METH_FASTCALL|METH_KEYWORDS, _sha512_sha384__doc__},
static PyObject *
-_sha512_sha384_impl(PyObject *module, PyObject *string);
+_sha512_sha384_impl(PyObject *module, PyObject *string, int usedforsecurity);
static PyObject *
_sha512_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
- static const char * const _keywords[] = {"string", NULL};
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
static _PyArg_Parser _parser = {NULL, _keywords, "sha384", 0};
- PyObject *argsbuf[1];
+ PyObject *argsbuf[2];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
PyObject *string = NULL;
+ int usedforsecurity = 1;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
if (!args) {
@@ -131,11 +146,24 @@ _sha512_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje
if (!noptargs) {
goto skip_optional_pos;
}
- string = args[0];
+ if (args[0]) {
+ string = args[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
skip_optional_pos:
- return_value = _sha512_sha384_impl(module, string);
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = _sha512_sha384_impl(module, string, usedforsecurity);
exit:
return return_value;
}
-/*[clinic end generated code: output=580df4b667084a7e input=a9049054013a1b77]*/
+/*[clinic end generated code: output=bbfa72d8703c82b5 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/signalmodule.c.h b/Modules/clinic/signalmodule.c.h
index 3cb1db14..7f60e28a 100644
--- a/Modules/clinic/signalmodule.c.h
+++ b/Modules/clinic/signalmodule.c.h
@@ -611,6 +611,76 @@ exit:
#endif /* defined(HAVE_PTHREAD_KILL) */
+#if (defined(__linux__) && defined(__NR_pidfd_send_signal))
+
+PyDoc_STRVAR(signal_pidfd_send_signal__doc__,
+"pidfd_send_signal($module, pidfd, signalnum, siginfo=None, flags=0, /)\n"
+"--\n"
+"\n"
+"Send a signal to a process referred to by a pid file descriptor.");
+
+#define SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF \
+ {"pidfd_send_signal", (PyCFunction)(void(*)(void))signal_pidfd_send_signal, METH_FASTCALL, signal_pidfd_send_signal__doc__},
+
+static PyObject *
+signal_pidfd_send_signal_impl(PyObject *module, int pidfd, int signalnum,
+ PyObject *siginfo, int flags);
+
+static PyObject *
+signal_pidfd_send_signal(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ int pidfd;
+ int signalnum;
+ PyObject *siginfo = Py_None;
+ int flags = 0;
+
+ if (!_PyArg_CheckPositional("pidfd_send_signal", nargs, 2, 4)) {
+ goto exit;
+ }
+ if (PyFloat_Check(args[0])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ pidfd = _PyLong_AsInt(args[0]);
+ if (pidfd == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ if (PyFloat_Check(args[1])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ signalnum = _PyLong_AsInt(args[1]);
+ if (signalnum == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ if (nargs < 3) {
+ goto skip_optional;
+ }
+ siginfo = args[2];
+ if (nargs < 4) {
+ goto skip_optional;
+ }
+ if (PyFloat_Check(args[3])) {
+ PyErr_SetString(PyExc_TypeError,
+ "integer argument expected, got float" );
+ goto exit;
+ }
+ flags = _PyLong_AsInt(args[3]);
+ if (flags == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+skip_optional:
+ return_value = signal_pidfd_send_signal_impl(module, pidfd, signalnum, siginfo, flags);
+
+exit:
+ return return_value;
+}
+
+#endif /* (defined(__linux__) && defined(__NR_pidfd_send_signal)) */
+
#ifndef SIGNAL_ALARM_METHODDEF
#define SIGNAL_ALARM_METHODDEF
#endif /* !defined(SIGNAL_ALARM_METHODDEF) */
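/*
 * Editorial aside, not part of the patch: the wrapper above is only compiled
 * when __NR_pidfd_send_signal is defined, i.e. when the raw Linux system call
 * is available. A hypothetical stand-alone sketch of that call with the same
 * defaults as the Python-level signature (siginfo=None -> NULL, flags=0):
 */
#include <sys/syscall.h>
#include <unistd.h>

static int
send_signal_via_pidfd(int pidfd, int signalnum)
{
    /* Returns 0 on success, -1 with errno set on failure. */
    return (int)syscall(__NR_pidfd_send_signal, pidfd, signalnum, NULL, 0);
}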
@@ -658,4 +728,8 @@ exit:
#ifndef SIGNAL_PTHREAD_KILL_METHODDEF
#define SIGNAL_PTHREAD_KILL_METHODDEF
#endif /* !defined(SIGNAL_PTHREAD_KILL_METHODDEF) */
-/*[clinic end generated code: output=3320b8f73c20ba60 input=a9049054013a1b77]*/
+
+#ifndef SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF
+ #define SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF
+#endif /* !defined(SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF) */
+/*[clinic end generated code: output=b41b4b6bd9ad4da2 input=a9049054013a1b77]*/
diff --git a/Modules/cmathmodule.c b/Modules/cmathmodule.c
index 02c09bbe..5eac4b49 100644
--- a/Modules/cmathmodule.c
+++ b/Modules/cmathmodule.c
@@ -3,6 +3,7 @@
/* much code borrowed from mathmodule.c */
#include "Python.h"
+#include "pycore_dtoa.h"
#include "_math.h"
/* we need DBL_MAX, DBL_MIN, DBL_EPSILON, DBL_MANT_DIG and FLT_RADIX from
float.h. We assume that FLT_RADIX is either 2 or 16. */
@@ -17,7 +18,7 @@ module cmath
/*[python input]
class Py_complex_protected_converter(Py_complex_converter):
def modify(self):
- return 'errno = 0; PyFPE_START_PROTECT("complex function", goto exit);'
+ return 'errno = 0;'
class Py_complex_protected_return_converter(CReturnConverter):
@@ -26,7 +27,6 @@ class Py_complex_protected_return_converter(CReturnConverter):
def render(self, function, data):
self.declare(data)
data.return_conversion.append("""
-PyFPE_END_PROTECT(_return_value);
if (errno == EDOM) {
PyErr_SetString(PyExc_ValueError, "math domain error");
goto exit;
@@ -40,7 +40,7 @@ else {
}
""".strip())
[python start generated code]*/
-/*[python end generated code: output=da39a3ee5e6b4b0d input=345daa075b1028e7]*/
+/*[python end generated code: output=da39a3ee5e6b4b0d input=8b27adb674c08321]*/
#if (FLT_RADIX != 2 && FLT_RADIX != 16)
#error "Modules/cmathmodule.c expects FLT_RADIX to be 2 or 16"
@@ -960,7 +960,6 @@ cmath_log_impl(PyObject *module, Py_complex x, PyObject *y_obj)
Py_complex y;
errno = 0;
- PyFPE_START_PROTECT("complex function", return 0)
x = c_log(x);
if (y_obj != NULL) {
y = PyComplex_AsCComplex(y_obj);
@@ -970,7 +969,6 @@ cmath_log_impl(PyObject *module, Py_complex x, PyObject *y_obj)
y = c_log(y);
x = _Py_c_quot(x, y);
}
- PyFPE_END_PROTECT(x)
if (errno != 0)
return math_error();
return PyComplex_FromCComplex(x);
@@ -1008,9 +1006,7 @@ cmath_phase_impl(PyObject *module, Py_complex z)
double phi;
errno = 0;
- PyFPE_START_PROTECT("arg function", return 0)
phi = c_atan2(z);
- PyFPE_END_PROTECT(phi)
if (errno != 0)
return math_error();
else
@@ -1035,10 +1031,8 @@ cmath_polar_impl(PyObject *module, Py_complex z)
double r, phi;
errno = 0;
- PyFPE_START_PROTECT("polar function", return 0)
phi = c_atan2(z); /* should not cause any exception */
r = _Py_c_abs(z); /* sets errno to ERANGE on overflow */
- PyFPE_END_PROTECT(r)
if (errno != 0)
return math_error();
else
@@ -1074,7 +1068,6 @@ cmath_rect_impl(PyObject *module, double r, double phi)
{
Py_complex z;
errno = 0;
- PyFPE_START_PROTECT("rect function", return 0)
/* deal with special values */
if (!Py_IS_FINITE(r) || !Py_IS_FINITE(phi)) {
@@ -1116,7 +1109,6 @@ cmath_rect_impl(PyObject *module, double r, double phi)
errno = 0;
}
- PyFPE_END_PROTECT(z)
if (errno != 0)
return math_error();
else
diff --git a/Modules/errnomodule.c b/Modules/errnomodule.c
index 06ed53a6..d99bed45 100644
--- a/Modules/errnomodule.c
+++ b/Modules/errnomodule.c
@@ -46,66 +46,57 @@ static PyMethodDef errno_methods[] = {
/* Helper function doing the dictionary inserting */
-static void
-_inscode(PyObject *d, PyObject *de, const char *name, int code)
+static int
+_add_errcode(PyObject *module_dict, PyObject *error_dict, const char *name_str, int code_int)
{
- PyObject *u = PyUnicode_FromString(name);
- PyObject *v = PyLong_FromLong((long) code);
-
- /* Don't bother checking for errors; they'll be caught at the end
- * of the module initialization function by the caller of
- * initerrno().
- */
- if (u && v) {
- /* insert in modules dict */
- PyDict_SetItem(d, u, v);
- /* insert in errorcode dict */
- PyDict_SetItem(de, v, u);
+ PyObject *name = PyUnicode_FromString(name_str);
+ if (!name) {
+ return -1;
}
- Py_XDECREF(u);
- Py_XDECREF(v);
-}
-PyDoc_STRVAR(errno__doc__,
-"This module makes available standard errno system symbols.\n\
-\n\
-The value of each symbol is the corresponding integer value,\n\
-e.g., on most systems, errno.ENOENT equals the integer 2.\n\
-\n\
-The dictionary errno.errorcode maps numeric codes to symbol names,\n\
-e.g., errno.errorcode[2] could be the string 'ENOENT'.\n\
-\n\
-Symbols that are not relevant to the underlying system are not defined.\n\
-\n\
-To map error codes to error messages, use the function os.strerror(),\n\
-e.g. os.strerror(2) could return 'No such file or directory'.");
+ PyObject *code = PyLong_FromLong(code_int);
+ if (!code) {
+ Py_DECREF(name);
+ return -1;
+ }
-static struct PyModuleDef errnomodule = {
- PyModuleDef_HEAD_INIT,
- "errno",
- errno__doc__,
- -1,
- errno_methods,
- NULL,
- NULL,
- NULL,
- NULL
-};
+ int ret = -1;
+ /* insert in modules dict */
+ if (PyDict_SetItem(module_dict, name, code) < 0) {
+ goto end;
+ }
+ /* insert in errorcode dict */
+ if (PyDict_SetItem(error_dict, code, name) < 0) {
+ goto end;
+ }
+ ret = 0;
+end:
+ Py_DECREF(name);
+ Py_DECREF(code);
+ return ret;
+}
-PyMODINIT_FUNC
-PyInit_errno(void)
+static int
+errno_exec(PyObject *module)
{
- PyObject *m, *d, *de;
- m = PyModule_Create(&errnomodule);
- if (m == NULL)
- return NULL;
- d = PyModule_GetDict(m);
- de = PyDict_New();
- if (!d || !de || PyDict_SetItemString(d, "errorcode", de) < 0)
- return NULL;
+ PyObject *module_dict = PyModule_GetDict(module);
+ PyObject *error_dict = PyDict_New();
+ if (!module_dict || !error_dict) {
+ return -1;
+ }
+ if (PyDict_SetItemString(module_dict, "errorcode", error_dict) < 0) {
+ Py_DECREF(error_dict);
+ return -1;
+ }
/* Macro so I don't have to edit each and every line below... */
-#define inscode(d, ds, de, name, code, comment) _inscode(d, de, name, code)
+#define add_errcode(name, code, comment) \
+ do { \
+ if (_add_errcode(module_dict, error_dict, name, code) < 0) { \
+ Py_DECREF(error_dict); \
+ return -1; \
+ } \
+ } while (0);
/*
* The names and comments are borrowed from linux/include/errno.h,
@@ -116,820 +107,854 @@ PyInit_errno(void)
*/
#ifdef ENODEV
- inscode(d, ds, de, "ENODEV", ENODEV, "No such device");
+ add_errcode("ENODEV", ENODEV, "No such device");
#endif
#ifdef ENOCSI
- inscode(d, ds, de, "ENOCSI", ENOCSI, "No CSI structure available");
+ add_errcode("ENOCSI", ENOCSI, "No CSI structure available");
#endif
#ifdef EHOSTUNREACH
- inscode(d, ds, de, "EHOSTUNREACH", EHOSTUNREACH, "No route to host");
+ add_errcode("EHOSTUNREACH", EHOSTUNREACH, "No route to host");
#else
#ifdef WSAEHOSTUNREACH
- inscode(d, ds, de, "EHOSTUNREACH", WSAEHOSTUNREACH, "No route to host");
+ add_errcode("EHOSTUNREACH", WSAEHOSTUNREACH, "No route to host");
#endif
#endif
#ifdef ENOMSG
- inscode(d, ds, de, "ENOMSG", ENOMSG, "No message of desired type");
+ add_errcode("ENOMSG", ENOMSG, "No message of desired type");
#endif
#ifdef EUCLEAN
- inscode(d, ds, de, "EUCLEAN", EUCLEAN, "Structure needs cleaning");
+ add_errcode("EUCLEAN", EUCLEAN, "Structure needs cleaning");
#endif
#ifdef EL2NSYNC
- inscode(d, ds, de, "EL2NSYNC", EL2NSYNC, "Level 2 not synchronized");
+ add_errcode("EL2NSYNC", EL2NSYNC, "Level 2 not synchronized");
#endif
#ifdef EL2HLT
- inscode(d, ds, de, "EL2HLT", EL2HLT, "Level 2 halted");
+ add_errcode("EL2HLT", EL2HLT, "Level 2 halted");
#endif
#ifdef ENODATA
- inscode(d, ds, de, "ENODATA", ENODATA, "No data available");
+ add_errcode("ENODATA", ENODATA, "No data available");
#endif
#ifdef ENOTBLK
- inscode(d, ds, de, "ENOTBLK", ENOTBLK, "Block device required");
+ add_errcode("ENOTBLK", ENOTBLK, "Block device required");
#endif
#ifdef ENOSYS
- inscode(d, ds, de, "ENOSYS", ENOSYS, "Function not implemented");
+ add_errcode("ENOSYS", ENOSYS, "Function not implemented");
#endif
#ifdef EPIPE
- inscode(d, ds, de, "EPIPE", EPIPE, "Broken pipe");
+ add_errcode("EPIPE", EPIPE, "Broken pipe");
#endif
#ifdef EINVAL
- inscode(d, ds, de, "EINVAL", EINVAL, "Invalid argument");
+ add_errcode("EINVAL", EINVAL, "Invalid argument");
#else
#ifdef WSAEINVAL
- inscode(d, ds, de, "EINVAL", WSAEINVAL, "Invalid argument");
+ add_errcode("EINVAL", WSAEINVAL, "Invalid argument");
#endif
#endif
#ifdef EOVERFLOW
- inscode(d, ds, de, "EOVERFLOW", EOVERFLOW, "Value too large for defined data type");
+ add_errcode("EOVERFLOW", EOVERFLOW, "Value too large for defined data type");
#endif
#ifdef EADV
- inscode(d, ds, de, "EADV", EADV, "Advertise error");
+ add_errcode("EADV", EADV, "Advertise error");
#endif
#ifdef EINTR
- inscode(d, ds, de, "EINTR", EINTR, "Interrupted system call");
+ add_errcode("EINTR", EINTR, "Interrupted system call");
#else
#ifdef WSAEINTR
- inscode(d, ds, de, "EINTR", WSAEINTR, "Interrupted system call");
+ add_errcode("EINTR", WSAEINTR, "Interrupted system call");
#endif
#endif
#ifdef EUSERS
- inscode(d, ds, de, "EUSERS", EUSERS, "Too many users");
+ add_errcode("EUSERS", EUSERS, "Too many users");
#else
#ifdef WSAEUSERS
- inscode(d, ds, de, "EUSERS", WSAEUSERS, "Too many users");
+ add_errcode("EUSERS", WSAEUSERS, "Too many users");
#endif
#endif
#ifdef ENOTEMPTY
- inscode(d, ds, de, "ENOTEMPTY", ENOTEMPTY, "Directory not empty");
+ add_errcode("ENOTEMPTY", ENOTEMPTY, "Directory not empty");
#else
#ifdef WSAENOTEMPTY
- inscode(d, ds, de, "ENOTEMPTY", WSAENOTEMPTY, "Directory not empty");
+ add_errcode("ENOTEMPTY", WSAENOTEMPTY, "Directory not empty");
#endif
#endif
#ifdef ENOBUFS
- inscode(d, ds, de, "ENOBUFS", ENOBUFS, "No buffer space available");
+ add_errcode("ENOBUFS", ENOBUFS, "No buffer space available");
#else
#ifdef WSAENOBUFS
- inscode(d, ds, de, "ENOBUFS", WSAENOBUFS, "No buffer space available");
+ add_errcode("ENOBUFS", WSAENOBUFS, "No buffer space available");
#endif
#endif
#ifdef EPROTO
- inscode(d, ds, de, "EPROTO", EPROTO, "Protocol error");
+ add_errcode("EPROTO", EPROTO, "Protocol error");
#endif
#ifdef EREMOTE
- inscode(d, ds, de, "EREMOTE", EREMOTE, "Object is remote");
+ add_errcode("EREMOTE", EREMOTE, "Object is remote");
#else
#ifdef WSAEREMOTE
- inscode(d, ds, de, "EREMOTE", WSAEREMOTE, "Object is remote");
+ add_errcode("EREMOTE", WSAEREMOTE, "Object is remote");
#endif
#endif
#ifdef ENAVAIL
- inscode(d, ds, de, "ENAVAIL", ENAVAIL, "No XENIX semaphores available");
+ add_errcode("ENAVAIL", ENAVAIL, "No XENIX semaphores available");
#endif
#ifdef ECHILD
- inscode(d, ds, de, "ECHILD", ECHILD, "No child processes");
+ add_errcode("ECHILD", ECHILD, "No child processes");
#endif
#ifdef ELOOP
- inscode(d, ds, de, "ELOOP", ELOOP, "Too many symbolic links encountered");
+ add_errcode("ELOOP", ELOOP, "Too many symbolic links encountered");
#else
#ifdef WSAELOOP
- inscode(d, ds, de, "ELOOP", WSAELOOP, "Too many symbolic links encountered");
+ add_errcode("ELOOP", WSAELOOP, "Too many symbolic links encountered");
#endif
#endif
#ifdef EXDEV
- inscode(d, ds, de, "EXDEV", EXDEV, "Cross-device link");
+ add_errcode("EXDEV", EXDEV, "Cross-device link");
#endif
#ifdef E2BIG
- inscode(d, ds, de, "E2BIG", E2BIG, "Arg list too long");
+ add_errcode("E2BIG", E2BIG, "Arg list too long");
#endif
#ifdef ESRCH
- inscode(d, ds, de, "ESRCH", ESRCH, "No such process");
+ add_errcode("ESRCH", ESRCH, "No such process");
#endif
#ifdef EMSGSIZE
- inscode(d, ds, de, "EMSGSIZE", EMSGSIZE, "Message too long");
+ add_errcode("EMSGSIZE", EMSGSIZE, "Message too long");
#else
#ifdef WSAEMSGSIZE
- inscode(d, ds, de, "EMSGSIZE", WSAEMSGSIZE, "Message too long");
+ add_errcode("EMSGSIZE", WSAEMSGSIZE, "Message too long");
#endif
#endif
#ifdef EAFNOSUPPORT
- inscode(d, ds, de, "EAFNOSUPPORT", EAFNOSUPPORT, "Address family not supported by protocol");
+ add_errcode("EAFNOSUPPORT", EAFNOSUPPORT, "Address family not supported by protocol");
#else
#ifdef WSAEAFNOSUPPORT
- inscode(d, ds, de, "EAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol");
+ add_errcode("EAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol");
#endif
#endif
#ifdef EBADR
- inscode(d, ds, de, "EBADR", EBADR, "Invalid request descriptor");
+ add_errcode("EBADR", EBADR, "Invalid request descriptor");
#endif
#ifdef EHOSTDOWN
- inscode(d, ds, de, "EHOSTDOWN", EHOSTDOWN, "Host is down");
+ add_errcode("EHOSTDOWN", EHOSTDOWN, "Host is down");
#else
#ifdef WSAEHOSTDOWN
- inscode(d, ds, de, "EHOSTDOWN", WSAEHOSTDOWN, "Host is down");
+ add_errcode("EHOSTDOWN", WSAEHOSTDOWN, "Host is down");
#endif
#endif
#ifdef EPFNOSUPPORT
- inscode(d, ds, de, "EPFNOSUPPORT", EPFNOSUPPORT, "Protocol family not supported");
+ add_errcode("EPFNOSUPPORT", EPFNOSUPPORT, "Protocol family not supported");
#else
#ifdef WSAEPFNOSUPPORT
- inscode(d, ds, de, "EPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported");
+ add_errcode("EPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported");
#endif
#endif
#ifdef ENOPROTOOPT
- inscode(d, ds, de, "ENOPROTOOPT", ENOPROTOOPT, "Protocol not available");
+ add_errcode("ENOPROTOOPT", ENOPROTOOPT, "Protocol not available");
#else
#ifdef WSAENOPROTOOPT
- inscode(d, ds, de, "ENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available");
+ add_errcode("ENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available");
#endif
#endif
#ifdef EBUSY
- inscode(d, ds, de, "EBUSY", EBUSY, "Device or resource busy");
+ add_errcode("EBUSY", EBUSY, "Device or resource busy");
#endif
#ifdef EWOULDBLOCK
- inscode(d, ds, de, "EWOULDBLOCK", EWOULDBLOCK, "Operation would block");
+ add_errcode("EWOULDBLOCK", EWOULDBLOCK, "Operation would block");
#else
#ifdef WSAEWOULDBLOCK
- inscode(d, ds, de, "EWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block");
+ add_errcode("EWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block");
#endif
#endif
#ifdef EBADFD
- inscode(d, ds, de, "EBADFD", EBADFD, "File descriptor in bad state");
+ add_errcode("EBADFD", EBADFD, "File descriptor in bad state");
#endif
#ifdef EDOTDOT
- inscode(d, ds, de, "EDOTDOT", EDOTDOT, "RFS specific error");
+ add_errcode("EDOTDOT", EDOTDOT, "RFS specific error");
#endif
#ifdef EISCONN
- inscode(d, ds, de, "EISCONN", EISCONN, "Transport endpoint is already connected");
+ add_errcode("EISCONN", EISCONN, "Transport endpoint is already connected");
#else
#ifdef WSAEISCONN
- inscode(d, ds, de, "EISCONN", WSAEISCONN, "Transport endpoint is already connected");
+ add_errcode("EISCONN", WSAEISCONN, "Transport endpoint is already connected");
#endif
#endif
#ifdef ENOANO
- inscode(d, ds, de, "ENOANO", ENOANO, "No anode");
+ add_errcode("ENOANO", ENOANO, "No anode");
#endif
#ifdef ESHUTDOWN
- inscode(d, ds, de, "ESHUTDOWN", ESHUTDOWN, "Cannot send after transport endpoint shutdown");
+ add_errcode("ESHUTDOWN", ESHUTDOWN, "Cannot send after transport endpoint shutdown");
#else
#ifdef WSAESHUTDOWN
- inscode(d, ds, de, "ESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown");
+ add_errcode("ESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown");
#endif
#endif
#ifdef ECHRNG
- inscode(d, ds, de, "ECHRNG", ECHRNG, "Channel number out of range");
+ add_errcode("ECHRNG", ECHRNG, "Channel number out of range");
#endif
#ifdef ELIBBAD
- inscode(d, ds, de, "ELIBBAD", ELIBBAD, "Accessing a corrupted shared library");
+ add_errcode("ELIBBAD", ELIBBAD, "Accessing a corrupted shared library");
#endif
#ifdef ENONET
- inscode(d, ds, de, "ENONET", ENONET, "Machine is not on the network");
+ add_errcode("ENONET", ENONET, "Machine is not on the network");
#endif
#ifdef EBADE
- inscode(d, ds, de, "EBADE", EBADE, "Invalid exchange");
+ add_errcode("EBADE", EBADE, "Invalid exchange");
#endif
#ifdef EBADF
- inscode(d, ds, de, "EBADF", EBADF, "Bad file number");
+ add_errcode("EBADF", EBADF, "Bad file number");
#else
#ifdef WSAEBADF
- inscode(d, ds, de, "EBADF", WSAEBADF, "Bad file number");
+ add_errcode("EBADF", WSAEBADF, "Bad file number");
#endif
#endif
#ifdef EMULTIHOP
- inscode(d, ds, de, "EMULTIHOP", EMULTIHOP, "Multihop attempted");
+ add_errcode("EMULTIHOP", EMULTIHOP, "Multihop attempted");
#endif
#ifdef EIO
- inscode(d, ds, de, "EIO", EIO, "I/O error");
+ add_errcode("EIO", EIO, "I/O error");
#endif
#ifdef EUNATCH
- inscode(d, ds, de, "EUNATCH", EUNATCH, "Protocol driver not attached");
+ add_errcode("EUNATCH", EUNATCH, "Protocol driver not attached");
#endif
#ifdef EPROTOTYPE
- inscode(d, ds, de, "EPROTOTYPE", EPROTOTYPE, "Protocol wrong type for socket");
+ add_errcode("EPROTOTYPE", EPROTOTYPE, "Protocol wrong type for socket");
#else
#ifdef WSAEPROTOTYPE
- inscode(d, ds, de, "EPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket");
+ add_errcode("EPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket");
#endif
#endif
#ifdef ENOSPC
- inscode(d, ds, de, "ENOSPC", ENOSPC, "No space left on device");
+ add_errcode("ENOSPC", ENOSPC, "No space left on device");
#endif
#ifdef ENOEXEC
- inscode(d, ds, de, "ENOEXEC", ENOEXEC, "Exec format error");
+ add_errcode("ENOEXEC", ENOEXEC, "Exec format error");
#endif
#ifdef EALREADY
- inscode(d, ds, de, "EALREADY", EALREADY, "Operation already in progress");
+ add_errcode("EALREADY", EALREADY, "Operation already in progress");
#else
#ifdef WSAEALREADY
- inscode(d, ds, de, "EALREADY", WSAEALREADY, "Operation already in progress");
+ add_errcode("EALREADY", WSAEALREADY, "Operation already in progress");
#endif
#endif
#ifdef ENETDOWN
- inscode(d, ds, de, "ENETDOWN", ENETDOWN, "Network is down");
+ add_errcode("ENETDOWN", ENETDOWN, "Network is down");
#else
#ifdef WSAENETDOWN
- inscode(d, ds, de, "ENETDOWN", WSAENETDOWN, "Network is down");
+ add_errcode("ENETDOWN", WSAENETDOWN, "Network is down");
#endif
#endif
#ifdef ENOTNAM
- inscode(d, ds, de, "ENOTNAM", ENOTNAM, "Not a XENIX named type file");
+ add_errcode("ENOTNAM", ENOTNAM, "Not a XENIX named type file");
#endif
#ifdef EACCES
- inscode(d, ds, de, "EACCES", EACCES, "Permission denied");
+ add_errcode("EACCES", EACCES, "Permission denied");
#else
#ifdef WSAEACCES
- inscode(d, ds, de, "EACCES", WSAEACCES, "Permission denied");
+ add_errcode("EACCES", WSAEACCES, "Permission denied");
#endif
#endif
#ifdef ELNRNG
- inscode(d, ds, de, "ELNRNG", ELNRNG, "Link number out of range");
+ add_errcode("ELNRNG", ELNRNG, "Link number out of range");
#endif
#ifdef EILSEQ
- inscode(d, ds, de, "EILSEQ", EILSEQ, "Illegal byte sequence");
+ add_errcode("EILSEQ", EILSEQ, "Illegal byte sequence");
#endif
#ifdef ENOTDIR
- inscode(d, ds, de, "ENOTDIR", ENOTDIR, "Not a directory");
+ add_errcode("ENOTDIR", ENOTDIR, "Not a directory");
#endif
#ifdef ENOTUNIQ
- inscode(d, ds, de, "ENOTUNIQ", ENOTUNIQ, "Name not unique on network");
+ add_errcode("ENOTUNIQ", ENOTUNIQ, "Name not unique on network");
#endif
#ifdef EPERM
- inscode(d, ds, de, "EPERM", EPERM, "Operation not permitted");
+ add_errcode("EPERM", EPERM, "Operation not permitted");
#endif
#ifdef EDOM
- inscode(d, ds, de, "EDOM", EDOM, "Math argument out of domain of func");
+ add_errcode("EDOM", EDOM, "Math argument out of domain of func");
#endif
#ifdef EXFULL
- inscode(d, ds, de, "EXFULL", EXFULL, "Exchange full");
+ add_errcode("EXFULL", EXFULL, "Exchange full");
#endif
#ifdef ECONNREFUSED
- inscode(d, ds, de, "ECONNREFUSED", ECONNREFUSED, "Connection refused");
+ add_errcode("ECONNREFUSED", ECONNREFUSED, "Connection refused");
#else
#ifdef WSAECONNREFUSED
- inscode(d, ds, de, "ECONNREFUSED", WSAECONNREFUSED, "Connection refused");
+ add_errcode("ECONNREFUSED", WSAECONNREFUSED, "Connection refused");
#endif
#endif
#ifdef EISDIR
- inscode(d, ds, de, "EISDIR", EISDIR, "Is a directory");
+ add_errcode("EISDIR", EISDIR, "Is a directory");
#endif
#ifdef EPROTONOSUPPORT
- inscode(d, ds, de, "EPROTONOSUPPORT", EPROTONOSUPPORT, "Protocol not supported");
+ add_errcode("EPROTONOSUPPORT", EPROTONOSUPPORT, "Protocol not supported");
#else
#ifdef WSAEPROTONOSUPPORT
- inscode(d, ds, de, "EPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported");
+ add_errcode("EPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported");
#endif
#endif
#ifdef EROFS
- inscode(d, ds, de, "EROFS", EROFS, "Read-only file system");
+ add_errcode("EROFS", EROFS, "Read-only file system");
#endif
#ifdef EADDRNOTAVAIL
- inscode(d, ds, de, "EADDRNOTAVAIL", EADDRNOTAVAIL, "Cannot assign requested address");
+ add_errcode("EADDRNOTAVAIL", EADDRNOTAVAIL, "Cannot assign requested address");
#else
#ifdef WSAEADDRNOTAVAIL
- inscode(d, ds, de, "EADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address");
+ add_errcode("EADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address");
#endif
#endif
#ifdef EIDRM
- inscode(d, ds, de, "EIDRM", EIDRM, "Identifier removed");
+ add_errcode("EIDRM", EIDRM, "Identifier removed");
#endif
#ifdef ECOMM
- inscode(d, ds, de, "ECOMM", ECOMM, "Communication error on send");
+ add_errcode("ECOMM", ECOMM, "Communication error on send");
#endif
#ifdef ESRMNT
- inscode(d, ds, de, "ESRMNT", ESRMNT, "Srmount error");
+ add_errcode("ESRMNT", ESRMNT, "Srmount error");
#endif
#ifdef EREMOTEIO
- inscode(d, ds, de, "EREMOTEIO", EREMOTEIO, "Remote I/O error");
+ add_errcode("EREMOTEIO", EREMOTEIO, "Remote I/O error");
#endif
#ifdef EL3RST
- inscode(d, ds, de, "EL3RST", EL3RST, "Level 3 reset");
+ add_errcode("EL3RST", EL3RST, "Level 3 reset");
#endif
#ifdef EBADMSG
- inscode(d, ds, de, "EBADMSG", EBADMSG, "Not a data message");
+ add_errcode("EBADMSG", EBADMSG, "Not a data message");
#endif
#ifdef ENFILE
- inscode(d, ds, de, "ENFILE", ENFILE, "File table overflow");
+ add_errcode("ENFILE", ENFILE, "File table overflow");
#endif
#ifdef ELIBMAX
- inscode(d, ds, de, "ELIBMAX", ELIBMAX, "Attempting to link in too many shared libraries");
+ add_errcode("ELIBMAX", ELIBMAX, "Attempting to link in too many shared libraries");
#endif
#ifdef ESPIPE
- inscode(d, ds, de, "ESPIPE", ESPIPE, "Illegal seek");
+ add_errcode("ESPIPE", ESPIPE, "Illegal seek");
#endif
#ifdef ENOLINK
- inscode(d, ds, de, "ENOLINK", ENOLINK, "Link has been severed");
+ add_errcode("ENOLINK", ENOLINK, "Link has been severed");
#endif
#ifdef ENETRESET
- inscode(d, ds, de, "ENETRESET", ENETRESET, "Network dropped connection because of reset");
+ add_errcode("ENETRESET", ENETRESET, "Network dropped connection because of reset");
#else
#ifdef WSAENETRESET
- inscode(d, ds, de, "ENETRESET", WSAENETRESET, "Network dropped connection because of reset");
+ add_errcode("ENETRESET", WSAENETRESET, "Network dropped connection because of reset");
#endif
#endif
#ifdef ETIMEDOUT
- inscode(d, ds, de, "ETIMEDOUT", ETIMEDOUT, "Connection timed out");
+ add_errcode("ETIMEDOUT", ETIMEDOUT, "Connection timed out");
#else
#ifdef WSAETIMEDOUT
- inscode(d, ds, de, "ETIMEDOUT", WSAETIMEDOUT, "Connection timed out");
+ add_errcode("ETIMEDOUT", WSAETIMEDOUT, "Connection timed out");
#endif
#endif
#ifdef ENOENT
- inscode(d, ds, de, "ENOENT", ENOENT, "No such file or directory");
+ add_errcode("ENOENT", ENOENT, "No such file or directory");
#endif
#ifdef EEXIST
- inscode(d, ds, de, "EEXIST", EEXIST, "File exists");
+ add_errcode("EEXIST", EEXIST, "File exists");
#endif
#ifdef EDQUOT
- inscode(d, ds, de, "EDQUOT", EDQUOT, "Quota exceeded");
+ add_errcode("EDQUOT", EDQUOT, "Quota exceeded");
#else
#ifdef WSAEDQUOT
- inscode(d, ds, de, "EDQUOT", WSAEDQUOT, "Quota exceeded");
+ add_errcode("EDQUOT", WSAEDQUOT, "Quota exceeded");
#endif
#endif
#ifdef ENOSTR
- inscode(d, ds, de, "ENOSTR", ENOSTR, "Device not a stream");
+ add_errcode("ENOSTR", ENOSTR, "Device not a stream");
#endif
#ifdef EBADSLT
- inscode(d, ds, de, "EBADSLT", EBADSLT, "Invalid slot");
+ add_errcode("EBADSLT", EBADSLT, "Invalid slot");
#endif
#ifdef EBADRQC
- inscode(d, ds, de, "EBADRQC", EBADRQC, "Invalid request code");
+ add_errcode("EBADRQC", EBADRQC, "Invalid request code");
#endif
#ifdef ELIBACC
- inscode(d, ds, de, "ELIBACC", ELIBACC, "Can not access a needed shared library");
+ add_errcode("ELIBACC", ELIBACC, "Can not access a needed shared library");
#endif
#ifdef EFAULT
- inscode(d, ds, de, "EFAULT", EFAULT, "Bad address");
+ add_errcode("EFAULT", EFAULT, "Bad address");
#else
#ifdef WSAEFAULT
- inscode(d, ds, de, "EFAULT", WSAEFAULT, "Bad address");
+ add_errcode("EFAULT", WSAEFAULT, "Bad address");
#endif
#endif
#ifdef EFBIG
- inscode(d, ds, de, "EFBIG", EFBIG, "File too large");
+ add_errcode("EFBIG", EFBIG, "File too large");
#endif
#ifdef EDEADLK
- inscode(d, ds, de, "EDEADLK", EDEADLK, "Resource deadlock would occur");
+ add_errcode("EDEADLK", EDEADLK, "Resource deadlock would occur");
#endif
#ifdef ENOTCONN
- inscode(d, ds, de, "ENOTCONN", ENOTCONN, "Transport endpoint is not connected");
+ add_errcode("ENOTCONN", ENOTCONN, "Transport endpoint is not connected");
#else
#ifdef WSAENOTCONN
- inscode(d, ds, de, "ENOTCONN", WSAENOTCONN, "Transport endpoint is not connected");
+ add_errcode("ENOTCONN", WSAENOTCONN, "Transport endpoint is not connected");
#endif
#endif
#ifdef EDESTADDRREQ
- inscode(d, ds, de, "EDESTADDRREQ", EDESTADDRREQ, "Destination address required");
+ add_errcode("EDESTADDRREQ", EDESTADDRREQ, "Destination address required");
#else
#ifdef WSAEDESTADDRREQ
- inscode(d, ds, de, "EDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required");
+ add_errcode("EDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required");
#endif
#endif
#ifdef ELIBSCN
- inscode(d, ds, de, "ELIBSCN", ELIBSCN, ".lib section in a.out corrupted");
+ add_errcode("ELIBSCN", ELIBSCN, ".lib section in a.out corrupted");
#endif
#ifdef ENOLCK
- inscode(d, ds, de, "ENOLCK", ENOLCK, "No record locks available");
+ add_errcode("ENOLCK", ENOLCK, "No record locks available");
#endif
#ifdef EISNAM
- inscode(d, ds, de, "EISNAM", EISNAM, "Is a named type file");
+ add_errcode("EISNAM", EISNAM, "Is a named type file");
#endif
#ifdef ECONNABORTED
- inscode(d, ds, de, "ECONNABORTED", ECONNABORTED, "Software caused connection abort");
+ add_errcode("ECONNABORTED", ECONNABORTED, "Software caused connection abort");
#else
#ifdef WSAECONNABORTED
- inscode(d, ds, de, "ECONNABORTED", WSAECONNABORTED, "Software caused connection abort");
+ add_errcode("ECONNABORTED", WSAECONNABORTED, "Software caused connection abort");
#endif
#endif
#ifdef ENETUNREACH
- inscode(d, ds, de, "ENETUNREACH", ENETUNREACH, "Network is unreachable");
+ add_errcode("ENETUNREACH", ENETUNREACH, "Network is unreachable");
#else
#ifdef WSAENETUNREACH
- inscode(d, ds, de, "ENETUNREACH", WSAENETUNREACH, "Network is unreachable");
+ add_errcode("ENETUNREACH", WSAENETUNREACH, "Network is unreachable");
#endif
#endif
#ifdef ESTALE
- inscode(d, ds, de, "ESTALE", ESTALE, "Stale NFS file handle");
+ add_errcode("ESTALE", ESTALE, "Stale NFS file handle");
#else
#ifdef WSAESTALE
- inscode(d, ds, de, "ESTALE", WSAESTALE, "Stale NFS file handle");
+ add_errcode("ESTALE", WSAESTALE, "Stale NFS file handle");
#endif
#endif
#ifdef ENOSR
- inscode(d, ds, de, "ENOSR", ENOSR, "Out of streams resources");
+ add_errcode("ENOSR", ENOSR, "Out of streams resources");
#endif
#ifdef ENOMEM
- inscode(d, ds, de, "ENOMEM", ENOMEM, "Out of memory");
+ add_errcode("ENOMEM", ENOMEM, "Out of memory");
#endif
#ifdef ENOTSOCK
- inscode(d, ds, de, "ENOTSOCK", ENOTSOCK, "Socket operation on non-socket");
+ add_errcode("ENOTSOCK", ENOTSOCK, "Socket operation on non-socket");
#else
#ifdef WSAENOTSOCK
- inscode(d, ds, de, "ENOTSOCK", WSAENOTSOCK, "Socket operation on non-socket");
+ add_errcode("ENOTSOCK", WSAENOTSOCK, "Socket operation on non-socket");
#endif
#endif
#ifdef ESTRPIPE
- inscode(d, ds, de, "ESTRPIPE", ESTRPIPE, "Streams pipe error");
+ add_errcode("ESTRPIPE", ESTRPIPE, "Streams pipe error");
#endif
#ifdef EMLINK
- inscode(d, ds, de, "EMLINK", EMLINK, "Too many links");
+ add_errcode("EMLINK", EMLINK, "Too many links");
#endif
#ifdef ERANGE
- inscode(d, ds, de, "ERANGE", ERANGE, "Math result not representable");
+ add_errcode("ERANGE", ERANGE, "Math result not representable");
#endif
#ifdef ELIBEXEC
- inscode(d, ds, de, "ELIBEXEC", ELIBEXEC, "Cannot exec a shared library directly");
+ add_errcode("ELIBEXEC", ELIBEXEC, "Cannot exec a shared library directly");
#endif
#ifdef EL3HLT
- inscode(d, ds, de, "EL3HLT", EL3HLT, "Level 3 halted");
+ add_errcode("EL3HLT", EL3HLT, "Level 3 halted");
#endif
#ifdef ECONNRESET
- inscode(d, ds, de, "ECONNRESET", ECONNRESET, "Connection reset by peer");
+ add_errcode("ECONNRESET", ECONNRESET, "Connection reset by peer");
#else
#ifdef WSAECONNRESET
- inscode(d, ds, de, "ECONNRESET", WSAECONNRESET, "Connection reset by peer");
+ add_errcode("ECONNRESET", WSAECONNRESET, "Connection reset by peer");
#endif
#endif
#ifdef EADDRINUSE
- inscode(d, ds, de, "EADDRINUSE", EADDRINUSE, "Address already in use");
+ add_errcode("EADDRINUSE", EADDRINUSE, "Address already in use");
#else
#ifdef WSAEADDRINUSE
- inscode(d, ds, de, "EADDRINUSE", WSAEADDRINUSE, "Address already in use");
+ add_errcode("EADDRINUSE", WSAEADDRINUSE, "Address already in use");
#endif
#endif
#ifdef EOPNOTSUPP
- inscode(d, ds, de, "EOPNOTSUPP", EOPNOTSUPP, "Operation not supported on transport endpoint");
+ add_errcode("EOPNOTSUPP", EOPNOTSUPP, "Operation not supported on transport endpoint");
#else
#ifdef WSAEOPNOTSUPP
- inscode(d, ds, de, "EOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint");
+ add_errcode("EOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint");
#endif
#endif
#ifdef EREMCHG
- inscode(d, ds, de, "EREMCHG", EREMCHG, "Remote address changed");
+ add_errcode("EREMCHG", EREMCHG, "Remote address changed");
#endif
#ifdef EAGAIN
- inscode(d, ds, de, "EAGAIN", EAGAIN, "Try again");
+ add_errcode("EAGAIN", EAGAIN, "Try again");
#endif
#ifdef ENAMETOOLONG
- inscode(d, ds, de, "ENAMETOOLONG", ENAMETOOLONG, "File name too long");
+ add_errcode("ENAMETOOLONG", ENAMETOOLONG, "File name too long");
#else
#ifdef WSAENAMETOOLONG
- inscode(d, ds, de, "ENAMETOOLONG", WSAENAMETOOLONG, "File name too long");
+ add_errcode("ENAMETOOLONG", WSAENAMETOOLONG, "File name too long");
#endif
#endif
#ifdef ENOTTY
- inscode(d, ds, de, "ENOTTY", ENOTTY, "Not a typewriter");
+ add_errcode("ENOTTY", ENOTTY, "Not a typewriter");
#endif
#ifdef ERESTART
- inscode(d, ds, de, "ERESTART", ERESTART, "Interrupted system call should be restarted");
+ add_errcode("ERESTART", ERESTART, "Interrupted system call should be restarted");
#endif
#ifdef ESOCKTNOSUPPORT
- inscode(d, ds, de, "ESOCKTNOSUPPORT", ESOCKTNOSUPPORT, "Socket type not supported");
+ add_errcode("ESOCKTNOSUPPORT", ESOCKTNOSUPPORT, "Socket type not supported");
#else
#ifdef WSAESOCKTNOSUPPORT
- inscode(d, ds, de, "ESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, "Socket type not supported");
+ add_errcode("ESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, "Socket type not supported");
#endif
#endif
#ifdef ETIME
- inscode(d, ds, de, "ETIME", ETIME, "Timer expired");
+ add_errcode("ETIME", ETIME, "Timer expired");
#endif
#ifdef EBFONT
- inscode(d, ds, de, "EBFONT", EBFONT, "Bad font file format");
+ add_errcode("EBFONT", EBFONT, "Bad font file format");
#endif
#ifdef EDEADLOCK
- inscode(d, ds, de, "EDEADLOCK", EDEADLOCK, "Error EDEADLOCK");
+ add_errcode("EDEADLOCK", EDEADLOCK, "Error EDEADLOCK");
#endif
#ifdef ETOOMANYREFS
- inscode(d, ds, de, "ETOOMANYREFS", ETOOMANYREFS, "Too many references: cannot splice");
+ add_errcode("ETOOMANYREFS", ETOOMANYREFS, "Too many references: cannot splice");
#else
#ifdef WSAETOOMANYREFS
- inscode(d, ds, de, "ETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice");
+ add_errcode("ETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice");
#endif
#endif
#ifdef EMFILE
- inscode(d, ds, de, "EMFILE", EMFILE, "Too many open files");
+ add_errcode("EMFILE", EMFILE, "Too many open files");
#else
#ifdef WSAEMFILE
- inscode(d, ds, de, "EMFILE", WSAEMFILE, "Too many open files");
+ add_errcode("EMFILE", WSAEMFILE, "Too many open files");
#endif
#endif
#ifdef ETXTBSY
- inscode(d, ds, de, "ETXTBSY", ETXTBSY, "Text file busy");
+ add_errcode("ETXTBSY", ETXTBSY, "Text file busy");
#endif
#ifdef EINPROGRESS
- inscode(d, ds, de, "EINPROGRESS", EINPROGRESS, "Operation now in progress");
+ add_errcode("EINPROGRESS", EINPROGRESS, "Operation now in progress");
#else
#ifdef WSAEINPROGRESS
- inscode(d, ds, de, "EINPROGRESS", WSAEINPROGRESS, "Operation now in progress");
+ add_errcode("EINPROGRESS", WSAEINPROGRESS, "Operation now in progress");
#endif
#endif
#ifdef ENXIO
- inscode(d, ds, de, "ENXIO", ENXIO, "No such device or address");
+ add_errcode("ENXIO", ENXIO, "No such device or address");
#endif
#ifdef ENOPKG
- inscode(d, ds, de, "ENOPKG", ENOPKG, "Package not installed");
+ add_errcode("ENOPKG", ENOPKG, "Package not installed");
#endif
#ifdef WSASY
- inscode(d, ds, de, "WSASY", WSASY, "Error WSASY");
+ add_errcode("WSASY", WSASY, "Error WSASY");
#endif
#ifdef WSAEHOSTDOWN
- inscode(d, ds, de, "WSAEHOSTDOWN", WSAEHOSTDOWN, "Host is down");
+ add_errcode("WSAEHOSTDOWN", WSAEHOSTDOWN, "Host is down");
#endif
#ifdef WSAENETDOWN
- inscode(d, ds, de, "WSAENETDOWN", WSAENETDOWN, "Network is down");
+ add_errcode("WSAENETDOWN", WSAENETDOWN, "Network is down");
#endif
#ifdef WSAENOTSOCK
- inscode(d, ds, de, "WSAENOTSOCK", WSAENOTSOCK, "Socket operation on non-socket");
+ add_errcode("WSAENOTSOCK", WSAENOTSOCK, "Socket operation on non-socket");
#endif
#ifdef WSAEHOSTUNREACH
- inscode(d, ds, de, "WSAEHOSTUNREACH", WSAEHOSTUNREACH, "No route to host");
+ add_errcode("WSAEHOSTUNREACH", WSAEHOSTUNREACH, "No route to host");
#endif
#ifdef WSAELOOP
- inscode(d, ds, de, "WSAELOOP", WSAELOOP, "Too many symbolic links encountered");
+ add_errcode("WSAELOOP", WSAELOOP, "Too many symbolic links encountered");
#endif
#ifdef WSAEMFILE
- inscode(d, ds, de, "WSAEMFILE", WSAEMFILE, "Too many open files");
+ add_errcode("WSAEMFILE", WSAEMFILE, "Too many open files");
#endif
#ifdef WSAESTALE
- inscode(d, ds, de, "WSAESTALE", WSAESTALE, "Stale NFS file handle");
+ add_errcode("WSAESTALE", WSAESTALE, "Stale NFS file handle");
#endif
#ifdef WSAVERNOTSUPPORTED
- inscode(d, ds, de, "WSAVERNOTSUPPORTED", WSAVERNOTSUPPORTED, "Error WSAVERNOTSUPPORTED");
+ add_errcode("WSAVERNOTSUPPORTED", WSAVERNOTSUPPORTED, "Error WSAVERNOTSUPPORTED");
#endif
#ifdef WSAENETUNREACH
- inscode(d, ds, de, "WSAENETUNREACH", WSAENETUNREACH, "Network is unreachable");
+ add_errcode("WSAENETUNREACH", WSAENETUNREACH, "Network is unreachable");
#endif
#ifdef WSAEPROCLIM
- inscode(d, ds, de, "WSAEPROCLIM", WSAEPROCLIM, "Error WSAEPROCLIM");
+ add_errcode("WSAEPROCLIM", WSAEPROCLIM, "Error WSAEPROCLIM");
#endif
#ifdef WSAEFAULT
- inscode(d, ds, de, "WSAEFAULT", WSAEFAULT, "Bad address");
+ add_errcode("WSAEFAULT", WSAEFAULT, "Bad address");
#endif
#ifdef WSANOTINITIALISED
- inscode(d, ds, de, "WSANOTINITIALISED", WSANOTINITIALISED, "Error WSANOTINITIALISED");
+ add_errcode("WSANOTINITIALISED", WSANOTINITIALISED, "Error WSANOTINITIALISED");
#endif
#ifdef WSAEUSERS
- inscode(d, ds, de, "WSAEUSERS", WSAEUSERS, "Too many users");
+ add_errcode("WSAEUSERS", WSAEUSERS, "Too many users");
#endif
#ifdef WSAMAKEASYNCREPL
- inscode(d, ds, de, "WSAMAKEASYNCREPL", WSAMAKEASYNCREPL, "Error WSAMAKEASYNCREPL");
+ add_errcode("WSAMAKEASYNCREPL", WSAMAKEASYNCREPL, "Error WSAMAKEASYNCREPL");
#endif
#ifdef WSAENOPROTOOPT
- inscode(d, ds, de, "WSAENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available");
+ add_errcode("WSAENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available");
#endif
#ifdef WSAECONNABORTED
- inscode(d, ds, de, "WSAECONNABORTED", WSAECONNABORTED, "Software caused connection abort");
+ add_errcode("WSAECONNABORTED", WSAECONNABORTED, "Software caused connection abort");
#endif
#ifdef WSAENAMETOOLONG
- inscode(d, ds, de, "WSAENAMETOOLONG", WSAENAMETOOLONG, "File name too long");
+ add_errcode("WSAENAMETOOLONG", WSAENAMETOOLONG, "File name too long");
#endif
#ifdef WSAENOTEMPTY
- inscode(d, ds, de, "WSAENOTEMPTY", WSAENOTEMPTY, "Directory not empty");
+ add_errcode("WSAENOTEMPTY", WSAENOTEMPTY, "Directory not empty");
#endif
#ifdef WSAESHUTDOWN
- inscode(d, ds, de, "WSAESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown");
+ add_errcode("WSAESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown");
#endif
#ifdef WSAEAFNOSUPPORT
- inscode(d, ds, de, "WSAEAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol");
+ add_errcode("WSAEAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol");
#endif
#ifdef WSAETOOMANYREFS
- inscode(d, ds, de, "WSAETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice");
+ add_errcode("WSAETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice");
#endif
#ifdef WSAEACCES
- inscode(d, ds, de, "WSAEACCES", WSAEACCES, "Permission denied");
+ add_errcode("WSAEACCES", WSAEACCES, "Permission denied");
#endif
#ifdef WSATR
- inscode(d, ds, de, "WSATR", WSATR, "Error WSATR");
+ add_errcode("WSATR", WSATR, "Error WSATR");
#endif
#ifdef WSABASEERR
- inscode(d, ds, de, "WSABASEERR", WSABASEERR, "Error WSABASEERR");
+ add_errcode("WSABASEERR", WSABASEERR, "Error WSABASEERR");
#endif
#ifdef WSADESCRIPTIO
- inscode(d, ds, de, "WSADESCRIPTIO", WSADESCRIPTIO, "Error WSADESCRIPTIO");
+ add_errcode("WSADESCRIPTIO", WSADESCRIPTIO, "Error WSADESCRIPTIO");
#endif
#ifdef WSAEMSGSIZE
- inscode(d, ds, de, "WSAEMSGSIZE", WSAEMSGSIZE, "Message too long");
+ add_errcode("WSAEMSGSIZE", WSAEMSGSIZE, "Message too long");
#endif
#ifdef WSAEBADF
- inscode(d, ds, de, "WSAEBADF", WSAEBADF, "Bad file number");
+ add_errcode("WSAEBADF", WSAEBADF, "Bad file number");
#endif
#ifdef WSAECONNRESET
- inscode(d, ds, de, "WSAECONNRESET", WSAECONNRESET, "Connection reset by peer");
+ add_errcode("WSAECONNRESET", WSAECONNRESET, "Connection reset by peer");
#endif
#ifdef WSAGETSELECTERRO
- inscode(d, ds, de, "WSAGETSELECTERRO", WSAGETSELECTERRO, "Error WSAGETSELECTERRO");
+ add_errcode("WSAGETSELECTERRO", WSAGETSELECTERRO, "Error WSAGETSELECTERRO");
#endif
#ifdef WSAETIMEDOUT
- inscode(d, ds, de, "WSAETIMEDOUT", WSAETIMEDOUT, "Connection timed out");
+ add_errcode("WSAETIMEDOUT", WSAETIMEDOUT, "Connection timed out");
#endif
#ifdef WSAENOBUFS
- inscode(d, ds, de, "WSAENOBUFS", WSAENOBUFS, "No buffer space available");
+ add_errcode("WSAENOBUFS", WSAENOBUFS, "No buffer space available");
#endif
#ifdef WSAEDISCON
- inscode(d, ds, de, "WSAEDISCON", WSAEDISCON, "Error WSAEDISCON");
+ add_errcode("WSAEDISCON", WSAEDISCON, "Error WSAEDISCON");
#endif
#ifdef WSAEINTR
- inscode(d, ds, de, "WSAEINTR", WSAEINTR, "Interrupted system call");
+ add_errcode("WSAEINTR", WSAEINTR, "Interrupted system call");
#endif
#ifdef WSAEPROTOTYPE
- inscode(d, ds, de, "WSAEPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket");
+ add_errcode("WSAEPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket");
#endif
#ifdef WSAHOS
- inscode(d, ds, de, "WSAHOS", WSAHOS, "Error WSAHOS");
+ add_errcode("WSAHOS", WSAHOS, "Error WSAHOS");
#endif
#ifdef WSAEADDRINUSE
- inscode(d, ds, de, "WSAEADDRINUSE", WSAEADDRINUSE, "Address already in use");
+ add_errcode("WSAEADDRINUSE", WSAEADDRINUSE, "Address already in use");
#endif
#ifdef WSAEADDRNOTAVAIL
- inscode(d, ds, de, "WSAEADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address");
+ add_errcode("WSAEADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address");
#endif
#ifdef WSAEALREADY
- inscode(d, ds, de, "WSAEALREADY", WSAEALREADY, "Operation already in progress");
+ add_errcode("WSAEALREADY", WSAEALREADY, "Operation already in progress");
#endif
#ifdef WSAEPROTONOSUPPORT
- inscode(d, ds, de, "WSAEPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported");
+ add_errcode("WSAEPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported");
#endif
#ifdef WSASYSNOTREADY
- inscode(d, ds, de, "WSASYSNOTREADY", WSASYSNOTREADY, "Error WSASYSNOTREADY");
+ add_errcode("WSASYSNOTREADY", WSASYSNOTREADY, "Error WSASYSNOTREADY");
#endif
#ifdef WSAEWOULDBLOCK
- inscode(d, ds, de, "WSAEWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block");
+ add_errcode("WSAEWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block");
#endif
#ifdef WSAEPFNOSUPPORT
- inscode(d, ds, de, "WSAEPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported");
+ add_errcode("WSAEPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported");
#endif
#ifdef WSAEOPNOTSUPP
- inscode(d, ds, de, "WSAEOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint");
+ add_errcode("WSAEOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint");
#endif
#ifdef WSAEISCONN
- inscode(d, ds, de, "WSAEISCONN", WSAEISCONN, "Transport endpoint is already connected");
+ add_errcode("WSAEISCONN", WSAEISCONN, "Transport endpoint is already connected");
#endif
#ifdef WSAEDQUOT
- inscode(d, ds, de, "WSAEDQUOT", WSAEDQUOT, "Quota exceeded");
+ add_errcode("WSAEDQUOT", WSAEDQUOT, "Quota exceeded");
#endif
#ifdef WSAENOTCONN
- inscode(d, ds, de, "WSAENOTCONN", WSAENOTCONN, "Transport endpoint is not connected");
+ add_errcode("WSAENOTCONN", WSAENOTCONN, "Transport endpoint is not connected");
#endif
#ifdef WSAEREMOTE
- inscode(d, ds, de, "WSAEREMOTE", WSAEREMOTE, "Object is remote");
+ add_errcode("WSAEREMOTE", WSAEREMOTE, "Object is remote");
#endif
#ifdef WSAEINVAL
- inscode(d, ds, de, "WSAEINVAL", WSAEINVAL, "Invalid argument");
+ add_errcode("WSAEINVAL", WSAEINVAL, "Invalid argument");
#endif
#ifdef WSAEINPROGRESS
- inscode(d, ds, de, "WSAEINPROGRESS", WSAEINPROGRESS, "Operation now in progress");
+ add_errcode("WSAEINPROGRESS", WSAEINPROGRESS, "Operation now in progress");
#endif
#ifdef WSAGETSELECTEVEN
- inscode(d, ds, de, "WSAGETSELECTEVEN", WSAGETSELECTEVEN, "Error WSAGETSELECTEVEN");
+ add_errcode("WSAGETSELECTEVEN", WSAGETSELECTEVEN, "Error WSAGETSELECTEVEN");
#endif
#ifdef WSAESOCKTNOSUPPORT
- inscode(d, ds, de, "WSAESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, "Socket type not supported");
+ add_errcode("WSAESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, "Socket type not supported");
#endif
#ifdef WSAGETASYNCERRO
- inscode(d, ds, de, "WSAGETASYNCERRO", WSAGETASYNCERRO, "Error WSAGETASYNCERRO");
+ add_errcode("WSAGETASYNCERRO", WSAGETASYNCERRO, "Error WSAGETASYNCERRO");
#endif
#ifdef WSAMAKESELECTREPL
- inscode(d, ds, de, "WSAMAKESELECTREPL", WSAMAKESELECTREPL, "Error WSAMAKESELECTREPL");
+ add_errcode("WSAMAKESELECTREPL", WSAMAKESELECTREPL, "Error WSAMAKESELECTREPL");
#endif
#ifdef WSAGETASYNCBUFLE
- inscode(d, ds, de, "WSAGETASYNCBUFLE", WSAGETASYNCBUFLE, "Error WSAGETASYNCBUFLE");
+ add_errcode("WSAGETASYNCBUFLE", WSAGETASYNCBUFLE, "Error WSAGETASYNCBUFLE");
#endif
#ifdef WSAEDESTADDRREQ
- inscode(d, ds, de, "WSAEDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required");
+ add_errcode("WSAEDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required");
#endif
#ifdef WSAECONNREFUSED
- inscode(d, ds, de, "WSAECONNREFUSED", WSAECONNREFUSED, "Connection refused");
+ add_errcode("WSAECONNREFUSED", WSAECONNREFUSED, "Connection refused");
#endif
#ifdef WSAENETRESET
- inscode(d, ds, de, "WSAENETRESET", WSAENETRESET, "Network dropped connection because of reset");
+ add_errcode("WSAENETRESET", WSAENETRESET, "Network dropped connection because of reset");
#endif
#ifdef WSAN
- inscode(d, ds, de, "WSAN", WSAN, "Error WSAN");
+ add_errcode("WSAN", WSAN, "Error WSAN");
#endif
#ifdef ENOMEDIUM
- inscode(d, ds, de, "ENOMEDIUM", ENOMEDIUM, "No medium found");
+ add_errcode("ENOMEDIUM", ENOMEDIUM, "No medium found");
#endif
#ifdef EMEDIUMTYPE
- inscode(d, ds, de, "EMEDIUMTYPE", EMEDIUMTYPE, "Wrong medium type");
+ add_errcode("EMEDIUMTYPE", EMEDIUMTYPE, "Wrong medium type");
#endif
#ifdef ECANCELED
- inscode(d, ds, de, "ECANCELED", ECANCELED, "Operation Canceled");
+ add_errcode("ECANCELED", ECANCELED, "Operation Canceled");
#endif
#ifdef ENOKEY
- inscode(d, ds, de, "ENOKEY", ENOKEY, "Required key not available");
+ add_errcode("ENOKEY", ENOKEY, "Required key not available");
#endif
#ifdef EKEYEXPIRED
- inscode(d, ds, de, "EKEYEXPIRED", EKEYEXPIRED, "Key has expired");
+ add_errcode("EKEYEXPIRED", EKEYEXPIRED, "Key has expired");
#endif
#ifdef EKEYREVOKED
- inscode(d, ds, de, "EKEYREVOKED", EKEYREVOKED, "Key has been revoked");
+ add_errcode("EKEYREVOKED", EKEYREVOKED, "Key has been revoked");
#endif
#ifdef EKEYREJECTED
- inscode(d, ds, de, "EKEYREJECTED", EKEYREJECTED, "Key was rejected by service");
+ add_errcode("EKEYREJECTED", EKEYREJECTED, "Key was rejected by service");
#endif
#ifdef EOWNERDEAD
- inscode(d, ds, de, "EOWNERDEAD", EOWNERDEAD, "Owner died");
+ add_errcode("EOWNERDEAD", EOWNERDEAD, "Owner died");
#endif
#ifdef ENOTRECOVERABLE
- inscode(d, ds, de, "ENOTRECOVERABLE", ENOTRECOVERABLE, "State not recoverable");
+ add_errcode("ENOTRECOVERABLE", ENOTRECOVERABLE, "State not recoverable");
#endif
#ifdef ERFKILL
- inscode(d, ds, de, "ERFKILL", ERFKILL, "Operation not possible due to RF-kill");
+ add_errcode("ERFKILL", ERFKILL, "Operation not possible due to RF-kill");
#endif
/* Solaris-specific errnos */
#ifdef ECANCELED
- inscode(d, ds, de, "ECANCELED", ECANCELED, "Operation canceled");
+ add_errcode("ECANCELED", ECANCELED, "Operation canceled");
#endif
#ifdef ENOTSUP
- inscode(d, ds, de, "ENOTSUP", ENOTSUP, "Operation not supported");
+ add_errcode("ENOTSUP", ENOTSUP, "Operation not supported");
#endif
#ifdef EOWNERDEAD
- inscode(d, ds, de, "EOWNERDEAD", EOWNERDEAD, "Process died with the lock");
+ add_errcode("EOWNERDEAD", EOWNERDEAD, "Process died with the lock");
#endif
#ifdef ENOTRECOVERABLE
- inscode(d, ds, de, "ENOTRECOVERABLE", ENOTRECOVERABLE, "Lock is not recoverable");
+ add_errcode("ENOTRECOVERABLE", ENOTRECOVERABLE, "Lock is not recoverable");
#endif
#ifdef ELOCKUNMAPPED
- inscode(d, ds, de, "ELOCKUNMAPPED", ELOCKUNMAPPED, "Locked lock was unmapped");
+ add_errcode("ELOCKUNMAPPED", ELOCKUNMAPPED, "Locked lock was unmapped");
#endif
#ifdef ENOTACTIVE
- inscode(d, ds, de, "ENOTACTIVE", ENOTACTIVE, "Facility is not active");
+ add_errcode("ENOTACTIVE", ENOTACTIVE, "Facility is not active");
#endif
/* MacOSX specific errnos */
#ifdef EAUTH
- inscode(d, ds, de, "EAUTH", EAUTH, "Authentication error");
+ add_errcode("EAUTH", EAUTH, "Authentication error");
#endif
#ifdef EBADARCH
- inscode(d, ds, de, "EBADARCH", EBADARCH, "Bad CPU type in executable");
+ add_errcode("EBADARCH", EBADARCH, "Bad CPU type in executable");
#endif
#ifdef EBADEXEC
- inscode(d, ds, de, "EBADEXEC", EBADEXEC, "Bad executable (or shared library)");
+ add_errcode("EBADEXEC", EBADEXEC, "Bad executable (or shared library)");
#endif
#ifdef EBADMACHO
- inscode(d, ds, de, "EBADMACHO", EBADMACHO, "Malformed Mach-o file");
+ add_errcode("EBADMACHO", EBADMACHO, "Malformed Mach-o file");
#endif
#ifdef EBADRPC
- inscode(d, ds, de, "EBADRPC", EBADRPC, "RPC struct is bad");
+ add_errcode("EBADRPC", EBADRPC, "RPC struct is bad");
#endif
#ifdef EDEVERR
- inscode(d, ds, de, "EDEVERR", EDEVERR, "Device error");
+ add_errcode("EDEVERR", EDEVERR, "Device error");
#endif
#ifdef EFTYPE
- inscode(d, ds, de, "EFTYPE", EFTYPE, "Inappropriate file type or format");
+ add_errcode("EFTYPE", EFTYPE, "Inappropriate file type or format");
#endif
#ifdef ENEEDAUTH
- inscode(d, ds, de, "ENEEDAUTH", ENEEDAUTH, "Need authenticator");
+ add_errcode("ENEEDAUTH", ENEEDAUTH, "Need authenticator");
#endif
#ifdef ENOATTR
- inscode(d, ds, de, "ENOATTR", ENOATTR, "Attribute not found");
+ add_errcode("ENOATTR", ENOATTR, "Attribute not found");
#endif
#ifdef ENOPOLICY
- inscode(d, ds, de, "ENOPOLICY", ENOPOLICY, "Policy not found");
+ add_errcode("ENOPOLICY", ENOPOLICY, "Policy not found");
#endif
#ifdef EPROCLIM
- inscode(d, ds, de, "EPROCLIM", EPROCLIM, "Too many processes");
+ add_errcode("EPROCLIM", EPROCLIM, "Too many processes");
#endif
#ifdef EPROCUNAVAIL
- inscode(d, ds, de, "EPROCUNAVAIL", EPROCUNAVAIL, "Bad procedure for program");
+ add_errcode("EPROCUNAVAIL", EPROCUNAVAIL, "Bad procedure for program");
#endif
#ifdef EPROGMISMATCH
- inscode(d, ds, de, "EPROGMISMATCH", EPROGMISMATCH, "Program version wrong");
+ add_errcode("EPROGMISMATCH", EPROGMISMATCH, "Program version wrong");
#endif
#ifdef EPROGUNAVAIL
- inscode(d, ds, de, "EPROGUNAVAIL", EPROGUNAVAIL, "RPC prog. not avail");
+ add_errcode("EPROGUNAVAIL", EPROGUNAVAIL, "RPC prog. not avail");
#endif
#ifdef EPWROFF
- inscode(d, ds, de, "EPWROFF", EPWROFF, "Device power is off");
+ add_errcode("EPWROFF", EPWROFF, "Device power is off");
#endif
#ifdef ERPCMISMATCH
- inscode(d, ds, de, "ERPCMISMATCH", ERPCMISMATCH, "RPC version wrong");
+ add_errcode("ERPCMISMATCH", ERPCMISMATCH, "RPC version wrong");
#endif
#ifdef ESHLIBVERS
- inscode(d, ds, de, "ESHLIBVERS", ESHLIBVERS, "Shared library version mismatch");
+ add_errcode("ESHLIBVERS", ESHLIBVERS, "Shared library version mismatch");
#endif
- Py_DECREF(de);
- return m;
+ Py_DECREF(error_dict);
+ return 0;
+}
+
+static PyModuleDef_Slot errno_slots[] = {
+ {Py_mod_exec, errno_exec},
+ {0, NULL}
+};
+
+PyDoc_STRVAR(errno__doc__,
+"This module makes available standard errno system symbols.\n\
+\n\
+The value of each symbol is the corresponding integer value,\n\
+e.g., on most systems, errno.ENOENT equals the integer 2.\n\
+\n\
+The dictionary errno.errorcode maps numeric codes to symbol names,\n\
+e.g., errno.errorcode[2] could be the string 'ENOENT'.\n\
+\n\
+Symbols that are not relevant to the underlying system are not defined.\n\
+\n\
+To map error codes to error messages, use the function os.strerror(),\n\
+e.g. os.strerror(2) could return 'No such file or directory'.");
+
+static struct PyModuleDef errnomodule = {
+ PyModuleDef_HEAD_INIT,
+ .m_name = "errno",
+ .m_doc = errno__doc__,
+ .m_size = 0,
+ .m_methods = errno_methods,
+ .m_slots = errno_slots,
+};
+
+PyMODINIT_FUNC
+PyInit_errno(void)
+{
+ return PyModuleDef_Init(&errnomodule);
}
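
The converted calls above go through an add_errcode() helper instead of threading the module dictionary and the errorcode dictionary through every line; its definition is not visible in this hunk. As a rough sketch of the shape such a helper takes (the names module_dict, error_dict and _add_errcode are illustrative assumptions, not necessarily what the patch actually uses), it registers each symbol twice: once as a module attribute and once in the reverse errno.errorcode mapping described in the docstring above.

    /* Sketch only -- assumed shape of the helper behind the calls above,
       not the actual definition from errnomodule.c. */
    static int
    _add_errcode(PyObject *module_dict, PyObject *error_dict,
                 const char *name_str, int code_int)
    {
        PyObject *name = PyUnicode_FromString(name_str);
        if (name == NULL) {
            return -1;
        }
        PyObject *code = PyLong_FromLong(code_int);
        if (code == NULL) {
            Py_DECREF(name);
            return -1;
        }
        int ret = -1;
        /* forward mapping: errno.ENOENT == 2, etc. */
        if (PyDict_SetItem(module_dict, name, code) < 0) {
            goto done;
        }
        /* reverse mapping: errno.errorcode[2] == 'ENOENT', etc. */
        if (PyDict_SetItem(error_dict, code, name) < 0) {
            goto done;
        }
        ret = 0;
    done:
        Py_DECREF(name);
        Py_DECREF(code);
        return ret;
    }

    /* add_errcode("EAGAIN", EAGAIN, "Try again") would then expand to a
       checked call of the helper; the comment string is informational. */
    #define add_errcode(name, code, comment)                              \
        do {                                                              \
            if (_add_errcode(module_dict, error_dict, name, code) < 0) {  \
                Py_DECREF(error_dict);                                    \
                return -1;                                                \
            }                                                             \
        } while (0)
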
diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c
index 230cde49..e7a28503 100644
--- a/Modules/faulthandler.c
+++ b/Modules/faulthandler.c
@@ -1,7 +1,6 @@
#include "Python.h"
#include "pycore_initconfig.h"
#include "pycore_traceback.h"
-#include "pythread.h"
#include
#include
#include
@@ -19,8 +18,6 @@
/* Allocate at maximum 100 MiB of the stack to raise the stack overflow */
#define STACK_OVERFLOW_MAX_SIZE (100 * 1024 * 1024)
-#define FAULTHANDLER_LATER
-
#ifndef MS_WINDOWS
/* register() is useless on Windows, because only SIGSEGV, SIGABRT and
SIGILL can be handled by the process, and these signals can only be used
@@ -60,7 +57,6 @@ static struct {
#endif
} fatal_error = {0, NULL, -1, 0};
-#ifdef FAULTHANDLER_LATER
static struct {
PyObject *file;
int fd;
@@ -77,7 +73,6 @@ static struct {
/* released by child thread when joined */
PyThread_type_lock running;
} thread;
-#endif
#ifdef FAULTHANDLER_USER
typedef struct {
@@ -125,7 +120,13 @@ static fault_handler_t faulthandler_handlers[] = {
static const size_t faulthandler_nsignals = \
Py_ARRAY_LENGTH(faulthandler_handlers);
-#ifdef HAVE_SIGALTSTACK
+/* Using an alternative stack requires sigaltstack()
+ and sigaction() SA_ONSTACK */
+#if defined(HAVE_SIGALTSTACK) && defined(HAVE_SIGACTION)
+# define FAULTHANDLER_USE_ALT_STACK
+#endif
+
+#ifdef FAULTHANDLER_USE_ALT_STACK
static stack_t stack;
static stack_t old_stack;
#endif
@@ -172,7 +173,7 @@ faulthandler_get_fileno(PyObject **file_ptr)
return fd;
}
- result = _PyObject_CallMethodId(file, &PyId_fileno, NULL);
+ result = _PyObject_CallMethodIdNoArgs(file, &PyId_fileno);
if (result == NULL)
return -1;
@@ -190,7 +191,7 @@ faulthandler_get_fileno(PyObject **file_ptr)
return -1;
}
- result = _PyObject_CallMethodId(file, &PyId_flush, NULL);
+ result = _PyObject_CallMethodIdNoArgs(file, &PyId_flush);
if (result != NULL)
Py_DECREF(result);
else {
@@ -427,6 +428,36 @@ faulthandler_exc_handler(struct _EXCEPTION_POINTERS *exc_info)
}
#endif
+
+#ifdef FAULTHANDLER_USE_ALT_STACK
+static int
+faulthandler_allocate_stack(void)
+{
+ if (stack.ss_sp != NULL) {
+ return 0;
+ }
+ /* Allocate an alternate stack for faulthandler() signal handler
+ to be able to execute a signal handler on a stack overflow error */
+ stack.ss_sp = PyMem_Malloc(stack.ss_size);
+ if (stack.ss_sp == NULL) {
+ PyErr_NoMemory();
+ return -1;
+ }
+
+ int err = sigaltstack(&stack, &old_stack);
+ if (err) {
+ /* Release the stack to retry sigaltstack() next time */
+ PyMem_Free(stack.ss_sp);
+ stack.ss_sp = NULL;
+
+ PyErr_SetFromErrno(PyExc_OSError);
+ return -1;
+ }
+ return 0;
+}
+#endif
+
+
/* Install the handler for fatal signals, faulthandler_fatal_error(). */
static int
@@ -437,32 +468,35 @@ faulthandler_enable(void)
}
fatal_error.enabled = 1;
+#ifdef FAULTHANDLER_USE_ALT_STACK
+ if (faulthandler_allocate_stack() < 0) {
+ return -1;
+ }
+#endif
+
for (size_t i=0; i < faulthandler_nsignals; i++) {
fault_handler_t *handler;
-#ifdef HAVE_SIGACTION
- struct sigaction action;
-#endif
int err;
handler = &faulthandler_handlers[i];
assert(!handler->enabled);
#ifdef HAVE_SIGACTION
+ struct sigaction action;
action.sa_handler = faulthandler_fatal_error;
sigemptyset(&action.sa_mask);
/* Do not prevent the signal from being received from within
its own signal handler */
action.sa_flags = SA_NODEFER;
-#ifdef HAVE_SIGALTSTACK
- if (stack.ss_sp != NULL) {
- /* Call the signal handler on an alternate signal stack
- provided by sigaltstack() */
- action.sa_flags |= SA_ONSTACK;
- }
+#ifdef FAULTHANDLER_USE_ALT_STACK
+ assert(stack.ss_sp != NULL);
+ /* Call the signal handler on an alternate signal stack
+ provided by sigaltstack() */
+ action.sa_flags |= SA_ONSTACK;
#endif
err = sigaction(handler->signum, &action, &handler->previous);
#else
handler->previous = signal(handler->signum,
- faulthandler_fatal_error);
+ faulthandler_fatal_error);
err = (handler->previous == SIG_ERR);
#endif
if (err) {
@@ -505,7 +539,7 @@ faulthandler_py_enable(PyObject *self, PyObject *args, PyObject *kwargs)
Py_XSETREF(fatal_error.file, file);
fatal_error.fd = fd;
fatal_error.all_threads = all_threads;
- fatal_error.interp = tstate->interp;
+ fatal_error.interp = PyThreadState_GetInterpreter(tstate);
if (faulthandler_enable() < 0) {
return NULL;
@@ -550,8 +584,6 @@ faulthandler_is_enabled(PyObject *self, PyObject *Py_UNUSED(ignored))
return PyBool_FromLong(fatal_error.enabled);
}
-#ifdef FAULTHANDLER_LATER
-
static void
faulthandler_thread(void *unused)
{
@@ -592,6 +624,11 @@ faulthandler_thread(void *unused)
static void
cancel_dump_traceback_later(void)
{
+ /* If not scheduled, nothing to cancel */
+ if (!thread.cancel_event) {
+ return;
+ }
+
/* Notify cancellation */
PyThread_release_lock(thread.cancel_event);
@@ -676,17 +713,37 @@ faulthandler_dump_traceback_later(PyObject *self,
}
tstate = get_thread_state();
- if (tstate == NULL)
+ if (tstate == NULL) {
return NULL;
+ }
fd = faulthandler_get_fileno(&file);
- if (fd < 0)
+ if (fd < 0) {
return NULL;
+ }
+
+ if (!thread.running) {
+ thread.running = PyThread_allocate_lock();
+ if (!thread.running) {
+ return PyErr_NoMemory();
+ }
+ }
+ if (!thread.cancel_event) {
+ thread.cancel_event = PyThread_allocate_lock();
+ if (!thread.cancel_event || !thread.running) {
+ return PyErr_NoMemory();
+ }
+
+ /* cancel_event starts to be acquired: it's only released to cancel
+ the thread. */
+ PyThread_acquire_lock(thread.cancel_event, 1);
+ }
/* format the timeout */
header = format_timeout(timeout_us);
- if (header == NULL)
+ if (header == NULL) {
return PyErr_NoMemory();
+ }
header_len = strlen(header);
/* Cancel previous thread, if running */
@@ -698,7 +755,7 @@ faulthandler_dump_traceback_later(PyObject *self,
/* the downcast is safe: we check that 0 < timeout_us < PY_TIMEOUT_MAX */
thread.timeout_us = (PY_TIMEOUT_T)timeout_us;
thread.repeat = repeat;
- thread.interp = tstate->interp;
+ thread.interp = PyThreadState_GetInterpreter(tstate);
thread.exit = exit;
thread.header = header;
thread.header_len = header_len;
@@ -726,11 +783,11 @@ faulthandler_cancel_dump_traceback_later_py(PyObject *self,
cancel_dump_traceback_later();
Py_RETURN_NONE;
}
-#endif /* FAULTHANDLER_LATER */
+
#ifdef FAULTHANDLER_USER
static int
-faulthandler_register(int signum, int chain, _Py_sighandler_t *p_previous)
+faulthandler_register(int signum, int chain, _Py_sighandler_t *previous_p)
{
#ifdef HAVE_SIGACTION
struct sigaction action;
@@ -745,19 +802,19 @@ faulthandler_register(int signum, int chain, _Py_sighandler_t *p_previous)
own signal handler */
action.sa_flags = SA_NODEFER;
}
-#ifdef HAVE_SIGALTSTACK
- if (stack.ss_sp != NULL) {
- /* Call the signal handler on an alternate signal stack
- provided by sigaltstack() */
- action.sa_flags |= SA_ONSTACK;
- }
+#ifdef FAULTHANDLER_USE_ALT_STACK
+ assert(stack.ss_sp != NULL);
+ /* Call the signal handler on an alternate signal stack
+ provided by sigaltstack() */
+ action.sa_flags |= SA_ONSTACK;
#endif
- return sigaction(signum, &action, p_previous);
+ return sigaction(signum, &action, previous_p);
#else
_Py_sighandler_t previous;
previous = signal(signum, faulthandler_user);
- if (p_previous != NULL)
- *p_previous = previous;
+ if (previous_p != NULL) {
+ *previous_p = previous;
+ }
return (previous == SIG_ERR);
#endif
}
@@ -853,14 +910,19 @@ faulthandler_register_py(PyObject *self,
return NULL;
if (user_signals == NULL) {
- user_signals = PyMem_Malloc(NSIG * sizeof(user_signal_t));
+ user_signals = PyMem_Calloc(NSIG, sizeof(user_signal_t));
if (user_signals == NULL)
return PyErr_NoMemory();
- memset(user_signals, 0, NSIG * sizeof(user_signal_t));
}
user = &user_signals[signum];
if (!user->enabled) {
+#ifdef FAULTHANDLER_USE_ALT_STACK
+ if (faulthandler_allocate_stack() < 0) {
+ return NULL;
+ }
+#endif
+
err = faulthandler_register(signum, chain, &previous);
if (err) {
PyErr_SetFromErrno(PyExc_OSError);
@@ -875,7 +937,7 @@ faulthandler_register_py(PyObject *self,
user->fd = fd;
user->all_threads = all_threads;
user->chain = chain;
- user->interp = tstate->interp;
+ user->interp = PyThreadState_GetInterpreter(tstate);
user->enabled = 1;
Py_RETURN_NONE;
@@ -1001,24 +1063,10 @@ faulthandler_sigsegv(PyObject *self, PyObject *args)
Py_RETURN_NONE;
}
-static void
+static void _Py_NO_RETURN
faulthandler_fatal_error_thread(void *plock)
{
-#ifndef __clang__
- PyThread_type_lock *lock = (PyThread_type_lock *)plock;
-#endif
-
Py_FatalError("in new thread");
-
-#ifndef __clang__
- /* Issue #28152: Py_FatalError() is declared with
- __attribute__((__noreturn__)). GCC emits a warning without
- "PyThread_release_lock()" (compiler bug?), but Clang is smarter and
- emits a warning on the return. */
-
- /* notify the caller that we are done */
- PyThread_release_lock(lock);
-#endif
}
static PyObject *
@@ -1094,7 +1142,7 @@ faulthandler_fatal_error_py(PyObject *self, PyObject *args)
Py_RETURN_NONE;
}
-#if defined(HAVE_SIGALTSTACK) && defined(HAVE_SIGACTION)
+#if defined(FAULTHANDLER_USE_ALT_STACK)
#define FAULTHANDLER_STACK_OVERFLOW
static uintptr_t
@@ -1149,15 +1197,13 @@ faulthandler_stack_overflow(PyObject *self, PyObject *Py_UNUSED(ignored))
size, depth);
return NULL;
}
-#endif /* defined(HAVE_SIGALTSTACK) && defined(HAVE_SIGACTION) */
+#endif /* defined(FAULTHANDLER_USE_ALT_STACK) */
static int
faulthandler_traverse(PyObject *module, visitproc visit, void *arg)
{
-#ifdef FAULTHANDLER_LATER
Py_VISIT(thread.file);
-#endif
#ifdef FAULTHANDLER_USER
if (user_signals != NULL) {
for (size_t signum=0; signum < NSIG; signum++)
@@ -1198,7 +1244,6 @@ static PyMethodDef module_methods[] = {
PyDoc_STR("dump_traceback(file=sys.stderr, all_threads=True): "
"dump the traceback of the current thread, or of all threads "
"if all_threads is True, into file")},
-#ifdef FAULTHANDLER_LATER
{"dump_traceback_later",
(PyCFunction)(void(*)(void))faulthandler_dump_traceback_later, METH_VARARGS|METH_KEYWORDS,
PyDoc_STR("dump_traceback_later(timeout, repeat=False, file=sys.stderrn, exit=False):\n"
@@ -1209,8 +1254,6 @@ static PyMethodDef module_methods[] = {
faulthandler_cancel_dump_traceback_later_py, METH_NOARGS,
PyDoc_STR("cancel_dump_traceback_later():\ncancel the previous call "
"to dump_traceback_later().")},
-#endif
-
#ifdef FAULTHANDLER_USER
{"register",
(PyCFunction)(void(*)(void))faulthandler_register_py, METH_VARARGS|METH_KEYWORDS,
@@ -1223,7 +1266,6 @@ static PyMethodDef module_methods[] = {
PyDoc_STR("unregister(signum): unregister the handler of the signal "
"'signum' registered by register()")},
#endif
-
{"_read_null", faulthandler_read_null, METH_NOARGS,
PyDoc_STR("_read_null(): read from NULL, raise "
"a SIGSEGV or SIGBUS signal depending on the platform")},
@@ -1312,7 +1354,7 @@ faulthandler_init_enable(void)
return -1;
}
- PyObject *res = _PyObject_CallMethodId(module, &PyId_enable, NULL);
+ PyObject *res = _PyObject_CallMethodIdNoArgs(module, &PyId_enable);
Py_DECREF(module);
if (res == NULL) {
return -1;
@@ -1325,37 +1367,18 @@ faulthandler_init_enable(void)
PyStatus
_PyFaulthandler_Init(int enable)
{
-#ifdef HAVE_SIGALTSTACK
- int err;
-
- /* Try to allocate an alternate stack for faulthandler() signal handler to
- * be able to allocate memory on the stack, even on a stack overflow. If it
- * fails, ignore the error. */
+#ifdef FAULTHANDLER_USE_ALT_STACK
+ memset(&stack, 0, sizeof(stack));
stack.ss_flags = 0;
/* bpo-21131: allocate dedicated stack of SIGSTKSZ*2 bytes, instead of just
SIGSTKSZ bytes. Calling the previous signal handler in faulthandler
signal handler uses more than SIGSTKSZ bytes of stack memory on some
platforms. */
stack.ss_size = SIGSTKSZ * 2;
- stack.ss_sp = PyMem_Malloc(stack.ss_size);
- if (stack.ss_sp != NULL) {
- err = sigaltstack(&stack, &old_stack);
- if (err) {
- PyMem_Free(stack.ss_sp);
- stack.ss_sp = NULL;
- }
- }
-#endif
-#ifdef FAULTHANDLER_LATER
- thread.file = NULL;
- thread.cancel_event = PyThread_allocate_lock();
- thread.running = PyThread_allocate_lock();
- if (!thread.cancel_event || !thread.running) {
- return _PyStatus_ERR("failed to allocate locks for faulthandler");
- }
- PyThread_acquire_lock(thread.cancel_event, 1);
#endif
+ memset(&thread, 0, sizeof(thread));
+
if (enable) {
if (faulthandler_init_enable() < 0) {
return _PyStatus_ERR("failed to enable faulthandler");
@@ -1366,7 +1389,6 @@ _PyFaulthandler_Init(int enable)
void _PyFaulthandler_Fini(void)
{
-#ifdef FAULTHANDLER_LATER
/* later */
if (thread.cancel_event) {
cancel_dump_traceback_later();
@@ -1378,7 +1400,6 @@ void _PyFaulthandler_Fini(void)
PyThread_free_lock(thread.running);
thread.running = NULL;
}
-#endif
#ifdef FAULTHANDLER_USER
/* user */
@@ -1393,7 +1414,8 @@ void _PyFaulthandler_Fini(void)
/* fatal */
faulthandler_disable();
-#ifdef HAVE_SIGALTSTACK
+
+#ifdef FAULTHANDLER_USE_ALT_STACK
if (stack.ss_sp != NULL) {
/* Fetch the current alt stack */
stack_t current_stack;
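
The FAULTHANDLER_USE_ALT_STACK changes above rest on a standard POSIX pattern: reserve a separate signal stack with sigaltstack() and install the handler through sigaction() with SA_ONSTACK, so the handler can still run after the normal stack has overflowed. The following is a minimal standalone sketch of that pattern, independent of CPython; the unbounded recursion exists only to trigger the overflow, and error handling is trimmed.

    #include <signal.h>
    #include <stdlib.h>
    #include <string.h>
    #include <unistd.h>

    static void
    on_sigsegv(int signum)
    {
        (void)signum;
        /* Only async-signal-safe calls belong in a signal handler. */
        const char msg[] = "caught SIGSEGV on the alternate stack\n";
        write(STDERR_FILENO, msg, sizeof(msg) - 1);
        _exit(1);
    }

    static void
    recurse(void)
    {
        volatile char pad[1 << 14];   /* burn stack space on each call */
        pad[0] = 0;
        recurse();
    }

    int
    main(void)
    {
        /* 1. Give the process a dedicated signal stack (twice SIGSTKSZ,
              mirroring the margin used in the patch above). */
        stack_t ss;
        memset(&ss, 0, sizeof(ss));
        ss.ss_size = SIGSTKSZ * 2;
        ss.ss_sp = malloc(ss.ss_size);
        if (ss.ss_sp == NULL || sigaltstack(&ss, NULL) != 0) {
            return 1;
        }

        /* 2. Ask sigaction() to run the handler on that stack. */
        struct sigaction sa;
        memset(&sa, 0, sizeof(sa));
        sa.sa_handler = on_sigsegv;
        sa.sa_flags = SA_ONSTACK;
        sigemptyset(&sa.sa_mask);
        if (sigaction(SIGSEGV, &sa, NULL) != 0) {
            return 1;
        }

        /* 3. Overflow the regular stack; the handler still has room to run. */
        recurse();
        return 0;
    }
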
diff --git a/Modules/fcntlmodule.c b/Modules/fcntlmodule.c
index a7d21930..43f9b22f 100644
--- a/Modules/fcntlmodule.c
+++ b/Modules/fcntlmodule.c
@@ -513,12 +513,24 @@ all_ins(PyObject* m)
#ifdef F_SETLKW
if (PyModule_AddIntMacro(m, F_SETLKW)) return -1;
#endif
+#ifdef F_OFD_GETLK
+ if (PyModule_AddIntMacro(m, F_OFD_GETLK)) return -1;
+#endif
+#ifdef F_OFD_SETLK
+ if (PyModule_AddIntMacro(m, F_OFD_SETLK)) return -1;
+#endif
+#ifdef F_OFD_SETLKW
+ if (PyModule_AddIntMacro(m, F_OFD_SETLKW)) return -1;
+#endif
#ifdef F_GETOWN
if (PyModule_AddIntMacro(m, F_GETOWN)) return -1;
#endif
#ifdef F_SETOWN
if (PyModule_AddIntMacro(m, F_SETOWN)) return -1;
#endif
+#ifdef F_GETPATH
+ if (PyModule_AddIntMacro(m, F_GETPATH)) return -1;
+#endif
#ifdef F_GETSIG
if (PyModule_AddIntMacro(m, F_GETSIG)) return -1;
#endif
@@ -674,8 +686,10 @@ PyInit_fcntl(void)
return NULL;
/* Add some symbolic constants to the module */
- if (all_ins(m) < 0)
+ if (all_ins(m) < 0) {
+ Py_DECREF(m);
return NULL;
+ }
return m;
}
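
The new F_OFD_GETLK/F_OFD_SETLK/F_OFD_SETLKW constants above expose Linux open file description locks, which belong to the open file description rather than to the calling process: they are shared by dup()ed descriptors and only released when the last descriptor referring to that description is closed. Below is a small Linux-specific sketch of taking such a lock directly through fcntl(), guarded with the same #ifdef style as the module code; the path is illustrative.

    #define _GNU_SOURCE
    #include <fcntl.h>
    #include <stdio.h>
    #include <string.h>
    #include <unistd.h>

    int
    main(void)
    {
        int fd = open("/tmp/ofd-lock-demo", O_RDWR | O_CREAT, 0644);
        if (fd < 0) {
            perror("open");
            return 1;
        }

        struct flock fl;
        memset(&fl, 0, sizeof(fl));
        fl.l_type = F_WRLCK;      /* exclusive (write) lock */
        fl.l_whence = SEEK_SET;
        fl.l_start = 0;
        fl.l_len = 0;             /* 0 means: lock the whole file */
        fl.l_pid = 0;             /* must be 0 for OFD locks */

    #ifdef F_OFD_SETLK
        /* Non-blocking attempt; F_OFD_SETLKW would block until the lock is
           free, and F_OFD_GETLK only reports a conflicting lock if any. */
        if (fcntl(fd, F_OFD_SETLK, &fl) == -1) {
            perror("fcntl(F_OFD_SETLK)");
            close(fd);
            return 1;
        }
    #endif

        close(fd);   /* last close of the description drops the lock */
        return 0;
    }
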
diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c
index 5a6a81d8..56dcb101 100644
--- a/Modules/gcmodule.c
+++ b/Modules/gcmodule.c
@@ -25,19 +25,25 @@
#include "Python.h"
#include "pycore_context.h"
+#include "pycore_initconfig.h"
+#include "pycore_interp.h" // PyInterpreterState.gc
#include "pycore_object.h"
-#include "pycore_pymem.h"
-#include "pycore_pystate.h"
-#include "frameobject.h" /* for PyFrame_ClearFreeList */
+#include "pycore_pyerrors.h"
+#include "pycore_pystate.h" // _PyThreadState_GET()
#include "pydtrace.h"
-#include "pytime.h" /* for _PyTime_GetMonotonicClock() */
+#include "pytime.h" // _PyTime_GetMonotonicClock()
+
+typedef struct _gc_runtime_state GCState;
/*[clinic input]
module gc
[clinic start generated code]*/
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=b5c9690ecc842d79]*/
-#define GC_DEBUG (0) /* Enable more asserts */
+
+#ifdef Py_DEBUG
+# define GC_DEBUG
+#endif
#define GC_NEXT _PyGCHead_NEXT
#define GC_PREV _PyGCHead_PREV
@@ -111,9 +117,6 @@ gc_decref(PyGC_Head *g)
g->_gc_prev -= 1 << _PyGC_PREV_SHIFT;
}
-/* Python string to use if unhandled exception occurs */
-static PyObject *gc_str = NULL;
-
/* set for debugging information */
#define DEBUG_STATS (1<<0) /* print collection statistics */
#define DEBUG_COLLECTABLE (1<<1) /* print collectable objects */
@@ -123,14 +126,14 @@ static PyObject *gc_str = NULL;
DEBUG_UNCOLLECTABLE | \
DEBUG_SAVEALL
-#define GEN_HEAD(state, n) (&(state)->generations[n].head)
+#define GEN_HEAD(gcstate, n) (&(gcstate)->generations[n].head)
void
-_PyGC_Initialize(struct _gc_runtime_state *state)
+_PyGC_InitState(GCState *gcstate)
{
- state->enabled = 1; /* automatic collection enabled? */
+ gcstate->enabled = 1; /* automatic collection enabled? */
-#define _GEN_HEAD(n) GEN_HEAD(state, n)
+#define _GEN_HEAD(n) GEN_HEAD(gcstate, n)
struct gc_generation generations[NUM_GENERATIONS] = {
/* PyGC_Head, threshold, count */
{{(uintptr_t)_GEN_HEAD(0), (uintptr_t)_GEN_HEAD(0)}, 700, 0},
@@ -138,16 +141,31 @@ _PyGC_Initialize(struct _gc_runtime_state *state)
{{(uintptr_t)_GEN_HEAD(2), (uintptr_t)_GEN_HEAD(2)}, 10, 0},
};
for (int i = 0; i < NUM_GENERATIONS; i++) {
- state->generations[i] = generations[i];
+ gcstate->generations[i] = generations[i];
};
- state->generation0 = GEN_HEAD(state, 0);
+ gcstate->generation0 = GEN_HEAD(gcstate, 0);
struct gc_generation permanent_generation = {
- {(uintptr_t)&state->permanent_generation.head,
- (uintptr_t)&state->permanent_generation.head}, 0, 0
+ {(uintptr_t)&gcstate->permanent_generation.head,
+ (uintptr_t)&gcstate->permanent_generation.head}, 0, 0
};
- state->permanent_generation = permanent_generation;
+ gcstate->permanent_generation = permanent_generation;
}
+
+PyStatus
+_PyGC_Init(PyThreadState *tstate)
+{
+ GCState *gcstate = &tstate->interp->gc;
+ if (gcstate->garbage == NULL) {
+ gcstate->garbage = PyList_New(0);
+ if (gcstate->garbage == NULL) {
+ return _PyStatus_NO_MEMORY();
+ }
+ }
+ return _PyStatus_OK();
+}
+
+
/*
_gc_prev values
---------------
@@ -298,8 +316,18 @@ gc_list_size(PyGC_Head *list)
return n;
}
+/* Walk the list and mark all objects as non-collecting */
+static inline void
+gc_list_clear_collecting(PyGC_Head *collectable)
+{
+ PyGC_Head *gc;
+ for (gc = GC_NEXT(collectable); gc != collectable; gc = GC_NEXT(gc)) {
+ gc_clear_collecting(gc);
+ }
+}
+
/* Append objects in a GC list to a Python list.
- * Return 0 if all OK, < 0 if error (out of memory for list).
+ * Return 0 if all OK, < 0 if error (out of memory for list)
*/
static int
append_objects(PyObject *py_list, PyGC_Head *gc_list)
@@ -316,25 +344,60 @@ append_objects(PyObject *py_list, PyGC_Head *gc_list)
return 0;
}
-#if GC_DEBUG
+// Constants for validate_list's flags argument.
+enum flagstates {collecting_clear_unreachable_clear,
+ collecting_clear_unreachable_set,
+ collecting_set_unreachable_clear,
+ collecting_set_unreachable_set};
+
+#ifdef GC_DEBUG
// validate_list checks list consistency. And it works as document
-// describing when expected_mask is set / unset.
+// describing when flags are expected to be set / unset.
+// `head` must be a doubly-linked gc list, although it's fine (expected!) if
+// the prev and next pointers are "polluted" with flags.
+// What's checked:
+// - The `head` pointers are not polluted.
+// - The objects' PREV_MASK_COLLECTING and NEXT_MASK_UNREACHABLE flags are all
+// set or clear, as specified by the 'flags' argument.
+// - The prev and next pointers are mutually consistent.
static void
-validate_list(PyGC_Head *head, uintptr_t expected_mask)
+validate_list(PyGC_Head *head, enum flagstates flags)
{
+ assert((head->_gc_prev & PREV_MASK_COLLECTING) == 0);
+ assert((head->_gc_next & NEXT_MASK_UNREACHABLE) == 0);
+ uintptr_t prev_value = 0, next_value = 0;
+ switch (flags) {
+ case collecting_clear_unreachable_clear:
+ break;
+ case collecting_set_unreachable_clear:
+ prev_value = PREV_MASK_COLLECTING;
+ break;
+ case collecting_clear_unreachable_set:
+ next_value = NEXT_MASK_UNREACHABLE;
+ break;
+ case collecting_set_unreachable_set:
+ prev_value = PREV_MASK_COLLECTING;
+ next_value = NEXT_MASK_UNREACHABLE;
+ break;
+ default:
+ assert(! "bad internal flags argument");
+ }
PyGC_Head *prev = head;
PyGC_Head *gc = GC_NEXT(head);
while (gc != head) {
- assert(GC_NEXT(gc) != NULL);
- assert(GC_PREV(gc) == prev);
- assert((gc->_gc_prev & PREV_MASK_COLLECTING) == expected_mask);
+ PyGC_Head *trueprev = GC_PREV(gc);
+ PyGC_Head *truenext = (PyGC_Head *)(gc->_gc_next & ~NEXT_MASK_UNREACHABLE);
+ assert(truenext != NULL);
+ assert(trueprev == prev);
+ assert((gc->_gc_prev & PREV_MASK_COLLECTING) == prev_value);
+ assert((gc->_gc_next & NEXT_MASK_UNREACHABLE) == next_value);
prev = gc;
- gc = GC_NEXT(gc);
+ gc = truenext;
}
assert(prev == GC_PREV(head));
}
#else
-#define validate_list(x,y) do{}while(0)
+#define validate_list(x, y) do{}while(0)
#endif
/*** end of list stuff ***/
@@ -377,7 +440,7 @@ visit_decref(PyObject *op, void *parent)
{
_PyObject_ASSERT(_PyObject_CAST(parent), !_PyObject_IsFreed(op));
- if (PyObject_IS_GC(op)) {
+ if (_PyObject_IS_GC(op)) {
PyGC_Head *gc = AS_GC(op);
/* We're only interested in gc_refs for objects in the
* generation being collected, which can be recognized
@@ -413,17 +476,23 @@ subtract_refs(PyGC_Head *containers)
static int
visit_reachable(PyObject *op, PyGC_Head *reachable)
{
- if (!PyObject_IS_GC(op)) {
+ if (!_PyObject_IS_GC(op)) {
return 0;
}
PyGC_Head *gc = AS_GC(op);
const Py_ssize_t gc_refs = gc_get_refs(gc);
- // Ignore untracked objects and objects in other generation.
- if (gc->_gc_next == 0 || !gc_is_collecting(gc)) {
+ // Ignore objects in other generation.
+ // This also skips objects "to the left" of the current position in
+ // move_unreachable's scan of the 'young' list - they've already been
+ // traversed, and no longer have the PREV_MASK_COLLECTING flag.
+ if (! gc_is_collecting(gc)) {
return 0;
}
+ // It would be a logic error elsewhere if the collecting flag were set on
+ // an untracked object.
+ assert(gc->_gc_next != 0);
if (gc->_gc_next & NEXT_MASK_UNREACHABLE) {
/* This had gc_refs = 0 when move_unreachable got
@@ -432,7 +501,8 @@ visit_reachable(PyObject *op, PyGC_Head *reachable)
* and move_unreachable will eventually get to it
* again.
*/
- // Manually unlink gc from unreachable list because
+ // Manually unlink gc from unreachable list because the list functions
+ // don't work right in the presence of NEXT_MASK_UNREACHABLE flags.
PyGC_Head *prev = GC_PREV(gc);
PyGC_Head *next = (PyGC_Head*)(gc->_gc_next & ~NEXT_MASK_UNREACHABLE);
_PyObject_ASSERT(FROM_GC(prev),
@@ -533,8 +603,9 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable)
PyGC_Head *last = GC_PREV(unreachable);
// NOTE: Since all objects in unreachable set has
// NEXT_MASK_UNREACHABLE flag, we set it unconditionally.
- // But this may set the flat to unreachable too.
- // move_legacy_finalizers() should care about it.
+ // But this may pollute the unreachable list head's 'next' pointer
+ // too. That's semantically senseless but expedient here - the
+ // damage is repaired when this function ends.
last->_gc_next = (NEXT_MASK_UNREACHABLE | (uintptr_t)gc);
_PyGCHead_SET_PREV(gc, last);
gc->_gc_next = (NEXT_MASK_UNREACHABLE | (uintptr_t)unreachable);
@@ -544,6 +615,8 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable)
}
// young->_gc_prev must be last element remained in the list.
young->_gc_prev = (uintptr_t)prev;
+ // don't let the pollution of the list head's next pointer leak
+ unreachable->_gc_next &= ~NEXT_MASK_UNREACHABLE;
}
static void
@@ -579,7 +652,7 @@ untrack_dicts(PyGC_Head *head)
static int
has_legacy_finalizer(PyObject *op)
{
- return op->ob_type->tp_del != NULL;
+ return Py_TYPE(op)->tp_del != NULL;
}
/* Move the objects in unreachable with tp_del slots into `finalizers`.
@@ -591,7 +664,7 @@ static void
move_legacy_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers)
{
PyGC_Head *gc, *next;
- unreachable->_gc_next &= ~NEXT_MASK_UNREACHABLE;
+ assert((unreachable->_gc_next & NEXT_MASK_UNREACHABLE) == 0);
/* March over unreachable. Move objects with finalizers into
* `finalizers`.
@@ -610,11 +683,27 @@ move_legacy_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers)
}
}
+static inline void
+clear_unreachable_mask(PyGC_Head *unreachable)
+{
+ /* Check that the list head does not have the unreachable bit set */
+ assert(((uintptr_t)unreachable & NEXT_MASK_UNREACHABLE) == 0);
+
+ PyGC_Head *gc, *next;
+ assert((unreachable->_gc_next & NEXT_MASK_UNREACHABLE) == 0);
+ for (gc = GC_NEXT(unreachable); gc != unreachable; gc = next) {
+ _PyObject_ASSERT((PyObject*)FROM_GC(gc), gc->_gc_next & NEXT_MASK_UNREACHABLE);
+ gc->_gc_next &= ~NEXT_MASK_UNREACHABLE;
+ next = (PyGC_Head*)gc->_gc_next;
+ }
+ validate_list(unreachable, collecting_set_unreachable_clear);
+}
+
/* A traversal callback for move_legacy_finalizer_reachable. */
static int
visit_move(PyObject *op, PyGC_Head *tolist)
{
- if (PyObject_IS_GC(op)) {
+ if (_PyObject_IS_GC(op)) {
PyGC_Head *gc = AS_GC(op);
if (gc_is_collecting(gc)) {
gc_list_move(gc, tolist);
@@ -698,7 +787,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
/* It supports weakrefs. Does it have any? */
wrlist = (PyWeakReference **)
- PyObject_GET_WEAKREFS_LISTPTR(op);
+ _PyObject_GET_WEAKREFS_LISTPTR(op);
/* `op` may have some weakrefs. March over the list, clear
* all the weakrefs, and move the weakrefs with callbacks
@@ -782,7 +871,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
_PyObject_ASSERT(op, callback != NULL);
/* copy-paste of weakrefobject.c's handle_callback() */
- temp = PyObject_CallFunctionObjArgs(callback, wr, NULL);
+ temp = PyObject_CallOneArg(callback, (PyObject *)wr);
if (temp == NULL)
PyErr_WriteUnraisable(callback);
else
@@ -827,23 +916,20 @@ debug_cycle(const char *msg, PyObject *op)
* merged into the old list regardless.
*/
static void
-handle_legacy_finalizers(struct _gc_runtime_state *state,
+handle_legacy_finalizers(PyThreadState *tstate,
+ GCState *gcstate,
PyGC_Head *finalizers, PyGC_Head *old)
{
- assert(!PyErr_Occurred());
+ assert(!_PyErr_Occurred(tstate));
+ assert(gcstate->garbage != NULL);
PyGC_Head *gc = GC_NEXT(finalizers);
- if (state->garbage == NULL) {
- state->garbage = PyList_New(0);
- if (state->garbage == NULL)
- Py_FatalError("gc couldn't create gc.garbage list");
- }
for (; gc != finalizers; gc = GC_NEXT(gc)) {
PyObject *op = FROM_GC(gc);
- if ((state->debug & DEBUG_SAVEALL) || has_legacy_finalizer(op)) {
- if (PyList_Append(state->garbage, op) < 0) {
- PyErr_Clear();
+ if ((gcstate->debug & DEBUG_SAVEALL) || has_legacy_finalizer(op)) {
+ if (PyList_Append(gcstate->garbage, op) < 0) {
+ _PyErr_Clear(tstate);
break;
}
}
@@ -857,7 +943,7 @@ handle_legacy_finalizers(struct _gc_runtime_state *state,
* list, due to refcounts falling to 0.
*/
static void
-finalize_garbage(PyGC_Head *collectable)
+finalize_garbage(PyThreadState *tstate, PyGC_Head *collectable)
{
destructor finalize;
PyGC_Head seen;
@@ -881,52 +967,22 @@ finalize_garbage(PyGC_Head *collectable)
_PyGCHead_SET_FINALIZED(gc);
Py_INCREF(op);
finalize(op);
- assert(!PyErr_Occurred());
+ assert(!_PyErr_Occurred(tstate));
Py_DECREF(op);
}
}
gc_list_merge(&seen, collectable);
}
-/* Walk the collectable list and check that they are really unreachable
- from the outside (some objects could have been resurrected by a
- finalizer). */
-static int
-check_garbage(PyGC_Head *collectable)
-{
- int ret = 0;
- PyGC_Head *gc;
- for (gc = GC_NEXT(collectable); gc != collectable; gc = GC_NEXT(gc)) {
- // Use gc_refs and break gc_prev again.
- gc_set_refs(gc, Py_REFCNT(FROM_GC(gc)));
- _PyObject_ASSERT(FROM_GC(gc), gc_get_refs(gc) != 0);
- }
- subtract_refs(collectable);
- PyGC_Head *prev = collectable;
- for (gc = GC_NEXT(collectable); gc != collectable; gc = GC_NEXT(gc)) {
- _PyObject_ASSERT_WITH_MSG(FROM_GC(gc),
- gc_get_refs(gc) >= 0,
- "refcount is too small");
- if (gc_get_refs(gc) != 0) {
- ret = -1;
- }
- // Restore gc_prev here.
- _PyGCHead_SET_PREV(gc, prev);
- gc_clear_collecting(gc);
- prev = gc;
- }
- return ret;
-}
-
/* Break reference cycles by clearing the containers involved. This is
* tricky business as the lists can be changing and we don't know which
* objects may be freed. It is possible I screwed something up here.
*/
static void
-delete_garbage(struct _gc_runtime_state *state,
+delete_garbage(PyThreadState *tstate, GCState *gcstate,
PyGC_Head *collectable, PyGC_Head *old)
{
- assert(!PyErr_Occurred());
+ assert(!_PyErr_Occurred(tstate));
while (!gc_list_is_empty(collectable)) {
PyGC_Head *gc = GC_NEXT(collectable);
@@ -935,10 +991,10 @@ delete_garbage(struct _gc_runtime_state *state,
_PyObject_ASSERT_WITH_MSG(op, Py_REFCNT(op) > 0,
"refcount is too small");
- if (state->debug & DEBUG_SAVEALL) {
- assert(state->garbage != NULL);
- if (PyList_Append(state->garbage, op) < 0) {
- PyErr_Clear();
+ if (gcstate->debug & DEBUG_SAVEALL) {
+ assert(gcstate->garbage != NULL);
+ if (PyList_Append(gcstate->garbage, op) < 0) {
+ _PyErr_Clear(tstate);
}
}
else {
@@ -946,7 +1002,7 @@ delete_garbage(struct _gc_runtime_state *state,
if ((clear = Py_TYPE(op)->tp_clear) != NULL) {
Py_INCREF(op);
(void) clear(op);
- if (PyErr_Occurred()) {
+ if (_PyErr_Occurred(tstate)) {
_PyErr_WriteUnraisableMsg("in tp_clear of",
(PyObject*)Py_TYPE(op));
}
@@ -955,6 +1011,7 @@ delete_garbage(struct _gc_runtime_state *state,
}
if (GC_NEXT(collectable) == gc) {
/* object is still alive, move it, it may die later */
+ gc_clear_collecting(gc);
gc_list_move(gc, old);
}
}
@@ -968,22 +1025,18 @@ delete_garbage(struct _gc_runtime_state *state,
static void
clear_freelists(void)
{
- (void)PyMethod_ClearFreeList();
- (void)PyFrame_ClearFreeList();
- (void)PyCFunction_ClearFreeList();
- (void)PyTuple_ClearFreeList();
- (void)PyUnicode_ClearFreeList();
- (void)PyFloat_ClearFreeList();
- (void)PyList_ClearFreeList();
- (void)PyDict_ClearFreeList();
- (void)PySet_ClearFreeList();
- (void)PyAsyncGen_ClearFreeLists();
- (void)PyContext_ClearFreeList();
+ _PyFrame_ClearFreeList();
+ _PyTuple_ClearFreeList();
+ _PyFloat_ClearFreeList();
+ _PyList_ClearFreeList();
+ _PyDict_ClearFreeList();
+ _PyAsyncGen_ClearFreeLists();
+ _PyContext_ClearFreeList();
}
-// Show stats for objects in each gennerations.
+// Show stats for objects in each generation
static void
-show_stats_each_generations(struct _gc_runtime_state *state)
+show_stats_each_generations(GCState *gcstate)
{
char buf[100];
size_t pos = 0;
@@ -991,19 +1044,130 @@ show_stats_each_generations(struct _gc_runtime_state *state)
for (int i = 0; i < NUM_GENERATIONS && pos < sizeof(buf); i++) {
pos += PyOS_snprintf(buf+pos, sizeof(buf)-pos,
" %"PY_FORMAT_SIZE_T"d",
- gc_list_size(GEN_HEAD(state, i)));
+ gc_list_size(GEN_HEAD(gcstate, i)));
}
PySys_FormatStderr(
"gc: objects in each generation:%s\n"
"gc: objects in permanent generation: %zd\n",
- buf, gc_list_size(&state->permanent_generation.head));
+ buf, gc_list_size(&gcstate->permanent_generation.head));
+}
+
+/* Deduce which objects among "base" are unreachable from outside the list
+ and move them to 'unreachable'. The process consists of the following steps:
+
+1. Copy all reference counts to a different field (gc_prev is used to hold
+ this copy to save memory).
+2. Traverse all objects in "base" and visit all referred objects using
+ "tp_traverse" and for every visited object, subtract 1 to the reference
+ count (the one that we copied in the previous step). After this step, all
+ objects that can be reached directly from outside must have strictly positive
+ reference count, while all unreachable objects must have a count of exactly 0.
+3. Identify all unreachable objects (the ones with 0 reference count) and move
+ them to the "unreachable" list. This step also needs to move back to "base" all
+ objects that were initially marked as unreachable but are referred transitively
+ by the reachable objects (the ones with strictly positive reference count).
+
+Contracts:
+
+ * The "base" has to be a valid list with no mask set.
+
+ * The "unreachable" list must be uninitialized (this function calls
+ gc_list_init over 'unreachable').
+
+IMPORTANT: This function leaves 'unreachable' with the NEXT_MASK_UNREACHABLE
+flag set but it does not clear it to skip unnecessary iteration. Before the
+flag is cleared (for example, by using 'clear_unreachable_mask' function or
+by a call to 'move_legacy_finalizers'), the 'unreachable' list is not a normal
+list and we can not use most gc_list_* functions for it. */
+static inline void
+deduce_unreachable(PyGC_Head *base, PyGC_Head *unreachable) {
+ validate_list(base, collecting_clear_unreachable_clear);
+ /* Using ob_refcnt and gc_refs, calculate which objects in the
+ * container set are reachable from outside the set (i.e., have a
+ * refcount greater than 0 when all the references within the
+ * set are taken into account).
+ */
+ update_refs(base); // gc_prev is used for gc_refs
+ subtract_refs(base);
+
+ /* Leave everything reachable from outside base in base, and move
+ * everything else (in base) to unreachable.
+ *
+ * NOTE: This used to move the reachable objects into a reachable
+ * set instead. But most things usually turn out to be reachable,
+ * so it's more efficient to move the unreachable things. It "sounds slick"
+ * to move the unreachable objects, until you think about it - the reason it
+ * pays isn't actually obvious.
+ *
+ * Suppose we create objects A, B, C in that order. They appear in the young
+ * generation in the same order. If B points to A, and C to B, and C is
+ * reachable from outside, then the adjusted refcounts will be 0, 0, and 1
+ * respectively.
+ *
+ * When move_unreachable finds A, A is moved to the unreachable list. The
+ * same for B when it's first encountered. Then C is traversed, B is moved
+ * _back_ to the reachable list. B is eventually traversed, and then A is
+ * moved back to the reachable list.
+ *
+ * So instead of not moving at all, the reachable objects B and A are moved
+ * twice each. Why is this a win? A straightforward algorithm to move the
+ * reachable objects instead would move A, B, and C once each.
+ *
+ * The key is that this dance leaves the objects in order C, B, A - it's
+ * reversed from the original order. On all _subsequent_ scans, none of
+ * them will move. Since most objects aren't in cycles, this can save an
+ * unbounded number of moves across an unbounded number of later collections.
+ * It can cost more only the first time the chain is scanned.
+ *
+ * Drawback: move_unreachable is also used to find out what's still trash
+ * after finalizers may resurrect objects. In _that_ case most unreachable
+ * objects will remain unreachable, so it would be more efficient to move
+ * the reachable objects instead. But this is a one-time cost, probably not
+ * worth complicating the code to speed just a little.
+ */
+ gc_list_init(unreachable);
+ move_unreachable(base, unreachable); // gc_prev is pointer again
+ validate_list(base, collecting_clear_unreachable_clear);
+ validate_list(unreachable, collecting_set_unreachable_set);
+}
+
+/* Handle objects that may have resurrected after a call to 'finalize_garbage', moving
+ them to 'old_generation' and placing the rest on 'still_unreachable'.
+
+ Contracts:
+ * After this function 'unreachable' must not be used anymore and 'still_unreachable'
+ will contain the objects that did not resurrect.
+
+ * The "still_unreachable" list must be uninitialized (this function calls
+ gc_list_init over 'still_unreachable').
+
+IMPORTANT: After a call to this function, the 'still_unreachable' set will have the
+PREV_MASK_COLLECTING flag set, but the objects in this set are going to be removed so
+we can skip the expense of clearing the flag to avoid extra iteration. */
+static inline void
+handle_resurrected_objects(PyGC_Head *unreachable, PyGC_Head* still_unreachable,
+ PyGC_Head *old_generation)
+{
+ // Remove the PREV_MASK_COLLECTING from unreachable
+ // to prepare it for a new call to 'deduce_unreachable'
+ gc_list_clear_collecting(unreachable);
+
+ // After the call to deduce_unreachable, the 'still_unreachable' set will
+ // have the PREV_MASK_COLLECTING flag set, but the objects are going to be
+ // removed so we can skip the expense of clearing the flag.
+ PyGC_Head* resurrected = unreachable;
+ deduce_unreachable(resurrected, still_unreachable);
+ clear_unreachable_mask(still_unreachable);
+
+ // Move the resurrected objects to the old generation for future collection.
+ gc_list_merge(resurrected, old_generation);
}
/* This is the main function. Read this to understand how the
* collection process works. */
static Py_ssize_t
-collect(struct _gc_runtime_state *state, int generation,
+collect(PyThreadState *tstate, int generation,
Py_ssize_t *n_collected, Py_ssize_t *n_uncollectable, int nofail)
{
int i;
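
The deduce_unreachable() comment above boils the cycle detector down to: copy each refcount into a scratch field, subtract one for every reference that originates inside the candidate set, and treat whatever stays positive as anchored from outside. The toy model below walks steps 1 and 2 on the A/B/C example from that comment; the types are made up for illustration and have nothing to do with CPython's PyGC_Head machinery.

    #include <stdio.h>

    /* Toy object: a real refcount, a scratch copy, and at most one
       outgoing reference into the set (index of the target, or -1). */
    typedef struct {
        const char *name;
        int refcnt;    /* external + internal references */
        int gc_refs;   /* scratch copy, decremented per internal reference */
        int ref;       /* index of the referenced object, -1 if none */
    } ToyObj;

    int
    main(void)
    {
        /* B -> A, C -> B, and C also holds one reference from outside. */
        ToyObj objs[3] = {
            { "A", 1, 0, -1 },   /* referenced by B */
            { "B", 1, 0,  0 },   /* referenced by C, references A */
            { "C", 1, 0,  1 },   /* referenced from outside, references B */
        };

        /* Step 1 (update_refs): copy refcounts into the scratch field. */
        for (int i = 0; i < 3; i++) {
            objs[i].gc_refs = objs[i].refcnt;
        }

        /* Step 2 (subtract_refs): every reference that starts inside the
           set cancels one unit of its target's scratch count. */
        for (int i = 0; i < 3; i++) {
            if (objs[i].ref >= 0) {
                objs[objs[i].ref].gc_refs--;
            }
        }

        /* A and B end up at 0 and C at 1 -- the 0, 0, 1 of the comment.
           Step 3 (move_unreachable) would then rescue B and A again,
           because both are transitively reachable from C. */
        for (int i = 0; i < 3; i++) {
            printf("%s: gc_refs=%d\n", objs[i].name, objs[i].gc_refs);
        }
        return 0;
    }
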
@@ -1015,10 +1179,11 @@ collect(struct _gc_runtime_state *state, int generation,
PyGC_Head finalizers; /* objects with, & reachable from, __del__ */
PyGC_Head *gc;
_PyTime_t t1 = 0; /* initialize to prevent a compiler warning */
+ GCState *gcstate = &tstate->interp->gc;
- if (state->debug & DEBUG_STATS) {
+ if (gcstate->debug & DEBUG_STATS) {
PySys_WriteStderr("gc: collecting generation %d...\n", generation);
- show_stats_each_generations(state);
+ show_stats_each_generations(gcstate);
t1 = _PyTime_GetMonotonicClock();
}
@@ -1027,56 +1192,39 @@ collect(struct _gc_runtime_state *state, int generation,
/* update collection and allocation counters */
if (generation+1 < NUM_GENERATIONS)
- state->generations[generation+1].count += 1;
+ gcstate->generations[generation+1].count += 1;
for (i = 0; i <= generation; i++)
- state->generations[i].count = 0;
+ gcstate->generations[i].count = 0;
/* merge younger generations with one we are currently collecting */
for (i = 0; i < generation; i++) {
- gc_list_merge(GEN_HEAD(state, i), GEN_HEAD(state, generation));
+ gc_list_merge(GEN_HEAD(gcstate, i), GEN_HEAD(gcstate, generation));
}
/* handy references */
- young = GEN_HEAD(state, generation);
+ young = GEN_HEAD(gcstate, generation);
if (generation < NUM_GENERATIONS-1)
- old = GEN_HEAD(state, generation+1);
+ old = GEN_HEAD(gcstate, generation+1);
else
old = young;
+ validate_list(old, collecting_clear_unreachable_clear);
- validate_list(young, 0);
- validate_list(old, 0);
- /* Using ob_refcnt and gc_refs, calculate which objects in the
- * container set are reachable from outside the set (i.e., have a
- * refcount greater than 0 when all the references within the
- * set are taken into account).
- */
- update_refs(young); // gc_prev is used for gc_refs
- subtract_refs(young);
-
- /* Leave everything reachable from outside young in young, and move
- * everything else (in young) to unreachable.
- * NOTE: This used to move the reachable objects into a reachable
- * set instead. But most things usually turn out to be reachable,
- * so it's more efficient to move the unreachable things.
- */
- gc_list_init(&unreachable);
- move_unreachable(young, &unreachable); // gc_prev is pointer again
- validate_list(young, 0);
+ deduce_unreachable(young, &unreachable);
untrack_tuples(young);
/* Move reachable objects to next generation. */
if (young != old) {
if (generation == NUM_GENERATIONS - 2) {
- state->long_lived_pending += gc_list_size(young);
+ gcstate->long_lived_pending += gc_list_size(young);
}
gc_list_merge(young, old);
}
else {
- /* We only untrack dicts in full collections, to avoid quadratic
+ /* We only un-track dicts in full collections, to avoid quadratic
dict build-up. See issue #14775. */
untrack_dicts(young);
- state->long_lived_pending = 0;
- state->long_lived_total = gc_list_size(young);
+ gcstate->long_lived_pending = 0;
+ gcstate->long_lived_total = gc_list_size(young);
}
/* All objects in unreachable are trash, but objects reachable from
@@ -1092,11 +1240,11 @@ collect(struct _gc_runtime_state *state, int generation,
*/
move_legacy_finalizer_reachable(&finalizers);
- validate_list(&finalizers, 0);
- validate_list(&unreachable, PREV_MASK_COLLECTING);
+ validate_list(&finalizers, collecting_clear_unreachable_clear);
+ validate_list(&unreachable, collecting_set_unreachable_clear);
/* Print debugging information. */
- if (state->debug & DEBUG_COLLECTABLE) {
+ if (gcstate->debug & DEBUG_COLLECTABLE) {
for (gc = GC_NEXT(&unreachable); gc != &unreachable; gc = GC_NEXT(gc)) {
debug_cycle("collectable", FROM_GC(gc));
}
@@ -1105,32 +1253,33 @@ collect(struct _gc_runtime_state *state, int generation,
/* Clear weakrefs and invoke callbacks as necessary. */
m += handle_weakrefs(&unreachable, old);
- validate_list(old, 0);
- validate_list(&unreachable, PREV_MASK_COLLECTING);
+ validate_list(old, collecting_clear_unreachable_clear);
+ validate_list(&unreachable, collecting_set_unreachable_clear);
/* Call tp_finalize on objects which have one. */
- finalize_garbage(&unreachable);
+ finalize_garbage(tstate, &unreachable);
- if (check_garbage(&unreachable)) { // clear PREV_MASK_COLLECTING here
- gc_list_merge(&unreachable, old);
- }
- else {
- /* Call tp_clear on objects in the unreachable set. This will cause
- * the reference cycles to be broken. It may also cause some objects
- * in finalizers to be freed.
- */
- m += gc_list_size(&unreachable);
- delete_garbage(state, &unreachable, old);
- }
+ /* Handle any objects that may have resurrected after the call
+ * to 'finalize_garbage' and continue the collection with the
+ * objects that are still unreachable */
+ PyGC_Head final_unreachable;
+ handle_resurrected_objects(&unreachable, &final_unreachable, old);
+
+ /* Call tp_clear on objects in the final_unreachable set. This will cause
+ * the reference cycles to be broken. It may also cause some objects
+ * in finalizers to be freed.
+ */
+ m += gc_list_size(&final_unreachable);
+ delete_garbage(tstate, gcstate, &final_unreachable, old);
/* Collect statistics on uncollectable objects found and print
* debugging information. */
for (gc = GC_NEXT(&finalizers); gc != &finalizers; gc = GC_NEXT(gc)) {
n++;
- if (state->debug & DEBUG_UNCOLLECTABLE)
+ if (gcstate->debug & DEBUG_UNCOLLECTABLE)
debug_cycle("uncollectable", FROM_GC(gc));
}
- if (state->debug & DEBUG_STATS) {
+ if (gcstate->debug & DEBUG_STATS) {
double d = _PyTime_AsSecondsDouble(_PyTime_GetMonotonicClock() - t1);
PySys_WriteStderr(
"gc: done, %" PY_FORMAT_SIZE_T "d unreachable, "
@@ -1142,8 +1291,8 @@ collect(struct _gc_runtime_state *state, int generation,
* reachable list of garbage. The programmer has to deal with
* this if they insist on creating this type of structure.
*/
- handle_legacy_finalizers(state, &finalizers, old);
- validate_list(old, 0);
+ handle_legacy_finalizers(tstate, gcstate, &finalizers, old);
+ validate_list(old, collecting_clear_unreachable_clear);
/* Clear free list only during the collection of the highest
* generation */
@@ -1151,15 +1300,12 @@ collect(struct _gc_runtime_state *state, int generation,
clear_freelists();
}
- if (PyErr_Occurred()) {
+ if (_PyErr_Occurred(tstate)) {
if (nofail) {
- PyErr_Clear();
+ _PyErr_Clear(tstate);
}
else {
- if (gc_str == NULL)
- gc_str = PyUnicode_FromString("garbage collection");
- PyErr_WriteUnraisable(gc_str);
- Py_FatalError("unexpected exception during garbage collection");
+ _PyErr_WriteUnraisableMsg("in garbage collection", NULL);
}
}
@@ -1171,38 +1317,39 @@ collect(struct _gc_runtime_state *state, int generation,
*n_uncollectable = n;
}
- struct gc_generation_stats *stats = &state->generation_stats[generation];
+ struct gc_generation_stats *stats = &gcstate->generation_stats[generation];
stats->collections++;
stats->collected += m;
stats->uncollectable += n;
if (PyDTrace_GC_DONE_ENABLED()) {
- PyDTrace_GC_DONE(n+m);
+ PyDTrace_GC_DONE(n + m);
}
- assert(!PyErr_Occurred());
- return n+m;
+ assert(!_PyErr_Occurred(tstate));
+ return n + m;
}
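For context on the new resurrection step above: an object is "resurrected" when its tp_finalize callback makes it reachable again, and handle_resurrected_objects() is what filters such objects (plus anything they keep alive) back out of the unreachable set before tp_clear is called. A minimal sketch of such a finalizer, assuming a hypothetical module-level keep_alive slot (illustration only, not part of the patch):

static PyObject *keep_alive = NULL;   /* hypothetical global strong reference */

static void
demo_finalize(PyObject *self)
{
    /* Take a new reference from outside the garbage cycle: after this runs,
       the object is reachable again and must not have tp_clear called on it. */
    Py_INCREF(self);
    Py_XDECREF(keep_alive);
    keep_alive = self;
}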
/* Invoke progress callbacks to notify clients that garbage collection
* is starting or stopping
*/
static void
-invoke_gc_callback(struct _gc_runtime_state *state, const char *phase,
+invoke_gc_callback(PyThreadState *tstate, const char *phase,
int generation, Py_ssize_t collected,
Py_ssize_t uncollectable)
{
- assert(!PyErr_Occurred());
+ assert(!_PyErr_Occurred(tstate));
/* we may get called very early */
- if (state->callbacks == NULL) {
+ GCState *gcstate = &tstate->interp->gc;
+ if (gcstate->callbacks == NULL) {
return;
}
/* The local variable cannot be rebound, check it for sanity */
- assert(PyList_CheckExact(state->callbacks));
+ assert(PyList_CheckExact(gcstate->callbacks));
PyObject *info = NULL;
- if (PyList_GET_SIZE(state->callbacks) != 0) {
+ if (PyList_GET_SIZE(gcstate->callbacks) != 0) {
info = Py_BuildValue("{sisnsn}",
"generation", generation,
"collected", collected,
@@ -1212,8 +1359,8 @@ invoke_gc_callback(struct _gc_runtime_state *state, const char *phase,
return;
}
}
- for (Py_ssize_t i=0; i<PyList_GET_SIZE(state->callbacks); i++) {
- PyObject *r, *cb = PyList_GET_ITEM(state->callbacks, i);
+ for (Py_ssize_t i=0; i<PyList_GET_SIZE(gcstate->callbacks); i++) {
+ PyObject *r, *cb = PyList_GET_ITEM(gcstate->callbacks, i);
Py_INCREF(cb); /* make sure cb doesn't go away */
r = PyObject_CallFunction(cb, "sO", phase, info);
if (r == NULL) {
@@ -1225,41 +1372,74 @@ invoke_gc_callback(struct _gc_runtime_state *state, const char *phase,
Py_DECREF(cb);
}
Py_XDECREF(info);
- assert(!PyErr_Occurred());
+ assert(!_PyErr_Occurred(tstate));
}
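In other words, each registered callback is invoked as cb(phase, info): phase is the C string "start" or "stop", and info is the dict built by the "{sisnsn}" format above, with an int "generation" entry and Py_ssize_t "collected" and "uncollectable" entries.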
/* Perform garbage collection of a generation and invoke
* progress callbacks.
*/
static Py_ssize_t
-collect_with_callback(struct _gc_runtime_state *state, int generation)
+collect_with_callback(PyThreadState *tstate, int generation)
{
- assert(!PyErr_Occurred());
+ assert(!_PyErr_Occurred(tstate));
Py_ssize_t result, collected, uncollectable;
- invoke_gc_callback(state, "start", generation, 0, 0);
- result = collect(state, generation, &collected, &uncollectable, 0);
- invoke_gc_callback(state, "stop", generation, collected, uncollectable);
- assert(!PyErr_Occurred());
+ invoke_gc_callback(tstate, "start", generation, 0, 0);
+ result = collect(tstate, generation, &collected, &uncollectable, 0);
+ invoke_gc_callback(tstate, "stop", generation, collected, uncollectable);
+ assert(!_PyErr_Occurred(tstate));
return result;
}
static Py_ssize_t
-collect_generations(struct _gc_runtime_state *state)
+collect_generations(PyThreadState *tstate)
{
+ GCState *gcstate = &tstate->interp->gc;
/* Find the oldest generation (highest numbered) where the count
* exceeds the threshold. Objects in that generation and
* generations younger than it will be collected. */
Py_ssize_t n = 0;
for (int i = NUM_GENERATIONS-1; i >= 0; i--) {
- if (state->generations[i].count > state->generations[i].threshold) {
+ if (gcstate->generations[i].count > gcstate->generations[i].threshold) {
/* Avoid quadratic performance degradation in number
- of tracked objects. See comments at the beginning
- of this file, and issue #4074.
+ of tracked objects (see also issue #4074):
+
+ To limit the cost of garbage collection, there are two strategies:
+ - make each collection faster, e.g. by scanning fewer objects
+ - do fewer collections
+ This heuristic is about the latter strategy.
+
+ In addition to the various configurable thresholds, we only trigger a
+ full collection if the ratio
+
+ long_lived_pending / long_lived_total
+
+ is above a given value (hardwired to 25%).
+
+ The reason is that, while "non-full" collections (i.e., collections of
+ the young and middle generations) will always examine roughly the same
+ number of objects -- determined by the aforementioned thresholds --,
+ the cost of a full collection is proportional to the total number of
+ long-lived objects, which is virtually unbounded.
+
+ Indeed, it has been remarked that doing a full collection every
+ <constant number> of object creations entails a dramatic performance
+ degradation in workloads which consist in creating and storing lots of
+ long-lived objects (e.g. building a large list of GC-tracked objects would
+ show quadratic performance, instead of linear as expected: see issue #4074).
+
+ Using the above ratio, instead, yields amortized linear performance in
+ the total number of objects (the effect of which can be summarized
+ thusly: "each full garbage collection is more and more costly as the
+ number of objects grows, but we do fewer and fewer of them").
+
+ This heuristic was suggested by Martin von Löwis on python-dev in
+ June 2008. His original analysis and proposal can be found at:
+ http://mail.python.org/pipermail/python-dev/2008-June/080579.html
*/
if (i == NUM_GENERATIONS - 1
- && state->long_lived_pending < state->long_lived_total / 4)
+ && gcstate->long_lived_pending < gcstate->long_lived_total / 4)
continue;
- n = collect_with_callback(state, i);
+ n = collect_with_callback(tstate, i);
break;
}
}
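To make the 25% rule described in the comment above concrete, here is the same gate restated as a standalone predicate; the helper name is hypothetical and merely mirrors the long_lived_pending < long_lived_total / 4 test in the loop (a sketch, not part of the patch):

/* With, say, long_lived_total == 100000 objects surviving in the oldest
   generation, a full collection is attempted only once at least 25000 newly
   promoted objects (long_lived_pending) are waiting, so rescanning the whole
   long-lived set is amortized over the objects added since the last full pass. */
static int
full_collection_due(GCState *gcstate)
{
    return gcstate->long_lived_pending >= gcstate->long_lived_total / 4;
}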
@@ -1278,7 +1458,9 @@ static PyObject *
gc_enable_impl(PyObject *module)
/*[clinic end generated code: output=45a427e9dce9155c input=81ac4940ca579707]*/
{
- _PyRuntime.gc.enabled = 1;
+ PyThreadState *tstate = _PyThreadState_GET();
+ GCState *gcstate = &tstate->interp->gc;
+ gcstate->enabled = 1;
Py_RETURN_NONE;
}
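The same two-line pattern recurs in the remaining hunks: instead of reaching into the process-wide _PyRuntime.gc, every entry point now resolves the GC state owned by the current interpreter (names exactly as used in the patch):

PyThreadState *tstate = _PyThreadState_GET();   /* current thread state */
GCState *gcstate = &tstate->interp->gc;         /* per-interpreter GC state */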
@@ -1292,7 +1474,9 @@ static PyObject *
gc_disable_impl(PyObject *module)
/*[clinic end generated code: output=97d1030f7aa9d279 input=8c2e5a14e800d83b]*/
{
- _PyRuntime.gc.enabled = 0;
+ PyThreadState *tstate = _PyThreadState_GET();
+ GCState *gcstate = &tstate->interp->gc;
+ gcstate->enabled = 0;
Py_RETURN_NONE;
}
@@ -1306,7 +1490,9 @@ static int
gc_isenabled_impl(PyObject *module)
/*[clinic end generated code: output=1874298331c49130 input=30005e0422373b31]*/
{
- return _PyRuntime.gc.enabled;
+ PyThreadState *tstate = _PyThreadState_GET();
+ GCState *gcstate = &tstate->interp->gc;
+ return gcstate->enabled;
}
/*[clinic input]
@@ -1327,22 +1513,23 @@ static Py_ssize_t
gc_collect_impl(PyObject *module, int generation)
/*[clinic end generated code: output=b697e633043233c7 input=40720128b682d879]*/
{
+ PyThreadState *tstate = _PyThreadState_GET();
if (generation < 0 || generation >= NUM_GENERATIONS) {
- PyErr_SetString(PyExc_ValueError, "invalid generation");
+ _PyErr_SetString(tstate, PyExc_ValueError, "invalid generation");
return -1;
}
- struct _gc_runtime_state *state = &_PyRuntime.gc;
+ GCState *gcstate = &tstate->interp->gc;
Py_ssize_t n;
- if (state->collecting) {
+ if (gcstate->collecting) {
/* already collecting, don't do anything */
n = 0;
}
else {
- state->collecting = 1;
- n = collect_with_callback(state, generation);
- state->collecting = 0;
+ gcstate->collecting = 1;
+ n = collect_with_callback(tstate, generation);
+ gcstate->collecting = 0;
}
return n;
}
@@ -1369,8 +1556,9 @@ static PyObject *
gc_set_debug_impl(PyObject *module, int flags)
/*[clinic end generated code: output=7c8366575486b228 input=5e5ce15e84fbed15]*/
{
- _PyRuntime.gc.debug = flags;
-
+ PyThreadState *tstate = _PyThreadState_GET();
+ GCState *gcstate = &tstate->interp->gc;
+ gcstate->debug = flags;
Py_RETURN_NONE;
}
@@ -1384,7 +1572,9 @@ static int
gc_get_debug_impl(PyObject *module)
/*[clinic end generated code: output=91242f3506cd1e50 input=91a101e1c3b98366]*/
{
- return _PyRuntime.gc.debug;
+ PyThreadState *tstate = _PyThreadState_GET();
+ GCState *gcstate = &tstate->interp->gc;
+ return gcstate->debug;
}
PyDoc_STRVAR(gc_set_thresh__doc__,
@@ -1396,15 +1586,16 @@ PyDoc_STRVAR(gc_set_thresh__doc__,
static PyObject *
gc_set_threshold(PyObject *self, PyObject *args)
{
- struct _gc_runtime_state *state = &_PyRuntime.gc;
+ PyThreadState *tstate = _PyThreadState_GET();
+ GCState *gcstate = &tstate->interp->gc;
if (!PyArg_ParseTuple(args, "i|ii:set_threshold",
- &state->generations[0].threshold,
- &state->generations[1].threshold,
- &state->generations[2].threshold))
+ &gcstate->generations[0].threshold,
+ &gcstate->generations[1].threshold,
+ &gcstate->generations[2].threshold))
return NULL;
for (int i = 3; i < NUM_GENERATIONS; i++) {
/* generations higher than 2 get the same threshold */
- state->generations[i].threshold = state->generations[2].threshold;
+ gcstate->generations[i].threshold = gcstate->generations[2].threshold;
}
Py_RETURN_NONE;
}
@@ -1419,11 +1610,12 @@ static PyObject *
gc_get_threshold_impl(PyObject *module)
/*[clinic end generated code: output=7902bc9f41ecbbd8 input=286d79918034d6e6]*/
{
- struct _gc_runtime_state *state = &_PyRuntime.gc;
+ PyThreadState *tstate = _PyThreadState_GET();
+ GCState *gcstate = &tstate->interp->gc;
return Py_BuildValue("(iii)",
- state->generations[0].threshold,
- state->generations[1].threshold,
- state->generations[2].threshold);
+ gcstate->generations[0].threshold,
+ gcstate->generations[1].threshold,
+ gcstate->generations[2].threshold);
}
/*[clinic input]
@@ -1436,11 +1628,12 @@ static PyObject *
gc_get_count_impl(PyObject *module)
/*[clinic end generated code: output=354012e67b16398f input=a392794a08251751]*/
{
- struct _gc_runtime_state *state = &_PyRuntime.gc;
+ PyThreadState *tstate = _PyThreadState_GET();
+ GCState *gcstate = &tstate->interp->gc;
return Py_BuildValue("(iii)",
- state->generations[0].count,
- state->generations[1].count,
- state->generations[2].count);
+ gcstate->generations[0].count,
+ gcstate->generations[1].count,
+ gcstate->generations[2].count);
}
static int
@@ -1479,13 +1672,16 @@ Return the list of objects that directly refer to any of objs.");
static PyObject *
gc_get_referrers(PyObject *self, PyObject *args)
{
+ PyThreadState *tstate = _PyThreadState_GET();
int i;
PyObject *result = PyList_New(0);
- if (!result) return NULL;
+ if (!result) {
+ return NULL;
+ }
- struct _gc_runtime_state *state = &_PyRuntime.gc;
+ GCState *gcstate = &tstate->interp->gc;
for (i = 0; i < NUM_GENERATIONS; i++) {
- if (!(gc_referrers_for(args, GEN_HEAD(state, i), result))) {
+ if (!(gc_referrers_for(args, GEN_HEAD(gcstate, i), result))) {
Py_DECREF(result);
return NULL;
}
@@ -1517,7 +1713,7 @@ gc_get_referents(PyObject *self, PyObject *args)
traverseproc traverse;
PyObject *obj = PyTuple_GET_ITEM(args, i);
- if (! PyObject_IS_GC(obj))
+ if (!_PyObject_IS_GC(obj))
continue;
traverse = Py_TYPE(obj)->tp_traverse;
if (! traverse)
@@ -1545,9 +1741,10 @@ static PyObject *
gc_get_objects_impl(PyObject *module, Py_ssize_t generation)
/*[clinic end generated code: output=48b35fea4ba6cb0e input=ef7da9df9806754c]*/
{
+ PyThreadState *tstate = _PyThreadState_GET();
int i;
PyObject* result;
- struct _gc_runtime_state *state = &_PyRuntime.gc;
+ GCState *gcstate = &tstate->interp->gc;
result = PyList_New(0);
if (result == NULL) {
@@ -1557,20 +1754,20 @@ gc_get_objects_impl(PyObject *module, Py_ssize_t generation)
/* If generation is passed, we extract only that generation */
if (generation != -1) {
if (generation >= NUM_GENERATIONS) {
- PyErr_Format(PyExc_ValueError,
- "generation parameter must be less than the number of "
- "available generations (%i)",
- NUM_GENERATIONS);
+ _PyErr_Format(tstate, PyExc_ValueError,
+ "generation parameter must be less than the number of "
+ "available generations (%i)",
+ NUM_GENERATIONS);
goto error;
}
if (generation < 0) {
- PyErr_SetString(PyExc_ValueError,
- "generation parameter cannot be negative");
+ _PyErr_SetString(tstate, PyExc_ValueError,
+ "generation parameter cannot be negative");
goto error;
}
- if (append_objects(result, GEN_HEAD(state, generation))) {
+ if (append_objects(result, GEN_HEAD(gcstate, generation))) {
goto error;
}
@@ -1579,7 +1776,7 @@ gc_get_objects_impl(PyObject *module, Py_ssize_t generation)
/* If generation is not passed or None, get all objects from all generations */
for (i = 0; i < NUM_GENERATIONS; i++) {
- if (append_objects(result, GEN_HEAD(state, i))) {
+ if (append_objects(result, GEN_HEAD(gcstate, i))) {
goto error;
}
}
@@ -1602,12 +1799,13 @@ gc_get_stats_impl(PyObject *module)
{
int i;
struct gc_generation_stats stats[NUM_GENERATIONS], *st;
+ PyThreadState *tstate = _PyThreadState_GET();
/* To get consistent values despite allocations while constructing
the result list, we use a snapshot of the running stats. */
- struct _gc_runtime_state *state = &_PyRuntime.gc;
+ GCState *gcstate = &tstate->interp->gc;
for (i = 0; i < NUM_GENERATIONS; i++) {
- stats[i] = state->generation_stats[i];
+ stats[i] = gcstate->generation_stats[i];
}
PyObject *result = PyList_New(0);
@@ -1655,7 +1853,7 @@ gc_is_tracked(PyObject *module, PyObject *obj)
{
PyObject *result;
- if (PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj))
+ if (_PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj))
result = Py_True;
else
result = Py_False;
@@ -1663,6 +1861,25 @@ gc_is_tracked(PyObject *module, PyObject *obj)
return result;
}
+/*[clinic input]
+gc.is_finalized
+
+ obj: object
+ /
+
+Returns true if the object has been already finalized by the GC.
+[clinic start generated code]*/
+
+static PyObject *
+gc_is_finalized(PyObject *module, PyObject *obj)
+/*[clinic end generated code: output=e1516ac119a918ed input=201d0c58f69ae390]*/
+{
+ if (_PyObject_IS_GC(obj) && _PyGCHead_FINALIZED(AS_GC(obj))) {
+ Py_RETURN_TRUE;
+ }
+ Py_RETURN_FALSE;
+}
+
/*[clinic input]
gc.freeze
@@ -1677,10 +1894,11 @@ static PyObject *
gc_freeze_impl(PyObject *module)
/*[clinic end generated code: output=502159d9cdc4c139 input=b602b16ac5febbe5]*/
{
- struct _gc_runtime_state *state = &_PyRuntime.gc;
+ PyThreadState *tstate = _PyThreadState_GET();
+ GCState *gcstate = &tstate->interp->gc;
for (int i = 0; i < NUM_GENERATIONS; ++i) {
- gc_list_merge(GEN_HEAD(state, i), &state->permanent_generation.head);
- state->generations[i].count = 0;
+ gc_list_merge(GEN_HEAD(gcstate, i), &gcstate->permanent_generation.head);
+ gcstate->generations[i].count = 0;
}
Py_RETURN_NONE;
}
@@ -1697,8 +1915,10 @@ static PyObject *
gc_unfreeze_impl(PyObject *module)
/*[clinic end generated code: output=1c15f2043b25e169 input=2dd52b170f4cef6c]*/
{
- struct _gc_runtime_state *state = &_PyRuntime.gc;
- gc_list_merge(&state->permanent_generation.head, GEN_HEAD(state, NUM_GENERATIONS-1));
+ PyThreadState *tstate = _PyThreadState_GET();
+ GCState *gcstate = &tstate->interp->gc;
+ gc_list_merge(&gcstate->permanent_generation.head,
+ GEN_HEAD(gcstate, NUM_GENERATIONS-1));
Py_RETURN_NONE;
}
@@ -1712,7 +1932,9 @@ static Py_ssize_t
gc_get_freeze_count_impl(PyObject *module)
/*[clinic end generated code: output=61cbd9f43aa032e1 input=45ffbc65cfe2a6ed]*/
{
- return gc_list_size(&_PyRuntime.gc.permanent_generation.head);
+ PyThreadState *tstate = _PyThreadState_GET();
+ GCState *gcstate = &tstate->interp->gc;
+ return gc_list_size(&gcstate->permanent_generation.head);
}
@@ -1731,6 +1953,7 @@ PyDoc_STRVAR(gc__doc__,
"get_threshold() -- Return the current the collection thresholds.\n"
"get_objects() -- Return a list of all objects tracked by the collector.\n"
"is_tracked() -- Returns true if a given object is tracked.\n"
+"is_finalized() -- Returns true if a given object has been already finalized.\n"
"get_referrers() -- Return the list of objects that refer to an object.\n"
"get_referents() -- Return the list of objects that an object refers to.\n"
"freeze() -- Freeze all tracked objects and ignore them for future collections.\n"
@@ -1750,6 +1973,7 @@ static PyMethodDef GcMethods[] = {
GC_GET_OBJECTS_METHODDEF
GC_GET_STATS_METHODDEF
GC_IS_TRACKED_METHODDEF
+ GC_IS_FINALIZED_METHODDEF
{"get_referrers", gc_get_referrers, METH_VARARGS,
gc_get_referrers__doc__},
{"get_referents", gc_get_referents, METH_VARARGS,
@@ -1775,34 +1999,38 @@ static struct PyModuleDef gcmodule = {
PyMODINIT_FUNC
PyInit_gc(void)
{
- PyObject *m;
+ PyThreadState *tstate = _PyThreadState_GET();
+ GCState *gcstate = &tstate->interp->gc;
- m = PyModule_Create(&gcmodule);
+ PyObject *m = PyModule_Create(&gcmodule);
if (m == NULL) {
return NULL;
}
- struct _gc_runtime_state *state = &_PyRuntime.gc;
- if (state->garbage == NULL) {
- state->garbage = PyList_New(0);
- if (state->garbage == NULL)
+ if (gcstate->garbage == NULL) {
+ gcstate->garbage = PyList_New(0);
+ if (gcstate->garbage == NULL) {
return NULL;
+ }
}
- Py_INCREF(state->garbage);
- if (PyModule_AddObject(m, "garbage", state->garbage) < 0)
+ Py_INCREF(gcstate->garbage);
+ if (PyModule_AddObject(m, "garbage", gcstate->garbage) < 0) {
return NULL;
+ }
- if (state->callbacks == NULL) {
- state->callbacks = PyList_New(0);
- if (state->callbacks == NULL)
+ if (gcstate->callbacks == NULL) {
+ gcstate->callbacks = PyList_New(0);
+ if (gcstate->callbacks == NULL) {
return NULL;
+ }
}
- Py_INCREF(state->callbacks);
- if (PyModule_AddObject(m, "callbacks", state->callbacks) < 0)
+ Py_INCREF(gcstate->callbacks);
+ if (PyModule_AddObject(m, "callbacks", gcstate->callbacks) < 0) {
return NULL;
+ }
-#define ADD_INT(NAME) if (PyModule_AddIntConstant(m, #NAME, NAME) < 0) return NULL
+#define ADD_INT(NAME) if (PyModule_AddIntConstant(m, #NAME, NAME) < 0) { return NULL; }
ADD_INT(DEBUG_STATS);
ADD_INT(DEBUG_COLLECTABLE);
ADD_INT(DEBUG_UNCOLLECTABLE);
@@ -1816,23 +2044,25 @@ PyInit_gc(void)
Py_ssize_t
PyGC_Collect(void)
{
- struct _gc_runtime_state *state = &_PyRuntime.gc;
- if (!state->enabled) {
+ PyThreadState *tstate = _PyThreadState_GET();
+ GCState *gcstate = &tstate->interp->gc;
+
+ if (!gcstate->enabled) {
return 0;
}
Py_ssize_t n;
- if (state->collecting) {
+ if (gcstate->collecting) {
/* already collecting, don't do anything */
n = 0;
}
else {
PyObject *exc, *value, *tb;
- state->collecting = 1;
- PyErr_Fetch(&exc, &value, &tb);
- n = collect_with_callback(state, NUM_GENERATIONS - 1);
- PyErr_Restore(exc, value, tb);
- state->collecting = 0;
+ gcstate->collecting = 1;
+ _PyErr_Fetch(tstate, &exc, &value, &tb);
+ n = collect_with_callback(tstate, NUM_GENERATIONS - 1);
+ _PyErr_Restore(tstate, exc, value, tb);
+ gcstate->collecting = 0;
}
return n;
@@ -1847,9 +2077,10 @@ _PyGC_CollectIfEnabled(void)
Py_ssize_t
_PyGC_CollectNoFail(void)
{
- assert(!PyErr_Occurred());
+ PyThreadState *tstate = _PyThreadState_GET();
+ assert(!_PyErr_Occurred(tstate));
- struct _gc_runtime_state *state = &_PyRuntime.gc;
+ GCState *gcstate = &tstate->interp->gc;
Py_ssize_t n;
/* Ideally, this function is only called on interpreter shutdown,
@@ -1858,25 +2089,25 @@ _PyGC_CollectNoFail(void)
during interpreter shutdown (and then never finish it).
See http://bugs.python.org/issue8713#msg195178 for an example.
*/
- if (state->collecting) {
+ if (gcstate->collecting) {
n = 0;
}
else {
- state->collecting = 1;
- n = collect(state, NUM_GENERATIONS - 1, NULL, NULL, 1);
- state->collecting = 0;
+ gcstate->collecting = 1;
+ n = collect(tstate, NUM_GENERATIONS - 1, NULL, NULL, 1);
+ gcstate->collecting = 0;
}
return n;
}
void
-_PyGC_DumpShutdownStats(_PyRuntimeState *runtime)
+_PyGC_DumpShutdownStats(PyThreadState *tstate)
{
- struct _gc_runtime_state *state = &runtime->gc;
- if (!(state->debug & DEBUG_SAVEALL)
- && state->garbage != NULL && PyList_GET_SIZE(state->garbage) > 0) {
+ GCState *gcstate = &tstate->interp->gc;
+ if (!(gcstate->debug & DEBUG_SAVEALL)
+ && gcstate->garbage != NULL && PyList_GET_SIZE(gcstate->garbage) > 0) {
const char *message;
- if (state->debug & DEBUG_UNCOLLECTABLE)
+ if (gcstate->debug & DEBUG_UNCOLLECTABLE)
message = "gc: %zd uncollectable objects at " \
"shutdown";
else
@@ -1887,13 +2118,13 @@ _PyGC_DumpShutdownStats(_PyRuntimeState *runtime)
already. */
if (PyErr_WarnExplicitFormat(PyExc_ResourceWarning, "gc", 0,
"gc", NULL, message,
- PyList_GET_SIZE(state->garbage)))
+ PyList_GET_SIZE(gcstate->garbage)))
PyErr_WriteUnraisable(NULL);
- if (state->debug & DEBUG_UNCOLLECTABLE) {
+ if (gcstate->debug & DEBUG_UNCOLLECTABLE) {
PyObject *repr = NULL, *bytes = NULL;
- repr = PyObject_Repr(state->garbage);
+ repr = PyObject_Repr(gcstate->garbage);
if (!repr || !(bytes = PyUnicode_EncodeFSDefault(repr)))
- PyErr_WriteUnraisable(state->garbage);
+ PyErr_WriteUnraisable(gcstate->garbage);
else {
PySys_WriteStderr(
" %s\n",
@@ -1907,11 +2138,11 @@ _PyGC_DumpShutdownStats(_PyRuntimeState *runtime)
}
void
-_PyGC_Fini(_PyRuntimeState *runtime)
+_PyGC_Fini(PyThreadState *tstate)
{
- struct _gc_runtime_state *state = &runtime->gc;
- Py_CLEAR(state->garbage);
- Py_CLEAR(state->callbacks);
+ GCState *gcstate = &tstate->interp->gc;
+ Py_CLEAR(gcstate->garbage);
+ Py_CLEAR(gcstate->callbacks);
}
/* for debugging */
@@ -1921,6 +2152,21 @@ _PyGC_Dump(PyGC_Head *g)
_PyObject_Dump(FROM_GC(g));
}
+
+#ifdef Py_DEBUG
+static int
+visit_validate(PyObject *op, void *parent_raw)
+{
+ PyObject *parent = _PyObject_CAST(parent_raw);
+ if (_PyObject_IsFreed(op)) {
+ _PyObject_ASSERT_FAILED_MSG(parent,
+ "PyObject_GC_Track() object is not valid");
+ }
+ return 0;
+}
+#endif
+
+
/* extension modules might be compiled with GC support so these
functions must always be available */
@@ -1934,6 +2180,13 @@ PyObject_GC_Track(void *op_raw)
"by the garbage collector");
}
_PyObject_GC_TRACK(op);
+
+#ifdef Py_DEBUG
+ /* Check that the object is valid: validate objects traversed
+ by tp_traverse() */
+ traverseproc traverse = Py_TYPE(op)->tp_traverse;
+ (void)traverse(op, visit_validate, op);
+#endif
}
void
@@ -1948,36 +2201,48 @@ PyObject_GC_UnTrack(void *op_raw)
}
}
+int
+PyObject_IS_GC(PyObject *obj)
+{
+ return _PyObject_IS_GC(obj);
+}
+
static PyObject *
_PyObject_GC_Alloc(int use_calloc, size_t basicsize)
{
- struct _gc_runtime_state *state = &_PyRuntime.gc;
- PyObject *op;
+ PyThreadState *tstate = _PyThreadState_GET();
+ GCState *gcstate = &tstate->interp->gc;
+ if (basicsize > PY_SSIZE_T_MAX - sizeof(PyGC_Head)) {
+ return _PyErr_NoMemory(tstate);
+ }
+ size_t size = sizeof(PyGC_Head) + basicsize;
+
PyGC_Head *g;
- size_t size;
- if (basicsize > PY_SSIZE_T_MAX - sizeof(PyGC_Head))
- return PyErr_NoMemory();
- size = sizeof(PyGC_Head) + basicsize;
- if (use_calloc)
+ if (use_calloc) {
g = (PyGC_Head *)PyObject_Calloc(1, size);
- else
+ }
+ else {
g = (PyGC_Head *)PyObject_Malloc(size);
- if (g == NULL)
- return PyErr_NoMemory();
+ }
+ if (g == NULL) {
+ return _PyErr_NoMemory(tstate);
+ }
assert(((uintptr_t)g & 3) == 0); // g must be aligned 4bytes boundary
+
g->_gc_next = 0;
g->_gc_prev = 0;
- state->generations[0].count++; /* number of allocated GC objects */
- if (state->generations[0].count > state->generations[0].threshold &&
- state->enabled &&
- state->generations[0].threshold &&
- !state->collecting &&
- !PyErr_Occurred()) {
- state->collecting = 1;
- collect_generations(state);
- state->collecting = 0;
- }
- op = FROM_GC(g);
+ gcstate->generations[0].count++; /* number of allocated GC objects */
+ if (gcstate->generations[0].count > gcstate->generations[0].threshold &&
+ gcstate->enabled &&
+ gcstate->generations[0].threshold &&
+ !gcstate->collecting &&
+ !_PyErr_Occurred(tstate))
+ {
+ gcstate->collecting = 1;
+ collect_generations(tstate);
+ gcstate->collecting = 0;
+ }
+ PyObject *op = FROM_GC(g);
return op;
}
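As a reading aid (not part of the patch): _PyObject_GC_Alloc() returns one block in which the PyGC_Head header immediately precedes the object, and FROM_GC()/AS_GC() convert between the two views by stepping over sizeof(PyGC_Head):

/*   g --> [ PyGC_Head | PyObject ... ]
 *                       ^
 *                       FROM_GC(g), and AS_GC(op) goes the other way
 */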
@@ -2033,7 +2298,7 @@ _PyObject_GC_Resize(PyVarObject *op, Py_ssize_t nitems)
if (g == NULL)
return (PyVarObject *)PyErr_NoMemory();
op = (PyVarObject *) FROM_GC(g);
- Py_SIZE(op) = nitems;
+ Py_SET_SIZE(op, nitems);
return op;
}
@@ -2044,9 +2309,28 @@ PyObject_GC_Del(void *op)
if (_PyObject_GC_IS_TRACKED(op)) {
gc_list_remove(g);
}
- struct _gc_runtime_state *state = &_PyRuntime.gc;
- if (state->generations[0].count > 0) {
- state->generations[0].count--;
+ PyThreadState *tstate = _PyThreadState_GET();
+ GCState *gcstate = &tstate->interp->gc;
+ if (gcstate->generations[0].count > 0) {
+ gcstate->generations[0].count--;
}
PyObject_FREE(g);
}
+
+int
+PyObject_GC_IsTracked(PyObject* obj)
+{
+ if (_PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj)) {
+ return 1;
+ }
+ return 0;
+}
+
+int
+PyObject_GC_IsFinalized(PyObject *obj)
+{
+ if (_PyObject_IS_GC(obj) && _PyGCHead_FINALIZED(AS_GC(obj))) {
+ return 1;
+ }
+ return 0;
+}
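A minimal usage sketch of the two new public wrappers; the caller function, its object argument and the printf output are illustrative only:

#include <stdio.h>

static void
report_gc_status(PyObject *obj)
{
    if (PyObject_GC_IsTracked(obj)) {
        printf("tracked by the collector\n");
    }
    if (PyObject_GC_IsFinalized(obj)) {
        printf("tp_finalize has already run\n");
    }
}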
diff --git a/Modules/getpath.c b/Modules/getpath.c
index b727f669..a84c8586 100644
--- a/Modules/getpath.c
+++ b/Modules/getpath.c
@@ -1,11 +1,10 @@
/* Return the initial module search path. */
#include "Python.h"
-#include "pycore_initconfig.h"
-#include "osdefs.h"
#include "pycore_fileutils.h"
+#include "pycore_initconfig.h"
#include "pycore_pathconfig.h"
-#include "pycore_pystate.h"
+#include "osdefs.h" // DELIM
#include <sys/types.h>
#include <string.h>
@@ -95,7 +94,7 @@
* process to find the installed Python tree.
*
* An embedding application can use Py_SetPath() to override all of
- * these authomatic path computations.
+ * these automatic path computations.
*
* NOTE: Windows MSVC builds use PC/getpathp.c instead!
*/
@@ -105,14 +104,17 @@ extern "C" {
#endif
-#if !defined(PREFIX) || !defined(EXEC_PREFIX) || !defined(VERSION) || !defined(VPATH)
-#error "PREFIX, EXEC_PREFIX, VERSION, and VPATH must be constant defined"
+#if (!defined(PREFIX) || !defined(EXEC_PREFIX) \
+ || !defined(VERSION) || !defined(VPATH))
+#error "PREFIX, EXEC_PREFIX, VERSION and VPATH macros must be defined"
#endif
#ifndef LANDMARK
#define LANDMARK L"os.py"
#endif
+#define BUILD_LANDMARK L"Modules/Setup.local"
+
#define DECODE_LOCALE_ERR(NAME, LEN) \
((LEN) == (size_t)-2) \
? _PyStatus_ERR("cannot decode " NAME) \
@@ -123,17 +125,24 @@ extern "C" {
typedef struct {
wchar_t *path_env; /* PATH environment variable */
- wchar_t *pythonpath; /* PYTHONPATH macro */
- wchar_t *prefix; /* PREFIX macro */
- wchar_t *exec_prefix; /* EXEC_PREFIX macro */
+ wchar_t *pythonpath_macro; /* PYTHONPATH macro */
+ wchar_t *prefix_macro; /* PREFIX macro */
+ wchar_t *exec_prefix_macro; /* EXEC_PREFIX macro */
+ wchar_t *vpath_macro; /* VPATH macro */
- wchar_t *lib_python; /* "lib/pythonX.Y" */
+ wchar_t *lib_python; /* <platlibdir> / "pythonX.Y" */
int prefix_found; /* found platform independent libraries? */
int exec_prefix_found; /* found the platform dependent libraries? */
int warnings;
const wchar_t *pythonpath_env;
+ const wchar_t *platlibdir;
+
+ wchar_t *argv0_path;
+ wchar_t *zip_path;
+ wchar_t *prefix;
+ wchar_t *exec_prefix;
} PyCalculatePath;
static const wchar_t delimiter[2] = {DELIM, '\0'};
@@ -183,25 +192,6 @@ isfile(const wchar_t *filename)
}
-/* Is module -- check for .pyc too */
-static int
-ismodule(wchar_t *filename, size_t filename_len)
-{
- if (isfile(filename)) {
- return 1;
- }
-
- /* Check for the compiled version of prefix. */
- if (wcslen(filename) + 2 <= filename_len) {
- wcscat(filename, L"c");
- if (isfile(filename)) {
- return 1;
- }
- }
- return 0;
-}
-
-
/* Is executable file */
static int
isxfile(const wchar_t *filename)
@@ -222,7 +212,7 @@ isxfile(const wchar_t *filename)
/* Is directory */
static int
-isdir(wchar_t *filename)
+isdir(const wchar_t *filename)
{
struct stat buf;
if (_Py_wstat(filename, &buf) != 0) {
@@ -236,36 +226,83 @@ isdir(wchar_t *filename)
/* Add a path component, by appending stuff to buffer.
- buflen: 'buffer' length in characters including trailing NUL. */
+ buflen: 'buffer' length in characters including trailing NUL.
+
+ If path2 is empty:
+
+ - if path doesn't end with SEP and is not empty, add SEP to path
+ - otherwise, do nothing. */
static PyStatus
-joinpath(wchar_t *buffer, const wchar_t *stuff, size_t buflen)
+joinpath(wchar_t *path, const wchar_t *path2, size_t path_len)
{
- size_t n, k;
- if (stuff[0] != SEP) {
- n = wcslen(buffer);
- if (n >= buflen) {
+ size_t n;
+ if (!_Py_isabs(path2)) {
+ n = wcslen(path);
+ if (n >= path_len) {
return PATHLEN_ERR();
}
- if (n > 0 && buffer[n-1] != SEP) {
- buffer[n++] = SEP;
+ if (n > 0 && path[n-1] != SEP) {
+ path[n++] = SEP;
}
}
else {
n = 0;
}
- k = wcslen(stuff);
- if (n + k >= buflen) {
+ size_t k = wcslen(path2);
+ if (n + k >= path_len) {
return PATHLEN_ERR();
}
- wcsncpy(buffer+n, stuff, k);
- buffer[n+k] = '\0';
+ wcsncpy(path + n, path2, k);
+ path[n + k] = '\0';
return _PyStatus_OK();
}
+static wchar_t*
+substring(const wchar_t *str, size_t len)
+{
+ wchar_t *substr = PyMem_RawMalloc((len + 1) * sizeof(wchar_t));
+ if (substr == NULL) {
+ return NULL;
+ }
+
+ if (len) {
+ memcpy(substr, str, len * sizeof(wchar_t));
+ }
+ substr[len] = L'\0';
+ return substr;
+}
+
+
+static wchar_t*
+joinpath2(const wchar_t *path, const wchar_t *path2)
+{
+ if (_Py_isabs(path2)) {
+ return _PyMem_RawWcsdup(path2);
+ }
+
+ size_t len = wcslen(path);
+ int add_sep = (len > 0 && path[len - 1] != SEP);
+ len += add_sep;
+ len += wcslen(path2);
+
+ wchar_t *new_path = PyMem_RawMalloc((len + 1) * sizeof(wchar_t));
+ if (new_path == NULL) {
+ return NULL;
+ }
+
+ wcscpy(new_path, path);
+ if (add_sep) {
+ wcscat(new_path, separator);
+ }
+ wcscat(new_path, path2);
+ return new_path;
+}
+
+
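A usage sketch of the new heap-allocating join (the paths are examples, assuming a POSIX build where SEP is '/'); the caller owns the result and frees it with PyMem_RawFree():

/* joinpath2(L"/usr/local", L"lib")  -> newly allocated L"/usr/local/lib"
 * joinpath2(L"/usr/local", L"/opt") -> copy of L"/opt" (absolute path2 wins) */
wchar_t *p = joinpath2(L"/usr/local", L"lib");
if (p != NULL) {
    /* ... use p ... */
    PyMem_RawFree(p);
}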
static inline int
safe_wcscpy(wchar_t *dst, const wchar_t *src, size_t n)
{
@@ -280,27 +317,27 @@ safe_wcscpy(wchar_t *dst, const wchar_t *src, size_t n)
/* copy_absolute requires that path be allocated at least
- 'pathlen' characters (including trailing NUL). */
+ 'abs_path_len' characters (including trailing NUL). */
static PyStatus
-copy_absolute(wchar_t *path, const wchar_t *p, size_t pathlen)
+copy_absolute(wchar_t *abs_path, const wchar_t *path, size_t abs_path_len)
{
- if (p[0] == SEP) {
- if (safe_wcscpy(path, p, pathlen) < 0) {
+ if (_Py_isabs(path)) {
+ if (safe_wcscpy(abs_path, path, abs_path_len) < 0) {
return PATHLEN_ERR();
}
}
else {
- if (!_Py_wgetcwd(path, pathlen)) {
+ if (!_Py_wgetcwd(abs_path, abs_path_len)) {
/* unable to get the current directory */
- if (safe_wcscpy(path, p, pathlen) < 0) {
+ if (safe_wcscpy(abs_path, path, abs_path_len) < 0) {
return PATHLEN_ERR();
}
return _PyStatus_OK();
}
- if (p[0] == '.' && p[1] == SEP) {
- p += 2;
+ if (path[0] == '.' && path[1] == SEP) {
+ path += 2;
}
- PyStatus status = joinpath(path, p, pathlen);
+ PyStatus status = joinpath(abs_path, path, abs_path_len);
if (_PyStatus_EXCEPTION(status)) {
return status;
}
@@ -311,21 +348,58 @@ copy_absolute(wchar_t *path, const wchar_t *p, size_t pathlen)
/* path_len: path length in characters including trailing NUL */
static PyStatus
-absolutize(wchar_t *path, size_t path_len)
+absolutize(wchar_t **path_p)
{
- if (path[0] == SEP) {
- return _PyStatus_OK();
- }
+ assert(!_Py_isabs(*path_p));
wchar_t abs_path[MAXPATHLEN+1];
+ wchar_t *path = *path_p;
+
PyStatus status = copy_absolute(abs_path, path, Py_ARRAY_LENGTH(abs_path));
if (_PyStatus_EXCEPTION(status)) {
return status;
}
- if (safe_wcscpy(path, abs_path, path_len) < 0) {
- return PATHLEN_ERR();
+ PyMem_RawFree(*path_p);
+ *path_p = _PyMem_RawWcsdup(abs_path);
+ if (*path_p == NULL) {
+ return _PyStatus_NO_MEMORY();
+ }
+ return _PyStatus_OK();
+}
+
+
+/* Is module -- check for .pyc too */
+static PyStatus
+ismodule(const wchar_t *path, int *result)
+{
+ wchar_t *filename = joinpath2(path, LANDMARK);
+ if (filename == NULL) {
+ return _PyStatus_NO_MEMORY();
+ }
+
+ if (isfile(filename)) {
+ PyMem_RawFree(filename);
+ *result = 1;
+ return _PyStatus_OK();
}
+
+ /* Check for the compiled version of prefix. */
+ size_t len = wcslen(filename);
+ wchar_t *pyc = PyMem_RawMalloc((len + 2) * sizeof(wchar_t));
+ if (pyc == NULL) {
+ PyMem_RawFree(filename);
+ return _PyStatus_NO_MEMORY();
+ }
+
+ memcpy(pyc, filename, len * sizeof(wchar_t));
+ pyc[len] = L'c';
+ pyc[len + 1] = L'\0';
+ *result = isfile(pyc);
+
+ PyMem_RawFree(filename);
+ PyMem_RawFree(pyc);
+
return _PyStatus_OK();
}
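The reworked ismodule() now reports its answer through an out-parameter and checks both LANDMARK ("os.py") and the compiled "os.pyc" next to it; a hedged call sketch with an example directory:

int found = 0;
PyStatus status = ismodule(L"/usr/local/lib/python3.9", &found);
if (_PyStatus_EXCEPTION(status)) {
    /* the only possible failure here is out-of-memory */
}
/* found is non-zero if <dir>/os.py or <dir>/os.pyc exists */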
@@ -337,8 +411,10 @@ absolutize(wchar_t *path, size_t path_len)
/* pathlen: 'path' length in characters including trailing NUL */
static PyStatus
-add_exe_suffix(wchar_t *progpath, size_t progpathlen)
+add_exe_suffix(wchar_t **progpath_p)
{
+ wchar_t *progpath = *progpath_p;
+
/* Check for already have an executable suffix */
size_t n = wcslen(progpath);
size_t s = wcslen(EXE_SUFFIX);
@@ -346,17 +422,22 @@ add_exe_suffix(wchar_t *progpath, size_t progpathlen)
return _PyStatus_OK();
}
- if (n + s >= progpathlen) {
- return PATHLEN_ERR();
+ wchar_t *progpath2 = PyMem_RawMalloc((n + s + 1) * sizeof(wchar_t));
+ if (progpath2 == NULL) {
+ return _PyStatus_NO_MEMORY();
}
- wcsncpy(progpath + n, EXE_SUFFIX, s);
- progpath[n+s] = '\0';
- if (!isxfile(progpath)) {
- /* Path that added suffix is invalid: truncate (remove suffix) */
- progpath[n] = '\0';
- }
+ memcpy(progpath2, progpath, n * sizeof(wchar_t));
+ memcpy(progpath2 + n, EXE_SUFFIX, s * sizeof(wchar_t));
+ progpath2[n+s] = L'\0';
+ if (isxfile(progpath2)) {
+ PyMem_RawFree(*progpath_p);
+ *progpath_p = progpath2;
+ }
+ else {
+ PyMem_RawFree(progpath2);
+ }
return _PyStatus_OK();
}
#endif
@@ -367,13 +448,8 @@ add_exe_suffix(wchar_t *progpath, size_t progpathlen)
*/
static PyStatus
search_for_prefix(PyCalculatePath *calculate, _PyPathConfig *pathconfig,
- const wchar_t *argv0_path,
wchar_t *prefix, size_t prefix_len, int *found)
{
- wchar_t path[MAXPATHLEN+1];
- memset(path, 0, sizeof(path));
- size_t path_len = Py_ARRAY_LENGTH(path);
-
PyStatus status;
/* If PYTHONHOME is set, we believe it unconditionally */
@@ -394,49 +470,51 @@ search_for_prefix(PyCalculatePath *calculate, _PyPathConfig *pathconfig,
return _PyStatus_OK();
}
- /* Check to see if argv[0] is in the build directory */
- if (safe_wcscpy(path, argv0_path, path_len) < 0) {
- return PATHLEN_ERR();
- }
- status = joinpath(path, L"Modules/Setup.local", path_len);
- if (_PyStatus_EXCEPTION(status)) {
- return status;
+ /* Check to see if argv0_path is in the build directory
+
+ Path: <argv0_path> / <BUILD_LANDMARK define> */
+ wchar_t *path = joinpath2(calculate->argv0_path, BUILD_LANDMARK);
+ if (path == NULL) {
+ return _PyStatus_NO_MEMORY();
}
- if (isfile(path)) {
- /* Check VPATH to see if argv0_path is in the build directory.
- VPATH can be empty. */
- wchar_t *vpath = Py_DecodeLocale(VPATH, NULL);
- if (vpath != NULL) {
- /* Path: <argv0_path> / <vpath> / Lib / LANDMARK */
- if (safe_wcscpy(prefix, argv0_path, prefix_len) < 0) {
- return PATHLEN_ERR();
- }
- status = joinpath(prefix, vpath, prefix_len);
- PyMem_RawFree(vpath);
- if (_PyStatus_EXCEPTION(status)) {
- return status;
- }
+ int is_build_dir = isfile(path);
+ PyMem_RawFree(path);
- status = joinpath(prefix, L"Lib", prefix_len);
- if (_PyStatus_EXCEPTION(status)) {
- return status;
- }
- status = joinpath(prefix, LANDMARK, prefix_len);
- if (_PyStatus_EXCEPTION(status)) {
- return status;
- }
+ if (is_build_dir) {
+ /* argv0_path is the build directory (BUILD_LANDMARK exists),
+ now also check LANDMARK using ismodule(). */
- if (ismodule(prefix, prefix_len)) {
- *found = -1;
- reduce(prefix);
- return _PyStatus_OK();
- }
+ /* Path: <argv0_path> / <VPATH macro> / Lib */
+ /* or if VPATH is empty: <argv0_path> / Lib */
+ if (safe_wcscpy(prefix, calculate->argv0_path, prefix_len) < 0) {
+ return PATHLEN_ERR();
+ }
+
+ status = joinpath(prefix, calculate->vpath_macro, prefix_len);
+ if (_PyStatus_EXCEPTION(status)) {
+ return status;
+ }
+
+ status = joinpath(prefix, L"Lib", prefix_len);
+ if (_PyStatus_EXCEPTION(status)) {
+ return status;
+ }
+
+ int module;
+ status = ismodule(prefix, &module);
+ if (_PyStatus_EXCEPTION(status)) {
+ return status;
+ }
+ if (module) {
+ /* BUILD_LANDMARK and LANDMARK found */
+ *found = -1;
+ return _PyStatus_OK();
}
}
/* Search from argv0_path, until root is found */
- status = copy_absolute(prefix, argv0_path, prefix_len);
+ status = copy_absolute(prefix, calculate->argv0_path, prefix_len);
if (_PyStatus_EXCEPTION(status)) {
return status;
}
@@ -448,14 +526,14 @@ search_for_prefix(PyCalculatePath *calculate, _PyPathConfig *pathconfig,
if (_PyStatus_EXCEPTION(status)) {
return status;
}
- status = joinpath(prefix, LANDMARK, prefix_len);
+
+ int module;
+ status = ismodule(prefix, &module);
if (_PyStatus_EXCEPTION(status)) {
return status;
}
-
- if (ismodule(prefix, prefix_len)) {
+ if (module) {
*found = 1;
- reduce(prefix);
return _PyStatus_OK();
}
prefix[n] = L'\0';
@@ -464,21 +542,21 @@ search_for_prefix(PyCalculatePath *calculate, _PyPathConfig *pathconfig,
/* Look at configure's PREFIX.
Path: <PREFIX macro> / <lib_python> / LANDMARK */