Imported Upstream version 3.7.9 upstream/3.7.9
author    DongHun Kwak <dh0128.kwak@samsung.com>
          Tue, 8 Dec 2020 04:59:39 +0000 (13:59 +0900)
committer DongHun Kwak <dh0128.kwak@samsung.com>
          Tue, 8 Dec 2020 04:59:39 +0000 (13:59 +0900)
39 files changed:
.azure-pipelines/ci.yml [deleted file]
.azure-pipelines/docs-steps.yml [deleted file]
.azure-pipelines/macos-steps.yml [deleted file]
.azure-pipelines/posix-deps-apt.sh [deleted file]
.azure-pipelines/posix-steps.yml [deleted file]
.azure-pipelines/pr.yml [deleted file]
.azure-pipelines/prebuild-checks.yml [deleted file]
.azure-pipelines/windows-layout-steps.yml [deleted file]
.azure-pipelines/windows-steps.yml [deleted file]
Doc/library/gc.rst
Doc/whatsnew/3.7.rst
Include/patchlevel.h
Lib/contextlib.py
Lib/distutils/tests/test_build_ext.py
Lib/http/client.py
Lib/ipaddress.py
Lib/pydoc_data/topics.py
Lib/tarfile.py
Lib/test/pickletester.py
Lib/test/recursion.tar [new file with mode: 0644]
Lib/test/test_asyncgen.py
Lib/test/test_contextlib_async.py
Lib/test/test_httplib.py
Lib/test/test_ipaddress.py
Lib/test/test_platform.py
Lib/test/test_site.py
Lib/test/test_tarfile.py
Misc/NEWS
Modules/_pickle.c
Modules/_ssl.c
Modules/getpath.c
Objects/genobject.c
PC/getpathp.c
PCbuild/pyproject.props
PCbuild/python.props
Python/dynload_win.c
README.rst
configure
configure.ac

diff --git a/.azure-pipelines/ci.yml b/.azure-pipelines/ci.yml
deleted file mode 100644 (file)
index b9038b9..0000000
+++ /dev/null
@@ -1,164 +0,0 @@
-variables:
-  manylinux: false
-  coverage: false
-
-resources:
-  containers:
-  - container: manylinux1
-    image: pyca/cryptography-manylinux1:x86_64
-
-jobs:
-- job: Prebuild
-  displayName: Pre-build checks
-
-  pool:
-    vmImage: ubuntu-16.04
-
-  steps:
-  - template: ./prebuild-checks.yml
-
-
-- job: Docs_PR
-  displayName: Docs PR
-  dependsOn: Prebuild
-  condition: and(succeeded(), eq(dependencies.Prebuild.outputs['docs.run'], 'true'))
-
-  pool:
-    vmImage: ubuntu-16.04
-
-  steps:
-  - template: ./docs-steps.yml
-    parameters:
-      upload: true
-
-
-- job: macOS_CI_Tests
-  displayName: macOS CI Tests
-  dependsOn: Prebuild
-  #condition: and(succeeded(), eq(dependencies.Prebuild.outputs['tests.run'], 'true'))
-  # bpo-39837: macOS tests on Azure Pipelines are disabled
-  condition: false
-
-  variables:
-    testRunTitle: '$(build.sourceBranchName)-macos'
-    testRunPlatform: macos
-
-  pool:
-    vmImage: macos-10.14
-
-  steps:
-  - template: ./macos-steps.yml
-
-
-- job: Ubuntu_CI_Tests
-  displayName: Ubuntu CI Tests
-  dependsOn: Prebuild
-  condition: and(succeeded(), eq(dependencies.Prebuild.outputs['tests.run'], 'true'))
-
-  pool:
-    vmImage: ubuntu-16.04
-
-  variables:
-    testRunTitle: '$(build.sourceBranchName)-linux'
-    testRunPlatform: linux
-    openssl_version: 1.1.1g
-
-  steps:
-  - template: ./posix-steps.yml
-    parameters:
-      dependencies: apt
-
-
-- job: ManyLinux1_CI_Tests
-  displayName: ManyLinux1 CI Tests
-  dependsOn: Prebuild
-  condition: |
-    and(
-        and(
-            succeeded(),
-            eq(variables['manylinux'], 'true')
-        ),
-        eq(dependencies.Prebuild.outputs['tests.run'], 'true')
-    )
-
-  pool:
-    vmImage: ubuntu-16.04
-
-  container: manylinux1
-
-  variables:
-    testRunTitle: '$(build.sourceBranchName)-manylinux1'
-    testRunPlatform: manylinux1
-    openssl_version: ''
-
-  steps:
-  - template: ./posix-steps.yml
-    parameters:
-      dependencies: yum
-      sudo_dependencies: ''
-      xvfb: false
-      patchcheck: false
-
-
-- job: Ubuntu_Coverage_CI_Tests
-  displayName: Ubuntu CI Tests (coverage)
-  dependsOn: Prebuild
-  condition: |
-    and(
-        and(
-            succeeded(),
-            eq(variables['coverage'], 'true')
-        ),
-        eq(dependencies.Prebuild.outputs['tests.run'], 'true')
-    )
-
-  pool:
-    vmImage: ubuntu-16.04
-
-  variables:
-    testRunTitle: '$(Build.SourceBranchName)-linux-coverage'
-    testRunPlatform: linux-coverage
-    openssl_version: 1.1.1g
-
-  steps:
-  - template: ./posix-steps.yml
-    parameters:
-      dependencies: apt
-      coverage: true
-
-
-- job: Windows_CI_Tests
-  displayName: Windows CI Tests
-  dependsOn: Prebuild
-  condition: and(succeeded(), eq(dependencies.Prebuild.outputs['tests.run'], 'true'))
-
-  pool:
-    vmImage: vs2017-win2016
-
-  strategy:
-    matrix:
-      win32:
-        arch: win32
-        buildOpt:
-        testRunTitle: '$(Build.SourceBranchName)-win32'
-        testRunPlatform: win32
-      win64:
-        arch: amd64
-        buildOpt: '-p x64'
-        testRunTitle: '$(Build.SourceBranchName)-win64'
-        testRunPlatform: win64
-    maxParallel: 2
-
-  steps:
-  - template: ./windows-steps.yml
-
-  - template: ./windows-layout-steps.yml
-    parameters:
-      kind: nuget
-  - template: ./windows-layout-steps.yml
-    parameters:
-      kind: embed
-  - template: ./windows-layout-steps.yml
-    parameters:
-      kind: appx
-      fulltest: true
diff --git a/.azure-pipelines/docs-steps.yml b/.azure-pipelines/docs-steps.yml
deleted file mode 100644 (file)
index 492e4e3..0000000
+++ /dev/null
@@ -1,46 +0,0 @@
-parameters:
-  latex: false
-  upload: false
-
-steps:
-- checkout: self
-  clean: true
-  fetchDepth: 5
-
-- task: UsePythonVersion@0
-  displayName: 'Use Python 3.6 or later'
-  inputs:
-    versionSpec: '>=3.6'
-
-- script: python -m pip install sphinx==1.8.2 blurb python-docs-theme
-  displayName: 'Install build dependencies'
-
-- ${{ if ne(parameters.latex, 'true') }}:
-  - script: make check suspicious html PYTHON=python
-    workingDirectory: '$(build.sourcesDirectory)/Doc'
-    displayName: 'Build documentation'
-
-- ${{ if eq(parameters.latex, 'true') }}:
-  - script: sudo apt-get update && sudo apt-get install -qy --force-yes texlive-full 
-    displayName: 'Install LaTeX'
-
-  - script: make dist PYTHON=python SPHINXBUILD='python -m sphinx' BLURB='python -m blurb'
-    workingDirectory: '$(build.sourcesDirectory)/Doc'
-    displayName: 'Build documentation'
-
-- ${{ if eq(parameters.upload, 'true') }}:
-  - task: PublishBuildArtifacts@1
-    displayName: 'Publish docs'
-  
-    inputs:
-      PathToPublish: '$(build.sourcesDirectory)/Doc/build'
-      ArtifactName: docs
-      publishLocation: Container
-
-  - ${{ if eq(parameters.latex, 'true') }}:
-    - task: PublishBuildArtifacts@1
-      displayName: 'Publish dist'
-      inputs:
-        PathToPublish: '$(build.sourcesDirectory)/Doc/dist'
-        ArtifactName: docs_dist
-        publishLocation: Container
diff --git a/.azure-pipelines/macos-steps.yml b/.azure-pipelines/macos-steps.yml
deleted file mode 100644 (file)
index fa38a0d..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-steps:
-- checkout: self
-  clean: true
-  fetchDepth: 5
-
-- script: ./configure --with-pydebug --with-openssl=/usr/local/opt/openssl --prefix=/opt/python-azdev
-  displayName: 'Configure CPython (debug)'
-
-- script: make -j4
-  displayName: 'Build CPython'
-
-- script: make pythoninfo
-  displayName: 'Display build info'
-
-- script: make buildbottest TESTOPTS="-j4 -uall,-cpu --junit-xml=$(build.binariesDirectory)/test-results.xml"
-  displayName: 'Tests'
-  continueOnError: true
-  timeoutInMinutes: 30
-
-- task: PublishTestResults@2
-  displayName: 'Publish Test Results'
-  inputs:
-    testResultsFiles: '$(build.binariesDirectory)/test-results.xml'
-    mergeTestResults: true
-    testRunTitle: $(testRunTitle)
-    platform: $(testRunPlatform)
-  condition: succeededOrFailed()
diff --git a/.azure-pipelines/posix-deps-apt.sh b/.azure-pipelines/posix-deps-apt.sh
deleted file mode 100755 (executable)
index 4f48990..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
-apt-get update
-
-apt-get -yq install \
-    build-essential \
-    zlib1g-dev \
-    libbz2-dev \
-    liblzma-dev \
-    libncurses5-dev \
-    libreadline6-dev \
-    libsqlite3-dev \
-    libssl-dev \
-    libgdbm-dev \
-    tk-dev \
-    lzma \
-    lzma-dev \
-    liblzma-dev \
-    libffi-dev \
-    uuid-dev \
-    xvfb
-
-if [ ! -z "$1" ]
-then
-  echo ##vso[task.prependpath]$PWD/multissl/openssl/$1
-  echo ##vso[task.setvariable variable=OPENSSL_DIR]$PWD/multissl/openssl/$1
-  python3 Tools/ssl/multissltests.py --steps=library --base-directory $PWD/multissl --openssl $1 --system Linux
-fi
diff --git a/.azure-pipelines/posix-steps.yml b/.azure-pipelines/posix-steps.yml
deleted file mode 100644 (file)
index 95d3e98..0000000
+++ /dev/null
@@ -1,81 +0,0 @@
-parameters:
-  coverage: false
-  sudo_dependencies: sudo
-  dependencies: apt
-  patchcheck: true
-  xvfb: true
-
-steps:
-- checkout: self
-  clean: true
-  fetchDepth: 5
-
-# Work around a known issue affecting Ubuntu VMs on Pipelines
-- script: sudo setfacl -Rb /home/vsts
-  displayName: 'Workaround ACL issue'
-
-- script: ${{ parameters.sudo_dependencies }} ./.azure-pipelines/posix-deps-${{ parameters.dependencies }}.sh $(openssl_version)
-  displayName: 'Install dependencies'
-
-- script: ./configure --with-pydebug
-  displayName: 'Configure CPython (debug)'
-
-- script: make -j4
-  displayName: 'Build CPython'
-
-- ${{ if eq(parameters.coverage, 'true') }}:
-  - script: ./python -m venv venv && ./venv/bin/python -m pip install -U coverage
-    displayName: 'Set up virtual environment'
-
-  - script: ./venv/bin/python -m test.pythoninfo
-    displayName: 'Display build info'
-
-  - script: |
-      $COMMAND -m coverage run --pylib -m test \
-                --fail-env-changed \
-                -uall,-cpu \
-                --junit-xml=$(build.binariesDirectory)/test-results.xml" \
-                -x test_multiprocessing_fork \
-                -x test_multiprocessing_forkserver \
-                -x test_multiprocessing_spawn \
-                -x test_concurrent_futures
-    displayName: 'Tests with coverage'
-    env:
-      ${{ if eq(parameters.xvfb, 'true') }}:
-        COMMAND: xvfb-run ./venv/bin/python
-      ${{ if ne(parameters.xvfb, 'true') }}:
-        COMMAND: ./venv/bin/python
-
-  - script: ./venv/bin/python -m coverage xml
-    displayName: 'Generate coverage.xml'
-
-  - script: source ./venv/bin/activate && bash <(curl -s https://codecov.io/bash)
-    displayName: 'Publish code coverage results'
-
-
-- ${{ if ne(parameters.coverage, 'true') }}:
-  - script: make pythoninfo
-    displayName: 'Display build info'
-
-  - script: $COMMAND buildbottest TESTOPTS="-j4 -uall,-cpu --junit-xml=$(build.binariesDirectory)/test-results.xml"
-    displayName: 'Tests'
-    env:
-      ${{ if eq(parameters.xvfb, 'true') }}:
-        COMMAND: xvfb-run make
-      ${{ if ne(parameters.xvfb, 'true') }}:
-        COMMAND: make
-
-- ${{ if eq(parameters.patchcheck, 'true') }}:
-  - script: ./python Tools/scripts/patchcheck.py --travis true
-    displayName: 'Run patchcheck.py'
-    condition: and(succeeded(), eq(variables['Build.Reason'], 'PullRequest'))
-
-
-- task: PublishTestResults@2
-  displayName: 'Publish Test Results'
-  inputs:
-    testResultsFiles: '$(build.binariesDirectory)/test-results.xml'
-    mergeTestResults: true
-    testRunTitle: $(testRunTitle)
-    platform: $(testRunPlatform)
-  condition: succeededOrFailed()
diff --git a/.azure-pipelines/pr.yml b/.azure-pipelines/pr.yml
deleted file mode 100644 (file)
index 808b5f1..0000000
+++ /dev/null
@@ -1,155 +0,0 @@
-variables:
-  manylinux: false
-  coverage: false
-
-resources:
-  containers:
-  - container: manylinux1
-    image: pyca/cryptography-manylinux1:x86_64
-
-jobs:
-- job: Prebuild
-  displayName: Pre-build checks
-
-  pool:
-    vmImage: ubuntu-16.04
-
-  steps:
-  - template: ./prebuild-checks.yml
-
-
-- job: Docs_PR
-  displayName: Docs PR
-  dependsOn: Prebuild
-  condition: and(succeeded(), eq(dependencies.Prebuild.outputs['docs.run'], 'true'))
-
-  pool:
-    vmImage: ubuntu-16.04
-
-  steps:
-  - template: ./docs-steps.yml
-
-
-- job: macOS_PR_Tests
-  displayName: macOS PR Tests
-  dependsOn: Prebuild
-  #condition: and(succeeded(), eq(dependencies.Prebuild.outputs['tests.run'], 'true'))
-  # bpo-39837: macOS tests on Azure Pipelines are disabled
-  condition: false
-
-  variables:
-    testRunTitle: '$(system.pullRequest.TargetBranch)-macos'
-    testRunPlatform: macos
-
-  pool:
-    vmImage: macos-10.14
-
-  steps:
-  - template: ./macos-steps.yml
-    parameters:
-      targetBranch: $(System.PullRequest.TargetBranch)
-
-
-- job: Ubuntu_PR_Tests
-  displayName: Ubuntu PR Tests
-  dependsOn: Prebuild
-  condition: and(succeeded(), eq(dependencies.Prebuild.outputs['tests.run'], 'true'))
-
-  pool:
-    vmImage: ubuntu-16.04
-
-  variables:
-    testRunTitle: '$(system.pullRequest.TargetBranch)-linux'
-    testRunPlatform: linux
-    openssl_version: 1.1.1g
-
-  steps:
-  - template: ./posix-steps.yml
-    parameters:
-      dependencies: apt
-
-
-- job: ManyLinux1_PR_Tests
-  displayName: ManyLinux1 PR Tests
-  dependsOn: Prebuild
-  condition: |
-    and(
-        and(
-            succeeded(),
-            eq(variables['manylinux'], 'true')
-        ),
-        eq(dependencies.Prebuild.outputs['tests.run'], 'true')
-    )
-
-  pool:
-    vmImage: ubuntu-16.04
-
-  container: manylinux1
-
-  variables:
-    testRunTitle: '$(system.pullRequest.TargetBranch)-manylinux1'
-    testRunPlatform: manylinux1
-    openssl_version: ''
-
-  steps:
-  - template: ./posix-steps.yml
-    parameters:
-      dependencies: yum
-      sudo_dependencies: ''
-      xvfb: false
-      patchcheck: false
-
-
-- job: Ubuntu_Coverage_PR_Tests
-  displayName: Ubuntu PR Tests (coverage)
-  dependsOn: Prebuild
-  condition: |
-    and(
-        and(
-            succeeded(),
-            eq(variables['coverage'], 'true')
-        ),
-        eq(dependencies.Prebuild.outputs['tests.run'], 'true')
-    )
-
-  pool:
-    vmImage: ubuntu-16.04
-
-  variables:
-    testRunTitle: '$(Build.SourceBranchName)-linux-coverage'
-    testRunPlatform: linux-coverage
-    openssl_version: 1.1.1g
-
-  steps:
-  - template: ./posix-steps.yml
-    parameters:
-      dependencies: apt
-      coverage: true
-
-
-- job: Windows_PR_Tests
-  displayName: Windows PR Tests
-  dependsOn: Prebuild
-  condition: and(succeeded(), eq(dependencies.Prebuild.outputs['tests.run'], 'true'))
-
-  pool:
-    vmImage: vs2017-win2016
-
-  strategy:
-    matrix:
-      win32:
-        arch: win32
-        buildOpt:
-        testRunTitle: '$(System.PullRequest.TargetBranch)-win32'
-        testRunPlatform: win32
-      win64:
-        arch: amd64
-        buildOpt: '-p x64'
-        testRunTitle: '$(System.PullRequest.TargetBranch)-win64'
-        testRunPlatform: win64
-    maxParallel: 2
-
-  steps:
-  - template: ./windows-steps.yml
-    parameters:
-      targetBranch: $(System.PullRequest.TargetBranch)
diff --git a/.azure-pipelines/prebuild-checks.yml b/.azure-pipelines/prebuild-checks.yml
deleted file mode 100644 (file)
index 30ff642..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-steps:
-- checkout: self
-  fetchDepth: 5
-
-- script: echo "##vso[task.setvariable variable=diffTarget]HEAD~1"
-  displayName: Set default diff target
-
-- script: |
-    git fetch -q origin $(System.PullRequest.TargetBranch)
-    echo "##vso[task.setvariable variable=diffTarget]HEAD \$(git merge-base HEAD FETCH_HEAD)"
-  displayName: Fetch comparison tree
-  condition: and(succeeded(), variables['System.PullRequest.TargetBranch'])
-
-- script: |
-   if ! git diff --name-only $(diffTarget) | grep -qE '(\.rst$|^Doc|^Misc)'
-   then
-     echo "No docs were updated: docs.run=false"
-     echo "##vso[task.setvariable variable=run;isOutput=true]false"
-   else
-     echo "Docs were updated: docs.run=true"
-     echo "##vso[task.setvariable variable=run;isOutput=true]true"
-   fi
-  displayName: Detect documentation changes
-  name: docs
-
-- script: |
-   if ! git diff --name-only $(diffTarget) | grep -qvE '(\.rst$|^Doc|^Misc)'
-   then
-     echo "Only docs were updated: tests.run=false"
-     echo "##vso[task.setvariable variable=run;isOutput=true]false"
-   else
-     echo "Code was updated: tests.run=true"
-     echo "##vso[task.setvariable variable=run;isOutput=true]true"
-   fi
-  displayName: Detect source changes
-  name: tests
diff --git a/.azure-pipelines/windows-layout-steps.yml b/.azure-pipelines/windows-layout-steps.yml
deleted file mode 100644 (file)
index e15729f..0000000
+++ /dev/null
@@ -1,28 +0,0 @@
-parameters:
-  kind: nuget
-  extraOpts: --precompile
-  fulltest: false
-
-steps:
-- script: .\python.bat PC\layout -vv -s "$(Build.SourcesDirectory)" -b "$(Py_OutDir)\$(arch)" -t "$(Build.BinariesDirectory)\layout-tmp-${{ parameters.kind }}-$(arch)" --copy "$(Build.BinariesDirectory)\layout-${{ parameters.kind }}-$(arch)" ${{ parameters.extraOpts }} --preset-${{ parameters.kind }} --include-tests
-  displayName: Create ${{ parameters.kind }} layout
-
-- script: .\python.exe -m test.pythoninfo
-  workingDirectory: $(Build.BinariesDirectory)\layout-${{ parameters.kind }}-$(arch)
-  displayName: Show layout info (${{ parameters.kind }})
-
-- ${{ if eq(parameters.fulltest, 'true') }}:
-  - script: .\python.exe -m test -q -uall -u-cpu -rwW --slowest --timeout=1200 -j0 --junit-xml="$(Build.BinariesDirectory)\test-results-${{ parameters.kind }}.xml" --tempdir "$(Build.BinariesDirectory)\tmp-${{ parameters.kind }}-$(arch)"
-    workingDirectory: $(Build.BinariesDirectory)\layout-${{ parameters.kind }}-$(arch)
-    displayName: ${{ parameters.kind }} Tests
-    env:
-      PREFIX: $(Build.BinariesDirectory)\layout-${{ parameters.kind }}-$(arch)
-
-  - task: PublishTestResults@2
-    displayName: Publish ${{ parameters.kind }} Test Results
-    inputs:
-      testResultsFiles: $(Build.BinariesDirectory)\test-results-${{ parameters.kind }}.xml
-      mergeTestResults: true
-      testRunTitle: ${{ parameters.kind }}-$(testRunTitle)
-      platform: $(testRunPlatform)
-    condition: succeededOrFailed()
diff --git a/.azure-pipelines/windows-steps.yml b/.azure-pipelines/windows-steps.yml
deleted file mode 100644 (file)
index 794a23a..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-steps:
-- checkout: self
-  clean: false
-  fetchDepth: 5
-
-- powershell: |
-    # Relocate build outputs outside of source directory to make cleaning faster
-    Write-Host '##vso[task.setvariable variable=Py_IntDir]$(Build.BinariesDirectory)\obj'
-    # UNDONE: Do not build to a different directory because of broken tests
-    Write-Host '##vso[task.setvariable variable=Py_OutDir]$(Build.SourcesDirectory)\PCbuild'
-    #Write-Host '##vso[task.setvariable variable=Py_OutDir]$(Build.BinariesDirectory)\bin'
-    Write-Host '##vso[task.setvariable variable=EXTERNALS_DIR]$(Build.BinariesDirectory)\externals'
-  displayName: Update build locations
-
-- script: PCbuild\build.bat -e $(buildOpt)
-  displayName: 'Build CPython'
-  env:
-    IncludeUwp: true
-
-- script: python.bat -m test.pythoninfo
-  displayName: 'Display build info'
-
-- script: PCbuild\rt.bat -q -uall -u-cpu -rwW --slowest --timeout=1200 -j0 --junit-xml="$(Build.BinariesDirectory)\test-results.xml" --tempdir="$(Build.BinariesDirectory)\test"
-  displayName: 'Tests'
-  env:
-    PREFIX: $(Py_OutDir)\$(arch)
-
-- task: PublishTestResults@2
-  displayName: 'Publish Test Results'
-  inputs:
-    testResultsFiles: '$(Build.BinariesDirectory)\test-results.xml'
-    mergeTestResults: true
-    testRunTitle: $(testRunTitle)
-    platform: $(testRunPlatform)
-  condition: succeededOrFailed()
index af45581..1f09ed5 100644 (file)
--- a/Doc/library/gc.rst
+++ b/Doc/library/gc.rst
@@ -103,9 +103,9 @@ The :mod:`gc` module provides the following functions:
    allocations minus the number of deallocations exceeds *threshold0*, collection
    starts.  Initially only generation ``0`` is examined.  If generation ``0`` has
    been examined more than *threshold1* times since generation ``1`` has been
-   examined, then generation ``1`` is examined as well.  Similarly, *threshold2*
-   controls the number of collections of generation ``1`` before collecting
-   generation ``2``.
+   examined, then generation ``1`` is examined as well.
+   With the third generation, things are a bit more complicated,
+   see `Collecting the oldest generation <https://devguide.python.org/garbage_collector/#collecting-the-oldest-generation>`_ for more information.
 
 
 .. function:: get_count()
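
The rewritten paragraph above describes how the three thresholds drive collection. As a quick illustration of the knobs it is talking about (standard `gc` API, not specific to this patch), a minimal sketch:

    import gc

    # Thresholds for generations 0, 1 and 2; the default is (700, 10, 10).
    print(gc.get_threshold())

    # Generation 0 is collected once allocations minus deallocations exceed 700;
    # generation 1 is examined after 10 generation-0 collections, and the rules
    # for generation 2 are described in the devguide page linked above.
    gc.set_threshold(700, 10, 10)
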
index 9644a4f..6dcb006 100644 (file)
--- a/Doc/whatsnew/3.7.rst
+++ b/Doc/whatsnew/3.7.rst
@@ -171,7 +171,7 @@ on a per-module basis in Python 3.7 using a :mod:`__future__` import::
 
     from __future__ import annotations
 
-It will become the default in Python 4.0.
+It will become the default in Python 3.10.
 
 .. seealso::
 
index c1bdb83..d823319 100644 (file)
--- a/Include/patchlevel.h
+++ b/Include/patchlevel.h
 /*--start constants--*/
 #define PY_MAJOR_VERSION        3
 #define PY_MINOR_VERSION        7
-#define PY_MICRO_VERSION        8
+#define PY_MICRO_VERSION        9
 #define PY_RELEASE_LEVEL        PY_RELEASE_LEVEL_FINAL
 #define PY_RELEASE_SERIAL       0
 
 /* Version as a string */
-#define PY_VERSION              "3.7.8"
+#define PY_VERSION              "3.7.9"
 /*--end constants--*/
 
 /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2.
index 2d745ea..13a3e1a 100644 (file)
--- a/Lib/contextlib.py
+++ b/Lib/contextlib.py
@@ -186,7 +186,7 @@ class _AsyncGeneratorContextManager(_GeneratorContextManagerBase,
             # in this implementation
             try:
                 await self.gen.athrow(typ, value, traceback)
-                raise RuntimeError("generator didn't stop after throw()")
+                raise RuntimeError("generator didn't stop after athrow()")
             except StopAsyncIteration as exc:
                 return exc is not value
             except RuntimeError as exc:
index 88847f9..d042859 100644 (file)
--- a/Lib/distutils/tests/test_build_ext.py
+++ b/Lib/distutils/tests/test_build_ext.py
@@ -470,7 +470,7 @@ class BuildExtTestCase(TempdirManager,
         # format the target value as defined in the Apple
         # Availability Macros.  We can't use the macro names since
         # at least one value we test with will not exist yet.
-        if target[1] < 10:
+        if target[:2] < (10, 10):
             # for 10.1 through 10.9.x -> "10n0"
             target = '%02d%01d0' % target
         else:
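
The updated test compares the deployment target as a (major, minor) tuple, so 10.10 and later no longer fall into the "10n0" branch by accident. A hedged sketch of the formatting logic under test; the `else` branch lies outside this hunk, so the '%02d%02d00' form is an assumption based on the Availability Macros naming scheme:

    for target in [(10, 9), (10, 10), (11, 0)]:
        if target[:2] < (10, 10):
            print('%02d%01d0' % target)    # 10.9  -> 1090
        else:
            print('%02d%02d00' % target)   # 10.10 -> 101000, 11.0 -> 110000
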
index 09c57af..04cd8f7 100644 (file)
--- a/Lib/http/client.py
+++ b/Lib/http/client.py
@@ -150,6 +150,10 @@ _contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]')
 #  _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$")
 # We are more lenient for assumed real world compatibility purposes.
 
+# These characters are not allowed within HTTP method names
+# to prevent http header injection.
+_contains_disallowed_method_pchar_re = re.compile('[\x00-\x1f]')
+
 # We always set the Content-Length header for these methods because some
 # servers will otherwise respond with a 411
 _METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'}
@@ -1109,6 +1113,8 @@ class HTTPConnection:
         else:
             raise CannotSendRequest(self.__state)
 
+        self._validate_method(method)
+
         # Save the method for use later in the response phase
         self._method = method
 
@@ -1199,6 +1205,15 @@ class HTTPConnection:
         # ASCII also helps prevent CVE-2019-9740.
         return request.encode('ascii')
 
+    def _validate_method(self, method):
+        """Validate a method name for putrequest."""
+        # prevent http header injection
+        match = _contains_disallowed_method_pchar_re.search(method)
+        if match:
+            raise ValueError(
+                    f"method can't contain control characters. {method!r} "
+                    f"(found at least {match.group()!r})")
+
     def _validate_path(self, url):
         """Validate a url for putrequest."""
         # Prevent CVE-2019-9740.
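
`_validate_method()` runs inside `putrequest()` before anything is written to the socket, so a method name carrying CR or LF is rejected up front rather than smuggled into the request line. A minimal sketch of the observable behaviour (no connection is attempted because the check fails first):

    from http.client import HTTPConnection

    conn = HTTPConnection("example.com")
    try:
        conn.request("GET\r\nHost: injected", "/")
    except ValueError as exc:
        print(exc)   # method can't contain control characters. ...
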
index 8024928..5488293 100644 (file)
--- a/Lib/ipaddress.py
+++ b/Lib/ipaddress.py
@@ -1442,7 +1442,7 @@ class IPv4Interface(IPv4Address):
             return False
 
     def __hash__(self):
-        return self._ip ^ self._prefixlen ^ int(self.network.network_address)
+        return hash((self._ip, self._prefixlen, int(self.network.network_address)))
 
     __reduce__ = _IPAddressBase.__reduce__
 
@@ -2088,7 +2088,7 @@ class IPv6Interface(IPv6Address):
             return False
 
     def __hash__(self):
-        return self._ip ^ self._prefixlen ^ int(self.network.network_address)
+        return hash((self._ip, self._prefixlen, int(self.network.network_address)))
 
     __reduce__ = _IPAddressBase.__reduce__
 
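
With the old XOR-based hash, any host interface (implicit /32 or /128 prefix) hashed to its prefix length, giving the constant values 32 and 128; the tuple-based hash restores normal dispersion. A quick check:

    import ipaddress

    a = ipaddress.IPv4Interface("1.2.3.4")   # /32 by default
    b = ipaddress.IPv4Interface("2.3.4.5")

    # Before the fix both values hashed to 32; now they (almost always) differ.
    print(hash(a) != hash(b))
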
index 8340220..d42bb99 100644 (file)
--- a/Lib/pydoc_data/topics.py
+++ b/Lib/pydoc_data/topics.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Autogenerated by Sphinx on Wed Jun 17 04:38:18 2020
+# Autogenerated by Sphinx on Sat Aug 15 01:12:49 2020
 topics = {'assert': 'The "assert" statement\n'
            '**********************\n'
            '\n'
index 3b596cb..3be5188 100755 (executable)
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -1233,6 +1233,8 @@ class TarInfo(object):
 
             length, keyword = match.groups()
             length = int(length)
+            if length == 0:
+                raise InvalidHeaderError("invalid header")
             value = buf[match.end(2) + 1:match.start(1) + length - 1]
 
             # Normally, we could just use "utf-8" as the encoding and "strict"
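
The added check turns a pax record that declares a zero length into `InvalidHeaderError`, which `tarfile.open()` reports as `ReadError` instead of looping forever (bpo-39017, CVE-2019-20907). A sketch using the crafted archive added to the test suite by this patch; the path assumes a CPython source checkout as the working directory:

    import tarfile

    try:
        with tarfile.open("Lib/test/recursion.tar") as tar:
            pass
    except tarfile.ReadError as exc:
        print(exc)   # file could not be opened successfully
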
index 1d88fcb..c576d73 100644 (file)
--- a/Lib/test/pickletester.py
+++ b/Lib/test/pickletester.py
@@ -998,6 +998,24 @@ class AbstractUnpickleTests(unittest.TestCase):
             self.assertIs(type(unpickled), collections.UserDict)
             self.assertEqual(unpickled, collections.UserDict({1: 2}))
 
+    def test_bad_reduce(self):
+        self.assertEqual(self.loads(b'cbuiltins\nint\n)R.'), 0)
+        self.check_unpickling_error(TypeError, b'N)R.')
+        self.check_unpickling_error(TypeError, b'cbuiltins\nint\nNR.')
+
+    def test_bad_newobj(self):
+        error = (pickle.UnpicklingError, TypeError)
+        self.assertEqual(self.loads(b'cbuiltins\nint\n)\x81.'), 0)
+        self.check_unpickling_error(error, b'cbuiltins\nlen\n)\x81.')
+        self.check_unpickling_error(error, b'cbuiltins\nint\nN\x81.')
+
+    def test_bad_newobj_ex(self):
+        error = (pickle.UnpicklingError, TypeError)
+        self.assertEqual(self.loads(b'cbuiltins\nint\n)}\x92.'), 0)
+        self.check_unpickling_error(error, b'cbuiltins\nlen\n)}\x92.')
+        self.check_unpickling_error(error, b'cbuiltins\nint\nN}\x92.')
+        self.check_unpickling_error(error, b'cbuiltins\nint\n)N\x92.')
+
     def test_bad_stack(self):
         badpickles = [
             b'.',                       # STOP
diff --git a/Lib/test/recursion.tar b/Lib/test/recursion.tar
new file mode 100644 (file)
index 0000000..b823725
Binary files /dev/null and b/Lib/test/recursion.tar differ
index 5a292fb..5da2741 100644 (file)
--- a/Lib/test/test_asyncgen.py
+++ b/Lib/test/test_asyncgen.py
@@ -111,6 +111,31 @@ class AsyncGenTest(unittest.TestCase):
         def async_iterate(g):
             res = []
             while True:
+                an = g.__anext__()
+                try:
+                    while True:
+                        try:
+                            an.__next__()
+                        except StopIteration as ex:
+                            if ex.args:
+                                res.append(ex.args[0])
+                                break
+                            else:
+                                res.append('EMPTY StopIteration')
+                                break
+                        except StopAsyncIteration:
+                            raise
+                        except Exception as ex:
+                            res.append(str(type(ex)))
+                            break
+                except StopAsyncIteration:
+                    res.append('STOP')
+                    break
+            return res
+
+        def async_iterate(g):
+            res = []
+            while True:
                 try:
                     g.__anext__().__next__()
                 except StopAsyncIteration:
@@ -297,6 +322,37 @@ class AsyncGenTest(unittest.TestCase):
                                     "non-None value .* async generator"):
             gen().__anext__().send(100)
 
+    def test_async_gen_exception_11(self):
+        def sync_gen():
+            yield 10
+            yield 20
+
+        def sync_gen_wrapper():
+            yield 1
+            sg = sync_gen()
+            sg.send(None)
+            try:
+                sg.throw(GeneratorExit())
+            except GeneratorExit:
+                yield 2
+            yield 3
+
+        async def async_gen():
+            yield 10
+            yield 20
+
+        async def async_gen_wrapper():
+            yield 1
+            asg = async_gen()
+            await asg.asend(None)
+            try:
+                await asg.athrow(GeneratorExit())
+            except GeneratorExit:
+                yield 2
+            yield 3
+
+        self.compare_generators(sync_gen_wrapper(), async_gen_wrapper())
+
     def test_async_gen_api_01(self):
         async def gen():
             yield 123
index cc38dcf..9db4065 100644 (file)
--- a/Lib/test/test_contextlib_async.py
+++ b/Lib/test/test_contextlib_async.py
@@ -36,6 +36,28 @@ class TestAbstractAsyncContextManager(unittest.TestCase):
         async with manager as context:
             self.assertIs(manager, context)
 
+    @_async_test
+    async def test_async_gen_propagates_generator_exit(self):
+        # A regression test for https://bugs.python.org/issue33786.
+
+        @asynccontextmanager
+        async def ctx():
+            yield
+
+        async def gen():
+            async with ctx():
+                yield 11
+
+        ret = []
+        exc = ValueError(22)
+        with self.assertRaises(ValueError):
+            async with ctx():
+                async for val in gen():
+                    ret.append(val)
+                    raise exc
+
+        self.assertEqual(ret, [11])
+
     def test_exit_is_abstract(self):
         class MissingAexit(AbstractAsyncContextManager):
             pass
index 891393a..3fa0691 100644 (file)
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -363,6 +363,28 @@ class HeaderTests(TestCase):
         self.assertEqual(lines[3], "header: Second: val2")
 
 
+class HttpMethodTests(TestCase):
+    def test_invalid_method_names(self):
+        methods = (
+            'GET\r',
+            'POST\n',
+            'PUT\n\r',
+            'POST\nValue',
+            'POST\nHOST:abc',
+            'GET\nrHost:abc\n',
+            'POST\rRemainder:\r',
+            'GET\rHOST:\n',
+            '\nPUT'
+        )
+
+        for method in methods:
+            with self.assertRaisesRegex(
+                    ValueError, "method can't contain control characters"):
+                conn = client.HTTPConnection('example.com')
+                conn.sock = FakeSocket(None)
+                conn.request(method=method, url="/")
+
+
 class TransferEncodingTest(TestCase):
     expected_body = b"It's just a flesh wound"
 
index 455b893..1fb6a92 100644 (file)
--- a/Lib/test/test_ipaddress.py
+++ b/Lib/test/test_ipaddress.py
@@ -2091,6 +2091,17 @@ class IpaddrUnitTest(unittest.TestCase):
                          sixtofouraddr.sixtofour)
         self.assertFalse(bad_addr.sixtofour)
 
+    # issue41004 Hash collisions in IPv4Interface and IPv6Interface
+    def testV4HashIsNotConstant(self):
+        ipv4_address1 = ipaddress.IPv4Interface("1.2.3.4")
+        ipv4_address2 = ipaddress.IPv4Interface("2.3.4.5")
+        self.assertNotEqual(ipv4_address1.__hash__(), ipv4_address2.__hash__())
+
+    # issue41004 Hash collisions in IPv4Interface and IPv6Interface
+    def testV6HashIsNotConstant(self):
+        ipv6_address1 = ipaddress.IPv6Interface("2001:658:22a:cafe:200:0:0:1")
+        ipv6_address2 = ipaddress.IPv6Interface("2001:658:22a:cafe:200:0:0:2")
+        self.assertNotEqual(ipv6_address1.__hash__(), ipv6_address2.__hash__())
 
 if __name__ == '__main__':
     unittest.main()
index d91e978..452a56e 100644 (file)
--- a/Lib/test/test_platform.py
+++ b/Lib/test/test_platform.py
@@ -236,6 +236,11 @@ class PlatformTest(unittest.TestCase):
             fd.close()
             self.assertFalse(real_ver is None)
             result_list = res[0].split('.')
+            # macOS 11.0 (Big Sur) may report its version number
+            # as 10.16 if the executable is built with an older
+            # SDK target but sw_vers reports 11.0.
+            if result_list == ['10', '16']:
+                result_list = ['11', '0']
             expect_list = real_ver.split('.')
             len_diff = len(result_list) - len(expect_list)
             # On Snow Leopard, sw_vers reports 10.6.0 as 10.6
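
`platform.mac_ver()` reports the version the running binary sees, so an executable built against a pre-11.0 SDK can observe 10.16 on Big Sur while `sw_vers` prints 11.0; the test now maps the former onto the latter before comparing. A tiny macOS-only illustration:

    import platform
    import subprocess

    print(platform.mac_ver()[0])          # may be '10.16' on Big Sur
    print(subprocess.check_output(
        ['sw_vers', '-productVersion']).decode().strip())   # e.g. '11.0'
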
index 6def4e5..8815c83 100644 (file)
--- a/Lib/test/test_site.py
+++ b/Lib/test/test_site.py
@@ -573,12 +573,19 @@ class StartupImportTests(unittest.TestCase):
 @unittest.skipUnless(sys.platform == 'win32', "only supported on Windows")
 class _pthFileTests(unittest.TestCase):
 
-    def _create_underpth_exe(self, lines):
+    def _create_underpth_exe(self, lines, exe_pth=True):
+        import _winapi
         temp_dir = tempfile.mkdtemp()
         self.addCleanup(test.support.rmtree, temp_dir)
         exe_file = os.path.join(temp_dir, os.path.split(sys.executable)[1])
+        dll_src_file = _winapi.GetModuleFileName(sys.dllhandle)
+        dll_file = os.path.join(temp_dir, os.path.split(dll_src_file)[1])
         shutil.copy(sys.executable, exe_file)
-        _pth_file = os.path.splitext(exe_file)[0] + '._pth'
+        shutil.copy(dll_src_file, dll_file)
+        if exe_pth:
+            _pth_file = os.path.splitext(exe_file)[0] + '._pth'
+        else:
+            _pth_file = os.path.splitext(dll_file)[0] + '._pth'
         with open(_pth_file, 'w') as f:
             for line in lines:
                 print(line, file=f)
@@ -646,5 +653,30 @@ class _pthFileTests(unittest.TestCase):
         self.assertTrue(rc, "sys.path is incorrect")
 
 
+    def test_underpth_dll_file(self):
+        libpath = os.path.dirname(os.path.dirname(encodings.__file__))
+        exe_prefix = os.path.dirname(sys.executable)
+        exe_file = self._create_underpth_exe([
+            'fake-path-name',
+            *[libpath for _ in range(200)],
+            '',
+            '# comment',
+            'import site'
+        ], exe_pth=False)
+        sys_prefix = os.path.dirname(exe_file)
+        env = os.environ.copy()
+        env['PYTHONPATH'] = 'from-env'
+        env['PATH'] = '{};{}'.format(exe_prefix, os.getenv('PATH'))
+        rc = subprocess.call([exe_file, '-c',
+            'import sys; sys.exit(not sys.flags.no_site and '
+            '%r in sys.path and %r in sys.path and %r not in sys.path and '
+            'all("\\r" not in p and "\\n" not in p for p in sys.path))' % (
+                os.path.join(sys_prefix, 'fake-path-name'),
+                libpath,
+                os.path.join(sys_prefix, 'from-env'),
+            )], env=env)
+        self.assertTrue(rc, "sys.path is incorrect")
+
+
 if __name__ == "__main__":
     unittest.main()
index 5e4d75e..9133d60 100644 (file)
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -395,6 +395,13 @@ class CommonReadTest(ReadTest):
                 with self.assertRaisesRegex(tarfile.ReadError, "unexpected end of data"):
                     tar.extractfile(t).read()
 
+    def test_length_zero_header(self):
+        # bpo-39017 (CVE-2019-20907): reading a zero-length header should fail
+        # with an exception
+        with self.assertRaisesRegex(tarfile.ReadError, "file could not be opened successfully"):
+            with tarfile.open(support.findfile('recursion.tar')) as tar:
+                pass
+
 class MiscReadTestBase(CommonReadTest):
     def requires_name_attribute(self):
         pass
index f4f28a8..f3afa2f 100644 (file)
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -2,6 +2,55 @@
 Python News
 +++++++++++
 
+What's New in Python 3.7.9 final?
+=================================
+
+*Release date: 2020-08-15*
+
+Security
+--------
+
+- bpo-41304: Fixes `python3x._pth` being ignored on Windows, caused by the
+  fix for :issue:`29778` (CVE-2020-15801).
+
+- bpo-29778: Ensure :file:`python3.dll` is loaded from correct locations
+  when Python is embedded (CVE-2020-15523).
+
+- bpo-41004: CVE-2020-14422: The __hash__() methods of
+  ipaddress.IPv4Interface and ipaddress.IPv6Interface incorrectly generated
+  constant hash values of 32 and 128 respectively. This resulted in always
+  causing hash collisions. The fix uses hash() to generate hash values for
+  the tuple of (address, mask length, network address).
+
+- bpo-39603: Prevent http header injection by rejecting control characters
+  in http.client.putrequest(...).
+
+Core and Builtins
+-----------------
+
+- bpo-33786: Fix asynchronous generators to handle GeneratorExit in athrow()
+  correctly
+
+Library
+-------
+
+- bpo-41288: Unpickling invalid NEWOBJ_EX opcode with the C implementation
+  raises now UnpicklingError instead of crashing.
+
+- bpo-39017: Avoid infinite loop when reading specially crafted TAR files
+  using the tarfile module (CVE-2019-20907).
+
+- bpo-41235: Fix the error handling in
+  :meth:`ssl.SSLContext.load_dh_params`.
+
+macOS
+-----
+
+- bpo-41100: Additional fixes for testing on macOS 11 Big Sur Intel. Note:
+  macOS 11 is not yet released, this release of Python is not fully
+  supported on 11.0, and not all tests pass.
+
+
 What's New in Python 3.7.8 final?
 =================================
 
index ef83da0..329631d 100644 (file)
--- a/Modules/_pickle.c
+++ b/Modules/_pickle.c
@@ -5515,23 +5515,30 @@ load_newobj_ex(UnpicklerObject *self)
     }
 
     if (!PyType_Check(cls)) {
-        Py_DECREF(kwargs);
-        Py_DECREF(args);
         PyErr_Format(st->UnpicklingError,
                      "NEWOBJ_EX class argument must be a type, not %.200s",
                      Py_TYPE(cls)->tp_name);
-        Py_DECREF(cls);
-        return -1;
+        goto error;
     }
 
     if (((PyTypeObject *)cls)->tp_new == NULL) {
-        Py_DECREF(kwargs);
-        Py_DECREF(args);
-        Py_DECREF(cls);
         PyErr_SetString(st->UnpicklingError,
                         "NEWOBJ_EX class argument doesn't have __new__");
-        return -1;
+        goto error;
+    }
+    if (!PyTuple_Check(args)) {
+        PyErr_Format(st->UnpicklingError,
+                     "NEWOBJ_EX args argument must be a tuple, not %.200s",
+                     Py_TYPE(args)->tp_name);
+        goto error;
+    }
+    if (!PyDict_Check(kwargs)) {
+        PyErr_Format(st->UnpicklingError,
+                     "NEWOBJ_EX kwargs argument must be a dict, not %.200s",
+                     Py_TYPE(kwargs)->tp_name);
+        goto error;
     }
+
     obj = ((PyTypeObject *)cls)->tp_new((PyTypeObject *)cls, args, kwargs);
     Py_DECREF(kwargs);
     Py_DECREF(args);
@@ -5541,6 +5548,12 @@ load_newobj_ex(UnpicklerObject *self)
     }
     PDATA_PUSH(self->stack, obj, -1);
     return 0;
+
+error:
+    Py_DECREF(kwargs);
+    Py_DECREF(args);
+    Py_DECREF(cls);
+    return -1;
 }
 
 static int
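
The added type checks make the C unpickler reject a NEWOBJ_EX opcode whose args/kwargs operands are not a tuple/dict, raising `UnpicklingError` (or `TypeError`) rather than crashing. One of the crafted pickles from the new tests above shows the visible effect:

    import pickle

    try:
        pickle.loads(b'cbuiltins\nint\nN}\x92.')   # args operand is None, not a tuple
    except (pickle.UnpicklingError, TypeError) as exc:
        print(type(exc).__name__, exc)
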
index 93cc529..719f8e8 100644 (file)
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -4189,8 +4189,10 @@ _ssl__SSLContext_load_dh_params(PySSLContext *self, PyObject *filepath)
         }
         return NULL;
     }
-    if (SSL_CTX_set_tmp_dh(self->ctx, dh) == 0)
-        _setSSLError(NULL, 0, __FILE__, __LINE__);
+    if (!SSL_CTX_set_tmp_dh(self->ctx, dh)) {
+        DH_free(dh);
+        return _setSSLError(NULL, 0, __FILE__, __LINE__);
+    }
     DH_free(dh);
     Py_RETURN_NONE;
 }
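
The hunk frees the DH object and returns the error when `SSL_CTX_set_tmp_dh()` fails, so the failure now surfaces as an exception from `ssl.SSLContext.load_dh_params()` instead of being reported inconsistently. A hedged sketch of the Python-level call whose error path changed; `dh2048.pem` is a placeholder filename:

    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    try:
        ctx.load_dh_params("dh2048.pem")   # placeholder path
    except (OSError, ssl.SSLError) as exc:
        print(exc)
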
index ba8d74b..35f9e31 100644 (file)
--- a/Modules/getpath.c
+++ b/Modules/getpath.c
@@ -566,11 +566,7 @@ calculate_program_full_path(const _PyCoreConfig *core_config,
     memset(program_full_path, 0, sizeof(program_full_path));
 
 #ifdef __APPLE__
-#if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_4
     uint32_t nsexeclength = MAXPATHLEN;
-#else
-    unsigned long nsexeclength = MAXPATHLEN;
-#endif
     char execpath[MAXPATHLEN+1];
 #endif
 
index dd7d44b..b11690c 100644 (file)
--- a/Objects/genobject.c
+++ b/Objects/genobject.c
@@ -1893,21 +1893,20 @@ yield_close:
     return NULL;
 
 check_error:
-    if (PyErr_ExceptionMatches(PyExc_StopAsyncIteration)) {
+    if (PyErr_ExceptionMatches(PyExc_StopAsyncIteration) ||
+            PyErr_ExceptionMatches(PyExc_GeneratorExit))
+    {
         o->agt_state = AWAITABLE_STATE_CLOSED;
         if (o->agt_args == NULL) {
             /* when aclose() is called we don't want to propagate
-               StopAsyncIteration; just raise StopIteration, signalling
-               that 'aclose()' is done. */
+               StopAsyncIteration or GeneratorExit; just raise
+               StopIteration, signalling that this 'aclose()' await
+               is done.
+            */
             PyErr_Clear();
             PyErr_SetNone(PyExc_StopIteration);
         }
     }
-    else if (PyErr_ExceptionMatches(PyExc_GeneratorExit)) {
-        o->agt_state = AWAITABLE_STATE_CLOSED;
-        PyErr_Clear();          /* ignore these errors */
-        PyErr_SetNone(PyExc_StopIteration);
-    }
     return NULL;
 }
 
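
The rewritten check only swallows StopAsyncIteration and GeneratorExit for `aclose()` (when `agt_args` is NULL); for `athrow()` they now propagate to the caller, matching the synchronous behaviour exercised by `test_async_gen_exception_11` earlier in this patch. A standalone sketch of the resulting behaviour:

    import asyncio

    async def inner():
        yield 10
        yield 20

    async def outer():
        yield 1
        g = inner()
        await g.asend(None)
        try:
            await g.athrow(GeneratorExit())   # now propagates GeneratorExit
        except GeneratorExit:
            yield 2
        yield 3

    async def main():
        print([v async for v in outer()])   # [1, 2, 3]

    asyncio.run(main())
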
index dc4e43f..387ac60 100644 (file)
--- a/PC/getpathp.c
+++ b/PC/getpathp.c
@@ -161,27 +161,37 @@ reduce(wchar_t *dir)
 static int
 change_ext(wchar_t *dest, const wchar_t *src, const wchar_t *ext)
 {
-    size_t src_len = wcsnlen_s(src, MAXPATHLEN+1);
-    size_t i = src_len;
-    if (i >= MAXPATHLEN+1) {
-        Py_FatalError("buffer overflow in getpathp.c's reduce()");
-    }
+    if (src && src != dest) {
+        size_t src_len = wcsnlen_s(src, MAXPATHLEN+1);
+        size_t i = src_len;
+        if (i >= MAXPATHLEN+1) {
+            Py_FatalError("buffer overflow in getpathp.c's reduce()");
+        }
 
-    while (i > 0 && src[i] != '.' && !is_sep(src[i]))
-        --i;
+        while (i > 0 && src[i] != '.' && !is_sep(src[i]))
+            --i;
 
-    if (i == 0) {
-        dest[0] = '\0';
-        return -1;
-    }
+        if (i == 0) {
+            dest[0] = '\0';
+            return -1;
+        }
+
+        if (is_sep(src[i])) {
+            i = src_len;
+        }
 
-    if (is_sep(src[i])) {
-        i = src_len;
+        if (wcsncpy_s(dest, MAXPATHLEN+1, src, i)) {
+            dest[0] = '\0';
+            return -1;
+        }
+    } else {
+        wchar_t *s = wcsrchr(dest, L'.');
+        if (s) {
+            s[0] = '\0';
+        }
     }
 
-    if (wcsncpy_s(dest, MAXPATHLEN+1, src, i) ||
-        wcscat_s(dest, MAXPATHLEN+1, ext))
-    {
+    if (wcscat_s(dest, MAXPATHLEN+1, ext)) {
         dest[0] = '\0';
         return -1;
     }
@@ -337,6 +347,19 @@ search_for_prefix(wchar_t *prefix, const wchar_t *argv0_path, const wchar_t *lan
 }
 
 
+static int
+get_dllpath(wchar_t *dllpath)
+{
+#ifdef Py_ENABLE_SHARED
+    extern HANDLE PyWin_DLLhModule;
+    if (PyWin_DLLhModule && GetModuleFileNameW(PyWin_DLLhModule, dllpath, MAXPATHLEN)) {
+        return 0;
+    }
+#endif
+    return -1;
+}
+
+
 #ifdef Py_ENABLE_SHARED
 
 /* a string loaded from the DLL at startup.*/
@@ -510,31 +533,6 @@ done:
 
 
 static _PyInitError
-get_dll_path(PyCalculatePath *calculate, _PyPathConfig *config)
-{
-    wchar_t dll_path[MAXPATHLEN+1];
-    memset(dll_path, 0, sizeof(dll_path));
-
-#ifdef Py_ENABLE_SHARED
-    extern HANDLE PyWin_DLLhModule;
-    if (PyWin_DLLhModule) {
-        if (!GetModuleFileNameW(PyWin_DLLhModule, dll_path, MAXPATHLEN)) {
-            dll_path[0] = 0;
-        }
-    }
-#else
-    dll_path[0] = 0;
-#endif
-
-    config->dll_path = _PyMem_RawWcsdup(dll_path);
-    if (config->dll_path == NULL) {
-        return _Py_INIT_NO_MEMORY();
-    }
-    return _Py_INIT_OK();
-}
-
-
-static _PyInitError
 get_program_full_path(const _PyCoreConfig *core_config,
                       PyCalculatePath *calculate, _PyPathConfig *config)
 {
@@ -675,12 +673,11 @@ calculate_init(PyCalculatePath *calculate,
 static int
 get_pth_filename(wchar_t *spbuffer, _PyPathConfig *config)
 {
-    if (config->dll_path[0]) {
-        if (!change_ext(spbuffer, config->dll_path, L"._pth") &&
-            exists(spbuffer))
-        {
-            return 1;
-        }
+    if (!get_dllpath(spbuffer) &&
+        !change_ext(spbuffer, spbuffer, L"._pth") &&
+        exists(spbuffer))
+    {
+        return 1;
     }
     if (config->program_full_path[0]) {
         if (!change_ext(spbuffer, config->program_full_path, L"._pth") &&
@@ -967,11 +964,6 @@ calculate_path_impl(const _PyCoreConfig *core_config,
 {
     _PyInitError err;
 
-    err = get_dll_path(calculate, config);
-    if (_Py_INIT_FAILED(err)) {
-        return err;
-    }
-
     err = get_program_full_path(core_config, calculate, config);
     if (_Py_INIT_FAILED(err)) {
         return err;
@@ -992,9 +984,13 @@ calculate_path_impl(const _PyCoreConfig *core_config,
     calculate_pyvenv_file(calculate);
 
     /* Calculate zip archive path from DLL or exe path */
-    change_ext(calculate->zip_path,
-               config->dll_path[0] ? config->dll_path : config->program_full_path,
-               L".zip");
+    if (get_dllpath(calculate->zip_path) ||
+        change_ext(calculate->zip_path, calculate->zip_path, L".zip"))
+    {
+        if (change_ext(calculate->zip_path, config->program_full_path, L".zip")) {
+            calculate->zip_path[0] = L'\0';
+        }
+    }
 
     calculate_home_prefix(calculate, prefix);
 
@@ -1054,28 +1050,39 @@ int
 _Py_CheckPython3(void)
 {
     wchar_t py3path[MAXPATHLEN+1];
-    wchar_t *s;
     if (python3_checked) {
         return hPython3 != NULL;
     }
     python3_checked = 1;
 
     /* If there is a python3.dll next to the python3y.dll,
-       assume this is a build tree; use that DLL */
-    wcscpy(py3path, _Py_path_config.dll_path);
-    s = wcsrchr(py3path, L'\\');
-    if (!s) {
-        s = py3path;
+       use that DLL */
+    if (!get_dllpath(py3path)) {
+        reduce(py3path);
+        join(py3path, PY3_DLLNAME);
+        hPython3 = LoadLibraryExW(py3path, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
+        if (hPython3 != NULL) {
+            return 1;
+        }
     }
-    wcscpy(s, L"\\python3.dll");
-    hPython3 = LoadLibraryExW(py3path, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
-    if (hPython3 != NULL) {
-        return 1;
+
+    /* If we can locate python3.dll in our application dir,
+       use that DLL */
+    wcscpy(py3path, Py_GetPrefix());
+    if (py3path[0]) {
+        join(py3path, PY3_DLLNAME);
+        hPython3 = LoadLibraryExW(py3path, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
+        if (hPython3 != NULL) {
+            return 1;
+        }
     }
 
-    /* Check sys.prefix\DLLs\python3.dll */
+    /* For back-compat, also search {sys.prefix}\DLLs, though
+       that has not been a normal install layout for a while */
     wcscpy(py3path, Py_GetPrefix());
-    wcscat(py3path, L"\\DLLs\\python3.dll");
-    hPython3 = LoadLibraryExW(py3path, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
+    if (py3path[0]) {
+        join(py3path, L"DLLs\\" PY3_DLLNAME);
+        hPython3 = LoadLibraryExW(py3path, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
+    }
     return hPython3 != NULL;
 }
index 7e726f2..ab3fff6 100644 (file)
--- a/PCbuild/pyproject.props
+++ b/PCbuild/pyproject.props
     <_PlatformPreprocessorDefinition>_WIN32;</_PlatformPreprocessorDefinition>\r
     <_PlatformPreprocessorDefinition Condition="$(Platform) == 'x64'">_WIN64;_M_X64;</_PlatformPreprocessorDefinition>\r
     <_PydPreprocessorDefinition Condition="$(TargetExt) == '.pyd'">Py_BUILD_CORE_MODULE;</_PydPreprocessorDefinition>\r
+    <_Py3NamePreprocessorDefinition>PY3_DLLNAME=L"$(Py3DllName)";</_Py3NamePreprocessorDefinition>\r
   </PropertyGroup>\r
   <ItemDefinitionGroup>\r
     <ClCompile>\r
       <AdditionalIncludeDirectories>$(PySourcePath)Include;$(PySourcePath)PC;$(IntDir);%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>\r
-      <PreprocessorDefinitions>WIN32;$(_PlatformPreprocessorDefinition)$(_DebugPreprocessorDefinition)$(_PydPreprocessorDefinition)%(PreprocessorDefinitions)</PreprocessorDefinitions>\r
-      \r
+      <PreprocessorDefinitions>WIN32;$(_Py3NamePreprocessorDefinition)$(_PlatformPreprocessorDefinition)$(_DebugPreprocessorDefinition)$(_PydPreprocessorDefinition)%(PreprocessorDefinitions)</PreprocessorDefinitions>\r
       <Optimization>MaxSpeed</Optimization>\r
       <IntrinsicFunctions>true</IntrinsicFunctions>\r
       <StringPooling>true</StringPooling>\r
index 8c8392d..cc30ad3 100644 (file)
--- a/PCbuild/python.props
+++ b/PCbuild/python.props
     \r
     <!-- The name of the resulting pythonXY.dll (without the extension) -->\r
     <PyDllName>python$(MajorVersionNumber)$(MinorVersionNumber)$(PyDebugExt)</PyDllName>\r
+    <!-- The name of the resulting pythonX.dll (without the extension) -->\r
+    <Py3DllName>python3$(PyDebugExt)</Py3DllName>\r
 \r
     <!-- The version and platform tag to include in .pyd filenames -->\r
     <PydTag Condition="$(ArchName) == 'win32'">.cp$(MajorVersionNumber)$(MinorVersionNumber)-win32</PydTag>\r
index 51325be..8d67d2e 100644 (file)
--- a/Python/dynload_win.c
+++ b/Python/dynload_win.c
@@ -192,9 +192,7 @@ dl_funcptr _PyImport_FindSharedFuncptrWindows(const char *prefix,
     char funcname[258], *import_python;
     const wchar_t *wpathname;
 
-#ifndef _DEBUG
     _Py_CheckPython3();
-#endif
 
     wpathname = _PyUnicode_AsUnicode(pathname);
     if (wpathname == NULL)
index 5570783..d1d66a8 100644 (file)
--- a/README.rst
+++ b/README.rst
@@ -1,4 +1,4 @@
-This is Python version 3.7.8
+This is Python version 3.7.9
 ============================
 
 .. image:: https://travis-ci.org/python/cpython.svg?branch=3.7
index c807c98..829dd69 100755 (executable)
--- a/configure
+++ b/configure
@@ -9251,6 +9251,9 @@ fi
        ppc)
                MACOSX_DEFAULT_ARCH="ppc64"
                ;;
+       arm64)
+               MACOSX_DEFAULT_ARCH="arm64"
+               ;;
        *)
                as_fn_error $? "Unexpected output of 'arch' on OSX" "$LINENO" 5
                ;;
index 805c0bb..f1cc8e9 100644 (file)
--- a/configure.ac
+++ b/configure.ac
@@ -2456,6 +2456,9 @@ case $ac_sys_system/$ac_sys_release in
        ppc)
                MACOSX_DEFAULT_ARCH="ppc64"
                ;;
+       arm64)
+               MACOSX_DEFAULT_ARCH="arm64"
+               ;;
        *)
                AC_MSG_ERROR([Unexpected output of 'arch' on OSX])
                ;;