Imported Upstream version 1.22.2 (upstream/1.22.2)
author    DongHun Kwak <dh0128.kwak@samsung.com>
          Fri, 15 Jul 2022 02:14:54 +0000 (11:14 +0900)
committer DongHun Kwak <dh0128.kwak@samsung.com>
          Fri, 15 Jul 2022 02:14:54 +0000 (11:14 +0900)
46 files changed:
PKG-INFO
_configtest.c [deleted file]
_configtest.o [deleted file]
doc/changelog/1.22.2-changelog.rst [new file with mode: 0644]
doc/source/release.rst
doc/source/release/1.22.2-notes.rst [new file with mode: 0644]
doc/source/user/absolute_beginners.rst
numpy/__init__.pyi
numpy/_version.py
numpy/core/fromnumeric.pyi
numpy/core/include/numpy/npy_common.h
numpy/core/include/numpy/ufuncobject.h
numpy/core/setup.py
numpy/core/src/multiarray/_multiarray_tests.c.src
numpy/core/src/multiarray/arrayobject.c
numpy/core/src/multiarray/arraytypes.c.src
numpy/core/src/multiarray/buffer.c
numpy/core/src/multiarray/ctors.c
numpy/core/src/multiarray/descriptor.c
numpy/core/src/multiarray/dtype_transfer.c
numpy/core/src/multiarray/dtypemeta.c
numpy/core/src/multiarray/getset.c
numpy/core/src/multiarray/methods.c
numpy/core/src/multiarray/nditer_constr.c
numpy/core/src/multiarray/scalarapi.c
numpy/core/src/multiarray/scalartypes.c.src
numpy/core/src/npymath/npy_math_internal.h.src
numpy/core/src/umath/reduction.c
numpy/core/tests/examples/checks.pyx [deleted file]
numpy/core/tests/examples/cython/checks.pyx [new file with mode: 0644]
numpy/core/tests/examples/cython/setup.py [new file with mode: 0644]
numpy/core/tests/examples/limited_api/limited_api.c [new file with mode: 0644]
numpy/core/tests/examples/limited_api/setup.py [new file with mode: 0644]
numpy/core/tests/examples/setup.py [deleted file]
numpy/core/tests/test_cython.py
numpy/core/tests/test_limited_api.py [new file with mode: 0644]
numpy/core/tests/test_ufunc.py
numpy/distutils/command/build_ext.py
numpy/f2py/cb_rules.py
numpy/f2py/cfuncs.py
numpy/testing/_private/extbuild.py
numpy/typing/tests/data/reveal/flatiter.pyi
numpy/typing/tests/data/reveal/lib_function_base.pyi
pavement.py
setup.py
tools/swig/pyfragments.swg

index 972747ac6f4214061422ea160ea966cbc283693e..189aa9a2e9c98d869c30aa336e75efdb6b1b417d 100644 (file)
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: numpy
-Version: 1.22.1
+Version: 1.22.2
 Summary:  NumPy is the fundamental package for array computing with Python.
 Home-page: https://www.numpy.org
 Author: Travis E. Oliphant et al.
diff --git a/_configtest.c b/_configtest.c
deleted file mode 100644 (file)
index fb34dbf..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-
-/* This file is generated from numpy/distutils/system_info.py */
-void ATL_buildinfo(void);
-int main(void) {
-  ATL_buildinfo();
-  return 0;
-}
diff --git a/_configtest.o b/_configtest.o
deleted file mode 100644 (file)
index 95a4e91..0000000
Binary files a/_configtest.o and /dev/null differ
diff --git a/doc/changelog/1.22.2-changelog.rst b/doc/changelog/1.22.2-changelog.rst
new file mode 100644 (file)
index 0000000..067ee26
--- /dev/null
@@ -0,0 +1,48 @@
+
+Contributors
+============
+
+A total of 14 people contributed to this release.  People with a "+" by their
+names contributed a patch for the first time.
+
+* Andrew J. Hesford +
+* Bas van Beek
+* BrĂ©nainn Woodsend +
+* Charles Harris
+* Hood Chatham
+* Janus Heide +
+* Leo Singer
+* Matti Picus
+* Mukulika Pahari
+* Niyas Sait
+* Pearu Peterson
+* Ralf Gommers
+* Sebastian Berg
+* Serge Guelton
+
+Pull requests merged
+====================
+
+A total of 21 pull requests were merged for this release.
+
+* `#20842 <https://github.com/numpy/numpy/pull/20842>`__: BLD: Add NPY_DISABLE_SVML env var to opt out of SVML
+* `#20843 <https://github.com/numpy/numpy/pull/20843>`__: BUG: Fix build of third party extensions with Py_LIMITED_API
+* `#20844 <https://github.com/numpy/numpy/pull/20844>`__: TYP: Fix pyright being unable to infer the ``real`` and ``imag``...
+* `#20845 <https://github.com/numpy/numpy/pull/20845>`__: BUG: Fix comparator function signatures
+* `#20906 <https://github.com/numpy/numpy/pull/20906>`__: BUG: Avoid importing ``numpy.distutils`` on import numpy.testing
+* `#20907 <https://github.com/numpy/numpy/pull/20907>`__: MAINT: remove outdated mingw32 fseek support
+* `#20908 <https://github.com/numpy/numpy/pull/20908>`__: TYP: Relax the return type of ``np.vectorize``
+* `#20909 <https://github.com/numpy/numpy/pull/20909>`__: BUG: fix f2py's define for threading when building with Mingw
+* `#20910 <https://github.com/numpy/numpy/pull/20910>`__: BUG: distutils: fix building mixed C/Fortran extensions
+* `#20912 <https://github.com/numpy/numpy/pull/20912>`__: DOC,TST: Fix Pandas code example as per new release
+* `#20935 <https://github.com/numpy/numpy/pull/20935>`__: TYP, MAINT: Add annotations for ``flatiter.__setitem__``
+* `#20936 <https://github.com/numpy/numpy/pull/20936>`__: MAINT, TYP: Added missing where typehints in ``fromnumeric.pyi``
+* `#20937 <https://github.com/numpy/numpy/pull/20937>`__: BUG: Fix build_ext interaction with non numpy extensions
+* `#20938 <https://github.com/numpy/numpy/pull/20938>`__: BUG: Fix missing intrinsics for windows/arm64 target
+* `#20945 <https://github.com/numpy/numpy/pull/20945>`__: REL: Prepare for the NumPy 1.22.2 release.
+* `#20982 <https://github.com/numpy/numpy/pull/20982>`__: MAINT: f2py: don't generate code that triggers ``-Wsometimes-uninitialized``.
+* `#20983 <https://github.com/numpy/numpy/pull/20983>`__: BUG: Fix incorrect return type in reduce without initial value
+* `#20984 <https://github.com/numpy/numpy/pull/20984>`__: ENH: review return values for PyArray_DescrNew
+* `#20985 <https://github.com/numpy/numpy/pull/20985>`__: MAINT: be more tolerant of setuptools >= 60
+* `#20986 <https://github.com/numpy/numpy/pull/20986>`__: BUG: Fix misplaced return.
+* `#20992 <https://github.com/numpy/numpy/pull/20992>`__: MAINT: Further small return value validation fixes
index e90521be5c5b4675682da2356ca2200c6bc8f6b8..621471f7ee04af8578b47e9a3ca89ac8e66e0d65 100644 (file)
@@ -5,6 +5,7 @@ Release notes
 .. toctree::
     :maxdepth: 3
 
+    1.22.2 <release/1.22.2-notes>
     1.22.1 <release/1.22.1-notes>
     1.22.0 <release/1.22.0-notes>
     1.21.4 <release/1.21.4-notes>
diff --git a/doc/source/release/1.22.2-notes.rst b/doc/source/release/1.22.2-notes.rst
new file mode 100644 (file)
index 0000000..974560f
--- /dev/null
@@ -0,0 +1,64 @@
+.. currentmodule:: numpy
+
+==========================
+NumPy 1.22.2 Release Notes
+==========================
+
+NumPy 1.22.2 is a maintenance release that fixes bugs discovered after the
+1.22.1 release. Notable fixes are:
+
+- Several build-related fixes for downstream projects and other platforms.
+- Various annotation fixes/additions.
+- NumPy wheels for Windows will use the 1.41 tool chain, fixing downstream link
+  problems for projects using NumPy provided libraries on Windows.
+- Deal with the CVE-2021-41495 complaint.
+
+The Python versions supported for this release are 3.8-3.10.
+
+Contributors
+============
+
+A total of 14 people contributed to this release.  People with a "+" by their
+names contributed a patch for the first time.
+
+* Andrew J. Hesford +
+* Bas van Beek
+* BrĂ©nainn Woodsend +
+* Charles Harris
+* Hood Chatham
+* Janus Heide +
+* Leo Singer
+* Matti Picus
+* Mukulika Pahari
+* Niyas Sait
+* Pearu Peterson
+* Ralf Gommers
+* Sebastian Berg
+* Serge Guelton
+
+Pull requests merged
+====================
+
+A total of 21 pull requests were merged for this release.
+
+* `#20842 <https://github.com/numpy/numpy/pull/20842>`__: BLD: Add NPY_DISABLE_SVML env var to opt out of SVML
+* `#20843 <https://github.com/numpy/numpy/pull/20843>`__: BUG: Fix build of third party extensions with Py_LIMITED_API
+* `#20844 <https://github.com/numpy/numpy/pull/20844>`__: TYP: Fix pyright being unable to infer the ``real`` and ``imag``...
+* `#20845 <https://github.com/numpy/numpy/pull/20845>`__: BUG: Fix comparator function signatures
+* `#20906 <https://github.com/numpy/numpy/pull/20906>`__: BUG: Avoid importing ``numpy.distutils`` on import numpy.testing
+* `#20907 <https://github.com/numpy/numpy/pull/20907>`__: MAINT: remove outdated mingw32 fseek support
+* `#20908 <https://github.com/numpy/numpy/pull/20908>`__: TYP: Relax the return type of ``np.vectorize``
+* `#20909 <https://github.com/numpy/numpy/pull/20909>`__: BUG: fix f2py's define for threading when building with Mingw
+* `#20910 <https://github.com/numpy/numpy/pull/20910>`__: BUG: distutils: fix building mixed C/Fortran extensions
+* `#20912 <https://github.com/numpy/numpy/pull/20912>`__: DOC,TST: Fix Pandas code example as per new release
+* `#20935 <https://github.com/numpy/numpy/pull/20935>`__: TYP, MAINT: Add annotations for ``flatiter.__setitem__``
+* `#20936 <https://github.com/numpy/numpy/pull/20936>`__: MAINT, TYP: Added missing where typehints in ``fromnumeric.pyi``
+* `#20937 <https://github.com/numpy/numpy/pull/20937>`__: BUG: Fix build_ext interaction with non numpy extensions
+* `#20938 <https://github.com/numpy/numpy/pull/20938>`__: BUG: Fix missing intrinsics for windows/arm64 target
+* `#20945 <https://github.com/numpy/numpy/pull/20945>`__: REL: Prepare for the NumPy 1.22.2 release.
+* `#20982 <https://github.com/numpy/numpy/pull/20982>`__: MAINT: f2py: don't generate code that triggers ``-Wsometimes-uninitialized``.
+* `#20983 <https://github.com/numpy/numpy/pull/20983>`__: BUG: Fix incorrect return type in reduce without initial value
+* `#20984 <https://github.com/numpy/numpy/pull/20984>`__: ENH: review return values for PyArray_DescrNew
+* `#20985 <https://github.com/numpy/numpy/pull/20985>`__: MAINT: be more tolerant of setuptools >= 60
+* `#20986 <https://github.com/numpy/numpy/pull/20986>`__: BUG: Fix misplaced return.
+* `#20992 <https://github.com/numpy/numpy/pull/20992>`__: MAINT: Further small return value validation fixes
index 27e9e1f638c3af3f9b6297caefd94a5f5b089676..90012da1c5107b0832468dc843f6adef9dd728b1 100644 (file)
@@ -1589,7 +1589,7 @@ If you created this array "a" ::
 
 .. for doctests
    The continuous integration truncates dataframe display without this setting.
-   >>> pd.set_option('max_columns', 10)
+   >>> pd.set_option('display.max_columns', 10)
 
 You could create a Pandas dataframe ::
 
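The corrected doctest above maps onto ordinary user code as follows; a minimal sketch (the array values are illustrative, and recent pandas releases reject the unprefixed 'max_columns' key):

    import numpy as np
    import pandas as pd

    a = np.array([[-2.58, 0.43], [0.99, 1.17]])

    # Recent pandas versions require the fully qualified option name; the short
    # 'max_columns' spelling used previously is no longer accepted.
    pd.set_option('display.max_columns', 10)

    df = pd.DataFrame(a)
    print(df)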
index 01cd0c875c84c3c32e41789f190ecd236a617ac0..55a13057e424b39a71b1c6eb4ec47a05553529f8 100644 (file)
@@ -950,12 +950,22 @@ class flatiter(Generic[_NdArraySubClass]):
     @overload
     def __getitem__(
         self: flatiter[ndarray[Any, dtype[_ScalarType]]],
-        key: Union[int, integer],
+        key: int | integer | tuple[int | integer],
     ) -> _ScalarType: ...
     @overload
     def __getitem__(
-        self, key: Union[_ArrayLikeInt, slice, ellipsis],
+        self,
+        key: _ArrayLikeInt | slice | ellipsis | tuple[_ArrayLikeInt | slice | ellipsis],
     ) -> _NdArraySubClass: ...
+    # TODO: `__setitem__` operates via `unsafe` casting rules, and can
+    # thus accept any type accepted by the relevant underlying `np.generic`
+    # constructor.
+    # This means that `value` must in reality be a supertype of `npt.ArrayLike`.
+    def __setitem__(
+        self,
+        key: _ArrayLikeInt | slice | ellipsis | tuple[_ArrayLikeInt | slice | ellipsis],
+        value: Any,
+    ) -> None: ...
     @overload
     def __array__(self: flatiter[ndarray[Any, _DType]], dtype: None = ..., /) -> ndarray[Any, _DType]: ...
     @overload
@@ -1030,6 +1040,8 @@ class _ArrayOrScalarCommon:
         axis: None = ...,
         out: None = ...,
         keepdims: L[False] = ...,
+        *,
+        where: _ArrayLikeBool_co = ...,
     ) -> bool_: ...
     @overload
     def all(
@@ -1037,6 +1049,8 @@ class _ArrayOrScalarCommon:
         axis: Optional[_ShapeLike] = ...,
         out: None = ...,
         keepdims: bool = ...,
+        *,
+        where: _ArrayLikeBool_co = ...,
     ) -> Any: ...
     @overload
     def all(
@@ -1044,6 +1058,8 @@ class _ArrayOrScalarCommon:
         axis: Optional[_ShapeLike] = ...,
         out: _NdArraySubClass = ...,
         keepdims: bool = ...,
+        *,
+        where: _ArrayLikeBool_co = ...,
     ) -> _NdArraySubClass: ...
 
     @overload
@@ -1052,6 +1068,8 @@ class _ArrayOrScalarCommon:
         axis: None = ...,
         out: None = ...,
         keepdims: L[False] = ...,
+        *,
+        where: _ArrayLikeBool_co = ...,
     ) -> bool_: ...
     @overload
     def any(
@@ -1059,6 +1077,8 @@ class _ArrayOrScalarCommon:
         axis: Optional[_ShapeLike] = ...,
         out: None = ...,
         keepdims: bool = ...,
+        *,
+        where: _ArrayLikeBool_co = ...,
     ) -> Any: ...
     @overload
     def any(
@@ -1066,6 +1086,8 @@ class _ArrayOrScalarCommon:
         axis: Optional[_ShapeLike] = ...,
         out: _NdArraySubClass = ...,
         keepdims: bool = ...,
+        *,
+        where: _ArrayLikeBool_co = ...,
     ) -> _NdArraySubClass: ...
 
     @overload
@@ -1248,6 +1270,8 @@ class _ArrayOrScalarCommon:
         dtype: DTypeLike = ...,
         out: None = ...,
         keepdims: bool = ...,
+        *,
+        where: _ArrayLikeBool_co = ...,
     ) -> Any: ...
     @overload
     def mean(
@@ -1256,6 +1280,8 @@ class _ArrayOrScalarCommon:
         dtype: DTypeLike = ...,
         out: _NdArraySubClass = ...,
         keepdims: bool = ...,
+        *,
+        where: _ArrayLikeBool_co = ...,
     ) -> _NdArraySubClass: ...
 
     @overload
@@ -1339,6 +1365,8 @@ class _ArrayOrScalarCommon:
         out: None = ...,
         ddof: int = ...,
         keepdims: bool = ...,
+        *,
+        where: _ArrayLikeBool_co = ...,
     ) -> Any: ...
     @overload
     def std(
@@ -1348,6 +1376,8 @@ class _ArrayOrScalarCommon:
         out: _NdArraySubClass = ...,
         ddof: int = ...,
         keepdims: bool = ...,
+        *,
+        where: _ArrayLikeBool_co = ...,
     ) -> _NdArraySubClass: ...
 
     @overload
@@ -1379,6 +1409,8 @@ class _ArrayOrScalarCommon:
         out: None = ...,
         ddof: int = ...,
         keepdims: bool = ...,
+        *,
+        where: _ArrayLikeBool_co = ...,
     ) -> Any: ...
     @overload
     def var(
@@ -1388,6 +1420,8 @@ class _ArrayOrScalarCommon:
         out: _NdArraySubClass = ...,
         ddof: int = ...,
         keepdims: bool = ...,
+        *,
+        where: _ArrayLikeBool_co = ...,
     ) -> _NdArraySubClass: ...
 
 _DType = TypeVar("_DType", bound=dtype[Any])
@@ -1458,13 +1492,13 @@ class ndarray(_ArrayOrScalarCommon, Generic[_ShapeType, _DType_co]):
     def size(self) -> int: ...
     @property
     def real(
-        self: NDArray[_SupportsReal[_ScalarType]],  # type: ignore[type-var]
+        self: ndarray[_ShapeType, dtype[_SupportsReal[_ScalarType]]],  # type: ignore[type-var]
     ) -> ndarray[_ShapeType, _dtype[_ScalarType]]: ...
     @real.setter
     def real(self, value: ArrayLike) -> None: ...
     @property
     def imag(
-        self: NDArray[_SupportsImag[_ScalarType]],  # type: ignore[type-var]
+        self: ndarray[_ShapeType, dtype[_SupportsImag[_ScalarType]]],  # type: ignore[type-var]
     ) -> ndarray[_ShapeType, _dtype[_ScalarType]]: ...
     @imag.setter
     def imag(self, value: ArrayLike) -> None: ...
@@ -3679,6 +3713,8 @@ class memmap(ndarray[_ShapeType, _DType_co]):
     ) -> Any: ...
     def flush(self) -> None: ...
 
+# TODO: Add a mypy plugin for managing functions whose output type is dependant
+# on the literal value of some sort of signature (e.g. `einsum` and `vectorize`)
 class vectorize:
     pyfunc: Callable[..., Any]
     cache: bool
@@ -3695,7 +3731,7 @@ class vectorize:
         cache: bool = ...,
         signature: None | str = ...,
     ) -> None: ...
-    def __call__(self, *args: Any, **kwargs: Any) -> NDArray[Any]: ...
+    def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
 
 class poly1d:
     @property
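The stub changes above describe behaviour that already exists at runtime; a minimal sketch of the calls the new annotations cover (plain NumPy, no typing machinery; the `where=` keyword has been accepted by these reductions since NumPy 1.20):

    import numpy as np

    a = np.arange(6.0).reshape(2, 3)

    # flatiter now carries annotations for 1-tuple keys and for __setitem__
    # (assigned values are converted with 'unsafe' casting rules).
    assert a.flat[(4,)] == a.flat[4] == 4.0
    a.flat[...] = 1.0

    # The reductions gained a keyword-only `where=` in the stubs.
    m = a.mean(where=np.array([[True, False, True]]))
    assert m == 1.0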
index 43d7041f7e3f0581233f1b1608c257db6f6aeedf..f880bf35ce93febb2f7c59ce572a7fbba1c58bb6 100644 (file)
@@ -8,11 +8,11 @@ import json
 
 version_json = '''
 {
- "date": "2022-01-13T16:11:04-0700",
+ "date": "2022-02-03T14:24:02-0700",
  "dirty": false,
  "error": null,
- "full-revisionid": "7ce4118531b585b5d8f0380c6b896ae22d93bd96",
- "version": "1.22.1"
+ "full-revisionid": "f6dddcb2e5ea5ed39675f14429af3585c585a666",
+ "version": "1.22.2"
 }
 '''  # END VERSION_JSON
 
index 3cbe1d5c5ce25c5003562efccb773d5a96002f73..4a5e50503fe0d608a57ca78b9238caf031c0aad4 100644 (file)
@@ -246,6 +246,8 @@ def all(
     axis: None = ...,
     out: None = ...,
     keepdims: Literal[False] = ...,
+    *,
+    where: _ArrayLikeBool_co = ...,
 ) -> bool_: ...
 @overload
 def all(
@@ -253,6 +255,8 @@ def all(
     axis: Optional[_ShapeLike] = ...,
     out: Optional[ndarray] = ...,
     keepdims: bool = ...,
+    *,
+    where: _ArrayLikeBool_co = ...,
 ) -> Any: ...
 
 @overload
@@ -261,6 +265,8 @@ def any(
     axis: None = ...,
     out: None = ...,
     keepdims: Literal[False] = ...,
+    *,
+    where: _ArrayLikeBool_co = ...,
 ) -> bool_: ...
 @overload
 def any(
@@ -268,6 +274,8 @@ def any(
     axis: Optional[_ShapeLike] = ...,
     out: Optional[ndarray] = ...,
     keepdims: bool = ...,
+    *,
+    where: _ArrayLikeBool_co = ...,
 ) -> Any: ...
 
 def cumsum(
@@ -342,6 +350,8 @@ def mean(
     dtype: DTypeLike = ...,
     out: Optional[ndarray] = ...,
     keepdims: bool = ...,
+    *,
+    where: _ArrayLikeBool_co = ...,
 ) -> Any: ...
 
 def std(
@@ -351,6 +361,8 @@ def std(
     out: Optional[ndarray] = ...,
     ddof: int = ...,
     keepdims: bool = ...,
+    *,
+    where: _ArrayLikeBool_co = ...,
 ) -> Any: ...
 
 def var(
@@ -360,4 +372,6 @@ def var(
     out: Optional[ndarray] = ...,
     ddof: int = ...,
     keepdims: bool = ...,
+    *,
+    where: _ArrayLikeBool_co = ...,
 ) -> Any: ...
index 88794ca07e369d3940a0d4c6b5bc510b9336d18a..1d6234e20e0f19eeaf457a81fc4b9185d76e2118 100644 (file)
     defined(__MINGW32__) || defined(__MINGW64__)
     #include <io.h>
 
-/* mingw based on 3.4.5 has lseek but not ftell/fseek */
-#if defined(__MINGW32__) || defined(__MINGW64__)
-extern int __cdecl _fseeki64(FILE *, long long, int);
-extern long long __cdecl _ftelli64(FILE *);
-#endif
-
     #define npy_fseek _fseeki64
     #define npy_ftell _ftelli64
     #define npy_lseek _lseeki64
index 1d7050bbe5a374c2b18112515fe88305415127a8..bb063310079556ad99128baac90a970e890783e9 100644 (file)
@@ -173,7 +173,11 @@ typedef struct _tagPyUFuncObject {
          * but this was never implemented. (This is also why the above
          * selector is called the "legacy" selector.)
          */
-        vectorcallfunc vectorcall;
+        #ifndef Py_LIMITED_API
+            vectorcallfunc vectorcall;
+        #else
+            void *vectorcall;
+        #endif
 
         /* Was previously the `PyUFunc_MaskedInnerLoopSelectionFunc` */
         void *_always_null_previously_masked_innerloop_selector;
index a5f423d8fe4f54548a4689f07c6241e9889b6eae..7eb08132e2b2997ff599133be80d47f0eb0dc36e 100644 (file)
@@ -24,6 +24,11 @@ NPY_RELAXED_STRIDES_CHECKING = (os.environ.get('NPY_RELAXED_STRIDES_CHECKING', "
 NPY_RELAXED_STRIDES_DEBUG = (os.environ.get('NPY_RELAXED_STRIDES_DEBUG', "0") != "0")
 NPY_RELAXED_STRIDES_DEBUG = NPY_RELAXED_STRIDES_DEBUG and NPY_RELAXED_STRIDES_CHECKING
 
+# Set NPY_DISABLE_SVML=1 in the environment to disable the vendored SVML
+# library. This option only has significance on a Linux x86_64 host and is most
+# useful to avoid improperly requiring SVML when cross compiling.
+NPY_DISABLE_SVML = (os.environ.get('NPY_DISABLE_SVML', "0") == "1")
+
 # XXX: ugly, we use a class to avoid calling twice some expensive functions in
 # config.h/numpyconfig.h. I don't see a better way because distutils force
 # config.h generation inside an Extension class, and as such sharing
@@ -68,6 +73,8 @@ def can_link_svml():
     """SVML library is supported only on x86_64 architecture and currently
     only on linux
     """
+    if NPY_DISABLE_SVML:
+        return False
     machine = platform.machine()
     system = platform.system()
     return "x86_64" in machine and system == "Linux"
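Stripped of the distutils plumbing, the new opt-out amounts to an environment check; a standalone sketch of the gate (not the literal build code):

    import os
    import platform

    def can_link_svml():
        """Sketch of the gate shown above: the vendored SVML loops are only
        linked on Linux/x86_64, and setting NPY_DISABLE_SVML=1 now forces
        them off (useful when cross compiling)."""
        if os.environ.get("NPY_DISABLE_SVML", "0") == "1":
            return False
        return "x86_64" in platform.machine() and platform.system() == "Linux"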
index 9486b7cffa5b6e8d0a60047e228abaf9c0efe194..b7a8b08495f934c521ba4790d1354c070cc44c5b 100644 (file)
@@ -643,14 +643,12 @@ static PyObject *
 fromstring_null_term_c_api(PyObject *dummy, PyObject *byte_obj)
 {
     char *string;
-    PyArray_Descr *descr;
 
     string = PyBytes_AsString(byte_obj);
     if (string == NULL) {
         return NULL;
     }
-    descr = PyArray_DescrNewFromType(NPY_FLOAT64);
-    return PyArray_FromString(string, -1, descr, -1, " ");
+    return PyArray_FromString(string, -1, NULL, -1, " ");
 }
 
 
index f99de2a39b4f7038ac52cbc18b8c17a6d31bfd86..292523bbc26df8971f110f1d7809d5976dfd229c 100644 (file)
@@ -1005,6 +1005,9 @@ _strings_richcompare(PyArrayObject *self, PyArrayObject *other, int cmp_op,
     if (PyArray_ISNOTSWAPPED(self) != PyArray_ISNOTSWAPPED(other)) {
         /* Cast `other` to the same byte order as `self` (both unicode here) */
         PyArray_Descr* unicode = PyArray_DescrNew(PyArray_DESCR(self));
+        if (unicode == NULL) {
+            return NULL;
+        }
         unicode->elsize = PyArray_DESCR(other)->elsize;
         PyObject *new = PyArray_FromAny((PyObject *)other,
                 unicode, 0, 0, 0, NULL);
index 71808cc48aa3452ad7f32d31ad02febde6859143..71401c60e8d05b5c7fecf2d78a73c7d232ef146e 100644 (file)
@@ -2849,7 +2849,7 @@ static int
 #define LT(a,b) ((a) < (b) || ((b) != (b) && (a) ==(a)))
 
 static int
-@TYPE@_compare(@type@ *pa, @type@ *pb)
+@TYPE@_compare(@type@ *pa, @type@ *pb, PyArrayObject *NPY_UNUSED(ap))
 {
     const @type@ a = *pa;
     const @type@ b = *pb;
@@ -2869,7 +2869,7 @@ static int
 
 
 static int
-C@TYPE@_compare(@type@ *pa, @type@ *pb)
+C@TYPE@_compare(@type@ *pa, @type@ *pb, PyArrayObject *NPY_UNUSED(ap))
 {
     const @type@ ar = pa[0];
     const @type@ ai = pa[1];
@@ -2924,7 +2924,7 @@ C@TYPE@_compare(@type@ *pa, @type@ *pb)
  */
 
 static int
-@TYPE@_compare(@type@ *pa, @type@ *pb)
+@TYPE@_compare(@type@ *pa, @type@ *pb, PyArrayObject *NPY_UNUSED(ap))
 {
     const @type@ a = *pa;
     const @type@ b = *pb;
index d10122c4f190265334d1a51f53d255f4dd6f6234..13d7038d344837c6012d8454cf73c0c8d86f2742 100644 (file)
@@ -1048,12 +1048,18 @@ _descriptor_from_pep3118_format_fast(char const *s, PyObject **result)
     }
 
     descr = PyArray_DescrFromType(type_num);
+    if (descr == NULL) {
+        return 0;
+    }
     if (byte_order == '=') {
         *result = (PyObject*)descr;
     }
     else {
         *result = (PyObject*)PyArray_DescrNewByteorder(descr, byte_order);
         Py_DECREF(descr);
+        if (*result == NULL) {
+            return 0;
+        }
     }
 
     return 1;
index 78003306afe942cba083fd2a7b56b0e64db55d58..4464d7ead51def0229ac1316e15035fe4cda5063 100644 (file)
@@ -668,6 +668,9 @@ PyArray_NewFromDescr_int(
     PyArrayObject_fields *fa;
     npy_intp nbytes;
 
+    if (descr == NULL) {
+        return NULL;
+    }
     if (nd > NPY_MAXDIMS || nd < 0) {
         PyErr_Format(PyExc_ValueError,
                 "number of dimensions must be within [0, %d]", NPY_MAXDIMS);
@@ -1115,6 +1118,9 @@ PyArray_New(
             return NULL;
         }
         PyArray_DESCR_REPLACE(descr);
+        if (descr == NULL) {
+            return NULL;
+        }
         descr->elsize = itemsize;
     }
     new = PyArray_NewFromDescr(subtype, descr, nd, dims, strides,
@@ -1140,6 +1146,9 @@ _dtype_from_buffer_3118(PyObject *memoryview)
          *       terminate.
          */
         descr = PyArray_DescrNewFromType(NPY_STRING);
+        if (descr == NULL) {
+            return NULL;
+        }
         descr->elsize = view->itemsize;
     }
     return descr;
@@ -1872,6 +1881,9 @@ PyArray_CheckFromAny(PyObject *op, PyArray_Descr *descr, int min_depth,
         if (!descr && PyArray_Check(op) &&
                 PyArray_ISBYTESWAPPED((PyArrayObject* )op)) {
             descr = PyArray_DescrNew(PyArray_DESCR((PyArrayObject *)op));
+            if (descr == NULL) {
+                return NULL;
+            }
         }
         else if (descr && !PyArray_ISNBO(descr->byteorder)) {
             PyArray_DESCR_REPLACE(descr);
@@ -3583,6 +3595,10 @@ PyArray_FromFile(FILE *fp, PyArray_Descr *dtype, npy_intp num, char *sep)
     PyArrayObject *ret;
     size_t nread = 0;
 
+    if (dtype == NULL) {
+        return NULL;
+    }
+
     if (PyDataType_REFCHK(dtype)) {
         PyErr_SetString(PyExc_ValueError,
                 "Cannot read into object array");
@@ -3650,6 +3666,9 @@ PyArray_FromBuffer(PyObject *buf, PyArray_Descr *type,
     int itemsize;
     int writeable = 1;
 
+    if (type == NULL) {
+        return NULL;
+    }
 
     if (PyDataType_REFCHK(type)) {
         PyErr_SetString(PyExc_ValueError,
@@ -3857,14 +3876,20 @@ NPY_NO_EXPORT PyObject *
 PyArray_FromIter(PyObject *obj, PyArray_Descr *dtype, npy_intp count)
 {
     PyObject *value;
-    PyObject *iter = PyObject_GetIter(obj);
+    PyObject *iter = NULL;
     PyArrayObject *ret = NULL;
     npy_intp i, elsize, elcount;
     char *item, *new_data;
 
+    if (dtype == NULL) {
+        return NULL;
+    }
+
+    iter = PyObject_GetIter(obj);
     if (iter == NULL) {
         goto done;
     }
+
     if (PyDataType_ISUNSIZED(dtype)) {
         PyErr_SetString(PyExc_ValueError,
                 "Must specify length when using variable-size data-type.");
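The dtype==NULL guards above are only reachable from C, but the neighbouring check is visible from Python; a small sketch of the np.fromiter behaviour that ends in this code path (error text taken from the context line above):

    import numpy as np

    print(np.fromiter(range(3), dtype=np.int64))    # goes through PyArray_FromIter

    try:
        np.fromiter(["a", "bb"], dtype="S")         # unsized (variable-size) dtype
    except ValueError as exc:
        print(exc)  # Must specify length when using variable-size data-type.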
index 0c539053c9e764cd5d2318f6a49ae6888d92d99c..07abc755fab3b9828bdf01b47e5158c39836123f 100644 (file)
@@ -597,9 +597,7 @@ _convert_from_array_descr(PyObject *obj, int align)
 
     PyArray_Descr *new = PyArray_DescrNewFromType(NPY_VOID);
     if (new == NULL) {
-        Py_XDECREF(fields);
-        Py_XDECREF(nameslist);
-        return NULL;
+        goto fail;
     }
     new->fields = fields;
     new->names = nameslist;
@@ -703,6 +701,9 @@ _convert_from_list(PyObject *obj, int align)
         totalsize += conv->elsize;
     }
     PyArray_Descr *new = PyArray_DescrNewFromType(NPY_VOID);
+    if (new == NULL) {
+        goto fail;
+    }
     new->fields = fields;
     new->names = nameslist;
     new->flags = dtypeflags;
@@ -1381,6 +1382,9 @@ PyArray_DescrNewFromType(int type_num)
     PyArray_Descr *new;
 
     old = PyArray_DescrFromType(type_num);
+    if (old == NULL) {
+        return NULL;
+    }
     new = PyArray_DescrNew(old);
     Py_DECREF(old);
     return new;
@@ -2341,7 +2345,7 @@ arraydescr_new(PyTypeObject *subtype,
     }
 
     PyObject *odescr, *metadata=NULL;
-    PyArray_Descr *descr, *conv;
+    PyArray_Descr *conv;
     npy_bool align = NPY_FALSE;
     npy_bool copy = NPY_FALSE;
     npy_bool copied = NPY_FALSE;
@@ -2363,9 +2367,10 @@ arraydescr_new(PyTypeObject *subtype,
 
     /* Get a new copy of it unless it's already a copy */
     if (copy && conv->fields == Py_None) {
-        descr = PyArray_DescrNew(conv);
-        Py_DECREF(conv);
-        conv = descr;
+        PyArray_DESCR_REPLACE(conv);
+        if (conv == NULL) {
+            return NULL;
+        }
         copied = NPY_TRUE;
     }
 
@@ -2375,10 +2380,11 @@ arraydescr_new(PyTypeObject *subtype,
          * underlying dictionary
          */
         if (!copied) {
+            PyArray_DESCR_REPLACE(conv);
+            if (conv == NULL) {
+                return NULL;
+            }
             copied = NPY_TRUE;
-            descr = PyArray_DescrNew(conv);
-            Py_DECREF(conv);
-            conv = descr;
         }
         if ((conv->metadata != NULL)) {
             /*
@@ -3047,6 +3053,9 @@ PyArray_DescrNewByteorder(PyArray_Descr *self, char newendian)
     char endian;
 
     new = PyArray_DescrNew(self);
+    if (new == NULL) {
+        return NULL;
+    }
     endian = new->byteorder;
     if (endian != NPY_IGNORE) {
         if (newendian == NPY_SWAP) {
@@ -3073,6 +3082,10 @@ PyArray_DescrNewByteorder(PyArray_Descr *self, char newendian)
         int len, i;
 
         newfields = PyDict_New();
+        if (newfields == NULL) {
+            Py_DECREF(new);
+            return NULL;
+        }
         /* make new dictionary with replaced PyArray_Descr Objects */
         while (PyDict_Next(self->fields, &pos, &key, &value)) {
             if (NPY_TITLE_KEY(key, value)) {
@@ -3114,6 +3127,10 @@ PyArray_DescrNewByteorder(PyArray_Descr *self, char newendian)
         Py_DECREF(new->subarray->base);
         new->subarray->base = PyArray_DescrNewByteorder(
                 self->subarray->base, newendian);
+        if (new->subarray->base == NULL) {
+            Py_DECREF(new);
+            return NULL;
+        }
     }
     return new;
 }
index 8fb44c4f6c5bd02579596a27edbc3670fd1a334f..78704f6eda0a125e95a8df05ec486d40f7078b3a 100644 (file)
@@ -3021,9 +3021,9 @@ init_cast_info(NPY_cast_info *cast_info, NPY_CASTING *casting,
         if (!PyErr_Occurred()) {
             PyErr_Format(PyExc_TypeError,
                     "Cannot cast array data from %R to %R.", src_dtype, dst_dtype);
-            Py_DECREF(meth);
-            return -1;
         }
+        Py_DECREF(meth);
+        return -1;
     }
     assert(PyArray_DescrCheck(cast_info->descriptors[0]));
     assert(PyArray_DescrCheck(cast_info->descriptors[1]));
index cd489d5e7c9d963731758cb18236c460d53037f8..53f38e8e8b1aeaa3be3d29dd2158765193fbb8f6 100644 (file)
@@ -153,6 +153,9 @@ string_discover_descr_from_pyobject(
                     "string to large to store inside array.");
         }
         PyArray_Descr *res = PyArray_DescrNewFromType(cls->type_num);
+        if (res == NULL) {
+            return NULL;
+        }
         res->elsize = (int)itemsize;
         return res;
     }
@@ -171,10 +174,15 @@ void_discover_descr_from_pyobject(
     }
     if (PyBytes_Check(obj)) {
         PyArray_Descr *descr = PyArray_DescrNewFromType(NPY_VOID);
+        if (descr == NULL) {
+            return NULL;
+        }
         Py_ssize_t itemsize = PyBytes_Size(obj);
         if (itemsize > NPY_MAX_INT) {
             PyErr_SetString(PyExc_TypeError,
                     "byte-like to large to store inside array.");
+            Py_DECREF(descr);
+            return NULL;
         }
         descr->elsize = (int)itemsize;
         return descr;
index a92ac44b78460f3f9821de6a984865586b64f553..fc43bb3fe026caa88cbaa74b08143e668b688ff9 100644 (file)
@@ -714,15 +714,18 @@ _get_part(PyArrayObject *self, int imag)
 
     }
     type = PyArray_DescrFromType(float_type_num);
+    if (type == NULL) {
+        return NULL;
+    }
 
     offset = (imag ? type->elsize : 0);
 
     if (!PyArray_ISNBO(PyArray_DESCR(self)->byteorder)) {
-        PyArray_Descr *new;
-        new = PyArray_DescrNew(type);
-        new->byteorder = PyArray_DESCR(self)->byteorder;
-        Py_DECREF(type);
-        type = new;
+        Py_SETREF(type, PyArray_DescrNew(type));
+        if (type == NULL) {
+            return NULL;
+        }
+        type->byteorder = PyArray_DESCR(self)->byteorder;
     }
     ret = (PyArrayObject *)PyArray_NewFromDescrAndBase(
             Py_TYPE(self),
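The _get_part helper above backs the `.real`/`.imag` attributes; a short sketch of the Python-visible path it serves, including the non-native byte order branch that now checks the PyArray_DescrNew result (output noted for a little-endian machine):

    import numpy as np

    z = np.array([1 + 2j, 3 + 4j], dtype=">c16")   # big-endian complex; non-native on most machines
    print(z.real.dtype)                            # >f8  (byte order copied from the parent array)
    print(z.imag)                                  # [2. 4.]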
index c31a8292ce325d4ffe4f6a47beb1a2fe1744561f..8fa9bf2ca357b44e404645407e13bd2396276b7d 100644 (file)
@@ -1330,6 +1330,10 @@ array_sort(PyArrayObject *self,
             return NULL;
         }
         newd = PyArray_DescrNew(saved);
+        if (newd == NULL) {
+            Py_DECREF(new_name);
+            return NULL;
+        }
         Py_DECREF(newd->names);
         newd->names = new_name;
         ((PyArrayObject_fields *)self)->descr = newd;
@@ -1392,6 +1396,10 @@ array_partition(PyArrayObject *self,
             return NULL;
         }
         newd = PyArray_DescrNew(saved);
+        if (newd == NULL) {
+            Py_DECREF(new_name);
+            return NULL;
+        }
         Py_DECREF(newd->names);
         newd->names = new_name;
         ((PyArrayObject_fields *)self)->descr = newd;
@@ -1455,6 +1463,10 @@ array_argsort(PyArrayObject *self,
             return NULL;
         }
         newd = PyArray_DescrNew(saved);
+        if (newd == NULL) {
+            Py_DECREF(new_name);
+            return NULL;
+        }
         Py_DECREF(newd->names);
         newd->names = new_name;
         ((PyArrayObject_fields *)self)->descr = newd;
@@ -1512,6 +1524,10 @@ array_argpartition(PyArrayObject *self,
             return NULL;
         }
         newd = PyArray_DescrNew(saved);
+        if (newd == NULL) {
+            Py_DECREF(new_name);
+            return NULL;
+        }
         Py_DECREF(newd->names);
         newd->names = new_name;
         ((PyArrayObject_fields *)self)->descr = newd;
@@ -2144,6 +2160,11 @@ array_setstate(PyArrayObject *self, PyObject *args)
                 }
                 else {
                     fa->descr = PyArray_DescrNew(typecode);
+                    if (fa->descr == NULL) {
+                        Py_CLEAR(fa->mem_handler);
+                        Py_DECREF(rawdata);
+                        return NULL;
+                    }
                     if (PyArray_DESCR(self)->byteorder == NPY_BIG) {
                         PyArray_DESCR(self)->byteorder = NPY_LITTLE;
                     }
index bf32e1f6b706a6246c95f2194f5c52049b731fd2..2812aaf3cb51bc3098f603cd300c5cf249a0cfbd 100644 (file)
@@ -1128,13 +1128,12 @@ npyiter_prepare_one_operand(PyArrayObject **op,
         if (op_flags & NPY_ITER_NBO) {
             /* Check byte order */
             if (!PyArray_ISNBO((*op_dtype)->byteorder)) {
-                PyArray_Descr *nbo_dtype;
-
                 /* Replace with a new descr which is in native byte order */
-                nbo_dtype = PyArray_DescrNewByteorder(*op_dtype, NPY_NATIVE);
-                Py_DECREF(*op_dtype);
-                *op_dtype = nbo_dtype;
-
+                Py_SETREF(*op_dtype,
+                          PyArray_DescrNewByteorder(*op_dtype, NPY_NATIVE));
+                if (*op_dtype == NULL) {
+                    return 0;
+                }                
                 NPY_IT_DBG_PRINT("Iterator: Setting NPY_OP_ITFLAG_CAST "
                                     "because of NPY_ITER_NBO\n");
                 /* Indicate that byte order or alignment needs fixing */
index 564352f1fd3f42e64e81bf89b421a6575dfb2c8e..edbe5955ce97c746fe54b15cdda0c31d63f93944 100644 (file)
@@ -625,6 +625,9 @@ PyArray_DescrFromScalar(PyObject *sc)
     }
     if (PyDataType_ISUNSIZED(descr)) {
         PyArray_DESCR_REPLACE(descr);
+        if (descr == NULL) {
+            return NULL;
+        }
         type_num = descr->type_num;
         if (type_num == NPY_STRING) {
             descr->elsize = PyBytes_GET_SIZE(sc);
index 013526ff0c3e841af2d7b53b7f829ed7eeecf45f..2249fa22bc5a3a348ea051580294ad3369b97cc7 100644 (file)
@@ -3213,12 +3213,16 @@ void_arrtype_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
         }
         ((PyVoidScalarObject *)ret)->obval = destptr;
         Py_SET_SIZE((PyVoidScalarObject *)ret, (int) memu);
-        ((PyVoidScalarObject *)ret)->descr =
-            PyArray_DescrNewFromType(NPY_VOID);
-        ((PyVoidScalarObject *)ret)->descr->elsize = (int) memu;
         ((PyVoidScalarObject *)ret)->flags = NPY_ARRAY_BEHAVED |
                                              NPY_ARRAY_OWNDATA;
         ((PyVoidScalarObject *)ret)->base = NULL;
+        ((PyVoidScalarObject *)ret)->descr =
+            PyArray_DescrNewFromType(NPY_VOID);
+        if (((PyVoidScalarObject *)ret)->descr == NULL) {
+            Py_DECREF(ret);
+            return NULL;
+        }
+        ((PyVoidScalarObject *)ret)->descr->elsize = (int) memu;
         return ret;
     }
 
index 15d35637fb040b1b84cef1982344e36057d0cdc4..0364aabb1a9b0ed9ac6ed3574aeb0f4a241d7de6 100644 (file)
@@ -887,7 +887,7 @@ npy_popcountu@c@(npy_@type@ a)
 /* use built-in popcount if present, else use our implementation */
 #if (defined(__clang__) || defined(__GNUC__)) && NPY_BITSOF_@STYPE@ >= 32
     return __builtin_popcount@c@(a);
-#elif defined(_MSC_VER) && NPY_BITSOF_@STYPE@ >= 16
+#elif defined(_MSC_VER) && NPY_BITSOF_@STYPE@ >= 16 && !defined(_M_ARM64) && !defined(_M_ARM)
     /* no builtin __popcnt64 for 32 bits */
     #if defined(_WIN64) || (defined(_WIN32) && NPY_BITSOF_@STYPE@ != 64)
         return TO_BITS_LEN(__popcnt)(a);
index 8cb44d4338a7b1cbfe853bfb681e393304c55903..06709b4f36fd5cd5a4765ed281eac2dfe258d054 100644 (file)
@@ -68,7 +68,7 @@ count_axes(int ndim, const npy_bool *axis_flags)
  * Returns -1 if an error occurred, and otherwise the reduce arrays size,
  * which is the number of elements already initialized.
  */
-NPY_NO_EXPORT int
+static npy_intp
 PyArray_CopyInitialReduceValues(
                     PyArrayObject *result, PyArrayObject *operand,
                     const npy_bool *axis_flags, const char *funcname,
diff --git a/numpy/core/tests/examples/checks.pyx b/numpy/core/tests/examples/checks.pyx
deleted file mode 100644 (file)
index 151979d..0000000
+++ /dev/null
@@ -1,30 +0,0 @@
-"""
-Functions in this module give python-space wrappers for cython functions
-exposed in numpy/__init__.pxd, so they can be tested in test_cython.py
-"""
-cimport numpy as cnp
-cnp.import_array()
-
-
-def is_td64(obj):
-    return cnp.is_timedelta64_object(obj)
-
-
-def is_dt64(obj):
-    return cnp.is_datetime64_object(obj)
-
-
-def get_dt64_value(obj):
-    return cnp.get_datetime64_value(obj)
-
-
-def get_td64_value(obj):
-    return cnp.get_timedelta64_value(obj)
-
-
-def get_dt64_unit(obj):
-    return cnp.get_datetime64_unit(obj)
-
-
-def is_integer(obj):
-    return isinstance(obj, (cnp.integer, int))
diff --git a/numpy/core/tests/examples/cython/checks.pyx b/numpy/core/tests/examples/cython/checks.pyx
new file mode 100644 (file)
index 0000000..151979d
--- /dev/null
@@ -0,0 +1,30 @@
+"""
+Functions in this module give python-space wrappers for cython functions
+exposed in numpy/__init__.pxd, so they can be tested in test_cython.py
+"""
+cimport numpy as cnp
+cnp.import_array()
+
+
+def is_td64(obj):
+    return cnp.is_timedelta64_object(obj)
+
+
+def is_dt64(obj):
+    return cnp.is_datetime64_object(obj)
+
+
+def get_dt64_value(obj):
+    return cnp.get_datetime64_value(obj)
+
+
+def get_td64_value(obj):
+    return cnp.get_timedelta64_value(obj)
+
+
+def get_dt64_unit(obj):
+    return cnp.get_datetime64_unit(obj)
+
+
+def is_integer(obj):
+    return isinstance(obj, (cnp.integer, int))
diff --git a/numpy/core/tests/examples/cython/setup.py b/numpy/core/tests/examples/cython/setup.py
new file mode 100644 (file)
index 0000000..6e34aa7
--- /dev/null
@@ -0,0 +1,25 @@
+"""
+Provide python-space access to the functions exposed in numpy/__init__.pxd
+for testing.
+"""
+
+import numpy as np
+from distutils.core import setup
+from Cython.Build import cythonize
+from setuptools.extension import Extension
+import os
+
+macros = [("NPY_NO_DEPRECATED_API", 0)]
+
+checks = Extension(
+    "checks",
+    sources=[os.path.join('.', "checks.pyx")],
+    include_dirs=[np.get_include()],
+    define_macros=macros,
+)
+
+extensions = [checks]
+
+setup(
+    ext_modules=cythonize(extensions)
+)
diff --git a/numpy/core/tests/examples/limited_api/limited_api.c b/numpy/core/tests/examples/limited_api/limited_api.c
new file mode 100644 (file)
index 0000000..698c54c
--- /dev/null
@@ -0,0 +1,17 @@
+#define Py_LIMITED_API 0x03060000
+
+#include <Python.h>
+#include <numpy/arrayobject.h>
+#include <numpy/ufuncobject.h>
+
+static PyModuleDef moduledef = {
+    .m_base = PyModuleDef_HEAD_INIT,
+    .m_name = "limited_api"
+};
+
+PyMODINIT_FUNC PyInit_limited_api(void)
+{
+    import_array();
+    import_umath();
+    return PyModule_Create(&moduledef);
+}
diff --git a/numpy/core/tests/examples/limited_api/setup.py b/numpy/core/tests/examples/limited_api/setup.py
new file mode 100644 (file)
index 0000000..18747dc
--- /dev/null
@@ -0,0 +1,22 @@
+"""
+Build an example package using the limited Python C API.
+"""
+
+import numpy as np
+from setuptools import setup, Extension
+import os
+
+macros = [("NPY_NO_DEPRECATED_API", 0), ("Py_LIMITED_API", "0x03060000")]
+
+limited_api = Extension(
+    "limited_api",
+    sources=[os.path.join('.', "limited_api.c")],
+    include_dirs=[np.get_include()],
+    define_macros=macros,
+)
+
+extensions = [limited_api]
+
+setup(
+    ext_modules=extensions
+)
diff --git a/numpy/core/tests/examples/setup.py b/numpy/core/tests/examples/setup.py
deleted file mode 100644 (file)
index 6e34aa7..0000000
+++ /dev/null
@@ -1,25 +0,0 @@
-"""
-Provide python-space access to the functions exposed in numpy/__init__.pxd
-for testing.
-"""
-
-import numpy as np
-from distutils.core import setup
-from Cython.Build import cythonize
-from setuptools.extension import Extension
-import os
-
-macros = [("NPY_NO_DEPRECATED_API", 0)]
-
-checks = Extension(
-    "checks",
-    sources=[os.path.join('.', "checks.pyx")],
-    include_dirs=[np.get_include()],
-    define_macros=macros,
-)
-
-extensions = [checks]
-
-setup(
-    ext_modules=cythonize(extensions)
-)
index a1f09d0fef129f7916841605ed5db0b9fed8837d..9896de0ec29f6845de5b5ef7515cecd975ca0fdd 100644 (file)
@@ -32,7 +32,7 @@ def install_temp(request, tmp_path):
     # Based in part on test_cython from random.tests.test_extending
 
     here = os.path.dirname(__file__)
-    ext_dir = os.path.join(here, "examples")
+    ext_dir = os.path.join(here, "examples", "cython")
 
     cytest = str(tmp_path / "cytest")
 
diff --git a/numpy/core/tests/test_limited_api.py b/numpy/core/tests/test_limited_api.py
new file mode 100644 (file)
index 0000000..0bb543d
--- /dev/null
@@ -0,0 +1,41 @@
+import os
+import shutil
+import subprocess
+import sys
+import sysconfig
+import pytest
+
+
+@pytest.mark.xfail(
+    sysconfig.get_config_var("Py_DEBUG"),
+    reason=(
+        "Py_LIMITED_API is incompatible with Py_DEBUG, Py_TRACE_REFS, "
+        "and Py_REF_DEBUG"
+    ),
+)
+def test_limited_api(tmp_path):
+    """Test building a third-party C extension with the limited API."""
+    # Based in part on test_cython from random.tests.test_extending
+
+    here = os.path.dirname(__file__)
+    ext_dir = os.path.join(here, "examples", "limited_api")
+
+    cytest = str(tmp_path / "limited_api")
+
+    shutil.copytree(ext_dir, cytest)
+    # build the examples and "install" them into a temporary directory
+
+    install_log = str(tmp_path / "tmp_install_log.txt")
+    subprocess.check_call(
+        [
+            sys.executable,
+            "setup.py",
+            "build",
+            "install",
+            "--prefix", str(tmp_path / "installdir"),
+            "--single-version-externally-managed",
+            "--record",
+            install_log,
+        ],
+        cwd=cytest,
+    )
index 1e0829f0d927d910b13ce4a6f09713e71f5884d2..5328acea863aecda31318752e5eb86d06889e38e 100644 (file)
@@ -14,6 +14,7 @@ from numpy.testing import (
     assert_almost_equal, assert_array_almost_equal, assert_no_warnings,
     assert_allclose, HAS_REFCOUNT, suppress_warnings
     )
+from numpy.testing._private.utils import requires_memory
 from numpy.compat import pickle
 
 
@@ -1555,6 +1556,17 @@ class TestUfunc:
                                     [[0, 1, 1], [1, 1, 1]])
         assert_equal(np.minimum.reduce(a, axis=()), a)
 
+    @requires_memory(6 * 1024**3)
+    def test_identityless_reduction_huge_array(self):
+        # Regression test for gh-20921 (copying identity incorrectly failed)
+        arr = np.zeros((2, 2**31), 'uint8')
+        arr[:, 0] = [1, 3]
+        arr[:, -1] = [4, 1]
+        res = np.maximum.reduce(arr, axis=0)
+        del arr
+        assert res[0] == 3
+        assert res[-1] == 4
+
     def test_identityless_reduction_corder(self):
         a = np.empty((2, 3, 4), order='C')
         self.check_identityless_reduction(a)
index 7040a241167c2f7276d67dba34c8ec30716e9d6e..8b568c1596de19e9c4862bfbf4d3bae464a645df 100644 (file)
@@ -231,20 +231,36 @@ class build_ext (old_build_ext):
             l = ext.language or self.compiler.detect_language(ext.sources)
             if l:
                 ext_languages.add(l)
+
             # reset language attribute for choosing proper linker
+            #
+            # When we build extensions with multiple languages, we have to
+            # choose a linker. The rules here are:
+            #   1. if there is Fortran code, always prefer the Fortran linker,
+            #   2. otherwise prefer C++ over C,
+            #   3. Users can force a particular linker by using
+            #          `language='c'`  # or 'c++', 'f90', 'f77'
+            #      in their config.add_extension() calls.
             if 'c++' in ext_languages:
                 ext_language = 'c++'
-            elif 'f90' in ext_languages:
+            else:
+                ext_language = 'c'  # default
+
+            has_fortran = False
+            if 'f90' in ext_languages:
                 ext_language = 'f90'
+                has_fortran = True
             elif 'f77' in ext_languages:
                 ext_language = 'f77'
-            else:
-                ext_language = 'c'  # default
-            if l and l != ext_language and ext.language:
-                log.warn('resetting extension %r language from %r to %r.' %
-                         (ext.name, l, ext_language))
-            if not ext.language:
+                has_fortran = True
+
+            if not ext.language or has_fortran:
+                if l and l != ext_language and ext.language:
+                    log.warn('resetting extension %r language from %r to %r.' %
+                             (ext.name, l, ext_language))
+
                 ext.language = ext_language
+
             # global language
             all_languages.update(ext_languages)
 
@@ -377,8 +393,8 @@ class build_ext (old_build_ext):
             log.info("building '%s' extension", ext.name)
 
         extra_args = ext.extra_compile_args or []
-        extra_cflags = ext.extra_c_compile_args or []
-        extra_cxxflags = ext.extra_cxx_compile_args or []
+        extra_cflags = getattr(ext, 'extra_c_compile_args', None) or []
+        extra_cxxflags = getattr(ext, 'extra_cxx_compile_args', None) or []
 
         macros = ext.define_macros[:]
         for undef in ext.undef_macros:
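The linker-selection rules documented in the first hunk above boil down to a small decision function; a hypothetical, standalone sketch (the real logic lives in numpy.distutils.command.build_ext and also warns when it overrides an explicit choice):

    def pick_linker_language(detected, forced=None):
        """detected: languages found in the sources, e.g. {'c', 'c++', 'f77'};
        forced: the Extension's explicit language=... setting, if any."""
        # Rule 1: Fortran code always gets the Fortran linker.
        if 'f90' in detected:
            return 'f90'
        if 'f77' in detected:
            return 'f77'
        # Rule 3: otherwise an explicit language= wins.
        if forced:
            return forced
        # Rule 2: otherwise prefer C++ over C.
        return 'c++' if 'c++' in detected else 'c'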
index 4848233d43a54f2e691c4f0640837af279732a94..c2d7faaad9bf1f435252c62e7468975232aa942f 100644 (file)
@@ -230,12 +230,21 @@ cb_rout_rules = [
         'latexdocstrcbs': '\\noindent Call-back functions:',
         'routnote': {hasnote: '--- #note#', l_not(hasnote): ''},
     }, {  # Function
-        'decl': '    #ctype# return_value;',
-        'frompyobj': [{debugcapi: '    CFUNCSMESS("cb:Getting return_value->");'},
-                      '    if (capi_j>capi_i)\n        GETSCALARFROMPYTUPLE(capi_return,capi_i++,&return_value,#ctype#,"#ctype#_from_pyobj failed in converting return_value of call-back function #name# to C #ctype#\\n");',
-                      {debugcapi:
-                       '    fprintf(stderr,"#showvalueformat#.\\n",return_value);'}
-                      ],
+        'decl': '    #ctype# return_value = 0;',
+        'frompyobj': [
+            {debugcapi: '    CFUNCSMESS("cb:Getting return_value->");'},
+            '''\
+    if (capi_j>capi_i) {
+        GETSCALARFROMPYTUPLE(capi_return,capi_i++,&return_value,#ctype#,
+          "#ctype#_from_pyobj failed in converting return_value of"
+          " call-back function #name# to C #ctype#\\n");
+    } else {
+        fprintf(stderr,"Warning: call-back function #name# did not provide"
+                       " return value (index=%d, type=#ctype#)\\n",capi_i);
+    }''',
+            {debugcapi:
+             '    fprintf(stderr,"#showvalueformat#.\\n",return_value);'}
+        ],
         'need': ['#ctype#_from_pyobj', {debugcapi: 'CFUNCSMESS'}, 'GETSCALARFROMPYTUPLE'],
         'return': '    return return_value;',
         '_check': l_and(isfunction, l_not(isstringfunction), l_not(iscomplexfunction))
@@ -245,12 +254,18 @@ cb_rout_rules = [
         'args': '#ctype# return_value,int return_value_len',
         'args_nm': 'return_value,&return_value_len',
         'args_td': '#ctype# ,int',
-        'frompyobj': [{debugcapi: '    CFUNCSMESS("cb:Getting return_value->\\"");'},
-                      """    if (capi_j>capi_i)
-        GETSTRFROMPYTUPLE(capi_return,capi_i++,return_value,return_value_len);""",
-                      {debugcapi:
-                       '    fprintf(stderr,"#showvalueformat#\\".\\n",return_value);'}
-                      ],
+        'frompyobj': [
+            {debugcapi: '    CFUNCSMESS("cb:Getting return_value->\\"");'},
+            """\
+    if (capi_j>capi_i) {
+        GETSTRFROMPYTUPLE(capi_return,capi_i++,return_value,return_value_len);
+    } else {
+        fprintf(stderr,"Warning: call-back function #name# did not provide"
+                       " return value (index=%d, type=#ctype#)\\n",capi_i);
+    }""",
+            {debugcapi:
+             '    fprintf(stderr,"#showvalueformat#\\".\\n",return_value);'}
+        ],
         'need': ['#ctype#_from_pyobj', {debugcapi: 'CFUNCSMESS'},
                  'string.h', 'GETSTRFROMPYTUPLE'],
         'return': 'return;',
@@ -274,27 +289,35 @@ return_value
 """,
         'decl': """
 #ifdef F2PY_CB_RETURNCOMPLEX
-    #ctype# return_value;
+    #ctype# return_value = {0, 0};
 #endif
 """,
-        'frompyobj': [{debugcapi: '    CFUNCSMESS("cb:Getting return_value->");'},
-                      """\
-    if (capi_j>capi_i)
+        'frompyobj': [
+            {debugcapi: '    CFUNCSMESS("cb:Getting return_value->");'},
+            """\
+    if (capi_j>capi_i) {
 #ifdef F2PY_CB_RETURNCOMPLEX
-        GETSCALARFROMPYTUPLE(capi_return,capi_i++,&return_value,#ctype#,\"#ctype#_from_pyobj failed in converting return_value of call-back function #name# to C #ctype#\\n\");
+        GETSCALARFROMPYTUPLE(capi_return,capi_i++,&return_value,#ctype#,
+          \"#ctype#_from_pyobj failed in converting return_value of call-back\"
+          \" function #name# to C #ctype#\\n\");
 #else
-        GETSCALARFROMPYTUPLE(capi_return,capi_i++,return_value,#ctype#,\"#ctype#_from_pyobj failed in converting return_value of call-back function #name# to C #ctype#\\n\");
+        GETSCALARFROMPYTUPLE(capi_return,capi_i++,return_value,#ctype#,
+          \"#ctype#_from_pyobj failed in converting return_value of call-back\"
+          \" function #name# to C #ctype#\\n\");
 #endif
-""",
-                      {debugcapi: """
+    } else {
+        fprintf(stderr,
+                \"Warning: call-back function #name# did not provide\"
+                \" return value (index=%d, type=#ctype#)\\n\",capi_i);
+    }""",
+            {debugcapi: """\
 #ifdef F2PY_CB_RETURNCOMPLEX
     fprintf(stderr,\"#showvalueformat#.\\n\",(return_value).r,(return_value).i);
 #else
     fprintf(stderr,\"#showvalueformat#.\\n\",(*return_value).r,(*return_value).i);
 #endif
-
 """}
-                      ],
+        ],
         'return': """
 #ifdef F2PY_CB_RETURNCOMPLEX
     return return_value;
index 528c4adeee77ac1db2928f27001662218f6aeb49..bdd27adaf4c691eb33eead8cedd3fc144cfb4cca 100644 (file)
@@ -572,18 +572,20 @@ cppmacros['OLDPYNUM'] = """\
 """
 cppmacros["F2PY_THREAD_LOCAL_DECL"] = """\
 #ifndef F2PY_THREAD_LOCAL_DECL
-#if defined(_MSC_VER) \\
-      || defined(_WIN32) || defined(_WIN64) \\
-      || defined(__MINGW32__) || defined(__MINGW64__)
+#if defined(_MSC_VER)
 #define F2PY_THREAD_LOCAL_DECL __declspec(thread)
+#elif defined(__MINGW32__) || defined(__MINGW64__)
+#define F2PY_THREAD_LOCAL_DECL __thread
 #elif defined(__STDC_VERSION__) \\
       && (__STDC_VERSION__ >= 201112L) \\
       && !defined(__STDC_NO_THREADS__) \\
-      && (!defined(__GLIBC__) || __GLIBC__ > 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ > 12))
+      && (!defined(__GLIBC__) || __GLIBC__ > 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ > 12)) \\
+      && !defined(__OpenBSD__)
 /* __STDC_NO_THREADS__ was first defined in a maintenance release of glibc 2.12,
    see https://lists.gnu.org/archive/html/commit-hurd/2012-07/msg00180.html,
    so `!defined(__STDC_NO_THREADS__)` may give false positive for the existence
-   of `threads.h` when using an older release of glibc 2.12 */
+   of `threads.h` when using an older release of glibc 2.12
+   See gh-19437 for details on OpenBSD */
 #include <threads.h>
 #define F2PY_THREAD_LOCAL_DECL thread_local
 #elif defined(__GNUC__) \\
index 20bf3dceac0295192b8282c146bcd510d188c2ea..fc39163953d9e8db76465d7a3014e734b8b3534c 100644 (file)
@@ -8,8 +8,6 @@ import os
 import pathlib
 import sys
 import sysconfig
-from numpy.distutils.ccompiler import new_compiler
-from distutils.errors import CompileError
 
 __all__ = ['build_and_import_extension', 'compile_extension_module']
 
@@ -53,6 +51,7 @@ def build_and_import_extension(
     >>> assert not mod.test_bytes(u'abc')
     >>> assert mod.test_bytes(b'abc')
     """
+    from distutils.errors import CompileError
 
     body = prologue + _make_methods(functions, modname)
     init = """PyObject *mod = PyModule_Create(&moduledef);
@@ -221,6 +220,7 @@ def _c_compile(cfile, outputfilename, include_dirs=[], libraries=[],
 def build(cfile, outputfilename, compile_extra, link_extra,
           include_dirs, libraries, library_dirs):
     "cd into the directory where the cfile is, use distutils to build"
+    from numpy.distutils.ccompiler import new_compiler
 
     compiler = new_compiler(force=1, verbose=2)
     compiler.customize('')
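The deferred imports above are what make gh-20906 effective; a quick sketch for checking the result in a fresh interpreter (assuming NumPy 1.22.2):

    import sys
    import numpy.testing  # noqa: F401

    # numpy.distutils should no longer be pulled in as a side effect.
    print("numpy.distutils loaded:", "numpy.distutils" in sys.modules)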
index ef89acb58a2a73052cff8f2584cf56329c17edf3..9e7d6e5452d12d1bd6c0f0524426ca2597e83b18 100644 (file)
@@ -13,5 +13,11 @@ reveal_type(a[0])  # E: str_
 reveal_type(a[[0, 1, 2]])  # E: ndarray[Any, dtype[str_]]
 reveal_type(a[...])  # E: ndarray[Any, dtype[str_]]
 reveal_type(a[:])  # E: ndarray[Any, dtype[str_]]
+reveal_type(a[(...,)])  # E: ndarray[Any, dtype[str_]]
+reveal_type(a[(0,)])  # E: str_
 reveal_type(a.__array__())  # E: ndarray[Any, dtype[str_]]
 reveal_type(a.__array__(np.dtype(np.float64)))  # E: ndarray[Any, dtype[{float64}]]
+a[0] = "a"
+a[:5] = "a"
+a[...] = "a"
+a[(...,)] = "a"
index c559eb2958931175aa2c5264c76308dda9e152a7..eebe9fbfdc70a192142c172e5312bcca52bb5d3d 100644 (file)
@@ -26,7 +26,7 @@ reveal_type(vectorized_func.signature)  # E: Union[None, builtins.str]
 reveal_type(vectorized_func.otypes)  # E: Union[None, builtins.str]
 reveal_type(vectorized_func.excluded)  # E: set[Union[builtins.int, builtins.str]]
 reveal_type(vectorized_func.__doc__)  # E: Union[None, builtins.str]
-reveal_type(vectorized_func([1]))  # E: ndarray[Any, dtype[Any]]
+reveal_type(vectorized_func([1]))  # E: Any
 reveal_type(np.vectorize(int))  # E: vectorize
 reveal_type(np.vectorize(  # E: vectorize
     int, otypes="i", doc="doc", excluded=(), cache=True, signature=None
index 3533e420a558623c05d9e1144e8a0f50a5fb2d64..6fea333369b4d01d44ed833ac46459a133b7f37a 100644 (file)
@@ -38,7 +38,7 @@ from paver.easy import Bunch, options, task, sh
 #-----------------------------------
 
 # Path to the release notes
-RELEASE_NOTES = 'doc/source/release/1.22.1-notes.rst'
+RELEASE_NOTES = 'doc/source/release/1.22.2-notes.rst'
 
 
 #-------------------------------------------------------
index 06170df51ac408f54cc8cbf5f807b61f280e0e1d..0257fc5ba7558a5def63ab05cd6e7facf43c16b3 100755 (executable)
--- a/setup.py
+++ b/setup.py
@@ -81,9 +81,16 @@ if os.path.exists('MANIFEST'):
 import numpy.distutils.command.sdist
 import setuptools
 if int(setuptools.__version__.split('.')[0]) >= 60:
-    raise RuntimeError(
-        "Setuptools version is '{}', version < '60.0.0' is required. "
-        "See pyproject.toml".format(setuptools.__version__))
+    # setuptools >= 60 switches to vendored distutils by default; this
+    # may break the numpy build, so make sure the stdlib version is used
+    try:
+        setuptools_use_distutils = os.environ['SETUPTOOLS_USE_DISTUTILS']
+    except KeyError:
+        os.environ['SETUPTOOLS_USE_DISTUTILS'] = "stdlib"
+    else:
+        if setuptools_use_distutils != "stdlib":
+            raise RuntimeError("setuptools versions >= '60.0.0' require "
+                    "SETUPTOOLS_USE_DISTUTILS=stdlib in the environment")
 
 # Initialize cmdclass from versioneer
 from numpy.distutils.core import numpy_cmdclass
index 558633733da3244e25ebf627951ffdc297d147cb..eac81732224214dc128e0d04573ea3b86d9a170d 100644 (file)
@@ -52,7 +52,7 @@
     }
 %#endif
     if (!PyArray_IsScalar(obj,Integer)) return SWIG_TypeError;
-    PyArray_Descr * longDescr = PyArray_DescrNewFromType(NPY_LONG);
+    PyArray_Descr * longDescr = PyArray_DescrFromType(NPY_LONG);
     PyArray_CastScalarToCtype(obj, (void*)val, longDescr);
     Py_DECREF(longDescr);
     return SWIG_OK;
     }
 %#endif
     if (!PyArray_IsScalar(obj,Integer)) return SWIG_TypeError;
-    PyArray_Descr * ulongDescr = PyArray_DescrNewFromType(NPY_ULONG);
+    PyArray_Descr * ulongDescr = PyArray_DescrFromType(NPY_ULONG);
     PyArray_CastScalarToCtype(obj, (void*)val, ulongDescr);
     Py_DECREF(ulongDescr);
     return SWIG_OK;