Diffstat (limited to '.venv/lib/python3.12/site-packages/numpy/tests')
-rw-r--r--  .venv/lib/python3.12/site-packages/numpy/tests/__init__.py              0
-rw-r--r--  .venv/lib/python3.12/site-packages/numpy/tests/test__all__.py           9
-rw-r--r--  .venv/lib/python3.12/site-packages/numpy/tests/test_ctypeslib.py      370
-rw-r--r--  .venv/lib/python3.12/site-packages/numpy/tests/test_lazyloading.py     38
-rw-r--r--  .venv/lib/python3.12/site-packages/numpy/tests/test_matlib.py          58
-rw-r--r--  .venv/lib/python3.12/site-packages/numpy/tests/test_numpy_config.py    44
-rw-r--r--  .venv/lib/python3.12/site-packages/numpy/tests/test_numpy_version.py   41
-rw-r--r--  .venv/lib/python3.12/site-packages/numpy/tests/test_public_api.py     551
-rw-r--r--  .venv/lib/python3.12/site-packages/numpy/tests/test_reloading.py       72
-rw-r--r--  .venv/lib/python3.12/site-packages/numpy/tests/test_scripts.py         47
-rw-r--r--  .venv/lib/python3.12/site-packages/numpy/tests/test_warnings.py        74
11 files changed, 1304 insertions(+), 0 deletions(-)
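
These are NumPy's own bundled test modules, vendored here inside the virtualenv. Assuming pytest (and hypothesis, which NumPy's test configuration imports) is available in the same environment, just this subset can be run through NumPy's test entry point; a minimal sketch:

    import numpy as np

    # Run only the modules under numpy/tests; 'tests' is forwarded to pytest
    # via --pyargs by numpy's PytestTester interface.
    np.test(tests=["numpy.tests"])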
diff --git a/.venv/lib/python3.12/site-packages/numpy/tests/__init__.py b/.venv/lib/python3.12/site-packages/numpy/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/numpy/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/numpy/tests/test__all__.py b/.venv/lib/python3.12/site-packages/numpy/tests/test__all__.py
new file mode 100644
index 00000000..e44bda3d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/numpy/tests/test__all__.py
@@ -0,0 +1,9 @@
+
+import collections
+import numpy as np
+
+
+def test_no_duplicates_in_np__all__():
+    # Regression test for gh-10198.
+    dups = {k: v for k, v in collections.Counter(np.__all__).items() if v > 1}
+    assert len(dups) == 0
diff --git a/.venv/lib/python3.12/site-packages/numpy/tests/test_ctypeslib.py b/.venv/lib/python3.12/site-packages/numpy/tests/test_ctypeslib.py
new file mode 100644
index 00000000..965e547e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/numpy/tests/test_ctypeslib.py
@@ -0,0 +1,370 @@
+import sys
+import sysconfig
+import weakref
+from pathlib import Path
+
+import pytest
+
+import numpy as np
+from numpy.ctypeslib import ndpointer, load_library, as_array
+from numpy.testing import assert_, assert_array_equal, assert_raises, assert_equal
+
+try:
+    import ctypes
+except ImportError:
+    ctypes = None
+else:
+    cdll = None
+    test_cdll = None
+    if hasattr(sys, 'gettotalrefcount'):
+        try:
+            cdll = load_library('_multiarray_umath_d', np.core._multiarray_umath.__file__)
+        except OSError:
+            pass
+        try:
+            test_cdll = load_library('_multiarray_tests', np.core._multiarray_tests.__file__)
+        except OSError:
+            pass
+    if cdll is None:
+        cdll = load_library('_multiarray_umath', np.core._multiarray_umath.__file__)
+    if test_cdll is None:
+        test_cdll = load_library('_multiarray_tests', np.core._multiarray_tests.__file__)
+
+    c_forward_pointer = test_cdll.forward_pointer
+
+
+@pytest.mark.skipif(ctypes is None,
+                    reason="ctypes not available in this python")
+@pytest.mark.skipif(sys.platform == 'cygwin',
+                    reason="Known to fail on cygwin")
+class TestLoadLibrary:
+    def test_basic(self):
+        loader_path = np.core._multiarray_umath.__file__
+
+        out1 = load_library('_multiarray_umath', loader_path)
+        out2 = load_library(Path('_multiarray_umath'), loader_path)
+        out3 = load_library('_multiarray_umath', Path(loader_path))
+        out4 = load_library(b'_multiarray_umath', loader_path)
+
+        assert isinstance(out1, ctypes.CDLL)
+        assert out1 is out2 is out3 is out4
+
+    def test_basic2(self):
+        # Regression for #801: load_library with a full library name
+        # (including extension) does not work.
+        try:
+            so_ext = sysconfig.get_config_var('EXT_SUFFIX')
+            load_library('_multiarray_umath%s' % so_ext,
+                         np.core._multiarray_umath.__file__)
+        except ImportError as e:
+            msg = ("ctypes is not available on this python: skipping the test"
+                   " (import error was: %s)" % str(e))
+            print(msg)
+
+
+class TestNdpointer:
+    def test_dtype(self):
+        dt = np.intc
+        p = ndpointer(dtype=dt)
+        assert_(p.from_param(np.array([1], dt)))
+        dt = '<i4'
+        p = ndpointer(dtype=dt)
+        assert_(p.from_param(np.array([1], dt)))
+        dt = np.dtype('>i4')
+        p = ndpointer(dtype=dt)
+        p.from_param(np.array([1], dt))
+        assert_raises(TypeError, p.from_param,
+                          np.array([1], dt.newbyteorder('swap')))
+        dtnames = ['x', 'y']
+        dtformats = [np.intc, np.float64]
+        dtdescr = {'names': dtnames, 'formats': dtformats}
+        dt = np.dtype(dtdescr)
+        p = ndpointer(dtype=dt)
+        assert_(p.from_param(np.zeros((10,), dt)))
+        samedt = np.dtype(dtdescr)
+        p = ndpointer(dtype=samedt)
+        assert_(p.from_param(np.zeros((10,), dt)))
+        dt2 = np.dtype(dtdescr, align=True)
+        if dt.itemsize != dt2.itemsize:
+            assert_raises(TypeError, p.from_param, np.zeros((10,), dt2))
+        else:
+            assert_(p.from_param(np.zeros((10,), dt2)))
+
+    def test_ndim(self):
+        p = ndpointer(ndim=0)
+        assert_(p.from_param(np.array(1)))
+        assert_raises(TypeError, p.from_param, np.array([1]))
+        p = ndpointer(ndim=1)
+        assert_raises(TypeError, p.from_param, np.array(1))
+        assert_(p.from_param(np.array([1])))
+        p = ndpointer(ndim=2)
+        assert_(p.from_param(np.array([[1]])))
+
+    def test_shape(self):
+        p = ndpointer(shape=(1, 2))
+        assert_(p.from_param(np.array([[1, 2]])))
+        assert_raises(TypeError, p.from_param, np.array([[1], [2]]))
+        p = ndpointer(shape=())
+        assert_(p.from_param(np.array(1)))
+
+    def test_flags(self):
+        x = np.array([[1, 2], [3, 4]], order='F')
+        p = ndpointer(flags='FORTRAN')
+        assert_(p.from_param(x))
+        p = ndpointer(flags='CONTIGUOUS')
+        assert_raises(TypeError, p.from_param, x)
+        p = ndpointer(flags=x.flags.num)
+        assert_(p.from_param(x))
+        assert_raises(TypeError, p.from_param, np.array([[1, 2], [3, 4]]))
+
+    def test_cache(self):
+        assert_(ndpointer(dtype=np.float64) is ndpointer(dtype=np.float64))
+
+        # shapes are normalized
+        assert_(ndpointer(shape=2) is ndpointer(shape=(2,)))
+
+        # 1.12 <= v < 1.16 had a bug that made these fail
+        assert_(ndpointer(shape=2) is not ndpointer(ndim=2))
+        assert_(ndpointer(ndim=2) is not ndpointer(shape=2))
+
+@pytest.mark.skipif(ctypes is None,
+                    reason="ctypes not available on this python installation")
+class TestNdpointerCFunc:
+    def test_arguments(self):
+        """ Test that arguments are coerced from arrays """
+        c_forward_pointer.restype = ctypes.c_void_p
+        c_forward_pointer.argtypes = (ndpointer(ndim=2),)
+
+        c_forward_pointer(np.zeros((2, 3)))
+        # too many dimensions
+        assert_raises(
+            ctypes.ArgumentError, c_forward_pointer, np.zeros((2, 3, 4)))
+
+    @pytest.mark.parametrize(
+        'dt', [
+            float,
+            np.dtype(dict(
+                formats=['<i4', '<i4'],
+                names=['a', 'b'],
+                offsets=[0, 2],
+                itemsize=6
+            ))
+        ], ids=[
+            'float',
+            'overlapping-fields'
+        ]
+    )
+    def test_return(self, dt):
+        """ Test that return values are coerced to arrays """
+        arr = np.zeros((2, 3), dt)
+        ptr_type = ndpointer(shape=arr.shape, dtype=arr.dtype)
+
+        c_forward_pointer.restype = ptr_type
+        c_forward_pointer.argtypes = (ptr_type,)
+
+        # check that the arrays are equivalent views on the same data
+        arr2 = c_forward_pointer(arr)
+        assert_equal(arr2.dtype, arr.dtype)
+        assert_equal(arr2.shape, arr.shape)
+        assert_equal(
+            arr2.__array_interface__['data'],
+            arr.__array_interface__['data']
+        )
+
+    def test_vague_return_value(self):
+        """ Test that vague ndpointer return values do not promote to arrays """
+        arr = np.zeros((2, 3))
+        ptr_type = ndpointer(dtype=arr.dtype)
+
+        c_forward_pointer.restype = ptr_type
+        c_forward_pointer.argtypes = (ptr_type,)
+
+        ret = c_forward_pointer(arr)
+        assert_(isinstance(ret, ptr_type))
+
+
+@pytest.mark.skipif(ctypes is None,
+                    reason="ctypes not available on this python installation")
+class TestAsArray:
+    def test_array(self):
+        from ctypes import c_int
+
+        pair_t = c_int * 2
+        a = as_array(pair_t(1, 2))
+        assert_equal(a.shape, (2,))
+        assert_array_equal(a, np.array([1, 2]))
+        a = as_array((pair_t * 3)(pair_t(1, 2), pair_t(3, 4), pair_t(5, 6)))
+        assert_equal(a.shape, (3, 2))
+        assert_array_equal(a, np.array([[1, 2], [3, 4], [5, 6]]))
+
+    def test_pointer(self):
+        from ctypes import c_int, cast, POINTER
+
+        p = cast((c_int * 10)(*range(10)), POINTER(c_int))
+
+        a = as_array(p, shape=(10,))
+        assert_equal(a.shape, (10,))
+        assert_array_equal(a, np.arange(10))
+
+        a = as_array(p, shape=(2, 5))
+        assert_equal(a.shape, (2, 5))
+        assert_array_equal(a, np.arange(10).reshape((2, 5)))
+
+        # shape argument is required
+        assert_raises(TypeError, as_array, p)
+
+    @pytest.mark.skipif(
+        sys.version_info == (3, 12, 0, "candidate", 1),
+        reason="Broken in 3.12.0rc1, see gh-24399",
+    )
+    def test_struct_array_pointer(self):
+        from ctypes import c_int16, Structure, pointer
+
+        class Struct(Structure):
+            _fields_ = [('a', c_int16)]
+
+        Struct3 = 3 * Struct
+
+        c_array = (2 * Struct3)(
+            Struct3(Struct(a=1), Struct(a=2), Struct(a=3)),
+            Struct3(Struct(a=4), Struct(a=5), Struct(a=6))
+        )
+
+        expected = np.array([
+            [(1,), (2,), (3,)],
+            [(4,), (5,), (6,)],
+        ], dtype=[('a', np.int16)])
+
+        def check(x):
+            assert_equal(x.dtype, expected.dtype)
+            assert_equal(x, expected)
+
+        # all of these should be equivalent
+        check(as_array(c_array))
+        check(as_array(pointer(c_array), shape=()))
+        check(as_array(pointer(c_array[0]), shape=(2,)))
+        check(as_array(pointer(c_array[0][0]), shape=(2, 3)))
+
+    def test_reference_cycles(self):
+        # related to gh-6511
+        import ctypes
+
+        # create array to work with
+        # don't use int/long to avoid running into bpo-10746
+        N = 100
+        a = np.arange(N, dtype=np.short)
+
+        # get pointer to array
+        pnt = np.ctypeslib.as_ctypes(a)
+
+        with np.testing.assert_no_gc_cycles():
+            # decay the array above to a pointer to its first element
+            newpnt = ctypes.cast(pnt, ctypes.POINTER(ctypes.c_short))
+            # and construct an array using this data
+            b = np.ctypeslib.as_array(newpnt, (N,))
+            # now delete both, which should cleanup both objects
+            del newpnt, b
+
+    def test_segmentation_fault(self):
+        arr = np.zeros((224, 224, 3))
+        c_arr = np.ctypeslib.as_ctypes(arr)
+        arr_ref = weakref.ref(arr)
+        del arr
+
+        # check the reference wasn't cleaned up
+        assert_(arr_ref() is not None)
+
+        # check we avoid the segfault
+        c_arr[0][0][0]
+
+
+@pytest.mark.skipif(ctypes is None,
+                    reason="ctypes not available on this python installation")
+class TestAsCtypesType:
+    """ Test conversion from dtypes to ctypes types """
+    def test_scalar(self):
+        dt = np.dtype('<u2')
+        ct = np.ctypeslib.as_ctypes_type(dt)
+        assert_equal(ct, ctypes.c_uint16.__ctype_le__)
+
+        dt = np.dtype('>u2')
+        ct = np.ctypeslib.as_ctypes_type(dt)
+        assert_equal(ct, ctypes.c_uint16.__ctype_be__)
+
+        dt = np.dtype('u2')
+        ct = np.ctypeslib.as_ctypes_type(dt)
+        assert_equal(ct, ctypes.c_uint16)
+
+    def test_subarray(self):
+        dt = np.dtype((np.int32, (2, 3)))
+        ct = np.ctypeslib.as_ctypes_type(dt)
+        assert_equal(ct, 2 * (3 * ctypes.c_int32))
+
+    def test_structure(self):
+        dt = np.dtype([
+            ('a', np.uint16),
+            ('b', np.uint32),
+        ])
+
+        ct = np.ctypeslib.as_ctypes_type(dt)
+        assert_(issubclass(ct, ctypes.Structure))
+        assert_equal(ctypes.sizeof(ct), dt.itemsize)
+        assert_equal(ct._fields_, [
+            ('a', ctypes.c_uint16),
+            ('b', ctypes.c_uint32),
+        ])
+
+    def test_structure_aligned(self):
+        dt = np.dtype([
+            ('a', np.uint16),
+            ('b', np.uint32),
+        ], align=True)
+
+        ct = np.ctypeslib.as_ctypes_type(dt)
+        assert_(issubclass(ct, ctypes.Structure))
+        assert_equal(ctypes.sizeof(ct), dt.itemsize)
+        assert_equal(ct._fields_, [
+            ('a', ctypes.c_uint16),
+            ('', ctypes.c_char * 2),  # padding
+            ('b', ctypes.c_uint32),
+        ])
+
+    def test_union(self):
+        dt = np.dtype(dict(
+            names=['a', 'b'],
+            offsets=[0, 0],
+            formats=[np.uint16, np.uint32]
+        ))
+
+        ct = np.ctypeslib.as_ctypes_type(dt)
+        assert_(issubclass(ct, ctypes.Union))
+        assert_equal(ctypes.sizeof(ct), dt.itemsize)
+        assert_equal(ct._fields_, [
+            ('a', ctypes.c_uint16),
+            ('b', ctypes.c_uint32),
+        ])
+
+    def test_padded_union(self):
+        dt = np.dtype(dict(
+            names=['a', 'b'],
+            offsets=[0, 0],
+            formats=[np.uint16, np.uint32],
+            itemsize=5,
+        ))
+
+        ct = np.ctypeslib.as_ctypes_type(dt)
+        assert_(issubclass(ct, ctypes.Union))
+        assert_equal(ctypes.sizeof(ct), dt.itemsize)
+        assert_equal(ct._fields_, [
+            ('a', ctypes.c_uint16),
+            ('b', ctypes.c_uint32),
+            ('', ctypes.c_char * 5),  # padding
+        ])
+
+    def test_overlapping(self):
+        dt = np.dtype(dict(
+            names=['a', 'b'],
+            offsets=[0, 2],
+            formats=[np.uint32, np.uint32]
+        ))
+        assert_raises(NotImplementedError, np.ctypeslib.as_ctypes_type, dt)
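
The TestNdpointer/TestNdpointerCFunc cases above exercise ndpointer, which builds a ctypes-compatible type that validates arrays passed to (or returned from) foreign functions. A minimal standalone sketch of that validation, using only calls shown in this diff:

    import numpy as np
    from numpy.ctypeslib import ndpointer

    # ndpointer(...) returns a type whose from_param hook is what ctypes
    # invokes when coercing an argument; mismatches raise TypeError.
    ptr = ndpointer(dtype=np.float64, ndim=2, flags='CONTIGUOUS')
    ptr.from_param(np.zeros((2, 3)))                # accepted
    # ptr.from_param(np.zeros((2, 3), np.int32))    # would raise TypeError

In real use the resulting type is assigned to a foreign function's argtypes or restype, as TestNdpointerCFunc does with c_forward_pointer.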
diff --git a/.venv/lib/python3.12/site-packages/numpy/tests/test_lazyloading.py b/.venv/lib/python3.12/site-packages/numpy/tests/test_lazyloading.py
new file mode 100644
index 00000000..f31a4eab
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/numpy/tests/test_lazyloading.py
@@ -0,0 +1,38 @@
+import sys
+import importlib
+from importlib.util import LazyLoader, find_spec, module_from_spec
+import pytest
+
+
+# Warning raised by _reload_guard() in numpy/__init__.py
+@pytest.mark.filterwarnings("ignore:The NumPy module was reloaded")
+def test_lazy_load():
+    # gh-22045. lazyload doesn't import submodule names into the namespace
+    # muck with sys.modules to test the importing system
+    old_numpy = sys.modules.pop("numpy")
+
+    numpy_modules = {}
+    for mod_name, mod in list(sys.modules.items()):
+        if mod_name[:6] == "numpy.":
+            numpy_modules[mod_name] = mod
+            sys.modules.pop(mod_name)
+
+    try:
+        # create lazy load of numpy as np
+        spec = find_spec("numpy")
+        module = module_from_spec(spec)
+        sys.modules["numpy"] = module
+        loader = LazyLoader(spec.loader)
+        loader.exec_module(module)
+        np = module
+
+        # test a subpackage import
+        from numpy.lib import recfunctions
+
+        # test triggering the import of the package
+        np.ndarray
+
+    finally:
+        if old_numpy:
+            sys.modules["numpy"] = old_numpy
+            sys.modules.update(numpy_modules)
diff --git a/.venv/lib/python3.12/site-packages/numpy/tests/test_matlib.py b/.venv/lib/python3.12/site-packages/numpy/tests/test_matlib.py
new file mode 100644
index 00000000..0e93c484
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/numpy/tests/test_matlib.py
@@ -0,0 +1,58 @@
+import numpy as np
+import numpy.matlib
+from numpy.testing import assert_array_equal, assert_
+
+def test_empty():
+    x = numpy.matlib.empty((2,))
+    assert_(isinstance(x, np.matrix))
+    assert_(x.shape == (1, 2))
+
+def test_ones():
+    assert_array_equal(numpy.matlib.ones((2, 3)),
+                       np.matrix([[ 1.,  1.,  1.],
+                                 [ 1.,  1.,  1.]]))
+
+    assert_array_equal(numpy.matlib.ones(2), np.matrix([[ 1.,  1.]]))
+
+def test_zeros():
+    assert_array_equal(numpy.matlib.zeros((2, 3)),
+                       np.matrix([[ 0.,  0.,  0.],
+                                 [ 0.,  0.,  0.]]))
+
+    assert_array_equal(numpy.matlib.zeros(2), np.matrix([[ 0.,  0.]]))
+
+def test_identity():
+    x = numpy.matlib.identity(2, dtype=int)
+    assert_array_equal(x, np.matrix([[1, 0], [0, 1]]))
+
+def test_eye():
+    xc = numpy.matlib.eye(3, k=1, dtype=int)
+    assert_array_equal(xc, np.matrix([[ 0,  1,  0],
+                                      [ 0,  0,  1],
+                                      [ 0,  0,  0]]))
+    assert xc.flags.c_contiguous
+    assert not xc.flags.f_contiguous
+
+    xf = numpy.matlib.eye(3, 4, dtype=int, order='F')
+    assert_array_equal(xf, np.matrix([[ 1,  0,  0,  0],
+                                      [ 0,  1,  0,  0],
+                                      [ 0,  0,  1,  0]]))
+    assert not xf.flags.c_contiguous
+    assert xf.flags.f_contiguous
+
+def test_rand():
+    x = numpy.matlib.rand(3)
+    # check matrix type, array would have shape (3,)
+    assert_(x.ndim == 2)
+
+def test_randn():
+    x = np.matlib.randn(3)
+    # check matrix type, array would have shape (3,)
+    assert_(x.ndim == 2)
+
+def test_repmat():
+    a1 = np.arange(4)
+    x = numpy.matlib.repmat(a1, 2, 2)
+    y = np.array([[0, 1, 2, 3, 0, 1, 2, 3],
+                  [0, 1, 2, 3, 0, 1, 2, 3]])
+    assert_array_equal(x, y)
diff --git a/.venv/lib/python3.12/site-packages/numpy/tests/test_numpy_config.py b/.venv/lib/python3.12/site-packages/numpy/tests/test_numpy_config.py
new file mode 100644
index 00000000..82c1ad70
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/numpy/tests/test_numpy_config.py
@@ -0,0 +1,44 @@
+"""
+Check the numpy config is valid.
+"""
+import numpy as np
+import pytest
+from unittest.mock import Mock, patch
+
+pytestmark = pytest.mark.skipif(
+    not hasattr(np.__config__, "_built_with_meson"),
+    reason="Requires Meson builds",
+)
+
+
+class TestNumPyConfigs:
+    REQUIRED_CONFIG_KEYS = [
+        "Compilers",
+        "Machine Information",
+        "Python Information",
+    ]
+
+    @patch("numpy.__config__._check_pyyaml")
+    def test_pyyaml_not_found(self, mock_yaml_importer):
+        mock_yaml_importer.side_effect = ModuleNotFoundError()
+        with pytest.warns(UserWarning):
+            np.show_config()
+
+    def test_dict_mode(self):
+        config = np.show_config(mode="dicts")
+
+        assert isinstance(config, dict)
+        assert all([key in config for key in self.REQUIRED_CONFIG_KEYS]), (
+            "Required key missing,"
+            " see index of `False` with `REQUIRED_CONFIG_KEYS`"
+        )
+
+    def test_invalid_mode(self):
+        with pytest.raises(AttributeError):
+            np.show_config(mode="foo")
+
+    def test_warn_to_add_tests(self):
+        assert len(np.__config__.DisplayModes) == 2, (
+            "New mode detected,"
+            " please add UT if applicable and increment this count"
+        )
diff --git a/.venv/lib/python3.12/site-packages/numpy/tests/test_numpy_version.py b/.venv/lib/python3.12/site-packages/numpy/tests/test_numpy_version.py
new file mode 100644
index 00000000..61643426
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/numpy/tests/test_numpy_version.py
@@ -0,0 +1,41 @@
+"""
+Check the numpy version is valid.
+
+Note that a development version is marked by the presence of 'dev0' or '+'
+in the version string; all else is treated as a release. The version string
+itself is set from the output of ``git describe``, which relies on tags.
+
+Examples
+--------
+
+Valid Development: 1.22.0.dev0 1.22.0.dev0+5-g7999db4df2 1.22.0+5-g7999db4df2
+Valid Release: 1.21.0.rc1, 1.21.0.b1, 1.21.0
+Invalid: 1.22.0.dev, 1.22.0.dev0-5-g7999db4dfB, 1.21.0.d1, 1.21.a
+
+Note that a release is determined by the version string, which in turn
+is controlled by the result of the ``git describe`` command.
+"""
+import re
+
+import numpy as np
+from numpy.testing import assert_
+
+
+def test_valid_numpy_version():
+    # Verify that the numpy version is a valid one (no .post suffix or other
+    # nonsense).  See gh-6431 for an issue caused by an invalid version.
+    version_pattern = r"^[0-9]+\.[0-9]+\.[0-9]+(a[0-9]|b[0-9]|rc[0-9])?"
+    dev_suffix = r"(\.dev[0-9]+(\+git[0-9]+\.[0-9a-f]+)?)?"
+    res = re.match(version_pattern + dev_suffix + '$', np.__version__)
+
+    assert_(res is not None, np.__version__)
+
+
+def test_short_version():
+    # Check numpy.short_version actually exists
+    if np.version.release:
+        assert_(np.__version__ == np.version.short_version,
+                "short_version mismatch in release version")
+    else:
+        assert_(np.__version__.split("+")[0] == np.version.short_version,
+                "short_version mismatch in development version")
diff --git a/.venv/lib/python3.12/site-packages/numpy/tests/test_public_api.py b/.venv/lib/python3.12/site-packages/numpy/tests/test_public_api.py
new file mode 100644
index 00000000..54bf3dac
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/numpy/tests/test_public_api.py
@@ -0,0 +1,551 @@
+import sys
+import sysconfig
+import subprocess
+import pkgutil
+import types
+import importlib.metadata
+import warnings
+
+import numpy as np
+import numpy
+import pytest
+from numpy.testing import IS_WASM
+
+try:
+    import ctypes
+except ImportError:
+    ctypes = None
+
+
+def check_dir(module, module_name=None):
+    """Returns a mapping of all objects with the wrong __module__ attribute."""
+    if module_name is None:
+        module_name = module.__name__
+    results = {}
+    for name in dir(module):
+        item = getattr(module, name)
+        if (hasattr(item, '__module__') and hasattr(item, '__name__')
+                and item.__module__ != module_name):
+            results[name] = item.__module__ + '.' + item.__name__
+    return results
+
+
+def test_numpy_namespace():
+    # None of these objects are publicly documented to be part of the main
+    # NumPy namespace (some are useful though, others need to be cleaned up)
+    undocumented = {
+        '_add_newdoc_ufunc': 'numpy.core._multiarray_umath._add_newdoc_ufunc',
+        'add_docstring': 'numpy.core._multiarray_umath.add_docstring',
+        'add_newdoc': 'numpy.core.function_base.add_newdoc',
+        'add_newdoc_ufunc': 'numpy.core._multiarray_umath._add_newdoc_ufunc',
+        'byte_bounds': 'numpy.lib.utils.byte_bounds',
+        'compare_chararrays': 'numpy.core._multiarray_umath.compare_chararrays',
+        'deprecate': 'numpy.lib.utils.deprecate',
+        'deprecate_with_doc': 'numpy.lib.utils.deprecate_with_doc',
+        'disp': 'numpy.lib.function_base.disp',
+        'fastCopyAndTranspose': 'numpy.core._multiarray_umath.fastCopyAndTranspose',
+        'get_array_wrap': 'numpy.lib.shape_base.get_array_wrap',
+        'get_include': 'numpy.lib.utils.get_include',
+        'recfromcsv': 'numpy.lib.npyio.recfromcsv',
+        'recfromtxt': 'numpy.lib.npyio.recfromtxt',
+        'safe_eval': 'numpy.lib.utils.safe_eval',
+        'set_string_function': 'numpy.core.arrayprint.set_string_function',
+        'show_config': 'numpy.__config__.show',
+        'show_runtime': 'numpy.lib.utils.show_runtime',
+        'who': 'numpy.lib.utils.who',
+    }
+    # We override dir to not show these members
+    allowlist = undocumented
+    bad_results = check_dir(np)
+    # pytest gives better error messages with the builtin assert than with
+    # assert_equal
+    assert bad_results == allowlist
+
+
+@pytest.mark.skipif(IS_WASM, reason="can't start subprocess")
+@pytest.mark.parametrize('name', ['testing'])
+def test_import_lazy_import(name):
+    """Make sure we can actually use the modules we lazy load.
+
+    While not exported as part of the public API, it was accessible.  With the
+    use of __getattr__ and __dir__, this isn't always true, and an infinite
+    recursion can occur.
+
+    This is the only way I found that would force the failure to appear on the
+    badly implemented code.
+
+    We also test for the presence of the lazily imported modules in dir.
+
+    """
+    exe = (sys.executable, '-c', "import numpy; numpy." + name)
+    result = subprocess.check_output(exe)
+    assert not result
+
+    # Make sure they are still in the __dir__
+    assert name in dir(np)
+
+
+def test_dir_testing():
+    """Assert that output of dir has only one "testing/tester"
+    attribute without duplicate"""
+    assert len(dir(np)) == len(set(dir(np)))
+
+
+def test_numpy_linalg():
+    bad_results = check_dir(np.linalg)
+    assert bad_results == {}
+
+
+def test_numpy_fft():
+    bad_results = check_dir(np.fft)
+    assert bad_results == {}
+
+
+@pytest.mark.skipif(ctypes is None,
+                    reason="ctypes not available in this python")
+def test_NPY_NO_EXPORT():
+    cdll = ctypes.CDLL(np.core._multiarray_tests.__file__)
+    # Make sure an arbitrary NPY_NO_EXPORT function is actually hidden
+    f = getattr(cdll, 'test_not_exported', None)
+    assert f is None, ("'test_not_exported' is mistakenly exported, "
+                      "NPY_NO_EXPORT does not work")
+
+
+# Historically NumPy has not used leading underscores for private submodules
+# much.  This has resulted in lots of things that look like public modules
+# (i.e. things that can be imported as `import numpy.somesubmodule.somefile`),
+# but were never intended to be public.  The PUBLIC_MODULES list contains
+# modules that are either public because they were meant to be, or because they
+# contain public functions/objects that aren't present in any other namespace
+# for whatever reason and therefore should be treated as public.
+#
+# The PRIVATE_BUT_PRESENT_MODULES list contains modules that look public (lack
+# of underscores) but should not be used.  For many of those modules the
+# current status is fine.  For others it may make sense to work on making them
+# private, to clean up our public API and avoid confusion.
+PUBLIC_MODULES = ['numpy.' + s for s in [
+    "array_api",
+    "array_api.linalg",
+    "ctypeslib",
+    "doc",
+    "doc.constants",
+    "doc.ufuncs",
+    "dtypes",
+    "exceptions",
+    "f2py",
+    "fft",
+    "lib",
+    "lib.format",  # was this meant to be public?
+    "lib.mixins",
+    "lib.recfunctions",
+    "lib.scimath",
+    "lib.stride_tricks",
+    "linalg",
+    "ma",
+    "ma.extras",
+    "ma.mrecords",
+    "matlib",
+    "polynomial",
+    "polynomial.chebyshev",
+    "polynomial.hermite",
+    "polynomial.hermite_e",
+    "polynomial.laguerre",
+    "polynomial.legendre",
+    "polynomial.polynomial",
+    "random",
+    "testing",
+    "testing.overrides",
+    "typing",
+    "typing.mypy_plugin",
+    "version"  # Should be removed for NumPy 2.0
+]]
+if sys.version_info < (3, 12):
+    PUBLIC_MODULES += [
+        'numpy.' + s for s in [
+            "distutils",
+            "distutils.cpuinfo",
+            "distutils.exec_command",
+            "distutils.misc_util",
+            "distutils.log",
+            "distutils.system_info",
+        ]
+    ]
+
+
+
+PUBLIC_ALIASED_MODULES = [
+    "numpy.char",
+    "numpy.emath",
+    "numpy.rec",
+]
+
+
+PRIVATE_BUT_PRESENT_MODULES = ['numpy.' + s for s in [
+    "compat",
+    "compat.py3k",
+    "conftest",
+    "core",
+    "core.arrayprint",
+    "core.defchararray",
+    "core.einsumfunc",
+    "core.fromnumeric",
+    "core.function_base",
+    "core.getlimits",
+    "core.memmap",
+    "core.multiarray",
+    "core.numeric",
+    "core.numerictypes",
+    "core.overrides",
+    "core.records",
+    "core.shape_base",
+    "core.umath",
+    "f2py.auxfuncs",
+    "f2py.capi_maps",
+    "f2py.cb_rules",
+    "f2py.cfuncs",
+    "f2py.common_rules",
+    "f2py.crackfortran",
+    "f2py.diagnose",
+    "f2py.f2py2e",
+    "f2py.f90mod_rules",
+    "f2py.func2subr",
+    "f2py.rules",
+    "f2py.symbolic",
+    "f2py.use_rules",
+    "fft.helper",
+    "lib.arraypad",
+    "lib.arraysetops",
+    "lib.arrayterator",
+    "lib.function_base",
+    "lib.histograms",
+    "lib.index_tricks",
+    "lib.nanfunctions",
+    "lib.npyio",
+    "lib.polynomial",
+    "lib.shape_base",
+    "lib.twodim_base",
+    "lib.type_check",
+    "lib.ufunclike",
+    "lib.user_array",  # note: not in np.lib, but probably should just be deleted
+    "lib.utils",
+    "linalg.lapack_lite",
+    "linalg.linalg",
+    "ma.core",
+    "ma.testutils",
+    "ma.timer_comparison",
+    "matrixlib",
+    "matrixlib.defmatrix",
+    "polynomial.polyutils",
+    "random.mtrand",
+    "random.bit_generator",
+    "testing.print_coercion_tables",
+]]
+if sys.version_info < (3, 12):
+    PRIVATE_BUT_PRESENT_MODULES += [
+        'numpy.' + s for s in [
+            "distutils.armccompiler",
+            "distutils.fujitsuccompiler",
+            "distutils.ccompiler",
+            "distutils.ccompiler_opt",
+            "distutils.command",
+            "distutils.command.autodist",
+            "distutils.command.bdist_rpm",
+            "distutils.command.build",
+            "distutils.command.build_clib",
+            "distutils.command.build_ext",
+            "distutils.command.build_py",
+            "distutils.command.build_scripts",
+            "distutils.command.build_src",
+            "distutils.command.config",
+            "distutils.command.config_compiler",
+            "distutils.command.develop",
+            "distutils.command.egg_info",
+            "distutils.command.install",
+            "distutils.command.install_clib",
+            "distutils.command.install_data",
+            "distutils.command.install_headers",
+            "distutils.command.sdist",
+            "distutils.conv_template",
+            "distutils.core",
+            "distutils.extension",
+            "distutils.fcompiler",
+            "distutils.fcompiler.absoft",
+            "distutils.fcompiler.arm",
+            "distutils.fcompiler.compaq",
+            "distutils.fcompiler.environment",
+            "distutils.fcompiler.g95",
+            "distutils.fcompiler.gnu",
+            "distutils.fcompiler.hpux",
+            "distutils.fcompiler.ibm",
+            "distutils.fcompiler.intel",
+            "distutils.fcompiler.lahey",
+            "distutils.fcompiler.mips",
+            "distutils.fcompiler.nag",
+            "distutils.fcompiler.none",
+            "distutils.fcompiler.pathf95",
+            "distutils.fcompiler.pg",
+            "distutils.fcompiler.nv",
+            "distutils.fcompiler.sun",
+            "distutils.fcompiler.vast",
+            "distutils.fcompiler.fujitsu",
+            "distutils.from_template",
+            "distutils.intelccompiler",
+            "distutils.lib2def",
+            "distutils.line_endings",
+            "distutils.mingw32ccompiler",
+            "distutils.msvccompiler",
+            "distutils.npy_pkg_config",
+            "distutils.numpy_distribution",
+            "distutils.pathccompiler",
+            "distutils.unixccompiler",
+        ]
+    ]
+
+
+def is_unexpected(name):
+    """Check if this needs to be considered."""
+    if '._' in name or '.tests' in name or '.setup' in name:
+        return False
+
+    if name in PUBLIC_MODULES:
+        return False
+
+    if name in PUBLIC_ALIASED_MODULES:
+        return False
+
+    if name in PRIVATE_BUT_PRESENT_MODULES:
+        return False
+
+    return True
+
+
+# These are present in a directory with an __init__.py but cannot be imported
+# code_generators/ isn't installed, but present for an inplace build
+SKIP_LIST = [
+    "numpy.core.code_generators",
+    "numpy.core.code_generators.genapi",
+    "numpy.core.code_generators.generate_umath",
+    "numpy.core.code_generators.ufunc_docstrings",
+    "numpy.core.code_generators.generate_numpy_api",
+    "numpy.core.code_generators.generate_ufunc_api",
+    "numpy.core.code_generators.numpy_api",
+    "numpy.core.code_generators.generate_umath_doc",
+    "numpy.core.code_generators.verify_c_api_version",
+    "numpy.core.cversions",
+    "numpy.core.generate_numpy_api",
+    "numpy.core.umath_tests",
+]
+if sys.version_info < (3, 12):
+    SKIP_LIST += ["numpy.distutils.msvc9compiler"]
+
+
+# suppressing warnings from deprecated modules
+@pytest.mark.filterwarnings("ignore:.*np.compat.*:DeprecationWarning")
+def test_all_modules_are_expected():
+    """
+    Test that we don't add anything that looks like a new public module by
+    accident.  Check is based on filenames.
+    """
+
+    modnames = []
+    for _, modname, ispkg in pkgutil.walk_packages(path=np.__path__,
+                                                   prefix=np.__name__ + '.',
+                                                   onerror=None):
+        if is_unexpected(modname) and modname not in SKIP_LIST:
+            # We have a name that is new.  If that's on purpose, add it to
+            # PUBLIC_MODULES.  We don't expect to have to add anything to
+            # PRIVATE_BUT_PRESENT_MODULES.  Use an underscore in the name!
+            modnames.append(modname)
+
+    if modnames:
+        raise AssertionError(f'Found unexpected modules: {modnames}')
+
+
+# Stuff that clearly shouldn't be in the API and is detected by the next test
+# below
+SKIP_LIST_2 = [
+    'numpy.math',
+    'numpy.doc.constants.re',
+    'numpy.doc.constants.textwrap',
+    'numpy.lib.emath',
+    'numpy.lib.math',
+    'numpy.matlib.char',
+    'numpy.matlib.rec',
+    'numpy.matlib.emath',
+    'numpy.matlib.exceptions',
+    'numpy.matlib.math',
+    'numpy.matlib.linalg',
+    'numpy.matlib.fft',
+    'numpy.matlib.random',
+    'numpy.matlib.ctypeslib',
+    'numpy.matlib.ma',
+]
+if sys.version_info < (3, 12):
+    SKIP_LIST_2 += [
+        'numpy.distutils.log.sys',
+        'numpy.distutils.log.logging',
+        'numpy.distutils.log.warnings',
+    ]
+
+
+def test_all_modules_are_expected_2():
+    """
+    Method checking all objects. The pkgutil-based method in
+    `test_all_modules_are_expected` does not catch imports into a namespace,
+    only filenames.  So this test is more thorough and checks things like:
+
+        import numpy.lib.scimath as emath
+
+    To check if something in a module is (effectively) public, one can check if
+    there's anything in that namespace that's a public function/object but is
+    not exposed in a higher-level namespace.  For example for a `numpy.lib`
+    submodule::
+
+        mod = np.lib.mixins
+        for obj in mod.__all__:
+            if obj in np.__all__:
+                continue
+            elif obj in np.lib.__all__:
+                continue
+
+            else:
+                print(obj)
+
+    """
+
+    def find_unexpected_members(mod_name):
+        members = []
+        module = importlib.import_module(mod_name)
+        if hasattr(module, '__all__'):
+            objnames = module.__all__
+        else:
+            objnames = dir(module)
+
+        for objname in objnames:
+            if not objname.startswith('_'):
+                fullobjname = mod_name + '.' + objname
+                if isinstance(getattr(module, objname), types.ModuleType):
+                    if is_unexpected(fullobjname):
+                        if fullobjname not in SKIP_LIST_2:
+                            members.append(fullobjname)
+
+        return members
+
+    unexpected_members = find_unexpected_members("numpy")
+    for modname in PUBLIC_MODULES:
+        unexpected_members.extend(find_unexpected_members(modname))
+
+    if unexpected_members:
+        raise AssertionError("Found unexpected object(s) that look like "
+                             "modules: {}".format(unexpected_members))
+
+
+def test_api_importable():
+    """
+    Check that all submodules listed higher up in this file can be imported
+
+    Note that if a PRIVATE_BUT_PRESENT_MODULES entry goes missing, it may
+    simply need to be removed from the list (deprecation may or may not be
+    needed - apply common sense).
+    """
+    def check_importable(module_name):
+        try:
+            importlib.import_module(module_name)
+        except (ImportError, AttributeError):
+            return False
+
+        return True
+
+    module_names = []
+    for module_name in PUBLIC_MODULES:
+        if not check_importable(module_name):
+            module_names.append(module_name)
+
+    if module_names:
+        raise AssertionError("Modules in the public API that cannot be "
+                             "imported: {}".format(module_names))
+
+    for module_name in PUBLIC_ALIASED_MODULES:
+        try:
+            eval(module_name)
+        except AttributeError:
+            module_names.append(module_name)
+
+    if module_names:
+        raise AssertionError("Modules in the public API that were not "
+                             "found: {}".format(module_names))
+
+    with warnings.catch_warnings(record=True) as w:
+        warnings.filterwarnings('always', category=DeprecationWarning)
+        warnings.filterwarnings('always', category=ImportWarning)
+        for module_name in PRIVATE_BUT_PRESENT_MODULES:
+            if not check_importable(module_name):
+                module_names.append(module_name)
+
+    if module_names:
+        raise AssertionError("Modules that are not really public but looked "
+                             "public and can not be imported: "
+                             "{}".format(module_names))
+
+
+@pytest.mark.xfail(
+    sysconfig.get_config_var("Py_DEBUG") not in (None, 0, "0"),
+    reason=(
+        "NumPy possibly built with `USE_DEBUG=True ./tools/travis-test.sh`, "
+        "which does not expose the `array_api` entry point. "
+        "See https://github.com/numpy/numpy/pull/19800"
+    ),
+)
+def test_array_api_entry_point():
+    """
+    Entry point for Array API implementation can be found with importlib and
+    returns the numpy.array_api namespace.
+    """
+    # For a development install that did not go through meson-python,
+    # the entrypoint will not have been installed. So ensure this test fails
+    # only if numpy is inside site-packages.
+    numpy_in_sitepackages = sysconfig.get_path('platlib') in np.__file__
+
+    eps = importlib.metadata.entry_points()
+    try:
+        xp_eps = eps.select(group="array_api")
+    except AttributeError:
+        # The select interface for entry_points was introduced in py3.10,
+        # deprecating its dict interface. We fall back to dict keys for finding
+        # Array API entry points so that running this test in <=3.9 will
+        # still work - see https://github.com/numpy/numpy/pull/19800.
+        xp_eps = eps.get("array_api", [])
+    if len(xp_eps) == 0:
+        if numpy_in_sitepackages:
+            msg = "No entry points for 'array_api' found"
+            raise AssertionError(msg) from None
+        return
+
+    try:
+        ep = next(ep for ep in xp_eps if ep.name == "numpy")
+    except StopIteration:
+        if numpy_in_sitepackages:
+            msg = "'numpy' not in array_api entry points"
+            raise AssertionError(msg) from None
+        return
+
+    xp = ep.load()
+    msg = (
+        f"numpy entry point value '{ep.value}' "
+        "does not point to our Array API implementation"
+    )
+    assert xp is numpy.array_api, msg
+
+
+@pytest.mark.parametrize("name", [
+        'ModuleDeprecationWarning', 'VisibleDeprecationWarning',
+        'ComplexWarning', 'TooHardError', 'AxisError'])
+def test_moved_exceptions(name):
+    # These were moved to the exceptions namespace, but currently still
+    # available
+    assert name in np.__all__
+    assert name not in np.__dir__()
+    # Fetching works, but __module__ is set correctly:
+    assert getattr(np, name).__module__ == "numpy.exceptions"
+    assert name in np.exceptions.__all__
+    getattr(np.exceptions, name)
diff --git a/.venv/lib/python3.12/site-packages/numpy/tests/test_reloading.py b/.venv/lib/python3.12/site-packages/numpy/tests/test_reloading.py
new file mode 100644
index 00000000..a1f36008
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/numpy/tests/test_reloading.py
@@ -0,0 +1,72 @@
+from numpy.testing import (
+    assert_raises,
+    assert_warns,
+    assert_,
+    assert_equal,
+    IS_WASM,
+)
+from numpy.compat import pickle
+
+import pytest
+import sys
+import subprocess
+import textwrap
+from importlib import reload
+
+
+def test_numpy_reloading():
+    # gh-7844. Also check that relevant globals retain their identity.
+    import numpy as np
+    import numpy._globals
+
+    _NoValue = np._NoValue
+    VisibleDeprecationWarning = np.VisibleDeprecationWarning
+    ModuleDeprecationWarning = np.ModuleDeprecationWarning
+
+    with assert_warns(UserWarning):
+        reload(np)
+    assert_(_NoValue is np._NoValue)
+    assert_(ModuleDeprecationWarning is np.ModuleDeprecationWarning)
+    assert_(VisibleDeprecationWarning is np.VisibleDeprecationWarning)
+
+    assert_raises(RuntimeError, reload, numpy._globals)
+    with assert_warns(UserWarning):
+        reload(np)
+    assert_(_NoValue is np._NoValue)
+    assert_(ModuleDeprecationWarning is np.ModuleDeprecationWarning)
+    assert_(VisibleDeprecationWarning is np.VisibleDeprecationWarning)
+
+def test_novalue():
+    import numpy as np
+    for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
+        assert_equal(repr(np._NoValue), '<no value>')
+        assert_(pickle.loads(pickle.dumps(np._NoValue,
+                                          protocol=proto)) is np._NoValue)
+
+
+@pytest.mark.skipif(IS_WASM, reason="can't start subprocess")
+def test_full_reimport():
+    """At the time of writing this, it is *not* truly supported, but
+    apparently enough users rely on it, for it to be an annoying change
+    when it started failing previously.
+    """
+    # Test within a new process, to ensure that we do not mess with the
+    # global state during the test run (could lead to cryptic test failures).
+    # This is generally unsafe, especially, since we also reload the C-modules.
+    code = textwrap.dedent(r"""
+        import sys
+        from pytest import warns
+        import numpy as np
+
+        for k in list(sys.modules.keys()):
+            if "numpy" in k:
+                del sys.modules[k]
+
+        with warns(UserWarning):
+            import numpy as np
+        """)
+    p = subprocess.run([sys.executable, '-c', code], capture_output=True)
+    if p.returncode:
+        raise AssertionError(
+            f"Non-zero return code: {p.returncode!r}\n\n{p.stderr.decode()}"
+        )
diff --git a/.venv/lib/python3.12/site-packages/numpy/tests/test_scripts.py b/.venv/lib/python3.12/site-packages/numpy/tests/test_scripts.py
new file mode 100644
index 00000000..892c04ee
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/numpy/tests/test_scripts.py
@@ -0,0 +1,47 @@
+""" Test scripts
+
+Test that we can run executable scripts that have been installed with numpy.
+"""
+import sys
+import os
+import pytest
+from os.path import join as pathjoin, isfile, dirname
+import subprocess
+
+import numpy as np
+from numpy.testing import assert_equal, IS_WASM
+
+is_inplace = isfile(pathjoin(dirname(np.__file__),  '..', 'setup.py'))
+
+
+def find_f2py_commands():
+    if sys.platform == 'win32':
+        exe_dir = dirname(sys.executable)
+        if exe_dir.endswith('Scripts'): # virtualenv
+            return [os.path.join(exe_dir, 'f2py')]
+        else:
+            return [os.path.join(exe_dir, "Scripts", 'f2py')]
+    else:
+        # Three scripts are installed in Unix-like systems:
+        # 'f2py', 'f2py{major}', and 'f2py{major.minor}'. For example,
+        # if installed with python3.9 the scripts would be named
+        # 'f2py', 'f2py3', and 'f2py3.9'.
+        version = sys.version_info
+        major = str(version.major)
+        minor = str(version.minor)
+        return ['f2py', 'f2py' + major, 'f2py' + major + '.' + minor]
+
+
+@pytest.mark.skipif(is_inplace, reason="Cannot test f2py command inplace")
+@pytest.mark.xfail(reason="Test is unreliable")
+@pytest.mark.parametrize('f2py_cmd', find_f2py_commands())
+def test_f2py(f2py_cmd):
+    # test that we can run f2py script
+    stdout = subprocess.check_output([f2py_cmd, '-v'])
+    assert_equal(stdout.strip(), np.__version__.encode('ascii'))
+
+
+@pytest.mark.skipif(IS_WASM, reason="Cannot start subprocess")
+def test_pep338():
+    stdout = subprocess.check_output([sys.executable, '-mnumpy.f2py', '-v'])
+    assert_equal(stdout.strip(), np.__version__.encode('ascii'))
diff --git a/.venv/lib/python3.12/site-packages/numpy/tests/test_warnings.py b/.venv/lib/python3.12/site-packages/numpy/tests/test_warnings.py
new file mode 100644
index 00000000..df90fcef
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/numpy/tests/test_warnings.py
@@ -0,0 +1,74 @@
+"""
+Tests which scan for certain occurrences in the code; they may not find
+all of these occurrences but should catch almost all.
+"""
+import pytest
+
+from pathlib import Path
+import ast
+import tokenize
+import numpy
+
+class ParseCall(ast.NodeVisitor):
+    def __init__(self):
+        self.ls = []
+
+    def visit_Attribute(self, node):
+        ast.NodeVisitor.generic_visit(self, node)
+        self.ls.append(node.attr)
+
+    def visit_Name(self, node):
+        self.ls.append(node.id)
+
+
+class FindFuncs(ast.NodeVisitor):
+    def __init__(self, filename):
+        super().__init__()
+        self.__filename = filename
+
+    def visit_Call(self, node):
+        p = ParseCall()
+        p.visit(node.func)
+        ast.NodeVisitor.generic_visit(self, node)
+
+        if p.ls[-1] == 'simplefilter' or p.ls[-1] == 'filterwarnings':
+            if node.args[0].value == "ignore":
+                raise AssertionError(
+                    "warnings should have an appropriate stacklevel; found in "
+                    "{} on line {}".format(self.__filename, node.lineno))
+
+        if p.ls[-1] == 'warn' and (
+                len(p.ls) == 1 or p.ls[-2] == 'warnings'):
+
+            if "testing/tests/test_warnings.py" == self.__filename:
+                # This file
+                return
+
+            # See if stacklevel exists:
+            if len(node.args) == 3:
+                return
+            args = {kw.arg for kw in node.keywords}
+            if "stacklevel" in args:
+                return
+            raise AssertionError(
+                "warnings should have an appropriate stacklevel; found in "
+                "{} on line {}".format(self.__filename, node.lineno))
+
+
+@pytest.mark.slow
+def test_warning_calls():
+    # combined "ignore" and stacklevel error
+    base = Path(numpy.__file__).parent
+
+    for path in base.rglob("*.py"):
+        if base / "testing" in path.parents:
+            continue
+        if path == base / "__init__.py":
+            continue
+        if path == base / "random" / "__init__.py":
+            continue
+        # use tokenize to auto-detect encoding on systems where no
+        # default encoding is defined (e.g. LANG='C')
+        with tokenize.open(str(path)) as file:
+            tree = ast.parse(file.read())
+            FindFuncs(path).visit(tree)
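
The FindFuncs visitor above can also be exercised directly; a small sketch (the source string and the 'example.py' name are made up for illustration):

    import ast
    from pathlib import Path
    from numpy.tests.test_warnings import FindFuncs

    # A warnings.warn call with no explicit stacklevel is flagged.
    src = "import warnings\nwarnings.warn('oops')\n"
    FindFuncs(Path('example.py')).visit(ast.parse(src))  # raises AssertionError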