summaryrefslogtreecommitdiff
path: root/numpy/testing
diff options
context:
space:
mode:
Diffstat (limited to 'numpy/testing')
-rw-r--r--numpy/testing/__init__.py6
-rw-r--r--numpy/testing/_private/pytesttester.py41
-rw-r--r--numpy/testing/tests/test_decorators.py341
-rw-r--r--numpy/testing/tests/test_doctesting.py3
-rw-r--r--numpy/testing/tests/test_utils.py3
5 files changed, 220 insertions, 174 deletions
diff --git a/numpy/testing/__init__.py b/numpy/testing/__init__.py
index f4970b06b..a7c85931c 100644
--- a/numpy/testing/__init__.py
+++ b/numpy/testing/__init__.py
@@ -12,9 +12,11 @@ from unittest import TestCase
from ._private.utils import *
from ._private import decorators as dec
from ._private.nosetester import (
- run_module_suite, NoseTester as Tester, _numpy_tester,
+ run_module_suite, NoseTester as Tester
)
__all__ = _private.utils.__all__ + ['TestCase', 'run_module_suite']
-test = _numpy_tester().test
+from ._private.pytesttester import PytestTester
+test = PytestTester(__name__)
+del PytestTester
diff --git a/numpy/testing/_private/pytesttester.py b/numpy/testing/_private/pytesttester.py
index 6a92a52fd..8c73fafa4 100644
--- a/numpy/testing/_private/pytesttester.py
+++ b/numpy/testing/_private/pytesttester.py
@@ -32,6 +32,7 @@ import os
__all__ = ['PytestTester']
+
def _show_numpy_info():
import numpy as np
@@ -68,8 +69,8 @@ class PytestTester(object):
def __init__(self, module_name):
self.module_name = module_name
- def test(self, label='fast', verbose=1, extra_argv=None,
- doctests=False, coverage=False, timer=0, tests=None):
+ def __call__(self, label='fast', verbose=1, extra_argv=None,
+ doctests=False, coverage=False, durations=-1, tests=None):
"""
Run tests for module using pytest.
@@ -88,9 +89,9 @@ class PytestTester(object):
coverage : bool, optional
If True, report coverage of NumPy code. Default is False.
Requires installation of (pip) pytest-cov.
- timer : int, optional
- If > 0, report the time of the slowest `timer` tests. Default is 0.
-
+ durations : int, optional
+ If < 0, do nothing. If 0, report the time of all tests. If > 0,
+ report the time of the slowest `durations` tests. Default is -1.
tests : test or list of tests
Tests to be executed with pytest '--pyargs'
@@ -122,6 +123,7 @@ class PytestTester(object):
"""
import pytest
+ import warnings
#FIXME This is no longer needed? Assume it was for use in tests.
# cap verbosity at 3, which is equivalent to the pytest '-vv' option
@@ -134,7 +136,26 @@ class PytestTester(object):
module_path = os.path.abspath(module.__path__[0])
# setup the pytest arguments
- pytest_args = ['-l']
+ pytest_args = ["-l"]
+
+ # offset verbosity. The "-q" cancels a "-v".
+ pytest_args += ["-q"]
+
+ # Filter out distutils cpu warnings (could be localized to
+ # distutils tests). ASV has problems with top level import,
+ # so fetch module for suppression here.
+ with warnings.catch_warnings():
+ warnings.simplefilter("always")
+ from numpy.distutils import cpuinfo
+
+ # Filter out annoying import messages. Want these in both develop and
+ # release mode.
+ pytest_args += [
+ "-W ignore:Not importing directory",
+ "-W ignore:numpy.dtype size changed",
+ "-W ignore:numpy.ufunc size changed",
+ "-W ignore::UserWarning:cpuinfo",
+ ]
if doctests:
raise ValueError("Doctests not supported")
@@ -144,8 +165,6 @@ class PytestTester(object):
if verbose > 1:
pytest_args += ["-" + "v"*(verbose - 1)]
- else:
- pytest_args += ["-q"]
if coverage:
pytest_args += ["--cov=" + module_path]
@@ -155,13 +174,13 @@ class PytestTester(object):
elif label != "full":
pytest_args += ["-m", label]
- if timer > 0:
- pytest_args += ["--durations=%s" % timer]
+ if durations >= 0:
+ pytest_args += ["--durations=%s" % durations]
if tests is None:
tests = [self.module_name]
- pytest_args += ['--pyargs'] + list(tests)
+ pytest_args += ["--pyargs"] + list(tests)
# run tests.
diff --git a/numpy/testing/tests/test_decorators.py b/numpy/testing/tests/test_decorators.py
index 62329ab7d..26be1e359 100644
--- a/numpy/testing/tests/test_decorators.py
+++ b/numpy/testing/tests/test_decorators.py
@@ -5,195 +5,216 @@ Test the decorators from ``testing.decorators``.
from __future__ import division, absolute_import, print_function
import warnings
+import pytest
-from numpy.testing import (dec, assert_, assert_raises, run_module_suite,
- SkipTest, KnownFailureException)
+from numpy.testing import (
+ assert_, assert_raises, run_module_suite, dec, SkipTest,
+ KnownFailureException,
+ )
-def test_slow():
- @dec.slow
- def slow_func(x, y, z):
- pass
-
- assert_(slow_func.slow)
+try:
+ import nose
+except ImportError:
+ HAVE_NOSE = False
+else:
+ HAVE_NOSE = True
-def test_setastest():
- @dec.setastest()
- def f_default(a):
- pass
-
- @dec.setastest(True)
- def f_istest(a):
- pass
+@pytest.mark.skipif(not HAVE_NOSE, reason="Needs nose")
+class TestNoseDecorators(object):
+ # These tests are run in a class for simplicity while still
+ # getting a report on each, skipped or success.
- @dec.setastest(False)
- def f_isnottest(a):
+ class DidntSkipException(Exception):
pass
- assert_(f_default.__test__)
- assert_(f_istest.__test__)
- assert_(not f_isnottest.__test__)
-
-
-class DidntSkipException(Exception):
- pass
-
-def test_skip_functions_hardcoded():
- @dec.skipif(True)
- def f1(x):
- raise DidntSkipException
-
- try:
- f1('a')
- except DidntSkipException:
- raise Exception('Failed to skip')
- except SkipTest().__class__:
- pass
+ def test_slow(self):
+ import nose
+ @dec.slow
+ def slow_func(x, y, z):
+ pass
- @dec.skipif(False)
- def f2(x):
- raise DidntSkipException
-
- try:
- f2('a')
- except DidntSkipException:
- pass
- except SkipTest().__class__:
- raise Exception('Skipped when not expected to')
+ assert_(slow_func.slow)
+ def test_setastest(self):
+ @dec.setastest()
+ def f_default(a):
+ pass
-def test_skip_functions_callable():
- def skip_tester():
- return skip_flag == 'skip me!'
+ @dec.setastest(True)
+ def f_istest(a):
+ pass
- @dec.skipif(skip_tester)
- def f1(x):
- raise DidntSkipException
+ @dec.setastest(False)
+ def f_isnottest(a):
+ pass
- try:
- skip_flag = 'skip me!'
- f1('a')
- except DidntSkipException:
- raise Exception('Failed to skip')
- except SkipTest().__class__:
- pass
+ assert_(f_default.__test__)
+ assert_(f_istest.__test__)
+ assert_(not f_isnottest.__test__)
- @dec.skipif(skip_tester)
- def f2(x):
- raise DidntSkipException
- try:
- skip_flag = 'five is right out!'
- f2('a')
- except DidntSkipException:
- pass
- except SkipTest().__class__:
- raise Exception('Skipped when not expected to')
+ def test_skip_functions_hardcoded(self):
+ @dec.skipif(True)
+ def f1(x):
+ raise self.DidntSkipException
+ try:
+ f1('a')
+ except self.DidntSkipException:
+ raise Exception('Failed to skip')
+ except SkipTest().__class__:
+ pass
-def test_skip_generators_hardcoded():
- @dec.knownfailureif(True, "This test is known to fail")
- def g1(x):
- for i in range(x):
- yield i
+ @dec.skipif(False)
+ def f2(x):
+ raise self.DidntSkipException
- try:
- for j in g1(10):
+ try:
+ f2('a')
+ except self.DidntSkipException:
+ pass
+ except SkipTest().__class__:
+ raise Exception('Skipped when not expected to')
+
+ def test_skip_functions_callable(self):
+ def skip_tester():
+ return skip_flag == 'skip me!'
+
+ @dec.skipif(skip_tester)
+ def f1(x):
+ raise self.DidntSkipException
+
+ try:
+ skip_flag = 'skip me!'
+ f1('a')
+ except self.DidntSkipException:
+ raise Exception('Failed to skip')
+ except SkipTest().__class__:
pass
- except KnownFailureException().__class__:
- pass
- else:
- raise Exception('Failed to mark as known failure')
- @dec.knownfailureif(False, "This test is NOT known to fail")
- def g2(x):
- for i in range(x):
- yield i
- raise DidntSkipException('FAIL')
+ @dec.skipif(skip_tester)
+ def f2(x):
+ raise self.DidntSkipException
- try:
- for j in g2(10):
+ try:
+ skip_flag = 'five is right out!'
+ f2('a')
+ except self.DidntSkipException:
+ pass
+ except SkipTest().__class__:
+ raise Exception('Skipped when not expected to')
+
+ def test_skip_generators_hardcoded(self):
+ @dec.knownfailureif(True, "This test is known to fail")
+ def g1(x):
+ for i in range(x):
+ yield i
+
+ try:
+ for j in g1(10):
+ pass
+ except KnownFailureException().__class__:
+ pass
+ else:
+ raise Exception('Failed to mark as known failure')
+
+ @dec.knownfailureif(False, "This test is NOT known to fail")
+ def g2(x):
+ for i in range(x):
+ yield i
+ raise self.DidntSkipException('FAIL')
+
+ try:
+ for j in g2(10):
+ pass
+ except KnownFailureException().__class__:
+ raise Exception('Marked incorrectly as known failure')
+ except self.DidntSkipException:
pass
- except KnownFailureException().__class__:
- raise Exception('Marked incorrectly as known failure')
- except DidntSkipException:
- pass
-
-def test_skip_generators_callable():
- def skip_tester():
- return skip_flag == 'skip me!'
+ def test_skip_generators_callable(self):
+ def skip_tester():
+ return skip_flag == 'skip me!'
- @dec.knownfailureif(skip_tester, "This test is known to fail")
- def g1(x):
- for i in range(x):
- yield i
+ @dec.knownfailureif(skip_tester, "This test is known to fail")
+ def g1(x):
+ for i in range(x):
+ yield i
- try:
- skip_flag = 'skip me!'
- for j in g1(10):
+ try:
+ skip_flag = 'skip me!'
+ for j in g1(10):
+ pass
+ except KnownFailureException().__class__:
pass
- except KnownFailureException().__class__:
- pass
- else:
- raise Exception('Failed to mark as known failure')
-
- @dec.knownfailureif(skip_tester, "This test is NOT known to fail")
- def g2(x):
- for i in range(x):
- yield i
- raise DidntSkipException('FAIL')
-
- try:
- skip_flag = 'do not skip'
- for j in g2(10):
+ else:
+ raise Exception('Failed to mark as known failure')
+
+ @dec.knownfailureif(skip_tester, "This test is NOT known to fail")
+ def g2(x):
+ for i in range(x):
+ yield i
+ raise self.DidntSkipException('FAIL')
+
+ try:
+ skip_flag = 'do not skip'
+ for j in g2(10):
+ pass
+ except KnownFailureException().__class__:
+ raise Exception('Marked incorrectly as known failure')
+ except self.DidntSkipException:
pass
- except KnownFailureException().__class__:
- raise Exception('Marked incorrectly as known failure')
- except DidntSkipException:
- pass
+ def test_deprecated(self):
+ @dec.deprecated(True)
+ def non_deprecated_func():
+ pass
-def test_deprecated():
- @dec.deprecated(True)
- def non_deprecated_func():
- pass
-
- @dec.deprecated()
- def deprecated_func():
- import warnings
- warnings.warn("TEST: deprecated func", DeprecationWarning)
-
- @dec.deprecated()
- def deprecated_func2():
- import warnings
- warnings.warn("AHHHH")
- raise ValueError
-
- @dec.deprecated()
- def deprecated_func3():
- import warnings
- warnings.warn("AHHHH")
-
- # marked as deprecated, but does not raise DeprecationWarning
- assert_raises(AssertionError, non_deprecated_func)
- # should be silent
- deprecated_func()
- with warnings.catch_warnings(record=True):
- warnings.simplefilter("always") # do not propagate unrelated warnings
- # fails if deprecated decorator just disables test. See #1453.
- assert_raises(ValueError, deprecated_func2)
- # warning is not a DeprecationWarning
- assert_raises(AssertionError, deprecated_func3)
-
-
-@dec.parametrize('base, power, expected',
- [(1, 1, 1),
- (2, 1, 2),
- (2, 2, 4)])
-def test_parametrize(base, power, expected):
- assert_(base**power == expected)
+ @dec.deprecated()
+ def deprecated_func():
+ import warnings
+ warnings.warn("TEST: deprecated func", DeprecationWarning)
+
+ @dec.deprecated()
+ def deprecated_func2():
+ import warnings
+ warnings.warn("AHHHH")
+ raise ValueError
+
+ @dec.deprecated()
+ def deprecated_func3():
+ import warnings
+ warnings.warn("AHHHH")
+
+ # marked as deprecated, but does not raise DeprecationWarning
+ assert_raises(AssertionError, non_deprecated_func)
+ # should be silent
+ deprecated_func()
+ with warnings.catch_warnings(record=True):
+ warnings.simplefilter("always") # do not propagate unrelated warnings
+ # fails if deprecated decorator just disables test. See #1453.
+ assert_raises(ValueError, deprecated_func2)
+ # warning is not a DeprecationWarning
+ assert_raises(AssertionError, deprecated_func3)
+
+ def test_parametrize(self):
+ # dec.parametrize assumes that it is being run by nose. Because
+ # we are running under pytest, we need to explicitly check the
+ # results.
+ @dec.parametrize('base, power, expected',
+ [(1, 1, 1),
+ (2, 1, 2),
+ (2, 2, 4)])
+ def check_parametrize(base, power, expected):
+ assert_(base**power == expected)
+
+ count = 0
+ for test in check_parametrize():
+ test[0](*test[1:])
+ count += 1
+ assert_(count == 3)
if __name__ == '__main__':
diff --git a/numpy/testing/tests/test_doctesting.py b/numpy/testing/tests/test_doctesting.py
index 43f9fb6ce..b77cd93e0 100644
--- a/numpy/testing/tests/test_doctesting.py
+++ b/numpy/testing/tests/test_doctesting.py
@@ -3,6 +3,9 @@
"""
from __future__ import division, absolute_import, print_function
+#FIXME: None of these tests is run, because 'check' is not a recognized
+# testing prefix.
+
# try the #random directive on the output line
def check_random_directive():
'''
diff --git a/numpy/testing/tests/test_utils.py b/numpy/testing/tests/test_utils.py
index d5c582ad3..4ba484ba0 100644
--- a/numpy/testing/tests/test_utils.py
+++ b/numpy/testing/tests/test_utils.py
@@ -5,6 +5,7 @@ import sys
import os
import itertools
import textwrap
+import pytest
import numpy as np
from numpy.testing import (
@@ -659,7 +660,7 @@ class TestArrayAssertLess(object):
assert_raises(AssertionError, lambda: self._assert_func(-ainf, -x))
self._assert_func(-ainf, x)
-
+@pytest.mark.skip(reason="The raises decorator depends on Nose")
class TestRaises(object):
def setup(self):