summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorStefan Behnel <stefan_ml@behnel.de>2020-03-22 17:54:49 +0100
committerStefan Behnel <stefan_ml@behnel.de>2020-03-22 17:54:49 +0100
commitff2dfa0b5a5f2cb1a2e6636f5c54f69bd16ed492 (patch)
treed0afa8c951b7d329641143a1c30d3862ec9ee38f
parent11f4d6aef0f9cf8ee60ef839e01914d3b25dfdac (diff)
parent4dd7c18de76cb5c90a5168fba35990600307e642 (diff)
downloadcython-ff2dfa0b5a5f2cb1a2e6636f5c54f69bd16ed492.tar.gz
Merge branch 'master' into gh2564_enable_binding
-rw-r--r--.travis.yml35
-rw-r--r--CHANGES.rst112
-rw-r--r--Cython/Build/Dependencies.py3
-rw-r--r--Cython/Build/Inline.py12
-rw-r--r--Cython/Compiler/Code.py85
-rw-r--r--Cython/Compiler/ExprNodes.py105
-rw-r--r--Cython/Compiler/Main.py9
-rw-r--r--Cython/Compiler/ModuleNode.py81
-rw-r--r--Cython/Compiler/Naming.py2
-rw-r--r--Cython/Compiler/Nodes.py81
-rw-r--r--Cython/Compiler/Pythran.py17
-rw-r--r--Cython/Compiler/Visitor.py4
-rw-r--r--Cython/Includes/cpython/descr.pxd2
-rw-r--r--Cython/Includes/cpython/genobject.pxd25
-rw-r--r--Cython/Includes/cpython/number.pxd14
-rw-r--r--Cython/Includes/cpython/object.pxd18
-rw-r--r--Cython/Includes/cpython/pylifecycle.pxd4
-rw-r--r--Cython/Includes/cpython/slice.pxd23
-rw-r--r--Cython/Includes/libc/math.pxd6
-rw-r--r--Cython/Includes/libcpp/numeric.pxd23
-rw-r--r--Cython/Includes/libcpp/utility.pxd14
-rw-r--r--Cython/Includes/numpy/__init__.pxd21
-rw-r--r--Cython/Shadow.py22
-rw-r--r--Cython/TestUtils.py71
-rw-r--r--Cython/Utility/Builtins.c44
-rw-r--r--Cython/Utility/CommonStructures.c128
-rw-r--r--Cython/Utility/CythonFunction.c103
-rw-r--r--Cython/Utility/ImportExport.c18
-rw-r--r--Cython/Utility/ModuleSetupCode.c2
-rw-r--r--Cython/Utility/ObjectHandling.c5
-rw-r--r--Cython/Utility/Optimize.c4
-rw-r--r--Cython/Utility/Overflow.c46
-rw-r--r--Cython/Utility/Profile.c2
-rw-r--r--Makefile7
-rw-r--r--README.rst5
-rw-r--r--appveyor.yml8
-rw-r--r--docs/examples/tutorial/numpy/convolve2.pyx2
-rw-r--r--docs/src/tutorial/strings.rst17
-rw-r--r--docs/src/userguide/extension_types.rst7
-rw-r--r--docs/src/userguide/memoryviews.rst3
-rw-r--r--docs/src/userguide/source_files_and_compilation.rst2
-rwxr-xr-xruntests.py55
-rw-r--r--setup.cfg5
-rwxr-xr-xsetup.py1
-rw-r--r--tests/build/dotted.filename.modules.pxd0
-rw-r--r--tests/build/dotted.filename.modules.pyx7
-rw-r--r--tests/limited_api_bugs.txt17
-rw-r--r--tests/pypy2_bugs.txt25
-rw-r--r--tests/pypy_bugs.txt76
-rw-r--r--tests/pypy_crash_bugs.txt13
-rw-r--r--tests/pypy_implementation_detail_bugs.txt45
-rw-r--r--tests/run/always_allow_keywords_T295.pyx31
-rw-r--r--tests/run/async_def.pyx35
-rw-r--r--tests/run/callargs.pyx8
-rw-r--r--tests/run/cdivision_CEP_516.pyx3
-rw-r--r--tests/run/cpp_move.pyx34
-rw-r--r--tests/run/cpp_operator_exc_handling.pyx14
-rw-r--r--tests/run/cpp_operator_exc_handling_helper.hpp13
-rw-r--r--tests/run/cpp_stl_conversion.pyx4
-rw-r--r--tests/run/cpp_stl_numeric_ops.pyx147
-rw-r--r--tests/run/cython_includes.pyx1
-rw-r--r--tests/run/fused_def.pyx41
-rw-r--r--tests/run/if_else_expr.pyx12
-rw-r--r--tests/run/importfrom.pyx4
-rw-r--r--tests/run/kwargproblems.pyx2
-rw-r--r--tests/run/list_pop.pyx2
-rw-r--r--tests/run/modop.pyx6
-rw-r--r--tests/run/numpy_bufacc_T155.pyx3
-rw-r--r--tests/run/numpy_cimport.pyx1
-rw-r--r--tests/run/numpy_common.pxi10
-rw-r--r--tests/run/numpy_parallel.pyx3
-rw-r--r--tests/run/numpy_subarray.pyx2
-rw-r--r--tests/run/numpy_test.pyx5
-rw-r--r--tests/run/pep448_test_extcall.pyx6
-rw-r--r--tests/run/powop.pyx8
-rw-r--r--tests/run/pure.pyx6
-rw-r--r--tests/run/pure_fused.pxd6
-rw-r--r--tests/run/pure_fused.py60
-rw-r--r--tests/run/py_hash_t.pyx2
-rw-r--r--tests/run/pyclass_dynamic_bases.pyx19
-rw-r--r--tests/run/sequential_parallel.pyx56
-rw-r--r--tests/run/test_coroutines_pep492.pyx5
-rw-r--r--tests/run/test_unicode_string_tests.pxi8
-rw-r--r--tests/run/trashcan.pyx8
-rw-r--r--tests/windows_bugs.txt2
85 files changed, 1543 insertions, 460 deletions
diff --git a/.travis.yml b/.travis.yml
index 000af41cd..cdcd2176c 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -19,6 +19,7 @@ cache:
python:
- 3.8
- 2.7
+ - 3.9-dev
- 3.7
- 3.6
- 3.5
@@ -38,31 +39,30 @@ env:
matrix:
include:
-# Disabled: coverage analysis takes excessively long, several times longer than without.
-# - python: 3.7
-# env: COVERAGE=1
- - python: 3.7
- env: TEST_CODE_STYLE=1
- - python: 3.8
- env: LIMITED_API=--limited-api EXCLUDE=--no-file
- - python: 3.7
- env: LIMITED_API=--limited-api EXCLUDE=--no-file
- - python: 3.6
- env: LIMITED_API=--limited-api EXCLUDE=--no-file
+ # slowest first
- os: osx
- osx_image: xcode6.4
env: PY=2
python: 2
language: c
compiler: clang
cache: false
- os: osx
- osx_image: xcode6.4
env: PY=3
python: 3
language: c
compiler: clang
cache: false
+# Disabled: coverage analysis takes excessively long, several times longer than without.
+# - python: 3.7
+# env: COVERAGE=1
+ - python: 3.7
+ env: TEST_CODE_STYLE=1
+ - python: 3.8
+ env: LIMITED_API=--limited-api EXCLUDE=--no-file
+ - python: 3.7
+ env: LIMITED_API=--limited-api EXCLUDE=--no-file
+ - python: 3.6
+ env: LIMITED_API=--limited-api EXCLUDE=--no-file
- env: STACKLESS=true BACKEND=c PY=2
python: 2.7
- env: STACKLESS=true BACKEND=c PY=3
@@ -71,9 +71,16 @@ matrix:
env: BACKEND=c
- python: pypy3
env: BACKEND=c
- allow_failures:
+# a secondary pypy tests which is allowed to fail and which specifically
+# tests known bugs
- python: pypy
+ env: BACKEND=c EXCLUDE="--listfile=tests/pypy_bugs.txt --listfile=tests/pypy2_bugs.txt bugs"
- python: pypy3
+ env: BACKEND=c EXCLUDE="--listfile=tests/pypy_bugs.txt bugs"
+ allow_failures:
+ - python: 3.9-dev
+ - env: BACKEND=c EXCLUDE="--listfile=tests/pypy_bugs.txt bugs"
+ - env: BACKEND=c EXCLUDE="--listfile=tests/pypy_bugs.txt --listfile=tests/pypy2_bugs.txt bugs"
branches:
only:
diff --git a/CHANGES.rst b/CHANGES.rst
index ceecbecd5..c4487e158 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -12,7 +12,16 @@ Features added
Patch by Jeroen Demeyer. (Github issue #2263)
* Unicode identifiers are supported in Cython code (PEP 3131).
- Patch by da-woods. (Github issue #2601)
+ Patch by David Woods. (Github issue #2601)
+
+* Unicode module names and imports are supported.
+ Patch by David Woods. (Github issue #3119)
+
+* Annotations are no longer parsed, keeping them as strings following PEP-563.
+ Patch by David Woods. (Github issue #3285)
+
+* The ``LIMITED_API`` is supported by setting the ``CYTHON_LIMITED_API`` C macro.
+ Patches by Eddie Elizondo. (Github issue #3223, #3311)
* ``with gil/nogil`` statements can be conditional based on compile-time
constants, e.g. fused type checks.
@@ -34,7 +43,7 @@ Features added
CPython trashcan. This allows deallocating deeply recursive objects without
overflowing the stack. Patch by Jeroen Demeyer. (Github issue #2842)
-* Properties can be defined for external extension types.
+* Inlined properties can be defined for external extension types.
Patch by Matti Picus. (Github issue #2640)
* Multiplication of Python numbers with small constant integers is faster.
@@ -49,8 +58,13 @@ Features added
generators and coroutines now return an actual frame object for introspection.
(Github issue #2306)
-* Several missing declarations in ``cpython.*`` were added.
- Patches by Jeroen Demeyer and Zackery Spytz. (Github issues #2826, #2713)
+* Several declarations in ``cpython.*``, ``libc.*`` and ``libcpp.*`` were added.
+ Patches by Jeroen Demeyer, Matthew Edwards, Chris Gyurgyik, Jerome Kieffer,
+ Omer Ozarslan and Zackery Spytz.
+ (Github issues #3358, #3332, #3179, #2891, #2826, #2713)
+
+* Deprecated NumPy API usages were removed from ``numpy.pxd``.
+ Patch by Matti Picus. (Github issue #3365)
* The builtin ``abs()`` function can now be used on C numbers in nogil code.
Patch by Elliott Sales de Andrade. (Github issue #2748)
@@ -68,6 +82,9 @@ Features added
* ``--no-docstrings`` option added to ``cythonize`` script.
Original patch by mo-han. (Github issue #2889)
+* The Pythran ``shape`` attribute is supported.
+ Patch by Serge Guelton. (Github issue #3307)
+
* The ``@cython.binding`` decorator is available in Python code.
Bugs fixed
@@ -78,6 +95,10 @@ Bugs fixed
the first character if multiple characters should have been returned.
They now use the original Python methods again.
+* Fused argument types were not correctly handled in type annotations and
+ ``cython.locals()``.
+ Patch by David Woods. (Github issue #3391)
+
* Diverging from the usual behaviour, ``len(memoryview)``, ``len(char*)``
and ``len(Py_UNICODE*)`` returned an unsigned ``size_t`` value. They now
return a signed ``Py_ssize_t``, like other usages of ``len()``.
@@ -163,15 +184,90 @@ Other changes
* The command line parser was rewritten and modernised using ``argparse``.
Patch by Egor Dranischnikow. (Github issue #2952, #3001)
+* Dotted filenames for qualified module names (``pkg.mod.pyx``) are deprecated.
+ Use the normal Python package directory layout instead.
+ (Github issue #2686)
+
* Support for Python 2.6 was removed.
-0.29.15 (20??-??-??)
+0.29.16 (2020-0?-??)
+====================
+
+Bugs fixed
+----------
+
+* Temporary internal variables in nested prange loops could leak into other
+ threads. Patch by Frank Schlimbach. (Github issue #3348)
+
+* Default arguments on fused functions could crash.
+ Patch by David Woods. (Github issue #3370)
+
+* C-tuples declared in ``.pxd`` files could generate incomplete C code.
+ Patch by Kirk Meyer. (Github issue #1427)
+
+* Fused functions were not always detected and optimised as Cython
+ implemented functions.
+ Patch by David Woods. (Github issue #3384)
+
+* Valid Python object concatenation of (iterable) strings to non-strings
+ could fail with an exception.
+ Patch by David Woods. (Github issue #3433)
+
+* Using C functions as temporary values lead to invalid C code.
+ Original patch by David Woods. (Github issue #3418)
+
+* Fix an unhandled C++ exception in comparisons.
+ Patch by David Woods. (Github issue #3361)
+
+* Fix deprecated import of "imp" module.
+ Patch by Matti Picus. (Github issue #3350)
+
+* Fix compatibility with Pythran 0.9.6 and later.
+ Patch by Serge Guelton. (Github issue #3308)
+
+* The ``_Py_PyAtExit()`` function in ``cpython.pylifecycle`` was misdeclared.
+ Patch by Zackery Spytz. (Github issue #3382)
+
+* Several missing declarations in ``cpython.*`` were added.
+ Patches by Zackery Spytz. (Github issue #3421, #3411, #3402)
+
+* A declaration for ``libc.math.fpclassify()`` was added.
+ Patch by Zackery Spytz. (Github issue #2514)
+
+* Avoid "undeclared" warning about automatically generated pickle methods.
+ Patch by David Woods. (Github issue #3353)
+
+* Avoid C compiler warning about unreachable code in ``prange()``.
+
+* Some C compiler warnings in PyPy were resolved.
+ Patch by Matti Picus. (Github issue #3437)
+
+
+0.29.15 (2020-02-06)
====================
+Bugs fixed
+----------
+
+* Crash when returning a temporary Python object from an async-def function.
+ (Github issue #3337)
+
+* Crash when using ``**kwargs`` in generators.
+ Patch by David Woods. (Github issue #3265)
+
* Double reference free in ``__class__`` cell handling for ``super()`` calls.
(Github issue #3246)
+* Compile error when using ``*args`` as Python class bases.
+ (Github issue #3338)
+
+* Import failure in IPython 7.11.
+ (Github issue #3297)
+
+* Fixed C name collision in the auto-pickle code.
+ Patch by ThePrez. (Github issue #3238)
+
* Deprecated import failed in Python 3.9.
(Github issue #3266)
@@ -183,7 +279,7 @@ Bugs fixed
----------
* The generated code failed to initialise the ``tp_print`` slot in CPython 3.8.
- Patches by Pablo Galindo and Orivej Desh (Github issues #3171, #3201).
+ Patches by Pablo Galindo and Orivej Desh. (Github issues #3171, #3201)
* ``?`` for ``bool`` was missing from the supported NumPy dtypes.
Patch by Max Klein. (Github issue #2675)
@@ -1260,7 +1356,7 @@ Features added
Patch by Syrtis Major (Github issue #1625).
* ``abs()`` is optimised for C complex numbers.
- Patch by da-woods (Github issue #1648).
+ Patch by David Woods (Github issue #1648).
* The display of C lines in Cython tracebacks can now be enabled at runtime
via ``import cython_runtime; cython_runtime.cline_in_traceback=True``.
@@ -1389,7 +1485,7 @@ Features added
Patch by Claudio Freire.
* Buffer variables are no longer excluded from ``locals()``.
- Patch by da-woods.
+ Patch by David Woods.
* Building f-strings is faster, especially when formatting C integers.
diff --git a/Cython/Build/Dependencies.py b/Cython/Build/Dependencies.py
index 593e00a6e..9e21aa0f2 100644
--- a/Cython/Build/Dependencies.py
+++ b/Cython/Build/Dependencies.py
@@ -913,7 +913,8 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
variable called ``foo`` as a string, and then call
``cythonize(..., aliases={'MY_HEADERS': foo})``.
- :param quiet: If True, Cython won't print error and warning messages during the compilation.
+ :param quiet: If True, Cython won't print error, warning, or status messages during the
+ compilation.
:param force: Forces the recompilation of the Cython modules, even if the timestamps
don't indicate that a recompilation is necessary.
diff --git a/Cython/Build/Inline.py b/Cython/Build/Inline.py
index 73175fa04..cbcc5822d 100644
--- a/Cython/Build/Inline.py
+++ b/Cython/Build/Inline.py
@@ -32,6 +32,14 @@ if sys.version_info[0] < 3:
else:
to_unicode = lambda x: x
+if sys.version_info[:2] < (3, 3):
+ import imp
+ def load_dynamic(name, module_path):
+ return imp.load_dynamic(name, module_path)
+else:
+ from importlib.machinery import ExtensionFileLoader
+ def load_dynamic(name, module_path):
+ return ExtensionFileLoader(name, module_path).load_module()
class UnboundSymbols(EnvTransform, SkipDeclarations):
def __init__(self):
@@ -170,6 +178,8 @@ def cython_inline(code, get_type=unsafe_type,
print("Could not parse code as a string (to extract unbound symbols).")
cython_compiler_directives = dict(cython_compiler_directives or {})
+ if language_level is None and 'language_level' not in cython_compiler_directives:
+ language_level = '3str'
if language_level is not None:
cython_compiler_directives['language_level'] = language_level
@@ -246,7 +256,7 @@ def __invoke(%(params)s):
build_extension.build_lib = lib_dir
build_extension.run()
- module = imp.load_dynamic(module_name, module_path)
+ module = load_dynamic(module_name, module_path)
_cython_inline_cache[orig_code, arg_sigs] = module.__invoke
arg_list = [kwds[arg] for arg in arg_names]
diff --git a/Cython/Compiler/Code.py b/Cython/Compiler/Code.py
index 0cea82060..06e2662fb 100644
--- a/Cython/Compiler/Code.py
+++ b/Cython/Compiler/Code.py
@@ -237,6 +237,15 @@ class UtilityCodeBase(object):
[definitions]
+ ##### MyUtility #####
+ #@subsitute: tempita
+
+ [requires tempita substitution
+ - context can't be specified here though so only
+ tempita utility that requires no external context
+ will benefit from this tag
+ - only necessary when @required from non-tempita code]
+
for prototypes and implementation respectively. For non-python or
-cython files backslashes should be used instead. 5 to 30 comment
characters may be used on either side.
@@ -255,8 +264,7 @@ class UtilityCodeBase(object):
return
code = '\n'.join(lines)
- if tags and 'substitute' in tags and tags['substitute'] == set(['naming']):
- del tags['substitute']
+ if tags and 'substitute' in tags and 'naming' in tags['substitute']:
try:
code = Template(code).substitute(vars(Naming))
except (KeyError, ValueError) as e:
@@ -347,6 +355,7 @@ class UtilityCodeBase(object):
Load utility code from a file specified by from_file (relative to
Cython/Utility) and name util_code_name.
"""
+
if '::' in util_code_name:
from_file, util_code_name = util_code_name.rsplit('::', 1)
assert from_file
@@ -354,6 +363,9 @@ class UtilityCodeBase(object):
proto, impl, tags = utilities[util_code_name]
if tags:
+ if "substitute" in tags and "tempita" in tags["substitute"]:
+ if not issubclass(cls, TempitaUtilityCode):
+ return TempitaUtilityCode.load(util_code_name, from_file, **kwargs)
orig_kwargs = kwargs.copy()
for name, values in tags.items():
if name in kwargs:
@@ -367,6 +379,12 @@ class UtilityCodeBase(object):
# dependencies are rarely unique, so use load_cached() when we can
values = [cls.load_cached(dep, from_file)
for dep in sorted(values)]
+ elif name == 'substitute':
+ # don't want to pass "naming" or "tempita" to the constructor
+ # since these will have been handled
+ values = values - set(['naming', 'tempita'])
+ if not values:
+ continue
elif not values:
values = None
elif len(values) == 1:
@@ -814,6 +832,9 @@ class FunctionState(object):
type = type.cv_base_type
elif type.is_reference and not type.is_fake_reference:
type = type.ref_base_type
+ elif type.is_cfunction:
+ from . import PyrexTypes
+ type = PyrexTypes.c_ptr_type(type) # A function itself isn't an l-value
if not type.is_pyobject and not type.is_memoryviewslice:
# Make manage_ref canonical, so that manage_ref will always mean
# a decref is needed.
@@ -1078,14 +1099,14 @@ class GlobalState(object):
'h_code',
'filename_table',
'utility_code_proto_before_types',
- 'module_state',
- 'module_state_clear',
- 'module_state_traverse',
- 'module_state_defines',
'numeric_typedefs', # Let these detailed individual parts stay!,
'complex_type_declarations', # as the proper solution is to make a full DAG...
'type_declarations', # More coarse-grained blocks would simply hide
'utility_code_proto', # the ugliness, not fix it
+ 'module_state',
+ 'module_state_clear',
+ 'module_state_traverse',
+ 'module_state_defines',
'module_declarations',
'typeinfo',
'before_global_var',
@@ -1445,11 +1466,14 @@ class GlobalState(object):
for (type_cname, method_name), cname in sorted(self.cached_cmethods.items()):
cnames.append(cname)
method_name_cname = self.get_interned_identifier(StringEncoding.EncodedString(method_name)).cname
- decl.putln('static __Pyx_CachedCFunction %s = {0, &%s, 0, 0, 0};' % (
- cname, method_name_cname))
+ decl.putln('static __Pyx_CachedCFunction %s = {0, 0, 0, 0, 0};' % (
+ cname))
# split type reference storage as it might not be static
init.putln('%s.type = (PyObject*)&%s;' % (
cname, type_cname))
+ # method name string isn't static in limited api
+ init.putln('%s.method_name = &%s;' % (
+ cname, method_name_cname))
if Options.generate_cleanup_code:
cleanup = self.parts['cleanup_globals']
@@ -1494,6 +1518,17 @@ class GlobalState(object):
w = self.parts['pystring_table']
w.putln("")
w.putln("static __Pyx_StringTabEntry %s[] = {" % Naming.stringtab_cname)
+ w.putln("#if CYTHON_COMPILING_IN_LIMITED_API")
+ w_limited_writer = w.insertion_point()
+ w.putln("#else")
+ w_not_limited_writer = w.insertion_point()
+ w.putln("#endif")
+ decls_writer.putln("#if !CYTHON_COMPILING_IN_LIMITED_API")
+ not_limited_api_decls_writer = decls_writer.insertion_point()
+ decls_writer.putln("#endif")
+ init_globals.putln("#if CYTHON_COMPILING_IN_LIMITED_API")
+ init_globals_limited_api = init_globals.insertion_point()
+ init_globals.putln("#endif")
for idx, py_string_args in enumerate(py_strings):
c_cname, _, py_string = py_string_args
if not py_string.is_str or not py_string.encoding or \
@@ -1512,23 +1547,20 @@ class GlobalState(object):
py_string.cname)
self.parts['module_state_traverse'].putln("Py_VISIT(traverse_module_state->%s);" %
py_string.cname)
- decls_writer.putln("#if !CYTHON_COMPILING_IN_LIMITED_API")
- decls_writer.putln(
+ not_limited_api_decls_writer.putln(
"static PyObject *%s;" % py_string.cname)
- decls_writer.putln("#endif")
if py_string.py3str_cstring:
- w.putln("#if PY_MAJOR_VERSION >= 3")
- w.putln("{&%s, %s, sizeof(%s), %s, %d, %d, %d}," % (
+ w_not_limited_writer.putln("#if PY_MAJOR_VERSION >= 3")
+ w_not_limited_writer.putln("{&%s, %s, sizeof(%s), %s, %d, %d, %d}," % (
py_string.cname,
py_string.py3str_cstring.cname,
py_string.py3str_cstring.cname,
'0', 1, 0,
py_string.intern
))
- w.putln("#else")
-
- w.putln("#if CYTHON_COMPILING_IN_LIMITED_API")
- w.putln("{0, %s, sizeof(%s), %s, %d, %d, %d}," % (
+ w_not_limited_writer.putln("#else")
+ w_not_limited_writer.putln("{&%s, %s, sizeof(%s), %s, %d, %d, %d}," % (
+ py_string.cname,
c_cname,
c_cname,
encoding,
@@ -1536,26 +1568,21 @@ class GlobalState(object):
py_string.is_str,
py_string.intern
))
- w.putln("#else")
- w.putln("{&%s, %s, sizeof(%s), %s, %d, %d, %d}," % (
- py_string.cname,
- c_cname,
- c_cname,
- encoding,
+ if py_string.py3str_cstring:
+ w_not_limited_writer.putln("#endif")
+ w_limited_writer.putln("{0, %s, sizeof(%s), %s, %d, %d, %d}," % (
+ c_cname if not py_string.py3str_cstring else py_string.py3str_cstring.cname,
+ c_cname if not py_string.py3str_cstring else py_string.py3str_cstring.cname,
+ encoding if not py_string.py3str_cstring else '0',
py_string.is_unicode,
py_string.is_str,
py_string.intern
))
- w.putln("#endif")
- init_globals.putln("#if CYTHON_COMPILING_IN_LIMITED_API")
- init_globals.putln("if (__Pyx_InitString(%s[%d], &%s) < 0) %s;" % (
+ init_globals_limited_api.putln("if (__Pyx_InitString(%s[%d], &%s) < 0) %s;" % (
Naming.stringtab_cname,
idx,
py_string.cname,
init_globals.error_goto(self.module_pos)))
- init_globals.putln("#endif")
- if py_string.py3str_cstring:
- w.putln("#endif")
w.putln("{0, 0, 0, 0, 0, 0, 0}")
w.putln("};")
diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py
index 03d0ae3bf..fbf340bd6 100644
--- a/Cython/Compiler/ExprNodes.py
+++ b/Cython/Compiler/ExprNodes.py
@@ -1434,11 +1434,7 @@ def _analyse_name_as_type(name, pos, env):
return type
global_entry = env.global_scope().lookup(name)
- if global_entry and global_entry.type and (
- global_entry.type.is_extension_type
- or global_entry.type.is_struct_or_union
- or global_entry.type.is_builtin_type
- or global_entry.type.is_cpp_class):
+ if global_entry and global_entry.is_type and global_entry.type:
return global_entry.type
from .TreeFragment import TreeFragment
@@ -8914,12 +8910,11 @@ class ClassNode(ExprNode, ModuleNameMixin):
# a name, tuple of bases and class dictionary.
#
# name EncodedString Name of the class
- # bases ExprNode Base class tuple
- # dict ExprNode Class dict (not owned by this node)
+ # class_def_node PyClassDefNode PyClassDefNode defining this class
# doc ExprNode or None Doc string
# module_name EncodedString Name of defining module
- subexprs = ['bases', 'doc']
+ subexprs = ['doc']
type = py_object_type
is_temp = True
@@ -8928,7 +8923,6 @@ class ClassNode(ExprNode, ModuleNameMixin):
return py_object_type
def analyse_types(self, env):
- self.bases = self.bases.analyse_types(env)
if self.doc:
self.doc = self.doc.analyse_types(env)
self.doc = self.doc.coerce_to_pyobject(env)
@@ -8941,12 +8935,13 @@ class ClassNode(ExprNode, ModuleNameMixin):
gil_message = "Constructing Python class"
def generate_result_code(self, code):
+ class_def_node = self.class_def_node
cname = code.intern_identifier(self.name)
if self.doc:
code.put_error_if_neg(self.pos,
'PyDict_SetItem(%s, %s, %s)' % (
- self.dict.py_result(),
+ class_def_node.dict.py_result(),
code.intern_identifier(
StringEncoding.EncodedString("__doc__")),
self.doc.py_result()))
@@ -8955,8 +8950,8 @@ class ClassNode(ExprNode, ModuleNameMixin):
code.putln(
'%s = __Pyx_CreateClass(%s, %s, %s, %s, %s); %s' % (
self.result(),
- self.bases.py_result(),
- self.dict.py_result(),
+ class_def_node.bases.py_result(),
+ class_def_node.dict.py_result(),
cname,
qualname,
py_mod_name,
@@ -8970,8 +8965,8 @@ class Py3ClassNode(ExprNode):
# a name, tuple of bases and class dictionary.
#
# name EncodedString Name of the class
- # dict ExprNode Class dict (not owned by this node)
# module_name EncodedString Name of defining module
+ # class_def_node PyClassDefNode PyClassDefNode defining this class
# calculate_metaclass bool should call CalculateMetaclass()
# allow_py2_metaclass bool should look for Py2 metaclass
@@ -8994,12 +8989,10 @@ class Py3ClassNode(ExprNode):
def generate_result_code(self, code):
code.globalstate.use_utility_code(UtilityCode.load_cached("Py3ClassCreate", "ObjectHandling.c"))
cname = code.intern_identifier(self.name)
- if self.mkw:
- mkw = self.mkw.py_result()
- else:
- mkw = 'NULL'
- if self.metaclass:
- metaclass = self.metaclass.py_result()
+ class_def_node = self.class_def_node
+ mkw = class_def_node.mkw.py_result() if class_def_node.mkw else 'NULL'
+ if class_def_node.metaclass:
+ metaclass = class_def_node.metaclass.py_result()
else:
metaclass = "((PyObject*)&__Pyx_DefaultClassType)"
code.putln(
@@ -9007,8 +9000,8 @@ class Py3ClassNode(ExprNode):
self.result(),
metaclass,
cname,
- self.bases.py_result(),
- self.dict.py_result(),
+ class_def_node.bases.py_result(),
+ class_def_node.dict.py_result(),
mkw,
self.calculate_metaclass,
self.allow_py2_metaclass,
@@ -9019,8 +9012,7 @@ class Py3ClassNode(ExprNode):
class PyClassMetaclassNode(ExprNode):
# Helper class holds Python3 metaclass object
#
- # bases ExprNode Base class tuple (not owned by this node)
- # mkw ExprNode Class keyword arguments (not owned by this node)
+ # class_def_node PyClassDefNode PyClassDefNode defining this class
subexprs = []
@@ -9033,38 +9025,38 @@ class PyClassMetaclassNode(ExprNode):
return True
def generate_result_code(self, code):
- if self.mkw:
+ bases = self.class_def_node.bases
+ mkw = self.class_def_node.mkw
+ if mkw:
code.globalstate.use_utility_code(
UtilityCode.load_cached("Py3MetaclassGet", "ObjectHandling.c"))
call = "__Pyx_Py3MetaclassGet(%s, %s)" % (
- self.bases.result(),
- self.mkw.result())
+ bases.result(),
+ mkw.result())
else:
code.globalstate.use_utility_code(
UtilityCode.load_cached("CalculateMetaclass", "ObjectHandling.c"))
call = "__Pyx_CalculateMetaclass(NULL, %s)" % (
- self.bases.result())
+ bases.result())
code.putln(
"%s = %s; %s" % (
self.result(), call,
code.error_goto_if_null(self.result(), self.pos)))
code.put_gotref(self.py_result())
+
class PyClassNamespaceNode(ExprNode, ModuleNameMixin):
# Helper class holds Python3 namespace object
#
# All this are not owned by this node
- # metaclass ExprNode Metaclass object
- # bases ExprNode Base class tuple
- # mkw ExprNode Class keyword arguments
+ # class_def_node PyClassDefNode PyClassDefNode defining this class
# doc ExprNode or None Doc string (owned)
subexprs = ['doc']
def analyse_types(self, env):
if self.doc:
- self.doc = self.doc.analyse_types(env)
- self.doc = self.doc.coerce_to_pyobject(env)
+ self.doc = self.doc.analyse_types(env).coerce_to_pyobject(env)
self.type = py_object_type
self.is_temp = 1
return self
@@ -9076,23 +9068,16 @@ class PyClassNamespaceNode(ExprNode, ModuleNameMixin):
cname = code.intern_identifier(self.name)
py_mod_name = self.get_py_mod_name(code)
qualname = self.get_py_qualified_name(code)
- if self.doc:
- doc_code = self.doc.result()
- else:
- doc_code = '(PyObject *) NULL'
- if self.mkw:
- mkw = self.mkw.py_result()
- else:
- mkw = '(PyObject *) NULL'
- if self.metaclass:
- metaclass = self.metaclass.py_result()
- else:
- metaclass = "(PyObject *) NULL"
+ class_def_node = self.class_def_node
+ null = "(PyObject *) NULL"
+ doc_code = self.doc.result() if self.doc else null
+ mkw = class_def_node.mkw.py_result() if class_def_node.mkw else null
+ metaclass = class_def_node.metaclass.py_result() if class_def_node.metaclass else null
code.putln(
"%s = __Pyx_Py3MetaclassPrepare(%s, %s, %s, %s, %s, %s, %s); %s" % (
self.result(),
metaclass,
- self.bases.result(),
+ class_def_node.bases.result(),
cname,
qualname,
mkw,
@@ -11382,19 +11367,24 @@ class AddNode(NumBinopNode):
self, type1, type2)
def py_operation_function(self, code):
- is_unicode_concat = False
- if isinstance(self.operand1, FormattedValueNode) or isinstance(self.operand2, FormattedValueNode):
- is_unicode_concat = True
- else:
- type1, type2 = self.operand1.type, self.operand2.type
- if type1 is unicode_type or type2 is unicode_type:
- is_unicode_concat = type1.is_builtin_type and type2.is_builtin_type
+ type1, type2 = self.operand1.type, self.operand2.type
- if is_unicode_concat:
- if self.operand1.may_be_none() or self.operand2.may_be_none():
- return '__Pyx_PyUnicode_ConcatSafe'
+ if type1 is unicode_type or type2 is unicode_type:
+ if type1 in (unicode_type, str_type) and type2 in (unicode_type, str_type):
+ is_unicode_concat = True
+ elif isinstance(self.operand1, FormattedValueNode) or isinstance(self.operand2, FormattedValueNode):
+ # Assume that even if we don't know the second type, it's going to be a string.
+ is_unicode_concat = True
else:
- return '__Pyx_PyUnicode_Concat'
+ # Operation depends on the second type.
+ is_unicode_concat = False
+
+ if is_unicode_concat:
+ if self.operand1.may_be_none() or self.operand2.may_be_none():
+ return '__Pyx_PyUnicode_ConcatSafe'
+ else:
+ return '__Pyx_PyUnicode_Concat'
+
return super(AddNode, self).py_operation_function(code)
@@ -12486,7 +12476,8 @@ class CmpNode(object):
result_code if self.type.is_pyobject else None,
self.exception_value,
self.in_nogil_context)
- code.putln(statement)
+ else:
+ code.putln(statement)
def c_operator(self, op):
if op == 'is':
diff --git a/Cython/Compiler/Main.py b/Cython/Compiler/Main.py
index 6b9a76348..3d628e128 100644
--- a/Cython/Compiler/Main.py
+++ b/Cython/Compiler/Main.py
@@ -494,6 +494,12 @@ def run_pipeline(source, options, full_module_name=None, context=None):
pipeline = Pipeline.create_pyx_pipeline(context, options, result)
context.setup_errors(options, result)
+
+ if '.' in full_module_name and '.' in os.path.splitext(os.path.basename(abs_path))[0]:
+ warning((source_desc, 1, 0),
+ "Dotted filenames ('%s') are deprecated."
+ " Please use the normal Python package directory layout." % os.path.basename(abs_path), level=1)
+
err, enddata = Pipeline.run_pipeline(pipeline, source)
context.teardown_errors(err, options, result)
return result
@@ -650,6 +656,9 @@ def search_include_directories(dirs, qualified_name, suffix, pos, include=False)
for dirname in dirs:
path = os.path.join(dirname, dotted_filename)
if os.path.exists(path):
+ if '.' in qualified_name and '.' in os.path.splitext(dotted_filename)[0]:
+ warning(pos, "Dotted filenames ('%s') are deprecated."
+ " Please use the normal Python package directory layout." % dotted_filename, level=1)
return path
# search for filename in package structure e.g. <dir>/foo/bar.pxd or <dir>/foo/bar/__init__.pxd
diff --git a/Cython/Compiler/ModuleNode.py b/Cython/Compiler/ModuleNode.py
index 2a9e1090e..62b305a50 100644
--- a/Cython/Compiler/ModuleNode.py
+++ b/Cython/Compiler/ModuleNode.py
@@ -1203,6 +1203,18 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
module_state_traverse.putln(
"Py_VISIT(traverse_module_state->%s);" %
entry.type.typeptr_cname)
+ if entry.type.typeobj_cname is not None:
+ module_state.putln("PyObject *%s;" % entry.type.typeobj_cname)
+ module_state_defines.putln("#define %s %s->%s" % (
+ entry.type.typeobj_cname,
+ Naming.modulestateglobal_cname,
+ entry.type.typeobj_cname))
+ module_state_clear.putln(
+ "Py_CLEAR(clear_module_state->%s);" % (
+ entry.type.typeobj_cname))
+ module_state_traverse.putln(
+ "Py_VISIT(traverse_module_state->%s);" % (
+ entry.type.typeobj_cname))
code.putln("#endif")
def generate_cvariable_declarations(self, env, code, definition):
@@ -1365,12 +1377,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
freecount_name = scope.mangle_internal(Naming.freecount_name)
decls = code.globalstate['decls']
- if cinit_func_entry is None:
- decls.putln("#if !CYTHON_COMPILING_IN_LIMITED_API")
decls.putln("static PyObject *%s(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/" %
slot_func)
- if cinit_func_entry is None:
- decls.putln("#endif")
code.putln("")
if freelist_size:
code.putln("static %s[%d];" % (
@@ -1378,8 +1386,6 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
freelist_size))
code.putln("static int %s = 0;" % freecount_name)
code.putln("")
- if cinit_func_entry is None:
- code.putln("#if !CYTHON_COMPILING_IN_LIMITED_API")
code.putln(
"static PyObject *%s(PyTypeObject *t, %sPyObject *a, %sPyObject *k) {" % (
slot_func, unused_marker, unused_marker))
@@ -1390,10 +1396,16 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if need_self_cast:
code.putln("%s;" % scope.parent_type.declaration_code("p"))
if base_type:
+ code.putln("#if CYTHON_COMPILING_IN_LIMITED_API")
+ code.putln("newfunc new_func = (newfunc)PyType_GetSlot(%s, Py_tp_new);" %
+ base_type.typeptr_cname)
+ code.putln("PyObject *o = new_func(t, a, k);")
+ code.putln("#else")
tp_new = TypeSlots.get_base_slot_function(scope, tp_slot)
if tp_new is None:
tp_new = "%s->tp_new" % base_type.typeptr_cname
code.putln("PyObject *o = %s(t, a, k);" % tp_new)
+ code.putln("#endif")
else:
code.putln("PyObject *o;")
code.putln("#if CYTHON_COMPILING_IN_LIMITED_API")
@@ -1425,10 +1437,11 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln("} else {")
code.putln("o = (PyObject *) PyBaseObject_Type.tp_new(t, %s, 0);" % Naming.empty_tuple)
code.putln("}")
- code.putln("#endif")
code.putln("if (unlikely(!o)) return 0;")
if freelist_size and not base_type:
code.putln('}')
+ if not base_type:
+ code.putln("#endif")
if need_self_cast:
code.putln("p = %s;" % type.cast_code("o"))
#if need_self_cast:
@@ -1488,8 +1501,6 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln("return NULL;")
code.putln(
"}")
- if cinit_func_entry is None:
- code.putln("#endif")
def generate_dealloc_function(self, scope, code):
tp_slot = TypeSlots.ConstructorSlot("tp_dealloc", '__dealloc__')
@@ -2234,8 +2245,6 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
for slot in TypeSlots.slot_table:
if slot.slot_name == "tp_flags":
continue
- if slot.slot_name == "tp_new" and scope.lookup_here("__cinit__") is None:
- continue
if slot.slot_name == "tp_dealloc" and scope.lookup_here("__dealloc__") is None:
continue
if slot.slot_name == "tp_getattro":
@@ -2452,26 +2461,18 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln('int %s;' % Naming.lineno_cname)
code.putln('int %s;' % Naming.clineno_cname)
code.putln('const char *%s;' % Naming.filename_cname)
+ code.putln('#ifdef __Pyx_CyFunction_USED')
+ code.putln('PyObject *%s;' % Naming.cyfunction_type_cname)
+ code.putln('#endif')
+ code.putln('#ifdef __Pyx_FusedFunction_USED')
+ code.putln('PyObject *%s;' % Naming.fusedfunction_type_cname)
+ code.putln('#endif')
def generate_module_state_end(self, env, modules, globalstate):
module_state = globalstate['module_state']
module_state_defines = globalstate['module_state_defines']
module_state_clear = globalstate['module_state_clear']
module_state_traverse = globalstate['module_state_traverse']
- for module in modules:
- definition = module is env
- for entry in env.c_class_entries:
- if definition or entry.defined_in_pxd:
- module_state.putln("PyObject *%s;" % entry.type.typeobj_cname)
- module_state_defines.putln("#define %s %s->%s" % (
- entry.type.typeobj_cname,
- Naming.modulestateglobal_cname,
- entry.type.typeobj_cname))
- module_state_clear.putln("Py_CLEAR(clear_module_state->%s);" %
- entry.type.typeobj_cname)
- module_state_traverse.putln(
- "Py_VISIT(traverse_module_state->%s);" %
- entry.type.typeobj_cname)
module_state.putln('} %s;' % Naming.modulestate_cname)
module_state.putln('')
module_state.putln('#ifdef __cplusplus')
@@ -2542,6 +2543,18 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
Naming.filename_cname,
Naming.modulestateglobal_cname,
Naming.filename_cname))
+ code.putln('#ifdef __Pyx_CyFunction_USED')
+ code.putln('#define %s %s->%s' % (
+ Naming.cyfunction_type_cname,
+ Naming.modulestateglobal_cname,
+ Naming.cyfunction_type_cname))
+ code.putln('#endif')
+ code.putln('#ifdef __Pyx_FusedFunction_USED')
+ code.putln('#define %s %s->%s' %
+ (Naming.fusedfunction_type_cname,
+ Naming.modulestateglobal_cname,
+ Naming.fusedfunction_type_cname))
+ code.putln('#endif')
def generate_module_state_clear(self, env, code):
code.putln("#if CYTHON_COMPILING_IN_LIMITED_API")
@@ -2560,6 +2573,14 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
Naming.empty_bytes)
code.putln('Py_CLEAR(clear_module_state->%s);' %
Naming.empty_unicode)
+ code.putln('#ifdef __Pyx_CyFunction_USED')
+ code.putln('Py_CLEAR(clear_module_state->%s);' %
+ Naming.cyfunction_type_cname)
+ code.putln('#endif')
+ code.putln('#ifdef __Pyx_FusedFunction_USED')
+ code.putln('Py_CLEAR(clear_module_state->%s);' %
+ Naming.fusedfunction_type_cname)
+ code.putln('#endif')
def generate_module_state_traverse(self, env, code):
code.putln("#if CYTHON_COMPILING_IN_LIMITED_API")
@@ -2578,6 +2599,14 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
Naming.empty_bytes)
code.putln('Py_VISIT(traverse_module_state->%s);' %
Naming.empty_unicode)
+ code.putln('#ifdef __Pyx_CyFunction_USED')
+ code.putln('Py_VISIT(traverse_module_state->%s);' %
+ Naming.cyfunction_type_cname)
+ code.putln('#endif')
+ code.putln('#ifdef __Pyx_FusedFunction_USED')
+ code.putln('Py_VISIT(traverse_module_state->%s);' %
+ Naming.fusedfunction_type_cname)
+ code.putln('#endif')
def generate_module_init_func(self, imported_modules, env, code):
subfunction = self.mod_init_subfunction(self.scope, code)
@@ -3371,7 +3400,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if type.vtabptr_cname:
code.globalstate.use_utility_code(
UtilityCode.load_cached('GetVTable', 'ImportExport.c'))
- code.putln("%s = (struct %s*)__Pyx_GetVtable(%s->tp_dict); %s" % (
+ code.putln("%s = (struct %s*)__Pyx_GetVtable(%s); %s" % (
type.vtabptr_cname,
type.vtabstruct_cname,
type.typeptr_cname,
diff --git a/Cython/Compiler/Naming.py b/Cython/Compiler/Naming.py
index f1902a77d..aed0910c7 100644
--- a/Cython/Compiler/Naming.py
+++ b/Cython/Compiler/Naming.py
@@ -134,6 +134,8 @@ tp_dict_version_temp = pyrex_prefix + "tp_dict_version"
obj_dict_version_temp = pyrex_prefix + "obj_dict_version"
type_dict_guard_temp = pyrex_prefix + "type_dict_guard"
cython_runtime_cname = pyrex_prefix + "cython_runtime"
+cyfunction_type_cname = pyrex_prefix + "CyFunctionType"
+fusedfunction_type_cname = pyrex_prefix + "FusedFunctionType"
global_code_object_cache_find = pyrex_prefix + 'find_code_object'
global_code_object_cache_insert = pyrex_prefix + 'insert_code_object'
diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py
index f6980dbe2..670fff66e 100644
--- a/Cython/Compiler/Nodes.py
+++ b/Cython/Compiler/Nodes.py
@@ -877,7 +877,10 @@ class CArgDeclNode(Node):
# inject type declaration from annotations
# this is called without 'env' by AdjustDefByDirectives transform before declaration analysis
- if self.annotation and env and env.directives['annotation_typing'] and self.base_type.name is None:
+ if (self.annotation and env and env.directives['annotation_typing']
+ # CSimpleBaseTypeNode has a name attribute; CAnalysedBaseTypeNode
+ # (and maybe other node types) doesn't
+ and getattr(self.base_type, "name", None) is None):
arg_type = self.inject_type_from_annotations(env)
if arg_type is not None:
base_type = arg_type
@@ -1678,6 +1681,8 @@ class FuncDefNode(StatNode, BlockNode):
return arg
if other_type is None:
error(type_node.pos, "Not a type")
+ elif other_type.is_fused and any(orig_type.same_as(t) for t in other_type.types):
+ pass # use specialized rather than fused type
elif orig_type is not py_object_type and not orig_type.same_as(other_type):
error(arg.base_type.pos, "Signature does not agree with previous declaration")
error(type_node.pos, "Previous declaration here")
@@ -2934,9 +2939,6 @@ class DefNode(FuncDefNode):
arg.name = name_declarator.name
arg.type = type
- if type.is_fused:
- self.has_fused_arguments = True
-
self.align_argument_type(env, arg)
if name_declarator and name_declarator.cname:
error(self.pos, "Python function argument cannot have C name specification")
@@ -2967,6 +2969,9 @@ class DefNode(FuncDefNode):
error(arg.pos, "Only Python type arguments can have 'not None'")
if arg.or_none:
error(arg.pos, "Only Python type arguments can have 'or None'")
+
+ if arg.type.is_fused:
+ self.has_fused_arguments = True
env.fused_to_specific = f2s
if has_np_pythran(env):
@@ -4675,26 +4680,22 @@ class PyClassDefNode(ClassDefNode):
pass # no base classes => no inherited metaclass
else:
self.metaclass = ExprNodes.PyClassMetaclassNode(
- pos, mkw=mkdict, bases=self.bases)
+ pos, class_def_node=self)
needs_metaclass_calculation = False
else:
needs_metaclass_calculation = True
self.dict = ExprNodes.PyClassNamespaceNode(
- pos, name=name, doc=doc_node,
- metaclass=self.metaclass, bases=self.bases, mkw=self.mkw)
+ pos, name=name, doc=doc_node, class_def_node=self)
self.classobj = ExprNodes.Py3ClassNode(
- pos, name=name,
- bases=self.bases, dict=self.dict, doc=doc_node,
- metaclass=self.metaclass, mkw=self.mkw,
+ pos, name=name, class_def_node=self, doc=doc_node,
calculate_metaclass=needs_metaclass_calculation,
allow_py2_metaclass=allow_py2_metaclass)
else:
# no bases, no metaclass => old style class creation
self.dict = ExprNodes.DictNode(pos, key_value_pairs=[])
self.classobj = ExprNodes.ClassNode(
- pos, name=name,
- bases=bases, dict=self.dict, doc=doc_node)
+ pos, name=name, class_def_node=self, doc=doc_node)
self.target = ExprNodes.NameNode(pos, name=name)
self.class_cell = ExprNodes.ClassCellInjectorNode(self.pos)
@@ -4712,7 +4713,7 @@ class PyClassDefNode(ClassDefNode):
visibility='private',
module_name=None,
class_name=self.name,
- bases=self.classobj.bases or ExprNodes.TupleNode(self.pos, args=[]),
+ bases=self.bases or ExprNodes.TupleNode(self.pos, args=[]),
decorators=self.decorators,
body=self.body,
in_pxd=False,
@@ -4736,6 +4737,10 @@ class PyClassDefNode(ClassDefNode):
args=[class_result])
self.decorators = None
self.class_result = class_result
+ if self.bases:
+ self.bases.analyse_declarations(env)
+ if self.mkw:
+ self.mkw.analyse_declarations(env)
self.class_result.analyse_declarations(env)
self.target.analyse_target_declaration(env)
cenv = self.create_scope(env)
@@ -4748,10 +4753,10 @@ class PyClassDefNode(ClassDefNode):
def analyse_expressions(self, env):
if self.bases:
self.bases = self.bases.analyse_expressions(env)
- if self.metaclass:
- self.metaclass = self.metaclass.analyse_expressions(env)
if self.mkw:
self.mkw = self.mkw.analyse_expressions(env)
+ if self.metaclass:
+ self.metaclass = self.metaclass.analyse_expressions(env)
self.dict = self.dict.analyse_expressions(env)
self.class_result = self.class_result.analyse_expressions(env)
cenv = self.scope
@@ -5078,11 +5083,29 @@ class CClassDefNode(ClassDefNode):
return
if entry.visibility != 'extern':
code.putln("#if CYTHON_COMPILING_IN_LIMITED_API")
- code.putln(
- "%s = PyType_FromSpec(&%s_spec); %s" % (
- typeobj_cname,
- typeobj_cname,
- code.error_goto_if_null(typeobj_cname, entry.pos)))
+ tuple_temp = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
+ base_type = scope.parent_type.base_type
+ if base_type:
+ code.putln(
+ "%s = PyTuple_Pack(1, (PyObject *)%s); %s" % (
+ tuple_temp,
+ base_type.typeptr_cname,
+ code.error_goto_if_null(tuple_temp, entry.pos)))
+ code.put_gotref(tuple_temp)
+ code.putln(
+ "%s = PyType_FromSpecWithBases(&%s_spec, %s); %s" % (
+ typeobj_cname,
+ typeobj_cname,
+ tuple_temp,
+ code.error_goto_if_null(typeobj_cname, entry.pos)))
+ code.put_xdecref_clear(tuple_temp, type=py_object_type)
+ code.funcstate.release_temp(tuple_temp)
+ else:
+ code.putln(
+ "%s = PyType_FromSpec(&%s_spec); %s" % (
+ typeobj_cname,
+ typeobj_cname,
+ code.error_goto_if_null(typeobj_cname, entry.pos)))
code.putln("#else")
for slot in TypeSlots.slot_table:
slot.generate_dynamic_init_code(scope, code)
@@ -5227,7 +5250,11 @@ class CClassDefNode(ClassDefNode):
# Generate code to initialise the typeptr of an extension
# type defined in this module to point to its type object.
if type.typeobj_cname:
- code.putln("#if !CYTHON_COMPILING_IN_LIMITED_API")
+ code.putln("#if CYTHON_COMPILING_IN_LIMITED_API")
+ code.putln(
+ "%s = (PyTypeObject *)%s;" % (
+ type.typeptr_cname, type.typeobj_cname))
+ code.putln("#else")
code.putln(
"%s = &%s;" % (
type.typeptr_cname, type.typeobj_cname))
@@ -6203,6 +6230,7 @@ class ReturnStatNode(StatNode):
rhs=value,
code=code,
have_gil=self.in_nogil_context)
+ value.generate_post_assignment_code(code)
elif self.in_generator:
# return value == raise StopIteration(value), but uncatchable
code.globalstate.use_utility_code(
@@ -6216,7 +6244,7 @@ class ReturnStatNode(StatNode):
code.putln("%s = %s;" % (
Naming.retval_cname,
value.result_as(self.return_type)))
- value.generate_post_assignment_code(code)
+ value.generate_post_assignment_code(code)
value.free_temps(code)
else:
if self.return_type.is_pyobject:
@@ -9334,7 +9362,7 @@ class ParallelRangeNode(ParallelStatNode):
# TODO: check if the step is 0 and if so, raise an exception in a
# 'with gil' block. For now, just abort
- code.putln("if (%(step)s == 0) abort();" % fmt_dict)
+ code.putln("if ((%(step)s == 0)) abort();" % fmt_dict)
self.setup_parallel_control_flow_block(code) # parallel control flow block
@@ -9455,12 +9483,15 @@ class ParallelRangeNode(ParallelStatNode):
code.putln("%(target)s = (%(target_type)s)(%(start)s + %(step)s * %(i)s);" % fmt_dict)
self.initialize_privates_to_nan(code, exclude=self.target.entry)
- if self.is_parallel:
+ if self.is_parallel and not self.is_nested_prange:
+ # nested pranges are not omp'ified, temps go to outer loops
code.funcstate.start_collecting_temps()
self.body.generate_execution_code(code)
self.trap_parallel_exit(code, should_flush=True)
- self.privatize_temps(code)
+ if self.is_parallel and not self.is_nested_prange:
+ # nested pranges are not omp'ified, temps go to outer loops
+ self.privatize_temps(code)
if self.breaking_label_used:
# Put a guard around the loop body in case return, break or
diff --git a/Cython/Compiler/Pythran.py b/Cython/Compiler/Pythran.py
index 76d7ca409..c02704a91 100644
--- a/Cython/Compiler/Pythran.py
+++ b/Cython/Compiler/Pythran.py
@@ -9,9 +9,16 @@ import cython
try:
import pythran
pythran_is_pre_0_9 = tuple(map(int, pythran.__version__.split('.')[0:2])) < (0, 9)
+ pythran_is_pre_0_9_6 = tuple(map(int, pythran.__version__.split('.')[0:3])) < (0, 9, 6)
except ImportError:
pythran = None
pythran_is_pre_0_9 = True
+ pythran_is_pre_0_9_6 = True
+
+if pythran_is_pre_0_9_6:
+ pythran_builtins = '__builtin__'
+else:
+ pythran_builtins = 'builtins'
# Pythran/Numpy specific operations
@@ -47,7 +54,7 @@ def pythran_type(Ty, ptype="ndarray"):
if Ty.is_pythran_expr:
return Ty.pythran_type
#if Ty.is_none:
- # return "decltype(pythonic::__builtin__::None)"
+ # return "decltype(pythonic::builtins::None)"
if Ty.is_numeric:
return Ty.sign_and_name()
raise ValueError("unsupported pythran type %s (%s)" % (Ty, type(Ty)))
@@ -82,7 +89,9 @@ def _index_type_code(index_with_type):
idx, index_type = index_with_type
if idx.is_slice:
n = 2 + int(not idx.step.is_none)
- return "pythonic::__builtin__::functor::slice{}(%s)" % (",".join(["0"]*n))
+ return "pythonic::%s::functor::slice{}(%s)" % (
+ pythran_builtins,
+ ",".join(["0"]*n))
elif index_type.is_int:
return "std::declval<%s>()" % index_type.sign_and_name()
elif index_type.is_pythran_expr:
@@ -154,7 +163,7 @@ def to_pythran(op, ptype=None):
if is_type(op_type, ["is_pythran_expr", "is_numeric", "is_float", "is_complex"]):
return op.result()
if op.is_none:
- return "pythonic::__builtin__::None"
+ return "pythonic::%s::None" % pythran_builtins
if ptype is None:
ptype = pythran_type(op_type)
@@ -207,7 +216,7 @@ def include_pythran_generic(env):
env.add_include_file("pythonic/types/bool.hpp")
env.add_include_file("pythonic/types/ndarray.hpp")
env.add_include_file("pythonic/numpy/power.hpp")
- env.add_include_file("pythonic/__builtin__/slice.hpp")
+ env.add_include_file("pythonic/%s/slice.hpp" % pythran_builtins)
env.add_include_file("<new>") # for placement new
for i in (8, 16, 32, 64):
diff --git a/Cython/Compiler/Visitor.py b/Cython/Compiler/Visitor.py
index 94c6b0693..368c38058 100644
--- a/Cython/Compiler/Visitor.py
+++ b/Cython/Compiler/Visitor.py
@@ -832,6 +832,10 @@ class PrintTree(TreeVisitor):
result += "(type=%s, name=\"%s\")" % (repr(node.type), node.name)
elif isinstance(node, Nodes.DefNode):
result += "(name=\"%s\")" % node.name
+ elif isinstance(node, ExprNodes.AttributeNode):
+ result += "(type=%s, attribute=\"%s\")" % (repr(node.type), node.attribute)
+ elif isinstance(node, ExprNodes.ConstNode):
+ result += "(type=%s, value=\"%s\")" % (repr(node.type), node.value)
elif isinstance(node, ExprNodes.ExprNode):
t = node.type
result += "(type=%s)" % repr(t)
diff --git a/Cython/Includes/cpython/descr.pxd b/Cython/Includes/cpython/descr.pxd
index 725f5bbdd..5075f0bbd 100644
--- a/Cython/Includes/cpython/descr.pxd
+++ b/Cython/Includes/cpython/descr.pxd
@@ -21,6 +21,6 @@ cdef extern from "Python.h":
cdef wrapperbase* d_base
cdef void* d_wrapped
- PyDescr_NewWrapper(PyTypeObject* cls, wrapperbase* wrapper, void* wrapped)
+ object PyDescr_NewWrapper(PyTypeObject* cls, wrapperbase* wrapper, void* wrapped)
int PyDescr_IsData(descr)
diff --git a/Cython/Includes/cpython/genobject.pxd b/Cython/Includes/cpython/genobject.pxd
new file mode 100644
index 000000000..337b3cc0a
--- /dev/null
+++ b/Cython/Includes/cpython/genobject.pxd
@@ -0,0 +1,25 @@
+from .pystate cimport PyFrameObject
+
+cdef extern from "Python.h":
+
+ ###########################################################################
+ # Generator Objects
+ ###########################################################################
+
+ bint PyGen_Check(object ob)
+ # Return true if ob is a generator object; ob must not be NULL.
+
+ bint PyGen_CheckExact(object ob)
+ # Return true if ob's type is PyGen_Type; ob must not be NULL.
+
+ object PyGen_New(PyFrameObject *frame)
+ # Return value: New reference.
+ # Create and return a new generator object based on the frame object. A
+ # reference to frame is stolen by this function. The argument must not be
+ # NULL.
+
+ object PyGen_NewWithQualName(PyFrameObject *frame, object name, object qualname)
+ # Return value: New reference.
+ # Create and return a new generator object based on the frame object, with
+ # __name__ and __qualname__ set to name and qualname. A reference to frame
+ # is stolen by this function. The frame argument must not be NULL.
diff --git a/Cython/Includes/cpython/number.pxd b/Cython/Includes/cpython/number.pxd
index 3ad8de59c..ded35c292 100644
--- a/Cython/Includes/cpython/number.pxd
+++ b/Cython/Includes/cpython/number.pxd
@@ -27,6 +27,13 @@ cdef extern from "Python.h":
# failure. This is the equivalent of the Python expression "o1 *
# o2".
+ object PyNumber_MatrixMultiply(object o1, object o2)
+ # Return value: New reference.
+ # Returns the result of matrix multiplication on o1 and o2, or
+ # NULL on failure. This is the equivalent of the Python
+ # expression "o1 @ o2".
+ # New in version 3.5.
+
object PyNumber_Divide(object o1, object o2)
# Return value: New reference.
# Returns the result of dividing o1 by o2, or NULL on
@@ -133,6 +140,13 @@ cdef extern from "Python.h":
# failure. The operation is done in-place when o1 supports
# it. This is the equivalent of the Python statement "o1 *= o2".
+ object PyNumber_InPlaceMatrixMultiply(object o1, object o2)
+ # Return value: New reference.
+ # Returns the result of matrix multiplication on o1 and o2, or
+ # NULL on failure. The operation is done in-place when o1 supports
+ # it. This is the equivalent of the Python statement "o1 @= o2".
+ # New in version 3.5.
+
object PyNumber_InPlaceDivide(object o1, object o2)
# Return value: New reference.
# Returns the result of dividing o1 by o2, or NULL on failure. The
diff --git a/Cython/Includes/cpython/object.pxd b/Cython/Includes/cpython/object.pxd
index 471281863..aa625523d 100644
--- a/Cython/Includes/cpython/object.pxd
+++ b/Cython/Includes/cpython/object.pxd
@@ -132,6 +132,17 @@ cdef extern from "Python.h":
# failure. This is the equivalent of the Python statement "del
# o.attr_name".
+ object PyObject_GenericGetDict(object o, void *context)
+ # Return value: New reference.
+ # A generic implementation for the getter of a __dict__ descriptor. It
+ # creates the dictionary if necessary.
+ # New in version 3.3.
+
+ int PyObject_GenericSetDict(object o, object value, void *context) except -1
+ # A generic implementation for the setter of a __dict__ descriptor. This
+ # implementation does not allow the dictionary to be deleted.
+ # New in version 3.3.
+
int Py_LT, Py_LE, Py_EQ, Py_NE, Py_GT, Py_GE
object PyObject_RichCompare(object o1, object o2, int opid)
@@ -332,6 +343,13 @@ cdef extern from "Python.h":
# returned. On error, -1 is returned. This is the equivalent to
# the Python expression "len(o)".
+ Py_ssize_t PyObject_LengthHint(object o, Py_ssize_t default) except -1
+ # Return an estimated length for the object o. First try to return its
+ # actual length, then an estimate using __length_hint__(), and finally
+ # return the default value. On error, return -1. This is the equivalent of
+ # the Python expression "operator.length_hint(o, default)".
+ # New in version 3.4.
+
object PyObject_GetItem(object o, object key)
# Return value: New reference.
# Return element of o corresponding to the object key or NULL on
diff --git a/Cython/Includes/cpython/pylifecycle.pxd b/Cython/Includes/cpython/pylifecycle.pxd
index 50794380e..2c71e3716 100644
--- a/Cython/Includes/cpython/pylifecycle.pxd
+++ b/Cython/Includes/cpython/pylifecycle.pxd
@@ -27,9 +27,9 @@ cdef extern from "Python.h":
void Py_EndInterpreter(PyThreadState *)
- # Py_PyAtExit is for the atexit module, Py_AtExit is for low-level
+ # _Py_PyAtExit is for the atexit module, Py_AtExit is for low-level
# exit functions.
- void _Py_PyAtExit(void (*func)())
+ void _Py_PyAtExit(void (*func)(object), object)
int Py_AtExit(void (*func)())
void Py_Exit(int)
diff --git a/Cython/Includes/cpython/slice.pxd b/Cython/Includes/cpython/slice.pxd
index 00013754f..202dea716 100644
--- a/Cython/Includes/cpython/slice.pxd
+++ b/Cython/Includes/cpython/slice.pxd
@@ -45,3 +45,26 @@ cdef extern from "Python.h":
#
# Changed in version 3.2: The parameter type for the slice parameter was
# PySliceObject* before.
+
+ int PySlice_Unpack(object slice, Py_ssize_t *start, Py_ssize_t *stop,
+ Py_ssize_t *step) except -1
+ # Extract the start, stop and step data members from a slice object as C
+ # integers. Silently reduce values larger than PY_SSIZE_T_MAX to
+ # PY_SSIZE_T_MAX, silently boost the start and stop values less than
+ # PY_SSIZE_T_MIN to PY_SSIZE_T_MIN, and silently boost the step values
+ # less than -PY_SSIZE_T_MAX to -PY_SSIZE_T_MAX.
+
+ # Return -1 on error, 0 on success.
+
+ # New in version 3.6.1.
+
+ Py_ssize_t PySlice_AdjustIndices(Py_ssize_t length, Py_ssize_t *start,
+ Py_ssize_t *stop, Py_ssize_t step)
+ # Adjust start/end slice indices assuming a sequence of the specified
+ # length. Out of bounds indices are clipped in a manner consistent with
+ # the handling of normal slices.
+
+ # Return the length of the slice. Always successful. Doesn't call Python
+ # code.
+
+ # New in version 3.6.1.
diff --git a/Cython/Includes/libc/math.pxd b/Cython/Includes/libc/math.pxd
index eef9e660a..b002670b2 100644
--- a/Cython/Includes/libc/math.pxd
+++ b/Cython/Includes/libc/math.pxd
@@ -104,3 +104,9 @@ cdef extern from "<math.h>" nogil:
bint isnan(long double)
bint isnormal(long double)
bint signbit(long double)
+ int fpclassify(long double)
+ const int FP_NAN
+ const int FP_INFINITE
+ const int FP_ZERO
+ const int FP_SUBNORMAL
+ const int FP_NORMAL
diff --git a/Cython/Includes/libcpp/numeric.pxd b/Cython/Includes/libcpp/numeric.pxd
new file mode 100644
index 000000000..d61b3a406
--- /dev/null
+++ b/Cython/Includes/libcpp/numeric.pxd
@@ -0,0 +1,23 @@
+cdef extern from "<numeric>" namespace "std" nogil:
+ T inner_product[InputIt1, InputIt2, T](InputIt1 first1, InputIt1 last1, InputIt2 first2, T init)
+
+ T inner_product[InputIt1, InputIt2, T, BinaryOperation1, BinaryOperation2](InputIt1 first1, InputIt1 last1,
+ InputIt2 first2, T init,
+ BinaryOperation1 op1,
+ BinaryOperation2 op2)
+
+ void iota[ForwardIt, T](ForwardIt first, ForwardIt last, T value)
+
+ T accumulate[InputIt, T](InputIt first, InputIt last, T init)
+
+ T accumulate[InputIt, T, BinaryOperation](InputIt first, InputIt last, T init, BinaryOperation op)
+
+ void adjacent_difference[InputIt, OutputIt](InputIt in_first, InputIt in_last, OutputIt out_first)
+
+ void adjacent_difference[InputIt, OutputIt, BinaryOperation](InputIt in_first, InputIt in_last, OutputIt out_first,
+ BinaryOperation op)
+
+ void partial_sum[InputIt, OutputIt](InputIt in_first, OutputIt in_last, OutputIt out_first)
+
+ void partial_sum[InputIt, OutputIt, BinaryOperation](InputIt in_first, InputIt in_last, OutputIt out_first,
+ BinaryOperation op)
diff --git a/Cython/Includes/libcpp/utility.pxd b/Cython/Includes/libcpp/utility.pxd
index ab649fe7a..ec8ba4018 100644
--- a/Cython/Includes/libcpp/utility.pxd
+++ b/Cython/Includes/libcpp/utility.pxd
@@ -13,3 +13,17 @@ cdef extern from "<utility>" namespace "std" nogil:
bint operator>(pair&, pair&)
bint operator<=(pair&, pair&)
bint operator>=(pair&, pair&)
+
+cdef extern from * namespace "cython_std" nogil:
+ """
+ #if __cplusplus > 199711L
+ #include <type_traits>
+
+ namespace cython_std {
+ template <typename T> typename std::remove_reference<T>::type&& move(T& t) noexcept { return std::move(t); }
+ template <typename T> typename std::remove_reference<T>::type&& move(T&& t) noexcept { return std::move(t); }
+ }
+
+ #endif
+ """
+ cdef T move[T](T)
diff --git a/Cython/Includes/numpy/__init__.pxd b/Cython/Includes/numpy/__init__.pxd
index 789669dac..025b503ec 100644
--- a/Cython/Includes/numpy/__init__.pxd
+++ b/Cython/Includes/numpy/__init__.pxd
@@ -244,14 +244,27 @@ cdef extern from "numpy/arrayobject.h":
cdef:
# Only taking a few of the most commonly used and stable fields.
- # One should use PyArray_* macros instead to access the C fields.
char *data
- int ndim "nd"
- npy_intp *shape "dimensions"
- npy_intp *strides
dtype descr # deprecated since NumPy 1.7 !
PyObject* base
+ @property
+ cdef int ndim(self):
+ return PyArray_NDIM(self)
+
+ @property
+ cdef npy_intp *shape(self):
+ return PyArray_DIMS(self)
+
+ @property
+ cdef npy_intp *strides(self):
+ return PyArray_STRIDES(self)
+
+ @property
+ cdef npy_intp size(self):
+ return PyArray_SIZE(ndarray)
+
+
# Note: This syntax (function definition in pxd files) is an
# experimental exception made for __getbuffer__ and __releasebuffer__
# -- the details of this may change.
diff --git a/Cython/Shadow.py b/Cython/Shadow.py
index c50f4303c..a0e014a16 100644
--- a/Cython/Shadow.py
+++ b/Cython/Shadow.py
@@ -146,27 +146,29 @@ def compile(f):
# Special functions
def cdiv(a, b):
- q = a / b
- if q < 0:
- q += 1
- return q
+ if a < 0:
+ a = -a
+ b = -b
+ if b < 0:
+ return (a + b + 1) // b
+ return a // b
def cmod(a, b):
r = a % b
- if (a*b) < 0:
+ if (a * b) < 0 and r:
r -= b
return r
# Emulated language constructs
-def cast(type, *args, **kwargs):
+def cast(type_, *args, **kwargs):
kwargs.pop('typecheck', None)
assert not kwargs
- if hasattr(type, '__call__'):
- return type(*args)
- else:
- return args[0]
+ if callable(type_):
+ if not isinstance(type_, type) or not (args and isinstance(args[0], type_)):
+ return type_(*args)
+ return args[0]
def sizeof(arg):
return 1
diff --git a/Cython/TestUtils.py b/Cython/TestUtils.py
index 9d6eb67fc..d3e73389c 100644
--- a/Cython/TestUtils.py
+++ b/Cython/TestUtils.py
@@ -2,6 +2,8 @@ from __future__ import absolute_import
import os
import unittest
+import shlex
+import sys
import tempfile
from .Compiler import Errors
@@ -184,34 +186,41 @@ class TreeAssertVisitor(VisitorTransform):
visit_Node = VisitorTransform.recurse_to_children
-def unpack_source_tree(tree_file, dir=None):
- if dir is None:
- dir = tempfile.mkdtemp()
- header = []
- cur_file = None
- f = open(tree_file)
- try:
- lines = f.readlines()
- finally:
- f.close()
- del f
- try:
- for line in lines:
- if line[:5] == '#####':
- filename = line.strip().strip('#').strip().replace('/', os.path.sep)
- path = os.path.join(dir, filename)
- if not os.path.exists(os.path.dirname(path)):
- os.makedirs(os.path.dirname(path))
- if cur_file is not None:
- f, cur_file = cur_file, None
- f.close()
- cur_file = open(path, 'w')
- elif cur_file is not None:
- cur_file.write(line)
- elif line.strip() and not line.lstrip().startswith('#'):
- if line.strip() not in ('"""', "'''"):
- header.append(line)
- finally:
- if cur_file is not None:
- cur_file.close()
- return dir, ''.join(header)
+def unpack_source_tree(tree_file, workdir, cython_root):
+ programs = {
+ 'PYTHON': [sys.executable],
+ 'CYTHON': [sys.executable, os.path.join(cython_root, 'cython.py')],
+ 'CYTHONIZE': [sys.executable, os.path.join(cython_root, 'cythonize.py')]
+ }
+
+ if workdir is None:
+ workdir = tempfile.mkdtemp()
+ header, cur_file = [], None
+ with open(tree_file) as f:
+ try:
+ for line in f:
+ if line.startswith('#####'):
+ filename = line.strip().strip('#').strip().replace('/', os.path.sep)
+ path = os.path.join(workdir, filename)
+ if not os.path.exists(os.path.dirname(path)):
+ os.makedirs(os.path.dirname(path))
+ if cur_file is not None:
+ to_close, cur_file = cur_file, None
+ to_close.close()
+ cur_file = open(path, 'w')
+ elif cur_file is not None:
+ cur_file.write(line)
+ elif line.strip() and not line.lstrip().startswith('#'):
+ if line.strip() not in ('"""', "'''"):
+ command = shlex.split(line)
+ if not command: continue
+ # In Python 3: prog, *args = command
+ prog, args = command[0], command[1:]
+ try:
+ header.append(programs[prog]+args)
+ except KeyError:
+ header.append(command)
+ finally:
+ if cur_file is not None:
+ cur_file.close()
+ return workdir, header
diff --git a/Cython/Utility/Builtins.c b/Cython/Utility/Builtins.c
index c508917d5..df5b0d86f 100644
--- a/Cython/Utility/Builtins.c
+++ b/Cython/Utility/Builtins.c
@@ -20,47 +20,15 @@ static PyObject* __Pyx_Globals(void); /*proto*/
// access requires a rewrite as a dedicated class.
static PyObject* __Pyx_Globals(void) {
- Py_ssize_t i;
- PyObject *names;
- PyObject *globals = $moddict_cname;
- Py_INCREF(globals);
- names = PyObject_Dir($module_cname);
- if (!names)
- goto bad;
- for (i = PyList_GET_SIZE(names)-1; i >= 0; i--) {
-#if CYTHON_COMPILING_IN_PYPY
- PyObject* name = PySequence_ITEM(names, i);
- if (!name)
- goto bad;
+ PyObject *globals;
+#if CYTHON_COMPILING_IN_LIMITED_API
+ globals = PyModule_GetDict($module_cname);
+ if (unlikely(!globals)) return NULL;
#else
- PyObject* name = PyList_GET_ITEM(names, i);
-#endif
- if (!PyDict_Contains(globals, name)) {
- PyObject* value = __Pyx_GetAttr($module_cname, name);
- if (!value) {
-#if CYTHON_COMPILING_IN_PYPY
- Py_DECREF(name);
-#endif
- goto bad;
- }
- if (PyDict_SetItem(globals, name, value) < 0) {
-#if CYTHON_COMPILING_IN_PYPY
- Py_DECREF(name);
-#endif
- Py_DECREF(value);
- goto bad;
- }
- }
-#if CYTHON_COMPILING_IN_PYPY
- Py_DECREF(name);
+ globals = $moddict_cname;
#endif
- }
- Py_DECREF(names);
+ Py_INCREF(globals);
return globals;
-bad:
- Py_XDECREF(names);
- Py_XDECREF(globals);
- return NULL;
}
//////////////////// PyExecGlobals.proto ////////////////////
diff --git a/Cython/Utility/CommonStructures.c b/Cython/Utility/CommonStructures.c
index dfbacb2ab..127fa5148 100644
--- a/Cython/Utility/CommonStructures.c
+++ b/Cython/Utility/CommonStructures.c
@@ -1,43 +1,108 @@
/////////////// FetchCommonType.proto ///////////////
static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type);
+#if CYTHON_COMPILING_IN_LIMITED_API
+static PyObject* __Pyx_FetchCommonTypeFromSpec(PyType_Spec *spec, PyObject *bases);
+#endif
/////////////// FetchCommonType ///////////////
-static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) {
- PyObject* fake_module;
- PyTypeObject* cached_type = NULL;
+static PyObject *__Pyx_FetchSharedCythonABIModule(void) {
+ PyObject *abi_module = PyImport_AddModule((char*) "_cython_" CYTHON_ABI);
+ if (!abi_module) return NULL;
+ Py_INCREF(abi_module);
+ return abi_module;
+}
- fake_module = PyImport_AddModule((char*) "_cython_" CYTHON_ABI);
- if (!fake_module) return NULL;
- Py_INCREF(fake_module);
+static int __Pyx_VerifyCachedType(PyObject *cached_type,
+ const char *name,
+ Py_ssize_t basicsize,
+ Py_ssize_t expected_basicsize) {
+ if (!PyType_Check(cached_type)) {
+ PyErr_Format(PyExc_TypeError,
+ "Shared Cython type %.200s is not a type object", name);
+ return -1;
+ }
+ if (basicsize != expected_basicsize) {
+ PyErr_Format(PyExc_TypeError,
+ "Shared Cython type %.200s has the wrong size, try recompiling",
+ name);
+ return -1;
+ }
+ return 0;
+}
+
+static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) {
+ PyObject* abi_module;
+ PyTypeObject *cached_type = NULL;
- cached_type = (PyTypeObject*) PyObject_GetAttrString(fake_module, type->tp_name);
+ abi_module = __Pyx_FetchSharedCythonABIModule();
+ if (!abi_module) return NULL;
+ cached_type = (PyTypeObject*) PyObject_GetAttrString(abi_module, type->tp_name);
if (cached_type) {
- if (!PyType_Check((PyObject*)cached_type)) {
- PyErr_Format(PyExc_TypeError,
- "Shared Cython type %.200s is not a type object",
- type->tp_name);
+ if (__Pyx_VerifyCachedType(
+ (PyObject *)cached_type,
+ type->tp_name,
+ cached_type->tp_basicsize,
+ type->tp_basicsize) < 0) {
goto bad;
}
- if (cached_type->tp_basicsize != type->tp_basicsize) {
- PyErr_Format(PyExc_TypeError,
- "Shared Cython type %.200s has the wrong size, try recompiling",
- type->tp_name);
+ goto done;
+ }
+
+ if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad;
+ PyErr_Clear();
+ if (PyType_Ready(type) < 0) goto bad;
+ if (PyObject_SetAttrString(abi_module, type->tp_name, (PyObject *)type) < 0)
+ goto bad;
+ Py_INCREF(type);
+ cached_type = type;
+
+done:
+ Py_DECREF(abi_module);
+ // NOTE: always returns owned reference, or NULL on error
+ return cached_type;
+
+bad:
+ Py_XDECREF(cached_type);
+ cached_type = NULL;
+ goto done;
+}
+
+#if CYTHON_COMPILING_IN_LIMITED_API
+static PyObject *__Pyx_FetchCommonTypeFromSpec(PyType_Spec *spec, PyObject *bases) {
+ PyObject *abi_module, *py_basicsize, *type, *cached_type = NULL;
+ Py_ssize_t basicsize;
+
+ abi_module = __Pyx_FetchSharedCythonABIModule();
+ if (!abi_module) return NULL;
+ cached_type = PyObject_GetAttrString(abi_module, spec->name);
+ if (cached_type) {
+ py_basicsize = PyObject_GetAttrString(cached_type, "__basicsize__");
+ if (!py_basicsize) goto bad;
+ basicsize = PyLong_AsSsize_t(py_basicsize);
+ Py_DECREF(py_basicsize);
+ py_basicsize = 0;
+ if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) goto bad;
+ if (__Pyx_VerifyCachedType(
+ cached_type,
+ spec->name,
+ basicsize,
+ spec->basicsize) < 0) {
goto bad;
}
- } else {
- if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad;
- PyErr_Clear();
- if (PyType_Ready(type) < 0) goto bad;
- if (PyObject_SetAttrString(fake_module, type->tp_name, (PyObject*) type) < 0)
- goto bad;
- Py_INCREF(type);
- cached_type = type;
+ goto done;
}
+ if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad;
+ PyErr_Clear();
+ type = PyType_FromSpecWithBases(spec, bases);
+ if (unlikely(!type)) goto bad;
+ if (PyObject_SetAttrString(abi_module, spec->name, type) < 0) goto bad;
+ cached_type = type;
+
done:
- Py_DECREF(fake_module);
+ Py_DECREF(abi_module);
// NOTE: always returns owned reference, or NULL on error
return cached_type;
@@ -46,6 +111,7 @@ bad:
cached_type = NULL;
goto done;
}
+#endif
/////////////// FetchCommonPointer.proto ///////////////
@@ -56,27 +122,27 @@ static void* __Pyx_FetchCommonPointer(void* pointer, const char* name);
static void* __Pyx_FetchCommonPointer(void* pointer, const char* name) {
- PyObject* fake_module = NULL;
+ PyObject* abi_module = NULL;
PyObject* capsule = NULL;
void* value = NULL;
- fake_module = PyImport_AddModule((char*) "_cython_" CYTHON_ABI);
- if (!fake_module) return NULL;
- Py_INCREF(fake_module);
+ abi_module = PyImport_AddModule((char*) "_cython_" CYTHON_ABI);
+ if (!abi_module) return NULL;
+ Py_INCREF(abi_module);
- capsule = PyObject_GetAttrString(fake_module, name);
+ capsule = PyObject_GetAttrString(abi_module, name);
if (!capsule) {
if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad;
PyErr_Clear();
capsule = PyCapsule_New(pointer, name, NULL);
if (!capsule) goto bad;
- if (PyObject_SetAttrString(fake_module, name, capsule) < 0)
+ if (PyObject_SetAttrString(abi_module, name, capsule) < 0)
goto bad;
}
value = PyCapsule_GetPointer(capsule, name);
bad:
Py_XDECREF(capsule);
- Py_DECREF(fake_module);
+ Py_DECREF(abi_module);
return value;
}
diff --git a/Cython/Utility/CythonFunction.c b/Cython/Utility/CythonFunction.c
index 3a54b78a2..a62506337 100644
--- a/Cython/Utility/CythonFunction.c
+++ b/Cython/Utility/CythonFunction.c
@@ -39,6 +39,7 @@ typedef struct {
// Dynamic default args and annotations
void *defaults;
int defaults_pyobjects;
+ size_t defaults_size; // used by FusedFunction for copying defaults
int flags;
// Defaults info
@@ -48,12 +49,14 @@ typedef struct {
PyObject *func_annotations; /* function annotations dict */
} __pyx_CyFunctionObject;
+#if !CYTHON_COMPILING_IN_LIMITED_API
static PyTypeObject *__pyx_CyFunctionType = 0;
+#endif
#define __Pyx_CyFunction_Check(obj) (__Pyx_TypeCheck(obj, __pyx_CyFunctionType))
#define __Pyx_CyFunction_NewEx(ml, flags, qualname, closure, module, globals, code) \
- __Pyx_CyFunction_New(__pyx_CyFunctionType, ml, flags, qualname, closure, module, globals, code)
+ __Pyx_CyFunction_New((PyTypeObject *)__pyx_CyFunctionType, ml, flags, qualname, closure, module, globals, code)
static PyObject *__Pyx_CyFunction_New(PyTypeObject *type, PyMethodDef *ml,
int flags, PyObject* qualname,
@@ -435,6 +438,14 @@ static PyGetSetDef __pyx_CyFunction_getsets[] = {
static PyMemberDef __pyx_CyFunction_members[] = {
{(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), PY_WRITE_RESTRICTED, 0},
+#if CYTHON_COMPILING_IN_LIMITED_API
+ {(char *) "__dictoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_dict), READONLY, 0},
+#if PY_VERSION_HEX < 0x030500A0
+ {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_weakreflist), READONLY, 0},
+#else
+ {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(PyCFunctionObject, m_weakreflist), READONLY, 0},
+#endif
+#endif
{0, 0, 0, 0, 0}
};
@@ -487,6 +498,7 @@ static PyObject *__Pyx_CyFunction_New(PyTypeObject *type, PyMethodDef *ml, int f
op->func_code = code;
// Dynamic Default args
op->defaults_pyobjects = 0;
+ op->defaults_size = 0;
op->defaults = NULL;
op->defaults_tuple = NULL;
op->defaults_kwdict = NULL;
@@ -812,6 +824,29 @@ static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func,
}
#endif
+#if CYTHON_COMPILING_IN_LIMITED_API
+static PyType_Slot __pyx_CyFunctionType_slots[] = {
+ {Py_tp_dealloc, (void *)__Pyx_CyFunction_dealloc},
+ {Py_tp_repr, (void *)__Pyx_CyFunction_repr},
+ {Py_tp_call, (void *)__Pyx_CyFunction_CallAsMethod},
+ {Py_tp_traverse, (void *)__Pyx_CyFunction_traverse},
+ {Py_tp_clear, (void *)__Pyx_CyFunction_clear},
+ {Py_tp_methods, (void *)__pyx_CyFunction_methods},
+ {Py_tp_members, (void *)__pyx_CyFunction_members},
+ {Py_tp_getset, (void *)__pyx_CyFunction_getsets},
+ {Py_tp_descr_get, (void *)__Pyx_PyMethod_New},
+ {0, 0},
+};
+
+static PyType_Spec __pyx_CyFunctionType_spec = {
+ "cython_function_or_method",
+ sizeof(__pyx_CyFunctionObject),
+ 0,
+ // TODO: Support _Py_TPFLAGS_HAVE_VECTORCALL
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /*tp_flags*/
+ __pyx_CyFunctionType_slots
+};
+#else
static PyTypeObject __pyx_CyFunctionType_type = {
PyVarObject_HEAD_INIT(0, 0)
"cython_function_or_method", /*tp_name*/
@@ -890,10 +925,15 @@ static PyTypeObject __pyx_CyFunctionType_type = {
0, /*tp_print*/
#endif
};
+#endif /* CYTHON_COMPILING_IN_LIMITED_API */
static int __pyx_CyFunction_init(void) {
+#if CYTHON_COMPILING_IN_LIMITED_API
+ __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(&__pyx_CyFunctionType_spec, NULL);
+#else
__pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type);
+#endif
if (unlikely(__pyx_CyFunctionType == NULL)) {
return -1;
}
@@ -908,6 +948,7 @@ static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t
return PyErr_NoMemory();
memset(m->defaults, 0, size);
m->defaults_pyobjects = pyobjects;
+ m->defaults_size = size;
return m->defaults;
}
@@ -965,7 +1006,7 @@ typedef struct {
} __pyx_FusedFunctionObject;
#define __pyx_FusedFunction_NewEx(ml, flags, qualname, closure, module, globals, code) \
- __pyx_FusedFunction_New(__pyx_FusedFunctionType, ml, flags, qualname, closure, module, globals, code)
+ __pyx_FusedFunction_New((PyTypeObject *)__pyx_FusedFunctionType, ml, flags, qualname, closure, module, globals, code)
static PyObject *__pyx_FusedFunction_New(PyTypeObject *type,
PyMethodDef *ml, int flags,
PyObject *qualname, PyObject *closure,
@@ -973,7 +1014,9 @@ static PyObject *__pyx_FusedFunction_New(PyTypeObject *type,
PyObject *code);
static int __pyx_FusedFunction_clear(__pyx_FusedFunctionObject *self);
+#if !CYTHON_COMPILING_IN_LIMITED_API
static PyTypeObject *__pyx_FusedFunctionType = NULL;
+#endif
static int __pyx_FusedFunction_init(void);
#define __Pyx_FusedFunction_USED
@@ -1057,6 +1100,26 @@ __pyx_FusedFunction_descr_get(PyObject *self, PyObject *obj, PyObject *type)
if (!meth)
return NULL;
+ // defaults needs copying fully rather than just copying the pointer
+ // since otherwise it will be freed on destruction of meth despite
+ // belonging to func rather than meth
+ if (func->func.defaults) {
+ PyObject **pydefaults;
+ int i;
+
+ if (!__Pyx_CyFunction_InitDefaults((PyObject*)meth,
+ func->func.defaults_size,
+ func->func.defaults_pyobjects)) {
+ Py_XDECREF((PyObject*)meth);
+ return NULL;
+ }
+ memcpy(meth->func.defaults, func->func.defaults, func->func.defaults_size);
+
+ pydefaults = __Pyx_CyFunction_Defaults(PyObject *, meth);
+ for (i = 0; i < meth->func.defaults_pyobjects; i++)
+ Py_XINCREF(pydefaults[i]);
+ }
+
Py_XINCREF(func->func.func_classobj);
meth->func.func_classobj = func->func.func_classobj;
@@ -1299,6 +1362,29 @@ static PyMemberDef __pyx_FusedFunction_members[] = {
{0, 0, 0, 0, 0},
};
+#if CYTHON_COMPILING_IN_LIMITED_API
+static PyType_Slot __pyx_FusedFunctionType_slots[] = {
+ {Py_tp_dealloc, (void *)__pyx_FusedFunction_dealloc},
+ {Py_tp_call, (void *)__pyx_FusedFunction_call},
+ {Py_tp_traverse, (void *)__pyx_FusedFunction_traverse},
+ {Py_tp_clear, (void *)__pyx_FusedFunction_clear},
+ {Py_tp_members, (void *)__pyx_FusedFunction_members},
+ {Py_tp_getset, (void *)__pyx_CyFunction_getsets},
+ {Py_tp_descr_get, (void *)__pyx_FusedFunction_descr_get},
+ {Py_mp_subscript, (void *)__pyx_FusedFunction_getitem},
+ {0, 0},
+};
+
+static PyType_Spec __pyx_FusedFunctionType_spec = {
+ "fused_cython_function",
+ sizeof(__pyx_FusedFunctionObject),
+ 0,
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, /*tp_flags*/
+ __pyx_FusedFunctionType_slots
+};
+
+#else /* !CYTHON_COMPILING_IN_LIMITED_API */
+
static PyMappingMethods __pyx_FusedFunction_mapping_methods = {
0,
(binaryfunc) __pyx_FusedFunction_getitem,
@@ -1342,6 +1428,7 @@ static PyTypeObject __pyx_FusedFunctionType_type = {
// __doc__ is None for the fused function type, but we need it to be
// a descriptor for the instance's __doc__, so rebuild descriptors in our subclass
__pyx_CyFunction_getsets, /*tp_getset*/
+ // NOTE: tp_base may be changed later during module initialisation when importing CyFunction across modules.
&__pyx_CyFunctionType_type, /*tp_base*/
0, /*tp_dict*/
__pyx_FusedFunction_descr_get, /*tp_descr_get*/
@@ -1369,9 +1456,21 @@ static PyTypeObject __pyx_FusedFunctionType_type = {
0, /*tp_print*/
#endif
};
+#endif
static int __pyx_FusedFunction_init(void) {
+#if CYTHON_COMPILING_IN_LIMITED_API
+ PyObject *bases = PyTuple_Pack(1, __pyx_CyFunctionType);
+ if (unlikely(!bases)) {
+ return -1;
+ }
+ __pyx_FusedFunctionType = __Pyx_FetchCommonTypeFromSpec(&__pyx_FusedFunctionType_spec, bases);
+ Py_DECREF(bases);
+#else
+ // Set base from __Pyx_FetchCommonType, in case it's different from the local static value.
+ __pyx_FusedFunctionType_type.tp_base = __pyx_CyFunctionType;
__pyx_FusedFunctionType = __Pyx_FetchCommonType(&__pyx_FusedFunctionType_type);
+#endif
if (__pyx_FusedFunctionType == NULL) {
return -1;
}
diff --git a/Cython/Utility/ImportExport.c b/Cython/Utility/ImportExport.c
index a350aab6d..23c4dad37 100644
--- a/Cython/Utility/ImportExport.c
+++ b/Cython/Utility/ImportExport.c
@@ -475,7 +475,7 @@ static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name,
PyObject *result = 0;
char warning[200];
Py_ssize_t basicsize;
-#ifdef Py_LIMITED_API
+#if CYTHON_COMPILING_IN_LIMITED_API
PyObject *py_basicsize;
#endif
@@ -488,7 +488,7 @@ static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name,
module_name, class_name);
goto bad;
}
-#ifndef Py_LIMITED_API
+#if !CYTHON_COMPILING_IN_LIMITED_API
basicsize = ((PyTypeObject *)result)->tp_basicsize;
#else
py_basicsize = PyObject_GetAttrString(result, "__basicsize__");
@@ -720,13 +720,17 @@ bad:
/////////////// GetVTable.proto ///////////////
-static void* __Pyx_GetVtable(PyObject *dict); /*proto*/
+static void* __Pyx_GetVtable(PyTypeObject *type); /*proto*/
/////////////// GetVTable ///////////////
-static void* __Pyx_GetVtable(PyObject *dict) {
+static void* __Pyx_GetVtable(PyTypeObject *type) {
void* ptr;
- PyObject *ob = PyObject_GetItem(dict, PYIDENT("__pyx_vtable__"));
+#if CYTHON_COMPILING_IN_LIMITED_API
+ PyObject *ob = PyObject_GetAttr((PyObject *)type, PYIDENT("__pyx_vtable__"));
+#else
+ PyObject *ob = PyObject_GetItem(type->tp_dict, PYIDENT("__pyx_vtable__"));
+#endif
if (!ob)
goto bad;
ptr = PyCapsule_GetPointer(ob, 0);
@@ -769,13 +773,13 @@ static int __Pyx_MergeVtables(PyTypeObject *type) {
// instance struct is so extended. (It would be good to also do this
// check when a multiple-base class is created in pure Python as well.)
for (i = 1; i < PyTuple_GET_SIZE(bases); i++) {
- void* base_vtable = __Pyx_GetVtable(((PyTypeObject*)PyTuple_GET_ITEM(bases, i))->tp_dict);
+ void* base_vtable = __Pyx_GetVtable(((PyTypeObject*)PyTuple_GET_ITEM(bases, i)));
if (base_vtable != NULL) {
int j;
PyTypeObject* base = type->tp_base;
for (j = 0; j < base_depth; j++) {
if (base_vtables[j] == unknown) {
- base_vtables[j] = __Pyx_GetVtable(base->tp_dict);
+ base_vtables[j] = __Pyx_GetVtable(base);
base_vtables[j + 1] = unknown;
}
if (base_vtables[j] == base_vtable) {
diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c
index b0e18f4a1..0a1aa5ea2 100644
--- a/Cython/Utility/ModuleSetupCode.c
+++ b/Cython/Utility/ModuleSetupCode.c
@@ -631,7 +631,7 @@ static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStr(PyObject *dict, PyObject
if (res == NULL) PyErr_Clear();
return res;
}
-#elif PY_MAJOR_VERSION >= 3
+#elif PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000)
#define __Pyx_PyDict_GetItemStrWithError PyDict_GetItemWithError
#define __Pyx_PyDict_GetItemStr PyDict_GetItem
#else
diff --git a/Cython/Utility/ObjectHandling.c b/Cython/Utility/ObjectHandling.c
index ff72ddddb..cee30e73f 100644
--- a/Cython/Utility/ObjectHandling.c
+++ b/Cython/Utility/ObjectHandling.c
@@ -356,6 +356,7 @@ static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) {
#endif
/////////////// GetItemInt.proto ///////////////
+//@substitute: tempita
#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \
(__Pyx_fits_Py_ssize_t(i, type, is_signed) ? \
@@ -378,6 +379,7 @@ static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i,
int is_list, int wraparound, int boundscheck);
/////////////// GetItemInt ///////////////
+//@substitute: tempita
static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) {
PyObject *r;
@@ -796,6 +798,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyTuple_GetSlice(PyObject* src, Py_ssize_t
/////////////// SliceTupleAndList ///////////////
//@requires: TupleAndListFromArray
+//@substitute: tempita
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE void __Pyx_crop_slice(Py_ssize_t* _start, Py_ssize_t* _stop, Py_ssize_t* _length) {
@@ -1293,7 +1296,7 @@ static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name)
if (unlikely(!$module_cname)) {
return NULL;
}
- result = PyObject_GetItem($module_cname, name);
+ result = PyObject_GetAttr($module_cname, name);
if (likely(result)) {
return result;
}
diff --git a/Cython/Utility/Optimize.c b/Cython/Utility/Optimize.c
index e64af6b0e..403574f58 100644
--- a/Cython/Utility/Optimize.c
+++ b/Cython/Utility/Optimize.c
@@ -190,7 +190,7 @@ static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObjec
static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value) {
PyObject* value;
-#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY
+#if PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000)
value = PyDict_GetItemWithError(d, key);
if (unlikely(!value)) {
if (unlikely(PyErr_Occurred()))
@@ -238,7 +238,7 @@ static CYTHON_INLINE PyObject *__Pyx_PyDict_SetDefault(PyObject *d, PyObject *ke
#else
if (is_safe_type == 1 || (is_safe_type == -1 &&
/* the following builtins presumably have repeatably safe and fast hash functions */
-#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY
+#if PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000)
(PyUnicode_CheckExact(key) || PyString_CheckExact(key) || PyLong_CheckExact(key)))) {
value = PyDict_GetItemWithError(d, key);
if (unlikely(!value)) {
diff --git a/Cython/Utility/Overflow.c b/Cython/Utility/Overflow.c
index fbce322f2..e277d20a9 100644
--- a/Cython/Utility/Overflow.c
+++ b/Cython/Utility/Overflow.c
@@ -20,10 +20,10 @@ TODO: Conditionally support 128-bit with intmax_t?
/////////////// Common.proto ///////////////
static int __Pyx_check_twos_complement(void) {
- if (-1 != ~0) {
+ if ((-1 != ~0)) {
PyErr_SetString(PyExc_RuntimeError, "Two's complement required for overflow checks.");
return 1;
- } else if (sizeof(short) == sizeof(int)) {
+ } else if ((sizeof(short) == sizeof(int))) {
PyErr_SetString(PyExc_RuntimeError, "sizeof(short) < sizeof(int) required for overflow checks.");
return 1;
} else {
@@ -31,11 +31,11 @@ static int __Pyx_check_twos_complement(void) {
}
}
-#define __PYX_IS_UNSIGNED(type) (((type) -1) > 0)
-#define __PYX_SIGN_BIT(type) (((unsigned type) 1) << (sizeof(type) * 8 - 1))
-#define __PYX_HALF_MAX(type) (((type) 1) << (sizeof(type) * 8 - 2))
-#define __PYX_MIN(type) (__PYX_IS_UNSIGNED(type) ? (type) 0 : 0 - __PYX_HALF_MAX(type) - __PYX_HALF_MAX(type))
-#define __PYX_MAX(type) (~__PYX_MIN(type))
+#define __PYX_IS_UNSIGNED(type) ((((type) -1) > 0))
+#define __PYX_SIGN_BIT(type) ((((unsigned type) 1) << (sizeof(type) * 8 - 1)))
+#define __PYX_HALF_MAX(type) ((((type) 1) << (sizeof(type) * 8 - 2)))
+#define __PYX_MIN(type) ((__PYX_IS_UNSIGNED(type) ? (type) 0 : 0 - __PYX_HALF_MAX(type) - __PYX_HALF_MAX(type)))
+#define __PYX_MAX(type) ((~__PYX_MIN(type)))
#define __Pyx_add_no_overflow(a, b, overflow) ((a) + (b))
#define __Pyx_add_const_no_overflow(a, b, overflow) ((a) + (b))
@@ -82,13 +82,13 @@ static CYTHON_INLINE {{UINT}} __Pyx_sub_{{NAME}}_checking_overflow({{UINT}} a, {
}
static CYTHON_INLINE {{UINT}} __Pyx_mul_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow) {
- if (sizeof({{UINT}}) < sizeof(unsigned long)) {
+ if ((sizeof({{UINT}}) < sizeof(unsigned long))) {
unsigned long big_r = ((unsigned long) a) * ((unsigned long) b);
{{UINT}} r = ({{UINT}}) big_r;
*overflow |= big_r != r;
return r;
#ifdef HAVE_LONG_LONG
- } else if (sizeof({{UINT}}) < sizeof(unsigned PY_LONG_LONG)) {
+ } else if ((sizeof({{UINT}}) < sizeof(unsigned PY_LONG_LONG))) {
unsigned PY_LONG_LONG big_r = ((unsigned PY_LONG_LONG) a) * ((unsigned PY_LONG_LONG) b);
{{UINT}} r = ({{UINT}}) big_r;
*overflow |= big_r != r;
@@ -138,13 +138,13 @@ static CYTHON_INLINE {{INT}} __Pyx_mul_const_{{NAME}}_checking_overflow({{INT}}
/////////////// BaseCaseSigned ///////////////
static CYTHON_INLINE {{INT}} __Pyx_add_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) {
- if (sizeof({{INT}}) < sizeof(long)) {
+ if ((sizeof({{INT}}) < sizeof(long))) {
long big_r = ((long) a) + ((long) b);
{{INT}} r = ({{INT}}) big_r;
*overflow |= big_r != r;
return r;
#ifdef HAVE_LONG_LONG
- } else if (sizeof({{INT}}) < sizeof(PY_LONG_LONG)) {
+ } else if ((sizeof({{INT}}) < sizeof(PY_LONG_LONG))) {
PY_LONG_LONG big_r = ((PY_LONG_LONG) a) + ((PY_LONG_LONG) b);
{{INT}} r = ({{INT}}) big_r;
*overflow |= big_r != r;
@@ -184,13 +184,13 @@ static CYTHON_INLINE {{INT}} __Pyx_sub_const_{{NAME}}_checking_overflow({{INT}}
}
static CYTHON_INLINE {{INT}} __Pyx_mul_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) {
- if (sizeof({{INT}}) < sizeof(long)) {
+ if ((sizeof({{INT}}) < sizeof(long))) {
long big_r = ((long) a) * ((long) b);
{{INT}} r = ({{INT}}) big_r;
*overflow |= big_r != r;
return ({{INT}}) r;
#ifdef HAVE_LONG_LONG
- } else if (sizeof({{INT}}) < sizeof(PY_LONG_LONG)) {
+ } else if ((sizeof({{INT}}) < sizeof(PY_LONG_LONG))) {
PY_LONG_LONG big_r = ((PY_LONG_LONG) a) * ((PY_LONG_LONG) b);
{{INT}} r = ({{INT}}) big_r;
*overflow |= big_r != r;
@@ -240,11 +240,11 @@ if (unlikely(__Pyx_check_sane_{{NAME}}())) {
/////////////// SizeCheck.proto ///////////////
static int __Pyx_check_sane_{{NAME}}(void) {
- if (sizeof({{TYPE}}) <= sizeof(int) ||
+ if (((sizeof({{TYPE}}) <= sizeof(int)) ||
#ifdef HAVE_LONG_LONG
- sizeof({{TYPE}}) == sizeof(PY_LONG_LONG) ||
+ (sizeof({{TYPE}}) == sizeof(PY_LONG_LONG)) ||
#endif
- sizeof({{TYPE}}) == sizeof(long)) {
+ (sizeof({{TYPE}}) == sizeof(long)))) {
return 0;
} else {
PyErr_Format(PyExc_RuntimeError, \
@@ -261,27 +261,27 @@ static CYTHON_INLINE {{TYPE}} __Pyx_{{BINOP}}_{{NAME}}_checking_overflow({{TYPE}
/////////////// Binop ///////////////
static CYTHON_INLINE {{TYPE}} __Pyx_{{BINOP}}_{{NAME}}_checking_overflow({{TYPE}} a, {{TYPE}} b, int *overflow) {
- if (sizeof({{TYPE}}) < sizeof(int)) {
+ if ((sizeof({{TYPE}}) < sizeof(int))) {
return __Pyx_{{BINOP}}_no_overflow(a, b, overflow);
} else if (__PYX_IS_UNSIGNED({{TYPE}})) {
- if (sizeof({{TYPE}}) == sizeof(unsigned int)) {
+ if ((sizeof({{TYPE}}) == sizeof(unsigned int))) {
return __Pyx_{{BINOP}}_unsigned_int_checking_overflow(a, b, overflow);
- } else if (sizeof({{TYPE}}) == sizeof(unsigned long)) {
+ } else if ((sizeof({{TYPE}}) == sizeof(unsigned long))) {
return __Pyx_{{BINOP}}_unsigned_long_checking_overflow(a, b, overflow);
#ifdef HAVE_LONG_LONG
- } else if (sizeof({{TYPE}}) == sizeof(unsigned PY_LONG_LONG)) {
+ } else if ((sizeof({{TYPE}}) == sizeof(unsigned PY_LONG_LONG))) {
return __Pyx_{{BINOP}}_unsigned_long_long_checking_overflow(a, b, overflow);
#endif
} else {
abort(); return 0; /* handled elsewhere */
}
} else {
- if (sizeof({{TYPE}}) == sizeof(int)) {
+ if ((sizeof({{TYPE}}) == sizeof(int))) {
return __Pyx_{{BINOP}}_int_checking_overflow(a, b, overflow);
- } else if (sizeof({{TYPE}}) == sizeof(long)) {
+ } else if ((sizeof({{TYPE}}) == sizeof(long))) {
return __Pyx_{{BINOP}}_long_checking_overflow(a, b, overflow);
#ifdef HAVE_LONG_LONG
- } else if (sizeof({{TYPE}}) == sizeof(PY_LONG_LONG)) {
+ } else if ((sizeof({{TYPE}}) == sizeof(PY_LONG_LONG))) {
return __Pyx_{{BINOP}}_long_long_checking_overflow(a, b, overflow);
#endif
} else {
diff --git a/Cython/Utility/Profile.c b/Cython/Utility/Profile.c
index e741b0f29..15ceb41cc 100644
--- a/Cython/Utility/Profile.c
+++ b/Cython/Utility/Profile.c
@@ -6,7 +6,7 @@
// but maybe some other profilers don't.
#ifndef CYTHON_PROFILE
-#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_PYSTON
+#if CYTHON_COMPILING_IN_LIMITED_API || CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_PYSTON
#define CYTHON_PROFILE 0
#else
#define CYTHON_PROFILE 1
diff --git a/Makefile b/Makefile
index dfede3e96..db7053c0c 100644
--- a/Makefile
+++ b/Makefile
@@ -17,6 +17,12 @@ sdist: dist/$(PACKAGENAME)-$(VERSION).tar.gz
dist/$(PACKAGENAME)-$(VERSION).tar.gz:
$(PYTHON) setup.py sdist
+pywheel: dist/$(PACKAGENAME)-$(VERSION)-py2.py3-none-any.whl
+
+dist/$(PACKAGENAME)-$(VERSION)-py2.py3-none-any.whl:
+ ${PYTHON} setup.py bdist_wheel --no-cython-compile --universal
+ [ -f "$@" ] # check that we generated the expected universal wheel
+
TMPDIR = .repo_tmp
.git: .gitrev
rm -rf $(TMPDIR)
@@ -26,6 +32,7 @@ TMPDIR = .repo_tmp
rm -rf $(TMPDIR)
git ls-files -d | xargs git checkout --
+# Create a git repo from an unpacked source directory.
repo: .git
diff --git a/README.rst b/README.rst
index 7004610be..817e273b2 100644
--- a/README.rst
+++ b/README.rst
@@ -59,8 +59,9 @@ Get the full source history:
Note that Cython used to ship the full version control repository in its source
distribution, but no longer does so due to space constraints. To get the
-full source history, make sure you have git installed, then step into the
-base directory of the Cython source distribution and type::
+full source history from a downloaded source archive, make sure you have git
+installed, then step into the base directory of the Cython source distribution
+and type::
make repo
diff --git a/appveyor.yml b/appveyor.yml
index a2583670a..25696c7b9 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -18,6 +18,14 @@ environment:
PYTHON_VERSION: "2.7"
PYTHON_ARCH: "64"
+ - PYTHON: "C:\\Python38"
+ PYTHON_VERSION: "3.8"
+ PYTHON_ARCH: "32"
+
+ - PYTHON: "C:\\Python38-x64"
+ PYTHON_VERSION: "3.8"
+ PYTHON_ARCH: "64"
+
- PYTHON: "C:\\Python37"
PYTHON_VERSION: "3.7"
PYTHON_ARCH: "32"
diff --git a/docs/examples/tutorial/numpy/convolve2.pyx b/docs/examples/tutorial/numpy/convolve2.pyx
index be7512fe1..c122aeb3c 100644
--- a/docs/examples/tutorial/numpy/convolve2.pyx
+++ b/docs/examples/tutorial/numpy/convolve2.pyx
@@ -1,4 +1,4 @@
-# tag: numpy_old
+# tag: numpy
# You can ignore the previous line.
# It's for internal testing of the cython documentation.
diff --git a/docs/src/tutorial/strings.rst b/docs/src/tutorial/strings.rst
index af4761921..43c9ee6a8 100644
--- a/docs/src/tutorial/strings.rst
+++ b/docs/src/tutorial/strings.rst
@@ -220,14 +220,15 @@ object. This can simply be done as follows:
.. literalinclude:: ../../examples/tutorial/string/return_memview.pyx
-If the byte input is actually encoded text, and the further processing
-should happen at the Unicode level, then the right thing to do is to
-decode the input straight away. This is almost only a problem in Python
-2.x, where Python code expects that it can pass a byte string (:obj:`str`)
-with encoded text into a text API. Since this usually happens in more
-than one place in the module's API, a helper function is almost always the
-way to go, since it allows for easy adaptation of the input normalisation
-process later.
+For read-only buffers, like :obj:`bytes`, the memoryview item type should
+be declared as ``const`` (see :ref:`readonly_views`). If the byte input is
+actually encoded text, and the further processing should happen at the
+Unicode level, then the right thing to do is to decode the input straight
+away. This is almost only a problem in Python 2.x, where Python code
+expects that it can pass a byte string (:obj:`str`) with encoded text into
+a text API. Since this usually happens in more than one place in the
+module's API, a helper function is almost always the way to go, since it
+allows for easy adaptation of the input normalisation process later.
This kind of input normalisation function will commonly look similar to
the following:
diff --git a/docs/src/userguide/extension_types.rst b/docs/src/userguide/extension_types.rst
index 91ba6c256..301ffb0d2 100644
--- a/docs/src/userguide/extension_types.rst
+++ b/docs/src/userguide/extension_types.rst
@@ -327,7 +327,8 @@ when it is deleted.::
Subclassing
=============
-An extension type may inherit from a built-in type or another extension type::
+If an extension type inherits from other types, the first base class must be
+a built-in type or another extension type::
cdef class Parrot:
...
@@ -342,7 +343,9 @@ extern extension type. If the base type is defined in another Cython module, it
must either be declared as an extern extension type or imported using the
:keyword:`cimport` statement.
-An extension type can only have one base class (no multiple inheritance).
+Multiple inheritance is supported, however the second and subsequent base
+classes must be an ordinary Python class (not an extension type or a built-in
+type).
Cython extension types can also be subclassed in Python. A Python class can
inherit from multiple extension types provided that the usual Python rules for
diff --git a/docs/src/userguide/memoryviews.rst b/docs/src/userguide/memoryviews.rst
index 98c5114aa..ce83a3005 100644
--- a/docs/src/userguide/memoryviews.rst
+++ b/docs/src/userguide/memoryviews.rst
@@ -153,6 +153,9 @@ As for NumPy, new axes can be introduced by indexing an array with ``None`` ::
One may mix new axis indexing with all other forms of indexing and slicing.
See also an example_.
+
+.. _readonly_views:
+
Read-only views
---------------
diff --git a/docs/src/userguide/source_files_and_compilation.rst b/docs/src/userguide/source_files_and_compilation.rst
index 872bbf445..3a4d38ca2 100644
--- a/docs/src/userguide/source_files_and_compilation.rst
+++ b/docs/src/userguide/source_files_and_compilation.rst
@@ -863,7 +863,7 @@ Cython code. Here is the list of currently supported directives:
selectively as decorator on an async-def coroutine to make the affected
coroutine(s) iterable and thus directly interoperable with yield-from.
- ``annotation_typing`` (True / False)
+``annotation_typing`` (True / False)
Uses function argument annotations to determine the type of variables. Default
is True, but can be disabled. Since Python does not enforce types given in
annotations, setting to False gives greater compatibility with Python code.
diff --git a/runtests.py b/runtests.py
index 66c834f43..654302ecd 100755
--- a/runtests.py
+++ b/runtests.py
@@ -136,7 +136,6 @@ def get_distutils_distro(_cache=[]):
EXT_DEP_MODULES = {
'tag:numpy': 'numpy',
- 'tag:numpy_old': 'numpy',
'tag:pythran': 'pythran',
'tag:setuptools': 'setuptools.sandbox',
'tag:asyncio': 'asyncio',
@@ -256,10 +255,6 @@ def update_linetrace_extension(ext):
return ext
-def update_old_numpy_extension(ext):
- update_numpy_extension(ext, set_api17_macro=False)
-
-
def update_numpy_extension(ext, set_api17_macro=True):
import numpy
from numpy.distutils.misc_util import get_info
@@ -400,7 +395,6 @@ EXCLUDE_EXT = object()
EXT_EXTRAS = {
'tag:numpy' : update_numpy_extension,
- 'tag:numpy_old' : update_old_numpy_extension,
'tag:openmp': update_openmp_extension,
'tag:cpp11': update_cpp11_extension,
'tag:trace' : update_linetrace_extension,
@@ -1734,7 +1728,7 @@ class EndToEndTest(unittest.TestCase):
def setUp(self):
from Cython.TestUtils import unpack_source_tree
- _, self.commands = unpack_source_tree(self.treefile, self.workdir)
+ _, self.commands = unpack_source_tree(self.treefile, self.workdir, self.cython_root)
self.old_dir = os.getcwd()
os.chdir(self.workdir)
if self.workdir not in sys.path:
@@ -1759,10 +1753,6 @@ class EndToEndTest(unittest.TestCase):
def runTest(self):
self.success = False
- commands = (self.commands
- .replace("CYTHONIZE", "PYTHON %s" % os.path.join(self.cython_root, 'cythonize.py'))
- .replace("CYTHON", "PYTHON %s" % os.path.join(self.cython_root, 'cython.py'))
- .replace("PYTHON", sys.executable))
old_path = os.environ.get('PYTHONPATH')
env = dict(os.environ)
new_path = self.cython_syspath
@@ -1772,21 +1762,15 @@ class EndToEndTest(unittest.TestCase):
cmd = []
out = []
err = []
- for command_no, command in enumerate(filter(None, commands.splitlines()), 1):
+ for command_no, command in enumerate(self.commands, 1):
with self.stats.time('%s(%d)' % (self.name, command_no), 'c',
- 'etoe-build' if ' setup.py ' in command else 'etoe-run'):
+ 'etoe-build' if 'setup.py' in command else 'etoe-run'):
if self.capture:
- p = subprocess.Popen(command,
- stderr=subprocess.PIPE,
- stdout=subprocess.PIPE,
- shell=True,
- env=env)
+ p = subprocess.Popen(command, stderr=subprocess.PIPE, stdout=subprocess.PIPE, env=env)
_out, _err = p.communicate()
res = p.returncode
else:
- p = subprocess.call(command,
- shell=True,
- env=env)
+ p = subprocess.call(command, env=env)
_out, _err = b'', b''
res = p
cmd.append(command)
@@ -1842,6 +1826,10 @@ class EmbedTest(unittest.TestCase):
except OSError:
pass
+def load_listfile(filename):
+ # just re-use the FileListExclude implementation
+ fle = FileListExcluder(filename)
+ return list(fle.excludes)
class MissingDependencyExcluder(object):
def __init__(self, deps):
@@ -1890,8 +1878,7 @@ class FileListExcluder(object):
self.excludes[line.split()[0]] = True
def __call__(self, testname, tags=None):
- exclude = (testname in self.excludes
- or testname.split('.')[-1] in self.excludes)
+ exclude = any(string_selector(ex)(testname) for ex in self.excludes)
if exclude and self.verbose:
print("Excluding %s because it's listed in %s"
% (testname, self._list_file))
@@ -2084,6 +2071,9 @@ def main():
parser.add_option("-x", "--exclude", dest="exclude",
action="append", metavar="PATTERN",
help="exclude tests matching the PATTERN")
+ parser.add_option("--listfile", dest="listfile",
+ action="append",
+ help="specify a file containing a list of tests to run")
parser.add_option("-j", "--shard_count", dest="shard_count", metavar="N",
type=int, default=1,
help="shard this run into several parallel runs")
@@ -2177,6 +2167,10 @@ def main():
if options.xml_output_dir:
shutil.rmtree(options.xml_output_dir, ignore_errors=True)
+ if options.listfile:
+ for listfile in options.listfile:
+ cmd_args.extend(load_listfile(listfile))
+
if options.capture:
keep_alive_interval = 10
else:
@@ -2315,6 +2309,16 @@ def runtests_callback(args):
def runtests(options, cmd_args, coverage=None):
+ # faulthandler should be able to provide a limited traceback
+ # in the event of a segmentation fault. Hopefully better than Travis
+ # just keeping running until timeout. Only available on Python 3.3+
+ try:
+ import faulthandler
+ except ImportError:
+ pass # OK - not essential
+ else:
+ faulthandler.enable()
+
WITH_CYTHON = options.with_cython
ROOTDIR = os.path.abspath(options.root_dir)
@@ -2372,7 +2376,7 @@ def runtests(options, cmd_args, coverage=None):
if options.limited_api:
CFLAGS.append("-DCYTHON_LIMITED_API=1")
-
+ CFLAGS.append('-Wno-unused-function')
if xml_output_dir and options.fork:
# doesn't currently work together
@@ -2432,6 +2436,9 @@ def runtests(options, cmd_args, coverage=None):
bug_files = [
('bugs.txt', True),
('pypy_bugs.txt', IS_PYPY),
+ ('pypy2_bugs.txt', IS_PYPY and IS_PY2),
+ ('pypy_crash_bugs.txt', IS_PYPY),
+ ('pypy_implementation_detail_bugs.txt', IS_PYPY),
('limited_api_bugs.txt', options.limited_api),
('windows_bugs.txt', sys.platform == 'win32'),
('cygwin_bugs.txt', sys.platform == 'cygwin')
diff --git a/setup.cfg b/setup.cfg
index 6172638ad..b9196fadb 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -22,4 +22,7 @@ concurrency = multiprocessing,thread
include = Cython/*
source = Cython
omit = Test*
-
+
+[bdist_wheel]
+universal = 1
+
diff --git a/setup.py b/setup.py
index ace5524e3..5ee94043f 100755
--- a/setup.py
+++ b/setup.py
@@ -268,6 +268,7 @@ setup(
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Programming Language :: C",
diff --git a/tests/build/dotted.filename.modules.pxd b/tests/build/dotted.filename.modules.pxd
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/tests/build/dotted.filename.modules.pxd
diff --git a/tests/build/dotted.filename.modules.pyx b/tests/build/dotted.filename.modules.pyx
new file mode 100644
index 000000000..681cbf6c8
--- /dev/null
+++ b/tests/build/dotted.filename.modules.pyx
@@ -0,0 +1,7 @@
+# mode: compile
+# tag: warnings
+
+_WARNINGS="""
+1:0: Dotted filenames ('dotted.filename.modules.pxd') are deprecated. Please use the normal Python package directory layout.
+1:0: Dotted filenames ('dotted.filename.modules.pyx') are deprecated. Please use the normal Python package directory layout.
+"""
diff --git a/tests/limited_api_bugs.txt b/tests/limited_api_bugs.txt
index dd779ac6d..60baf6eb4 100644
--- a/tests/limited_api_bugs.txt
+++ b/tests/limited_api_bugs.txt
@@ -1,25 +1,8 @@
# This file contains tests corresponding to unresolved bugs using CPython's
# Limited API which will be skipped in the normal testing run.
-calc_pi_2
-calc_pi_3
-calc_pi_4
-clone
convolve2
-cpdef_sin
dict_animal
-integrate
-landscaping
-mymodule
not_none
-overhead
-profile
-profile_2
queue3
-resize
-safe_usage
-shrubbery_2
-sin_of_square
test_queue
-unsafe_usage
-wave_function
diff --git a/tests/pypy2_bugs.txt b/tests/pypy2_bugs.txt
new file mode 100644
index 000000000..9fd6d84f4
--- /dev/null
+++ b/tests/pypy2_bugs.txt
@@ -0,0 +1,25 @@
+# Specific bugs that only apply to pypy2
+
+build.cythonize_script
+build.cythonize_script_package
+run.initial_file_path
+run.reduce_pickle
+run.final_in_pxd
+run.cdef_multiple_inheritance
+run.cdef_multiple_inheritance_nodict
+run.extstarargs
+run.cpython_capi
+run.isnot
+
+# pypy 2 seems to be preferring .py files to .so files
+# https://foss.heptapod.net/pypy/pypy/issues/3185
+run.language_level
+run.pure_pxd
+
+# Silly error with doctest matching slightly different string outputs rather than
+# an actual bug but one I can't easily resolve
+run.with_gil
+
+
+# looks like a "when does the GC run?" issue - slightly surprised it's OK on pypy3
+memoryview.numpy_memoryview
diff --git a/tests/pypy_bugs.txt b/tests/pypy_bugs.txt
index 4152fd63a..5025748fa 100644
--- a/tests/pypy_bugs.txt
+++ b/tests/pypy_bugs.txt
@@ -4,15 +4,58 @@
broken_exception
bufaccess
-memoryview
-memslice
+memoryview.memoryview$
sequential_parallel
yield_from_pep380
memoryview_inplace_division
+run.unicodemethods
+run.unicode_imports
+run.tp_new
+run.test_fstring
+run.test_exceptions
+run.test_dictviews
+run.str_subclass_kwargs
+run.special_method_docstrings
+run.slice_ptr
+compile.min_async
+run.cython_includes
+run.pyarray
+run.test_unicode
+run.__getattribute__
+run.__getattribute_subclasses__
+run.__debug__
+run.array_cimport
+run.builtin_abs
+run.builtincomplex
+run.cdef_multiple_inheritance_errors
+run.cdivision_CEP_516
+run.cyfunction
+run.final_cdef_class
+run.index
+run.pyclass_special_methods
+run.reimport_from_package
+run.reimport_from_subinterpreter
+pkg.cimportfrom
+embedded
+TestCyCache
+run.ext_auto_richcmp
+run.coverage_cmd
+run.coverage_cmd_src_layout
+run.coverage_installed_pkg
+run.coverage_api
+run.coverage_nogil
+
+# very little coroutine-related seems to work
+run.test_asyncgen
+run.test_coroutines_pep492
+run.async_iter_pep492
+run.embedsignatures
+run.py35_asyncio_async_def
+run.asyncio_generators
+
# gc issue?
-memoryview_in_subclasses
external_ref_reassignment
run.exttype_dealloc
@@ -20,20 +63,13 @@ run.exttype_dealloc
run.special_methods_T561
run.special_methods_T561_py2
-# tests for things that don't exist in cpyext
-compile.pylong
-run.datetime_pxd
-run.datetime_cimport
-run.datetime_members
-run.extern_builtins_T258
-run.line_trace
-run.line_profile_test
-run.pstats_profile_test
-run.longintrepr
-
-# refcounting-specific tests
-double_dealloc_T796
-run.exceptionrefcount
-run.capiimpl
-run.refcount_in_meth
-
+# looks to be fixed in PyPy 7.3.0
+# TODO - remove when Travis updates
+run.py_unicode_strings
+run.unicodeliterals
+run.unicode_identifiers
+run.unicode_identifiers_import
+errors.unicode_identifiers_e4
+run.tracebacks
+run.fstring
+run.unicode_identifiers_normalization
diff --git a/tests/pypy_crash_bugs.txt b/tests/pypy_crash_bugs.txt
new file mode 100644
index 000000000..779603146
--- /dev/null
+++ b/tests/pypy_crash_bugs.txt
@@ -0,0 +1,13 @@
+# Bugs that causes hard crashes that we certainly don't
+# want to run because it will break the testsuite
+
+# segfault
+run.fastcall
+memslice
+
+# """Fatal RPython error: NotImplementedError
+# Aborted (core dumped)"""
+run.py35_pep492_interop
+
+# gc issue?
+memoryview_in_subclasses
diff --git a/tests/pypy_implementation_detail_bugs.txt b/tests/pypy_implementation_detail_bugs.txt
new file mode 100644
index 000000000..9849b815a
--- /dev/null
+++ b/tests/pypy_implementation_detail_bugs.txt
@@ -0,0 +1,45 @@
+# PyPy "bugs" that are probably implementation differences from
+# CPython rather than actual bugs. Therefore they aren't targets
+# to be fixed (but there *may* be other details in the testfile
+# that should be tested on PyPy?)
+
+run.starargs
+
+# refcounting-specific tests
+double_dealloc_T796
+run.exceptionrefcount
+run.capiimpl
+run.refcount_in_meth
+# Ideally just disable the reference-counting tests on PyPy?
+run.fused_types
+run.generator_frame_cycle
+run.generators_in_refcycles
+run.generators_py
+run.parallel
+
+# "sys.getsizeof(object, default) will always return default on PyPy, and
+# raise a TypeError if default is not provided."
+buildenv
+
+# tests for things that don't exist in cpyext
+compile.pylong
+run.datetime_pxd
+run.datetime_cimport
+run.datetime_members
+run.extern_builtins_T258
+run.line_trace
+run.line_profile_test
+run.pstats_profile_test
+run.longintrepr
+
+# tests probably rely on immediate GC (although maybe the tests could be tweaked so
+# only these bits don't run in PyPy?)
+buffers.buffer
+buffers.userbuffer
+memoryview.cythonarray
+memoryview.memoryview_pep489_typing
+run.cpp_classes
+run.cpp_classes_def
+
+# missing pypy feature?
+matrix_multiplier
diff --git a/tests/run/always_allow_keywords_T295.pyx b/tests/run/always_allow_keywords_T295.pyx
index 132489b6a..694839fcf 100644
--- a/tests/run/always_allow_keywords_T295.pyx
+++ b/tests/run/always_allow_keywords_T295.pyx
@@ -2,15 +2,23 @@
cimport cython
+def assert_typeerror_no_keywords(func, *args, **kwds):
+ # Python 3.9 produces an slightly different error message
+ # to previous versions, so doctest isn't matching the
+ # traceback
+ try:
+ func(*args, **kwds)
+ except TypeError as e:
+ assert e.args[0].endswith(" takes no keyword arguments"), e.args[0]
+ else:
+ assert False, "call did not raise TypeError"
+
def func1(arg):
"""
>>> func1(None)
>>> func1(*[None])
- >>> func1(arg=None)
- Traceback (most recent call last):
- ...
- TypeError: func1() takes no keyword arguments
+ >>> assert_typeerror_no_keywords(func1, arg=None)
"""
pass
@@ -19,10 +27,7 @@ def func2(arg):
"""
>>> func2(None)
>>> func2(*[None])
- >>> func2(arg=None)
- Traceback (most recent call last):
- ...
- TypeError: func2() takes no keyword arguments
+ >>> assert_typeerror_no_keywords(func2, arg=None)
"""
pass
@@ -39,16 +44,10 @@ cdef class A:
"""
>>> A().meth1(None)
>>> A().meth1(*[None])
- >>> A().meth1(arg=None)
- Traceback (most recent call last):
- ...
- TypeError: meth1() takes no keyword arguments
+ >>> assert_typeerror_no_keywords(A().meth1, arg=None)
>>> A().meth2(None)
>>> A().meth2(*[None])
- >>> A().meth2(arg=None)
- Traceback (most recent call last):
- ...
- TypeError: meth2() takes no keyword arguments
+ >>> assert_typeerror_no_keywords(A().meth2, arg=None)
>>> A().meth3(None)
>>> A().meth3(*[None])
>>> A().meth3(arg=None)
diff --git a/tests/run/async_def.pyx b/tests/run/async_def.pyx
new file mode 100644
index 000000000..ac0b8a516
--- /dev/null
+++ b/tests/run/async_def.pyx
@@ -0,0 +1,35 @@
+# cython: language_level=3, binding=True
+# mode: run
+# tag: pep492, await, gh3337
+
+"""
+Cython specific tests in addition to "test_coroutines_pep492.pyx"
+(which is copied from CPython).
+"""
+
+import sys
+
+
+def run_async(coro):
+ #assert coro.__class__ is types.GeneratorType
+ assert coro.__class__.__name__ in ('coroutine', '_GeneratorWrapper'), coro.__class__.__name__
+
+ buffer = []
+ result = None
+ while True:
+ try:
+ buffer.append(coro.send(None))
+ except StopIteration as ex:
+ result = ex.value if sys.version_info >= (3, 5) else ex.args[0] if ex.args else None
+ break
+ return buffer, result
+
+
+async def test_async_temp_gh3337(x, y):
+ """
+ >>> run_async(test_async_temp_gh3337(2, 3))
+ ([], -1)
+ >>> run_async(test_async_temp_gh3337(3, 2))
+ ([], 0)
+ """
+ return min(x - y, 0)
diff --git a/tests/run/callargs.pyx b/tests/run/callargs.pyx
index 05f3f639b..c87f57962 100644
--- a/tests/run/callargs.pyx
+++ b/tests/run/callargs.pyx
@@ -168,15 +168,15 @@ def test_int_kwargs(f):
"""
>>> test_int_kwargs(e) # doctest: +ELLIPSIS
Traceback (most recent call last):
- TypeError: ...keywords must be strings
+ TypeError: ...keywords must be strings...
>>> test_int_kwargs(f) # doctest: +ELLIPSIS
Traceback (most recent call last):
- TypeError: ...keywords must be strings
+ TypeError: ...keywords must be strings...
>>> test_int_kwargs(g) # doctest: +ELLIPSIS
Traceback (most recent call last):
- TypeError: ...keywords must be strings
+ TypeError: ...keywords must be strings...
>>> test_int_kwargs(h) # doctest: +ELLIPSIS
Traceback (most recent call last):
- TypeError: ...keywords must be strings
+ TypeError: ...keywords must be strings...
"""
f(a=1,b=2,c=3, **{10:20,30:40})
diff --git a/tests/run/cdivision_CEP_516.pyx b/tests/run/cdivision_CEP_516.pyx
index c8b24a0e1..fbd2def3a 100644
--- a/tests/run/cdivision_CEP_516.pyx
+++ b/tests/run/cdivision_CEP_516.pyx
@@ -27,6 +27,9 @@ True
>>> [test_cdiv_cmod(a, b) for a, b in v]
[(1, 7), (-1, -7), (1, -7), (-1, 7)]
+>>> [test_cdiv_cmod(a, b) for a, b in [(4, -4), (4, -2), (4, -1)]]
+[(-1, 0), (-2, 0), (-4, 0)]
+
>>> all([mod_int_py(a,b) == a % b for a in range(-10, 10) for b in range(-10, 10) if b != 0])
True
>>> all([div_int_py(a,b) == a // b for a in range(-10, 10) for b in range(-10, 10) if b != 0])
diff --git a/tests/run/cpp_move.pyx b/tests/run/cpp_move.pyx
new file mode 100644
index 000000000..d93afe28d
--- /dev/null
+++ b/tests/run/cpp_move.pyx
@@ -0,0 +1,34 @@
+# mode: run
+# tag: cpp, werror, cpp11
+
+from libcpp cimport nullptr
+from libcpp.memory cimport shared_ptr, make_shared
+from libcpp.utility cimport move
+from cython.operator cimport dereference
+
+cdef extern from *:
+ """
+ #include <string>
+
+ template<typename T> const char* move_helper(T&) { return "lvalue-ref"; }
+ template<typename T> const char* move_helper(T&&) { return "rvalue-ref"; }
+ """
+ const char* move_helper[T](T)
+
+def test_move_assignment():
+ """
+ >>> test_move_assignment()
+ """
+ cdef shared_ptr[int] p1, p2
+ p1 = make_shared[int](1337)
+ p2 = move(p1)
+ assert p1 == nullptr
+ assert dereference(p2) == 1337
+
+def test_move_func_call():
+ """
+ >>> test_move_func_call()
+ """
+ cdef shared_ptr[int] p
+ assert move_helper(p) == b'lvalue-ref'
+ assert move_helper(move(p)) == b'rvalue-ref'
diff --git a/tests/run/cpp_operator_exc_handling.pyx b/tests/run/cpp_operator_exc_handling.pyx
index 2773c82e8..381870c22 100644
--- a/tests/run/cpp_operator_exc_handling.pyx
+++ b/tests/run/cpp_operator_exc_handling.pyx
@@ -42,6 +42,10 @@ cdef extern from "cpp_operator_exc_handling_helper.hpp" nogil:
wrapped_int &operator=(const wrapped_int &other) except +ArithmeticError
wrapped_int &operator=(const long long &vao) except +
+ cdef cppclass second_call_is_different:
+ second_call_is_different()
+ bool operator<(const second_call_is_different&) except +
+
def assert_raised(f, *args, **kwargs):
err = kwargs.get('err', None)
@@ -268,3 +272,13 @@ def test_operator_exception_handling():
assert_raised(separate_exceptions, 4, 1, 1, 1, 3, err=ArithmeticError)
assert_raised(call_temp_separation, 2, 1, 4, err=AttributeError)
assert_raised(call_temp_separation, 2, 4, 1, err=IndexError)
+
+def test_only_single_call():
+ """
+ Previous version of the operator handling code called the operator twice
+ (Resulting in a crash)
+ >>> test_only_single_call()
+ False
+ """
+ cdef second_call_is_different inst
+ return inst<inst
diff --git a/tests/run/cpp_operator_exc_handling_helper.hpp b/tests/run/cpp_operator_exc_handling_helper.hpp
index 4992b594a..5e41285d1 100644
--- a/tests/run/cpp_operator_exc_handling_helper.hpp
+++ b/tests/run/cpp_operator_exc_handling_helper.hpp
@@ -201,3 +201,16 @@ public:
return *this;
}
};
+
+class second_call_is_different {
+ int count;
+ public:
+ second_call_is_different(): count(0) {}
+ bool operator<(const second_call_is_different& lhs) {
+ if (count>0) {
+ return true;
+ }
+ ++count;
+ return false;
+ }
+};
diff --git a/tests/run/cpp_stl_conversion.pyx b/tests/run/cpp_stl_conversion.pyx
index 578915a0b..5278c677a 100644
--- a/tests/run/cpp_stl_conversion.pyx
+++ b/tests/run/cpp_stl_conversion.pyx
@@ -143,10 +143,12 @@ def test_typedef_vector(o):
Traceback (most recent call last):
...
OverflowError: ...
+
+ "TypeError: an integer is required" on CPython
>>> test_typedef_vector([1, 2, None]) #doctest: +ELLIPSIS
Traceback (most recent call last):
...
- TypeError: an integer is required
+ TypeError: ...int...
"""
cdef vector[my_int] v = o
return v
diff --git a/tests/run/cpp_stl_numeric_ops.pyx b/tests/run/cpp_stl_numeric_ops.pyx
new file mode 100644
index 000000000..52f3c58ec
--- /dev/null
+++ b/tests/run/cpp_stl_numeric_ops.pyx
@@ -0,0 +1,147 @@
+# mode: run
+# tag: cpp, werror, cpp11
+
+from libcpp.numeric cimport inner_product, iota, accumulate, adjacent_difference, partial_sum
+from libcpp.vector cimport vector
+from libcpp cimport bool
+
+# Subtracts two integers.
+cdef int subtract_integers(int lhs, int rhs):
+ return lhs - rhs
+
+# Adds two integers.
+cdef int add_integers(int lhs, int rhs):
+ return lhs + rhs
+
+# Multiplies two integers.
+cdef int multiply_integers(int lhs, int rhs):
+ return lhs * rhs
+
+# Determines equality for two integers.
+# If lhs == rhs, returns true. Returns false otherwise.
+cdef bool is_equal(int lhs, int rhs):
+ return lhs == rhs
+
+def test_inner_product(vector[int] v1, vector[int] v2, int init):
+ """
+ Test inner_product with integer values.
+ >>> test_inner_product([1, 2, 3], [1, 2, 3], 1)
+ 15
+ """
+ return inner_product(v1.begin(), v1.end(), v2.begin(), init)
+
+
+def test_inner_product_with_zero(vector[int] v1, vector[int] v2, int init):
+ """
+ Test inner_product with a zero value in the container.
+ >>> test_inner_product_with_zero([1, 2, 0], [1, 1, 1], 0)
+ 3
+ """
+ return inner_product(v1.begin(), v1.end(), v2.begin(), init)
+
+def test_inner_product_with_bin_op(vector[int] v1, vector[int] v2, int init):
+ """
+ Test inner_product with two binary operations. In this case,
+ Looks at number of pairwise matches between v1 and v2.
+ [5, 1, 2, 3, 4]
+ [5, 4, 2, 3, 1]
+ There are 3 matches (5, 2, 3). So, 1 + 1 + 1 = 3.
+
+ >>> test_inner_product_with_bin_op([5, 1, 2, 3, 4], [5, 4, 2, 3, 1], 0)
+ 3
+ """
+ return inner_product(v1.begin(), v1.end(), v2.begin(), init, add_integers, is_equal)
+
+def test_iota(vector[int] v, int value):
+ """
+ Test iota with beginning value of 0.
+ >>> test_iota(range(6), 0)
+ [0, 1, 2, 3, 4, 5]
+ """
+ iota(v.begin(), v.end(), value)
+ return v
+
+def test_iota_negative_init(vector[int] v, int value):
+ """
+ Test iota with a negative beginning value.
+ >>> test_iota_negative_init(range(7), -4)
+ [-4, -3, -2, -1, 0, 1, 2]
+ """
+ iota(v.begin(), v.end(), value)
+ return v
+
+def test_accumulate(vector[int] v, int init):
+ """
+ Test accumulate.
+ 0 + 1 = 1
+ 1 + 2 = 3
+ 3 + 3 = 6
+ >>> test_accumulate([1, 2, 3], 0)
+ 6
+ """
+ return accumulate(v.begin(), v.end(), init)
+
+def test_accumulate_with_subtraction(vector[int] v, int init):
+ """
+ Test accumulate with subtraction. Note that accumulate is a fold-left operation.
+ 0 - 1 = -1
+ -1 - 2 = -3
+ -3 - 3 = -6
+ >>> test_accumulate_with_subtraction([1, 2, 3], 0)
+ -6
+ """
+ return accumulate(v.begin(), v.end(), init, subtract_integers)
+
+def test_adjacent_difference(vector[int] v):
+ """
+ Test adjacent_difference with integer values.
+ 2 - 0, -> 2
+ 4 - 2, -> 2
+ 6 - 4, -> 2
+ 8 - 6, -> 2
+ 10 - 8, -> 2
+ 12 - 10 -> 2
+ >>> test_adjacent_difference([2, 4, 6, 8, 10, 12])
+ [2, 2, 2, 2, 2, 2]
+ """
+ adjacent_difference(v.begin(), v.end(), v.begin())
+ return v
+
+def test_adjacent_difference_with_bin_op(vector[int] v):
+ """
+ Test adjacent_difference with a binary operation.
+ 0 + 1 -> 1
+ 1 + 2 -> 3
+ 2 + 4 -> 6
+ 4 + 5 -> 9
+ 5 + 6 -> 11
+ >>> test_adjacent_difference_with_bin_op([1, 2, 4, 5, 6])
+ [1, 3, 6, 9, 11]
+ """
+ adjacent_difference(v.begin(), v.end(), v.begin(), add_integers)
+ return v
+
+def test_partial_sum(vector[int] v):
+ """
+ Test partial_sum with integer values.
+ 2 + 0 -> 2
+ 2 + 2 -> 4
+ 4 + 2 -> 6
+ 6 + 2 -> 8
+ 8 + 2 -> 10
+ 10 + 2 -> 12
+ >>> test_partial_sum([2, 2, 2, 2, 2, 2])
+ [2, 4, 6, 8, 10, 12]
+ """
+ partial_sum(v.begin(), v.end(), v.begin())
+ return v
+
+def test_partial_sum_with_bin_op(vector[int] v):
+ """
+ Test partial_sum with a binary operation.
+ Using multiply_integers, partial_sum will calculate the first 5 powers of 2.
+ >>> test_partial_sum_with_bin_op([2, 2, 2, 2, 2])
+ [2, 4, 8, 16, 32]
+ """
+ partial_sum(v.begin(), v.end(), v.begin(), multiply_integers)
+ return v
diff --git a/tests/run/cython_includes.pyx b/tests/run/cython_includes.pyx
index 2019c0922..b1eb18612 100644
--- a/tests/run/cython_includes.pyx
+++ b/tests/run/cython_includes.pyx
@@ -18,6 +18,7 @@ cimport cpython.dict
cimport cpython.exc
cimport cpython.float
cimport cpython.function
+cimport cpython.genobject
cimport cpython.getargs
cimport cpython.instance
cimport cpython.int
diff --git a/tests/run/fused_def.pyx b/tests/run/fused_def.pyx
index 9f30142f3..f0e1be4c8 100644
--- a/tests/run/fused_def.pyx
+++ b/tests/run/fused_def.pyx
@@ -7,6 +7,9 @@ Test Python def functions without extern types
cy = __import__("cython")
cimport cython
+cdef extern from *:
+ int __Pyx_CyFunction_Check(object)
+
cdef class Base(object):
def __repr__(self):
return type(self).__name__
@@ -128,6 +131,16 @@ def opt_func(fused_t obj, cython.floating myf = 1.2, cython.integral myi = 7):
print cython.typeof(obj), cython.typeof(myf), cython.typeof(myi)
print obj, "%.2f" % myf, myi, "%.2f" % f, i
+def run_cyfunction_check():
+ """
+ tp_base of the fused function was being set incorrectly meaning
+ it wasn't being identified as a CyFunction
+ >>> run_cyfunction_check()
+ fused_cython_function
+ 1
+ """
+ print(type(opt_func).__name__)
+ print(__Pyx_CyFunction_Check(opt_func)) # should be True
def test_opt_func():
"""
@@ -405,3 +418,31 @@ def test_decorators(cython.floating arg):
>>> test_decorators.order
[3, 2, 1]
"""
+
+@cython.binding(True)
+def bind_me(self, cython.floating a=1.):
+ return a
+
+cdef class HasBound:
+ """
+ Using default arguments of bound specialized fused functions used to cause a segfault
+ https://github.com/cython/cython/issues/3370
+ >>> inst = HasBound()
+ >>> inst.func()
+ 1.0
+ >>> inst.func(2)
+ 2.0
+ >>> inst.func_fused()
+ 1.0
+ >>> inst.func_fused(2.)
+ 2.0
+ >>> bind_me.__defaults__
+ (1.0,)
+ >>> inst.func.__defaults__
+ (1.0,)
+ >>> inst.func_fused.__defaults__
+ (1.0,)
+ """
+ func = bind_me[float]
+
+ func_fused = bind_me
diff --git a/tests/run/if_else_expr.pyx b/tests/run/if_else_expr.pyx
index df4ae6c80..41657ec5a 100644
--- a/tests/run/if_else_expr.pyx
+++ b/tests/run/if_else_expr.pyx
@@ -55,3 +55,15 @@ def test_syntax():
y = 0 if 1.0else 1
z = 0 if 1.else 1
return x, y, z
+
+
+from libc cimport math
+
+def test_cfunc_ptrs(double x, bint round_down):
+ """
+ >>> test_cfunc_ptrs(2.5, round_down=True)
+ 2.0
+ >>> test_cfunc_ptrs(2.5, round_down=False)
+ 3.0
+ """
+ return (math.floor if round_down else math.ceil)(x)
diff --git a/tests/run/importfrom.pyx b/tests/run/importfrom.pyx
index 52bc67611..a3e5de78d 100644
--- a/tests/run/importfrom.pyx
+++ b/tests/run/importfrom.pyx
@@ -68,6 +68,10 @@ def typed_imports():
try:
from sys import version_info as maxunicode
except TypeError, e:
+ if getattr(sys, "pypy_version_info", None):
+ # translate message
+ if e.args[0].startswith("int() argument must be"):
+ e = "an integer is required"
print(e)
try:
diff --git a/tests/run/kwargproblems.pyx b/tests/run/kwargproblems.pyx
index 88e3ac53a..7984fbb08 100644
--- a/tests/run/kwargproblems.pyx
+++ b/tests/run/kwargproblems.pyx
@@ -4,7 +4,7 @@ def test(**kw):
>>> d = {1 : 2}
>>> test(**d) # doctest: +ELLIPSIS
Traceback (most recent call last):
- TypeError: ...keywords must be strings
+ TypeError: ...keywords must be strings...
>>> d
{1: 2}
>>> d = {}
diff --git a/tests/run/list_pop.pyx b/tests/run/list_pop.pyx
index b1379f199..3e7c5bdf5 100644
--- a/tests/run/list_pop.pyx
+++ b/tests/run/list_pop.pyx
@@ -206,7 +206,7 @@ def crazy_pop(L):
"""
>>> crazy_pop(list(range(10))) # doctest: +ELLIPSIS
Traceback (most recent call last):
- TypeError: pop... at most ... argument...
+ TypeError: pop... argument...
>>> crazy_pop(A())
(1, 2, 3)
"""
diff --git a/tests/run/modop.pyx b/tests/run/modop.pyx
index 469746cde..65c66eaed 100644
--- a/tests/run/modop.pyx
+++ b/tests/run/modop.pyx
@@ -9,7 +9,7 @@ def modobj(obj2, obj3):
'5'
>>> modobj(1, 0) # doctest: +ELLIPSIS
Traceback (most recent call last):
- ZeroDivisionError: integer division or modulo by zero
+ ZeroDivisionError: integer... modulo by zero
"""
obj1 = obj2 % obj3
return obj1
@@ -17,9 +17,9 @@ def modobj(obj2, obj3):
def mod_10_obj(int2):
"""
- >>> mod_10_obj(0)
+ >>> mod_10_obj(0) # doctest: +ELLIPSIS
Traceback (most recent call last):
- ZeroDivisionError: integer division or modulo by zero
+ ZeroDivisionError: ... modulo by zero
>>> 10 % 1
0
>>> mod_10_obj(1)
diff --git a/tests/run/numpy_bufacc_T155.pyx b/tests/run/numpy_bufacc_T155.pyx
index 927eddb29..17a8525fd 100644
--- a/tests/run/numpy_bufacc_T155.pyx
+++ b/tests/run/numpy_bufacc_T155.pyx
@@ -1,5 +1,5 @@
# ticket: 155
-# tag: numpy_old
+# tag: numpy
"""
>>> myfunc()
@@ -17,4 +17,3 @@ def myfunc():
A[i, :] /= 2
return A[0,0]
-include "numpy_common.pxi"
diff --git a/tests/run/numpy_cimport.pyx b/tests/run/numpy_cimport.pyx
index 7c23d7725..8aaea8597 100644
--- a/tests/run/numpy_cimport.pyx
+++ b/tests/run/numpy_cimport.pyx
@@ -6,4 +6,3 @@
True
"""
cimport numpy as np
-include "numpy_common.pxi"
diff --git a/tests/run/numpy_common.pxi b/tests/run/numpy_common.pxi
deleted file mode 100644
index 59f020115..000000000
--- a/tests/run/numpy_common.pxi
+++ /dev/null
@@ -1,10 +0,0 @@
-# hack to avoid C compiler warnings about unused functions in the NumPy header files
-
-cdef extern from *:
- bint FALSE "0"
- void import_array()
-# void import_umath()
-
-if FALSE:
- import_array()
-# import_umath()
diff --git a/tests/run/numpy_parallel.pyx b/tests/run/numpy_parallel.pyx
index 7dcf2b27a..da1444c38 100644
--- a/tests/run/numpy_parallel.pyx
+++ b/tests/run/numpy_parallel.pyx
@@ -1,10 +1,9 @@
-# tag: numpy_old
+# tag: numpy
# tag: openmp
cimport cython
from cython.parallel import prange
cimport numpy as np
-include "numpy_common.pxi"
@cython.boundscheck(False)
diff --git a/tests/run/numpy_subarray.pyx b/tests/run/numpy_subarray.pyx
index c5d82d4b1..d032a3a01 100644
--- a/tests/run/numpy_subarray.pyx
+++ b/tests/run/numpy_subarray.pyx
@@ -1,4 +1,4 @@
-# tag: numpy_old
+# tag: numpy
cimport numpy as np
cimport cython
diff --git a/tests/run/numpy_test.pyx b/tests/run/numpy_test.pyx
index 54a243f9a..4dc89ad5c 100644
--- a/tests/run/numpy_test.pyx
+++ b/tests/run/numpy_test.pyx
@@ -1,5 +1,4 @@
-# tag: numpy_old
-# cannot be named "numpy" in order to not clash with the numpy module!
+# tag: numpy
cimport numpy as np
cimport cython
@@ -945,5 +944,3 @@ def test_broadcast_comparison(np.ndarray[double, ndim=1] a):
cdef object obj = a
return a == 0, obj == 0, a == 1, obj == 1
-
-include "numpy_common.pxi"
diff --git a/tests/run/pep448_test_extcall.pyx b/tests/run/pep448_test_extcall.pyx
index 0ba68a099..208ba4f7a 100644
--- a/tests/run/pep448_test_extcall.pyx
+++ b/tests/run/pep448_test_extcall.pyx
@@ -326,7 +326,7 @@ def errors_non_string_kwarg():
"""
>>> errors_non_string_kwarg() # doctest: +ELLIPSIS
Traceback (most recent call last):
- TypeError: ...keywords must be strings
+ TypeError: ...keywords must be strings...
"""
f(**{1:2})
@@ -463,10 +463,10 @@ def call_builtin_empty_dict():
def call_builtin_nonempty_dict():
"""
- >>> call_builtin_nonempty_dict()
+ >>> call_builtin_nonempty_dict() # doctest: +ELLIPSIS
Traceback (most recent call last):
...
- TypeError: id() takes no keyword arguments
+ TypeError: id() ... keyword argument...
"""
return id(1, **{'foo': 1})
diff --git a/tests/run/powop.pyx b/tests/run/powop.pyx
index 966336c2a..3f9fdd077 100644
--- a/tests/run/powop.pyx
+++ b/tests/run/powop.pyx
@@ -123,9 +123,9 @@ def optimised_pow2(n):
0.5
>>> optimised_pow2(0.5) == 2 ** 0.5
True
- >>> optimised_pow2('test')
+ >>> optimised_pow2('test') # doctest: +ELLIPSIS
Traceback (most recent call last):
- TypeError: unsupported operand type(s) for ** or pow(): 'int' and 'str'
+ TypeError: ...operand... **...
"""
if isinstance(n, (int, long)) and 0 <= n < 1000:
assert isinstance(2.0 ** n, float), 'float %s' % n
@@ -153,9 +153,9 @@ def optimised_pow2_inplace(n):
0.5
>>> optimised_pow2_inplace(0.5) == 2 ** 0.5
True
- >>> optimised_pow2_inplace('test')
+ >>> optimised_pow2_inplace('test') # doctest: +ELLIPSIS
Traceback (most recent call last):
- TypeError: unsupported operand type(s) for ** or pow(): 'int' and 'str'
+ TypeError: ...operand... **...
"""
x = 2
x **= n
diff --git a/tests/run/pure.pyx b/tests/run/pure.pyx
index 57a575a33..23e41c77b 100644
--- a/tests/run/pure.pyx
+++ b/tests/run/pure.pyx
@@ -25,10 +25,12 @@ def test_declare(n):
(100, 100)
>>> test_declare(100.5)
(100, 100)
- >>> test_declare(None)
+
+ # CPython: "TypeError: an integer is required"
+ >>> test_declare(None) # doctest: +ELLIPSIS
Traceback (most recent call last):
...
- TypeError: an integer is required
+ TypeError: ...int...
"""
x = cython.declare(cython.int)
y = cython.declare(cython.int, n)
diff --git a/tests/run/pure_fused.pxd b/tests/run/pure_fused.pxd
new file mode 100644
index 000000000..8517227d0
--- /dev/null
+++ b/tests/run/pure_fused.pxd
@@ -0,0 +1,6 @@
+ctypedef fused NotInPy:
+ int
+ float
+
+cdef class TestCls:
+ cpdef cpfunc(self, NotInPy arg)
diff --git a/tests/run/pure_fused.py b/tests/run/pure_fused.py
new file mode 100644
index 000000000..b6d211e0f
--- /dev/null
+++ b/tests/run/pure_fused.py
@@ -0,0 +1,60 @@
+# mode: run
+# tag: fused, pure3.0
+
+#cython: annotation_typing=True
+
+import cython
+
+InPy = cython.fused_type(cython.int, cython.float)
+
+class TestCls:
+ # although annotations as strings isn't recommended and generates a warning
+ # it does allow the test to run on more (pure) Python versions
+ def func1(self, arg: 'NotInPy'):
+ """
+ >>> TestCls().func1(1.0)
+ 'float'
+ >>> TestCls().func1(2)
+ 'int'
+ """
+ return cython.typeof(arg)
+
+ if cython.compiled:
+ @cython.locals(arg = NotInPy) # NameError in pure Python
+ def func2(self, arg):
+ """
+ >>> TestCls().func2(1.0)
+ 'float'
+ >>> TestCls().func2(2)
+ 'int'
+ """
+ return cython.typeof(arg)
+
+ def cpfunc(self, arg):
+ """
+ >>> TestCls().cpfunc(1.0)
+ 'float'
+ >>> TestCls().cpfunc(2)
+ 'int'
+ """
+ return cython.typeof(arg)
+
+ def func1_inpy(self, arg: InPy):
+ """
+ >>> TestCls().func1_inpy(1.0)
+ 'float'
+ >>> TestCls().func1_inpy(2)
+ 'int'
+ """
+ return cython.typeof(arg)
+
+ @cython.locals(arg = InPy)
+ def func2_inpy(self, arg):
+ """
+ >>> TestCls().func2_inpy(1.0)
+ 'float'
+ >>> TestCls().func2_inpy(2)
+ 'int'
+ """
+ return cython.typeof(arg)
+
diff --git a/tests/run/py_hash_t.pyx b/tests/run/py_hash_t.pyx
index ccdcd82e6..3b20584d0 100644
--- a/tests/run/py_hash_t.pyx
+++ b/tests/run/py_hash_t.pyx
@@ -25,7 +25,7 @@ def assign_py_hash_t(x):
>>> assign_py_hash_t(IntLike(1.5)) # doctest: +ELLIPSIS
Traceback (most recent call last):
...
- TypeError: __index__ ... (type float)
+ TypeError: __index__ ... (type ...float...)
"""
cdef Py_hash_t h = x
return h
diff --git a/tests/run/pyclass_dynamic_bases.pyx b/tests/run/pyclass_dynamic_bases.pyx
index f83798d09..575a33820 100644
--- a/tests/run/pyclass_dynamic_bases.pyx
+++ b/tests/run/pyclass_dynamic_bases.pyx
@@ -24,3 +24,22 @@ def cond_if_bases(x):
class PyClass(A if x else B):
p = 5
return PyClass
+
+
+def make_subclass(*bases):
+ """
+ >>> cls = make_subclass(list)
+ >>> issubclass(cls, list) or cls.__mro__
+ True
+
+ >>> class Cls(object): pass
+ >>> cls = make_subclass(Cls, list)
+ >>> issubclass(cls, list) or cls.__mro__
+ True
+ >>> issubclass(cls, Cls) or cls.__mro__
+ True
+ """
+ # GH-3338
+ class MadeClass(*bases):
+ pass
+ return MadeClass
diff --git a/tests/run/sequential_parallel.pyx b/tests/run/sequential_parallel.pyx
index d77c88561..3d8e1efff 100644
--- a/tests/run/sequential_parallel.pyx
+++ b/tests/run/sequential_parallel.pyx
@@ -378,11 +378,7 @@ def test_prange_continue():
def test_nested_break_continue():
"""
- DISABLED. For some reason this fails intermittently on jenkins, with
- the first line of output being '0 0 0 0'. The generated code looks
- awfully correct though... needs investigation
-
- >> test_nested_break_continue()
+ >>> test_nested_break_continue()
6 7 6 7
8
"""
@@ -770,3 +766,53 @@ def test_prange_in_with(int x, ctx):
with gil:
l[0] += i
return l[0]
+
+
+cdef extern from *:
+ """
+ #ifdef _OPENMP
+ #define _get_addr(_x, _idx) &_x
+ #else
+ #define _get_addr(_x, _idx) (&_x+_idx)
+ #endif
+ #define address_of_temp(store, temp, idx) store = _get_addr(temp, idx)
+ #define address_of_temp2(store, ignore, temp, idx) store = _get_addr(temp, idx)
+
+ double get_value() {
+ return 1.0;
+ }
+ """
+ void address_of_temp(...) nogil
+ void address_of_temp2(...) nogil
+ double get_value() nogil except -1.0 # will generate a temp for exception checking
+
+def test_inner_private():
+ """
+ Determines if a temp variable is private by taking its address in multiple threads
+ and seeing if they're the same (thread private variables should have different
+    addresses).
+ >>> test_inner_private()
+ ok
+ """
+ cdef double* not_parallel[2]
+ cdef double* inner_vals[2]
+ cdef double* outer_vals[2]
+ cdef Py_ssize_t n, m
+
+ for n in range(2):
+ address_of_temp(not_parallel[n], get_value(), 0)
+ assert not_parallel[0] == not_parallel[1], "Addresses should be the same since they come from the same temp"
+
+ for n in prange(2, num_threads=2, schedule='static', chunksize=1, nogil=True):
+ address_of_temp(outer_vals[n], get_value(), n)
+ for m in prange(1):
+ # second temp just ensures different numbering
+ address_of_temp2(inner_vals[n], get_value(), get_value(), n)
+
+ inner_are_the_same = inner_vals[0] == inner_vals[1]
+ outer_are_the_same = outer_vals[0] == outer_vals[1]
+
+ assert outer_are_the_same == False, "Temporary variables in outer loop should be private"
+ assert inner_are_the_same == False, "Temporary variables in inner loop should be private"
+
+ print('ok')
diff --git a/tests/run/test_coroutines_pep492.pyx b/tests/run/test_coroutines_pep492.pyx
index 51cb6c570..b274160e8 100644
--- a/tests/run/test_coroutines_pep492.pyx
+++ b/tests/run/test_coroutines_pep492.pyx
@@ -2,6 +2,11 @@
# mode: run
# tag: pep492, pep530, asyncfor, await
+###########
+# This file is a copy of the corresponding test file in CPython.
+# Please keep in sync and do not add non-upstream tests.
+###########
+
import re
import gc
import sys
diff --git a/tests/run/test_unicode_string_tests.pxi b/tests/run/test_unicode_string_tests.pxi
index 61cc6f2ac..b8fb171da 100644
--- a/tests/run/test_unicode_string_tests.pxi
+++ b/tests/run/test_unicode_string_tests.pxi
@@ -993,6 +993,14 @@ class CommonTest(BaseTest):
self.checkequal('\u019b\u1d00\u1d86\u0221\u1fb7',
'\u019b\u1d00\u1d86\u0221\u1fb7', 'capitalize')
+ def test_list_concat(self):
+ # https://github.com/cython/cython/issues/3426
+ y = []
+ y += 'ab'
+ self.assertEqual('a', y[0])
+ self.assertEqual('b', y[1])
+ self.assertEqual(['a', 'b'], y)
+
class MixinStrUnicodeUserStringTest:
# additional tests that only work for
diff --git a/tests/run/trashcan.pyx b/tests/run/trashcan.pyx
index 48c43c3d2..29331fdf6 100644
--- a/tests/run/trashcan.pyx
+++ b/tests/run/trashcan.pyx
@@ -33,7 +33,7 @@ cdef class Recurse:
cdef public attr
cdef int deallocated
- def __init__(self, x):
+ def __cinit__(self, x):
self.attr = x
def __dealloc__(self):
@@ -68,7 +68,7 @@ cdef class RecurseFreelist:
cdef public attr
cdef int deallocated
- def __init__(self, x):
+ def __cinit__(self, x):
self.attr = x
def __dealloc__(self):
@@ -118,7 +118,7 @@ cdef class Sub1(Base):
"""
cdef public attr
- def __init__(self, x):
+ def __cinit__(self, x):
self.attr = x
def __dealloc__(self):
@@ -140,7 +140,7 @@ cdef class Sub2(Middle):
"""
cdef public attr
- def __init__(self, x):
+ def __cinit__(self, x):
self.attr = x
def __dealloc__(self):
diff --git a/tests/windows_bugs.txt b/tests/windows_bugs.txt
index d082e3836..52c1b0fbc 100644
--- a/tests/windows_bugs.txt
+++ b/tests/windows_bugs.txt
@@ -29,3 +29,5 @@ lunch
# "C linkage function cannot return C++ class" (uses public C++ cdef function)
cpp_template_subclasses
+# MSVC lacks "complex.h"
+complex_numbers_cmath_T2891