summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorda-woods <dw-git@d-woods.co.uk>2022-07-16 16:50:42 +0100
committerda-woods <dw-git@d-woods.co.uk>2022-07-16 16:50:42 +0100
commit9a1aa9e9b91f1eb6dea2932060478fdfdffb3a18 (patch)
treee0846c669361b5d1784d7872ff5ee7e1d707d451
parentd2d25a25c2a5b526fba1b907138bd9dcb4476ef5 (diff)
parent1777f13461f971d064bd1644b02d92b350e6e7d1 (diff)
downloadcython-9a1aa9e9b91f1eb6dea2932060478fdfdffb3a18.tar.gz
Merge branch 'master' into parse-match
-rw-r--r--CHANGES.rst101
-rw-r--r--Cython/Compiler/Builtin.py17
-rw-r--r--Cython/Compiler/Code.pxd1
-rw-r--r--Cython/Compiler/Code.py15
-rw-r--r--Cython/Compiler/Dataclass.py13
-rw-r--r--Cython/Compiler/ExprNodes.py168
-rw-r--r--Cython/Compiler/FlowControl.pxd2
-rw-r--r--Cython/Compiler/FlowControl.py8
-rw-r--r--Cython/Compiler/Lexicon.py3
-rw-r--r--Cython/Compiler/Naming.py1
-rw-r--r--Cython/Compiler/Nodes.py180
-rw-r--r--Cython/Compiler/Optimize.py2
-rw-r--r--Cython/Compiler/Options.py1
-rw-r--r--Cython/Compiler/ParseTreeTransforms.py6
-rw-r--r--Cython/Compiler/Parsing.pxd13
-rw-r--r--Cython/Compiler/Parsing.py118
-rw-r--r--Cython/Compiler/PyrexTypes.py68
-rw-r--r--Cython/Compiler/Symtab.py94
-rw-r--r--Cython/Compiler/Tests/TestGrammar.py57
-rw-r--r--Cython/Compiler/Visitor.py8
-rw-r--r--Cython/Includes/cpython/object.pxd2
-rw-r--r--Cython/Includes/libcpp/random.pxd156
-rw-r--r--Cython/Shadow.py2
-rw-r--r--Cython/Utility/CythonFunction.c17
-rw-r--r--Cython/Utility/Exceptions.c2
-rw-r--r--Cython/Utility/ExtensionTypes.c34
-rw-r--r--Cython/Utility/FunctionArguments.c2
-rw-r--r--Cython/Utility/ImportExport.c24
-rw-r--r--Cython/Utility/MemoryView.pyx33
-rw-r--r--Cython/Utility/MemoryView_C.c2
-rw-r--r--Cython/Utility/StringTools.c2
-rw-r--r--docs/examples/userguide/extension_types/cheesy.py36
-rw-r--r--docs/examples/userguide/extension_types/cheesy.pyx36
-rw-r--r--docs/examples/userguide/extension_types/dataclass.py21
-rw-r--r--docs/examples/userguide/extension_types/dataclass.pyx3
-rw-r--r--docs/examples/userguide/extension_types/dict_animal.py12
-rw-r--r--docs/examples/userguide/extension_types/dict_animal.pyx1
-rw-r--r--docs/examples/userguide/extension_types/extendable_animal.py15
-rw-r--r--docs/examples/userguide/extension_types/extendable_animal.pyx3
-rw-r--r--docs/examples/userguide/extension_types/owned_pointer.py17
-rw-r--r--docs/examples/userguide/extension_types/owned_pointer.pyx17
-rw-r--r--docs/examples/userguide/extension_types/penguin.py14
-rw-r--r--docs/examples/userguide/extension_types/penguin.pyx14
-rw-r--r--docs/examples/userguide/extension_types/penguin2.py12
-rw-r--r--docs/examples/userguide/extension_types/penguin2.pyx12
-rw-r--r--docs/examples/userguide/extension_types/pets.py22
-rw-r--r--docs/examples/userguide/extension_types/pets.pyx22
-rw-r--r--docs/examples/userguide/extension_types/python_access.py7
-rw-r--r--docs/examples/userguide/extension_types/python_access.pyx4
-rw-r--r--docs/examples/userguide/extension_types/shrubbery.py2
-rw-r--r--docs/examples/userguide/extension_types/shrubbery.pyx2
-rw-r--r--docs/examples/userguide/extension_types/shrubbery_2.py10
-rw-r--r--docs/examples/userguide/extension_types/shrubbery_2.pyx2
-rw-r--r--docs/examples/userguide/extension_types/widen_shrubbery.py6
-rw-r--r--docs/examples/userguide/extension_types/widen_shrubbery.pyx2
-rw-r--r--docs/examples/userguide/extension_types/wrapper_class.py65
-rw-r--r--docs/examples/userguide/extension_types/wrapper_class.pyx65
-rw-r--r--docs/examples/userguide/sharing_declarations/landscaping.py7
-rw-r--r--docs/examples/userguide/sharing_declarations/lunch.py5
-rw-r--r--docs/examples/userguide/sharing_declarations/lunch.pyx1
-rw-r--r--docs/examples/userguide/sharing_declarations/restaurant.py12
-rw-r--r--docs/examples/userguide/sharing_declarations/restaurant.pyx2
-rw-r--r--docs/examples/userguide/sharing_declarations/setup_py.py4
-rw-r--r--docs/examples/userguide/sharing_declarations/setup_pyx.py (renamed from docs/examples/userguide/sharing_declarations/setup.py)0
-rw-r--r--docs/examples/userguide/sharing_declarations/shrubbing.py10
-rw-r--r--docs/examples/userguide/sharing_declarations/shrubbing.pyx3
-rw-r--r--docs/examples/userguide/sharing_declarations/spammery.py10
-rw-r--r--docs/examples/userguide/sharing_declarations/spammery.pyx3
-rw-r--r--docs/examples/userguide/sharing_declarations/volume.py2
-rw-r--r--docs/examples/userguide/special_methods/total_ordering.py13
-rw-r--r--docs/examples/userguide/special_methods/total_ordering.pyx13
-rw-r--r--docs/src/quickstart/build.rst4
-rw-r--r--docs/src/quickstart/install.rst6
-rw-r--r--docs/src/tutorial/embedding.rst7
-rw-r--r--docs/src/tutorial/pure.rst3
-rw-r--r--docs/src/userguide/extension_types.rst674
-rw-r--r--docs/src/userguide/external_C_code.rst4
-rw-r--r--docs/src/userguide/language_basics.rst2
-rw-r--r--docs/src/userguide/migrating_to_cy30.rst17
-rw-r--r--docs/src/userguide/sharing_declarations.rst155
-rw-r--r--docs/src/userguide/source_files_and_compilation.rst3
-rw-r--r--docs/src/userguide/special_methods.rst24
-rw-r--r--pyximport/_pyximport2.py606
-rw-r--r--pyximport/_pyximport3.py464
-rw-r--r--pyximport/pyximport.py607
-rwxr-xr-xruntests.py1
-rw-r--r--tests/buffers/bufaccess.pyx42
-rw-r--r--tests/compile/fromimport.pyx24
-rw-r--r--tests/compile/fromimport_star.pyx7
-rw-r--r--tests/compile/fused_redeclare_T3111.pyx12
-rw-r--r--tests/errors/builtin_type_inheritance.pyx4
-rw-r--r--tests/errors/cfuncptr.pyx36
-rw-r--r--tests/errors/cpp_object_template.pyx6
-rw-r--r--tests/errors/dataclass_e1.pyx2
-rw-r--r--tests/errors/dataclass_e5.pyx21
-rw-r--r--tests/errors/e_typing_errors.pyx59
-rw-r--r--tests/errors/e_typing_optional.py33
-rw-r--r--tests/errors/incomplete_varadic.pyx8
-rw-r--r--tests/memoryview/cythonarray.pyx36
-rw-r--r--tests/memoryview/memoryview.pyx33
-rw-r--r--tests/memoryview/memslice.pyx45
-rw-r--r--tests/pypy_bugs.txt3
-rw-r--r--tests/run/annotation_typing.pyx92
-rw-r--r--tests/run/append.pyx34
-rw-r--r--tests/run/builtin_type_inheritance_T608.pyx38
-rw-r--r--tests/run/builtin_type_inheritance_T608_py2only.pyx42
-rw-r--r--tests/run/bytearray_iter.py15
-rw-r--r--tests/run/cdef_class_dataclass.pyx16
-rw-r--r--tests/run/cdef_setitem_T284.pyx4
-rw-r--r--tests/run/cfuncptr.pyx40
-rw-r--r--tests/run/cpp_stl_random.pyx319
-rw-r--r--tests/run/decorators.pyx23
-rw-r--r--tests/run/delete.pyx18
-rw-r--r--tests/run/extern_varobject_extensions.srctree94
-rw-r--r--tests/run/funcexc_iter_T228.pyx86
-rw-r--r--tests/run/function_self.py21
-rw-r--r--tests/run/line_trace.pyx60
-rw-r--r--tests/run/pep448_extended_unpacking.pyx18
-rw-r--r--tests/run/pep526_variable_annotations.py49
-rw-r--r--tests/run/pep526_variable_annotations_cy.pyx4
-rw-r--r--tests/run/pure_cdef_class_dataclass.py4
-rw-r--r--tests/run/pure_py.py7
-rw-r--r--tests/run/test_named_expressions.py4
-rw-r--r--tests/run/tuple_constants.pyx7
124 files changed, 4209 insertions, 1393 deletions
diff --git a/CHANGES.rst b/CHANGES.rst
index 24cb4e23b..94abc418e 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -10,15 +10,20 @@ Features added
* A new decorator ``@cython.dataclasses.dataclass`` was implemented that provides
compile time dataclass generation capabilities to ``cdef`` classes (extension types).
- Patch by David Woods. (Github issue :issue:`2903`)
+ Patch by David Woods. (Github issue :issue:`2903`). ``kw_only`` dataclasses
+ added by Yury Sokov. (Github issue :issue:`4794`)
* Named expressions (PEP 572) aka. assignment expressions (aka. the walrus operator
``:=``) were implemented.
Patch by David Woods. (Github issue :issue:`2636`)
+* Cython avoids raising ``StopIteration`` in ``__next__`` methods when possible.
+ Patch by David Woods. (Github issue :issue:`3447`)
+
* Some C++ library declarations were extended and fixed.
- Patches by Max Bachmann, Till Hoffmann, Julien Jerphanion.
- (Github issues :issue:`4530`, :issue:`4528`, :issue:`4710`, :issue:`4746`, :issue:`4751`)
+ Patches by Max Bachmann, Till Hoffmann, Julien Jerphanion, Wenjun Si.
+ (Github issues :issue:`4530`, :issue:`4528`, :issue:`4710`, :issue:`4746`,
+ :issue:`4751`, :issue:`4818`, :issue:`4762`)
* The ``cythonize`` command has a new option ``-M`` to generate ``.dep`` dependency
files for the compilation unit. This can be used by external build tools to track
@@ -35,6 +40,10 @@ Features added
smaller set of Cython's own modules, which can be used to reduce the package
and install size.
+* Improvements to ``PyTypeObject`` definitions in pxd wrapping of libpython.
+ Patch by John Kirkham. (Github issue :issue:`4699`)
+
+
Bugs fixed
----------
@@ -48,12 +57,19 @@ Bugs fixed
Test patch by Kirill Smelkov. (Github issue :issue:`4737`)
* Typedefs for the ``bint`` type did not always behave like ``bint``.
- Patch by 0dminnimda. (Github issue :issue:`4660`)
+ Patch by Nathan Manville and 0dminnimda. (Github issue :issue:`4660`)
* The return type of a fused function is no longer ignored for function pointers,
since it is relevant when passing them e.g. as argument into other fused functions.
Patch by David Woods. (Github issue :issue:`4644`)
+* The ``__self__`` attribute of fused functions reports its availability correctly
+ with ``hasattr()``. Patch by David Woods.
+ (Github issue :issue:`4808`)
+
+* ``pyximport`` no longer uses the deprecated ``imp`` module.
+ Patch by Matus Valo. (Github issue :issue:`4560`)
+
* The generated C code failed to compile in CPython 3.11a4 and later.
(Github issue :issue:`4500`)
@@ -65,7 +81,22 @@ Bugs fixed
* A work-around for StacklessPython < 3.8 was disabled in Py3.8 and later.
(Github issue :issue:`4329`)
-* Includes all bug-fixes from the :ref:`0.29.30` release.
+* Improve conversion between function pointers with non-identical but
+ compatible exception specifications. Patches by David Woods.
+ (Github issues :issue:`4770`, :issue:`4689`)
+
+* Improve compatibility with forthcoming CPython 3.12 release.
+
+* Limited API C preprocessor warning is compatible with MSVC. Patch by
+ Victor Molina Garcia. (Github issue :issue:`4826`)
+
+* Some C compiler warnings were fixed.
+ Patch by mwtian. (Github issue :issue:`4831`)
+
+* The parser allowed some invalid spellings of ``...``.
+ Patch by 0dminnimda. (Github issue :issue:`4868`)
+
+* Includes all bug-fixes from the 0.29 branch up to the :ref:`0.29.31` release.
Other changes
-------------
@@ -76,9 +107,13 @@ Other changes
for users who did not expect ``None`` to be allowed as input. To allow ``None``, use
``typing.Optional`` as in ``func(x: Optional[list])``. ``None`` is also automatically
allowed when it is used as default argument, i.e. ``func(x: list = None)``.
- Note that, for backwards compatibility reasons, this does not apply when using Cython's
- C notation, as in ``func(list x)``. Here, ``None`` is still allowed, as always.
- (Github issues :issue:`3883`, :issue:`2696`)
+ ``int`` and ``float`` are now also recognised in type annotations and restrict the
+ value type at runtime. They were previously ignored.
+ Note that, for backwards compatibility reasons, the new behaviour does not apply when using
+ Cython's C notation, as in ``func(list x)``. Here, ``None`` is still allowed, as always.
+ Also, the ``annotation_typing`` directive can now be enabled and disabled more finely
+ within the module.
+ (Github issues :issue:`3883`, :issue:`2696`, :issue:`4669`, :issue:`4606`, :issue:`4886`)
* The compile-time ``DEF`` and ``IF`` statements are deprecated and generate a warning.
They should be replaced with normal constants, code generation or C macros.
@@ -87,6 +122,10 @@ Other changes
* Reusing an extension type attribute name as a method name is now an error.
Patch by 0dminnimda. (Github issue :issue:`4661`)
+* Improve compatibility between classes pickled in Cython 3.0 and 0.29.x
+ by accepting MD5, SHA-1 and SHA-256 checksums.
+ (Github issue :issue:`4680`)
+
3.0.0 alpha 10 (2022-01-06)
===========================
@@ -977,6 +1016,42 @@ Other changes
.. _`PEP-563`: https://www.python.org/dev/peps/pep-0563
.. _`PEP-479`: https://www.python.org/dev/peps/pep-0479
+.. _0.29.31:
+
+0.29.31 (2022-??-??)
+====================
+
+Bugs fixed
+----------
+
+* Use ``importlib.util.find_spec()`` instead of the deprecated ``importlib.find_loader()``
+ function when setting up the package path at import-time. Patch by Matti Picus.
+ (Github issue :issue:`4764`)
+
+* Require the C compiler to support the two-arg form of ``va_start`` on Python 3.10
+ and higher. Patch by Thomas Caswell.
+ (Github issue :issue:`4820`)
+
+* Make ``fused_type`` subscriptable in Shadow.py. Patch by Pfebrer.
+ (Github issue :issue:`4842`)
+
+* Fix the incorrect code generation of the target type in ``bytearray`` loops.
+ Patch by Kenrick Everett.
+ (Github issue :issue:`4108`)
+
+* Silence some GCC ``-Wconversion`` warnings in C utility code.
+ Patch by Lisandro Dalcin.
+ (Github issue :issue:`4854`)
+
+* Stop tuple multiplication being ignored in expressions such as ``[*(1,) * 2]``.
+ Patch by David Woods.
+ (Github issue :issue:`4864`)
+
+* Ensure that object buffers (e.g. ``ndarray[object, ndim=1]``) containing
+ ``NULL`` pointers are safe to use, returning ``None`` instead of the ``NULL``
+ pointer. Patch by Sebastian Berg.
+ (Github issue :issue:`4859`)
+
.. _0.29.30:
@@ -988,7 +1063,7 @@ Bugs fixed
* The GIL handling changes in 0.29.29 introduced a regression where
objects could be deallocated without holding the GIL.
- (Github issue :issue`4796`)
+ (Github issue :issue:`4796`)
.. _0.29.29:
@@ -1002,7 +1077,7 @@ Features added
* Avoid acquiring the GIL at the end of nogil functions.
This change was backported in order to avoid generating wrong C code
that would trigger C compiler warnings with tracing support enabled.
- Backport by Oleksandr Pavlyk. (Github issue :issue`4637`)
+ Backport by Oleksandr Pavlyk. (Github issue :issue:`4637`)
Bugs fixed
----------
@@ -1018,15 +1093,15 @@ Bugs fixed
* Cython now correctly generates Python methods for both the provided regular and
reversed special numeric methods of extension types.
- Patch by David Woods. (Github issue :issue`4750`)
+ Patch by David Woods. (Github issue :issue:`4750`)
* Calling unbound extension type methods without arguments could raise an
``IndexError`` instead of a ``TypeError``.
- Patch by David Woods. (Github issue :issue`4779`)
+ Patch by David Woods. (Github issue :issue:`4779`)
* Calling unbound ``.__contains__()`` super class methods on some builtin base
types could trigger an infinite recursion.
- Patch by David Woods. (Github issue :issue`4785`)
+ Patch by David Woods. (Github issue :issue:`4785`)
* The C union type in pure Python mode mishandled some field names.
Patch by Jordan Brière. (Github issue :issue:`4727`)
diff --git a/Cython/Compiler/Builtin.py b/Cython/Compiler/Builtin.py
index 577c20775..46a4dbb5b 100644
--- a/Cython/Compiler/Builtin.py
+++ b/Cython/Compiler/Builtin.py
@@ -444,6 +444,16 @@ def init_builtins():
bool_type = builtin_scope.lookup('bool').type
complex_type = builtin_scope.lookup('complex').type
+ # Set up type inference links between equivalent Python/C types
+ bool_type.equivalent_type = PyrexTypes.c_bint_type
+ PyrexTypes.c_bint_type.equivalent_type = bool_type
+
+ float_type.equivalent_type = PyrexTypes.c_double_type
+ PyrexTypes.c_double_type.equivalent_type = float_type
+
+ complex_type.equivalent_type = PyrexTypes.c_double_complex_type
+ PyrexTypes.c_double_complex_type.equivalent_type = complex_type
+
init_builtins()
@@ -466,21 +476,20 @@ def get_known_standard_library_module_scope(module_name):
('Set', set_type),
('FrozenSet', frozenset_type),
]:
- name = EncodedString(name)
if name == "Tuple":
indexed_type = PyrexTypes.PythonTupleTypeConstructor(EncodedString("typing."+name), tp)
else:
indexed_type = PyrexTypes.PythonTypeConstructor(EncodedString("typing."+name), tp)
- entry = mod.declare_type(name, indexed_type, pos = None)
+ mod.declare_type(EncodedString(name), indexed_type, pos = None)
for name in ['ClassVar', 'Optional']:
indexed_type = PyrexTypes.SpecialPythonTypeConstructor(EncodedString("typing."+name))
- entry = mod.declare_type(name, indexed_type, pos = None)
+ mod.declare_type(name, indexed_type, pos = None)
_known_module_scopes[module_name] = mod
elif module_name == "dataclasses":
mod = ModuleScope(module_name, None, None)
indexed_type = PyrexTypes.SpecialPythonTypeConstructor(EncodedString("dataclasses.InitVar"))
- entry = mod.declare_type(EncodedString("InitVar"), indexed_type, pos = None)
+ mod.declare_type(EncodedString("InitVar"), indexed_type, pos = None)
_known_module_scopes[module_name] = mod
return mod
diff --git a/Cython/Compiler/Code.pxd b/Cython/Compiler/Code.pxd
index e17e0fb1d..59779f8bc 100644
--- a/Cython/Compiler/Code.pxd
+++ b/Cython/Compiler/Code.pxd
@@ -54,6 +54,7 @@ cdef class FunctionState:
cdef public object closure_temps
cdef public bint should_declare_error_indicator
cdef public bint uses_error_indicator
+ cdef public bint error_without_exception
@cython.locals(n=size_t)
cpdef new_label(self, name=*)
diff --git a/Cython/Compiler/Code.py b/Cython/Compiler/Code.py
index 4695b240c..4c67ac400 100644
--- a/Cython/Compiler/Code.py
+++ b/Cython/Compiler/Code.py
@@ -691,6 +691,7 @@ class LazyUtilityCode(UtilityCodeBase):
class FunctionState(object):
# return_label string function return point label
# error_label string error catch point label
+ # error_without_exception boolean Can go to the error label without an exception (e.g. __next__ can return NULL)
# continue_label string loop continue point label
# break_label string loop break point label
# return_from_error_cleanup_label string
@@ -739,6 +740,8 @@ class FunctionState(object):
self.should_declare_error_indicator = False
self.uses_error_indicator = False
+ self.error_without_exception = False
+
# safety checks
def validate_exit(self):
@@ -2332,8 +2335,16 @@ class CCodeWriter(object):
if method_noargs in method_flags:
# Special NOARGS methods really take no arguments besides 'self', but PyCFunction expects one.
func_cname = Naming.method_wrapper_prefix + func_cname
- self.putln("static PyObject *%s(PyObject *self, CYTHON_UNUSED PyObject *arg) {return %s(self);}" % (
- func_cname, entry.func_cname))
+ self.putln("static PyObject *%s(PyObject *self, CYTHON_UNUSED PyObject *arg) {" % func_cname)
+ func_call = "%s(self)" % entry.func_cname
+ if entry.name == "__next__":
+ self.putln("PyObject *res = %s;" % func_call)
+ # tp_iternext can return NULL without an exception
+ self.putln("if (!res && !PyErr_Occurred()) { PyErr_SetNone(PyExc_StopIteration); }")
+ self.putln("return res;")
+ else:
+ self.putln("return %s;" % func_call)
+ self.putln("}")
return func_cname
# GIL methods
diff --git a/Cython/Compiler/Dataclass.py b/Cython/Compiler/Dataclass.py
index 48c1888d6..0d0bb4768 100644
--- a/Cython/Compiler/Dataclass.py
+++ b/Cython/Compiler/Dataclass.py
@@ -154,12 +154,10 @@ def process_class_get_fields(node):
for entry in var_entries:
name = entry.name
- is_initvar = (entry.type.python_type_constructor_name == "dataclasses.InitVar")
+ is_initvar = entry.declared_with_pytyping_modifier("dataclasses.InitVar")
# TODO - classvars aren't included in "var_entries" so are missed here
# and thus this code is never triggered
- is_classvar = (entry.type.python_type_constructor_name == "typing.ClassVar")
- if is_initvar or is_classvar:
- entry.type = entry.type.resolve() # no longer need the special type
+ is_classvar = entry.declared_with_pytyping_modifier("typing.ClassVar")
if name in default_value_assignments:
assignment = default_value_assignments[name]
if (isinstance(assignment, ExprNodes.CallNode)
@@ -666,8 +664,11 @@ def _set_up_dataclass_fields(node, fields, dataclass_module):
name)
# create an entry in the global scope for this variable to live
field_node = ExprNodes.NameNode(field_default.pos, name=EncodedString(module_field_name))
- field_node.entry = global_scope.declare_var(field_node.name, type=field_default.type or PyrexTypes.unspecified_type,
- pos=field_default.pos, cname=field_node.name, is_cdef=1)
+ field_node.entry = global_scope.declare_var(
+ field_node.name, type=field_default.type or PyrexTypes.unspecified_type,
+ pos=field_default.pos, cname=field_node.name, is_cdef=True,
+ # TODO: do we need to set 'pytyping_modifiers' here?
+ )
# replace the field so that future users just receive the namenode
setattr(field, attrname, field_node)
diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py
index 312b37329..ab228c552 100644
--- a/Cython/Compiler/ExprNodes.py
+++ b/Cython/Compiler/ExprNodes.py
@@ -328,13 +328,10 @@ class ExprNode(Node):
# is_sequence_constructor
# boolean Is a list or tuple constructor expression
# is_starred boolean Is a starred expression (e.g. '*a')
- # saved_subexpr_nodes
- # [ExprNode or [ExprNode or None] or None]
- # Cached result of subexpr_nodes()
# use_managed_ref boolean use ref-counted temps/assignments/etc.
# result_is_used boolean indicates that the result will be dropped and the
- # is_numpy_attribute boolean Is a Numpy module attribute
# result_code/temp_result can safely be set to None
+ # is_numpy_attribute boolean Is a Numpy module attribute
# annotation ExprNode or None PEP526 annotation for names or expressions
result_ctype = None
@@ -473,7 +470,6 @@ class ExprNode(Node):
is_memview_broadcast = False
is_memview_copy_assignment = False
- saved_subexpr_nodes = None
is_temp = False
has_temp_moved = False # if True then attempting to do anything but free the temp is invalid
is_target = False
@@ -1103,6 +1099,8 @@ class ExprNode(Node):
type = self.type
if type.is_enum or type.is_error:
return self
+ elif type is PyrexTypes.c_bint_type:
+ return self
elif type.is_pyobject or type.is_int or type.is_ptr or type.is_float:
return CoerceToBooleanNode(self, env)
elif type.is_cpp_class and type.scope and type.scope.lookup("operator bool"):
@@ -1530,14 +1528,18 @@ class FloatNode(ConstNode):
def _analyse_name_as_type(name, pos, env):
- type = PyrexTypes.parse_basic_type(name)
- if type is not None:
- return type
+ ctype = PyrexTypes.parse_basic_type(name)
+ if ctype is not None and env.in_c_type_context:
+ return ctype
global_entry = env.global_scope().lookup(name)
- if global_entry and global_entry.is_type and global_entry.type:
- return global_entry.type
+ if global_entry and global_entry.is_type:
+ type = global_entry.type
+ if type and (type.is_pyobject or env.in_c_type_context):
+ return type
+ ctype = ctype or type
+ # This is fairly heavy, so it's worth trying some easier things above.
from .TreeFragment import TreeFragment
with local_errors(ignore=True):
pos = (pos[0], pos[1], pos[2]-7)
@@ -1550,8 +1552,11 @@ def _analyse_name_as_type(name, pos, env):
if isinstance(sizeof_node, SizeofTypeNode):
sizeof_node = sizeof_node.analyse_types(env)
if isinstance(sizeof_node, SizeofTypeNode):
- return sizeof_node.arg_type
- return None
+ type = sizeof_node.arg_type
+ if type and (type.is_pyobject or env.in_c_type_context):
+ return type
+ ctype = ctype or type
+ return ctype
class BytesNode(ConstNode):
@@ -2025,6 +2030,8 @@ class NameNode(AtomicExprNode):
# annotations never create global cdef names
if env.is_module_scope:
return
+
+ modifiers = ()
if (
# name: "description" => not a type, but still a declared variable or attribute
annotation.expr.is_string_literal
@@ -2036,10 +2043,11 @@ class NameNode(AtomicExprNode):
# For Python class scopes every attribute is a Python object
atype = py_object_type
else:
- _, atype = annotation.analyse_type_annotation(env)
+ modifiers, atype = annotation.analyse_type_annotation(env)
+
if atype is None:
atype = unspecified_type if as_target and env.directives['infer_types'] != False else py_object_type
- if atype.is_fused and env.fused_to_specific:
+ elif atype.is_fused and env.fused_to_specific:
try:
atype = atype.specialize(env.fused_to_specific)
except CannotSpecialize:
@@ -2047,6 +2055,7 @@ class NameNode(AtomicExprNode):
"'%s' cannot be specialized since its type is not a fused argument to this function" %
self.name)
atype = error_type
+
visibility = 'private'
if 'dataclasses.dataclass' in env.directives:
# handle "frozen" directive - full inspection of the dataclass directives happens
@@ -2060,12 +2069,17 @@ class NameNode(AtomicExprNode):
if atype.is_pyobject or atype.can_coerce_to_pyobject(env):
visibility = 'readonly' if is_frozen else 'public'
# If the object can't be coerced that's fine - we just don't create a property
+
if as_target and env.is_c_class_scope and not (atype.is_pyobject or atype.is_error):
# TODO: this will need revising slightly if annotated cdef attributes are implemented
atype = py_object_type
warning(annotation.pos, "Annotation ignored since class-level attributes must be Python objects. "
"Were you trying to set up an instance attribute?", 2)
- entry = self.entry = env.declare_var(name, atype, self.pos, is_cdef=not as_target, visibility=visibility)
+
+ entry = self.entry = env.declare_var(
+ name, atype, self.pos, is_cdef=not as_target, visibility=visibility,
+ pytyping_modifiers=modifiers)
+
# Even if the entry already exists, make sure we're supplying an annotation if we can.
if annotation and not entry.annotation:
entry.annotation = annotation
@@ -2085,23 +2099,38 @@ class NameNode(AtomicExprNode):
return None
def analyse_as_type(self, env):
+ type = None
if self.cython_attribute:
type = PyrexTypes.parse_basic_type(self.cython_attribute)
- else:
+ elif env.in_c_type_context:
type = PyrexTypes.parse_basic_type(self.name)
if type:
return type
+
entry = self.entry
if not entry:
entry = env.lookup(self.name)
- if entry and entry.is_type:
- return entry.type
- elif entry and entry.known_standard_library_import:
+ if entry and not entry.is_type and entry.known_standard_library_import:
entry = Builtin.get_known_standard_library_entry(entry.known_standard_library_import)
- if entry and entry.is_type:
- return entry.type
- else:
- return None
+ if entry and entry.is_type:
+ # Infer equivalent C types instead of Python types when possible.
+ type = entry.type
+ if not env.in_c_type_context and type is Builtin.long_type:
+ # Try to give a helpful warning when users write plain C type names.
+ warning(self.pos, "Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?")
+ type = py_object_type
+ elif type.is_pyobject and type.equivalent_type:
+ type = type.equivalent_type
+ return type
+ if self.name == 'object':
+ # This is normally parsed as "simple C type", but not if we don't parse C types.
+ return py_object_type
+
+ # Try to give a helpful warning when users write plain C type names.
+ if not env.in_c_type_context and PyrexTypes.parse_basic_type(self.name):
+ warning(self.pos, "Found C type '%s' in a Python annotation. Did you mean to use a Python type?" % self.name)
+
+ return None
def analyse_as_extension_type(self, env):
# Try to interpret this as a reference to an extension type.
@@ -3702,6 +3731,18 @@ class IndexNode(_IndexingBaseNode):
error(self.pos, "Array size must be a compile time constant")
return None
+ def analyse_pytyping_modifiers(self, env):
+ # Check for declaration modifiers, e.g. "typing.Optional[...]" or "dataclasses.InitVar[...]"
+ # TODO: somehow bring this together with TemplatedTypeNode.analyse_pytyping_modifiers()
+ modifiers = []
+ modifier_node = self
+ while modifier_node.is_subscript:
+ modifier_type = modifier_node.base.analyse_as_type(env)
+ if modifier_type.python_type_constructor_name and modifier_type.modifier_name:
+ modifiers.append(modifier_type.modifier_name)
+ modifier_node = modifier_node.index
+ return modifiers
+
def type_dependencies(self, env):
return self.base.type_dependencies(env) + self.index.type_dependencies(env)
@@ -3932,12 +3973,16 @@ class IndexNode(_IndexingBaseNode):
if base_type in (list_type, tuple_type) and self.index.type.is_int:
item_type = infer_sequence_item_type(
env, self.base, self.index, seq_type=base_type)
- if item_type is None:
- item_type = py_object_type
- self.type = item_type
if base_type in (list_type, tuple_type, dict_type):
# do the None check explicitly (not in a helper) to allow optimising it away
self.base = self.base.as_none_safe_node("'NoneType' object is not subscriptable")
+ if item_type is None or not item_type.is_pyobject:
+ # Even if we inferred a C type as result, we will read a Python object, so trigger coercion if needed.
+ # We could potentially use "item_type.equivalent_type" here, but that may trigger assumptions
+ # about the actual runtime item types, rather than just their ability to coerce to the C "item_type".
+ self.type = py_object_type
+ else:
+ self.type = item_type
self.wrap_in_nonecheck_node(env, getting)
return self
@@ -4233,6 +4278,7 @@ class IndexNode(_IndexingBaseNode):
return
utility_code = None
+ error_value = None
if self.type.is_pyobject:
error_value = 'NULL'
if self.index.type.is_int:
@@ -4268,8 +4314,8 @@ class IndexNode(_IndexingBaseNode):
error_value = '-1'
utility_code = UtilityCode.load_cached("GetItemIntByteArray", "StringTools.c")
elif not (self.base.type.is_cpp_class and self.exception_check):
- assert False, "unexpected type %s and base type %s for indexing" % (
- self.type, self.base.type)
+ assert False, "unexpected type %s and base type %s for indexing (%s)" % (
+ self.type, self.base.type, self.pos)
if utility_code is not None:
code.globalstate.use_utility_code(utility_code)
@@ -4582,17 +4628,17 @@ class BufferIndexNode(_IndexingBaseNode):
buffer_entry, ptrexpr = self.buffer_lookup_code(code)
if self.buffer_type.dtype.is_pyobject:
- # Must manage refcounts. Decref what is already there
- # and incref what we put in.
+ # Must manage refcounts. XDecref what is already there
+ # and incref what we put in (NumPy allows there to be NULL)
ptr = code.funcstate.allocate_temp(buffer_entry.buf_ptr_type,
manage_ref=False)
rhs_code = rhs.result()
code.putln("%s = %s;" % (ptr, ptrexpr))
- code.put_gotref("*%s" % ptr, self.buffer_type.dtype)
- code.putln("__Pyx_INCREF(%s); __Pyx_DECREF(*%s);" % (
+ code.put_xgotref("*%s" % ptr, self.buffer_type.dtype)
+ code.putln("__Pyx_INCREF(%s); __Pyx_XDECREF(*%s);" % (
rhs_code, ptr))
code.putln("*%s %s= %s;" % (ptr, op, rhs_code))
- code.put_giveref("*%s" % ptr, self.buffer_type.dtype)
+ code.put_xgiveref("*%s" % ptr, self.buffer_type.dtype)
code.funcstate.release_temp(ptr)
else:
# Simple case
@@ -4613,8 +4659,11 @@ class BufferIndexNode(_IndexingBaseNode):
# is_temp is True, so must pull out value and incref it.
# NOTE: object temporary results for nodes are declared
# as PyObject *, so we need a cast
- code.putln("%s = (PyObject *) *%s;" % (self.result(), self.buffer_ptr_code))
- code.putln("__Pyx_INCREF((PyObject*)%s);" % self.result())
+ res = self.result()
+ code.putln("%s = (PyObject *) *%s;" % (res, self.buffer_ptr_code))
+ # NumPy does (occasionally) allow NULL to denote None.
+ code.putln("if (unlikely(%s == NULL)) %s = Py_None;" % (res, res))
+ code.putln("__Pyx_INCREF((PyObject*)%s);" % res)
def free_subexpr_temps(self, code):
for temp in self.index_temps:
@@ -8699,7 +8748,7 @@ class MergedSequenceNode(ExprNode):
if type in (list_type, tuple_type) and args and args[0].is_sequence_constructor:
# construct a list directly from the first argument that we can then extend
if args[0].type is not list_type:
- args[0] = ListNode(args[0].pos, args=args[0].args, is_temp=True)
+ args[0] = ListNode(args[0].pos, args=args[0].args, is_temp=True, mult_factor=args[0].mult_factor)
ExprNode.__init__(self, pos, args=args, type=type)
def calculate_constant_result(self):
@@ -14020,10 +14069,8 @@ class AnnotationNode(ExprNode):
def analyse_type_annotation(self, env, assigned_value=None):
if self.untyped:
# Already applied as a fused type, not re-evaluating it here.
- return None, None
+ return [], None
annotation = self.expr
- base_type = None
- is_ambiguous = False
explicit_pytype = explicit_ctype = False
if annotation.is_dict_literal:
warning(annotation.pos,
@@ -14040,36 +14087,29 @@ class AnnotationNode(ExprNode):
annotation = value
if explicit_pytype and explicit_ctype:
warning(annotation.pos, "Duplicate type declarations found in signature annotation", level=1)
- arg_type = annotation.analyse_as_type(env)
- if annotation.is_name and not annotation.cython_attribute and annotation.name in ('int', 'long', 'float'):
- # Map builtin numeric Python types to C types in safe cases.
- if assigned_value is not None and arg_type is not None and not arg_type.is_pyobject:
- assigned_type = assigned_value.infer_type(env)
- if assigned_type and assigned_type.is_pyobject:
- # C type seems unsafe, e.g. due to 'None' default value => ignore annotation type
- is_ambiguous = True
- arg_type = None
- # ignore 'int' and require 'cython.int' to avoid unsafe integer declarations
- if arg_type in (PyrexTypes.c_long_type, PyrexTypes.c_int_type, PyrexTypes.c_float_type):
- arg_type = PyrexTypes.c_double_type if annotation.name == 'float' else py_object_type
- elif arg_type is not None and annotation.is_string_literal:
+
+ with env.new_c_type_context(in_c_type_context=explicit_ctype):
+ arg_type = annotation.analyse_as_type(env)
+
+ if arg_type is None:
+ warning(annotation.pos, "Unknown type declaration in annotation, ignoring")
+ return [], arg_type
+
+ if annotation.is_string_literal:
warning(annotation.pos,
"Strings should no longer be used for type declarations. Use 'cython.int' etc. directly.",
level=1)
- elif arg_type is not None and arg_type.is_complex:
+ if explicit_pytype and not explicit_ctype and not (arg_type.is_pyobject or arg_type.equivalent_type):
+ warning(annotation.pos,
+ "Python type declaration in signature annotation does not refer to a Python type")
+ if arg_type.is_complex:
# creating utility code needs to be special-cased for complex types
arg_type.create_declaration_utility_code(env)
- if arg_type is not None:
- if explicit_pytype and not explicit_ctype and not arg_type.is_pyobject:
- warning(annotation.pos,
- "Python type declaration in signature annotation does not refer to a Python type")
- base_type = Nodes.CAnalysedBaseTypeNode(
- annotation.pos, type=arg_type, is_arg=True)
- elif is_ambiguous:
- warning(annotation.pos, "Ambiguous types in annotation, ignoring")
- else:
- warning(annotation.pos, "Unknown type declaration in annotation, ignoring")
- return base_type, arg_type
+
+ # Check for declaration modifiers, e.g. "typing.Optional[...]" or "dataclasses.InitVar[...]"
+ modifiers = annotation.analyse_pytyping_modifiers(env) if annotation.is_subscript else []
+
+ return modifiers, arg_type
class AssignmentExpressionNode(ExprNode):
diff --git a/Cython/Compiler/FlowControl.pxd b/Cython/Compiler/FlowControl.pxd
index c876ee3b1..4a8ef19c1 100644
--- a/Cython/Compiler/FlowControl.pxd
+++ b/Cython/Compiler/FlowControl.pxd
@@ -58,6 +58,8 @@ cdef class ControlFlow:
cdef public dict assmts
+ cdef public Py_ssize_t in_try_block
+
cpdef newblock(self, ControlBlock parent=*)
cpdef nextblock(self, ControlBlock parent=*)
cpdef bint is_tracked(self, entry)
diff --git a/Cython/Compiler/FlowControl.py b/Cython/Compiler/FlowControl.py
index 4e0160e41..4018ff851 100644
--- a/Cython/Compiler/FlowControl.py
+++ b/Cython/Compiler/FlowControl.py
@@ -110,6 +110,7 @@ class ControlFlow(object):
entries set tracked entries
loops list stack for loop descriptors
exceptions list stack for exception descriptors
+ in_try_block int track if we're in a try...except or try...finally block
"""
def __init__(self):
@@ -122,6 +123,7 @@ class ControlFlow(object):
self.exit_point = ExitBlock()
self.blocks.add(self.exit_point)
self.block = self.entry_point
+ self.in_try_block = 0
def newblock(self, parent=None):
"""Create floating block linked to `parent` if given.
@@ -1166,7 +1168,9 @@ class ControlFlowAnalysis(CythonTransform):
## XXX: children nodes
self.flow.block.add_child(entry_point)
self.flow.nextblock()
+ self.flow.in_try_block += 1
self._visit(node.body)
+ self.flow.in_try_block -= 1
self.flow.exceptions.pop()
# After exception
@@ -1226,7 +1230,9 @@ class ControlFlowAnalysis(CythonTransform):
self.flow.block = body_block
body_block.add_child(entry_point)
self.flow.nextblock()
+ self.flow.in_try_block += 1
self._visit(node.body)
+ self.flow.in_try_block -= 1
self.flow.exceptions.pop()
if self.flow.loops:
self.flow.loops[-1].exceptions.pop()
@@ -1245,6 +1251,8 @@ class ControlFlowAnalysis(CythonTransform):
if self.flow.exceptions:
self.flow.block.add_child(self.flow.exceptions[-1].entry_point)
self.flow.block = None
+ if self.flow.in_try_block:
+ node.in_try_block = True
return node
def visit_ReraiseStatNode(self, node):
diff --git a/Cython/Compiler/Lexicon.py b/Cython/Compiler/Lexicon.py
index 654febbe7..c3ca05b56 100644
--- a/Cython/Compiler/Lexicon.py
+++ b/Cython/Compiler/Lexicon.py
@@ -74,6 +74,7 @@ def make_lexicon():
bra = Any("([{")
ket = Any(")]}")
+ ellipsis = Str("...")
punct = Any(":,;+-*/|&<>=.%`~^?!@")
diphthong = Str("==", "<>", "!=", "<=", ">=", "<<", ">>", "**", "//",
"+=", "-=", "*=", "/=", "%=", "|=", "^=", "&=",
@@ -89,7 +90,7 @@ def make_lexicon():
(intliteral, Method('strip_underscores', symbol='INT')),
(fltconst, Method('strip_underscores', symbol='FLOAT')),
(imagconst, Method('strip_underscores', symbol='IMAG')),
- (punct | diphthong, TEXT),
+ (ellipsis | punct | diphthong, TEXT),
(bra, Method('open_bracket_action')),
(ket, Method('close_bracket_action')),
diff --git a/Cython/Compiler/Naming.py b/Cython/Compiler/Naming.py
index 7845e4aa1..96c0b8fbd 100644
--- a/Cython/Compiler/Naming.py
+++ b/Cython/Compiler/Naming.py
@@ -126,6 +126,7 @@ cur_scope_cname = pyrex_prefix + "cur_scope"
enc_scope_cname = pyrex_prefix + "enc_scope"
frame_cname = pyrex_prefix + "frame"
frame_code_cname = pyrex_prefix + "frame_code"
+error_without_exception_cname = pyrex_prefix + "error_without_exception"
binding_cfunc = pyrex_prefix + "binding_PyCFunctionType"
fused_func_prefix = pyrex_prefix + 'fuse_'
quick_temp_cname = pyrex_prefix + "temp" # temp variable for quick'n'dirty temping
diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py
index 4cad762ab..6fd87673f 100644
--- a/Cython/Compiler/Nodes.py
+++ b/Cython/Compiler/Nodes.py
@@ -158,6 +158,7 @@ class Node(object):
is_terminator = 0
is_wrapper = False # is a DefNode wrapper for a C function
is_cproperty = False
+ is_templated_type_node = False
temps = None
# All descendants should set child_attrs to a list of the attributes
@@ -966,27 +967,34 @@ class CArgDeclNode(Node):
annotation = self.annotation
if not annotation:
return None
- base_type, arg_type = annotation.analyse_type_annotation(env, assigned_value=self.default)
- if base_type is not None:
- self.base_type = base_type
-
- if arg_type and arg_type.python_type_constructor_name == "typing.Optional":
- # "x: Optional[...]" => explicitly allow 'None'
- arg_type = arg_type.resolve()
- if arg_type and not arg_type.is_pyobject:
- error(annotation.pos, "Only Python type arguments can use typing.Optional[...]")
- else:
- self.or_none = True
- elif arg_type is py_object_type:
- # exclude ": object" from the None check - None is a generic object.
- self.or_none = True
- elif arg_type and arg_type.is_pyobject and self.default and self.default.is_none:
- # "x: ... = None" => implicitly allow 'None', but warn about it.
- if not self.or_none:
- warning(self.pos, "PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.")
+
+ modifiers, arg_type = annotation.analyse_type_annotation(env, assigned_value=self.default)
+ if arg_type is not None:
+ self.base_type = CAnalysedBaseTypeNode(
+ annotation.pos, type=arg_type, is_arg=True)
+
+ if arg_type:
+ if "typing.Optional" in modifiers:
+ # "x: Optional[...]" => explicitly allow 'None'
+ arg_type = arg_type.resolve()
+ if arg_type and not arg_type.is_pyobject:
+ # We probably already reported this as "cannot be applied to non-Python type".
+ # error(annotation.pos, "Only Python type arguments can use typing.Optional[...]")
+ pass
+ else:
+ self.or_none = True
+ elif arg_type is py_object_type:
+ # exclude ": object" from the None check - None is a generic object.
self.or_none = True
- elif arg_type and arg_type.is_pyobject and not self.or_none:
- self.not_none = True
+ elif self.default and self.default.is_none and (arg_type.is_pyobject or arg_type.equivalent_type):
+ # "x: ... = None" => implicitly allow 'None'
+ if not arg_type.is_pyobject:
+ arg_type = arg_type.equivalent_type
+ if not self.or_none:
+ warning(self.pos, "PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.")
+ self.or_none = True
+ elif arg_type.is_pyobject and not self.or_none:
+ self.not_none = True
return arg_type
@@ -1076,9 +1084,9 @@ class CSimpleBaseTypeNode(CBaseTypeNode):
else:
type = py_object_type
else:
+ scope = env
if self.module_path:
# Maybe it's a nested C++ class.
- scope = env
for item in self.module_path:
entry = scope.lookup(item)
if entry is not None and (
@@ -1099,8 +1107,6 @@ class CSimpleBaseTypeNode(CBaseTypeNode):
if scope is None:
# Maybe it's a cimport.
scope = env.find_imported_module(self.module_path, self.pos)
- else:
- scope = env
if scope:
if scope.is_c_class_scope:
@@ -1139,10 +1145,9 @@ class CSimpleBaseTypeNode(CBaseTypeNode):
type = PyrexTypes.c_double_complex_type
type.create_declaration_utility_code(env)
self.complex = True
- if type:
- return type
- else:
- return PyrexTypes.error_type
+ if not type:
+ type = PyrexTypes.error_type
+ return type
class MemoryViewSliceTypeNode(CBaseTypeNode):
@@ -1211,10 +1216,40 @@ class TemplatedTypeNode(CBaseTypeNode):
child_attrs = ["base_type_node", "positional_args",
"keyword_args", "dtype_node"]
+ is_templated_type_node = True
dtype_node = None
-
name = None
+ def _analyse_template_types(self, env, base_type):
+ require_python_types = base_type.python_type_constructor_name in (
+ 'typing.Optional',
+ 'dataclasses.ClassVar',
+ )
+ in_c_type_context = env.in_c_type_context and not require_python_types
+
+ template_types = []
+ for template_node in self.positional_args:
+ # CBaseTypeNode -> allow C type declarations in a 'cdef' context again
+ with env.new_c_type_context(in_c_type_context or isinstance(template_node, CBaseTypeNode)):
+ ttype = template_node.analyse_as_type(env)
+ if ttype is None:
+ if base_type.is_cpp_class:
+ error(template_node.pos, "unknown type in template argument")
+ ttype = error_type
+ # For Python generics we can be a bit more flexible and allow None.
+ elif require_python_types and not ttype.is_pyobject:
+ if ttype.equivalent_type and not template_node.as_cython_attribute():
+ ttype = ttype.equivalent_type
+ else:
+ error(template_node.pos, "%s[...] cannot be applied to non-Python type %s" % (
+ base_type.python_type_constructor_name,
+ ttype,
+ ))
+ ttype = error_type
+ template_types.append(ttype)
+
+ return template_types
+
def analyse(self, env, could_be_name=False, base_type=None):
if base_type is None:
base_type = self.base_type_node.analyse(env)
@@ -1222,21 +1257,15 @@ class TemplatedTypeNode(CBaseTypeNode):
if ((base_type.is_cpp_class and base_type.is_template_type()) or
base_type.python_type_constructor_name):
- # Templated class
+ # Templated class, Python generics, etc.
if self.keyword_args and self.keyword_args.key_value_pairs:
tp = "c++ templates" if base_type.is_cpp_class else "indexed types"
error(self.pos, "%s cannot take keyword arguments" % tp)
self.type = PyrexTypes.error_type
- else:
- template_types = []
- for template_node in self.positional_args:
- type = template_node.analyse_as_type(env)
- if type is None and base_type.is_cpp_class:
- error(template_node.pos, "unknown type in template argument")
- type = error_type
- # for indexed_pytype we can be a bit more flexible and pass None
- template_types.append(type)
- self.type = base_type.specialize_here(template_node.pos, env, template_types)
+ return self.type
+
+ template_types = self._analyse_template_types(env, base_type)
+ self.type = base_type.specialize_here(self.pos, env, template_types)
elif base_type.is_pyobject:
# Buffer
@@ -1277,7 +1306,7 @@ class TemplatedTypeNode(CBaseTypeNode):
dimension=dimension)
self.type = self.array_declarator.analyse(base_type, env)[1]
- if self.type.is_fused and env.fused_to_specific:
+ if self.type and self.type.is_fused and env.fused_to_specific:
try:
self.type = self.type.specialize(env.fused_to_specific)
except CannotSpecialize:
@@ -1287,6 +1316,19 @@ class TemplatedTypeNode(CBaseTypeNode):
return self.type
+ def analyse_pytyping_modifiers(self, env):
+ # Check for declaration modifiers, e.g. "typing.Optional[...]" or "dataclasses.InitVar[...]"
+ # TODO: somehow bring this together with IndexNode.analyse_pytyping_modifiers()
+ modifiers = []
+ modifier_node = self
+ while modifier_node.is_templated_type_node and modifier_node.base_type_node and len(modifier_node.positional_args) == 1:
+ modifier_type = modifier_node.base_type_node.analyse_as_type(env)
+ if modifier_type and modifier_type.python_type_constructor_name and modifier_type.modifier_name:
+ modifiers.append(modifier_type.modifier_name)
+ modifier_node = modifier_node.positional_args[0]
+
+ return modifiers
+
class CComplexBaseTypeNode(CBaseTypeNode):
# base_type CBaseTypeNode
@@ -1414,6 +1456,11 @@ class CVarDefNode(StatNode):
base_type = self.base_type.analyse(env)
+ # Check for declaration modifiers, e.g. "typing.Optional[...]" or "dataclasses.InitVar[...]"
+ modifiers = None
+ if self.base_type.is_templated_type_node:
+ modifiers = self.base_type.analyse_pytyping_modifiers(env)
+
if base_type.is_fused and not self.in_pxd and (env.is_c_class_scope or
env.is_module_scope):
error(self.pos, "Fused types not allowed here")
@@ -1477,7 +1524,7 @@ class CVarDefNode(StatNode):
self.entry = dest_scope.declare_var(
name, type, declarator.pos,
cname=cname, visibility=visibility, in_pxd=self.in_pxd,
- api=self.api, is_cdef=1)
+ api=self.api, is_cdef=True, pytyping_modifiers=modifiers)
if Options.docstrings:
self.entry.doc = embed_position(self.pos, self.doc)
@@ -2184,7 +2231,14 @@ class FuncDefNode(StatNode, BlockNode):
# code.put_trace_exception()
assure_gil('error')
+ if code.funcstate.error_without_exception:
+ tempvardecl_code.putln(
+ "int %s = 0; /* StopIteration */" % Naming.error_without_exception_cname
+ )
+ code.putln("if (!%s) {" % Naming.error_without_exception_cname)
code.put_add_traceback(self.entry.qualified_name)
+ if code.funcstate.error_without_exception:
+ code.putln("}")
else:
warning(self.entry.pos,
"Unraisable exception in function '%s'." %
@@ -3164,7 +3218,7 @@ class DefNode(FuncDefNode):
else:
# probably just a plain 'object'
arg.accept_none = True
- else:
+ elif not arg.type.is_error:
arg.accept_none = True # won't be used, but must be there
if arg.not_none:
error(arg.pos, "Only Python type arguments can have 'not None'")
@@ -5188,7 +5242,8 @@ class CClassDefNode(ClassDefNode):
error(base.pos, "Base class '%s' of type '%s' is final" % (
base_type, self.class_name))
elif base_type.is_builtin_type and \
- base_type.name in ('tuple', 'str', 'bytes'):
+ base_type.name in ('tuple', 'bytes'):
+ # str in Py2 is also included in this, but now checked at run-time
error(base.pos, "inheritance from PyVarObject types like '%s' is not currently supported"
% base_type.name)
else:
@@ -5435,8 +5490,10 @@ class CClassDefNode(ClassDefNode):
typeptr_cname, buffer_slot.slot_name,
))
code.putln("}")
+ code.putln("#elif defined(_MSC_VER)")
+ code.putln("#pragma message (\"The buffer protocol is not supported in the Limited C-API.\")")
code.putln("#else")
- code.putln("#warning The buffer protocol is not supported in the Limited C-API.")
+ code.putln("#warning \"The buffer protocol is not supported in the Limited C-API.\"")
code.putln("#endif")
code.globalstate.use_utility_code(
@@ -5455,6 +5512,22 @@ class CClassDefNode(ClassDefNode):
))
code.putln("#endif") # if CYTHON_USE_TYPE_SPECS
+ base_type = type.base_type
+ while base_type:
+ if base_type.is_external and not base_type.objstruct_cname == "PyTypeObject":
+ # 'type' is special-cased because it is actually based on PyHeapTypeObject
+ # Variable length bases are allowed if the current class doesn't grow
+ code.putln("if (sizeof(%s%s) != sizeof(%s%s)) {" % (
+ "" if type.typedef_flag else "struct ", type.objstruct_cname,
+ "" if base_type.typedef_flag else "struct ", base_type.objstruct_cname))
+ code.globalstate.use_utility_code(
+ UtilityCode.load_cached("ValidateExternBase", "ExtensionTypes.c"))
+ code.put_error_if_neg(entry.pos, "__Pyx_validate_extern_base(%s)" % (
+ type.base_type.typeptr_cname))
+ code.putln("}")
+ break
+ base_type = base_type.base_type
+
code.putln("#if !CYTHON_COMPILING_IN_LIMITED_API")
# FIXME: these still need to get initialised even with the limited-API
for slot in TypeSlots.get_slot_table(code.globalstate.directives):
@@ -6654,11 +6727,15 @@ class RaiseStatNode(StatNode):
# exc_value ExprNode or None
# exc_tb ExprNode or None
# cause ExprNode or None
+ #
+ # set in FlowControl
+ # in_try_block bool
child_attrs = ["exc_type", "exc_value", "exc_tb", "cause"]
is_terminator = True
builtin_exc_name = None
wrap_tuple_value = False
+ in_try_block = False
def analyse_expressions(self, env):
if self.exc_type:
@@ -6687,9 +6764,19 @@ class RaiseStatNode(StatNode):
not (exc.args or (exc.arg_tuple is not None and exc.arg_tuple.args))):
exc = exc.function # extract the exception type
if exc.is_name and exc.entry.is_builtin:
+ from . import Symtab
self.builtin_exc_name = exc.name
if self.builtin_exc_name == 'MemoryError':
self.exc_type = None # has a separate implementation
+ elif (self.builtin_exc_name == 'StopIteration' and
+ env.is_local_scope and env.name == "__next__" and
+ env.parent_scope and env.parent_scope.is_c_class_scope and
+ not self.in_try_block):
+ # tp_iternext is allowed to return NULL without raising StopIteration.
+ # For the sake of simplicity, only allow this to happen when not in
+ # a try block
+ self.exc_type = None
+
return self
nogil_check = Node.gil_error
@@ -6700,6 +6787,11 @@ class RaiseStatNode(StatNode):
if self.builtin_exc_name == 'MemoryError':
code.putln('PyErr_NoMemory(); %s' % code.error_goto(self.pos))
return
+ elif self.builtin_exc_name == 'StopIteration' and not self.exc_type:
+ code.putln('%s = 1;' % Naming.error_without_exception_cname)
+ code.putln('%s;' % code.error_goto(None))
+ code.funcstate.error_without_exception = True
+ return
if self.exc_type:
self.exc_type.generate_evaluation_code(code)
diff --git a/Cython/Compiler/Optimize.py b/Cython/Compiler/Optimize.py
index a601d18c9..cea5970f6 100644
--- a/Cython/Compiler/Optimize.py
+++ b/Cython/Compiler/Optimize.py
@@ -3026,7 +3026,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
"""Optimistic optimisation as X.append() is almost always
referring to a list.
"""
- if len(args) != 2 or node.result_is_used:
+ if len(args) != 2 or node.result_is_used or node.function.entry:
return node
return ExprNodes.PythonCapiCallNode(
diff --git a/Cython/Compiler/Options.py b/Cython/Compiler/Options.py
index af28a7187..97f288905 100644
--- a/Cython/Compiler/Options.py
+++ b/Cython/Compiler/Options.py
@@ -366,7 +366,6 @@ directive_scopes = { # defaults to available everywhere
'test_fail_if_path_exists' : ('function', 'class', 'cclass'),
'freelist': ('cclass',),
'emit_code_comments': ('module',),
- 'annotation_typing': ('module',), # FIXME: analysis currently lacks more specific function scope
# Avoid scope-specific to/from_py_functions for c_string.
'c_string_type': ('module',),
'c_string_encoding': ('module',),
diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py
index a580f72e7..718b5fd85 100644
--- a/Cython/Compiler/ParseTreeTransforms.py
+++ b/Cython/Compiler/ParseTreeTransforms.py
@@ -2307,6 +2307,12 @@ if VALUE is not None:
assmt.analyse_declarations(env)
return assmt
+ def visit_func_outer_attrs(self, node):
+ # any names in the outer attrs should not be looked up in the function "seen_vars_stack"
+ stack = self.seen_vars_stack.pop()
+ super(AnalyseDeclarationsTransform, self).visit_func_outer_attrs(node)
+ self.seen_vars_stack.append(stack)
+
def visit_ScopedExprNode(self, node):
env = self.current_env()
node.analyse_declarations(env)
diff --git a/Cython/Compiler/Parsing.pxd b/Cython/Compiler/Parsing.pxd
index 7f4a1c220..1be718581 100644
--- a/Cython/Compiler/Parsing.pxd
+++ b/Cython/Compiler/Parsing.pxd
@@ -23,15 +23,17 @@ cdef tuple p_binop_operator(PyrexScanner s)
cdef p_binop_expr(PyrexScanner s, ops, p_sub_expr_func p_sub_expr)
cdef p_lambdef(PyrexScanner s, bint allow_conditional=*)
cdef p_lambdef_nocond(PyrexScanner s)
-cdef p_test(PyrexScanner s, bint allow_assignment_expression=*)
-cdef p_test_nocond(PyrexScanner s, bint allow_assignment_expression=*)
-cdef p_walrus_test(PyrexScanner s, bint allow_assignment_expression=*)
+cdef p_test(PyrexScanner s)
+cdef p_test_allow_walrus_after(PyrexScanner s)
+cdef p_test_nocond(PyrexScanner s)
+cdef p_namedexpr_test(PyrexScanner s)
cdef p_or_test(PyrexScanner s)
cdef p_rassoc_binop_expr(PyrexScanner s, unicode op, p_sub_expr_func p_subexpr)
cdef p_and_test(PyrexScanner s)
cdef p_not_test(PyrexScanner s)
cdef p_comparison(PyrexScanner s)
-cdef p_test_or_starred_expr(PyrexScanner s, bint is_expression=*)
+cdef p_test_or_starred_expr(PyrexScanner s)
+cdef p_namedexpr_test_or_starred_expr(PyrexScanner s)
cdef p_starred_expr(PyrexScanner s)
cdef p_cascaded_cmp(PyrexScanner s)
cdef p_cmp_op(PyrexScanner s)
@@ -85,9 +87,10 @@ cdef p_dict_or_set_maker(PyrexScanner s)
cdef p_backquote_expr(PyrexScanner s)
cdef p_simple_expr_list(PyrexScanner s, expr=*)
cdef p_test_or_starred_expr_list(PyrexScanner s, expr=*)
+cdef p_namedexpr_test_or_starred_expr_list(s, expr=*)
cdef p_testlist(PyrexScanner s)
cdef p_testlist_star_expr(PyrexScanner s)
-cdef p_testlist_comp(PyrexScanner s, bint is_expression=*)
+cdef p_testlist_comp(PyrexScanner s)
cdef p_genexp(PyrexScanner s, expr)
#-------------------------------------------------------
diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py
index 1347289d4..30917c463 100644
--- a/Cython/Compiler/Parsing.py
+++ b/Cython/Compiler/Parsing.py
@@ -122,9 +122,9 @@ def p_lambdef(s, allow_conditional=True):
s, terminator=':', annotated=False)
s.expect(':')
if allow_conditional:
- expr = p_test(s, allow_assignment_expression=False)
+ expr = p_test(s)
else:
- expr = p_test_nocond(s, allow_assignment_expression=False)
+ expr = p_test_nocond(s)
return ExprNodes.LambdaNode(
pos, args = args,
star_arg = star_arg, starstar_arg = starstar_arg,
@@ -133,49 +133,64 @@ def p_lambdef(s, allow_conditional=True):
#lambdef_nocond: 'lambda' [varargslist] ':' test_nocond
def p_lambdef_nocond(s):
- return p_lambdef(s, allow_conditional=False)
+ return p_lambdef(s)
#test: or_test ['if' or_test 'else' test] | lambdef
-def p_test(s, allow_assignment_expression=True):
+def p_test(s):
+ # The check for a following ':=' is only for error reporting purposes.
+ # It simply changes a
+ # expected ')', found ':='
+ # message into something a bit more descriptive.
+ # It is close to what the PEG parser does in CPython, where an expression has
+ # a lookahead assertion that it isn't followed by ':='
+ expr = p_test_allow_walrus_after(s)
+ if s.sy == ':=':
+ s.error("invalid syntax: assignment expression not allowed in this context")
+ return expr
+
+def p_test_allow_walrus_after(s):
if s.sy == 'lambda':
return p_lambdef(s)
pos = s.position()
- expr = p_walrus_test(s, allow_assignment_expression)
+ expr = p_or_test(s)
if s.sy == 'if':
s.next()
- # Assignment expressions are always allowed here
- # even if they wouldn't be allowed in the expression as a whole.
- test = p_walrus_test(s)
+ test = p_or_test(s)
s.expect('else')
other = p_test(s)
return ExprNodes.CondExprNode(pos, test=test, true_val=expr, false_val=other)
else:
return expr
+
#test_nocond: or_test | lambdef_nocond
-def p_test_nocond(s, allow_assignment_expression=True):
+def p_test_nocond(s):
if s.sy == 'lambda':
return p_lambdef_nocond(s)
else:
- return p_walrus_test(s, allow_assignment_expression)
-
-# walrurus_test: IDENT := test | or_test
-
-def p_walrus_test(s, allow_assignment_expression=True):
- lhs = p_or_test(s)
+ return p_or_test(s)
+
+def p_namedexpr_test(s):
+ # defined in the LL parser as
+ # namedexpr_test: test [':=' test]
+ # The requirement that the LHS is a name is not enforced in the grammar.
+ # For comparison the PEG parser does:
+ # 1. look for "name :=", if found it's definitely a named expression
+ # so look for expression
+ # 2. Otherwise, look for expression
+ lhs = p_test_allow_walrus_after(s)
if s.sy == ':=':
position = s.position()
- if not allow_assignment_expression:
- s.error("invalid syntax: assignment expression not allowed in this context")
- elif not lhs.is_name:
- s.error("Left-hand side of assignment expression must be an identifier")
+ if not lhs.is_name:
+ s.error("Left-hand side of assignment expression must be an identifier", fatal=False)
s.next()
rhs = p_test(s)
return ExprNodes.AssignmentExpressionNode(position, lhs=lhs, rhs=rhs)
return lhs
+
#or_test: and_test ('or' and_test)*
COMMON_BINOP_MISTAKES = {'||': 'or', '&&': 'and'}
@@ -229,11 +244,17 @@ def p_comparison(s):
n1.cascade = p_cascaded_cmp(s)
return n1
-def p_test_or_starred_expr(s, is_expression=False):
+def p_test_or_starred_expr(s):
if s.sy == '*':
return p_starred_expr(s)
else:
- return p_test(s, allow_assignment_expression=is_expression)
+ return p_test(s)
+
+def p_namedexpr_test_or_starred_expr(s):
+ if s.sy == '*':
+ return p_starred_expr(s)
+ else:
+ return p_namedexpr_test(s)
def p_starred_expr(s):
pos = s.position()
@@ -507,7 +528,7 @@ def p_call_parse_args(s, allow_genexp=True):
keyword_args.append(p_test(s))
starstar_seen = True
else:
- arg = p_test(s)
+ arg = p_namedexpr_test(s)
if s.sy == '=':
s.next()
if not arg.is_name:
@@ -516,7 +537,7 @@ def p_call_parse_args(s, allow_genexp=True):
encoded_name = s.context.intern_ustring(arg.name)
keyword = ExprNodes.IdentifierStringNode(
arg.pos, value=encoded_name)
- arg = p_test(s, allow_assignment_expression=False)
+ arg = p_test(s)
keyword_args.append((keyword, arg))
else:
if keyword_args:
@@ -655,9 +676,7 @@ def p_slice_element(s, follow_set):
return None
def expect_ellipsis(s):
- s.expect('.')
- s.expect('.')
- s.expect('.')
+ s.expect('...')
def make_slice_nodes(pos, subscripts):
# Convert a list of subscripts as returned
@@ -694,7 +713,7 @@ def p_atom(s):
elif s.sy == 'yield':
result = p_yield_expression(s)
else:
- result = p_testlist_comp(s, is_expression=True)
+ result = p_testlist_comp(s)
s.expect(')')
return result
elif sy == '[':
@@ -703,7 +722,7 @@ def p_atom(s):
return p_dict_or_set_maker(s)
elif sy == '`':
return p_backquote_expr(s)
- elif sy == '.':
+ elif sy == '...':
expect_ellipsis(s)
return ExprNodes.EllipsisNode(pos)
elif sy == 'INT':
@@ -1265,7 +1284,7 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw):
# since PEP 448:
# list_display ::= "[" [listmaker] "]"
-# listmaker ::= (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
+# listmaker ::= (named_test|star_expr) ( comp_for | (',' (named_test|star_expr))* [','] )
# comp_iter ::= comp_for | comp_if
# comp_for ::= ["async"] "for" expression_list "in" testlist [comp_iter]
# comp_if ::= "if" test [comp_iter]
@@ -1278,7 +1297,7 @@ def p_list_maker(s):
s.expect(']')
return ExprNodes.ListNode(pos, args=[])
- expr = p_test_or_starred_expr(s, is_expression=True)
+ expr = p_namedexpr_test_or_starred_expr(s)
if s.sy in ('for', 'async'):
if expr.is_starred:
s.error("iterable unpacking cannot be used in comprehension")
@@ -1293,7 +1312,7 @@ def p_list_maker(s):
# (merged) list literal
if s.sy == ',':
s.next()
- exprs = p_test_or_starred_expr_list(s, expr)
+ exprs = p_namedexpr_test_or_starred_expr_list(s, expr)
else:
exprs = [expr]
s.expect(']')
@@ -1478,7 +1497,16 @@ def p_simple_expr_list(s, expr=None):
def p_test_or_starred_expr_list(s, expr=None):
exprs = expr is not None and [expr] or []
while s.sy not in expr_terminators:
- exprs.append(p_test_or_starred_expr(s, is_expression=(expr is not None)))
+ exprs.append(p_test_or_starred_expr(s))
+ if s.sy != ',':
+ break
+ s.next()
+ return exprs
+
+def p_namedexpr_test_or_starred_expr_list(s, expr=None):
+ exprs = expr is not None and [expr] or []
+ while s.sy not in expr_terminators:
+ exprs.append(p_namedexpr_test_or_starred_expr(s))
if s.sy != ',':
break
s.next()
@@ -1511,12 +1539,12 @@ def p_testlist_star_expr(s):
# testlist_comp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
-def p_testlist_comp(s, is_expression=False):
+def p_testlist_comp(s):
pos = s.position()
- expr = p_test_or_starred_expr(s, is_expression)
+ expr = p_namedexpr_test_or_starred_expr(s)
if s.sy == ',':
s.next()
- exprs = p_test_or_starred_expr_list(s, expr)
+ exprs = p_namedexpr_test_or_starred_expr_list(s, expr)
return ExprNodes.TupleNode(pos, args = exprs)
elif s.sy in ('for', 'async'):
return p_genexp(s, expr)
@@ -1762,11 +1790,11 @@ def p_from_import_statement(s, first_statement = 0):
# s.sy == 'from'
pos = s.position()
s.next()
- if s.sy == '.':
+ if s.sy in ('.', '...'):
# count relative import level
level = 0
- while s.sy == '.':
- level += 1
+ while s.sy in ('.', '...'):
+ level += len(s.sy)
s.next()
else:
level = None
@@ -1904,7 +1932,7 @@ def p_if_statement(s):
def p_if_clause(s):
pos = s.position()
- test = p_test(s)
+ test = p_namedexpr_test(s)
body = p_suite(s)
return Nodes.IfClauseNode(pos,
condition = test, body = body)
@@ -1920,7 +1948,7 @@ def p_while_statement(s):
# s.sy == 'while'
pos = s.position()
s.next()
- test = p_test(s)
+ test = p_namedexpr_test(s)
body = p_suite(s)
else_clause = p_else_clause(s)
return Nodes.WhileStatNode(pos,
@@ -3047,7 +3075,7 @@ def p_exception_value_clause(s):
return exc_val, exc_check
c_arg_list_terminators = cython.declare(frozenset, frozenset((
- '*', '**', '.', ')', ':', '/')))
+ '*', '**', '...', ')', ':', '/')))
def p_c_arg_list(s, ctx = Ctx(), in_pyfunc = 0, cmethod_flag = 0,
nonempty_declarators = 0, kw_only = 0, annotated = 1):
@@ -3066,7 +3094,7 @@ def p_c_arg_list(s, ctx = Ctx(), in_pyfunc = 0, cmethod_flag = 0,
return args
def p_optional_ellipsis(s):
- if s.sy == '.':
+ if s.sy == '...':
expect_ellipsis(s)
return 1
else:
@@ -3110,11 +3138,11 @@ def p_c_arg_decl(s, ctx, in_pyfunc, cmethod_flag = 0, nonempty = 0,
default = ExprNodes.NoneNode(pos)
s.next()
elif 'inline' in ctx.modifiers:
- default = p_test(s, allow_assignment_expression=False)
+ default = p_test(s)
else:
error(pos, "default values cannot be specified in pxd files, use ? or *")
else:
- default = p_test(s, allow_assignment_expression=False)
+ default = p_test(s)
return Nodes.CArgDeclNode(pos,
base_type = base_type,
declarator = declarator,
@@ -4415,5 +4443,5 @@ def p_annotation(s):
then it is not a bug.
"""
pos = s.position()
- expr = p_test(s, allow_assignment_expression=False)
+ expr = p_test(s)
return ExprNodes.AnnotationNode(pos, expr=expr)
diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py
index c773f5c5a..79e144ed1 100644
--- a/Cython/Compiler/PyrexTypes.py
+++ b/Cython/Compiler/PyrexTypes.py
@@ -205,6 +205,7 @@ class PyrexType(BaseType):
# needs_cpp_construction boolean Needs C++ constructor and destructor when used in a cdef class
# needs_refcounting boolean Needs code to be generated similar to incref/gotref/decref.
# Largely used internally.
+ # equivalent_type type A C or Python type that is equivalent to this Python or C type.
# default_value string Initial value that can be assigned before first user assignment.
# declaration_value string The value statically assigned on declaration (if any).
# entry Entry The Entry for this type
@@ -277,6 +278,7 @@ class PyrexType(BaseType):
has_attributes = 0
needs_cpp_construction = 0
needs_refcounting = 0
+ equivalent_type = None
default_value = ""
declaration_value = ""
@@ -1504,7 +1506,6 @@ class PyExtensionType(PyObjectType):
#
# name string
# scope CClassScope Attribute namespace
- # visibility string
# typedef_flag boolean
# base_type PyExtensionType or None
# module_name string or None Qualified name of defining module
@@ -1518,6 +1519,7 @@ class PyExtensionType(PyObjectType):
# vtable_cname string Name of C method table definition
# early_init boolean Whether to initialize early (as opposed to during module execution).
# defered_declarations [thunk] Used to declare class hierarchies in order
+ # is_external boolean Defined in an extern block
# check_size 'warn', 'error', 'ignore' What to do if tp_basicsize does not match
# dataclass_fields OrderedDict nor None Used for inheriting from dataclasses
@@ -3044,6 +3046,9 @@ class CFuncType(CType):
# must catch C++ exceptions if we raise them
return 0
if not other_type.exception_check or other_type.exception_value is not None:
+ # There's no problem if this type doesn't emit exceptions but the other type checks
+ if other_type.exception_check and not (self.exception_check or self.exception_value):
+ return 1
# if other does not *always* check exceptions, self must comply
if not self._same_exception_value(other_type.exception_value):
return 0
@@ -4429,6 +4434,7 @@ class ErrorType(PyrexType):
class PythonTypeConstructor(PyObjectType):
"""Used to help Cython interpret indexed types from the typing module (or similar)
"""
+ modifier_name = None
def __init__(self, name, base_type=None):
self.python_type_constructor_name = name
@@ -4457,69 +4463,35 @@ class PythonTupleTypeConstructor(PythonTypeConstructor):
not any(v.is_pyobject for v in template_values)):
entry = env.declare_tuple_type(pos, template_values)
if entry:
+ entry.used = True
return entry.type
return super(PythonTupleTypeConstructor, self).specialize_here(pos, env, template_values)
class SpecialPythonTypeConstructor(PythonTypeConstructor):
"""
- For things like ClassVar, Optional, etc, which have extra features on top of being
- a "templated" type.
+ For things like ClassVar, Optional, etc, which are not types and disappear during type analysis.
"""
- def __init__(self, name, template_type=None):
- super(SpecialPythonTypeConstructor, self).__init__(name, None)
- if (name == "typing.ClassVar" and template_type
- and not template_type.is_pyobject):
- # because classvars end up essentially used as globals they have
- # to be PyObjects. Try to find the nearest suitable type (although
- # practically I doubt this matters).
- py_type_name = template_type.py_type_name()
- if py_type_name:
- from .Builtin import builtin_scope
- template_type = (builtin_scope.lookup_type(py_type_name)
- or py_object_type)
- else:
- template_type = py_object_types
- self.template_type = template_type
+ def __init__(self, name):
+ super(SpecialPythonTypeConstructor, self).__init__(name, base_type=None)
+ self.modifier_name = name
def __repr__(self):
- if self.template_type:
- return "%s[%r]" % (self.name, self.template_type)
- else:
- return self.name
-
- def is_template_type(self):
- return self.template_type is None
+ return self.name
def resolve(self):
- if self.template_type:
- return self.template_type.resolve()
- else:
- return self
+ return self
def specialize_here(self, pos, env, template_values=None):
if len(template_values) != 1:
error(pos, "'%s' takes exactly one template argument." % self.name)
- # return a copy of the template type with python_type_constructor_name as an attribute
- # so it can be identified, and a resolve function that gets back to
- # the original type (since types are usually tested with "is")
- new_type = template_values[0]
- if self.python_type_constructor_name == "typing.ClassVar":
- # classvar must remain a py_object_type
- new_type = py_object_type
- if (self.python_type_constructor_name == "typing.Optional" and
- not new_type.is_pyobject):
- # optional must be a py_object, but can be a specialized py_object
- new_type = py_object_type
- return SpecialPythonTypeConstructor(
- self.python_type_constructor_name,
- template_type = template_values[0])
-
- def __getattr__(self, name):
- if self.template_type:
- return getattr(self.template_type, name)
- return super(SpecialPythonTypeConstructor, self).__getattr__(name)
+ return error_type
+ if template_values[0] is None:
+ # FIXME: allowing unknown types for now since we don't recognise all Python types.
+ return None
+ # Replace this type with the actual 'template' argument.
+ return template_values[0].resolve()
rank_to_type_name = (
diff --git a/Cython/Compiler/Symtab.py b/Cython/Compiler/Symtab.py
index 6554008f0..1500c7441 100644
--- a/Cython/Compiler/Symtab.py
+++ b/Cython/Compiler/Symtab.py
@@ -13,6 +13,7 @@ try:
except ImportError: # Py3
import builtins
+from ..Utils import try_finally_contextmanager
from .Errors import warning, error, InternalError
from .StringEncoding import EncodedString
from . import Options, Naming
@@ -163,6 +164,7 @@ class Entry(object):
# known_standard_library_import Either None (default), an empty string (definitely can't be determined)
# or a string of "modulename.something.attribute"
# Used for identifying imports from typing/dataclasses etc
+ # pytyping_modifiers Python type modifiers like "typing.ClassVar" but also "dataclasses.InitVar"
# TODO: utility_code and utility_code_definition serves the same purpose...
@@ -237,6 +239,7 @@ class Entry(object):
is_cgetter = False
is_cpp_optional = False
known_standard_library_import = None
+ pytyping_modifiers = None
def __init__(self, name, cname, type, pos = None, init = None):
self.name = name
@@ -282,6 +285,9 @@ class Entry(object):
assert not self.utility_code # we're not overwriting anything?
self.utility_code_definition = Code.UtilityCode.load_cached("OptionalLocals", "CppSupport.cpp")
+ def declared_with_pytyping_modifier(self, modifier_name):
+ return modifier_name in self.pytyping_modifiers if self.pytyping_modifiers else False
+
class InnerEntry(Entry):
"""
@@ -336,6 +342,7 @@ class Scope(object):
# is_builtin_scope boolean Is the builtin scope of Python/Cython
# is_py_class_scope boolean Is a Python class scope
# is_c_class_scope boolean Is an extension type scope
+ # is_local_scope boolean Is a local (i.e. function/method/generator) scope
# is_closure_scope boolean Is a closure scope
# is_generator_expression_scope boolean A subset of closure scope used for generator expressions
# is_passthrough boolean Outer scope is passed directly
@@ -354,6 +361,7 @@ class Scope(object):
is_py_class_scope = 0
is_c_class_scope = 0
is_closure_scope = 0
+ is_local_scope = False
is_generator_expression_scope = 0
is_comprehension_scope = 0
is_passthrough = 0
@@ -366,6 +374,8 @@ class Scope(object):
nogil = 0
fused_to_specific = None
return_type = None
+ # Do ambiguous type names like 'int' and 'float' refer to the C types? (Otherwise, Python types.)
+ in_c_type_context = True
def __init__(self, name, outer_scope, parent_scope):
# The outer_scope is the next scope in the lookup chain.
@@ -482,6 +492,14 @@ class Scope(object):
for scope in sorted(self.subscopes, key=operator.attrgetter('scope_prefix')):
yield scope
+ @try_finally_contextmanager
+ def new_c_type_context(self, in_c_type_context=None):
+ old_c_type_context = self.in_c_type_context
+ if in_c_type_context is not None:
+ self.in_c_type_context = in_c_type_context
+ yield
+ self.in_c_type_context = old_c_type_context
+
def declare(self, name, cname, type, pos, visibility, shadow = 0, is_type = 0, create_wrapper = 0):
# Create new entry, and add to dictionary if
# name is not None. Reports a warning if already
@@ -733,8 +751,8 @@ class Scope(object):
return self.outer_scope.declare_tuple_type(pos, components)
def declare_var(self, name, type, pos,
- cname = None, visibility = 'private',
- api = 0, in_pxd = 0, is_cdef = 0):
+ cname=None, visibility='private',
+ api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None):
# Add an entry for a variable.
if not cname:
if visibility != 'private' or api:
@@ -754,8 +772,17 @@ class Scope(object):
if api:
entry.api = 1
entry.used = 1
+ if pytyping_modifiers:
+ entry.pytyping_modifiers = pytyping_modifiers
return entry
+ def _reject_pytyping_modifiers(self, pos, modifiers, allowed=()):
+ if not modifiers:
+ return
+ for modifier in modifiers:
+ if modifier not in allowed:
+ error(pos, "Modifier '%s' is not allowed here." % modifier)
+
def declare_assignment_expression_target(self, name, type, pos):
# In most cases declares the variable as normal.
# For generator expressions and comprehensions the variable is declared in their parent
@@ -1515,14 +1542,15 @@ class ModuleScope(Scope):
return entry
def declare_var(self, name, type, pos,
- cname = None, visibility = 'private',
- api = 0, in_pxd = 0, is_cdef = 0):
+ cname=None, visibility='private',
+ api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None):
# Add an entry for a global variable. If it is a Python
# object type, and not declared with cdef, it will live
# in the module dictionary, otherwise it will be a C
# global variable.
if visibility not in ('private', 'public', 'extern'):
error(pos, "Module-level variable cannot be declared %s" % visibility)
+ self._reject_pytyping_modifiers(pos, pytyping_modifiers, ('typing.Optional',)) # let's allow at least this one
if not is_cdef:
if type is unspecified_type:
type = py_object_type
@@ -1558,7 +1586,7 @@ class ModuleScope(Scope):
entry = Scope.declare_var(self, name, type, pos,
cname=cname, visibility=visibility,
- api=api, in_pxd=in_pxd, is_cdef=is_cdef)
+ api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers)
if is_cdef:
entry.is_cglobal = 1
if entry.type.declaration_value:
@@ -1860,6 +1888,7 @@ class ModuleScope(Scope):
class LocalScope(Scope):
+ is_local_scope = True
# Does the function have a 'with gil:' block?
has_with_gil_block = False
@@ -1889,15 +1918,15 @@ class LocalScope(Scope):
return entry
def declare_var(self, name, type, pos,
- cname = None, visibility = 'private',
- api = 0, in_pxd = 0, is_cdef = 0):
+ cname=None, visibility='private',
+ api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None):
name = self.mangle_class_private_name(name)
# Add an entry for a local variable.
if visibility in ('public', 'readonly'):
error(pos, "Local variable cannot be declared %s" % visibility)
entry = Scope.declare_var(self, name, type, pos,
cname=cname, visibility=visibility,
- api=api, in_pxd=in_pxd, is_cdef=is_cdef)
+ api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers)
if entry.type.declaration_value:
entry.init = entry.type.declaration_value
entry.is_local = 1
@@ -1995,13 +2024,14 @@ class ComprehensionScope(Scope):
return '%s%s' % (self.genexp_prefix, self.parent_scope.mangle(prefix, name))
def declare_var(self, name, type, pos,
- cname = None, visibility = 'private',
- api = 0, in_pxd = 0, is_cdef = True):
+ cname=None, visibility='private',
+ api=False, in_pxd=False, is_cdef=True, pytyping_modifiers=None):
if type is unspecified_type:
# if the outer scope defines a type for this variable, inherit it
outer_entry = self.outer_scope.lookup(name)
if outer_entry and outer_entry.is_variable:
type = outer_entry.type # may still be 'unspecified_type' !
+ self._reject_pytyping_modifiers(pos, pytyping_modifiers)
# the parent scope needs to generate code for the variable, but
# this scope must hold its name exclusively
cname = '%s%s' % (self.genexp_prefix, self.parent_scope.mangle(Naming.var_prefix, name or self.next_id()))
@@ -2084,8 +2114,8 @@ class StructOrUnionScope(Scope):
Scope.__init__(self, name, None, None)
def declare_var(self, name, type, pos,
- cname = None, visibility = 'private',
- api = 0, in_pxd = 0, is_cdef = 0,
+ cname=None, visibility='private',
+ api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None,
allow_pyobject=False, allow_memoryview=False, allow_refcounted=False):
# Add an entry for an attribute.
if not cname:
@@ -2094,6 +2124,7 @@ class StructOrUnionScope(Scope):
cname = c_safe_identifier(cname)
if type.is_cfunction:
type = PyrexTypes.CPtrType(type)
+ self._reject_pytyping_modifiers(pos, pytyping_modifiers)
entry = self.declare(name, cname, type, pos, visibility)
entry.is_variable = 1
self.var_entries.append(entry)
@@ -2171,15 +2202,15 @@ class PyClassScope(ClassScope):
is_py_class_scope = 1
def declare_var(self, name, type, pos,
- cname = None, visibility = 'private',
- api = 0, in_pxd = 0, is_cdef = 0):
+ cname=None, visibility='private',
+ api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None):
name = self.mangle_class_private_name(name)
if type is unspecified_type:
type = py_object_type
# Add an entry for a class attribute.
entry = Scope.declare_var(self, name, type, pos,
cname=cname, visibility=visibility,
- api=api, in_pxd=in_pxd, is_cdef=is_cdef)
+ api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers)
entry.is_pyglobal = 1
entry.is_pyclass_attr = 1
return entry
@@ -2301,17 +2332,21 @@ class CClassScope(ClassScope):
return have_entries, (py_attrs, py_buffers, memoryview_slices)
def declare_var(self, name, type, pos,
- cname = None, visibility = 'private',
- api = 0, in_pxd = 0, is_cdef = 0):
+ cname=None, visibility='private',
+ api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None):
name = self.mangle_class_private_name(name)
- if type.python_type_constructor_name == "typing.ClassVar":
- is_cdef = 0
- type = type.resolve()
-
- if (type.python_type_constructor_name == "dataclasses.InitVar" and
- 'dataclasses.dataclass' not in self.directives):
- error(pos, "Use of cython.dataclasses.InitVar does not make sense outside a dataclass")
+ if pytyping_modifiers:
+ if "typing.ClassVar" in pytyping_modifiers:
+ is_cdef = 0
+ if not type.is_pyobject:
+ if not type.equivalent_type:
+ warning(pos, "ClassVar[] requires the type to be a Python object type. Found '%s', using object instead." % type)
+ type = py_object_type
+ else:
+ type = type.equivalent_type
+ if "dataclasses.InitVar" in pytyping_modifiers and 'dataclasses.dataclass' not in self.directives:
+ error(pos, "Use of cython.dataclasses.InitVar does not make sense outside a dataclass")
if is_cdef:
# Add an entry for an attribute.
@@ -2332,6 +2367,7 @@ class CClassScope(ClassScope):
entry = self.declare(name, cname, type, pos, visibility)
entry.is_variable = 1
self.var_entries.append(entry)
+ entry.pytyping_modifiers = pytyping_modifiers
if type.is_cpp_class and visibility != 'extern':
if self.directives['cpp_locals']:
entry.make_cpp_optional()
@@ -2369,7 +2405,7 @@ class CClassScope(ClassScope):
# Add an entry for a class attribute.
entry = Scope.declare_var(self, name, type, pos,
cname=cname, visibility=visibility,
- api=api, in_pxd=in_pxd, is_cdef=is_cdef)
+ api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers)
entry.is_member = 1
# xxx: is_pyglobal changes behaviour in so many places that I keep it in for now.
# is_member should be enough later on
@@ -2612,11 +2648,12 @@ class CppClassScope(Scope):
template_entry.is_type = 1
def declare_var(self, name, type, pos,
- cname = None, visibility = 'extern',
- api = 0, in_pxd = 0, is_cdef = 0, defining = 0):
+ cname=None, visibility='extern',
+ api=False, in_pxd=False, is_cdef=False, defining=False, pytyping_modifiers=None):
# Add an entry for an attribute.
if not cname:
cname = name
+ self._reject_pytyping_modifiers(pos, pytyping_modifiers)
entry = self.lookup_here(name)
if defining and entry is not None:
if entry.type.same_as(type):
@@ -2746,10 +2783,11 @@ class CppScopedEnumScope(Scope):
Scope.__init__(self, name, outer_scope, None)
def declare_var(self, name, type, pos,
- cname=None, visibility='extern'):
+ cname=None, visibility='extern', pytyping_modifiers=None):
# Add an entry for an attribute.
if not cname:
cname = name
+ self._reject_pytyping_modifiers(pos, pytyping_modifiers)
entry = self.declare(name, cname, type, pos, visibility)
entry.is_variable = True
return entry
diff --git a/Cython/Compiler/Tests/TestGrammar.py b/Cython/Compiler/Tests/TestGrammar.py
index f80ec22d3..852b48c33 100644
--- a/Cython/Compiler/Tests/TestGrammar.py
+++ b/Cython/Compiler/Tests/TestGrammar.py
@@ -7,9 +7,12 @@ Uses TreeFragment to test invalid syntax.
from __future__ import absolute_import
+import ast
+import textwrap
+
from ...TestUtils import CythonTest
-from ..Errors import CompileError
from .. import ExprNodes
+from ..Errors import CompileError
# Copied from CPython's test_grammar.py
VALID_UNDERSCORE_LITERALS = [
@@ -103,6 +106,39 @@ INVALID_UNDERSCORE_LITERALS = [
]
+INVALID_ELLIPSIS = [
+ (". . .", 2, 0),
+ (". ..", 2, 0),
+ (".. .", 2, 0),
+ (". ...", 2, 0),
+ (". ... .", 2, 0),
+ (".. ... .", 2, 0),
+ (". ... ..", 2, 0),
+ ("""
+ (
+ .
+ ..
+ )
+ """, 3, 4),
+ ("""
+ [
+ ..
+ .,
+ None
+ ]
+ """, 3, 4),
+ ("""
+ {
+ None,
+ .
+ .
+
+ .
+ }
+ """, 4, 4)
+]
+
+
class TestGrammar(CythonTest):
def test_invalid_number_literals(self):
@@ -142,6 +178,25 @@ class TestGrammar(CythonTest):
else:
assert isinstance(literal_node, ExprNodes.IntNode), (literal, literal_node)
+ def test_invalid_ellipsis(self):
+ ERR = ":{0}:{1}: Expected an identifier or literal"
+ for code, line, col in INVALID_ELLIPSIS:
+ try:
+ ast.parse(textwrap.dedent(code))
+ except SyntaxError as exc:
+ assert True
+ else:
+ assert False, "Invalid Python code '%s' failed to raise an exception" % code
+
+ try:
+ self.fragment(u'''\
+ # cython: language_level=3
+ ''' + code)
+ except CompileError as exc:
+ assert ERR.format(line, col) in str(exc), str(exc)
+ else:
+ assert False, "Invalid Cython code '%s' failed to raise an exception" % code
+
if __name__ == "__main__":
import unittest
diff --git a/Cython/Compiler/Visitor.py b/Cython/Compiler/Visitor.py
index 4eabd6b83..d9be14df1 100644
--- a/Cython/Compiler/Visitor.py
+++ b/Cython/Compiler/Visitor.py
@@ -380,13 +380,15 @@ class EnvTransform(CythonTransform):
self.env_stack.pop()
def visit_FuncDefNode(self, node):
- outer_attrs = node.outer_attrs
- self.visitchildren(node, attrs=outer_attrs)
+ self.visit_func_outer_attrs(node)
self.enter_scope(node, node.local_scope)
- self.visitchildren(node, attrs=None, exclude=outer_attrs)
+ self.visitchildren(node, attrs=None, exclude=node.outer_attrs)
self.exit_scope()
return node
+ def visit_func_outer_attrs(self, node):
+ self.visitchildren(node, attrs=node.outer_attrs)
+
def visit_GeneratorBodyDefNode(self, node):
self._process_children(node)
return node
diff --git a/Cython/Includes/cpython/object.pxd b/Cython/Includes/cpython/object.pxd
index c4688f738..41874159c 100644
--- a/Cython/Includes/cpython/object.pxd
+++ b/Cython/Includes/cpython/object.pxd
@@ -5,7 +5,7 @@ cdef extern from "Python.h":
ctypedef struct PyObject # forward declaration
- ctypedef object (*newfunc)(cpython.type.type, object, object) # (type, args, kwargs)
+ ctypedef object (*newfunc)(cpython.type.type, PyObject*, PyObject*) # (type, args|NULL, kwargs|NULL)
ctypedef object (*unaryfunc)(object)
ctypedef object (*binaryfunc)(object, object)
diff --git a/Cython/Includes/libcpp/random.pxd b/Cython/Includes/libcpp/random.pxd
index e879c8f64..9e48bb27f 100644
--- a/Cython/Includes/libcpp/random.pxd
+++ b/Cython/Includes/libcpp/random.pxd
@@ -1,10 +1,14 @@
-from libc.stdint cimport uint_fast32_t
+from libc.stdint cimport uint_fast32_t, uint_fast64_t
cdef extern from "<random>" namespace "std" nogil:
- cdef cppclass mt19937:
+ cdef cppclass random_device:
ctypedef uint_fast32_t result_type
+ random_device() except +
+ result_type operator()() except +
+ cdef cppclass mt19937:
+ ctypedef uint_fast32_t result_type
mt19937() except +
mt19937(result_type seed) except +
result_type operator()() except +
@@ -12,3 +16,151 @@ cdef extern from "<random>" namespace "std" nogil:
result_type max() except +
void discard(size_t z) except +
void seed(result_type seed) except +
+
+ cdef cppclass mt19937_64:
+ ctypedef uint_fast64_t result_type
+
+ mt19937_64() except +
+ mt19937_64(result_type seed) except +
+ result_type operator()() except +
+ result_type min() except +
+ result_type max() except +
+ void discard(size_t z) except +
+ void seed(result_type seed) except +
+
+ cdef cppclass uniform_int_distribution[T]:
+ ctypedef T result_type
+ uniform_int_distribution() except +
+ uniform_int_distribution(T, T) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass uniform_real_distribution[T]:
+ ctypedef T result_type
+ uniform_real_distribution() except +
+ uniform_real_distribution(T, T) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass bernoulli_distribution:
+ ctypedef bint result_type
+ bernoulli_distribution() except +
+ bernoulli_distribution(double) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass binomial_distribution[T]:
+ ctypedef T result_type
+ binomial_distribution() except +
+ binomial_distribution(T, double) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass geometric_distribution[T]:
+ ctypedef T result_type
+ geometric_distribution() except +
+ geometric_distribution(double) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+
+ cdef cppclass negative_binomial_distribution[T]:
+ ctypedef T result_type
+ negative_binomial_distribution() except +
+ negative_binomial_distribution(T, double) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass poisson_distribution[T]:
+ ctypedef T result_type
+ poisson_distribution() except +
+ poisson_distribution(double) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass exponential_distribution[T]:
+ ctypedef T result_type
+ exponential_distribution() except +
+ exponential_distribution(result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass gamma_distribution[T]:
+ ctypedef T result_type
+ gamma_distribution() except +
+ gamma_distribution(result_type, result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass weibull_distribution[T]:
+ ctypedef T result_type
+ weibull_distribution() except +
+ weibull_distribution(result_type, result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass extreme_value_distribution[T]:
+ ctypedef T result_type
+ extreme_value_distribution() except +
+ extreme_value_distribution(result_type, result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass normal_distribution[T]:
+ ctypedef T result_type
+ normal_distribution() except +
+ normal_distribution(result_type, result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass lognormal_distribution[T]:
+ ctypedef T result_type
+ lognormal_distribution() except +
+ lognormal_distribution(result_type, result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass chi_squared_distribution[T]:
+ ctypedef T result_type
+ chi_squared_distribution() except +
+ chi_squared_distribution(result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass cauchy_distribution[T]:
+ ctypedef T result_type
+ cauchy_distribution() except +
+ cauchy_distribution(result_type, result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass fisher_f_distribution[T]:
+ ctypedef T result_type
+ fisher_f_distribution() except +
+ fisher_f_distribution(result_type, result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass student_t_distribution[T]:
+ ctypedef T result_type
+ student_t_distribution() except +
+ student_t_distribution(result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
diff --git a/Cython/Shadow.py b/Cython/Shadow.py
index 48bc249e0..78d950ce2 100644
--- a/Cython/Shadow.py
+++ b/Cython/Shadow.py
@@ -385,7 +385,7 @@ class typedef(CythonType):
__getitem__ = index_type
class _FusedType(CythonType):
- pass
+ __getitem__ = index_type
def fused_type(*args):
diff --git a/Cython/Utility/CythonFunction.c b/Cython/Utility/CythonFunction.c
index 9a7bf7405..870dcf620 100644
--- a/Cython/Utility/CythonFunction.c
+++ b/Cython/Utility/CythonFunction.c
@@ -934,7 +934,7 @@ static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject
return NULL;
}
- return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, nargs, kwnames);
+ return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames);
}
#endif
@@ -1463,30 +1463,17 @@ bad:
return result;
}
-static PyObject *
-__Pyx_FusedFunction_get_self(__pyx_FusedFunctionObject *m, void *closure)
-{
- PyObject *self = m->self;
- CYTHON_UNUSED_VAR(closure);
- if (unlikely(!self)) {
- PyErr_SetString(PyExc_AttributeError, "'function' object has no attribute '__self__'");
- } else {
- Py_INCREF(self);
- }
- return self;
-}
-
static PyMemberDef __pyx_FusedFunction_members[] = {
{(char *) "__signatures__",
T_OBJECT,
offsetof(__pyx_FusedFunctionObject, __signatures__),
READONLY,
0},
+ {(char *) "__self__", T_OBJECT_EX, offsetof(__pyx_FusedFunctionObject, self), READONLY, 0},
{0, 0, 0, 0, 0},
};
static PyGetSetDef __pyx_FusedFunction_getsets[] = {
- {(char *) "__self__", (getter)__Pyx_FusedFunction_get_self, 0, 0, 0},
// __doc__ is None for the fused function type, but we need it to be
// a descriptor for the instance's __doc__, so rebuild the descriptor in our subclass
// (all other descriptors are inherited)
diff --git a/Cython/Utility/Exceptions.c b/Cython/Utility/Exceptions.c
index c6c5d20ed..9f96225d1 100644
--- a/Cython/Utility/Exceptions.c
+++ b/Cython/Utility/Exceptions.c
@@ -675,11 +675,9 @@ static void __Pyx_WriteUnraisable(const char *name, int clineno,
PyGILState_STATE state;
if (nogil)
state = PyGILState_Ensure();
-#ifdef _MSC_VER
/* arbitrary, to suppress warning */
else state = (PyGILState_STATE)-1;
#endif
-#endif
CYTHON_UNUSED_VAR(clineno);
CYTHON_UNUSED_VAR(lineno);
CYTHON_UNUSED_VAR(filename);
diff --git a/Cython/Utility/ExtensionTypes.c b/Cython/Utility/ExtensionTypes.c
index ec994a367..aa39a860a 100644
--- a/Cython/Utility/ExtensionTypes.c
+++ b/Cython/Utility/ExtensionTypes.c
@@ -564,3 +564,37 @@ static PyObject *{{func_name}}(PyObject *left, PyObject *right {{extra_arg_decl}
}
return __Pyx_NewRef(Py_NotImplemented);
}
+
+/////////////// ValidateExternBase.proto ///////////////
+
+static int __Pyx_validate_extern_base(PyTypeObject *base); /* proto */
+
+/////////////// ValidateExternBase ///////////////
+//@requires: ObjectHandling.c::FormatTypeName
+
+static int __Pyx_validate_extern_base(PyTypeObject *base) {
+ Py_ssize_t itemsize;
+#if CYTHON_COMPILING_IN_LIMITED_API
+ PyObject *py_itemsize;
+#endif
+#if !CYTHON_COMPILING_IN_LIMITED_API
+ itemsize = ((PyTypeObject *)base)->tp_itemsize;
+#else
+ py_itemsize = PyObject_GetAttrString(base, "__itemsize__");
+ if (!py_itemsize)
+ return -1;
+ itemsize = PyLong_AsSsize_t(py_itemsize);
+ Py_DECREF(py_itemsize);
+ py_itemsize = 0;
+ if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred())
+ return -1;
+#endif
+ if (itemsize) {
+ __Pyx_TypeName b_name = __Pyx_PyType_GetName(base);
+ PyErr_Format(PyExc_TypeError,
+ "inheritance from PyVarObject types like '" __Pyx_FMT_TYPENAME "' not currently supported", b_name);
+ __Pyx_DECREF_TypeName(b_name);
+ return -1;
+ }
+ return 0;
+}
diff --git a/Cython/Utility/FunctionArguments.c b/Cython/Utility/FunctionArguments.c
index 1882f826f..8bdaee562 100644
--- a/Cython/Utility/FunctionArguments.c
+++ b/Cython/Utility/FunctionArguments.c
@@ -422,7 +422,7 @@ bad:
#if CYTHON_METH_FASTCALL
#define __Pyx_Arg_FASTCALL(args, i) args[i]
#define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds)
- #define __Pyx_KwValues_FASTCALL(args, nargs) (&args[nargs])
+ #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs))
static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s);
#define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw)
#else
diff --git a/Cython/Utility/ImportExport.c b/Cython/Utility/ImportExport.c
index 6ceba7efb..897657281 100644
--- a/Cython/Utility/ImportExport.c
+++ b/Cython/Utility/ImportExport.c
@@ -498,8 +498,10 @@ static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name,
PyObject *result = 0;
char warning[200];
Py_ssize_t basicsize;
+ Py_ssize_t itemsize;
#if CYTHON_COMPILING_IN_LIMITED_API
PyObject *py_basicsize;
+ PyObject *py_itemsize;
#endif
result = PyObject_GetAttrString(module, class_name);
@@ -513,6 +515,7 @@ static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name,
}
#if !CYTHON_COMPILING_IN_LIMITED_API
basicsize = ((PyTypeObject *)result)->tp_basicsize;
+ itemsize = ((PyTypeObject *)result)->tp_itemsize;
#else
py_basicsize = PyObject_GetAttrString(result, "__basicsize__");
if (!py_basicsize)
@@ -522,19 +525,30 @@ static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name,
py_basicsize = 0;
if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred())
goto bad;
+ py_itemsize = PyObject_GetAttrString(result, "__itemsize__");
+ if (!py_itemsize)
+ goto bad;
+ itemsize = PyLong_AsSsize_t(py_itemsize);
+ Py_DECREF(py_itemsize);
+ py_itemsize = 0;
+ if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred())
+ goto bad;
#endif
- if ((size_t)basicsize < size) {
+ if ((size_t)(basicsize + itemsize) < size) {
PyErr_Format(PyExc_ValueError,
"%.200s.%.200s size changed, may indicate binary incompatibility. "
"Expected %zd from C header, got %zd from PyObject",
- module_name, class_name, size, basicsize);
+ module_name, class_name, size, basicsize+itemsize);
goto bad;
}
- if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) {
+ // varobjects almost always have structs between basicsize and basicsize + itemsize
+ // but the struct isn't always one of the two limiting values
+ if (check_size == __Pyx_ImportType_CheckSize_Error &&
+ ((size_t)basicsize > size || (size_t)(basicsize + itemsize) < size)) {
PyErr_Format(PyExc_ValueError,
"%.200s.%.200s size changed, may indicate binary incompatibility. "
- "Expected %zd from C header, got %zd from PyObject",
- module_name, class_name, size, basicsize);
+ "Expected %zd from C header, got %zd-%zd from PyObject",
+ module_name, class_name, size, basicsize, basicsize+itemsize);
goto bad;
}
else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) {
diff --git a/Cython/Utility/MemoryView.pyx b/Cython/Utility/MemoryView.pyx
index 990319e05..9361249fb 100644
--- a/Cython/Utility/MemoryView.pyx
+++ b/Cython/Utility/MemoryView.pyx
@@ -93,6 +93,17 @@ cdef extern from "<stdlib.h>":
void free(void *) nogil
void *memcpy(void *dest, void *src, size_t n) nogil
+# the sequence abstract base class
+cdef object __pyx_collections_abc_Sequence "__pyx_collections_abc_Sequence"
+try:
+ if __import__("sys").version_info >= (3, 3):
+ __pyx_collections_abc_Sequence = __import__("collections.abc").abc.Sequence
+ else:
+ __pyx_collections_abc_Sequence = __import__("collections").Sequence
+except:
+ # it isn't a big problem if this fails
+ __pyx_collections_abc_Sequence = None
+
#
### cython.array class
#
@@ -224,6 +235,12 @@ cdef class array:
def __setitem__(self, item, value):
self.memview[item] = value
+ # Sequence methods
+ try:
+ count = __pyx_collections_abc_Sequence.count
+ index = __pyx_collections_abc_Sequence.index
+ except:
+ pass
@cname("__pyx_array_allocate_buffer")
cdef int _allocate_buffer(array self) except -1:
@@ -970,6 +987,22 @@ cdef class _memoryviewslice(memoryview):
cdef _get_base(self):
return self.from_object
+ # Sequence methods
+ try:
+ count = __pyx_collections_abc_Sequence.count
+ index = __pyx_collections_abc_Sequence.index
+ except:
+ pass
+
+try:
+ if __pyx_collections_abc_Sequence:
+ # The main value of registering _memoryviewslice as a
+ # Sequence is that it can be used in structural pattern
+ # matching in Python 3.10+
+ __pyx_collections_abc_Sequence.register(_memoryviewslice)
+ __pyx_collections_abc_Sequence.register(array)
+except:
+ pass # ignore failure, it's a minor issue
@cname('__pyx_memoryview_fromslice')
cdef memoryview_fromslice({{memviewslice_name}} memviewslice,
diff --git a/Cython/Utility/MemoryView_C.c b/Cython/Utility/MemoryView_C.c
index 07ed24d20..de003a2ee 100644
--- a/Cython/Utility/MemoryView_C.c
+++ b/Cython/Utility/MemoryView_C.c
@@ -451,7 +451,7 @@ static void __pyx_fatalerror(const char *fmt, ...) Py_NO_RETURN {
va_list vargs;
char msg[200];
-#ifdef HAVE_STDARG_PROTOTYPES
+#if PY_VERSION_HEX >= 0x030A0000 || defined(HAVE_STDARG_PROTOTYPES)
va_start(vargs, fmt);
#else
va_start(vargs);
diff --git a/Cython/Utility/StringTools.c b/Cython/Utility/StringTools.c
index 8c92228cb..910fbf6fa 100644
--- a/Cython/Utility/StringTools.c
+++ b/Cython/Utility/StringTools.c
@@ -1012,7 +1012,7 @@ static PyObject* __Pyx_PyUnicode_BuildFromAscii(Py_ssize_t ulength, char* chars,
padding = PyUnicode_FromOrdinal(padding_char);
if (likely(padding) && uoffset > prepend_sign + 1) {
PyObject *tmp;
- PyObject *repeat = PyInt_FromSize_t(uoffset - prepend_sign);
+ PyObject *repeat = PyInt_FromSsize_t(uoffset - prepend_sign);
if (unlikely(!repeat)) goto done_or_error;
tmp = PyNumber_Multiply(padding, repeat);
Py_DECREF(repeat);
diff --git a/docs/examples/userguide/extension_types/cheesy.py b/docs/examples/userguide/extension_types/cheesy.py
new file mode 100644
index 000000000..0995c3993
--- /dev/null
+++ b/docs/examples/userguide/extension_types/cheesy.py
@@ -0,0 +1,36 @@
+import cython
+
+@cython.cclass
+class CheeseShop:
+
+ cheeses: object
+
+ def __cinit__(self):
+ self.cheeses = []
+
+ @property
+ def cheese(self):
+ return "We don't have: %s" % self.cheeses
+
+ @cheese.setter
+ def cheese(self, value):
+ self.cheeses.append(value)
+
+ @cheese.deleter
+ def cheese(self):
+ del self.cheeses[:]
+
+# Test input
+from cheesy import CheeseShop
+
+shop = CheeseShop()
+print(shop.cheese)
+
+shop.cheese = "camembert"
+print(shop.cheese)
+
+shop.cheese = "cheddar"
+print(shop.cheese)
+
+del shop.cheese
+print(shop.cheese)
diff --git a/docs/examples/userguide/extension_types/cheesy.pyx b/docs/examples/userguide/extension_types/cheesy.pyx
new file mode 100644
index 000000000..2859d848f
--- /dev/null
+++ b/docs/examples/userguide/extension_types/cheesy.pyx
@@ -0,0 +1,36 @@
+
+
+
+cdef class CheeseShop:
+
+ cdef object cheeses
+
+ def __cinit__(self):
+ self.cheeses = []
+
+ @property
+ def cheese(self):
+ return "We don't have: %s" % self.cheeses
+
+ @cheese.setter
+ def cheese(self, value):
+ self.cheeses.append(value)
+
+ @cheese.deleter
+ def cheese(self):
+ del self.cheeses[:]
+
+# Test input
+from cheesy import CheeseShop
+
+shop = CheeseShop()
+print(shop.cheese)
+
+shop.cheese = "camembert"
+print(shop.cheese)
+
+shop.cheese = "cheddar"
+print(shop.cheese)
+
+del shop.cheese
+print(shop.cheese)
diff --git a/docs/examples/userguide/extension_types/dataclass.py b/docs/examples/userguide/extension_types/dataclass.py
new file mode 100644
index 000000000..d8ed68666
--- /dev/null
+++ b/docs/examples/userguide/extension_types/dataclass.py
@@ -0,0 +1,21 @@
+import cython
+try:
+ import typing
+ import dataclasses
+except ImportError:
+ pass # The modules don't actually have to exists for Cython to use them as annotations
+
+@cython.dataclasses.dataclass
+@cython.cclass
+class MyDataclass:
+ # fields can be declared using annotations
+ a: cython.int = 0
+ b: double = cython.dataclasses.field(default_factory = lambda: 10, repr=False)
+
+
+ c: str = 'hello'
+
+
+ # typing.InitVar and typing.ClassVar also work
+ d: dataclasses.InitVar[double] = 5
+ e: typing.ClassVar[list] = []
diff --git a/docs/examples/userguide/extension_types/dataclass.pyx b/docs/examples/userguide/extension_types/dataclass.pyx
index 0529890ba..b03d5f7b1 100644
--- a/docs/examples/userguide/extension_types/dataclass.pyx
+++ b/docs/examples/userguide/extension_types/dataclass.pyx
@@ -5,6 +5,7 @@ try:
except ImportError:
pass # The modules don't actually have to exists for Cython to use them as annotations
+
@cython.dataclasses.dataclass
cdef class MyDataclass:
# fields can be declared using annotations
@@ -16,5 +17,5 @@ cdef class MyDataclass:
c = "hello" # assignment of default value on a separate line
# typing.InitVar and typing.ClassVar also work
- d: dataclasses.InitVar[double] = 5
+ d: dataclasses.InitVar[cython.double] = 5
e: typing.ClassVar[list] = []
diff --git a/docs/examples/userguide/extension_types/dict_animal.py b/docs/examples/userguide/extension_types/dict_animal.py
new file mode 100644
index 000000000..a36dd3f89
--- /dev/null
+++ b/docs/examples/userguide/extension_types/dict_animal.py
@@ -0,0 +1,12 @@
+@cython.cclass
+class Animal:
+
+ number_of_legs: cython.int
+ __dict__: dict
+
+ def __cinit__(self, number_of_legs: cython.int):
+ self.number_of_legs = number_of_legs
+
+
+dog = Animal(4)
+dog.has_tail = True
diff --git a/docs/examples/userguide/extension_types/dict_animal.pyx b/docs/examples/userguide/extension_types/dict_animal.pyx
index 1aa0ccc11..ec8cf6f9a 100644
--- a/docs/examples/userguide/extension_types/dict_animal.pyx
+++ b/docs/examples/userguide/extension_types/dict_animal.pyx
@@ -1,3 +1,4 @@
+
cdef class Animal:
cdef int number_of_legs
diff --git a/docs/examples/userguide/extension_types/extendable_animal.py b/docs/examples/userguide/extension_types/extendable_animal.py
new file mode 100644
index 000000000..2eef69460
--- /dev/null
+++ b/docs/examples/userguide/extension_types/extendable_animal.py
@@ -0,0 +1,15 @@
+@cython.cclass
+class Animal:
+
+ number_of_legs: cython.int
+
+ def __cinit__(self, number_of_legs: cython.int):
+ self.number_of_legs = number_of_legs
+
+
+class ExtendableAnimal(Animal): # Note that we use class, not cdef class
+ pass
+
+
+dog = ExtendableAnimal(4)
+dog.has_tail = True
diff --git a/docs/examples/userguide/extension_types/extendable_animal.pyx b/docs/examples/userguide/extension_types/extendable_animal.pyx
index 701a93148..417760efd 100644
--- a/docs/examples/userguide/extension_types/extendable_animal.pyx
+++ b/docs/examples/userguide/extension_types/extendable_animal.pyx
@@ -1,3 +1,4 @@
+
cdef class Animal:
cdef int number_of_legs
@@ -11,4 +12,4 @@ class ExtendableAnimal(Animal): # Note that we use class, not cdef class
dog = ExtendableAnimal(4)
-dog.has_tail = True \ No newline at end of file
+dog.has_tail = True
diff --git a/docs/examples/userguide/extension_types/owned_pointer.py b/docs/examples/userguide/extension_types/owned_pointer.py
new file mode 100644
index 000000000..1c235a883
--- /dev/null
+++ b/docs/examples/userguide/extension_types/owned_pointer.py
@@ -0,0 +1,17 @@
+import cython
+from cython.cimports.libc.stdlib import free
+
+@cython.cclass
+class OwnedPointer:
+ ptr: cython.pointer(cython.void)
+
+ def __dealloc__(self):
+ if self.ptr is not cython.NULL:
+ free(self.ptr)
+
+ @staticmethod
+ @cython.cfunc
+ def create(ptr: cython.pointer(cython.void)):
+ p = OwnedPointer()
+ p.ptr = ptr
+ return p
diff --git a/docs/examples/userguide/extension_types/owned_pointer.pyx b/docs/examples/userguide/extension_types/owned_pointer.pyx
new file mode 100644
index 000000000..98b61d91c
--- /dev/null
+++ b/docs/examples/userguide/extension_types/owned_pointer.pyx
@@ -0,0 +1,17 @@
+
+from libc.stdlib cimport free
+
+
+cdef class OwnedPointer:
+ cdef void* ptr
+
+ def __dealloc__(self):
+ if self.ptr is not NULL:
+ free(self.ptr)
+
+
+ @staticmethod
+ cdef create(void* ptr):
+ p = OwnedPointer()
+ p.ptr = ptr
+ return p
diff --git a/docs/examples/userguide/extension_types/penguin.py b/docs/examples/userguide/extension_types/penguin.py
new file mode 100644
index 000000000..6db8eba16
--- /dev/null
+++ b/docs/examples/userguide/extension_types/penguin.py
@@ -0,0 +1,14 @@
+import cython
+
+@cython.cclass
+class Penguin:
+ food: object
+
+ def __cinit__(self, food):
+ self.food = food
+
+ def __init__(self, food):
+ print("eating!")
+
+normal_penguin = Penguin('fish')
+fast_penguin = Penguin.__new__(Penguin, 'wheat') # note: not calling __init__() !
diff --git a/docs/examples/userguide/extension_types/penguin.pyx b/docs/examples/userguide/extension_types/penguin.pyx
new file mode 100644
index 000000000..b890c9ffd
--- /dev/null
+++ b/docs/examples/userguide/extension_types/penguin.pyx
@@ -0,0 +1,14 @@
+
+
+
+cdef class Penguin:
+ cdef object food
+
+ def __cinit__(self, food):
+ self.food = food
+
+ def __init__(self, food):
+ print("eating!")
+
+normal_penguin = Penguin('fish')
+fast_penguin = Penguin.__new__(Penguin, 'wheat') # note: not calling __init__() !
diff --git a/docs/examples/userguide/extension_types/penguin2.py b/docs/examples/userguide/extension_types/penguin2.py
new file mode 100644
index 000000000..063563d16
--- /dev/null
+++ b/docs/examples/userguide/extension_types/penguin2.py
@@ -0,0 +1,12 @@
+import cython
+
+@cython.freelist(8)
+@cython.cclass
+class Penguin:
+ food: object
+ def __cinit__(self, food):
+ self.food = food
+
+penguin = Penguin('fish 1')
+penguin = None
+penguin = Penguin('fish 2') # does not need to allocate memory!
diff --git a/docs/examples/userguide/extension_types/penguin2.pyx b/docs/examples/userguide/extension_types/penguin2.pyx
new file mode 100644
index 000000000..726aeef8e
--- /dev/null
+++ b/docs/examples/userguide/extension_types/penguin2.pyx
@@ -0,0 +1,12 @@
+cimport cython
+
+
+@cython.freelist(8)
+cdef class Penguin:
+ cdef object food
+ def __cinit__(self, food):
+ self.food = food
+
+penguin = Penguin('fish 1')
+penguin = None
+penguin = Penguin('fish 2') # does not need to allocate memory!
diff --git a/docs/examples/userguide/extension_types/pets.py b/docs/examples/userguide/extension_types/pets.py
new file mode 100644
index 000000000..fc6497cb0
--- /dev/null
+++ b/docs/examples/userguide/extension_types/pets.py
@@ -0,0 +1,22 @@
+import cython
+
+@cython.cclass
+class Parrot:
+
+ @cython.cfunc
+ def describe(self) -> cython.void:
+ print("This parrot is resting.")
+
+@cython.cclass
+class Norwegian(Parrot):
+
+ @cython.cfunc
+ def describe(self) -> cython.void:
+ Parrot.describe(self)
+ print("Lovely plumage!")
+
+cython.declare(p1=Parrot, p2=Parrot)
+p1 = Parrot()
+p2 = Norwegian()
+print("p2:")
+p2.describe()
diff --git a/docs/examples/userguide/extension_types/pets.pyx b/docs/examples/userguide/extension_types/pets.pyx
new file mode 100644
index 000000000..bb06e059d
--- /dev/null
+++ b/docs/examples/userguide/extension_types/pets.pyx
@@ -0,0 +1,22 @@
+
+
+cdef class Parrot:
+
+
+
+ cdef void describe(self):
+ print("This parrot is resting.")
+
+
+cdef class Norwegian(Parrot):
+
+
+ cdef void describe(self):
+ Parrot.describe(self)
+ print("Lovely plumage!")
+
+cdef Parrot p1, p2
+p1 = Parrot()
+p2 = Norwegian()
+print("p2:")
+p2.describe()
diff --git a/docs/examples/userguide/extension_types/python_access.py b/docs/examples/userguide/extension_types/python_access.py
new file mode 100644
index 000000000..27478f50c
--- /dev/null
+++ b/docs/examples/userguide/extension_types/python_access.py
@@ -0,0 +1,7 @@
+import cython
+
+@cython.cclass
+class Shrubbery:
+ width = cython.declare(cython.int, visibility='public')
+ height = cython.declare(cython.int, visibility='public')
+ depth = cython.declare(cython.float, visibility='readonly')
diff --git a/docs/examples/userguide/extension_types/python_access.pyx b/docs/examples/userguide/extension_types/python_access.pyx
index 6d5225ec0..db11de63c 100644
--- a/docs/examples/userguide/extension_types/python_access.pyx
+++ b/docs/examples/userguide/extension_types/python_access.pyx
@@ -1,3 +1,7 @@
+
+
+
cdef class Shrubbery:
cdef public int width, height
+
cdef readonly float depth
diff --git a/docs/examples/userguide/extension_types/shrubbery.py b/docs/examples/userguide/extension_types/shrubbery.py
index 075664527..0e624a1d2 100644
--- a/docs/examples/userguide/extension_types/shrubbery.py
+++ b/docs/examples/userguide/extension_types/shrubbery.py
@@ -1,5 +1,3 @@
-from __future__ import print_function
-
@cython.cclass
class Shrubbery:
width: cython.int
diff --git a/docs/examples/userguide/extension_types/shrubbery.pyx b/docs/examples/userguide/extension_types/shrubbery.pyx
index b74dfbd1b..8c4e58776 100644
--- a/docs/examples/userguide/extension_types/shrubbery.pyx
+++ b/docs/examples/userguide/extension_types/shrubbery.pyx
@@ -1,6 +1,4 @@
from __future__ import print_function
-
-
cdef class Shrubbery:
cdef int width
cdef int height
diff --git a/docs/examples/userguide/extension_types/shrubbery_2.py b/docs/examples/userguide/extension_types/shrubbery_2.py
new file mode 100644
index 000000000..d6b722500
--- /dev/null
+++ b/docs/examples/userguide/extension_types/shrubbery_2.py
@@ -0,0 +1,10 @@
+import cython
+from cython.cimports.my_module import Shrubbery
+
+@cython.cfunc
+def another_shrubbery(sh1: Shrubbery) -> Shrubbery:
+ sh2: Shrubbery
+ sh2 = Shrubbery()
+ sh2.width = sh1.width
+ sh2.height = sh1.height
+ return sh2
diff --git a/docs/examples/userguide/extension_types/shrubbery_2.pyx b/docs/examples/userguide/extension_types/shrubbery_2.pyx
index d05d28243..4a7782735 100644
--- a/docs/examples/userguide/extension_types/shrubbery_2.pyx
+++ b/docs/examples/userguide/extension_types/shrubbery_2.pyx
@@ -1,5 +1,7 @@
+
from my_module cimport Shrubbery
+
cdef Shrubbery another_shrubbery(Shrubbery sh1):
cdef Shrubbery sh2
sh2 = Shrubbery()
diff --git a/docs/examples/userguide/extension_types/widen_shrubbery.py b/docs/examples/userguide/extension_types/widen_shrubbery.py
new file mode 100644
index 000000000..f69f4dc96
--- /dev/null
+++ b/docs/examples/userguide/extension_types/widen_shrubbery.py
@@ -0,0 +1,6 @@
+import cython
+from cython.cimports.my_module import Shrubbery
+
+@cython.cfunc
+def widen_shrubbery(sh: Shrubbery, extra_width):
+ sh.width = sh.width + extra_width
diff --git a/docs/examples/userguide/extension_types/widen_shrubbery.pyx b/docs/examples/userguide/extension_types/widen_shrubbery.pyx
index a312fbfd9..c6f58f00c 100644
--- a/docs/examples/userguide/extension_types/widen_shrubbery.pyx
+++ b/docs/examples/userguide/extension_types/widen_shrubbery.pyx
@@ -1,4 +1,6 @@
+
from my_module cimport Shrubbery
+
cdef widen_shrubbery(Shrubbery sh, extra_width):
sh.width = sh.width + extra_width
diff --git a/docs/examples/userguide/extension_types/wrapper_class.py b/docs/examples/userguide/extension_types/wrapper_class.py
new file mode 100644
index 000000000..b625ffebd
--- /dev/null
+++ b/docs/examples/userguide/extension_types/wrapper_class.py
@@ -0,0 +1,65 @@
+import cython
+from cython.cimports.libc.stdlib import malloc, free
+
+# Example C struct
+my_c_struct = cython.struct(
+ a = cython.int,
+ b = cython.int,
+)
+
+@cython.cclass
+class WrapperClass:
+ """A wrapper class for a C/C++ data structure"""
+ _ptr: cython.pointer(my_c_struct)
+ ptr_owner: cython.bint
+
+ def __cinit__(self):
+ self.ptr_owner = False
+
+ def __dealloc__(self):
+ # De-allocate if not null and flag is set
+ if self._ptr is not cython.NULL and self.ptr_owner is True:
+ free(self._ptr)
+ self._ptr = cython.NULL
+
+ def __init__(self):
+ # Prevent accidental instantiation from normal Python code
+ # since we cannot pass a struct pointer into a Python constructor.
+ raise TypeError("This class cannot be instantiated directly.")
+
+ # Extension class properties
+ @property
+ def a(self):
+ return self._ptr.a if self._ptr is not cython.NULL else None
+
+ @property
+ def b(self):
+ return self._ptr.b if self._ptr is not cython.NULL else None
+
+ @staticmethod
+ @cython.cfunc
+ def from_ptr(_ptr: cython.pointer(my_c_struct), owner: cython.bint=False) -> WrapperClass:
+ """Factory function to create WrapperClass objects from
+ given my_c_struct pointer.
+
+ Setting ``owner`` flag to ``True`` causes
+ the extension type to ``free`` the structure pointed to by ``_ptr``
+ when the wrapper object is deallocated."""
+ # Fast call to __new__() that bypasses the __init__() constructor.
+ wrapper: WrapperClass = WrapperClass.__new__(WrapperClass)
+ wrapper._ptr = _ptr
+ wrapper.ptr_owner = owner
+ return wrapper
+
+ @staticmethod
+ @cython.cfunc
+ def new_struct() -> WrapperClass:
+ """Factory function to create WrapperClass objects with
+ newly allocated my_c_struct"""
+ _ptr: cython.pointer(my_c_struct) = cython.cast(
+ cython.pointer(my_c_struct), malloc(cython.sizeof(my_c_struct)))
+ if _ptr is cython.NULL:
+ raise MemoryError
+ _ptr.a = 0
+ _ptr.b = 0
+ return WrapperClass.from_ptr(_ptr, owner=True)
diff --git a/docs/examples/userguide/extension_types/wrapper_class.pyx b/docs/examples/userguide/extension_types/wrapper_class.pyx
new file mode 100644
index 000000000..e2a0c3ff2
--- /dev/null
+++ b/docs/examples/userguide/extension_types/wrapper_class.pyx
@@ -0,0 +1,65 @@
+
+from libc.stdlib cimport malloc, free
+
+# Example C struct
+ctypedef struct my_c_struct:
+ int a
+ int b
+
+
+
+cdef class WrapperClass:
+ """A wrapper class for a C/C++ data structure"""
+ cdef my_c_struct *_ptr
+ cdef bint ptr_owner
+
+ def __cinit__(self):
+ self.ptr_owner = False
+
+ def __dealloc__(self):
+ # De-allocate if not null and flag is set
+ if self._ptr is not NULL and self.ptr_owner is True:
+ free(self._ptr)
+ self._ptr = NULL
+
+ def __init__(self):
+ # Prevent accidental instantiation from normal Python code
+ # since we cannot pass a struct pointer into a Python constructor.
+ raise TypeError("This class cannot be instantiated directly.")
+
+ # Extension class properties
+ @property
+ def a(self):
+ return self._ptr.a if self._ptr is not NULL else None
+
+ @property
+ def b(self):
+ return self._ptr.b if self._ptr is not NULL else None
+
+
+ @staticmethod
+ cdef WrapperClass from_ptr(my_c_struct *_ptr, bint owner=False):
+ """Factory function to create WrapperClass objects from
+ given my_c_struct pointer.
+
+ Setting ``owner`` flag to ``True`` causes
+ the extension type to ``free`` the structure pointed to by ``_ptr``
+ when the wrapper object is deallocated."""
+ # Fast call to __new__() that bypasses the __init__() constructor.
+ cdef WrapperClass wrapper = WrapperClass.__new__(WrapperClass)
+ wrapper._ptr = _ptr
+ wrapper.ptr_owner = owner
+ return wrapper
+
+
+ @staticmethod
+ cdef WrapperClass new_struct():
+ """Factory function to create WrapperClass objects with
+ newly allocated my_c_struct"""
+ cdef my_c_struct *_ptr = <my_c_struct *>malloc(sizeof(my_c_struct))
+
+ if _ptr is NULL:
+ raise MemoryError
+ _ptr.a = 0
+ _ptr.b = 0
+ return WrapperClass.from_ptr(_ptr, owner=True)
diff --git a/docs/examples/userguide/sharing_declarations/landscaping.py b/docs/examples/userguide/sharing_declarations/landscaping.py
new file mode 100644
index 000000000..2d2c4b5b7
--- /dev/null
+++ b/docs/examples/userguide/sharing_declarations/landscaping.py
@@ -0,0 +1,7 @@
+from cython.cimports.shrubbing import Shrubbery
+import shrubbing
+
+def main():
+ sh: Shrubbery
+ sh = shrubbing.standard_shrubbery()
+ print("Shrubbery size is", sh.width, 'x', sh.length)
diff --git a/docs/examples/userguide/sharing_declarations/lunch.py b/docs/examples/userguide/sharing_declarations/lunch.py
new file mode 100644
index 000000000..df56913eb
--- /dev/null
+++ b/docs/examples/userguide/sharing_declarations/lunch.py
@@ -0,0 +1,5 @@
+import cython
+from cython.cimports.c_lunch import eject_tomato as c_eject_tomato
+
+def eject_tomato(speed: cython.float):
+ c_eject_tomato(speed)
diff --git a/docs/examples/userguide/sharing_declarations/lunch.pyx b/docs/examples/userguide/sharing_declarations/lunch.pyx
index 8b0911510..fea5e4c87 100644
--- a/docs/examples/userguide/sharing_declarations/lunch.pyx
+++ b/docs/examples/userguide/sharing_declarations/lunch.pyx
@@ -1,3 +1,4 @@
+
cimport c_lunch
def eject_tomato(float speed):
diff --git a/docs/examples/userguide/sharing_declarations/restaurant.py b/docs/examples/userguide/sharing_declarations/restaurant.py
new file mode 100644
index 000000000..b4bdb2eba
--- /dev/null
+++ b/docs/examples/userguide/sharing_declarations/restaurant.py
@@ -0,0 +1,12 @@
+import cython
+from cython.cimports.dishes import spamdish, sausage
+
+@cython.cfunc
+def prepare(d: cython.pointer(spamdish)) -> cython.void:
+ d.oz_of_spam = 42
+ d.filler = sausage
+
+def serve():
+ d: spamdish
+ prepare(cython.address(d))
+ print(f'{d.oz_of_spam} oz spam, filler no. {d.filler}')
diff --git a/docs/examples/userguide/sharing_declarations/restaurant.pyx b/docs/examples/userguide/sharing_declarations/restaurant.pyx
index 3257c681b..f556646dc 100644
--- a/docs/examples/userguide/sharing_declarations/restaurant.pyx
+++ b/docs/examples/userguide/sharing_declarations/restaurant.pyx
@@ -1,4 +1,4 @@
-from __future__ import print_function
+
cimport dishes
from dishes cimport spamdish
diff --git a/docs/examples/userguide/sharing_declarations/setup_py.py b/docs/examples/userguide/sharing_declarations/setup_py.py
new file mode 100644
index 000000000..45ded0ff4
--- /dev/null
+++ b/docs/examples/userguide/sharing_declarations/setup_py.py
@@ -0,0 +1,4 @@
+from setuptools import setup
+from Cython.Build import cythonize
+
+setup(ext_modules=cythonize(["landscaping.py", "shrubbing.py"]))
diff --git a/docs/examples/userguide/sharing_declarations/setup.py b/docs/examples/userguide/sharing_declarations/setup_pyx.py
index 505b53e9d..505b53e9d 100644
--- a/docs/examples/userguide/sharing_declarations/setup.py
+++ b/docs/examples/userguide/sharing_declarations/setup_pyx.py
diff --git a/docs/examples/userguide/sharing_declarations/shrubbing.py b/docs/examples/userguide/sharing_declarations/shrubbing.py
new file mode 100644
index 000000000..27e20d631
--- /dev/null
+++ b/docs/examples/userguide/sharing_declarations/shrubbing.py
@@ -0,0 +1,10 @@
+import cython
+
+@cython.cclass
+class Shrubbery:
+ def __cinit__(self, w: cython.int, l: cython.int):
+ self.width = w
+ self.length = l
+
+def standard_shrubbery():
+ return Shrubbery(3, 7)
diff --git a/docs/examples/userguide/sharing_declarations/shrubbing.pyx b/docs/examples/userguide/sharing_declarations/shrubbing.pyx
index bb97e7e77..8598b5c98 100644
--- a/docs/examples/userguide/sharing_declarations/shrubbing.pyx
+++ b/docs/examples/userguide/sharing_declarations/shrubbing.pyx
@@ -1,3 +1,6 @@
+
+
+
cdef class Shrubbery:
def __cinit__(self, int w, int l):
self.width = w
diff --git a/docs/examples/userguide/sharing_declarations/spammery.py b/docs/examples/userguide/sharing_declarations/spammery.py
new file mode 100644
index 000000000..88554be4a
--- /dev/null
+++ b/docs/examples/userguide/sharing_declarations/spammery.py
@@ -0,0 +1,10 @@
+import cython
+from cython.cimports.volume import cube
+
+def menu(description, size):
+ print(description, ":", cube(size),
+ "cubic metres of spam")
+
+menu("Entree", 1)
+menu("Main course", 3)
+menu("Dessert", 2)
diff --git a/docs/examples/userguide/sharing_declarations/spammery.pyx b/docs/examples/userguide/sharing_declarations/spammery.pyx
index 16cbda06e..da11e737e 100644
--- a/docs/examples/userguide/sharing_declarations/spammery.pyx
+++ b/docs/examples/userguide/sharing_declarations/spammery.pyx
@@ -1,5 +1,4 @@
-from __future__ import print_function
-
+
from volume cimport cube
def menu(description, size):
diff --git a/docs/examples/userguide/sharing_declarations/volume.py b/docs/examples/userguide/sharing_declarations/volume.py
new file mode 100644
index 000000000..1f6ff9c72
--- /dev/null
+++ b/docs/examples/userguide/sharing_declarations/volume.py
@@ -0,0 +1,2 @@
+def cube(x):
+ return x * x * x
diff --git a/docs/examples/userguide/special_methods/total_ordering.py b/docs/examples/userguide/special_methods/total_ordering.py
new file mode 100644
index 000000000..7d164d6df
--- /dev/null
+++ b/docs/examples/userguide/special_methods/total_ordering.py
@@ -0,0 +1,13 @@
+import cython
+@cython.total_ordering
+@cython.cclass
+class ExtGe:
+ x: cython.int
+
+ def __ge__(self, other):
+ if not isinstance(other, ExtGe):
+ return NotImplemented
+ return self.x >= cython.cast(ExtGe, other).x
+
+ def __eq__(self, other):
+ return isinstance(other, ExtGe) and self.x == cython.cast(ExtGe, other).x
diff --git a/docs/examples/userguide/special_methods/total_ordering.pyx b/docs/examples/userguide/special_methods/total_ordering.pyx
new file mode 100644
index 000000000..06d2ccef7
--- /dev/null
+++ b/docs/examples/userguide/special_methods/total_ordering.pyx
@@ -0,0 +1,13 @@
+import cython
+
+@cython.total_ordering
+cdef class ExtGe:
+ cdef int x
+
+ def __ge__(self, other):
+ if not isinstance(other, ExtGe):
+ return NotImplemented
+ return self.x >= (<ExtGe>other).x
+
+ def __eq__(self, other):
+ return isinstance(other, ExtGe) and self.x == (<ExtGe>other).x
diff --git a/docs/src/quickstart/build.rst b/docs/src/quickstart/build.rst
index 5d9e8a307..3cbcfa087 100644
--- a/docs/src/quickstart/build.rst
+++ b/docs/src/quickstart/build.rst
@@ -18,6 +18,10 @@ one may want to read more about
There are several ways to build Cython code:
- Write a setuptools ``setup.py``. This is the normal and recommended way.
+ - Run the ``cythonize`` command-line utility. This is a good approach for
+ compiling a single Cython source file directly to an extension.
+ A source file can be built "in place" (so that the extension module is created
+ next to the source file, ready to be imported) with ``cythonize -i filename.pyx``.
- Use :ref:`Pyximport<pyximport>`, importing Cython ``.pyx`` files as if they
were ``.py`` files (using setuptools to compile and build in the background).
This method is easier than writing a ``setup.py``, but is not very flexible.
diff --git a/docs/src/quickstart/install.rst b/docs/src/quickstart/install.rst
index 8b5f4c350..04a47afdc 100644
--- a/docs/src/quickstart/install.rst
+++ b/docs/src/quickstart/install.rst
@@ -15,8 +15,10 @@ according to the system used:
- **Linux** The GNU C Compiler (gcc) is usually present, or easily
available through the package system. On Ubuntu or Debian, for
- instance, the command ``sudo apt-get install build-essential`` will
- fetch everything you need.
+ instance, it is part of the ``build-essential`` package. Next to a
+ C compiler, Cython requires the Python header files. On Ubuntu or
+ Debian, the command ``sudo apt-get install build-essential python3-dev``
+ will fetch everything you need.
- **Mac OS X** To retrieve gcc, one option is to install Apple's
XCode, which can be retrieved from the Mac OS X's install DVDs or
diff --git a/docs/src/tutorial/embedding.rst b/docs/src/tutorial/embedding.rst
index 3f6325428..819506cde 100644
--- a/docs/src/tutorial/embedding.rst
+++ b/docs/src/tutorial/embedding.rst
@@ -75,3 +75,10 @@ option. Or use the
script to embed multiple modules. See the
`embedding demo program <https://github.com/cython/cython/tree/master/Demos/embed>`_
for a complete example setup.
+
+Be aware that your application will not contain any external dependencies that
+you use (including Python standard library modules) and so may not be truly portable.
+If you want to generate a portable application we recommend using a specialized
+tool (e.g. `PyInstaller <https://pyinstaller.org/en/stable/>`_
+or `cx_freeze <https://cx-freeze.readthedocs.io/en/latest/index.html>`_) to find and
+bundle these dependencies.
diff --git a/docs/src/tutorial/pure.rst b/docs/src/tutorial/pure.rst
index a536f2b31..417b7d1b2 100644
--- a/docs/src/tutorial/pure.rst
+++ b/docs/src/tutorial/pure.rst
@@ -29,6 +29,7 @@ In pure mode, you are more or less restricted to code that can be expressed
beyond that can only be done in .pyx files with extended language syntax,
because it depends on features of the Cython compiler.
+.. _augmenting_pxd:
Augmenting .pxd
---------------
@@ -249,6 +250,8 @@ releasing or acquiring the GIL. The condition must be constant (at compile time)
A common use case for conditionally acquiring and releasing the GIL are fused types
that allow different GIL handling depending on the specific type (see :ref:`gil_conditional`).
+.. py:module:: cython.cimports
+
cimports
^^^^^^^^
diff --git a/docs/src/userguide/extension_types.rst b/docs/src/userguide/extension_types.rst
index 678ddf5c8..b2690dc49 100644
--- a/docs/src/userguide/extension_types.rst
+++ b/docs/src/userguide/extension_types.rst
@@ -9,20 +9,56 @@ Extension Types
Introduction
==============
+.. include::
+ ../two-syntax-variants-used
+
As well as creating normal user-defined classes with the Python class
statement, Cython also lets you create new built-in Python types, known as
:term:`extension types<Extension type>`. You define an extension type using the :keyword:`cdef` class
-statement. Here's an example:
+statement or decorating the class with the ``@cclass`` decorator. Here's an example:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.py
+
+ .. group-tab:: Cython
-.. literalinclude:: ../../examples/userguide/extension_types/shrubbery.pyx
+ .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.pyx
As you can see, a Cython extension type definition looks a lot like a Python
-class definition. Within it, you use the def statement to define methods that
+class definition. Within it, you use the :keyword:`def` statement to define methods that
can be called from Python code. You can even define many of the special
methods such as :meth:`__init__` as you would in Python.
-The main difference is that you can use the :keyword:`cdef` statement to define
-attributes. The attributes may be Python objects (either generic or of a
+The main difference is that you can define attributes using
+
+* the :keyword:`cdef` statement,
+* the :func:`cython.declare()` function or
+* the annotation of an attribute name.
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
+
+ @cython.cclass
+ class Shrubbery:
+ width = declare(cython.int)
+ height: cython.int
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cdef class Shrubbery:
+
+ cdef int width
+ cdef int height
+
+The attributes may be Python objects (either generic or of a
particular extension type), or they may be of any C data type. So you can use
extension types to wrap arbitrary C data structures and provide a Python-like
interface to them.
@@ -50,7 +86,15 @@ not Python access, which means that they are not accessible from Python code.
To make them accessible from Python code, you need to declare them as
:keyword:`public` or :keyword:`readonly`. For example:
-.. literalinclude:: ../../examples/userguide/extension_types/python_access.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/python_access.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/python_access.pyx
makes the width and height attributes readable and writable from Python code,
and the depth attribute readable but not writable.
@@ -74,15 +118,32 @@ Dynamic Attributes
It is not possible to add attributes to an extension type at runtime by default.
You have two ways of avoiding this limitation, both add an overhead when
-a method is called from Python code. Especially when calling ``cpdef`` methods.
+a method is called from Python code. Especially when calling hybrid methods declared
+with :keyword:`cpdef` in ``.pyx`` files or with the ``@ccall`` decorator.
+
+The first approach is to create a Python subclass:
-The first approach is to create a Python subclass.:
+.. tabs::
-.. literalinclude:: ../../examples/userguide/extension_types/extendable_animal.pyx
+ .. group-tab:: Pure Python
-Declaring a ``__dict__`` attribute is the second way of enabling dynamic attributes.:
+ .. literalinclude:: ../../examples/userguide/extension_types/extendable_animal.py
-.. literalinclude:: ../../examples/userguide/extension_types/dict_animal.pyx
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/extendable_animal.pyx
+
+Declaring a ``__dict__`` attribute is the second way of enabling dynamic attributes:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/dict_animal.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/dict_animal.pyx
Type declarations
===================
@@ -93,10 +154,24 @@ generic Python object. It knows this already in the case of the ``self``
parameter of the methods of that type, but in other cases you will have to use
a type declaration.
-For example, in the following function::
+For example, in the following function:
- cdef widen_shrubbery(sh, extra_width): # BAD
- sh.width = sh.width + extra_width
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
+
+ @cython.cfunc
+ def widen_shrubbery(sh, extra_width): # BAD
+ sh.width = sh.width + extra_width
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cdef widen_shrubbery(sh, extra_width): # BAD
+ sh.width = sh.width + extra_width
because the ``sh`` parameter hasn't been given a type, the width attribute
will be accessed by a Python attribute lookup. If the attribute has been
@@ -107,18 +182,35 @@ will be very inefficient. If the attribute is private, it will not work at all
The solution is to declare ``sh`` as being of type :class:`Shrubbery`, as
follows:
-.. literalinclude:: ../../examples/userguide/extension_types/widen_shrubbery.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/widen_shrubbery.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/widen_shrubbery.pyx
Now the Cython compiler knows that ``sh`` has a C attribute called
:attr:`width` and will generate code to access it directly and efficiently.
The same consideration applies to local variables, for example:
-.. literalinclude:: ../../examples/userguide/extension_types/shrubbery_2.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/shrubbery_2.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/shrubbery_2.pyx
.. note::
- We here ``cimport`` the class :class:`Shrubbery`, and this is necessary
- to declare the type at compile time. To be able to ``cimport`` an extension type,
+ Here, we *cimport* the class :class:`Shrubbery` (using the :keyword:`cimport` statement
+   or by importing from the special ``cython.cimports`` package), and this is necessary
+ to declare the type at compile time. To be able to cimport an extension type,
we split the class definition into two parts, one in a definition file and
the other in the corresponding implementation file. You should read
:ref:`sharing_extension_types` to learn to do that.
@@ -128,24 +220,61 @@ Type Testing and Casting
------------------------
Suppose I have a method :meth:`quest` which returns an object of type :class:`Shrubbery`.
-To access it's width I could write::
+To access its width I could write:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
- cdef Shrubbery sh = quest()
- print(sh.width)
+ sh: Shrubbery = quest()
+ print(sh.width)
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cdef Shrubbery sh = quest()
+ print(sh.width)
which requires the use of a local variable and performs a type test on assignment.
If you *know* the return value of :meth:`quest` will be of type :class:`Shrubbery`
-you can use a cast to write::
+you can use a cast to write:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
- print( (<Shrubbery>quest()).width )
+ .. code-block:: python
+
+ print( cython.cast(Shrubbery, quest()).width )
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ print( (<Shrubbery>quest()).width )
This may be dangerous if :meth:`quest()` is not actually a :class:`Shrubbery`, as it
will try to access width as a C struct member which may not exist. At the C level,
rather than raising an :class:`AttributeError`, either an nonsensical result will be
returned (interpreting whatever data is at that address as an int) or a segfault
-may result from trying to access invalid memory. Instead, one can write::
+may result from trying to access invalid memory. Instead, one can write:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
+
+ print( cython.cast(Shrubbery, quest(), typecheck=True).width )
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
- print( (<Shrubbery?>quest()).width )
+ print( (<Shrubbery?>quest()).width )
which performs a type check (possibly raising a :class:`TypeError`) before making the
cast and allowing the code to proceed.
@@ -155,14 +284,18 @@ For known builtin or extension types, Cython translates these into a
fast and safe type check that ignores changes to
the object's ``__class__`` attribute etc., so that after a successful
:meth:`isinstance` test, code can rely on the expected C structure of the
-extension type and its :keyword:`cdef` attributes and methods.
+extension type and its C-level attributes (stored in the object’s C struct) and
+:keyword:`cdef`/``@cfunc`` methods.
.. _extension_types_and_none:
Extension types and None
=========================
-When you declare a parameter or C variable as being of an extension type,
+Cython handles ``None`` values differently in C-like type declarations and when Python annotations are used.
+
+In :keyword:`cdef` declarations and C-like function argument declarations (``func(list x)``),
+when you declare an argument or C variable as having an extension or Python builtin type,
Cython will allow it to take on the value ``None`` as well as values of its
declared type. This is analogous to the way a C pointer can take on the value
``NULL``, and you need to exercise the same caution because of it. There is no
@@ -172,24 +305,24 @@ of an extension type (as in the widen_shrubbery function above), it's up to
you to make sure the reference you're using is not ``None`` -- in the
interests of efficiency, Cython does not check this.
-You need to be particularly careful when exposing Python functions which take
-extension types as arguments. If we wanted to make :func:`widen_shrubbery` a
-Python function, for example, if we simply wrote::
+With the C-like declaration syntax, you need to be particularly careful when
+exposing Python functions which take extension types as arguments::
def widen_shrubbery(Shrubbery sh, extra_width): # This is
sh.width = sh.width + extra_width # dangerous!
-then users of our module could crash it by passing ``None`` for the ``sh``
+The users of our module could crash it by passing ``None`` for the ``sh``
parameter.
-One way to fix this would be::
+As in Python, whenever it is unclear whether a variable can be ``None``,
+but the code requires a non-None value, an explicit check can help::
def widen_shrubbery(Shrubbery sh, extra_width):
if sh is None:
raise TypeError
sh.width = sh.width + extra_width
-but since this is anticipated to be such a frequent requirement, Cython
+but since this is anticipated to be such a frequent requirement, the Cython language
provides a more convenient way. Parameters of a Python function declared as an
extension type can have a ``not None`` clause::
@@ -199,18 +332,41 @@ extension type can have a ``not None`` clause::
Now the function will automatically check that ``sh`` is ``not None`` along
with checking that it has the right type.
+When annotations are used, the behaviour follows the Python typing semantics of
+`PEP-484 <https://www.python.org/dev/peps/pep-0484/>`_ instead.
+The value ``None`` is not allowed when a variable is annotated only with its plain type::
+
+ def widen_shrubbery(sh: Shrubbery, extra_width): # TypeError is raised
+ sh.width = sh.width + extra_width # when sh is None
+
+To also allow ``None``, ``typing.Optional[ ]`` must be used explicitly.
+For function arguments, this is also automatically allowed when they have a
+default argument of ``None``, e.g. ``func(x: list = None)`` does not require ``typing.Optional``::
+
+ import typing
+ def widen_shrubbery(sh: typing.Optional[Shrubbery], extra_width):
+ if sh is None:
+ # We want to raise a custom exception in case of a None value.
+ raise ValueError
+ sh.width = sh.width + extra_width
+
+The upside of using annotations here is that they are safe by default because
+you need to explicitly allow ``None`` values for them.
+
+
.. note::
- ``not None`` clause can only be used in Python functions (defined with
- :keyword:`def`) and not C functions (defined with :keyword:`cdef`). If
- you need to check whether a parameter to a C function is None, you will
+   The ``not None`` clause and ``typing.Optional`` can only be used in Python functions
+   (defined with :keyword:`def` and without the ``@cython.cfunc`` decorator), not in
+   C functions (defined with :keyword:`cdef` or decorated with ``@cython.cfunc``). If
+   you need to check whether a parameter to a C function is ``None``, you will
need to do it yourself.
.. note::
Some more things:
- * The self parameter of a method of an extension type is guaranteed never to
+ * The ``self`` parameter of a method of an extension type is guaranteed never to
be ``None``.
* When comparing a value with ``None``, keep in mind that, if ``x`` is a Python
object, ``x is None`` and ``x is not None`` are very efficient because they
@@ -232,23 +388,49 @@ extension types.
Properties
============
-You can declare properties in an extension class using the same syntax as in ordinary Python code::
+You can declare properties in an extension class using the same syntax as in ordinary Python code:
- cdef class Spam:
+.. tabs::
- @property
- def cheese(self):
- # This is called when the property is read.
- ...
+ .. group-tab:: Pure Python
- @cheese.setter
- def cheese(self, value):
- # This is called when the property is written.
- ...
+ .. code-block:: python
+
+ @cython.cclass
+ class Spam:
+ @property
+ def cheese(self):
+ # This is called when the property is read.
+ ...
+
+ @cheese.setter
+ def cheese(self, value):
+ # This is called when the property is written.
+ ...
+
+ @cheese.deleter
+ def cheese(self):
+ # This is called when the property is deleted.
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cdef class Spam:
- @cheese.deleter
- def cheese(self):
- # This is called when the property is deleted.
+ @property
+ def cheese(self):
+ # This is called when the property is read.
+ ...
+
+ @cheese.setter
+ def cheese(self, value):
+ # This is called when the property is written.
+ ...
+
+ @cheese.deleter
+ def cheese(self):
+ # This is called when the property is deleted.
There is also a special (deprecated) legacy syntax for defining properties in an extension class::
@@ -277,42 +459,17 @@ corresponding operation is attempted.
Here's a complete example. It defines a property which adds to a list each
time it is written to, returns the list when it is read, and empties the list
-when it is deleted.::
-
- # cheesy.pyx
- cdef class CheeseShop:
-
- cdef object cheeses
-
- def __cinit__(self):
- self.cheeses = []
+when it is deleted:
- @property
- def cheese(self):
- return "We don't have: %s" % self.cheeses
+.. tabs::
- @cheese.setter
- def cheese(self, value):
- self.cheeses.append(value)
+ .. group-tab:: Pure Python
- @cheese.deleter
- def cheese(self):
- del self.cheeses[:]
+ .. literalinclude:: ../../examples/userguide/extension_types/cheesy.py
- # Test input
- from cheesy import CheeseShop
+ .. group-tab:: Cython
- shop = CheeseShop()
- print(shop.cheese)
-
- shop.cheese = "camembert"
- print(shop.cheese)
-
- shop.cheese = "cheddar"
- print(shop.cheese)
-
- del shop.cheese
- print(shop.cheese)
+ .. literalinclude:: ../../examples/userguide/extension_types/cheesy.pyx
.. code-block:: text
@@ -328,20 +485,39 @@ Subclassing
=============
If an extension type inherits from other types, the first base class must be
-a built-in type or another extension type::
+a built-in type or another extension type:
- cdef class Parrot:
- ...
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
+
+ @cython.cclass
+ class Parrot:
+ ...
+
+ @cython.cclass
+ class Norwegian(Parrot):
+ ...
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cdef class Parrot:
+ ...
- cdef class Norwegian(Parrot):
- ...
+
+ cdef class Norwegian(Parrot):
+ ...
A complete definition of the base type must be available to Cython, so if the
base type is a built-in type, it must have been previously declared as an
extern extension type. If the base type is defined in another Cython module, it
must either be declared as an extern extension type or imported using the
-:keyword:`cimport` statement.
+:keyword:`cimport` statement or by importing from the special ``cython.cimports`` package.
Multiple inheritance is supported, however the second and subsequent base
classes must be an ordinary Python class (not an extension type or a built-in
@@ -354,13 +530,30 @@ must be compatible).
There is a way to prevent extension types from
being subtyped in Python. This is done via the ``final`` directive,
-usually set on an extension type using a decorator::
+usually set on an extension type using a decorator:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
- cimport cython
+ import cython
- @cython.final
- cdef class Parrot:
- def done(self): pass
+ @cython.final
+ @cython.cclass
+ class Parrot:
+ def done(self): pass
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cimport cython
+
+ @cython.final
+ cdef class Parrot:
+ def done(self): pass
Trying to create a Python subclass from this type will raise a
:class:`TypeError` at runtime. Cython will also prevent subtyping a
@@ -375,32 +568,25 @@ C methods
=========
Extension types can have C methods as well as Python methods. Like C
-functions, C methods are declared using :keyword:`cdef` or :keyword:`cpdef` instead of
-:keyword:`def`. C methods are "virtual", and may be overridden in derived
-extension types. In addition, :keyword:`cpdef` methods can even be overridden by python
-methods when called as C method. This adds a little to their calling overhead
-compared to a :keyword:`cdef` method::
+functions, C methods are declared using
- # pets.pyx
- cdef class Parrot:
+* :keyword:`cdef` instead of :keyword:`def` or ``@cfunc`` decorator for *C methods*, or
+* :keyword:`cpdef` instead of :keyword:`def` or ``@ccall`` decorator for *hybrid methods*.
- cdef void describe(self):
- print("This parrot is resting.")
+C methods are "virtual", and may be overridden in derived
+extension types. In addition, :keyword:`cpdef`/``@ccall`` methods can even be overridden by Python
+methods when called as C methods. This adds a little to their calling overhead
+compared to a :keyword:`cdef`/``@cfunc`` method:
+
+.. tabs::
- cdef class Norwegian(Parrot):
+ .. group-tab:: Pure Python
- cdef void describe(self):
- Parrot.describe(self)
- print("Lovely plumage!")
+ .. literalinclude:: ../../examples/userguide/extension_types/pets.py
+ .. group-tab:: Cython
- cdef Parrot p1, p2
- p1 = Parrot()
- p2 = Norwegian()
- print("p1:")
- p1.describe()
- print("p2:")
- p2.describe()
+ .. literalinclude:: ../../examples/userguide/extension_types/pets.pyx
.. code-block:: text
@@ -416,22 +602,23 @@ method using the usual Python technique, i.e.::
Parrot.describe(self)
-`cdef` methods can be declared static by using the @staticmethod decorator.
+:keyword:`cdef`/``@cfunc`` methods can be declared static by using the ``@staticmethod`` decorator.
This can be especially useful for constructing classes that take non-Python
-compatible types.::
+compatible types:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.py
- cdef class OwnedPointer:
- cdef void* ptr
+ .. group-tab:: Cython
- def __dealloc__(self):
- if self.ptr is not NULL:
- free(self.ptr)
+ .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.pyx
- @staticmethod
- cdef create(void* ptr):
- p = OwnedPointer()
- p.ptr = ptr
- return p
+.. note::
+
+   Cython currently does not support decorating :keyword:`cdef`/``@cfunc`` methods with the ``@classmethod`` decorator.
.. _forward_declaring_extension_types:
@@ -460,19 +647,17 @@ Fast instantiation
Cython provides two ways to speed up the instantiation of extension types.
The first one is a direct call to the ``__new__()`` special static method,
as known from Python. For an extension type ``Penguin``, you could use
-the following code::
+the following code:
+
+.. tabs::
- cdef class Penguin:
- cdef object food
+ .. group-tab:: Pure Python
- def __cinit__(self, food):
- self.food = food
+ .. literalinclude:: ../../examples/userguide/extension_types/penguin.py
- def __init__(self, food):
- print("eating!")
+ .. group-tab:: Cython
- normal_penguin = Penguin('fish')
- fast_penguin = Penguin.__new__(Penguin, 'wheat') # note: not calling __init__() !
+ .. literalinclude:: ../../examples/userguide/extension_types/penguin.pyx
Note that the path through ``__new__()`` will *not* call the type's
``__init__()`` method (again, as known from Python). Thus, in the example
@@ -480,24 +665,23 @@ above, the first instantiation will print ``eating!``, but the second will
not. This is only one of the reasons why the ``__cinit__()`` method is
safer than the normal ``__init__()`` method for initialising extension types
and bringing them into a correct and safe state.
-See section :ref:`_initialisation_methods` about the differences.
+See the :ref:`Initialisation Methods Section <initialisation_methods>` about
+the differences.
The second performance improvement applies to types that are often created
and deleted in a row, so that they can benefit from a freelist. Cython
provides the decorator ``@cython.freelist(N)`` for this, which creates a
-statically sized freelist of ``N`` instances for a given type. Example::
+statically sized freelist of ``N`` instances for a given type. Example:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
- cimport cython
+ .. literalinclude:: ../../examples/userguide/extension_types/penguin2.py
- @cython.freelist(8)
- cdef class Penguin:
- cdef object food
- def __cinit__(self, food):
- self.food = food
+ .. group-tab:: Cython
- penguin = Penguin('fish 1')
- penguin = None
- penguin = Penguin('fish 2') # does not need to allocate memory!
+ .. literalinclude:: ../../examples/userguide/extension_types/penguin2.pyx
.. _existing-pointers-instantiation:
@@ -508,63 +692,17 @@ It is quite common to want to instantiate an extension class from an existing
(pointer to a) data structure, often as returned by external C/C++ functions.
As extension classes can only accept Python objects as arguments in their
-constructors, this necessitates the use of factory functions. For example, ::
-
- from libc.stdlib cimport malloc, free
-
- # Example C struct
- ctypedef struct my_c_struct:
- int a
- int b
-
-
- cdef class WrapperClass:
- """A wrapper class for a C/C++ data structure"""
- cdef my_c_struct *_ptr
- cdef bint ptr_owner
-
- def __cinit__(self):
- self.ptr_owner = False
-
- def __dealloc__(self):
- # De-allocate if not null and flag is set
- if self._ptr is not NULL and self.ptr_owner is True:
- free(self._ptr)
- self._ptr = NULL
-
- # Extension class properties
- @property
- def a(self):
- return self._ptr.a if self._ptr is not NULL else None
-
- @property
- def b(self):
- return self._ptr.b if self._ptr is not NULL else None
-
- @staticmethod
- cdef WrapperClass from_ptr(my_c_struct *_ptr, bint owner=False):
- """Factory function to create WrapperClass objects from
- given my_c_struct pointer.
-
- Setting ``owner`` flag to ``True`` causes
- the extension type to ``free`` the structure pointed to by ``_ptr``
- when the wrapper object is deallocated."""
- # Call to __new__ bypasses __init__ constructor
- cdef WrapperClass wrapper = WrapperClass.__new__(WrapperClass)
- wrapper._ptr = _ptr
- wrapper.ptr_owner = owner
- return wrapper
-
- @staticmethod
- cdef WrapperClass new_struct():
- """Factory function to create WrapperClass objects with
- newly allocated my_c_struct"""
- cdef my_c_struct *_ptr = <my_c_struct *>malloc(sizeof(my_c_struct))
- if _ptr is NULL:
- raise MemoryError
- _ptr.a = 0
- _ptr.b = 0
- return WrapperClass.from_ptr(_ptr, owner=True)
+constructors, this necessitates the use of factory functions or factory methods. For example:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/wrapper_class.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/wrapper_class.pyx
To then create a ``WrapperClass`` object from an existing ``my_c_struct``
@@ -606,13 +744,30 @@ Making extension types weak-referenceable
By default, extension types do not support having weak references made to
them. You can enable weak referencing by declaring a C attribute of type
-object called :attr:`__weakref__`. For example,::
+object called :attr:`__weakref__`. For example:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
+
+ @cython.cclass
+ class ExplodingAnimal:
+ """This animal will self-destruct when it is
+ no longer strongly referenced."""
+
+ __weakref__: object
- cdef class ExplodingAnimal:
- """This animal will self-destruct when it is
- no longer strongly referenced."""
+ .. group-tab:: Cython
- cdef object __weakref__
+ .. code-block:: cython
+
+ cdef class ExplodingAnimal:
+ """This animal will self-destruct when it is
+ no longer strongly referenced."""
+
+ cdef object __weakref__
Controlling deallocation and garbage collection in CPython
@@ -690,12 +845,28 @@ CPython invented a mechanism for this called the *trashcan*. It limits the
recursion depth of deallocations by delaying some deallocations.
By default, Cython extension types do not use the trashcan but it can be
-enabled by setting the ``trashcan`` directive to ``True``. For example::
+enabled by setting the ``trashcan`` directive to ``True``. For example:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
- cimport cython
- @cython.trashcan(True)
- cdef class Object:
- cdef dict __dict__
+ import cython
+ @cython.trashcan(True)
+ @cython.cclass
+ class Object:
+ __dict__: dict
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cimport cython
+ @cython.trashcan(True)
+ cdef class Object:
+ cdef dict __dict__
Trashcan usage is inherited by subclasses
(unless explicitly disabled by ``@cython.trashcan(False)``).
@@ -719,15 +890,34 @@ have triggered a call to ``tp_clear`` to clear the object
In that case, any object references have vanished when ``__dealloc__``
is called. Now your cleanup code lost access to the objects it has to clean up.
To fix this, you can disable clearing instances of a specific class by using
-the ``no_gc_clear`` directive::
+the ``no_gc_clear`` directive:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
- @cython.no_gc_clear
- cdef class DBCursor:
- cdef DBConnection conn
- cdef DBAPI_Cursor *raw_cursor
- # ...
- def __dealloc__(self):
- DBAPI_close_cursor(self.conn.raw_conn, self.raw_cursor)
+ .. code-block:: python
+
+ @cython.no_gc_clear
+ @cython.cclass
+ class DBCursor:
+ conn: DBConnection
+ raw_cursor: cython.pointer(DBAPI_Cursor)
+ # ...
+ def __dealloc__(self):
+ DBAPI_close_cursor(self.conn.raw_conn, self.raw_cursor)
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ @cython.no_gc_clear
+ cdef class DBCursor:
+ cdef DBConnection conn
+ cdef DBAPI_Cursor *raw_cursor
+ # ...
+ def __dealloc__(self):
+ DBAPI_close_cursor(self.conn.raw_conn, self.raw_cursor)
This example tries to close a cursor via a database connection when the Python
object is destroyed. The ``DBConnection`` object is kept alive by the reference
@@ -747,12 +937,29 @@ but the compiler won't be able to prove this. This would be the case if
the class can never reference itself, even indirectly.
In that case, you can manually disable cycle collection by using the
``no_gc`` directive, but beware that doing so when in fact the extension type
-can participate in cycles could cause memory leaks ::
+can participate in cycles could cause memory leaks:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
+
+ @cython.no_gc
+ @cython.cclass
+ class UserInfo:
+ name: str
+ addresses: tuple
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
- @cython.no_gc
- cdef class UserInfo:
- cdef str name
- cdef tuple addresses
+ @cython.no_gc
+ cdef class UserInfo:
+
+ cdef str name
+ cdef tuple addresses
If you can be sure addresses will contain only references to strings,
the above would be safe, and it may yield a significant speedup, depending on
@@ -785,6 +992,13 @@ declaration makes an extension type defined in external C code available to a
Cython module. A public extension type declaration makes an extension type
defined in a Cython module available to external C code.
+.. note::
+
+   Cython currently does not support extension types declared as extern or public
+   in Pure Python mode. This is not considered an issue since public/extern extension
+   types are most commonly declared in ``.pxd`` files and not in ``.py`` files.
+
+
.. _external_extension_types:
External extension types
@@ -801,7 +1015,7 @@ objects defined in the Python core or in a non-Cython extension module.
:ref:`sharing-declarations`.
Here is an example which will let you get at the C-level members of the
-built-in complex object.::
+built-in complex object::
from __future__ import print_function
@@ -1072,7 +1286,15 @@ can only be applied to extension types (types marked ``cdef`` or created with th
``cython.cclass`` decorator) and not to regular classes. If
you need to define special properties on a field then use ``cython.dataclasses.field``
-.. literalinclude:: ../../examples/userguide/extension_types/dataclass.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/dataclass.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/dataclass.pyx
You may use C-level types such as structs, pointers, or C++ classes.
However, you may find these types are not compatible with the auto-generated
diff --git a/docs/src/userguide/external_C_code.rst b/docs/src/userguide/external_C_code.rst
index b080ecf0e..2e977243d 100644
--- a/docs/src/userguide/external_C_code.rst
+++ b/docs/src/userguide/external_C_code.rst
@@ -471,7 +471,9 @@ For example, in the following snippet that includes :file:`grail.h`:
}
This C code can then be built together with the Cython-generated C code
-in a single program (or library).
+in a single program (or library). Be aware that this program will not include
+any external dependencies that your module uses. Therefore, this will typically
+not generate a truly portable application.
In Python 3.x, calling the module init function directly should be avoided. Instead,
use the `inittab mechanism <https://docs.python.org/3/c-api/import.html#c._inittab>`_
diff --git a/docs/src/userguide/language_basics.rst b/docs/src/userguide/language_basics.rst
index 593542eae..7d056bdfb 100644
--- a/docs/src/userguide/language_basics.rst
+++ b/docs/src/userguide/language_basics.rst
@@ -652,7 +652,7 @@ through defined error return values. For functions that return a Python object
``NULL`` pointer, so any function returning a Python object has a well-defined
error return value.
-While this is always the case for C functions, functions
+While this is always the case for Python functions, functions
defined as C functions or ``cpdef``/``@ccall`` functions can return arbitrary C types,
which do not have such a well-defined error return value. Thus, if an
exception is detected in such a function, a warning message is printed,
diff --git a/docs/src/userguide/migrating_to_cy30.rst b/docs/src/userguide/migrating_to_cy30.rst
index 357132887..1105ee15d 100644
--- a/docs/src/userguide/migrating_to_cy30.rst
+++ b/docs/src/userguide/migrating_to_cy30.rst
@@ -172,3 +172,20 @@ rather than relying on the user to test and cast the type of each operand.
The old behaviour can be restored with the
:ref:`directive <compiler-directives>` ``c_api_binop_methods=True``.
More details are given in :ref:`arithmetic_methods`.
+
+Annotation typing
+=================
+
+Cython 3 has made substantial improvements in recognising types in
+annotations and it is well worth reading
+:ref:`the pure Python tutorial<pep484_type_annotations>` to understand
+some of the improvements.
+
+A notable backwards-compatible change is that ``x: int`` is now typed
+such that ``x`` is an exact Python ``int`` (Cython 0.29 would accept
+any Python object for ``x``).
+
+To make it easier to handle cases where your interpretation of type
+annotations differs from Cython's, Cython 3 now supports setting the
+``annotation_typing`` :ref:`directive <compiler-directives>` on a
+per-class or per-function level.
diff --git a/docs/src/userguide/sharing_declarations.rst b/docs/src/userguide/sharing_declarations.rst
index 70e29e2b2..6beceda57 100644
--- a/docs/src/userguide/sharing_declarations.rst
+++ b/docs/src/userguide/sharing_declarations.rst
@@ -6,6 +6,9 @@
Sharing Declarations Between Cython Modules
********************************************
+.. include::
+ ../two-syntax-variants-used
+
This section describes how to make C declarations, functions and extension
types in one Cython module available for use in another Cython module.
These facilities are closely modeled on the Python import mechanism,
@@ -17,13 +20,13 @@ Definition and Implementation files
A Cython module can be split into two parts: a definition file with a ``.pxd``
suffix, containing C declarations that are to be available to other Cython
-modules, and an implementation file with a ``.pyx`` suffix, containing
+modules, and an implementation file with a ``.pyx``/``.py`` suffix, containing
everything else. When a module wants to use something declared in another
module's definition file, it imports it using the :keyword:`cimport`
-statement.
+statement or by importing from the special :py:mod:`cython.cimports` package.
A ``.pxd`` file that consists solely of extern declarations does not need
-to correspond to an actual ``.pyx`` file or Python module. This can make it a
+to correspond to an actual ``.pyx``/``.py`` file or Python module. This can make it a
convenient place to put common declarations, for example declarations of
functions from an :ref:`external library <external-C-code>` that one
wants to use in several modules.
@@ -41,8 +44,8 @@ A definition file can contain:
It cannot contain the implementations of any C or Python functions, or any
Python class definitions, or any executable statements. It is needed when one
-wants to access :keyword:`cdef` attributes and methods, or to inherit from
-:keyword:`cdef` classes defined in this module.
+wants to access :keyword:`cdef`/``@cfunc`` attributes and methods, or to inherit from
+:keyword:`cdef`/``@cclass`` classes defined in this module.
.. note::
@@ -70,23 +73,45 @@ The cimport statement
The :keyword:`cimport` statement is used in a definition or
implementation file to gain access to names declared in another definition
file. Its syntax exactly parallels that of the normal Python import
-statement::
+statement. When pure Python syntax is used, the same effect can be achieved by
+importing from the special :py:mod:`cython.cimports` package. In the following text,
+the term ``cimport`` refers to using either the :keyword:`cimport` statement or
+the :py:mod:`cython.cimports` package.
- cimport module [, module...]
+.. tabs::
- from module cimport name [as name] [, name [as name] ...]
+ .. group-tab:: Pure Python
-Here is an example. :file:`dishes.pxd` is a definition file which exports a
-C data type. :file:`restaurant.pyx` is an implementation file which imports and
-uses it.
+ .. code-block:: python
+
+ from cython.cimports.module import name [as name][, name [as name] ...]
+
+ .. group-tab:: Cython
-:file:`dishes.pxd`:
+ .. code-block:: cython
+
+ cimport module [, module...]
+
+ from module cimport name [as name] [, name [as name] ...]
+
+Here is an example. :file:`dishes.pxd` is a definition file which exports a
+C data type. :file:`restaurant.pyx`/:file:`restaurant.py` is an implementation file
+which imports and uses it.
.. literalinclude:: ../../examples/userguide/sharing_declarations/dishes.pxd
+ :caption: dishes.pxd
+
+.. tabs::
+
+ .. group-tab:: Pure Python
-:file:`restaurant.pyx`:
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/restaurant.py
+         :caption: restaurant.py
-.. literalinclude:: ../../examples/userguide/sharing_declarations/restaurant.pyx
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/restaurant.pyx
+         :caption: restaurant.pyx
It is important to understand that the :keyword:`cimport` statement can only
be used to import C data types, C functions and variables, and extension
@@ -116,8 +141,8 @@ option to ``cythonize()``), as well as ``sys.path``.
Using ``package_data`` to install ``.pxd`` files in your ``setup.py`` script
allows other packages to cimport items from your module as a dependency.
-Also, whenever you compile a file :file:`modulename.pyx`, the corresponding
-definition file :file:`modulename.pxd` is first searched for along the
+Also, whenever you compile a file :file:`modulename.pyx`/:file:`modulename.py`,
+the corresponding definition file :file:`modulename.pxd` is first searched for along the
include path (but not ``sys.path``), and if found, it is processed before
processing the ``.pyx`` file.
@@ -132,16 +157,23 @@ for an imaginary module, and :keyword:`cimport` that module. You can then
refer to the C functions by qualifying them with the name of the module.
Here's an example:
-:file:`c_lunch.pxd`:
-
.. literalinclude:: ../../examples/userguide/sharing_declarations/c_lunch.pxd
+ :caption: c_lunch.pxd
+
+.. tabs::
-:file:`lunch.pyx`:
+ .. group-tab:: Pure Python
-.. literalinclude:: ../../examples/userguide/sharing_declarations/lunch.pyx
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/lunch.py
+ :caption: lunch.py
-You don't need any :file:`c_lunch.pyx` file, because the only things defined
-in :file:`c_lunch.pxd` are extern C entities. There won't be any actual
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/lunch.pyx
+ :caption: lunch.pyx
+
+You don't need any :file:`c_lunch.pyx`/:file:`c_lunch.py` file, because the only
+things defined in :file:`c_lunch.pxd` are extern C entities. There won't be any actual
``c_lunch`` module at run time, but that doesn't matter; the
:file:`c_lunch.pxd` file has done its job of providing an additional namespace
at compile time.
@@ -154,24 +186,32 @@ C functions defined at the top level of a module can be made available via
:keyword:`cimport` by putting headers for them in the ``.pxd`` file, for
example:
-:file:`volume.pxd`:
-
.. literalinclude:: ../../examples/userguide/sharing_declarations/volume.pxd
+ :caption: volume.pxd
-:file:`volume.pyx`:
+.. tabs::
-.. literalinclude:: ../../examples/userguide/sharing_declarations/volume.pyx
+ .. group-tab:: Pure Python
-:file:`spammery.pyx`:
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/volume.py
+ :caption: volume.py
-.. literalinclude:: ../../examples/userguide/sharing_declarations/spammery.pyx
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/spammery.py
+ :caption: spammery.py
-.. note::
+ .. note::
+
+      The type declarations for the function ``cube()`` in :file:`volume.py` are
+      omitted, since they are already provided by the ``.pxd`` definition file.
+      See :ref:`augmenting_pxd` and GitHub issue :issue:`4388`.
+
+ .. group-tab:: Cython
- When a module exports a C function in this way, an object appears in the
- module dictionary under the function's name. However, you can't make use of
- this object from Python, nor can you use it from Cython using a normal import
- statement; you have to use :keyword:`cimport`.
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/volume.pyx
+ :caption: volume.pyx
+
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/spammery.pyx
+ :caption: spammery.pyx
.. _sharing_extension_types:
@@ -193,34 +233,47 @@ Python methods.
Here is an example of a module which defines and exports an extension type,
and another module which uses it:
-:file:`shrubbing.pxd`:
-
.. literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.pxd
+ :caption: shrubbing.pxd
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.py
+ :caption: shrubbing.py
+
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/landscaping.py
+ :caption: landscaping.py
-:file:`shrubbing.pyx`:
+ One would then need to compile both of these modules, e.g. using
-.. literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.pyx
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/setup_py.py
+ :caption: setup.py
-:file:`landscaping.pyx`:
+ .. group-tab:: Cython
-.. literalinclude:: ../../examples/userguide/sharing_declarations/landscaping.pyx
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.pyx
+ :caption: shrubbing.pyx
-One would then need to compile both of these modules, e.g. using
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/landscaping.pyx
+ :caption: landscaping.pyx
-:file:`setup.py`:
+ One would then need to compile both of these modules, e.g. using
-.. literalinclude:: ../../examples/userguide/sharing_declarations/setup.py
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/setup_pyx.py
+ :caption: setup.py
Some things to note about this example:
-* There is a :keyword:`cdef` class Shrubbery declaration in both
- :file:`Shrubbing.pxd` and :file:`Shrubbing.pyx`. When the Shrubbing module
+* There is a :keyword:`cdef`/``@cclass`` class Shrubbery declaration in both
+ :file:`shrubbing.pxd` and :file:`shrubbing.pyx`. When the shrubbing module
is compiled, these two declarations are combined into one.
-* In Landscaping.pyx, the :keyword:`cimport` Shrubbing declaration allows us
- to refer to the Shrubbery type as :class:`Shrubbing.Shrubbery`. But it
- doesn't bind the name Shrubbing in Landscaping's module namespace at run
- time, so to access :func:`Shrubbing.standard_shrubbery` we also need to
- ``import Shrubbing``.
+* In :file:`landscaping.pyx`/:file:`landscaping.py`, the :keyword:`cimport` shrubbing
+ declaration allows us to refer to the Shrubbery type as :class:`shrubbing.Shrubbery`.
+ But it doesn't bind the name shrubbing in landscaping's module namespace at run
+ time, so to access :func:`shrubbing.standard_shrubbery` we also need to
+ ``import shrubbing``.
* One caveat if you use setuptools instead of distutils, the default
action when running ``python setup.py install`` is to create a zipped
``egg`` file which will not work with ``cimport`` for ``pxd`` files
@@ -234,8 +287,8 @@ Versioning
``.pxd`` files can be labelled with a minimum Cython version as part of
their file name, similar to the version tagging of ``.so`` files in PEP 3149.
-For example a file called :file:`Shrubbing.cython-30.pxd` will only be
-found by ``cimport Shrubbing`` on Cython 3.0 and higher. Cython will use the
+For example a file called :file:`shrubbing.cython-30.pxd` will only be
+found by ``cimport shrubbing`` on Cython 3.0 and higher. Cython will use the
file tagged with the highest compatible version number.
Note that versioned files that are distributed across different directories
diff --git a/docs/src/userguide/source_files_and_compilation.rst b/docs/src/userguide/source_files_and_compilation.rst
index edf51213e..a833c61ed 100644
--- a/docs/src/userguide/source_files_and_compilation.rst
+++ b/docs/src/userguide/source_files_and_compilation.rst
@@ -946,7 +946,8 @@ Cython code. Here is the list of currently supported directives:
Uses function argument annotations to determine the type of variables. Default
is True, but can be disabled. Since Python does not enforce types given in
annotations, setting to False gives greater compatibility with Python code.
- Must be set globally.
+ From Cython 3.0, ``annotation_typing`` can be set on a per-function or
+ per-class basis.
``emit_code_comments`` (True / False)
Copy the original source code line by line into C code comments in the generated
diff --git a/docs/src/userguide/special_methods.rst b/docs/src/userguide/special_methods.rst
index af702f3c3..e6635b502 100644
--- a/docs/src/userguide/special_methods.rst
+++ b/docs/src/userguide/special_methods.rst
@@ -3,6 +3,9 @@
Special Methods of Extension Types
===================================
+.. include::
+ ../two-syntax-variants-used
+
This page describes the special methods currently supported by Cython extension
types. A complete list of all the special methods appears in the table at the
bottom. Some of these methods behave differently from their Python
@@ -12,7 +15,8 @@ mention.
.. Note::
Everything said on this page applies only to extension types, defined
- with the :keyword:`cdef` class statement. It doesn't apply to classes defined with the
+   with the :keyword:`cdef` class statement or decorated with the ``@cclass`` decorator.
+ It doesn't apply to classes defined with the
Python :keyword:`class` statement, where the normal Python rules apply.
.. _declaration:
@@ -20,7 +24,7 @@ mention.
Declaration
------------
Special methods of extension types must be declared with :keyword:`def`, not
-:keyword:`cdef`. This does not impact their performance--Python uses different
+:keyword:`cdef`/``@cfunc``. This does not impact their performance--Python uses different
calling conventions to invoke these special methods.
.. _docstrings:
@@ -225,19 +229,15 @@ Depending on the application, one way or the other may be better:
decorator specifically for ``cdef`` classes. (Normal Python classes can use
the original ``functools`` decorator.)
- .. code-block:: cython
+.. tabs::
+
+ .. group-tab:: Pure Python
- @cython.total_ordering
- cdef class ExtGe:
- cdef int x
+ .. literalinclude:: ../../examples/userguide/special_methods/total_ordering.py
- def __ge__(self, other):
- if not isinstance(other, ExtGe):
- return NotImplemented
- return self.x >= (<ExtGe>other).x
+ .. group-tab:: Cython
- def __eq__(self, other):
- return isinstance(other, ExtGe) and self.x == (<ExtGe>other).x
+ .. literalinclude:: ../../examples/userguide/special_methods/total_ordering.pyx
.. _the__next__method:
diff --git a/pyximport/_pyximport2.py b/pyximport/_pyximport2.py
new file mode 100644
index 000000000..b2077826a
--- /dev/null
+++ b/pyximport/_pyximport2.py
@@ -0,0 +1,606 @@
+"""
+Import hooks; when installed with the install() function, these hooks
+allow importing .pyx files as if they were Python modules.
+
+If you want the hook installed every time you run Python
+you can add it to your Python version by adding these lines to
+sitecustomize.py (which you can create from scratch in site-packages
+if it doesn't exist there or somewhere else on your python path)::
+
+ import pyximport
+ pyximport.install()
+
+For instance on the Mac with a non-system Python 2.3, you could create
+sitecustomize.py with only those two lines at
+/usr/local/lib/python2.3/site-packages/sitecustomize.py .
+
+A custom distutils.core.Extension instance and setup() args
+(Distribution) for the build can be defined by a <modulename>.pyxbld
+file like:
+
+# examplemod.pyxbld
+def make_ext(modname, pyxfilename):
+ from distutils.extension import Extension
+ return Extension(name = modname,
+ sources=[pyxfilename, 'hello.c'],
+ include_dirs=['/myinclude'] )
+def make_setup_args():
+ return dict(script_args=["--compiler=mingw32"])
+
+Extra dependencies can be defined by a <modulename>.pyxdep .
+See README.
+
+Since Cython 0.11, the :mod:`pyximport` module also has experimental
+compilation support for normal Python modules. This allows you to
+automatically run Cython on every .pyx and .py module that Python
+imports, including parts of the standard library and installed
+packages. Cython will still fail to compile a lot of Python modules,
+in which case the import mechanism will fall back to loading the
+Python source modules instead. The .py import mechanism is installed
+like this::
+
+ pyximport.install(pyimport = True)
+
+Running this module as a top-level script will run a test and then print
+the documentation.
+
+This code is based on the Py2.3+ import protocol as described in PEP 302.
+"""
+
+import glob
+import imp
+import os
+import sys
+from zipimport import zipimporter, ZipImportError
+
+mod_name = "pyximport"
+
+PYX_EXT = ".pyx"
+PYXDEP_EXT = ".pyxdep"
+PYXBLD_EXT = ".pyxbld"
+
+DEBUG_IMPORT = False
+
+
+def _print(message, args):
+ if args:
+ message = message % args
+ print(message)
+
+
+def _debug(message, *args):
+ if DEBUG_IMPORT:
+ _print(message, args)
+
+
+def _info(message, *args):
+ _print(message, args)
+
+
+# Performance problem: for every PYX file that is imported, we will
+# invoke the whole distutils infrastructure even if the module is
+# already built. It might be more efficient to only do it when the
+# mod time of the .pyx is newer than the mod time of the .so but
+# the question is how to get distutils to tell me the name of the .so
+# before it builds it. Maybe it is easy...but maybe the performance
+# issue isn't real.
+def _load_pyrex(name, filename):
+ "Load a pyrex file given a name and filename."
+
+
+def get_distutils_extension(modname, pyxfilename, language_level=None):
+# try:
+# import hashlib
+# except ImportError:
+# import md5 as hashlib
+# extra = "_" + hashlib.md5(open(pyxfilename).read()).hexdigest()
+# modname = modname + extra
+ extension_mod,setup_args = handle_special_build(modname, pyxfilename)
+ if not extension_mod:
+ if not isinstance(pyxfilename, str):
+ # distutils is stupid in Py2 and requires exactly 'str'
+ # => encode accidentally coerced unicode strings back to str
+ pyxfilename = pyxfilename.encode(sys.getfilesystemencoding())
+ from distutils.extension import Extension
+ extension_mod = Extension(name = modname, sources=[pyxfilename])
+ if language_level is not None:
+ extension_mod.cython_directives = {'language_level': language_level}
+ return extension_mod,setup_args
+
+
+def handle_special_build(modname, pyxfilename):
+ special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT
+ ext = None
+ setup_args={}
+ if os.path.exists(special_build):
+ # globls = {}
+ # locs = {}
+ # execfile(special_build, globls, locs)
+ # ext = locs["make_ext"](modname, pyxfilename)
+ with open(special_build) as fid:
+ mod = imp.load_source("XXXX", special_build, fid)
+ make_ext = getattr(mod,'make_ext',None)
+ if make_ext:
+ ext = make_ext(modname, pyxfilename)
+ assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build
+ make_setup_args = getattr(mod, 'make_setup_args',None)
+ if make_setup_args:
+ setup_args = make_setup_args()
+ assert isinstance(setup_args,dict), ("make_setup_args in %s did not return a dict"
+ % special_build)
+ assert set or setup_args, ("neither make_ext nor make_setup_args %s"
+ % special_build)
+ ext.sources = [os.path.join(os.path.dirname(special_build), source)
+ for source in ext.sources]
+ return ext, setup_args
+
+
+def handle_dependencies(pyxfilename):
+ testing = '_test_files' in globals()
+ dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT
+
+ # by default let distutils decide whether to rebuild on its own
+ # (it has a better idea of what the output file will be)
+
+ # but we know more about dependencies so force a rebuild if
+ # some of the dependencies are newer than the pyxfile.
+ if os.path.exists(dependfile):
+ with open(dependfile) as fid:
+ depends = fid.readlines()
+ depends = [depend.strip() for depend in depends]
+
+ # gather dependencies in the "files" variable
+ # the dependency file is itself a dependency
+ files = [dependfile]
+ for depend in depends:
+ fullpath = os.path.join(os.path.dirname(dependfile),
+ depend)
+ files.extend(glob.glob(fullpath))
+
+ # only for unit testing to see we did the right thing
+ if testing:
+ _test_files[:] = [] #$pycheck_no
+
+ # if any file that the pyxfile depends upon is newer than
+ # the pyx file, 'touch' the pyx file so that distutils will
+ # be tricked into rebuilding it.
+ for file in files:
+ from distutils.dep_util import newer
+ if newer(file, pyxfilename):
+ _debug("Rebuilding %s because of %s", pyxfilename, file)
+ filetime = os.path.getmtime(file)
+ os.utime(pyxfilename, (filetime, filetime))
+ if testing:
+ _test_files.append(file)
+
+
+def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None):
+ assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename
+ handle_dependencies(pyxfilename)
+
+ extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level)
+ build_in_temp = pyxargs.build_in_temp
+ sargs = pyxargs.setup_args.copy()
+ sargs.update(setup_args)
+ build_in_temp = sargs.pop('build_in_temp',build_in_temp)
+
+ from . import pyxbuild
+ so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod,
+ build_in_temp=build_in_temp,
+ pyxbuild_dir=pyxbuild_dir,
+ setup_args=sargs,
+ inplace=inplace,
+ reload_support=pyxargs.reload_support)
+ assert os.path.exists(so_path), "Cannot find: %s" % so_path
+
+ junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;)
+ junkstuff = glob.glob(junkpath)
+ for path in junkstuff:
+ if path != so_path:
+ try:
+ os.remove(path)
+ except IOError:
+ _info("Couldn't remove %s", path)
+
+ return so_path
+
+
+def load_module(name, pyxfilename, pyxbuild_dir=None, is_package=False,
+ build_inplace=False, language_level=None, so_path=None):
+ try:
+ if so_path is None:
+ if is_package:
+ module_name = name + '.__init__'
+ else:
+ module_name = name
+ so_path = build_module(module_name, pyxfilename, pyxbuild_dir,
+ inplace=build_inplace, language_level=language_level)
+ mod = imp.load_dynamic(name, so_path)
+ if is_package and not hasattr(mod, '__path__'):
+ mod.__path__ = [os.path.dirname(so_path)]
+ assert mod.__file__ == so_path, (mod.__file__, so_path)
+ except Exception as failure_exc:
+ _debug("Failed to load extension module: %r" % failure_exc)
+ if pyxargs.load_py_module_on_import_failure and pyxfilename.endswith('.py'):
+ # try to fall back to normal import
+ mod = imp.load_source(name, pyxfilename)
+ assert mod.__file__ in (pyxfilename, pyxfilename+'c', pyxfilename+'o'), (mod.__file__, pyxfilename)
+ else:
+ tb = sys.exc_info()[2]
+ import traceback
+ exc = ImportError("Building module %s failed: %s" % (
+ name, traceback.format_exception_only(*sys.exc_info()[:2])))
+ if sys.version_info[0] >= 3:
+ raise exc.with_traceback(tb)
+ else:
+ exec("raise exc, None, tb", {'exc': exc, 'tb': tb})
+ return mod
+
+
+# import hooks
+
+class PyxImporter(object):
+ """A meta-path importer for .pyx files.
+ """
+ def __init__(self, extension=PYX_EXT, pyxbuild_dir=None, inplace=False,
+ language_level=None):
+ self.extension = extension
+ self.pyxbuild_dir = pyxbuild_dir
+ self.inplace = inplace
+ self.language_level = language_level
+
+ def find_module(self, fullname, package_path=None):
+ if fullname in sys.modules and not pyxargs.reload_support:
+ return None # only here when reload()
+
+ # package_path might be a _NamespacePath. Convert that into a list...
+ if package_path is not None and not isinstance(package_path, list):
+ package_path = list(package_path)
+ try:
+ fp, pathname, (ext,mode,ty) = imp.find_module(fullname,package_path)
+ if fp: fp.close() # Python should offer a Default-Loader to avoid this double find/open!
+ if pathname and ty == imp.PKG_DIRECTORY:
+ pkg_file = os.path.join(pathname, '__init__'+self.extension)
+ if os.path.isfile(pkg_file):
+ return PyxLoader(fullname, pathname,
+ init_path=pkg_file,
+ pyxbuild_dir=self.pyxbuild_dir,
+ inplace=self.inplace,
+ language_level=self.language_level)
+ if pathname and pathname.endswith(self.extension):
+ return PyxLoader(fullname, pathname,
+ pyxbuild_dir=self.pyxbuild_dir,
+ inplace=self.inplace,
+ language_level=self.language_level)
+ if ty != imp.C_EXTENSION: # only when an extension, check if we have a .pyx next!
+ return None
+
+ # find .pyx fast, when .so/.pyd exist --inplace
+ pyxpath = os.path.splitext(pathname)[0]+self.extension
+ if os.path.isfile(pyxpath):
+ return PyxLoader(fullname, pyxpath,
+ pyxbuild_dir=self.pyxbuild_dir,
+ inplace=self.inplace,
+ language_level=self.language_level)
+
+ # .so/.pyd's on PATH should not be remote from .pyx's
+ # think no need to implement PyxArgs.importer_search_remote here?
+
+ except ImportError:
+ pass
+
+ # searching sys.path ...
+
+ #if DEBUG_IMPORT: print "SEARCHING", fullname, package_path
+
+ mod_parts = fullname.split('.')
+ module_name = mod_parts[-1]
+ pyx_module_name = module_name + self.extension
+
+ # this may work, but it returns the file content, not its path
+ #import pkgutil
+ #pyx_source = pkgutil.get_data(package, pyx_module_name)
+
+ paths = package_path or sys.path
+ for path in paths:
+ pyx_data = None
+ if not path:
+ path = os.getcwd()
+ elif os.path.isfile(path):
+ try:
+ zi = zipimporter(path)
+ pyx_data = zi.get_data(pyx_module_name)
+ except (ZipImportError, IOError, OSError):
+ continue # Module not found.
+ # unzip the imported file into the build dir
+ # FIXME: can interfere with later imports if build dir is in sys.path and comes before zip file
+ path = self.pyxbuild_dir
+ elif not os.path.isabs(path):
+ path = os.path.abspath(path)
+
+ pyx_module_path = os.path.join(path, pyx_module_name)
+ if pyx_data is not None:
+ if not os.path.exists(path):
+ try:
+ os.makedirs(path)
+ except OSError:
+ # concurrency issue?
+ if not os.path.exists(path):
+ raise
+ with open(pyx_module_path, "wb") as f:
+ f.write(pyx_data)
+ elif not os.path.isfile(pyx_module_path):
+ continue # Module not found.
+
+ return PyxLoader(fullname, pyx_module_path,
+ pyxbuild_dir=self.pyxbuild_dir,
+ inplace=self.inplace,
+ language_level=self.language_level)
+
+ # not found, normal package, not a .pyx file, none of our business
+ _debug("%s not found" % fullname)
+ return None
+
+
+class PyImporter(PyxImporter):
+ """A meta-path importer for normal .py files.
+ """
+ def __init__(self, pyxbuild_dir=None, inplace=False, language_level=None):
+ if language_level is None:
+ language_level = sys.version_info[0]
+ self.super = super(PyImporter, self)
+ self.super.__init__(extension='.py', pyxbuild_dir=pyxbuild_dir, inplace=inplace,
+ language_level=language_level)
+ self.uncompilable_modules = {}
+ self.blocked_modules = ['Cython', 'pyxbuild', 'pyximport.pyxbuild',
+ 'distutils']
+ self.blocked_packages = ['Cython.', 'distutils.']
+
+ def find_module(self, fullname, package_path=None):
+ if fullname in sys.modules:
+ return None
+ if any([fullname.startswith(pkg) for pkg in self.blocked_packages]):
+ return None
+ if fullname in self.blocked_modules:
+ # prevent infinite recursion
+ return None
+ if _lib_loader.knows(fullname):
+ return _lib_loader
+ _debug("trying import of module '%s'", fullname)
+ if fullname in self.uncompilable_modules:
+ path, last_modified = self.uncompilable_modules[fullname]
+ try:
+ new_last_modified = os.stat(path).st_mtime
+ if new_last_modified > last_modified:
+ # import would fail again
+ return None
+ except OSError:
+ # module is no longer where we found it, retry the import
+ pass
+
+ self.blocked_modules.append(fullname)
+ try:
+ importer = self.super.find_module(fullname, package_path)
+ if importer is not None:
+ if importer.init_path:
+ path = importer.init_path
+ real_name = fullname + '.__init__'
+ else:
+ path = importer.path
+ real_name = fullname
+ _debug("importer found path %s for module %s", path, real_name)
+ try:
+ so_path = build_module(
+ real_name, path,
+ pyxbuild_dir=self.pyxbuild_dir,
+ language_level=self.language_level,
+ inplace=self.inplace)
+ _lib_loader.add_lib(fullname, path, so_path,
+ is_package=bool(importer.init_path))
+ return _lib_loader
+ except Exception:
+ if DEBUG_IMPORT:
+ import traceback
+ traceback.print_exc()
+ # build failed, not a compilable Python module
+ try:
+ last_modified = os.stat(path).st_mtime
+ except OSError:
+ last_modified = 0
+ self.uncompilable_modules[fullname] = (path, last_modified)
+ importer = None
+ finally:
+ self.blocked_modules.pop()
+ return importer
+
+
+class LibLoader(object):
+ def __init__(self):
+ self._libs = {}
+
+ def load_module(self, fullname):
+ try:
+ source_path, so_path, is_package = self._libs[fullname]
+ except KeyError:
+ raise ValueError("invalid module %s" % fullname)
+ _debug("Loading shared library module '%s' from %s", fullname, so_path)
+ return load_module(fullname, source_path, so_path=so_path, is_package=is_package)
+
+ def add_lib(self, fullname, path, so_path, is_package):
+ self._libs[fullname] = (path, so_path, is_package)
+
+ def knows(self, fullname):
+ return fullname in self._libs
+
+_lib_loader = LibLoader()
+
+
+class PyxLoader(object):
+ def __init__(self, fullname, path, init_path=None, pyxbuild_dir=None,
+ inplace=False, language_level=None):
+ _debug("PyxLoader created for loading %s from %s (init path: %s)",
+ fullname, path, init_path)
+ self.fullname = fullname
+ self.path, self.init_path = path, init_path
+ self.pyxbuild_dir = pyxbuild_dir
+ self.inplace = inplace
+ self.language_level = language_level
+
+ def load_module(self, fullname):
+ assert self.fullname == fullname, (
+ "invalid module, expected %s, got %s" % (
+ self.fullname, fullname))
+ if self.init_path:
+ # package
+ #print "PACKAGE", fullname
+ module = load_module(fullname, self.init_path,
+ self.pyxbuild_dir, is_package=True,
+ build_inplace=self.inplace,
+ language_level=self.language_level)
+ module.__path__ = [self.path]
+ else:
+ #print "MODULE", fullname
+ module = load_module(fullname, self.path,
+ self.pyxbuild_dir,
+ build_inplace=self.inplace,
+ language_level=self.language_level)
+ return module
+
+
+#install args
+class PyxArgs(object):
+ build_dir=True
+ build_in_temp=True
+ setup_args={} #None
+
+##pyxargs=None
+
+
+def _have_importers():
+ has_py_importer = False
+ has_pyx_importer = False
+ for importer in sys.meta_path:
+ if isinstance(importer, PyxImporter):
+ if isinstance(importer, PyImporter):
+ has_py_importer = True
+ else:
+ has_pyx_importer = True
+
+ return has_py_importer, has_pyx_importer
+
+
+def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
+ setup_args=None, reload_support=False,
+ load_py_module_on_import_failure=False, inplace=False,
+ language_level=None):
+ """ Main entry point for pyxinstall.
+
+ Call this to install the ``.pyx`` import hook in
+ your meta-path for a single Python process. If you want it to be
+ installed whenever you use Python, add it to your ``sitecustomize``
+ (as described above).
+
+ :param pyximport: If set to False, does not try to import ``.pyx`` files.
+
+ :param pyimport: You can pass ``pyimport=True`` to also
+ install the ``.py`` import hook
+ in your meta-path. Note, however, that it is rather experimental,
+ will not work at all for some ``.py`` files and packages, and will
+ heavily slow down your imports due to search and compilation.
+ Use at your own risk.
+
+ :param build_dir: By default, compiled modules will end up in a ``.pyxbld``
+ directory in the user's home directory. Passing a different path
+ as ``build_dir`` will override this.
+
+ :param build_in_temp: If ``False``, will produce the C files locally. Working
+ with complex dependencies and debugging becomes more easy. This
+ can principally interfere with existing files of the same name.
+
+ :param setup_args: Dict of arguments for Distribution.
+ See ``distutils.core.setup()``.
+
+ :param reload_support: Enables support for dynamic
+ ``reload(my_module)``, e.g. after a change in the Cython code.
+ Additional files ``<so_path>.reloadNN`` may arise on that account, when
+ the previously loaded module file cannot be overwritten.
+
+ :param load_py_module_on_import_failure: If the compilation of a ``.py``
+ file succeeds, but the subsequent import fails for some reason,
+ retry the import with the normal ``.py`` module instead of the
+ compiled module. Note that this may lead to unpredictable results
+ for modules that change the system state during their import, as
+ the second import will rerun these modifications in whatever state
+ the system was left after the import of the compiled module
+ failed.
+
+ :param inplace: Install the compiled module
+ (``.so`` for Linux and Mac / ``.pyd`` for Windows)
+ next to the source file.
+
+ :param language_level: The source language level to use: 2 or 3.
+ The default is to use the language level of the current Python
+ runtime for .py files and Py2 for ``.pyx`` files.
+ """
+ if setup_args is None:
+ setup_args = {}
+ if not build_dir:
+ build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld')
+
+ global pyxargs
+ pyxargs = PyxArgs() #$pycheck_no
+ pyxargs.build_dir = build_dir
+ pyxargs.build_in_temp = build_in_temp
+ pyxargs.setup_args = (setup_args or {}).copy()
+ pyxargs.reload_support = reload_support
+ pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure
+
+ has_py_importer, has_pyx_importer = _have_importers()
+ py_importer, pyx_importer = None, None
+
+ if pyimport and not has_py_importer:
+ py_importer = PyImporter(pyxbuild_dir=build_dir, inplace=inplace,
+ language_level=language_level)
+ # make sure we import Cython before we install the import hook
+ import Cython.Compiler.Main, Cython.Compiler.Pipeline, Cython.Compiler.Optimize
+ sys.meta_path.insert(0, py_importer)
+
+ if pyximport and not has_pyx_importer:
+ pyx_importer = PyxImporter(pyxbuild_dir=build_dir, inplace=inplace,
+ language_level=language_level)
+ sys.meta_path.append(pyx_importer)
+
+ return py_importer, pyx_importer
+
+
+def uninstall(py_importer, pyx_importer):
+ """
+ Uninstall an import hook.
+ """
+ try:
+ sys.meta_path.remove(py_importer)
+ except ValueError:
+ pass
+
+ try:
+ sys.meta_path.remove(pyx_importer)
+ except ValueError:
+ pass
+
+
+# MAIN
+
+def show_docs():
+ import __main__
+ __main__.__name__ = mod_name
+ for name in dir(__main__):
+ item = getattr(__main__, name)
+ try:
+ setattr(item, "__module__", mod_name)
+ except (AttributeError, TypeError):
+ pass
+ help(__main__)
+
+
+if __name__ == '__main__':
+ show_docs()
diff --git a/pyximport/_pyximport3.py b/pyximport/_pyximport3.py
new file mode 100644
index 000000000..dccd1d09e
--- /dev/null
+++ b/pyximport/_pyximport3.py
@@ -0,0 +1,464 @@
+"""
+Import hooks; when installed with the install() function, these hooks
+allow importing .pyx files as if they were Python modules.
+
+If you want the hook installed every time you run Python
+you can add it to your Python version by adding these lines to
+sitecustomize.py (which you can create from scratch in site-packages
+if it doesn't exist there or somewhere else on your python path)::
+
+ import pyximport
+ pyximport.install()
+
+For instance on the Mac with a non-system Python 2.3, you could create
+sitecustomize.py with only those two lines at
+/usr/local/lib/python2.3/site-packages/sitecustomize.py .
+
+A custom distutils.core.Extension instance and setup() args
+(Distribution) for the build can be defined by a <modulename>.pyxbld
+file like:
+
+# examplemod.pyxbld
+def make_ext(modname, pyxfilename):
+ from distutils.extension import Extension
+ return Extension(name = modname,
+ sources=[pyxfilename, 'hello.c'],
+ include_dirs=['/myinclude'] )
+def make_setup_args():
+ return dict(script_args=["--compiler=mingw32"])
+
+Extra dependencies can be defined by a <modulename>.pyxdep .
+See README.
+
+Since Cython 0.11, the :mod:`pyximport` module also has experimental
+compilation support for normal Python modules. This allows you to
+automatically run Cython on every .pyx and .py module that Python
+imports, including parts of the standard library and installed
+packages. Cython will still fail to compile a lot of Python modules,
+in which case the import mechanism will fall back to loading the
+Python source modules instead. The .py import mechanism is installed
+like this::
+
+ pyximport.install(pyimport = True)
+
+Running this module as a top-level script will run a test and then print
+the documentation.
+"""
+
+import glob
+import importlib
+import os
+import sys
+from importlib.abc import MetaPathFinder
+from importlib.machinery import ExtensionFileLoader, SourceFileLoader
+from importlib.util import spec_from_file_location
+
+mod_name = "pyximport"
+
+PY_EXT = ".py"
+PYX_EXT = ".pyx"
+PYXDEP_EXT = ".pyxdep"
+PYXBLD_EXT = ".pyxbld"
+
+DEBUG_IMPORT = False
+
+
+def _print(message, args):
+ if args:
+ message = message % args
+ print(message)
+
+
def _debug(message, *args):
    """Print *message* (%-formatted with *args*) only when DEBUG_IMPORT is set."""
    if not DEBUG_IMPORT:
        return
    _print(message, args)
+
+
def _info(message, *args):
    """Unconditionally print *message*, %-formatted with *args*."""
    _print(message, args)
+
+
def load_source(file_path):
    """Import and execute the Python source file at *file_path*.

    Returns the resulting module object under the throwaway name "XXXX";
    the module is not registered in sys.modules.
    """
    import importlib.util
    from importlib.machinery import SourceFileLoader
    dummy_name = "XXXX"
    loader = SourceFileLoader(dummy_name, file_path)
    spec = importlib.util.spec_from_file_location(dummy_name, file_path, loader=loader)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module
+
+
def get_distutils_extension(modname, pyxfilename, language_level=None):
    """Build a distutils Extension describing how to compile *pyxfilename*.

    A <modulename>.pyxbld file next to the source (see
    handle_special_build()) takes precedence; otherwise a default
    single-source Extension is created.

    Returns a tuple ``(extension, setup_args)`` where ``setup_args`` is a
    dict of extra ``setup()`` arguments (empty unless provided by a
    .pyxbld file).
    """
    extension_mod, setup_args = handle_special_build(modname, pyxfilename)
    if not extension_mod:
        if not isinstance(pyxfilename, str):
            # distutils requires exactly 'str'
            # => encode accidentally coerced unicode strings back to str
            pyxfilename = pyxfilename.encode(sys.getfilesystemencoding())
        from distutils.extension import Extension
        extension_mod = Extension(name=modname, sources=[pyxfilename])
    if language_level is not None:
        # forwarded to the compiler as a directive when cythonizing
        extension_mod.cython_directives = {'language_level': language_level}
    return extension_mod, setup_args
+
+
def handle_special_build(modname, pyxfilename):
    """Load an optional <modulename>.pyxbld file next to the .pyx source.

    The .pyxbld module may define ``make_ext(modname, pyxfilename)``
    returning a distutils Extension, and/or ``make_setup_args()``
    returning a dict of extra ``setup()`` arguments.

    Returns ``(ext, setup_args)``; ``ext`` is None and ``setup_args`` is
    empty when no .pyxbld file exists.
    """
    special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT
    ext = None
    setup_args = {}
    if os.path.exists(special_build):
        mod = load_source(special_build)
        make_ext = getattr(mod, 'make_ext', None)
        if make_ext:
            ext = make_ext(modname, pyxfilename)
            assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build
        make_setup_args = getattr(mod, 'make_setup_args', None)
        if make_setup_args:
            setup_args = make_setup_args()
            assert isinstance(setup_args, dict), ("make_setup_args in %s did not return a dict"
                                                  % special_build)
        # BUG FIX: this previously read "assert set or setup_args" -- "set" is
        # the always-truthy builtin, so the sanity check could never fire.
        assert ext or setup_args, ("neither make_ext nor make_setup_args %s"
                                   % special_build)
        if ext:
            # Rebase the extension's sources relative to the .pyxbld file's
            # directory.  Guarded: a .pyxbld defining only make_setup_args()
            # used to crash here with AttributeError on ext.sources.
            ext.sources = [os.path.join(os.path.dirname(special_build), source)
                           for source in ext.sources]
    return ext, setup_args
+
+
def handle_dependencies(pyxfilename):
    """Force a rebuild of *pyxfilename* if any of its dependencies changed.

    Dependencies are listed (one glob pattern per line) in an optional
    <modulename>.pyxdep file next to the source.  If any matching file is
    newer than the .pyx file, the .pyx file's mtime is bumped so that
    distutils rebuilds it.
    """
    # '_test_files' is only injected into our globals by the test suite.
    testing = '_test_files' in globals()
    dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT

    # by default let distutils decide whether to rebuild on its own
    # (it has a better idea of what the output file will be)

    # but we know more about dependencies so force a rebuild if
    # some of the dependencies are newer than the pyxfile.
    if os.path.exists(dependfile):
        with open(dependfile) as fid:
            depends = [depend.strip() for depend in fid.readlines()]

        # gather dependencies in the "files" variable
        # the dependency file is itself a dependency
        files = [dependfile]
        for depend in depends:
            fullpath = os.path.join(os.path.dirname(dependfile), depend)
            files.extend(glob.glob(fullpath))

        # only for unit testing to see we did the right thing
        if testing:
            _test_files[:] = []  #$pycheck_no

        # if any file that the pyxfile depends upon is newer than
        # the pyx file, 'touch' the pyx file so that distutils will
        # be tricked into rebuilding it.
        from distutils.dep_util import newer  # hoisted: loop-invariant import
        for dep_file in files:  # renamed from 'file' to avoid shadowing the builtin
            if newer(dep_file, pyxfilename):
                _debug("Rebuilding %s because of %s", pyxfilename, dep_file)
                filetime = os.path.getmtime(dep_file)
                os.utime(pyxfilename, (filetime, filetime))
                if testing:
                    _test_files.append(dep_file)
+
+
def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None):
    """Compile *pyxfilename* into an extension module and return the path
    of the built shared library.

    Reads the module-wide ``pyxargs`` configuration (set up by install());
    per-module overrides come from an optional .pyxbld file via
    get_distutils_extension().  Raises AssertionError if the source does
    not exist or the build produced no output file.
    """
    assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename
    handle_dependencies(pyxfilename)

    extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level)
    build_in_temp = pyxargs.build_in_temp
    # module-specific setup_args (from .pyxbld) override the global ones
    sargs = pyxargs.setup_args.copy()
    sargs.update(setup_args)
    build_in_temp = sargs.pop('build_in_temp',build_in_temp)

    from . import pyxbuild
    so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod,
                                  build_in_temp=build_in_temp,
                                  pyxbuild_dir=pyxbuild_dir,
                                  setup_args=sargs,
                                  inplace=inplace,
                                  reload_support=pyxargs.reload_support)
    assert os.path.exists(so_path), "Cannot find: %s" % so_path

    # Clean up stale build leftovers matching "<name>_*" next to the output.
    junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;)
    junkstuff = glob.glob(junkpath)
    for path in junkstuff:
        if path != so_path:
            try:
                os.remove(path)
            except IOError:
                _info("Couldn't remove %s", path)

    return so_path
+
+
+# import hooks
+
class PyxImportMetaFinder(MetaPathFinder):
    """Meta-path finder that locates ``.pyx`` sources for importable modules."""

    def __init__(self, extension=PYX_EXT, pyxbuild_dir=None, inplace=False, language_level=None):
        self.pyxbuild_dir = pyxbuild_dir
        self.inplace = inplace
        self.language_level = language_level
        self.extension = extension

    def find_spec(self, fullname, path, target=None):
        # An empty/None path means a top-level import: search the cwd.
        search_dirs = path if path else [os.getcwd()]
        # Only the last dotted component names the file on disk.
        leaf_name = fullname.rsplit(".", 1)[-1]
        for directory in search_dirs:
            package_dir = os.path.join(directory, leaf_name)
            if os.path.isdir(package_dir):
                # a package: look for its __init__ source and record
                # where submodules live
                candidate = os.path.join(package_dir, "__init__" + self.extension)
                submodule_locations = [package_dir]
            else:
                candidate = os.path.join(directory, leaf_name + self.extension)
                submodule_locations = None
            if not os.path.exists(candidate):
                continue

            loader = PyxImportLoader(candidate, self.pyxbuild_dir,
                                     self.inplace, self.language_level)
            return spec_from_file_location(
                fullname, candidate, loader=loader,
                submodule_search_locations=submodule_locations)

        return None  # we don't know how to import this
+
+
class PyImportMetaFinder(MetaPathFinder):
    """Meta-path finder that compiles ordinary ``.py`` modules with Cython.

    Mirrors PyxImportMetaFinder for plain Python sources.  Modules that
    must never be compiled (Cython itself, distutils, the build helpers)
    are blocked to avoid recursive compilation while the hook is active.
    """

    def __init__(self, extension=PY_EXT, pyxbuild_dir=None, inplace=False, language_level=None):
        self.pyxbuild_dir = pyxbuild_dir
        self.inplace = inplace
        self.language_level = language_level
        self.extension = extension
        self.uncompilable_modules = {}
        self.blocked_modules = ['Cython', 'pyxbuild', 'pyximport.pyxbuild',
                                'distutils', 'cython']
        self.blocked_packages = ['Cython.', 'distutils.']

    def find_spec(self, fullname, path, target=None):
        if fullname in sys.modules:
            return None
        # str.startswith accepts a tuple of prefixes: one C-level call
        # instead of building a throwaway list for any([...]).
        if fullname.startswith(tuple(self.blocked_packages)):
            return None
        if fullname in self.blocked_modules:
            # prevent infinite recursion
            return None

        # Temporarily block this module so the build cannot re-enter us.
        self.blocked_modules.append(fullname)
        name = fullname
        if not path:
            path = [os.getcwd()]  # top level import --
        try:
            for entry in path:
                if os.path.isdir(os.path.join(entry, name)):
                    # this module has child modules
                    filename = os.path.join(entry, name, "__init__" + self.extension)
                    submodule_locations = [os.path.join(entry, name)]
                else:
                    filename = os.path.join(entry, name + self.extension)
                    submodule_locations = None
                if not os.path.exists(filename):
                    continue

                return spec_from_file_location(
                    fullname, filename,
                    loader=PyxImportLoader(filename, self.pyxbuild_dir, self.inplace, self.language_level),
                    submodule_search_locations=submodule_locations)
        finally:
            self.blocked_modules.pop()

        return None  # we don't know how to import this
+
+
class PyxImportLoader(ExtensionFileLoader):
    """Loader that builds a .pyx/.py source into an extension module on import.

    ``create_module()`` compiles the source found by the finder into a
    shared library, then defers to ExtensionFileLoader to load it.
    """

    def __init__(self, filename, pyxbuild_dir, inplace, language_level):
        # Key the base loader on the bare module name (no package path,
        # no extension); the real import name arrives later via the spec.
        module_name = os.path.splitext(os.path.basename(filename))[0]
        super().__init__(module_name, filename)
        self._pyxbuild_dir = pyxbuild_dir
        self._inplace = inplace
        self._language_level = language_level

    def create_module(self, spec):
        try:
            # Compile first, then point both ourselves and the spec at the
            # built shared library so the base class loads that instead.
            so_path = build_module(spec.name, pyxfilename=spec.origin, pyxbuild_dir=self._pyxbuild_dir,
                                   inplace=self._inplace, language_level=self._language_level)
            self.path = so_path
            spec.origin = so_path
            return super().create_module(spec)
        except Exception as failure_exc:
            _debug("Failed to load extension module: %r" % failure_exc)
            if pyxargs.load_py_module_on_import_failure and spec.origin.endswith(PY_EXT):
                # Optional fallback: import the plain Python source instead.
                spec = importlib.util.spec_from_file_location(spec.name, spec.origin,
                                                              loader=SourceFileLoader(spec.name, spec.origin))
                mod = importlib.util.module_from_spec(spec)
                assert mod.__file__ in (spec.origin, spec.origin + 'c', spec.origin + 'o'), (mod.__file__, spec.origin)
                return mod
            else:
                # Re-raise as ImportError while preserving the original traceback.
                tb = sys.exc_info()[2]
                import traceback
                exc = ImportError("Building module %s failed: %s" % (
                    spec.name, traceback.format_exception_only(*sys.exc_info()[:2])))
                raise exc.with_traceback(tb)

    def exec_module(self, module):
        # Wrap execution failures in ImportError so callers of import see a
        # consistent exception type.
        try:
            return super().exec_module(module)
        except Exception as failure_exc:
            import traceback
            _debug("Failed to load extension module: %r" % failure_exc)
            raise ImportError("Executing module %s failed %s" % (
                module.__file__, traceback.format_exception_only(*sys.exc_info()[:2])))
+
+
# install() populates an instance of this as the module-global "pyxargs".
class PyxArgs(object):
    build_dir=True       # target directory for compiled modules
    build_in_temp=True   # generate C files in a temp dir rather than alongside sources
    setup_args={}        # extra distutils setup() arguments (dict)
+
+
def _have_importers():
    """Return ``(has_py_importer, has_pyx_importer)`` for hooks already
    installed on sys.meta_path.
    """
    has_py_importer = False
    has_pyx_importer = False
    for importer in sys.meta_path:
        # BUG FIX: unlike the old PyImporter/PyxImporter pair,
        # PyImportMetaFinder is NOT a subclass of PyxImportMetaFinder, so
        # the previous nested isinstance() check never detected an
        # installed .py hook (allowing duplicates).  Test each type
        # independently instead.
        if isinstance(importer, PyImportMetaFinder):
            has_py_importer = True
        elif isinstance(importer, PyxImportMetaFinder):
            has_pyx_importer = True

    return has_py_importer, has_pyx_importer
+
+
def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
            setup_args=None, reload_support=False,
            load_py_module_on_import_failure=False, inplace=False,
            language_level=None):
    """ Main entry point for pyxinstall.

    Call this to install the ``.pyx`` import hook in
    your meta-path for a single Python process.  If you want it to be
    installed whenever you use Python, add it to your ``sitecustomize``
    (as described above).

    :param pyximport: If set to False, does not try to import ``.pyx`` files.

    :param pyimport: You can pass ``pyimport=True`` to also
        install the ``.py`` import hook
        in your meta-path.  Note, however, that it is rather experimental,
        will not work at all for some ``.py`` files and packages, and will
        heavily slow down your imports due to search and compilation.
        Use at your own risk.

    :param build_dir: By default, compiled modules will end up in a ``.pyxbld``
        directory in the user's home directory.  Passing a different path
        as ``build_dir`` will override this.

    :param build_in_temp: If ``False``, will produce the C files locally. Working
        with complex dependencies and debugging becomes more easy. This
        can principally interfere with existing files of the same name.

    :param setup_args: Dict of arguments for Distribution.
        See ``distutils.core.setup()``.

    :param reload_support: Enables support for dynamic
        ``reload(my_module)``, e.g. after a change in the Cython code.
        Additional files ``<so_path>.reloadNN`` may arise on that account, when
        the previously loaded module file cannot be overwritten.

    :param load_py_module_on_import_failure: If the compilation of a ``.py``
        file succeeds, but the subsequent import fails for some reason,
        retry the import with the normal ``.py`` module instead of the
        compiled module.  Note that this may lead to unpredictable results
        for modules that change the system state during their import, as
        the second import will rerun these modifications in whatever state
        the system was left after the import of the compiled module
        failed.

    :param inplace: Install the compiled module
        (``.so`` for Linux and Mac / ``.pyd`` for Windows)
        next to the source file.

    :param language_level: The source language level to use: 2 or 3.
        The default is to use the language level of the current Python
        runtime for .py files and Py2 for ``.pyx`` files.
    """
    if setup_args is None:
        setup_args = {}
    if not build_dir:
        # default build location: a per-user directory under $HOME
        build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld')

    global pyxargs
    # Publish the configuration module-wide; build_module() and the
    # loaders read it.
    pyxargs = PyxArgs()  #$pycheck_no
    pyxargs.build_dir = build_dir
    pyxargs.build_in_temp = build_in_temp
    pyxargs.setup_args = (setup_args or {}).copy()
    pyxargs.reload_support = reload_support
    pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure

    # Avoid installing a hook of a kind that is already present.
    has_py_importer, has_pyx_importer = _have_importers()
    py_importer, pyx_importer = None, None

    if pyimport and not has_py_importer:
        py_importer = PyImportMetaFinder(pyxbuild_dir=build_dir, inplace=inplace,
                                         language_level=language_level)
        # make sure we import Cython before we install the import hook
        import Cython.Compiler.Main, Cython.Compiler.Pipeline, Cython.Compiler.Optimize
        # inserted first so it takes precedence over the standard finders
        sys.meta_path.insert(0, py_importer)

    if pyximport and not has_pyx_importer:
        pyx_importer = PyxImportMetaFinder(pyxbuild_dir=build_dir, inplace=inplace,
                                           language_level=language_level)
        sys.meta_path.append(pyx_importer)

    return py_importer, pyx_importer
+
+
def uninstall(py_importer, pyx_importer):
    """Remove previously installed import hooks from sys.meta_path.

    Finders that are not currently installed are silently ignored.
    """
    for importer in (py_importer, pyx_importer):
        try:
            sys.meta_path.remove(importer)
        except ValueError:
            pass
+
+
+# MAIN
+
def show_docs():
    """Print this module's documentation via help()."""
    import __main__
    __main__.__name__ = mod_name
    # Re-badge everything defined in __main__ so help() groups it under
    # the pyximport module name instead of "__main__".
    for name in dir(__main__):
        item = getattr(__main__, name)
        try:
            setattr(item, "__module__", mod_name)
        except (AttributeError, TypeError):
            # builtins and some C objects reject __module__ assignment
            pass
    help(__main__)
+
+
+if __name__ == '__main__':
+ show_docs()
diff --git a/pyximport/pyximport.py b/pyximport/pyximport.py
index b2077826a..9d575815a 100644
--- a/pyximport/pyximport.py
+++ b/pyximport/pyximport.py
@@ -1,606 +1,11 @@
-"""
-Import hooks; when installed with the install() function, these hooks
-allow importing .pyx files as if they were Python modules.
-
-If you want the hook installed every time you run Python
-you can add it to your Python version by adding these lines to
-sitecustomize.py (which you can create from scratch in site-packages
-if it doesn't exist there or somewhere else on your python path)::
-
- import pyximport
- pyximport.install()
-
-For instance on the Mac with a non-system Python 2.3, you could create
-sitecustomize.py with only those two lines at
-/usr/local/lib/python2.3/site-packages/sitecustomize.py .
-
-A custom distutils.core.Extension instance and setup() args
-(Distribution) for for the build can be defined by a <modulename>.pyxbld
-file like:
-
-# examplemod.pyxbld
-def make_ext(modname, pyxfilename):
- from distutils.extension import Extension
- return Extension(name = modname,
- sources=[pyxfilename, 'hello.c'],
- include_dirs=['/myinclude'] )
-def make_setup_args():
- return dict(script_args=["--compiler=mingw32"])
-
-Extra dependencies can be defined by a <modulename>.pyxdep .
-See README.
-
-Since Cython 0.11, the :mod:`pyximport` module also has experimental
-compilation support for normal Python modules. This allows you to
-automatically run Cython on every .pyx and .py module that Python
-imports, including parts of the standard library and installed
-packages. Cython will still fail to compile a lot of Python modules,
-in which case the import mechanism will fall back to loading the
-Python source modules instead. The .py import mechanism is installed
-like this::
-
- pyximport.install(pyimport = True)
-
-Running this module as a top-level script will run a test and then print
-the documentation.
-
-This code is based on the Py2.3+ import protocol as described in PEP 302.
-"""
-
-import glob
-import imp
-import os
+from __future__ import absolute_import
import sys
-from zipimport import zipimporter, ZipImportError
-
-mod_name = "pyximport"
-
-PYX_EXT = ".pyx"
-PYXDEP_EXT = ".pyxdep"
-PYXBLD_EXT = ".pyxbld"
-
-DEBUG_IMPORT = False
-
-
-def _print(message, args):
- if args:
- message = message % args
- print(message)
-
-
-def _debug(message, *args):
- if DEBUG_IMPORT:
- _print(message, args)
-
-
-def _info(message, *args):
- _print(message, args)
-
-
-# Performance problem: for every PYX file that is imported, we will
-# invoke the whole distutils infrastructure even if the module is
-# already built. It might be more efficient to only do it when the
-# mod time of the .pyx is newer than the mod time of the .so but
-# the question is how to get distutils to tell me the name of the .so
-# before it builds it. Maybe it is easy...but maybe the performance
-# issue isn't real.
-def _load_pyrex(name, filename):
- "Load a pyrex file given a name and filename."
-
-
-def get_distutils_extension(modname, pyxfilename, language_level=None):
-# try:
-# import hashlib
-# except ImportError:
-# import md5 as hashlib
-# extra = "_" + hashlib.md5(open(pyxfilename).read()).hexdigest()
-# modname = modname + extra
- extension_mod,setup_args = handle_special_build(modname, pyxfilename)
- if not extension_mod:
- if not isinstance(pyxfilename, str):
- # distutils is stupid in Py2 and requires exactly 'str'
- # => encode accidentally coerced unicode strings back to str
- pyxfilename = pyxfilename.encode(sys.getfilesystemencoding())
- from distutils.extension import Extension
- extension_mod = Extension(name = modname, sources=[pyxfilename])
- if language_level is not None:
- extension_mod.cython_directives = {'language_level': language_level}
- return extension_mod,setup_args
-
-
-def handle_special_build(modname, pyxfilename):
- special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT
- ext = None
- setup_args={}
- if os.path.exists(special_build):
- # globls = {}
- # locs = {}
- # execfile(special_build, globls, locs)
- # ext = locs["make_ext"](modname, pyxfilename)
- with open(special_build) as fid:
- mod = imp.load_source("XXXX", special_build, fid)
- make_ext = getattr(mod,'make_ext',None)
- if make_ext:
- ext = make_ext(modname, pyxfilename)
- assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build
- make_setup_args = getattr(mod, 'make_setup_args',None)
- if make_setup_args:
- setup_args = make_setup_args()
- assert isinstance(setup_args,dict), ("make_setup_args in %s did not return a dict"
- % special_build)
- assert set or setup_args, ("neither make_ext nor make_setup_args %s"
- % special_build)
- ext.sources = [os.path.join(os.path.dirname(special_build), source)
- for source in ext.sources]
- return ext, setup_args
-
-
-def handle_dependencies(pyxfilename):
- testing = '_test_files' in globals()
- dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT
-
- # by default let distutils decide whether to rebuild on its own
- # (it has a better idea of what the output file will be)
-
- # but we know more about dependencies so force a rebuild if
- # some of the dependencies are newer than the pyxfile.
- if os.path.exists(dependfile):
- with open(dependfile) as fid:
- depends = fid.readlines()
- depends = [depend.strip() for depend in depends]
-
- # gather dependencies in the "files" variable
- # the dependency file is itself a dependency
- files = [dependfile]
- for depend in depends:
- fullpath = os.path.join(os.path.dirname(dependfile),
- depend)
- files.extend(glob.glob(fullpath))
-
- # only for unit testing to see we did the right thing
- if testing:
- _test_files[:] = [] #$pycheck_no
-
- # if any file that the pyxfile depends upon is newer than
- # the pyx file, 'touch' the pyx file so that distutils will
- # be tricked into rebuilding it.
- for file in files:
- from distutils.dep_util import newer
- if newer(file, pyxfilename):
- _debug("Rebuilding %s because of %s", pyxfilename, file)
- filetime = os.path.getmtime(file)
- os.utime(pyxfilename, (filetime, filetime))
- if testing:
- _test_files.append(file)
-
-
-def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None):
- assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename
- handle_dependencies(pyxfilename)
-
- extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level)
- build_in_temp = pyxargs.build_in_temp
- sargs = pyxargs.setup_args.copy()
- sargs.update(setup_args)
- build_in_temp = sargs.pop('build_in_temp',build_in_temp)
-
- from . import pyxbuild
- so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod,
- build_in_temp=build_in_temp,
- pyxbuild_dir=pyxbuild_dir,
- setup_args=sargs,
- inplace=inplace,
- reload_support=pyxargs.reload_support)
- assert os.path.exists(so_path), "Cannot find: %s" % so_path
-
- junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;)
- junkstuff = glob.glob(junkpath)
- for path in junkstuff:
- if path != so_path:
- try:
- os.remove(path)
- except IOError:
- _info("Couldn't remove %s", path)
-
- return so_path
-
-
-def load_module(name, pyxfilename, pyxbuild_dir=None, is_package=False,
- build_inplace=False, language_level=None, so_path=None):
- try:
- if so_path is None:
- if is_package:
- module_name = name + '.__init__'
- else:
- module_name = name
- so_path = build_module(module_name, pyxfilename, pyxbuild_dir,
- inplace=build_inplace, language_level=language_level)
- mod = imp.load_dynamic(name, so_path)
- if is_package and not hasattr(mod, '__path__'):
- mod.__path__ = [os.path.dirname(so_path)]
- assert mod.__file__ == so_path, (mod.__file__, so_path)
- except Exception as failure_exc:
- _debug("Failed to load extension module: %r" % failure_exc)
- if pyxargs.load_py_module_on_import_failure and pyxfilename.endswith('.py'):
- # try to fall back to normal import
- mod = imp.load_source(name, pyxfilename)
- assert mod.__file__ in (pyxfilename, pyxfilename+'c', pyxfilename+'o'), (mod.__file__, pyxfilename)
- else:
- tb = sys.exc_info()[2]
- import traceback
- exc = ImportError("Building module %s failed: %s" % (
- name, traceback.format_exception_only(*sys.exc_info()[:2])))
- if sys.version_info[0] >= 3:
- raise exc.with_traceback(tb)
- else:
- exec("raise exc, None, tb", {'exc': exc, 'tb': tb})
- return mod
-
-
-# import hooks
-
-class PyxImporter(object):
- """A meta-path importer for .pyx files.
- """
- def __init__(self, extension=PYX_EXT, pyxbuild_dir=None, inplace=False,
- language_level=None):
- self.extension = extension
- self.pyxbuild_dir = pyxbuild_dir
- self.inplace = inplace
- self.language_level = language_level
-
- def find_module(self, fullname, package_path=None):
- if fullname in sys.modules and not pyxargs.reload_support:
- return None # only here when reload()
-
- # package_path might be a _NamespacePath. Convert that into a list...
- if package_path is not None and not isinstance(package_path, list):
- package_path = list(package_path)
- try:
- fp, pathname, (ext,mode,ty) = imp.find_module(fullname,package_path)
- if fp: fp.close() # Python should offer a Default-Loader to avoid this double find/open!
- if pathname and ty == imp.PKG_DIRECTORY:
- pkg_file = os.path.join(pathname, '__init__'+self.extension)
- if os.path.isfile(pkg_file):
- return PyxLoader(fullname, pathname,
- init_path=pkg_file,
- pyxbuild_dir=self.pyxbuild_dir,
- inplace=self.inplace,
- language_level=self.language_level)
- if pathname and pathname.endswith(self.extension):
- return PyxLoader(fullname, pathname,
- pyxbuild_dir=self.pyxbuild_dir,
- inplace=self.inplace,
- language_level=self.language_level)
- if ty != imp.C_EXTENSION: # only when an extension, check if we have a .pyx next!
- return None
-
- # find .pyx fast, when .so/.pyd exist --inplace
- pyxpath = os.path.splitext(pathname)[0]+self.extension
- if os.path.isfile(pyxpath):
- return PyxLoader(fullname, pyxpath,
- pyxbuild_dir=self.pyxbuild_dir,
- inplace=self.inplace,
- language_level=self.language_level)
-
- # .so/.pyd's on PATH should not be remote from .pyx's
- # think no need to implement PyxArgs.importer_search_remote here?
-
- except ImportError:
- pass
-
- # searching sys.path ...
-
- #if DEBUG_IMPORT: print "SEARCHING", fullname, package_path
-
- mod_parts = fullname.split('.')
- module_name = mod_parts[-1]
- pyx_module_name = module_name + self.extension
-
- # this may work, but it returns the file content, not its path
- #import pkgutil
- #pyx_source = pkgutil.get_data(package, pyx_module_name)
-
- paths = package_path or sys.path
- for path in paths:
- pyx_data = None
- if not path:
- path = os.getcwd()
- elif os.path.isfile(path):
- try:
- zi = zipimporter(path)
- pyx_data = zi.get_data(pyx_module_name)
- except (ZipImportError, IOError, OSError):
- continue # Module not found.
- # unzip the imported file into the build dir
- # FIXME: can interfere with later imports if build dir is in sys.path and comes before zip file
- path = self.pyxbuild_dir
- elif not os.path.isabs(path):
- path = os.path.abspath(path)
-
- pyx_module_path = os.path.join(path, pyx_module_name)
- if pyx_data is not None:
- if not os.path.exists(path):
- try:
- os.makedirs(path)
- except OSError:
- # concurrency issue?
- if not os.path.exists(path):
- raise
- with open(pyx_module_path, "wb") as f:
- f.write(pyx_data)
- elif not os.path.isfile(pyx_module_path):
- continue # Module not found.
-
- return PyxLoader(fullname, pyx_module_path,
- pyxbuild_dir=self.pyxbuild_dir,
- inplace=self.inplace,
- language_level=self.language_level)
-
- # not found, normal package, not a .pyx file, none of our business
- _debug("%s not found" % fullname)
- return None
-
-
-class PyImporter(PyxImporter):
- """A meta-path importer for normal .py files.
- """
- def __init__(self, pyxbuild_dir=None, inplace=False, language_level=None):
- if language_level is None:
- language_level = sys.version_info[0]
- self.super = super(PyImporter, self)
- self.super.__init__(extension='.py', pyxbuild_dir=pyxbuild_dir, inplace=inplace,
- language_level=language_level)
- self.uncompilable_modules = {}
- self.blocked_modules = ['Cython', 'pyxbuild', 'pyximport.pyxbuild',
- 'distutils']
- self.blocked_packages = ['Cython.', 'distutils.']
-
- def find_module(self, fullname, package_path=None):
- if fullname in sys.modules:
- return None
- if any([fullname.startswith(pkg) for pkg in self.blocked_packages]):
- return None
- if fullname in self.blocked_modules:
- # prevent infinite recursion
- return None
- if _lib_loader.knows(fullname):
- return _lib_loader
- _debug("trying import of module '%s'", fullname)
- if fullname in self.uncompilable_modules:
- path, last_modified = self.uncompilable_modules[fullname]
- try:
- new_last_modified = os.stat(path).st_mtime
- if new_last_modified > last_modified:
- # import would fail again
- return None
- except OSError:
- # module is no longer where we found it, retry the import
- pass
-
- self.blocked_modules.append(fullname)
- try:
- importer = self.super.find_module(fullname, package_path)
- if importer is not None:
- if importer.init_path:
- path = importer.init_path
- real_name = fullname + '.__init__'
- else:
- path = importer.path
- real_name = fullname
- _debug("importer found path %s for module %s", path, real_name)
- try:
- so_path = build_module(
- real_name, path,
- pyxbuild_dir=self.pyxbuild_dir,
- language_level=self.language_level,
- inplace=self.inplace)
- _lib_loader.add_lib(fullname, path, so_path,
- is_package=bool(importer.init_path))
- return _lib_loader
- except Exception:
- if DEBUG_IMPORT:
- import traceback
- traceback.print_exc()
- # build failed, not a compilable Python module
- try:
- last_modified = os.stat(path).st_mtime
- except OSError:
- last_modified = 0
- self.uncompilable_modules[fullname] = (path, last_modified)
- importer = None
- finally:
- self.blocked_modules.pop()
- return importer
-
-
-class LibLoader(object):
- def __init__(self):
- self._libs = {}
-
- def load_module(self, fullname):
- try:
- source_path, so_path, is_package = self._libs[fullname]
- except KeyError:
- raise ValueError("invalid module %s" % fullname)
- _debug("Loading shared library module '%s' from %s", fullname, so_path)
- return load_module(fullname, source_path, so_path=so_path, is_package=is_package)
-
- def add_lib(self, fullname, path, so_path, is_package):
- self._libs[fullname] = (path, so_path, is_package)
-
- def knows(self, fullname):
- return fullname in self._libs
-
-_lib_loader = LibLoader()
-
-
-class PyxLoader(object):
- def __init__(self, fullname, path, init_path=None, pyxbuild_dir=None,
- inplace=False, language_level=None):
- _debug("PyxLoader created for loading %s from %s (init path: %s)",
- fullname, path, init_path)
- self.fullname = fullname
- self.path, self.init_path = path, init_path
- self.pyxbuild_dir = pyxbuild_dir
- self.inplace = inplace
- self.language_level = language_level
-
- def load_module(self, fullname):
- assert self.fullname == fullname, (
- "invalid module, expected %s, got %s" % (
- self.fullname, fullname))
- if self.init_path:
- # package
- #print "PACKAGE", fullname
- module = load_module(fullname, self.init_path,
- self.pyxbuild_dir, is_package=True,
- build_inplace=self.inplace,
- language_level=self.language_level)
- module.__path__ = [self.path]
- else:
- #print "MODULE", fullname
- module = load_module(fullname, self.path,
- self.pyxbuild_dir,
- build_inplace=self.inplace,
- language_level=self.language_level)
- return module
-
-
-#install args
-class PyxArgs(object):
- build_dir=True
- build_in_temp=True
- setup_args={} #None
-
-##pyxargs=None
-
-
-def _have_importers():
- has_py_importer = False
- has_pyx_importer = False
- for importer in sys.meta_path:
- if isinstance(importer, PyxImporter):
- if isinstance(importer, PyImporter):
- has_py_importer = True
- else:
- has_pyx_importer = True
-
- return has_py_importer, has_pyx_importer
-
-
-def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
- setup_args=None, reload_support=False,
- load_py_module_on_import_failure=False, inplace=False,
- language_level=None):
- """ Main entry point for pyxinstall.
-
- Call this to install the ``.pyx`` import hook in
- your meta-path for a single Python process. If you want it to be
- installed whenever you use Python, add it to your ``sitecustomize``
- (as described above).
-
- :param pyximport: If set to False, does not try to import ``.pyx`` files.
-
- :param pyimport: You can pass ``pyimport=True`` to also
- install the ``.py`` import hook
- in your meta-path. Note, however, that it is rather experimental,
- will not work at all for some ``.py`` files and packages, and will
- heavily slow down your imports due to search and compilation.
- Use at your own risk.
-
- :param build_dir: By default, compiled modules will end up in a ``.pyxbld``
- directory in the user's home directory. Passing a different path
- as ``build_dir`` will override this.
-
- :param build_in_temp: If ``False``, will produce the C files locally. Working
- with complex dependencies and debugging becomes more easy. This
- can principally interfere with existing files of the same name.
-
- :param setup_args: Dict of arguments for Distribution.
- See ``distutils.core.setup()``.
-
- :param reload_support: Enables support for dynamic
- ``reload(my_module)``, e.g. after a change in the Cython code.
- Additional files ``<so_path>.reloadNN`` may arise on that account, when
- the previously loaded module file cannot be overwritten.
-
- :param load_py_module_on_import_failure: If the compilation of a ``.py``
- file succeeds, but the subsequent import fails for some reason,
- retry the import with the normal ``.py`` module instead of the
- compiled module. Note that this may lead to unpredictable results
- for modules that change the system state during their import, as
- the second import will rerun these modifications in whatever state
- the system was left after the import of the compiled module
- failed.
-
- :param inplace: Install the compiled module
- (``.so`` for Linux and Mac / ``.pyd`` for Windows)
- next to the source file.
-
- :param language_level: The source language level to use: 2 or 3.
- The default is to use the language level of the current Python
- runtime for .py files and Py2 for ``.pyx`` files.
- """
- if setup_args is None:
- setup_args = {}
- if not build_dir:
- build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld')
-
- global pyxargs
- pyxargs = PyxArgs() #$pycheck_no
- pyxargs.build_dir = build_dir
- pyxargs.build_in_temp = build_in_temp
- pyxargs.setup_args = (setup_args or {}).copy()
- pyxargs.reload_support = reload_support
- pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure
-
- has_py_importer, has_pyx_importer = _have_importers()
- py_importer, pyx_importer = None, None
-
- if pyimport and not has_py_importer:
- py_importer = PyImporter(pyxbuild_dir=build_dir, inplace=inplace,
- language_level=language_level)
- # make sure we import Cython before we install the import hook
- import Cython.Compiler.Main, Cython.Compiler.Pipeline, Cython.Compiler.Optimize
- sys.meta_path.insert(0, py_importer)
-
- if pyximport and not has_pyx_importer:
- pyx_importer = PyxImporter(pyxbuild_dir=build_dir, inplace=inplace,
- language_level=language_level)
- sys.meta_path.append(pyx_importer)
-
- return py_importer, pyx_importer
-
-
-def uninstall(py_importer, pyx_importer):
- """
- Uninstall an import hook.
- """
- try:
- sys.meta_path.remove(py_importer)
- except ValueError:
- pass
-
- try:
- sys.meta_path.remove(pyx_importer)
- except ValueError:
- pass
-
-
-# MAIN
-
-def show_docs():
- import __main__
- __main__.__name__ = mod_name
- for name in dir(__main__):
- item = getattr(__main__, name)
- try:
- setattr(item, "__module__", mod_name)
- except (AttributeError, TypeError):
- pass
- help(__main__)
+if sys.version_info < (3, 5):
+ # _pyximport3 module requires at least Python 3.5
+ from pyximport._pyximport2 import install, uninstall, show_docs
+else:
+ from pyximport._pyximport3 import install, uninstall, show_docs
if __name__ == '__main__':
show_docs()
diff --git a/runtests.py b/runtests.py
index 72608882a..a5c12e65e 100755
--- a/runtests.py
+++ b/runtests.py
@@ -467,6 +467,7 @@ VER_DEP_MODULES = {
'compile.extsetslice',
'compile.extdelslice',
'run.special_methods_T561_py2',
+ 'run.builtin_type_inheritance_T608_py2only',
]),
(3,3) : (operator.lt, lambda x: x in ['build.package_compilation',
'build.cythonize_pep420_namespace',
diff --git a/tests/buffers/bufaccess.pyx b/tests/buffers/bufaccess.pyx
index 6b0b4ac30..3144f613d 100644
--- a/tests/buffers/bufaccess.pyx
+++ b/tests/buffers/bufaccess.pyx
@@ -10,7 +10,7 @@
from __future__ import unicode_literals
from cpython.object cimport PyObject
-from cpython.ref cimport Py_INCREF, Py_DECREF
+from cpython.ref cimport Py_INCREF, Py_DECREF, Py_CLEAR
cimport cython
import sys
@@ -1005,6 +1005,46 @@ def assign_to_object(object[object] buf, int idx, obj):
buf[idx] = obj
@testcase
+def check_object_nulled_1d(MockBuffer[object, ndim=1] buf, int idx, obj):
+ """
+ See comments on printbuf_object above.
+
+ >>> a = object()
+ >>> rc1 = get_refcount(a)
+ >>> A = ObjectMockBuffer(None, [a, a])
+ >>> check_object_nulled_1d(A, 0, a)
+ >>> check_object_nulled_1d(A, 1, a)
+ >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,))
+ >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride
+ >>> get_refcount(a) == rc1
+ True
+ """
+ cdef PyObject **data = <PyObject **>buf.buffer
+ Py_CLEAR(data[idx])
+ res = buf[idx] # takes None
+ buf[idx] = obj
+ return res
+
+@testcase
+def check_object_nulled_2d(MockBuffer[object, ndim=2] buf, int idx1, int idx2, obj):
+ """
+ See comments on printbuf_object above.
+
+ >>> a = object()
+ >>> rc1 = get_refcount(a)
+ >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2))
+ >>> check_object_nulled_2d(A, 0, 0, a)
+ >>> check_object_nulled_2d(A, 1, 1, a)
+ >>> get_refcount(a) == rc1
+ True
+ """
+ cdef PyObject **data = <PyObject **>buf.buffer
+ Py_CLEAR(data[idx1 + 2*idx2])
+ res = buf[idx1, idx2] # takes None
+ buf[idx1, idx2] = obj
+ return res
+
+@testcase
def assign_temporary_to_object(object[object] buf):
"""
See comments on printbuf_object above.
diff --git a/tests/compile/fromimport.pyx b/tests/compile/fromimport.pyx
index 46f7b5442..e84b26a97 100644
--- a/tests/compile/fromimport.pyx
+++ b/tests/compile/fromimport.pyx
@@ -6,10 +6,34 @@ def f():
from spam import eggs as ova
from . import spam
from ... import spam
+ from .. . import spam
+ from . .. import spam
+ from . . . import spam
from .. import spam, foo
+ from . . import spam, foo
from ... import spam, foobar
+ from .. . import spam, foobar
+ from . .. import spam, foobar
+ from . . . import spam, foobar
from .spam import foo
+ from . spam import foo
from ...spam import foo, bar
+ from .. . spam import foo, bar
+ from . .. spam import foo, bar
+ from . . . spam import foo, bar
from ...spam.foo import bar
+ from ... spam.foo import bar
+ from .. . spam.foo import bar
+ from . .. spam.foo import bar
+ from . . . spam.foo import bar
from ...spam.foo import foo, bar
+ from ... spam.foo import foo, bar
+ from .. . spam.foo import foo, bar
+ from . .. spam.foo import foo, bar
+ from . . . spam.foo import foo, bar
from ...spam.foo import (foo, bar)
+ from ... spam.foo import (foo, bar)
+ from .. . spam.foo import (foo, bar)
+ from .. . spam.foo import (foo, bar)
+ from . .. spam.foo import (foo, bar)
+ from . . . spam.foo import (foo, bar)
diff --git a/tests/compile/fromimport_star.pyx b/tests/compile/fromimport_star.pyx
index 6c19476b7..80542dddb 100644
--- a/tests/compile/fromimport_star.pyx
+++ b/tests/compile/fromimport_star.pyx
@@ -2,5 +2,12 @@
from spam import *
from ...spam.foo import *
+from ... spam.foo import *
+from .. . spam.foo import *
+from . . . spam.foo import *
+from . .. spam.foo import *
from . import *
from ... import *
+from .. . import *
+from . .. import *
+from . . . import *
diff --git a/tests/compile/fused_redeclare_T3111.pyx b/tests/compile/fused_redeclare_T3111.pyx
index 04862ae88..d91f1d132 100644
--- a/tests/compile/fused_redeclare_T3111.pyx
+++ b/tests/compile/fused_redeclare_T3111.pyx
@@ -27,10 +27,10 @@ _WARNINGS = """
36:10: 'cpdef_cname_method' redeclared
# from MemoryView.pyx
-958:29: Ambiguous exception value, same as default return value: 0
-958:29: Ambiguous exception value, same as default return value: 0
-983:46: Ambiguous exception value, same as default return value: 0
-983:46: Ambiguous exception value, same as default return value: 0
-1073:29: Ambiguous exception value, same as default return value: 0
-1073:29: Ambiguous exception value, same as default return value: 0
+975:29: Ambiguous exception value, same as default return value: 0
+975:29: Ambiguous exception value, same as default return value: 0
+1016:46: Ambiguous exception value, same as default return value: 0
+1016:46: Ambiguous exception value, same as default return value: 0
+1106:29: Ambiguous exception value, same as default return value: 0
+1106:29: Ambiguous exception value, same as default return value: 0
"""
diff --git a/tests/errors/builtin_type_inheritance.pyx b/tests/errors/builtin_type_inheritance.pyx
index 1c6ad31e1..a85f7a133 100644
--- a/tests/errors/builtin_type_inheritance.pyx
+++ b/tests/errors/builtin_type_inheritance.pyx
@@ -8,11 +8,9 @@ cdef class MyTuple(tuple):
cdef class MyBytes(bytes):
pass
-cdef class MyStr(str): # only in Py2, but can't know that during compilation
- pass
+# str is also included in this in Py2, but checked at runtime instead
_ERRORS = """
5:19: inheritance from PyVarObject types like 'tuple' is not currently supported
8:19: inheritance from PyVarObject types like 'bytes' is not currently supported
-11:17: inheritance from PyVarObject types like 'str' is not currently supported
"""
diff --git a/tests/errors/cfuncptr.pyx b/tests/errors/cfuncptr.pyx
new file mode 100644
index 000000000..e05efa519
--- /dev/null
+++ b/tests/errors/cfuncptr.pyx
@@ -0,0 +1,36 @@
+# mode: error
+
+cdef int exceptmaybeminus2(int bad) except ?-2:
+ if bad:
+ raise RuntimeError
+ else:
+ return 0
+
+def fail_exceptmaybeminus2(bad):
+ cdef int (*fptr_a)(int) except -2
+ cdef int (*fptr_b)(int) except -1
+ cdef int (*fptr_c)(int) except ?-1
+ fptr_a = exceptmaybeminus2
+ fptr_b = exceptmaybeminus2
+ fptr_c = exceptmaybeminus2
+
+cdef extern from *:
+ # define this as extern since Cython converts internal "except*" to "except -1"
+ cdef int exceptstar(int bad) except *
+
+def fail_exceptstar(bad):
+ cdef int (*fptr_a)(int) # noexcept
+ cdef int (*fptr_b)(int) except -1
+ cdef int (*fptr_c)(int) except ?-1
+ fptr_a = exceptstar
+ fptr_b = exceptstar
+ fptr_c = exceptstar
+
+_ERRORS = """
+13:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except -2'
+14:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except -1'
+15:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except? -1'
+25:13: Cannot assign type 'int (int) except *' to 'int (*)(int)'
+26:13: Cannot assign type 'int (int) except *' to 'int (*)(int) except -1'
+27:13: Cannot assign type 'int (int) except *' to 'int (*)(int) except? -1'
+"""
diff --git a/tests/errors/cpp_object_template.pyx b/tests/errors/cpp_object_template.pyx
index 029293e39..e1a15c905 100644
--- a/tests/errors/cpp_object_template.pyx
+++ b/tests/errors/cpp_object_template.pyx
@@ -18,7 +18,7 @@ def memview():
vmv.push_back(array.array("i", [1,2,3]))
_ERRORS = u"""
-10:16: Python object type 'Python object' cannot be used as a template argument
-12:16: Python object type 'A' cannot be used as a template argument
-17:19: Reference-counted type 'int[:]' cannot be used as a template argument
+10:15: Python object type 'Python object' cannot be used as a template argument
+12:15: Python object type 'A' cannot be used as a template argument
+17:15: Reference-counted type 'int[:]' cannot be used as a template argument
"""
diff --git a/tests/errors/dataclass_e1.pyx b/tests/errors/dataclass_e1.pyx
index 39337ba6d..95d67ad7d 100644
--- a/tests/errors/dataclass_e1.pyx
+++ b/tests/errors/dataclass_e1.pyx
@@ -1,5 +1,5 @@
# mode: error
-
+# tag: warnings
cimport cython
@cython.dataclasses.dataclass(1, shouldnt_be_here=True, init=5, unsafe_hash=True)
diff --git a/tests/errors/dataclass_e5.pyx b/tests/errors/dataclass_e5.pyx
new file mode 100644
index 000000000..e86adf47e
--- /dev/null
+++ b/tests/errors/dataclass_e5.pyx
@@ -0,0 +1,21 @@
+# mode: error
+# tag: warnings
+
+cimport cython
+
+@cython.dataclasses.dataclass
+cdef class C:
+ a: int
+ b: long
+ c: Py_ssize_t
+ d: float
+ e: double
+
+
+_WARNINGS = """
+9:7: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?
+10:7: Found C type 'Py_ssize_t' in a Python annotation. Did you mean to use a Python type?
+10:7: Unknown type declaration in annotation, ignoring
+12:7: Found C type 'double' in a Python annotation. Did you mean to use a Python type?
+12:7: Unknown type declaration in annotation, ignoring
+"""
diff --git a/tests/errors/e_typing_errors.pyx b/tests/errors/e_typing_errors.pyx
new file mode 100644
index 000000000..832f68d90
--- /dev/null
+++ b/tests/errors/e_typing_errors.pyx
@@ -0,0 +1,59 @@
+# mode: error
+
+import cython
+
+try:
+ from typing import Optional, ClassVar
+except ImportError:
+ pass
+
+
+# not OK
+
+def optional_cython_types(Optional[cython.int] i, Optional[cython.double] d, Optional[cython.float] f,
+ Optional[cython.complex] c, Optional[cython.long] l, Optional[cython.longlong] ll):
+ pass
+
+
+MyStruct = cython.struct(a=cython.int, b=cython.double)
+
+def optional_cstruct(Optional[MyStruct] x):
+ pass
+
+
+def optional_pytypes(Optional[int] i, Optional[float] f, Optional[complex] c, Optional[long] l):
+ pass
+
+
+cdef ClassVar[list] x
+
+
+# OK
+
+def optional_memoryview(double[:] d, Optional[double[:]] o):
+ pass
+
+
+cdef class Cls(object):
+ cdef ClassVar[list] x
+
+
+
+_ERRORS = """
+13:42: typing.Optional[...] cannot be applied to non-Python type int
+13:66: typing.Optional[...] cannot be applied to non-Python type double
+13:93: typing.Optional[...] cannot be applied to non-Python type float
+14:42: typing.Optional[...] cannot be applied to non-Python type double complex
+14:70: typing.Optional[...] cannot be applied to non-Python type long
+14:95: typing.Optional[...] cannot be applied to non-Python type long long
+24:30: typing.Optional[...] cannot be applied to non-Python type int
+24:47: typing.Optional[...] cannot be applied to non-Python type float
+24:87: typing.Optional[...] cannot be applied to non-Python type long
+
+20:30: typing.Optional[...] cannot be applied to non-Python type MyStruct
+
+28:20: Modifier 'typing.ClassVar' is not allowed here.
+
+# FIXME: this should be ok :-?
+33:52: typing.Optional[...] cannot be applied to non-Python type double[:]
+"""
diff --git a/tests/errors/e_typing_optional.py b/tests/errors/e_typing_optional.py
index e75638e00..6facfeea4 100644
--- a/tests/errors/e_typing_optional.py
+++ b/tests/errors/e_typing_optional.py
@@ -8,11 +8,10 @@ except ImportError:
pass
-def optional_pytypes(i: Optional[int], f: Optional[float]):
- pass
-
+# not OK
-def optional_cython_types(i: Optional[cython.int], d: Optional[cython.double], f: Optional[cython.float]):
+def optional_cython_types(i: Optional[cython.int], d: Optional[cython.double], f: Optional[cython.float],
+ c: Optional[cython.complex], l: Optional[cython.long], ll: Optional[cython.longlong]):
pass
@@ -22,13 +21,23 @@ def optional_cstruct(x: Optional[MyStruct]):
pass
+# OK
+
+def optional_pytypes(i: Optional[int], f: Optional[float], c: Optional[complex], l: Optional[long]):
+ pass
+
+
+def optional_memoryview(d: double[:], o: Optional[double[:]]):
+ pass
+
+
_ERRORS = """
-15:29: Only Python type arguments can use typing.Optional[...]
-15:54: Only Python type arguments can use typing.Optional[...]
-15:82: Only Python type arguments can use typing.Optional[...]
-21:24: Only Python type arguments can use typing.Optional[...]
-
-# FIXME: these should be allowed!
-11:24: Only Python type arguments can use typing.Optional[...]
-11:42: Only Python type arguments can use typing.Optional[...]
+13:44: typing.Optional[...] cannot be applied to non-Python type int
+13:69: typing.Optional[...] cannot be applied to non-Python type double
+13:97: typing.Optional[...] cannot be applied to non-Python type float
+14:44: typing.Optional[...] cannot be applied to non-Python type double complex
+14:73: typing.Optional[...] cannot be applied to non-Python type long
+14:100: typing.Optional[...] cannot be applied to non-Python type long long
+
+20:33: typing.Optional[...] cannot be applied to non-Python type MyStruct
"""
diff --git a/tests/errors/incomplete_varadic.pyx b/tests/errors/incomplete_varadic.pyx
new file mode 100644
index 000000000..1695a874d
--- /dev/null
+++ b/tests/errors/incomplete_varadic.pyx
@@ -0,0 +1,8 @@
+# mode: error
+
+cdef error_time(bool its_fine, .):
+ pass
+
+_ERRORS = u"""
+3: 31: Expected an identifier, found '.'
+"""
diff --git a/tests/memoryview/cythonarray.pyx b/tests/memoryview/cythonarray.pyx
index 0dc823581..6bfd7397e 100644
--- a/tests/memoryview/cythonarray.pyx
+++ b/tests/memoryview/cythonarray.pyx
@@ -286,3 +286,39 @@ def test_char_array_in_python_api(*shape):
arr = array(shape=shape, itemsize=sizeof(char), format='c', mode='c')
arr[:] = b'x'
return arr
+
+def test_is_Sequence():
+ """
+ >>> test_is_Sequence()
+ 1
+ 1
+ True
+ """
+ import sys
+ if sys.version_info < (3, 3):
+ from collections import Sequence
+ else:
+ from collections.abc import Sequence
+
+ arr = array(shape=(5,), itemsize=sizeof(char), format='c', mode='c')
+ for i in range(arr.shape[0]):
+ arr[i] = f'{i}'.encode('ascii')
+ print(arr.count(b'1')) # test for presence of added collection method
+ print(arr.index(b'1')) # test for presence of added collection method
+
+ if sys.version_info >= (3, 10):
+ # test structural pattern match in Python
+ # (because Cython hasn't implemented it yet, and because the details
+ # of what Python considers a sequence are important)
+ globs = {'arr': arr}
+ exec("""
+match arr:
+ case [*_]:
+ res = True
+ case _:
+ res = False
+""", globs)
+ assert globs['res']
+
+ return isinstance(arr, Sequence)
+
diff --git a/tests/memoryview/memoryview.pyx b/tests/memoryview/memoryview.pyx
index bb8b73780..d2832a0b6 100644
--- a/tests/memoryview/memoryview.pyx
+++ b/tests/memoryview/memoryview.pyx
@@ -1205,3 +1205,36 @@ def test_conversion_failures():
assert get_refcount(dmb) == dmb_before, "before %s after %s" % (dmb_before, get_refcount(dmb))
else:
assert False, "Conversion should fail!"
+
+def test_is_Sequence(double[:] a):
+ """
+ >>> test_is_Sequence(DoubleMockBuffer(None, range(6), shape=(6,)))
+ 1
+ 1
+ True
+ """
+ if sys.version_info < (3, 3):
+ from collections import Sequence
+ else:
+ from collections.abc import Sequence
+
+ for i in range(a.shape[0]):
+ a[i] = i
+ print(a.count(1.0)) # test for presence of added collection method
+ print(a.index(1.0)) # test for presence of added collection method
+
+ if sys.version_info >= (3, 10):
+ # test structural pattern match in Python
+ # (because Cython hasn't implemented it yet, and because the details
+ # of what Python considers a sequence are important)
+ globs = {'arr': a}
+ exec("""
+match arr:
+ case [*_]:
+ res = True
+ case _:
+ res = False
+""", globs)
+ assert globs['res']
+
+ return isinstance(<object>a, Sequence)
diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx
index 4e06c4f41..5f6134135 100644
--- a/tests/memoryview/memslice.pyx
+++ b/tests/memoryview/memslice.pyx
@@ -7,7 +7,7 @@
from __future__ import unicode_literals
from cpython.object cimport PyObject
-from cpython.ref cimport Py_INCREF, Py_DECREF
+from cpython.ref cimport Py_INCREF, Py_DECREF, Py_CLEAR
cimport cython
from cython cimport view
@@ -1134,6 +1134,49 @@ def assign_temporary_to_object(object[:] buf):
"""
buf[1] = {3-2: 2+(2*4)-2}
+@testcase
+def check_object_nulled_1d(object[:] buf, int idx, obj):
+ """
+ See comments on printbuf_object above.
+
+ >>> a = object()
+ >>> rc1 = get_refcount(a)
+ >>> A = ObjectMockBuffer(None, [a, a])
+ >>> check_object_nulled_1d(A, 0, a)
+ >>> check_object_nulled_1d(A, 1, a)
+ >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,))
+ >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride
+ >>> get_refcount(a) == rc1
+ True
+ """
+ cdef ObjectMockBuffer omb = buf.base
+ cdef PyObject **data = <PyObject**>(omb.buffer)
+ Py_CLEAR(data[idx])
+ res = buf[idx] # takes None
+ buf[idx] = obj
+ return res
+
+@testcase
+def check_object_nulled_2d(object[:, ::1] buf, int idx1, int idx2, obj):
+ """
+ See comments on printbuf_object above.
+
+ >>> a = object()
+ >>> rc1 = get_refcount(a)
+ >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2))
+ >>> check_object_nulled_2d(A, 0, 0, a)
+ >>> check_object_nulled_2d(A, 1, 1, a)
+ >>> get_refcount(a) == rc1
+ True
+ """
+ cdef ObjectMockBuffer omb = buf.base
+ cdef PyObject **data = <PyObject**>(omb.buffer)
+ Py_CLEAR(data[idx1 + 2*idx2])
+ res = buf[idx1, idx2] # takes None
+ buf[idx1, idx2] = obj
+ return res
+
+
#
# Test __cythonbufferdefaults__
#
diff --git a/tests/pypy_bugs.txt b/tests/pypy_bugs.txt
index 1004a93e4..5a27265ee 100644
--- a/tests/pypy_bugs.txt
+++ b/tests/pypy_bugs.txt
@@ -61,3 +61,6 @@ run.exttype_dealloc
# bugs in cpyext
run.special_methods_T561
run.special_methods_T561_py2
+
+# unicode is a PyVarObject on PyPy3
+run.builtin_type_inheritance_T608
diff --git a/tests/run/annotation_typing.pyx b/tests/run/annotation_typing.pyx
index 03900061a..ce74ef1dd 100644
--- a/tests/run/annotation_typing.pyx
+++ b/tests/run/annotation_typing.pyx
@@ -11,14 +11,14 @@ except ImportError:
pass
-def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'type': 'float'} = 4) -> list:
+def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'type': 'long int'} = 4) -> list:
"""
>>> old_dict_syntax([1])
- ('list object', 'int', 'long', 'float')
- [1, 2, 3, 4.0]
+ ('list object', 'int object', 'long', 'long')
+ [1, 2, 3, 4]
>>> old_dict_syntax([1], 3)
- ('list object', 'int', 'long', 'float')
- [1, 3, 3, 4.0]
+ ('list object', 'int object', 'long', 'long')
+ [1, 3, 3, 4]
>>> old_dict_syntax(123)
Traceback (most recent call last):
TypeError: Argument 'a' has incorrect type (expected list, got int)
@@ -33,16 +33,16 @@ def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'ty
return a
-def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None, o: Optional[tuple] = ()) -> list:
+def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None, o: Optional[tuple] = ()) -> list:
"""
>>> pytypes_def([1])
- ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object')
+ ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object')
[1, 2, 3, 4.0, None, ()]
>>> pytypes_def([1], 3)
- ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object')
+ ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object')
[1, 3, 3, 4.0, None, ()]
>>> pytypes_def([1], 3, 2, 1, [], None)
- ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object')
+ ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object')
[1, 3, 2, 1.0, [], None]
>>> pytypes_def(123)
Traceback (most recent call last):
@@ -60,16 +60,16 @@ def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None,
return a
-cpdef pytypes_cpdef(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None, o: Optional[tuple] = ()):
+cpdef pytypes_cpdef(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None, o: Optional[tuple] = ()):
"""
>>> pytypes_cpdef([1])
- ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object')
+ ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object')
[1, 2, 3, 4.0, None, ()]
>>> pytypes_cpdef([1], 3)
- ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object')
+ ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object')
[1, 3, 3, 4.0, None, ()]
>>> pytypes_cpdef([1], 3, 2, 1, [], None)
- ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object')
+ ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object')
[1, 3, 2, 1.0, [], None]
>>> pytypes_cpdef(123)
Traceback (most recent call last):
@@ -87,7 +87,7 @@ cpdef pytypes_cpdef(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = No
return a
-cdef c_pytypes_cdef(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None):
+cdef c_pytypes_cdef(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None):
print(typeof(a), typeof(b), typeof(c), typeof(d), typeof(n))
a.append(b)
a.append(c)
@@ -99,10 +99,10 @@ cdef c_pytypes_cdef(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = No
def pytypes_cdef(a, b=2, c=3, d=4):
"""
>>> pytypes_cdef([1])
- ('list object', 'Python object', 'Python object', 'double', 'list object')
+ ('list object', 'int object', 'Python object', 'double', 'list object')
[1, 2, 3, 4.0, None]
>>> pytypes_cdef([1], 3)
- ('list object', 'Python object', 'Python object', 'double', 'list object')
+ ('list object', 'int object', 'Python object', 'double', 'list object')
[1, 3, 3, 4.0, None]
>>> pytypes_cdef(123) # doctest: +ELLIPSIS
Traceback (most recent call last):
@@ -278,24 +278,28 @@ class LateClass(object):
pass
-def py_float_default(price : float=None, ndigits=4):
+def py_float_default(price : Optional[float]=None, ndigits=4):
"""
Python default arguments should prevent C type inference.
>>> py_float_default()
(None, 4)
- >>> py_float_default(2)
- (2, 4)
+ >>> py_float_default(None)
+ (None, 4)
+ >>> py_float_default(2) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ TypeError: ...float...
>>> py_float_default(2.0)
(2.0, 4)
- >>> py_float_default(2, 3)
- (2, 3)
+ >>> py_float_default(2, 3) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ TypeError: ...float...
"""
return price, ndigits
cdef class ClassAttribute:
- cls_attr : float = 1.
+ cls_attr : cython.float = 1.
@cython.cfunc
@@ -325,6 +329,36 @@ class HasPtr:
return f"HasPtr({self.a[0]}, {self.b})"
+@cython.annotation_typing(False)
+def turn_off_typing(x: float, d: dict):
+ """
+ >>> turn_off_typing('not a float', []) # ignore the typing
+ ('Python object', 'Python object', 'not a float', [])
+ """
+ return typeof(x), typeof(d), x, d
+
+
+@cython.annotation_typing(False)
+cdef class ClassTurnOffTyping:
+ x: float
+ d: dict
+
+ def get_var_types(self, arg: float):
+ """
+ >>> ClassTurnOffTyping().get_var_types(1.0)
+ ('Python object', 'Python object', 'Python object')
+ """
+ return typeof(self.x), typeof(self.d), typeof(arg)
+
+ @cython.annotation_typing(True)
+ def and_turn_it_back_on_again(self, arg: float):
+ """
+ >>> ClassTurnOffTyping().and_turn_it_back_on_again(1.0)
+ ('Python object', 'Python object', 'double')
+ """
+ return typeof(self.x), typeof(self.d), typeof(arg)
+
+
_WARNINGS = """
14:32: Strings should no longer be used for type declarations. Use 'cython.int' etc. directly.
14:47: Dicts should no longer be used as type annotations. Use 'cython.int' etc. directly.
@@ -332,12 +366,16 @@ _WARNINGS = """
14:77: Dicts should no longer be used as type annotations. Use 'cython.int' etc. directly.
14:85: Python type declaration in signature annotation does not refer to a Python type
14:85: Strings should no longer be used for type declarations. Use 'cython.int' etc. directly.
-36:64: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.
-63:68: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.
-90:68: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.
+36:40: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?
+36:66: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.
+63:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?
+63:70: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.
+90:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?
+90:70: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.
274:44: Unknown type declaration in annotation, ignoring
-281:29: Ambiguous types in annotation, ignoring
-298:15: Annotation ignored since class-level attributes must be Python objects. Were you trying to set up an instance attribute?
+302:15: Annotation ignored since class-level attributes must be Python objects. Were you trying to set up an instance attribute?
+# DUPLICATE:
+63:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?
# BUG:
63:6: 'pytypes_cpdef' redeclared
146:0: 'struct_io' redeclared
diff --git a/tests/run/append.pyx b/tests/run/append.pyx
index 1976780d5..dcc3fe7c9 100644
--- a/tests/run/append.pyx
+++ b/tests/run/append.pyx
@@ -1,3 +1,5 @@
+cimport cython
+
class A:
def append(self, x):
print u"appending", x
@@ -94,3 +96,35 @@ def method_name():
'append'
"""
return [].append.__name__
+
+@cython.test_assert_path_exists(
+ '//PythonCapiCallNode')
+def append_optimized(probably_list):
+ """
+ >>> l = []
+ >>> append_optimized(l)
+ >>> l
+ [1]
+ """
+ probably_list.append(1)
+
+cdef class AppendBug:
+ # https://github.com/cython/cython/issues/4828
+ # if the attribute "append" is found it shouldn't be replaced with
+ # __Pyx_PyObject_Append
+ cdef object append
+ def __init__(self, append):
+ self.append = append
+
+@cython.test_fail_if_path_exists(
+ '//PythonCapiCallNode')
+def specific_attribute(AppendBug a):
+ """
+ >>> def append_to_default_arg(a, arg=[]):
+ ... arg.append(a)
+ ... return arg
+ >>> specific_attribute(AppendBug(append_to_default_arg))
+ >>> append_to_default_arg(None)
+ [1, None]
+ """
+ a.append(1)
diff --git a/tests/run/builtin_type_inheritance_T608.pyx b/tests/run/builtin_type_inheritance_T608.pyx
index 1214b6841..d03558a25 100644
--- a/tests/run/builtin_type_inheritance_T608.pyx
+++ b/tests/run/builtin_type_inheritance_T608.pyx
@@ -1,42 +1,6 @@
# ticket: t608
-cdef class MyInt(int):
- """
- >>> MyInt(2) == 2
- True
- >>> MyInt(2).attr is None
- True
- """
- cdef readonly object attr
-
-cdef class MyInt2(int):
- """
- >>> MyInt2(2) == 2
- True
- >>> MyInt2(2).attr is None
- True
- >>> MyInt2(2).test(3)
- 5
- """
- cdef readonly object attr
-
- def test(self, arg):
- return self._test(arg)
-
- cdef _test(self, arg):
- return self + arg
-
-cdef class MyInt3(MyInt2):
- """
- >>> MyInt3(2) == 2
- True
- >>> MyInt3(2).attr is None
- True
- >>> MyInt3(2).test(3)
- 6
- """
- cdef _test(self, arg):
- return self + arg + 1
+# see "builtin_type_inheritance_T608_py2only.pyx" for inheritance from int
cdef class MyFloat(float):
"""
diff --git a/tests/run/builtin_type_inheritance_T608_py2only.pyx b/tests/run/builtin_type_inheritance_T608_py2only.pyx
new file mode 100644
index 000000000..b10a2610a
--- /dev/null
+++ b/tests/run/builtin_type_inheritance_T608_py2only.pyx
@@ -0,0 +1,42 @@
+# ticket: t608
+
+# This only works reliably in Python2. In Python3 ints are variable-sized.
+# You get away with it for small ints but it's a bad idea
+
+cdef class MyInt(int):
+ """
+ >>> MyInt(2) == 2
+ True
+ >>> MyInt(2).attr is None
+ True
+ """
+ cdef readonly object attr
+
+cdef class MyInt2(int):
+ """
+ >>> MyInt2(2) == 2
+ True
+ >>> MyInt2(2).attr is None
+ True
+ >>> MyInt2(2).test(3)
+ 5
+ """
+ cdef readonly object attr
+
+ def test(self, arg):
+ return self._test(arg)
+
+ cdef _test(self, arg):
+ return self + arg
+
+cdef class MyInt3(MyInt2):
+ """
+ >>> MyInt3(2) == 2
+ True
+ >>> MyInt3(2).attr is None
+ True
+ >>> MyInt3(2).test(3)
+ 6
+ """
+ cdef _test(self, arg):
+ return self + arg + 1
diff --git a/tests/run/bytearray_iter.py b/tests/run/bytearray_iter.py
index 1865f057b..60df9fcc1 100644
--- a/tests/run/bytearray_iter.py
+++ b/tests/run/bytearray_iter.py
@@ -88,3 +88,18 @@ def modifying_reversed_bytearray_iter(x):
for a in reversed(x):
print(chr(a))
del x[0]
+
+# ticket: 3473
+
+def test_bytearray_iteration(src):
+ """
+ >>> src = b'123'
+ >>> test_bytearray_iteration(src)
+ 49
+ 50
+ 51
+ """
+
+ data = bytearray(src)
+ for elem in data:
+ print(elem)
diff --git a/tests/run/cdef_class_dataclass.pyx b/tests/run/cdef_class_dataclass.pyx
index 326fd0210..2f69e0f8f 100644
--- a/tests/run/cdef_class_dataclass.pyx
+++ b/tests/run/cdef_class_dataclass.pyx
@@ -127,8 +127,8 @@ cdef class ContainsNonPyFields:
"""
mystruct: S = cython.dataclasses.field(compare=False)
mystruct_ptr: S_ptr = field(init=False, repr=False, default_factory=malloc_a_struct)
- memview: int[:, ::1] = field(default=create_array((3,1), "c"), # mutable so not great but OK for a test
- compare=False)
+ memview: cython.int[:, ::1] = field(default=create_array((3,1), "c"), # mutable so not great but OK for a test
+ compare=False)
def __dealloc__(self):
free(self.mystruct_ptr)
@@ -154,8 +154,8 @@ cdef class InitClassVars:
True
"""
a: cython.int = 0
- b1: InitVar[double] = 1.0
- b2: py_dataclasses.InitVar[double] = 1.0
+ b1: InitVar[cython.double] = 1.0
+ b2: py_dataclasses.InitVar[cython.double] = 1.0
c1: ClassVar[float] = 2.0
c2: typing.ClassVar[float] = 2.0
cdef InitVar[cython.int] d1
@@ -206,7 +206,7 @@ cdef class TestVisibility:
"""
cdef double a
a = 1.0
- b: double = 2.0
+ b: cython.double = 2.0
cdef public double c
c = 3.0
cdef public object d
@@ -222,7 +222,7 @@ cdef class TestFrozen:
Traceback (most recent call last):
AttributeError: attribute 'a' of '...TestFrozen' objects is not writable
"""
- a: double = 2.0
+ a: cython.double = 2.0
@dataclass(kw_only=True)
cdef class TestKwOnly:
@@ -248,8 +248,8 @@ cdef class TestKwOnly:
TypeError: __init__() needs keyword-only argument b
"""
- a: double = 2.0
- b: long
+ a: cython.double = 2.0
+ b: cython.long
import sys
if sys.version_info >= (3, 7):
diff --git a/tests/run/cdef_setitem_T284.pyx b/tests/run/cdef_setitem_T284.pyx
index 389b8c409..871afb892 100644
--- a/tests/run/cdef_setitem_T284.pyx
+++ b/tests/run/cdef_setitem_T284.pyx
@@ -24,9 +24,9 @@ def with_external_list(list L):
"""
>>> with_external_list([1,2,3])
[1, -10, 3]
- >>> with_external_list(None)
+ >>> with_external_list(None) # doctest: +ELLIPSIS
Traceback (most recent call last):
- TypeError: 'NoneType' object is not subscriptable
+ TypeError: 'NoneType' object ...
"""
ob = 1L
L[ob] = -10
diff --git a/tests/run/cfuncptr.pyx b/tests/run/cfuncptr.pyx
index b7018cce0..cb3b32184 100644
--- a/tests/run/cfuncptr.pyx
+++ b/tests/run/cfuncptr.pyx
@@ -46,15 +46,49 @@ cdef int exceptminus2(int bad) except -2:
else:
return 0
-def call_exceptminus2(bad):
+def call_exceptminus2_through_exceptstar_pointer(bad):
"""
- >>> call_exceptminus2(True)
+ >>> call_exceptminus2_through_exceptstar_pointer(True)
Traceback (most recent call last):
...
RuntimeError
- >>> call_exceptminus2(False)
+ >>> call_exceptminus2_through_exceptstar_pointer(False)
0
"""
cdef int (*fptr)(int) except * # GH4770 - should not be treated as except? -1
fptr = exceptminus2
return fptr(bad)
+
+def call_exceptminus2_through_exceptmaybeminus2_pointer(bad):
+ """
+ >>> call_exceptminus2_through_exceptmaybeminus2_pointer(True)
+ Traceback (most recent call last):
+ ...
+ RuntimeError
+ >>> call_exceptminus2_through_exceptmaybeminus2_pointer(False)
+ 0
+ """
+ cdef int (*fptr)(int) except ?-2 # exceptions should be compatible
+ fptr = exceptminus2
+ return fptr(bad)
+
+cdef int noexcept_func(): # noexcept
+ return 0
+
+def call_noexcept_func_except_star():
+ """
+ >>> call_noexcept_func_except_star()
+ 0
+ """
+ cdef int (*fptr)() except *
+ fptr = noexcept_func # exception specifications are compatible
+ return fptr()
+
+def call_noexcept_func_except_check():
+ """
+ >>> call_noexcept_func_except_check()
+ 0
+ """
+ cdef int (*fptr)() except ?-1
+ fptr = noexcept_func # exception specifications are compatible
+ return fptr()
diff --git a/tests/run/cpp_stl_random.pyx b/tests/run/cpp_stl_random.pyx
index 58f7db040..3b074c278 100644
--- a/tests/run/cpp_stl_random.pyx
+++ b/tests/run/cpp_stl_random.pyx
@@ -1,7 +1,16 @@
# mode: run
# tag: cpp, cpp11
-from libcpp.random cimport mt19937
+from libcpp.random cimport mt19937, mt19937_64, random_device, uniform_int_distribution, \
+ uniform_real_distribution, bernoulli_distribution, binomial_distribution, \
+ geometric_distribution, negative_binomial_distribution, poisson_distribution, \
+ exponential_distribution, gamma_distribution, weibull_distribution, \
+ extreme_value_distribution, normal_distribution, lognormal_distribution, \
+ chi_squared_distribution, cauchy_distribution, fisher_f_distribution, student_t_distribution
+from libc.float cimport DBL_MAX as DBL_MAX_
+
+
+DBL_MAX = DBL_MAX_
def mt19937_seed_test():
@@ -9,8 +18,8 @@ def mt19937_seed_test():
>>> print(mt19937_seed_test())
1608637542
"""
- cdef mt19937 rd = mt19937(42)
- return rd()
+ cdef mt19937 gen = mt19937(42)
+ return gen()
def mt19937_reseed_test():
@@ -18,9 +27,9 @@ def mt19937_reseed_test():
>>> print(mt19937_reseed_test())
1608637542
"""
- cdef mt19937 rd
- rd.seed(42)
- return rd()
+ cdef mt19937 gen
+ gen.seed(42)
+ return gen()
def mt19937_min_max():
@@ -31,8 +40,8 @@ def mt19937_min_max():
>>> print(y) # 2 ** 32 - 1 because mt19937 is 32 bit.
4294967295
"""
- cdef mt19937 rd
- return rd.min(), rd.max()
+ cdef mt19937 gen
+ return gen.min(), gen.max()
def mt19937_discard(z):
@@ -43,13 +52,297 @@ def mt19937_discard(z):
>>> print(y)
1972458954
"""
- cdef mt19937 rd = mt19937(42)
+ cdef mt19937 gen = mt19937(42)
# Throw away z random numbers.
- rd.discard(z)
- a = rd()
+ gen.discard(z)
+ a = gen()
# Iterate over z random numbers.
- rd.seed(42)
+ gen.seed(42)
for _ in range(z + 1):
- b = rd()
+ b = gen()
return a, b
+
+
+def mt19937_64_seed_test():
+ """
+ >>> print(mt19937_64_seed_test())
+ 13930160852258120406
+ """
+ cdef mt19937_64 gen = mt19937_64(42)
+ return gen()
+
+
+def mt19937_64_reseed_test():
+ """
+ >>> print(mt19937_64_reseed_test())
+ 13930160852258120406
+ """
+ cdef mt19937_64 gen
+ gen.seed(42)
+ return gen()
+
+
+def mt19937_64_min_max():
+ """
+ >>> x, y = mt19937_64_min_max()
+ >>> print(x)
+ 0
+ >>> print(y) # 2 ** 64 - 1 because mt19937_64 is 64 bit.
+ 18446744073709551615
+ """
+ cdef mt19937_64 gen
+ return gen.min(), gen.max()
+
+
+def mt19937_64_discard(z):
+ """
+ >>> x, y = mt19937_64_discard(13)
+ >>> print(x)
+ 11756813601242511406
+ >>> print(y)
+ 11756813601242511406
+ """
+ cdef mt19937_64 gen = mt19937_64(42)
+ # Throw away z random numbers.
+ gen.discard(z)
+ a = gen()
+
+ # Iterate over z random numbers.
+ gen.seed(42)
+ for _ in range(z + 1):
+ b = gen()
+ return a, b
+
+
+ctypedef fused any_dist:
+ uniform_int_distribution[int]
+ uniform_real_distribution[double]
+ bernoulli_distribution
+ binomial_distribution[int]
+ geometric_distribution[int]
+ negative_binomial_distribution[int]
+ poisson_distribution[int]
+ exponential_distribution[double]
+ gamma_distribution[double]
+ weibull_distribution[double]
+ extreme_value_distribution[double]
+ normal_distribution[double]
+ lognormal_distribution[double]
+ chi_squared_distribution[double]
+ cauchy_distribution[double]
+ fisher_f_distribution[double]
+ student_t_distribution[double]
+
+
+cdef sample_or_range(any_dist dist, bint sample):
+ """
+    This helper function draws (and discards) a sample if `sample` is truthy, returning None, and
+    returns the (min, max) range of the distribution if `sample` is falsy. We use a fused type to avoid
+    duplicating the conditional statement in each distribution test.
+ """
+ cdef random_device rd
+ if sample:
+ dist(mt19937(rd()))
+ else:
+ return dist.min(), dist.max()
+
+
+def uniform_int_distribution_test(a, b, sample=True):
+ """
+ >>> uniform_int_distribution_test(2, 3)
+ >>> uniform_int_distribution_test(5, 9, False)
+ (5, 9)
+ """
+ cdef uniform_int_distribution[int] dist = uniform_int_distribution[int](a, b)
+ return sample_or_range[uniform_int_distribution[int]](dist, sample)
+
+
+def uniform_real_distribution_test(a, b, sample=True):
+ """
+ >>> x = uniform_real_distribution_test(4, 5)
+ >>> uniform_real_distribution_test(3, 8, False)
+ (3.0, 8.0)
+ """
+ cdef uniform_real_distribution[double] dist = uniform_real_distribution[double](a, b)
+ return sample_or_range[uniform_real_distribution[double]](dist, sample)
+
+
+def bernoulli_distribution_test(proba, sample=True):
+ """
+ >>> bernoulli_distribution_test(0.2)
+ >>> bernoulli_distribution_test(0.7, False)
+ (False, True)
+ """
+ cdef bernoulli_distribution dist = bernoulli_distribution(proba)
+ return sample_or_range[bernoulli_distribution](dist, sample)
+
+
+def binomial_distribution_test(n, proba, sample=True):
+ """
+ >>> binomial_distribution_test(10, 0.7)
+ >>> binomial_distribution_test(75, 0.3, False)
+ (0, 75)
+ """
+ cdef binomial_distribution[int] dist = binomial_distribution[int](n, proba)
+ return sample_or_range[binomial_distribution[int]](dist, sample)
+
+
+def geometric_distribution_test(proba, sample=True):
+ """
+ >>> geometric_distribution_test(.4)
+    >>> geometric_distribution_test(0.2, False) # 2147483647 = 2 ** 31 - 1
+ (0, 2147483647)
+ """
+ cdef geometric_distribution[int] dist = geometric_distribution[int](proba)
+ return sample_or_range[geometric_distribution[int]](dist, sample)
+
+
+def negative_binomial_distribution_test(n, p, sample=True):
+ """
+ >>> negative_binomial_distribution_test(5, .1)
+    >>> negative_binomial_distribution_test(10, 0.2, False) # 2147483647 = 2 ** 31 - 1
+ (0, 2147483647)
+ """
+ cdef negative_binomial_distribution[int] dist = negative_binomial_distribution[int](n, p)
+ return sample_or_range[negative_binomial_distribution[int]](dist, sample)
+
+
+def poisson_distribution_test(rate, sample=True):
+ """
+ >>> poisson_distribution_test(7)
+    >>> poisson_distribution_test(7, False) # 2147483647 = 2 ** 31 - 1
+ (0, 2147483647)
+ """
+ cdef poisson_distribution[int] dist = poisson_distribution[int](rate)
+ return sample_or_range[poisson_distribution[int]](dist, sample)
+
+
+def exponential_distribution_test(rate, sample=True):
+ """
+ >>> x = exponential_distribution_test(6)
+ >>> l, u = exponential_distribution_test(1, False)
+ >>> l
+ 0.0
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef exponential_distribution[double] dist = exponential_distribution[double](rate)
+ return sample_or_range[exponential_distribution[double]](dist, sample)
+
+
+def gamma_distribution_test(shape, scale, sample=True):
+ """
+ >>> gamma_distribution_test(3, 4)
+ >>> l, u = gamma_distribution_test(1, 1, False)
+ >>> l
+ 0.0
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef gamma_distribution[double] dist = gamma_distribution[double](shape, scale)
+ return sample_or_range[gamma_distribution[double]](dist, sample)
+
+
+def weibull_distribution_test(shape, scale, sample=True):
+ """
+ >>> weibull_distribution_test(3, 2)
+ >>> l, u = weibull_distribution_test(1, 1, False)
+ >>> l
+ 0.0
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef weibull_distribution[double] dist = weibull_distribution[double](shape, scale)
+ return sample_or_range[weibull_distribution[double]](dist, sample)
+
+
+def extreme_value_distribution_test(shape, scale, sample=True):
+ """
+ >>> extreme_value_distribution_test(3, 0.1)
+ >>> l, u = extreme_value_distribution_test(1, 1, False)
+ >>> l == -DBL_MAX or l == -float("inf")
+ True
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef extreme_value_distribution[double] dist = extreme_value_distribution[double](shape, scale)
+ return sample_or_range[extreme_value_distribution[double]](dist, sample)
+
+
+def normal_distribution_test(loc, scale, sample=True):
+ """
+ >>> normal_distribution_test(3, 2)
+ >>> l, u = normal_distribution_test(1, 1, False)
+ >>> l == -DBL_MAX or l == -float("inf")
+ True
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef normal_distribution[double] dist = normal_distribution[double](loc, scale)
+ return sample_or_range[normal_distribution[double]](dist, sample)
+
+
+def lognormal_distribution_test(loc, scale, sample=True):
+ """
+ >>> lognormal_distribution_test(3, 2)
+ >>> l, u = lognormal_distribution_test(1, 1, False)
+ >>> l
+ 0.0
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef lognormal_distribution[double] dist = lognormal_distribution[double](loc, scale)
+ return sample_or_range[lognormal_distribution[double]](dist, sample)
+
+
+def chi_squared_distribution_test(dof, sample=True):
+ """
+ >>> x = chi_squared_distribution_test(9)
+ >>> l, u = chi_squared_distribution_test(5, False)
+ >>> l
+ 0.0
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef chi_squared_distribution[double] dist = chi_squared_distribution[double](dof)
+ return sample_or_range[chi_squared_distribution[double]](dist, sample)
+
+
+def cauchy_distribution_test(loc, scale, sample=True):
+ """
+ >>> cauchy_distribution_test(3, 9)
+ >>> l, u = cauchy_distribution_test(1, 1, False)
+ >>> l == -DBL_MAX or l == -float("inf")
+ True
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef cauchy_distribution[double] dist = cauchy_distribution[double](loc, scale)
+ return sample_or_range[cauchy_distribution[double]](dist, sample)
+
+
+def fisher_f_distribution_test(m, n, sample=True):
+ """
+ >>> x = fisher_f_distribution_test(9, 11)
+ >>> l, u = fisher_f_distribution_test(1, 1, False)
+ >>> l
+ 0.0
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef fisher_f_distribution[double] dist = fisher_f_distribution[double](m, n)
+ return sample_or_range[fisher_f_distribution[double]](dist, sample)
+
+
+def student_t_distribution_test(dof, sample=True):
+ """
+ >>> x = student_t_distribution_test(13)
+ >>> l, u = student_t_distribution_test(1, False)
+ >>> l == -DBL_MAX or l == -float("inf")
+ True
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef student_t_distribution[double] dist = student_t_distribution[double](dof)
+ return sample_or_range[student_t_distribution[double]](dist, sample)
diff --git a/tests/run/decorators.pyx b/tests/run/decorators.pyx
index 54623e0cb..fc20235e2 100644
--- a/tests/run/decorators.pyx
+++ b/tests/run/decorators.pyx
@@ -81,3 +81,26 @@ def outer(arg1, arg2):
def method():
return [4]
return method()
+
+class HasProperty(object):
+ """
+ >>> hp = HasProperty()
+ >>> hp.value
+ 0
+ >>> hp.value = 1
+ >>> hp.value
+ 1
+ """
+ def __init__(self) -> None:
+ self._value = 0
+
+ @property
+ def value(self) -> int:
+ return self._value
+
+ # https://github.com/cython/cython/issues/4836
+ # The variable tracker was confusing "value" in the decorator
+ # for "value" in the argument list
+ @value.setter
+ def value(self, value: int):
+ self._value = value
diff --git a/tests/run/delete.pyx b/tests/run/delete.pyx
index ec0b6c71a..6127fa9f1 100644
--- a/tests/run/delete.pyx
+++ b/tests/run/delete.pyx
@@ -29,15 +29,33 @@ def del_item(L, o):
del L[o]
return L
+
@cython.test_assert_path_exists('//DelStatNode//IndexNode//NoneCheckNode')
def del_dict(dict D, o):
"""
>>> del_dict({1: 'a', 2: 'b'}, 1)
{2: 'b'}
+ >>> del_dict(None, 1) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ TypeError: 'NoneType' object ...
"""
del D[o]
return D
+
+@cython.test_fail_if_path_exists('//DelStatNode//IndexNode//NoneCheckNode')
+def del_dict_ann(D: dict, o):
+ """
+ >>> del_dict_ann({1: 'a', 2: 'b'}, 1)
+ {2: 'b'}
+ >>> del_dict_ann(None, 1)
+ Traceback (most recent call last):
+ TypeError: Argument 'D' has incorrect type (expected dict, got NoneType)
+ """
+ del D[o]
+ return D
+
+
@cython.test_fail_if_path_exists('//NoneCheckNode')
def del_dict_from_literal(o):
"""
diff --git a/tests/run/extern_varobject_extensions.srctree b/tests/run/extern_varobject_extensions.srctree
new file mode 100644
index 000000000..c927b8147
--- /dev/null
+++ b/tests/run/extern_varobject_extensions.srctree
@@ -0,0 +1,94 @@
+# mode: run
+
+PYTHON setup.py build_ext --inplace
+PYTHON -c "import classes"
+PYTHON -c "import test_inherit"
+
+######## setup.py ########
+
+from Cython.Build.Dependencies import cythonize
+
+from distutils.core import setup
+
+setup(
+ ext_modules=cythonize("*.pyx"),
+)
+
+###### dummy_module.py ###########
+
+tpl = tuple
+lst = list
+
+###### classes.pxd ################
+
+cdef extern from *:
+ # apart from list, these are all variable sized types
+ # and Cython shouldn't trip up about the struct size
+ ctypedef class dummy_module.tpl [object PyTupleObject]:
+ pass
+ ctypedef class dummy_module.lst [object PyListObject]:
+ pass
+ ctypedef class types.CodeType [object PyCodeObject]:
+ pass
+    # Note that bytes doesn't work here because it further reduces
+    # the tp_basicsize to save space
+
+##### classes.pyx #################
+
+def check_tuple(tpl x):
+ assert isinstance(x, tuple)
+
+def check_list(lst x):
+ assert isinstance(x, list)
+
+def check_code(CodeType x):
+ import types
+ assert isinstance(x, types.CodeType)
+
+check_tuple((1, 2))
+check_list([1, 2])
+check_code(eval("lambda: None").__code__)
+
+##### failed_inherit1.pyx #############
+
+from classes cimport tpl
+
+cdef class SuperTuple(tpl):
+ cdef int a # importing this gives an error message
+
+##### failed_inherit2.pyx #############
+
+from classes cimport tpl
+
+cdef class SuperTuple(tpl):
+ # adding a method creates a vtab so should also fail
+ cdef int func(self):
+ return 1
+
+##### successful_inherit.pyx #########
+
+from classes cimport lst, tpl
+
+cdef class SuperList(lst):
+ cdef int a # This works OK
+
+cdef class SuperTuple(tpl):
+ # This is actually OK because it doesn't add anything
+ pass
+
+##### test_inherit.py ################
+
+try:
+ import failed_inherit1
+except TypeError as e:
+ assert e.args[0] == "inheritance from PyVarObject types like 'tuple' not currently supported", e.args[0]
+else:
+ assert False
+try:
+ import failed_inherit2
+except TypeError as e:
+ assert e.args[0] == "inheritance from PyVarObject types like 'tuple' not currently supported", e.args[0]
+else:
+ assert False
+
+import successful_inherit
diff --git a/tests/run/funcexc_iter_T228.pyx b/tests/run/funcexc_iter_T228.pyx
index 4b81166f6..40db3afb2 100644
--- a/tests/run/funcexc_iter_T228.pyx
+++ b/tests/run/funcexc_iter_T228.pyx
@@ -65,3 +65,89 @@ def double_raise(py_iterator):
print(sys.exc_info()[0] is ValueError or sys.exc_info()[0])
a = list(cy_iterator())
print(sys.exc_info()[0] is ValueError or sys.exc_info()[0])
+
+
+###### Tests to do with the optimization of StopIteration to "return NULL" #######
+# we're mainly checking that
+# 1. Calling __next__ manually doesn't crash (the wrapper function adds the exception)
+# 2. if you raise a value then that value gets raised
+# 3. putting the exception in various places try...finally / try...except blocks works
+
+def call_next_directly():
+ """
+ >>> call_next_directly()
+ Traceback (most recent call last):
+ ...
+ StopIteration
+ """
+ cy_iterator().__next__()
+
+cdef class cy_iter_many_options:
+ cdef what
+ def __init__(self, what):
+ self.what = what
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ if self.what == "StopIteration in finally no return":
+ try:
+ raise StopIteration
+ finally:
+ print "Finally..."
+ elif self.what == "StopIteration in finally return":
+ try:
+ raise StopIteration
+ finally:
+ self.what = None
+ return "in finally" # but will stop iterating next time
+ elif self.what == "StopIteration from finally":
+ try:
+ raise ValueError
+ finally:
+ raise StopIteration
+ elif self.what == "catch StopIteration":
+ try:
+ raise StopIteration
+ except StopIteration:
+ self.what = None
+ return "in except" # but will stop next time
+ elif self.what == "don't catch StopIteration":
+ try:
+ raise StopIteration
+ except ValueError:
+ return 0
+ elif self.what == "StopIteration from except":
+ try:
+ raise ValueError
+ except ValueError:
+ raise StopIteration
+ elif self.what == "StopIteration with value":
+ raise StopIteration("I'm a value!")
+ elif self.what is None:
+ raise StopIteration
+ else:
+ raise ValueError("self.what didn't match anything")
+
+def test_cy_iter_many_options(option):
+ """
+ >>> test_cy_iter_many_options("StopIteration in finally no return")
+ Finally...
+ []
+ >>> test_cy_iter_many_options("StopIteration in finally return")
+ ['in finally']
+ >>> test_cy_iter_many_options("StopIteration from finally")
+ []
+ >>> test_cy_iter_many_options("catch StopIteration")
+ ['in except']
+ >>> test_cy_iter_many_options("don't catch StopIteration")
+ []
+ >>> try:
+ ... cy_iter_many_options("StopIteration with value").__next__()
+ ... except StopIteration as e:
+ ... print(e.args)
+ ("I'm a value!",)
+ """
+ return list(cy_iter_many_options(option))
+
diff --git a/tests/run/function_self.py b/tests/run/function_self.py
index 938810491..945da404f 100644
--- a/tests/run/function_self.py
+++ b/tests/run/function_self.py
@@ -25,13 +25,8 @@ def fused(x):
>>> hasattr(nested, "__self__")
False
- #>>> hasattr(fused, "__self__") # FIXME this fails for fused functions
- #False
- # but this is OK:
- >>> fused.__self__ #doctest: +ELLIPSIS
- Traceback (most recent call last):
- ...
- AttributeError: 'function' object has no attribute '__self__'...
+ >>> hasattr(fused, "__self__")
+ False
"""
def nested_in_fused(y):
return x+y
@@ -74,15 +69,11 @@ if sys.version_info[0] > 2 or cython.compiled:
if cython.compiled:
__doc__ = """
- >>> fused['double'].__self__ #doctest: +ELLIPSIS
- Traceback (most recent call last):
- ...
- AttributeError: 'function' object has no attribute '__self__'...
+ >>> hasattr(fused['double'], '__self__')
+ False
- >>> C.fused['double'].__self__ #doctest: +ELLIPSIS
- Traceback (most recent call last):
- ...
- AttributeError: 'function' object has no attribute '__self__'...
+ >>> hasattr(C.fused['double'], '__self__')
+ False
>>> c = C()
>>> c.fused['double'].__self__ is c #doctest: +ELLIPSIS
diff --git a/tests/run/line_trace.pyx b/tests/run/line_trace.pyx
index d6f9c3d0e..32579aff7 100644
--- a/tests/run/line_trace.pyx
+++ b/tests/run/line_trace.pyx
@@ -74,7 +74,9 @@ def _create_trace_func(trace):
local_names = {}
def _trace_func(frame, event, arg):
- if sys.version_info < (3,) and 'line_trace' not in frame.f_code.co_filename:
+ if sys.version_info < (3,) and (
+ 'line_trace' not in frame.f_code.co_filename and
+ '<string>' not in frame.f_code.co_filename):
# Prevent tracing into Py2 doctest functions.
return None
@@ -165,19 +167,28 @@ def cy_try_except(func):
raise AttributeError(exc.args[0])
-def run_trace(func, *args, bint with_sys=False):
- """
- >>> def py_add(a,b):
- ... x = a+b
- ... return x
+# CPython 3.11 has an issue when these Python functions are implemented inside of doctests and the trace function fails.
+# https://github.com/python/cpython/issues/94381
+plain_python_functions = {}
+exec("""
+def py_add(a,b):
+ x = a+b
+ return x
+
+def py_add_with_nogil(a,b):
+ x=a; y=b # 1
+ for _ in range(1): # 2
+ z = 0 # 3
+ z += py_add(x, y) # 4
+ return z
+
+def py_return(retval=123): return retval
+""", plain_python_functions)
- >>> def py_add_with_nogil(a,b):
- ... x=a; y=b # 1
- ... for _ in range(1): # 2
- ... z = 0 # 3
- ... z += py_add(x, y) # 4
- ... return z # 5
+def run_trace(func, *args, bint with_sys=False):
+ """
+ >>> py_add = plain_python_functions['py_add']
>>> run_trace(py_add, 1, 2)
[('call', 0), ('line', 1), ('line', 2), ('return', 2)]
>>> run_trace(cy_add, 1, 2)
@@ -204,6 +215,7 @@ def run_trace(func, *args, bint with_sys=False):
>>> result[9:] # sys
[('line', 2), ('line', 5), ('return', 5)]
+ >>> py_add_with_nogil = plain_python_functions['py_add_with_nogil']
>>> result = run_trace(py_add_with_nogil, 1, 2)
>>> result[:5] # py
[('call', 0), ('line', 1), ('line', 2), ('line', 3), ('line', 4)]
@@ -239,7 +251,7 @@ def run_trace(func, *args, bint with_sys=False):
def run_trace_with_exception(func, bint with_sys=False, bint fail=False):
"""
- >>> def py_return(retval=123): return retval
+ >>> py_return = plain_python_functions["py_return"]
>>> run_trace_with_exception(py_return)
OK: 123
[('call', 0), ('line', 1), ('line', 2), ('call', 0), ('line', 0), ('return', 0), ('return', 2)]
@@ -295,10 +307,7 @@ def run_trace_with_exception(func, bint with_sys=False, bint fail=False):
def fail_on_call_trace(func, *args):
"""
- >>> def py_add(a,b):
- ... x = a+b
- ... return x
-
+ >>> py_add = plain_python_functions["py_add"]
>>> fail_on_call_trace(py_add, 1, 2)
Traceback (most recent call last):
ValueError: failing call trace!
@@ -319,17 +328,6 @@ def fail_on_call_trace(func, *args):
def fail_on_line_trace(fail_func, add_func, nogil_add_func):
"""
- >>> def py_add(a,b):
- ... x = a+b # 1
- ... return x # 2
-
- >>> def py_add_with_nogil(a,b):
- ... x=a; y=b # 1
- ... for _ in range(1): # 2
- ... z = 0 # 3
- ... z += py_add(x, y) # 4
- ... return z # 5
-
>>> result = fail_on_line_trace(None, cy_add, cy_add_with_nogil)
>>> len(result)
17
@@ -342,6 +340,8 @@ def fail_on_line_trace(fail_func, add_func, nogil_add_func):
>>> result[14:]
[('line', 2), ('line', 5), ('return', 5)]
+ >>> py_add = plain_python_functions["py_add"]
+ >>> py_add_with_nogil = plain_python_functions['py_add_with_nogil']
>>> result = fail_on_line_trace(None, py_add, py_add_with_nogil)
>>> len(result)
17
@@ -405,9 +405,7 @@ def fail_on_line_trace(fail_func, add_func, nogil_add_func):
def disable_trace(func, *args, bint with_sys=False):
"""
- >>> def py_add(a,b):
- ... x = a+b
- ... return x
+ >>> py_add = plain_python_functions["py_add"]
>>> disable_trace(py_add, 1, 2)
[('call', 0), ('line', 1)]
>>> disable_trace(py_add, 1, 2, with_sys=True)
diff --git a/tests/run/pep448_extended_unpacking.pyx b/tests/run/pep448_extended_unpacking.pyx
index d40a1c6a2..ac3e903a0 100644
--- a/tests/run/pep448_extended_unpacking.pyx
+++ b/tests/run/pep448_extended_unpacking.pyx
@@ -185,6 +185,24 @@ def unpack_list_literal_mult():
return [*([1, 2, *([4, 5] * 2)] * 3)]
+def unpack_list_tuple_mult():
+ """
+ >>> unpack_list_tuple_mult()
+ [1, 1]
+ """
+ return [*(1,) * 2]
+
+
+def unpack_list_tuple_bad_mult():
+ """
+ >>> unpack_list_tuple_bad_mult() # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ ...
+ TypeError: ... 'float'
+ """
+ return [*(1,) * 1.5]
+
+
@cython.test_fail_if_path_exists(
"//ListNode//ListNode",
"//MergedSequenceNode",
diff --git a/tests/run/pep526_variable_annotations.py b/tests/run/pep526_variable_annotations.py
index 3e30075c3..56cb0201b 100644
--- a/tests/run/pep526_variable_annotations.py
+++ b/tests/run/pep526_variable_annotations.py
@@ -15,11 +15,11 @@ except ImportError:
var = 1 # type: annotation
-var: int = 2
-fvar: float = 1.2
+var: cython.int = 2
+fvar: cython.float = 1.2
some_number: cython.int # variable without initial value
-some_list: List[int] = [] # variable with initial value
-t: Tuple[int, ...] = (1, 2, 3)
+some_list: List[cython.int] = [] # variable with initial value
+t: Tuple[cython.int, ...] = (1, 2, 3)
body: Optional[List[str]]
descr_only : "descriptions are allowed but ignored"
@@ -34,11 +34,11 @@ def f():
(2, 1.5, [], (1, 2, 3))
"""
var = 1 # type: annotation
- var: int = 2
- fvar: float = 1.5
+ var: cython.int = 2
+ fvar: cython.float = 1.5
some_number: cython.int # variable without initial value
- some_list: List[int] = [] # variable with initial value
- t: Tuple[int, ...] = (1, 2, 3)
+ some_list: List[cython.int] = [] # variable with initial value
+ t: Tuple[cython.int, ...] = (1, 2, 3)
body: Optional[List[str]]
descr_only: "descriptions are allowed but ignored"
@@ -59,7 +59,7 @@ class BasicStarship(object):
"""
captain: str = 'Picard' # instance variable with default
damage: cython.int # instance variable without default
- stats: ClassVar[Dict[str, int]] = {} # class variable
+ stats: ClassVar[Dict[str, cython.int]] = {} # class variable
descr_only: "descriptions are allowed but ignored"
def __init__(self, damage):
@@ -75,7 +75,7 @@ class BasicStarshipExt(object):
"""
captain: str = 'Picard' # instance variable with default
damage: cython.int # instance variable without default
- stats: ClassVar[Dict[str, int]] = {} # class variable
+ stats: ClassVar[Dict[str, cython.int]] = {} # class variable
descr_only: "descriptions are allowed but ignored"
def __init__(self, damage):
@@ -124,7 +124,7 @@ def iter_declared_dict(d):
# specialized "compiled" test in module-level __doc__
"""
- typed_dict : Dict[float, float] = d
+ typed_dict : Dict[cython.float, cython.float] = d
s = 0.0
for key in typed_dict:
s += d[key]
@@ -135,7 +135,7 @@ def iter_declared_dict(d):
"//WhileStatNode",
"//WhileStatNode//DictIterationNextNode",
)
-def iter_declared_dict_arg(d : Dict[float, float]):
+def iter_declared_dict_arg(d : Dict[cython.float, cython.float]):
"""
>>> d = {1.1: 2.5, 3.3: 4.5}
>>> iter_declared_dict_arg(d)
@@ -165,8 +165,8 @@ def test_subscripted_types():
list object
set object
"""
- a: typing.Dict[int, float] = {}
- b: List[int] = []
+ a: typing.Dict[cython.int, cython.float] = {}
+ b: List[cython.int] = []
c: _SET_[object] = set()
print(cython.typeof(a) + (" object" if not cython.compiled else ""))
@@ -174,22 +174,31 @@ def test_subscripted_types():
print(cython.typeof(c) + (" object" if not cython.compiled else ""))
# because tuple is specifically special cased to go to ctuple where possible
-def test_tuple(a: typing.Tuple[int, float], b: typing.Tuple[int, ...],
- c: Tuple[int, object] # cannot be a ctuple
+def test_tuple(a: typing.Tuple[cython.int, cython.float], b: typing.Tuple[cython.int, ...],
+ c: Tuple[cython.int, object] # cannot be a ctuple
):
"""
>>> test_tuple((1, 1.0), (1, 1.0), (1, 1.0))
int
int
+ Python object
+ Python object
+ (int, float)
+ tuple object
tuple object
tuple object
"""
- x: typing.Tuple[int, float] = (a[0], a[1])
- y: Tuple[int, ...] = (1,2.)
- z = a[0] # should infer to int
+ x: typing.Tuple[int, float] = (a[0], a[1]) # note: Python int/float, not cython.int/float
+ y: Tuple[cython.int, ...] = (1,2.)
+ z = a[0] # should infer to C int
+ p = x[1] # should infer to Python float -> C double
print(cython.typeof(z))
- print(cython.typeof(x[0]))
+ print("int" if cython.compiled and cython.typeof(x[0]) == "Python object" else cython.typeof(x[0])) # FIXME: infer Python int
+ print(cython.typeof(p) if cython.compiled or cython.typeof(p) != 'float' else "Python object") # FIXME: infer C double
+ print(cython.typeof(x[1]) if cython.compiled or cython.typeof(p) != 'float' else "Python object") # FIXME: infer C double
+ print(cython.typeof(a) if cython.compiled or cython.typeof(a) != 'tuple' else "(int, float)")
+ print(cython.typeof(x) + (" object" if not cython.compiled else ""))
print(cython.typeof(y) + (" object" if not cython.compiled else ""))
print(cython.typeof(c) + (" object" if not cython.compiled else ""))
diff --git a/tests/run/pep526_variable_annotations_cy.pyx b/tests/run/pep526_variable_annotations_cy.pyx
index c08c832b0..448824b36 100644
--- a/tests/run/pep526_variable_annotations_cy.pyx
+++ b/tests/run/pep526_variable_annotations_cy.pyx
@@ -48,9 +48,9 @@ def test_tuple(typing.Tuple[int, float] a, typing.Tuple[int, ...] b,
tuple object
tuple object
"""
- cdef typing.Tuple[int, float] x = (a[0], a[1])
+ cdef typing.Tuple[int, float] x = (a[0], a[1]) # C int/float
cdef Tuple[int, ...] y = (1,2.)
- z = a[0] # should infer to int
+ z = a[0] # should infer to C int
print(cython.typeof(z))
print(cython.typeof(x[0]))
diff --git a/tests/run/pure_cdef_class_dataclass.py b/tests/run/pure_cdef_class_dataclass.py
index 7b8fcb851..8a978d36f 100644
--- a/tests/run/pure_cdef_class_dataclass.py
+++ b/tests/run/pure_cdef_class_dataclass.py
@@ -11,9 +11,9 @@ class MyDataclass:
"""
>>> sorted(list(MyDataclass.__dataclass_fields__.keys()))
['a', 'self']
- >>> inst1 = MyDataclass(2.0, ['a', 'b'])
+ >>> inst1 = MyDataclass(2, ['a', 'b'])
>>> print(inst1)
- MyDataclass(a=2.0, self=['a', 'b'])
+ MyDataclass(a=2, self=['a', 'b'])
>>> inst2 = MyDataclass()
>>> print(inst2)
MyDataclass(a=1, self=[])
diff --git a/tests/run/pure_py.py b/tests/run/pure_py.py
index 93f737453..a8dc5b014 100644
--- a/tests/run/pure_py.py
+++ b/tests/run/pure_py.py
@@ -33,17 +33,18 @@ def test_sizeof():
def test_declare(n):
"""
>>> test_declare(100)
- (100, 100)
+ (100, 100, 100)
>>> test_declare(100.5)
- (100, 100)
+ (100, 100, 100)
"""
x = cython.declare(cython.int)
y = cython.declare(cython.int, n)
+ z = cython.declare(int, n) # C int
if cython.compiled:
cython.declare(xx=cython.int, yy=cython.long)
i = cython.sizeof(xx)
ptr = cython.declare(cython.p_int, cython.address(y))
- return y, ptr[0]
+ return y, z, ptr[0]
@cython.locals(x=cython.double, n=cython.int)
diff --git a/tests/run/test_named_expressions.py b/tests/run/test_named_expressions.py
index 28147319b..b3e2eb980 100644
--- a/tests/run/test_named_expressions.py
+++ b/tests/run/test_named_expressions.py
@@ -1,12 +1,10 @@
# mode: run
-# tag: pure38, no-cpp
+# tag: pure3.8, no-cpp
# copied from cpython with minimal modifications (mainly exec->cython_inline, and a few exception strings)
# This is not currently run in C++ because all the cython_inline compilations fail for reasons that are unclear
-# FIXME pure38 seems to be ignored
# cython: language_level=3
-import os
import unittest
import cython
from Cython.Compiler.Main import CompileError
diff --git a/tests/run/tuple_constants.pyx b/tests/run/tuple_constants.pyx
index f60d5d818..fa5794cf7 100644
--- a/tests/run/tuple_constants.pyx
+++ b/tests/run/tuple_constants.pyx
@@ -36,7 +36,12 @@ def test_deduplicated_args():
# are generated often with the same argument names. Therefore it's worth ensuring that
# they are correctly deduplicated
import sys
- if not hasattr(sys, "pypy_version_info"): # test doesn't work on PyPy (which is probably fair enough)
+ check_identity_of_co_varnames = (
+ not hasattr(sys, "pypy_version_info") and # test doesn't work on PyPy (which is probably fair enough)
+ sys.version_info < (3, 11) # on Python 3.11 co_varnames returns a new, dynamically-calculated tuple
+ # each time it is run
+ )
+ if check_identity_of_co_varnames:
assert func1.__code__.co_varnames is func2.__code__.co_varnames
@cython.test_assert_path_exists("//TupleNode",