From b2792a3d1796effcfdeb41c6b495d176ad5d7239 Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 24 May 2022 09:55:36 +0100 Subject: Remove unused "saved_subexpr_nodes" attribute (GH-4604) --- Cython/Compiler/ExprNodes.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 312b37329..c20a76bd4 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -328,9 +328,6 @@ class ExprNode(Node): # is_sequence_constructor # boolean Is a list or tuple constructor expression # is_starred boolean Is a starred expression (e.g. '*a') - # saved_subexpr_nodes - # [ExprNode or [ExprNode or None] or None] - # Cached result of subexpr_nodes() # use_managed_ref boolean use ref-counted temps/assignments/etc. # result_is_used boolean indicates that the result will be dropped and the # is_numpy_attribute boolean Is a Numpy module attribute @@ -473,7 +470,6 @@ class ExprNode(Node): is_memview_broadcast = False is_memview_copy_assignment = False - saved_subexpr_nodes = None is_temp = False has_temp_moved = False # if True then attempting to do anything but free the temp is invalid is_target = False -- cgit v1.2.1 From d0719998c441be752c4c28c5791ecfa305dc4322 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 29 May 2022 15:06:20 +0100 Subject: Replace fused "__self__" property function with slot memberdef (GH-4808) It's a bit simpler, and it lets `hasattr` work correctly where-as the getset didn't: hasattr() returned true, but it still raised an error. 
--- Cython/Utility/CythonFunction.c | 15 +-------------- tests/run/function_self.py | 21 ++++++--------------- 2 files changed, 7 insertions(+), 29 deletions(-) diff --git a/Cython/Utility/CythonFunction.c b/Cython/Utility/CythonFunction.c index 9a7bf7405..270f441ef 100644 --- a/Cython/Utility/CythonFunction.c +++ b/Cython/Utility/CythonFunction.c @@ -1463,30 +1463,17 @@ bad: return result; } -static PyObject * -__Pyx_FusedFunction_get_self(__pyx_FusedFunctionObject *m, void *closure) -{ - PyObject *self = m->self; - CYTHON_UNUSED_VAR(closure); - if (unlikely(!self)) { - PyErr_SetString(PyExc_AttributeError, "'function' object has no attribute '__self__'"); - } else { - Py_INCREF(self); - } - return self; -} - static PyMemberDef __pyx_FusedFunction_members[] = { {(char *) "__signatures__", T_OBJECT, offsetof(__pyx_FusedFunctionObject, __signatures__), READONLY, 0}, + {(char *) "__self__", T_OBJECT_EX, offsetof(__pyx_FusedFunctionObject, self), READONLY, 0}, {0, 0, 0, 0, 0}, }; static PyGetSetDef __pyx_FusedFunction_getsets[] = { - {(char *) "__self__", (getter)__Pyx_FusedFunction_get_self, 0, 0, 0}, // __doc__ is None for the fused function type, but we need it to be // a descriptor for the instance's __doc__, so rebuild the descriptor in our subclass // (all other descriptors are inherited) diff --git a/tests/run/function_self.py b/tests/run/function_self.py index 938810491..945da404f 100644 --- a/tests/run/function_self.py +++ b/tests/run/function_self.py @@ -25,13 +25,8 @@ def fused(x): >>> hasattr(nested, "__self__") False - #>>> hasattr(fused, "__self__") # FIXME this fails for fused functions - #False - # but this is OK: - >>> fused.__self__ #doctest: +ELLIPSIS - Traceback (most recent call last): - ... - AttributeError: 'function' object has no attribute '__self__'... 
+ >>> hasattr(fused, "__self__") + False """ def nested_in_fused(y): return x+y @@ -74,15 +69,11 @@ if sys.version_info[0] > 2 or cython.compiled: if cython.compiled: __doc__ = """ - >>> fused['double'].__self__ #doctest: +ELLIPSIS - Traceback (most recent call last): - ... - AttributeError: 'function' object has no attribute '__self__'... + >>> hasattr(fused['double'], '__self__') + False - >>> C.fused['double'].__self__ #doctest: +ELLIPSIS - Traceback (most recent call last): - ... - AttributeError: 'function' object has no attribute '__self__'... + >>> hasattr(C.fused['double'], '__self__') + False >>> c = C() >>> c.fused['double'].__self__ is c #doctest: +ELLIPSIS -- cgit v1.2.1 From 0159be918d7c83e145f4ec3ffc35b8d0e3d974dd Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 30 May 2022 08:38:59 +0100 Subject: Improve coercion rules on function pointer exception specification and add tests (GH-4811) Closes https://github.com/cython/cython/issues/4689 --- Cython/Compiler/PyrexTypes.py | 3 +++ tests/errors/cfuncptr.pyx | 36 ++++++++++++++++++++++++++++++++++++ tests/run/cfuncptr.pyx | 40 +++++++++++++++++++++++++++++++++++++--- 3 files changed, 76 insertions(+), 3 deletions(-) create mode 100644 tests/errors/cfuncptr.pyx diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py index c773f5c5a..1660eab22 100644 --- a/Cython/Compiler/PyrexTypes.py +++ b/Cython/Compiler/PyrexTypes.py @@ -3044,6 +3044,9 @@ class CFuncType(CType): # must catch C++ exceptions if we raise them return 0 if not other_type.exception_check or other_type.exception_value is not None: + # There's no problem if this type doesn't emit exceptions but the other type checks + if other_type.exception_check and not (self.exception_check or self.exception_value): + return 1 # if other does not *always* check exceptions, self must comply if not self._same_exception_value(other_type.exception_value): return 0 diff --git a/tests/errors/cfuncptr.pyx b/tests/errors/cfuncptr.pyx new file 
mode 100644 index 000000000..e05efa519 --- /dev/null +++ b/tests/errors/cfuncptr.pyx @@ -0,0 +1,36 @@ +# mode: error + +cdef int exceptmaybeminus2(int bad) except ?-2: + if bad: + raise RuntimeError + else: + return 0 + +def fail_exceptmaybeminus2(bad): + cdef int (*fptr_a)(int) except -2 + cdef int (*fptr_b)(int) except -1 + cdef int (*fptr_c)(int) except ?-1 + fptr_a = exceptmaybeminus2 + fptr_b = exceptmaybeminus2 + fptr_c = exceptmaybeminus2 + +cdef extern from *: + # define this as extern since Cython converts internal "except*" to "except -1" + cdef int exceptstar(int bad) except * + +def fail_exceptstar(bad): + cdef int (*fptr_a)(int) # noexcept + cdef int (*fptr_b)(int) except -1 + cdef int (*fptr_c)(int) except ?-1 + fptr_a = exceptstar + fptr_b = exceptstar + fptr_c = exceptstar + +_ERRORS = """ +13:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except -2' +14:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except -1' +15:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except? -1' +25:13: Cannot assign type 'int (int) except *' to 'int (*)(int)' +26:13: Cannot assign type 'int (int) except *' to 'int (*)(int) except -1' +27:13: Cannot assign type 'int (int) except *' to 'int (*)(int) except? -1' +""" diff --git a/tests/run/cfuncptr.pyx b/tests/run/cfuncptr.pyx index b7018cce0..cb3b32184 100644 --- a/tests/run/cfuncptr.pyx +++ b/tests/run/cfuncptr.pyx @@ -46,15 +46,49 @@ cdef int exceptminus2(int bad) except -2: else: return 0 -def call_exceptminus2(bad): +def call_exceptminus2_through_exceptstar_pointer(bad): """ - >>> call_exceptminus2(True) + >>> call_exceptminus2_through_exceptstar_pointer(True) Traceback (most recent call last): ... RuntimeError - >>> call_exceptminus2(False) + >>> call_exceptminus2_through_exceptstar_pointer(False) 0 """ cdef int (*fptr)(int) except * # GH4770 - should not be treated as except? 
-1 fptr = exceptminus2 return fptr(bad) + +def call_exceptminus2_through_exceptmaybeminus2_pointer(bad): + """ + >>> call_exceptminus2_through_exceptmaybeminus2_pointer(True) + Traceback (most recent call last): + ... + RuntimeError + >>> call_exceptminus2_through_exceptmaybeminus2_pointer(False) + 0 + """ + cdef int (*fptr)(int) except ?-2 # exceptions should be compatible + fptr = exceptminus2 + return fptr(bad) + +cdef int noexcept_func(): # noexcept + return 0 + +def call_noexcept_func_except_star(): + """ + >>> call_noexcept_func_except_star() + 0 + """ + cdef int (*fptr)() except * + fptr = noexcept_func # exception specifications are compatible + return fptr() + +def call_noexcept_func_except_check(): + """ + >>> call_noexcept_func_except_check() + 0 + """ + cdef int (*fptr)() except ?-1 + fptr = noexcept_func # exception specifications are compatible + return fptr() -- cgit v1.2.1 From aaff13db6d4b8a6c812887dc50e1de9f180bc099 Mon Sep 17 00:00:00 2001 From: Wenjun Si Date: Wed, 1 Jun 2022 03:05:06 +0800 Subject: Add "mt19973_64" declarations to "libcpp.random" (GH-4818) --- Cython/Includes/libcpp/random.pxd | 14 ++++++- tests/run/cpp_stl_random.pyx | 77 ++++++++++++++++++++++++++++++++------- 2 files changed, 77 insertions(+), 14 deletions(-) diff --git a/Cython/Includes/libcpp/random.pxd b/Cython/Includes/libcpp/random.pxd index e879c8f64..5636400f3 100644 --- a/Cython/Includes/libcpp/random.pxd +++ b/Cython/Includes/libcpp/random.pxd @@ -1,4 +1,4 @@ -from libc.stdint cimport uint_fast32_t +from libc.stdint cimport uint_fast32_t, uint_fast64_t cdef extern from "" namespace "std" nogil: @@ -12,3 +12,15 @@ cdef extern from "" namespace "std" nogil: result_type max() except + void discard(size_t z) except + void seed(result_type seed) except + + + + cdef cppclass mt19937_64: + ctypedef uint_fast64_t result_type + + mt19937_64() except + + mt19937_64(result_type seed) except + + result_type operator()() except + + result_type min() except + + result_type max() 
except + + void discard(size_t z) except + + void seed(result_type seed) except + diff --git a/tests/run/cpp_stl_random.pyx b/tests/run/cpp_stl_random.pyx index 58f7db040..a793000fa 100644 --- a/tests/run/cpp_stl_random.pyx +++ b/tests/run/cpp_stl_random.pyx @@ -1,7 +1,7 @@ # mode: run # tag: cpp, cpp11 -from libcpp.random cimport mt19937 +from libcpp.random cimport mt19937, mt19937_64 def mt19937_seed_test(): @@ -9,8 +9,8 @@ def mt19937_seed_test(): >>> print(mt19937_seed_test()) 1608637542 """ - cdef mt19937 rd = mt19937(42) - return rd() + cdef mt19937 gen = mt19937(42) + return gen() def mt19937_reseed_test(): @@ -18,9 +18,9 @@ def mt19937_reseed_test(): >>> print(mt19937_reseed_test()) 1608637542 """ - cdef mt19937 rd - rd.seed(42) - return rd() + cdef mt19937 gen + gen.seed(42) + return gen() def mt19937_min_max(): @@ -31,8 +31,8 @@ def mt19937_min_max(): >>> print(y) # 2 ** 32 - 1 because mt19937 is 32 bit. 4294967295 """ - cdef mt19937 rd - return rd.min(), rd.max() + cdef mt19937 gen + return gen.min(), gen.max() def mt19937_discard(z): @@ -43,13 +43,64 @@ def mt19937_discard(z): >>> print(y) 1972458954 """ - cdef mt19937 rd = mt19937(42) + cdef mt19937 gen = mt19937(42) # Throw away z random numbers. - rd.discard(z) - a = rd() + gen.discard(z) + a = gen() # Iterate over z random numbers. - rd.seed(42) + gen.seed(42) for _ in range(z + 1): - b = rd() + b = gen() + return a, b + + +def mt19937_64_seed_test(): + """ + >>> print(mt19937_64_seed_test()) + 13930160852258120406 + """ + cdef mt19937_64 gen = mt19937_64(42) + return gen() + + +def mt19937_64_reseed_test(): + """ + >>> print(mt19937_64_reseed_test()) + 13930160852258120406 + """ + cdef mt19937_64 gen + gen.seed(42) + return gen() + + +def mt19937_64_min_max(): + """ + >>> x, y = mt19937_64_min_max() + >>> print(x) + 0 + >>> print(y) # 2 ** 64 - 1 because mt19937_64 is 64 bit. 
+ 18446744073709551615 + """ + cdef mt19937_64 gen + return gen.min(), gen.max() + + +def mt19937_64_discard(z): + """ + >>> x, y = mt19937_64_discard(13) + >>> print(x) + 11756813601242511406 + >>> print(y) + 11756813601242511406 + """ + cdef mt19937_64 gen = mt19937_64(42) + # Throw away z random numbers. + gen.discard(z) + a = gen() + + # Iterate over z random numbers. + gen.seed(42) + for _ in range(z + 1): + b = gen() return a, b -- cgit v1.2.1 From d9a708221cebc431015ae640b31e14c10ec4fb4c Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Tue, 31 May 2022 21:11:32 +0200 Subject: Fix test tag name. --- tests/run/test_named_expressions.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/run/test_named_expressions.py b/tests/run/test_named_expressions.py index 28147319b..7f25a229d 100644 --- a/tests/run/test_named_expressions.py +++ b/tests/run/test_named_expressions.py @@ -1,9 +1,8 @@ # mode: run -# tag: pure38, no-cpp +# tag: pure3.8, no-cpp # copied from cpython with minimal modifications (mainly exec->cython_inline, and a few exception strings) # This is not currently run in C++ because all the cython_inline compilations fail for reasons that are unclear -# FIXME pure38 seems to be ignored # cython: language_level=3 import os -- cgit v1.2.1 From 6ac2422b48b689b021a48dff9ee14095232baafe Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Tue, 31 May 2022 21:12:06 +0200 Subject: Remove unused import from test file. 
--- tests/run/test_named_expressions.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/run/test_named_expressions.py b/tests/run/test_named_expressions.py index 7f25a229d..b3e2eb980 100644 --- a/tests/run/test_named_expressions.py +++ b/tests/run/test_named_expressions.py @@ -5,7 +5,6 @@ # This is not currently run in C++ because all the cython_inline compilations fail for reasons that are unclear # cython: language_level=3 -import os import unittest import cython from Cython.Compiler.Main import CompileError -- cgit v1.2.1 From c5a418ac0f46ce797f9261c2b8de43630f9d71d8 Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Mon, 6 Jun 2022 02:11:20 -0400 Subject: DOC Fixes link to init methods (#4824) --- docs/src/userguide/extension_types.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/src/userguide/extension_types.rst b/docs/src/userguide/extension_types.rst index 678ddf5c8..d058df6c2 100644 --- a/docs/src/userguide/extension_types.rst +++ b/docs/src/userguide/extension_types.rst @@ -480,7 +480,8 @@ above, the first instantiation will print ``eating!``, but the second will not. This is only one of the reasons why the ``__cinit__()`` method is safer than the normal ``__init__()`` method for initialising extension types and bringing them into a correct and safe state. -See section :ref:`_initialisation_methods` about the differences. +See the :ref:`Initialisation Methods Section ` about +the differences. The second performance improvement applies to types that are often created and deleted in a row, so that they can benefit from a freelist. Cython -- cgit v1.2.1 From 3c0afd9a4e9d8b9aea1702876b52b31f35992b51 Mon Sep 17 00:00:00 2001 From: Till Hoffmann Date: Tue, 7 Jun 2022 13:28:53 -0400 Subject: Add distributions to `libcpp.random`. (#4762) * Add `uniform_int_distribution` to `libcpp`. * Add `uniform_real_distribution` to `libcpp`. * Add `bernoulli_distribution` to `libcpp`. * Add `binomial_distribution` to `libcpp`. 
* Add `geometric_distribution` to `libcpp`. * Add range tests for distributions. * Add `negative_binomial_distribution` to `libcpp`. * Add `poisson_distribution` to `libcpp`. * Add `exponential_distribution` to `libcpp`. * Add `gamma_distribution` to `libcpp`. * Add `weibull_distribution` to `libcpp`. * Add `extreme_value_distribution` to `libcpp`. * Add `normal_distribution` to `libcpp`. * Add `lognormal_distribution` to `libcpp`. * Add `chi_squared_distribution` to `libcpp`. * Add `cauchy_distribution` to `libcpp`. * Add `fisher_f_distribution` to `libcpp`. * Increase `dof` for `fisher_f_distribution_test`. * Add `student_t_distribution` to `libcpp`. * Add docstring to `sample_or_range` helper function. * Fix distribution range tests for different OSs. * Show sample values and proba. of test passing. * Test distribution call without checking values. --- Cython/Includes/libcpp/random.pxd | 144 +++++++++++++++++++++- tests/run/cpp_stl_random.pyx | 244 +++++++++++++++++++++++++++++++++++++- 2 files changed, 385 insertions(+), 3 deletions(-) diff --git a/Cython/Includes/libcpp/random.pxd b/Cython/Includes/libcpp/random.pxd index 5636400f3..9e48bb27f 100644 --- a/Cython/Includes/libcpp/random.pxd +++ b/Cython/Includes/libcpp/random.pxd @@ -2,9 +2,13 @@ from libc.stdint cimport uint_fast32_t, uint_fast64_t cdef extern from "" namespace "std" nogil: - cdef cppclass mt19937: + cdef cppclass random_device: ctypedef uint_fast32_t result_type + random_device() except + + result_type operator()() except + + cdef cppclass mt19937: + ctypedef uint_fast32_t result_type mt19937() except + mt19937(result_type seed) except + result_type operator()() except + @@ -13,7 +17,6 @@ cdef extern from "" namespace "std" nogil: void discard(size_t z) except + void seed(result_type seed) except + - cdef cppclass mt19937_64: ctypedef uint_fast64_t result_type @@ -24,3 +27,140 @@ cdef extern from "" namespace "std" nogil: result_type max() except + void discard(size_t z) except + void 
seed(result_type seed) except + + + cdef cppclass uniform_int_distribution[T]: + ctypedef T result_type + uniform_int_distribution() except + + uniform_int_distribution(T, T) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass uniform_real_distribution[T]: + ctypedef T result_type + uniform_real_distribution() except + + uniform_real_distribution(T, T) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass bernoulli_distribution: + ctypedef bint result_type + bernoulli_distribution() except + + bernoulli_distribution(double) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass binomial_distribution[T]: + ctypedef T result_type + binomial_distribution() except + + binomial_distribution(T, double) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass geometric_distribution[T]: + ctypedef T result_type + geometric_distribution() except + + geometric_distribution(double) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + + cdef cppclass negative_binomial_distribution[T]: + ctypedef T result_type + negative_binomial_distribution() except + + negative_binomial_distribution(T, double) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass poisson_distribution[T]: + ctypedef T result_type + poisson_distribution() except + + poisson_distribution(double) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass exponential_distribution[T]: + ctypedef T result_type + 
exponential_distribution() except + + exponential_distribution(result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass gamma_distribution[T]: + ctypedef T result_type + gamma_distribution() except + + gamma_distribution(result_type, result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass weibull_distribution[T]: + ctypedef T result_type + weibull_distribution() except + + weibull_distribution(result_type, result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass extreme_value_distribution[T]: + ctypedef T result_type + extreme_value_distribution() except + + extreme_value_distribution(result_type, result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass normal_distribution[T]: + ctypedef T result_type + normal_distribution() except + + normal_distribution(result_type, result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass lognormal_distribution[T]: + ctypedef T result_type + lognormal_distribution() except + + lognormal_distribution(result_type, result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass chi_squared_distribution[T]: + ctypedef T result_type + chi_squared_distribution() except + + chi_squared_distribution(result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass cauchy_distribution[T]: + ctypedef T result_type + cauchy_distribution() except + + cauchy_distribution(result_type, 
result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass fisher_f_distribution[T]: + ctypedef T result_type + fisher_f_distribution() except + + fisher_f_distribution(result_type, result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass student_t_distribution[T]: + ctypedef T result_type + student_t_distribution() except + + student_t_distribution(result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + diff --git a/tests/run/cpp_stl_random.pyx b/tests/run/cpp_stl_random.pyx index a793000fa..3b074c278 100644 --- a/tests/run/cpp_stl_random.pyx +++ b/tests/run/cpp_stl_random.pyx @@ -1,7 +1,16 @@ # mode: run # tag: cpp, cpp11 -from libcpp.random cimport mt19937, mt19937_64 +from libcpp.random cimport mt19937, mt19937_64, random_device, uniform_int_distribution, \ + uniform_real_distribution, bernoulli_distribution, binomial_distribution, \ + geometric_distribution, negative_binomial_distribution, poisson_distribution, \ + exponential_distribution, gamma_distribution, weibull_distribution, \ + extreme_value_distribution, normal_distribution, lognormal_distribution, \ + chi_squared_distribution, cauchy_distribution, fisher_f_distribution, student_t_distribution +from libc.float cimport DBL_MAX as DBL_MAX_ + + +DBL_MAX = DBL_MAX_ def mt19937_seed_test(): @@ -104,3 +113,236 @@ def mt19937_64_discard(z): for _ in range(z + 1): b = gen() return a, b + + +ctypedef fused any_dist: + uniform_int_distribution[int] + uniform_real_distribution[double] + bernoulli_distribution + binomial_distribution[int] + geometric_distribution[int] + negative_binomial_distribution[int] + poisson_distribution[int] + exponential_distribution[double] + gamma_distribution[double] + weibull_distribution[double] + 
extreme_value_distribution[double] + normal_distribution[double] + lognormal_distribution[double] + chi_squared_distribution[double] + cauchy_distribution[double] + fisher_f_distribution[double] + student_t_distribution[double] + + +cdef sample_or_range(any_dist dist, bint sample): + """ + This helper function returns a sample if `sample` is truthy and the range of the distribution + if `sample` is falsy. We use a fused type to avoid duplicating the conditional statement in each + distribution test. + """ + cdef random_device rd + if sample: + dist(mt19937(rd())) + else: + return dist.min(), dist.max() + + +def uniform_int_distribution_test(a, b, sample=True): + """ + >>> uniform_int_distribution_test(2, 3) + >>> uniform_int_distribution_test(5, 9, False) + (5, 9) + """ + cdef uniform_int_distribution[int] dist = uniform_int_distribution[int](a, b) + return sample_or_range[uniform_int_distribution[int]](dist, sample) + + +def uniform_real_distribution_test(a, b, sample=True): + """ + >>> x = uniform_real_distribution_test(4, 5) + >>> uniform_real_distribution_test(3, 8, False) + (3.0, 8.0) + """ + cdef uniform_real_distribution[double] dist = uniform_real_distribution[double](a, b) + return sample_or_range[uniform_real_distribution[double]](dist, sample) + + +def bernoulli_distribution_test(proba, sample=True): + """ + >>> bernoulli_distribution_test(0.2) + >>> bernoulli_distribution_test(0.7, False) + (False, True) + """ + cdef bernoulli_distribution dist = bernoulli_distribution(proba) + return sample_or_range[bernoulli_distribution](dist, sample) + + +def binomial_distribution_test(n, proba, sample=True): + """ + >>> binomial_distribution_test(10, 0.7) + >>> binomial_distribution_test(75, 0.3, False) + (0, 75) + """ + cdef binomial_distribution[int] dist = binomial_distribution[int](n, proba) + return sample_or_range[binomial_distribution[int]](dist, sample) + + +def geometric_distribution_test(proba, sample=True): + """ + >>> geometric_distribution_test(.4) + 
>>> geometric_distribution_test(0.2, False) # 2147483647 = 2 ** 32 - 1 + (0, 2147483647) + """ + cdef geometric_distribution[int] dist = geometric_distribution[int](proba) + return sample_or_range[geometric_distribution[int]](dist, sample) + + +def negative_binomial_distribution_test(n, p, sample=True): + """ + >>> negative_binomial_distribution_test(5, .1) + >>> negative_binomial_distribution_test(10, 0.2, False) # 2147483647 = 2 ** 32 - 1 + (0, 2147483647) + """ + cdef negative_binomial_distribution[int] dist = negative_binomial_distribution[int](n, p) + return sample_or_range[negative_binomial_distribution[int]](dist, sample) + + +def poisson_distribution_test(rate, sample=True): + """ + >>> poisson_distribution_test(7) + >>> poisson_distribution_test(7, False) # 2147483647 = 2 ** 32 - 1 + (0, 2147483647) + """ + cdef poisson_distribution[int] dist = poisson_distribution[int](rate) + return sample_or_range[poisson_distribution[int]](dist, sample) + + +def exponential_distribution_test(rate, sample=True): + """ + >>> x = exponential_distribution_test(6) + >>> l, u = exponential_distribution_test(1, False) + >>> l + 0.0 + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef exponential_distribution[double] dist = exponential_distribution[double](rate) + return sample_or_range[exponential_distribution[double]](dist, sample) + + +def gamma_distribution_test(shape, scale, sample=True): + """ + >>> gamma_distribution_test(3, 4) + >>> l, u = gamma_distribution_test(1, 1, False) + >>> l + 0.0 + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef gamma_distribution[double] dist = gamma_distribution[double](shape, scale) + return sample_or_range[gamma_distribution[double]](dist, sample) + + +def weibull_distribution_test(shape, scale, sample=True): + """ + >>> weibull_distribution_test(3, 2) + >>> l, u = weibull_distribution_test(1, 1, False) + >>> l + 0.0 + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef weibull_distribution[double] dist = 
weibull_distribution[double](shape, scale) + return sample_or_range[weibull_distribution[double]](dist, sample) + + +def extreme_value_distribution_test(shape, scale, sample=True): + """ + >>> extreme_value_distribution_test(3, 0.1) + >>> l, u = extreme_value_distribution_test(1, 1, False) + >>> l == -DBL_MAX or l == -float("inf") + True + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef extreme_value_distribution[double] dist = extreme_value_distribution[double](shape, scale) + return sample_or_range[extreme_value_distribution[double]](dist, sample) + + +def normal_distribution_test(loc, scale, sample=True): + """ + >>> normal_distribution_test(3, 2) + >>> l, u = normal_distribution_test(1, 1, False) + >>> l == -DBL_MAX or l == -float("inf") + True + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef normal_distribution[double] dist = normal_distribution[double](loc, scale) + return sample_or_range[normal_distribution[double]](dist, sample) + + +def lognormal_distribution_test(loc, scale, sample=True): + """ + >>> lognormal_distribution_test(3, 2) + >>> l, u = lognormal_distribution_test(1, 1, False) + >>> l + 0.0 + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef lognormal_distribution[double] dist = lognormal_distribution[double](loc, scale) + return sample_or_range[lognormal_distribution[double]](dist, sample) + + +def chi_squared_distribution_test(dof, sample=True): + """ + >>> x = chi_squared_distribution_test(9) + >>> l, u = chi_squared_distribution_test(5, False) + >>> l + 0.0 + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef chi_squared_distribution[double] dist = chi_squared_distribution[double](dof) + return sample_or_range[chi_squared_distribution[double]](dist, sample) + + +def cauchy_distribution_test(loc, scale, sample=True): + """ + >>> cauchy_distribution_test(3, 9) + >>> l, u = cauchy_distribution_test(1, 1, False) + >>> l == -DBL_MAX or l == -float("inf") + True + >>> u == DBL_MAX or u == float("inf") + True + 
""" + cdef cauchy_distribution[double] dist = cauchy_distribution[double](loc, scale) + return sample_or_range[cauchy_distribution[double]](dist, sample) + + +def fisher_f_distribution_test(m, n, sample=True): + """ + >>> x = fisher_f_distribution_test(9, 11) + >>> l, u = fisher_f_distribution_test(1, 1, False) + >>> l + 0.0 + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef fisher_f_distribution[double] dist = fisher_f_distribution[double](m, n) + return sample_or_range[fisher_f_distribution[double]](dist, sample) + + +def student_t_distribution_test(dof, sample=True): + """ + >>> x = student_t_distribution_test(13) + >>> l, u = student_t_distribution_test(1, False) + >>> l == -DBL_MAX or l == -float("inf") + True + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef student_t_distribution[double] dist = student_t_distribution[double](dof) + return sample_or_range[student_t_distribution[double]](dist, sample) -- cgit v1.2.1 From 4613ec04a10a240f56d81d052835870e409fd2a4 Mon Sep 17 00:00:00 2001 From: Thomas A Caswell Date: Tue, 7 Jun 2022 13:32:19 -0400 Subject: MNT: always require va_start to have two arguments (#4820) * MNT: always require va_start to have two arguments https://github.com/python/cpython/pull/93215 chance CPython to always use the 2-input version of va_start and dropped defining HAVE_STDARG_PROTOTYPES. This resulted in the 1-argument version being used when compiling cython source which fails This makes cython also always use the 2-argument version. * Remove blank line * FIX: version gate 2-argument va_start checking to py311 --- Cython/Utility/MemoryView_C.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/MemoryView_C.c b/Cython/Utility/MemoryView_C.c index 07ed24d20..de003a2ee 100644 --- a/Cython/Utility/MemoryView_C.c +++ b/Cython/Utility/MemoryView_C.c @@ -451,7 +451,7 @@ static void __pyx_fatalerror(const char *fmt, ...) 
Py_NO_RETURN { va_list vargs; char msg[200]; -#ifdef HAVE_STDARG_PROTOTYPES +#if PY_VERSION_HEX >= 0x030A0000 || defined(HAVE_STDARG_PROTOTYPES) va_start(vargs, fmt); #else va_start(vargs); -- cgit v1.2.1 From ad2d1f7dd72b0dbdb9d9441e0b30936ebad1a2c4 Mon Sep 17 00:00:00 2001 From: Thomas A Caswell Date: Tue, 7 Jun 2022 13:32:19 -0400 Subject: MNT: always require va_start to have two arguments (#4820) * MNT: always require va_start to have two arguments https://github.com/python/cpython/pull/93215 chance CPython to always use the 2-input version of va_start and dropped defining HAVE_STDARG_PROTOTYPES. This resulted in the 1-argument version being used when compiling cython source which fails This makes cython also always use the 2-argument version. * Remove blank line * FIX: version gate 2-argument va_start checking to py311 --- Cython/Utility/MemoryView_C.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/MemoryView_C.c b/Cython/Utility/MemoryView_C.c index 0a5d8ee2c..8146c458d 100644 --- a/Cython/Utility/MemoryView_C.c +++ b/Cython/Utility/MemoryView_C.c @@ -450,7 +450,7 @@ static void __pyx_fatalerror(const char *fmt, ...) 
Py_NO_RETURN { va_list vargs; char msg[200]; -#ifdef HAVE_STDARG_PROTOTYPES +#if PY_VERSION_HEX >= 0x030A0000 || defined(HAVE_STDARG_PROTOTYPES) va_start(vargs, fmt); #else va_start(vargs); -- cgit v1.2.1 From 8e29b6d47f6f5b10ec1a37f06db440156ac2ac2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Molina=20Garc=C3=ADa?= Date: Wed, 8 Jun 2022 08:14:12 +0200 Subject: Update Limited API preprocessor warning to be compatible with MSVC (#4826) --- Cython/Compiler/Nodes.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index cfe43e890..927e47763 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -5435,8 +5435,10 @@ class CClassDefNode(ClassDefNode): typeptr_cname, buffer_slot.slot_name, )) code.putln("}") + code.putln("#elif defined(_MSC_VER)") + code.putln("#pragma message (\"The buffer protocol is not supported in the Limited C-API.\")") code.putln("#else") - code.putln("#warning The buffer protocol is not supported in the Limited C-API.") + code.putln("#warning \"The buffer protocol is not supported in the Limited C-API.\"") code.putln("#endif") code.globalstate.use_utility_code( -- cgit v1.2.1 From 9341e73aceface39dd7b48bf46b3f376cde33296 Mon Sep 17 00:00:00 2001 From: mwtian <81660174+mwtian@users.noreply.github.com> Date: Sat, 11 Jun 2022 00:21:44 -0700 Subject: Always initialize `state` in __Pyx_WriteUnraisable (#4831) Avoids error about uninitialized variables when compiling with ubscan --- Cython/Utility/Exceptions.c | 2 -- 1 file changed, 2 deletions(-) diff --git a/Cython/Utility/Exceptions.c b/Cython/Utility/Exceptions.c index c6c5d20ed..9f96225d1 100644 --- a/Cython/Utility/Exceptions.c +++ b/Cython/Utility/Exceptions.c @@ -675,10 +675,8 @@ static void __Pyx_WriteUnraisable(const char *name, int clineno, PyGILState_STATE state; if (nogil) state = PyGILState_Ensure(); -#ifdef _MSC_VER /* arbitrary, to suppress warning */ else state = (PyGILState_STATE)-1; 
-#endif #endif CYTHON_UNUSED_VAR(clineno); CYTHON_UNUSED_VAR(lineno); -- cgit v1.2.1 From f753deecd09e011a1bc276b78ccc0f1c0ad67f09 Mon Sep 17 00:00:00 2001 From: pfebrer <42074085+pfebrer@users.noreply.github.com> Date: Fri, 17 Jun 2022 18:51:26 +0200 Subject: BUG: fused types not subscriptable in Cython.Shadow (#4842) --- Cython/Shadow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Shadow.py b/Cython/Shadow.py index 48bc249e0..78d950ce2 100644 --- a/Cython/Shadow.py +++ b/Cython/Shadow.py @@ -385,7 +385,7 @@ class typedef(CythonType): __getitem__ = index_type class _FusedType(CythonType): - pass + __getitem__ = index_type def fused_type(*args): -- cgit v1.2.1 From a7d98eeafac9aaaa8825fd471be38172ee0b259c Mon Sep 17 00:00:00 2001 From: pfebrer <42074085+pfebrer@users.noreply.github.com> Date: Fri, 17 Jun 2022 18:51:26 +0200 Subject: BUG: fused types not subscriptable in Cython.Shadow (#4842) --- Cython/Shadow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Shadow.py b/Cython/Shadow.py index cc8c9b60a..f92f9ebf1 100644 --- a/Cython/Shadow.py +++ b/Cython/Shadow.py @@ -353,7 +353,7 @@ class typedef(CythonType): __getitem__ = index_type class _FusedType(CythonType): - pass + __getitem__ = index_type def fused_type(*args): -- cgit v1.2.1 From c416c7cb2159cc43f9461d96721aeaaa8f9f4714 Mon Sep 17 00:00:00 2001 From: Kenrick Everett <31653115+Kenrick0@users.noreply.github.com> Date: Tue, 21 Jun 2022 16:18:12 +1000 Subject: Fix bytearray iteration in 0.29.x (#4108) By explicitly setting the result type --- Cython/Compiler/ExprNodes.py | 4 ++++ tests/run/bytearray_iter.py | 15 +++++++++++++++ 2 files changed, 19 insertions(+) create mode 100644 tests/run/bytearray_iter.py diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 9162eaad9..4a8ce5dca 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -2876,6 +2876,10 @@ class NextNode(AtomicExprNode): iterator_type = 
self.iterator.infer_type(env) if iterator_type.is_ptr or iterator_type.is_array: return iterator_type.base_type + elif self.iterator.sequence.type is bytearray_type: + # This is a temporary work-around to fix bytearray iteration in 0.29.x + # It has been fixed properly in master, refer to ticket: 3473 + return py_object_type elif iterator_type.is_cpp_class: item_type = env.lookup_operator_for_types(self.pos, "*", [iterator_type]).type.return_type if item_type.is_reference: diff --git a/tests/run/bytearray_iter.py b/tests/run/bytearray_iter.py new file mode 100644 index 000000000..4beb8e285 --- /dev/null +++ b/tests/run/bytearray_iter.py @@ -0,0 +1,15 @@ +# mode: run +# ticket: 3473 + +def test_bytearray_iteration(src): + """ + >>> src = b'123' + >>> test_bytearray_iteration(src) + 49 + 50 + 51 + """ + + data = bytearray(src) + for elem in data: + print(elem) -- cgit v1.2.1 From 6b6d5f20f04fb581a23effec5a14ec7a97b213fd Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 21 Jun 2022 07:39:01 +0100 Subject: Fix code-style blank like at end of file --- tests/run/bytearray_iter.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/run/bytearray_iter.py b/tests/run/bytearray_iter.py index 70a1f139b..60df9fcc1 100644 --- a/tests/run/bytearray_iter.py +++ b/tests/run/bytearray_iter.py @@ -103,4 +103,3 @@ def test_bytearray_iteration(src): data = bytearray(src) for elem in data: print(elem) - -- cgit v1.2.1 From a118960f7643cd343f3a4bb7ef653a69ef16c13b Mon Sep 17 00:00:00 2001 From: Lisandro Dalcin Date: Tue, 21 Jun 2022 15:04:54 +0300 Subject: Fix GCC -Wconversion warnings in C utility code (GH-4854) --- Cython/Utility/CythonFunction.c | 2 +- Cython/Utility/StringTools.c | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cython/Utility/CythonFunction.c b/Cython/Utility/CythonFunction.c index 270f441ef..870dcf620 100644 --- a/Cython/Utility/CythonFunction.c +++ b/Cython/Utility/CythonFunction.c @@ -934,7 +934,7 @@ static PyObject * 
__Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject return NULL; } - return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, nargs, kwnames); + return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames); } #endif diff --git a/Cython/Utility/StringTools.c b/Cython/Utility/StringTools.c index 8c92228cb..910fbf6fa 100644 --- a/Cython/Utility/StringTools.c +++ b/Cython/Utility/StringTools.c @@ -1012,7 +1012,7 @@ static PyObject* __Pyx_PyUnicode_BuildFromAscii(Py_ssize_t ulength, char* chars, padding = PyUnicode_FromOrdinal(padding_char); if (likely(padding) && uoffset > prepend_sign + 1) { PyObject *tmp; - PyObject *repeat = PyInt_FromSize_t(uoffset - prepend_sign); + PyObject *repeat = PyInt_FromSsize_t(uoffset - prepend_sign); if (unlikely(!repeat)) goto done_or_error; tmp = PyNumber_Multiply(padding, repeat); Py_DECREF(repeat); -- cgit v1.2.1 From 9fb8fae9295a9fa689bfe54a00f1f39642822d7c Mon Sep 17 00:00:00 2001 From: Lisandro Dalcin Date: Tue, 21 Jun 2022 15:04:54 +0300 Subject: Fix GCC -Wconversion warning in C utility code (GH-4854) --- Cython/Utility/StringTools.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/StringTools.c b/Cython/Utility/StringTools.c index 35241c64a..98b5e260e 100644 --- a/Cython/Utility/StringTools.c +++ b/Cython/Utility/StringTools.c @@ -953,7 +953,7 @@ static PyObject* __Pyx_PyUnicode_BuildFromAscii(Py_ssize_t ulength, char* chars, padding = PyUnicode_FromOrdinal(padding_char); if (likely(padding) && uoffset > prepend_sign + 1) { PyObject *tmp; - PyObject *repeat = PyInt_FromSize_t(uoffset - prepend_sign); + PyObject *repeat = PyInt_FromSsize_t(uoffset - prepend_sign); if (unlikely(!repeat)) goto done_or_error; tmp = PyNumber_Multiply(padding, repeat); Py_DECREF(repeat); -- cgit v1.2.1 From 8eca1d1c186b6505cc9f06e5d2b42647bce68836 Mon Sep 17 00:00:00 2001 From: Jouke Witteveen Date: Thu, 23 Jun 2022 09:19:37 +0200 Subject: Mention python3-dev 
requirement on Ubuntu/Debian (#4856) Let's assume nobody who reads the quickstart documentation actually wants python2-dev. --- docs/src/quickstart/install.rst | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/src/quickstart/install.rst b/docs/src/quickstart/install.rst index 8b5f4c350..04a47afdc 100644 --- a/docs/src/quickstart/install.rst +++ b/docs/src/quickstart/install.rst @@ -15,8 +15,10 @@ according to the system used: - **Linux** The GNU C Compiler (gcc) is usually present, or easily available through the package system. On Ubuntu or Debian, for - instance, the command ``sudo apt-get install build-essential`` will - fetch everything you need. + instance, it is part of the ``build-essential`` package. Next to a + C compiler, Cython requires the Python header files. On Ubuntu or + Debian, the command ``sudo apt-get install build-essential python3-dev`` + will fetch everything you need. - **Mac OS X** To retrieve gcc, one option is to install Apple's XCode, which can be retrieved from the Mac OS X's install DVDs or -- cgit v1.2.1 From 371b0a163a7ec9f87f5d478f2249ab69c4dc6785 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Sat, 25 Jun 2022 11:40:09 +0200 Subject: Docs: Migrate special_methods.rst to pure python (#4537) * Migrate special_methods.rst to pure python * Fix missing import in examples --- .../userguide/special_methods/total_ordering.py | 13 ++++++++++++ .../userguide/special_methods/total_ordering.pyx | 13 ++++++++++++ docs/src/userguide/special_methods.rst | 24 +++++++++++----------- 3 files changed, 38 insertions(+), 12 deletions(-) create mode 100644 docs/examples/userguide/special_methods/total_ordering.py create mode 100644 docs/examples/userguide/special_methods/total_ordering.pyx diff --git a/docs/examples/userguide/special_methods/total_ordering.py b/docs/examples/userguide/special_methods/total_ordering.py new file mode 100644 index 000000000..7d164d6df --- /dev/null +++ 
b/docs/examples/userguide/special_methods/total_ordering.py @@ -0,0 +1,13 @@ +import cython +@cython.total_ordering +@cython.cclass +class ExtGe: + x: cython.int + + def __ge__(self, other): + if not isinstance(other, ExtGe): + return NotImplemented + return self.x >= cython.cast(ExtGe, other).x + + def __eq__(self, other): + return isinstance(other, ExtGe) and self.x == cython.cast(ExtGe, other).x diff --git a/docs/examples/userguide/special_methods/total_ordering.pyx b/docs/examples/userguide/special_methods/total_ordering.pyx new file mode 100644 index 000000000..06d2ccef7 --- /dev/null +++ b/docs/examples/userguide/special_methods/total_ordering.pyx @@ -0,0 +1,13 @@ +import cython + +@cython.total_ordering +cdef class ExtGe: + cdef int x + + def __ge__(self, other): + if not isinstance(other, ExtGe): + return NotImplemented + return self.x >= (other).x + + def __eq__(self, other): + return isinstance(other, ExtGe) and self.x == (other).x diff --git a/docs/src/userguide/special_methods.rst b/docs/src/userguide/special_methods.rst index af702f3c3..e6635b502 100644 --- a/docs/src/userguide/special_methods.rst +++ b/docs/src/userguide/special_methods.rst @@ -3,6 +3,9 @@ Special Methods of Extension Types =================================== +.. include:: + ../two-syntax-variants-used + This page describes the special methods currently supported by Cython extension types. A complete list of all the special methods appears in the table at the bottom. Some of these methods behave differently from their Python @@ -12,7 +15,8 @@ mention. .. Note:: Everything said on this page applies only to extension types, defined - with the :keyword:`cdef` class statement. It doesn't apply to classes defined with the + with the :keyword:`cdef` class statement or decorated using ``@cclass`` decorator. + It doesn't apply to classes defined with the Python :keyword:`class` statement, where the normal Python rules apply. .. _declaration: @@ -20,7 +24,7 @@ mention. 
Declaration ------------ Special methods of extension types must be declared with :keyword:`def`, not -:keyword:`cdef`. This does not impact their performance--Python uses different +:keyword:`cdef`/``@cfunc``. This does not impact their performance--Python uses different calling conventions to invoke these special methods. .. _docstrings: @@ -225,19 +229,15 @@ Depending on the application, one way or the other may be better: decorator specifically for ``cdef`` classes. (Normal Python classes can use the original ``functools`` decorator.) - .. code-block:: cython +.. tabs:: + + .. group-tab:: Pure Python - @cython.total_ordering - cdef class ExtGe: - cdef int x + .. literalinclude:: ../../examples/userguide/special_methods/total_ordering.py - def __ge__(self, other): - if not isinstance(other, ExtGe): - return NotImplemented - return self.x >= (other).x + .. group-tab:: Cython - def __eq__(self, other): - return isinstance(other, ExtGe) and self.x == (other).x + .. literalinclude:: ../../examples/userguide/special_methods/total_ordering.pyx .. 
_the__next__method: -- cgit v1.2.1 From 36520e7c90b059777271c6e71d62af55f123a42b Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Sat, 25 Jun 2022 13:03:05 +0200 Subject: Docs: Migrate sharing_declarations.rst to pure python mode (#4544) * Initial migration of examples * Migrate text in sharing_declarations.rst * Some fixes of examples and clarification * Fix capitalization * Apply suggestions from code review Co-authored-by: da-woods Co-authored-by: da-woods --- .../userguide/sharing_declarations/landscaping.py | 7 + .../userguide/sharing_declarations/lunch.py | 5 + .../userguide/sharing_declarations/lunch.pyx | 1 + .../userguide/sharing_declarations/restaurant.py | 12 ++ .../userguide/sharing_declarations/restaurant.pyx | 2 +- .../userguide/sharing_declarations/setup.py | 4 - .../userguide/sharing_declarations/setup_py.py | 4 + .../userguide/sharing_declarations/setup_pyx.py | 4 + .../userguide/sharing_declarations/shrubbing.py | 10 ++ .../userguide/sharing_declarations/shrubbing.pyx | 3 + .../userguide/sharing_declarations/spammery.py | 10 ++ .../userguide/sharing_declarations/spammery.pyx | 3 +- .../userguide/sharing_declarations/volume.py | 2 + docs/src/tutorial/pure.rst | 3 + docs/src/userguide/sharing_declarations.rst | 152 ++++++++++++++------- 15 files changed, 169 insertions(+), 53 deletions(-) create mode 100644 docs/examples/userguide/sharing_declarations/landscaping.py create mode 100644 docs/examples/userguide/sharing_declarations/lunch.py create mode 100644 docs/examples/userguide/sharing_declarations/restaurant.py delete mode 100644 docs/examples/userguide/sharing_declarations/setup.py create mode 100644 docs/examples/userguide/sharing_declarations/setup_py.py create mode 100644 docs/examples/userguide/sharing_declarations/setup_pyx.py create mode 100644 docs/examples/userguide/sharing_declarations/shrubbing.py create mode 100644 docs/examples/userguide/sharing_declarations/spammery.py create mode 100644 
docs/examples/userguide/sharing_declarations/volume.py diff --git a/docs/examples/userguide/sharing_declarations/landscaping.py b/docs/examples/userguide/sharing_declarations/landscaping.py new file mode 100644 index 000000000..2d2c4b5b7 --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/landscaping.py @@ -0,0 +1,7 @@ +from cython.cimports.shrubbing import Shrubbery +import shrubbing + +def main(): + sh: Shrubbery + sh = shrubbing.standard_shrubbery() + print("Shrubbery size is", sh.width, 'x', sh.length) diff --git a/docs/examples/userguide/sharing_declarations/lunch.py b/docs/examples/userguide/sharing_declarations/lunch.py new file mode 100644 index 000000000..df56913eb --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/lunch.py @@ -0,0 +1,5 @@ +import cython +from cython.cimports.c_lunch import eject_tomato as c_eject_tomato + +def eject_tomato(speed: cython.float): + c_eject_tomato(speed) diff --git a/docs/examples/userguide/sharing_declarations/lunch.pyx b/docs/examples/userguide/sharing_declarations/lunch.pyx index 8b0911510..fea5e4c87 100644 --- a/docs/examples/userguide/sharing_declarations/lunch.pyx +++ b/docs/examples/userguide/sharing_declarations/lunch.pyx @@ -1,3 +1,4 @@ + cimport c_lunch def eject_tomato(float speed): diff --git a/docs/examples/userguide/sharing_declarations/restaurant.py b/docs/examples/userguide/sharing_declarations/restaurant.py new file mode 100644 index 000000000..b4bdb2eba --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/restaurant.py @@ -0,0 +1,12 @@ +import cython +from cython.cimports.dishes import spamdish, sausage + +@cython.cfunc +def prepare(d: cython.pointer(spamdish)) -> cython.void: + d.oz_of_spam = 42 + d.filler = sausage + +def serve(): + d: spamdish + prepare(cython.address(d)) + print(f'{d.oz_of_spam} oz spam, filler no. 
{d.filler}') diff --git a/docs/examples/userguide/sharing_declarations/restaurant.pyx b/docs/examples/userguide/sharing_declarations/restaurant.pyx index 3257c681b..f556646dc 100644 --- a/docs/examples/userguide/sharing_declarations/restaurant.pyx +++ b/docs/examples/userguide/sharing_declarations/restaurant.pyx @@ -1,4 +1,4 @@ -from __future__ import print_function + cimport dishes from dishes cimport spamdish diff --git a/docs/examples/userguide/sharing_declarations/setup.py b/docs/examples/userguide/sharing_declarations/setup.py deleted file mode 100644 index 505b53e9d..000000000 --- a/docs/examples/userguide/sharing_declarations/setup.py +++ /dev/null @@ -1,4 +0,0 @@ -from setuptools import setup -from Cython.Build import cythonize - -setup(ext_modules=cythonize(["landscaping.pyx", "shrubbing.pyx"])) diff --git a/docs/examples/userguide/sharing_declarations/setup_py.py b/docs/examples/userguide/sharing_declarations/setup_py.py new file mode 100644 index 000000000..45ded0ff4 --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/setup_py.py @@ -0,0 +1,4 @@ +from setuptools import setup +from Cython.Build import cythonize + +setup(ext_modules=cythonize(["landscaping.py", "shrubbing.py"])) diff --git a/docs/examples/userguide/sharing_declarations/setup_pyx.py b/docs/examples/userguide/sharing_declarations/setup_pyx.py new file mode 100644 index 000000000..505b53e9d --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/setup_pyx.py @@ -0,0 +1,4 @@ +from setuptools import setup +from Cython.Build import cythonize + +setup(ext_modules=cythonize(["landscaping.pyx", "shrubbing.pyx"])) diff --git a/docs/examples/userguide/sharing_declarations/shrubbing.py b/docs/examples/userguide/sharing_declarations/shrubbing.py new file mode 100644 index 000000000..27e20d631 --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/shrubbing.py @@ -0,0 +1,10 @@ +import cython + +@cython.cclass +class Shrubbery: + def __cinit__(self, w: cython.int, l: 
cython.int): + self.width = w + self.length = l + +def standard_shrubbery(): + return Shrubbery(3, 7) diff --git a/docs/examples/userguide/sharing_declarations/shrubbing.pyx b/docs/examples/userguide/sharing_declarations/shrubbing.pyx index bb97e7e77..8598b5c98 100644 --- a/docs/examples/userguide/sharing_declarations/shrubbing.pyx +++ b/docs/examples/userguide/sharing_declarations/shrubbing.pyx @@ -1,3 +1,6 @@ + + + cdef class Shrubbery: def __cinit__(self, int w, int l): self.width = w diff --git a/docs/examples/userguide/sharing_declarations/spammery.py b/docs/examples/userguide/sharing_declarations/spammery.py new file mode 100644 index 000000000..88554be4a --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/spammery.py @@ -0,0 +1,10 @@ +import cython +from cython.cimports.volume import cube + +def menu(description, size): + print(description, ":", cube(size), + "cubic metres of spam") + +menu("Entree", 1) +menu("Main course", 3) +menu("Dessert", 2) diff --git a/docs/examples/userguide/sharing_declarations/spammery.pyx b/docs/examples/userguide/sharing_declarations/spammery.pyx index 16cbda06e..da11e737e 100644 --- a/docs/examples/userguide/sharing_declarations/spammery.pyx +++ b/docs/examples/userguide/sharing_declarations/spammery.pyx @@ -1,5 +1,4 @@ -from __future__ import print_function - + from volume cimport cube def menu(description, size): diff --git a/docs/examples/userguide/sharing_declarations/volume.py b/docs/examples/userguide/sharing_declarations/volume.py new file mode 100644 index 000000000..1f6ff9c72 --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/volume.py @@ -0,0 +1,2 @@ +def cube(x): + return x * x * x diff --git a/docs/src/tutorial/pure.rst b/docs/src/tutorial/pure.rst index a536f2b31..417b7d1b2 100644 --- a/docs/src/tutorial/pure.rst +++ b/docs/src/tutorial/pure.rst @@ -29,6 +29,7 @@ In pure mode, you are more or less restricted to code that can be expressed beyond that can only be done in .pyx files with 
extended language syntax, because it depends on features of the Cython compiler. +.. _augmenting_pxd: Augmenting .pxd --------------- @@ -249,6 +250,8 @@ releasing or acquiring the GIL. The condition must be constant (at compile time) A common use case for conditionally acquiring and releasing the GIL are fused types that allow different GIL handling depending on the specific type (see :ref:`gil_conditional`). +.. py:module:: cython.cimports + cimports ^^^^^^^^ diff --git a/docs/src/userguide/sharing_declarations.rst b/docs/src/userguide/sharing_declarations.rst index 70e29e2b2..35ba58dfd 100644 --- a/docs/src/userguide/sharing_declarations.rst +++ b/docs/src/userguide/sharing_declarations.rst @@ -6,6 +6,9 @@ Sharing Declarations Between Cython Modules ******************************************** +.. include:: + ../two-syntax-variants-used + This section describes how to make C declarations, functions and extension types in one Cython module available for use in another Cython module. These facilities are closely modeled on the Python import mechanism, @@ -17,13 +20,13 @@ Definition and Implementation files A Cython module can be split into two parts: a definition file with a ``.pxd`` suffix, containing C declarations that are to be available to other Cython -modules, and an implementation file with a ``.pyx`` suffix, containing +modules, and an implementation file with a ``.pyx``/``.py`` suffix, containing everything else. When a module wants to use something declared in another module's definition file, it imports it using the :keyword:`cimport` -statement. +statement or using special :py:mod:`cython.cimports` package. A ``.pxd`` file that consists solely of extern declarations does not need -to correspond to an actual ``.pyx`` file or Python module. This can make it a +to correspond to an actual ``.pyx``/``.py`` file or Python module. 
This can make it a convenient place to put common declarations, for example declarations of functions from an :ref:`external library ` that one wants to use in several modules. @@ -41,8 +44,8 @@ A definition file can contain: It cannot contain the implementations of any C or Python functions, or any Python class definitions, or any executable statements. It is needed when one -wants to access :keyword:`cdef` attributes and methods, or to inherit from -:keyword:`cdef` classes defined in this module. +wants to access :keyword:`cdef`/``@cfunc`` attributes and methods, or to inherit from +:keyword:`cdef`/``@cclass`` classes defined in this module. .. note:: @@ -70,23 +73,45 @@ The cimport statement The :keyword:`cimport` statement is used in a definition or implementation file to gain access to names declared in another definition file. Its syntax exactly parallels that of the normal Python import -statement:: +statement. When pure python syntax is used, the same effect can be done by +importing from special :py:mod:`cython.cimports` package. In later text the term +to ``cimport`` refers to using both :keyword:`cimport` statement or +:py:mod:`cython.cimports` package. - cimport module [, module...] +.. tabs:: - from module cimport name [as name] [, name [as name] ...] + .. group-tab:: Pure Python -Here is an example. :file:`dishes.pxd` is a definition file which exports a -C data type. :file:`restaurant.pyx` is an implementation file which imports and -uses it. + .. code-block:: python + + from cython.cimports.module import name [as name][, name [as name] ...] + + .. group-tab:: Cython -:file:`dishes.pxd`: + .. code-block:: cython + + cimport module [, module...] + + from module cimport name [as name] [, name [as name] ...] + +Here is an example. :file:`dishes.pxd` is a definition file which exports a +C data type. :file:`restaurant.pyx`/:file:`restaurant.py` is an implementation file +which imports and uses it. .. 
literalinclude:: ../../examples/userguide/sharing_declarations/dishes.pxd + :caption: dishes.pxd + +.. tabs:: + + .. group-tab:: Pure Python -:file:`restaurant.pyx`: + .. literalinclude:: ../../examples/userguide/sharing_declarations/restaurant.py + :caption: dishes.py -.. literalinclude:: ../../examples/userguide/sharing_declarations/restaurant.pyx + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/sharing_declarations/restaurant.pyx + :caption: dishes.pyx It is important to understand that the :keyword:`cimport` statement can only be used to import C data types, C functions and variables, and extension @@ -116,8 +141,8 @@ option to ``cythonize()``), as well as ``sys.path``. Using ``package_data`` to install ``.pxd`` files in your ``setup.py`` script allows other packages to cimport items from your module as a dependency. -Also, whenever you compile a file :file:`modulename.pyx`, the corresponding -definition file :file:`modulename.pxd` is first searched for along the +Also, whenever you compile a file :file:`modulename.pyx`/:file:`modulename.py`, +the corresponding definition file :file:`modulename.pxd` is first searched for along the include path (but not ``sys.path``), and if found, it is processed before processing the ``.pyx`` file. @@ -132,16 +157,23 @@ for an imaginary module, and :keyword:`cimport` that module. You can then refer to the C functions by qualifying them with the name of the module. Here's an example: -:file:`c_lunch.pxd`: - .. literalinclude:: ../../examples/userguide/sharing_declarations/c_lunch.pxd + :caption: c_lunch.pxd + +.. tabs:: -:file:`lunch.pyx`: + .. group-tab:: Pure Python -.. literalinclude:: ../../examples/userguide/sharing_declarations/lunch.pyx + .. literalinclude:: ../../examples/userguide/sharing_declarations/lunch.py + :caption: lunch.py -You don't need any :file:`c_lunch.pyx` file, because the only things defined -in :file:`c_lunch.pxd` are extern C entities. There won't be any actual + .. 
group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/sharing_declarations/lunch.pyx + :caption: lunch.pyx + +You don't need any :file:`c_lunch.pyx`/:file:`c_lunch.py` file, because the only +things defined in :file:`c_lunch.pxd` are extern C entities. There won't be any actual ``c_lunch`` module at run time, but that doesn't matter; the :file:`c_lunch.pxd` file has done its job of providing an additional namespace at compile time. @@ -154,17 +186,32 @@ C functions defined at the top level of a module can be made available via :keyword:`cimport` by putting headers for them in the ``.pxd`` file, for example: -:file:`volume.pxd`: - .. literalinclude:: ../../examples/userguide/sharing_declarations/volume.pxd + :caption: volume.pxd + +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/sharing_declarations/volume.py + :caption: volume.py + + .. literalinclude:: ../../examples/userguide/sharing_declarations/spammery.py + :caption: spammery.py -:file:`volume.pyx`: + .. note:: -.. literalinclude:: ../../examples/userguide/sharing_declarations/volume.pyx + Type definitions of function ``cube()`` in :file:`volume.py` are not provided + since they are used from .pxd definition file. See :ref:`augmenting_pxd` and + GitHub issue :issue:`4388`. -:file:`spammery.pyx`: + .. group-tab:: Cython -.. literalinclude:: ../../examples/userguide/sharing_declarations/spammery.pyx + .. literalinclude:: ../../examples/userguide/sharing_declarations/volume.pyx + :caption: volume.pyx + + .. literalinclude:: ../../examples/userguide/sharing_declarations/spammery.pyx + :caption: spammery.pyx .. note:: @@ -193,34 +240,47 @@ Python methods. Here is an example of a module which defines and exports an extension type, and another module which uses it: -:file:`shrubbing.pxd`: - .. literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.pxd + :caption: shrubbing.pxd + +.. tabs:: + + .. group-tab:: Pure Python + + .. 
literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.py + :caption: shrubbing.py + + .. literalinclude:: ../../examples/userguide/sharing_declarations/landscaping.py + :caption: landscaping.py -:file:`shrubbing.pyx`: + One would then need to compile both of these modules, e.g. using -.. literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.pyx + .. literalinclude:: ../../examples/userguide/sharing_declarations/setup_py.py + :caption: setup.py -:file:`landscaping.pyx`: + .. group-tab:: Cython -.. literalinclude:: ../../examples/userguide/sharing_declarations/landscaping.pyx + .. literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.pyx + :caption: shrubbing.pyx -One would then need to compile both of these modules, e.g. using + .. literalinclude:: ../../examples/userguide/sharing_declarations/landscaping.pyx + :caption: landscaping.pyx -:file:`setup.py`: + One would then need to compile both of these modules, e.g. using -.. literalinclude:: ../../examples/userguide/sharing_declarations/setup.py + .. literalinclude:: ../../examples/userguide/sharing_declarations/setup_pyx.py + :caption: setup.py Some things to note about this example: -* There is a :keyword:`cdef` class Shrubbery declaration in both - :file:`Shrubbing.pxd` and :file:`Shrubbing.pyx`. When the Shrubbing module +* There is a :keyword:`cdef`/``@cclass`` class Shrubbery declaration in both + :file:`shrubbing.pxd` and :file:`shrubbing.pyx`. When the shrubbing module is compiled, these two declarations are combined into one. -* In Landscaping.pyx, the :keyword:`cimport` Shrubbing declaration allows us - to refer to the Shrubbery type as :class:`Shrubbing.Shrubbery`. But it - doesn't bind the name Shrubbing in Landscaping's module namespace at run - time, so to access :func:`Shrubbing.standard_shrubbery` we also need to - ``import Shrubbing``. 
+* In :file:`landscaping.pyx`/:file:`landscaping.py`, the :keyword:`cimport` shrubbing + declaration allows us to refer to the Shrubbery type as :class:`shrubbing.Shrubbery`. + But it doesn't bind the name shrubbing in landscaping's module namespace at run + time, so to access :func:`shrubbing.standard_shrubbery` we also need to + ``import shrubbing``. * One caveat if you use setuptools instead of distutils, the default action when running ``python setup.py install`` is to create a zipped ``egg`` file which will not work with ``cimport`` for ``pxd`` files @@ -234,8 +294,8 @@ Versioning ``.pxd`` files can be labelled with a minimum Cython version as part of their file name, similar to the version tagging of ``.so`` files in PEP 3149. -For example a file called :file:`Shrubbing.cython-30.pxd` will only be -found by ``cimport Shrubbing`` on Cython 3.0 and higher. Cython will use the +For example a file called :file:`shrubbing.cython-30.pxd` will only be +found by ``cimport shrubbing`` on Cython 3.0 and higher. Cython will use the file tagged with the highest compatible version number. Note that versioned files that are distributed across different directories -- cgit v1.2.1 From 530e370ff3d4d43e1969dcc821f65bf33a99f252 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 26 Jun 2022 11:42:31 +0100 Subject: Docs: don't say cdef functions exist in module dict (#4865) Patch is against 0.29.x branch (to fix both versions of the documentation). --- docs/src/userguide/sharing_declarations.rst | 7 ------- 1 file changed, 7 deletions(-) diff --git a/docs/src/userguide/sharing_declarations.rst b/docs/src/userguide/sharing_declarations.rst index 57f41e38d..7c2a49e21 100644 --- a/docs/src/userguide/sharing_declarations.rst +++ b/docs/src/userguide/sharing_declarations.rst @@ -166,13 +166,6 @@ example: .. literalinclude:: ../../examples/userguide/sharing_declarations/spammery.pyx -.. 
note:: - - When a module exports a C function in this way, an object appears in the - module dictionary under the function's name. However, you can't make use of - this object from Python, nor can you use it from Cython using a normal import - statement; you have to use :keyword:`cimport`. - .. _sharing_extension_types: Sharing Extension Types -- cgit v1.2.1 From 5c900c59d03f23f7329d6e68e114e4a277112916 Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 28 Jun 2022 12:52:05 +0100 Subject: Fix tuple multiplication in MergedSequenceNode (GH-4864) Fixes https://github.com/cython/cython/issues/4861 --- Cython/Compiler/ExprNodes.py | 2 +- tests/run/pep448_extended_unpacking.pyx | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 4a8ce5dca..9678647ad 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -8482,7 +8482,7 @@ class MergedSequenceNode(ExprNode): if type in (list_type, tuple_type) and args and args[0].is_sequence_constructor: # construct a list directly from the first argument that we can then extend if args[0].type is not list_type: - args[0] = ListNode(args[0].pos, args=args[0].args, is_temp=True) + args[0] = ListNode(args[0].pos, args=args[0].args, is_temp=True, mult_factor=args[0].mult_factor) ExprNode.__init__(self, pos, args=args, type=type) def calculate_constant_result(self): diff --git a/tests/run/pep448_extended_unpacking.pyx b/tests/run/pep448_extended_unpacking.pyx index 08d39e526..4411d7e79 100644 --- a/tests/run/pep448_extended_unpacking.pyx +++ b/tests/run/pep448_extended_unpacking.pyx @@ -185,6 +185,24 @@ def unpack_list_literal_mult(): return [*([1, 2, *([4, 5] * 2)] * 3)] +def unpack_list_tuple_mult(): + """ + >>> unpack_list_tuple_mult() + [1, 1] + """ + return [*(1,) * 2] + + +def unpack_list_tuple_bad_mult(): + """ + >>> unpack_list_tuple_bad_mult() + Traceback (most recent call last): + ... 
+ TypeError: can't multiply sequence by non-int of type 'float' + """ + return [*(1,) * 1.5] + + @cython.test_fail_if_path_exists( "//ListNode//ListNode", "//MergedSequenceNode", -- cgit v1.2.1 From a79e447ea9faea118f39a94b54d0498baad8ad17 Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 29 Jun 2022 08:11:55 +0100 Subject: Move linetracing functions into an "exec" to work around an issue in Py3.11 (GH-4851) Thus fixing linetracing tests in Python 3.11 when the trace function raises an exception. The issue we were seeing looked to be something to do with functions defined within doctest docstrings. It was fixed by moving the "defined-in-Python" functions into an exec call instead. See https://github.com/python/cpython/issues/94381 --- tests/run/line_trace.pyx | 60 +++++++++++++++++++++++------------------------- 1 file changed, 29 insertions(+), 31 deletions(-) diff --git a/tests/run/line_trace.pyx b/tests/run/line_trace.pyx index d6f9c3d0e..32579aff7 100644 --- a/tests/run/line_trace.pyx +++ b/tests/run/line_trace.pyx @@ -74,7 +74,9 @@ def _create_trace_func(trace): local_names = {} def _trace_func(frame, event, arg): - if sys.version_info < (3,) and 'line_trace' not in frame.f_code.co_filename: + if sys.version_info < (3,) and ( + 'line_trace' not in frame.f_code.co_filename and + '' not in frame.f_code.co_filename): # Prevent tracing into Py2 doctest functions. return None @@ -165,19 +167,28 @@ def cy_try_except(func): raise AttributeError(exc.args[0]) -def run_trace(func, *args, bint with_sys=False): - """ - >>> def py_add(a,b): - ... x = a+b - ... return x +# CPython 3.11 has an issue when these Python functions are implemented inside of doctests and the trace function fails. 
+# https://github.com/python/cpython/issues/94381 +plain_python_functions = {} +exec(""" +def py_add(a,b): + x = a+b + return x + +def py_add_with_nogil(a,b): + x=a; y=b # 1 + for _ in range(1): # 2 + z = 0 # 3 + z += py_add(x, y) # 4 + return z + +def py_return(retval=123): return retval +""", plain_python_functions) - >>> def py_add_with_nogil(a,b): - ... x=a; y=b # 1 - ... for _ in range(1): # 2 - ... z = 0 # 3 - ... z += py_add(x, y) # 4 - ... return z # 5 +def run_trace(func, *args, bint with_sys=False): + """ + >>> py_add = plain_python_functions['py_add'] >>> run_trace(py_add, 1, 2) [('call', 0), ('line', 1), ('line', 2), ('return', 2)] >>> run_trace(cy_add, 1, 2) @@ -204,6 +215,7 @@ def run_trace(func, *args, bint with_sys=False): >>> result[9:] # sys [('line', 2), ('line', 5), ('return', 5)] + >>> py_add_with_nogil = plain_python_functions['py_add_with_nogil'] >>> result = run_trace(py_add_with_nogil, 1, 2) >>> result[:5] # py [('call', 0), ('line', 1), ('line', 2), ('line', 3), ('line', 4)] @@ -239,7 +251,7 @@ def run_trace(func, *args, bint with_sys=False): def run_trace_with_exception(func, bint with_sys=False, bint fail=False): """ - >>> def py_return(retval=123): return retval + >>> py_return = plain_python_functions["py_return"] >>> run_trace_with_exception(py_return) OK: 123 [('call', 0), ('line', 1), ('line', 2), ('call', 0), ('line', 0), ('return', 0), ('return', 2)] @@ -295,10 +307,7 @@ def run_trace_with_exception(func, bint with_sys=False, bint fail=False): def fail_on_call_trace(func, *args): """ - >>> def py_add(a,b): - ... x = a+b - ... return x - + >>> py_add = plain_python_functions["py_add"] >>> fail_on_call_trace(py_add, 1, 2) Traceback (most recent call last): ValueError: failing call trace! @@ -319,17 +328,6 @@ def fail_on_call_trace(func, *args): def fail_on_line_trace(fail_func, add_func, nogil_add_func): """ - >>> def py_add(a,b): - ... x = a+b # 1 - ... return x # 2 - - >>> def py_add_with_nogil(a,b): - ... x=a; y=b # 1 - ... 
for _ in range(1): # 2 - ... z = 0 # 3 - ... z += py_add(x, y) # 4 - ... return z # 5 - >>> result = fail_on_line_trace(None, cy_add, cy_add_with_nogil) >>> len(result) 17 @@ -342,6 +340,8 @@ def fail_on_line_trace(fail_func, add_func, nogil_add_func): >>> result[14:] [('line', 2), ('line', 5), ('return', 5)] + >>> py_add = plain_python_functions["py_add"] + >>> py_add_with_nogil = plain_python_functions['py_add_with_nogil'] >>> result = fail_on_line_trace(None, py_add, py_add_with_nogil) >>> len(result) 17 @@ -405,9 +405,7 @@ def fail_on_line_trace(fail_func, add_func, nogil_add_func): def disable_trace(func, *args, bint with_sys=False): """ - >>> def py_add(a,b): - ... x = a+b - ... return x + >>> py_add = plain_python_functions["py_add"] >>> disable_trace(py_add, 1, 2) [('call', 0), ('line', 1)] >>> disable_trace(py_add, 1, 2, with_sys=True) -- cgit v1.2.1 From eafc920f76e812613b09876bfb9b980651c60f31 Mon Sep 17 00:00:00 2001 From: Sebastian Berg Date: Sun, 3 Jul 2022 00:23:31 -0700 Subject: BUG: Fortify object buffers against included NULLs (#4859) * BUG: Fortify object buffers against included NULLs While NumPy tends to not actively create object buffers initialized only with NULL (rather than filled with None), at least older versions of NumPy did do that. And NumPy guards against this. This guards against embedded NULLs in object buffers interpreting a NULL as None (and anticipating a NULL value also when setting the buffer for reference count purposes). 
Closes gh-4858 --- Cython/Compiler/ExprNodes.py | 17 ++++++++++------- tests/buffers/bufaccess.pyx | 45 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 55 insertions(+), 7 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index c20a76bd4..144251aec 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -4578,17 +4578,17 @@ class BufferIndexNode(_IndexingBaseNode): buffer_entry, ptrexpr = self.buffer_lookup_code(code) if self.buffer_type.dtype.is_pyobject: - # Must manage refcounts. Decref what is already there - # and incref what we put in. + # Must manage refcounts. XDecref what is already there + # and incref what we put in (NumPy allows there to be NULL) ptr = code.funcstate.allocate_temp(buffer_entry.buf_ptr_type, manage_ref=False) rhs_code = rhs.result() code.putln("%s = %s;" % (ptr, ptrexpr)) - code.put_gotref("*%s" % ptr, self.buffer_type.dtype) - code.putln("__Pyx_INCREF(%s); __Pyx_DECREF(*%s);" % ( + code.put_xgotref("*%s" % ptr, self.buffer_type.dtype) + code.putln("__Pyx_INCREF(%s); __Pyx_XDECREF(*%s);" % ( rhs_code, ptr)) code.putln("*%s %s= %s;" % (ptr, op, rhs_code)) - code.put_giveref("*%s" % ptr, self.buffer_type.dtype) + code.put_xgiveref("*%s" % ptr, self.buffer_type.dtype) code.funcstate.release_temp(ptr) else: # Simple case @@ -4609,8 +4609,11 @@ class BufferIndexNode(_IndexingBaseNode): # is_temp is True, so must pull out value and incref it. # NOTE: object temporary results for nodes are declared # as PyObject *, so we need a cast - code.putln("%s = (PyObject *) *%s;" % (self.result(), self.buffer_ptr_code)) - code.putln("__Pyx_INCREF((PyObject*)%s);" % self.result()) + res = self.result() + code.putln("%s = (PyObject *) *%s;" % (res, self.buffer_ptr_code)) + # NumPy does (occasionally) allow NULL to denote None. 
+ code.putln("if (unlikely(%s == NULL)) %s = Py_None;" % (res, res)) + code.putln("__Pyx_INCREF((PyObject*)%s);" % res) def free_subexpr_temps(self, code): for temp in self.index_temps: diff --git a/tests/buffers/bufaccess.pyx b/tests/buffers/bufaccess.pyx index 6b0b4ac30..764d65db6 100644 --- a/tests/buffers/bufaccess.pyx +++ b/tests/buffers/bufaccess.pyx @@ -1004,6 +1004,51 @@ def assign_to_object(object[object] buf, int idx, obj): """ buf[idx] = obj +@testcase +def check_object_nulled_1d(MockBuffer[object, ndim=1] buf, int idx, obj): + """ + See comments on printbuf_object above. + + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a]) + >>> check_object_nulled_1d(A, 0, a) + >>> decref(a) # new reference "added" to A + >>> check_object_nulled_1d(A, 1, a) + >>> decref(a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,)) + >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride + >>> decref(a) + >>> get_refcount(a) == rc1 + True + """ + cdef void **data = buf.buffer + data[idx] = NULL + res = buf[idx] # takes None + buf[idx] = obj + return res + +@testcase +def check_object_nulled_2d(MockBuffer[object, ndim=2] buf, int idx1, int idx2, obj): + """ + See comments on printbuf_object above. 
+ + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2)) + >>> check_object_nulled_2d(A, 0, 0, a) + >>> decref(a) # new reference "added" to A + >>> check_object_nulled_2d(A, 1, 1, a) + >>> decref(a) + >>> get_refcount(a) == rc1 + True + """ + cdef void **data = buf.buffer + data[idx1 + 2*idx2] = NULL + res = buf[idx1, idx2] # takes None + buf[idx1, idx2] = obj + return res + @testcase def assign_temporary_to_object(object[object] buf): """ -- cgit v1.2.1 From 98cebe4dedb52550ce621cf9338283dd7262ea83 Mon Sep 17 00:00:00 2001 From: Sebastian Berg Date: Sun, 3 Jul 2022 00:23:31 -0700 Subject: BUG: Fortify object buffers against included NULLs (#4859) * BUG: Fortify object buffers against included NULLs While NumPy tends to not actively create object buffers initialized only with NULL (rather than filled with None), at least older versions of NumPy did do that. And NumPy guards against this. This guards against embedded NULLs in object buffers interpreting a NULL as None (and anticipating a NULL value also when setting the buffer for reference count purposes). Closes gh-4858 --- Cython/Compiler/ExprNodes.py | 17 ++++++++++------- tests/buffers/bufaccess.pyx | 45 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 55 insertions(+), 7 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 9678647ad..69632a4fe 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -4351,17 +4351,17 @@ class BufferIndexNode(_IndexingBaseNode): buffer_entry, ptrexpr = self.buffer_lookup_code(code) if self.buffer_type.dtype.is_pyobject: - # Must manage refcounts. Decref what is already there - # and incref what we put in. + # Must manage refcounts. 
XDecref what is already there + # and incref what we put in (NumPy allows there to be NULL) ptr = code.funcstate.allocate_temp(buffer_entry.buf_ptr_type, manage_ref=False) rhs_code = rhs.result() code.putln("%s = %s;" % (ptr, ptrexpr)) - code.put_gotref("*%s" % ptr) - code.putln("__Pyx_INCREF(%s); __Pyx_DECREF(*%s);" % ( + code.put_xgotref("*%s" % ptr) + code.putln("__Pyx_INCREF(%s); __Pyx_XDECREF(*%s);" % ( rhs_code, ptr)) code.putln("*%s %s= %s;" % (ptr, op, rhs_code)) - code.put_giveref("*%s" % ptr) + code.put_xgiveref("*%s" % ptr) code.funcstate.release_temp(ptr) else: # Simple case @@ -4382,8 +4382,11 @@ class BufferIndexNode(_IndexingBaseNode): # is_temp is True, so must pull out value and incref it. # NOTE: object temporary results for nodes are declared # as PyObject *, so we need a cast - code.putln("%s = (PyObject *) *%s;" % (self.result(), self.buffer_ptr_code)) - code.putln("__Pyx_INCREF((PyObject*)%s);" % self.result()) + res = self.result() + code.putln("%s = (PyObject *) *%s;" % (res, self.buffer_ptr_code)) + # NumPy does (occasionally) allow NULL to denote None. + code.putln("if (unlikely(%s == NULL)) %s = Py_None;" % (res, res)) + code.putln("__Pyx_INCREF((PyObject*)%s);" % res) def free_subexpr_temps(self, code): for temp in self.index_temps: diff --git a/tests/buffers/bufaccess.pyx b/tests/buffers/bufaccess.pyx index 8761e6eb9..2a5e84185 100644 --- a/tests/buffers/bufaccess.pyx +++ b/tests/buffers/bufaccess.pyx @@ -1004,6 +1004,51 @@ def assign_to_object(object[object] buf, int idx, obj): """ buf[idx] = obj +@testcase +def check_object_nulled_1d(MockBuffer[object, ndim=1] buf, int idx, obj): + """ + See comments on printbuf_object above. 
+ + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a]) + >>> check_object_nulled_1d(A, 0, a) + >>> decref(a) # new reference "added" to A + >>> check_object_nulled_1d(A, 1, a) + >>> decref(a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,)) + >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride + >>> decref(a) + >>> get_refcount(a) == rc1 + True + """ + cdef void **data = buf.buffer + data[idx] = NULL + res = buf[idx] # takes None + buf[idx] = obj + return res + +@testcase +def check_object_nulled_2d(MockBuffer[object, ndim=2] buf, int idx1, int idx2, obj): + """ + See comments on printbuf_object above. + + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2)) + >>> check_object_nulled_2d(A, 0, 0, a) + >>> decref(a) # new reference "added" to A + >>> check_object_nulled_2d(A, 1, 1, a) + >>> decref(a) + >>> get_refcount(a) == rc1 + True + """ + cdef void **data = buf.buffer + data[idx1 + 2*idx2] = NULL + res = buf[idx1, idx2] # takes None + buf[idx1, idx2] = obj + return res + @testcase def assign_temporary_to_object(object[object] buf): """ -- cgit v1.2.1 From a70b9d3cde2a0f62f0ebf7a8a28f32af5de99e8c Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 3 Jul 2022 08:54:23 +0100 Subject: Updated changelog --- CHANGES.rst | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index 7087a9391..acc5e3020 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -2,6 +2,40 @@ Cython Changelog ================ +0.29.31 (2022-??-??) +==================== + +Bugs fixed +---------- + +* Use ``importlib.util.find_spec()`` instead of the deprecated ``importlib.find_loader()`` + function when setting up the package path at import-time. Patch by Matti Picus. + (Github issue #4764) + +* Require the C compiler to support the two-arg form of ``va_start`` on Python 3.10 + and higher. Patch by Thomas Caswell. 
+ (Github issue #4820) + +* Make ``fused_type`` subscriptable in Shadow.py. Patch by Pfebrer. + (Github issue #4842) + +* Fix the incorrect code generation of the target type in ``bytearray`` loops. + Patch by Kenrick Everett. + (Github issue #4108) + +* Silence some GCC ``-Wconversion`` warnings in C utility code. + Patch by Lisandro Dalcin. + (Github issue #4854) + +* Stop tuple multiplication being ignored in expressions such as ``[*(1,) * 2]``. + Patch by David Woods. + (Github issue #4864) + +* Ensure that object buffers (e.g. ``ndarray[object, ndim=1]``) containing + ``NULL`` pointers are safe to use, returning ``None`` instead of the ``NULL`` + pointer. Patch by Sebastian Berg. + (Github issue #4859) + 0.29.30 (2022-05-16) ==================== -- cgit v1.2.1 From edf38fcf5d57b6ac58be823d31e40f30a8b6f2fd Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 3 Jul 2022 10:15:30 +0100 Subject: Update changelog --- CHANGES.rst | 36 ++++++++++++++++++++++++++++++------ 1 file changed, 30 insertions(+), 6 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 9a844e178..38dc34781 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,15 +10,17 @@ Features added * A new decorator ``@cython.dataclasses.dataclass`` was implemented that provides compile time dataclass generation capabilities to ``cdef`` classes (extension types). - Patch by David Woods. (Github issue :issue:`2903`) + Patch by David Woods. (Github issue :issue:`2903`). ``kw_only`` dataclasses + added by Yury Sokov (Github issue :issue:`4794`) * Named expressions (PEP 572) aka. assignment expressions (aka. the walrus operator ``:=``) were implemented. Patch by David Woods. (Github issue :issue:`2636`) * Some C++ library declarations were extended and fixed. - Patches by Max Bachmann, Till Hoffmann, Julien Jerphanion. - (Github issues :issue:`4530`, :issue:`4528`, :issue:`4710`, :issue:`4746`, :issue:`4751`) + Patches by Max Bachmann, Till Hoffmann, Julien Jerphanion, Wenjun Si. 
+ (Github issues :issue:`4530`, :issue:`4528`, :issue:`4710`, :issue:`4746`, + :issue:`4751`, :issue:`4818`, :issue:`4762`) * The ``cythonize`` command has a new option ``-M`` to generate ``.dep`` dependency files for the compilation unit. This can be used by external build tools to track @@ -35,6 +37,10 @@ Features added smaller set of Cython's own modules, which can be used to reduce the package and install size. +* Improvements to ``PyTypeObject`` definitions in pxd wrapping of libpython. + Patch by John Kirkham. (Github issue :issue:`4699`) + + Bugs fixed ---------- @@ -48,7 +54,7 @@ Bugs fixed Test patch by Kirill Smelkov. (Github issue :issue:`4737`) * Typedefs for the ``bint`` type did not always behave like ``bint``. - Patch by 0dminnimda. (Github issue :issue:`4660`) + Patch by Nathan Manville and 0dminnimda. (Github issue :issue:`4660`) * The return type of a fused function is no longer ignored for function pointers, since it is relevant when passing them e.g. as argument into other fused functions. @@ -65,7 +71,18 @@ Bugs fixed * A work-around for StacklessPython < 3.8 was disabled in Py3.8 and later. (Github issue :issue:`4329`) -* Includes all bug-fixes from the :ref:`0.29.30` release. +* Improve conversion between function pointers with non-identical but + compatible exception specifications. Patches by David Woods. + (Github issues :issue:`4770`, :issue:`4689`) + +* Improve compatibility with forthcoming CPython 3.12 release. + +* Limited API C preprocessor warning is compatible with MSVC. Patch by + Victor Molina Garcia. (Github issue :issue:`4826`) + +* C compiler warnings fixed. Patch by mwtian. (Github issue :issue:`4831`) + +* Includes all bug-fixes from the 0.29 branch up to the :ref:`0.29.31` release. Other changes ------------- @@ -78,7 +95,7 @@ Other changes allowed when it is used as default argument, i.e. ``func(x: list = None)``. 
Note that, for backwards compatibility reasons, this does not apply when using Cython's C notation, as in ``func(list x)``. Here, ``None`` is still allowed, as always. - (Github issues :issue:`3883`, :issue:`2696`) + (Github issues :issue:`3883`, :issue:`2696`, :issue:`4669`) * The compile-time ``DEF`` and ``IF`` statements are deprecated and generate a warning. They should be replaced with normal constants, code generation or C macros. @@ -87,6 +104,10 @@ Other changes * Reusing an extension type attribute name as a method name is now an error. Patch by 0dminnimda. (Github issue :issue:`4661`) +* Improve compatibility between classes pickled in Cython 3.0 and 0.29.x + by accepting MD5, SHA-1 and SHA-256 checksums. + (Github issue :issue:`4680`) + 3.0.0 alpha 10 (2022-01-06) =========================== @@ -977,6 +998,8 @@ Other changes .. _`PEP-563`: https://www.python.org/dev/peps/pep-0563 .. _`PEP-479`: https://www.python.org/dev/peps/pep-0479 +.. _0.29.31: + 0.29.31 (2022-??-??) ==================== @@ -1011,6 +1034,7 @@ Bugs fixed pointer. Patch by Sebastian Berg. (Github issue #4859) + .. _0.29.30: 0.29.30 (2022-05-16) -- cgit v1.2.1 From 1c0691f7720976a2ee9c471e071b6c8a0341eb7b Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 3 Jul 2022 11:23:34 +0100 Subject: Fix tuple*float test on PyPy Test added in 5c900c59d03f23f7329d6e68e114e4a277112916 PyPy gives a slightly different error message for the unsupported operation --- tests/run/pep448_extended_unpacking.pyx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/run/pep448_extended_unpacking.pyx b/tests/run/pep448_extended_unpacking.pyx index 4411d7e79..85d22a86c 100644 --- a/tests/run/pep448_extended_unpacking.pyx +++ b/tests/run/pep448_extended_unpacking.pyx @@ -195,10 +195,10 @@ def unpack_list_tuple_mult(): def unpack_list_tuple_bad_mult(): """ - >>> unpack_list_tuple_bad_mult() + >>> unpack_list_tuple_bad_mult() # doctest: +ELLIPSIS Traceback (most recent call last): ... 
- TypeError: can't multiply sequence by non-int of type 'float' + TypeError: ... 'float' """ return [*(1,) * 1.5] -- cgit v1.2.1 From 7c7890348625871e6442b4b1bbd6e3e0e7c38e73 Mon Sep 17 00:00:00 2001 From: Kirill Smelkov Date: Sun, 3 Jul 2022 14:08:40 +0300 Subject: includes/cpython: Fix newfunc to use PyObject* for args/kwargs instead of object (#4823) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit object means the argument is always non-NULL valid Python object, while PyObject* argument can be generally NULL. If the argument is indeed passed as NULL, and we declare it as object, generated code will crash while trying to incref it. Quoting https://github.com/cython/cython/issues/4822: object.pxd currently declares `newfunc` as follows: ```pyx ctypedef object (*newfunc)(cpython.type.type, object, object) # (type, args, kwargs) ``` which implies that `args` and `kwargs` are always live objects and cannot be NULL. However Python can, and does, call tp_new with either args=NULL, or kwargs=NULL or both. And in such cases this leads to segfault in automatically-generated __Pyx_INCREF for args or kw. The fix is to change `object` to `PyObject*` for both args and kwargs. Please see below for details: ```cython # cython: language_level=3 from cpython cimport newfunc, type as cpytype, Py_TYPE cdef class X: cdef int i def __init__(self, i): self.i = i def __repr__(self): return 'X(%d)' % self.i cdef newfunc _orig_tp_new = Py_TYPE(X(0)).tp_new cdef object _trace_tp_new(cpytype cls, object args, object kw): print('_trace_tp_new', cls, args, kw) return _orig_tp_new(cls, args, kw) Py_TYPE(X(0)).tp_new = _trace_tp_new x = X(123) print(x) ``` ```console (neo) (py3.venv) (g.env) kirr@deca:~/src/tools/go/pygolang$ cythonize -i x.pyx Compiling /home/kirr/src/tools/go/pygolang/x.pyx because it changed. [1/1] Cythonizing /home/kirr/src/tools/go/pygolang/x.pyx running build_ext building 'x' extension ... 
x86_64-linux-gnu-gcc -pthread -Wno-unused-result -Wsign-compare -DNDEBUG -g -fwrapv -O2 -Wall -g -ffile-prefix-map=/build/python3.9-RNBry6/python3.9-3.9.2=. -fstack-protector-strong -Wformat -Werror=format-security -g -fwrapv -O2 -g -ffile-prefix-map=/build/python3.9-RNBry6/python3.9-3.9.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -I/home/kirr/src/wendelin/venv/py3.venv/include -I/usr/include/python3.9 -c /home/kirr/src/tools/go/pygolang/x.c -o /home/kirr/src/tools/go/pygolang/tmpqkz1r96s/home/kirr/src/tools/go/pygolang/x.o x86_64-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-z,relro -g -fwrapv -O2 -Wl,-z,relro -g -fwrapv -O2 -g -ffile-prefix-map=/build/python3.9-RNBry6/python3.9-3.9.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 /home/kirr/src/tools/go/pygolang/tmpqkz1r96s/home/kirr/src/tools/go/pygolang/x.o -o /home/kirr/src/tools/go/pygolang/x.cpython-39-x86_64-linux-gnu.so ``` ```console (neo) (py3.venv) (g.env) kirr@deca:~/src/tools/go/pygolang$ python -c 'import x' Ошибка сегментирования (стек памяти сброшен на диск) ``` ```console (neo) (py3.venv) (g.env) kirr@deca:~/src/tools/go/pygolang$ gdb python core ... Reading symbols from python... Reading symbols from /usr/lib/debug/.build-id/f9/02f8a561c3abdb9c8d8c859d4243bd8c3f928f.debug... [New LWP 218557] [Thread debugging using libthread_db enabled] Using host libthread_db library "/lib/x86_64-linux-gnu/libthread_db.so.1". Core was generated by `python -c import x'. Program terminated with signal SIGSEGV, Segmentation fault. 
#0 _Py_INCREF (op=0x0) at /usr/include/python3.9/object.h:408 408 op->ob_refcnt++; (gdb) bt 5 #0 _Py_INCREF (op=0x0) at /usr/include/python3.9/object.h:408 #1 __pyx_f_1x__trace_tp_new (__pyx_v_cls=0x7f5ce75e6880 <__pyx_type_1x_X>, __pyx_v_args=(123,), __pyx_v_kw=0x0) at /home/kirr/src/tools/go/pygolang/x.c:1986 #2 0x000000000051dd7e in type_call (type=type@entry=0x7f5ce75e6880 <__pyx_type_1x_X>, args=args@entry=(123,), kwds=kwds@entry=0x0) at ../Objects/typeobject.c:1014 #3 0x00007f5ce75df8d4 in __Pyx_PyObject_Call (func=, arg=(123,), kw=0x0) at /home/kirr/src/tools/go/pygolang/x.c:3414 #4 0x00007f5ce75df276 in __pyx_pymod_exec_x (__pyx_pyinit_module=) at /home/kirr/src/tools/go/pygolang/x.c:3017 (More stack frames follow...) (gdb) f 1 #1 __pyx_f_1x__trace_tp_new (__pyx_v_cls=0x7f5ce75e6880 <__pyx_type_1x_X>, __pyx_v_args=(123,), __pyx_v_kw=0x0) at /home/kirr/src/tools/go/pygolang/x.c:1986 1986 __Pyx_INCREF(__pyx_v_kw); ``` -> Change newfunc signature to use PyObject* instead of object to fix it. With this fix, and test example updates to account for object -> PyObject* change as follows ... --- a/x.pyx.kirr +++ b/x.pyx @@ -1,5 +1,5 @@ # cython: language_level=3 -from cpython cimport newfunc, type as cpytype, Py_TYPE +from cpython cimport newfunc, type as cpytype, Py_TYPE, PyObject cdef class X: cdef int i @@ -10,8 +10,12 @@ cdef class X: cdef newfunc _orig_tp_new = Py_TYPE(X(0)).tp_new -cdef object _trace_tp_new(cpytype cls, object args, object kw): - print('_trace_tp_new', cls, args, kw) +cdef object xobject(PyObject* x): + return "null" if x == NULL else \ + x + +cdef object _trace_tp_new(cpytype cls, PyObject* args, PyObject* kw): + print('_trace_tp_new', cls, xobject(args), xobject(kw)) return _orig_tp_new(cls, args, kw) Py_TYPE(X(0)).tp_new = _trace_tp_new ... 
it works as expected without crashing: $ python -c 'import x' _trace_tp_new (123,) null X(123) Fixes: https://github.com/cython/cython/issues/4822 --- Cython/Includes/cpython/object.pxd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Includes/cpython/object.pxd b/Cython/Includes/cpython/object.pxd index 5a8116639..3ce4c6307 100644 --- a/Cython/Includes/cpython/object.pxd +++ b/Cython/Includes/cpython/object.pxd @@ -5,7 +5,7 @@ cdef extern from "Python.h": ctypedef struct PyObject # forward declaration - ctypedef object (*newfunc)(cpython.type.type, object, object) # (type, args, kwargs) + ctypedef object (*newfunc)(cpython.type.type, PyObject*, PyObject*) # (type, args|NULL, kwargs|NULL) ctypedef object (*unaryfunc)(object) ctypedef object (*binaryfunc)(object, object) -- cgit v1.2.1 From c769c3295dec09fbbb607f249224ce385591dbcc Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Mon, 4 Jul 2022 10:28:15 +0200 Subject: Revert "includes/cpython: Fix newfunc to use PyObject* for args/kwargs instead of object (#4823)" This reverts commit 7c7890348625871e6442b4b1bbd6e3e0e7c38e73. 
--- Cython/Includes/cpython/object.pxd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Includes/cpython/object.pxd b/Cython/Includes/cpython/object.pxd index 3ce4c6307..5a8116639 100644 --- a/Cython/Includes/cpython/object.pxd +++ b/Cython/Includes/cpython/object.pxd @@ -5,7 +5,7 @@ cdef extern from "Python.h": ctypedef struct PyObject # forward declaration - ctypedef object (*newfunc)(cpython.type.type, PyObject*, PyObject*) # (type, args|NULL, kwargs|NULL) + ctypedef object (*newfunc)(cpython.type.type, object, object) # (type, args, kwargs) ctypedef object (*unaryfunc)(object) ctypedef object (*binaryfunc)(object, object) -- cgit v1.2.1 From 4189c759ce468b74f35cfce3cfdba9aa8b4992e3 Mon Sep 17 00:00:00 2001 From: Kirill Smelkov Date: Sun, 3 Jul 2022 14:08:40 +0300 Subject: includes/cpython: Fix newfunc to use PyObject* for args/kwargs instead of object (#4823) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit object means the argument is always non-NULL valid Python object, while PyObject* argument can be generally NULL. If the argument is indeed passed as NULL, and we declare it as object, generated code will crash while trying to incref it. Quoting https://github.com/cython/cython/issues/4822: object.pxd currently declares `newfunc` as follows: ```pyx ctypedef object (*newfunc)(cpython.type.type, object, object) # (type, args, kwargs) ``` which implies that `args` and `kwargs` are always live objects and cannot be NULL. However Python can, and does, call tp_new with either args=NULL, or kwargs=NULL or both. And in such cases this leads to segfault in automatically-generated __Pyx_INCREF for args or kw. The fix is to change `object` to `PyObject*` for both args and kwargs. 
Please see below for details: ```cython # cython: language_level=3 from cpython cimport newfunc, type as cpytype, Py_TYPE cdef class X: cdef int i def __init__(self, i): self.i = i def __repr__(self): return 'X(%d)' % self.i cdef newfunc _orig_tp_new = Py_TYPE(X(0)).tp_new cdef object _trace_tp_new(cpytype cls, object args, object kw): print('_trace_tp_new', cls, args, kw) return _orig_tp_new(cls, args, kw) Py_TYPE(X(0)).tp_new = _trace_tp_new x = X(123) print(x) ``` ```console (neo) (py3.venv) (g.env) kirr@deca:~/src/tools/go/pygolang$ cythonize -i x.pyx Compiling /home/kirr/src/tools/go/pygolang/x.pyx because it changed. [1/1] Cythonizing /home/kirr/src/tools/go/pygolang/x.pyx running build_ext building 'x' extension ... x86_64-linux-gnu-gcc -pthread -Wno-unused-result -Wsign-compare -DNDEBUG -g -fwrapv -O2 -Wall -g -ffile-prefix-map=/build/python3.9-RNBry6/python3.9-3.9.2=. -fstack-protector-strong -Wformat -Werror=format-security -g -fwrapv -O2 -g -ffile-prefix-map=/build/python3.9-RNBry6/python3.9-3.9.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -I/home/kirr/src/wendelin/venv/py3.venv/include -I/usr/include/python3.9 -c /home/kirr/src/tools/go/pygolang/x.c -o /home/kirr/src/tools/go/pygolang/tmpqkz1r96s/home/kirr/src/tools/go/pygolang/x.o x86_64-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-z,relro -g -fwrapv -O2 -Wl,-z,relro -g -fwrapv -O2 -g -ffile-prefix-map=/build/python3.9-RNBry6/python3.9-3.9.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 /home/kirr/src/tools/go/pygolang/tmpqkz1r96s/home/kirr/src/tools/go/pygolang/x.o -o /home/kirr/src/tools/go/pygolang/x.cpython-39-x86_64-linux-gnu.so ``` ```console (neo) (py3.venv) (g.env) kirr@deca:~/src/tools/go/pygolang$ python -c 'import x' Ошибка сегментирования (стек памяти сброшен на диск) ``` ```console (neo) (py3.venv) (g.env) kirr@deca:~/src/tools/go/pygolang$ gdb python core ... 
Reading symbols from python... Reading symbols from /usr/lib/debug/.build-id/f9/02f8a561c3abdb9c8d8c859d4243bd8c3f928f.debug... [New LWP 218557] [Thread debugging using libthread_db enabled] Using host libthread_db library "/lib/x86_64-linux-gnu/libthread_db.so.1". Core was generated by `python -c import x'. Program terminated with signal SIGSEGV, Segmentation fault. #0 _Py_INCREF (op=0x0) at /usr/include/python3.9/object.h:408 408 op->ob_refcnt++; (gdb) bt 5 #0 _Py_INCREF (op=0x0) at /usr/include/python3.9/object.h:408 #1 __pyx_f_1x__trace_tp_new (__pyx_v_cls=0x7f5ce75e6880 <__pyx_type_1x_X>, __pyx_v_args=(123,), __pyx_v_kw=0x0) at /home/kirr/src/tools/go/pygolang/x.c:1986 #2 0x000000000051dd7e in type_call (type=type@entry=0x7f5ce75e6880 <__pyx_type_1x_X>, args=args@entry=(123,), kwds=kwds@entry=0x0) at ../Objects/typeobject.c:1014 #3 0x00007f5ce75df8d4 in __Pyx_PyObject_Call (func=, arg=(123,), kw=0x0) at /home/kirr/src/tools/go/pygolang/x.c:3414 #4 0x00007f5ce75df276 in __pyx_pymod_exec_x (__pyx_pyinit_module=) at /home/kirr/src/tools/go/pygolang/x.c:3017 (More stack frames follow...) (gdb) f 1 #1 __pyx_f_1x__trace_tp_new (__pyx_v_cls=0x7f5ce75e6880 <__pyx_type_1x_X>, __pyx_v_args=(123,), __pyx_v_kw=0x0) at /home/kirr/src/tools/go/pygolang/x.c:1986 1986 __Pyx_INCREF(__pyx_v_kw); ``` -> Change newfunc signature to use PyObject* instead of object to fix it. With this fix, and test example updates to account for object -> PyObject* change as follows ... 
--- a/x.pyx.kirr +++ b/x.pyx @@ -1,5 +1,5 @@ # cython: language_level=3 -from cpython cimport newfunc, type as cpytype, Py_TYPE +from cpython cimport newfunc, type as cpytype, Py_TYPE, PyObject cdef class X: cdef int i @@ -10,8 +10,12 @@ cdef class X: cdef newfunc _orig_tp_new = Py_TYPE(X(0)).tp_new -cdef object _trace_tp_new(cpytype cls, object args, object kw): - print('_trace_tp_new', cls, args, kw) +cdef object xobject(PyObject* x): + return "null" if x == NULL else \ + x + +cdef object _trace_tp_new(cpytype cls, PyObject* args, PyObject* kw): + print('_trace_tp_new', cls, xobject(args), xobject(kw)) return _orig_tp_new(cls, args, kw) Py_TYPE(X(0)).tp_new = _trace_tp_new ... it works as expected without crashing: $ python -c 'import x' _trace_tp_new (123,) null X(123) Fixes: https://github.com/cython/cython/issues/4822 --- Cython/Includes/cpython/object.pxd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Includes/cpython/object.pxd b/Cython/Includes/cpython/object.pxd index c4688f738..41874159c 100644 --- a/Cython/Includes/cpython/object.pxd +++ b/Cython/Includes/cpython/object.pxd @@ -5,7 +5,7 @@ cdef extern from "Python.h": ctypedef struct PyObject # forward declaration - ctypedef object (*newfunc)(cpython.type.type, object, object) # (type, args, kwargs) + ctypedef object (*newfunc)(cpython.type.type, PyObject*, PyObject*) # (type, args|NULL, kwargs|NULL) ctypedef object (*unaryfunc)(object) ctypedef object (*binaryfunc)(object, object) -- cgit v1.2.1 From 4cae7d6c3aef4f83a083f0083d22cd42c373d1fa Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Mon, 4 Jul 2022 21:47:18 +0200 Subject: Port pyximport to the importlib machinery (GH-4625) Closes https://github.com/cython/cython/issues/4560 --- pyximport/_pyximport2.py | 606 ++++++++++++++++++++++++++++++++++++++++++++++ pyximport/_pyximport3.py | 464 ++++++++++++++++++++++++++++++++++++ pyximport/pyximport.py | 607 +---------------------------------------------- 3 files changed, 1076 
insertions(+), 601 deletions(-) create mode 100644 pyximport/_pyximport2.py create mode 100644 pyximport/_pyximport3.py diff --git a/pyximport/_pyximport2.py b/pyximport/_pyximport2.py new file mode 100644 index 000000000..b2077826a --- /dev/null +++ b/pyximport/_pyximport2.py @@ -0,0 +1,606 @@ +""" +Import hooks; when installed with the install() function, these hooks +allow importing .pyx files as if they were Python modules. + +If you want the hook installed every time you run Python +you can add it to your Python version by adding these lines to +sitecustomize.py (which you can create from scratch in site-packages +if it doesn't exist there or somewhere else on your python path):: + + import pyximport + pyximport.install() + +For instance on the Mac with a non-system Python 2.3, you could create +sitecustomize.py with only those two lines at +/usr/local/lib/python2.3/site-packages/sitecustomize.py . + +A custom distutils.core.Extension instance and setup() args +(Distribution) for for the build can be defined by a .pyxbld +file like: + +# examplemod.pyxbld +def make_ext(modname, pyxfilename): + from distutils.extension import Extension + return Extension(name = modname, + sources=[pyxfilename, 'hello.c'], + include_dirs=['/myinclude'] ) +def make_setup_args(): + return dict(script_args=["--compiler=mingw32"]) + +Extra dependencies can be defined by a .pyxdep . +See README. + +Since Cython 0.11, the :mod:`pyximport` module also has experimental +compilation support for normal Python modules. This allows you to +automatically run Cython on every .pyx and .py module that Python +imports, including parts of the standard library and installed +packages. Cython will still fail to compile a lot of Python modules, +in which case the import mechanism will fall back to loading the +Python source modules instead. 
The .py import mechanism is installed +like this:: + + pyximport.install(pyimport = True) + +Running this module as a top-level script will run a test and then print +the documentation. + +This code is based on the Py2.3+ import protocol as described in PEP 302. +""" + +import glob +import imp +import os +import sys +from zipimport import zipimporter, ZipImportError + +mod_name = "pyximport" + +PYX_EXT = ".pyx" +PYXDEP_EXT = ".pyxdep" +PYXBLD_EXT = ".pyxbld" + +DEBUG_IMPORT = False + + +def _print(message, args): + if args: + message = message % args + print(message) + + +def _debug(message, *args): + if DEBUG_IMPORT: + _print(message, args) + + +def _info(message, *args): + _print(message, args) + + +# Performance problem: for every PYX file that is imported, we will +# invoke the whole distutils infrastructure even if the module is +# already built. It might be more efficient to only do it when the +# mod time of the .pyx is newer than the mod time of the .so but +# the question is how to get distutils to tell me the name of the .so +# before it builds it. Maybe it is easy...but maybe the performance +# issue isn't real. +def _load_pyrex(name, filename): + "Load a pyrex file given a name and filename." 
+ + +def get_distutils_extension(modname, pyxfilename, language_level=None): +# try: +# import hashlib +# except ImportError: +# import md5 as hashlib +# extra = "_" + hashlib.md5(open(pyxfilename).read()).hexdigest() +# modname = modname + extra + extension_mod,setup_args = handle_special_build(modname, pyxfilename) + if not extension_mod: + if not isinstance(pyxfilename, str): + # distutils is stupid in Py2 and requires exactly 'str' + # => encode accidentally coerced unicode strings back to str + pyxfilename = pyxfilename.encode(sys.getfilesystemencoding()) + from distutils.extension import Extension + extension_mod = Extension(name = modname, sources=[pyxfilename]) + if language_level is not None: + extension_mod.cython_directives = {'language_level': language_level} + return extension_mod,setup_args + + +def handle_special_build(modname, pyxfilename): + special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT + ext = None + setup_args={} + if os.path.exists(special_build): + # globls = {} + # locs = {} + # execfile(special_build, globls, locs) + # ext = locs["make_ext"](modname, pyxfilename) + with open(special_build) as fid: + mod = imp.load_source("XXXX", special_build, fid) + make_ext = getattr(mod,'make_ext',None) + if make_ext: + ext = make_ext(modname, pyxfilename) + assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build + make_setup_args = getattr(mod, 'make_setup_args',None) + if make_setup_args: + setup_args = make_setup_args() + assert isinstance(setup_args,dict), ("make_setup_args in %s did not return a dict" + % special_build) + assert set or setup_args, ("neither make_ext nor make_setup_args %s" + % special_build) + ext.sources = [os.path.join(os.path.dirname(special_build), source) + for source in ext.sources] + return ext, setup_args + + +def handle_dependencies(pyxfilename): + testing = '_test_files' in globals() + dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT + + # by default let distutils 
decide whether to rebuild on its own + # (it has a better idea of what the output file will be) + + # but we know more about dependencies so force a rebuild if + # some of the dependencies are newer than the pyxfile. + if os.path.exists(dependfile): + with open(dependfile) as fid: + depends = fid.readlines() + depends = [depend.strip() for depend in depends] + + # gather dependencies in the "files" variable + # the dependency file is itself a dependency + files = [dependfile] + for depend in depends: + fullpath = os.path.join(os.path.dirname(dependfile), + depend) + files.extend(glob.glob(fullpath)) + + # only for unit testing to see we did the right thing + if testing: + _test_files[:] = [] #$pycheck_no + + # if any file that the pyxfile depends upon is newer than + # the pyx file, 'touch' the pyx file so that distutils will + # be tricked into rebuilding it. + for file in files: + from distutils.dep_util import newer + if newer(file, pyxfilename): + _debug("Rebuilding %s because of %s", pyxfilename, file) + filetime = os.path.getmtime(file) + os.utime(pyxfilename, (filetime, filetime)) + if testing: + _test_files.append(file) + + +def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None): + assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename + handle_dependencies(pyxfilename) + + extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level) + build_in_temp = pyxargs.build_in_temp + sargs = pyxargs.setup_args.copy() + sargs.update(setup_args) + build_in_temp = sargs.pop('build_in_temp',build_in_temp) + + from . import pyxbuild + so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod, + build_in_temp=build_in_temp, + pyxbuild_dir=pyxbuild_dir, + setup_args=sargs, + inplace=inplace, + reload_support=pyxargs.reload_support) + assert os.path.exists(so_path), "Cannot find: %s" % so_path + + junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? 
yes, indeed, trying to eat my files ;) + junkstuff = glob.glob(junkpath) + for path in junkstuff: + if path != so_path: + try: + os.remove(path) + except IOError: + _info("Couldn't remove %s", path) + + return so_path + + +def load_module(name, pyxfilename, pyxbuild_dir=None, is_package=False, + build_inplace=False, language_level=None, so_path=None): + try: + if so_path is None: + if is_package: + module_name = name + '.__init__' + else: + module_name = name + so_path = build_module(module_name, pyxfilename, pyxbuild_dir, + inplace=build_inplace, language_level=language_level) + mod = imp.load_dynamic(name, so_path) + if is_package and not hasattr(mod, '__path__'): + mod.__path__ = [os.path.dirname(so_path)] + assert mod.__file__ == so_path, (mod.__file__, so_path) + except Exception as failure_exc: + _debug("Failed to load extension module: %r" % failure_exc) + if pyxargs.load_py_module_on_import_failure and pyxfilename.endswith('.py'): + # try to fall back to normal import + mod = imp.load_source(name, pyxfilename) + assert mod.__file__ in (pyxfilename, pyxfilename+'c', pyxfilename+'o'), (mod.__file__, pyxfilename) + else: + tb = sys.exc_info()[2] + import traceback + exc = ImportError("Building module %s failed: %s" % ( + name, traceback.format_exception_only(*sys.exc_info()[:2]))) + if sys.version_info[0] >= 3: + raise exc.with_traceback(tb) + else: + exec("raise exc, None, tb", {'exc': exc, 'tb': tb}) + return mod + + +# import hooks + +class PyxImporter(object): + """A meta-path importer for .pyx files. + """ + def __init__(self, extension=PYX_EXT, pyxbuild_dir=None, inplace=False, + language_level=None): + self.extension = extension + self.pyxbuild_dir = pyxbuild_dir + self.inplace = inplace + self.language_level = language_level + + def find_module(self, fullname, package_path=None): + if fullname in sys.modules and not pyxargs.reload_support: + return None # only here when reload() + + # package_path might be a _NamespacePath. Convert that into a list... 
+ if package_path is not None and not isinstance(package_path, list): + package_path = list(package_path) + try: + fp, pathname, (ext,mode,ty) = imp.find_module(fullname,package_path) + if fp: fp.close() # Python should offer a Default-Loader to avoid this double find/open! + if pathname and ty == imp.PKG_DIRECTORY: + pkg_file = os.path.join(pathname, '__init__'+self.extension) + if os.path.isfile(pkg_file): + return PyxLoader(fullname, pathname, + init_path=pkg_file, + pyxbuild_dir=self.pyxbuild_dir, + inplace=self.inplace, + language_level=self.language_level) + if pathname and pathname.endswith(self.extension): + return PyxLoader(fullname, pathname, + pyxbuild_dir=self.pyxbuild_dir, + inplace=self.inplace, + language_level=self.language_level) + if ty != imp.C_EXTENSION: # only when an extension, check if we have a .pyx next! + return None + + # find .pyx fast, when .so/.pyd exist --inplace + pyxpath = os.path.splitext(pathname)[0]+self.extension + if os.path.isfile(pyxpath): + return PyxLoader(fullname, pyxpath, + pyxbuild_dir=self.pyxbuild_dir, + inplace=self.inplace, + language_level=self.language_level) + + # .so/.pyd's on PATH should not be remote from .pyx's + # think no need to implement PyxArgs.importer_search_remote here? + + except ImportError: + pass + + # searching sys.path ... + + #if DEBUG_IMPORT: print "SEARCHING", fullname, package_path + + mod_parts = fullname.split('.') + module_name = mod_parts[-1] + pyx_module_name = module_name + self.extension + + # this may work, but it returns the file content, not its path + #import pkgutil + #pyx_source = pkgutil.get_data(package, pyx_module_name) + + paths = package_path or sys.path + for path in paths: + pyx_data = None + if not path: + path = os.getcwd() + elif os.path.isfile(path): + try: + zi = zipimporter(path) + pyx_data = zi.get_data(pyx_module_name) + except (ZipImportError, IOError, OSError): + continue # Module not found. 
+ # unzip the imported file into the build dir + # FIXME: can interfere with later imports if build dir is in sys.path and comes before zip file + path = self.pyxbuild_dir + elif not os.path.isabs(path): + path = os.path.abspath(path) + + pyx_module_path = os.path.join(path, pyx_module_name) + if pyx_data is not None: + if not os.path.exists(path): + try: + os.makedirs(path) + except OSError: + # concurrency issue? + if not os.path.exists(path): + raise + with open(pyx_module_path, "wb") as f: + f.write(pyx_data) + elif not os.path.isfile(pyx_module_path): + continue # Module not found. + + return PyxLoader(fullname, pyx_module_path, + pyxbuild_dir=self.pyxbuild_dir, + inplace=self.inplace, + language_level=self.language_level) + + # not found, normal package, not a .pyx file, none of our business + _debug("%s not found" % fullname) + return None + + +class PyImporter(PyxImporter): + """A meta-path importer for normal .py files. + """ + def __init__(self, pyxbuild_dir=None, inplace=False, language_level=None): + if language_level is None: + language_level = sys.version_info[0] + self.super = super(PyImporter, self) + self.super.__init__(extension='.py', pyxbuild_dir=pyxbuild_dir, inplace=inplace, + language_level=language_level) + self.uncompilable_modules = {} + self.blocked_modules = ['Cython', 'pyxbuild', 'pyximport.pyxbuild', + 'distutils'] + self.blocked_packages = ['Cython.', 'distutils.'] + + def find_module(self, fullname, package_path=None): + if fullname in sys.modules: + return None + if any([fullname.startswith(pkg) for pkg in self.blocked_packages]): + return None + if fullname in self.blocked_modules: + # prevent infinite recursion + return None + if _lib_loader.knows(fullname): + return _lib_loader + _debug("trying import of module '%s'", fullname) + if fullname in self.uncompilable_modules: + path, last_modified = self.uncompilable_modules[fullname] + try: + new_last_modified = os.stat(path).st_mtime + if new_last_modified > last_modified: + # 
import would fail again + return None + except OSError: + # module is no longer where we found it, retry the import + pass + + self.blocked_modules.append(fullname) + try: + importer = self.super.find_module(fullname, package_path) + if importer is not None: + if importer.init_path: + path = importer.init_path + real_name = fullname + '.__init__' + else: + path = importer.path + real_name = fullname + _debug("importer found path %s for module %s", path, real_name) + try: + so_path = build_module( + real_name, path, + pyxbuild_dir=self.pyxbuild_dir, + language_level=self.language_level, + inplace=self.inplace) + _lib_loader.add_lib(fullname, path, so_path, + is_package=bool(importer.init_path)) + return _lib_loader + except Exception: + if DEBUG_IMPORT: + import traceback + traceback.print_exc() + # build failed, not a compilable Python module + try: + last_modified = os.stat(path).st_mtime + except OSError: + last_modified = 0 + self.uncompilable_modules[fullname] = (path, last_modified) + importer = None + finally: + self.blocked_modules.pop() + return importer + + +class LibLoader(object): + def __init__(self): + self._libs = {} + + def load_module(self, fullname): + try: + source_path, so_path, is_package = self._libs[fullname] + except KeyError: + raise ValueError("invalid module %s" % fullname) + _debug("Loading shared library module '%s' from %s", fullname, so_path) + return load_module(fullname, source_path, so_path=so_path, is_package=is_package) + + def add_lib(self, fullname, path, so_path, is_package): + self._libs[fullname] = (path, so_path, is_package) + + def knows(self, fullname): + return fullname in self._libs + +_lib_loader = LibLoader() + + +class PyxLoader(object): + def __init__(self, fullname, path, init_path=None, pyxbuild_dir=None, + inplace=False, language_level=None): + _debug("PyxLoader created for loading %s from %s (init path: %s)", + fullname, path, init_path) + self.fullname = fullname + self.path, self.init_path = path, init_path + 
self.pyxbuild_dir = pyxbuild_dir + self.inplace = inplace + self.language_level = language_level + + def load_module(self, fullname): + assert self.fullname == fullname, ( + "invalid module, expected %s, got %s" % ( + self.fullname, fullname)) + if self.init_path: + # package + #print "PACKAGE", fullname + module = load_module(fullname, self.init_path, + self.pyxbuild_dir, is_package=True, + build_inplace=self.inplace, + language_level=self.language_level) + module.__path__ = [self.path] + else: + #print "MODULE", fullname + module = load_module(fullname, self.path, + self.pyxbuild_dir, + build_inplace=self.inplace, + language_level=self.language_level) + return module + + +#install args +class PyxArgs(object): + build_dir=True + build_in_temp=True + setup_args={} #None + +##pyxargs=None + + +def _have_importers(): + has_py_importer = False + has_pyx_importer = False + for importer in sys.meta_path: + if isinstance(importer, PyxImporter): + if isinstance(importer, PyImporter): + has_py_importer = True + else: + has_pyx_importer = True + + return has_py_importer, has_pyx_importer + + +def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True, + setup_args=None, reload_support=False, + load_py_module_on_import_failure=False, inplace=False, + language_level=None): + """ Main entry point for pyxinstall. + + Call this to install the ``.pyx`` import hook in + your meta-path for a single Python process. If you want it to be + installed whenever you use Python, add it to your ``sitecustomize`` + (as described above). + + :param pyximport: If set to False, does not try to import ``.pyx`` files. + + :param pyimport: You can pass ``pyimport=True`` to also + install the ``.py`` import hook + in your meta-path. Note, however, that it is rather experimental, + will not work at all for some ``.py`` files and packages, and will + heavily slow down your imports due to search and compilation. + Use at your own risk. 
+ + :param build_dir: By default, compiled modules will end up in a ``.pyxbld`` + directory in the user's home directory. Passing a different path + as ``build_dir`` will override this. + + :param build_in_temp: If ``False``, will produce the C files locally. Working + with complex dependencies and debugging becomes more easy. This + can principally interfere with existing files of the same name. + + :param setup_args: Dict of arguments for Distribution. + See ``distutils.core.setup()``. + + :param reload_support: Enables support for dynamic + ``reload(my_module)``, e.g. after a change in the Cython code. + Additional files ``.reloadNN`` may arise on that account, when + the previously loaded module file cannot be overwritten. + + :param load_py_module_on_import_failure: If the compilation of a ``.py`` + file succeeds, but the subsequent import fails for some reason, + retry the import with the normal ``.py`` module instead of the + compiled module. Note that this may lead to unpredictable results + for modules that change the system state during their import, as + the second import will rerun these modifications in whatever state + the system was left after the import of the compiled module + failed. + + :param inplace: Install the compiled module + (``.so`` for Linux and Mac / ``.pyd`` for Windows) + next to the source file. + + :param language_level: The source language level to use: 2 or 3. + The default is to use the language level of the current Python + runtime for .py files and Py2 for ``.pyx`` files. 
+ """ + if setup_args is None: + setup_args = {} + if not build_dir: + build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld') + + global pyxargs + pyxargs = PyxArgs() #$pycheck_no + pyxargs.build_dir = build_dir + pyxargs.build_in_temp = build_in_temp + pyxargs.setup_args = (setup_args or {}).copy() + pyxargs.reload_support = reload_support + pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure + + has_py_importer, has_pyx_importer = _have_importers() + py_importer, pyx_importer = None, None + + if pyimport and not has_py_importer: + py_importer = PyImporter(pyxbuild_dir=build_dir, inplace=inplace, + language_level=language_level) + # make sure we import Cython before we install the import hook + import Cython.Compiler.Main, Cython.Compiler.Pipeline, Cython.Compiler.Optimize + sys.meta_path.insert(0, py_importer) + + if pyximport and not has_pyx_importer: + pyx_importer = PyxImporter(pyxbuild_dir=build_dir, inplace=inplace, + language_level=language_level) + sys.meta_path.append(pyx_importer) + + return py_importer, pyx_importer + + +def uninstall(py_importer, pyx_importer): + """ + Uninstall an import hook. + """ + try: + sys.meta_path.remove(py_importer) + except ValueError: + pass + + try: + sys.meta_path.remove(pyx_importer) + except ValueError: + pass + + +# MAIN + +def show_docs(): + import __main__ + __main__.__name__ = mod_name + for name in dir(__main__): + item = getattr(__main__, name) + try: + setattr(item, "__module__", mod_name) + except (AttributeError, TypeError): + pass + help(__main__) + + +if __name__ == '__main__': + show_docs() diff --git a/pyximport/_pyximport3.py b/pyximport/_pyximport3.py new file mode 100644 index 000000000..dccd1d09e --- /dev/null +++ b/pyximport/_pyximport3.py @@ -0,0 +1,464 @@ +""" +Import hooks; when installed with the install() function, these hooks +allow importing .pyx files as if they were Python modules. 
+ +If you want the hook installed every time you run Python +you can add it to your Python version by adding these lines to +sitecustomize.py (which you can create from scratch in site-packages +if it doesn't exist there or somewhere else on your python path):: + + import pyximport + pyximport.install() + +For instance on the Mac with a non-system Python 2.3, you could create +sitecustomize.py with only those two lines at +/usr/local/lib/python2.3/site-packages/sitecustomize.py . + +A custom distutils.core.Extension instance and setup() args +(Distribution) for for the build can be defined by a .pyxbld +file like: + +# examplemod.pyxbld +def make_ext(modname, pyxfilename): + from distutils.extension import Extension + return Extension(name = modname, + sources=[pyxfilename, 'hello.c'], + include_dirs=['/myinclude'] ) +def make_setup_args(): + return dict(script_args=["--compiler=mingw32"]) + +Extra dependencies can be defined by a .pyxdep . +See README. + +Since Cython 0.11, the :mod:`pyximport` module also has experimental +compilation support for normal Python modules. This allows you to +automatically run Cython on every .pyx and .py module that Python +imports, including parts of the standard library and installed +packages. Cython will still fail to compile a lot of Python modules, +in which case the import mechanism will fall back to loading the +Python source modules instead. The .py import mechanism is installed +like this:: + + pyximport.install(pyimport = True) + +Running this module as a top-level script will run a test and then print +the documentation. 
+""" + +import glob +import importlib +import os +import sys +from importlib.abc import MetaPathFinder +from importlib.machinery import ExtensionFileLoader, SourceFileLoader +from importlib.util import spec_from_file_location + +mod_name = "pyximport" + +PY_EXT = ".py" +PYX_EXT = ".pyx" +PYXDEP_EXT = ".pyxdep" +PYXBLD_EXT = ".pyxbld" + +DEBUG_IMPORT = False + + +def _print(message, args): + if args: + message = message % args + print(message) + + +def _debug(message, *args): + if DEBUG_IMPORT: + _print(message, args) + + +def _info(message, *args): + _print(message, args) + + +def load_source(file_path): + import importlib.util + from importlib.machinery import SourceFileLoader + spec = importlib.util.spec_from_file_location("XXXX", file_path, loader=SourceFileLoader("XXXX", file_path)) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +def get_distutils_extension(modname, pyxfilename, language_level=None): +# try: +# import hashlib +# except ImportError: +# import md5 as hashlib +# extra = "_" + hashlib.md5(open(pyxfilename).read()).hexdigest() +# modname = modname + extra + extension_mod,setup_args = handle_special_build(modname, pyxfilename) + if not extension_mod: + if not isinstance(pyxfilename, str): + # distutils is stupid in Py2 and requires exactly 'str' + # => encode accidentally coerced unicode strings back to str + pyxfilename = pyxfilename.encode(sys.getfilesystemencoding()) + from distutils.extension import Extension + extension_mod = Extension(name = modname, sources=[pyxfilename]) + if language_level is not None: + extension_mod.cython_directives = {'language_level': language_level} + return extension_mod,setup_args + + +def handle_special_build(modname, pyxfilename): + special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT + ext = None + setup_args={} + if os.path.exists(special_build): + # globls = {} + # locs = {} + # execfile(special_build, globls, locs) + # ext = 
locs["make_ext"](modname, pyxfilename) + mod = load_source(special_build) + make_ext = getattr(mod,'make_ext',None) + if make_ext: + ext = make_ext(modname, pyxfilename) + assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build + make_setup_args = getattr(mod, 'make_setup_args',None) + if make_setup_args: + setup_args = make_setup_args() + assert isinstance(setup_args,dict), ("make_setup_args in %s did not return a dict" + % special_build) + assert set or setup_args, ("neither make_ext nor make_setup_args %s" + % special_build) + ext.sources = [os.path.join(os.path.dirname(special_build), source) + for source in ext.sources] + return ext, setup_args + + +def handle_dependencies(pyxfilename): + testing = '_test_files' in globals() + dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT + + # by default let distutils decide whether to rebuild on its own + # (it has a better idea of what the output file will be) + + # but we know more about dependencies so force a rebuild if + # some of the dependencies are newer than the pyxfile. + if os.path.exists(dependfile): + with open(dependfile) as fid: + depends = fid.readlines() + depends = [depend.strip() for depend in depends] + + # gather dependencies in the "files" variable + # the dependency file is itself a dependency + files = [dependfile] + for depend in depends: + fullpath = os.path.join(os.path.dirname(dependfile), + depend) + files.extend(glob.glob(fullpath)) + + # only for unit testing to see we did the right thing + if testing: + _test_files[:] = [] #$pycheck_no + + # if any file that the pyxfile depends upon is newer than + # the pyx file, 'touch' the pyx file so that distutils will + # be tricked into rebuilding it. 
+ for file in files: + from distutils.dep_util import newer + if newer(file, pyxfilename): + _debug("Rebuilding %s because of %s", pyxfilename, file) + filetime = os.path.getmtime(file) + os.utime(pyxfilename, (filetime, filetime)) + if testing: + _test_files.append(file) + + +def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None): + assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename + handle_dependencies(pyxfilename) + + extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level) + build_in_temp = pyxargs.build_in_temp + sargs = pyxargs.setup_args.copy() + sargs.update(setup_args) + build_in_temp = sargs.pop('build_in_temp',build_in_temp) + + from . import pyxbuild + so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod, + build_in_temp=build_in_temp, + pyxbuild_dir=pyxbuild_dir, + setup_args=sargs, + inplace=inplace, + reload_support=pyxargs.reload_support) + assert os.path.exists(so_path), "Cannot find: %s" % so_path + + junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;) + junkstuff = glob.glob(junkpath) + for path in junkstuff: + if path != so_path: + try: + os.remove(path) + except IOError: + _info("Couldn't remove %s", path) + + return so_path + + +# import hooks + +class PyxImportMetaFinder(MetaPathFinder): + + def __init__(self, extension=PYX_EXT, pyxbuild_dir=None, inplace=False, language_level=None): + self.pyxbuild_dir = pyxbuild_dir + self.inplace = inplace + self.language_level = language_level + self.extension = extension + + def find_spec(self, fullname, path, target=None): + if not path: + path = [os.getcwd()] # top level import -- + if "." 
in fullname: + *parents, name = fullname.split(".") + else: + name = fullname + for entry in path: + if os.path.isdir(os.path.join(entry, name)): + # this module has child modules + filename = os.path.join(entry, name, "__init__" + self.extension) + submodule_locations = [os.path.join(entry, name)] + else: + filename = os.path.join(entry, name + self.extension) + submodule_locations = None + if not os.path.exists(filename): + continue + + return spec_from_file_location( + fullname, filename, + loader=PyxImportLoader(filename, self.pyxbuild_dir, self.inplace, self.language_level), + submodule_search_locations=submodule_locations) + + return None # we don't know how to import this + + +class PyImportMetaFinder(MetaPathFinder): + + def __init__(self, extension=PY_EXT, pyxbuild_dir=None, inplace=False, language_level=None): + self.pyxbuild_dir = pyxbuild_dir + self.inplace = inplace + self.language_level = language_level + self.extension = extension + self.uncompilable_modules = {} + self.blocked_modules = ['Cython', 'pyxbuild', 'pyximport.pyxbuild', + 'distutils', 'cython'] + self.blocked_packages = ['Cython.', 'distutils.'] + + def find_spec(self, fullname, path, target=None): + if fullname in sys.modules: + return None + if any([fullname.startswith(pkg) for pkg in self.blocked_packages]): + return None + if fullname in self.blocked_modules: + # prevent infinite recursion + return None + + self.blocked_modules.append(fullname) + name = fullname + if not path: + path = [os.getcwd()] # top level import -- + try: + for entry in path: + if os.path.isdir(os.path.join(entry, name)): + # this module has child modules + filename = os.path.join(entry, name, "__init__" + self.extension) + submodule_locations = [os.path.join(entry, name)] + else: + filename = os.path.join(entry, name + self.extension) + submodule_locations = None + if not os.path.exists(filename): + continue + + return spec_from_file_location( + fullname, filename, + loader=PyxImportLoader(filename, 
self.pyxbuild_dir, self.inplace, self.language_level), + submodule_search_locations=submodule_locations) + finally: + self.blocked_modules.pop() + + return None # we don't know how to import this + + +class PyxImportLoader(ExtensionFileLoader): + + def __init__(self, filename, pyxbuild_dir, inplace, language_level): + module_name = os.path.splitext(os.path.basename(filename))[0] + super().__init__(module_name, filename) + self._pyxbuild_dir = pyxbuild_dir + self._inplace = inplace + self._language_level = language_level + + def create_module(self, spec): + try: + so_path = build_module(spec.name, pyxfilename=spec.origin, pyxbuild_dir=self._pyxbuild_dir, + inplace=self._inplace, language_level=self._language_level) + self.path = so_path + spec.origin = so_path + return super().create_module(spec) + except Exception as failure_exc: + _debug("Failed to load extension module: %r" % failure_exc) + if pyxargs.load_py_module_on_import_failure and spec.origin.endswith(PY_EXT): + spec = importlib.util.spec_from_file_location(spec.name, spec.origin, + loader=SourceFileLoader(spec.name, spec.origin)) + mod = importlib.util.module_from_spec(spec) + assert mod.__file__ in (spec.origin, spec.origin + 'c', spec.origin + 'o'), (mod.__file__, spec.origin) + return mod + else: + tb = sys.exc_info()[2] + import traceback + exc = ImportError("Building module %s failed: %s" % ( + spec.name, traceback.format_exception_only(*sys.exc_info()[:2]))) + raise exc.with_traceback(tb) + + def exec_module(self, module): + try: + return super().exec_module(module) + except Exception as failure_exc: + import traceback + _debug("Failed to load extension module: %r" % failure_exc) + raise ImportError("Executing module %s failed %s" % ( + module.__file__, traceback.format_exception_only(*sys.exc_info()[:2]))) + + +#install args +class PyxArgs(object): + build_dir=True + build_in_temp=True + setup_args={} #None + + +def _have_importers(): + has_py_importer = False + has_pyx_importer = False + for 
importer in sys.meta_path: + if isinstance(importer, PyxImportMetaFinder): + if isinstance(importer, PyImportMetaFinder): + has_py_importer = True + else: + has_pyx_importer = True + + return has_py_importer, has_pyx_importer + + +def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True, + setup_args=None, reload_support=False, + load_py_module_on_import_failure=False, inplace=False, + language_level=None): + """ Main entry point for pyxinstall. + + Call this to install the ``.pyx`` import hook in + your meta-path for a single Python process. If you want it to be + installed whenever you use Python, add it to your ``sitecustomize`` + (as described above). + + :param pyximport: If set to False, does not try to import ``.pyx`` files. + + :param pyimport: You can pass ``pyimport=True`` to also + install the ``.py`` import hook + in your meta-path. Note, however, that it is rather experimental, + will not work at all for some ``.py`` files and packages, and will + heavily slow down your imports due to search and compilation. + Use at your own risk. + + :param build_dir: By default, compiled modules will end up in a ``.pyxbld`` + directory in the user's home directory. Passing a different path + as ``build_dir`` will override this. + + :param build_in_temp: If ``False``, will produce the C files locally. Working + with complex dependencies and debugging becomes more easy. This + can principally interfere with existing files of the same name. + + :param setup_args: Dict of arguments for Distribution. + See ``distutils.core.setup()``. + + :param reload_support: Enables support for dynamic + ``reload(my_module)``, e.g. after a change in the Cython code. + Additional files ``.reloadNN`` may arise on that account, when + the previously loaded module file cannot be overwritten. 
+ + :param load_py_module_on_import_failure: If the compilation of a ``.py`` + file succeeds, but the subsequent import fails for some reason, + retry the import with the normal ``.py`` module instead of the + compiled module. Note that this may lead to unpredictable results + for modules that change the system state during their import, as + the second import will rerun these modifications in whatever state + the system was left after the import of the compiled module + failed. + + :param inplace: Install the compiled module + (``.so`` for Linux and Mac / ``.pyd`` for Windows) + next to the source file. + + :param language_level: The source language level to use: 2 or 3. + The default is to use the language level of the current Python + runtime for .py files and Py2 for ``.pyx`` files. + """ + if setup_args is None: + setup_args = {} + if not build_dir: + build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld') + + global pyxargs + pyxargs = PyxArgs() #$pycheck_no + pyxargs.build_dir = build_dir + pyxargs.build_in_temp = build_in_temp + pyxargs.setup_args = (setup_args or {}).copy() + pyxargs.reload_support = reload_support + pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure + + has_py_importer, has_pyx_importer = _have_importers() + py_importer, pyx_importer = None, None + + if pyimport and not has_py_importer: + py_importer = PyImportMetaFinder(pyxbuild_dir=build_dir, inplace=inplace, + language_level=language_level) + # make sure we import Cython before we install the import hook + import Cython.Compiler.Main, Cython.Compiler.Pipeline, Cython.Compiler.Optimize + sys.meta_path.insert(0, py_importer) + + if pyximport and not has_pyx_importer: + pyx_importer = PyxImportMetaFinder(pyxbuild_dir=build_dir, inplace=inplace, + language_level=language_level) + sys.meta_path.append(pyx_importer) + + return py_importer, pyx_importer + + +def uninstall(py_importer, pyx_importer): + """ + Uninstall an import hook. 
+ """ + try: + sys.meta_path.remove(py_importer) + except ValueError: + pass + + try: + sys.meta_path.remove(pyx_importer) + except ValueError: + pass + + +# MAIN + +def show_docs(): + import __main__ + __main__.__name__ = mod_name + for name in dir(__main__): + item = getattr(__main__, name) + try: + setattr(item, "__module__", mod_name) + except (AttributeError, TypeError): + pass + help(__main__) + + +if __name__ == '__main__': + show_docs() diff --git a/pyximport/pyximport.py b/pyximport/pyximport.py index b2077826a..9d575815a 100644 --- a/pyximport/pyximport.py +++ b/pyximport/pyximport.py @@ -1,606 +1,11 @@ -""" -Import hooks; when installed with the install() function, these hooks -allow importing .pyx files as if they were Python modules. - -If you want the hook installed every time you run Python -you can add it to your Python version by adding these lines to -sitecustomize.py (which you can create from scratch in site-packages -if it doesn't exist there or somewhere else on your python path):: - - import pyximport - pyximport.install() - -For instance on the Mac with a non-system Python 2.3, you could create -sitecustomize.py with only those two lines at -/usr/local/lib/python2.3/site-packages/sitecustomize.py . - -A custom distutils.core.Extension instance and setup() args -(Distribution) for for the build can be defined by a .pyxbld -file like: - -# examplemod.pyxbld -def make_ext(modname, pyxfilename): - from distutils.extension import Extension - return Extension(name = modname, - sources=[pyxfilename, 'hello.c'], - include_dirs=['/myinclude'] ) -def make_setup_args(): - return dict(script_args=["--compiler=mingw32"]) - -Extra dependencies can be defined by a .pyxdep . -See README. - -Since Cython 0.11, the :mod:`pyximport` module also has experimental -compilation support for normal Python modules. 
This allows you to -automatically run Cython on every .pyx and .py module that Python -imports, including parts of the standard library and installed -packages. Cython will still fail to compile a lot of Python modules, -in which case the import mechanism will fall back to loading the -Python source modules instead. The .py import mechanism is installed -like this:: - - pyximport.install(pyimport = True) - -Running this module as a top-level script will run a test and then print -the documentation. - -This code is based on the Py2.3+ import protocol as described in PEP 302. -""" - -import glob -import imp -import os +from __future__ import absolute_import import sys -from zipimport import zipimporter, ZipImportError - -mod_name = "pyximport" - -PYX_EXT = ".pyx" -PYXDEP_EXT = ".pyxdep" -PYXBLD_EXT = ".pyxbld" - -DEBUG_IMPORT = False - - -def _print(message, args): - if args: - message = message % args - print(message) - - -def _debug(message, *args): - if DEBUG_IMPORT: - _print(message, args) - - -def _info(message, *args): - _print(message, args) - - -# Performance problem: for every PYX file that is imported, we will -# invoke the whole distutils infrastructure even if the module is -# already built. It might be more efficient to only do it when the -# mod time of the .pyx is newer than the mod time of the .so but -# the question is how to get distutils to tell me the name of the .so -# before it builds it. Maybe it is easy...but maybe the performance -# issue isn't real. -def _load_pyrex(name, filename): - "Load a pyrex file given a name and filename." 
- - -def get_distutils_extension(modname, pyxfilename, language_level=None): -# try: -# import hashlib -# except ImportError: -# import md5 as hashlib -# extra = "_" + hashlib.md5(open(pyxfilename).read()).hexdigest() -# modname = modname + extra - extension_mod,setup_args = handle_special_build(modname, pyxfilename) - if not extension_mod: - if not isinstance(pyxfilename, str): - # distutils is stupid in Py2 and requires exactly 'str' - # => encode accidentally coerced unicode strings back to str - pyxfilename = pyxfilename.encode(sys.getfilesystemencoding()) - from distutils.extension import Extension - extension_mod = Extension(name = modname, sources=[pyxfilename]) - if language_level is not None: - extension_mod.cython_directives = {'language_level': language_level} - return extension_mod,setup_args - - -def handle_special_build(modname, pyxfilename): - special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT - ext = None - setup_args={} - if os.path.exists(special_build): - # globls = {} - # locs = {} - # execfile(special_build, globls, locs) - # ext = locs["make_ext"](modname, pyxfilename) - with open(special_build) as fid: - mod = imp.load_source("XXXX", special_build, fid) - make_ext = getattr(mod,'make_ext',None) - if make_ext: - ext = make_ext(modname, pyxfilename) - assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build - make_setup_args = getattr(mod, 'make_setup_args',None) - if make_setup_args: - setup_args = make_setup_args() - assert isinstance(setup_args,dict), ("make_setup_args in %s did not return a dict" - % special_build) - assert set or setup_args, ("neither make_ext nor make_setup_args %s" - % special_build) - ext.sources = [os.path.join(os.path.dirname(special_build), source) - for source in ext.sources] - return ext, setup_args - - -def handle_dependencies(pyxfilename): - testing = '_test_files' in globals() - dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT - - # by default let distutils 
decide whether to rebuild on its own - # (it has a better idea of what the output file will be) - - # but we know more about dependencies so force a rebuild if - # some of the dependencies are newer than the pyxfile. - if os.path.exists(dependfile): - with open(dependfile) as fid: - depends = fid.readlines() - depends = [depend.strip() for depend in depends] - - # gather dependencies in the "files" variable - # the dependency file is itself a dependency - files = [dependfile] - for depend in depends: - fullpath = os.path.join(os.path.dirname(dependfile), - depend) - files.extend(glob.glob(fullpath)) - - # only for unit testing to see we did the right thing - if testing: - _test_files[:] = [] #$pycheck_no - - # if any file that the pyxfile depends upon is newer than - # the pyx file, 'touch' the pyx file so that distutils will - # be tricked into rebuilding it. - for file in files: - from distutils.dep_util import newer - if newer(file, pyxfilename): - _debug("Rebuilding %s because of %s", pyxfilename, file) - filetime = os.path.getmtime(file) - os.utime(pyxfilename, (filetime, filetime)) - if testing: - _test_files.append(file) - - -def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None): - assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename - handle_dependencies(pyxfilename) - - extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level) - build_in_temp = pyxargs.build_in_temp - sargs = pyxargs.setup_args.copy() - sargs.update(setup_args) - build_in_temp = sargs.pop('build_in_temp',build_in_temp) - - from . import pyxbuild - so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod, - build_in_temp=build_in_temp, - pyxbuild_dir=pyxbuild_dir, - setup_args=sargs, - inplace=inplace, - reload_support=pyxargs.reload_support) - assert os.path.exists(so_path), "Cannot find: %s" % so_path - - junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? 
yes, indeed, trying to eat my files ;) - junkstuff = glob.glob(junkpath) - for path in junkstuff: - if path != so_path: - try: - os.remove(path) - except IOError: - _info("Couldn't remove %s", path) - - return so_path - - -def load_module(name, pyxfilename, pyxbuild_dir=None, is_package=False, - build_inplace=False, language_level=None, so_path=None): - try: - if so_path is None: - if is_package: - module_name = name + '.__init__' - else: - module_name = name - so_path = build_module(module_name, pyxfilename, pyxbuild_dir, - inplace=build_inplace, language_level=language_level) - mod = imp.load_dynamic(name, so_path) - if is_package and not hasattr(mod, '__path__'): - mod.__path__ = [os.path.dirname(so_path)] - assert mod.__file__ == so_path, (mod.__file__, so_path) - except Exception as failure_exc: - _debug("Failed to load extension module: %r" % failure_exc) - if pyxargs.load_py_module_on_import_failure and pyxfilename.endswith('.py'): - # try to fall back to normal import - mod = imp.load_source(name, pyxfilename) - assert mod.__file__ in (pyxfilename, pyxfilename+'c', pyxfilename+'o'), (mod.__file__, pyxfilename) - else: - tb = sys.exc_info()[2] - import traceback - exc = ImportError("Building module %s failed: %s" % ( - name, traceback.format_exception_only(*sys.exc_info()[:2]))) - if sys.version_info[0] >= 3: - raise exc.with_traceback(tb) - else: - exec("raise exc, None, tb", {'exc': exc, 'tb': tb}) - return mod - - -# import hooks - -class PyxImporter(object): - """A meta-path importer for .pyx files. - """ - def __init__(self, extension=PYX_EXT, pyxbuild_dir=None, inplace=False, - language_level=None): - self.extension = extension - self.pyxbuild_dir = pyxbuild_dir - self.inplace = inplace - self.language_level = language_level - - def find_module(self, fullname, package_path=None): - if fullname in sys.modules and not pyxargs.reload_support: - return None # only here when reload() - - # package_path might be a _NamespacePath. Convert that into a list... 
- if package_path is not None and not isinstance(package_path, list): - package_path = list(package_path) - try: - fp, pathname, (ext,mode,ty) = imp.find_module(fullname,package_path) - if fp: fp.close() # Python should offer a Default-Loader to avoid this double find/open! - if pathname and ty == imp.PKG_DIRECTORY: - pkg_file = os.path.join(pathname, '__init__'+self.extension) - if os.path.isfile(pkg_file): - return PyxLoader(fullname, pathname, - init_path=pkg_file, - pyxbuild_dir=self.pyxbuild_dir, - inplace=self.inplace, - language_level=self.language_level) - if pathname and pathname.endswith(self.extension): - return PyxLoader(fullname, pathname, - pyxbuild_dir=self.pyxbuild_dir, - inplace=self.inplace, - language_level=self.language_level) - if ty != imp.C_EXTENSION: # only when an extension, check if we have a .pyx next! - return None - - # find .pyx fast, when .so/.pyd exist --inplace - pyxpath = os.path.splitext(pathname)[0]+self.extension - if os.path.isfile(pyxpath): - return PyxLoader(fullname, pyxpath, - pyxbuild_dir=self.pyxbuild_dir, - inplace=self.inplace, - language_level=self.language_level) - - # .so/.pyd's on PATH should not be remote from .pyx's - # think no need to implement PyxArgs.importer_search_remote here? - - except ImportError: - pass - - # searching sys.path ... - - #if DEBUG_IMPORT: print "SEARCHING", fullname, package_path - - mod_parts = fullname.split('.') - module_name = mod_parts[-1] - pyx_module_name = module_name + self.extension - - # this may work, but it returns the file content, not its path - #import pkgutil - #pyx_source = pkgutil.get_data(package, pyx_module_name) - - paths = package_path or sys.path - for path in paths: - pyx_data = None - if not path: - path = os.getcwd() - elif os.path.isfile(path): - try: - zi = zipimporter(path) - pyx_data = zi.get_data(pyx_module_name) - except (ZipImportError, IOError, OSError): - continue # Module not found. 
- # unzip the imported file into the build dir - # FIXME: can interfere with later imports if build dir is in sys.path and comes before zip file - path = self.pyxbuild_dir - elif not os.path.isabs(path): - path = os.path.abspath(path) - - pyx_module_path = os.path.join(path, pyx_module_name) - if pyx_data is not None: - if not os.path.exists(path): - try: - os.makedirs(path) - except OSError: - # concurrency issue? - if not os.path.exists(path): - raise - with open(pyx_module_path, "wb") as f: - f.write(pyx_data) - elif not os.path.isfile(pyx_module_path): - continue # Module not found. - - return PyxLoader(fullname, pyx_module_path, - pyxbuild_dir=self.pyxbuild_dir, - inplace=self.inplace, - language_level=self.language_level) - - # not found, normal package, not a .pyx file, none of our business - _debug("%s not found" % fullname) - return None - - -class PyImporter(PyxImporter): - """A meta-path importer for normal .py files. - """ - def __init__(self, pyxbuild_dir=None, inplace=False, language_level=None): - if language_level is None: - language_level = sys.version_info[0] - self.super = super(PyImporter, self) - self.super.__init__(extension='.py', pyxbuild_dir=pyxbuild_dir, inplace=inplace, - language_level=language_level) - self.uncompilable_modules = {} - self.blocked_modules = ['Cython', 'pyxbuild', 'pyximport.pyxbuild', - 'distutils'] - self.blocked_packages = ['Cython.', 'distutils.'] - - def find_module(self, fullname, package_path=None): - if fullname in sys.modules: - return None - if any([fullname.startswith(pkg) for pkg in self.blocked_packages]): - return None - if fullname in self.blocked_modules: - # prevent infinite recursion - return None - if _lib_loader.knows(fullname): - return _lib_loader - _debug("trying import of module '%s'", fullname) - if fullname in self.uncompilable_modules: - path, last_modified = self.uncompilable_modules[fullname] - try: - new_last_modified = os.stat(path).st_mtime - if new_last_modified > last_modified: - # 
import would fail again - return None - except OSError: - # module is no longer where we found it, retry the import - pass - - self.blocked_modules.append(fullname) - try: - importer = self.super.find_module(fullname, package_path) - if importer is not None: - if importer.init_path: - path = importer.init_path - real_name = fullname + '.__init__' - else: - path = importer.path - real_name = fullname - _debug("importer found path %s for module %s", path, real_name) - try: - so_path = build_module( - real_name, path, - pyxbuild_dir=self.pyxbuild_dir, - language_level=self.language_level, - inplace=self.inplace) - _lib_loader.add_lib(fullname, path, so_path, - is_package=bool(importer.init_path)) - return _lib_loader - except Exception: - if DEBUG_IMPORT: - import traceback - traceback.print_exc() - # build failed, not a compilable Python module - try: - last_modified = os.stat(path).st_mtime - except OSError: - last_modified = 0 - self.uncompilable_modules[fullname] = (path, last_modified) - importer = None - finally: - self.blocked_modules.pop() - return importer - - -class LibLoader(object): - def __init__(self): - self._libs = {} - - def load_module(self, fullname): - try: - source_path, so_path, is_package = self._libs[fullname] - except KeyError: - raise ValueError("invalid module %s" % fullname) - _debug("Loading shared library module '%s' from %s", fullname, so_path) - return load_module(fullname, source_path, so_path=so_path, is_package=is_package) - - def add_lib(self, fullname, path, so_path, is_package): - self._libs[fullname] = (path, so_path, is_package) - - def knows(self, fullname): - return fullname in self._libs - -_lib_loader = LibLoader() - - -class PyxLoader(object): - def __init__(self, fullname, path, init_path=None, pyxbuild_dir=None, - inplace=False, language_level=None): - _debug("PyxLoader created for loading %s from %s (init path: %s)", - fullname, path, init_path) - self.fullname = fullname - self.path, self.init_path = path, init_path - 
self.pyxbuild_dir = pyxbuild_dir - self.inplace = inplace - self.language_level = language_level - - def load_module(self, fullname): - assert self.fullname == fullname, ( - "invalid module, expected %s, got %s" % ( - self.fullname, fullname)) - if self.init_path: - # package - #print "PACKAGE", fullname - module = load_module(fullname, self.init_path, - self.pyxbuild_dir, is_package=True, - build_inplace=self.inplace, - language_level=self.language_level) - module.__path__ = [self.path] - else: - #print "MODULE", fullname - module = load_module(fullname, self.path, - self.pyxbuild_dir, - build_inplace=self.inplace, - language_level=self.language_level) - return module - - -#install args -class PyxArgs(object): - build_dir=True - build_in_temp=True - setup_args={} #None - -##pyxargs=None - - -def _have_importers(): - has_py_importer = False - has_pyx_importer = False - for importer in sys.meta_path: - if isinstance(importer, PyxImporter): - if isinstance(importer, PyImporter): - has_py_importer = True - else: - has_pyx_importer = True - - return has_py_importer, has_pyx_importer - - -def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True, - setup_args=None, reload_support=False, - load_py_module_on_import_failure=False, inplace=False, - language_level=None): - """ Main entry point for pyxinstall. - - Call this to install the ``.pyx`` import hook in - your meta-path for a single Python process. If you want it to be - installed whenever you use Python, add it to your ``sitecustomize`` - (as described above). - - :param pyximport: If set to False, does not try to import ``.pyx`` files. - - :param pyimport: You can pass ``pyimport=True`` to also - install the ``.py`` import hook - in your meta-path. Note, however, that it is rather experimental, - will not work at all for some ``.py`` files and packages, and will - heavily slow down your imports due to search and compilation. - Use at your own risk. 
- - :param build_dir: By default, compiled modules will end up in a ``.pyxbld`` - directory in the user's home directory. Passing a different path - as ``build_dir`` will override this. - - :param build_in_temp: If ``False``, will produce the C files locally. Working - with complex dependencies and debugging becomes more easy. This - can principally interfere with existing files of the same name. - - :param setup_args: Dict of arguments for Distribution. - See ``distutils.core.setup()``. - - :param reload_support: Enables support for dynamic - ``reload(my_module)``, e.g. after a change in the Cython code. - Additional files ``.reloadNN`` may arise on that account, when - the previously loaded module file cannot be overwritten. - - :param load_py_module_on_import_failure: If the compilation of a ``.py`` - file succeeds, but the subsequent import fails for some reason, - retry the import with the normal ``.py`` module instead of the - compiled module. Note that this may lead to unpredictable results - for modules that change the system state during their import, as - the second import will rerun these modifications in whatever state - the system was left after the import of the compiled module - failed. - - :param inplace: Install the compiled module - (``.so`` for Linux and Mac / ``.pyd`` for Windows) - next to the source file. - - :param language_level: The source language level to use: 2 or 3. - The default is to use the language level of the current Python - runtime for .py files and Py2 for ``.pyx`` files. 
- """ - if setup_args is None: - setup_args = {} - if not build_dir: - build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld') - - global pyxargs - pyxargs = PyxArgs() #$pycheck_no - pyxargs.build_dir = build_dir - pyxargs.build_in_temp = build_in_temp - pyxargs.setup_args = (setup_args or {}).copy() - pyxargs.reload_support = reload_support - pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure - - has_py_importer, has_pyx_importer = _have_importers() - py_importer, pyx_importer = None, None - - if pyimport and not has_py_importer: - py_importer = PyImporter(pyxbuild_dir=build_dir, inplace=inplace, - language_level=language_level) - # make sure we import Cython before we install the import hook - import Cython.Compiler.Main, Cython.Compiler.Pipeline, Cython.Compiler.Optimize - sys.meta_path.insert(0, py_importer) - - if pyximport and not has_pyx_importer: - pyx_importer = PyxImporter(pyxbuild_dir=build_dir, inplace=inplace, - language_level=language_level) - sys.meta_path.append(pyx_importer) - - return py_importer, pyx_importer - - -def uninstall(py_importer, pyx_importer): - """ - Uninstall an import hook. 
- """ - try: - sys.meta_path.remove(py_importer) - except ValueError: - pass - - try: - sys.meta_path.remove(pyx_importer) - except ValueError: - pass - - -# MAIN - -def show_docs(): - import __main__ - __main__.__name__ = mod_name - for name in dir(__main__): - item = getattr(__main__, name) - try: - setattr(item, "__module__", mod_name) - except (AttributeError, TypeError): - pass - help(__main__) +if sys.version_info < (3, 5): + # _pyximport3 module requires at least Python 3.5 + from pyximport._pyximport2 import install, uninstall, show_docs +else: + from pyximport._pyximport3 import install, uninstall, show_docs if __name__ == '__main__': show_docs() -- cgit v1.2.1 From f80d32583c4f6db5050ce48408415e7a55acaf02 Mon Sep 17 00:00:00 2001 From: 0dminnimda <0dminnimda@gmail.com> Date: Mon, 4 Jul 2022 22:51:42 +0300 Subject: Reject invalid spellings of Ellipsis (GH-4868) --- Cython/Compiler/Lexicon.py | 3 +- Cython/Compiler/Parsing.py | 16 +++++----- Cython/Compiler/Tests/TestGrammar.py | 57 +++++++++++++++++++++++++++++++++++- tests/compile/fromimport.pyx | 24 +++++++++++++++ tests/compile/fromimport_star.pyx | 7 +++++ tests/errors/incomplete_varadic.pyx | 8 +++++ 6 files changed, 104 insertions(+), 11 deletions(-) create mode 100644 tests/errors/incomplete_varadic.pyx diff --git a/Cython/Compiler/Lexicon.py b/Cython/Compiler/Lexicon.py index 654febbe7..c3ca05b56 100644 --- a/Cython/Compiler/Lexicon.py +++ b/Cython/Compiler/Lexicon.py @@ -74,6 +74,7 @@ def make_lexicon(): bra = Any("([{") ket = Any(")]}") + ellipsis = Str("...") punct = Any(":,;+-*/|&<>=.%`~^?!@") diphthong = Str("==", "<>", "!=", "<=", ">=", "<<", ">>", "**", "//", "+=", "-=", "*=", "/=", "%=", "|=", "^=", "&=", @@ -89,7 +90,7 @@ def make_lexicon(): (intliteral, Method('strip_underscores', symbol='INT')), (fltconst, Method('strip_underscores', symbol='FLOAT')), (imagconst, Method('strip_underscores', symbol='IMAG')), - (punct | diphthong, TEXT), + (ellipsis | punct | diphthong, TEXT), (bra, 
Method('open_bracket_action')), (ket, Method('close_bracket_action')), diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index de1ca12b3..1a31e2697 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -653,9 +653,7 @@ def p_slice_element(s, follow_set): return None def expect_ellipsis(s): - s.expect('.') - s.expect('.') - s.expect('.') + s.expect('...') def make_slice_nodes(pos, subscripts): # Convert a list of subscripts as returned @@ -701,7 +699,7 @@ def p_atom(s): return p_dict_or_set_maker(s) elif sy == '`': return p_backquote_expr(s) - elif sy == '.': + elif sy == '...': expect_ellipsis(s) return ExprNodes.EllipsisNode(pos) elif sy == 'INT': @@ -1760,11 +1758,11 @@ def p_from_import_statement(s, first_statement = 0): # s.sy == 'from' pos = s.position() s.next() - if s.sy == '.': + if s.sy in ('.', '...'): # count relative import level level = 0 - while s.sy == '.': - level += 1 + while s.sy in ('.', '...'): + level += len(s.sy) s.next() else: level = None @@ -3035,7 +3033,7 @@ def p_exception_value_clause(s): return exc_val, exc_check c_arg_list_terminators = cython.declare(frozenset, frozenset(( - '*', '**', '.', ')', ':', '/'))) + '*', '**', '...', ')', ':', '/'))) def p_c_arg_list(s, ctx = Ctx(), in_pyfunc = 0, cmethod_flag = 0, nonempty_declarators = 0, kw_only = 0, annotated = 1): @@ -3054,7 +3052,7 @@ def p_c_arg_list(s, ctx = Ctx(), in_pyfunc = 0, cmethod_flag = 0, return args def p_optional_ellipsis(s): - if s.sy == '.': + if s.sy == '...': expect_ellipsis(s) return 1 else: diff --git a/Cython/Compiler/Tests/TestGrammar.py b/Cython/Compiler/Tests/TestGrammar.py index f80ec22d3..852b48c33 100644 --- a/Cython/Compiler/Tests/TestGrammar.py +++ b/Cython/Compiler/Tests/TestGrammar.py @@ -7,9 +7,12 @@ Uses TreeFragment to test invalid syntax. from __future__ import absolute_import +import ast +import textwrap + from ...TestUtils import CythonTest -from ..Errors import CompileError from .. 
import ExprNodes +from ..Errors import CompileError # Copied from CPython's test_grammar.py VALID_UNDERSCORE_LITERALS = [ @@ -103,6 +106,39 @@ INVALID_UNDERSCORE_LITERALS = [ ] +INVALID_ELLIPSIS = [ + (". . .", 2, 0), + (". ..", 2, 0), + (".. .", 2, 0), + (". ...", 2, 0), + (". ... .", 2, 0), + (".. ... .", 2, 0), + (". ... ..", 2, 0), + (""" + ( + . + .. + ) + """, 3, 4), + (""" + [ + .. + ., + None + ] + """, 3, 4), + (""" + { + None, + . + . + + . + } + """, 4, 4) +] + + class TestGrammar(CythonTest): def test_invalid_number_literals(self): @@ -142,6 +178,25 @@ class TestGrammar(CythonTest): else: assert isinstance(literal_node, ExprNodes.IntNode), (literal, literal_node) + def test_invalid_ellipsis(self): + ERR = ":{0}:{1}: Expected an identifier or literal" + for code, line, col in INVALID_ELLIPSIS: + try: + ast.parse(textwrap.dedent(code)) + except SyntaxError as exc: + assert True + else: + assert False, "Invalid Python code '%s' failed to raise an exception" % code + + try: + self.fragment(u'''\ + # cython: language_level=3 + ''' + code) + except CompileError as exc: + assert ERR.format(line, col) in str(exc), str(exc) + else: + assert False, "Invalid Cython code '%s' failed to raise an exception" % code + if __name__ == "__main__": import unittest diff --git a/tests/compile/fromimport.pyx b/tests/compile/fromimport.pyx index 46f7b5442..e84b26a97 100644 --- a/tests/compile/fromimport.pyx +++ b/tests/compile/fromimport.pyx @@ -6,10 +6,34 @@ def f(): from spam import eggs as ova from . import spam from ... import spam + from .. . import spam + from . .. import spam + from . . . import spam from .. import spam, foo + from . . import spam, foo from ... import spam, foobar + from .. . import spam, foobar + from . .. import spam, foobar + from . . . import spam, foobar from .spam import foo + from . spam import foo from ...spam import foo, bar + from .. . spam import foo, bar + from . .. spam import foo, bar + from . . . 
spam import foo, bar from ...spam.foo import bar + from ... spam.foo import bar + from .. . spam.foo import bar + from . .. spam.foo import bar + from . . . spam.foo import bar from ...spam.foo import foo, bar + from ... spam.foo import foo, bar + from .. . spam.foo import foo, bar + from . .. spam.foo import foo, bar + from . . . spam.foo import foo, bar from ...spam.foo import (foo, bar) + from ... spam.foo import (foo, bar) + from .. . spam.foo import (foo, bar) + from .. . spam.foo import (foo, bar) + from . .. spam.foo import (foo, bar) + from . . . spam.foo import (foo, bar) diff --git a/tests/compile/fromimport_star.pyx b/tests/compile/fromimport_star.pyx index 6c19476b7..80542dddb 100644 --- a/tests/compile/fromimport_star.pyx +++ b/tests/compile/fromimport_star.pyx @@ -2,5 +2,12 @@ from spam import * from ...spam.foo import * +from ... spam.foo import * +from .. . spam.foo import * +from . . . spam.foo import * +from . .. spam.foo import * from . import * from ... import * +from .. . import * +from . .. import * +from . . . import * diff --git a/tests/errors/incomplete_varadic.pyx b/tests/errors/incomplete_varadic.pyx new file mode 100644 index 000000000..1695a874d --- /dev/null +++ b/tests/errors/incomplete_varadic.pyx @@ -0,0 +1,8 @@ +# mode: error + +cdef error_time(bool its_fine, .): + pass + +_ERRORS = u""" +3: 31: Expected an identifier, found '.' +""" -- cgit v1.2.1 From 77a51ab6965a892512ecdd83cd54023e531c9e9c Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 4 Jul 2022 21:35:52 +0100 Subject: Avoid NULL dereference in __Pyx_KwValues_FASTCALL (GH-4872) Simpler follow up to https://github.com/cython/cython/pull/4726. I don't think we need to be worried null args and non-zero nargs, but null args and 0 nargs is quite common and valid I think. This PR just avoids a dereference in that case (which is probably dubious). 
--- Cython/Utility/FunctionArguments.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/FunctionArguments.c b/Cython/Utility/FunctionArguments.c index 1882f826f..8bdaee562 100644 --- a/Cython/Utility/FunctionArguments.c +++ b/Cython/Utility/FunctionArguments.c @@ -422,7 +422,7 @@ bad: #if CYTHON_METH_FASTCALL #define __Pyx_Arg_FASTCALL(args, i) args[i] #define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds) - #define __Pyx_KwValues_FASTCALL(args, nargs) (&args[nargs]) + #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) #else -- cgit v1.2.1 From 76b22ac750531919194c4334150dea4c51d67f44 Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 4 Jul 2022 21:37:55 +0100 Subject: Add note that embedding does not produce a portable application (GH-4863) Try to make it clear that using cython --embed doesn't embed any external dependencies. --- docs/src/tutorial/embedding.rst | 7 +++++++ docs/src/userguide/external_C_code.rst | 4 +++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/docs/src/tutorial/embedding.rst b/docs/src/tutorial/embedding.rst index 3f6325428..819506cde 100644 --- a/docs/src/tutorial/embedding.rst +++ b/docs/src/tutorial/embedding.rst @@ -75,3 +75,10 @@ option. Or use the script to embed multiple modules. See the `embedding demo program `_ for a complete example setup. + +Be aware that your application will not contain any external dependencies that +you use (including Python standard library modules) and so may not be truly portable. +If you want to generate a portable application we recommend using a specialized +tool (e.g. `PyInstaller `_ +or `cx_freeze `_) to find and +bundle these dependencies. 
diff --git a/docs/src/userguide/external_C_code.rst b/docs/src/userguide/external_C_code.rst index b080ecf0e..2e977243d 100644 --- a/docs/src/userguide/external_C_code.rst +++ b/docs/src/userguide/external_C_code.rst @@ -471,7 +471,9 @@ For example, in the following snippet that includes :file:`grail.h`: } This C code can then be built together with the Cython-generated C code -in a single program (or library). +in a single program (or library). Be aware that this program will not include +any external dependencies that your module uses. Therefore typically this will +not generate a truly portable application for most cases. In Python 3.x, calling the module init function directly should be avoided. Instead, use the `inittab mechanism `_ -- cgit v1.2.1 From 24f10066335332d9ff4680e6bf6f7550f67097da Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 4 Jul 2022 21:44:49 +0100 Subject: Don't add multiple "CoerceToBooleanNode" layers (GH-4847) --- Cython/Compiler/ExprNodes.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 2fb66e9da..fb2dedd56 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -1099,6 +1099,8 @@ class ExprNode(Node): type = self.type if type.is_enum or type.is_error: return self + elif type is PyrexTypes.c_bint_type: + return self elif type.is_pyobject or type.is_int or type.is_ptr or type.is_float: return CoerceToBooleanNode(self, env) elif type.is_cpp_class and type.scope and type.scope.lookup("operator bool"): -- cgit v1.2.1 From 22f4444a1722fe0fd3f9157f1db35ab1c02522a9 Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 6 Jul 2022 20:47:04 +0100 Subject: Add tests for NULL objects in memoryviews (GH-4871) Follow up on https://github.com/cython/cython/pull/4859 by adding tests for memoryviews too. Additional refactoring to avoid invalid decref calls on test failures. Instead, the item is safely cleared directly before the access. 
--- tests/buffers/bufaccess.pyx | 15 +++++---------- tests/memoryview/memslice.pyx | 45 ++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 49 insertions(+), 11 deletions(-) diff --git a/tests/buffers/bufaccess.pyx b/tests/buffers/bufaccess.pyx index 2a5e84185..053ea2890 100644 --- a/tests/buffers/bufaccess.pyx +++ b/tests/buffers/bufaccess.pyx @@ -10,7 +10,7 @@ from __future__ import unicode_literals from cpython.object cimport PyObject -from cpython.ref cimport Py_INCREF, Py_DECREF +from cpython.ref cimport Py_INCREF, Py_DECREF, Py_CLEAR cimport cython __test__ = {} @@ -1013,17 +1013,14 @@ def check_object_nulled_1d(MockBuffer[object, ndim=1] buf, int idx, obj): >>> rc1 = get_refcount(a) >>> A = ObjectMockBuffer(None, [a, a]) >>> check_object_nulled_1d(A, 0, a) - >>> decref(a) # new reference "added" to A >>> check_object_nulled_1d(A, 1, a) - >>> decref(a) >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,)) >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride - >>> decref(a) >>> get_refcount(a) == rc1 True """ - cdef void **data = buf.buffer - data[idx] = NULL + cdef PyObject **data = buf.buffer + Py_CLEAR(data[idx]) res = buf[idx] # takes None buf[idx] = obj return res @@ -1037,14 +1034,12 @@ def check_object_nulled_2d(MockBuffer[object, ndim=2] buf, int idx1, int idx2, o >>> rc1 = get_refcount(a) >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2)) >>> check_object_nulled_2d(A, 0, 0, a) - >>> decref(a) # new reference "added" to A >>> check_object_nulled_2d(A, 1, 1, a) - >>> decref(a) >>> get_refcount(a) == rc1 True """ - cdef void **data = buf.buffer - data[idx1 + 2*idx2] = NULL + cdef PyObject **data = buf.buffer + Py_CLEAR(data[idx1 + 2*idx2]) res = buf[idx1, idx2] # takes None buf[idx1, idx2] = obj return res diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx index 06bdf8673..24af61e17 100644 --- a/tests/memoryview/memslice.pyx +++ b/tests/memoryview/memslice.pyx @@ -5,7 +5,7 @@ from __future__ import 
unicode_literals from cpython.object cimport PyObject -from cpython.ref cimport Py_INCREF, Py_DECREF +from cpython.ref cimport Py_INCREF, Py_DECREF, Py_CLEAR cimport cython from cython cimport view @@ -1130,6 +1130,49 @@ def assign_temporary_to_object(object[:] buf): """ buf[1] = {3-2: 2+(2*4)-2} +@testcase +def check_object_nulled_1d(object[:] buf, int idx, obj): + """ + See comments on printbuf_object above. + + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a]) + >>> check_object_nulled_1d(A, 0, a) + >>> check_object_nulled_1d(A, 1, a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,)) + >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride + >>> get_refcount(a) == rc1 + True + """ + cdef ObjectMockBuffer omb = buf.base + cdef PyObject **data = (omb.buffer) + Py_CLEAR(data[idx]) + res = buf[idx] # takes None + buf[idx] = obj + return res + +@testcase +def check_object_nulled_2d(object[:, ::1] buf, int idx1, int idx2, obj): + """ + See comments on printbuf_object above. 
+ + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2)) + >>> check_object_nulled_2d(A, 0, 0, a) + >>> check_object_nulled_2d(A, 1, 1, a) + >>> get_refcount(a) == rc1 + True + """ + cdef ObjectMockBuffer omb = buf.base + cdef PyObject **data = (omb.buffer) + Py_CLEAR(data[idx1 + 2*idx2]) + res = buf[idx1, idx2] # takes None + buf[idx1, idx2] = obj + return res + + # # Test __cythonbufferdefaults__ # -- cgit v1.2.1 From 796fd06da1fa1d5481ce43a6b6c901bc87f0ce9a Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 6 Jul 2022 20:49:32 +0100 Subject: Fix mistake in docs error return values (GH-4876) Fixes https://github.com/cython/cython/issues/4875 --- docs/src/userguide/language_basics.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/userguide/language_basics.rst b/docs/src/userguide/language_basics.rst index 593542eae..7d056bdfb 100644 --- a/docs/src/userguide/language_basics.rst +++ b/docs/src/userguide/language_basics.rst @@ -652,7 +652,7 @@ through defined error return values. For functions that return a Python object ``NULL`` pointer, so any function returning a Python object has a well-defined error return value. -While this is always the case for C functions, functions +While this is always the case for Python functions, functions defined as C functions or ``cpdef``/``@ccall`` functions can return arbitrary C types, which do not have such a well-defined error return value. 
Thus, if an exception is detected in such a function, a warning message is printed, -- cgit v1.2.1 From a44bbd363029aa9ba16fefcb485c68162f8ab663 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Thu, 7 Jul 2022 14:42:55 +0200 Subject: Docs: migrate extension_types.rst to pure python (GH-4516) --- docs/examples/userguide/extension_types/cheesy.py | 36 ++ docs/examples/userguide/extension_types/cheesy.pyx | 36 ++ .../userguide/extension_types/dataclass.py | 21 + .../userguide/extension_types/dataclass.pyx | 1 + .../userguide/extension_types/dict_animal.py | 12 + .../userguide/extension_types/dict_animal.pyx | 1 + .../userguide/extension_types/extendable_animal.py | 15 + .../extension_types/extendable_animal.pyx | 3 +- .../userguide/extension_types/owned_pointer.py | 17 + .../userguide/extension_types/owned_pointer.pyx | 17 + docs/examples/userguide/extension_types/penguin.py | 14 + .../examples/userguide/extension_types/penguin.pyx | 14 + .../examples/userguide/extension_types/penguin2.py | 12 + .../userguide/extension_types/penguin2.pyx | 12 + docs/examples/userguide/extension_types/pets.py | 22 + docs/examples/userguide/extension_types/pets.pyx | 22 + .../userguide/extension_types/python_access.py | 7 + .../userguide/extension_types/python_access.pyx | 4 + .../userguide/extension_types/shrubbery.py | 2 - .../userguide/extension_types/shrubbery.pyx | 2 - .../userguide/extension_types/shrubbery_2.py | 10 + .../userguide/extension_types/shrubbery_2.pyx | 2 + .../userguide/extension_types/widen_shrubbery.py | 6 + .../userguide/extension_types/widen_shrubbery.pyx | 2 + .../userguide/extension_types/wrapper_class.py | 65 ++ .../userguide/extension_types/wrapper_class.pyx | 65 ++ docs/src/userguide/extension_types.rst | 671 ++++++++++++++------- 27 files changed, 861 insertions(+), 230 deletions(-) create mode 100644 docs/examples/userguide/extension_types/cheesy.py create mode 100644 docs/examples/userguide/extension_types/cheesy.pyx create mode 100644 
docs/examples/userguide/extension_types/dataclass.py create mode 100644 docs/examples/userguide/extension_types/dict_animal.py create mode 100644 docs/examples/userguide/extension_types/extendable_animal.py create mode 100644 docs/examples/userguide/extension_types/owned_pointer.py create mode 100644 docs/examples/userguide/extension_types/owned_pointer.pyx create mode 100644 docs/examples/userguide/extension_types/penguin.py create mode 100644 docs/examples/userguide/extension_types/penguin.pyx create mode 100644 docs/examples/userguide/extension_types/penguin2.py create mode 100644 docs/examples/userguide/extension_types/penguin2.pyx create mode 100644 docs/examples/userguide/extension_types/pets.py create mode 100644 docs/examples/userguide/extension_types/pets.pyx create mode 100644 docs/examples/userguide/extension_types/python_access.py create mode 100644 docs/examples/userguide/extension_types/shrubbery_2.py create mode 100644 docs/examples/userguide/extension_types/widen_shrubbery.py create mode 100644 docs/examples/userguide/extension_types/wrapper_class.py create mode 100644 docs/examples/userguide/extension_types/wrapper_class.pyx diff --git a/docs/examples/userguide/extension_types/cheesy.py b/docs/examples/userguide/extension_types/cheesy.py new file mode 100644 index 000000000..0995c3993 --- /dev/null +++ b/docs/examples/userguide/extension_types/cheesy.py @@ -0,0 +1,36 @@ +import cython + +@cython.cclass +class CheeseShop: + + cheeses: object + + def __cinit__(self): + self.cheeses = [] + + @property + def cheese(self): + return "We don't have: %s" % self.cheeses + + @cheese.setter + def cheese(self, value): + self.cheeses.append(value) + + @cheese.deleter + def cheese(self): + del self.cheeses[:] + +# Test input +from cheesy import CheeseShop + +shop = CheeseShop() +print(shop.cheese) + +shop.cheese = "camembert" +print(shop.cheese) + +shop.cheese = "cheddar" +print(shop.cheese) + +del shop.cheese +print(shop.cheese) diff --git 
a/docs/examples/userguide/extension_types/cheesy.pyx b/docs/examples/userguide/extension_types/cheesy.pyx new file mode 100644 index 000000000..2859d848f --- /dev/null +++ b/docs/examples/userguide/extension_types/cheesy.pyx @@ -0,0 +1,36 @@ + + + +cdef class CheeseShop: + + cdef object cheeses + + def __cinit__(self): + self.cheeses = [] + + @property + def cheese(self): + return "We don't have: %s" % self.cheeses + + @cheese.setter + def cheese(self, value): + self.cheeses.append(value) + + @cheese.deleter + def cheese(self): + del self.cheeses[:] + +# Test input +from cheesy import CheeseShop + +shop = CheeseShop() +print(shop.cheese) + +shop.cheese = "camembert" +print(shop.cheese) + +shop.cheese = "cheddar" +print(shop.cheese) + +del shop.cheese +print(shop.cheese) diff --git a/docs/examples/userguide/extension_types/dataclass.py b/docs/examples/userguide/extension_types/dataclass.py new file mode 100644 index 000000000..d8ed68666 --- /dev/null +++ b/docs/examples/userguide/extension_types/dataclass.py @@ -0,0 +1,21 @@ +import cython +try: + import typing + import dataclasses +except ImportError: + pass # The modules don't actually have to exists for Cython to use them as annotations + +@cython.dataclasses.dataclass +@cython.cclass +class MyDataclass: + # fields can be declared using annotations + a: cython.int = 0 + b: double = cython.dataclasses.field(default_factory = lambda: 10, repr=False) + + + c: str = 'hello' + + + # typing.InitVar and typing.ClassVar also work + d: dataclasses.InitVar[double] = 5 + e: typing.ClassVar[list] = [] diff --git a/docs/examples/userguide/extension_types/dataclass.pyx b/docs/examples/userguide/extension_types/dataclass.pyx index 0529890ba..56666537d 100644 --- a/docs/examples/userguide/extension_types/dataclass.pyx +++ b/docs/examples/userguide/extension_types/dataclass.pyx @@ -5,6 +5,7 @@ try: except ImportError: pass # The modules don't actually have to exists for Cython to use them as annotations + 
@cython.dataclasses.dataclass cdef class MyDataclass: # fields can be declared using annotations diff --git a/docs/examples/userguide/extension_types/dict_animal.py b/docs/examples/userguide/extension_types/dict_animal.py new file mode 100644 index 000000000..a36dd3f89 --- /dev/null +++ b/docs/examples/userguide/extension_types/dict_animal.py @@ -0,0 +1,12 @@ +@cython.cclass +class Animal: + + number_of_legs: cython.int + __dict__: dict + + def __cinit__(self, number_of_legs: cython.int): + self.number_of_legs = number_of_legs + + +dog = Animal(4) +dog.has_tail = True diff --git a/docs/examples/userguide/extension_types/dict_animal.pyx b/docs/examples/userguide/extension_types/dict_animal.pyx index 1aa0ccc11..ec8cf6f9a 100644 --- a/docs/examples/userguide/extension_types/dict_animal.pyx +++ b/docs/examples/userguide/extension_types/dict_animal.pyx @@ -1,3 +1,4 @@ + cdef class Animal: cdef int number_of_legs diff --git a/docs/examples/userguide/extension_types/extendable_animal.py b/docs/examples/userguide/extension_types/extendable_animal.py new file mode 100644 index 000000000..2eef69460 --- /dev/null +++ b/docs/examples/userguide/extension_types/extendable_animal.py @@ -0,0 +1,15 @@ +@cython.cclass +class Animal: + + number_of_legs: cython.int + + def __cinit__(self, number_of_legs: cython.int): + self.number_of_legs = number_of_legs + + +class ExtendableAnimal(Animal): # Note that we use class, not cdef class + pass + + +dog = ExtendableAnimal(4) +dog.has_tail = True diff --git a/docs/examples/userguide/extension_types/extendable_animal.pyx b/docs/examples/userguide/extension_types/extendable_animal.pyx index 701a93148..417760efd 100644 --- a/docs/examples/userguide/extension_types/extendable_animal.pyx +++ b/docs/examples/userguide/extension_types/extendable_animal.pyx @@ -1,3 +1,4 @@ + cdef class Animal: cdef int number_of_legs @@ -11,4 +12,4 @@ class ExtendableAnimal(Animal): # Note that we use class, not cdef class dog = ExtendableAnimal(4) -dog.has_tail = 
True \ No newline at end of file +dog.has_tail = True diff --git a/docs/examples/userguide/extension_types/owned_pointer.py b/docs/examples/userguide/extension_types/owned_pointer.py new file mode 100644 index 000000000..1c235a883 --- /dev/null +++ b/docs/examples/userguide/extension_types/owned_pointer.py @@ -0,0 +1,17 @@ +import cython +from cython.cimports.libc.stdlib import free + +@cython.cclass +class OwnedPointer: + ptr: cython.pointer(cython.void) + + def __dealloc__(self): + if self.ptr is not cython.NULL: + free(self.ptr) + + @staticmethod + @cython.cfunc + def create(ptr: cython.pointer(cython.void)): + p = OwnedPointer() + p.ptr = ptr + return p diff --git a/docs/examples/userguide/extension_types/owned_pointer.pyx b/docs/examples/userguide/extension_types/owned_pointer.pyx new file mode 100644 index 000000000..98b61d91c --- /dev/null +++ b/docs/examples/userguide/extension_types/owned_pointer.pyx @@ -0,0 +1,17 @@ + +from libc.stdlib cimport free + + +cdef class OwnedPointer: + cdef void* ptr + + def __dealloc__(self): + if self.ptr is not NULL: + free(self.ptr) + + + @staticmethod + cdef create(void* ptr): + p = OwnedPointer() + p.ptr = ptr + return p diff --git a/docs/examples/userguide/extension_types/penguin.py b/docs/examples/userguide/extension_types/penguin.py new file mode 100644 index 000000000..6db8eba16 --- /dev/null +++ b/docs/examples/userguide/extension_types/penguin.py @@ -0,0 +1,14 @@ +import cython + +@cython.cclass +class Penguin: + food: object + + def __cinit__(self, food): + self.food = food + + def __init__(self, food): + print("eating!") + +normal_penguin = Penguin('fish') +fast_penguin = Penguin.__new__(Penguin, 'wheat') # note: not calling __init__() ! 
diff --git a/docs/examples/userguide/extension_types/penguin.pyx b/docs/examples/userguide/extension_types/penguin.pyx new file mode 100644 index 000000000..b890c9ffd --- /dev/null +++ b/docs/examples/userguide/extension_types/penguin.pyx @@ -0,0 +1,14 @@ + + + +cdef class Penguin: + cdef object food + + def __cinit__(self, food): + self.food = food + + def __init__(self, food): + print("eating!") + +normal_penguin = Penguin('fish') +fast_penguin = Penguin.__new__(Penguin, 'wheat') # note: not calling __init__() ! diff --git a/docs/examples/userguide/extension_types/penguin2.py b/docs/examples/userguide/extension_types/penguin2.py new file mode 100644 index 000000000..063563d16 --- /dev/null +++ b/docs/examples/userguide/extension_types/penguin2.py @@ -0,0 +1,12 @@ +import cython + +@cython.freelist(8) +@cython.cclass +class Penguin: + food: object + def __cinit__(self, food): + self.food = food + +penguin = Penguin('fish 1') +penguin = None +penguin = Penguin('fish 2') # does not need to allocate memory! diff --git a/docs/examples/userguide/extension_types/penguin2.pyx b/docs/examples/userguide/extension_types/penguin2.pyx new file mode 100644 index 000000000..726aeef8e --- /dev/null +++ b/docs/examples/userguide/extension_types/penguin2.pyx @@ -0,0 +1,12 @@ +cimport cython + + +@cython.freelist(8) +cdef class Penguin: + cdef object food + def __cinit__(self, food): + self.food = food + +penguin = Penguin('fish 1') +penguin = None +penguin = Penguin('fish 2') # does not need to allocate memory! 
diff --git a/docs/examples/userguide/extension_types/pets.py b/docs/examples/userguide/extension_types/pets.py new file mode 100644 index 000000000..fc6497cb0 --- /dev/null +++ b/docs/examples/userguide/extension_types/pets.py @@ -0,0 +1,22 @@ +import cython + +@cython.cclass +class Parrot: + + @cython.cfunc + def describe(self) -> cython.void: + print("This parrot is resting.") + +@cython.cclass +class Norwegian(Parrot): + + @cython.cfunc + def describe(self) -> cython.void: + Parrot.describe(self) + print("Lovely plumage!") + +cython.declare(p1=Parrot, p2=Parrot) +p1 = Parrot() +p2 = Norwegian() +print("p2:") +p2.describe() diff --git a/docs/examples/userguide/extension_types/pets.pyx b/docs/examples/userguide/extension_types/pets.pyx new file mode 100644 index 000000000..bb06e059d --- /dev/null +++ b/docs/examples/userguide/extension_types/pets.pyx @@ -0,0 +1,22 @@ + + +cdef class Parrot: + + + + cdef void describe(self): + print("This parrot is resting.") + + +cdef class Norwegian(Parrot): + + + cdef void describe(self): + Parrot.describe(self) + print("Lovely plumage!") + +cdef Parrot p1, p2 +p1 = Parrot() +p2 = Norwegian() +print("p2:") +p2.describe() diff --git a/docs/examples/userguide/extension_types/python_access.py b/docs/examples/userguide/extension_types/python_access.py new file mode 100644 index 000000000..27478f50c --- /dev/null +++ b/docs/examples/userguide/extension_types/python_access.py @@ -0,0 +1,7 @@ +import cython + +@cython.cclass +class Shrubbery: + width = cython.declare(cython.int, visibility='public') + height = cython.declare(cython.int, visibility='public') + depth = cython.declare(cython.float, visibility='readonly') diff --git a/docs/examples/userguide/extension_types/python_access.pyx b/docs/examples/userguide/extension_types/python_access.pyx index 6d5225ec0..db11de63c 100644 --- a/docs/examples/userguide/extension_types/python_access.pyx +++ b/docs/examples/userguide/extension_types/python_access.pyx @@ -1,3 +1,7 @@ + + + cdef 
class Shrubbery: cdef public int width, height + cdef readonly float depth diff --git a/docs/examples/userguide/extension_types/shrubbery.py b/docs/examples/userguide/extension_types/shrubbery.py index 075664527..0e624a1d2 100644 --- a/docs/examples/userguide/extension_types/shrubbery.py +++ b/docs/examples/userguide/extension_types/shrubbery.py @@ -1,5 +1,3 @@ -from __future__ import print_function - @cython.cclass class Shrubbery: width: cython.int diff --git a/docs/examples/userguide/extension_types/shrubbery.pyx b/docs/examples/userguide/extension_types/shrubbery.pyx index b74dfbd1b..8c4e58776 100644 --- a/docs/examples/userguide/extension_types/shrubbery.pyx +++ b/docs/examples/userguide/extension_types/shrubbery.pyx @@ -1,6 +1,4 @@ from __future__ import print_function - - cdef class Shrubbery: cdef int width cdef int height diff --git a/docs/examples/userguide/extension_types/shrubbery_2.py b/docs/examples/userguide/extension_types/shrubbery_2.py new file mode 100644 index 000000000..d6b722500 --- /dev/null +++ b/docs/examples/userguide/extension_types/shrubbery_2.py @@ -0,0 +1,10 @@ +import cython +from cython.cimports.my_module import Shrubbery + +@cython.cfunc +def another_shrubbery(sh1: Shrubbery) -> Shrubbery: + sh2: Shrubbery + sh2 = Shrubbery() + sh2.width = sh1.width + sh2.height = sh1.height + return sh2 diff --git a/docs/examples/userguide/extension_types/shrubbery_2.pyx b/docs/examples/userguide/extension_types/shrubbery_2.pyx index d05d28243..4a7782735 100644 --- a/docs/examples/userguide/extension_types/shrubbery_2.pyx +++ b/docs/examples/userguide/extension_types/shrubbery_2.pyx @@ -1,5 +1,7 @@ + from my_module cimport Shrubbery + cdef Shrubbery another_shrubbery(Shrubbery sh1): cdef Shrubbery sh2 sh2 = Shrubbery() diff --git a/docs/examples/userguide/extension_types/widen_shrubbery.py b/docs/examples/userguide/extension_types/widen_shrubbery.py new file mode 100644 index 000000000..f69f4dc96 --- /dev/null +++ 
b/docs/examples/userguide/extension_types/widen_shrubbery.py @@ -0,0 +1,6 @@ +import cython +from cython.cimports.my_module import Shrubbery + +@cython.cfunc +def widen_shrubbery(sh: Shrubbery, extra_width): + sh.width = sh.width + extra_width diff --git a/docs/examples/userguide/extension_types/widen_shrubbery.pyx b/docs/examples/userguide/extension_types/widen_shrubbery.pyx index a312fbfd9..c6f58f00c 100644 --- a/docs/examples/userguide/extension_types/widen_shrubbery.pyx +++ b/docs/examples/userguide/extension_types/widen_shrubbery.pyx @@ -1,4 +1,6 @@ + from my_module cimport Shrubbery + cdef widen_shrubbery(Shrubbery sh, extra_width): sh.width = sh.width + extra_width diff --git a/docs/examples/userguide/extension_types/wrapper_class.py b/docs/examples/userguide/extension_types/wrapper_class.py new file mode 100644 index 000000000..b625ffebd --- /dev/null +++ b/docs/examples/userguide/extension_types/wrapper_class.py @@ -0,0 +1,65 @@ +import cython +from cython.cimports.libc.stdlib import malloc, free + +# Example C struct +my_c_struct = cython.struct( + a = cython.int, + b = cython.int, +) + +@cython.cclass +class WrapperClass: + """A wrapper class for a C/C++ data structure""" + _ptr: cython.pointer(my_c_struct) + ptr_owner: cython.bint + + def __cinit__(self): + self.ptr_owner = False + + def __dealloc__(self): + # De-allocate if not null and flag is set + if self._ptr is not cython.NULL and self.ptr_owner is True: + free(self._ptr) + self._ptr = cython.NULL + + def __init__(self): + # Prevent accidental instantiation from normal Python code + # since we cannot pass a struct pointer into a Python constructor. 
+ raise TypeError("This class cannot be instantiated directly.") + + # Extension class properties + @property + def a(self): + return self._ptr.a if self._ptr is not cython.NULL else None + + @property + def b(self): + return self._ptr.b if self._ptr is not cython.NULL else None + + @staticmethod + @cython.cfunc + def from_ptr(_ptr: cython.pointer(my_c_struct), owner: cython.bint=False) -> WrapperClass: + """Factory function to create WrapperClass objects from + given my_c_struct pointer. + + Setting ``owner`` flag to ``True`` causes + the extension type to ``free`` the structure pointed to by ``_ptr`` + when the wrapper object is deallocated.""" + # Fast call to __new__() that bypasses the __init__() constructor. + wrapper: WrapperClass = WrapperClass.__new__(WrapperClass) + wrapper._ptr = _ptr + wrapper.ptr_owner = owner + return wrapper + + @staticmethod + @cython.cfunc + def new_struct() -> WrapperClass: + """Factory function to create WrapperClass objects with + newly allocated my_c_struct""" + _ptr: cython.pointer(my_c_struct) = cython.cast( + cython.pointer(my_c_struct), malloc(cython.sizeof(my_c_struct))) + if _ptr is cython.NULL: + raise MemoryError + _ptr.a = 0 + _ptr.b = 0 + return WrapperClass.from_ptr(_ptr, owner=True) diff --git a/docs/examples/userguide/extension_types/wrapper_class.pyx b/docs/examples/userguide/extension_types/wrapper_class.pyx new file mode 100644 index 000000000..e2a0c3ff2 --- /dev/null +++ b/docs/examples/userguide/extension_types/wrapper_class.pyx @@ -0,0 +1,65 @@ + +from libc.stdlib cimport malloc, free + +# Example C struct +ctypedef struct my_c_struct: + int a + int b + + + +cdef class WrapperClass: + """A wrapper class for a C/C++ data structure""" + cdef my_c_struct *_ptr + cdef bint ptr_owner + + def __cinit__(self): + self.ptr_owner = False + + def __dealloc__(self): + # De-allocate if not null and flag is set + if self._ptr is not NULL and self.ptr_owner is True: + free(self._ptr) + self._ptr = NULL + + def 
__init__(self): + # Prevent accidental instantiation from normal Python code + # since we cannot pass a struct pointer into a Python constructor. + raise TypeError("This class cannot be instantiated directly.") + + # Extension class properties + @property + def a(self): + return self._ptr.a if self._ptr is not NULL else None + + @property + def b(self): + return self._ptr.b if self._ptr is not NULL else None + + + @staticmethod + cdef WrapperClass from_ptr(my_c_struct *_ptr, bint owner=False): + """Factory function to create WrapperClass objects from + given my_c_struct pointer. + + Setting ``owner`` flag to ``True`` causes + the extension type to ``free`` the structure pointed to by ``_ptr`` + when the wrapper object is deallocated.""" + # Fast call to __new__() that bypasses the __init__() constructor. + cdef WrapperClass wrapper = WrapperClass.__new__(WrapperClass) + wrapper._ptr = _ptr + wrapper.ptr_owner = owner + return wrapper + + + @staticmethod + cdef WrapperClass new_struct(): + """Factory function to create WrapperClass objects with + newly allocated my_c_struct""" + cdef my_c_struct *_ptr = malloc(sizeof(my_c_struct)) + + if _ptr is NULL: + raise MemoryError + _ptr.a = 0 + _ptr.b = 0 + return WrapperClass.from_ptr(_ptr, owner=True) diff --git a/docs/src/userguide/extension_types.rst b/docs/src/userguide/extension_types.rst index d058df6c2..b2690dc49 100644 --- a/docs/src/userguide/extension_types.rst +++ b/docs/src/userguide/extension_types.rst @@ -9,20 +9,56 @@ Extension Types Introduction ============== +.. include:: + ../two-syntax-variants-used + As well as creating normal user-defined classes with the Python class statement, Cython also lets you create new built-in Python types, known as :term:`extension types`. You define an extension type using the :keyword:`cdef` class -statement. Here's an example: +statement or decorating the class with the ``@cclass`` decorator. Here's an example: + +.. tabs:: + + .. group-tab:: Pure Python + + .. 
literalinclude:: ../../examples/userguide/extension_types/shrubbery.py + + .. group-tab:: Cython -.. literalinclude:: ../../examples/userguide/extension_types/shrubbery.pyx + .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.pyx As you can see, a Cython extension type definition looks a lot like a Python -class definition. Within it, you use the def statement to define methods that +class definition. Within it, you use the :keyword:`def` statement to define methods that can be called from Python code. You can even define many of the special methods such as :meth:`__init__` as you would in Python. -The main difference is that you can use the :keyword:`cdef` statement to define -attributes. The attributes may be Python objects (either generic or of a +The main difference is that you can define attributes using + +* the :keyword:`cdef` statement, +* the :func:`cython.declare()` function or +* the annotation of an attribute name. + +.. tabs:: + + .. group-tab:: Pure Python + + .. code-block:: python + + @cython.cclass + class Shrubbery: + width = declare(cython.int) + height: cython.int + + .. group-tab:: Cython + + .. code-block:: cython + + cdef class Shrubbery: + + cdef int width + cdef int height + +The attributes may be Python objects (either generic or of a particular extension type), or they may be of any C data type. So you can use extension types to wrap arbitrary C data structures and provide a Python-like interface to them. @@ -50,7 +86,15 @@ not Python access, which means that they are not accessible from Python code. To make them accessible from Python code, you need to declare them as :keyword:`public` or :keyword:`readonly`. For example: -.. literalinclude:: ../../examples/userguide/extension_types/python_access.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/python_access.py + + .. group-tab:: Cython + + .. 
literalinclude:: ../../examples/userguide/extension_types/python_access.pyx makes the width and height attributes readable and writable from Python code, and the depth attribute readable but not writable. @@ -74,15 +118,32 @@ Dynamic Attributes It is not possible to add attributes to an extension type at runtime by default. You have two ways of avoiding this limitation, both add an overhead when -a method is called from Python code. Especially when calling ``cpdef`` methods. +a method is called from Python code. Especially when calling hybrid methods declared +with :keyword:`cpdef` in .pyx files or with the ``@ccall`` decorator. + +The first approach is to create a Python subclass: -The first approach is to create a Python subclass.: +.. tabs:: -.. literalinclude:: ../../examples/userguide/extension_types/extendable_animal.pyx + .. group-tab:: Pure Python -Declaring a ``__dict__`` attribute is the second way of enabling dynamic attributes.: + .. literalinclude:: ../../examples/userguide/extension_types/extendable_animal.py -.. literalinclude:: ../../examples/userguide/extension_types/dict_animal.pyx + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/extension_types/extendable_animal.pyx + +Declaring a ``__dict__`` attribute is the second way of enabling dynamic attributes: + +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/dict_animal.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/extension_types/dict_animal.pyx Type declarations =================== @@ -93,10 +154,24 @@ generic Python object. It knows this already in the case of the ``self`` parameter of the methods of that type, but in other cases you will have to use a type declaration. -For example, in the following function:: +For example, in the following function: - cdef widen_shrubbery(sh, extra_width): # BAD - sh.width = sh.width + extra_width +.. tabs:: + + .. group-tab:: Pure Python + + .. 
code-block:: python + + @cython.cfunc + def widen_shrubbery(sh, extra_width): # BAD + sh.width = sh.width + extra_width + + .. group-tab:: Cython + + .. code-block:: cython + + cdef widen_shrubbery(sh, extra_width): # BAD + sh.width = sh.width + extra_width because the ``sh`` parameter hasn't been given a type, the width attribute will be accessed by a Python attribute lookup. If the attribute has been @@ -107,18 +182,35 @@ will be very inefficient. If the attribute is private, it will not work at all The solution is to declare ``sh`` as being of type :class:`Shrubbery`, as follows: -.. literalinclude:: ../../examples/userguide/extension_types/widen_shrubbery.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/widen_shrubbery.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/extension_types/widen_shrubbery.pyx Now the Cython compiler knows that ``sh`` has a C attribute called :attr:`width` and will generate code to access it directly and efficiently. The same consideration applies to local variables, for example: -.. literalinclude:: ../../examples/userguide/extension_types/shrubbery_2.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/shrubbery_2.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/extension_types/shrubbery_2.pyx .. note:: - We here ``cimport`` the class :class:`Shrubbery`, and this is necessary - to declare the type at compile time. To be able to ``cimport`` an extension type, + Here, we *cimport* the class :class:`Shrubbery` (using the :keyword:`cimport` statement + or importing from special ``cython.cimports`` package), and this is necessary + to declare the type at compile time. To be able to cimport an extension type, we split the class definition into two parts, one in a definition file and the other in the corresponding implementation file. 
You should read :ref:`sharing_extension_types` to learn to do that. @@ -128,24 +220,61 @@ Type Testing and Casting ------------------------ Suppose I have a method :meth:`quest` which returns an object of type :class:`Shrubbery`. -To access it's width I could write:: +To access its width I could write: + +.. tabs:: + + .. group-tab:: Pure Python + + .. code-block:: python - cdef Shrubbery sh = quest() - print(sh.width) + sh: Shrubbery = quest() + print(sh.width) + + .. group-tab:: Cython + + .. code-block:: cython + + cdef Shrubbery sh = quest() + print(sh.width) which requires the use of a local variable and performs a type test on assignment. If you *know* the return value of :meth:`quest` will be of type :class:`Shrubbery` -you can use a cast to write:: +you can use a cast to write: + +.. tabs:: + + .. group-tab:: Pure Python - print( (quest()).width ) + .. code-block:: python + + print( cython.cast(Shrubbery, quest()).width ) + + .. group-tab:: Cython + + .. code-block:: cython + + print( (quest()).width ) This may be dangerous if :meth:`quest()` is not actually a :class:`Shrubbery`, as it will try to access width as a C struct member which may not exist. At the C level, rather than raising an :class:`AttributeError`, either an nonsensical result will be returned (interpreting whatever data is at that address as an int) or a segfault -may result from trying to access invalid memory. Instead, one can write:: +may result from trying to access invalid memory. Instead, one can write: + +.. tabs:: + + .. group-tab:: Pure Python + + .. code-block:: python + + print( cython.cast(Shrubbery, quest(), typecheck=True).width ) + + .. group-tab:: Cython + + .. code-block:: cython - print( (quest()).width ) + print( (quest()).width ) which performs a type check (possibly raising a :class:`TypeError`) before making the cast and allowing the code to proceed. 
@@ -155,14 +284,18 @@ For known builtin or extension types, Cython translates these into a fast and safe type check that ignores changes to the object's ``__class__`` attribute etc., so that after a successful :meth:`isinstance` test, code can rely on the expected C structure of the -extension type and its :keyword:`cdef` attributes and methods. +extension type and its C-level attributes (stored in the object’s C struct) and +:keyword:`cdef`/``@cfunc`` methods. .. _extension_types_and_none: Extension types and None ========================= -When you declare a parameter or C variable as being of an extension type, +Cython handles ``None`` values differently in C-like type declarations and when Python annotations are used. + +In :keyword:`cdef` declarations and C-like function argument declarations (``func(list x)``), +when you declare an argument or C variable as having an extension or Python builtin type, Cython will allow it to take on the value ``None`` as well as values of its declared type. This is analogous to the way a C pointer can take on the value ``NULL``, and you need to exercise the same caution because of it. There is no @@ -172,24 +305,24 @@ of an extension type (as in the widen_shrubbery function above), it's up to you to make sure the reference you're using is not ``None`` -- in the interests of efficiency, Cython does not check this. -You need to be particularly careful when exposing Python functions which take -extension types as arguments. If we wanted to make :func:`widen_shrubbery` a -Python function, for example, if we simply wrote:: +With the C-like declaration syntax, you need to be particularly careful when +exposing Python functions which take extension types as arguments:: def widen_shrubbery(Shrubbery sh, extra_width): # This is sh.width = sh.width + extra_width # dangerous! -then users of our module could crash it by passing ``None`` for the ``sh`` +The users of our module could crash it by passing ``None`` for the ``sh`` parameter. 
-One way to fix this would be:: +As in Python, whenever it is unclear whether a variable can be ``None``, +but the code requires a non-None value, an explicit check can help:: def widen_shrubbery(Shrubbery sh, extra_width): if sh is None: raise TypeError sh.width = sh.width + extra_width -but since this is anticipated to be such a frequent requirement, Cython +but since this is anticipated to be such a frequent requirement, Cython language provides a more convenient way. Parameters of a Python function declared as an extension type can have a ``not None`` clause:: @@ -199,18 +332,41 @@ extension type can have a ``not None`` clause:: Now the function will automatically check that ``sh`` is ``not None`` along with checking that it has the right type. +When annotations are used, the behaviour follows the Python typing semantics of +`PEP-484 <https://www.python.org/dev/peps/pep-0484/>`_ instead. +The value ``None`` is not allowed when a variable is annotated only with its plain type:: + + def widen_shrubbery(sh: Shrubbery, extra_width): # TypeError is raised + sh.width = sh.width + extra_width # when sh is None + +To also allow ``None``, ``typing.Optional[ ]`` must be used explicitly. +For function arguments, this is also automatically allowed when they have a +default argument of ``None``, e.g. ``func(x: list = None)`` does not require ``typing.Optional``:: + + import typing + def widen_shrubbery(sh: typing.Optional[Shrubbery], extra_width): + if sh is None: + # We want to raise a custom exception in case of a None value. + raise ValueError + sh.width = sh.width + extra_width + +The upside of using annotations here is that they are safe by default because +you need to explicitly allow ``None`` values for them. + + .. note:: - ``not None`` clause can only be used in Python functions (defined with - :keyword:`def`) and not C functions (defined with :keyword:`cdef`). 
If - you need to check whether a parameter to a C function is None, you will + The ``not None`` and ``typing.Optional`` can only be used in Python functions (defined with + :keyword:`def` and without ``@cython.cfunc`` decorator) and not C functions + (defined with :keyword:`cdef` or decorated using ``@cython.cfunc``). If + you need to check whether a parameter to a C function is ``None``, you will need to do it yourself. .. note:: Some more things: - * The self parameter of a method of an extension type is guaranteed never to + * The ``self`` parameter of a method of an extension type is guaranteed never to be ``None``. * When comparing a value with ``None``, keep in mind that, if ``x`` is a Python object, ``x is None`` and ``x is not None`` are very efficient because they @@ -232,23 +388,49 @@ extension types. Properties ============ -You can declare properties in an extension class using the same syntax as in ordinary Python code:: +You can declare properties in an extension class using the same syntax as in ordinary Python code: - cdef class Spam: +.. tabs:: - @property - def cheese(self): - # This is called when the property is read. - ... + .. group-tab:: Pure Python - @cheese.setter - def cheese(self, value): - # This is called when the property is written. - ... + .. code-block:: python + + @cython.cclass + class Spam: + @property + def cheese(self): + # This is called when the property is read. + ... + + @cheese.setter + def cheese(self, value): + # This is called when the property is written. + ... + + @cheese.deleter + def cheese(self): + # This is called when the property is deleted. + + .. group-tab:: Cython + + .. code-block:: cython + + cdef class Spam: - @cheese.deleter - def cheese(self): - # This is called when the property is deleted. + @property + def cheese(self): + # This is called when the property is read. + ... + + @cheese.setter + def cheese(self, value): + # This is called when the property is written. + ... 
+ + @cheese.deleter + def cheese(self): + # This is called when the property is deleted. There is also a special (deprecated) legacy syntax for defining properties in an extension class:: @@ -277,42 +459,17 @@ corresponding operation is attempted. Here's a complete example. It defines a property which adds to a list each time it is written to, returns the list when it is read, and empties the list -when it is deleted.:: - - # cheesy.pyx - cdef class CheeseShop: - - cdef object cheeses - - def __cinit__(self): - self.cheeses = [] +when it is deleted: - @property - def cheese(self): - return "We don't have: %s" % self.cheeses +.. tabs:: - @cheese.setter - def cheese(self, value): - self.cheeses.append(value) + .. group-tab:: Pure Python - @cheese.deleter - def cheese(self): - del self.cheeses[:] + .. literalinclude:: ../../examples/userguide/extension_types/cheesy.py - # Test input - from cheesy import CheeseShop + .. group-tab:: Cython - shop = CheeseShop() - print(shop.cheese) - - shop.cheese = "camembert" - print(shop.cheese) - - shop.cheese = "cheddar" - print(shop.cheese) - - del shop.cheese - print(shop.cheese) + .. literalinclude:: ../../examples/userguide/extension_types/cheesy.pyx .. code-block:: text @@ -328,20 +485,39 @@ Subclassing ============= If an extension type inherits from other types, the first base class must be -a built-in type or another extension type:: +a built-in type or another extension type: - cdef class Parrot: - ... +.. tabs:: + + .. group-tab:: Pure Python + + .. code-block:: python + + @cython.cclass + class Parrot: + ... + + @cython.cclass + class Norwegian(Parrot): + ... + + .. group-tab:: Cython + + .. code-block:: cython + + cdef class Parrot: + ... - cdef class Norwegian(Parrot): - ... + + cdef class Norwegian(Parrot): + ... A complete definition of the base type must be available to Cython, so if the base type is a built-in type, it must have been previously declared as an extern extension type. 
If the base type is defined in another Cython module, it must either be declared as an extern extension type or imported using the -:keyword:`cimport` statement. +:keyword:`cimport` statement or importing from the special ``cython.cimports`` package. Multiple inheritance is supported, however the second and subsequent base classes must be an ordinary Python class (not an extension type or a built-in @@ -354,13 +530,30 @@ must be compatible). There is a way to prevent extension types from being subtyped in Python. This is done via the ``final`` directive, -usually set on an extension type using a decorator:: +usually set on an extension type using a decorator: + +.. tabs:: + + .. group-tab:: Pure Python + + .. code-block:: python - cimport cython + import cython - @cython.final - cdef class Parrot: - def done(self): pass + @cython.final + @cython.cclass + class Parrot: + def done(self): pass + + .. group-tab:: Cython + + .. code-block:: cython + + cimport cython + + @cython.final + cdef class Parrot: + def done(self): pass Trying to create a Python subclass from this type will raise a :class:`TypeError` at runtime. Cython will also prevent subtyping a @@ -375,32 +568,25 @@ C methods ========= Extension types can have C methods as well as Python methods. Like C -functions, C methods are declared using :keyword:`cdef` or :keyword:`cpdef` instead of -:keyword:`def`. C methods are "virtual", and may be overridden in derived -extension types. In addition, :keyword:`cpdef` methods can even be overridden by python -methods when called as C method. This adds a little to their calling overhead -compared to a :keyword:`cdef` method:: +functions, C methods are declared using - # pets.pyx - cdef class Parrot: +* :keyword:`cdef` instead of :keyword:`def` or ``@cfunc`` decorator for *C methods*, or +* :keyword:`cpdef` instead of :keyword:`def` or ``@ccall`` decorator for *hybrid methods*. 
- cdef void describe(self): - print("This parrot is resting.") +C methods are "virtual", and may be overridden in derived +extension types. In addition, :keyword:`cpdef`/``@ccall`` methods can even be overridden by Python +methods when called as C method. This adds a little to their calling overhead +compared to a :keyword:`cdef`/``@cfunc`` method: + +.. tabs:: - cdef class Norwegian(Parrot): + .. group-tab:: Pure Python - cdef void describe(self): - Parrot.describe(self) - print("Lovely plumage!") + .. literalinclude:: ../../examples/userguide/extension_types/pets.py + .. group-tab:: Cython - cdef Parrot p1, p2 - p1 = Parrot() - p2 = Norwegian() - print("p1:") - p1.describe() - print("p2:") - p2.describe() + .. literalinclude:: ../../examples/userguide/extension_types/pets.pyx .. code-block:: text @@ -416,22 +602,23 @@ method using the usual Python technique, i.e.:: Parrot.describe(self) -`cdef` methods can be declared static by using the @staticmethod decorator. +:keyword:`cdef`/``@ccall`` methods can be declared static by using the ``@staticmethod`` decorator. This can be especially useful for constructing classes that take non-Python -compatible types.:: +compatible types: + +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.py - cdef class OwnedPointer: - cdef void* ptr + .. group-tab:: Cython - def __dealloc__(self): - if self.ptr is not NULL: - free(self.ptr) + .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.pyx - @staticmethod - cdef create(void* ptr): - p = OwnedPointer() - p.ptr = ptr - return p +.. note:: + + Cython currently does not support decorating :keyword:`cdef`/``@ccall`` methods with ``@classmethod`` decorator. .. _forward_declaring_extension_types: @@ -460,19 +647,17 @@ Fast instantiation Cython provides two ways to speed up the instantiation of extension types. 
The first one is a direct call to the ``__new__()`` special static method, as known from Python. For an extension type ``Penguin``, you could use -the following code:: +the following code: + +.. tabs:: - cdef class Penguin: - cdef object food + .. group-tab:: Pure Python - def __cinit__(self, food): - self.food = food + .. literalinclude:: ../../examples/userguide/extension_types/penguin.py - def __init__(self, food): - print("eating!") + .. group-tab:: Cython - normal_penguin = Penguin('fish') - fast_penguin = Penguin.__new__(Penguin, 'wheat') # note: not calling __init__() ! + .. literalinclude:: ../../examples/userguide/extension_types/penguin.pyx Note that the path through ``__new__()`` will *not* call the type's ``__init__()`` method (again, as known from Python). Thus, in the example @@ -486,19 +671,17 @@ the differences. The second performance improvement applies to types that are often created and deleted in a row, so that they can benefit from a freelist. Cython provides the decorator ``@cython.freelist(N)`` for this, which creates a -statically sized freelist of ``N`` instances for a given type. Example:: +statically sized freelist of ``N`` instances for a given type. Example: + +.. tabs:: + + .. group-tab:: Pure Python - cimport cython + .. literalinclude:: ../../examples/userguide/extension_types/penguin2.py - @cython.freelist(8) - cdef class Penguin: - cdef object food - def __cinit__(self, food): - self.food = food + .. group-tab:: Cython - penguin = Penguin('fish 1') - penguin = None - penguin = Penguin('fish 2') # does not need to allocate memory! + .. literalinclude:: ../../examples/userguide/extension_types/penguin2.pyx .. _existing-pointers-instantiation: @@ -509,63 +692,17 @@ It is quite common to want to instantiate an extension class from an existing (pointer to a) data structure, often as returned by external C/C++ functions. 
As extension classes can only accept Python objects as arguments in their -constructors, this necessitates the use of factory functions. For example, :: - - from libc.stdlib cimport malloc, free - - # Example C struct - ctypedef struct my_c_struct: - int a - int b - - - cdef class WrapperClass: - """A wrapper class for a C/C++ data structure""" - cdef my_c_struct *_ptr - cdef bint ptr_owner - - def __cinit__(self): - self.ptr_owner = False - - def __dealloc__(self): - # De-allocate if not null and flag is set - if self._ptr is not NULL and self.ptr_owner is True: - free(self._ptr) - self._ptr = NULL - - # Extension class properties - @property - def a(self): - return self._ptr.a if self._ptr is not NULL else None - - @property - def b(self): - return self._ptr.b if self._ptr is not NULL else None - - @staticmethod - cdef WrapperClass from_ptr(my_c_struct *_ptr, bint owner=False): - """Factory function to create WrapperClass objects from - given my_c_struct pointer. - - Setting ``owner`` flag to ``True`` causes - the extension type to ``free`` the structure pointed to by ``_ptr`` - when the wrapper object is deallocated.""" - # Call to __new__ bypasses __init__ constructor - cdef WrapperClass wrapper = WrapperClass.__new__(WrapperClass) - wrapper._ptr = _ptr - wrapper.ptr_owner = owner - return wrapper - - @staticmethod - cdef WrapperClass new_struct(): - """Factory function to create WrapperClass objects with - newly allocated my_c_struct""" - cdef my_c_struct *_ptr = malloc(sizeof(my_c_struct)) - if _ptr is NULL: - raise MemoryError - _ptr.a = 0 - _ptr.b = 0 - return WrapperClass.from_ptr(_ptr, owner=True) +constructors, this necessitates the use of factory functions or factory methods. For example: + +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/wrapper_class.py + + .. group-tab:: Cython + + .. 
literalinclude:: ../../examples/userguide/extension_types/wrapper_class.pyx To then create a ``WrapperClass`` object from an existing ``my_c_struct`` @@ -607,13 +744,30 @@ Making extension types weak-referenceable By default, extension types do not support having weak references made to them. You can enable weak referencing by declaring a C attribute of type -object called :attr:`__weakref__`. For example,:: +object called :attr:`__weakref__`. For example: + +.. tabs:: + + .. group-tab:: Pure Python + + .. code-block:: python + + @cython.cclass + class ExplodingAnimal: + """This animal will self-destruct when it is + no longer strongly referenced.""" + + __weakref__: object - cdef class ExplodingAnimal: - """This animal will self-destruct when it is - no longer strongly referenced.""" + .. group-tab:: Cython - cdef object __weakref__ + .. code-block:: cython + + cdef class ExplodingAnimal: + """This animal will self-destruct when it is + no longer strongly referenced.""" + + cdef object __weakref__ Controlling deallocation and garbage collection in CPython @@ -691,12 +845,28 @@ CPython invented a mechanism for this called the *trashcan*. It limits the recursion depth of deallocations by delaying some deallocations. By default, Cython extension types do not use the trashcan but it can be -enabled by setting the ``trashcan`` directive to ``True``. For example:: +enabled by setting the ``trashcan`` directive to ``True``. For example: + +.. tabs:: + + .. group-tab:: Pure Python + + .. code-block:: python - cimport cython - @cython.trashcan(True) - cdef class Object: - cdef dict __dict__ + import cython + @cython.trashcan(True) + @cython.cclass + class Object: + __dict__: dict + + .. group-tab:: Cython + + .. code-block:: cython + + cimport cython + @cython.trashcan(True) + cdef class Object: + cdef dict __dict__ Trashcan usage is inherited by subclasses (unless explicitly disabled by ``@cython.trashcan(False)``). 
@@ -720,15 +890,34 @@ have triggered a call to ``tp_clear`` to clear the object In that case, any object references have vanished when ``__dealloc__`` is called. Now your cleanup code lost access to the objects it has to clean up. To fix this, you can disable clearing instances of a specific class by using -the ``no_gc_clear`` directive:: +the ``no_gc_clear`` directive: + +.. tabs:: + + .. group-tab:: Pure Python - @cython.no_gc_clear - cdef class DBCursor: - cdef DBConnection conn - cdef DBAPI_Cursor *raw_cursor - # ... - def __dealloc__(self): - DBAPI_close_cursor(self.conn.raw_conn, self.raw_cursor) + .. code-block:: python + + @cython.no_gc_clear + @cython.cclass + class DBCursor: + conn: DBConnection + raw_cursor: cython.pointer(DBAPI_Cursor) + # ... + def __dealloc__(self): + DBAPI_close_cursor(self.conn.raw_conn, self.raw_cursor) + + .. group-tab:: Cython + + .. code-block:: cython + + @cython.no_gc_clear + cdef class DBCursor: + cdef DBConnection conn + cdef DBAPI_Cursor *raw_cursor + # ... + def __dealloc__(self): + DBAPI_close_cursor(self.conn.raw_conn, self.raw_cursor) This example tries to close a cursor via a database connection when the Python object is destroyed. The ``DBConnection`` object is kept alive by the reference @@ -748,12 +937,29 @@ but the compiler won't be able to prove this. This would be the case if the class can never reference itself, even indirectly. In that case, you can manually disable cycle collection by using the ``no_gc`` directive, but beware that doing so when in fact the extension type -can participate in cycles could cause memory leaks :: +can participate in cycles could cause memory leaks: + +.. tabs:: + + .. group-tab:: Pure Python + + .. code-block:: python + + @cython.no_gc + @cython.cclass + class UserInfo: + name: str + addresses: tuple + + .. group-tab:: Cython + + .. 
code-block:: cython - @cython.no_gc - cdef class UserInfo: - cdef str name - cdef tuple addresses + @cython.no_gc + cdef class UserInfo: + + cdef str name + cdef tuple addresses If you can be sure addresses will contain only references to strings, the above would be safe, and it may yield a significant speedup, depending on @@ -786,6 +992,13 @@ declaration makes an extension type defined in external C code available to a Cython module. A public extension type declaration makes an extension type defined in a Cython module available to external C code. +.. note:: + + Cython currently does not support Extension types declared as extern or public + in Pure Python mode. This is not considered an issue since public/extern extension + types are most commonly declared in `.pxd` files and not in `.py` files. + + .. _external_extension_types: External extension types @@ -802,7 +1015,7 @@ objects defined in the Python core or in a non-Cython extension module. :ref:`sharing-declarations`. Here is an example which will let you get at the C-level members of the -built-in complex object.:: +built-in complex object:: from __future__ import print_function @@ -1073,7 +1286,15 @@ can only be applied to extension types (types marked ``cdef`` or created with th ``cython.cclass`` decorator) and not to regular classes. If you need to define special properties on a field then use ``cython.dataclasses.field`` -.. literalinclude:: ../../examples/userguide/extension_types/dataclass.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/dataclass.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/extension_types/dataclass.pyx You may use C-level types such as structs, pointers, or C++ classes. 
However, you may find these types are not compatible with the auto-generated -- cgit v1.2.1 From 31d40c8c62acef9509675155fe5b5bb8e48dba5a Mon Sep 17 00:00:00 2001 From: scoder Date: Mon, 11 Jul 2022 07:45:17 +0200 Subject: Fix annotation type analysis for Python "typing" types (GH-4606) * Check for "Optional[ctype]" earlier because we need to make sure that "Optional[int]" etc. interprets "int" as (valid) Python int type and not (invalid) C int type. See https://github.com/cython/cython/issues/3883 * Fix typing assumptions in PEP 526 variable annotations test: in a Python type annotation, "int" means Python int and "float" means Python float, not the C types. * Use a context manager to make it explicit in annotation type analysis when C types are allowed, and when Python types are required or expected. * Generalise the concept of equivalent Python and C types for more efficient type inference: PyFloat/double, PyBool/bint, PyComplex/double complex. * Refactor analyse_type_annotation() to prepare the extraction of type modifiers (as opposed to special types). See discussion in https://github.com/cython/cython/pull/4606#issuecomment-1026658869 * Refactor handling of "typing.Optional", "dataclasses.InitVar" etc. annotations to move them into the declared Entry during type analysis and keep only the bare type in the type system. * Force ClassVar[...] types to be object types. * Add a warning when users define a ClassVar[] with a non-Python type. See https://github.com/cython/cython/pull/4606#discussion_r805170982 * Provide a helpful warning when users write plain C types in a non-C annotation context. * Only consider Python object item types from list/tuple as self.type in IndexNode since that will be the result of the index access. Coercion needs to happen externally, then based on the type inference. * Ignore Python annotation type "long" since it almost certainly does not refer to PyLong but to C long. Issue a warning to make users aware of it. 
* Fix PEP-526 test by working around incomplete type inference, but leave FIXME comments. --- Cython/Compiler/Builtin.py | 17 ++- Cython/Compiler/Dataclass.py | 13 +- Cython/Compiler/ExprNodes.py | 141 +++++++++++++-------- Cython/Compiler/Nodes.py | 131 +++++++++++++------ Cython/Compiler/PyrexTypes.py | 63 +++------ Cython/Compiler/Symtab.py | 91 +++++++++---- .../userguide/extension_types/dataclass.pyx | 2 +- tests/errors/dataclass_e1.pyx | 2 +- tests/errors/dataclass_e5.pyx | 21 +++ tests/errors/e_typing_errors.pyx | 59 +++++++++ tests/errors/e_typing_optional.py | 33 +++-- tests/run/annotation_typing.pyx | 62 +++++---- tests/run/cdef_class_dataclass.pyx | 16 +-- tests/run/cdef_setitem_T284.pyx | 4 +- tests/run/delete.pyx | 18 +++ tests/run/pep526_variable_annotations.py | 49 ++++--- tests/run/pep526_variable_annotations_cy.pyx | 4 +- tests/run/pure_cdef_class_dataclass.py | 4 +- 18 files changed, 477 insertions(+), 253 deletions(-) create mode 100644 tests/errors/dataclass_e5.pyx create mode 100644 tests/errors/e_typing_errors.pyx diff --git a/Cython/Compiler/Builtin.py b/Cython/Compiler/Builtin.py index 577c20775..46a4dbb5b 100644 --- a/Cython/Compiler/Builtin.py +++ b/Cython/Compiler/Builtin.py @@ -444,6 +444,16 @@ def init_builtins(): bool_type = builtin_scope.lookup('bool').type complex_type = builtin_scope.lookup('complex').type + # Set up type inference links between equivalent Python/C types + bool_type.equivalent_type = PyrexTypes.c_bint_type + PyrexTypes.c_bint_type.equivalent_type = bool_type + + float_type.equivalent_type = PyrexTypes.c_double_type + PyrexTypes.c_double_type.equivalent_type = float_type + + complex_type.equivalent_type = PyrexTypes.c_double_complex_type + PyrexTypes.c_double_complex_type.equivalent_type = complex_type + init_builtins() @@ -466,21 +476,20 @@ def get_known_standard_library_module_scope(module_name): ('Set', set_type), ('FrozenSet', frozenset_type), ]: - name = EncodedString(name) if name == "Tuple": indexed_type = 
PyrexTypes.PythonTupleTypeConstructor(EncodedString("typing."+name), tp) else: indexed_type = PyrexTypes.PythonTypeConstructor(EncodedString("typing."+name), tp) - entry = mod.declare_type(name, indexed_type, pos = None) + mod.declare_type(EncodedString(name), indexed_type, pos = None) for name in ['ClassVar', 'Optional']: indexed_type = PyrexTypes.SpecialPythonTypeConstructor(EncodedString("typing."+name)) - entry = mod.declare_type(name, indexed_type, pos = None) + mod.declare_type(name, indexed_type, pos = None) _known_module_scopes[module_name] = mod elif module_name == "dataclasses": mod = ModuleScope(module_name, None, None) indexed_type = PyrexTypes.SpecialPythonTypeConstructor(EncodedString("dataclasses.InitVar")) - entry = mod.declare_type(EncodedString("InitVar"), indexed_type, pos = None) + mod.declare_type(EncodedString("InitVar"), indexed_type, pos = None) _known_module_scopes[module_name] = mod return mod diff --git a/Cython/Compiler/Dataclass.py b/Cython/Compiler/Dataclass.py index 48c1888d6..0d0bb4768 100644 --- a/Cython/Compiler/Dataclass.py +++ b/Cython/Compiler/Dataclass.py @@ -154,12 +154,10 @@ def process_class_get_fields(node): for entry in var_entries: name = entry.name - is_initvar = (entry.type.python_type_constructor_name == "dataclasses.InitVar") + is_initvar = entry.declared_with_pytyping_modifier("dataclasses.InitVar") # TODO - classvars aren't included in "var_entries" so are missed here # and thus this code is never triggered - is_classvar = (entry.type.python_type_constructor_name == "typing.ClassVar") - if is_initvar or is_classvar: - entry.type = entry.type.resolve() # no longer need the special type + is_classvar = entry.declared_with_pytyping_modifier("typing.ClassVar") if name in default_value_assignments: assignment = default_value_assignments[name] if (isinstance(assignment, ExprNodes.CallNode) @@ -666,8 +664,11 @@ def _set_up_dataclass_fields(node, fields, dataclass_module): name) # create an entry in the global scope for 
this variable to live field_node = ExprNodes.NameNode(field_default.pos, name=EncodedString(module_field_name)) - field_node.entry = global_scope.declare_var(field_node.name, type=field_default.type or PyrexTypes.unspecified_type, - pos=field_default.pos, cname=field_node.name, is_cdef=1) + field_node.entry = global_scope.declare_var( + field_node.name, type=field_default.type or PyrexTypes.unspecified_type, + pos=field_default.pos, cname=field_node.name, is_cdef=True, + # TODO: do we need to set 'pytyping_modifiers' here? + ) # replace the field so that future users just receive the namenode setattr(field, attrname, field_node) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index fb2dedd56..4c325891a 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -1528,14 +1528,18 @@ class FloatNode(ConstNode): def _analyse_name_as_type(name, pos, env): - type = PyrexTypes.parse_basic_type(name) - if type is not None: - return type + ctype = PyrexTypes.parse_basic_type(name) + if ctype is not None and env.in_c_type_context: + return ctype global_entry = env.global_scope().lookup(name) - if global_entry and global_entry.is_type and global_entry.type: - return global_entry.type + if global_entry and global_entry.is_type: + type = global_entry.type + if type and (type.is_pyobject or env.in_c_type_context): + return type + ctype = ctype or type + # This is fairly heavy, so it's worth trying some easier things above. 
from .TreeFragment import TreeFragment with local_errors(ignore=True): pos = (pos[0], pos[1], pos[2]-7) @@ -1548,8 +1552,11 @@ def _analyse_name_as_type(name, pos, env): if isinstance(sizeof_node, SizeofTypeNode): sizeof_node = sizeof_node.analyse_types(env) if isinstance(sizeof_node, SizeofTypeNode): - return sizeof_node.arg_type - return None + type = sizeof_node.arg_type + if type and (type.is_pyobject or env.in_c_type_context): + return type + ctype = ctype or type + return ctype class BytesNode(ConstNode): @@ -2023,6 +2030,8 @@ class NameNode(AtomicExprNode): # annotations never create global cdef names if env.is_module_scope: return + + modifiers = () if ( # name: "description" => not a type, but still a declared variable or attribute annotation.expr.is_string_literal @@ -2034,10 +2043,11 @@ class NameNode(AtomicExprNode): # For Python class scopes every attribute is a Python object atype = py_object_type else: - _, atype = annotation.analyse_type_annotation(env) + modifiers, atype = annotation.analyse_type_annotation(env) + if atype is None: atype = unspecified_type if as_target and env.directives['infer_types'] != False else py_object_type - if atype.is_fused and env.fused_to_specific: + elif atype.is_fused and env.fused_to_specific: try: atype = atype.specialize(env.fused_to_specific) except CannotSpecialize: @@ -2045,6 +2055,7 @@ class NameNode(AtomicExprNode): "'%s' cannot be specialized since its type is not a fused argument to this function" % self.name) atype = error_type + visibility = 'private' if 'dataclasses.dataclass' in env.directives: # handle "frozen" directive - full inspection of the dataclass directives happens @@ -2058,12 +2069,17 @@ class NameNode(AtomicExprNode): if atype.is_pyobject or atype.can_coerce_to_pyobject(env): visibility = 'readonly' if is_frozen else 'public' # If the object can't be coerced that's fine - we just don't create a property + if as_target and env.is_c_class_scope and not (atype.is_pyobject or atype.is_error): # 
TODO: this will need revising slightly if annotated cdef attributes are implemented atype = py_object_type warning(annotation.pos, "Annotation ignored since class-level attributes must be Python objects. " "Were you trying to set up an instance attribute?", 2) - entry = self.entry = env.declare_var(name, atype, self.pos, is_cdef=not as_target, visibility=visibility) + + entry = self.entry = env.declare_var( + name, atype, self.pos, is_cdef=not as_target, visibility=visibility, + pytyping_modifiers=modifiers) + # Even if the entry already exists, make sure we're supplying an annotation if we can. if annotation and not entry.annotation: entry.annotation = annotation @@ -2083,23 +2099,38 @@ class NameNode(AtomicExprNode): return None def analyse_as_type(self, env): + type = None if self.cython_attribute: type = PyrexTypes.parse_basic_type(self.cython_attribute) - else: + elif env.in_c_type_context: type = PyrexTypes.parse_basic_type(self.name) if type: return type + entry = self.entry if not entry: entry = env.lookup(self.name) - if entry and entry.is_type: - return entry.type - elif entry and entry.known_standard_library_import: + if entry and not entry.is_type and entry.known_standard_library_import: entry = Builtin.get_known_standard_library_entry(entry.known_standard_library_import) - if entry and entry.is_type: - return entry.type - else: - return None + if entry and entry.is_type: + # Infer equivalent C types instead of Python types when possible. + type = entry.type + if not env.in_c_type_context and type is Builtin.long_type: + # Try to give a helpful warning when users write plain C type names. + warning(self.pos, "Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?") + type = py_object_type + elif type.is_pyobject and type.equivalent_type: + type = type.equivalent_type + return type + if self.name == 'object': + # This is normally parsed as "simple C type", but not if we don't parse C types. 
+ return py_object_type + + # Try to give a helpful warning when users write plain C type names. + if not env.in_c_type_context and PyrexTypes.parse_basic_type(self.name): + warning(self.pos, "Found C type '%s' in a Python annotation. Did you mean to use a Python type?" % self.name) + + return None def analyse_as_extension_type(self, env): # Try to interpret this as a reference to an extension type. @@ -3700,6 +3731,18 @@ class IndexNode(_IndexingBaseNode): error(self.pos, "Array size must be a compile time constant") return None + def analyse_pytyping_modifiers(self, env): + # Check for declaration modifiers, e.g. "typing.Optional[...]" or "dataclasses.InitVar[...]" + # TODO: somehow bring this together with TemplatedTypeNode.analyse_pytyping_modifiers() + modifiers = [] + modifier_node = self + while modifier_node.is_subscript: + modifier_type = modifier_node.base.analyse_as_type(env) + if modifier_type.python_type_constructor_name and modifier_type.modifier_name: + modifiers.append(modifier_type.modifier_name) + modifier_node = modifier_node.index + return modifiers + def type_dependencies(self, env): return self.base.type_dependencies(env) + self.index.type_dependencies(env) @@ -3930,12 +3973,16 @@ class IndexNode(_IndexingBaseNode): if base_type in (list_type, tuple_type) and self.index.type.is_int: item_type = infer_sequence_item_type( env, self.base, self.index, seq_type=base_type) - if item_type is None: - item_type = py_object_type - self.type = item_type if base_type in (list_type, tuple_type, dict_type): # do the None check explicitly (not in a helper) to allow optimising it away self.base = self.base.as_none_safe_node("'NoneType' object is not subscriptable") + if item_type is None or not item_type.is_pyobject: + # Even if we inferred a C type as result, we will read a Python object, so trigger coercion if needed. 
+ # We could potentially use "item_type.equivalent_type" here, but that may trigger assumptions + # about the actual runtime item types, rather than just their ability to coerce to the C "item_type". + self.type = py_object_type + else: + self.type = item_type self.wrap_in_nonecheck_node(env, getting) return self @@ -4231,6 +4278,7 @@ class IndexNode(_IndexingBaseNode): return utility_code = None + error_value = None if self.type.is_pyobject: error_value = 'NULL' if self.index.type.is_int: @@ -4266,8 +4314,8 @@ class IndexNode(_IndexingBaseNode): error_value = '-1' utility_code = UtilityCode.load_cached("GetItemIntByteArray", "StringTools.c") elif not (self.base.type.is_cpp_class and self.exception_check): - assert False, "unexpected type %s and base type %s for indexing" % ( - self.type, self.base.type) + assert False, "unexpected type %s and base type %s for indexing (%s)" % ( + self.type, self.base.type, self.pos) if utility_code is not None: code.globalstate.use_utility_code(utility_code) @@ -14021,10 +14069,8 @@ class AnnotationNode(ExprNode): def analyse_type_annotation(self, env, assigned_value=None): if self.untyped: # Already applied as a fused type, not re-evaluating it here. - return None, None + return [], None annotation = self.expr - base_type = None - is_ambiguous = False explicit_pytype = explicit_ctype = False if annotation.is_dict_literal: warning(annotation.pos, @@ -14041,36 +14087,29 @@ class AnnotationNode(ExprNode): annotation = value if explicit_pytype and explicit_ctype: warning(annotation.pos, "Duplicate type declarations found in signature annotation", level=1) - arg_type = annotation.analyse_as_type(env) - if annotation.is_name and not annotation.cython_attribute and annotation.name in ('int', 'long', 'float'): - # Map builtin numeric Python types to C types in safe cases. 
- if assigned_value is not None and arg_type is not None and not arg_type.is_pyobject: - assigned_type = assigned_value.infer_type(env) - if assigned_type and assigned_type.is_pyobject: - # C type seems unsafe, e.g. due to 'None' default value => ignore annotation type - is_ambiguous = True - arg_type = None - # ignore 'int' and require 'cython.int' to avoid unsafe integer declarations - if arg_type in (PyrexTypes.c_long_type, PyrexTypes.c_int_type, PyrexTypes.c_float_type): - arg_type = PyrexTypes.c_double_type if annotation.name == 'float' else py_object_type - elif arg_type is not None and annotation.is_string_literal: + + with env.new_c_type_context(in_c_type_context=explicit_ctype): + arg_type = annotation.analyse_as_type(env) + + if arg_type is None: + warning(annotation.pos, "Unknown type declaration in annotation, ignoring") + return [], arg_type + + if annotation.is_string_literal: warning(annotation.pos, "Strings should no longer be used for type declarations. Use 'cython.int' etc. directly.", level=1) - elif arg_type is not None and arg_type.is_complex: + if explicit_pytype and not explicit_ctype and not (arg_type.is_pyobject or arg_type.equivalent_type): + warning(annotation.pos, + "Python type declaration in signature annotation does not refer to a Python type") + if arg_type.is_complex: # creating utility code needs to be special-cased for complex types arg_type.create_declaration_utility_code(env) - if arg_type is not None: - if explicit_pytype and not explicit_ctype and not arg_type.is_pyobject: - warning(annotation.pos, - "Python type declaration in signature annotation does not refer to a Python type") - base_type = Nodes.CAnalysedBaseTypeNode( - annotation.pos, type=arg_type, is_arg=True) - elif is_ambiguous: - warning(annotation.pos, "Ambiguous types in annotation, ignoring") - else: - warning(annotation.pos, "Unknown type declaration in annotation, ignoring") - return base_type, arg_type + + # Check for declaration modifiers, e.g. 
"typing.Optional[...]" or "dataclasses.InitVar[...]" + modifiers = annotation.analyse_pytyping_modifiers(env) if annotation.is_subscript else [] + + return modifiers, arg_type class AssignmentExpressionNode(ExprNode): diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 927e47763..15c82f571 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -158,6 +158,7 @@ class Node(object): is_terminator = 0 is_wrapper = False # is a DefNode wrapper for a C function is_cproperty = False + is_templated_type_node = False temps = None # All descendants should set child_attrs to a list of the attributes @@ -966,27 +967,34 @@ class CArgDeclNode(Node): annotation = self.annotation if not annotation: return None - base_type, arg_type = annotation.analyse_type_annotation(env, assigned_value=self.default) - if base_type is not None: - self.base_type = base_type - - if arg_type and arg_type.python_type_constructor_name == "typing.Optional": - # "x: Optional[...]" => explicitly allow 'None' - arg_type = arg_type.resolve() - if arg_type and not arg_type.is_pyobject: - error(annotation.pos, "Only Python type arguments can use typing.Optional[...]") - else: - self.or_none = True - elif arg_type is py_object_type: - # exclude ": object" from the None check - None is a generic object. - self.or_none = True - elif arg_type and arg_type.is_pyobject and self.default and self.default.is_none: - # "x: ... = None" => implicitly allow 'None', but warn about it. 
- if not self.or_none: - warning(self.pos, "PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.") + + modifiers, arg_type = annotation.analyse_type_annotation(env, assigned_value=self.default) + if arg_type is not None: + self.base_type = CAnalysedBaseTypeNode( + annotation.pos, type=arg_type, is_arg=True) + + if arg_type: + if "typing.Optional" in modifiers: + # "x: Optional[...]" => explicitly allow 'None' + arg_type = arg_type.resolve() + if arg_type and not arg_type.is_pyobject: + # We probably already reported this as "cannot be applied to non-Python type". + # error(annotation.pos, "Only Python type arguments can use typing.Optional[...]") + pass + else: + self.or_none = True + elif arg_type is py_object_type: + # exclude ": object" from the None check - None is a generic object. self.or_none = True - elif arg_type and arg_type.is_pyobject and not self.or_none: - self.not_none = True + elif self.default and self.default.is_none and (arg_type.is_pyobject or arg_type.equivalent_type): + # "x: ... = None" => implicitly allow 'None' + if not arg_type.is_pyobject: + arg_type = arg_type.equivalent_type + if not self.or_none: + warning(self.pos, "PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.") + self.or_none = True + elif arg_type.is_pyobject and not self.or_none: + self.not_none = True return arg_type @@ -1076,9 +1084,9 @@ class CSimpleBaseTypeNode(CBaseTypeNode): else: type = py_object_type else: + scope = env if self.module_path: # Maybe it's a nested C++ class. - scope = env for item in self.module_path: entry = scope.lookup(item) if entry is not None and ( @@ -1099,8 +1107,6 @@ class CSimpleBaseTypeNode(CBaseTypeNode): if scope is None: # Maybe it's a cimport. 
scope = env.find_imported_module(self.module_path, self.pos) - else: - scope = env if scope: if scope.is_c_class_scope: @@ -1139,10 +1145,9 @@ class CSimpleBaseTypeNode(CBaseTypeNode): type = PyrexTypes.c_double_complex_type type.create_declaration_utility_code(env) self.complex = True - if type: - return type - else: - return PyrexTypes.error_type + if not type: + type = PyrexTypes.error_type + return type class MemoryViewSliceTypeNode(CBaseTypeNode): @@ -1211,10 +1216,40 @@ class TemplatedTypeNode(CBaseTypeNode): child_attrs = ["base_type_node", "positional_args", "keyword_args", "dtype_node"] + is_templated_type_node = True dtype_node = None - name = None + def _analyse_template_types(self, env, base_type): + require_python_types = base_type.python_type_constructor_name in ( + 'typing.Optional', + 'dataclasses.ClassVar', + ) + in_c_type_context = env.in_c_type_context and not require_python_types + + template_types = [] + for template_node in self.positional_args: + # CBaseTypeNode -> allow C type declarations in a 'cdef' context again + with env.new_c_type_context(in_c_type_context or isinstance(template_node, CBaseTypeNode)): + ttype = template_node.analyse_as_type(env) + if ttype is None: + if base_type.is_cpp_class: + error(template_node.pos, "unknown type in template argument") + ttype = error_type + # For Python generics we can be a bit more flexible and allow None. + elif require_python_types and not ttype.is_pyobject: + if ttype.equivalent_type and not template_node.as_cython_attribute(): + ttype = ttype.equivalent_type + else: + error(template_node.pos, "%s[...] 
cannot be applied to non-Python type %s" % ( + base_type.python_type_constructor_name, + ttype, + )) + ttype = error_type + template_types.append(ttype) + + return template_types + def analyse(self, env, could_be_name=False, base_type=None): if base_type is None: base_type = self.base_type_node.analyse(env) @@ -1222,21 +1257,15 @@ class TemplatedTypeNode(CBaseTypeNode): if ((base_type.is_cpp_class and base_type.is_template_type()) or base_type.python_type_constructor_name): - # Templated class + # Templated class, Python generics, etc. if self.keyword_args and self.keyword_args.key_value_pairs: tp = "c++ templates" if base_type.is_cpp_class else "indexed types" error(self.pos, "%s cannot take keyword arguments" % tp) self.type = PyrexTypes.error_type - else: - template_types = [] - for template_node in self.positional_args: - type = template_node.analyse_as_type(env) - if type is None and base_type.is_cpp_class: - error(template_node.pos, "unknown type in template argument") - type = error_type - # for indexed_pytype we can be a bit more flexible and pass None - template_types.append(type) - self.type = base_type.specialize_here(self.pos, env, template_types) + return self.type + + template_types = self._analyse_template_types(env, base_type) + self.type = base_type.specialize_here(self.pos, env, template_types) elif base_type.is_pyobject: # Buffer @@ -1277,7 +1306,7 @@ class TemplatedTypeNode(CBaseTypeNode): dimension=dimension) self.type = self.array_declarator.analyse(base_type, env)[1] - if self.type.is_fused and env.fused_to_specific: + if self.type and self.type.is_fused and env.fused_to_specific: try: self.type = self.type.specialize(env.fused_to_specific) except CannotSpecialize: @@ -1287,6 +1316,19 @@ class TemplatedTypeNode(CBaseTypeNode): return self.type + def analyse_pytyping_modifiers(self, env): + # Check for declaration modifiers, e.g. 
"typing.Optional[...]" or "dataclasses.InitVar[...]" + # TODO: somehow bring this together with IndexNode.analyse_pytyping_modifiers() + modifiers = [] + modifier_node = self + while modifier_node.is_templated_type_node and modifier_node.base_type_node and len(modifier_node.positional_args) == 1: + modifier_type = self.base_type_node.analyse_as_type(env) + if modifier_type.python_type_constructor_name and modifier_type.modifier_name: + modifiers.append(modifier_type.modifier_name) + modifier_node = modifier_node.positional_args[0] + + return modifiers + class CComplexBaseTypeNode(CBaseTypeNode): # base_type CBaseTypeNode @@ -1414,6 +1456,11 @@ class CVarDefNode(StatNode): base_type = self.base_type.analyse(env) + # Check for declaration modifiers, e.g. "typing.Optional[...]" or "dataclasses.InitVar[...]" + modifiers = None + if self.base_type.is_templated_type_node: + modifiers = self.base_type.analyse_pytyping_modifiers(env) + if base_type.is_fused and not self.in_pxd and (env.is_c_class_scope or env.is_module_scope): error(self.pos, "Fused types not allowed here") @@ -1477,7 +1524,7 @@ class CVarDefNode(StatNode): self.entry = dest_scope.declare_var( name, type, declarator.pos, cname=cname, visibility=visibility, in_pxd=self.in_pxd, - api=self.api, is_cdef=1) + api=self.api, is_cdef=True, pytyping_modifiers=modifiers) if Options.docstrings: self.entry.doc = embed_position(self.pos, self.doc) @@ -3164,7 +3211,7 @@ class DefNode(FuncDefNode): else: # probably just a plain 'object' arg.accept_none = True - else: + elif not arg.type.is_error: arg.accept_none = True # won't be used, but must be there if arg.not_none: error(arg.pos, "Only Python type arguments can have 'not None'") diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py index 1660eab22..1316edddc 100644 --- a/Cython/Compiler/PyrexTypes.py +++ b/Cython/Compiler/PyrexTypes.py @@ -205,6 +205,7 @@ class PyrexType(BaseType): # needs_cpp_construction boolean Needs C++ constructor and 
destructor when used in a cdef class # needs_refcounting boolean Needs code to be generated similar to incref/gotref/decref. # Largely used internally. + # equivalent_type type A C or Python type that is equivalent to this Python or C type. # default_value string Initial value that can be assigned before first user assignment. # declaration_value string The value statically assigned on declaration (if any). # entry Entry The Entry for this type @@ -277,6 +278,7 @@ class PyrexType(BaseType): has_attributes = 0 needs_cpp_construction = 0 needs_refcounting = 0 + equivalent_type = None default_value = "" declaration_value = "" @@ -4432,6 +4434,7 @@ class ErrorType(PyrexType): class PythonTypeConstructor(PyObjectType): """Used to help Cython interpret indexed types from the typing module (or similar) """ + modifier_name = None def __init__(self, name, base_type=None): self.python_type_constructor_name = name @@ -4460,69 +4463,35 @@ class PythonTupleTypeConstructor(PythonTypeConstructor): not any(v.is_pyobject for v in template_values)): entry = env.declare_tuple_type(pos, template_values) if entry: + entry.used = True return entry.type return super(PythonTupleTypeConstructor, self).specialize_here(pos, env, template_values) class SpecialPythonTypeConstructor(PythonTypeConstructor): """ - For things like ClassVar, Optional, etc, which have extra features on top of being - a "templated" type. + For things like ClassVar, Optional, etc, which are not types and disappear during type analysis. """ - def __init__(self, name, template_type=None): - super(SpecialPythonTypeConstructor, self).__init__(name, None) - if (name == "typing.ClassVar" and template_type - and not template_type.is_pyobject): - # because classvars end up essentially used as globals they have - # to be PyObjects. Try to find the nearest suitable type (although - # practically I doubt this matters). 
- py_type_name = template_type.py_type_name() - if py_type_name: - from .Builtin import builtin_scope - template_type = (builtin_scope.lookup_type(py_type_name) - or py_object_type) - else: - template_type = py_object_types - self.template_type = template_type + def __init__(self, name): + super(SpecialPythonTypeConstructor, self).__init__(name, base_type=None) + self.modifier_name = name def __repr__(self): - if self.template_type: - return "%s[%r]" % (self.name, self.template_type) - else: - return self.name - - def is_template_type(self): - return self.template_type is None + return self.name def resolve(self): - if self.template_type: - return self.template_type.resolve() - else: - return self + return self def specialize_here(self, pos, env, template_values=None): if len(template_values) != 1: error(pos, "'%s' takes exactly one template argument." % self.name) - # return a copy of the template type with python_type_constructor_name as an attribute - # so it can be identified, and a resolve function that gets back to - # the original type (since types are usually tested with "is") - new_type = template_values[0] - if self.python_type_constructor_name == "typing.ClassVar": - # classvar must remain a py_object_type - new_type = py_object_type - if (self.python_type_constructor_name == "typing.Optional" and - not new_type.is_pyobject): - # optional must be a py_object, but can be a specialized py_object - new_type = py_object_type - return SpecialPythonTypeConstructor( - self.python_type_constructor_name, - template_type = template_values[0]) - - def __getattr__(self, name): - if self.template_type: - return getattr(self.template_type, name) - return super(SpecialPythonTypeConstructor, self).__getattr__(name) + return error_type + if template_values[0] is None: + # FIXME: allowing unknown types for now since we don't recognise all Python types. + return None + # Replace this type with the actual 'template' argument. 
+ return template_values[0].resolve() rank_to_type_name = ( diff --git a/Cython/Compiler/Symtab.py b/Cython/Compiler/Symtab.py index 6554008f0..f657e7b7c 100644 --- a/Cython/Compiler/Symtab.py +++ b/Cython/Compiler/Symtab.py @@ -13,6 +13,7 @@ try: except ImportError: # Py3 import builtins +from ..Utils import try_finally_contextmanager from .Errors import warning, error, InternalError from .StringEncoding import EncodedString from . import Options, Naming @@ -163,6 +164,7 @@ class Entry(object): # known_standard_library_import Either None (default), an empty string (definitely can't be determined) # or a string of "modulename.something.attribute" # Used for identifying imports from typing/dataclasses etc + # pytyping_modifiers Python type modifiers like "typing.ClassVar" but also "dataclasses.InitVar" # TODO: utility_code and utility_code_definition serves the same purpose... @@ -237,6 +239,7 @@ class Entry(object): is_cgetter = False is_cpp_optional = False known_standard_library_import = None + pytyping_modifiers = None def __init__(self, name, cname, type, pos = None, init = None): self.name = name @@ -282,6 +285,9 @@ class Entry(object): assert not self.utility_code # we're not overwriting anything? self.utility_code_definition = Code.UtilityCode.load_cached("OptionalLocals", "CppSupport.cpp") + def declared_with_pytyping_modifier(self, modifier_name): + return modifier_name in self.pytyping_modifiers if self.pytyping_modifiers else False + class InnerEntry(Entry): """ @@ -366,6 +372,8 @@ class Scope(object): nogil = 0 fused_to_specific = None return_type = None + # Do ambiguous type names like 'int' and 'float' refer to the C types? (Otherwise, Python types.) + in_c_type_context = True def __init__(self, name, outer_scope, parent_scope): # The outer_scope is the next scope in the lookup chain. 
@@ -482,6 +490,14 @@ class Scope(object): for scope in sorted(self.subscopes, key=operator.attrgetter('scope_prefix')): yield scope + @try_finally_contextmanager + def new_c_type_context(self, in_c_type_context=None): + old_c_type_context = self.in_c_type_context + if in_c_type_context is not None: + self.in_c_type_context = in_c_type_context + yield + self.in_c_type_context = old_c_type_context + def declare(self, name, cname, type, pos, visibility, shadow = 0, is_type = 0, create_wrapper = 0): # Create new entry, and add to dictionary if # name is not None. Reports a warning if already @@ -733,8 +749,8 @@ class Scope(object): return self.outer_scope.declare_tuple_type(pos, components) def declare_var(self, name, type, pos, - cname = None, visibility = 'private', - api = 0, in_pxd = 0, is_cdef = 0): + cname=None, visibility='private', + api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None): # Add an entry for a variable. if not cname: if visibility != 'private' or api: @@ -754,8 +770,17 @@ class Scope(object): if api: entry.api = 1 entry.used = 1 + if pytyping_modifiers: + entry.pytyping_modifiers = pytyping_modifiers return entry + def _reject_pytyping_modifiers(self, pos, modifiers, allowed=()): + if not modifiers: + return + for modifier in modifiers: + if modifier not in allowed: + error(pos, "Modifier '%s' is not allowed here." % modifier) + def declare_assignment_expression_target(self, name, type, pos): # In most cases declares the variable as normal. # For generator expressions and comprehensions the variable is declared in their parent @@ -1515,14 +1540,15 @@ class ModuleScope(Scope): return entry def declare_var(self, name, type, pos, - cname = None, visibility = 'private', - api = 0, in_pxd = 0, is_cdef = 0): + cname=None, visibility='private', + api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None): # Add an entry for a global variable. 
If it is a Python # object type, and not declared with cdef, it will live # in the module dictionary, otherwise it will be a C # global variable. if visibility not in ('private', 'public', 'extern'): error(pos, "Module-level variable cannot be declared %s" % visibility) + self._reject_pytyping_modifiers(pos, pytyping_modifiers, ('typing.Optional',)) # let's allow at least this one if not is_cdef: if type is unspecified_type: type = py_object_type @@ -1558,7 +1584,7 @@ class ModuleScope(Scope): entry = Scope.declare_var(self, name, type, pos, cname=cname, visibility=visibility, - api=api, in_pxd=in_pxd, is_cdef=is_cdef) + api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers) if is_cdef: entry.is_cglobal = 1 if entry.type.declaration_value: @@ -1889,15 +1915,15 @@ class LocalScope(Scope): return entry def declare_var(self, name, type, pos, - cname = None, visibility = 'private', - api = 0, in_pxd = 0, is_cdef = 0): + cname=None, visibility='private', + api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None): name = self.mangle_class_private_name(name) # Add an entry for a local variable. 
if visibility in ('public', 'readonly'): error(pos, "Local variable cannot be declared %s" % visibility) entry = Scope.declare_var(self, name, type, pos, cname=cname, visibility=visibility, - api=api, in_pxd=in_pxd, is_cdef=is_cdef) + api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers) if entry.type.declaration_value: entry.init = entry.type.declaration_value entry.is_local = 1 @@ -1995,13 +2021,14 @@ class ComprehensionScope(Scope): return '%s%s' % (self.genexp_prefix, self.parent_scope.mangle(prefix, name)) def declare_var(self, name, type, pos, - cname = None, visibility = 'private', - api = 0, in_pxd = 0, is_cdef = True): + cname=None, visibility='private', + api=False, in_pxd=False, is_cdef=True, pytyping_modifiers=None): if type is unspecified_type: # if the outer scope defines a type for this variable, inherit it outer_entry = self.outer_scope.lookup(name) if outer_entry and outer_entry.is_variable: type = outer_entry.type # may still be 'unspecified_type' ! + self._reject_pytyping_modifiers(pos, pytyping_modifiers) # the parent scope needs to generate code for the variable, but # this scope must hold its name exclusively cname = '%s%s' % (self.genexp_prefix, self.parent_scope.mangle(Naming.var_prefix, name or self.next_id())) @@ -2084,8 +2111,8 @@ class StructOrUnionScope(Scope): Scope.__init__(self, name, None, None) def declare_var(self, name, type, pos, - cname = None, visibility = 'private', - api = 0, in_pxd = 0, is_cdef = 0, + cname=None, visibility='private', + api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None, allow_pyobject=False, allow_memoryview=False, allow_refcounted=False): # Add an entry for an attribute. 
if not cname: @@ -2094,6 +2121,7 @@ class StructOrUnionScope(Scope): cname = c_safe_identifier(cname) if type.is_cfunction: type = PyrexTypes.CPtrType(type) + self._reject_pytyping_modifiers(pos, pytyping_modifiers) entry = self.declare(name, cname, type, pos, visibility) entry.is_variable = 1 self.var_entries.append(entry) @@ -2171,15 +2199,15 @@ class PyClassScope(ClassScope): is_py_class_scope = 1 def declare_var(self, name, type, pos, - cname = None, visibility = 'private', - api = 0, in_pxd = 0, is_cdef = 0): + cname=None, visibility='private', + api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None): name = self.mangle_class_private_name(name) if type is unspecified_type: type = py_object_type # Add an entry for a class attribute. entry = Scope.declare_var(self, name, type, pos, cname=cname, visibility=visibility, - api=api, in_pxd=in_pxd, is_cdef=is_cdef) + api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers) entry.is_pyglobal = 1 entry.is_pyclass_attr = 1 return entry @@ -2301,17 +2329,21 @@ class CClassScope(ClassScope): return have_entries, (py_attrs, py_buffers, memoryview_slices) def declare_var(self, name, type, pos, - cname = None, visibility = 'private', - api = 0, in_pxd = 0, is_cdef = 0): + cname=None, visibility='private', + api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None): name = self.mangle_class_private_name(name) - if type.python_type_constructor_name == "typing.ClassVar": - is_cdef = 0 - type = type.resolve() - - if (type.python_type_constructor_name == "dataclasses.InitVar" and - 'dataclasses.dataclass' not in self.directives): - error(pos, "Use of cython.dataclasses.InitVar does not make sense outside a dataclass") + if pytyping_modifiers: + if "typing.ClassVar" in pytyping_modifiers: + is_cdef = 0 + if not type.is_pyobject: + if not type.equivalent_type: + warning(pos, "ClassVar[] requires the type to be a Python object type. Found '%s', using object instead." 
% type) + type = py_object_type + else: + type = type.equivalent_type + if "dataclasses.InitVar" in pytyping_modifiers and 'dataclasses.dataclass' not in self.directives: + error(pos, "Use of cython.dataclasses.InitVar does not make sense outside a dataclass") if is_cdef: # Add an entry for an attribute. @@ -2332,6 +2364,7 @@ class CClassScope(ClassScope): entry = self.declare(name, cname, type, pos, visibility) entry.is_variable = 1 self.var_entries.append(entry) + entry.pytyping_modifiers = pytyping_modifiers if type.is_cpp_class and visibility != 'extern': if self.directives['cpp_locals']: entry.make_cpp_optional() @@ -2369,7 +2402,7 @@ class CClassScope(ClassScope): # Add an entry for a class attribute. entry = Scope.declare_var(self, name, type, pos, cname=cname, visibility=visibility, - api=api, in_pxd=in_pxd, is_cdef=is_cdef) + api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers) entry.is_member = 1 # xxx: is_pyglobal changes behaviour in so many places that I keep it in for now. # is_member should be enough later on @@ -2612,11 +2645,12 @@ class CppClassScope(Scope): template_entry.is_type = 1 def declare_var(self, name, type, pos, - cname = None, visibility = 'extern', - api = 0, in_pxd = 0, is_cdef = 0, defining = 0): + cname=None, visibility='extern', + api=False, in_pxd=False, is_cdef=False, defining=False, pytyping_modifiers=None): # Add an entry for an attribute. if not cname: cname = name + self._reject_pytyping_modifiers(pos, pytyping_modifiers) entry = self.lookup_here(name) if defining and entry is not None: if entry.type.same_as(type): @@ -2746,10 +2780,11 @@ class CppScopedEnumScope(Scope): Scope.__init__(self, name, outer_scope, None) def declare_var(self, name, type, pos, - cname=None, visibility='extern'): + cname=None, visibility='extern', pytyping_modifiers=None): # Add an entry for an attribute. 
if not cname: cname = name + self._reject_pytyping_modifiers(pos, pytyping_modifiers) entry = self.declare(name, cname, type, pos, visibility) entry.is_variable = True return entry diff --git a/docs/examples/userguide/extension_types/dataclass.pyx b/docs/examples/userguide/extension_types/dataclass.pyx index 56666537d..b03d5f7b1 100644 --- a/docs/examples/userguide/extension_types/dataclass.pyx +++ b/docs/examples/userguide/extension_types/dataclass.pyx @@ -17,5 +17,5 @@ cdef class MyDataclass: c = "hello" # assignment of default value on a separate line # typing.InitVar and typing.ClassVar also work - d: dataclasses.InitVar[double] = 5 + d: dataclasses.InitVar[cython.double] = 5 e: typing.ClassVar[list] = [] diff --git a/tests/errors/dataclass_e1.pyx b/tests/errors/dataclass_e1.pyx index 39337ba6d..95d67ad7d 100644 --- a/tests/errors/dataclass_e1.pyx +++ b/tests/errors/dataclass_e1.pyx @@ -1,5 +1,5 @@ # mode: error - +# tag: warnings cimport cython @cython.dataclasses.dataclass(1, shouldnt_be_here=True, init=5, unsafe_hash=True) diff --git a/tests/errors/dataclass_e5.pyx b/tests/errors/dataclass_e5.pyx new file mode 100644 index 000000000..e86adf47e --- /dev/null +++ b/tests/errors/dataclass_e5.pyx @@ -0,0 +1,21 @@ +# mode: error +# tag: warnings + +cimport cython + +@cython.dataclasses.dataclass +cdef class C: + a: int + b: long + c: Py_ssize_t + d: float + e: double + + +_WARNINGS = """ +9:7: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'? +10:7: Found C type 'Py_ssize_t' in a Python annotation. Did you mean to use a Python type? +10:7: Unknown type declaration in annotation, ignoring +12:7: Found C type 'double' in a Python annotation. Did you mean to use a Python type? 
+12:7: Unknown type declaration in annotation, ignoring +""" diff --git a/tests/errors/e_typing_errors.pyx b/tests/errors/e_typing_errors.pyx new file mode 100644 index 000000000..e11827696 --- /dev/null +++ b/tests/errors/e_typing_errors.pyx @@ -0,0 +1,59 @@ +# mode: error + +import cython + +try: + from typing import Optional, ClassVar +except ImportError: + pass + + +# not OK + +def optional_cython_types(Optional[cython.int] i, Optional[cython.double] d, Optional[cython.float] f, + Optional[cython.complex] c, Optional[cython.long] l, Optional[cython.longlong] ll): + pass + + +MyStruct = cython.struct(a=cython.int, b=cython.double) + +def optional_cstruct(Optional[MyStruct] x): + pass + + +def optional_pytypes(Optional[int] i, Optional[float] f, Optional[complex] c, Optional[long] l): + pass + + +cdef ClassVar[list] x + + +# OK + +def optional_memoryview(double[:] d, Optional[double[:]] o): + pass + + +cdef class Cls(object): + cdef ClassVar[list] x + + + +_ERRORS = """ +13:45: typing.Optional[...] cannot be applied to non-Python type int +13:72: typing.Optional[...] cannot be applied to non-Python type double +13:98: typing.Optional[...] cannot be applied to non-Python type float +14:49: typing.Optional[...] cannot be applied to non-Python type double complex +14:74: typing.Optional[...] cannot be applied to non-Python type long +14:103: typing.Optional[...] cannot be applied to non-Python type long long +24:33: typing.Optional[...] cannot be applied to non-Python type int +24:52: typing.Optional[...] cannot be applied to non-Python type float +24:91: typing.Optional[...] cannot be applied to non-Python type long + +20:38: typing.Optional[...] cannot be applied to non-Python type MyStruct + +28:20: Modifier 'typing.ClassVar' is not allowed here. + +# FIXME: this should be ok :-? +33:53: typing.Optional[...] 
cannot be applied to non-Python type double[:] +""" diff --git a/tests/errors/e_typing_optional.py b/tests/errors/e_typing_optional.py index e75638e00..6facfeea4 100644 --- a/tests/errors/e_typing_optional.py +++ b/tests/errors/e_typing_optional.py @@ -8,11 +8,10 @@ except ImportError: pass -def optional_pytypes(i: Optional[int], f: Optional[float]): - pass - +# not OK -def optional_cython_types(i: Optional[cython.int], d: Optional[cython.double], f: Optional[cython.float]): +def optional_cython_types(i: Optional[cython.int], d: Optional[cython.double], f: Optional[cython.float], + c: Optional[cython.complex], l: Optional[cython.long], ll: Optional[cython.longlong]): pass @@ -22,13 +21,23 @@ def optional_cstruct(x: Optional[MyStruct]): pass +# OK + +def optional_pytypes(i: Optional[int], f: Optional[float], c: Optional[complex], l: Optional[long]): + pass + + +def optional_memoryview(d: double[:], o: Optional[double[:]]): + pass + + _ERRORS = """ -15:29: Only Python type arguments can use typing.Optional[...] -15:54: Only Python type arguments can use typing.Optional[...] -15:82: Only Python type arguments can use typing.Optional[...] -21:24: Only Python type arguments can use typing.Optional[...] - -# FIXME: these should be allowed! -11:24: Only Python type arguments can use typing.Optional[...] -11:42: Only Python type arguments can use typing.Optional[...] +13:44: typing.Optional[...] cannot be applied to non-Python type int +13:69: typing.Optional[...] cannot be applied to non-Python type double +13:97: typing.Optional[...] cannot be applied to non-Python type float +14:44: typing.Optional[...] cannot be applied to non-Python type double complex +14:73: typing.Optional[...] cannot be applied to non-Python type long +14:100: typing.Optional[...] cannot be applied to non-Python type long long + +20:33: typing.Optional[...] 
cannot be applied to non-Python type MyStruct """ diff --git a/tests/run/annotation_typing.pyx b/tests/run/annotation_typing.pyx index 03900061a..8eb52e7c6 100644 --- a/tests/run/annotation_typing.pyx +++ b/tests/run/annotation_typing.pyx @@ -11,14 +11,14 @@ except ImportError: pass -def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'type': 'float'} = 4) -> list: +def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'type': 'long int'} = 4) -> list: """ >>> old_dict_syntax([1]) - ('list object', 'int', 'long', 'float') - [1, 2, 3, 4.0] + ('list object', 'int object', 'long', 'long') + [1, 2, 3, 4] >>> old_dict_syntax([1], 3) - ('list object', 'int', 'long', 'float') - [1, 3, 3, 4.0] + ('list object', 'int object', 'long', 'long') + [1, 3, 3, 4] >>> old_dict_syntax(123) Traceback (most recent call last): TypeError: Argument 'a' has incorrect type (expected list, got int) @@ -33,16 +33,16 @@ def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'ty return a -def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None, o: Optional[tuple] = ()) -> list: +def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None, o: Optional[tuple] = ()) -> list: """ >>> pytypes_def([1]) - ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') [1, 2, 3, 4.0, None, ()] >>> pytypes_def([1], 3) - ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') [1, 3, 3, 4.0, None, ()] >>> pytypes_def([1], 3, 2, 1, [], None) - ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') [1, 3, 2, 1.0, [], None] >>> 
pytypes_def(123) Traceback (most recent call last): @@ -60,16 +60,16 @@ def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None, return a -cpdef pytypes_cpdef(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None, o: Optional[tuple] = ()): +cpdef pytypes_cpdef(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None, o: Optional[tuple] = ()): """ >>> pytypes_cpdef([1]) - ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') [1, 2, 3, 4.0, None, ()] >>> pytypes_cpdef([1], 3) - ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') [1, 3, 3, 4.0, None, ()] >>> pytypes_cpdef([1], 3, 2, 1, [], None) - ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') [1, 3, 2, 1.0, [], None] >>> pytypes_cpdef(123) Traceback (most recent call last): @@ -87,7 +87,7 @@ cpdef pytypes_cpdef(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = No return a -cdef c_pytypes_cdef(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None): +cdef c_pytypes_cdef(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None): print(typeof(a), typeof(b), typeof(c), typeof(d), typeof(n)) a.append(b) a.append(c) @@ -99,10 +99,10 @@ cdef c_pytypes_cdef(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = No def pytypes_cdef(a, b=2, c=3, d=4): """ >>> pytypes_cdef([1]) - ('list object', 'Python object', 'Python object', 'double', 'list object') + ('list object', 'int object', 'Python object', 'double', 'list object') [1, 2, 3, 4.0, None] >>> pytypes_cdef([1], 3) - ('list object', 'Python object', 'Python object', 'double', 'list object') + ('list object', 'int object', 
'Python object', 'double', 'list object') [1, 3, 3, 4.0, None] >>> pytypes_cdef(123) # doctest: +ELLIPSIS Traceback (most recent call last): @@ -278,24 +278,28 @@ class LateClass(object): pass -def py_float_default(price : float=None, ndigits=4): +def py_float_default(price : Optional[float]=None, ndigits=4): """ Python default arguments should prevent C type inference. >>> py_float_default() (None, 4) - >>> py_float_default(2) - (2, 4) + >>> py_float_default(None) + (None, 4) + >>> py_float_default(2) # doctest: +ELLIPSIS + Traceback (most recent call last): + TypeError: ...float... >>> py_float_default(2.0) (2.0, 4) - >>> py_float_default(2, 3) - (2, 3) + >>> py_float_default(2, 3) # doctest: +ELLIPSIS + Traceback (most recent call last): + TypeError: ...float... """ return price, ndigits cdef class ClassAttribute: - cls_attr : float = 1. + cls_attr : cython.float = 1. @cython.cfunc @@ -332,12 +336,16 @@ _WARNINGS = """ 14:77: Dicts should no longer be used as type annotations. Use 'cython.int' etc. directly. 14:85: Python type declaration in signature annotation does not refer to a Python type 14:85: Strings should no longer be used for type declarations. Use 'cython.int' etc. directly. -36:64: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None. -63:68: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None. -90:68: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None. +36:40: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'? +36:66: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None. +63:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'? +63:70: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None. +90:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'? +90:70: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None. 
274:44: Unknown type declaration in annotation, ignoring -281:29: Ambiguous types in annotation, ignoring -298:15: Annotation ignored since class-level attributes must be Python objects. Were you trying to set up an instance attribute? +302:15: Annotation ignored since class-level attributes must be Python objects. Were you trying to set up an instance attribute? +# DUPLICATE: +63:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'? # BUG: 63:6: 'pytypes_cpdef' redeclared 146:0: 'struct_io' redeclared diff --git a/tests/run/cdef_class_dataclass.pyx b/tests/run/cdef_class_dataclass.pyx index 326fd0210..2f69e0f8f 100644 --- a/tests/run/cdef_class_dataclass.pyx +++ b/tests/run/cdef_class_dataclass.pyx @@ -127,8 +127,8 @@ cdef class ContainsNonPyFields: """ mystruct: S = cython.dataclasses.field(compare=False) mystruct_ptr: S_ptr = field(init=False, repr=False, default_factory=malloc_a_struct) - memview: int[:, ::1] = field(default=create_array((3,1), "c"), # mutable so not great but OK for a test - compare=False) + memview: cython.int[:, ::1] = field(default=create_array((3,1), "c"), # mutable so not great but OK for a test + compare=False) def __dealloc__(self): free(self.mystruct_ptr) @@ -154,8 +154,8 @@ cdef class InitClassVars: True """ a: cython.int = 0 - b1: InitVar[double] = 1.0 - b2: py_dataclasses.InitVar[double] = 1.0 + b1: InitVar[cython.double] = 1.0 + b2: py_dataclasses.InitVar[cython.double] = 1.0 c1: ClassVar[float] = 2.0 c2: typing.ClassVar[float] = 2.0 cdef InitVar[cython.int] d1 @@ -206,7 +206,7 @@ cdef class TestVisibility: """ cdef double a a = 1.0 - b: double = 2.0 + b: cython.double = 2.0 cdef public double c c = 3.0 cdef public object d @@ -222,7 +222,7 @@ cdef class TestFrozen: Traceback (most recent call last): AttributeError: attribute 'a' of '...TestFrozen' objects is not writable """ - a: double = 2.0 + a: cython.double = 2.0 @dataclass(kw_only=True) cdef class TestKwOnly: @@ -248,8 +248,8 @@ cdef 
class TestKwOnly: TypeError: __init__() needs keyword-only argument b """ - a: double = 2.0 - b: long + a: cython.double = 2.0 + b: cython.long import sys if sys.version_info >= (3, 7): diff --git a/tests/run/cdef_setitem_T284.pyx b/tests/run/cdef_setitem_T284.pyx index 389b8c409..871afb892 100644 --- a/tests/run/cdef_setitem_T284.pyx +++ b/tests/run/cdef_setitem_T284.pyx @@ -24,9 +24,9 @@ def with_external_list(list L): """ >>> with_external_list([1,2,3]) [1, -10, 3] - >>> with_external_list(None) + >>> with_external_list(None) # doctest: +ELLIPSIS Traceback (most recent call last): - TypeError: 'NoneType' object is not subscriptable + TypeError: 'NoneType' object ... """ ob = 1L L[ob] = -10 diff --git a/tests/run/delete.pyx b/tests/run/delete.pyx index ec0b6c71a..6127fa9f1 100644 --- a/tests/run/delete.pyx +++ b/tests/run/delete.pyx @@ -29,15 +29,33 @@ def del_item(L, o): del L[o] return L + @cython.test_assert_path_exists('//DelStatNode//IndexNode//NoneCheckNode') def del_dict(dict D, o): """ >>> del_dict({1: 'a', 2: 'b'}, 1) {2: 'b'} + >>> del_dict(None, 1) # doctest: +ELLIPSIS + Traceback (most recent call last): + TypeError: 'NoneType' object ... 
""" del D[o] return D + +@cython.test_fail_if_path_exists('//DelStatNode//IndexNode//NoneCheckNode') +def del_dict_ann(D: dict, o): + """ + >>> del_dict_ann({1: 'a', 2: 'b'}, 1) + {2: 'b'} + >>> del_dict_ann(None, 1) + Traceback (most recent call last): + TypeError: Argument 'D' has incorrect type (expected dict, got NoneType) + """ + del D[o] + return D + + @cython.test_fail_if_path_exists('//NoneCheckNode') def del_dict_from_literal(o): """ diff --git a/tests/run/pep526_variable_annotations.py b/tests/run/pep526_variable_annotations.py index 3e30075c3..56cb0201b 100644 --- a/tests/run/pep526_variable_annotations.py +++ b/tests/run/pep526_variable_annotations.py @@ -15,11 +15,11 @@ except ImportError: var = 1 # type: annotation -var: int = 2 -fvar: float = 1.2 +var: cython.int = 2 +fvar: cython.float = 1.2 some_number: cython.int # variable without initial value -some_list: List[int] = [] # variable with initial value -t: Tuple[int, ...] = (1, 2, 3) +some_list: List[cython.int] = [] # variable with initial value +t: Tuple[cython.int, ...] = (1, 2, 3) body: Optional[List[str]] descr_only : "descriptions are allowed but ignored" @@ -34,11 +34,11 @@ def f(): (2, 1.5, [], (1, 2, 3)) """ var = 1 # type: annotation - var: int = 2 - fvar: float = 1.5 + var: cython.int = 2 + fvar: cython.float = 1.5 some_number: cython.int # variable without initial value - some_list: List[int] = [] # variable with initial value - t: Tuple[int, ...] = (1, 2, 3) + some_list: List[cython.int] = [] # variable with initial value + t: Tuple[cython.int, ...] 
= (1, 2, 3) body: Optional[List[str]] descr_only: "descriptions are allowed but ignored" @@ -59,7 +59,7 @@ class BasicStarship(object): """ captain: str = 'Picard' # instance variable with default damage: cython.int # instance variable without default - stats: ClassVar[Dict[str, int]] = {} # class variable + stats: ClassVar[Dict[str, cython.int]] = {} # class variable descr_only: "descriptions are allowed but ignored" def __init__(self, damage): @@ -75,7 +75,7 @@ class BasicStarshipExt(object): """ captain: str = 'Picard' # instance variable with default damage: cython.int # instance variable without default - stats: ClassVar[Dict[str, int]] = {} # class variable + stats: ClassVar[Dict[str, cython.int]] = {} # class variable descr_only: "descriptions are allowed but ignored" def __init__(self, damage): @@ -124,7 +124,7 @@ def iter_declared_dict(d): # specialized "compiled" test in module-level __doc__ """ - typed_dict : Dict[float, float] = d + typed_dict : Dict[cython.float, cython.float] = d s = 0.0 for key in typed_dict: s += d[key] @@ -135,7 +135,7 @@ def iter_declared_dict(d): "//WhileStatNode", "//WhileStatNode//DictIterationNextNode", ) -def iter_declared_dict_arg(d : Dict[float, float]): +def iter_declared_dict_arg(d : Dict[cython.float, cython.float]): """ >>> d = {1.1: 2.5, 3.3: 4.5} >>> iter_declared_dict_arg(d) @@ -165,8 +165,8 @@ def test_subscripted_types(): list object set object """ - a: typing.Dict[int, float] = {} - b: List[int] = [] + a: typing.Dict[cython.int, cython.float] = {} + b: List[cython.int] = [] c: _SET_[object] = set() print(cython.typeof(a) + (" object" if not cython.compiled else "")) @@ -174,22 +174,31 @@ def test_subscripted_types(): print(cython.typeof(c) + (" object" if not cython.compiled else "")) # because tuple is specifically special cased to go to ctuple where possible -def test_tuple(a: typing.Tuple[int, float], b: typing.Tuple[int, ...], - c: Tuple[int, object] # cannot be a ctuple +def test_tuple(a: 
typing.Tuple[cython.int, cython.float], b: typing.Tuple[cython.int, ...], + c: Tuple[cython.int, object] # cannot be a ctuple ): """ >>> test_tuple((1, 1.0), (1, 1.0), (1, 1.0)) int int + Python object + Python object + (int, float) + tuple object tuple object tuple object """ - x: typing.Tuple[int, float] = (a[0], a[1]) - y: Tuple[int, ...] = (1,2.) - z = a[0] # should infer to int + x: typing.Tuple[int, float] = (a[0], a[1]) # note: Python int/float, not cython.int/float + y: Tuple[cython.int, ...] = (1,2.) + z = a[0] # should infer to C int + p = x[1] # should infer to Python float -> C double print(cython.typeof(z)) - print(cython.typeof(x[0])) + print("int" if cython.compiled and cython.typeof(x[0]) == "Python object" else cython.typeof(x[0])) # FIXME: infer Python int + print(cython.typeof(p) if cython.compiled or cython.typeof(p) != 'float' else "Python object") # FIXME: infer C double + print(cython.typeof(x[1]) if cython.compiled or cython.typeof(p) != 'float' else "Python object") # FIXME: infer C double + print(cython.typeof(a) if cython.compiled or cython.typeof(a) != 'tuple' else "(int, float)") + print(cython.typeof(x) + (" object" if not cython.compiled else "")) print(cython.typeof(y) + (" object" if not cython.compiled else "")) print(cython.typeof(c) + (" object" if not cython.compiled else "")) diff --git a/tests/run/pep526_variable_annotations_cy.pyx b/tests/run/pep526_variable_annotations_cy.pyx index c08c832b0..448824b36 100644 --- a/tests/run/pep526_variable_annotations_cy.pyx +++ b/tests/run/pep526_variable_annotations_cy.pyx @@ -48,9 +48,9 @@ def test_tuple(typing.Tuple[int, float] a, typing.Tuple[int, ...] b, tuple object tuple object """ - cdef typing.Tuple[int, float] x = (a[0], a[1]) + cdef typing.Tuple[int, float] x = (a[0], a[1]) # C int/float cdef Tuple[int, ...] y = (1,2.) 
- z = a[0] # should infer to int + z = a[0] # should infer to C int print(cython.typeof(z)) print(cython.typeof(x[0])) diff --git a/tests/run/pure_cdef_class_dataclass.py b/tests/run/pure_cdef_class_dataclass.py index 7b8fcb851..8a978d36f 100644 --- a/tests/run/pure_cdef_class_dataclass.py +++ b/tests/run/pure_cdef_class_dataclass.py @@ -11,9 +11,9 @@ class MyDataclass: """ >>> sorted(list(MyDataclass.__dataclass_fields__.keys())) ['a', 'self'] - >>> inst1 = MyDataclass(2.0, ['a', 'b']) + >>> inst1 = MyDataclass(2, ['a', 'b']) >>> print(inst1) - MyDataclass(a=2.0, self=['a', 'b']) + MyDataclass(a=2, self=['a', 'b']) >>> inst2 = MyDataclass() >>> print(inst2) MyDataclass(a=1, self=[]) -- cgit v1.2.1 From b404a3f3b82f296931730470411a68fec7d6f40f Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 12 Jul 2022 15:54:21 +0100 Subject: Allow setting "annotation_typing" directive more locally (GH-4886) To make it easier to handle cases where Cython's interpretation differs from the user's interpretation. Also improve the documentation about this. --- Cython/Compiler/Options.py | 1 - docs/src/userguide/migrating_to_cy30.rst | 17 ++++++++++++ .../src/userguide/source_files_and_compilation.rst | 3 ++- tests/run/annotation_typing.pyx | 30 ++++++++++++++++++++++ 4 files changed, 49 insertions(+), 2 deletions(-) diff --git a/Cython/Compiler/Options.py b/Cython/Compiler/Options.py index af28a7187..97f288905 100644 --- a/Cython/Compiler/Options.py +++ b/Cython/Compiler/Options.py @@ -366,7 +366,6 @@ directive_scopes = { # defaults to available everywhere 'test_fail_if_path_exists' : ('function', 'class', 'cclass'), 'freelist': ('cclass',), 'emit_code_comments': ('module',), - 'annotation_typing': ('module',), # FIXME: analysis currently lacks more specific function scope # Avoid scope-specific to/from_py_functions for c_string. 
 'c_string_type': ('module',),
 'c_string_encoding': ('module',),
diff --git a/docs/src/userguide/migrating_to_cy30.rst b/docs/src/userguide/migrating_to_cy30.rst
index 357132887..1105ee15d 100644
--- a/docs/src/userguide/migrating_to_cy30.rst
+++ b/docs/src/userguide/migrating_to_cy30.rst
@@ -172,3 +172,20 @@ rather than relying on the user to test and cast the type of each operand.
 The old behaviour can be restored with the
 :ref:`directive ` ``c_api_binop_methods=True``.
 More details are given in :ref:`arithmetic_methods`.
+
+Annotation typing
+=================
+
+Cython 3 has made substantial improvements in recognising types in
+annotations and it is well worth reading
+:ref:`the pure Python tutorial` to understand
+some of the improvements.
+
+A notable backwards-incompatible change is that ``x: int`` is now typed
+such that ``x`` is an exact Python ``int`` (Cython 0.29 would accept
+any Python object for ``x``).
+
+To make it easier to handle cases where your interpretation of type
+annotations differs from Cython's, Cython 3 now supports setting the
+``annotation_typing`` :ref:`directive ` on a
+per-class or per-function level.
diff --git a/docs/src/userguide/source_files_and_compilation.rst b/docs/src/userguide/source_files_and_compilation.rst
index edf51213e..a833c61ed 100644
--- a/docs/src/userguide/source_files_and_compilation.rst
+++ b/docs/src/userguide/source_files_and_compilation.rst
@@ -946,7 +946,8 @@ Cython code. Here is the list of currently supported directives:
 Uses function argument annotations to determine the type of variables.
 Default is True, but can be disabled.
 Since Python does not enforce types given in annotations, setting to False
 gives greater compatibility with Python code.
- Must be set globally.
+ From Cython 3.0, ``annotation_typing`` can be set on a per-function or
+ per-class basis.
``emit_code_comments`` (True / False) Copy the original source code line by line into C code comments in the generated diff --git a/tests/run/annotation_typing.pyx b/tests/run/annotation_typing.pyx index 8eb52e7c6..ce74ef1dd 100644 --- a/tests/run/annotation_typing.pyx +++ b/tests/run/annotation_typing.pyx @@ -329,6 +329,36 @@ class HasPtr: return f"HasPtr({self.a[0]}, {self.b})" +@cython.annotation_typing(False) +def turn_off_typing(x: float, d: dict): + """ + >>> turn_off_typing('not a float', []) # ignore the typing + ('Python object', 'Python object', 'not a float', []) + """ + return typeof(x), typeof(d), x, d + + +@cython.annotation_typing(False) +cdef class ClassTurnOffTyping: + x: float + d: dict + + def get_var_types(self, arg: float): + """ + >>> ClassTurnOffTyping().get_var_types(1.0) + ('Python object', 'Python object', 'Python object') + """ + return typeof(self.x), typeof(self.d), typeof(arg) + + @cython.annotation_typing(True) + def and_turn_it_back_on_again(self, arg: float): + """ + >>> ClassTurnOffTyping().and_turn_it_back_on_again(1.0) + ('Python object', 'Python object', 'double') + """ + return typeof(self.x), typeof(self.d), typeof(arg) + + _WARNINGS = """ 14:32: Strings should no longer be used for type declarations. Use 'cython.int' etc. directly. 14:47: Dicts should no longer be used as type annotations. Use 'cython.int' etc. directly. -- cgit v1.2.1 From f04d53bc5a372a02266a92a27c1d041d1e02b6b8 Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 12 Jul 2022 18:39:47 +0100 Subject: Add a note about cythonize in the quickstart documentation (GH-4879) --- docs/src/quickstart/build.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/src/quickstart/build.rst b/docs/src/quickstart/build.rst index 5d9e8a307..3cbcfa087 100644 --- a/docs/src/quickstart/build.rst +++ b/docs/src/quickstart/build.rst @@ -18,6 +18,10 @@ one may want to read more about There are several ways to build Cython code: - Write a setuptools ``setup.py``. 
This is the normal and recommended way. + - Run the ``cythonize`` command-line utility. This is a good approach for + compiling a single Cython source file directly to an extension. + A source file can be built "in place" (so that the extension module is created + next to the source file, ready to be imported) with ``cythonize -i filename.pyx``. - Use :ref:`Pyximport`, importing Cython ``.pyx`` files as if they were ``.py`` files (using setuptools to compile and build in the background). This method is easier than writing a ``setup.py``, but is not very flexible. -- cgit v1.2.1 From e4ef0c1e807aab8c20fb08b638550c912c166be3 Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 12 Jul 2022 19:00:58 +0100 Subject: Error on memoryview argument capture on 0.29.x (GH-4849) I don't believe it's easy to fix https://github.com/cython/cython/issues/4798 on 0.29.x Therefore, generate an error message that explains two possible workarounds. This at least makes sure that people don't end up with mysterious crashes. 
--- Cython/Compiler/Nodes.py | 15 +++++++++++++-- tests/memoryview/memslice.pyx | 20 ++++++++++++++++++++ 2 files changed, 33 insertions(+), 2 deletions(-) diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 339b1fa04..743d6959b 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -2637,8 +2637,11 @@ class CFuncDefNode(FuncDefNode): def put_into_closure(entry): if entry.in_closure and not arg.default: code.putln('%s = %s;' % (entry.cname, entry.original_cname)) - code.put_var_incref(entry) - code.put_var_giveref(entry) + if entry.type.is_memoryviewslice: + code.put_incref_memoryviewslice(entry.cname, have_gil=True) + else: + code.put_var_incref(entry) + code.put_var_giveref(entry) for arg in self.args: put_into_closure(scope.lookup_here(arg.name)) @@ -3234,6 +3237,14 @@ class DefNode(FuncDefNode): # Move arguments into closure if required def put_into_closure(entry): if entry.in_closure: + if entry.type.is_memoryviewslice: + error( + self.pos, + "Referring to a memoryview typed argument directly in a nested closure function " + "is not supported in Cython 0.x. " + "Either upgrade to Cython 3, or assign the argument to a local variable " + "and use that in the nested function." 
+ ) code.putln('%s = %s;' % (entry.cname, entry.original_cname)) if entry.xdecref_cleanup: # mostly applies to the starstar arg - this can sometimes be NULL diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx index 24af61e17..ccf760c21 100644 --- a/tests/memoryview/memslice.pyx +++ b/tests/memoryview/memslice.pyx @@ -2549,3 +2549,23 @@ def test_const_buffer(const int[:] a): cdef const int[:] c = a print(a[0]) print(c[-1]) + +cdef arg_in_closure_cdef(int [:] a): + def inner(): + return (a[0], a[1]) + return inner + +def test_arg_in_closure_cdef(a): + """ + >>> A = IntMockBuffer("A", range(6), shape=(6,)) + >>> inner = test_arg_in_closure_cdef(A) + acquired A + >>> inner() + (0, 1) + + The assignment below is just to avoid printing what was collected + >>> del inner; ignore_me = gc.collect() + released A + """ + return arg_in_closure_cdef(a) + -- cgit v1.2.1 From 858b1a5ad2237cb439965450c221a15dfedd1295 Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 13 Jul 2022 11:18:14 +0100 Subject: Disable co_varnames identity check on Python 3.11 (GH-4850) CPython 3.11 no longer stores the varnames tuple. Instead, it stores it as part of a larger list of names and calculates it dynamically on request. --- tests/run/tuple_constants.pyx | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/run/tuple_constants.pyx b/tests/run/tuple_constants.pyx index f60d5d818..fa5794cf7 100644 --- a/tests/run/tuple_constants.pyx +++ b/tests/run/tuple_constants.pyx @@ -36,7 +36,12 @@ def test_deduplicated_args(): # are generated often with the same argument names. 
Therefore it's worth ensuring that # they are correctly deduplicated import sys - if not hasattr(sys, "pypy_version_info"): # test doesn't work on PyPy (which is probably fair enough) + check_identity_of_co_varnames = ( + not hasattr(sys, "pypy_version_info") and # test doesn't work on PyPy (which is probably fair enough) + sys.version_info < (3, 11) # on Python 3.11 co_varnames returns a new, dynamically-calculated tuple + # each time it is run + ) + if check_identity_of_co_varnames: assert func1.__code__.co_varnames is func2.__code__.co_varnames @cython.test_assert_path_exists("//TupleNode", -- cgit v1.2.1 From f298b6af61a0ab5e8e0b53907ea5080529dd98e4 Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 13 Jul 2022 11:21:24 +0100 Subject: Avoid raising StopIteration in "__next__" if possible (GH-4844) Fixes https://github.com/cython/cython/issues/3447 --- Cython/Compiler/Code.pxd | 1 + Cython/Compiler/Code.py | 15 ++++++- Cython/Compiler/FlowControl.pxd | 2 + Cython/Compiler/FlowControl.py | 8 ++++ Cython/Compiler/Naming.py | 1 + Cython/Compiler/Nodes.py | 26 +++++++++++++ Cython/Compiler/Symtab.py | 3 ++ tests/run/funcexc_iter_T228.pyx | 86 +++++++++++++++++++++++++++++++++++++++++ 8 files changed, 140 insertions(+), 2 deletions(-) diff --git a/Cython/Compiler/Code.pxd b/Cython/Compiler/Code.pxd index e17e0fb1d..59779f8bc 100644 --- a/Cython/Compiler/Code.pxd +++ b/Cython/Compiler/Code.pxd @@ -54,6 +54,7 @@ cdef class FunctionState: cdef public object closure_temps cdef public bint should_declare_error_indicator cdef public bint uses_error_indicator + cdef public bint error_without_exception @cython.locals(n=size_t) cpdef new_label(self, name=*) diff --git a/Cython/Compiler/Code.py b/Cython/Compiler/Code.py index 4695b240c..4c67ac400 100644 --- a/Cython/Compiler/Code.py +++ b/Cython/Compiler/Code.py @@ -691,6 +691,7 @@ class LazyUtilityCode(UtilityCodeBase): class FunctionState(object): # return_label string function return point label # error_label string error 
catch point label + # error_without_exception boolean Can go to the error label without an exception (e.g. __next__ can return NULL) # continue_label string loop continue point label # break_label string loop break point label # return_from_error_cleanup_label string @@ -739,6 +740,8 @@ class FunctionState(object): self.should_declare_error_indicator = False self.uses_error_indicator = False + self.error_without_exception = False + # safety checks def validate_exit(self): @@ -2332,8 +2335,16 @@ class CCodeWriter(object): if method_noargs in method_flags: # Special NOARGS methods really take no arguments besides 'self', but PyCFunction expects one. func_cname = Naming.method_wrapper_prefix + func_cname - self.putln("static PyObject *%s(PyObject *self, CYTHON_UNUSED PyObject *arg) {return %s(self);}" % ( - func_cname, entry.func_cname)) + self.putln("static PyObject *%s(PyObject *self, CYTHON_UNUSED PyObject *arg) {" % func_cname) + func_call = "%s(self)" % entry.func_cname + if entry.name == "__next__": + self.putln("PyObject *res = %s;" % func_call) + # tp_iternext can return NULL without an exception + self.putln("if (!res && !PyErr_Occurred()) { PyErr_SetNone(PyExc_StopIteration); }") + self.putln("return res;") + else: + self.putln("return %s;" % func_call) + self.putln("}") return func_cname # GIL methods diff --git a/Cython/Compiler/FlowControl.pxd b/Cython/Compiler/FlowControl.pxd index c876ee3b1..4a8ef19c1 100644 --- a/Cython/Compiler/FlowControl.pxd +++ b/Cython/Compiler/FlowControl.pxd @@ -58,6 +58,8 @@ cdef class ControlFlow: cdef public dict assmts + cdef public Py_ssize_t in_try_block + cpdef newblock(self, ControlBlock parent=*) cpdef nextblock(self, ControlBlock parent=*) cpdef bint is_tracked(self, entry) diff --git a/Cython/Compiler/FlowControl.py b/Cython/Compiler/FlowControl.py index 4e0160e41..4018ff851 100644 --- a/Cython/Compiler/FlowControl.py +++ b/Cython/Compiler/FlowControl.py @@ -110,6 +110,7 @@ class ControlFlow(object): entries set 
tracked entries loops list stack for loop descriptors exceptions list stack for exception descriptors + in_try_block int track if we're in a try...except or try...finally block """ def __init__(self): @@ -122,6 +123,7 @@ class ControlFlow(object): self.exit_point = ExitBlock() self.blocks.add(self.exit_point) self.block = self.entry_point + self.in_try_block = 0 def newblock(self, parent=None): """Create floating block linked to `parent` if given. @@ -1166,7 +1168,9 @@ class ControlFlowAnalysis(CythonTransform): ## XXX: children nodes self.flow.block.add_child(entry_point) self.flow.nextblock() + self.flow.in_try_block += 1 self._visit(node.body) + self.flow.in_try_block -= 1 self.flow.exceptions.pop() # After exception @@ -1226,7 +1230,9 @@ class ControlFlowAnalysis(CythonTransform): self.flow.block = body_block body_block.add_child(entry_point) self.flow.nextblock() + self.flow.in_try_block += 1 self._visit(node.body) + self.flow.in_try_block -= 1 self.flow.exceptions.pop() if self.flow.loops: self.flow.loops[-1].exceptions.pop() @@ -1245,6 +1251,8 @@ class ControlFlowAnalysis(CythonTransform): if self.flow.exceptions: self.flow.block.add_child(self.flow.exceptions[-1].entry_point) self.flow.block = None + if self.flow.in_try_block: + node.in_try_block = True return node def visit_ReraiseStatNode(self, node): diff --git a/Cython/Compiler/Naming.py b/Cython/Compiler/Naming.py index 7845e4aa1..96c0b8fbd 100644 --- a/Cython/Compiler/Naming.py +++ b/Cython/Compiler/Naming.py @@ -126,6 +126,7 @@ cur_scope_cname = pyrex_prefix + "cur_scope" enc_scope_cname = pyrex_prefix + "enc_scope" frame_cname = pyrex_prefix + "frame" frame_code_cname = pyrex_prefix + "frame_code" +error_without_exception_cname = pyrex_prefix + "error_without_exception" binding_cfunc = pyrex_prefix + "binding_PyCFunctionType" fused_func_prefix = pyrex_prefix + 'fuse_' quick_temp_cname = pyrex_prefix + "temp" # temp variable for quick'n'dirty temping diff --git a/Cython/Compiler/Nodes.py 
b/Cython/Compiler/Nodes.py index 15c82f571..298cc5705 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -2231,7 +2231,14 @@ class FuncDefNode(StatNode, BlockNode): # code.put_trace_exception() assure_gil('error') + if code.funcstate.error_without_exception: + tempvardecl_code.putln( + "int %s = 0; /* StopIteration */" % Naming.error_without_exception_cname + ) + code.putln("if (!%s) {" % Naming.error_without_exception_cname) code.put_add_traceback(self.entry.qualified_name) + if code.funcstate.error_without_exception: + code.putln("}") else: warning(self.entry.pos, "Unraisable exception in function '%s'." % @@ -6703,11 +6710,15 @@ class RaiseStatNode(StatNode): # exc_value ExprNode or None # exc_tb ExprNode or None # cause ExprNode or None + # + # set in FlowControl + # in_try_block bool child_attrs = ["exc_type", "exc_value", "exc_tb", "cause"] is_terminator = True builtin_exc_name = None wrap_tuple_value = False + in_try_block = False def analyse_expressions(self, env): if self.exc_type: @@ -6736,9 +6747,19 @@ class RaiseStatNode(StatNode): not (exc.args or (exc.arg_tuple is not None and exc.arg_tuple.args))): exc = exc.function # extract the exception type if exc.is_name and exc.entry.is_builtin: + from . import Symtab self.builtin_exc_name = exc.name if self.builtin_exc_name == 'MemoryError': self.exc_type = None # has a separate implementation + elif (self.builtin_exc_name == 'StopIteration' and + env.is_local_scope and env.name == "__next__" and + env.parent_scope and env.parent_scope.is_c_class_scope and + not self.in_try_block): + # tp_iternext is allowed to return NULL without raising StopIteration. 
+ # For the sake of simplicity, only allow this to happen when not in + # a try block + self.exc_type = None + return self nogil_check = Node.gil_error @@ -6749,6 +6770,11 @@ class RaiseStatNode(StatNode): if self.builtin_exc_name == 'MemoryError': code.putln('PyErr_NoMemory(); %s' % code.error_goto(self.pos)) return + elif self.builtin_exc_name == 'StopIteration' and not self.exc_type: + code.putln('%s = 1;' % Naming.error_without_exception_cname) + code.putln('%s;' % code.error_goto(None)) + code.funcstate.error_without_exception = True + return if self.exc_type: self.exc_type.generate_evaluation_code(code) diff --git a/Cython/Compiler/Symtab.py b/Cython/Compiler/Symtab.py index f657e7b7c..1500c7441 100644 --- a/Cython/Compiler/Symtab.py +++ b/Cython/Compiler/Symtab.py @@ -342,6 +342,7 @@ class Scope(object): # is_builtin_scope boolean Is the builtin scope of Python/Cython # is_py_class_scope boolean Is a Python class scope # is_c_class_scope boolean Is an extension type scope + # is_local_scope boolean Is a local (i.e. function/method/generator) scope # is_closure_scope boolean Is a closure scope # is_generator_expression_scope boolean A subset of closure scope used for generator expressions # is_passthrough boolean Outer scope is passed directly @@ -360,6 +361,7 @@ class Scope(object): is_py_class_scope = 0 is_c_class_scope = 0 is_closure_scope = 0 + is_local_scope = False is_generator_expression_scope = 0 is_comprehension_scope = 0 is_passthrough = 0 @@ -1886,6 +1888,7 @@ class ModuleScope(Scope): class LocalScope(Scope): + is_local_scope = True # Does the function have a 'with gil:' block? 
has_with_gil_block = False diff --git a/tests/run/funcexc_iter_T228.pyx b/tests/run/funcexc_iter_T228.pyx index 4b81166f6..40db3afb2 100644 --- a/tests/run/funcexc_iter_T228.pyx +++ b/tests/run/funcexc_iter_T228.pyx @@ -65,3 +65,89 @@ def double_raise(py_iterator): print(sys.exc_info()[0] is ValueError or sys.exc_info()[0]) a = list(cy_iterator()) print(sys.exc_info()[0] is ValueError or sys.exc_info()[0]) + + +###### Tests to do with the optimization of StopIteration to "return NULL" ####### +# we're mainly checking that +# 1. Calling __next__ manually doesn't crash (the wrapper function adds the exception) +# 2. if you raise a value then that value gets raised +# 3. putting the exception in various places try...finally / try...except blocks works + +def call_next_directly(): + """ + >>> call_next_directly() + Traceback (most recent call last): + ... + StopIteration + """ + cy_iterator().__next__() + +cdef class cy_iter_many_options: + cdef what + def __init__(self, what): + self.what = what + + def __iter__(self): + return self + + def __next__(self): + if self.what == "StopIteration in finally no return": + try: + raise StopIteration + finally: + print "Finally..." 
+ elif self.what == "StopIteration in finally return": + try: + raise StopIteration + finally: + self.what = None + return "in finally" # but will stop iterating next time + elif self.what == "StopIteration from finally": + try: + raise ValueError + finally: + raise StopIteration + elif self.what == "catch StopIteration": + try: + raise StopIteration + except StopIteration: + self.what = None + return "in except" # but will stop next time + elif self.what == "don't catch StopIteration": + try: + raise StopIteration + except ValueError: + return 0 + elif self.what == "StopIteration from except": + try: + raise ValueError + except ValueError: + raise StopIteration + elif self.what == "StopIteration with value": + raise StopIteration("I'm a value!") + elif self.what is None: + raise StopIteration + else: + raise ValueError("self.what didn't match anything") + +def test_cy_iter_many_options(option): + """ + >>> test_cy_iter_many_options("StopIteration in finally no return") + Finally... + [] + >>> test_cy_iter_many_options("StopIteration in finally return") + ['in finally'] + >>> test_cy_iter_many_options("StopIteration from finally") + [] + >>> test_cy_iter_many_options("catch StopIteration") + ['in except'] + >>> test_cy_iter_many_options("don't catch StopIteration") + [] + >>> try: + ... cy_iter_many_options("StopIteration with value").__next__() + ... except StopIteration as e: + ... print(e.args) + ("I'm a value!",) + """ + return list(cy_iter_many_options(option)) + -- cgit v1.2.1 From c8df6a37318770ade07645c40e7751b5907348a8 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 13 Jul 2022 13:09:14 +0200 Subject: Update changelog. 
--- CHANGES.rst | 54 ++++++++++++++++++++++++++++++++++++------------------ 1 file changed, 36 insertions(+), 18 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 38dc34781..94abc418e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -11,12 +11,15 @@ Features added * A new decorator ``@cython.dataclasses.dataclass`` was implemented that provides compile time dataclass generation capabilities to ``cdef`` classes (extension types). Patch by David Woods. (Github issue :issue:`2903`). ``kw_only`` dataclasses - added by Yury Sokov (Github issue :issue:`4794`) + added by Yury Sokov. (Github issue :issue:`4794`) * Named expressions (PEP 572) aka. assignment expressions (aka. the walrus operator ``:=``) were implemented. Patch by David Woods. (Github issue :issue:`2636`) +* Cython avoids raising ``StopIteration`` in ``__next__`` methods when possible. + Patch by David Woods. (Github issue :issue:`3447`) + * Some C++ library declarations were extended and fixed. Patches by Max Bachmann, Till Hoffmann, Julien Jerphanion, Wenjun Si. (Github issues :issue:`4530`, :issue:`4528`, :issue:`4710`, :issue:`4746`, @@ -60,6 +63,13 @@ Bugs fixed since it is relevant when passing them e.g. as argument into other fused functions. Patch by David Woods. (Github issue :issue:`4644`) +* The ``__self__`` attribute of fused functions reports its availability correctly + with ``hasattr()``. Patch by David Woods. + (Github issue :issue:`4808`) + +* ``pyximport`` no longer uses the deprecated ``imp`` module. + Patch by Matus Valo. (Github issue :issue:`4560`) + * The generated C code failed to compile in CPython 3.11a4 and later. (Github issue :issue:`4500`) @@ -72,7 +82,7 @@ Bugs fixed (Github issue :issue:`4329`) * Improve conversion between function pointers with non-identical but - compatible exception specifications. Patches by David Woods. + compatible exception specifications. Patches by David Woods. 
(Github issues :issue:`4770`, :issue:`4689`) * Improve compatibility with forthcoming CPython 3.12 release. @@ -80,7 +90,11 @@ Bugs fixed * Limited API C preprocessor warning is compatible with MSVC. Patch by Victor Molina Garcia. (Github issue :issue:`4826`) -* C compiler warnings fixed. Patch by mwtian. (Github issue :issue:`4831`) +* Some C compiler warnings were fixed. + Patch by mwtian. (Github issue :issue:`4831`) + +* The parser allowed some invalid spellings of ``...``. + Patch by 0dminnimda. (Github issue :issue:`4868`) * Includes all bug-fixes from the 0.29 branch up to the :ref:`0.29.31` release. @@ -93,9 +107,13 @@ Other changes for users who did not expect ``None`` to be allowed as input. To allow ``None``, use ``typing.Optional`` as in ``func(x: Optional[list])``. ``None`` is also automatically allowed when it is used as default argument, i.e. ``func(x: list = None)``. - Note that, for backwards compatibility reasons, this does not apply when using Cython's - C notation, as in ``func(list x)``. Here, ``None`` is still allowed, as always. - (Github issues :issue:`3883`, :issue:`2696`, :issue:`4669`) + ``int`` and ``float`` are now also recognised in type annotations and restrict the + value type at runtime. They were previously ignored. + Note that, for backwards compatibility reasons, the new behaviour does not apply when using + Cython's C notation, as in ``func(list x)``. Here, ``None`` is still allowed, as always. + Also, the ``annotation_typing`` directive can now be enabled and disabled more finely + within the module. + (Github issues :issue:`3883`, :issue:`2696`, :issue:`4669`, :issue:`4606`, :issue:`4886`) * The compile-time ``DEF`` and ``IF`` statements are deprecated and generate a warning. They should be replaced with normal constants, code generation or C macros. 
@@ -1008,31 +1026,31 @@ Bugs fixed * Use ``importlib.util.find_spec()`` instead of the deprecated ``importlib.find_loader()`` function when setting up the package path at import-time. Patch by Matti Picus. - (Github issue #4764) + (Github issue :issue:`4764`) * Require the C compiler to support the two-arg form of ``va_start`` on Python 3.10 and higher. Patch by Thomas Caswell. - (Github issue #4820) + (Github issue :issue:`4820`) * Make ``fused_type`` subscriptable in Shadow.py. Patch by Pfebrer. - (Github issue #4842) + (Github issue :issue:`4842`) * Fix the incorrect code generation of the target type in ``bytearray`` loops. Patch by Kenrick Everett. - (Github issue #4108) + (Github issue :issue:`4108`) * Silence some GCC ``-Wconversion`` warnings in C utility code. Patch by Lisandro Dalcin. - (Github issue #4854) + (Github issue :issue:`4854`) * Stop tuple multiplication being ignored in expressions such as ``[*(1,) * 2]``. Patch by David Woods. - (Github issue #4864) + (Github issue :issue:`4864`) * Ensure that object buffers (e.g. ``ndarray[object, ndim=1]``) containing ``NULL`` pointers are safe to use, returning ``None`` instead of the ``NULL`` pointer. Patch by Sebastian Berg. - (Github issue #4859) + (Github issue :issue:`4859`) .. _0.29.30: @@ -1045,7 +1063,7 @@ Bugs fixed * The GIL handling changes in 0.29.29 introduced a regression where objects could be deallocated without holding the GIL. - (Github issue :issue`4796`) + (Github issue :issue:`4796`) .. _0.29.29: @@ -1059,7 +1077,7 @@ Features added * Avoid acquiring the GIL at the end of nogil functions. This change was backported in order to avoid generating wrong C code that would trigger C compiler warnings with tracing support enabled. - Backport by Oleksandr Pavlyk. (Github issue :issue`4637`) + Backport by Oleksandr Pavlyk. 
(Github issue :issue:`4637`) Bugs fixed ---------- @@ -1075,15 +1093,15 @@ Bugs fixed * Cython now correctly generates Python methods for both the provided regular and reversed special numeric methods of extension types. - Patch by David Woods. (Github issue :issue`4750`) + Patch by David Woods. (Github issue :issue:`4750`) * Calling unbound extension type methods without arguments could raise an ``IndexError`` instead of a ``TypeError``. - Patch by David Woods. (Github issue :issue`4779`) + Patch by David Woods. (Github issue :issue:`4779`) * Calling unbound ``.__contains__()`` super class methods on some builtin base types could trigger an infinite recursion. - Patch by David Woods. (Github issue :issue`4785`) + Patch by David Woods. (Github issue :issue:`4785`) * The C union type in pure Python mode mishandled some field names. Patch by Jordan Brière. (Github issue :issue:`4727`) -- cgit v1.2.1 From 565f176f23dd91287d51cd44883af62fa0aaafa0 Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 6 Jul 2022 20:47:04 +0100 Subject: Add tests for NULL objects in memoryviews (GH-4871) Follow up on https://github.com/cython/cython/pull/4859 by adding tests for memoryviews too. Additional refactoring to avoid invalid decref calls on test failures. Instead, the item is safely cleared directly before the access. 
--- tests/buffers/bufaccess.pyx | 15 +++++---------- tests/memoryview/memslice.pyx | 45 ++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 49 insertions(+), 11 deletions(-) diff --git a/tests/buffers/bufaccess.pyx b/tests/buffers/bufaccess.pyx index 764d65db6..3144f613d 100644 --- a/tests/buffers/bufaccess.pyx +++ b/tests/buffers/bufaccess.pyx @@ -10,7 +10,7 @@ from __future__ import unicode_literals from cpython.object cimport PyObject -from cpython.ref cimport Py_INCREF, Py_DECREF +from cpython.ref cimport Py_INCREF, Py_DECREF, Py_CLEAR cimport cython import sys @@ -1013,17 +1013,14 @@ def check_object_nulled_1d(MockBuffer[object, ndim=1] buf, int idx, obj): >>> rc1 = get_refcount(a) >>> A = ObjectMockBuffer(None, [a, a]) >>> check_object_nulled_1d(A, 0, a) - >>> decref(a) # new reference "added" to A >>> check_object_nulled_1d(A, 1, a) - >>> decref(a) >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,)) >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride - >>> decref(a) >>> get_refcount(a) == rc1 True """ - cdef void **data = buf.buffer - data[idx] = NULL + cdef PyObject **data = buf.buffer + Py_CLEAR(data[idx]) res = buf[idx] # takes None buf[idx] = obj return res @@ -1037,14 +1034,12 @@ def check_object_nulled_2d(MockBuffer[object, ndim=2] buf, int idx1, int idx2, o >>> rc1 = get_refcount(a) >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2)) >>> check_object_nulled_2d(A, 0, 0, a) - >>> decref(a) # new reference "added" to A >>> check_object_nulled_2d(A, 1, 1, a) - >>> decref(a) >>> get_refcount(a) == rc1 True """ - cdef void **data = buf.buffer - data[idx1 + 2*idx2] = NULL + cdef PyObject **data = buf.buffer + Py_CLEAR(data[idx1 + 2*idx2]) res = buf[idx1, idx2] # takes None buf[idx1, idx2] = obj return res diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx index 4e06c4f41..5f6134135 100644 --- a/tests/memoryview/memslice.pyx +++ b/tests/memoryview/memslice.pyx @@ -7,7 +7,7 @@ from __future__ import 
unicode_literals from cpython.object cimport PyObject -from cpython.ref cimport Py_INCREF, Py_DECREF +from cpython.ref cimport Py_INCREF, Py_DECREF, Py_CLEAR cimport cython from cython cimport view @@ -1134,6 +1134,49 @@ def assign_temporary_to_object(object[:] buf): """ buf[1] = {3-2: 2+(2*4)-2} +@testcase +def check_object_nulled_1d(object[:] buf, int idx, obj): + """ + See comments on printbuf_object above. + + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a]) + >>> check_object_nulled_1d(A, 0, a) + >>> check_object_nulled_1d(A, 1, a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,)) + >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride + >>> get_refcount(a) == rc1 + True + """ + cdef ObjectMockBuffer omb = buf.base + cdef PyObject **data = (omb.buffer) + Py_CLEAR(data[idx]) + res = buf[idx] # takes None + buf[idx] = obj + return res + +@testcase +def check_object_nulled_2d(object[:, ::1] buf, int idx1, int idx2, obj): + """ + See comments on printbuf_object above. 
+ + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2)) + >>> check_object_nulled_2d(A, 0, 0, a) + >>> check_object_nulled_2d(A, 1, 1, a) + >>> get_refcount(a) == rc1 + True + """ + cdef ObjectMockBuffer omb = buf.base + cdef PyObject **data = (omb.buffer) + Py_CLEAR(data[idx1 + 2*idx2]) + res = buf[idx1, idx2] # takes None + buf[idx1, idx2] = obj + return res + + # # Test __cythonbufferdefaults__ # -- cgit v1.2.1 From 905fd831dd8ec6a674076cf2d1559e38887c1547 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 16 Jul 2022 07:19:12 +0100 Subject: Avoid conflict between propery names and function variables (GH-4845) Fixes https://github.com/cython/cython/issues/4836 Bug introduced by https://github.com/cython/cython/commit/8c7b0f3fb745aa7bd0afedfbeb862eecc5fdff0c --- Cython/Compiler/ParseTreeTransforms.py | 6 ++++++ Cython/Compiler/Visitor.py | 8 +++++--- tests/run/decorators.pyx | 23 +++++++++++++++++++++++ 3 files changed, 34 insertions(+), 3 deletions(-) diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py index 7e9207eea..89620cd45 100644 --- a/Cython/Compiler/ParseTreeTransforms.py +++ b/Cython/Compiler/ParseTreeTransforms.py @@ -2293,6 +2293,12 @@ if VALUE is not None: assmt.analyse_declarations(env) return assmt + def visit_func_outer_attrs(self, node): + # any names in the outer attrs should not be looked up in the function "seen_vars_stack" + stack = self.seen_vars_stack.pop() + super(AnalyseDeclarationsTransform, self).visit_func_outer_attrs(node) + self.seen_vars_stack.append(stack) + def visit_ScopedExprNode(self, node): env = self.current_env() node.analyse_declarations(env) diff --git a/Cython/Compiler/Visitor.py b/Cython/Compiler/Visitor.py index 4eabd6b83..d9be14df1 100644 --- a/Cython/Compiler/Visitor.py +++ b/Cython/Compiler/Visitor.py @@ -380,13 +380,15 @@ class EnvTransform(CythonTransform): self.env_stack.pop() def visit_FuncDefNode(self, node): - 
outer_attrs = node.outer_attrs - self.visitchildren(node, attrs=outer_attrs) + self.visit_func_outer_attrs(node) self.enter_scope(node, node.local_scope) - self.visitchildren(node, attrs=None, exclude=outer_attrs) + self.visitchildren(node, attrs=None, exclude=node.outer_attrs) self.exit_scope() return node + def visit_func_outer_attrs(self, node): + self.visitchildren(node, attrs=node.outer_attrs) + def visit_GeneratorBodyDefNode(self, node): self._process_children(node) return node diff --git a/tests/run/decorators.pyx b/tests/run/decorators.pyx index 54623e0cb..fc20235e2 100644 --- a/tests/run/decorators.pyx +++ b/tests/run/decorators.pyx @@ -81,3 +81,26 @@ def outer(arg1, arg2): def method(): return [4] return method() + +class HasProperty(object): + """ + >>> hp = HasProperty() + >>> hp.value + 0 + >>> hp.value = 1 + >>> hp.value + 1 + """ + def __init__(self) -> None: + self._value = 0 + + @property + def value(self) -> int: + return self._value + + # https://github.com/cython/cython/issues/4836 + # The variable tracker was confusing "value" in the decorator + # for "value" in the argument list + @value.setter + def value(self, value: int): + self._value = value -- cgit v1.2.1 From c5e6c183436d7477d15fd1e02c6cd9144bb74708 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 16 Jul 2022 09:25:26 +0100 Subject: Fixed over-zealous optimization of append attribute usage to "__Pyx_PyObject_Append" (GH-4834) Fixes https://github.com/cython/cython/issues/4828 --- Cython/Compiler/ExprNodes.py | 2 +- Cython/Compiler/Optimize.py | 2 +- tests/run/append.pyx | 34 ++++++++++++++++++++++++++++++++++ 3 files changed, 36 insertions(+), 2 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 4c325891a..ab228c552 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -330,8 +330,8 @@ class ExprNode(Node): # is_starred boolean Is a starred expression (e.g. 
'*a') # use_managed_ref boolean use ref-counted temps/assignments/etc. # result_is_used boolean indicates that the result will be dropped and the - # is_numpy_attribute boolean Is a Numpy module attribute # result_code/temp_result can safely be set to None + # is_numpy_attribute boolean Is a Numpy module attribute # annotation ExprNode or None PEP526 annotation for names or expressions result_ctype = None diff --git a/Cython/Compiler/Optimize.py b/Cython/Compiler/Optimize.py index a601d18c9..cea5970f6 100644 --- a/Cython/Compiler/Optimize.py +++ b/Cython/Compiler/Optimize.py @@ -3026,7 +3026,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin, """Optimistic optimisation as X.append() is almost always referring to a list. """ - if len(args) != 2 or node.result_is_used: + if len(args) != 2 or node.result_is_used or node.function.entry: return node return ExprNodes.PythonCapiCallNode( diff --git a/tests/run/append.pyx b/tests/run/append.pyx index 1976780d5..dcc3fe7c9 100644 --- a/tests/run/append.pyx +++ b/tests/run/append.pyx @@ -1,3 +1,5 @@ +cimport cython + class A: def append(self, x): print u"appending", x @@ -94,3 +96,35 @@ def method_name(): 'append' """ return [].append.__name__ + +@cython.test_assert_path_exists( + '//PythonCapiCallNode') +def append_optimized(probably_list): + """ + >>> l = [] + >>> append_optimized(l) + >>> l + [1] + """ + probably_list.append(1) + +cdef class AppendBug: + # https://github.com/cython/cython/issues/4828 + # if the attribute "append" is found it shouldn't be replaced with + # __Pyx_PyObject_Append + cdef object append + def __init__(self, append): + self.append = append + +@cython.test_fail_if_path_exists( + '//PythonCapiCallNode') +def specific_attribute(AppendBug a): + """ + >>> def append_to_default_arg(a, arg=[]): + ... arg.append(a) + ... 
return arg + >>> specific_attribute(AppendBug(append_to_default_arg)) + >>> append_to_default_arg(None) + [1, None] + """ + a.append(1) -- cgit v1.2.1 From 2f1c338ac4e7333823be84cc0d8df80acc5e23f3 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 16 Jul 2022 09:25:26 +0100 Subject: Fixed over-zealous optimization of append attribute usage to "__Pyx_PyObject_Append" (GH-4834) Fixes https://github.com/cython/cython/issues/4828 --- Cython/Compiler/ExprNodes.py | 2 +- Cython/Compiler/Optimize.py | 2 +- tests/run/append.pyx | 34 ++++++++++++++++++++++++++++++++++ 3 files changed, 36 insertions(+), 2 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 69632a4fe..2c5d70936 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -306,8 +306,8 @@ class ExprNode(Node): # Cached result of subexpr_nodes() # use_managed_ref boolean use ref-counted temps/assignments/etc. # result_is_used boolean indicates that the result will be dropped and the - # is_numpy_attribute boolean Is a Numpy module attribute # result_code/temp_result can safely be set to None + # is_numpy_attribute boolean Is a Numpy module attribute # annotation ExprNode or None PEP526 annotation for names or expressions result_ctype = None diff --git a/Cython/Compiler/Optimize.py b/Cython/Compiler/Optimize.py index 3cb77efe2..7e9435ba0 100644 --- a/Cython/Compiler/Optimize.py +++ b/Cython/Compiler/Optimize.py @@ -2860,7 +2860,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin, """Optimistic optimisation as X.append() is almost always referring to a list. 
""" - if len(args) != 2 or node.result_is_used: + if len(args) != 2 or node.result_is_used or node.function.entry: return node return ExprNodes.PythonCapiCallNode( diff --git a/tests/run/append.pyx b/tests/run/append.pyx index 1976780d5..dcc3fe7c9 100644 --- a/tests/run/append.pyx +++ b/tests/run/append.pyx @@ -1,3 +1,5 @@ +cimport cython + class A: def append(self, x): print u"appending", x @@ -94,3 +96,35 @@ def method_name(): 'append' """ return [].append.__name__ + +@cython.test_assert_path_exists( + '//PythonCapiCallNode') +def append_optimized(probably_list): + """ + >>> l = [] + >>> append_optimized(l) + >>> l + [1] + """ + probably_list.append(1) + +cdef class AppendBug: + # https://github.com/cython/cython/issues/4828 + # if the attribute "append" is found it shouldn't be replaced with + # __Pyx_PyObject_Append + cdef object append + def __init__(self, append): + self.append = append + +@cython.test_fail_if_path_exists( + '//PythonCapiCallNode') +def specific_attribute(AppendBug a): + """ + >>> def append_to_default_arg(a, arg=[]): + ... arg.append(a) + ... return arg + >>> specific_attribute(AppendBug(append_to_default_arg)) + >>> append_to_default_arg(None) + [1, None] + """ + a.append(1) -- cgit v1.2.1 From a2e4139993df6bd52a5f3db670dc1ca55fdedc9e Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 16 Jul 2022 09:34:11 +0100 Subject: Give better errors on size changes of PyVarObjects and reduce false positives (GH-4869) Fixes https://github.com/cython/cython/issues/4827 Some of the patch was copied from https://src.fedoraproject.org/rpms/Cython/pull-request/35#request_diff Allows the size of a type to be between basicsize and basicsize+itemsize since anything is this range is a reasonable size for a class to be, subject to implementations details of the object struct. Adds an explicit runtime test when an extern extension type is inherited from to make sure that it isn't a PyVarObject of unexpected size. 
--- Cython/Compiler/Nodes.py | 19 ++++- Cython/Compiler/PyrexTypes.py | 2 +- Cython/Utility/ExtensionTypes.c | 34 ++++++++ Cython/Utility/ImportExport.c | 24 ++++-- runtests.py | 1 + tests/errors/builtin_type_inheritance.pyx | 4 +- tests/pypy_bugs.txt | 3 + tests/run/builtin_type_inheritance_T608.pyx | 38 +-------- .../run/builtin_type_inheritance_T608_py2only.pyx | 42 ++++++++++ tests/run/extern_varobject_extensions.srctree | 94 ++++++++++++++++++++++ 10 files changed, 214 insertions(+), 47 deletions(-) create mode 100644 tests/run/builtin_type_inheritance_T608_py2only.pyx create mode 100644 tests/run/extern_varobject_extensions.srctree diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 298cc5705..751eb31f4 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -5242,7 +5242,8 @@ class CClassDefNode(ClassDefNode): error(base.pos, "Base class '%s' of type '%s' is final" % ( base_type, self.class_name)) elif base_type.is_builtin_type and \ - base_type.name in ('tuple', 'str', 'bytes'): + base_type.name in ('tuple', 'bytes'): + # str in Py2 is also included in this, but now checked at run-time error(base.pos, "inheritance from PyVarObject types like '%s' is not currently supported" % base_type.name) else: @@ -5511,6 +5512,22 @@ class CClassDefNode(ClassDefNode): )) code.putln("#endif") # if CYTHON_USE_TYPE_SPECS + base_type = type.base_type + while base_type: + if base_type.is_external and not base_type.objstruct_cname == "PyTypeObject": + # 'type' is special-cased because it is actually based on PyHeapTypeObject + # Variable length bases are allowed if the current class doesn't grow + code.putln("if (sizeof(%s%s) != sizeof(%s%s)) {" % ( + "" if type.typedef_flag else "struct ", type.objstruct_cname, + "" if base_type.typedef_flag else "struct ", base_type.objstruct_cname)) + code.globalstate.use_utility_code( + UtilityCode.load_cached("ValidateExternBase", "ExtensionTypes.c")) + code.put_error_if_neg(entry.pos, 
"__Pyx_validate_extern_base(%s)" % ( + type.base_type.typeptr_cname)) + code.putln("}") + break + base_type = base_type.base_type + code.putln("#if !CYTHON_COMPILING_IN_LIMITED_API") # FIXME: these still need to get initialised even with the limited-API for slot in TypeSlots.get_slot_table(code.globalstate.directives): diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py index 1316edddc..79e144ed1 100644 --- a/Cython/Compiler/PyrexTypes.py +++ b/Cython/Compiler/PyrexTypes.py @@ -1506,7 +1506,6 @@ class PyExtensionType(PyObjectType): # # name string # scope CClassScope Attribute namespace - # visibility string # typedef_flag boolean # base_type PyExtensionType or None # module_name string or None Qualified name of defining module @@ -1520,6 +1519,7 @@ class PyExtensionType(PyObjectType): # vtable_cname string Name of C method table definition # early_init boolean Whether to initialize early (as opposed to during module execution). # defered_declarations [thunk] Used to declare class hierarchies in order + # is_external boolean Defined in a extern block # check_size 'warn', 'error', 'ignore' What to do if tp_basicsize does not match # dataclass_fields OrderedDict nor None Used for inheriting from dataclasses diff --git a/Cython/Utility/ExtensionTypes.c b/Cython/Utility/ExtensionTypes.c index ec994a367..aa39a860a 100644 --- a/Cython/Utility/ExtensionTypes.c +++ b/Cython/Utility/ExtensionTypes.c @@ -564,3 +564,37 @@ static PyObject *{{func_name}}(PyObject *left, PyObject *right {{extra_arg_decl} } return __Pyx_NewRef(Py_NotImplemented); } + +/////////////// ValidateExternBase.proto /////////////// + +static int __Pyx_validate_extern_base(PyTypeObject *base); /* proto */ + +/////////////// ValidateExternBase /////////////// +//@requires: ObjectHandling.c::FormatTypeName + +static int __Pyx_validate_extern_base(PyTypeObject *base) { + Py_ssize_t itemsize; +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *py_itemsize; +#endif +#if 
!CYTHON_COMPILING_IN_LIMITED_API + itemsize = ((PyTypeObject *)base)->tp_itemsize; +#else + py_itemsize = PyObject_GetAttrString(base, "__itemsize__"); + if (!py_itemsize) + return -1; + itemsize = PyLong_AsSsize_t(py_itemsize); + Py_DECREF(py_itemsize); + py_itemsize = 0; + if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred()) + return -1; +#endif + if (itemsize) { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(base); + PyErr_Format(PyExc_TypeError, + "inheritance from PyVarObject types like '" __Pyx_FMT_TYPENAME "' not currently supported", b_name); + __Pyx_DECREF_TypeName(b_name); + return -1; + } + return 0; +} diff --git a/Cython/Utility/ImportExport.c b/Cython/Utility/ImportExport.c index 6ceba7efb..897657281 100644 --- a/Cython/Utility/ImportExport.c +++ b/Cython/Utility/ImportExport.c @@ -498,8 +498,10 @@ static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, PyObject *result = 0; char warning[200]; Py_ssize_t basicsize; + Py_ssize_t itemsize; #if CYTHON_COMPILING_IN_LIMITED_API PyObject *py_basicsize; + PyObject *py_itemsize; #endif result = PyObject_GetAttrString(module, class_name); @@ -513,6 +515,7 @@ static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, } #if !CYTHON_COMPILING_IN_LIMITED_API basicsize = ((PyTypeObject *)result)->tp_basicsize; + itemsize = ((PyTypeObject *)result)->tp_itemsize; #else py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); if (!py_basicsize) @@ -522,19 +525,30 @@ static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, py_basicsize = 0; if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) goto bad; + py_itemsize = PyObject_GetAttrString(result, "__itemsize__"); + if (!py_itemsize) + goto bad; + itemsize = PyLong_AsSsize_t(py_itemsize); + Py_DECREF(py_itemsize); + py_itemsize = 0; + if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; #endif - if ((size_t)basicsize < size) { + if ((size_t)(basicsize + itemsize) < size) { 
PyErr_Format(PyExc_ValueError, "%.200s.%.200s size changed, may indicate binary incompatibility. " "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); + module_name, class_name, size, basicsize+itemsize); goto bad; } - if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { + // varobjects almost have structs between basicsize and basicsize + itemsize + // but the struct isn't always one of the two limiting values + if (check_size == __Pyx_ImportType_CheckSize_Error && + ((size_t)basicsize > size || (size_t)(basicsize + itemsize) < size)) { PyErr_Format(PyExc_ValueError, "%.200s.%.200s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); + "Expected %zd from C header, got %zd-%zd from PyObject", + module_name, class_name, size, basicsize, basicsize+itemsize); goto bad; } else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { diff --git a/runtests.py b/runtests.py index 72608882a..a5c12e65e 100755 --- a/runtests.py +++ b/runtests.py @@ -467,6 +467,7 @@ VER_DEP_MODULES = { 'compile.extsetslice', 'compile.extdelslice', 'run.special_methods_T561_py2', + 'run.builtin_type_inheritance_T608_py2only', ]), (3,3) : (operator.lt, lambda x: x in ['build.package_compilation', 'build.cythonize_pep420_namespace', diff --git a/tests/errors/builtin_type_inheritance.pyx b/tests/errors/builtin_type_inheritance.pyx index 1c6ad31e1..a85f7a133 100644 --- a/tests/errors/builtin_type_inheritance.pyx +++ b/tests/errors/builtin_type_inheritance.pyx @@ -8,11 +8,9 @@ cdef class MyTuple(tuple): cdef class MyBytes(bytes): pass -cdef class MyStr(str): # only in Py2, but can't know that during compilation - pass +# str is also included in this in Py2, but checked at runtime instead _ERRORS = """ 5:19: inheritance from PyVarObject types like 'tuple' is not currently supported 8:19: inheritance 
from PyVarObject types like 'bytes' is not currently supported -11:17: inheritance from PyVarObject types like 'str' is not currently supported """ diff --git a/tests/pypy_bugs.txt b/tests/pypy_bugs.txt index 1004a93e4..5a27265ee 100644 --- a/tests/pypy_bugs.txt +++ b/tests/pypy_bugs.txt @@ -61,3 +61,6 @@ run.exttype_dealloc # bugs in cpyext run.special_methods_T561 run.special_methods_T561_py2 + +# unicode is a PyVarObject on PyPy3 +run.builtin_type_inheritance_T608 diff --git a/tests/run/builtin_type_inheritance_T608.pyx b/tests/run/builtin_type_inheritance_T608.pyx index 1214b6841..d03558a25 100644 --- a/tests/run/builtin_type_inheritance_T608.pyx +++ b/tests/run/builtin_type_inheritance_T608.pyx @@ -1,42 +1,6 @@ # ticket: t608 -cdef class MyInt(int): - """ - >>> MyInt(2) == 2 - True - >>> MyInt(2).attr is None - True - """ - cdef readonly object attr - -cdef class MyInt2(int): - """ - >>> MyInt2(2) == 2 - True - >>> MyInt2(2).attr is None - True - >>> MyInt2(2).test(3) - 5 - """ - cdef readonly object attr - - def test(self, arg): - return self._test(arg) - - cdef _test(self, arg): - return self + arg - -cdef class MyInt3(MyInt2): - """ - >>> MyInt3(2) == 2 - True - >>> MyInt3(2).attr is None - True - >>> MyInt3(2).test(3) - 6 - """ - cdef _test(self, arg): - return self + arg + 1 +# see "builtin_type_inheritance_T608_py2only.pyx" for inheritance from int cdef class MyFloat(float): """ diff --git a/tests/run/builtin_type_inheritance_T608_py2only.pyx b/tests/run/builtin_type_inheritance_T608_py2only.pyx new file mode 100644 index 000000000..b10a2610a --- /dev/null +++ b/tests/run/builtin_type_inheritance_T608_py2only.pyx @@ -0,0 +1,42 @@ +# ticket: t608 + +# This only works reliably in Python2. In Python3 ints are variable-sized. 
+# You get away with it for small ints but it's a bad idea + +cdef class MyInt(int): + """ + >>> MyInt(2) == 2 + True + >>> MyInt(2).attr is None + True + """ + cdef readonly object attr + +cdef class MyInt2(int): + """ + >>> MyInt2(2) == 2 + True + >>> MyInt2(2).attr is None + True + >>> MyInt2(2).test(3) + 5 + """ + cdef readonly object attr + + def test(self, arg): + return self._test(arg) + + cdef _test(self, arg): + return self + arg + +cdef class MyInt3(MyInt2): + """ + >>> MyInt3(2) == 2 + True + >>> MyInt3(2).attr is None + True + >>> MyInt3(2).test(3) + 6 + """ + cdef _test(self, arg): + return self + arg + 1 diff --git a/tests/run/extern_varobject_extensions.srctree b/tests/run/extern_varobject_extensions.srctree new file mode 100644 index 000000000..c927b8147 --- /dev/null +++ b/tests/run/extern_varobject_extensions.srctree @@ -0,0 +1,94 @@ +# mode: run + +PYTHON setup.py build_ext --inplace +PYTHON -c "import classes" +PYTHON -c "import test_inherit" + +######## setup.py ######## + +from Cython.Build.Dependencies import cythonize + +from distutils.core import setup + +setup( + ext_modules=cythonize("*.pyx"), +) + +###### dummy_module.py ########### + +tpl = tuple +lst = list + +###### classes.pxd ################ + +cdef extern from *: + # apart from list, these are all variable sized types + # and Cython shouldn't trip up about the struct size + ctypedef class dummy_module.tpl [object PyTupleObject]: + pass + ctypedef class dummy_module.lst [object PyListObject]: + pass + ctypedef class types.CodeType [object PyCodeObject]: + pass + # Note that bytes doesn't work here because it further + # the tp_basicsize to save space + +##### classes.pyx ################# + +def check_tuple(tpl x): + assert isinstance(x, tuple) + +def check_list(lst x): + assert isinstance(x, list) + +def check_code(CodeType x): + import types + assert isinstance(x, types.CodeType) + +check_tuple((1, 2)) +check_list([1, 2]) +check_code(eval("lambda: None").__code__) + +##### 
failed_inherit1.pyx ############# + +from classes cimport tpl + +cdef class SuperTuple(tpl): + cdef int a # importing this gives an error message + +##### failed_inherit2.pyx ############# + +from classes cimport tpl + +cdef class SuperTuple(tpl): + # adding a method creates a vtab so should also fail + cdef int func(self): + return 1 + +##### successful_inherit.pyx ######### + +from classes cimport lst, tpl + +cdef class SuperList(lst): + cdef int a # This works OK + +cdef class SuperTuple(tpl): + # This is actually OK because it doesn't add anything + pass + +##### test_inherit.py ################ + +try: + import failed_inherit1 +except TypeError as e: + assert e.args[0] == "inheritance from PyVarObject types like 'tuple' not currently supported", e.args[0] +else: + assert False +try: + import failed_inherit2 +except TypeError as e: + assert e.args[0] == "inheritance from PyVarObject types like 'tuple' not currently supported", e.args[0] +else: + assert False + +import successful_inherit -- cgit v1.2.1 From 6414a07ec88b60d604daf0e51fd850ee974afdcb Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sat, 16 Jul 2022 10:47:05 +0200 Subject: Extend test to make sure that cython.declare(int) interprets "int" as C int and not Python int. 
--- tests/run/pure_py.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/run/pure_py.py b/tests/run/pure_py.py index 93f737453..a8dc5b014 100644 --- a/tests/run/pure_py.py +++ b/tests/run/pure_py.py @@ -33,17 +33,18 @@ def test_sizeof(): def test_declare(n): """ >>> test_declare(100) - (100, 100) + (100, 100, 100) >>> test_declare(100.5) - (100, 100) + (100, 100, 100) """ x = cython.declare(cython.int) y = cython.declare(cython.int, n) + z = cython.declare(int, n) # C int if cython.compiled: cython.declare(xx=cython.int, yy=cython.long) i = cython.sizeof(xx) ptr = cython.declare(cython.p_int, cython.address(y)) - return y, ptr[0] + return y, z, ptr[0] @cython.locals(x=cython.double, n=cython.int) -- cgit v1.2.1 From f236f652c5e9efc2beefb7bb696654fdd6ca30b8 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 16 Jul 2022 16:27:45 +0100 Subject: Refactor parsing of named expressions to bring it closer to CPython's LL parser (GH-4846) I've tried to rewrite it to largely follow the rules from the most recent version of the Python LL parser, so avoiding conditional parameters. 
See https://github.com/cython/cython/issues/4595 --- Cython/Compiler/Parsing.pxd | 13 +++--- Cython/Compiler/Parsing.py | 102 ++++++++++++++++++++++++++++---------------- 2 files changed, 74 insertions(+), 41 deletions(-) diff --git a/Cython/Compiler/Parsing.pxd b/Cython/Compiler/Parsing.pxd index 7f4a1c220..1be718581 100644 --- a/Cython/Compiler/Parsing.pxd +++ b/Cython/Compiler/Parsing.pxd @@ -23,15 +23,17 @@ cdef tuple p_binop_operator(PyrexScanner s) cdef p_binop_expr(PyrexScanner s, ops, p_sub_expr_func p_sub_expr) cdef p_lambdef(PyrexScanner s, bint allow_conditional=*) cdef p_lambdef_nocond(PyrexScanner s) -cdef p_test(PyrexScanner s, bint allow_assignment_expression=*) -cdef p_test_nocond(PyrexScanner s, bint allow_assignment_expression=*) -cdef p_walrus_test(PyrexScanner s, bint allow_assignment_expression=*) +cdef p_test(PyrexScanner s) +cdef p_test_allow_walrus_after(PyrexScanner s) +cdef p_test_nocond(PyrexScanner s) +cdef p_namedexpr_test(PyrexScanner s) cdef p_or_test(PyrexScanner s) cdef p_rassoc_binop_expr(PyrexScanner s, unicode op, p_sub_expr_func p_subexpr) cdef p_and_test(PyrexScanner s) cdef p_not_test(PyrexScanner s) cdef p_comparison(PyrexScanner s) -cdef p_test_or_starred_expr(PyrexScanner s, bint is_expression=*) +cdef p_test_or_starred_expr(PyrexScanner s) +cdef p_namedexpr_test_or_starred_expr(PyrexScanner s) cdef p_starred_expr(PyrexScanner s) cdef p_cascaded_cmp(PyrexScanner s) cdef p_cmp_op(PyrexScanner s) @@ -85,9 +87,10 @@ cdef p_dict_or_set_maker(PyrexScanner s) cdef p_backquote_expr(PyrexScanner s) cdef p_simple_expr_list(PyrexScanner s, expr=*) cdef p_test_or_starred_expr_list(PyrexScanner s, expr=*) +cdef p_namedexpr_test_or_starred_expr_list(s, expr=*) cdef p_testlist(PyrexScanner s) cdef p_testlist_star_expr(PyrexScanner s) -cdef p_testlist_comp(PyrexScanner s, bint is_expression=*) +cdef p_testlist_comp(PyrexScanner s) cdef p_genexp(PyrexScanner s, expr) #------------------------------------------------------- diff --git 
a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 1a31e2697..938e16e99 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -120,9 +120,9 @@ def p_lambdef(s, allow_conditional=True): s, terminator=':', annotated=False) s.expect(':') if allow_conditional: - expr = p_test(s, allow_assignment_expression=False) + expr = p_test(s) else: - expr = p_test_nocond(s, allow_assignment_expression=False) + expr = p_test_nocond(s) return ExprNodes.LambdaNode( pos, args = args, star_arg = star_arg, starstar_arg = starstar_arg, @@ -131,49 +131,64 @@ def p_lambdef(s, allow_conditional=True): #lambdef_nocond: 'lambda' [varargslist] ':' test_nocond def p_lambdef_nocond(s): - return p_lambdef(s, allow_conditional=False) + return p_lambdef(s) #test: or_test ['if' or_test 'else' test] | lambdef -def p_test(s, allow_assignment_expression=True): +def p_test(s): + # The check for a following ':=' is only for error reporting purposes. + # It simply changes a + # expected ')', found ':=' + # message into something a bit more descriptive. + # It is close to what the PEG parser does in CPython, where an expression has + # a lookahead assertion that it isn't followed by ':=' + expr = p_test_allow_walrus_after(s) + if s.sy == ':=': + s.error("invalid syntax: assignment expression not allowed in this context") + return expr + +def p_test_allow_walrus_after(s): if s.sy == 'lambda': return p_lambdef(s) pos = s.position() - expr = p_walrus_test(s, allow_assignment_expression) + expr = p_or_test(s) if s.sy == 'if': s.next() - # Assignment expressions are always allowed here - # even if they wouldn't be allowed in the expression as a whole. 
- test = p_walrus_test(s) + test = p_or_test(s) s.expect('else') other = p_test(s) return ExprNodes.CondExprNode(pos, test=test, true_val=expr, false_val=other) else: return expr + #test_nocond: or_test | lambdef_nocond -def p_test_nocond(s, allow_assignment_expression=True): +def p_test_nocond(s): if s.sy == 'lambda': return p_lambdef_nocond(s) else: - return p_walrus_test(s, allow_assignment_expression) - -# walrurus_test: IDENT := test | or_test - -def p_walrus_test(s, allow_assignment_expression=True): - lhs = p_or_test(s) + return p_or_test(s) + +def p_namedexpr_test(s): + # defined in the LL parser as + # namedexpr_test: test [':=' test] + # The requirement that the LHS is a name is not enforced in the grammar. + # For comparison the PEG parser does: + # 1. look for "name :=", if found it's definitely a named expression + # so look for expression + # 2. Otherwise, look for expression + lhs = p_test_allow_walrus_after(s) if s.sy == ':=': position = s.position() - if not allow_assignment_expression: - s.error("invalid syntax: assignment expression not allowed in this context") - elif not lhs.is_name: - s.error("Left-hand side of assignment expression must be an identifier") + if not lhs.is_name: + s.error("Left-hand side of assignment expression must be an identifier", fatal=False) s.next() rhs = p_test(s) return ExprNodes.AssignmentExpressionNode(position, lhs=lhs, rhs=rhs) return lhs + #or_test: and_test ('or' and_test)* COMMON_BINOP_MISTAKES = {'||': 'or', '&&': 'and'} @@ -227,11 +242,17 @@ def p_comparison(s): n1.cascade = p_cascaded_cmp(s) return n1 -def p_test_or_starred_expr(s, is_expression=False): +def p_test_or_starred_expr(s): + if s.sy == '*': + return p_starred_expr(s) + else: + return p_test(s) + +def p_namedexpr_test_or_starred_expr(s): if s.sy == '*': return p_starred_expr(s) else: - return p_test(s, allow_assignment_expression=is_expression) + return p_namedexpr_test(s) def p_starred_expr(s): pos = s.position() @@ -505,7 +526,7 @@ def 
p_call_parse_args(s, allow_genexp=True): keyword_args.append(p_test(s)) starstar_seen = True else: - arg = p_test(s) + arg = p_namedexpr_test(s) if s.sy == '=': s.next() if not arg.is_name: @@ -514,7 +535,7 @@ def p_call_parse_args(s, allow_genexp=True): encoded_name = s.context.intern_ustring(arg.name) keyword = ExprNodes.IdentifierStringNode( arg.pos, value=encoded_name) - arg = p_test(s, allow_assignment_expression=False) + arg = p_test(s) keyword_args.append((keyword, arg)) else: if keyword_args: @@ -690,7 +711,7 @@ def p_atom(s): elif s.sy == 'yield': result = p_yield_expression(s) else: - result = p_testlist_comp(s, is_expression=True) + result = p_testlist_comp(s) s.expect(')') return result elif sy == '[': @@ -1261,7 +1282,7 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw): # since PEP 448: # list_display ::= "[" [listmaker] "]" -# listmaker ::= (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] ) +# listmaker ::= (named_test|star_expr) ( comp_for | (',' (named_test|star_expr))* [','] ) # comp_iter ::= comp_for | comp_if # comp_for ::= ["async"] "for" expression_list "in" testlist [comp_iter] # comp_if ::= "if" test [comp_iter] @@ -1274,7 +1295,7 @@ def p_list_maker(s): s.expect(']') return ExprNodes.ListNode(pos, args=[]) - expr = p_test_or_starred_expr(s, is_expression=True) + expr = p_namedexpr_test_or_starred_expr(s) if s.sy in ('for', 'async'): if expr.is_starred: s.error("iterable unpacking cannot be used in comprehension") @@ -1289,7 +1310,7 @@ def p_list_maker(s): # (merged) list literal if s.sy == ',': s.next() - exprs = p_test_or_starred_expr_list(s, expr) + exprs = p_namedexpr_test_or_starred_expr_list(s, expr) else: exprs = [expr] s.expect(']') @@ -1474,7 +1495,16 @@ def p_simple_expr_list(s, expr=None): def p_test_or_starred_expr_list(s, expr=None): exprs = expr is not None and [expr] or [] while s.sy not in expr_terminators: - exprs.append(p_test_or_starred_expr(s, is_expression=(expr is not None))) + 
exprs.append(p_test_or_starred_expr(s)) + if s.sy != ',': + break + s.next() + return exprs + +def p_namedexpr_test_or_starred_expr_list(s, expr=None): + exprs = expr is not None and [expr] or [] + while s.sy not in expr_terminators: + exprs.append(p_namedexpr_test_or_starred_expr(s)) if s.sy != ',': break s.next() @@ -1507,12 +1537,12 @@ def p_testlist_star_expr(s): # testlist_comp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] ) -def p_testlist_comp(s, is_expression=False): +def p_testlist_comp(s): pos = s.position() - expr = p_test_or_starred_expr(s, is_expression) + expr = p_namedexpr_test_or_starred_expr(s) if s.sy == ',': s.next() - exprs = p_test_or_starred_expr_list(s, expr) + exprs = p_namedexpr_test_or_starred_expr_list(s, expr) return ExprNodes.TupleNode(pos, args = exprs) elif s.sy in ('for', 'async'): return p_genexp(s, expr) @@ -1900,7 +1930,7 @@ def p_if_statement(s): def p_if_clause(s): pos = s.position() - test = p_test(s) + test = p_namedexpr_test(s) body = p_suite(s) return Nodes.IfClauseNode(pos, condition = test, body = body) @@ -1916,7 +1946,7 @@ def p_while_statement(s): # s.sy == 'while' pos = s.position() s.next() - test = p_test(s) + test = p_namedexpr_test(s) body = p_suite(s) else_clause = p_else_clause(s) return Nodes.WhileStatNode(pos, @@ -3096,11 +3126,11 @@ def p_c_arg_decl(s, ctx, in_pyfunc, cmethod_flag = 0, nonempty = 0, default = ExprNodes.NoneNode(pos) s.next() elif 'inline' in ctx.modifiers: - default = p_test(s, allow_assignment_expression=False) + default = p_test(s) else: error(pos, "default values cannot be specified in pxd files, use ? or *") else: - default = p_test(s, allow_assignment_expression=False) + default = p_test(s) return Nodes.CArgDeclNode(pos, base_type = base_type, declarator = declarator, @@ -3978,5 +4008,5 @@ def p_annotation(s): then it is not a bug. 
""" pos = s.position() - expr = p_test(s, allow_assignment_expression=False) + expr = p_test(s) return ExprNodes.AnnotationNode(pos, expr=expr) -- cgit v1.2.1 From 3de56e25789de3576b5d4e7ff9bb99efea35bdf6 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 16 Jul 2022 16:32:28 +0100 Subject: Make memoryviewslice and cython.array be collections.abc.Sequence (GH-4817) The main reason to do this is so that they'll work in sequence patterns in structural pattern matching in Python 3.10+. Since the builtin "memoryview" type and "array.array" are sequences, I think this is reasonable. --- Cython/Utility/MemoryView.pyx | 33 ++++++++++++++++++++++++++++++ tests/compile/fused_redeclare_T3111.pyx | 12 +++++------ tests/memoryview/cythonarray.pyx | 36 +++++++++++++++++++++++++++++++++ tests/memoryview/memoryview.pyx | 33 ++++++++++++++++++++++++++++++ 4 files changed, 108 insertions(+), 6 deletions(-) diff --git a/Cython/Utility/MemoryView.pyx b/Cython/Utility/MemoryView.pyx index 990319e05..9361249fb 100644 --- a/Cython/Utility/MemoryView.pyx +++ b/Cython/Utility/MemoryView.pyx @@ -93,6 +93,17 @@ cdef extern from "": void free(void *) nogil void *memcpy(void *dest, void *src, size_t n) nogil +# the sequence abstract base class +cdef object __pyx_collections_abc_Sequence "__pyx_collections_abc_Sequence" +try: + if __import__("sys").version_info >= (3, 3): + __pyx_collections_abc_Sequence = __import__("collections.abc").abc.Sequence + else: + __pyx_collections_abc_Sequence = __import__("collections").Sequence +except: + # it isn't a big problem if this fails + __pyx_collections_abc_Sequence = None + # ### cython.array class # @@ -224,6 +235,12 @@ cdef class array: def __setitem__(self, item, value): self.memview[item] = value + # Sequence methods + try: + count = __pyx_collections_abc_Sequence.count + index = __pyx_collections_abc_Sequence.index + except: + pass @cname("__pyx_array_allocate_buffer") cdef int _allocate_buffer(array self) except -1: @@ -970,6 +987,22 @@ cdef 
class _memoryviewslice(memoryview): cdef _get_base(self): return self.from_object + # Sequence methods + try: + count = __pyx_collections_abc_Sequence.count + index = __pyx_collections_abc_Sequence.index + except: + pass + +try: + if __pyx_collections_abc_Sequence: + # The main value of registering _memoryviewslice as a + # Sequence is that it can be used in structural pattern + # matching in Python 3.10+ + __pyx_collections_abc_Sequence.register(_memoryviewslice) + __pyx_collections_abc_Sequence.register(array) +except: + pass # ignore failure, it's a minor issue @cname('__pyx_memoryview_fromslice') cdef memoryview_fromslice({{memviewslice_name}} memviewslice, diff --git a/tests/compile/fused_redeclare_T3111.pyx b/tests/compile/fused_redeclare_T3111.pyx index 04862ae88..d91f1d132 100644 --- a/tests/compile/fused_redeclare_T3111.pyx +++ b/tests/compile/fused_redeclare_T3111.pyx @@ -27,10 +27,10 @@ _WARNINGS = """ 36:10: 'cpdef_cname_method' redeclared # from MemoryView.pyx -958:29: Ambiguous exception value, same as default return value: 0 -958:29: Ambiguous exception value, same as default return value: 0 -983:46: Ambiguous exception value, same as default return value: 0 -983:46: Ambiguous exception value, same as default return value: 0 -1073:29: Ambiguous exception value, same as default return value: 0 -1073:29: Ambiguous exception value, same as default return value: 0 +975:29: Ambiguous exception value, same as default return value: 0 +975:29: Ambiguous exception value, same as default return value: 0 +1016:46: Ambiguous exception value, same as default return value: 0 +1016:46: Ambiguous exception value, same as default return value: 0 +1106:29: Ambiguous exception value, same as default return value: 0 +1106:29: Ambiguous exception value, same as default return value: 0 """ diff --git a/tests/memoryview/cythonarray.pyx b/tests/memoryview/cythonarray.pyx index 0dc823581..6bfd7397e 100644 --- a/tests/memoryview/cythonarray.pyx +++ 
b/tests/memoryview/cythonarray.pyx @@ -286,3 +286,39 @@ def test_char_array_in_python_api(*shape): arr = array(shape=shape, itemsize=sizeof(char), format='c', mode='c') arr[:] = b'x' return arr + +def test_is_Sequence(): + """ + >>> test_is_Sequence() + 1 + 1 + True + """ + import sys + if sys.version_info < (3, 3): + from collections import Sequence + else: + from collections.abc import Sequence + + arr = array(shape=(5,), itemsize=sizeof(char), format='c', mode='c') + for i in range(arr.shape[0]): + arr[i] = f'{i}'.encode('ascii') + print(arr.count(b'1')) # test for presence of added collection method + print(arr.index(b'1')) # test for presence of added collection method + + if sys.version_info >= (3, 10): + # test structural pattern match in Python + # (because Cython hasn't implemented it yet, and because the details + # of what Python considers a sequence are important) + globs = {'arr': arr} + exec(""" +match arr: + case [*_]: + res = True + case _: + res = False +""", globs) + assert globs['res'] + + return isinstance(arr, Sequence) + diff --git a/tests/memoryview/memoryview.pyx b/tests/memoryview/memoryview.pyx index bb8b73780..d2832a0b6 100644 --- a/tests/memoryview/memoryview.pyx +++ b/tests/memoryview/memoryview.pyx @@ -1205,3 +1205,36 @@ def test_conversion_failures(): assert get_refcount(dmb) == dmb_before, "before %s after %s" % (dmb_before, get_refcount(dmb)) else: assert False, "Conversion should fail!" 
+ +def test_is_Sequence(double[:] a): + """ + >>> test_is_Sequence(DoubleMockBuffer(None, range(6), shape=(6,))) + 1 + 1 + True + """ + if sys.version_info < (3, 3): + from collections import Sequence + else: + from collections.abc import Sequence + + for i in range(a.shape[0]): + a[i] = i + print(a.count(1.0)) # test for presence of added collection method + print(a.index(1.0)) # test for presence of added collection method + + if sys.version_info >= (3, 10): + # test structural pattern match in Python + # (because Cython hasn't implemented it yet, and because the details + # of what Python considers a sequence are important) + globs = {'arr': a} + exec(""" +match arr: + case [*_]: + res = True + case _: + res = False +""", globs) + assert globs['res'] + + return isinstance(a, Sequence) -- cgit v1.2.1 From 1777f13461f971d064bd1644b02d92b350e6e7d1 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 16 Jul 2022 16:36:45 +0100 Subject: Make it easier to restore scanner state during parsing phase (GH-4813) Things like match-case (essentially anything that uses Python's new PEG parser capacities) are going to have to be implemented by trying to parse something, failing, then going back and trying to parse something else. This commit gets the initial work done to make this easier to do. Several error positions change in this effort, but this seems to improve the error reporting overall. 
--- Cython/Compiler/Errors.py | 2 +- Cython/Compiler/Nodes.py | 4 +- Cython/Compiler/Parsing.py | 31 +++--- Cython/Compiler/Scanning.pxd | 3 +- Cython/Compiler/Scanning.py | 64 ++++++++++--- Cython/Compiler/Tests/TestScanning.py | 136 +++++++++++++++++++++++++++ Cython/Plex/Scanners.pxd | 6 +- Cython/Plex/Scanners.py | 39 ++++++-- tests/errors/cpp_object_template.pyx | 6 +- tests/errors/cppexc_non_extern.pyx | 4 +- tests/errors/e_argdefault.pyx | 2 +- tests/errors/e_bufaccess.pyx | 2 +- tests/errors/e_cpp_only_features.pyx | 2 +- tests/errors/e_cstruct.pyx | 2 +- tests/errors/e_public_cdef_private_types.pyx | 4 +- tests/errors/e_typing_errors.pyx | 24 ++--- tests/errors/fused_types.pyx | 6 +- tests/memoryview/error_declarations.pyx | 18 ++-- 18 files changed, 279 insertions(+), 76 deletions(-) create mode 100644 Cython/Compiler/Tests/TestScanning.py diff --git a/Cython/Compiler/Errors.py b/Cython/Compiler/Errors.py index 512f05638..bde320732 100644 --- a/Cython/Compiler/Errors.py +++ b/Cython/Compiler/Errors.py @@ -45,7 +45,7 @@ def context(position): s = u"[unprintable code]\n" else: s = u''.join(F[max(0, position[1]-6):position[1]]) - s = u'...\n%s%s^\n' % (s, u' '*(position[2]-1)) + s = u'...\n%s%s^\n' % (s, u' '*(position[2])) s = u'%s\n%s%s\n' % (u'-'*60, s, u'-'*60) return s diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 751eb31f4..4d6d95e79 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -2616,10 +2616,10 @@ class CFuncDefNode(FuncDefNode): # it really is impossible to reason about what the user wants to happens # if they've specified a C++ exception translation function. Therefore, # raise an error. 
- error(self.cfunc_declarator.pos, + error(self.pos, "Only extern functions can throw C++ exceptions.") else: - warning(self.cfunc_declarator.pos, + warning(self.pos, "Only extern functions can throw C++ exceptions.", 2) for formal_arg, type_arg in zip(self.args, typ.args): diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 938e16e99..f81ff22fd 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -2419,13 +2419,14 @@ def p_statement(s, ctx, first_statement = 0): else: if s.sy == 'IDENT' and s.systring == 'async': ident_name = s.systring + ident_pos = s.position() # PEP 492 enables the async/await keywords when it spots "async def ..." s.next() if s.sy == 'def': return p_async_statement(s, ctx, decorators) elif decorators: s.error("Decorators can only be followed by functions or classes") - s.put_back(u'IDENT', ident_name) # re-insert original token + s.put_back(u'IDENT', ident_name, ident_pos) # re-insert original token return p_simple_statement_list(s, ctx, first_statement=first_statement) @@ -2637,20 +2638,22 @@ def p_c_simple_base_type(s, nonempty, templates=None): name = p_ident(s) else: name = s.systring + name_pos = s.position() s.next() if nonempty and s.sy != 'IDENT': # Make sure this is not a declaration of a variable or function. 
if s.sy == '(': + old_pos = s.position() s.next() if (s.sy == '*' or s.sy == '**' or s.sy == '&' or (s.sy == 'IDENT' and s.systring in calling_convention_words)): - s.put_back(u'(', u'(') + s.put_back(u'(', u'(', old_pos) else: - s.put_back(u'(', u'(') - s.put_back(u'IDENT', name) + s.put_back(u'(', u'(', old_pos) + s.put_back(u'IDENT', name, name_pos) name = None elif s.sy not in ('*', '**', '[', '&'): - s.put_back(u'IDENT', name) + s.put_back(u'IDENT', name, name_pos) name = None type_node = Nodes.CSimpleBaseTypeNode(pos, @@ -2724,13 +2727,13 @@ def is_memoryviewslice_access(s): # a memoryview slice declaration is distinguishable from a buffer access # declaration by the first entry in the bracketed list. The buffer will # not have an unnested colon in the first entry; the memoryview slice will. - saved = [(s.sy, s.systring)] + saved = [(s.sy, s.systring, s.position())] s.next() retval = False if s.systring == ':': retval = True elif s.sy == 'INT': - saved.append((s.sy, s.systring)) + saved.append((s.sy, s.systring, s.position())) s.next() if s.sy == ':': retval = True @@ -2765,15 +2768,16 @@ def looking_at_expr(s): elif s.sy == 'IDENT': is_type = False name = s.systring + name_pos = s.position() dotted_path = [] s.next() while s.sy == '.': s.next() - dotted_path.append(s.systring) + dotted_path.append((s.systring, s.position())) s.expect('IDENT') - saved = s.sy, s.systring + saved = s.sy, s.systring, s.position() if s.sy == 'IDENT': is_type = True elif s.sy == '*' or s.sy == '**': @@ -2791,10 +2795,10 @@ def looking_at_expr(s): dotted_path.reverse() for p in dotted_path: - s.put_back(u'IDENT', p) - s.put_back(u'.', u'.') + s.put_back(u'IDENT', *p) + s.put_back(u'.', u'.', p[1]) # gets the position slightly wrong - s.put_back(u'IDENT', name) + s.put_back(u'IDENT', name, name_pos) return not is_type and saved[0] else: return True @@ -2806,9 +2810,10 @@ def looking_at_base_type(s): def looking_at_dotted_name(s): if s.sy == 'IDENT': name = s.systring + name_pos = 
s.position() s.next() result = s.sy == '.' - s.put_back(u'IDENT', name) + s.put_back(u'IDENT', name, name_pos) return result else: return 0 diff --git a/Cython/Compiler/Scanning.pxd b/Cython/Compiler/Scanning.pxd index 96d26c540..2d64565c0 100644 --- a/Cython/Compiler/Scanning.pxd +++ b/Cython/Compiler/Scanning.pxd @@ -36,6 +36,7 @@ cdef class PyrexScanner(Scanner): cdef readonly bint async_enabled cdef public unicode sy cdef public systring # EncodedString + cdef public list put_back_on_failure cdef Py_ssize_t current_level(self) #cpdef commentline(self, text) @@ -51,7 +52,6 @@ cdef class PyrexScanner(Scanner): ##cdef next(self) ##cdef peek(self) #cpdef put_back(self, sy, systring) - #cdef unread(self, token, value) ##cdef bint expect(self, what, message = *) except -2 ##cdef expect_keyword(self, what, message = *) ##cdef expected(self, what, message = *) @@ -60,3 +60,4 @@ cdef class PyrexScanner(Scanner): ##cdef expect_newline(self, message=*, bint ignore_semicolon=*) ##cdef int enter_async(self) except -1 ##cdef int exit_async(self) except -1 + cdef void error_at_scanpos(self, str message) except * diff --git a/Cython/Compiler/Scanning.py b/Cython/Compiler/Scanning.py index 5c5963021..d12d9d305 100644 --- a/Cython/Compiler/Scanning.py +++ b/Cython/Compiler/Scanning.py @@ -13,11 +13,12 @@ cython.declare(make_lexicon=object, lexicon=object, import os import platform from unicodedata import normalize +from contextlib import contextmanager from .. 
import Utils from ..Plex.Scanners import Scanner from ..Plex.Errors import UnrecognizedInput -from .Errors import error, warning +from .Errors import error, warning, hold_errors, release_errors, CompileError from .Lexicon import any_string_prefix, make_lexicon, IDENT from .Future import print_function @@ -300,6 +301,8 @@ class PyrexScanner(Scanner): # compile_time_env dict Environment for conditional compilation # compile_time_eval boolean In a true conditional compilation context # compile_time_expr boolean In a compile-time expression context + # put_back_on_failure list or None If set, this records states so the tentatively_scan + # contextmanager can restore it def __init__(self, file, filename, parent_scanner=None, scope=None, context=None, source_encoding=None, parse_comments=True, initial_pos=None): @@ -338,6 +341,8 @@ class PyrexScanner(Scanner): self.indentation_char = None self.bracket_nesting_level = 0 + self.put_back_on_failure = None + self.begin('INDENT') self.sy = '' self.next() @@ -391,7 +396,7 @@ class PyrexScanner(Scanner): def unclosed_string_action(self, text): self.end_string_action(text) - self.error("Unclosed string literal") + self.error_at_scanpos("Unclosed string literal") def indentation_action(self, text): self.begin('') @@ -407,9 +412,9 @@ class PyrexScanner(Scanner): #print "Scanner.indentation_action: setting indent_char to", repr(c) else: if self.indentation_char != c: - self.error("Mixed use of tabs and spaces") + self.error_at_scanpos("Mixed use of tabs and spaces") if text.replace(c, "") != "": - self.error("Mixed use of tabs and spaces") + self.error_at_scanpos("Mixed use of tabs and spaces") # Figure out how many indents/dedents to do current_level = self.current_level() new_level = len(text) @@ -427,7 +432,7 @@ class PyrexScanner(Scanner): self.produce('DEDENT', '') #print "...current level now", self.current_level() ### if new_level != self.current_level(): - self.error("Inconsistent indentation") + 
self.error_at_scanpos("Inconsistent indentation") def eof_action(self, text): while len(self.indentation_stack) > 1: @@ -439,7 +444,7 @@ class PyrexScanner(Scanner): try: sy, systring = self.read() except UnrecognizedInput: - self.error("Unrecognized character") + self.error_at_scanpos("Unrecognized character") return # just a marker, error() always raises if sy == IDENT: if systring in self.keywords: @@ -450,6 +455,8 @@ class PyrexScanner(Scanner): else: sy = systring systring = self.context.intern_ustring(systring) + if self.put_back_on_failure is not None: + self.put_back_on_failure.append((sy, systring, self.position())) self.sy = sy self.systring = systring if False: # debug_scanner: @@ -462,20 +469,20 @@ class PyrexScanner(Scanner): def peek(self): saved = self.sy, self.systring + saved_pos = self.position() self.next() next = self.sy, self.systring - self.unread(*next) + self.unread(self.sy, self.systring, self.position()) self.sy, self.systring = saved + self.last_token_position_tuple = saved_pos return next - def put_back(self, sy, systring): - self.unread(self.sy, self.systring) + def put_back(self, sy, systring, pos): + self.unread(self.sy, self.systring, self.last_token_position_tuple) self.sy = sy self.systring = systring + self.last_token_position_tuple = pos - def unread(self, token, value): - # This method should be added to Plex - self.queue.insert(0, (token, value)) def error(self, message, pos=None, fatal=True): if pos is None: @@ -485,6 +492,12 @@ class PyrexScanner(Scanner): err = error(pos, message) if fatal: raise err + def error_at_scanpos(self, message): + # Like error(fatal=True), but gets the current scanning position rather than + # the position of the last token read. 
+ pos = self.get_current_scan_pos() + self.error(message, pos, True) + def expect(self, what, message=None): if self.sy == what: self.next() @@ -538,3 +551,30 @@ class PyrexScanner(Scanner): self.keywords.discard('async') if self.sy in ('async', 'await'): self.sy, self.systring = IDENT, self.context.intern_ustring(self.sy) + +@contextmanager +@cython.locals(scanner=Scanner) +def tentatively_scan(scanner): + errors = hold_errors() + try: + put_back_on_failure = scanner.put_back_on_failure + scanner.put_back_on_failure = [] + initial_state = (scanner.sy, scanner.systring, scanner.position()) + try: + yield errors + except CompileError as e: + pass + finally: + if errors: + if scanner.put_back_on_failure: + for put_back in reversed(scanner.put_back_on_failure[:-1]): + scanner.put_back(*put_back) + # we need to restore the initial state too + scanner.put_back(*initial_state) + elif put_back_on_failure is not None: + # the outer "tentatively_scan" block that we're in might still + # want to undo this block + put_back_on_failure.extend(scanner.put_back_on_failure) + scanner.put_back_on_failure = put_back_on_failure + finally: + release_errors(ignore=True) diff --git a/Cython/Compiler/Tests/TestScanning.py b/Cython/Compiler/Tests/TestScanning.py new file mode 100644 index 000000000..e9cac1b47 --- /dev/null +++ b/Cython/Compiler/Tests/TestScanning.py @@ -0,0 +1,136 @@ +from __future__ import unicode_literals + +import unittest +from io import StringIO +import string + +from .. import Scanning +from ..Symtab import ModuleScope +from ..TreeFragment import StringParseContext +from ..Errors import init_thread + +# generate some fake code - just a bunch of lines of the form "a0 a1 ..." 
+code = [] +for ch in string.ascii_lowercase: + line = " ".join(["%s%s" % (ch, n) for n in range(10)]) + code.append(line) +code = "\n".join(code) + +init_thread() + + +class TestScanning(unittest.TestCase): + def make_scanner(self): + source = Scanning.StringSourceDescriptor("fake code", code) + buf = StringIO(code) + context = StringParseContext("fake context") + scope = ModuleScope("fake_module", None, None) + + return Scanning.PyrexScanner(buf, source, scope=scope, context=context) + + def test_put_back_positions(self): + scanner = self.make_scanner() + + self.assertEqual(scanner.sy, "IDENT") + self.assertEqual(scanner.systring, "a0") + scanner.next() + self.assertEqual(scanner.sy, "IDENT") + self.assertEqual(scanner.systring, "a1") + a1pos = scanner.position() + self.assertEqual(a1pos[1:], (1, 3)) + a2peek = scanner.peek() # shouldn't mess up the position + self.assertEqual(a1pos, scanner.position()) + scanner.next() + self.assertEqual(a2peek, (scanner.sy, scanner.systring)) + + # find next line + while scanner.sy != "NEWLINE": + scanner.next() + + line_sy = [] + line_systring = [] + line_pos = [] + + scanner.next() + while scanner.sy != "NEWLINE": + line_sy.append(scanner.sy) + line_systring.append(scanner.systring) + line_pos.append(scanner.position()) + scanner.next() + + for sy, systring, pos in zip( + line_sy[::-1], line_systring[::-1], line_pos[::-1] + ): + scanner.put_back(sy, systring, pos) + + n = 0 + while scanner.sy != "NEWLINE": + self.assertEqual(scanner.sy, line_sy[n]) + self.assertEqual(scanner.systring, line_systring[n]) + self.assertEqual(scanner.position(), line_pos[n]) + scanner.next() + n += 1 + + self.assertEqual(n, len(line_pos)) + + def test_tentatively_scan(self): + scanner = self.make_scanner() + with Scanning.tentatively_scan(scanner) as errors: + while scanner.sy != "NEWLINE": + scanner.next() + self.assertFalse(errors) + + scanner.next() + self.assertEqual(scanner.systring, "b0") + pos = scanner.position() + with 
Scanning.tentatively_scan(scanner) as errors: + while scanner.sy != "NEWLINE": + scanner.next() + if scanner.systring == "b7": + scanner.error("Oh no not b7!") + break + self.assertTrue(errors) + self.assertEqual(scanner.systring, "b0") # state has been restored + self.assertEqual(scanner.position(), pos) + scanner.next() + self.assertEqual(scanner.systring, "b1") # and we can keep going again + scanner.next() + self.assertEqual(scanner.systring, "b2") # and we can keep going again + + with Scanning.tentatively_scan(scanner) as error: + scanner.error("Something has gone wrong with the current symbol") + self.assertEqual(scanner.systring, "b2") + scanner.next() + self.assertEqual(scanner.systring, "b3") + + # test a few combinations of nested scanning + sy1, systring1 = scanner.sy, scanner.systring + pos1 = scanner.position() + with Scanning.tentatively_scan(scanner): + scanner.next() + sy2, systring2 = scanner.sy, scanner.systring + pos2 = scanner.position() + with Scanning.tentatively_scan(scanner): + with Scanning.tentatively_scan(scanner): + scanner.next() + scanner.next() + scanner.error("Ooops") + self.assertEqual((scanner.sy, scanner.systring), (sy2, systring2)) + self.assertEqual((scanner.sy, scanner.systring), (sy2, systring2)) + scanner.error("eee") + self.assertEqual((scanner.sy, scanner.systring), (sy1, systring1)) + with Scanning.tentatively_scan(scanner): + scanner.next() + scanner.next() + with Scanning.tentatively_scan(scanner): + scanner.next() + # no error - but this block should be unwound by the outer block too + scanner.next() + scanner.error("Oooops") + self.assertEqual((scanner.sy, scanner.systring), (sy1, systring1)) + + + + +if __name__ == "__main__": + unittest.main() diff --git a/Cython/Plex/Scanners.pxd b/Cython/Plex/Scanners.pxd index c6cb19b40..664b1a6f0 100644 --- a/Cython/Plex/Scanners.pxd +++ b/Cython/Plex/Scanners.pxd @@ -16,8 +16,8 @@ cdef class Scanner: cdef public Py_ssize_t cur_line cdef public Py_ssize_t cur_line_start cdef 
public Py_ssize_t start_pos - cdef public Py_ssize_t start_line - cdef public Py_ssize_t start_col + cdef tuple current_scanner_position_tuple + cdef public tuple last_token_position_tuple cdef public text cdef public initial_state # int? cdef public state_name @@ -32,6 +32,8 @@ cdef class Scanner: cdef inline next_char(self) @cython.locals(action=Action) cpdef tuple read(self) + cdef inline unread(self, token, value, position) + cdef inline get_current_scan_pos(self) cdef inline tuple scan_a_token(self) ##cdef tuple position(self) # used frequently by Parsing.py diff --git a/Cython/Plex/Scanners.py b/Cython/Plex/Scanners.py index e850e0cc9..5729e3a3f 100644 --- a/Cython/Plex/Scanners.py +++ b/Cython/Plex/Scanners.py @@ -53,18 +53,25 @@ class Scanner(object): # stream = None # file-like object # name = '' # buffer = '' + # + # These positions are used by the scanner to track its internal state: # buf_start_pos = 0 # position in input of start of buffer # next_pos = 0 # position in input of next char to read # cur_pos = 0 # position in input of current char # cur_line = 1 # line number of current char # cur_line_start = 0 # position in input of start of current line # start_pos = 0 # position in input of start of token - # start_line = 0 # line number of start of token - # start_col = 0 # position in line of start of token + # current_scanner_position_tuple = ("", 0, 0) + # tuple of filename, line number and position in line, really mainly for error reporting + # + # These positions are used to track what was read from the queue + # (which may differ from the internal state when tokens are replaced onto the queue) + # last_token_position_tuple = ("", 0, 0) # tuple of filename, line number and position in line + # text = None # text of last token read # initial_state = None # Node # state_name = '' # Name of initial state - # queue = None # list of tokens to be returned + # queue = None # list of tokens and positions to be returned # trace = 0 def __init__(self, 
lexicon, stream, name='', initial_pos=None): @@ -88,8 +95,8 @@ class Scanner(object): self.cur_pos = 0 self.cur_line = 1 self.start_pos = 0 - self.start_line = 0 - self.start_col = 0 + self.current_scanner_position_tuple = ("", 0, 0) + self.last_token_position_tuple = ("", 0, 0) self.text = None self.state_name = None @@ -124,10 +131,17 @@ class Scanner(object): value = action.perform(self, self.text) if value is not None: self.produce(value) - result = queue[0] + result, self.last_token_position_tuple = queue[0] del queue[0] return result + def unread(self, token, value, position): + self.queue.insert(0, ((token, value), position)) + + def get_current_scan_pos(self): + # distinct from the position of the last token due to the queue + return self.current_scanner_position_tuple + def scan_a_token(self): """ Read the next input sequence recognised by the machine @@ -135,8 +149,9 @@ class Scanner(object): file. """ self.start_pos = self.cur_pos - self.start_line = self.cur_line - self.start_col = self.cur_pos - self.cur_line_start + self.current_scanner_position_tuple = ( + self.name, self.cur_line, self.cur_pos - self.cur_line_start + ) action = self.run_machine_inlined() if action is not None: if self.trace: @@ -303,7 +318,7 @@ class Scanner(object): position within the line of the first character of the token (0-based). """ - return (self.name, self.start_line, self.start_col) + return self.last_token_position_tuple def get_position(self): """ @@ -330,7 +345,7 @@ class Scanner(object): """ if text is None: text = self.text - self.queue.append((value, text)) + self.queue.append(((value, text), self.current_scanner_position_tuple)) def eof(self): """ @@ -338,3 +353,7 @@ class Scanner(object): end of file. 
""" pass + + @property + def start_line(self): + return self.last_token_position_tuple[1] diff --git a/tests/errors/cpp_object_template.pyx b/tests/errors/cpp_object_template.pyx index db4381b51..e1a15c905 100644 --- a/tests/errors/cpp_object_template.pyx +++ b/tests/errors/cpp_object_template.pyx @@ -18,7 +18,7 @@ def memview(): vmv.push_back(array.array("i", [1,2,3])) _ERRORS = u""" -10:16: Python object type 'Python object' cannot be used as a template argument -12:16: Python object type 'A' cannot be used as a template argument -17:16: Reference-counted type 'int[:]' cannot be used as a template argument +10:15: Python object type 'Python object' cannot be used as a template argument +12:15: Python object type 'A' cannot be used as a template argument +17:15: Reference-counted type 'int[:]' cannot be used as a template argument """ diff --git a/tests/errors/cppexc_non_extern.pyx b/tests/errors/cppexc_non_extern.pyx index 95427e6e4..f498e398d 100644 --- a/tests/errors/cppexc_non_extern.pyx +++ b/tests/errors/cppexc_non_extern.pyx @@ -14,9 +14,9 @@ cdef test_func2(self) except +: pass _ERRORS = """ -9:16: Only extern functions can throw C++ exceptions. +9:5: Only extern functions can throw C++ exceptions. """ _WARNINGS = """ -13:16: Only extern functions can throw C++ exceptions. +13:5: Only extern functions can throw C++ exceptions. 
""" diff --git a/tests/errors/e_argdefault.pyx b/tests/errors/e_argdefault.pyx index d8828741f..43e69ae6f 100644 --- a/tests/errors/e_argdefault.pyx +++ b/tests/errors/e_argdefault.pyx @@ -12,7 +12,7 @@ cdef class Grail: pass _ERRORS = u""" -3:10: Non-default argument follows default argument +3:9: Non-default argument follows default argument 3:36: Non-default argument following default argument 6:23: Non-default argument following default argument 11:16: This argument cannot have a default value diff --git a/tests/errors/e_bufaccess.pyx b/tests/errors/e_bufaccess.pyx index bc5b9c0f3..5be4876d5 100644 --- a/tests/errors/e_bufaccess.pyx +++ b/tests/errors/e_bufaccess.pyx @@ -17,7 +17,7 @@ def f(): _ERRORS = u""" 3:17: Buffer types only allowed as function local variables 5:21: Buffer types only allowed as function local variables -8:31: "fakeoption" is not a buffer option +8:27: "fakeoption" is not a buffer option """ #TODO: #7:22: "ndim" must be non-negative diff --git a/tests/errors/e_cpp_only_features.pyx b/tests/errors/e_cpp_only_features.pyx index 005e415e6..19b7a6e39 100644 --- a/tests/errors/e_cpp_only_features.pyx +++ b/tests/errors/e_cpp_only_features.pyx @@ -21,6 +21,6 @@ def use_del(): _ERRORS = """ 8:10: typeid operator only allowed in c++ 8:23: typeid operator only allowed in c++ -14:20: Operation only allowed in c++ +14:16: Operation only allowed in c++ 19:4: Operation only allowed in c++ """ diff --git a/tests/errors/e_cstruct.pyx b/tests/errors/e_cstruct.pyx index ad3ca9695..e0a09fbeb 100644 --- a/tests/errors/e_cstruct.pyx +++ b/tests/errors/e_cstruct.pyx @@ -24,7 +24,7 @@ cdef void eggs(Spam s): _ERRORS = u""" -7:39: C struct/union member cannot be a Python object +7:4: C struct/union member cannot be a Python object 17:9: Object of type 'Spam' has no attribute 'k' 18:9: Cannot assign type 'float (*)[42]' to 'int' 19:10: Cannot assign type 'int' to 'float (*)[42]' diff --git a/tests/errors/e_public_cdef_private_types.pyx 
b/tests/errors/e_public_cdef_private_types.pyx index 331d6c04f..9d8f55c87 100644 --- a/tests/errors/e_public_cdef_private_types.pyx +++ b/tests/errors/e_public_cdef_private_types.pyx @@ -38,6 +38,6 @@ e_public_cdef_private_types.pyx:8:22: Function declared public or api may not ha e_public_cdef_private_types.pyx:11:19: Function declared public or api may not have private types e_public_cdef_private_types.pyx:14:5: Function declared public or api may not have private types e_public_cdef_private_types.pyx:17:5: Function declared public or api may not have private types -e_public_cdef_private_types.pyx:20:25: Function with optional arguments may not be declared public or api -e_public_cdef_private_types.pyx:23:22: Function with optional arguments may not be declared public or api +e_public_cdef_private_types.pyx:20:24: Function with optional arguments may not be declared public or api +e_public_cdef_private_types.pyx:23:21: Function with optional arguments may not be declared public or api """ diff --git a/tests/errors/e_typing_errors.pyx b/tests/errors/e_typing_errors.pyx index e11827696..832f68d90 100644 --- a/tests/errors/e_typing_errors.pyx +++ b/tests/errors/e_typing_errors.pyx @@ -40,20 +40,20 @@ cdef class Cls(object): _ERRORS = """ -13:45: typing.Optional[...] cannot be applied to non-Python type int -13:72: typing.Optional[...] cannot be applied to non-Python type double -13:98: typing.Optional[...] cannot be applied to non-Python type float -14:49: typing.Optional[...] cannot be applied to non-Python type double complex -14:74: typing.Optional[...] cannot be applied to non-Python type long -14:103: typing.Optional[...] cannot be applied to non-Python type long long -24:33: typing.Optional[...] cannot be applied to non-Python type int -24:52: typing.Optional[...] cannot be applied to non-Python type float -24:91: typing.Optional[...] cannot be applied to non-Python type long - -20:38: typing.Optional[...] 
cannot be applied to non-Python type MyStruct +13:42: typing.Optional[...] cannot be applied to non-Python type int +13:66: typing.Optional[...] cannot be applied to non-Python type double +13:93: typing.Optional[...] cannot be applied to non-Python type float +14:42: typing.Optional[...] cannot be applied to non-Python type double complex +14:70: typing.Optional[...] cannot be applied to non-Python type long +14:95: typing.Optional[...] cannot be applied to non-Python type long long +24:30: typing.Optional[...] cannot be applied to non-Python type int +24:47: typing.Optional[...] cannot be applied to non-Python type float +24:87: typing.Optional[...] cannot be applied to non-Python type long + +20:30: typing.Optional[...] cannot be applied to non-Python type MyStruct 28:20: Modifier 'typing.ClassVar' is not allowed here. # FIXME: this should be ok :-? -33:53: typing.Optional[...] cannot be applied to non-Python type double[:] +33:52: typing.Optional[...] cannot be applied to non-Python type double[:] """ diff --git a/tests/errors/fused_types.pyx b/tests/errors/fused_types.pyx index 378ac5506..31aa35b86 100644 --- a/tests/errors/fused_types.pyx +++ b/tests/errors/fused_types.pyx @@ -109,7 +109,7 @@ _ERRORS = u""" 86:4: 'z' cannot be specialized since its type is not a fused argument to this function 86:4: 'z' cannot be specialized since its type is not a fused argument to this function 86:4: 'z' cannot be specialized since its type is not a fused argument to this function -87:24: Type cannot be specialized since it is not a fused argument to this function -87:24: Type cannot be specialized since it is not a fused argument to this function -87:24: Type cannot be specialized since it is not a fused argument to this function +87:16: Type cannot be specialized since it is not a fused argument to this function +87:16: Type cannot be specialized since it is not a fused argument to this function +87:16: Type cannot be specialized since it is not a fused argument to this 
function """ diff --git a/tests/memoryview/error_declarations.pyx b/tests/memoryview/error_declarations.pyx index 0f6c52043..8c4f12a56 100644 --- a/tests/memoryview/error_declarations.pyx +++ b/tests/memoryview/error_declarations.pyx @@ -73,24 +73,24 @@ _ERRORS = u''' 13:19: Step must be omitted, 1, or a valid specifier. 14:20: Step must be omitted, 1, or a valid specifier. 15:20: Step must be omitted, 1, or a valid specifier. -16:17: Start must not be given. -17:18: Start must not be given. +16:15: Start must not be given. +17:17: Start must not be given. 18:22: Axis specification only allowed in the 'step' slot. -19:19: Fortran contiguous specifier must follow an indirect dimension +19:18: Fortran contiguous specifier must follow an indirect dimension 20:22: Invalid axis specification. 21:19: Invalid axis specification. 22:22: no expressions allowed in axis spec, only names and literals. 25:37: Memoryview 'object[::1, :]' not conformable to memoryview 'object[:, ::1]'. 28:17: Different base types for memoryviews (int, Python object) -31:9: Dimension may not be contiguous -37:9: Only one direct contiguous axis may be specified. -38:9:Only dimensions 3 and 2 may be contiguous and direct -44:10: Invalid base type for memoryview slice: intp +31:8: Dimension may not be contiguous +37:8: Only one direct contiguous axis may be specified. +38:8:Only dimensions 3 and 2 may be contiguous and direct +44:9: Invalid base type for memoryview slice: intp 46:35: Can only create cython.array from pointer or array 47:24: Cannot assign type 'double' to 'Py_ssize_t' -55:13: Invalid base type for memoryview slice: Invalid +55:12: Invalid base type for memoryview slice: Invalid 58:6: More dimensions than the maximum number of buffer dimensions were used. 59:6: More dimensions than the maximum number of buffer dimensions were used. -61:9: More dimensions than the maximum number of buffer dimensions were used. 
+61:8: More dimensions than the maximum number of buffer dimensions were used. 64:13: Cannot take address of memoryview slice ''' -- cgit v1.2.1