From b2792a3d1796effcfdeb41c6b495d176ad5d7239 Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 24 May 2022 09:55:36 +0100 Subject: Remove unused "saved_subexpr_nodes" attribute (GH-4604) --- Cython/Compiler/ExprNodes.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 312b37329..c20a76bd4 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -328,9 +328,6 @@ class ExprNode(Node): # is_sequence_constructor # boolean Is a list or tuple constructor expression # is_starred boolean Is a starred expression (e.g. '*a') - # saved_subexpr_nodes - # [ExprNode or [ExprNode or None] or None] - # Cached result of subexpr_nodes() # use_managed_ref boolean use ref-counted temps/assignments/etc. # result_is_used boolean indicates that the result will be dropped and the # is_numpy_attribute boolean Is a Numpy module attribute @@ -473,7 +470,6 @@ class ExprNode(Node): is_memview_broadcast = False is_memview_copy_assignment = False - saved_subexpr_nodes = None is_temp = False has_temp_moved = False # if True then attempting to do anything but free the temp is invalid is_target = False -- cgit v1.2.1 From d0719998c441be752c4c28c5791ecfa305dc4322 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 29 May 2022 15:06:20 +0100 Subject: Replace fused "__self__" property function with slot memberdef (GH-4808) It's a bit simpler, and it lets `hasattr` work correctly where-as the getset didn't: hasattr() returned true, but it still raised an error. 
--- Cython/Utility/CythonFunction.c | 15 +-------------- tests/run/function_self.py | 21 ++++++--------------- 2 files changed, 7 insertions(+), 29 deletions(-) diff --git a/Cython/Utility/CythonFunction.c b/Cython/Utility/CythonFunction.c index 9a7bf7405..270f441ef 100644 --- a/Cython/Utility/CythonFunction.c +++ b/Cython/Utility/CythonFunction.c @@ -1463,30 +1463,17 @@ bad: return result; } -static PyObject * -__Pyx_FusedFunction_get_self(__pyx_FusedFunctionObject *m, void *closure) -{ - PyObject *self = m->self; - CYTHON_UNUSED_VAR(closure); - if (unlikely(!self)) { - PyErr_SetString(PyExc_AttributeError, "'function' object has no attribute '__self__'"); - } else { - Py_INCREF(self); - } - return self; -} - static PyMemberDef __pyx_FusedFunction_members[] = { {(char *) "__signatures__", T_OBJECT, offsetof(__pyx_FusedFunctionObject, __signatures__), READONLY, 0}, + {(char *) "__self__", T_OBJECT_EX, offsetof(__pyx_FusedFunctionObject, self), READONLY, 0}, {0, 0, 0, 0, 0}, }; static PyGetSetDef __pyx_FusedFunction_getsets[] = { - {(char *) "__self__", (getter)__Pyx_FusedFunction_get_self, 0, 0, 0}, // __doc__ is None for the fused function type, but we need it to be // a descriptor for the instance's __doc__, so rebuild the descriptor in our subclass // (all other descriptors are inherited) diff --git a/tests/run/function_self.py b/tests/run/function_self.py index 938810491..945da404f 100644 --- a/tests/run/function_self.py +++ b/tests/run/function_self.py @@ -25,13 +25,8 @@ def fused(x): >>> hasattr(nested, "__self__") False - #>>> hasattr(fused, "__self__") # FIXME this fails for fused functions - #False - # but this is OK: - >>> fused.__self__ #doctest: +ELLIPSIS - Traceback (most recent call last): - ... - AttributeError: 'function' object has no attribute '__self__'... 
+ >>> hasattr(fused, "__self__") + False """ def nested_in_fused(y): return x+y @@ -74,15 +69,11 @@ if sys.version_info[0] > 2 or cython.compiled: if cython.compiled: __doc__ = """ - >>> fused['double'].__self__ #doctest: +ELLIPSIS - Traceback (most recent call last): - ... - AttributeError: 'function' object has no attribute '__self__'... + >>> hasattr(fused['double'], '__self__') + False - >>> C.fused['double'].__self__ #doctest: +ELLIPSIS - Traceback (most recent call last): - ... - AttributeError: 'function' object has no attribute '__self__'... + >>> hasattr(C.fused['double'], '__self__') + False >>> c = C() >>> c.fused['double'].__self__ is c #doctest: +ELLIPSIS -- cgit v1.2.1 From 0159be918d7c83e145f4ec3ffc35b8d0e3d974dd Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 30 May 2022 08:38:59 +0100 Subject: Improve coercion rules on function pointer exception specification and add tests (GH-4811) Closes https://github.com/cython/cython/issues/4689 --- Cython/Compiler/PyrexTypes.py | 3 +++ tests/errors/cfuncptr.pyx | 36 ++++++++++++++++++++++++++++++++++++ tests/run/cfuncptr.pyx | 40 +++++++++++++++++++++++++++++++++++++--- 3 files changed, 76 insertions(+), 3 deletions(-) create mode 100644 tests/errors/cfuncptr.pyx diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py index c773f5c5a..1660eab22 100644 --- a/Cython/Compiler/PyrexTypes.py +++ b/Cython/Compiler/PyrexTypes.py @@ -3044,6 +3044,9 @@ class CFuncType(CType): # must catch C++ exceptions if we raise them return 0 if not other_type.exception_check or other_type.exception_value is not None: + # There's no problem if this type doesn't emit exceptions but the other type checks + if other_type.exception_check and not (self.exception_check or self.exception_value): + return 1 # if other does not *always* check exceptions, self must comply if not self._same_exception_value(other_type.exception_value): return 0 diff --git a/tests/errors/cfuncptr.pyx b/tests/errors/cfuncptr.pyx new file 
mode 100644 index 000000000..e05efa519 --- /dev/null +++ b/tests/errors/cfuncptr.pyx @@ -0,0 +1,36 @@ +# mode: error + +cdef int exceptmaybeminus2(int bad) except ?-2: + if bad: + raise RuntimeError + else: + return 0 + +def fail_exceptmaybeminus2(bad): + cdef int (*fptr_a)(int) except -2 + cdef int (*fptr_b)(int) except -1 + cdef int (*fptr_c)(int) except ?-1 + fptr_a = exceptmaybeminus2 + fptr_b = exceptmaybeminus2 + fptr_c = exceptmaybeminus2 + +cdef extern from *: + # define this as extern since Cython converts internal "except*" to "except -1" + cdef int exceptstar(int bad) except * + +def fail_exceptstar(bad): + cdef int (*fptr_a)(int) # noexcept + cdef int (*fptr_b)(int) except -1 + cdef int (*fptr_c)(int) except ?-1 + fptr_a = exceptstar + fptr_b = exceptstar + fptr_c = exceptstar + +_ERRORS = """ +13:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except -2' +14:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except -1' +15:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except? -1' +25:13: Cannot assign type 'int (int) except *' to 'int (*)(int)' +26:13: Cannot assign type 'int (int) except *' to 'int (*)(int) except -1' +27:13: Cannot assign type 'int (int) except *' to 'int (*)(int) except? -1' +""" diff --git a/tests/run/cfuncptr.pyx b/tests/run/cfuncptr.pyx index b7018cce0..cb3b32184 100644 --- a/tests/run/cfuncptr.pyx +++ b/tests/run/cfuncptr.pyx @@ -46,15 +46,49 @@ cdef int exceptminus2(int bad) except -2: else: return 0 -def call_exceptminus2(bad): +def call_exceptminus2_through_exceptstar_pointer(bad): """ - >>> call_exceptminus2(True) + >>> call_exceptminus2_through_exceptstar_pointer(True) Traceback (most recent call last): ... RuntimeError - >>> call_exceptminus2(False) + >>> call_exceptminus2_through_exceptstar_pointer(False) 0 """ cdef int (*fptr)(int) except * # GH4770 - should not be treated as except? 
-1 fptr = exceptminus2 return fptr(bad) + +def call_exceptminus2_through_exceptmaybeminus2_pointer(bad): + """ + >>> call_exceptminus2_through_exceptmaybeminus2_pointer(True) + Traceback (most recent call last): + ... + RuntimeError + >>> call_exceptminus2_through_exceptmaybeminus2_pointer(False) + 0 + """ + cdef int (*fptr)(int) except ?-2 # exceptions should be compatible + fptr = exceptminus2 + return fptr(bad) + +cdef int noexcept_func(): # noexcept + return 0 + +def call_noexcept_func_except_star(): + """ + >>> call_noexcept_func_except_star() + 0 + """ + cdef int (*fptr)() except * + fptr = noexcept_func # exception specifications are compatible + return fptr() + +def call_noexcept_func_except_check(): + """ + >>> call_noexcept_func_except_check() + 0 + """ + cdef int (*fptr)() except ?-1 + fptr = noexcept_func # exception specifications are compatible + return fptr() -- cgit v1.2.1 From aaff13db6d4b8a6c812887dc50e1de9f180bc099 Mon Sep 17 00:00:00 2001 From: Wenjun Si Date: Wed, 1 Jun 2022 03:05:06 +0800 Subject: Add "mt19973_64" declarations to "libcpp.random" (GH-4818) --- Cython/Includes/libcpp/random.pxd | 14 ++++++- tests/run/cpp_stl_random.pyx | 77 ++++++++++++++++++++++++++++++++------- 2 files changed, 77 insertions(+), 14 deletions(-) diff --git a/Cython/Includes/libcpp/random.pxd b/Cython/Includes/libcpp/random.pxd index e879c8f64..5636400f3 100644 --- a/Cython/Includes/libcpp/random.pxd +++ b/Cython/Includes/libcpp/random.pxd @@ -1,4 +1,4 @@ -from libc.stdint cimport uint_fast32_t +from libc.stdint cimport uint_fast32_t, uint_fast64_t cdef extern from "" namespace "std" nogil: @@ -12,3 +12,15 @@ cdef extern from "" namespace "std" nogil: result_type max() except + void discard(size_t z) except + void seed(result_type seed) except + + + + cdef cppclass mt19937_64: + ctypedef uint_fast64_t result_type + + mt19937_64() except + + mt19937_64(result_type seed) except + + result_type operator()() except + + result_type min() except + + result_type max() 
except + + void discard(size_t z) except + + void seed(result_type seed) except + diff --git a/tests/run/cpp_stl_random.pyx b/tests/run/cpp_stl_random.pyx index 58f7db040..a793000fa 100644 --- a/tests/run/cpp_stl_random.pyx +++ b/tests/run/cpp_stl_random.pyx @@ -1,7 +1,7 @@ # mode: run # tag: cpp, cpp11 -from libcpp.random cimport mt19937 +from libcpp.random cimport mt19937, mt19937_64 def mt19937_seed_test(): @@ -9,8 +9,8 @@ def mt19937_seed_test(): >>> print(mt19937_seed_test()) 1608637542 """ - cdef mt19937 rd = mt19937(42) - return rd() + cdef mt19937 gen = mt19937(42) + return gen() def mt19937_reseed_test(): @@ -18,9 +18,9 @@ def mt19937_reseed_test(): >>> print(mt19937_reseed_test()) 1608637542 """ - cdef mt19937 rd - rd.seed(42) - return rd() + cdef mt19937 gen + gen.seed(42) + return gen() def mt19937_min_max(): @@ -31,8 +31,8 @@ def mt19937_min_max(): >>> print(y) # 2 ** 32 - 1 because mt19937 is 32 bit. 4294967295 """ - cdef mt19937 rd - return rd.min(), rd.max() + cdef mt19937 gen + return gen.min(), gen.max() def mt19937_discard(z): @@ -43,13 +43,64 @@ def mt19937_discard(z): >>> print(y) 1972458954 """ - cdef mt19937 rd = mt19937(42) + cdef mt19937 gen = mt19937(42) # Throw away z random numbers. - rd.discard(z) - a = rd() + gen.discard(z) + a = gen() # Iterate over z random numbers. - rd.seed(42) + gen.seed(42) for _ in range(z + 1): - b = rd() + b = gen() + return a, b + + +def mt19937_64_seed_test(): + """ + >>> print(mt19937_64_seed_test()) + 13930160852258120406 + """ + cdef mt19937_64 gen = mt19937_64(42) + return gen() + + +def mt19937_64_reseed_test(): + """ + >>> print(mt19937_64_reseed_test()) + 13930160852258120406 + """ + cdef mt19937_64 gen + gen.seed(42) + return gen() + + +def mt19937_64_min_max(): + """ + >>> x, y = mt19937_64_min_max() + >>> print(x) + 0 + >>> print(y) # 2 ** 64 - 1 because mt19937_64 is 64 bit. 
+ 18446744073709551615 + """ + cdef mt19937_64 gen + return gen.min(), gen.max() + + +def mt19937_64_discard(z): + """ + >>> x, y = mt19937_64_discard(13) + >>> print(x) + 11756813601242511406 + >>> print(y) + 11756813601242511406 + """ + cdef mt19937_64 gen = mt19937_64(42) + # Throw away z random numbers. + gen.discard(z) + a = gen() + + # Iterate over z random numbers. + gen.seed(42) + for _ in range(z + 1): + b = gen() return a, b -- cgit v1.2.1 From d9a708221cebc431015ae640b31e14c10ec4fb4c Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Tue, 31 May 2022 21:11:32 +0200 Subject: Fix test tag name. --- tests/run/test_named_expressions.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/run/test_named_expressions.py b/tests/run/test_named_expressions.py index 28147319b..7f25a229d 100644 --- a/tests/run/test_named_expressions.py +++ b/tests/run/test_named_expressions.py @@ -1,9 +1,8 @@ # mode: run -# tag: pure38, no-cpp +# tag: pure3.8, no-cpp # copied from cpython with minimal modifications (mainly exec->cython_inline, and a few exception strings) # This is not currently run in C++ because all the cython_inline compilations fail for reasons that are unclear -# FIXME pure38 seems to be ignored # cython: language_level=3 import os -- cgit v1.2.1 From 6ac2422b48b689b021a48dff9ee14095232baafe Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Tue, 31 May 2022 21:12:06 +0200 Subject: Remove unused import from test file. 
--- tests/run/test_named_expressions.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/run/test_named_expressions.py b/tests/run/test_named_expressions.py index 7f25a229d..b3e2eb980 100644 --- a/tests/run/test_named_expressions.py +++ b/tests/run/test_named_expressions.py @@ -5,7 +5,6 @@ # This is not currently run in C++ because all the cython_inline compilations fail for reasons that are unclear # cython: language_level=3 -import os import unittest import cython from Cython.Compiler.Main import CompileError -- cgit v1.2.1 From c5a418ac0f46ce797f9261c2b8de43630f9d71d8 Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Mon, 6 Jun 2022 02:11:20 -0400 Subject: DOC Fixes link to init methods (#4824) --- docs/src/userguide/extension_types.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/src/userguide/extension_types.rst b/docs/src/userguide/extension_types.rst index 678ddf5c8..d058df6c2 100644 --- a/docs/src/userguide/extension_types.rst +++ b/docs/src/userguide/extension_types.rst @@ -480,7 +480,8 @@ above, the first instantiation will print ``eating!``, but the second will not. This is only one of the reasons why the ``__cinit__()`` method is safer than the normal ``__init__()`` method for initialising extension types and bringing them into a correct and safe state. -See section :ref:`_initialisation_methods` about the differences. +See the :ref:`Initialisation Methods Section ` about +the differences. The second performance improvement applies to types that are often created and deleted in a row, so that they can benefit from a freelist. Cython -- cgit v1.2.1 From 3c0afd9a4e9d8b9aea1702876b52b31f35992b51 Mon Sep 17 00:00:00 2001 From: Till Hoffmann Date: Tue, 7 Jun 2022 13:28:53 -0400 Subject: Add distributions to `libcpp.random`. (#4762) * Add `uniform_int_distribution` to `libcpp`. * Add `uniform_real_distribution` to `libcpp`. * Add `bernoulli_distribution` to `libcpp`. * Add `binomial_distribution` to `libcpp`. 
* Add `geometric_distribution` to `libcpp`. * Add range tests for distributions. * Add `negative_binomial_distribution` to `libcpp`. * Add `poisson_distribution` to `libcpp`. * Add `exponential_distribution` to `libcpp`. * Add `gamma_distribution` to `libcpp`. * Add `weibull_distribution` to `libcpp`. * Add `extreme_value_distribution` to `libcpp`. * Add `normal_distribution` to `libcpp`. * Add `lognormal_distribution` to `libcpp`. * Add `chi_squared_distribution` to `libcpp`. * Add `cauchy_distribution` to `libcpp`. * Add `fisher_f_distribution` to `libcpp`. * Increase `dof` for `fisher_f_distribution_test`. * Add `student_t_distribution` to `libcpp`. * Add docstring to `sample_or_range` helper function. * Fix distribution range tests for different OSs. * Show sample values and proba. of test passing. * Test distribution call without checking values. --- Cython/Includes/libcpp/random.pxd | 144 +++++++++++++++++++++- tests/run/cpp_stl_random.pyx | 244 +++++++++++++++++++++++++++++++++++++- 2 files changed, 385 insertions(+), 3 deletions(-) diff --git a/Cython/Includes/libcpp/random.pxd b/Cython/Includes/libcpp/random.pxd index 5636400f3..9e48bb27f 100644 --- a/Cython/Includes/libcpp/random.pxd +++ b/Cython/Includes/libcpp/random.pxd @@ -2,9 +2,13 @@ from libc.stdint cimport uint_fast32_t, uint_fast64_t cdef extern from "" namespace "std" nogil: - cdef cppclass mt19937: + cdef cppclass random_device: ctypedef uint_fast32_t result_type + random_device() except + + result_type operator()() except + + cdef cppclass mt19937: + ctypedef uint_fast32_t result_type mt19937() except + mt19937(result_type seed) except + result_type operator()() except + @@ -13,7 +17,6 @@ cdef extern from "" namespace "std" nogil: void discard(size_t z) except + void seed(result_type seed) except + - cdef cppclass mt19937_64: ctypedef uint_fast64_t result_type @@ -24,3 +27,140 @@ cdef extern from "" namespace "std" nogil: result_type max() except + void discard(size_t z) except + void 
seed(result_type seed) except + + + cdef cppclass uniform_int_distribution[T]: + ctypedef T result_type + uniform_int_distribution() except + + uniform_int_distribution(T, T) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass uniform_real_distribution[T]: + ctypedef T result_type + uniform_real_distribution() except + + uniform_real_distribution(T, T) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass bernoulli_distribution: + ctypedef bint result_type + bernoulli_distribution() except + + bernoulli_distribution(double) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass binomial_distribution[T]: + ctypedef T result_type + binomial_distribution() except + + binomial_distribution(T, double) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass geometric_distribution[T]: + ctypedef T result_type + geometric_distribution() except + + geometric_distribution(double) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + + cdef cppclass negative_binomial_distribution[T]: + ctypedef T result_type + negative_binomial_distribution() except + + negative_binomial_distribution(T, double) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass poisson_distribution[T]: + ctypedef T result_type + poisson_distribution() except + + poisson_distribution(double) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass exponential_distribution[T]: + ctypedef T result_type + 
exponential_distribution() except + + exponential_distribution(result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass gamma_distribution[T]: + ctypedef T result_type + gamma_distribution() except + + gamma_distribution(result_type, result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass weibull_distribution[T]: + ctypedef T result_type + weibull_distribution() except + + weibull_distribution(result_type, result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass extreme_value_distribution[T]: + ctypedef T result_type + extreme_value_distribution() except + + extreme_value_distribution(result_type, result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass normal_distribution[T]: + ctypedef T result_type + normal_distribution() except + + normal_distribution(result_type, result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass lognormal_distribution[T]: + ctypedef T result_type + lognormal_distribution() except + + lognormal_distribution(result_type, result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass chi_squared_distribution[T]: + ctypedef T result_type + chi_squared_distribution() except + + chi_squared_distribution(result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass cauchy_distribution[T]: + ctypedef T result_type + cauchy_distribution() except + + cauchy_distribution(result_type, 
result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass fisher_f_distribution[T]: + ctypedef T result_type + fisher_f_distribution() except + + fisher_f_distribution(result_type, result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + + + cdef cppclass student_t_distribution[T]: + ctypedef T result_type + student_t_distribution() except + + student_t_distribution(result_type) except + + result_type operator()[Generator](Generator&) except + + result_type min() except + + result_type max() except + diff --git a/tests/run/cpp_stl_random.pyx b/tests/run/cpp_stl_random.pyx index a793000fa..3b074c278 100644 --- a/tests/run/cpp_stl_random.pyx +++ b/tests/run/cpp_stl_random.pyx @@ -1,7 +1,16 @@ # mode: run # tag: cpp, cpp11 -from libcpp.random cimport mt19937, mt19937_64 +from libcpp.random cimport mt19937, mt19937_64, random_device, uniform_int_distribution, \ + uniform_real_distribution, bernoulli_distribution, binomial_distribution, \ + geometric_distribution, negative_binomial_distribution, poisson_distribution, \ + exponential_distribution, gamma_distribution, weibull_distribution, \ + extreme_value_distribution, normal_distribution, lognormal_distribution, \ + chi_squared_distribution, cauchy_distribution, fisher_f_distribution, student_t_distribution +from libc.float cimport DBL_MAX as DBL_MAX_ + + +DBL_MAX = DBL_MAX_ def mt19937_seed_test(): @@ -104,3 +113,236 @@ def mt19937_64_discard(z): for _ in range(z + 1): b = gen() return a, b + + +ctypedef fused any_dist: + uniform_int_distribution[int] + uniform_real_distribution[double] + bernoulli_distribution + binomial_distribution[int] + geometric_distribution[int] + negative_binomial_distribution[int] + poisson_distribution[int] + exponential_distribution[double] + gamma_distribution[double] + weibull_distribution[double] + 
extreme_value_distribution[double] + normal_distribution[double] + lognormal_distribution[double] + chi_squared_distribution[double] + cauchy_distribution[double] + fisher_f_distribution[double] + student_t_distribution[double] + + +cdef sample_or_range(any_dist dist, bint sample): + """ + This helper function returns a sample if `sample` is truthy and the range of the distribution + if `sample` is falsy. We use a fused type to avoid duplicating the conditional statement in each + distribution test. + """ + cdef random_device rd + if sample: + dist(mt19937(rd())) + else: + return dist.min(), dist.max() + + +def uniform_int_distribution_test(a, b, sample=True): + """ + >>> uniform_int_distribution_test(2, 3) + >>> uniform_int_distribution_test(5, 9, False) + (5, 9) + """ + cdef uniform_int_distribution[int] dist = uniform_int_distribution[int](a, b) + return sample_or_range[uniform_int_distribution[int]](dist, sample) + + +def uniform_real_distribution_test(a, b, sample=True): + """ + >>> x = uniform_real_distribution_test(4, 5) + >>> uniform_real_distribution_test(3, 8, False) + (3.0, 8.0) + """ + cdef uniform_real_distribution[double] dist = uniform_real_distribution[double](a, b) + return sample_or_range[uniform_real_distribution[double]](dist, sample) + + +def bernoulli_distribution_test(proba, sample=True): + """ + >>> bernoulli_distribution_test(0.2) + >>> bernoulli_distribution_test(0.7, False) + (False, True) + """ + cdef bernoulli_distribution dist = bernoulli_distribution(proba) + return sample_or_range[bernoulli_distribution](dist, sample) + + +def binomial_distribution_test(n, proba, sample=True): + """ + >>> binomial_distribution_test(10, 0.7) + >>> binomial_distribution_test(75, 0.3, False) + (0, 75) + """ + cdef binomial_distribution[int] dist = binomial_distribution[int](n, proba) + return sample_or_range[binomial_distribution[int]](dist, sample) + + +def geometric_distribution_test(proba, sample=True): + """ + >>> geometric_distribution_test(.4) + 
>>> geometric_distribution_test(0.2, False) # 2147483647 = 2 ** 32 - 1 + (0, 2147483647) + """ + cdef geometric_distribution[int] dist = geometric_distribution[int](proba) + return sample_or_range[geometric_distribution[int]](dist, sample) + + +def negative_binomial_distribution_test(n, p, sample=True): + """ + >>> negative_binomial_distribution_test(5, .1) + >>> negative_binomial_distribution_test(10, 0.2, False) # 2147483647 = 2 ** 32 - 1 + (0, 2147483647) + """ + cdef negative_binomial_distribution[int] dist = negative_binomial_distribution[int](n, p) + return sample_or_range[negative_binomial_distribution[int]](dist, sample) + + +def poisson_distribution_test(rate, sample=True): + """ + >>> poisson_distribution_test(7) + >>> poisson_distribution_test(7, False) # 2147483647 = 2 ** 32 - 1 + (0, 2147483647) + """ + cdef poisson_distribution[int] dist = poisson_distribution[int](rate) + return sample_or_range[poisson_distribution[int]](dist, sample) + + +def exponential_distribution_test(rate, sample=True): + """ + >>> x = exponential_distribution_test(6) + >>> l, u = exponential_distribution_test(1, False) + >>> l + 0.0 + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef exponential_distribution[double] dist = exponential_distribution[double](rate) + return sample_or_range[exponential_distribution[double]](dist, sample) + + +def gamma_distribution_test(shape, scale, sample=True): + """ + >>> gamma_distribution_test(3, 4) + >>> l, u = gamma_distribution_test(1, 1, False) + >>> l + 0.0 + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef gamma_distribution[double] dist = gamma_distribution[double](shape, scale) + return sample_or_range[gamma_distribution[double]](dist, sample) + + +def weibull_distribution_test(shape, scale, sample=True): + """ + >>> weibull_distribution_test(3, 2) + >>> l, u = weibull_distribution_test(1, 1, False) + >>> l + 0.0 + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef weibull_distribution[double] dist = 
weibull_distribution[double](shape, scale) + return sample_or_range[weibull_distribution[double]](dist, sample) + + +def extreme_value_distribution_test(shape, scale, sample=True): + """ + >>> extreme_value_distribution_test(3, 0.1) + >>> l, u = extreme_value_distribution_test(1, 1, False) + >>> l == -DBL_MAX or l == -float("inf") + True + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef extreme_value_distribution[double] dist = extreme_value_distribution[double](shape, scale) + return sample_or_range[extreme_value_distribution[double]](dist, sample) + + +def normal_distribution_test(loc, scale, sample=True): + """ + >>> normal_distribution_test(3, 2) + >>> l, u = normal_distribution_test(1, 1, False) + >>> l == -DBL_MAX or l == -float("inf") + True + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef normal_distribution[double] dist = normal_distribution[double](loc, scale) + return sample_or_range[normal_distribution[double]](dist, sample) + + +def lognormal_distribution_test(loc, scale, sample=True): + """ + >>> lognormal_distribution_test(3, 2) + >>> l, u = lognormal_distribution_test(1, 1, False) + >>> l + 0.0 + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef lognormal_distribution[double] dist = lognormal_distribution[double](loc, scale) + return sample_or_range[lognormal_distribution[double]](dist, sample) + + +def chi_squared_distribution_test(dof, sample=True): + """ + >>> x = chi_squared_distribution_test(9) + >>> l, u = chi_squared_distribution_test(5, False) + >>> l + 0.0 + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef chi_squared_distribution[double] dist = chi_squared_distribution[double](dof) + return sample_or_range[chi_squared_distribution[double]](dist, sample) + + +def cauchy_distribution_test(loc, scale, sample=True): + """ + >>> cauchy_distribution_test(3, 9) + >>> l, u = cauchy_distribution_test(1, 1, False) + >>> l == -DBL_MAX or l == -float("inf") + True + >>> u == DBL_MAX or u == float("inf") + True + 
""" + cdef cauchy_distribution[double] dist = cauchy_distribution[double](loc, scale) + return sample_or_range[cauchy_distribution[double]](dist, sample) + + +def fisher_f_distribution_test(m, n, sample=True): + """ + >>> x = fisher_f_distribution_test(9, 11) + >>> l, u = fisher_f_distribution_test(1, 1, False) + >>> l + 0.0 + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef fisher_f_distribution[double] dist = fisher_f_distribution[double](m, n) + return sample_or_range[fisher_f_distribution[double]](dist, sample) + + +def student_t_distribution_test(dof, sample=True): + """ + >>> x = student_t_distribution_test(13) + >>> l, u = student_t_distribution_test(1, False) + >>> l == -DBL_MAX or l == -float("inf") + True + >>> u == DBL_MAX or u == float("inf") + True + """ + cdef student_t_distribution[double] dist = student_t_distribution[double](dof) + return sample_or_range[student_t_distribution[double]](dist, sample) -- cgit v1.2.1 From 4613ec04a10a240f56d81d052835870e409fd2a4 Mon Sep 17 00:00:00 2001 From: Thomas A Caswell Date: Tue, 7 Jun 2022 13:32:19 -0400 Subject: MNT: always require va_start to have two arguments (#4820) * MNT: always require va_start to have two arguments https://github.com/python/cpython/pull/93215 chance CPython to always use the 2-input version of va_start and dropped defining HAVE_STDARG_PROTOTYPES. This resulted in the 1-argument version being used when compiling cython source which fails This makes cython also always use the 2-argument version. * Remove blank line * FIX: version gate 2-argument va_start checking to py311 --- Cython/Utility/MemoryView_C.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/MemoryView_C.c b/Cython/Utility/MemoryView_C.c index 07ed24d20..de003a2ee 100644 --- a/Cython/Utility/MemoryView_C.c +++ b/Cython/Utility/MemoryView_C.c @@ -451,7 +451,7 @@ static void __pyx_fatalerror(const char *fmt, ...) 
Py_NO_RETURN { va_list vargs; char msg[200]; -#ifdef HAVE_STDARG_PROTOTYPES +#if PY_VERSION_HEX >= 0x030A0000 || defined(HAVE_STDARG_PROTOTYPES) va_start(vargs, fmt); #else va_start(vargs); -- cgit v1.2.1 From ad2d1f7dd72b0dbdb9d9441e0b30936ebad1a2c4 Mon Sep 17 00:00:00 2001 From: Thomas A Caswell Date: Tue, 7 Jun 2022 13:32:19 -0400 Subject: MNT: always require va_start to have two arguments (#4820) * MNT: always require va_start to have two arguments https://github.com/python/cpython/pull/93215 chance CPython to always use the 2-input version of va_start and dropped defining HAVE_STDARG_PROTOTYPES. This resulted in the 1-argument version being used when compiling cython source which fails This makes cython also always use the 2-argument version. * Remove blank line * FIX: version gate 2-argument va_start checking to py311 --- Cython/Utility/MemoryView_C.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/MemoryView_C.c b/Cython/Utility/MemoryView_C.c index 0a5d8ee2c..8146c458d 100644 --- a/Cython/Utility/MemoryView_C.c +++ b/Cython/Utility/MemoryView_C.c @@ -450,7 +450,7 @@ static void __pyx_fatalerror(const char *fmt, ...) 
Py_NO_RETURN { va_list vargs; char msg[200]; -#ifdef HAVE_STDARG_PROTOTYPES +#if PY_VERSION_HEX >= 0x030A0000 || defined(HAVE_STDARG_PROTOTYPES) va_start(vargs, fmt); #else va_start(vargs); -- cgit v1.2.1 From 8e29b6d47f6f5b10ec1a37f06db440156ac2ac2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Molina=20Garc=C3=ADa?= Date: Wed, 8 Jun 2022 08:14:12 +0200 Subject: Update Limited API preprocessor warning to be compatible with MSVC (#4826) --- Cython/Compiler/Nodes.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index cfe43e890..927e47763 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -5435,8 +5435,10 @@ class CClassDefNode(ClassDefNode): typeptr_cname, buffer_slot.slot_name, )) code.putln("}") + code.putln("#elif defined(_MSC_VER)") + code.putln("#pragma message (\"The buffer protocol is not supported in the Limited C-API.\")") code.putln("#else") - code.putln("#warning The buffer protocol is not supported in the Limited C-API.") + code.putln("#warning \"The buffer protocol is not supported in the Limited C-API.\"") code.putln("#endif") code.globalstate.use_utility_code( -- cgit v1.2.1 From 9341e73aceface39dd7b48bf46b3f376cde33296 Mon Sep 17 00:00:00 2001 From: mwtian <81660174+mwtian@users.noreply.github.com> Date: Sat, 11 Jun 2022 00:21:44 -0700 Subject: Always initialize `state` in __Pyx_WriteUnraisable (#4831) Avoids error about uninitialized variables when compiling with ubscan --- Cython/Utility/Exceptions.c | 2 -- 1 file changed, 2 deletions(-) diff --git a/Cython/Utility/Exceptions.c b/Cython/Utility/Exceptions.c index c6c5d20ed..9f96225d1 100644 --- a/Cython/Utility/Exceptions.c +++ b/Cython/Utility/Exceptions.c @@ -675,10 +675,8 @@ static void __Pyx_WriteUnraisable(const char *name, int clineno, PyGILState_STATE state; if (nogil) state = PyGILState_Ensure(); -#ifdef _MSC_VER /* arbitrary, to suppress warning */ else state = (PyGILState_STATE)-1; 
-#endif #endif CYTHON_UNUSED_VAR(clineno); CYTHON_UNUSED_VAR(lineno); -- cgit v1.2.1 From f753deecd09e011a1bc276b78ccc0f1c0ad67f09 Mon Sep 17 00:00:00 2001 From: pfebrer <42074085+pfebrer@users.noreply.github.com> Date: Fri, 17 Jun 2022 18:51:26 +0200 Subject: BUG: fused types not subscriptable in Cython.Shadow (#4842) --- Cython/Shadow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Shadow.py b/Cython/Shadow.py index 48bc249e0..78d950ce2 100644 --- a/Cython/Shadow.py +++ b/Cython/Shadow.py @@ -385,7 +385,7 @@ class typedef(CythonType): __getitem__ = index_type class _FusedType(CythonType): - pass + __getitem__ = index_type def fused_type(*args): -- cgit v1.2.1 From a7d98eeafac9aaaa8825fd471be38172ee0b259c Mon Sep 17 00:00:00 2001 From: pfebrer <42074085+pfebrer@users.noreply.github.com> Date: Fri, 17 Jun 2022 18:51:26 +0200 Subject: BUG: fused types not subscriptable in Cython.Shadow (#4842) --- Cython/Shadow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Shadow.py b/Cython/Shadow.py index cc8c9b60a..f92f9ebf1 100644 --- a/Cython/Shadow.py +++ b/Cython/Shadow.py @@ -353,7 +353,7 @@ class typedef(CythonType): __getitem__ = index_type class _FusedType(CythonType): - pass + __getitem__ = index_type def fused_type(*args): -- cgit v1.2.1 From 2277c3345ed81770166a0f457c77e3b01b26f994 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 18 Jun 2022 10:10:02 +0100 Subject: Improve pattern identification Some types of pattern are unambiguous so don't need backtracking --- Cython/Compiler/Parsing.py | 46 +++++++++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index a08abe66d..1cd67f445 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -4073,6 +4073,7 @@ def p_pattern(s): def p_closed_pattern(s): """ + The PEG parser specifies it as | literal_pattern | capture_pattern | wildcard_pattern 
@@ -4081,7 +4082,29 @@ def p_closed_pattern(s): | sequence_pattern | mapping_pattern | class_pattern + + For the sake avoiding too much backtracking, we know: + * starts with "{" is a sequence_pattern + * starts with "[" is a mapping_pattern + * starts with "(" is a group_pattern or sequence_pattern + * wildcard pattern is just identifier=='_' + The rest are then tried in order with backtracking """ + if s.sy == 'IDENT' and s.systring == '_': + pos = s.position() + s.next() + return MatchCaseNodes.MatchAndAssignPatternNode(pos) + elif s.sy == '{': + return p_mapping_pattern(s) + elif s.sy == '[': + return p_sequence_pattern(s) + elif s.sy == '(': + with tentatively_scan(s) as errors: + result = p_group_pattern(s) + if not errors: + return result + return p_sequence_pattern(s) + with tentatively_scan(s) as errors: result = p_literal_pattern(s) if not errors: @@ -4090,26 +4113,10 @@ def p_closed_pattern(s): result = p_capture_pattern(s) if not errors: return result - with tentatively_scan(s) as errors: - result = p_wildcard_pattern(s) - if not errors: - return result with tentatively_scan(s) as errors: result = p_value_pattern(s) if not errors: return result - with tentatively_scan(s) as errors: - result = p_group_pattern(s) - if not errors: - return result - with tentatively_scan(s) as errors: - result = p_sequence_pattern(s) - if not errors: - return result - with tentatively_scan(s) as errors: - result = p_mapping_pattern(s) - if not errors: - return result return p_class_pattern(s) def p_literal_pattern(s): @@ -4224,13 +4231,6 @@ def p_group_pattern(s): s.expect(")") return pattern -def p_wildcard_pattern(s): - if s.sy != "IDENT" or s.systring != "_": - s.error("Expected '_'") - pos = s.position() - s.next() - return MatchCaseNodes.MatchAndAssignPatternNode(pos) - def p_sequence_pattern(s): opener = s.sy pos = s.position() -- cgit v1.2.1 From d2d25a25c2a5b526fba1b907138bd9dcb4476ef5 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 19 Jun 2022 19:37:25 +0100 
Subject: + NUMBER isn't a valid pattern --- Cython/Compiler/Parsing.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 1cd67f445..1347289d4 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -4006,7 +4006,7 @@ def p_patterns(s): e = errors[0] s.error(e.args[1], pos = e.args[0]) patterns.append(pattern) - + if s.sy == ",": seq = True s.next() @@ -4123,7 +4123,7 @@ def p_literal_pattern(s): # a lot of duplication in this function with "p_atom" next_must_be_a_number = False sign = '' - if s.sy in ['+', '-']: + if s.sy == '-': sign = s.sy sign_pos = s.position() s.next() @@ -4198,7 +4198,7 @@ def p_literal_pattern(s): result = ExprNodes.NullNode(pos) if result: s.next() - return MatchCaseNodes.MatchValuePatternNode(pos, value = result, is_is_check = True) + return MatchCaseNodes.MatchValuePatternNode(pos, value = result, is_is_check = True) s.error("Failed to match literal") @@ -4255,7 +4255,7 @@ def p_sequence_pattern(s): s.expect(closer) return MatchCaseNodes.MatchSequencePatternNode(pos, patterns=patterns) else: - s.error("Expected '[' or '('") + s.error("Expected '[' or '('") def p_mapping_pattern(s): pos = s.position() -- cgit v1.2.1 From c416c7cb2159cc43f9461d96721aeaaa8f9f4714 Mon Sep 17 00:00:00 2001 From: Kenrick Everett <31653115+Kenrick0@users.noreply.github.com> Date: Tue, 21 Jun 2022 16:18:12 +1000 Subject: Fix bytearray iteration in 0.29.x (#4108) By explicitly setting the result type --- Cython/Compiler/ExprNodes.py | 4 ++++ tests/run/bytearray_iter.py | 15 +++++++++++++++ 2 files changed, 19 insertions(+) create mode 100644 tests/run/bytearray_iter.py diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 9162eaad9..4a8ce5dca 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -2876,6 +2876,10 @@ class NextNode(AtomicExprNode): iterator_type = self.iterator.infer_type(env) if 
iterator_type.is_ptr or iterator_type.is_array: return iterator_type.base_type + elif self.iterator.sequence.type is bytearray_type: + # This is a temporary work-around to fix bytearray iteration in 0.29.x + # It has been fixed properly in master, refer to ticket: 3473 + return py_object_type elif iterator_type.is_cpp_class: item_type = env.lookup_operator_for_types(self.pos, "*", [iterator_type]).type.return_type if item_type.is_reference: diff --git a/tests/run/bytearray_iter.py b/tests/run/bytearray_iter.py new file mode 100644 index 000000000..4beb8e285 --- /dev/null +++ b/tests/run/bytearray_iter.py @@ -0,0 +1,15 @@ +# mode: run +# ticket: 3473 + +def test_bytearray_iteration(src): + """ + >>> src = b'123' + >>> test_bytearray_iteration(src) + 49 + 50 + 51 + """ + + data = bytearray(src) + for elem in data: + print(elem) -- cgit v1.2.1 From 6b6d5f20f04fb581a23effec5a14ec7a97b213fd Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 21 Jun 2022 07:39:01 +0100 Subject: Fix code-style blank like at end of file --- tests/run/bytearray_iter.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/run/bytearray_iter.py b/tests/run/bytearray_iter.py index 70a1f139b..60df9fcc1 100644 --- a/tests/run/bytearray_iter.py +++ b/tests/run/bytearray_iter.py @@ -103,4 +103,3 @@ def test_bytearray_iteration(src): data = bytearray(src) for elem in data: print(elem) - -- cgit v1.2.1 From a118960f7643cd343f3a4bb7ef653a69ef16c13b Mon Sep 17 00:00:00 2001 From: Lisandro Dalcin Date: Tue, 21 Jun 2022 15:04:54 +0300 Subject: Fix GCC -Wconversion warnings in C utility code (GH-4854) --- Cython/Utility/CythonFunction.c | 2 +- Cython/Utility/StringTools.c | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cython/Utility/CythonFunction.c b/Cython/Utility/CythonFunction.c index 270f441ef..870dcf620 100644 --- a/Cython/Utility/CythonFunction.c +++ b/Cython/Utility/CythonFunction.c @@ -934,7 +934,7 @@ static PyObject * 
__Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject return NULL; } - return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, nargs, kwnames); + return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames); } #endif diff --git a/Cython/Utility/StringTools.c b/Cython/Utility/StringTools.c index 8c92228cb..910fbf6fa 100644 --- a/Cython/Utility/StringTools.c +++ b/Cython/Utility/StringTools.c @@ -1012,7 +1012,7 @@ static PyObject* __Pyx_PyUnicode_BuildFromAscii(Py_ssize_t ulength, char* chars, padding = PyUnicode_FromOrdinal(padding_char); if (likely(padding) && uoffset > prepend_sign + 1) { PyObject *tmp; - PyObject *repeat = PyInt_FromSize_t(uoffset - prepend_sign); + PyObject *repeat = PyInt_FromSsize_t(uoffset - prepend_sign); if (unlikely(!repeat)) goto done_or_error; tmp = PyNumber_Multiply(padding, repeat); Py_DECREF(repeat); -- cgit v1.2.1 From 9fb8fae9295a9fa689bfe54a00f1f39642822d7c Mon Sep 17 00:00:00 2001 From: Lisandro Dalcin Date: Tue, 21 Jun 2022 15:04:54 +0300 Subject: Fix GCC -Wconversion warning in C utility code (GH-4854) --- Cython/Utility/StringTools.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/StringTools.c b/Cython/Utility/StringTools.c index 35241c64a..98b5e260e 100644 --- a/Cython/Utility/StringTools.c +++ b/Cython/Utility/StringTools.c @@ -953,7 +953,7 @@ static PyObject* __Pyx_PyUnicode_BuildFromAscii(Py_ssize_t ulength, char* chars, padding = PyUnicode_FromOrdinal(padding_char); if (likely(padding) && uoffset > prepend_sign + 1) { PyObject *tmp; - PyObject *repeat = PyInt_FromSize_t(uoffset - prepend_sign); + PyObject *repeat = PyInt_FromSsize_t(uoffset - prepend_sign); if (unlikely(!repeat)) goto done_or_error; tmp = PyNumber_Multiply(padding, repeat); Py_DECREF(repeat); -- cgit v1.2.1 From 8eca1d1c186b6505cc9f06e5d2b42647bce68836 Mon Sep 17 00:00:00 2001 From: Jouke Witteveen Date: Thu, 23 Jun 2022 09:19:37 +0200 Subject: Mention python3-dev 
requirement on Ubuntu/Debian (#4856) Let's assume nobody who reads the quickstart documentation actually wants python2-dev. --- docs/src/quickstart/install.rst | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/src/quickstart/install.rst b/docs/src/quickstart/install.rst index 8b5f4c350..04a47afdc 100644 --- a/docs/src/quickstart/install.rst +++ b/docs/src/quickstart/install.rst @@ -15,8 +15,10 @@ according to the system used: - **Linux** The GNU C Compiler (gcc) is usually present, or easily available through the package system. On Ubuntu or Debian, for - instance, the command ``sudo apt-get install build-essential`` will - fetch everything you need. + instance, it is part of the ``build-essential`` package. Next to a + C compiler, Cython requires the Python header files. On Ubuntu or + Debian, the command ``sudo apt-get install build-essential python3-dev`` + will fetch everything you need. - **Mac OS X** To retrieve gcc, one option is to install Apple's XCode, which can be retrieved from the Mac OS X's install DVDs or -- cgit v1.2.1 From 371b0a163a7ec9f87f5d478f2249ab69c4dc6785 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Sat, 25 Jun 2022 11:40:09 +0200 Subject: Docs: Migrate special_methods.rst to pure python (#4537) * Migrate special_methods.rst to pure python * Fix missing import in examples --- .../userguide/special_methods/total_ordering.py | 13 ++++++++++++ .../userguide/special_methods/total_ordering.pyx | 13 ++++++++++++ docs/src/userguide/special_methods.rst | 24 +++++++++++----------- 3 files changed, 38 insertions(+), 12 deletions(-) create mode 100644 docs/examples/userguide/special_methods/total_ordering.py create mode 100644 docs/examples/userguide/special_methods/total_ordering.pyx diff --git a/docs/examples/userguide/special_methods/total_ordering.py b/docs/examples/userguide/special_methods/total_ordering.py new file mode 100644 index 000000000..7d164d6df --- /dev/null +++ 
b/docs/examples/userguide/special_methods/total_ordering.py @@ -0,0 +1,13 @@ +import cython +@cython.total_ordering +@cython.cclass +class ExtGe: + x: cython.int + + def __ge__(self, other): + if not isinstance(other, ExtGe): + return NotImplemented + return self.x >= cython.cast(ExtGe, other).x + + def __eq__(self, other): + return isinstance(other, ExtGe) and self.x == cython.cast(ExtGe, other).x diff --git a/docs/examples/userguide/special_methods/total_ordering.pyx b/docs/examples/userguide/special_methods/total_ordering.pyx new file mode 100644 index 000000000..06d2ccef7 --- /dev/null +++ b/docs/examples/userguide/special_methods/total_ordering.pyx @@ -0,0 +1,13 @@ +import cython + +@cython.total_ordering +cdef class ExtGe: + cdef int x + + def __ge__(self, other): + if not isinstance(other, ExtGe): + return NotImplemented + return self.x >= (other).x + + def __eq__(self, other): + return isinstance(other, ExtGe) and self.x == (other).x diff --git a/docs/src/userguide/special_methods.rst b/docs/src/userguide/special_methods.rst index af702f3c3..e6635b502 100644 --- a/docs/src/userguide/special_methods.rst +++ b/docs/src/userguide/special_methods.rst @@ -3,6 +3,9 @@ Special Methods of Extension Types =================================== +.. include:: + ../two-syntax-variants-used + This page describes the special methods currently supported by Cython extension types. A complete list of all the special methods appears in the table at the bottom. Some of these methods behave differently from their Python @@ -12,7 +15,8 @@ mention. .. Note:: Everything said on this page applies only to extension types, defined - with the :keyword:`cdef` class statement. It doesn't apply to classes defined with the + with the :keyword:`cdef` class statement or decorated using ``@cclass`` decorator. + It doesn't apply to classes defined with the Python :keyword:`class` statement, where the normal Python rules apply. .. _declaration: @@ -20,7 +24,7 @@ mention. 
Declaration ------------ Special methods of extension types must be declared with :keyword:`def`, not -:keyword:`cdef`. This does not impact their performance--Python uses different +:keyword:`cdef`/``@cfunc``. This does not impact their performance--Python uses different calling conventions to invoke these special methods. .. _docstrings: @@ -225,19 +229,15 @@ Depending on the application, one way or the other may be better: decorator specifically for ``cdef`` classes. (Normal Python classes can use the original ``functools`` decorator.) - .. code-block:: cython +.. tabs:: + + .. group-tab:: Pure Python - @cython.total_ordering - cdef class ExtGe: - cdef int x + .. literalinclude:: ../../examples/userguide/special_methods/total_ordering.py - def __ge__(self, other): - if not isinstance(other, ExtGe): - return NotImplemented - return self.x >= (other).x + .. group-tab:: Cython - def __eq__(self, other): - return isinstance(other, ExtGe) and self.x == (other).x + .. literalinclude:: ../../examples/userguide/special_methods/total_ordering.pyx .. 
_the__next__method: -- cgit v1.2.1 From 36520e7c90b059777271c6e71d62af55f123a42b Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Sat, 25 Jun 2022 13:03:05 +0200 Subject: Docs: Migrate sharing_declarations.rst to pure python mode (#4544) * Initial migration of examples * Migrate text in sharing_declarations.rst * Some fixes of examples and clarification * Fix capitalization * Apply suggestions from code review Co-authored-by: da-woods Co-authored-by: da-woods --- .../userguide/sharing_declarations/landscaping.py | 7 + .../userguide/sharing_declarations/lunch.py | 5 + .../userguide/sharing_declarations/lunch.pyx | 1 + .../userguide/sharing_declarations/restaurant.py | 12 ++ .../userguide/sharing_declarations/restaurant.pyx | 2 +- .../userguide/sharing_declarations/setup.py | 4 - .../userguide/sharing_declarations/setup_py.py | 4 + .../userguide/sharing_declarations/setup_pyx.py | 4 + .../userguide/sharing_declarations/shrubbing.py | 10 ++ .../userguide/sharing_declarations/shrubbing.pyx | 3 + .../userguide/sharing_declarations/spammery.py | 10 ++ .../userguide/sharing_declarations/spammery.pyx | 3 +- .../userguide/sharing_declarations/volume.py | 2 + docs/src/tutorial/pure.rst | 3 + docs/src/userguide/sharing_declarations.rst | 152 ++++++++++++++------- 15 files changed, 169 insertions(+), 53 deletions(-) create mode 100644 docs/examples/userguide/sharing_declarations/landscaping.py create mode 100644 docs/examples/userguide/sharing_declarations/lunch.py create mode 100644 docs/examples/userguide/sharing_declarations/restaurant.py delete mode 100644 docs/examples/userguide/sharing_declarations/setup.py create mode 100644 docs/examples/userguide/sharing_declarations/setup_py.py create mode 100644 docs/examples/userguide/sharing_declarations/setup_pyx.py create mode 100644 docs/examples/userguide/sharing_declarations/shrubbing.py create mode 100644 docs/examples/userguide/sharing_declarations/spammery.py create mode 100644 
docs/examples/userguide/sharing_declarations/volume.py diff --git a/docs/examples/userguide/sharing_declarations/landscaping.py b/docs/examples/userguide/sharing_declarations/landscaping.py new file mode 100644 index 000000000..2d2c4b5b7 --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/landscaping.py @@ -0,0 +1,7 @@ +from cython.cimports.shrubbing import Shrubbery +import shrubbing + +def main(): + sh: Shrubbery + sh = shrubbing.standard_shrubbery() + print("Shrubbery size is", sh.width, 'x', sh.length) diff --git a/docs/examples/userguide/sharing_declarations/lunch.py b/docs/examples/userguide/sharing_declarations/lunch.py new file mode 100644 index 000000000..df56913eb --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/lunch.py @@ -0,0 +1,5 @@ +import cython +from cython.cimports.c_lunch import eject_tomato as c_eject_tomato + +def eject_tomato(speed: cython.float): + c_eject_tomato(speed) diff --git a/docs/examples/userguide/sharing_declarations/lunch.pyx b/docs/examples/userguide/sharing_declarations/lunch.pyx index 8b0911510..fea5e4c87 100644 --- a/docs/examples/userguide/sharing_declarations/lunch.pyx +++ b/docs/examples/userguide/sharing_declarations/lunch.pyx @@ -1,3 +1,4 @@ + cimport c_lunch def eject_tomato(float speed): diff --git a/docs/examples/userguide/sharing_declarations/restaurant.py b/docs/examples/userguide/sharing_declarations/restaurant.py new file mode 100644 index 000000000..b4bdb2eba --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/restaurant.py @@ -0,0 +1,12 @@ +import cython +from cython.cimports.dishes import spamdish, sausage + +@cython.cfunc +def prepare(d: cython.pointer(spamdish)) -> cython.void: + d.oz_of_spam = 42 + d.filler = sausage + +def serve(): + d: spamdish + prepare(cython.address(d)) + print(f'{d.oz_of_spam} oz spam, filler no. 
{d.filler}') diff --git a/docs/examples/userguide/sharing_declarations/restaurant.pyx b/docs/examples/userguide/sharing_declarations/restaurant.pyx index 3257c681b..f556646dc 100644 --- a/docs/examples/userguide/sharing_declarations/restaurant.pyx +++ b/docs/examples/userguide/sharing_declarations/restaurant.pyx @@ -1,4 +1,4 @@ -from __future__ import print_function + cimport dishes from dishes cimport spamdish diff --git a/docs/examples/userguide/sharing_declarations/setup.py b/docs/examples/userguide/sharing_declarations/setup.py deleted file mode 100644 index 505b53e9d..000000000 --- a/docs/examples/userguide/sharing_declarations/setup.py +++ /dev/null @@ -1,4 +0,0 @@ -from setuptools import setup -from Cython.Build import cythonize - -setup(ext_modules=cythonize(["landscaping.pyx", "shrubbing.pyx"])) diff --git a/docs/examples/userguide/sharing_declarations/setup_py.py b/docs/examples/userguide/sharing_declarations/setup_py.py new file mode 100644 index 000000000..45ded0ff4 --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/setup_py.py @@ -0,0 +1,4 @@ +from setuptools import setup +from Cython.Build import cythonize + +setup(ext_modules=cythonize(["landscaping.py", "shrubbing.py"])) diff --git a/docs/examples/userguide/sharing_declarations/setup_pyx.py b/docs/examples/userguide/sharing_declarations/setup_pyx.py new file mode 100644 index 000000000..505b53e9d --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/setup_pyx.py @@ -0,0 +1,4 @@ +from setuptools import setup +from Cython.Build import cythonize + +setup(ext_modules=cythonize(["landscaping.pyx", "shrubbing.pyx"])) diff --git a/docs/examples/userguide/sharing_declarations/shrubbing.py b/docs/examples/userguide/sharing_declarations/shrubbing.py new file mode 100644 index 000000000..27e20d631 --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/shrubbing.py @@ -0,0 +1,10 @@ +import cython + +@cython.cclass +class Shrubbery: + def __cinit__(self, w: cython.int, l: 
cython.int): + self.width = w + self.length = l + +def standard_shrubbery(): + return Shrubbery(3, 7) diff --git a/docs/examples/userguide/sharing_declarations/shrubbing.pyx b/docs/examples/userguide/sharing_declarations/shrubbing.pyx index bb97e7e77..8598b5c98 100644 --- a/docs/examples/userguide/sharing_declarations/shrubbing.pyx +++ b/docs/examples/userguide/sharing_declarations/shrubbing.pyx @@ -1,3 +1,6 @@ + + + cdef class Shrubbery: def __cinit__(self, int w, int l): self.width = w diff --git a/docs/examples/userguide/sharing_declarations/spammery.py b/docs/examples/userguide/sharing_declarations/spammery.py new file mode 100644 index 000000000..88554be4a --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/spammery.py @@ -0,0 +1,10 @@ +import cython +from cython.cimports.volume import cube + +def menu(description, size): + print(description, ":", cube(size), + "cubic metres of spam") + +menu("Entree", 1) +menu("Main course", 3) +menu("Dessert", 2) diff --git a/docs/examples/userguide/sharing_declarations/spammery.pyx b/docs/examples/userguide/sharing_declarations/spammery.pyx index 16cbda06e..da11e737e 100644 --- a/docs/examples/userguide/sharing_declarations/spammery.pyx +++ b/docs/examples/userguide/sharing_declarations/spammery.pyx @@ -1,5 +1,4 @@ -from __future__ import print_function - + from volume cimport cube def menu(description, size): diff --git a/docs/examples/userguide/sharing_declarations/volume.py b/docs/examples/userguide/sharing_declarations/volume.py new file mode 100644 index 000000000..1f6ff9c72 --- /dev/null +++ b/docs/examples/userguide/sharing_declarations/volume.py @@ -0,0 +1,2 @@ +def cube(x): + return x * x * x diff --git a/docs/src/tutorial/pure.rst b/docs/src/tutorial/pure.rst index a536f2b31..417b7d1b2 100644 --- a/docs/src/tutorial/pure.rst +++ b/docs/src/tutorial/pure.rst @@ -29,6 +29,7 @@ In pure mode, you are more or less restricted to code that can be expressed beyond that can only be done in .pyx files with 
extended language syntax, because it depends on features of the Cython compiler. +.. _augmenting_pxd: Augmenting .pxd --------------- @@ -249,6 +250,8 @@ releasing or acquiring the GIL. The condition must be constant (at compile time) A common use case for conditionally acquiring and releasing the GIL are fused types that allow different GIL handling depending on the specific type (see :ref:`gil_conditional`). +.. py:module:: cython.cimports + cimports ^^^^^^^^ diff --git a/docs/src/userguide/sharing_declarations.rst b/docs/src/userguide/sharing_declarations.rst index 70e29e2b2..35ba58dfd 100644 --- a/docs/src/userguide/sharing_declarations.rst +++ b/docs/src/userguide/sharing_declarations.rst @@ -6,6 +6,9 @@ Sharing Declarations Between Cython Modules ******************************************** +.. include:: + ../two-syntax-variants-used + This section describes how to make C declarations, functions and extension types in one Cython module available for use in another Cython module. These facilities are closely modeled on the Python import mechanism, @@ -17,13 +20,13 @@ Definition and Implementation files A Cython module can be split into two parts: a definition file with a ``.pxd`` suffix, containing C declarations that are to be available to other Cython -modules, and an implementation file with a ``.pyx`` suffix, containing +modules, and an implementation file with a ``.pyx``/``.py`` suffix, containing everything else. When a module wants to use something declared in another module's definition file, it imports it using the :keyword:`cimport` -statement. +statement or using special :py:mod:`cython.cimports` package. A ``.pxd`` file that consists solely of extern declarations does not need -to correspond to an actual ``.pyx`` file or Python module. This can make it a +to correspond to an actual ``.pyx``/``.py`` file or Python module. 
This can make it a convenient place to put common declarations, for example declarations of functions from an :ref:`external library ` that one wants to use in several modules. @@ -41,8 +44,8 @@ A definition file can contain: It cannot contain the implementations of any C or Python functions, or any Python class definitions, or any executable statements. It is needed when one -wants to access :keyword:`cdef` attributes and methods, or to inherit from -:keyword:`cdef` classes defined in this module. +wants to access :keyword:`cdef`/``@cfunc`` attributes and methods, or to inherit from +:keyword:`cdef`/``@cclass`` classes defined in this module. .. note:: @@ -70,23 +73,45 @@ The cimport statement The :keyword:`cimport` statement is used in a definition or implementation file to gain access to names declared in another definition file. Its syntax exactly parallels that of the normal Python import -statement:: +statement. When pure python syntax is used, the same effect can be done by +importing from special :py:mod:`cython.cimports` package. In later text the term +to ``cimport`` refers to using both :keyword:`cimport` statement or +:py:mod:`cython.cimports` package. - cimport module [, module...] +.. tabs:: - from module cimport name [as name] [, name [as name] ...] + .. group-tab:: Pure Python -Here is an example. :file:`dishes.pxd` is a definition file which exports a -C data type. :file:`restaurant.pyx` is an implementation file which imports and -uses it. + .. code-block:: python + + from cython.cimports.module import name [as name][, name [as name] ...] + + .. group-tab:: Cython -:file:`dishes.pxd`: + .. code-block:: cython + + cimport module [, module...] + + from module cimport name [as name] [, name [as name] ...] + +Here is an example. :file:`dishes.pxd` is a definition file which exports a +C data type. :file:`restaurant.pyx`/:file:`restaurant.py` is an implementation file +which imports and uses it. .. 
literalinclude:: ../../examples/userguide/sharing_declarations/dishes.pxd + :caption: dishes.pxd + +.. tabs:: + + .. group-tab:: Pure Python -:file:`restaurant.pyx`: + .. literalinclude:: ../../examples/userguide/sharing_declarations/restaurant.py + :caption: dishes.py -.. literalinclude:: ../../examples/userguide/sharing_declarations/restaurant.pyx + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/sharing_declarations/restaurant.pyx + :caption: dishes.pyx It is important to understand that the :keyword:`cimport` statement can only be used to import C data types, C functions and variables, and extension @@ -116,8 +141,8 @@ option to ``cythonize()``), as well as ``sys.path``. Using ``package_data`` to install ``.pxd`` files in your ``setup.py`` script allows other packages to cimport items from your module as a dependency. -Also, whenever you compile a file :file:`modulename.pyx`, the corresponding -definition file :file:`modulename.pxd` is first searched for along the +Also, whenever you compile a file :file:`modulename.pyx`/:file:`modulename.py`, +the corresponding definition file :file:`modulename.pxd` is first searched for along the include path (but not ``sys.path``), and if found, it is processed before processing the ``.pyx`` file. @@ -132,16 +157,23 @@ for an imaginary module, and :keyword:`cimport` that module. You can then refer to the C functions by qualifying them with the name of the module. Here's an example: -:file:`c_lunch.pxd`: - .. literalinclude:: ../../examples/userguide/sharing_declarations/c_lunch.pxd + :caption: c_lunch.pxd + +.. tabs:: -:file:`lunch.pyx`: + .. group-tab:: Pure Python -.. literalinclude:: ../../examples/userguide/sharing_declarations/lunch.pyx + .. literalinclude:: ../../examples/userguide/sharing_declarations/lunch.py + :caption: lunch.py -You don't need any :file:`c_lunch.pyx` file, because the only things defined -in :file:`c_lunch.pxd` are extern C entities. There won't be any actual + .. 
group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/sharing_declarations/lunch.pyx + :caption: lunch.pyx + +You don't need any :file:`c_lunch.pyx`/:file:`c_lunch.py` file, because the only +things defined in :file:`c_lunch.pxd` are extern C entities. There won't be any actual ``c_lunch`` module at run time, but that doesn't matter; the :file:`c_lunch.pxd` file has done its job of providing an additional namespace at compile time. @@ -154,17 +186,32 @@ C functions defined at the top level of a module can be made available via :keyword:`cimport` by putting headers for them in the ``.pxd`` file, for example: -:file:`volume.pxd`: - .. literalinclude:: ../../examples/userguide/sharing_declarations/volume.pxd + :caption: volume.pxd + +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/sharing_declarations/volume.py + :caption: volume.py + + .. literalinclude:: ../../examples/userguide/sharing_declarations/spammery.py + :caption: spammery.py -:file:`volume.pyx`: + .. note:: -.. literalinclude:: ../../examples/userguide/sharing_declarations/volume.pyx + Type definitions of function ``cube()`` in :file:`volume.py` are not provided + since they are used from .pxd definition file. See :ref:`augmenting_pxd` and + GitHub issue :issue:`4388`. -:file:`spammery.pyx`: + .. group-tab:: Cython -.. literalinclude:: ../../examples/userguide/sharing_declarations/spammery.pyx + .. literalinclude:: ../../examples/userguide/sharing_declarations/volume.pyx + :caption: volume.pyx + + .. literalinclude:: ../../examples/userguide/sharing_declarations/spammery.pyx + :caption: spammery.pyx .. note:: @@ -193,34 +240,47 @@ Python methods. Here is an example of a module which defines and exports an extension type, and another module which uses it: -:file:`shrubbing.pxd`: - .. literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.pxd + :caption: shrubbing.pxd + +.. tabs:: + + .. group-tab:: Pure Python + + .. 
literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.py + :caption: shrubbing.py + + .. literalinclude:: ../../examples/userguide/sharing_declarations/landscaping.py + :caption: landscaping.py -:file:`shrubbing.pyx`: + One would then need to compile both of these modules, e.g. using -.. literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.pyx + .. literalinclude:: ../../examples/userguide/sharing_declarations/setup_py.py + :caption: setup.py -:file:`landscaping.pyx`: + .. group-tab:: Cython -.. literalinclude:: ../../examples/userguide/sharing_declarations/landscaping.pyx + .. literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.pyx + :caption: shrubbing.pyx -One would then need to compile both of these modules, e.g. using + .. literalinclude:: ../../examples/userguide/sharing_declarations/landscaping.pyx + :caption: landscaping.pyx -:file:`setup.py`: + One would then need to compile both of these modules, e.g. using -.. literalinclude:: ../../examples/userguide/sharing_declarations/setup.py + .. literalinclude:: ../../examples/userguide/sharing_declarations/setup_pyx.py + :caption: setup.py Some things to note about this example: -* There is a :keyword:`cdef` class Shrubbery declaration in both - :file:`Shrubbing.pxd` and :file:`Shrubbing.pyx`. When the Shrubbing module +* There is a :keyword:`cdef`/``@cclass`` class Shrubbery declaration in both + :file:`shrubbing.pxd` and :file:`shrubbing.pyx`. When the shrubbing module is compiled, these two declarations are combined into one. -* In Landscaping.pyx, the :keyword:`cimport` Shrubbing declaration allows us - to refer to the Shrubbery type as :class:`Shrubbing.Shrubbery`. But it - doesn't bind the name Shrubbing in Landscaping's module namespace at run - time, so to access :func:`Shrubbing.standard_shrubbery` we also need to - ``import Shrubbing``. 
+* In :file:`landscaping.pyx`/:file:`landscaping.py`, the :keyword:`cimport` shrubbing + declaration allows us to refer to the Shrubbery type as :class:`shrubbing.Shrubbery`. + But it doesn't bind the name shrubbing in landscaping's module namespace at run + time, so to access :func:`shrubbing.standard_shrubbery` we also need to + ``import shrubbing``. * One caveat if you use setuptools instead of distutils, the default action when running ``python setup.py install`` is to create a zipped ``egg`` file which will not work with ``cimport`` for ``pxd`` files @@ -234,8 +294,8 @@ Versioning ``.pxd`` files can be labelled with a minimum Cython version as part of their file name, similar to the version tagging of ``.so`` files in PEP 3149. -For example a file called :file:`Shrubbing.cython-30.pxd` will only be -found by ``cimport Shrubbing`` on Cython 3.0 and higher. Cython will use the +For example a file called :file:`shrubbing.cython-30.pxd` will only be +found by ``cimport shrubbing`` on Cython 3.0 and higher. Cython will use the file tagged with the highest compatible version number. Note that versioned files that are distributed across different directories -- cgit v1.2.1 From 530e370ff3d4d43e1969dcc821f65bf33a99f252 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 26 Jun 2022 11:42:31 +0100 Subject: Docs: don't say cdef functions exist in module dict (#4865) Patch is against 0.29.x branch (to fix both versions of the documentation). --- docs/src/userguide/sharing_declarations.rst | 7 ------- 1 file changed, 7 deletions(-) diff --git a/docs/src/userguide/sharing_declarations.rst b/docs/src/userguide/sharing_declarations.rst index 57f41e38d..7c2a49e21 100644 --- a/docs/src/userguide/sharing_declarations.rst +++ b/docs/src/userguide/sharing_declarations.rst @@ -166,13 +166,6 @@ example: .. literalinclude:: ../../examples/userguide/sharing_declarations/spammery.pyx -.. 
note:: - - When a module exports a C function in this way, an object appears in the - module dictionary under the function's name. However, you can't make use of - this object from Python, nor can you use it from Cython using a normal import - statement; you have to use :keyword:`cimport`. - .. _sharing_extension_types: Sharing Extension Types -- cgit v1.2.1 From 5c900c59d03f23f7329d6e68e114e4a277112916 Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 28 Jun 2022 12:52:05 +0100 Subject: Fix tuple multiplication in MergedSequenceNode (GH-4864) Fixes https://github.com/cython/cython/issues/4861 --- Cython/Compiler/ExprNodes.py | 2 +- tests/run/pep448_extended_unpacking.pyx | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 4a8ce5dca..9678647ad 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -8482,7 +8482,7 @@ class MergedSequenceNode(ExprNode): if type in (list_type, tuple_type) and args and args[0].is_sequence_constructor: # construct a list directly from the first argument that we can then extend if args[0].type is not list_type: - args[0] = ListNode(args[0].pos, args=args[0].args, is_temp=True) + args[0] = ListNode(args[0].pos, args=args[0].args, is_temp=True, mult_factor=args[0].mult_factor) ExprNode.__init__(self, pos, args=args, type=type) def calculate_constant_result(self): diff --git a/tests/run/pep448_extended_unpacking.pyx b/tests/run/pep448_extended_unpacking.pyx index 08d39e526..4411d7e79 100644 --- a/tests/run/pep448_extended_unpacking.pyx +++ b/tests/run/pep448_extended_unpacking.pyx @@ -185,6 +185,24 @@ def unpack_list_literal_mult(): return [*([1, 2, *([4, 5] * 2)] * 3)] +def unpack_list_tuple_mult(): + """ + >>> unpack_list_tuple_mult() + [1, 1] + """ + return [*(1,) * 2] + + +def unpack_list_tuple_bad_mult(): + """ + >>> unpack_list_tuple_bad_mult() + Traceback (most recent call last): + ... 
+ TypeError: can't multiply sequence by non-int of type 'float' + """ + return [*(1,) * 1.5] + + @cython.test_fail_if_path_exists( "//ListNode//ListNode", "//MergedSequenceNode", -- cgit v1.2.1 From a79e447ea9faea118f39a94b54d0498baad8ad17 Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 29 Jun 2022 08:11:55 +0100 Subject: Move linetracing functions into an "exec" to work around an issue in Py3.11 (GH-4851) Thus fixing linetracing tests in Python 3.11 when the trace function raises an exception. The issue we were seeing looked to be something to do with functions defined within doctest docstrings. It was fixed by moving the "defined-in-Python" functions into an exec call instead. See https://github.com/python/cpython/issues/94381 --- tests/run/line_trace.pyx | 60 +++++++++++++++++++++++------------------------- 1 file changed, 29 insertions(+), 31 deletions(-) diff --git a/tests/run/line_trace.pyx b/tests/run/line_trace.pyx index d6f9c3d0e..32579aff7 100644 --- a/tests/run/line_trace.pyx +++ b/tests/run/line_trace.pyx @@ -74,7 +74,9 @@ def _create_trace_func(trace): local_names = {} def _trace_func(frame, event, arg): - if sys.version_info < (3,) and 'line_trace' not in frame.f_code.co_filename: + if sys.version_info < (3,) and ( + 'line_trace' not in frame.f_code.co_filename and + '' not in frame.f_code.co_filename): # Prevent tracing into Py2 doctest functions. return None @@ -165,19 +167,28 @@ def cy_try_except(func): raise AttributeError(exc.args[0]) -def run_trace(func, *args, bint with_sys=False): - """ - >>> def py_add(a,b): - ... x = a+b - ... return x +# CPython 3.11 has an issue when these Python functions are implemented inside of doctests and the trace function fails. 
+# https://github.com/python/cpython/issues/94381 +plain_python_functions = {} +exec(""" +def py_add(a,b): + x = a+b + return x + +def py_add_with_nogil(a,b): + x=a; y=b # 1 + for _ in range(1): # 2 + z = 0 # 3 + z += py_add(x, y) # 4 + return z + +def py_return(retval=123): return retval +""", plain_python_functions) - >>> def py_add_with_nogil(a,b): - ... x=a; y=b # 1 - ... for _ in range(1): # 2 - ... z = 0 # 3 - ... z += py_add(x, y) # 4 - ... return z # 5 +def run_trace(func, *args, bint with_sys=False): + """ + >>> py_add = plain_python_functions['py_add'] >>> run_trace(py_add, 1, 2) [('call', 0), ('line', 1), ('line', 2), ('return', 2)] >>> run_trace(cy_add, 1, 2) @@ -204,6 +215,7 @@ def run_trace(func, *args, bint with_sys=False): >>> result[9:] # sys [('line', 2), ('line', 5), ('return', 5)] + >>> py_add_with_nogil = plain_python_functions['py_add_with_nogil'] >>> result = run_trace(py_add_with_nogil, 1, 2) >>> result[:5] # py [('call', 0), ('line', 1), ('line', 2), ('line', 3), ('line', 4)] @@ -239,7 +251,7 @@ def run_trace(func, *args, bint with_sys=False): def run_trace_with_exception(func, bint with_sys=False, bint fail=False): """ - >>> def py_return(retval=123): return retval + >>> py_return = plain_python_functions["py_return"] >>> run_trace_with_exception(py_return) OK: 123 [('call', 0), ('line', 1), ('line', 2), ('call', 0), ('line', 0), ('return', 0), ('return', 2)] @@ -295,10 +307,7 @@ def run_trace_with_exception(func, bint with_sys=False, bint fail=False): def fail_on_call_trace(func, *args): """ - >>> def py_add(a,b): - ... x = a+b - ... return x - + >>> py_add = plain_python_functions["py_add"] >>> fail_on_call_trace(py_add, 1, 2) Traceback (most recent call last): ValueError: failing call trace! @@ -319,17 +328,6 @@ def fail_on_call_trace(func, *args): def fail_on_line_trace(fail_func, add_func, nogil_add_func): """ - >>> def py_add(a,b): - ... x = a+b # 1 - ... return x # 2 - - >>> def py_add_with_nogil(a,b): - ... x=a; y=b # 1 - ... 
for _ in range(1): # 2 - ... z = 0 # 3 - ... z += py_add(x, y) # 4 - ... return z # 5 - >>> result = fail_on_line_trace(None, cy_add, cy_add_with_nogil) >>> len(result) 17 @@ -342,6 +340,8 @@ def fail_on_line_trace(fail_func, add_func, nogil_add_func): >>> result[14:] [('line', 2), ('line', 5), ('return', 5)] + >>> py_add = plain_python_functions["py_add"] + >>> py_add_with_nogil = plain_python_functions['py_add_with_nogil'] >>> result = fail_on_line_trace(None, py_add, py_add_with_nogil) >>> len(result) 17 @@ -405,9 +405,7 @@ def fail_on_line_trace(fail_func, add_func, nogil_add_func): def disable_trace(func, *args, bint with_sys=False): """ - >>> def py_add(a,b): - ... x = a+b - ... return x + >>> py_add = plain_python_functions["py_add"] >>> disable_trace(py_add, 1, 2) [('call', 0), ('line', 1)] >>> disable_trace(py_add, 1, 2, with_sys=True) -- cgit v1.2.1 From eafc920f76e812613b09876bfb9b980651c60f31 Mon Sep 17 00:00:00 2001 From: Sebastian Berg Date: Sun, 3 Jul 2022 00:23:31 -0700 Subject: BUG: Fortify object buffers against included NULLs (#4859) * BUG: Fortify object buffers against included NULLs While NumPy tends to not actively create object buffers initialized only with NULL (rather than filled with None), at least older versions of NumPy did do that. And NumPy guards against this. This guards against embedded NULLs in object buffers interpreting a NULL as None (and anticipating a NULL value also when setting the buffer for reference count purposes). 
Closes gh-4858 --- Cython/Compiler/ExprNodes.py | 17 ++++++++++------- tests/buffers/bufaccess.pyx | 45 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 55 insertions(+), 7 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index c20a76bd4..144251aec 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -4578,17 +4578,17 @@ class BufferIndexNode(_IndexingBaseNode): buffer_entry, ptrexpr = self.buffer_lookup_code(code) if self.buffer_type.dtype.is_pyobject: - # Must manage refcounts. Decref what is already there - # and incref what we put in. + # Must manage refcounts. XDecref what is already there + # and incref what we put in (NumPy allows there to be NULL) ptr = code.funcstate.allocate_temp(buffer_entry.buf_ptr_type, manage_ref=False) rhs_code = rhs.result() code.putln("%s = %s;" % (ptr, ptrexpr)) - code.put_gotref("*%s" % ptr, self.buffer_type.dtype) - code.putln("__Pyx_INCREF(%s); __Pyx_DECREF(*%s);" % ( + code.put_xgotref("*%s" % ptr, self.buffer_type.dtype) + code.putln("__Pyx_INCREF(%s); __Pyx_XDECREF(*%s);" % ( rhs_code, ptr)) code.putln("*%s %s= %s;" % (ptr, op, rhs_code)) - code.put_giveref("*%s" % ptr, self.buffer_type.dtype) + code.put_xgiveref("*%s" % ptr, self.buffer_type.dtype) code.funcstate.release_temp(ptr) else: # Simple case @@ -4609,8 +4609,11 @@ class BufferIndexNode(_IndexingBaseNode): # is_temp is True, so must pull out value and incref it. # NOTE: object temporary results for nodes are declared # as PyObject *, so we need a cast - code.putln("%s = (PyObject *) *%s;" % (self.result(), self.buffer_ptr_code)) - code.putln("__Pyx_INCREF((PyObject*)%s);" % self.result()) + res = self.result() + code.putln("%s = (PyObject *) *%s;" % (res, self.buffer_ptr_code)) + # NumPy does (occasionally) allow NULL to denote None. 
+ code.putln("if (unlikely(%s == NULL)) %s = Py_None;" % (res, res)) + code.putln("__Pyx_INCREF((PyObject*)%s);" % res) def free_subexpr_temps(self, code): for temp in self.index_temps: diff --git a/tests/buffers/bufaccess.pyx b/tests/buffers/bufaccess.pyx index 6b0b4ac30..764d65db6 100644 --- a/tests/buffers/bufaccess.pyx +++ b/tests/buffers/bufaccess.pyx @@ -1004,6 +1004,51 @@ def assign_to_object(object[object] buf, int idx, obj): """ buf[idx] = obj +@testcase +def check_object_nulled_1d(MockBuffer[object, ndim=1] buf, int idx, obj): + """ + See comments on printbuf_object above. + + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a]) + >>> check_object_nulled_1d(A, 0, a) + >>> decref(a) # new reference "added" to A + >>> check_object_nulled_1d(A, 1, a) + >>> decref(a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,)) + >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride + >>> decref(a) + >>> get_refcount(a) == rc1 + True + """ + cdef void **data = buf.buffer + data[idx] = NULL + res = buf[idx] # takes None + buf[idx] = obj + return res + +@testcase +def check_object_nulled_2d(MockBuffer[object, ndim=2] buf, int idx1, int idx2, obj): + """ + See comments on printbuf_object above. 
+ + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2)) + >>> check_object_nulled_2d(A, 0, 0, a) + >>> decref(a) # new reference "added" to A + >>> check_object_nulled_2d(A, 1, 1, a) + >>> decref(a) + >>> get_refcount(a) == rc1 + True + """ + cdef void **data = buf.buffer + data[idx1 + 2*idx2] = NULL + res = buf[idx1, idx2] # takes None + buf[idx1, idx2] = obj + return res + @testcase def assign_temporary_to_object(object[object] buf): """ -- cgit v1.2.1 From 98cebe4dedb52550ce621cf9338283dd7262ea83 Mon Sep 17 00:00:00 2001 From: Sebastian Berg Date: Sun, 3 Jul 2022 00:23:31 -0700 Subject: BUG: Fortify object buffers against included NULLs (#4859) * BUG: Fortify object buffers against included NULLs While NumPy tends to not actively create object buffers initialized only with NULL (rather than filled with None), at least older versions of NumPy did do that. And NumPy guards against this. This guards against embedded NULLs in object buffers interpreting a NULL as None (and anticipating a NULL value also when setting the buffer for reference count purposes). Closes gh-4858 --- Cython/Compiler/ExprNodes.py | 17 ++++++++++------- tests/buffers/bufaccess.pyx | 45 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 55 insertions(+), 7 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 9678647ad..69632a4fe 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -4351,17 +4351,17 @@ class BufferIndexNode(_IndexingBaseNode): buffer_entry, ptrexpr = self.buffer_lookup_code(code) if self.buffer_type.dtype.is_pyobject: - # Must manage refcounts. Decref what is already there - # and incref what we put in. + # Must manage refcounts. 
XDecref what is already there + # and incref what we put in (NumPy allows there to be NULL) ptr = code.funcstate.allocate_temp(buffer_entry.buf_ptr_type, manage_ref=False) rhs_code = rhs.result() code.putln("%s = %s;" % (ptr, ptrexpr)) - code.put_gotref("*%s" % ptr) - code.putln("__Pyx_INCREF(%s); __Pyx_DECREF(*%s);" % ( + code.put_xgotref("*%s" % ptr) + code.putln("__Pyx_INCREF(%s); __Pyx_XDECREF(*%s);" % ( rhs_code, ptr)) code.putln("*%s %s= %s;" % (ptr, op, rhs_code)) - code.put_giveref("*%s" % ptr) + code.put_xgiveref("*%s" % ptr) code.funcstate.release_temp(ptr) else: # Simple case @@ -4382,8 +4382,11 @@ class BufferIndexNode(_IndexingBaseNode): # is_temp is True, so must pull out value and incref it. # NOTE: object temporary results for nodes are declared # as PyObject *, so we need a cast - code.putln("%s = (PyObject *) *%s;" % (self.result(), self.buffer_ptr_code)) - code.putln("__Pyx_INCREF((PyObject*)%s);" % self.result()) + res = self.result() + code.putln("%s = (PyObject *) *%s;" % (res, self.buffer_ptr_code)) + # NumPy does (occasionally) allow NULL to denote None. + code.putln("if (unlikely(%s == NULL)) %s = Py_None;" % (res, res)) + code.putln("__Pyx_INCREF((PyObject*)%s);" % res) def free_subexpr_temps(self, code): for temp in self.index_temps: diff --git a/tests/buffers/bufaccess.pyx b/tests/buffers/bufaccess.pyx index 8761e6eb9..2a5e84185 100644 --- a/tests/buffers/bufaccess.pyx +++ b/tests/buffers/bufaccess.pyx @@ -1004,6 +1004,51 @@ def assign_to_object(object[object] buf, int idx, obj): """ buf[idx] = obj +@testcase +def check_object_nulled_1d(MockBuffer[object, ndim=1] buf, int idx, obj): + """ + See comments on printbuf_object above. 
+ + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a]) + >>> check_object_nulled_1d(A, 0, a) + >>> decref(a) # new reference "added" to A + >>> check_object_nulled_1d(A, 1, a) + >>> decref(a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,)) + >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride + >>> decref(a) + >>> get_refcount(a) == rc1 + True + """ + cdef void **data = buf.buffer + data[idx] = NULL + res = buf[idx] # takes None + buf[idx] = obj + return res + +@testcase +def check_object_nulled_2d(MockBuffer[object, ndim=2] buf, int idx1, int idx2, obj): + """ + See comments on printbuf_object above. + + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2)) + >>> check_object_nulled_2d(A, 0, 0, a) + >>> decref(a) # new reference "added" to A + >>> check_object_nulled_2d(A, 1, 1, a) + >>> decref(a) + >>> get_refcount(a) == rc1 + True + """ + cdef void **data = buf.buffer + data[idx1 + 2*idx2] = NULL + res = buf[idx1, idx2] # takes None + buf[idx1, idx2] = obj + return res + @testcase def assign_temporary_to_object(object[object] buf): """ -- cgit v1.2.1 From a70b9d3cde2a0f62f0ebf7a8a28f32af5de99e8c Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 3 Jul 2022 08:54:23 +0100 Subject: Updated changelog --- CHANGES.rst | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index 7087a9391..acc5e3020 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -2,6 +2,40 @@ Cython Changelog ================ +0.29.31 (2022-??-??) +==================== + +Bugs fixed +---------- + +* Use ``importlib.util.find_spec()`` instead of the deprecated ``importlib.find_loader()`` + function when setting up the package path at import-time. Patch by Matti Picus. + (Github issue #4764) + +* Require the C compiler to support the two-arg form of ``va_start`` on Python 3.10 + and higher. Patch by Thomas Caswell. 
+ (Github issue #4820) + +* Make ``fused_type`` subscriptable in Shadow.py. Patch by Pfebrer. + (Github issue #4842) + +* Fix the incorrect code generation of the target type in ``bytearray`` loops. + Patch by Kenrick Everett. + (Github issue #4108) + +* Silence some GCC ``-Wconversion`` warnings in C utility code. + Patch by Lisandro Dalcin. + (Github issue #4854) + +* Stop tuple multiplication being ignored in expressions such as ``[*(1,) * 2]``. + Patch by David Woods. + (Github issue #4864) + +* Ensure that object buffers (e.g. ``ndarray[object, ndim=1]``) containing + ``NULL`` pointers are safe to use, returning ``None`` instead of the ``NULL`` + pointer. Patch by Sebastian Berg. + (Github issue #4859) + 0.29.30 (2022-05-16) ==================== -- cgit v1.2.1 From edf38fcf5d57b6ac58be823d31e40f30a8b6f2fd Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 3 Jul 2022 10:15:30 +0100 Subject: Update changelog --- CHANGES.rst | 36 ++++++++++++++++++++++++++++++------ 1 file changed, 30 insertions(+), 6 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 9a844e178..38dc34781 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,15 +10,17 @@ Features added * A new decorator ``@cython.dataclasses.dataclass`` was implemented that provides compile time dataclass generation capabilities to ``cdef`` classes (extension types). - Patch by David Woods. (Github issue :issue:`2903`) + Patch by David Woods. (Github issue :issue:`2903`). ``kw_only`` dataclasses + added by Yury Sokov (Github issue :issue:`4794`) * Named expressions (PEP 572) aka. assignment expressions (aka. the walrus operator ``:=``) were implemented. Patch by David Woods. (Github issue :issue:`2636`) * Some C++ library declarations were extended and fixed. - Patches by Max Bachmann, Till Hoffmann, Julien Jerphanion. - (Github issues :issue:`4530`, :issue:`4528`, :issue:`4710`, :issue:`4746`, :issue:`4751`) + Patches by Max Bachmann, Till Hoffmann, Julien Jerphanion, Wenjun Si. 
+ (Github issues :issue:`4530`, :issue:`4528`, :issue:`4710`, :issue:`4746`, + :issue:`4751`, :issue:`4818`, :issue:`4762`) * The ``cythonize`` command has a new option ``-M`` to generate ``.dep`` dependency files for the compilation unit. This can be used by external build tools to track @@ -35,6 +37,10 @@ Features added smaller set of Cython's own modules, which can be used to reduce the package and install size. +* Improvements to ``PyTypeObject`` definitions in pxd wrapping of libpython. + Patch by John Kirkham. (Github issue :issue:`4699`) + + Bugs fixed ---------- @@ -48,7 +54,7 @@ Bugs fixed Test patch by Kirill Smelkov. (Github issue :issue:`4737`) * Typedefs for the ``bint`` type did not always behave like ``bint``. - Patch by 0dminnimda. (Github issue :issue:`4660`) + Patch by Nathan Manville and 0dminnimda. (Github issue :issue:`4660`) * The return type of a fused function is no longer ignored for function pointers, since it is relevant when passing them e.g. as argument into other fused functions. @@ -65,7 +71,18 @@ Bugs fixed * A work-around for StacklessPython < 3.8 was disabled in Py3.8 and later. (Github issue :issue:`4329`) -* Includes all bug-fixes from the :ref:`0.29.30` release. +* Improve conversion between function pointers with non-identical but + compatible exception specifications. Patches by David Woods. + (Github issues :issue:`4770`, :issue:`4689`) + +* Improve compatibility with forthcoming CPython 3.12 release. + +* Limited API C preprocessor warning is compatible with MSVC. Patch by + Victor Molina Garcia. (Github issue :issue:`4826`) + +* C compiler warnings fixed. Patch by mwtian. (Github issue :issue:`4831`) + +* Includes all bug-fixes from the 0.29 branch up to the :ref:`0.29.31` release. Other changes ------------- @@ -78,7 +95,7 @@ Other changes allowed when it is used as default argument, i.e. ``func(x: list = None)``. 
Note that, for backwards compatibility reasons, this does not apply when using Cython's C notation, as in ``func(list x)``. Here, ``None`` is still allowed, as always. - (Github issues :issue:`3883`, :issue:`2696`) + (Github issues :issue:`3883`, :issue:`2696`, :issue:`4669`) * The compile-time ``DEF`` and ``IF`` statements are deprecated and generate a warning. They should be replaced with normal constants, code generation or C macros. @@ -87,6 +104,10 @@ Other changes * Reusing an extension type attribute name as a method name is now an error. Patch by 0dminnimda. (Github issue :issue:`4661`) +* Improve compatibility between classes pickled in Cython 3.0 and 0.29.x + by accepting MD5, SHA-1 and SHA-256 checksums. + (Github issue :issue:`4680`) + 3.0.0 alpha 10 (2022-01-06) =========================== @@ -977,6 +998,8 @@ Other changes .. _`PEP-563`: https://www.python.org/dev/peps/pep-0563 .. _`PEP-479`: https://www.python.org/dev/peps/pep-0479 +.. _0.29.31: + 0.29.31 (2022-??-??) ==================== @@ -1011,6 +1034,7 @@ Bugs fixed pointer. Patch by Sebastian Berg. (Github issue #4859) + .. _0.29.30: 0.29.30 (2022-05-16) -- cgit v1.2.1 From 1c0691f7720976a2ee9c471e071b6c8a0341eb7b Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 3 Jul 2022 11:23:34 +0100 Subject: Fix tuple*float test on PyPy Test added in 5c900c59d03f23f7329d6e68e114e4a277112916 PyPy gives a slightly different error message for the unsupported operation --- tests/run/pep448_extended_unpacking.pyx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/run/pep448_extended_unpacking.pyx b/tests/run/pep448_extended_unpacking.pyx index 4411d7e79..85d22a86c 100644 --- a/tests/run/pep448_extended_unpacking.pyx +++ b/tests/run/pep448_extended_unpacking.pyx @@ -195,10 +195,10 @@ def unpack_list_tuple_mult(): def unpack_list_tuple_bad_mult(): """ - >>> unpack_list_tuple_bad_mult() + >>> unpack_list_tuple_bad_mult() # doctest: +ELLIPSIS Traceback (most recent call last): ... 
- TypeError: can't multiply sequence by non-int of type 'float' + TypeError: ... 'float' """ return [*(1,) * 1.5] -- cgit v1.2.1 From 7c7890348625871e6442b4b1bbd6e3e0e7c38e73 Mon Sep 17 00:00:00 2001 From: Kirill Smelkov Date: Sun, 3 Jul 2022 14:08:40 +0300 Subject: includes/cpython: Fix newfunc to use PyObject* for args/kwargs instead of object (#4823) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit object means the argument is always non-NULL valid Python object, while PyObject* argument can be generally NULL. If the argument is indeed passed as NULL, and we declare it as object, generated code will crash while trying to incref it. Quoting https://github.com/cython/cython/issues/4822: object.pxd currently declares `newfunc` as follows: ```pyx ctypedef object (*newfunc)(cpython.type.type, object, object) # (type, args, kwargs) ``` which implies that `args` and `kwargs` are always live objects and cannot be NULL. However Python can, and does, call tp_new with either args=NULL, or kwargs=NULL or both. And in such cases this leads to segfault in automatically-generated __Pyx_INCREF for args or kw. The fix is to change `object` to `PyObject*` for both args and kwargs. Please see below for details: ```cython # cython: language_level=3 from cpython cimport newfunc, type as cpytype, Py_TYPE cdef class X: cdef int i def __init__(self, i): self.i = i def __repr__(self): return 'X(%d)' % self.i cdef newfunc _orig_tp_new = Py_TYPE(X(0)).tp_new cdef object _trace_tp_new(cpytype cls, object args, object kw): print('_trace_tp_new', cls, args, kw) return _orig_tp_new(cls, args, kw) Py_TYPE(X(0)).tp_new = _trace_tp_new x = X(123) print(x) ``` ```console (neo) (py3.venv) (g.env) kirr@deca:~/src/tools/go/pygolang$ cythonize -i x.pyx Compiling /home/kirr/src/tools/go/pygolang/x.pyx because it changed. [1/1] Cythonizing /home/kirr/src/tools/go/pygolang/x.pyx running build_ext building 'x' extension ... 
x86_64-linux-gnu-gcc -pthread -Wno-unused-result -Wsign-compare -DNDEBUG -g -fwrapv -O2 -Wall -g -ffile-prefix-map=/build/python3.9-RNBry6/python3.9-3.9.2=. -fstack-protector-strong -Wformat -Werror=format-security -g -fwrapv -O2 -g -ffile-prefix-map=/build/python3.9-RNBry6/python3.9-3.9.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -I/home/kirr/src/wendelin/venv/py3.venv/include -I/usr/include/python3.9 -c /home/kirr/src/tools/go/pygolang/x.c -o /home/kirr/src/tools/go/pygolang/tmpqkz1r96s/home/kirr/src/tools/go/pygolang/x.o x86_64-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-z,relro -g -fwrapv -O2 -Wl,-z,relro -g -fwrapv -O2 -g -ffile-prefix-map=/build/python3.9-RNBry6/python3.9-3.9.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 /home/kirr/src/tools/go/pygolang/tmpqkz1r96s/home/kirr/src/tools/go/pygolang/x.o -o /home/kirr/src/tools/go/pygolang/x.cpython-39-x86_64-linux-gnu.so ``` ```console (neo) (py3.venv) (g.env) kirr@deca:~/src/tools/go/pygolang$ python -c 'import x' Ошибка сегментирования (стек памяти сброшен на диск) ``` ```console (neo) (py3.venv) (g.env) kirr@deca:~/src/tools/go/pygolang$ gdb python core ... Reading symbols from python... Reading symbols from /usr/lib/debug/.build-id/f9/02f8a561c3abdb9c8d8c859d4243bd8c3f928f.debug... [New LWP 218557] [Thread debugging using libthread_db enabled] Using host libthread_db library "/lib/x86_64-linux-gnu/libthread_db.so.1". Core was generated by `python -c import x'. Program terminated with signal SIGSEGV, Segmentation fault. 
#0 _Py_INCREF (op=0x0) at /usr/include/python3.9/object.h:408 408 op->ob_refcnt++; (gdb) bt 5 #0 _Py_INCREF (op=0x0) at /usr/include/python3.9/object.h:408 #1 __pyx_f_1x__trace_tp_new (__pyx_v_cls=0x7f5ce75e6880 <__pyx_type_1x_X>, __pyx_v_args=(123,), __pyx_v_kw=0x0) at /home/kirr/src/tools/go/pygolang/x.c:1986 #2 0x000000000051dd7e in type_call (type=type@entry=0x7f5ce75e6880 <__pyx_type_1x_X>, args=args@entry=(123,), kwds=kwds@entry=0x0) at ../Objects/typeobject.c:1014 #3 0x00007f5ce75df8d4 in __Pyx_PyObject_Call (func=, arg=(123,), kw=0x0) at /home/kirr/src/tools/go/pygolang/x.c:3414 #4 0x00007f5ce75df276 in __pyx_pymod_exec_x (__pyx_pyinit_module=) at /home/kirr/src/tools/go/pygolang/x.c:3017 (More stack frames follow...) (gdb) f 1 #1 __pyx_f_1x__trace_tp_new (__pyx_v_cls=0x7f5ce75e6880 <__pyx_type_1x_X>, __pyx_v_args=(123,), __pyx_v_kw=0x0) at /home/kirr/src/tools/go/pygolang/x.c:1986 1986 __Pyx_INCREF(__pyx_v_kw); ``` -> Change newfunc signature to use PyObject* instead of object to fix it. With this fix, and test example updates to account for object -> PyObject* change as follows ... --- a/x.pyx.kirr +++ b/x.pyx @@ -1,5 +1,5 @@ # cython: language_level=3 -from cpython cimport newfunc, type as cpytype, Py_TYPE +from cpython cimport newfunc, type as cpytype, Py_TYPE, PyObject cdef class X: cdef int i @@ -10,8 +10,12 @@ cdef class X: cdef newfunc _orig_tp_new = Py_TYPE(X(0)).tp_new -cdef object _trace_tp_new(cpytype cls, object args, object kw): - print('_trace_tp_new', cls, args, kw) +cdef object xobject(PyObject* x): + return "null" if x == NULL else \ + x + +cdef object _trace_tp_new(cpytype cls, PyObject* args, PyObject* kw): + print('_trace_tp_new', cls, xobject(args), xobject(kw)) return _orig_tp_new(cls, args, kw) Py_TYPE(X(0)).tp_new = _trace_tp_new ... 
it works as expected without crashing: $ python -c 'import x' _trace_tp_new (123,) null X(123) Fixes: https://github.com/cython/cython/issues/4822 --- Cython/Includes/cpython/object.pxd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Includes/cpython/object.pxd b/Cython/Includes/cpython/object.pxd index 5a8116639..3ce4c6307 100644 --- a/Cython/Includes/cpython/object.pxd +++ b/Cython/Includes/cpython/object.pxd @@ -5,7 +5,7 @@ cdef extern from "Python.h": ctypedef struct PyObject # forward declaration - ctypedef object (*newfunc)(cpython.type.type, object, object) # (type, args, kwargs) + ctypedef object (*newfunc)(cpython.type.type, PyObject*, PyObject*) # (type, args|NULL, kwargs|NULL) ctypedef object (*unaryfunc)(object) ctypedef object (*binaryfunc)(object, object) -- cgit v1.2.1 From c769c3295dec09fbbb607f249224ce385591dbcc Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Mon, 4 Jul 2022 10:28:15 +0200 Subject: Revert "includes/cpython: Fix newfunc to use PyObject* for args/kwargs instead of object (#4823)" This reverts commit 7c7890348625871e6442b4b1bbd6e3e0e7c38e73. 
--- Cython/Includes/cpython/object.pxd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Includes/cpython/object.pxd b/Cython/Includes/cpython/object.pxd index 3ce4c6307..5a8116639 100644 --- a/Cython/Includes/cpython/object.pxd +++ b/Cython/Includes/cpython/object.pxd @@ -5,7 +5,7 @@ cdef extern from "Python.h": ctypedef struct PyObject # forward declaration - ctypedef object (*newfunc)(cpython.type.type, PyObject*, PyObject*) # (type, args|NULL, kwargs|NULL) + ctypedef object (*newfunc)(cpython.type.type, object, object) # (type, args, kwargs) ctypedef object (*unaryfunc)(object) ctypedef object (*binaryfunc)(object, object) -- cgit v1.2.1 From 4189c759ce468b74f35cfce3cfdba9aa8b4992e3 Mon Sep 17 00:00:00 2001 From: Kirill Smelkov Date: Sun, 3 Jul 2022 14:08:40 +0300 Subject: includes/cpython: Fix newfunc to use PyObject* for args/kwargs instead of object (#4823) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit object means the argument is always non-NULL valid Python object, while PyObject* argument can be generally NULL. If the argument is indeed passed as NULL, and we declare it as object, generated code will crash while trying to incref it. Quoting https://github.com/cython/cython/issues/4822: object.pxd currently declares `newfunc` as follows: ```pyx ctypedef object (*newfunc)(cpython.type.type, object, object) # (type, args, kwargs) ``` which implies that `args` and `kwargs` are always live objects and cannot be NULL. However Python can, and does, call tp_new with either args=NULL, or kwargs=NULL or both. And in such cases this leads to segfault in automatically-generated __Pyx_INCREF for args or kw. The fix is to change `object` to `PyObject*` for both args and kwargs. 
Please see below for details: ```cython # cython: language_level=3 from cpython cimport newfunc, type as cpytype, Py_TYPE cdef class X: cdef int i def __init__(self, i): self.i = i def __repr__(self): return 'X(%d)' % self.i cdef newfunc _orig_tp_new = Py_TYPE(X(0)).tp_new cdef object _trace_tp_new(cpytype cls, object args, object kw): print('_trace_tp_new', cls, args, kw) return _orig_tp_new(cls, args, kw) Py_TYPE(X(0)).tp_new = _trace_tp_new x = X(123) print(x) ``` ```console (neo) (py3.venv) (g.env) kirr@deca:~/src/tools/go/pygolang$ cythonize -i x.pyx Compiling /home/kirr/src/tools/go/pygolang/x.pyx because it changed. [1/1] Cythonizing /home/kirr/src/tools/go/pygolang/x.pyx running build_ext building 'x' extension ... x86_64-linux-gnu-gcc -pthread -Wno-unused-result -Wsign-compare -DNDEBUG -g -fwrapv -O2 -Wall -g -ffile-prefix-map=/build/python3.9-RNBry6/python3.9-3.9.2=. -fstack-protector-strong -Wformat -Werror=format-security -g -fwrapv -O2 -g -ffile-prefix-map=/build/python3.9-RNBry6/python3.9-3.9.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -I/home/kirr/src/wendelin/venv/py3.venv/include -I/usr/include/python3.9 -c /home/kirr/src/tools/go/pygolang/x.c -o /home/kirr/src/tools/go/pygolang/tmpqkz1r96s/home/kirr/src/tools/go/pygolang/x.o x86_64-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-z,relro -g -fwrapv -O2 -Wl,-z,relro -g -fwrapv -O2 -g -ffile-prefix-map=/build/python3.9-RNBry6/python3.9-3.9.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 /home/kirr/src/tools/go/pygolang/tmpqkz1r96s/home/kirr/src/tools/go/pygolang/x.o -o /home/kirr/src/tools/go/pygolang/x.cpython-39-x86_64-linux-gnu.so ``` ```console (neo) (py3.venv) (g.env) kirr@deca:~/src/tools/go/pygolang$ python -c 'import x' Ошибка сегментирования (стек памяти сброшен на диск) ``` ```console (neo) (py3.venv) (g.env) kirr@deca:~/src/tools/go/pygolang$ gdb python core ... 
Reading symbols from python... Reading symbols from /usr/lib/debug/.build-id/f9/02f8a561c3abdb9c8d8c859d4243bd8c3f928f.debug... [New LWP 218557] [Thread debugging using libthread_db enabled] Using host libthread_db library "/lib/x86_64-linux-gnu/libthread_db.so.1". Core was generated by `python -c import x'. Program terminated with signal SIGSEGV, Segmentation fault. #0 _Py_INCREF (op=0x0) at /usr/include/python3.9/object.h:408 408 op->ob_refcnt++; (gdb) bt 5 #0 _Py_INCREF (op=0x0) at /usr/include/python3.9/object.h:408 #1 __pyx_f_1x__trace_tp_new (__pyx_v_cls=0x7f5ce75e6880 <__pyx_type_1x_X>, __pyx_v_args=(123,), __pyx_v_kw=0x0) at /home/kirr/src/tools/go/pygolang/x.c:1986 #2 0x000000000051dd7e in type_call (type=type@entry=0x7f5ce75e6880 <__pyx_type_1x_X>, args=args@entry=(123,), kwds=kwds@entry=0x0) at ../Objects/typeobject.c:1014 #3 0x00007f5ce75df8d4 in __Pyx_PyObject_Call (func=, arg=(123,), kw=0x0) at /home/kirr/src/tools/go/pygolang/x.c:3414 #4 0x00007f5ce75df276 in __pyx_pymod_exec_x (__pyx_pyinit_module=) at /home/kirr/src/tools/go/pygolang/x.c:3017 (More stack frames follow...) (gdb) f 1 #1 __pyx_f_1x__trace_tp_new (__pyx_v_cls=0x7f5ce75e6880 <__pyx_type_1x_X>, __pyx_v_args=(123,), __pyx_v_kw=0x0) at /home/kirr/src/tools/go/pygolang/x.c:1986 1986 __Pyx_INCREF(__pyx_v_kw); ``` -> Change newfunc signature to use PyObject* instead of object to fix it. With this fix, and test example updates to account for object -> PyObject* change as follows ... 
--- a/x.pyx.kirr +++ b/x.pyx @@ -1,5 +1,5 @@ # cython: language_level=3 -from cpython cimport newfunc, type as cpytype, Py_TYPE +from cpython cimport newfunc, type as cpytype, Py_TYPE, PyObject cdef class X: cdef int i @@ -10,8 +10,12 @@ cdef class X: cdef newfunc _orig_tp_new = Py_TYPE(X(0)).tp_new -cdef object _trace_tp_new(cpytype cls, object args, object kw): - print('_trace_tp_new', cls, args, kw) +cdef object xobject(PyObject* x): + return "null" if x == NULL else \ + x + +cdef object _trace_tp_new(cpytype cls, PyObject* args, PyObject* kw): + print('_trace_tp_new', cls, xobject(args), xobject(kw)) return _orig_tp_new(cls, args, kw) Py_TYPE(X(0)).tp_new = _trace_tp_new ... it works as expected without crashing: $ python -c 'import x' _trace_tp_new (123,) null X(123) Fixes: https://github.com/cython/cython/issues/4822 --- Cython/Includes/cpython/object.pxd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Includes/cpython/object.pxd b/Cython/Includes/cpython/object.pxd index c4688f738..41874159c 100644 --- a/Cython/Includes/cpython/object.pxd +++ b/Cython/Includes/cpython/object.pxd @@ -5,7 +5,7 @@ cdef extern from "Python.h": ctypedef struct PyObject # forward declaration - ctypedef object (*newfunc)(cpython.type.type, object, object) # (type, args, kwargs) + ctypedef object (*newfunc)(cpython.type.type, PyObject*, PyObject*) # (type, args|NULL, kwargs|NULL) ctypedef object (*unaryfunc)(object) ctypedef object (*binaryfunc)(object, object) -- cgit v1.2.1 From 4cae7d6c3aef4f83a083f0083d22cd42c373d1fa Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Mon, 4 Jul 2022 21:47:18 +0200 Subject: Port pyximport to the importlib machinery (GH-4625) Closes https://github.com/cython/cython/issues/4560 --- pyximport/_pyximport2.py | 606 ++++++++++++++++++++++++++++++++++++++++++++++ pyximport/_pyximport3.py | 464 ++++++++++++++++++++++++++++++++++++ pyximport/pyximport.py | 607 +---------------------------------------------- 3 files changed, 1076 
insertions(+), 601 deletions(-) create mode 100644 pyximport/_pyximport2.py create mode 100644 pyximport/_pyximport3.py diff --git a/pyximport/_pyximport2.py b/pyximport/_pyximport2.py new file mode 100644 index 000000000..b2077826a --- /dev/null +++ b/pyximport/_pyximport2.py @@ -0,0 +1,606 @@ +""" +Import hooks; when installed with the install() function, these hooks +allow importing .pyx files as if they were Python modules. + +If you want the hook installed every time you run Python +you can add it to your Python version by adding these lines to +sitecustomize.py (which you can create from scratch in site-packages +if it doesn't exist there or somewhere else on your python path):: + + import pyximport + pyximport.install() + +For instance on the Mac with a non-system Python 2.3, you could create +sitecustomize.py with only those two lines at +/usr/local/lib/python2.3/site-packages/sitecustomize.py . + +A custom distutils.core.Extension instance and setup() args +(Distribution) for the build can be defined by a .pyxbld +file like: + +# examplemod.pyxbld +def make_ext(modname, pyxfilename): + from distutils.extension import Extension + return Extension(name = modname, + sources=[pyxfilename, 'hello.c'], + include_dirs=['/myinclude'] ) +def make_setup_args(): + return dict(script_args=["--compiler=mingw32"]) + +Extra dependencies can be defined by a .pyxdep . +See README. + +Since Cython 0.11, the :mod:`pyximport` module also has experimental +compilation support for normal Python modules. This allows you to +automatically run Cython on every .pyx and .py module that Python +imports, including parts of the standard library and installed +packages. Cython will still fail to compile a lot of Python modules, +in which case the import mechanism will fall back to loading the +Python source modules instead. 
The .py import mechanism is installed +like this:: + + pyximport.install(pyimport = True) + +Running this module as a top-level script will run a test and then print +the documentation. + +This code is based on the Py2.3+ import protocol as described in PEP 302. +""" + +import glob +import imp +import os +import sys +from zipimport import zipimporter, ZipImportError + +mod_name = "pyximport" + +PYX_EXT = ".pyx" +PYXDEP_EXT = ".pyxdep" +PYXBLD_EXT = ".pyxbld" + +DEBUG_IMPORT = False + + +def _print(message, args): + if args: + message = message % args + print(message) + + +def _debug(message, *args): + if DEBUG_IMPORT: + _print(message, args) + + +def _info(message, *args): + _print(message, args) + + +# Performance problem: for every PYX file that is imported, we will +# invoke the whole distutils infrastructure even if the module is +# already built. It might be more efficient to only do it when the +# mod time of the .pyx is newer than the mod time of the .so but +# the question is how to get distutils to tell me the name of the .so +# before it builds it. Maybe it is easy...but maybe the performance +# issue isn't real. +def _load_pyrex(name, filename): + "Load a pyrex file given a name and filename." 
+ + +def get_distutils_extension(modname, pyxfilename, language_level=None): +# try: +# import hashlib +# except ImportError: +# import md5 as hashlib +# extra = "_" + hashlib.md5(open(pyxfilename).read()).hexdigest() +# modname = modname + extra + extension_mod,setup_args = handle_special_build(modname, pyxfilename) + if not extension_mod: + if not isinstance(pyxfilename, str): + # distutils is stupid in Py2 and requires exactly 'str' + # => encode accidentally coerced unicode strings back to str + pyxfilename = pyxfilename.encode(sys.getfilesystemencoding()) + from distutils.extension import Extension + extension_mod = Extension(name = modname, sources=[pyxfilename]) + if language_level is not None: + extension_mod.cython_directives = {'language_level': language_level} + return extension_mod,setup_args + + +def handle_special_build(modname, pyxfilename): + special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT + ext = None + setup_args={} + if os.path.exists(special_build): + # globls = {} + # locs = {} + # execfile(special_build, globls, locs) + # ext = locs["make_ext"](modname, pyxfilename) + with open(special_build) as fid: + mod = imp.load_source("XXXX", special_build, fid) + make_ext = getattr(mod,'make_ext',None) + if make_ext: + ext = make_ext(modname, pyxfilename) + assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build + make_setup_args = getattr(mod, 'make_setup_args',None) + if make_setup_args: + setup_args = make_setup_args() + assert isinstance(setup_args,dict), ("make_setup_args in %s did not return a dict" + % special_build) + assert set or setup_args, ("neither make_ext nor make_setup_args %s" + % special_build) + ext.sources = [os.path.join(os.path.dirname(special_build), source) + for source in ext.sources] + return ext, setup_args + + +def handle_dependencies(pyxfilename): + testing = '_test_files' in globals() + dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT + + # by default let distutils 
decide whether to rebuild on its own + # (it has a better idea of what the output file will be) + + # but we know more about dependencies so force a rebuild if + # some of the dependencies are newer than the pyxfile. + if os.path.exists(dependfile): + with open(dependfile) as fid: + depends = fid.readlines() + depends = [depend.strip() for depend in depends] + + # gather dependencies in the "files" variable + # the dependency file is itself a dependency + files = [dependfile] + for depend in depends: + fullpath = os.path.join(os.path.dirname(dependfile), + depend) + files.extend(glob.glob(fullpath)) + + # only for unit testing to see we did the right thing + if testing: + _test_files[:] = [] #$pycheck_no + + # if any file that the pyxfile depends upon is newer than + # the pyx file, 'touch' the pyx file so that distutils will + # be tricked into rebuilding it. + for file in files: + from distutils.dep_util import newer + if newer(file, pyxfilename): + _debug("Rebuilding %s because of %s", pyxfilename, file) + filetime = os.path.getmtime(file) + os.utime(pyxfilename, (filetime, filetime)) + if testing: + _test_files.append(file) + + +def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None): + assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename + handle_dependencies(pyxfilename) + + extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level) + build_in_temp = pyxargs.build_in_temp + sargs = pyxargs.setup_args.copy() + sargs.update(setup_args) + build_in_temp = sargs.pop('build_in_temp',build_in_temp) + + from . import pyxbuild + so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod, + build_in_temp=build_in_temp, + pyxbuild_dir=pyxbuild_dir, + setup_args=sargs, + inplace=inplace, + reload_support=pyxargs.reload_support) + assert os.path.exists(so_path), "Cannot find: %s" % so_path + + junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? 
yes, indeed, trying to eat my files ;) + junkstuff = glob.glob(junkpath) + for path in junkstuff: + if path != so_path: + try: + os.remove(path) + except IOError: + _info("Couldn't remove %s", path) + + return so_path + + +def load_module(name, pyxfilename, pyxbuild_dir=None, is_package=False, + build_inplace=False, language_level=None, so_path=None): + try: + if so_path is None: + if is_package: + module_name = name + '.__init__' + else: + module_name = name + so_path = build_module(module_name, pyxfilename, pyxbuild_dir, + inplace=build_inplace, language_level=language_level) + mod = imp.load_dynamic(name, so_path) + if is_package and not hasattr(mod, '__path__'): + mod.__path__ = [os.path.dirname(so_path)] + assert mod.__file__ == so_path, (mod.__file__, so_path) + except Exception as failure_exc: + _debug("Failed to load extension module: %r" % failure_exc) + if pyxargs.load_py_module_on_import_failure and pyxfilename.endswith('.py'): + # try to fall back to normal import + mod = imp.load_source(name, pyxfilename) + assert mod.__file__ in (pyxfilename, pyxfilename+'c', pyxfilename+'o'), (mod.__file__, pyxfilename) + else: + tb = sys.exc_info()[2] + import traceback + exc = ImportError("Building module %s failed: %s" % ( + name, traceback.format_exception_only(*sys.exc_info()[:2]))) + if sys.version_info[0] >= 3: + raise exc.with_traceback(tb) + else: + exec("raise exc, None, tb", {'exc': exc, 'tb': tb}) + return mod + + +# import hooks + +class PyxImporter(object): + """A meta-path importer for .pyx files. + """ + def __init__(self, extension=PYX_EXT, pyxbuild_dir=None, inplace=False, + language_level=None): + self.extension = extension + self.pyxbuild_dir = pyxbuild_dir + self.inplace = inplace + self.language_level = language_level + + def find_module(self, fullname, package_path=None): + if fullname in sys.modules and not pyxargs.reload_support: + return None # only here when reload() + + # package_path might be a _NamespacePath. Convert that into a list... 
+ if package_path is not None and not isinstance(package_path, list): + package_path = list(package_path) + try: + fp, pathname, (ext,mode,ty) = imp.find_module(fullname,package_path) + if fp: fp.close() # Python should offer a Default-Loader to avoid this double find/open! + if pathname and ty == imp.PKG_DIRECTORY: + pkg_file = os.path.join(pathname, '__init__'+self.extension) + if os.path.isfile(pkg_file): + return PyxLoader(fullname, pathname, + init_path=pkg_file, + pyxbuild_dir=self.pyxbuild_dir, + inplace=self.inplace, + language_level=self.language_level) + if pathname and pathname.endswith(self.extension): + return PyxLoader(fullname, pathname, + pyxbuild_dir=self.pyxbuild_dir, + inplace=self.inplace, + language_level=self.language_level) + if ty != imp.C_EXTENSION: # only when an extension, check if we have a .pyx next! + return None + + # find .pyx fast, when .so/.pyd exist --inplace + pyxpath = os.path.splitext(pathname)[0]+self.extension + if os.path.isfile(pyxpath): + return PyxLoader(fullname, pyxpath, + pyxbuild_dir=self.pyxbuild_dir, + inplace=self.inplace, + language_level=self.language_level) + + # .so/.pyd's on PATH should not be remote from .pyx's + # think no need to implement PyxArgs.importer_search_remote here? + + except ImportError: + pass + + # searching sys.path ... + + #if DEBUG_IMPORT: print "SEARCHING", fullname, package_path + + mod_parts = fullname.split('.') + module_name = mod_parts[-1] + pyx_module_name = module_name + self.extension + + # this may work, but it returns the file content, not its path + #import pkgutil + #pyx_source = pkgutil.get_data(package, pyx_module_name) + + paths = package_path or sys.path + for path in paths: + pyx_data = None + if not path: + path = os.getcwd() + elif os.path.isfile(path): + try: + zi = zipimporter(path) + pyx_data = zi.get_data(pyx_module_name) + except (ZipImportError, IOError, OSError): + continue # Module not found. 
+ # unzip the imported file into the build dir + # FIXME: can interfere with later imports if build dir is in sys.path and comes before zip file + path = self.pyxbuild_dir + elif not os.path.isabs(path): + path = os.path.abspath(path) + + pyx_module_path = os.path.join(path, pyx_module_name) + if pyx_data is not None: + if not os.path.exists(path): + try: + os.makedirs(path) + except OSError: + # concurrency issue? + if not os.path.exists(path): + raise + with open(pyx_module_path, "wb") as f: + f.write(pyx_data) + elif not os.path.isfile(pyx_module_path): + continue # Module not found. + + return PyxLoader(fullname, pyx_module_path, + pyxbuild_dir=self.pyxbuild_dir, + inplace=self.inplace, + language_level=self.language_level) + + # not found, normal package, not a .pyx file, none of our business + _debug("%s not found" % fullname) + return None + + +class PyImporter(PyxImporter): + """A meta-path importer for normal .py files. + """ + def __init__(self, pyxbuild_dir=None, inplace=False, language_level=None): + if language_level is None: + language_level = sys.version_info[0] + self.super = super(PyImporter, self) + self.super.__init__(extension='.py', pyxbuild_dir=pyxbuild_dir, inplace=inplace, + language_level=language_level) + self.uncompilable_modules = {} + self.blocked_modules = ['Cython', 'pyxbuild', 'pyximport.pyxbuild', + 'distutils'] + self.blocked_packages = ['Cython.', 'distutils.'] + + def find_module(self, fullname, package_path=None): + if fullname in sys.modules: + return None + if any([fullname.startswith(pkg) for pkg in self.blocked_packages]): + return None + if fullname in self.blocked_modules: + # prevent infinite recursion + return None + if _lib_loader.knows(fullname): + return _lib_loader + _debug("trying import of module '%s'", fullname) + if fullname in self.uncompilable_modules: + path, last_modified = self.uncompilable_modules[fullname] + try: + new_last_modified = os.stat(path).st_mtime + if new_last_modified > last_modified: + # 
import would fail again + return None + except OSError: + # module is no longer where we found it, retry the import + pass + + self.blocked_modules.append(fullname) + try: + importer = self.super.find_module(fullname, package_path) + if importer is not None: + if importer.init_path: + path = importer.init_path + real_name = fullname + '.__init__' + else: + path = importer.path + real_name = fullname + _debug("importer found path %s for module %s", path, real_name) + try: + so_path = build_module( + real_name, path, + pyxbuild_dir=self.pyxbuild_dir, + language_level=self.language_level, + inplace=self.inplace) + _lib_loader.add_lib(fullname, path, so_path, + is_package=bool(importer.init_path)) + return _lib_loader + except Exception: + if DEBUG_IMPORT: + import traceback + traceback.print_exc() + # build failed, not a compilable Python module + try: + last_modified = os.stat(path).st_mtime + except OSError: + last_modified = 0 + self.uncompilable_modules[fullname] = (path, last_modified) + importer = None + finally: + self.blocked_modules.pop() + return importer + + +class LibLoader(object): + def __init__(self): + self._libs = {} + + def load_module(self, fullname): + try: + source_path, so_path, is_package = self._libs[fullname] + except KeyError: + raise ValueError("invalid module %s" % fullname) + _debug("Loading shared library module '%s' from %s", fullname, so_path) + return load_module(fullname, source_path, so_path=so_path, is_package=is_package) + + def add_lib(self, fullname, path, so_path, is_package): + self._libs[fullname] = (path, so_path, is_package) + + def knows(self, fullname): + return fullname in self._libs + +_lib_loader = LibLoader() + + +class PyxLoader(object): + def __init__(self, fullname, path, init_path=None, pyxbuild_dir=None, + inplace=False, language_level=None): + _debug("PyxLoader created for loading %s from %s (init path: %s)", + fullname, path, init_path) + self.fullname = fullname + self.path, self.init_path = path, init_path + 
self.pyxbuild_dir = pyxbuild_dir + self.inplace = inplace + self.language_level = language_level + + def load_module(self, fullname): + assert self.fullname == fullname, ( + "invalid module, expected %s, got %s" % ( + self.fullname, fullname)) + if self.init_path: + # package + #print "PACKAGE", fullname + module = load_module(fullname, self.init_path, + self.pyxbuild_dir, is_package=True, + build_inplace=self.inplace, + language_level=self.language_level) + module.__path__ = [self.path] + else: + #print "MODULE", fullname + module = load_module(fullname, self.path, + self.pyxbuild_dir, + build_inplace=self.inplace, + language_level=self.language_level) + return module + + +#install args +class PyxArgs(object): + build_dir=True + build_in_temp=True + setup_args={} #None + +##pyxargs=None + + +def _have_importers(): + has_py_importer = False + has_pyx_importer = False + for importer in sys.meta_path: + if isinstance(importer, PyxImporter): + if isinstance(importer, PyImporter): + has_py_importer = True + else: + has_pyx_importer = True + + return has_py_importer, has_pyx_importer + + +def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True, + setup_args=None, reload_support=False, + load_py_module_on_import_failure=False, inplace=False, + language_level=None): + """ Main entry point for pyxinstall. + + Call this to install the ``.pyx`` import hook in + your meta-path for a single Python process. If you want it to be + installed whenever you use Python, add it to your ``sitecustomize`` + (as described above). + + :param pyximport: If set to False, does not try to import ``.pyx`` files. + + :param pyimport: You can pass ``pyimport=True`` to also + install the ``.py`` import hook + in your meta-path. Note, however, that it is rather experimental, + will not work at all for some ``.py`` files and packages, and will + heavily slow down your imports due to search and compilation. + Use at your own risk. 
+ + :param build_dir: By default, compiled modules will end up in a ``.pyxbld`` + directory in the user's home directory. Passing a different path + as ``build_dir`` will override this. + + :param build_in_temp: If ``False``, will produce the C files locally. Working + with complex dependencies and debugging becomes easier. This + can in principle interfere with existing files of the same name. + + :param setup_args: Dict of arguments for Distribution. + See ``distutils.core.setup()``. + + :param reload_support: Enables support for dynamic + ``reload(my_module)``, e.g. after a change in the Cython code. + Additional files ``.reloadNN`` may arise on that account, when + the previously loaded module file cannot be overwritten. + + :param load_py_module_on_import_failure: If the compilation of a ``.py`` + file succeeds, but the subsequent import fails for some reason, + retry the import with the normal ``.py`` module instead of the + compiled module. Note that this may lead to unpredictable results + for modules that change the system state during their import, as + the second import will rerun these modifications in whatever state + the system was left after the import of the compiled module + failed. + + :param inplace: Install the compiled module + (``.so`` for Linux and Mac / ``.pyd`` for Windows) + next to the source file. + + :param language_level: The source language level to use: 2 or 3. + The default is to use the language level of the current Python + runtime for .py files and Py2 for ``.pyx`` files. 
+ """ + if setup_args is None: + setup_args = {} + if not build_dir: + build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld') + + global pyxargs + pyxargs = PyxArgs() #$pycheck_no + pyxargs.build_dir = build_dir + pyxargs.build_in_temp = build_in_temp + pyxargs.setup_args = (setup_args or {}).copy() + pyxargs.reload_support = reload_support + pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure + + has_py_importer, has_pyx_importer = _have_importers() + py_importer, pyx_importer = None, None + + if pyimport and not has_py_importer: + py_importer = PyImporter(pyxbuild_dir=build_dir, inplace=inplace, + language_level=language_level) + # make sure we import Cython before we install the import hook + import Cython.Compiler.Main, Cython.Compiler.Pipeline, Cython.Compiler.Optimize + sys.meta_path.insert(0, py_importer) + + if pyximport and not has_pyx_importer: + pyx_importer = PyxImporter(pyxbuild_dir=build_dir, inplace=inplace, + language_level=language_level) + sys.meta_path.append(pyx_importer) + + return py_importer, pyx_importer + + +def uninstall(py_importer, pyx_importer): + """ + Uninstall an import hook. + """ + try: + sys.meta_path.remove(py_importer) + except ValueError: + pass + + try: + sys.meta_path.remove(pyx_importer) + except ValueError: + pass + + +# MAIN + +def show_docs(): + import __main__ + __main__.__name__ = mod_name + for name in dir(__main__): + item = getattr(__main__, name) + try: + setattr(item, "__module__", mod_name) + except (AttributeError, TypeError): + pass + help(__main__) + + +if __name__ == '__main__': + show_docs() diff --git a/pyximport/_pyximport3.py b/pyximport/_pyximport3.py new file mode 100644 index 000000000..dccd1d09e --- /dev/null +++ b/pyximport/_pyximport3.py @@ -0,0 +1,464 @@ +""" +Import hooks; when installed with the install() function, these hooks +allow importing .pyx files as if they were Python modules. 
+ +If you want the hook installed every time you run Python +you can add it to your Python version by adding these lines to +sitecustomize.py (which you can create from scratch in site-packages +if it doesn't exist there or somewhere else on your python path):: + + import pyximport + pyximport.install() + +For instance on the Mac with a non-system Python 2.3, you could create +sitecustomize.py with only those two lines at +/usr/local/lib/python2.3/site-packages/sitecustomize.py . + +A custom distutils.core.Extension instance and setup() args +(Distribution) for the build can be defined by a .pyxbld +file like: + +# examplemod.pyxbld +def make_ext(modname, pyxfilename): + from distutils.extension import Extension + return Extension(name = modname, + sources=[pyxfilename, 'hello.c'], + include_dirs=['/myinclude'] ) +def make_setup_args(): + return dict(script_args=["--compiler=mingw32"]) + +Extra dependencies can be defined by a .pyxdep . +See README. + +Since Cython 0.11, the :mod:`pyximport` module also has experimental +compilation support for normal Python modules. This allows you to +automatically run Cython on every .pyx and .py module that Python +imports, including parts of the standard library and installed +packages. Cython will still fail to compile a lot of Python modules, +in which case the import mechanism will fall back to loading the +Python source modules instead. The .py import mechanism is installed +like this:: + + pyximport.install(pyimport = True) + +Running this module as a top-level script will run a test and then print +the documentation. 
+""" + +import glob +import importlib +import os +import sys +from importlib.abc import MetaPathFinder +from importlib.machinery import ExtensionFileLoader, SourceFileLoader +from importlib.util import spec_from_file_location + +mod_name = "pyximport" + +PY_EXT = ".py" +PYX_EXT = ".pyx" +PYXDEP_EXT = ".pyxdep" +PYXBLD_EXT = ".pyxbld" + +DEBUG_IMPORT = False + + +def _print(message, args): + if args: + message = message % args + print(message) + + +def _debug(message, *args): + if DEBUG_IMPORT: + _print(message, args) + + +def _info(message, *args): + _print(message, args) + + +def load_source(file_path): + import importlib.util + from importlib.machinery import SourceFileLoader + spec = importlib.util.spec_from_file_location("XXXX", file_path, loader=SourceFileLoader("XXXX", file_path)) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +def get_distutils_extension(modname, pyxfilename, language_level=None): +# try: +# import hashlib +# except ImportError: +# import md5 as hashlib +# extra = "_" + hashlib.md5(open(pyxfilename).read()).hexdigest() +# modname = modname + extra + extension_mod,setup_args = handle_special_build(modname, pyxfilename) + if not extension_mod: + if not isinstance(pyxfilename, str): + # distutils is stupid in Py2 and requires exactly 'str' + # => encode accidentally coerced unicode strings back to str + pyxfilename = pyxfilename.encode(sys.getfilesystemencoding()) + from distutils.extension import Extension + extension_mod = Extension(name = modname, sources=[pyxfilename]) + if language_level is not None: + extension_mod.cython_directives = {'language_level': language_level} + return extension_mod,setup_args + + +def handle_special_build(modname, pyxfilename): + special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT + ext = None + setup_args={} + if os.path.exists(special_build): + # globls = {} + # locs = {} + # execfile(special_build, globls, locs) + # ext = 
locs["make_ext"](modname, pyxfilename) + mod = load_source(special_build) + make_ext = getattr(mod,'make_ext',None) + if make_ext: + ext = make_ext(modname, pyxfilename) + assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build + make_setup_args = getattr(mod, 'make_setup_args',None) + if make_setup_args: + setup_args = make_setup_args() + assert isinstance(setup_args,dict), ("make_setup_args in %s did not return a dict" + % special_build) + assert set or setup_args, ("neither make_ext nor make_setup_args %s" + % special_build) + ext.sources = [os.path.join(os.path.dirname(special_build), source) + for source in ext.sources] + return ext, setup_args + + +def handle_dependencies(pyxfilename): + testing = '_test_files' in globals() + dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT + + # by default let distutils decide whether to rebuild on its own + # (it has a better idea of what the output file will be) + + # but we know more about dependencies so force a rebuild if + # some of the dependencies are newer than the pyxfile. + if os.path.exists(dependfile): + with open(dependfile) as fid: + depends = fid.readlines() + depends = [depend.strip() for depend in depends] + + # gather dependencies in the "files" variable + # the dependency file is itself a dependency + files = [dependfile] + for depend in depends: + fullpath = os.path.join(os.path.dirname(dependfile), + depend) + files.extend(glob.glob(fullpath)) + + # only for unit testing to see we did the right thing + if testing: + _test_files[:] = [] #$pycheck_no + + # if any file that the pyxfile depends upon is newer than + # the pyx file, 'touch' the pyx file so that distutils will + # be tricked into rebuilding it. 
+ for file in files: + from distutils.dep_util import newer + if newer(file, pyxfilename): + _debug("Rebuilding %s because of %s", pyxfilename, file) + filetime = os.path.getmtime(file) + os.utime(pyxfilename, (filetime, filetime)) + if testing: + _test_files.append(file) + + +def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None): + assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename + handle_dependencies(pyxfilename) + + extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level) + build_in_temp = pyxargs.build_in_temp + sargs = pyxargs.setup_args.copy() + sargs.update(setup_args) + build_in_temp = sargs.pop('build_in_temp',build_in_temp) + + from . import pyxbuild + so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod, + build_in_temp=build_in_temp, + pyxbuild_dir=pyxbuild_dir, + setup_args=sargs, + inplace=inplace, + reload_support=pyxargs.reload_support) + assert os.path.exists(so_path), "Cannot find: %s" % so_path + + junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;) + junkstuff = glob.glob(junkpath) + for path in junkstuff: + if path != so_path: + try: + os.remove(path) + except IOError: + _info("Couldn't remove %s", path) + + return so_path + + +# import hooks + +class PyxImportMetaFinder(MetaPathFinder): + + def __init__(self, extension=PYX_EXT, pyxbuild_dir=None, inplace=False, language_level=None): + self.pyxbuild_dir = pyxbuild_dir + self.inplace = inplace + self.language_level = language_level + self.extension = extension + + def find_spec(self, fullname, path, target=None): + if not path: + path = [os.getcwd()] # top level import -- + if "." 
in fullname: + *parents, name = fullname.split(".") + else: + name = fullname + for entry in path: + if os.path.isdir(os.path.join(entry, name)): + # this module has child modules + filename = os.path.join(entry, name, "__init__" + self.extension) + submodule_locations = [os.path.join(entry, name)] + else: + filename = os.path.join(entry, name + self.extension) + submodule_locations = None + if not os.path.exists(filename): + continue + + return spec_from_file_location( + fullname, filename, + loader=PyxImportLoader(filename, self.pyxbuild_dir, self.inplace, self.language_level), + submodule_search_locations=submodule_locations) + + return None # we don't know how to import this + + +class PyImportMetaFinder(MetaPathFinder): + + def __init__(self, extension=PY_EXT, pyxbuild_dir=None, inplace=False, language_level=None): + self.pyxbuild_dir = pyxbuild_dir + self.inplace = inplace + self.language_level = language_level + self.extension = extension + self.uncompilable_modules = {} + self.blocked_modules = ['Cython', 'pyxbuild', 'pyximport.pyxbuild', + 'distutils', 'cython'] + self.blocked_packages = ['Cython.', 'distutils.'] + + def find_spec(self, fullname, path, target=None): + if fullname in sys.modules: + return None + if any([fullname.startswith(pkg) for pkg in self.blocked_packages]): + return None + if fullname in self.blocked_modules: + # prevent infinite recursion + return None + + self.blocked_modules.append(fullname) + name = fullname + if not path: + path = [os.getcwd()] # top level import -- + try: + for entry in path: + if os.path.isdir(os.path.join(entry, name)): + # this module has child modules + filename = os.path.join(entry, name, "__init__" + self.extension) + submodule_locations = [os.path.join(entry, name)] + else: + filename = os.path.join(entry, name + self.extension) + submodule_locations = None + if not os.path.exists(filename): + continue + + return spec_from_file_location( + fullname, filename, + loader=PyxImportLoader(filename, 
self.pyxbuild_dir, self.inplace, self.language_level), + submodule_search_locations=submodule_locations) + finally: + self.blocked_modules.pop() + + return None # we don't know how to import this + + +class PyxImportLoader(ExtensionFileLoader): + + def __init__(self, filename, pyxbuild_dir, inplace, language_level): + module_name = os.path.splitext(os.path.basename(filename))[0] + super().__init__(module_name, filename) + self._pyxbuild_dir = pyxbuild_dir + self._inplace = inplace + self._language_level = language_level + + def create_module(self, spec): + try: + so_path = build_module(spec.name, pyxfilename=spec.origin, pyxbuild_dir=self._pyxbuild_dir, + inplace=self._inplace, language_level=self._language_level) + self.path = so_path + spec.origin = so_path + return super().create_module(spec) + except Exception as failure_exc: + _debug("Failed to load extension module: %r" % failure_exc) + if pyxargs.load_py_module_on_import_failure and spec.origin.endswith(PY_EXT): + spec = importlib.util.spec_from_file_location(spec.name, spec.origin, + loader=SourceFileLoader(spec.name, spec.origin)) + mod = importlib.util.module_from_spec(spec) + assert mod.__file__ in (spec.origin, spec.origin + 'c', spec.origin + 'o'), (mod.__file__, spec.origin) + return mod + else: + tb = sys.exc_info()[2] + import traceback + exc = ImportError("Building module %s failed: %s" % ( + spec.name, traceback.format_exception_only(*sys.exc_info()[:2]))) + raise exc.with_traceback(tb) + + def exec_module(self, module): + try: + return super().exec_module(module) + except Exception as failure_exc: + import traceback + _debug("Failed to load extension module: %r" % failure_exc) + raise ImportError("Executing module %s failed %s" % ( + module.__file__, traceback.format_exception_only(*sys.exc_info()[:2]))) + + +#install args +class PyxArgs(object): + build_dir=True + build_in_temp=True + setup_args={} #None + + +def _have_importers(): + has_py_importer = False + has_pyx_importer = False + for 
importer in sys.meta_path: + if isinstance(importer, PyxImportMetaFinder): + if isinstance(importer, PyImportMetaFinder): + has_py_importer = True + else: + has_pyx_importer = True + + return has_py_importer, has_pyx_importer + + +def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True, + setup_args=None, reload_support=False, + load_py_module_on_import_failure=False, inplace=False, + language_level=None): + """ Main entry point for pyxinstall. + + Call this to install the ``.pyx`` import hook in + your meta-path for a single Python process. If you want it to be + installed whenever you use Python, add it to your ``sitecustomize`` + (as described above). + + :param pyximport: If set to False, does not try to import ``.pyx`` files. + + :param pyimport: You can pass ``pyimport=True`` to also + install the ``.py`` import hook + in your meta-path. Note, however, that it is rather experimental, + will not work at all for some ``.py`` files and packages, and will + heavily slow down your imports due to search and compilation. + Use at your own risk. + + :param build_dir: By default, compiled modules will end up in a ``.pyxbld`` + directory in the user's home directory. Passing a different path + as ``build_dir`` will override this. + + :param build_in_temp: If ``False``, will produce the C files locally. Working + with complex dependencies and debugging becomes more easy. This + can principally interfere with existing files of the same name. + + :param setup_args: Dict of arguments for Distribution. + See ``distutils.core.setup()``. + + :param reload_support: Enables support for dynamic + ``reload(my_module)``, e.g. after a change in the Cython code. + Additional files ``.reloadNN`` may arise on that account, when + the previously loaded module file cannot be overwritten. 
+ + :param load_py_module_on_import_failure: If the compilation of a ``.py`` + file succeeds, but the subsequent import fails for some reason, + retry the import with the normal ``.py`` module instead of the + compiled module. Note that this may lead to unpredictable results + for modules that change the system state during their import, as + the second import will rerun these modifications in whatever state + the system was left after the import of the compiled module + failed. + + :param inplace: Install the compiled module + (``.so`` for Linux and Mac / ``.pyd`` for Windows) + next to the source file. + + :param language_level: The source language level to use: 2 or 3. + The default is to use the language level of the current Python + runtime for .py files and Py2 for ``.pyx`` files. + """ + if setup_args is None: + setup_args = {} + if not build_dir: + build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld') + + global pyxargs + pyxargs = PyxArgs() #$pycheck_no + pyxargs.build_dir = build_dir + pyxargs.build_in_temp = build_in_temp + pyxargs.setup_args = (setup_args or {}).copy() + pyxargs.reload_support = reload_support + pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure + + has_py_importer, has_pyx_importer = _have_importers() + py_importer, pyx_importer = None, None + + if pyimport and not has_py_importer: + py_importer = PyImportMetaFinder(pyxbuild_dir=build_dir, inplace=inplace, + language_level=language_level) + # make sure we import Cython before we install the import hook + import Cython.Compiler.Main, Cython.Compiler.Pipeline, Cython.Compiler.Optimize + sys.meta_path.insert(0, py_importer) + + if pyximport and not has_pyx_importer: + pyx_importer = PyxImportMetaFinder(pyxbuild_dir=build_dir, inplace=inplace, + language_level=language_level) + sys.meta_path.append(pyx_importer) + + return py_importer, pyx_importer + + +def uninstall(py_importer, pyx_importer): + """ + Uninstall an import hook. 
+ """ + try: + sys.meta_path.remove(py_importer) + except ValueError: + pass + + try: + sys.meta_path.remove(pyx_importer) + except ValueError: + pass + + +# MAIN + +def show_docs(): + import __main__ + __main__.__name__ = mod_name + for name in dir(__main__): + item = getattr(__main__, name) + try: + setattr(item, "__module__", mod_name) + except (AttributeError, TypeError): + pass + help(__main__) + + +if __name__ == '__main__': + show_docs() diff --git a/pyximport/pyximport.py b/pyximport/pyximport.py index b2077826a..9d575815a 100644 --- a/pyximport/pyximport.py +++ b/pyximport/pyximport.py @@ -1,606 +1,11 @@ -""" -Import hooks; when installed with the install() function, these hooks -allow importing .pyx files as if they were Python modules. - -If you want the hook installed every time you run Python -you can add it to your Python version by adding these lines to -sitecustomize.py (which you can create from scratch in site-packages -if it doesn't exist there or somewhere else on your python path):: - - import pyximport - pyximport.install() - -For instance on the Mac with a non-system Python 2.3, you could create -sitecustomize.py with only those two lines at -/usr/local/lib/python2.3/site-packages/sitecustomize.py . - -A custom distutils.core.Extension instance and setup() args -(Distribution) for for the build can be defined by a .pyxbld -file like: - -# examplemod.pyxbld -def make_ext(modname, pyxfilename): - from distutils.extension import Extension - return Extension(name = modname, - sources=[pyxfilename, 'hello.c'], - include_dirs=['/myinclude'] ) -def make_setup_args(): - return dict(script_args=["--compiler=mingw32"]) - -Extra dependencies can be defined by a .pyxdep . -See README. - -Since Cython 0.11, the :mod:`pyximport` module also has experimental -compilation support for normal Python modules. 
This allows you to -automatically run Cython on every .pyx and .py module that Python -imports, including parts of the standard library and installed -packages. Cython will still fail to compile a lot of Python modules, -in which case the import mechanism will fall back to loading the -Python source modules instead. The .py import mechanism is installed -like this:: - - pyximport.install(pyimport = True) - -Running this module as a top-level script will run a test and then print -the documentation. - -This code is based on the Py2.3+ import protocol as described in PEP 302. -""" - -import glob -import imp -import os +from __future__ import absolute_import import sys -from zipimport import zipimporter, ZipImportError - -mod_name = "pyximport" - -PYX_EXT = ".pyx" -PYXDEP_EXT = ".pyxdep" -PYXBLD_EXT = ".pyxbld" - -DEBUG_IMPORT = False - - -def _print(message, args): - if args: - message = message % args - print(message) - - -def _debug(message, *args): - if DEBUG_IMPORT: - _print(message, args) - - -def _info(message, *args): - _print(message, args) - - -# Performance problem: for every PYX file that is imported, we will -# invoke the whole distutils infrastructure even if the module is -# already built. It might be more efficient to only do it when the -# mod time of the .pyx is newer than the mod time of the .so but -# the question is how to get distutils to tell me the name of the .so -# before it builds it. Maybe it is easy...but maybe the performance -# issue isn't real. -def _load_pyrex(name, filename): - "Load a pyrex file given a name and filename." 
- - -def get_distutils_extension(modname, pyxfilename, language_level=None): -# try: -# import hashlib -# except ImportError: -# import md5 as hashlib -# extra = "_" + hashlib.md5(open(pyxfilename).read()).hexdigest() -# modname = modname + extra - extension_mod,setup_args = handle_special_build(modname, pyxfilename) - if not extension_mod: - if not isinstance(pyxfilename, str): - # distutils is stupid in Py2 and requires exactly 'str' - # => encode accidentally coerced unicode strings back to str - pyxfilename = pyxfilename.encode(sys.getfilesystemencoding()) - from distutils.extension import Extension - extension_mod = Extension(name = modname, sources=[pyxfilename]) - if language_level is not None: - extension_mod.cython_directives = {'language_level': language_level} - return extension_mod,setup_args - - -def handle_special_build(modname, pyxfilename): - special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT - ext = None - setup_args={} - if os.path.exists(special_build): - # globls = {} - # locs = {} - # execfile(special_build, globls, locs) - # ext = locs["make_ext"](modname, pyxfilename) - with open(special_build) as fid: - mod = imp.load_source("XXXX", special_build, fid) - make_ext = getattr(mod,'make_ext',None) - if make_ext: - ext = make_ext(modname, pyxfilename) - assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build - make_setup_args = getattr(mod, 'make_setup_args',None) - if make_setup_args: - setup_args = make_setup_args() - assert isinstance(setup_args,dict), ("make_setup_args in %s did not return a dict" - % special_build) - assert set or setup_args, ("neither make_ext nor make_setup_args %s" - % special_build) - ext.sources = [os.path.join(os.path.dirname(special_build), source) - for source in ext.sources] - return ext, setup_args - - -def handle_dependencies(pyxfilename): - testing = '_test_files' in globals() - dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT - - # by default let distutils 
decide whether to rebuild on its own - # (it has a better idea of what the output file will be) - - # but we know more about dependencies so force a rebuild if - # some of the dependencies are newer than the pyxfile. - if os.path.exists(dependfile): - with open(dependfile) as fid: - depends = fid.readlines() - depends = [depend.strip() for depend in depends] - - # gather dependencies in the "files" variable - # the dependency file is itself a dependency - files = [dependfile] - for depend in depends: - fullpath = os.path.join(os.path.dirname(dependfile), - depend) - files.extend(glob.glob(fullpath)) - - # only for unit testing to see we did the right thing - if testing: - _test_files[:] = [] #$pycheck_no - - # if any file that the pyxfile depends upon is newer than - # the pyx file, 'touch' the pyx file so that distutils will - # be tricked into rebuilding it. - for file in files: - from distutils.dep_util import newer - if newer(file, pyxfilename): - _debug("Rebuilding %s because of %s", pyxfilename, file) - filetime = os.path.getmtime(file) - os.utime(pyxfilename, (filetime, filetime)) - if testing: - _test_files.append(file) - - -def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None): - assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename - handle_dependencies(pyxfilename) - - extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level) - build_in_temp = pyxargs.build_in_temp - sargs = pyxargs.setup_args.copy() - sargs.update(setup_args) - build_in_temp = sargs.pop('build_in_temp',build_in_temp) - - from . import pyxbuild - so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod, - build_in_temp=build_in_temp, - pyxbuild_dir=pyxbuild_dir, - setup_args=sargs, - inplace=inplace, - reload_support=pyxargs.reload_support) - assert os.path.exists(so_path), "Cannot find: %s" % so_path - - junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? 
yes, indeed, trying to eat my files ;) - junkstuff = glob.glob(junkpath) - for path in junkstuff: - if path != so_path: - try: - os.remove(path) - except IOError: - _info("Couldn't remove %s", path) - - return so_path - - -def load_module(name, pyxfilename, pyxbuild_dir=None, is_package=False, - build_inplace=False, language_level=None, so_path=None): - try: - if so_path is None: - if is_package: - module_name = name + '.__init__' - else: - module_name = name - so_path = build_module(module_name, pyxfilename, pyxbuild_dir, - inplace=build_inplace, language_level=language_level) - mod = imp.load_dynamic(name, so_path) - if is_package and not hasattr(mod, '__path__'): - mod.__path__ = [os.path.dirname(so_path)] - assert mod.__file__ == so_path, (mod.__file__, so_path) - except Exception as failure_exc: - _debug("Failed to load extension module: %r" % failure_exc) - if pyxargs.load_py_module_on_import_failure and pyxfilename.endswith('.py'): - # try to fall back to normal import - mod = imp.load_source(name, pyxfilename) - assert mod.__file__ in (pyxfilename, pyxfilename+'c', pyxfilename+'o'), (mod.__file__, pyxfilename) - else: - tb = sys.exc_info()[2] - import traceback - exc = ImportError("Building module %s failed: %s" % ( - name, traceback.format_exception_only(*sys.exc_info()[:2]))) - if sys.version_info[0] >= 3: - raise exc.with_traceback(tb) - else: - exec("raise exc, None, tb", {'exc': exc, 'tb': tb}) - return mod - - -# import hooks - -class PyxImporter(object): - """A meta-path importer for .pyx files. - """ - def __init__(self, extension=PYX_EXT, pyxbuild_dir=None, inplace=False, - language_level=None): - self.extension = extension - self.pyxbuild_dir = pyxbuild_dir - self.inplace = inplace - self.language_level = language_level - - def find_module(self, fullname, package_path=None): - if fullname in sys.modules and not pyxargs.reload_support: - return None # only here when reload() - - # package_path might be a _NamespacePath. Convert that into a list... 
- if package_path is not None and not isinstance(package_path, list): - package_path = list(package_path) - try: - fp, pathname, (ext,mode,ty) = imp.find_module(fullname,package_path) - if fp: fp.close() # Python should offer a Default-Loader to avoid this double find/open! - if pathname and ty == imp.PKG_DIRECTORY: - pkg_file = os.path.join(pathname, '__init__'+self.extension) - if os.path.isfile(pkg_file): - return PyxLoader(fullname, pathname, - init_path=pkg_file, - pyxbuild_dir=self.pyxbuild_dir, - inplace=self.inplace, - language_level=self.language_level) - if pathname and pathname.endswith(self.extension): - return PyxLoader(fullname, pathname, - pyxbuild_dir=self.pyxbuild_dir, - inplace=self.inplace, - language_level=self.language_level) - if ty != imp.C_EXTENSION: # only when an extension, check if we have a .pyx next! - return None - - # find .pyx fast, when .so/.pyd exist --inplace - pyxpath = os.path.splitext(pathname)[0]+self.extension - if os.path.isfile(pyxpath): - return PyxLoader(fullname, pyxpath, - pyxbuild_dir=self.pyxbuild_dir, - inplace=self.inplace, - language_level=self.language_level) - - # .so/.pyd's on PATH should not be remote from .pyx's - # think no need to implement PyxArgs.importer_search_remote here? - - except ImportError: - pass - - # searching sys.path ... - - #if DEBUG_IMPORT: print "SEARCHING", fullname, package_path - - mod_parts = fullname.split('.') - module_name = mod_parts[-1] - pyx_module_name = module_name + self.extension - - # this may work, but it returns the file content, not its path - #import pkgutil - #pyx_source = pkgutil.get_data(package, pyx_module_name) - - paths = package_path or sys.path - for path in paths: - pyx_data = None - if not path: - path = os.getcwd() - elif os.path.isfile(path): - try: - zi = zipimporter(path) - pyx_data = zi.get_data(pyx_module_name) - except (ZipImportError, IOError, OSError): - continue # Module not found. 
- # unzip the imported file into the build dir - # FIXME: can interfere with later imports if build dir is in sys.path and comes before zip file - path = self.pyxbuild_dir - elif not os.path.isabs(path): - path = os.path.abspath(path) - - pyx_module_path = os.path.join(path, pyx_module_name) - if pyx_data is not None: - if not os.path.exists(path): - try: - os.makedirs(path) - except OSError: - # concurrency issue? - if not os.path.exists(path): - raise - with open(pyx_module_path, "wb") as f: - f.write(pyx_data) - elif not os.path.isfile(pyx_module_path): - continue # Module not found. - - return PyxLoader(fullname, pyx_module_path, - pyxbuild_dir=self.pyxbuild_dir, - inplace=self.inplace, - language_level=self.language_level) - - # not found, normal package, not a .pyx file, none of our business - _debug("%s not found" % fullname) - return None - - -class PyImporter(PyxImporter): - """A meta-path importer for normal .py files. - """ - def __init__(self, pyxbuild_dir=None, inplace=False, language_level=None): - if language_level is None: - language_level = sys.version_info[0] - self.super = super(PyImporter, self) - self.super.__init__(extension='.py', pyxbuild_dir=pyxbuild_dir, inplace=inplace, - language_level=language_level) - self.uncompilable_modules = {} - self.blocked_modules = ['Cython', 'pyxbuild', 'pyximport.pyxbuild', - 'distutils'] - self.blocked_packages = ['Cython.', 'distutils.'] - - def find_module(self, fullname, package_path=None): - if fullname in sys.modules: - return None - if any([fullname.startswith(pkg) for pkg in self.blocked_packages]): - return None - if fullname in self.blocked_modules: - # prevent infinite recursion - return None - if _lib_loader.knows(fullname): - return _lib_loader - _debug("trying import of module '%s'", fullname) - if fullname in self.uncompilable_modules: - path, last_modified = self.uncompilable_modules[fullname] - try: - new_last_modified = os.stat(path).st_mtime - if new_last_modified > last_modified: - # 
import would fail again - return None - except OSError: - # module is no longer where we found it, retry the import - pass - - self.blocked_modules.append(fullname) - try: - importer = self.super.find_module(fullname, package_path) - if importer is not None: - if importer.init_path: - path = importer.init_path - real_name = fullname + '.__init__' - else: - path = importer.path - real_name = fullname - _debug("importer found path %s for module %s", path, real_name) - try: - so_path = build_module( - real_name, path, - pyxbuild_dir=self.pyxbuild_dir, - language_level=self.language_level, - inplace=self.inplace) - _lib_loader.add_lib(fullname, path, so_path, - is_package=bool(importer.init_path)) - return _lib_loader - except Exception: - if DEBUG_IMPORT: - import traceback - traceback.print_exc() - # build failed, not a compilable Python module - try: - last_modified = os.stat(path).st_mtime - except OSError: - last_modified = 0 - self.uncompilable_modules[fullname] = (path, last_modified) - importer = None - finally: - self.blocked_modules.pop() - return importer - - -class LibLoader(object): - def __init__(self): - self._libs = {} - - def load_module(self, fullname): - try: - source_path, so_path, is_package = self._libs[fullname] - except KeyError: - raise ValueError("invalid module %s" % fullname) - _debug("Loading shared library module '%s' from %s", fullname, so_path) - return load_module(fullname, source_path, so_path=so_path, is_package=is_package) - - def add_lib(self, fullname, path, so_path, is_package): - self._libs[fullname] = (path, so_path, is_package) - - def knows(self, fullname): - return fullname in self._libs - -_lib_loader = LibLoader() - - -class PyxLoader(object): - def __init__(self, fullname, path, init_path=None, pyxbuild_dir=None, - inplace=False, language_level=None): - _debug("PyxLoader created for loading %s from %s (init path: %s)", - fullname, path, init_path) - self.fullname = fullname - self.path, self.init_path = path, init_path - 
self.pyxbuild_dir = pyxbuild_dir - self.inplace = inplace - self.language_level = language_level - - def load_module(self, fullname): - assert self.fullname == fullname, ( - "invalid module, expected %s, got %s" % ( - self.fullname, fullname)) - if self.init_path: - # package - #print "PACKAGE", fullname - module = load_module(fullname, self.init_path, - self.pyxbuild_dir, is_package=True, - build_inplace=self.inplace, - language_level=self.language_level) - module.__path__ = [self.path] - else: - #print "MODULE", fullname - module = load_module(fullname, self.path, - self.pyxbuild_dir, - build_inplace=self.inplace, - language_level=self.language_level) - return module - - -#install args -class PyxArgs(object): - build_dir=True - build_in_temp=True - setup_args={} #None - -##pyxargs=None - - -def _have_importers(): - has_py_importer = False - has_pyx_importer = False - for importer in sys.meta_path: - if isinstance(importer, PyxImporter): - if isinstance(importer, PyImporter): - has_py_importer = True - else: - has_pyx_importer = True - - return has_py_importer, has_pyx_importer - - -def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True, - setup_args=None, reload_support=False, - load_py_module_on_import_failure=False, inplace=False, - language_level=None): - """ Main entry point for pyxinstall. - - Call this to install the ``.pyx`` import hook in - your meta-path for a single Python process. If you want it to be - installed whenever you use Python, add it to your ``sitecustomize`` - (as described above). - - :param pyximport: If set to False, does not try to import ``.pyx`` files. - - :param pyimport: You can pass ``pyimport=True`` to also - install the ``.py`` import hook - in your meta-path. Note, however, that it is rather experimental, - will not work at all for some ``.py`` files and packages, and will - heavily slow down your imports due to search and compilation. - Use at your own risk. 
- - :param build_dir: By default, compiled modules will end up in a ``.pyxbld`` - directory in the user's home directory. Passing a different path - as ``build_dir`` will override this. - - :param build_in_temp: If ``False``, will produce the C files locally. Working - with complex dependencies and debugging becomes more easy. This - can principally interfere with existing files of the same name. - - :param setup_args: Dict of arguments for Distribution. - See ``distutils.core.setup()``. - - :param reload_support: Enables support for dynamic - ``reload(my_module)``, e.g. after a change in the Cython code. - Additional files ``.reloadNN`` may arise on that account, when - the previously loaded module file cannot be overwritten. - - :param load_py_module_on_import_failure: If the compilation of a ``.py`` - file succeeds, but the subsequent import fails for some reason, - retry the import with the normal ``.py`` module instead of the - compiled module. Note that this may lead to unpredictable results - for modules that change the system state during their import, as - the second import will rerun these modifications in whatever state - the system was left after the import of the compiled module - failed. - - :param inplace: Install the compiled module - (``.so`` for Linux and Mac / ``.pyd`` for Windows) - next to the source file. - - :param language_level: The source language level to use: 2 or 3. - The default is to use the language level of the current Python - runtime for .py files and Py2 for ``.pyx`` files. 
- """ - if setup_args is None: - setup_args = {} - if not build_dir: - build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld') - - global pyxargs - pyxargs = PyxArgs() #$pycheck_no - pyxargs.build_dir = build_dir - pyxargs.build_in_temp = build_in_temp - pyxargs.setup_args = (setup_args or {}).copy() - pyxargs.reload_support = reload_support - pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure - - has_py_importer, has_pyx_importer = _have_importers() - py_importer, pyx_importer = None, None - - if pyimport and not has_py_importer: - py_importer = PyImporter(pyxbuild_dir=build_dir, inplace=inplace, - language_level=language_level) - # make sure we import Cython before we install the import hook - import Cython.Compiler.Main, Cython.Compiler.Pipeline, Cython.Compiler.Optimize - sys.meta_path.insert(0, py_importer) - - if pyximport and not has_pyx_importer: - pyx_importer = PyxImporter(pyxbuild_dir=build_dir, inplace=inplace, - language_level=language_level) - sys.meta_path.append(pyx_importer) - - return py_importer, pyx_importer - - -def uninstall(py_importer, pyx_importer): - """ - Uninstall an import hook. 
- """ - try: - sys.meta_path.remove(py_importer) - except ValueError: - pass - - try: - sys.meta_path.remove(pyx_importer) - except ValueError: - pass - - -# MAIN - -def show_docs(): - import __main__ - __main__.__name__ = mod_name - for name in dir(__main__): - item = getattr(__main__, name) - try: - setattr(item, "__module__", mod_name) - except (AttributeError, TypeError): - pass - help(__main__) +if sys.version_info < (3, 5): + # _pyximport3 module requires at least Python 3.5 + from pyximport._pyximport2 import install, uninstall, show_docs +else: + from pyximport._pyximport3 import install, uninstall, show_docs if __name__ == '__main__': show_docs() -- cgit v1.2.1 From f80d32583c4f6db5050ce48408415e7a55acaf02 Mon Sep 17 00:00:00 2001 From: 0dminnimda <0dminnimda@gmail.com> Date: Mon, 4 Jul 2022 22:51:42 +0300 Subject: Reject invalid spellings of Ellipsis (GH-4868) --- Cython/Compiler/Lexicon.py | 3 +- Cython/Compiler/Parsing.py | 16 +++++----- Cython/Compiler/Tests/TestGrammar.py | 57 +++++++++++++++++++++++++++++++++++- tests/compile/fromimport.pyx | 24 +++++++++++++++ tests/compile/fromimport_star.pyx | 7 +++++ tests/errors/incomplete_varadic.pyx | 8 +++++ 6 files changed, 104 insertions(+), 11 deletions(-) create mode 100644 tests/errors/incomplete_varadic.pyx diff --git a/Cython/Compiler/Lexicon.py b/Cython/Compiler/Lexicon.py index 654febbe7..c3ca05b56 100644 --- a/Cython/Compiler/Lexicon.py +++ b/Cython/Compiler/Lexicon.py @@ -74,6 +74,7 @@ def make_lexicon(): bra = Any("([{") ket = Any(")]}") + ellipsis = Str("...") punct = Any(":,;+-*/|&<>=.%`~^?!@") diphthong = Str("==", "<>", "!=", "<=", ">=", "<<", ">>", "**", "//", "+=", "-=", "*=", "/=", "%=", "|=", "^=", "&=", @@ -89,7 +90,7 @@ def make_lexicon(): (intliteral, Method('strip_underscores', symbol='INT')), (fltconst, Method('strip_underscores', symbol='FLOAT')), (imagconst, Method('strip_underscores', symbol='IMAG')), - (punct | diphthong, TEXT), + (ellipsis | punct | diphthong, TEXT), (bra, 
Method('open_bracket_action')), (ket, Method('close_bracket_action')), diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index de1ca12b3..1a31e2697 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -653,9 +653,7 @@ def p_slice_element(s, follow_set): return None def expect_ellipsis(s): - s.expect('.') - s.expect('.') - s.expect('.') + s.expect('...') def make_slice_nodes(pos, subscripts): # Convert a list of subscripts as returned @@ -701,7 +699,7 @@ def p_atom(s): return p_dict_or_set_maker(s) elif sy == '`': return p_backquote_expr(s) - elif sy == '.': + elif sy == '...': expect_ellipsis(s) return ExprNodes.EllipsisNode(pos) elif sy == 'INT': @@ -1760,11 +1758,11 @@ def p_from_import_statement(s, first_statement = 0): # s.sy == 'from' pos = s.position() s.next() - if s.sy == '.': + if s.sy in ('.', '...'): # count relative import level level = 0 - while s.sy == '.': - level += 1 + while s.sy in ('.', '...'): + level += len(s.sy) s.next() else: level = None @@ -3035,7 +3033,7 @@ def p_exception_value_clause(s): return exc_val, exc_check c_arg_list_terminators = cython.declare(frozenset, frozenset(( - '*', '**', '.', ')', ':', '/'))) + '*', '**', '...', ')', ':', '/'))) def p_c_arg_list(s, ctx = Ctx(), in_pyfunc = 0, cmethod_flag = 0, nonempty_declarators = 0, kw_only = 0, annotated = 1): @@ -3054,7 +3052,7 @@ def p_c_arg_list(s, ctx = Ctx(), in_pyfunc = 0, cmethod_flag = 0, return args def p_optional_ellipsis(s): - if s.sy == '.': + if s.sy == '...': expect_ellipsis(s) return 1 else: diff --git a/Cython/Compiler/Tests/TestGrammar.py b/Cython/Compiler/Tests/TestGrammar.py index f80ec22d3..852b48c33 100644 --- a/Cython/Compiler/Tests/TestGrammar.py +++ b/Cython/Compiler/Tests/TestGrammar.py @@ -7,9 +7,12 @@ Uses TreeFragment to test invalid syntax. from __future__ import absolute_import +import ast +import textwrap + from ...TestUtils import CythonTest -from ..Errors import CompileError from .. 
import ExprNodes +from ..Errors import CompileError # Copied from CPython's test_grammar.py VALID_UNDERSCORE_LITERALS = [ @@ -103,6 +106,39 @@ INVALID_UNDERSCORE_LITERALS = [ ] +INVALID_ELLIPSIS = [ + (". . .", 2, 0), + (". ..", 2, 0), + (".. .", 2, 0), + (". ...", 2, 0), + (". ... .", 2, 0), + (".. ... .", 2, 0), + (". ... ..", 2, 0), + (""" + ( + . + .. + ) + """, 3, 4), + (""" + [ + .. + ., + None + ] + """, 3, 4), + (""" + { + None, + . + . + + . + } + """, 4, 4) +] + + class TestGrammar(CythonTest): def test_invalid_number_literals(self): @@ -142,6 +178,25 @@ class TestGrammar(CythonTest): else: assert isinstance(literal_node, ExprNodes.IntNode), (literal, literal_node) + def test_invalid_ellipsis(self): + ERR = ":{0}:{1}: Expected an identifier or literal" + for code, line, col in INVALID_ELLIPSIS: + try: + ast.parse(textwrap.dedent(code)) + except SyntaxError as exc: + assert True + else: + assert False, "Invalid Python code '%s' failed to raise an exception" % code + + try: + self.fragment(u'''\ + # cython: language_level=3 + ''' + code) + except CompileError as exc: + assert ERR.format(line, col) in str(exc), str(exc) + else: + assert False, "Invalid Cython code '%s' failed to raise an exception" % code + if __name__ == "__main__": import unittest diff --git a/tests/compile/fromimport.pyx b/tests/compile/fromimport.pyx index 46f7b5442..e84b26a97 100644 --- a/tests/compile/fromimport.pyx +++ b/tests/compile/fromimport.pyx @@ -6,10 +6,34 @@ def f(): from spam import eggs as ova from . import spam from ... import spam + from .. . import spam + from . .. import spam + from . . . import spam from .. import spam, foo + from . . import spam, foo from ... import spam, foobar + from .. . import spam, foobar + from . .. import spam, foobar + from . . . import spam, foobar from .spam import foo + from . spam import foo from ...spam import foo, bar + from .. . spam import foo, bar + from . .. spam import foo, bar + from . . . 
spam import foo, bar from ...spam.foo import bar + from ... spam.foo import bar + from .. . spam.foo import bar + from . .. spam.foo import bar + from . . . spam.foo import bar from ...spam.foo import foo, bar + from ... spam.foo import foo, bar + from .. . spam.foo import foo, bar + from . .. spam.foo import foo, bar + from . . . spam.foo import foo, bar from ...spam.foo import (foo, bar) + from ... spam.foo import (foo, bar) + from .. . spam.foo import (foo, bar) + from .. . spam.foo import (foo, bar) + from . .. spam.foo import (foo, bar) + from . . . spam.foo import (foo, bar) diff --git a/tests/compile/fromimport_star.pyx b/tests/compile/fromimport_star.pyx index 6c19476b7..80542dddb 100644 --- a/tests/compile/fromimport_star.pyx +++ b/tests/compile/fromimport_star.pyx @@ -2,5 +2,12 @@ from spam import * from ...spam.foo import * +from ... spam.foo import * +from .. . spam.foo import * +from . . . spam.foo import * +from . .. spam.foo import * from . import * from ... import * +from .. . import * +from . .. import * +from . . . import * diff --git a/tests/errors/incomplete_varadic.pyx b/tests/errors/incomplete_varadic.pyx new file mode 100644 index 000000000..1695a874d --- /dev/null +++ b/tests/errors/incomplete_varadic.pyx @@ -0,0 +1,8 @@ +# mode: error + +cdef error_time(bool its_fine, .): + pass + +_ERRORS = u""" +3: 31: Expected an identifier, found '.' +""" -- cgit v1.2.1 From 77a51ab6965a892512ecdd83cd54023e531c9e9c Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 4 Jul 2022 21:35:52 +0100 Subject: Avoid NULL dereference in __Pyx_KwValues_FASTCALL (GH-4872) Simpler follow up to https://github.com/cython/cython/pull/4726. I don't think we need to be worried null args and non-zero nargs, but null args and 0 nargs is quite common and valid I think. This PR just avoids a dereference in that case (which is probably dubious). 
--- Cython/Utility/FunctionArguments.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/FunctionArguments.c b/Cython/Utility/FunctionArguments.c index 1882f826f..8bdaee562 100644 --- a/Cython/Utility/FunctionArguments.c +++ b/Cython/Utility/FunctionArguments.c @@ -422,7 +422,7 @@ bad: #if CYTHON_METH_FASTCALL #define __Pyx_Arg_FASTCALL(args, i) args[i] #define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds) - #define __Pyx_KwValues_FASTCALL(args, nargs) (&args[nargs]) + #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) #else -- cgit v1.2.1 From 76b22ac750531919194c4334150dea4c51d67f44 Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 4 Jul 2022 21:37:55 +0100 Subject: Add note that embedding does not produce a portable application (GH-4863) Try to make it clear that using cython --embed doesn't embed any external dependencies. --- docs/src/tutorial/embedding.rst | 7 +++++++ docs/src/userguide/external_C_code.rst | 4 +++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/docs/src/tutorial/embedding.rst b/docs/src/tutorial/embedding.rst index 3f6325428..819506cde 100644 --- a/docs/src/tutorial/embedding.rst +++ b/docs/src/tutorial/embedding.rst @@ -75,3 +75,10 @@ option. Or use the script to embed multiple modules. See the `embedding demo program `_ for a complete example setup. + +Be aware that your application will not contain any external dependencies that +you use (including Python standard library modules) and so may not be truly portable. +If you want to generate a portable application we recommend using a specialized +tool (e.g. `PyInstaller `_ +or `cx_freeze `_) to find and +bundle these dependencies. 
diff --git a/docs/src/userguide/external_C_code.rst b/docs/src/userguide/external_C_code.rst index b080ecf0e..2e977243d 100644 --- a/docs/src/userguide/external_C_code.rst +++ b/docs/src/userguide/external_C_code.rst @@ -471,7 +471,9 @@ For example, in the following snippet that includes :file:`grail.h`: } This C code can then be built together with the Cython-generated C code -in a single program (or library). +in a single program (or library). Be aware that this program will not include +any external dependencies that your module uses. Therefore typically this will +not generate a truly portable application for most cases. In Python 3.x, calling the module init function directly should be avoided. Instead, use the `inittab mechanism `_ -- cgit v1.2.1 From 24f10066335332d9ff4680e6bf6f7550f67097da Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 4 Jul 2022 21:44:49 +0100 Subject: Don't add multiple "CoerceToBooleanNode" layers (GH-4847) --- Cython/Compiler/ExprNodes.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 2fb66e9da..fb2dedd56 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -1099,6 +1099,8 @@ class ExprNode(Node): type = self.type if type.is_enum or type.is_error: return self + elif type is PyrexTypes.c_bint_type: + return self elif type.is_pyobject or type.is_int or type.is_ptr or type.is_float: return CoerceToBooleanNode(self, env) elif type.is_cpp_class and type.scope and type.scope.lookup("operator bool"): -- cgit v1.2.1 From 22f4444a1722fe0fd3f9157f1db35ab1c02522a9 Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 6 Jul 2022 20:47:04 +0100 Subject: Add tests for NULL objects in memoryviews (GH-4871) Follow up on https://github.com/cython/cython/pull/4859 by adding tests for memoryviews too. Additional refactoring to avoid invalid decref calls on test failures. Instead, the item is safely cleared directly before the access. 
--- tests/buffers/bufaccess.pyx | 15 +++++---------- tests/memoryview/memslice.pyx | 45 ++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 49 insertions(+), 11 deletions(-) diff --git a/tests/buffers/bufaccess.pyx b/tests/buffers/bufaccess.pyx index 2a5e84185..053ea2890 100644 --- a/tests/buffers/bufaccess.pyx +++ b/tests/buffers/bufaccess.pyx @@ -10,7 +10,7 @@ from __future__ import unicode_literals from cpython.object cimport PyObject -from cpython.ref cimport Py_INCREF, Py_DECREF +from cpython.ref cimport Py_INCREF, Py_DECREF, Py_CLEAR cimport cython __test__ = {} @@ -1013,17 +1013,14 @@ def check_object_nulled_1d(MockBuffer[object, ndim=1] buf, int idx, obj): >>> rc1 = get_refcount(a) >>> A = ObjectMockBuffer(None, [a, a]) >>> check_object_nulled_1d(A, 0, a) - >>> decref(a) # new reference "added" to A >>> check_object_nulled_1d(A, 1, a) - >>> decref(a) >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,)) >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride - >>> decref(a) >>> get_refcount(a) == rc1 True """ - cdef void **data = buf.buffer - data[idx] = NULL + cdef PyObject **data = buf.buffer + Py_CLEAR(data[idx]) res = buf[idx] # takes None buf[idx] = obj return res @@ -1037,14 +1034,12 @@ def check_object_nulled_2d(MockBuffer[object, ndim=2] buf, int idx1, int idx2, o >>> rc1 = get_refcount(a) >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2)) >>> check_object_nulled_2d(A, 0, 0, a) - >>> decref(a) # new reference "added" to A >>> check_object_nulled_2d(A, 1, 1, a) - >>> decref(a) >>> get_refcount(a) == rc1 True """ - cdef void **data = buf.buffer - data[idx1 + 2*idx2] = NULL + cdef PyObject **data = buf.buffer + Py_CLEAR(data[idx1 + 2*idx2]) res = buf[idx1, idx2] # takes None buf[idx1, idx2] = obj return res diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx index 06bdf8673..24af61e17 100644 --- a/tests/memoryview/memslice.pyx +++ b/tests/memoryview/memslice.pyx @@ -5,7 +5,7 @@ from __future__ import 
unicode_literals from cpython.object cimport PyObject -from cpython.ref cimport Py_INCREF, Py_DECREF +from cpython.ref cimport Py_INCREF, Py_DECREF, Py_CLEAR cimport cython from cython cimport view @@ -1130,6 +1130,49 @@ def assign_temporary_to_object(object[:] buf): """ buf[1] = {3-2: 2+(2*4)-2} +@testcase +def check_object_nulled_1d(object[:] buf, int idx, obj): + """ + See comments on printbuf_object above. + + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a]) + >>> check_object_nulled_1d(A, 0, a) + >>> check_object_nulled_1d(A, 1, a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,)) + >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride + >>> get_refcount(a) == rc1 + True + """ + cdef ObjectMockBuffer omb = buf.base + cdef PyObject **data = (omb.buffer) + Py_CLEAR(data[idx]) + res = buf[idx] # takes None + buf[idx] = obj + return res + +@testcase +def check_object_nulled_2d(object[:, ::1] buf, int idx1, int idx2, obj): + """ + See comments on printbuf_object above. 
+ + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2)) + >>> check_object_nulled_2d(A, 0, 0, a) + >>> check_object_nulled_2d(A, 1, 1, a) + >>> get_refcount(a) == rc1 + True + """ + cdef ObjectMockBuffer omb = buf.base + cdef PyObject **data = (omb.buffer) + Py_CLEAR(data[idx1 + 2*idx2]) + res = buf[idx1, idx2] # takes None + buf[idx1, idx2] = obj + return res + + # # Test __cythonbufferdefaults__ # -- cgit v1.2.1 From 796fd06da1fa1d5481ce43a6b6c901bc87f0ce9a Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 6 Jul 2022 20:49:32 +0100 Subject: Fix mistake in docs error return values (GH-4876) Fixes https://github.com/cython/cython/issues/4875 --- docs/src/userguide/language_basics.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/userguide/language_basics.rst b/docs/src/userguide/language_basics.rst index 593542eae..7d056bdfb 100644 --- a/docs/src/userguide/language_basics.rst +++ b/docs/src/userguide/language_basics.rst @@ -652,7 +652,7 @@ through defined error return values. For functions that return a Python object ``NULL`` pointer, so any function returning a Python object has a well-defined error return value. -While this is always the case for C functions, functions +While this is always the case for Python functions, functions defined as C functions or ``cpdef``/``@ccall`` functions can return arbitrary C types, which do not have such a well-defined error return value. 
Thus, if an exception is detected in such a function, a warning message is printed, -- cgit v1.2.1 From a44bbd363029aa9ba16fefcb485c68162f8ab663 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Thu, 7 Jul 2022 14:42:55 +0200 Subject: Docs: migrate extension_types.rst to pure python (GH-4516) --- docs/examples/userguide/extension_types/cheesy.py | 36 ++ docs/examples/userguide/extension_types/cheesy.pyx | 36 ++ .../userguide/extension_types/dataclass.py | 21 + .../userguide/extension_types/dataclass.pyx | 1 + .../userguide/extension_types/dict_animal.py | 12 + .../userguide/extension_types/dict_animal.pyx | 1 + .../userguide/extension_types/extendable_animal.py | 15 + .../extension_types/extendable_animal.pyx | 3 +- .../userguide/extension_types/owned_pointer.py | 17 + .../userguide/extension_types/owned_pointer.pyx | 17 + docs/examples/userguide/extension_types/penguin.py | 14 + .../examples/userguide/extension_types/penguin.pyx | 14 + .../examples/userguide/extension_types/penguin2.py | 12 + .../userguide/extension_types/penguin2.pyx | 12 + docs/examples/userguide/extension_types/pets.py | 22 + docs/examples/userguide/extension_types/pets.pyx | 22 + .../userguide/extension_types/python_access.py | 7 + .../userguide/extension_types/python_access.pyx | 4 + .../userguide/extension_types/shrubbery.py | 2 - .../userguide/extension_types/shrubbery.pyx | 2 - .../userguide/extension_types/shrubbery_2.py | 10 + .../userguide/extension_types/shrubbery_2.pyx | 2 + .../userguide/extension_types/widen_shrubbery.py | 6 + .../userguide/extension_types/widen_shrubbery.pyx | 2 + .../userguide/extension_types/wrapper_class.py | 65 ++ .../userguide/extension_types/wrapper_class.pyx | 65 ++ docs/src/userguide/extension_types.rst | 671 ++++++++++++++------- 27 files changed, 861 insertions(+), 230 deletions(-) create mode 100644 docs/examples/userguide/extension_types/cheesy.py create mode 100644 docs/examples/userguide/extension_types/cheesy.pyx create mode 100644 
docs/examples/userguide/extension_types/dataclass.py create mode 100644 docs/examples/userguide/extension_types/dict_animal.py create mode 100644 docs/examples/userguide/extension_types/extendable_animal.py create mode 100644 docs/examples/userguide/extension_types/owned_pointer.py create mode 100644 docs/examples/userguide/extension_types/owned_pointer.pyx create mode 100644 docs/examples/userguide/extension_types/penguin.py create mode 100644 docs/examples/userguide/extension_types/penguin.pyx create mode 100644 docs/examples/userguide/extension_types/penguin2.py create mode 100644 docs/examples/userguide/extension_types/penguin2.pyx create mode 100644 docs/examples/userguide/extension_types/pets.py create mode 100644 docs/examples/userguide/extension_types/pets.pyx create mode 100644 docs/examples/userguide/extension_types/python_access.py create mode 100644 docs/examples/userguide/extension_types/shrubbery_2.py create mode 100644 docs/examples/userguide/extension_types/widen_shrubbery.py create mode 100644 docs/examples/userguide/extension_types/wrapper_class.py create mode 100644 docs/examples/userguide/extension_types/wrapper_class.pyx diff --git a/docs/examples/userguide/extension_types/cheesy.py b/docs/examples/userguide/extension_types/cheesy.py new file mode 100644 index 000000000..0995c3993 --- /dev/null +++ b/docs/examples/userguide/extension_types/cheesy.py @@ -0,0 +1,36 @@ +import cython + +@cython.cclass +class CheeseShop: + + cheeses: object + + def __cinit__(self): + self.cheeses = [] + + @property + def cheese(self): + return "We don't have: %s" % self.cheeses + + @cheese.setter + def cheese(self, value): + self.cheeses.append(value) + + @cheese.deleter + def cheese(self): + del self.cheeses[:] + +# Test input +from cheesy import CheeseShop + +shop = CheeseShop() +print(shop.cheese) + +shop.cheese = "camembert" +print(shop.cheese) + +shop.cheese = "cheddar" +print(shop.cheese) + +del shop.cheese +print(shop.cheese) diff --git 
a/docs/examples/userguide/extension_types/cheesy.pyx b/docs/examples/userguide/extension_types/cheesy.pyx new file mode 100644 index 000000000..2859d848f --- /dev/null +++ b/docs/examples/userguide/extension_types/cheesy.pyx @@ -0,0 +1,36 @@ + + + +cdef class CheeseShop: + + cdef object cheeses + + def __cinit__(self): + self.cheeses = [] + + @property + def cheese(self): + return "We don't have: %s" % self.cheeses + + @cheese.setter + def cheese(self, value): + self.cheeses.append(value) + + @cheese.deleter + def cheese(self): + del self.cheeses[:] + +# Test input +from cheesy import CheeseShop + +shop = CheeseShop() +print(shop.cheese) + +shop.cheese = "camembert" +print(shop.cheese) + +shop.cheese = "cheddar" +print(shop.cheese) + +del shop.cheese +print(shop.cheese) diff --git a/docs/examples/userguide/extension_types/dataclass.py b/docs/examples/userguide/extension_types/dataclass.py new file mode 100644 index 000000000..d8ed68666 --- /dev/null +++ b/docs/examples/userguide/extension_types/dataclass.py @@ -0,0 +1,21 @@ +import cython +try: + import typing + import dataclasses +except ImportError: + pass # The modules don't actually have to exists for Cython to use them as annotations + +@cython.dataclasses.dataclass +@cython.cclass +class MyDataclass: + # fields can be declared using annotations + a: cython.int = 0 + b: double = cython.dataclasses.field(default_factory = lambda: 10, repr=False) + + + c: str = 'hello' + + + # typing.InitVar and typing.ClassVar also work + d: dataclasses.InitVar[double] = 5 + e: typing.ClassVar[list] = [] diff --git a/docs/examples/userguide/extension_types/dataclass.pyx b/docs/examples/userguide/extension_types/dataclass.pyx index 0529890ba..56666537d 100644 --- a/docs/examples/userguide/extension_types/dataclass.pyx +++ b/docs/examples/userguide/extension_types/dataclass.pyx @@ -5,6 +5,7 @@ try: except ImportError: pass # The modules don't actually have to exists for Cython to use them as annotations + 
@cython.dataclasses.dataclass cdef class MyDataclass: # fields can be declared using annotations diff --git a/docs/examples/userguide/extension_types/dict_animal.py b/docs/examples/userguide/extension_types/dict_animal.py new file mode 100644 index 000000000..a36dd3f89 --- /dev/null +++ b/docs/examples/userguide/extension_types/dict_animal.py @@ -0,0 +1,12 @@ +@cython.cclass +class Animal: + + number_of_legs: cython.int + __dict__: dict + + def __cinit__(self, number_of_legs: cython.int): + self.number_of_legs = number_of_legs + + +dog = Animal(4) +dog.has_tail = True diff --git a/docs/examples/userguide/extension_types/dict_animal.pyx b/docs/examples/userguide/extension_types/dict_animal.pyx index 1aa0ccc11..ec8cf6f9a 100644 --- a/docs/examples/userguide/extension_types/dict_animal.pyx +++ b/docs/examples/userguide/extension_types/dict_animal.pyx @@ -1,3 +1,4 @@ + cdef class Animal: cdef int number_of_legs diff --git a/docs/examples/userguide/extension_types/extendable_animal.py b/docs/examples/userguide/extension_types/extendable_animal.py new file mode 100644 index 000000000..2eef69460 --- /dev/null +++ b/docs/examples/userguide/extension_types/extendable_animal.py @@ -0,0 +1,15 @@ +@cython.cclass +class Animal: + + number_of_legs: cython.int + + def __cinit__(self, number_of_legs: cython.int): + self.number_of_legs = number_of_legs + + +class ExtendableAnimal(Animal): # Note that we use class, not cdef class + pass + + +dog = ExtendableAnimal(4) +dog.has_tail = True diff --git a/docs/examples/userguide/extension_types/extendable_animal.pyx b/docs/examples/userguide/extension_types/extendable_animal.pyx index 701a93148..417760efd 100644 --- a/docs/examples/userguide/extension_types/extendable_animal.pyx +++ b/docs/examples/userguide/extension_types/extendable_animal.pyx @@ -1,3 +1,4 @@ + cdef class Animal: cdef int number_of_legs @@ -11,4 +12,4 @@ class ExtendableAnimal(Animal): # Note that we use class, not cdef class dog = ExtendableAnimal(4) -dog.has_tail = 
True \ No newline at end of file +dog.has_tail = True diff --git a/docs/examples/userguide/extension_types/owned_pointer.py b/docs/examples/userguide/extension_types/owned_pointer.py new file mode 100644 index 000000000..1c235a883 --- /dev/null +++ b/docs/examples/userguide/extension_types/owned_pointer.py @@ -0,0 +1,17 @@ +import cython +from cython.cimports.libc.stdlib import free + +@cython.cclass +class OwnedPointer: + ptr: cython.pointer(cython.void) + + def __dealloc__(self): + if self.ptr is not cython.NULL: + free(self.ptr) + + @staticmethod + @cython.cfunc + def create(ptr: cython.pointer(cython.void)): + p = OwnedPointer() + p.ptr = ptr + return p diff --git a/docs/examples/userguide/extension_types/owned_pointer.pyx b/docs/examples/userguide/extension_types/owned_pointer.pyx new file mode 100644 index 000000000..98b61d91c --- /dev/null +++ b/docs/examples/userguide/extension_types/owned_pointer.pyx @@ -0,0 +1,17 @@ + +from libc.stdlib cimport free + + +cdef class OwnedPointer: + cdef void* ptr + + def __dealloc__(self): + if self.ptr is not NULL: + free(self.ptr) + + + @staticmethod + cdef create(void* ptr): + p = OwnedPointer() + p.ptr = ptr + return p diff --git a/docs/examples/userguide/extension_types/penguin.py b/docs/examples/userguide/extension_types/penguin.py new file mode 100644 index 000000000..6db8eba16 --- /dev/null +++ b/docs/examples/userguide/extension_types/penguin.py @@ -0,0 +1,14 @@ +import cython + +@cython.cclass +class Penguin: + food: object + + def __cinit__(self, food): + self.food = food + + def __init__(self, food): + print("eating!") + +normal_penguin = Penguin('fish') +fast_penguin = Penguin.__new__(Penguin, 'wheat') # note: not calling __init__() ! 
diff --git a/docs/examples/userguide/extension_types/penguin.pyx b/docs/examples/userguide/extension_types/penguin.pyx new file mode 100644 index 000000000..b890c9ffd --- /dev/null +++ b/docs/examples/userguide/extension_types/penguin.pyx @@ -0,0 +1,14 @@ + + + +cdef class Penguin: + cdef object food + + def __cinit__(self, food): + self.food = food + + def __init__(self, food): + print("eating!") + +normal_penguin = Penguin('fish') +fast_penguin = Penguin.__new__(Penguin, 'wheat') # note: not calling __init__() ! diff --git a/docs/examples/userguide/extension_types/penguin2.py b/docs/examples/userguide/extension_types/penguin2.py new file mode 100644 index 000000000..063563d16 --- /dev/null +++ b/docs/examples/userguide/extension_types/penguin2.py @@ -0,0 +1,12 @@ +import cython + +@cython.freelist(8) +@cython.cclass +class Penguin: + food: object + def __cinit__(self, food): + self.food = food + +penguin = Penguin('fish 1') +penguin = None +penguin = Penguin('fish 2') # does not need to allocate memory! diff --git a/docs/examples/userguide/extension_types/penguin2.pyx b/docs/examples/userguide/extension_types/penguin2.pyx new file mode 100644 index 000000000..726aeef8e --- /dev/null +++ b/docs/examples/userguide/extension_types/penguin2.pyx @@ -0,0 +1,12 @@ +cimport cython + + +@cython.freelist(8) +cdef class Penguin: + cdef object food + def __cinit__(self, food): + self.food = food + +penguin = Penguin('fish 1') +penguin = None +penguin = Penguin('fish 2') # does not need to allocate memory! 
diff --git a/docs/examples/userguide/extension_types/pets.py b/docs/examples/userguide/extension_types/pets.py new file mode 100644 index 000000000..fc6497cb0 --- /dev/null +++ b/docs/examples/userguide/extension_types/pets.py @@ -0,0 +1,22 @@ +import cython + +@cython.cclass +class Parrot: + + @cython.cfunc + def describe(self) -> cython.void: + print("This parrot is resting.") + +@cython.cclass +class Norwegian(Parrot): + + @cython.cfunc + def describe(self) -> cython.void: + Parrot.describe(self) + print("Lovely plumage!") + +cython.declare(p1=Parrot, p2=Parrot) +p1 = Parrot() +p2 = Norwegian() +print("p2:") +p2.describe() diff --git a/docs/examples/userguide/extension_types/pets.pyx b/docs/examples/userguide/extension_types/pets.pyx new file mode 100644 index 000000000..bb06e059d --- /dev/null +++ b/docs/examples/userguide/extension_types/pets.pyx @@ -0,0 +1,22 @@ + + +cdef class Parrot: + + + + cdef void describe(self): + print("This parrot is resting.") + + +cdef class Norwegian(Parrot): + + + cdef void describe(self): + Parrot.describe(self) + print("Lovely plumage!") + +cdef Parrot p1, p2 +p1 = Parrot() +p2 = Norwegian() +print("p2:") +p2.describe() diff --git a/docs/examples/userguide/extension_types/python_access.py b/docs/examples/userguide/extension_types/python_access.py new file mode 100644 index 000000000..27478f50c --- /dev/null +++ b/docs/examples/userguide/extension_types/python_access.py @@ -0,0 +1,7 @@ +import cython + +@cython.cclass +class Shrubbery: + width = cython.declare(cython.int, visibility='public') + height = cython.declare(cython.int, visibility='public') + depth = cython.declare(cython.float, visibility='readonly') diff --git a/docs/examples/userguide/extension_types/python_access.pyx b/docs/examples/userguide/extension_types/python_access.pyx index 6d5225ec0..db11de63c 100644 --- a/docs/examples/userguide/extension_types/python_access.pyx +++ b/docs/examples/userguide/extension_types/python_access.pyx @@ -1,3 +1,7 @@ + + + cdef 
class Shrubbery: cdef public int width, height + cdef readonly float depth diff --git a/docs/examples/userguide/extension_types/shrubbery.py b/docs/examples/userguide/extension_types/shrubbery.py index 075664527..0e624a1d2 100644 --- a/docs/examples/userguide/extension_types/shrubbery.py +++ b/docs/examples/userguide/extension_types/shrubbery.py @@ -1,5 +1,3 @@ -from __future__ import print_function - @cython.cclass class Shrubbery: width: cython.int diff --git a/docs/examples/userguide/extension_types/shrubbery.pyx b/docs/examples/userguide/extension_types/shrubbery.pyx index b74dfbd1b..8c4e58776 100644 --- a/docs/examples/userguide/extension_types/shrubbery.pyx +++ b/docs/examples/userguide/extension_types/shrubbery.pyx @@ -1,6 +1,4 @@ from __future__ import print_function - - cdef class Shrubbery: cdef int width cdef int height diff --git a/docs/examples/userguide/extension_types/shrubbery_2.py b/docs/examples/userguide/extension_types/shrubbery_2.py new file mode 100644 index 000000000..d6b722500 --- /dev/null +++ b/docs/examples/userguide/extension_types/shrubbery_2.py @@ -0,0 +1,10 @@ +import cython +from cython.cimports.my_module import Shrubbery + +@cython.cfunc +def another_shrubbery(sh1: Shrubbery) -> Shrubbery: + sh2: Shrubbery + sh2 = Shrubbery() + sh2.width = sh1.width + sh2.height = sh1.height + return sh2 diff --git a/docs/examples/userguide/extension_types/shrubbery_2.pyx b/docs/examples/userguide/extension_types/shrubbery_2.pyx index d05d28243..4a7782735 100644 --- a/docs/examples/userguide/extension_types/shrubbery_2.pyx +++ b/docs/examples/userguide/extension_types/shrubbery_2.pyx @@ -1,5 +1,7 @@ + from my_module cimport Shrubbery + cdef Shrubbery another_shrubbery(Shrubbery sh1): cdef Shrubbery sh2 sh2 = Shrubbery() diff --git a/docs/examples/userguide/extension_types/widen_shrubbery.py b/docs/examples/userguide/extension_types/widen_shrubbery.py new file mode 100644 index 000000000..f69f4dc96 --- /dev/null +++ 
b/docs/examples/userguide/extension_types/widen_shrubbery.py @@ -0,0 +1,6 @@ +import cython +from cython.cimports.my_module import Shrubbery + +@cython.cfunc +def widen_shrubbery(sh: Shrubbery, extra_width): + sh.width = sh.width + extra_width diff --git a/docs/examples/userguide/extension_types/widen_shrubbery.pyx b/docs/examples/userguide/extension_types/widen_shrubbery.pyx index a312fbfd9..c6f58f00c 100644 --- a/docs/examples/userguide/extension_types/widen_shrubbery.pyx +++ b/docs/examples/userguide/extension_types/widen_shrubbery.pyx @@ -1,4 +1,6 @@ + from my_module cimport Shrubbery + cdef widen_shrubbery(Shrubbery sh, extra_width): sh.width = sh.width + extra_width diff --git a/docs/examples/userguide/extension_types/wrapper_class.py b/docs/examples/userguide/extension_types/wrapper_class.py new file mode 100644 index 000000000..b625ffebd --- /dev/null +++ b/docs/examples/userguide/extension_types/wrapper_class.py @@ -0,0 +1,65 @@ +import cython +from cython.cimports.libc.stdlib import malloc, free + +# Example C struct +my_c_struct = cython.struct( + a = cython.int, + b = cython.int, +) + +@cython.cclass +class WrapperClass: + """A wrapper class for a C/C++ data structure""" + _ptr: cython.pointer(my_c_struct) + ptr_owner: cython.bint + + def __cinit__(self): + self.ptr_owner = False + + def __dealloc__(self): + # De-allocate if not null and flag is set + if self._ptr is not cython.NULL and self.ptr_owner is True: + free(self._ptr) + self._ptr = cython.NULL + + def __init__(self): + # Prevent accidental instantiation from normal Python code + # since we cannot pass a struct pointer into a Python constructor. 
+ raise TypeError("This class cannot be instantiated directly.") + + # Extension class properties + @property + def a(self): + return self._ptr.a if self._ptr is not cython.NULL else None + + @property + def b(self): + return self._ptr.b if self._ptr is not cython.NULL else None + + @staticmethod + @cython.cfunc + def from_ptr(_ptr: cython.pointer(my_c_struct), owner: cython.bint=False) -> WrapperClass: + """Factory function to create WrapperClass objects from + given my_c_struct pointer. + + Setting ``owner`` flag to ``True`` causes + the extension type to ``free`` the structure pointed to by ``_ptr`` + when the wrapper object is deallocated.""" + # Fast call to __new__() that bypasses the __init__() constructor. + wrapper: WrapperClass = WrapperClass.__new__(WrapperClass) + wrapper._ptr = _ptr + wrapper.ptr_owner = owner + return wrapper + + @staticmethod + @cython.cfunc + def new_struct() -> WrapperClass: + """Factory function to create WrapperClass objects with + newly allocated my_c_struct""" + _ptr: cython.pointer(my_c_struct) = cython.cast( + cython.pointer(my_c_struct), malloc(cython.sizeof(my_c_struct))) + if _ptr is cython.NULL: + raise MemoryError + _ptr.a = 0 + _ptr.b = 0 + return WrapperClass.from_ptr(_ptr, owner=True) diff --git a/docs/examples/userguide/extension_types/wrapper_class.pyx b/docs/examples/userguide/extension_types/wrapper_class.pyx new file mode 100644 index 000000000..e2a0c3ff2 --- /dev/null +++ b/docs/examples/userguide/extension_types/wrapper_class.pyx @@ -0,0 +1,65 @@ + +from libc.stdlib cimport malloc, free + +# Example C struct +ctypedef struct my_c_struct: + int a + int b + + + +cdef class WrapperClass: + """A wrapper class for a C/C++ data structure""" + cdef my_c_struct *_ptr + cdef bint ptr_owner + + def __cinit__(self): + self.ptr_owner = False + + def __dealloc__(self): + # De-allocate if not null and flag is set + if self._ptr is not NULL and self.ptr_owner is True: + free(self._ptr) + self._ptr = NULL + + def 
__init__(self): + # Prevent accidental instantiation from normal Python code + # since we cannot pass a struct pointer into a Python constructor. + raise TypeError("This class cannot be instantiated directly.") + + # Extension class properties + @property + def a(self): + return self._ptr.a if self._ptr is not NULL else None + + @property + def b(self): + return self._ptr.b if self._ptr is not NULL else None + + + @staticmethod + cdef WrapperClass from_ptr(my_c_struct *_ptr, bint owner=False): + """Factory function to create WrapperClass objects from + given my_c_struct pointer. + + Setting ``owner`` flag to ``True`` causes + the extension type to ``free`` the structure pointed to by ``_ptr`` + when the wrapper object is deallocated.""" + # Fast call to __new__() that bypasses the __init__() constructor. + cdef WrapperClass wrapper = WrapperClass.__new__(WrapperClass) + wrapper._ptr = _ptr + wrapper.ptr_owner = owner + return wrapper + + + @staticmethod + cdef WrapperClass new_struct(): + """Factory function to create WrapperClass objects with + newly allocated my_c_struct""" + cdef my_c_struct *_ptr = malloc(sizeof(my_c_struct)) + + if _ptr is NULL: + raise MemoryError + _ptr.a = 0 + _ptr.b = 0 + return WrapperClass.from_ptr(_ptr, owner=True) diff --git a/docs/src/userguide/extension_types.rst b/docs/src/userguide/extension_types.rst index d058df6c2..b2690dc49 100644 --- a/docs/src/userguide/extension_types.rst +++ b/docs/src/userguide/extension_types.rst @@ -9,20 +9,56 @@ Extension Types Introduction ============== +.. include:: + ../two-syntax-variants-used + As well as creating normal user-defined classes with the Python class statement, Cython also lets you create new built-in Python types, known as :term:`extension types`. You define an extension type using the :keyword:`cdef` class -statement. Here's an example: +statement or decorating the class with the ``@cclass`` decorator. Here's an example: + +.. tabs:: + + .. group-tab:: Pure Python + + .. 
literalinclude:: ../../examples/userguide/extension_types/shrubbery.py + + .. group-tab:: Cython -.. literalinclude:: ../../examples/userguide/extension_types/shrubbery.pyx + .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.pyx As you can see, a Cython extension type definition looks a lot like a Python -class definition. Within it, you use the def statement to define methods that +class definition. Within it, you use the :keyword:`def` statement to define methods that can be called from Python code. You can even define many of the special methods such as :meth:`__init__` as you would in Python. -The main difference is that you can use the :keyword:`cdef` statement to define -attributes. The attributes may be Python objects (either generic or of a +The main difference is that you can define attributes using + +* the :keyword:`cdef` statement, +* the :func:`cython.declare()` function or +* the annotation of an attribute name. + +.. tabs:: + + .. group-tab:: Pure Python + + .. code-block:: python + + @cython.cclass + class Shrubbery: + width = declare(cython.int) + height: cython.int + + .. group-tab:: Cython + + .. code-block:: cython + + cdef class Shrubbery: + + cdef int width + cdef int height + +The attributes may be Python objects (either generic or of a particular extension type), or they may be of any C data type. So you can use extension types to wrap arbitrary C data structures and provide a Python-like interface to them. @@ -50,7 +86,15 @@ not Python access, which means that they are not accessible from Python code. To make them accessible from Python code, you need to declare them as :keyword:`public` or :keyword:`readonly`. For example: -.. literalinclude:: ../../examples/userguide/extension_types/python_access.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/python_access.py + + .. group-tab:: Cython + + .. 
literalinclude:: ../../examples/userguide/extension_types/python_access.pyx makes the width and height attributes readable and writable from Python code, and the depth attribute readable but not writable. @@ -74,15 +118,32 @@ Dynamic Attributes It is not possible to add attributes to an extension type at runtime by default. You have two ways of avoiding this limitation, both add an overhead when -a method is called from Python code. Especially when calling ``cpdef`` methods. +a method is called from Python code. Especially when calling hybrid methods declared +with :keyword:`cpdef` in .pyx files or with the ``@ccall`` decorator. + +The first approach is to create a Python subclass: -The first approach is to create a Python subclass.: +.. tabs:: -.. literalinclude:: ../../examples/userguide/extension_types/extendable_animal.pyx + .. group-tab:: Pure Python -Declaring a ``__dict__`` attribute is the second way of enabling dynamic attributes.: + .. literalinclude:: ../../examples/userguide/extension_types/extendable_animal.py -.. literalinclude:: ../../examples/userguide/extension_types/dict_animal.pyx + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/extension_types/extendable_animal.pyx + +Declaring a ``__dict__`` attribute is the second way of enabling dynamic attributes: + +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/dict_animal.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/extension_types/dict_animal.pyx Type declarations =================== @@ -93,10 +154,24 @@ generic Python object. It knows this already in the case of the ``self`` parameter of the methods of that type, but in other cases you will have to use a type declaration. -For example, in the following function:: +For example, in the following function: - cdef widen_shrubbery(sh, extra_width): # BAD - sh.width = sh.width + extra_width +.. tabs:: + + .. group-tab:: Pure Python + + .. 
code-block:: python + + @cython.cfunc + def widen_shrubbery(sh, extra_width): # BAD + sh.width = sh.width + extra_width + + .. group-tab:: Cython + + .. code-block:: cython + + cdef widen_shrubbery(sh, extra_width): # BAD + sh.width = sh.width + extra_width because the ``sh`` parameter hasn't been given a type, the width attribute will be accessed by a Python attribute lookup. If the attribute has been @@ -107,18 +182,35 @@ will be very inefficient. If the attribute is private, it will not work at all The solution is to declare ``sh`` as being of type :class:`Shrubbery`, as follows: -.. literalinclude:: ../../examples/userguide/extension_types/widen_shrubbery.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/widen_shrubbery.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/extension_types/widen_shrubbery.pyx Now the Cython compiler knows that ``sh`` has a C attribute called :attr:`width` and will generate code to access it directly and efficiently. The same consideration applies to local variables, for example: -.. literalinclude:: ../../examples/userguide/extension_types/shrubbery_2.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/shrubbery_2.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/extension_types/shrubbery_2.pyx .. note:: - We here ``cimport`` the class :class:`Shrubbery`, and this is necessary - to declare the type at compile time. To be able to ``cimport`` an extension type, + Here, we *cimport* the class :class:`Shrubbery` (using the :keyword:`cimport` statement + or importing from special ``cython.cimports`` package), and this is necessary + to declare the type at compile time. To be able to cimport an extension type, we split the class definition into two parts, one in a definition file and the other in the corresponding implementation file. 
You should read :ref:`sharing_extension_types` to learn to do that.
@@ -128,24 +220,61 @@ Type Testing and Casting
------------------------
 
 Suppose I have a method :meth:`quest` which returns an object of type :class:`Shrubbery`.
-To access it's width I could write::
+To access its width I could write:
+
+.. tabs::
+
+    .. group-tab:: Pure Python
+
+        .. code-block:: python
 
-    cdef Shrubbery sh = quest()
-    print(sh.width)
+            sh: Shrubbery = quest()
+            print(sh.width)
+
+    .. group-tab:: Cython
+
+        .. code-block:: cython
+
+            cdef Shrubbery sh = quest()
+            print(sh.width)
 
 which requires the use of a local variable and performs a type test on assignment.
 If you *know* the return value of :meth:`quest` will be of type :class:`Shrubbery`
-you can use a cast to write::
+you can use a cast to write:
+
+.. tabs::
+
+    .. group-tab:: Pure Python
 
-    print( (quest()).width )
+        .. code-block:: python
+
+            print( cython.cast(Shrubbery, quest()).width )
+
+    .. group-tab:: Cython
+
+        .. code-block:: cython
+
+            print( (quest()).width )
 
 This may be dangerous if :meth:`quest()` is not actually a :class:`Shrubbery`, as it
 will try to access width as a C struct member which may not exist. At the C level,
 rather than raising an :class:`AttributeError`, either a nonsensical result will be
 returned (interpreting whatever data is at that address as an int) or a segfault
-may result from trying to access invalid memory. Instead, one can write::
+may result from trying to access invalid memory. Instead, one can write:
+
+.. tabs::
+
+    .. group-tab:: Pure Python
+
+        .. code-block:: python
+
+            print( cython.cast(Shrubbery, quest(), typecheck=True).width )
+
+    .. group-tab:: Cython
+
+        .. code-block:: cython
 
-    print( (quest()).width )
+            print( (quest()).width )
 
 which performs a type check (possibly raising a :class:`TypeError`) before
 making the cast and allowing the code to proceed. 
@@ -155,14 +284,18 @@ For known builtin or extension types, Cython translates these into a fast and safe type check that ignores changes to the object's ``__class__`` attribute etc., so that after a successful :meth:`isinstance` test, code can rely on the expected C structure of the -extension type and its :keyword:`cdef` attributes and methods. +extension type and its C-level attributes (stored in the object’s C struct) and +:keyword:`cdef`/``@cfunc`` methods. .. _extension_types_and_none: Extension types and None ========================= -When you declare a parameter or C variable as being of an extension type, +Cython handles ``None`` values differently in C-like type declarations and when Python annotations are used. + +In :keyword:`cdef` declarations and C-like function argument declarations (``func(list x)``), +when you declare an argument or C variable as having an extension or Python builtin type, Cython will allow it to take on the value ``None`` as well as values of its declared type. This is analogous to the way a C pointer can take on the value ``NULL``, and you need to exercise the same caution because of it. There is no @@ -172,24 +305,24 @@ of an extension type (as in the widen_shrubbery function above), it's up to you to make sure the reference you're using is not ``None`` -- in the interests of efficiency, Cython does not check this. -You need to be particularly careful when exposing Python functions which take -extension types as arguments. If we wanted to make :func:`widen_shrubbery` a -Python function, for example, if we simply wrote:: +With the C-like declaration syntax, you need to be particularly careful when +exposing Python functions which take extension types as arguments:: def widen_shrubbery(Shrubbery sh, extra_width): # This is sh.width = sh.width + extra_width # dangerous! -then users of our module could crash it by passing ``None`` for the ``sh`` +The users of our module could crash it by passing ``None`` for the ``sh`` parameter. 
-One way to fix this would be::
+As in Python, whenever it is unclear whether a variable can be ``None``,
+but the code requires a non-None value, an explicit check can help::
 
     def widen_shrubbery(Shrubbery sh, extra_width):
         if sh is None:
             raise TypeError
         sh.width = sh.width + extra_width
 
-but since this is anticipated to be such a frequent requirement, Cython
+but since this is anticipated to be such a frequent requirement, the Cython language
 provides a more convenient way. Parameters of a Python function declared as an
 extension type can have a ``not None`` clause::
@@ -199,18 +332,41 @@ extension type can have a ``not None`` clause::
 Now the function will automatically check that ``sh`` is ``not None`` along
 with checking that it has the right type.
 
+When annotations are used, the behaviour follows the Python typing semantics of
+`PEP-484 `_ instead.
+The value ``None`` is not allowed when a variable is annotated only with its plain type::
+
+    def widen_shrubbery(sh: Shrubbery, extra_width):  # TypeError is raised
+        sh.width = sh.width + extra_width             # when sh is None
+
+To also allow ``None``, ``typing.Optional[ ]`` must be used explicitly.
+For function arguments, this is also automatically allowed when they have a
+default argument of ``None``, e.g. ``func(x: list = None)`` does not require ``typing.Optional``::
+
+    import typing
+    def widen_shrubbery(sh: typing.Optional[Shrubbery], extra_width):
+        if sh is None:
+            # We want to raise a custom exception in case of a None value.
+            raise ValueError
+        sh.width = sh.width + extra_width
+
+The upside of using annotations here is that they are safe by default because
+you need to explicitly allow ``None`` values for them.
+
+
 .. note::
 
-    ``not None`` clause can only be used in Python functions (defined with
-    :keyword:`def`) and not C functions (defined with :keyword:`cdef`). 
If - you need to check whether a parameter to a C function is None, you will + The ``not None`` and ``typing.Optional`` can only be used in Python functions (defined with + :keyword:`def` and without ``@cython.cfunc`` decorator) and not C functions + (defined with :keyword:`cdef` or decorated using ``@cython.cfunc``). If + you need to check whether a parameter to a C function is ``None``, you will need to do it yourself. .. note:: Some more things: - * The self parameter of a method of an extension type is guaranteed never to + * The ``self`` parameter of a method of an extension type is guaranteed never to be ``None``. * When comparing a value with ``None``, keep in mind that, if ``x`` is a Python object, ``x is None`` and ``x is not None`` are very efficient because they @@ -232,23 +388,49 @@ extension types. Properties ============ -You can declare properties in an extension class using the same syntax as in ordinary Python code:: +You can declare properties in an extension class using the same syntax as in ordinary Python code: - cdef class Spam: +.. tabs:: - @property - def cheese(self): - # This is called when the property is read. - ... + .. group-tab:: Pure Python - @cheese.setter - def cheese(self, value): - # This is called when the property is written. - ... + .. code-block:: python + + @cython.cclass + class Spam: + @property + def cheese(self): + # This is called when the property is read. + ... + + @cheese.setter + def cheese(self, value): + # This is called when the property is written. + ... + + @cheese.deleter + def cheese(self): + # This is called when the property is deleted. + + .. group-tab:: Cython + + .. code-block:: cython + + cdef class Spam: - @cheese.deleter - def cheese(self): - # This is called when the property is deleted. + @property + def cheese(self): + # This is called when the property is read. + ... + + @cheese.setter + def cheese(self, value): + # This is called when the property is written. + ... 
+ + @cheese.deleter + def cheese(self): + # This is called when the property is deleted. There is also a special (deprecated) legacy syntax for defining properties in an extension class:: @@ -277,42 +459,17 @@ corresponding operation is attempted. Here's a complete example. It defines a property which adds to a list each time it is written to, returns the list when it is read, and empties the list -when it is deleted.:: - - # cheesy.pyx - cdef class CheeseShop: - - cdef object cheeses - - def __cinit__(self): - self.cheeses = [] +when it is deleted: - @property - def cheese(self): - return "We don't have: %s" % self.cheeses +.. tabs:: - @cheese.setter - def cheese(self, value): - self.cheeses.append(value) + .. group-tab:: Pure Python - @cheese.deleter - def cheese(self): - del self.cheeses[:] + .. literalinclude:: ../../examples/userguide/extension_types/cheesy.py - # Test input - from cheesy import CheeseShop + .. group-tab:: Cython - shop = CheeseShop() - print(shop.cheese) - - shop.cheese = "camembert" - print(shop.cheese) - - shop.cheese = "cheddar" - print(shop.cheese) - - del shop.cheese - print(shop.cheese) + .. literalinclude:: ../../examples/userguide/extension_types/cheesy.pyx .. code-block:: text @@ -328,20 +485,39 @@ Subclassing ============= If an extension type inherits from other types, the first base class must be -a built-in type or another extension type:: +a built-in type or another extension type: - cdef class Parrot: - ... +.. tabs:: + + .. group-tab:: Pure Python + + .. code-block:: python + + @cython.cclass + class Parrot: + ... + + @cython.cclass + class Norwegian(Parrot): + ... + + .. group-tab:: Cython + + .. code-block:: cython + + cdef class Parrot: + ... - cdef class Norwegian(Parrot): - ... + + cdef class Norwegian(Parrot): + ... A complete definition of the base type must be available to Cython, so if the base type is a built-in type, it must have been previously declared as an extern extension type. 
If the base type is defined in another Cython module, it must either be declared as an extern extension type or imported using the -:keyword:`cimport` statement. +:keyword:`cimport` statement or importing from the special ``cython.cimports`` package. Multiple inheritance is supported, however the second and subsequent base classes must be an ordinary Python class (not an extension type or a built-in @@ -354,13 +530,30 @@ must be compatible). There is a way to prevent extension types from being subtyped in Python. This is done via the ``final`` directive, -usually set on an extension type using a decorator:: +usually set on an extension type using a decorator: + +.. tabs:: + + .. group-tab:: Pure Python + + .. code-block:: python - cimport cython + import cython - @cython.final - cdef class Parrot: - def done(self): pass + @cython.final + @cython.cclass + class Parrot: + def done(self): pass + + .. group-tab:: Cython + + .. code-block:: cython + + cimport cython + + @cython.final + cdef class Parrot: + def done(self): pass Trying to create a Python subclass from this type will raise a :class:`TypeError` at runtime. Cython will also prevent subtyping a @@ -375,32 +568,25 @@ C methods ========= Extension types can have C methods as well as Python methods. Like C -functions, C methods are declared using :keyword:`cdef` or :keyword:`cpdef` instead of -:keyword:`def`. C methods are "virtual", and may be overridden in derived -extension types. In addition, :keyword:`cpdef` methods can even be overridden by python -methods when called as C method. This adds a little to their calling overhead -compared to a :keyword:`cdef` method:: +functions, C methods are declared using - # pets.pyx - cdef class Parrot: +* :keyword:`cdef` instead of :keyword:`def` or ``@cfunc`` decorator for *C methods*, or +* :keyword:`cpdef` instead of :keyword:`def` or ``@ccall`` decorator for *hybrid methods*. 
-    cdef void describe(self):
-        print("This parrot is resting.")
+C methods are "virtual", and may be overridden in derived
+extension types. In addition, :keyword:`cpdef`/``@ccall`` methods can even be overridden by Python
+methods when called as C method. This adds a little to their calling overhead
+compared to a :keyword:`cdef`/``@cfunc`` method:
+
+.. tabs::
 
-    cdef class Norwegian(Parrot):
+    .. group-tab:: Pure Python
 
-        cdef void describe(self):
-            Parrot.describe(self)
-            print("Lovely plumage!")
+        .. literalinclude:: ../../examples/userguide/extension_types/pets.py
 
+    .. group-tab:: Cython
 
-cdef Parrot p1, p2
-p1 = Parrot()
-p2 = Norwegian()
-print("p1:")
-p1.describe()
-print("p2:")
-p2.describe()
+        .. literalinclude:: ../../examples/userguide/extension_types/pets.pyx
 
 .. code-block:: text
@@ -416,22 +602,23 @@ method using the usual Python technique, i.e.::
 
     Parrot.describe(self)
 
-`cdef` methods can be declared static by using the @staticmethod decorator.
+:keyword:`cdef`/``@cfunc`` methods can be declared static by using the ``@staticmethod`` decorator.
 This can be especially useful for constructing classes that take non-Python
-compatible types.::
+compatible types:
+
+.. tabs::
+
+    .. group-tab:: Pure Python
+
+        .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.py
 
-    cdef class OwnedPointer:
-        cdef void* ptr
+    .. group-tab:: Cython
 
-    def __dealloc__(self):
-        if self.ptr is not NULL:
-            free(self.ptr)
+        .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.pyx
 
-    @staticmethod
-    cdef create(void* ptr):
-        p = OwnedPointer()
-        p.ptr = ptr
-        return p
+.. note::
+
+    Cython currently does not support decorating :keyword:`cdef`/``@ccall`` methods with ``@classmethod`` decorator.
 
 .. _forward_declaring_extension_types:
 
@@ -460,19 +647,17 @@ Fast instantiation
 
 Cython provides two ways to speed up the instantiation of extension types. 
The first one is a direct call to the ``__new__()`` special static method, as known from Python. For an extension type ``Penguin``, you could use -the following code:: +the following code: + +.. tabs:: - cdef class Penguin: - cdef object food + .. group-tab:: Pure Python - def __cinit__(self, food): - self.food = food + .. literalinclude:: ../../examples/userguide/extension_types/penguin.py - def __init__(self, food): - print("eating!") + .. group-tab:: Cython - normal_penguin = Penguin('fish') - fast_penguin = Penguin.__new__(Penguin, 'wheat') # note: not calling __init__() ! + .. literalinclude:: ../../examples/userguide/extension_types/penguin.pyx Note that the path through ``__new__()`` will *not* call the type's ``__init__()`` method (again, as known from Python). Thus, in the example @@ -486,19 +671,17 @@ the differences. The second performance improvement applies to types that are often created and deleted in a row, so that they can benefit from a freelist. Cython provides the decorator ``@cython.freelist(N)`` for this, which creates a -statically sized freelist of ``N`` instances for a given type. Example:: +statically sized freelist of ``N`` instances for a given type. Example: + +.. tabs:: + + .. group-tab:: Pure Python - cimport cython + .. literalinclude:: ../../examples/userguide/extension_types/penguin2.py - @cython.freelist(8) - cdef class Penguin: - cdef object food - def __cinit__(self, food): - self.food = food + .. group-tab:: Cython - penguin = Penguin('fish 1') - penguin = None - penguin = Penguin('fish 2') # does not need to allocate memory! + .. literalinclude:: ../../examples/userguide/extension_types/penguin2.pyx .. _existing-pointers-instantiation: @@ -509,63 +692,17 @@ It is quite common to want to instantiate an extension class from an existing (pointer to a) data structure, often as returned by external C/C++ functions. 
As extension classes can only accept Python objects as arguments in their -constructors, this necessitates the use of factory functions. For example, :: - - from libc.stdlib cimport malloc, free - - # Example C struct - ctypedef struct my_c_struct: - int a - int b - - - cdef class WrapperClass: - """A wrapper class for a C/C++ data structure""" - cdef my_c_struct *_ptr - cdef bint ptr_owner - - def __cinit__(self): - self.ptr_owner = False - - def __dealloc__(self): - # De-allocate if not null and flag is set - if self._ptr is not NULL and self.ptr_owner is True: - free(self._ptr) - self._ptr = NULL - - # Extension class properties - @property - def a(self): - return self._ptr.a if self._ptr is not NULL else None - - @property - def b(self): - return self._ptr.b if self._ptr is not NULL else None - - @staticmethod - cdef WrapperClass from_ptr(my_c_struct *_ptr, bint owner=False): - """Factory function to create WrapperClass objects from - given my_c_struct pointer. - - Setting ``owner`` flag to ``True`` causes - the extension type to ``free`` the structure pointed to by ``_ptr`` - when the wrapper object is deallocated.""" - # Call to __new__ bypasses __init__ constructor - cdef WrapperClass wrapper = WrapperClass.__new__(WrapperClass) - wrapper._ptr = _ptr - wrapper.ptr_owner = owner - return wrapper - - @staticmethod - cdef WrapperClass new_struct(): - """Factory function to create WrapperClass objects with - newly allocated my_c_struct""" - cdef my_c_struct *_ptr = malloc(sizeof(my_c_struct)) - if _ptr is NULL: - raise MemoryError - _ptr.a = 0 - _ptr.b = 0 - return WrapperClass.from_ptr(_ptr, owner=True) +constructors, this necessitates the use of factory functions or factory methods. For example: + +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/wrapper_class.py + + .. group-tab:: Cython + + .. 
literalinclude:: ../../examples/userguide/extension_types/wrapper_class.pyx To then create a ``WrapperClass`` object from an existing ``my_c_struct`` @@ -607,13 +744,30 @@ Making extension types weak-referenceable By default, extension types do not support having weak references made to them. You can enable weak referencing by declaring a C attribute of type -object called :attr:`__weakref__`. For example,:: +object called :attr:`__weakref__`. For example: + +.. tabs:: + + .. group-tab:: Pure Python + + .. code-block:: python + + @cython.cclass + class ExplodingAnimal: + """This animal will self-destruct when it is + no longer strongly referenced.""" + + __weakref__: object - cdef class ExplodingAnimal: - """This animal will self-destruct when it is - no longer strongly referenced.""" + .. group-tab:: Cython - cdef object __weakref__ + .. code-block:: cython + + cdef class ExplodingAnimal: + """This animal will self-destruct when it is + no longer strongly referenced.""" + + cdef object __weakref__ Controlling deallocation and garbage collection in CPython @@ -691,12 +845,28 @@ CPython invented a mechanism for this called the *trashcan*. It limits the recursion depth of deallocations by delaying some deallocations. By default, Cython extension types do not use the trashcan but it can be -enabled by setting the ``trashcan`` directive to ``True``. For example:: +enabled by setting the ``trashcan`` directive to ``True``. For example: + +.. tabs:: + + .. group-tab:: Pure Python + + .. code-block:: python - cimport cython - @cython.trashcan(True) - cdef class Object: - cdef dict __dict__ + import cython + @cython.trashcan(True) + @cython.cclass + class Object: + __dict__: dict + + .. group-tab:: Cython + + .. code-block:: cython + + cimport cython + @cython.trashcan(True) + cdef class Object: + cdef dict __dict__ Trashcan usage is inherited by subclasses (unless explicitly disabled by ``@cython.trashcan(False)``). 
@@ -720,15 +890,34 @@ have triggered a call to ``tp_clear`` to clear the object In that case, any object references have vanished when ``__dealloc__`` is called. Now your cleanup code lost access to the objects it has to clean up. To fix this, you can disable clearing instances of a specific class by using -the ``no_gc_clear`` directive:: +the ``no_gc_clear`` directive: + +.. tabs:: + + .. group-tab:: Pure Python - @cython.no_gc_clear - cdef class DBCursor: - cdef DBConnection conn - cdef DBAPI_Cursor *raw_cursor - # ... - def __dealloc__(self): - DBAPI_close_cursor(self.conn.raw_conn, self.raw_cursor) + .. code-block:: python + + @cython.no_gc_clear + @cython.cclass + class DBCursor: + conn: DBConnection + raw_cursor: cython.pointer(DBAPI_Cursor) + # ... + def __dealloc__(self): + DBAPI_close_cursor(self.conn.raw_conn, self.raw_cursor) + + .. group-tab:: Cython + + .. code-block:: cython + + @cython.no_gc_clear + cdef class DBCursor: + cdef DBConnection conn + cdef DBAPI_Cursor *raw_cursor + # ... + def __dealloc__(self): + DBAPI_close_cursor(self.conn.raw_conn, self.raw_cursor) This example tries to close a cursor via a database connection when the Python object is destroyed. The ``DBConnection`` object is kept alive by the reference @@ -748,12 +937,29 @@ but the compiler won't be able to prove this. This would be the case if the class can never reference itself, even indirectly. In that case, you can manually disable cycle collection by using the ``no_gc`` directive, but beware that doing so when in fact the extension type -can participate in cycles could cause memory leaks :: +can participate in cycles could cause memory leaks: + +.. tabs:: + + .. group-tab:: Pure Python + + .. code-block:: python + + @cython.no_gc + @cython.cclass + class UserInfo: + name: str + addresses: tuple + + .. group-tab:: Cython + + .. 
code-block:: cython - @cython.no_gc - cdef class UserInfo: - cdef str name - cdef tuple addresses + @cython.no_gc + cdef class UserInfo: + + cdef str name + cdef tuple addresses If you can be sure addresses will contain only references to strings, the above would be safe, and it may yield a significant speedup, depending on @@ -786,6 +992,13 @@ declaration makes an extension type defined in external C code available to a Cython module. A public extension type declaration makes an extension type defined in a Cython module available to external C code. +.. note:: + + Cython currently does not support Extension types declared as extern or public + in Pure Python mode. This is not considered an issue since public/extern extension + types are most commonly declared in `.pxd` files and not in `.py` files. + + .. _external_extension_types: External extension types @@ -802,7 +1015,7 @@ objects defined in the Python core or in a non-Cython extension module. :ref:`sharing-declarations`. Here is an example which will let you get at the C-level members of the -built-in complex object.:: +built-in complex object:: from __future__ import print_function @@ -1073,7 +1286,15 @@ can only be applied to extension types (types marked ``cdef`` or created with th ``cython.cclass`` decorator) and not to regular classes. If you need to define special properties on a field then use ``cython.dataclasses.field`` -.. literalinclude:: ../../examples/userguide/extension_types/dataclass.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/dataclass.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/extension_types/dataclass.pyx You may use C-level types such as structs, pointers, or C++ classes. 
However, you may find these types are not compatible with the auto-generated -- cgit v1.2.1 From 31d40c8c62acef9509675155fe5b5bb8e48dba5a Mon Sep 17 00:00:00 2001 From: scoder Date: Mon, 11 Jul 2022 07:45:17 +0200 Subject: Fix annotation type analysis for Python "typing" types (GH-4606) * Check for "Optional[ctype]" earlier because we need to make sure that "Optional[int]" etc. interprets "int" as (valid) Python int type and not (invalid) C int type. See https://github.com/cython/cython/issues/3883 * Fix typing assumptions in PEP 526 variable annotations test: in a Python type annotation, "int" means Python int and "float" means Python float, not the C types. * Use a context manager to make it explicit in annotation type analysis when C types are allowed, and when Python types are required or expected. * Generalise the concept of equivalent Python and C types for more efficient type inference: PyFloat/double, PyBool/bint, PyComplex/double complex. * Refactor analyse_type_annotation() to prepare the extraction of type modifiers (as opposed to special types). See discussion in https://github.com/cython/cython/pull/4606#issuecomment-1026658869 * Refactor handling of "typing.Optional", "dataclasses.InitVar" etc. annotations to move them into the declared Entry during type analysis and keep only the bare type in the type system. * Force ClassVar[...] types to be object types. * Add a warning when users define a ClassVar[] with a non-Python type. See https://github.com/cython/cython/pull/4606#discussion_r805170982 * Provide a helpful warning when users write plain C types in a non-C annotation context. * Only consider Python object item types from list/tuple as self.type in IndexNode since that will be the result of the index access. Coercion needs to happen externally, then based on the type inference. * Ignore Python annotation type "long" since it almost certainly does not refer to PyLong but to C long. Issue a warning to make users aware of it. 
* Fix PEP-526 test by working around incomplete type inference, but leave FIXME comments. --- Cython/Compiler/Builtin.py | 17 ++- Cython/Compiler/Dataclass.py | 13 +- Cython/Compiler/ExprNodes.py | 141 +++++++++++++-------- Cython/Compiler/Nodes.py | 131 +++++++++++++------ Cython/Compiler/PyrexTypes.py | 63 +++------ Cython/Compiler/Symtab.py | 91 +++++++++---- .../userguide/extension_types/dataclass.pyx | 2 +- tests/errors/dataclass_e1.pyx | 2 +- tests/errors/dataclass_e5.pyx | 21 +++ tests/errors/e_typing_errors.pyx | 59 +++++++++ tests/errors/e_typing_optional.py | 33 +++-- tests/run/annotation_typing.pyx | 62 +++++---- tests/run/cdef_class_dataclass.pyx | 16 +-- tests/run/cdef_setitem_T284.pyx | 4 +- tests/run/delete.pyx | 18 +++ tests/run/pep526_variable_annotations.py | 49 ++++--- tests/run/pep526_variable_annotations_cy.pyx | 4 +- tests/run/pure_cdef_class_dataclass.py | 4 +- 18 files changed, 477 insertions(+), 253 deletions(-) create mode 100644 tests/errors/dataclass_e5.pyx create mode 100644 tests/errors/e_typing_errors.pyx diff --git a/Cython/Compiler/Builtin.py b/Cython/Compiler/Builtin.py index 577c20775..46a4dbb5b 100644 --- a/Cython/Compiler/Builtin.py +++ b/Cython/Compiler/Builtin.py @@ -444,6 +444,16 @@ def init_builtins(): bool_type = builtin_scope.lookup('bool').type complex_type = builtin_scope.lookup('complex').type + # Set up type inference links between equivalent Python/C types + bool_type.equivalent_type = PyrexTypes.c_bint_type + PyrexTypes.c_bint_type.equivalent_type = bool_type + + float_type.equivalent_type = PyrexTypes.c_double_type + PyrexTypes.c_double_type.equivalent_type = float_type + + complex_type.equivalent_type = PyrexTypes.c_double_complex_type + PyrexTypes.c_double_complex_type.equivalent_type = complex_type + init_builtins() @@ -466,21 +476,20 @@ def get_known_standard_library_module_scope(module_name): ('Set', set_type), ('FrozenSet', frozenset_type), ]: - name = EncodedString(name) if name == "Tuple": indexed_type = 
PyrexTypes.PythonTupleTypeConstructor(EncodedString("typing."+name), tp) else: indexed_type = PyrexTypes.PythonTypeConstructor(EncodedString("typing."+name), tp) - entry = mod.declare_type(name, indexed_type, pos = None) + mod.declare_type(EncodedString(name), indexed_type, pos = None) for name in ['ClassVar', 'Optional']: indexed_type = PyrexTypes.SpecialPythonTypeConstructor(EncodedString("typing."+name)) - entry = mod.declare_type(name, indexed_type, pos = None) + mod.declare_type(name, indexed_type, pos = None) _known_module_scopes[module_name] = mod elif module_name == "dataclasses": mod = ModuleScope(module_name, None, None) indexed_type = PyrexTypes.SpecialPythonTypeConstructor(EncodedString("dataclasses.InitVar")) - entry = mod.declare_type(EncodedString("InitVar"), indexed_type, pos = None) + mod.declare_type(EncodedString("InitVar"), indexed_type, pos = None) _known_module_scopes[module_name] = mod return mod diff --git a/Cython/Compiler/Dataclass.py b/Cython/Compiler/Dataclass.py index 48c1888d6..0d0bb4768 100644 --- a/Cython/Compiler/Dataclass.py +++ b/Cython/Compiler/Dataclass.py @@ -154,12 +154,10 @@ def process_class_get_fields(node): for entry in var_entries: name = entry.name - is_initvar = (entry.type.python_type_constructor_name == "dataclasses.InitVar") + is_initvar = entry.declared_with_pytyping_modifier("dataclasses.InitVar") # TODO - classvars aren't included in "var_entries" so are missed here # and thus this code is never triggered - is_classvar = (entry.type.python_type_constructor_name == "typing.ClassVar") - if is_initvar or is_classvar: - entry.type = entry.type.resolve() # no longer need the special type + is_classvar = entry.declared_with_pytyping_modifier("typing.ClassVar") if name in default_value_assignments: assignment = default_value_assignments[name] if (isinstance(assignment, ExprNodes.CallNode) @@ -666,8 +664,11 @@ def _set_up_dataclass_fields(node, fields, dataclass_module): name) # create an entry in the global scope for 
this variable to live field_node = ExprNodes.NameNode(field_default.pos, name=EncodedString(module_field_name)) - field_node.entry = global_scope.declare_var(field_node.name, type=field_default.type or PyrexTypes.unspecified_type, - pos=field_default.pos, cname=field_node.name, is_cdef=1) + field_node.entry = global_scope.declare_var( + field_node.name, type=field_default.type or PyrexTypes.unspecified_type, + pos=field_default.pos, cname=field_node.name, is_cdef=True, + # TODO: do we need to set 'pytyping_modifiers' here? + ) # replace the field so that future users just receive the namenode setattr(field, attrname, field_node) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index fb2dedd56..4c325891a 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -1528,14 +1528,18 @@ class FloatNode(ConstNode): def _analyse_name_as_type(name, pos, env): - type = PyrexTypes.parse_basic_type(name) - if type is not None: - return type + ctype = PyrexTypes.parse_basic_type(name) + if ctype is not None and env.in_c_type_context: + return ctype global_entry = env.global_scope().lookup(name) - if global_entry and global_entry.is_type and global_entry.type: - return global_entry.type + if global_entry and global_entry.is_type: + type = global_entry.type + if type and (type.is_pyobject or env.in_c_type_context): + return type + ctype = ctype or type + # This is fairly heavy, so it's worth trying some easier things above. 
from .TreeFragment import TreeFragment with local_errors(ignore=True): pos = (pos[0], pos[1], pos[2]-7) @@ -1548,8 +1552,11 @@ def _analyse_name_as_type(name, pos, env): if isinstance(sizeof_node, SizeofTypeNode): sizeof_node = sizeof_node.analyse_types(env) if isinstance(sizeof_node, SizeofTypeNode): - return sizeof_node.arg_type - return None + type = sizeof_node.arg_type + if type and (type.is_pyobject or env.in_c_type_context): + return type + ctype = ctype or type + return ctype class BytesNode(ConstNode): @@ -2023,6 +2030,8 @@ class NameNode(AtomicExprNode): # annotations never create global cdef names if env.is_module_scope: return + + modifiers = () if ( # name: "description" => not a type, but still a declared variable or attribute annotation.expr.is_string_literal @@ -2034,10 +2043,11 @@ class NameNode(AtomicExprNode): # For Python class scopes every attribute is a Python object atype = py_object_type else: - _, atype = annotation.analyse_type_annotation(env) + modifiers, atype = annotation.analyse_type_annotation(env) + if atype is None: atype = unspecified_type if as_target and env.directives['infer_types'] != False else py_object_type - if atype.is_fused and env.fused_to_specific: + elif atype.is_fused and env.fused_to_specific: try: atype = atype.specialize(env.fused_to_specific) except CannotSpecialize: @@ -2045,6 +2055,7 @@ class NameNode(AtomicExprNode): "'%s' cannot be specialized since its type is not a fused argument to this function" % self.name) atype = error_type + visibility = 'private' if 'dataclasses.dataclass' in env.directives: # handle "frozen" directive - full inspection of the dataclass directives happens @@ -2058,12 +2069,17 @@ class NameNode(AtomicExprNode): if atype.is_pyobject or atype.can_coerce_to_pyobject(env): visibility = 'readonly' if is_frozen else 'public' # If the object can't be coerced that's fine - we just don't create a property + if as_target and env.is_c_class_scope and not (atype.is_pyobject or atype.is_error): # 
TODO: this will need revising slightly if annotated cdef attributes are implemented atype = py_object_type warning(annotation.pos, "Annotation ignored since class-level attributes must be Python objects. " "Were you trying to set up an instance attribute?", 2) - entry = self.entry = env.declare_var(name, atype, self.pos, is_cdef=not as_target, visibility=visibility) + + entry = self.entry = env.declare_var( + name, atype, self.pos, is_cdef=not as_target, visibility=visibility, + pytyping_modifiers=modifiers) + # Even if the entry already exists, make sure we're supplying an annotation if we can. if annotation and not entry.annotation: entry.annotation = annotation @@ -2083,23 +2099,38 @@ class NameNode(AtomicExprNode): return None def analyse_as_type(self, env): + type = None if self.cython_attribute: type = PyrexTypes.parse_basic_type(self.cython_attribute) - else: + elif env.in_c_type_context: type = PyrexTypes.parse_basic_type(self.name) if type: return type + entry = self.entry if not entry: entry = env.lookup(self.name) - if entry and entry.is_type: - return entry.type - elif entry and entry.known_standard_library_import: + if entry and not entry.is_type and entry.known_standard_library_import: entry = Builtin.get_known_standard_library_entry(entry.known_standard_library_import) - if entry and entry.is_type: - return entry.type - else: - return None + if entry and entry.is_type: + # Infer equivalent C types instead of Python types when possible. + type = entry.type + if not env.in_c_type_context and type is Builtin.long_type: + # Try to give a helpful warning when users write plain C type names. + warning(self.pos, "Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?") + type = py_object_type + elif type.is_pyobject and type.equivalent_type: + type = type.equivalent_type + return type + if self.name == 'object': + # This is normally parsed as "simple C type", but not if we don't parse C types. 
+ return py_object_type + + # Try to give a helpful warning when users write plain C type names. + if not env.in_c_type_context and PyrexTypes.parse_basic_type(self.name): + warning(self.pos, "Found C type '%s' in a Python annotation. Did you mean to use a Python type?" % self.name) + + return None def analyse_as_extension_type(self, env): # Try to interpret this as a reference to an extension type. @@ -3700,6 +3731,18 @@ class IndexNode(_IndexingBaseNode): error(self.pos, "Array size must be a compile time constant") return None + def analyse_pytyping_modifiers(self, env): + # Check for declaration modifiers, e.g. "typing.Optional[...]" or "dataclasses.InitVar[...]" + # TODO: somehow bring this together with TemplatedTypeNode.analyse_pytyping_modifiers() + modifiers = [] + modifier_node = self + while modifier_node.is_subscript: + modifier_type = modifier_node.base.analyse_as_type(env) + if modifier_type.python_type_constructor_name and modifier_type.modifier_name: + modifiers.append(modifier_type.modifier_name) + modifier_node = modifier_node.index + return modifiers + def type_dependencies(self, env): return self.base.type_dependencies(env) + self.index.type_dependencies(env) @@ -3930,12 +3973,16 @@ class IndexNode(_IndexingBaseNode): if base_type in (list_type, tuple_type) and self.index.type.is_int: item_type = infer_sequence_item_type( env, self.base, self.index, seq_type=base_type) - if item_type is None: - item_type = py_object_type - self.type = item_type if base_type in (list_type, tuple_type, dict_type): # do the None check explicitly (not in a helper) to allow optimising it away self.base = self.base.as_none_safe_node("'NoneType' object is not subscriptable") + if item_type is None or not item_type.is_pyobject: + # Even if we inferred a C type as result, we will read a Python object, so trigger coercion if needed. 
+ # We could potentially use "item_type.equivalent_type" here, but that may trigger assumptions + # about the actual runtime item types, rather than just their ability to coerce to the C "item_type". + self.type = py_object_type + else: + self.type = item_type self.wrap_in_nonecheck_node(env, getting) return self @@ -4231,6 +4278,7 @@ class IndexNode(_IndexingBaseNode): return utility_code = None + error_value = None if self.type.is_pyobject: error_value = 'NULL' if self.index.type.is_int: @@ -4266,8 +4314,8 @@ class IndexNode(_IndexingBaseNode): error_value = '-1' utility_code = UtilityCode.load_cached("GetItemIntByteArray", "StringTools.c") elif not (self.base.type.is_cpp_class and self.exception_check): - assert False, "unexpected type %s and base type %s for indexing" % ( - self.type, self.base.type) + assert False, "unexpected type %s and base type %s for indexing (%s)" % ( + self.type, self.base.type, self.pos) if utility_code is not None: code.globalstate.use_utility_code(utility_code) @@ -14021,10 +14069,8 @@ class AnnotationNode(ExprNode): def analyse_type_annotation(self, env, assigned_value=None): if self.untyped: # Already applied as a fused type, not re-evaluating it here. - return None, None + return [], None annotation = self.expr - base_type = None - is_ambiguous = False explicit_pytype = explicit_ctype = False if annotation.is_dict_literal: warning(annotation.pos, @@ -14041,36 +14087,29 @@ class AnnotationNode(ExprNode): annotation = value if explicit_pytype and explicit_ctype: warning(annotation.pos, "Duplicate type declarations found in signature annotation", level=1) - arg_type = annotation.analyse_as_type(env) - if annotation.is_name and not annotation.cython_attribute and annotation.name in ('int', 'long', 'float'): - # Map builtin numeric Python types to C types in safe cases. 
- if assigned_value is not None and arg_type is not None and not arg_type.is_pyobject: - assigned_type = assigned_value.infer_type(env) - if assigned_type and assigned_type.is_pyobject: - # C type seems unsafe, e.g. due to 'None' default value => ignore annotation type - is_ambiguous = True - arg_type = None - # ignore 'int' and require 'cython.int' to avoid unsafe integer declarations - if arg_type in (PyrexTypes.c_long_type, PyrexTypes.c_int_type, PyrexTypes.c_float_type): - arg_type = PyrexTypes.c_double_type if annotation.name == 'float' else py_object_type - elif arg_type is not None and annotation.is_string_literal: + + with env.new_c_type_context(in_c_type_context=explicit_ctype): + arg_type = annotation.analyse_as_type(env) + + if arg_type is None: + warning(annotation.pos, "Unknown type declaration in annotation, ignoring") + return [], arg_type + + if annotation.is_string_literal: warning(annotation.pos, "Strings should no longer be used for type declarations. Use 'cython.int' etc. directly.", level=1) - elif arg_type is not None and arg_type.is_complex: + if explicit_pytype and not explicit_ctype and not (arg_type.is_pyobject or arg_type.equivalent_type): + warning(annotation.pos, + "Python type declaration in signature annotation does not refer to a Python type") + if arg_type.is_complex: # creating utility code needs to be special-cased for complex types arg_type.create_declaration_utility_code(env) - if arg_type is not None: - if explicit_pytype and not explicit_ctype and not arg_type.is_pyobject: - warning(annotation.pos, - "Python type declaration in signature annotation does not refer to a Python type") - base_type = Nodes.CAnalysedBaseTypeNode( - annotation.pos, type=arg_type, is_arg=True) - elif is_ambiguous: - warning(annotation.pos, "Ambiguous types in annotation, ignoring") - else: - warning(annotation.pos, "Unknown type declaration in annotation, ignoring") - return base_type, arg_type + + # Check for declaration modifiers, e.g. 
"typing.Optional[...]" or "dataclasses.InitVar[...]" + modifiers = annotation.analyse_pytyping_modifiers(env) if annotation.is_subscript else [] + + return modifiers, arg_type class AssignmentExpressionNode(ExprNode): diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 927e47763..15c82f571 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -158,6 +158,7 @@ class Node(object): is_terminator = 0 is_wrapper = False # is a DefNode wrapper for a C function is_cproperty = False + is_templated_type_node = False temps = None # All descendants should set child_attrs to a list of the attributes @@ -966,27 +967,34 @@ class CArgDeclNode(Node): annotation = self.annotation if not annotation: return None - base_type, arg_type = annotation.analyse_type_annotation(env, assigned_value=self.default) - if base_type is not None: - self.base_type = base_type - - if arg_type and arg_type.python_type_constructor_name == "typing.Optional": - # "x: Optional[...]" => explicitly allow 'None' - arg_type = arg_type.resolve() - if arg_type and not arg_type.is_pyobject: - error(annotation.pos, "Only Python type arguments can use typing.Optional[...]") - else: - self.or_none = True - elif arg_type is py_object_type: - # exclude ": object" from the None check - None is a generic object. - self.or_none = True - elif arg_type and arg_type.is_pyobject and self.default and self.default.is_none: - # "x: ... = None" => implicitly allow 'None', but warn about it. 
- if not self.or_none: - warning(self.pos, "PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.") + + modifiers, arg_type = annotation.analyse_type_annotation(env, assigned_value=self.default) + if arg_type is not None: + self.base_type = CAnalysedBaseTypeNode( + annotation.pos, type=arg_type, is_arg=True) + + if arg_type: + if "typing.Optional" in modifiers: + # "x: Optional[...]" => explicitly allow 'None' + arg_type = arg_type.resolve() + if arg_type and not arg_type.is_pyobject: + # We probably already reported this as "cannot be applied to non-Python type". + # error(annotation.pos, "Only Python type arguments can use typing.Optional[...]") + pass + else: + self.or_none = True + elif arg_type is py_object_type: + # exclude ": object" from the None check - None is a generic object. self.or_none = True - elif arg_type and arg_type.is_pyobject and not self.or_none: - self.not_none = True + elif self.default and self.default.is_none and (arg_type.is_pyobject or arg_type.equivalent_type): + # "x: ... = None" => implicitly allow 'None' + if not arg_type.is_pyobject: + arg_type = arg_type.equivalent_type + if not self.or_none: + warning(self.pos, "PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.") + self.or_none = True + elif arg_type.is_pyobject and not self.or_none: + self.not_none = True return arg_type @@ -1076,9 +1084,9 @@ class CSimpleBaseTypeNode(CBaseTypeNode): else: type = py_object_type else: + scope = env if self.module_path: # Maybe it's a nested C++ class. - scope = env for item in self.module_path: entry = scope.lookup(item) if entry is not None and ( @@ -1099,8 +1107,6 @@ class CSimpleBaseTypeNode(CBaseTypeNode): if scope is None: # Maybe it's a cimport. 
scope = env.find_imported_module(self.module_path, self.pos) - else: - scope = env if scope: if scope.is_c_class_scope: @@ -1139,10 +1145,9 @@ class CSimpleBaseTypeNode(CBaseTypeNode): type = PyrexTypes.c_double_complex_type type.create_declaration_utility_code(env) self.complex = True - if type: - return type - else: - return PyrexTypes.error_type + if not type: + type = PyrexTypes.error_type + return type class MemoryViewSliceTypeNode(CBaseTypeNode): @@ -1211,10 +1216,40 @@ class TemplatedTypeNode(CBaseTypeNode): child_attrs = ["base_type_node", "positional_args", "keyword_args", "dtype_node"] + is_templated_type_node = True dtype_node = None - name = None + def _analyse_template_types(self, env, base_type): + require_python_types = base_type.python_type_constructor_name in ( + 'typing.Optional', + 'dataclasses.ClassVar', + ) + in_c_type_context = env.in_c_type_context and not require_python_types + + template_types = [] + for template_node in self.positional_args: + # CBaseTypeNode -> allow C type declarations in a 'cdef' context again + with env.new_c_type_context(in_c_type_context or isinstance(template_node, CBaseTypeNode)): + ttype = template_node.analyse_as_type(env) + if ttype is None: + if base_type.is_cpp_class: + error(template_node.pos, "unknown type in template argument") + ttype = error_type + # For Python generics we can be a bit more flexible and allow None. + elif require_python_types and not ttype.is_pyobject: + if ttype.equivalent_type and not template_node.as_cython_attribute(): + ttype = ttype.equivalent_type + else: + error(template_node.pos, "%s[...] 
cannot be applied to non-Python type %s" % ( + base_type.python_type_constructor_name, + ttype, + )) + ttype = error_type + template_types.append(ttype) + + return template_types + def analyse(self, env, could_be_name=False, base_type=None): if base_type is None: base_type = self.base_type_node.analyse(env) @@ -1222,21 +1257,15 @@ class TemplatedTypeNode(CBaseTypeNode): if ((base_type.is_cpp_class and base_type.is_template_type()) or base_type.python_type_constructor_name): - # Templated class + # Templated class, Python generics, etc. if self.keyword_args and self.keyword_args.key_value_pairs: tp = "c++ templates" if base_type.is_cpp_class else "indexed types" error(self.pos, "%s cannot take keyword arguments" % tp) self.type = PyrexTypes.error_type - else: - template_types = [] - for template_node in self.positional_args: - type = template_node.analyse_as_type(env) - if type is None and base_type.is_cpp_class: - error(template_node.pos, "unknown type in template argument") - type = error_type - # for indexed_pytype we can be a bit more flexible and pass None - template_types.append(type) - self.type = base_type.specialize_here(self.pos, env, template_types) + return self.type + + template_types = self._analyse_template_types(env, base_type) + self.type = base_type.specialize_here(self.pos, env, template_types) elif base_type.is_pyobject: # Buffer @@ -1277,7 +1306,7 @@ class TemplatedTypeNode(CBaseTypeNode): dimension=dimension) self.type = self.array_declarator.analyse(base_type, env)[1] - if self.type.is_fused and env.fused_to_specific: + if self.type and self.type.is_fused and env.fused_to_specific: try: self.type = self.type.specialize(env.fused_to_specific) except CannotSpecialize: @@ -1287,6 +1316,19 @@ class TemplatedTypeNode(CBaseTypeNode): return self.type + def analyse_pytyping_modifiers(self, env): + # Check for declaration modifiers, e.g. 
"typing.Optional[...]" or "dataclasses.InitVar[...]" + # TODO: somehow bring this together with IndexNode.analyse_pytyping_modifiers() + modifiers = [] + modifier_node = self + while modifier_node.is_templated_type_node and modifier_node.base_type_node and len(modifier_node.positional_args) == 1: + modifier_type = self.base_type_node.analyse_as_type(env) + if modifier_type.python_type_constructor_name and modifier_type.modifier_name: + modifiers.append(modifier_type.modifier_name) + modifier_node = modifier_node.positional_args[0] + + return modifiers + class CComplexBaseTypeNode(CBaseTypeNode): # base_type CBaseTypeNode @@ -1414,6 +1456,11 @@ class CVarDefNode(StatNode): base_type = self.base_type.analyse(env) + # Check for declaration modifiers, e.g. "typing.Optional[...]" or "dataclasses.InitVar[...]" + modifiers = None + if self.base_type.is_templated_type_node: + modifiers = self.base_type.analyse_pytyping_modifiers(env) + if base_type.is_fused and not self.in_pxd and (env.is_c_class_scope or env.is_module_scope): error(self.pos, "Fused types not allowed here") @@ -1477,7 +1524,7 @@ class CVarDefNode(StatNode): self.entry = dest_scope.declare_var( name, type, declarator.pos, cname=cname, visibility=visibility, in_pxd=self.in_pxd, - api=self.api, is_cdef=1) + api=self.api, is_cdef=True, pytyping_modifiers=modifiers) if Options.docstrings: self.entry.doc = embed_position(self.pos, self.doc) @@ -3164,7 +3211,7 @@ class DefNode(FuncDefNode): else: # probably just a plain 'object' arg.accept_none = True - else: + elif not arg.type.is_error: arg.accept_none = True # won't be used, but must be there if arg.not_none: error(arg.pos, "Only Python type arguments can have 'not None'") diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py index 1660eab22..1316edddc 100644 --- a/Cython/Compiler/PyrexTypes.py +++ b/Cython/Compiler/PyrexTypes.py @@ -205,6 +205,7 @@ class PyrexType(BaseType): # needs_cpp_construction boolean Needs C++ constructor and 
destructor when used in a cdef class # needs_refcounting boolean Needs code to be generated similar to incref/gotref/decref. # Largely used internally. + # equivalent_type type A C or Python type that is equivalent to this Python or C type. # default_value string Initial value that can be assigned before first user assignment. # declaration_value string The value statically assigned on declaration (if any). # entry Entry The Entry for this type @@ -277,6 +278,7 @@ class PyrexType(BaseType): has_attributes = 0 needs_cpp_construction = 0 needs_refcounting = 0 + equivalent_type = None default_value = "" declaration_value = "" @@ -4432,6 +4434,7 @@ class ErrorType(PyrexType): class PythonTypeConstructor(PyObjectType): """Used to help Cython interpret indexed types from the typing module (or similar) """ + modifier_name = None def __init__(self, name, base_type=None): self.python_type_constructor_name = name @@ -4460,69 +4463,35 @@ class PythonTupleTypeConstructor(PythonTypeConstructor): not any(v.is_pyobject for v in template_values)): entry = env.declare_tuple_type(pos, template_values) if entry: + entry.used = True return entry.type return super(PythonTupleTypeConstructor, self).specialize_here(pos, env, template_values) class SpecialPythonTypeConstructor(PythonTypeConstructor): """ - For things like ClassVar, Optional, etc, which have extra features on top of being - a "templated" type. + For things like ClassVar, Optional, etc, which are not types and disappear during type analysis. """ - def __init__(self, name, template_type=None): - super(SpecialPythonTypeConstructor, self).__init__(name, None) - if (name == "typing.ClassVar" and template_type - and not template_type.is_pyobject): - # because classvars end up essentially used as globals they have - # to be PyObjects. Try to find the nearest suitable type (although - # practically I doubt this matters). 
- py_type_name = template_type.py_type_name() - if py_type_name: - from .Builtin import builtin_scope - template_type = (builtin_scope.lookup_type(py_type_name) - or py_object_type) - else: - template_type = py_object_types - self.template_type = template_type + def __init__(self, name): + super(SpecialPythonTypeConstructor, self).__init__(name, base_type=None) + self.modifier_name = name def __repr__(self): - if self.template_type: - return "%s[%r]" % (self.name, self.template_type) - else: - return self.name - - def is_template_type(self): - return self.template_type is None + return self.name def resolve(self): - if self.template_type: - return self.template_type.resolve() - else: - return self + return self def specialize_here(self, pos, env, template_values=None): if len(template_values) != 1: error(pos, "'%s' takes exactly one template argument." % self.name) - # return a copy of the template type with python_type_constructor_name as an attribute - # so it can be identified, and a resolve function that gets back to - # the original type (since types are usually tested with "is") - new_type = template_values[0] - if self.python_type_constructor_name == "typing.ClassVar": - # classvar must remain a py_object_type - new_type = py_object_type - if (self.python_type_constructor_name == "typing.Optional" and - not new_type.is_pyobject): - # optional must be a py_object, but can be a specialized py_object - new_type = py_object_type - return SpecialPythonTypeConstructor( - self.python_type_constructor_name, - template_type = template_values[0]) - - def __getattr__(self, name): - if self.template_type: - return getattr(self.template_type, name) - return super(SpecialPythonTypeConstructor, self).__getattr__(name) + return error_type + if template_values[0] is None: + # FIXME: allowing unknown types for now since we don't recognise all Python types. + return None + # Replace this type with the actual 'template' argument. 
+ return template_values[0].resolve() rank_to_type_name = ( diff --git a/Cython/Compiler/Symtab.py b/Cython/Compiler/Symtab.py index 6554008f0..f657e7b7c 100644 --- a/Cython/Compiler/Symtab.py +++ b/Cython/Compiler/Symtab.py @@ -13,6 +13,7 @@ try: except ImportError: # Py3 import builtins +from ..Utils import try_finally_contextmanager from .Errors import warning, error, InternalError from .StringEncoding import EncodedString from . import Options, Naming @@ -163,6 +164,7 @@ class Entry(object): # known_standard_library_import Either None (default), an empty string (definitely can't be determined) # or a string of "modulename.something.attribute" # Used for identifying imports from typing/dataclasses etc + # pytyping_modifiers Python type modifiers like "typing.ClassVar" but also "dataclasses.InitVar" # TODO: utility_code and utility_code_definition serves the same purpose... @@ -237,6 +239,7 @@ class Entry(object): is_cgetter = False is_cpp_optional = False known_standard_library_import = None + pytyping_modifiers = None def __init__(self, name, cname, type, pos = None, init = None): self.name = name @@ -282,6 +285,9 @@ class Entry(object): assert not self.utility_code # we're not overwriting anything? self.utility_code_definition = Code.UtilityCode.load_cached("OptionalLocals", "CppSupport.cpp") + def declared_with_pytyping_modifier(self, modifier_name): + return modifier_name in self.pytyping_modifiers if self.pytyping_modifiers else False + class InnerEntry(Entry): """ @@ -366,6 +372,8 @@ class Scope(object): nogil = 0 fused_to_specific = None return_type = None + # Do ambiguous type names like 'int' and 'float' refer to the C types? (Otherwise, Python types.) + in_c_type_context = True def __init__(self, name, outer_scope, parent_scope): # The outer_scope is the next scope in the lookup chain. 
@@ -482,6 +490,14 @@ class Scope(object): for scope in sorted(self.subscopes, key=operator.attrgetter('scope_prefix')): yield scope + @try_finally_contextmanager + def new_c_type_context(self, in_c_type_context=None): + old_c_type_context = self.in_c_type_context + if in_c_type_context is not None: + self.in_c_type_context = in_c_type_context + yield + self.in_c_type_context = old_c_type_context + def declare(self, name, cname, type, pos, visibility, shadow = 0, is_type = 0, create_wrapper = 0): # Create new entry, and add to dictionary if # name is not None. Reports a warning if already @@ -733,8 +749,8 @@ class Scope(object): return self.outer_scope.declare_tuple_type(pos, components) def declare_var(self, name, type, pos, - cname = None, visibility = 'private', - api = 0, in_pxd = 0, is_cdef = 0): + cname=None, visibility='private', + api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None): # Add an entry for a variable. if not cname: if visibility != 'private' or api: @@ -754,8 +770,17 @@ class Scope(object): if api: entry.api = 1 entry.used = 1 + if pytyping_modifiers: + entry.pytyping_modifiers = pytyping_modifiers return entry + def _reject_pytyping_modifiers(self, pos, modifiers, allowed=()): + if not modifiers: + return + for modifier in modifiers: + if modifier not in allowed: + error(pos, "Modifier '%s' is not allowed here." % modifier) + def declare_assignment_expression_target(self, name, type, pos): # In most cases declares the variable as normal. # For generator expressions and comprehensions the variable is declared in their parent @@ -1515,14 +1540,15 @@ class ModuleScope(Scope): return entry def declare_var(self, name, type, pos, - cname = None, visibility = 'private', - api = 0, in_pxd = 0, is_cdef = 0): + cname=None, visibility='private', + api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None): # Add an entry for a global variable. 
If it is a Python # object type, and not declared with cdef, it will live # in the module dictionary, otherwise it will be a C # global variable. if visibility not in ('private', 'public', 'extern'): error(pos, "Module-level variable cannot be declared %s" % visibility) + self._reject_pytyping_modifiers(pos, pytyping_modifiers, ('typing.Optional',)) # let's allow at least this one if not is_cdef: if type is unspecified_type: type = py_object_type @@ -1558,7 +1584,7 @@ class ModuleScope(Scope): entry = Scope.declare_var(self, name, type, pos, cname=cname, visibility=visibility, - api=api, in_pxd=in_pxd, is_cdef=is_cdef) + api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers) if is_cdef: entry.is_cglobal = 1 if entry.type.declaration_value: @@ -1889,15 +1915,15 @@ class LocalScope(Scope): return entry def declare_var(self, name, type, pos, - cname = None, visibility = 'private', - api = 0, in_pxd = 0, is_cdef = 0): + cname=None, visibility='private', + api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None): name = self.mangle_class_private_name(name) # Add an entry for a local variable. 
if visibility in ('public', 'readonly'): error(pos, "Local variable cannot be declared %s" % visibility) entry = Scope.declare_var(self, name, type, pos, cname=cname, visibility=visibility, - api=api, in_pxd=in_pxd, is_cdef=is_cdef) + api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers) if entry.type.declaration_value: entry.init = entry.type.declaration_value entry.is_local = 1 @@ -1995,13 +2021,14 @@ class ComprehensionScope(Scope): return '%s%s' % (self.genexp_prefix, self.parent_scope.mangle(prefix, name)) def declare_var(self, name, type, pos, - cname = None, visibility = 'private', - api = 0, in_pxd = 0, is_cdef = True): + cname=None, visibility='private', + api=False, in_pxd=False, is_cdef=True, pytyping_modifiers=None): if type is unspecified_type: # if the outer scope defines a type for this variable, inherit it outer_entry = self.outer_scope.lookup(name) if outer_entry and outer_entry.is_variable: type = outer_entry.type # may still be 'unspecified_type' ! + self._reject_pytyping_modifiers(pos, pytyping_modifiers) # the parent scope needs to generate code for the variable, but # this scope must hold its name exclusively cname = '%s%s' % (self.genexp_prefix, self.parent_scope.mangle(Naming.var_prefix, name or self.next_id())) @@ -2084,8 +2111,8 @@ class StructOrUnionScope(Scope): Scope.__init__(self, name, None, None) def declare_var(self, name, type, pos, - cname = None, visibility = 'private', - api = 0, in_pxd = 0, is_cdef = 0, + cname=None, visibility='private', + api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None, allow_pyobject=False, allow_memoryview=False, allow_refcounted=False): # Add an entry for an attribute. 
if not cname: @@ -2094,6 +2121,7 @@ class StructOrUnionScope(Scope): cname = c_safe_identifier(cname) if type.is_cfunction: type = PyrexTypes.CPtrType(type) + self._reject_pytyping_modifiers(pos, pytyping_modifiers) entry = self.declare(name, cname, type, pos, visibility) entry.is_variable = 1 self.var_entries.append(entry) @@ -2171,15 +2199,15 @@ class PyClassScope(ClassScope): is_py_class_scope = 1 def declare_var(self, name, type, pos, - cname = None, visibility = 'private', - api = 0, in_pxd = 0, is_cdef = 0): + cname=None, visibility='private', + api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None): name = self.mangle_class_private_name(name) if type is unspecified_type: type = py_object_type # Add an entry for a class attribute. entry = Scope.declare_var(self, name, type, pos, cname=cname, visibility=visibility, - api=api, in_pxd=in_pxd, is_cdef=is_cdef) + api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers) entry.is_pyglobal = 1 entry.is_pyclass_attr = 1 return entry @@ -2301,17 +2329,21 @@ class CClassScope(ClassScope): return have_entries, (py_attrs, py_buffers, memoryview_slices) def declare_var(self, name, type, pos, - cname = None, visibility = 'private', - api = 0, in_pxd = 0, is_cdef = 0): + cname=None, visibility='private', + api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None): name = self.mangle_class_private_name(name) - if type.python_type_constructor_name == "typing.ClassVar": - is_cdef = 0 - type = type.resolve() - - if (type.python_type_constructor_name == "dataclasses.InitVar" and - 'dataclasses.dataclass' not in self.directives): - error(pos, "Use of cython.dataclasses.InitVar does not make sense outside a dataclass") + if pytyping_modifiers: + if "typing.ClassVar" in pytyping_modifiers: + is_cdef = 0 + if not type.is_pyobject: + if not type.equivalent_type: + warning(pos, "ClassVar[] requires the type to be a Python object type. Found '%s', using object instead." 
% type) + type = py_object_type + else: + type = type.equivalent_type + if "dataclasses.InitVar" in pytyping_modifiers and 'dataclasses.dataclass' not in self.directives: + error(pos, "Use of cython.dataclasses.InitVar does not make sense outside a dataclass") if is_cdef: # Add an entry for an attribute. @@ -2332,6 +2364,7 @@ class CClassScope(ClassScope): entry = self.declare(name, cname, type, pos, visibility) entry.is_variable = 1 self.var_entries.append(entry) + entry.pytyping_modifiers = pytyping_modifiers if type.is_cpp_class and visibility != 'extern': if self.directives['cpp_locals']: entry.make_cpp_optional() @@ -2369,7 +2402,7 @@ class CClassScope(ClassScope): # Add an entry for a class attribute. entry = Scope.declare_var(self, name, type, pos, cname=cname, visibility=visibility, - api=api, in_pxd=in_pxd, is_cdef=is_cdef) + api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers) entry.is_member = 1 # xxx: is_pyglobal changes behaviour in so many places that I keep it in for now. # is_member should be enough later on @@ -2612,11 +2645,12 @@ class CppClassScope(Scope): template_entry.is_type = 1 def declare_var(self, name, type, pos, - cname = None, visibility = 'extern', - api = 0, in_pxd = 0, is_cdef = 0, defining = 0): + cname=None, visibility='extern', + api=False, in_pxd=False, is_cdef=False, defining=False, pytyping_modifiers=None): # Add an entry for an attribute. if not cname: cname = name + self._reject_pytyping_modifiers(pos, pytyping_modifiers) entry = self.lookup_here(name) if defining and entry is not None: if entry.type.same_as(type): @@ -2746,10 +2780,11 @@ class CppScopedEnumScope(Scope): Scope.__init__(self, name, outer_scope, None) def declare_var(self, name, type, pos, - cname=None, visibility='extern'): + cname=None, visibility='extern', pytyping_modifiers=None): # Add an entry for an attribute. 
if not cname: cname = name + self._reject_pytyping_modifiers(pos, pytyping_modifiers) entry = self.declare(name, cname, type, pos, visibility) entry.is_variable = True return entry diff --git a/docs/examples/userguide/extension_types/dataclass.pyx b/docs/examples/userguide/extension_types/dataclass.pyx index 56666537d..b03d5f7b1 100644 --- a/docs/examples/userguide/extension_types/dataclass.pyx +++ b/docs/examples/userguide/extension_types/dataclass.pyx @@ -17,5 +17,5 @@ cdef class MyDataclass: c = "hello" # assignment of default value on a separate line # typing.InitVar and typing.ClassVar also work - d: dataclasses.InitVar[double] = 5 + d: dataclasses.InitVar[cython.double] = 5 e: typing.ClassVar[list] = [] diff --git a/tests/errors/dataclass_e1.pyx b/tests/errors/dataclass_e1.pyx index 39337ba6d..95d67ad7d 100644 --- a/tests/errors/dataclass_e1.pyx +++ b/tests/errors/dataclass_e1.pyx @@ -1,5 +1,5 @@ # mode: error - +# tag: warnings cimport cython @cython.dataclasses.dataclass(1, shouldnt_be_here=True, init=5, unsafe_hash=True) diff --git a/tests/errors/dataclass_e5.pyx b/tests/errors/dataclass_e5.pyx new file mode 100644 index 000000000..e86adf47e --- /dev/null +++ b/tests/errors/dataclass_e5.pyx @@ -0,0 +1,21 @@ +# mode: error +# tag: warnings + +cimport cython + +@cython.dataclasses.dataclass +cdef class C: + a: int + b: long + c: Py_ssize_t + d: float + e: double + + +_WARNINGS = """ +9:7: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'? +10:7: Found C type 'Py_ssize_t' in a Python annotation. Did you mean to use a Python type? +10:7: Unknown type declaration in annotation, ignoring +12:7: Found C type 'double' in a Python annotation. Did you mean to use a Python type? 
+12:7: Unknown type declaration in annotation, ignoring +""" diff --git a/tests/errors/e_typing_errors.pyx b/tests/errors/e_typing_errors.pyx new file mode 100644 index 000000000..e11827696 --- /dev/null +++ b/tests/errors/e_typing_errors.pyx @@ -0,0 +1,59 @@ +# mode: error + +import cython + +try: + from typing import Optional, ClassVar +except ImportError: + pass + + +# not OK + +def optional_cython_types(Optional[cython.int] i, Optional[cython.double] d, Optional[cython.float] f, + Optional[cython.complex] c, Optional[cython.long] l, Optional[cython.longlong] ll): + pass + + +MyStruct = cython.struct(a=cython.int, b=cython.double) + +def optional_cstruct(Optional[MyStruct] x): + pass + + +def optional_pytypes(Optional[int] i, Optional[float] f, Optional[complex] c, Optional[long] l): + pass + + +cdef ClassVar[list] x + + +# OK + +def optional_memoryview(double[:] d, Optional[double[:]] o): + pass + + +cdef class Cls(object): + cdef ClassVar[list] x + + + +_ERRORS = """ +13:45: typing.Optional[...] cannot be applied to non-Python type int +13:72: typing.Optional[...] cannot be applied to non-Python type double +13:98: typing.Optional[...] cannot be applied to non-Python type float +14:49: typing.Optional[...] cannot be applied to non-Python type double complex +14:74: typing.Optional[...] cannot be applied to non-Python type long +14:103: typing.Optional[...] cannot be applied to non-Python type long long +24:33: typing.Optional[...] cannot be applied to non-Python type int +24:52: typing.Optional[...] cannot be applied to non-Python type float +24:91: typing.Optional[...] cannot be applied to non-Python type long + +20:38: typing.Optional[...] cannot be applied to non-Python type MyStruct + +28:20: Modifier 'typing.ClassVar' is not allowed here. + +# FIXME: this should be ok :-? +33:53: typing.Optional[...] 
cannot be applied to non-Python type double[:] +""" diff --git a/tests/errors/e_typing_optional.py b/tests/errors/e_typing_optional.py index e75638e00..6facfeea4 100644 --- a/tests/errors/e_typing_optional.py +++ b/tests/errors/e_typing_optional.py @@ -8,11 +8,10 @@ except ImportError: pass -def optional_pytypes(i: Optional[int], f: Optional[float]): - pass - +# not OK -def optional_cython_types(i: Optional[cython.int], d: Optional[cython.double], f: Optional[cython.float]): +def optional_cython_types(i: Optional[cython.int], d: Optional[cython.double], f: Optional[cython.float], + c: Optional[cython.complex], l: Optional[cython.long], ll: Optional[cython.longlong]): pass @@ -22,13 +21,23 @@ def optional_cstruct(x: Optional[MyStruct]): pass +# OK + +def optional_pytypes(i: Optional[int], f: Optional[float], c: Optional[complex], l: Optional[long]): + pass + + +def optional_memoryview(d: double[:], o: Optional[double[:]]): + pass + + _ERRORS = """ -15:29: Only Python type arguments can use typing.Optional[...] -15:54: Only Python type arguments can use typing.Optional[...] -15:82: Only Python type arguments can use typing.Optional[...] -21:24: Only Python type arguments can use typing.Optional[...] - -# FIXME: these should be allowed! -11:24: Only Python type arguments can use typing.Optional[...] -11:42: Only Python type arguments can use typing.Optional[...] +13:44: typing.Optional[...] cannot be applied to non-Python type int +13:69: typing.Optional[...] cannot be applied to non-Python type double +13:97: typing.Optional[...] cannot be applied to non-Python type float +14:44: typing.Optional[...] cannot be applied to non-Python type double complex +14:73: typing.Optional[...] cannot be applied to non-Python type long +14:100: typing.Optional[...] cannot be applied to non-Python type long long + +20:33: typing.Optional[...] 
cannot be applied to non-Python type MyStruct """ diff --git a/tests/run/annotation_typing.pyx b/tests/run/annotation_typing.pyx index 03900061a..8eb52e7c6 100644 --- a/tests/run/annotation_typing.pyx +++ b/tests/run/annotation_typing.pyx @@ -11,14 +11,14 @@ except ImportError: pass -def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'type': 'float'} = 4) -> list: +def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'type': 'long int'} = 4) -> list: """ >>> old_dict_syntax([1]) - ('list object', 'int', 'long', 'float') - [1, 2, 3, 4.0] + ('list object', 'int object', 'long', 'long') + [1, 2, 3, 4] >>> old_dict_syntax([1], 3) - ('list object', 'int', 'long', 'float') - [1, 3, 3, 4.0] + ('list object', 'int object', 'long', 'long') + [1, 3, 3, 4] >>> old_dict_syntax(123) Traceback (most recent call last): TypeError: Argument 'a' has incorrect type (expected list, got int) @@ -33,16 +33,16 @@ def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'ty return a -def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None, o: Optional[tuple] = ()) -> list: +def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None, o: Optional[tuple] = ()) -> list: """ >>> pytypes_def([1]) - ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') [1, 2, 3, 4.0, None, ()] >>> pytypes_def([1], 3) - ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') [1, 3, 3, 4.0, None, ()] >>> pytypes_def([1], 3, 2, 1, [], None) - ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') [1, 3, 2, 1.0, [], None] >>> 
pytypes_def(123) Traceback (most recent call last): @@ -60,16 +60,16 @@ def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None, return a -cpdef pytypes_cpdef(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None, o: Optional[tuple] = ()): +cpdef pytypes_cpdef(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None, o: Optional[tuple] = ()): """ >>> pytypes_cpdef([1]) - ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') [1, 2, 3, 4.0, None, ()] >>> pytypes_cpdef([1], 3) - ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') [1, 3, 3, 4.0, None, ()] >>> pytypes_cpdef([1], 3, 2, 1, [], None) - ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') [1, 3, 2, 1.0, [], None] >>> pytypes_cpdef(123) Traceback (most recent call last): @@ -87,7 +87,7 @@ cpdef pytypes_cpdef(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = No return a -cdef c_pytypes_cdef(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None): +cdef c_pytypes_cdef(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None): print(typeof(a), typeof(b), typeof(c), typeof(d), typeof(n)) a.append(b) a.append(c) @@ -99,10 +99,10 @@ cdef c_pytypes_cdef(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = No def pytypes_cdef(a, b=2, c=3, d=4): """ >>> pytypes_cdef([1]) - ('list object', 'Python object', 'Python object', 'double', 'list object') + ('list object', 'int object', 'Python object', 'double', 'list object') [1, 2, 3, 4.0, None] >>> pytypes_cdef([1], 3) - ('list object', 'Python object', 'Python object', 'double', 'list object') + ('list object', 'int object', 
'Python object', 'double', 'list object') [1, 3, 3, 4.0, None] >>> pytypes_cdef(123) # doctest: +ELLIPSIS Traceback (most recent call last): @@ -278,24 +278,28 @@ class LateClass(object): pass -def py_float_default(price : float=None, ndigits=4): +def py_float_default(price : Optional[float]=None, ndigits=4): """ Python default arguments should prevent C type inference. >>> py_float_default() (None, 4) - >>> py_float_default(2) - (2, 4) + >>> py_float_default(None) + (None, 4) + >>> py_float_default(2) # doctest: +ELLIPSIS + Traceback (most recent call last): + TypeError: ...float... >>> py_float_default(2.0) (2.0, 4) - >>> py_float_default(2, 3) - (2, 3) + >>> py_float_default(2, 3) # doctest: +ELLIPSIS + Traceback (most recent call last): + TypeError: ...float... """ return price, ndigits cdef class ClassAttribute: - cls_attr : float = 1. + cls_attr : cython.float = 1. @cython.cfunc @@ -332,12 +336,16 @@ _WARNINGS = """ 14:77: Dicts should no longer be used as type annotations. Use 'cython.int' etc. directly. 14:85: Python type declaration in signature annotation does not refer to a Python type 14:85: Strings should no longer be used for type declarations. Use 'cython.int' etc. directly. -36:64: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None. -63:68: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None. -90:68: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None. +36:40: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'? +36:66: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None. +63:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'? +63:70: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None. +90:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'? +90:70: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None. 
274:44: Unknown type declaration in annotation, ignoring -281:29: Ambiguous types in annotation, ignoring -298:15: Annotation ignored since class-level attributes must be Python objects. Were you trying to set up an instance attribute? +302:15: Annotation ignored since class-level attributes must be Python objects. Were you trying to set up an instance attribute? +# DUPLICATE: +63:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'? # BUG: 63:6: 'pytypes_cpdef' redeclared 146:0: 'struct_io' redeclared diff --git a/tests/run/cdef_class_dataclass.pyx b/tests/run/cdef_class_dataclass.pyx index 326fd0210..2f69e0f8f 100644 --- a/tests/run/cdef_class_dataclass.pyx +++ b/tests/run/cdef_class_dataclass.pyx @@ -127,8 +127,8 @@ cdef class ContainsNonPyFields: """ mystruct: S = cython.dataclasses.field(compare=False) mystruct_ptr: S_ptr = field(init=False, repr=False, default_factory=malloc_a_struct) - memview: int[:, ::1] = field(default=create_array((3,1), "c"), # mutable so not great but OK for a test - compare=False) + memview: cython.int[:, ::1] = field(default=create_array((3,1), "c"), # mutable so not great but OK for a test + compare=False) def __dealloc__(self): free(self.mystruct_ptr) @@ -154,8 +154,8 @@ cdef class InitClassVars: True """ a: cython.int = 0 - b1: InitVar[double] = 1.0 - b2: py_dataclasses.InitVar[double] = 1.0 + b1: InitVar[cython.double] = 1.0 + b2: py_dataclasses.InitVar[cython.double] = 1.0 c1: ClassVar[float] = 2.0 c2: typing.ClassVar[float] = 2.0 cdef InitVar[cython.int] d1 @@ -206,7 +206,7 @@ cdef class TestVisibility: """ cdef double a a = 1.0 - b: double = 2.0 + b: cython.double = 2.0 cdef public double c c = 3.0 cdef public object d @@ -222,7 +222,7 @@ cdef class TestFrozen: Traceback (most recent call last): AttributeError: attribute 'a' of '...TestFrozen' objects is not writable """ - a: double = 2.0 + a: cython.double = 2.0 @dataclass(kw_only=True) cdef class TestKwOnly: @@ -248,8 +248,8 @@ cdef 
class TestKwOnly: TypeError: __init__() needs keyword-only argument b """ - a: double = 2.0 - b: long + a: cython.double = 2.0 + b: cython.long import sys if sys.version_info >= (3, 7): diff --git a/tests/run/cdef_setitem_T284.pyx b/tests/run/cdef_setitem_T284.pyx index 389b8c409..871afb892 100644 --- a/tests/run/cdef_setitem_T284.pyx +++ b/tests/run/cdef_setitem_T284.pyx @@ -24,9 +24,9 @@ def with_external_list(list L): """ >>> with_external_list([1,2,3]) [1, -10, 3] - >>> with_external_list(None) + >>> with_external_list(None) # doctest: +ELLIPSIS Traceback (most recent call last): - TypeError: 'NoneType' object is not subscriptable + TypeError: 'NoneType' object ... """ ob = 1L L[ob] = -10 diff --git a/tests/run/delete.pyx b/tests/run/delete.pyx index ec0b6c71a..6127fa9f1 100644 --- a/tests/run/delete.pyx +++ b/tests/run/delete.pyx @@ -29,15 +29,33 @@ def del_item(L, o): del L[o] return L + @cython.test_assert_path_exists('//DelStatNode//IndexNode//NoneCheckNode') def del_dict(dict D, o): """ >>> del_dict({1: 'a', 2: 'b'}, 1) {2: 'b'} + >>> del_dict(None, 1) # doctest: +ELLIPSIS + Traceback (most recent call last): + TypeError: 'NoneType' object ... 
""" del D[o] return D + +@cython.test_fail_if_path_exists('//DelStatNode//IndexNode//NoneCheckNode') +def del_dict_ann(D: dict, o): + """ + >>> del_dict_ann({1: 'a', 2: 'b'}, 1) + {2: 'b'} + >>> del_dict_ann(None, 1) + Traceback (most recent call last): + TypeError: Argument 'D' has incorrect type (expected dict, got NoneType) + """ + del D[o] + return D + + @cython.test_fail_if_path_exists('//NoneCheckNode') def del_dict_from_literal(o): """ diff --git a/tests/run/pep526_variable_annotations.py b/tests/run/pep526_variable_annotations.py index 3e30075c3..56cb0201b 100644 --- a/tests/run/pep526_variable_annotations.py +++ b/tests/run/pep526_variable_annotations.py @@ -15,11 +15,11 @@ except ImportError: var = 1 # type: annotation -var: int = 2 -fvar: float = 1.2 +var: cython.int = 2 +fvar: cython.float = 1.2 some_number: cython.int # variable without initial value -some_list: List[int] = [] # variable with initial value -t: Tuple[int, ...] = (1, 2, 3) +some_list: List[cython.int] = [] # variable with initial value +t: Tuple[cython.int, ...] = (1, 2, 3) body: Optional[List[str]] descr_only : "descriptions are allowed but ignored" @@ -34,11 +34,11 @@ def f(): (2, 1.5, [], (1, 2, 3)) """ var = 1 # type: annotation - var: int = 2 - fvar: float = 1.5 + var: cython.int = 2 + fvar: cython.float = 1.5 some_number: cython.int # variable without initial value - some_list: List[int] = [] # variable with initial value - t: Tuple[int, ...] = (1, 2, 3) + some_list: List[cython.int] = [] # variable with initial value + t: Tuple[cython.int, ...] 
= (1, 2, 3) body: Optional[List[str]] descr_only: "descriptions are allowed but ignored" @@ -59,7 +59,7 @@ class BasicStarship(object): """ captain: str = 'Picard' # instance variable with default damage: cython.int # instance variable without default - stats: ClassVar[Dict[str, int]] = {} # class variable + stats: ClassVar[Dict[str, cython.int]] = {} # class variable descr_only: "descriptions are allowed but ignored" def __init__(self, damage): @@ -75,7 +75,7 @@ class BasicStarshipExt(object): """ captain: str = 'Picard' # instance variable with default damage: cython.int # instance variable without default - stats: ClassVar[Dict[str, int]] = {} # class variable + stats: ClassVar[Dict[str, cython.int]] = {} # class variable descr_only: "descriptions are allowed but ignored" def __init__(self, damage): @@ -124,7 +124,7 @@ def iter_declared_dict(d): # specialized "compiled" test in module-level __doc__ """ - typed_dict : Dict[float, float] = d + typed_dict : Dict[cython.float, cython.float] = d s = 0.0 for key in typed_dict: s += d[key] @@ -135,7 +135,7 @@ def iter_declared_dict(d): "//WhileStatNode", "//WhileStatNode//DictIterationNextNode", ) -def iter_declared_dict_arg(d : Dict[float, float]): +def iter_declared_dict_arg(d : Dict[cython.float, cython.float]): """ >>> d = {1.1: 2.5, 3.3: 4.5} >>> iter_declared_dict_arg(d) @@ -165,8 +165,8 @@ def test_subscripted_types(): list object set object """ - a: typing.Dict[int, float] = {} - b: List[int] = [] + a: typing.Dict[cython.int, cython.float] = {} + b: List[cython.int] = [] c: _SET_[object] = set() print(cython.typeof(a) + (" object" if not cython.compiled else "")) @@ -174,22 +174,31 @@ def test_subscripted_types(): print(cython.typeof(c) + (" object" if not cython.compiled else "")) # because tuple is specifically special cased to go to ctuple where possible -def test_tuple(a: typing.Tuple[int, float], b: typing.Tuple[int, ...], - c: Tuple[int, object] # cannot be a ctuple +def test_tuple(a: 
typing.Tuple[cython.int, cython.float], b: typing.Tuple[cython.int, ...], + c: Tuple[cython.int, object] # cannot be a ctuple ): """ >>> test_tuple((1, 1.0), (1, 1.0), (1, 1.0)) int int + Python object + Python object + (int, float) + tuple object tuple object tuple object """ - x: typing.Tuple[int, float] = (a[0], a[1]) - y: Tuple[int, ...] = (1,2.) - z = a[0] # should infer to int + x: typing.Tuple[int, float] = (a[0], a[1]) # note: Python int/float, not cython.int/float + y: Tuple[cython.int, ...] = (1,2.) + z = a[0] # should infer to C int + p = x[1] # should infer to Python float -> C double print(cython.typeof(z)) - print(cython.typeof(x[0])) + print("int" if cython.compiled and cython.typeof(x[0]) == "Python object" else cython.typeof(x[0])) # FIXME: infer Python int + print(cython.typeof(p) if cython.compiled or cython.typeof(p) != 'float' else "Python object") # FIXME: infer C double + print(cython.typeof(x[1]) if cython.compiled or cython.typeof(p) != 'float' else "Python object") # FIXME: infer C double + print(cython.typeof(a) if cython.compiled or cython.typeof(a) != 'tuple' else "(int, float)") + print(cython.typeof(x) + (" object" if not cython.compiled else "")) print(cython.typeof(y) + (" object" if not cython.compiled else "")) print(cython.typeof(c) + (" object" if not cython.compiled else "")) diff --git a/tests/run/pep526_variable_annotations_cy.pyx b/tests/run/pep526_variable_annotations_cy.pyx index c08c832b0..448824b36 100644 --- a/tests/run/pep526_variable_annotations_cy.pyx +++ b/tests/run/pep526_variable_annotations_cy.pyx @@ -48,9 +48,9 @@ def test_tuple(typing.Tuple[int, float] a, typing.Tuple[int, ...] b, tuple object tuple object """ - cdef typing.Tuple[int, float] x = (a[0], a[1]) + cdef typing.Tuple[int, float] x = (a[0], a[1]) # C int/float cdef Tuple[int, ...] y = (1,2.) 
- z = a[0] # should infer to int + z = a[0] # should infer to C int print(cython.typeof(z)) print(cython.typeof(x[0])) diff --git a/tests/run/pure_cdef_class_dataclass.py b/tests/run/pure_cdef_class_dataclass.py index 7b8fcb851..8a978d36f 100644 --- a/tests/run/pure_cdef_class_dataclass.py +++ b/tests/run/pure_cdef_class_dataclass.py @@ -11,9 +11,9 @@ class MyDataclass: """ >>> sorted(list(MyDataclass.__dataclass_fields__.keys())) ['a', 'self'] - >>> inst1 = MyDataclass(2.0, ['a', 'b']) + >>> inst1 = MyDataclass(2, ['a', 'b']) >>> print(inst1) - MyDataclass(a=2.0, self=['a', 'b']) + MyDataclass(a=2, self=['a', 'b']) >>> inst2 = MyDataclass() >>> print(inst2) MyDataclass(a=1, self=[]) -- cgit v1.2.1 From b404a3f3b82f296931730470411a68fec7d6f40f Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 12 Jul 2022 15:54:21 +0100 Subject: Allow setting "annotation_typing" directive more locally (GH-4886) To make it easier to handle cases where Cython's interpretation differs from the user's interpretation. Also improve the documentation about this. --- Cython/Compiler/Options.py | 1 - docs/src/userguide/migrating_to_cy30.rst | 17 ++++++++++++ .../src/userguide/source_files_and_compilation.rst | 3 ++- tests/run/annotation_typing.pyx | 30 ++++++++++++++++++++++ 4 files changed, 49 insertions(+), 2 deletions(-) diff --git a/Cython/Compiler/Options.py b/Cython/Compiler/Options.py index af28a7187..97f288905 100644 --- a/Cython/Compiler/Options.py +++ b/Cython/Compiler/Options.py @@ -366,7 +366,6 @@ directive_scopes = { # defaults to available everywhere 'test_fail_if_path_exists' : ('function', 'class', 'cclass'), 'freelist': ('cclass',), 'emit_code_comments': ('module',), - 'annotation_typing': ('module',), # FIXME: analysis currently lacks more specific function scope # Avoid scope-specific to/from_py_functions for c_string. 
'c_string_type': ('module',), 'c_string_encoding': ('module',), diff --git a/docs/src/userguide/migrating_to_cy30.rst b/docs/src/userguide/migrating_to_cy30.rst index 357132887..1105ee15d 100644 --- a/docs/src/userguide/migrating_to_cy30.rst +++ b/docs/src/userguide/migrating_to_cy30.rst @@ -172,3 +172,20 @@ rather than relying on the user to test and cast the type of each operand. The old behaviour can be restored with the :ref:`directive ` ``c_api_binop_methods=True``. More details are given in :ref:`arithmetic_methods`. + +Annotation typing +================= + +Cython 3 has made substantial improvements in recognising types in +annotations and it is well worth reading +:ref:`the pure Python tutorial` to understand +some of the improvements. + +A notable backwards-compatible change is that ``x: int`` is now typed +such that ``x`` is an exact Python ``int`` (Cython 0.29 would accept +any Python object for ``x``). + +To make it easier to handle cases where your interpretation of type +annotations differs from Cython's, Cython 3 now supports setting the +``annotation_typing`` :ref:`directive ` on a +per-class or per-function level. diff --git a/docs/src/userguide/source_files_and_compilation.rst b/docs/src/userguide/source_files_and_compilation.rst index edf51213e..a833c61ed 100644 --- a/docs/src/userguide/source_files_and_compilation.rst +++ b/docs/src/userguide/source_files_and_compilation.rst @@ -946,7 +946,8 @@ Cython code. Here is the list of currently supported directives: Uses function argument annotations to determine the type of variables. Default is True, but can be disabled. Since Python does not enforce types given in annotations, setting to False gives greater compatibility with Python code. - Must be set globally. + From Cython 3.0, ``annotation_typing`` can be set on a per-function or + per-class basis. 
``emit_code_comments`` (True / False) Copy the original source code line by line into C code comments in the generated diff --git a/tests/run/annotation_typing.pyx b/tests/run/annotation_typing.pyx index 8eb52e7c6..ce74ef1dd 100644 --- a/tests/run/annotation_typing.pyx +++ b/tests/run/annotation_typing.pyx @@ -329,6 +329,36 @@ class HasPtr: return f"HasPtr({self.a[0]}, {self.b})" +@cython.annotation_typing(False) +def turn_off_typing(x: float, d: dict): + """ + >>> turn_off_typing('not a float', []) # ignore the typing + ('Python object', 'Python object', 'not a float', []) + """ + return typeof(x), typeof(d), x, d + + +@cython.annotation_typing(False) +cdef class ClassTurnOffTyping: + x: float + d: dict + + def get_var_types(self, arg: float): + """ + >>> ClassTurnOffTyping().get_var_types(1.0) + ('Python object', 'Python object', 'Python object') + """ + return typeof(self.x), typeof(self.d), typeof(arg) + + @cython.annotation_typing(True) + def and_turn_it_back_on_again(self, arg: float): + """ + >>> ClassTurnOffTyping().and_turn_it_back_on_again(1.0) + ('Python object', 'Python object', 'double') + """ + return typeof(self.x), typeof(self.d), typeof(arg) + + _WARNINGS = """ 14:32: Strings should no longer be used for type declarations. Use 'cython.int' etc. directly. 14:47: Dicts should no longer be used as type annotations. Use 'cython.int' etc. directly. -- cgit v1.2.1 From f04d53bc5a372a02266a92a27c1d041d1e02b6b8 Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 12 Jul 2022 18:39:47 +0100 Subject: Add a note about cythonize in the quickstart documentation (GH-4879) --- docs/src/quickstart/build.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/src/quickstart/build.rst b/docs/src/quickstart/build.rst index 5d9e8a307..3cbcfa087 100644 --- a/docs/src/quickstart/build.rst +++ b/docs/src/quickstart/build.rst @@ -18,6 +18,10 @@ one may want to read more about There are several ways to build Cython code: - Write a setuptools ``setup.py``. 
This is the normal and recommended way. + - Run the ``cythonize`` command-line utility. This is a good approach for + compiling a single Cython source file directly to an extension. + A source file can be built "in place" (so that the extension module is created + next to the source file, ready to be imported) with ``cythonize -i filename.pyx``. - Use :ref:`Pyximport`, importing Cython ``.pyx`` files as if they were ``.py`` files (using setuptools to compile and build in the background). This method is easier than writing a ``setup.py``, but is not very flexible. -- cgit v1.2.1 From e4ef0c1e807aab8c20fb08b638550c912c166be3 Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 12 Jul 2022 19:00:58 +0100 Subject: Error on memoryview argument capture on 0.29.x (GH-4849) I don't believe it's easy to fix https://github.com/cython/cython/issues/4798 on 0.29.x Therefore, generate an error message that explains two possible workarounds. This at least makes sure that people don't end up with mysterious crashes. 
--- Cython/Compiler/Nodes.py | 15 +++++++++++++-- tests/memoryview/memslice.pyx | 20 ++++++++++++++++++++ 2 files changed, 33 insertions(+), 2 deletions(-) diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 339b1fa04..743d6959b 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -2637,8 +2637,11 @@ class CFuncDefNode(FuncDefNode): def put_into_closure(entry): if entry.in_closure and not arg.default: code.putln('%s = %s;' % (entry.cname, entry.original_cname)) - code.put_var_incref(entry) - code.put_var_giveref(entry) + if entry.type.is_memoryviewslice: + code.put_incref_memoryviewslice(entry.cname, have_gil=True) + else: + code.put_var_incref(entry) + code.put_var_giveref(entry) for arg in self.args: put_into_closure(scope.lookup_here(arg.name)) @@ -3234,6 +3237,14 @@ class DefNode(FuncDefNode): # Move arguments into closure if required def put_into_closure(entry): if entry.in_closure: + if entry.type.is_memoryviewslice: + error( + self.pos, + "Referring to a memoryview typed argument directly in a nested closure function " + "is not supported in Cython 0.x. " + "Either upgrade to Cython 3, or assign the argument to a local variable " + "and use that in the nested function." 
+ ) code.putln('%s = %s;' % (entry.cname, entry.original_cname)) if entry.xdecref_cleanup: # mostly applies to the starstar arg - this can sometimes be NULL diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx index 24af61e17..ccf760c21 100644 --- a/tests/memoryview/memslice.pyx +++ b/tests/memoryview/memslice.pyx @@ -2549,3 +2549,23 @@ def test_const_buffer(const int[:] a): cdef const int[:] c = a print(a[0]) print(c[-1]) + +cdef arg_in_closure_cdef(int [:] a): + def inner(): + return (a[0], a[1]) + return inner + +def test_arg_in_closure_cdef(a): + """ + >>> A = IntMockBuffer("A", range(6), shape=(6,)) + >>> inner = test_arg_in_closure_cdef(A) + acquired A + >>> inner() + (0, 1) + + The assignment below is just to avoid printing what was collected + >>> del inner; ignore_me = gc.collect() + released A + """ + return arg_in_closure_cdef(a) + -- cgit v1.2.1 From 858b1a5ad2237cb439965450c221a15dfedd1295 Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 13 Jul 2022 11:18:14 +0100 Subject: Disable co_varnames identity check on Python 3.11 (GH-4850) CPython 3.11 no longer stores the varnames tuple. Instead, it stores it as part of a larger list of names and calculates it dynamically on request. --- tests/run/tuple_constants.pyx | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/run/tuple_constants.pyx b/tests/run/tuple_constants.pyx index f60d5d818..fa5794cf7 100644 --- a/tests/run/tuple_constants.pyx +++ b/tests/run/tuple_constants.pyx @@ -36,7 +36,12 @@ def test_deduplicated_args(): # are generated often with the same argument names. 
Therefore it's worth ensuring that # they are correctly deduplicated import sys - if not hasattr(sys, "pypy_version_info"): # test doesn't work on PyPy (which is probably fair enough) + check_identity_of_co_varnames = ( + not hasattr(sys, "pypy_version_info") and # test doesn't work on PyPy (which is probably fair enough) + sys.version_info < (3, 11) # on Python 3.11 co_varnames returns a new, dynamically-calculated tuple + # each time it is run + ) + if check_identity_of_co_varnames: assert func1.__code__.co_varnames is func2.__code__.co_varnames @cython.test_assert_path_exists("//TupleNode", -- cgit v1.2.1 From f298b6af61a0ab5e8e0b53907ea5080529dd98e4 Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 13 Jul 2022 11:21:24 +0100 Subject: Avoid raising StopIteration in "__next__" if possible (GH-4844) Fixes https://github.com/cython/cython/issues/3447 --- Cython/Compiler/Code.pxd | 1 + Cython/Compiler/Code.py | 15 ++++++- Cython/Compiler/FlowControl.pxd | 2 + Cython/Compiler/FlowControl.py | 8 ++++ Cython/Compiler/Naming.py | 1 + Cython/Compiler/Nodes.py | 26 +++++++++++++ Cython/Compiler/Symtab.py | 3 ++ tests/run/funcexc_iter_T228.pyx | 86 +++++++++++++++++++++++++++++++++++++++++ 8 files changed, 140 insertions(+), 2 deletions(-) diff --git a/Cython/Compiler/Code.pxd b/Cython/Compiler/Code.pxd index e17e0fb1d..59779f8bc 100644 --- a/Cython/Compiler/Code.pxd +++ b/Cython/Compiler/Code.pxd @@ -54,6 +54,7 @@ cdef class FunctionState: cdef public object closure_temps cdef public bint should_declare_error_indicator cdef public bint uses_error_indicator + cdef public bint error_without_exception @cython.locals(n=size_t) cpdef new_label(self, name=*) diff --git a/Cython/Compiler/Code.py b/Cython/Compiler/Code.py index 4695b240c..4c67ac400 100644 --- a/Cython/Compiler/Code.py +++ b/Cython/Compiler/Code.py @@ -691,6 +691,7 @@ class LazyUtilityCode(UtilityCodeBase): class FunctionState(object): # return_label string function return point label # error_label string error 
catch point label + # error_without_exception boolean Can go to the error label without an exception (e.g. __next__ can return NULL) # continue_label string loop continue point label # break_label string loop break point label # return_from_error_cleanup_label string @@ -739,6 +740,8 @@ class FunctionState(object): self.should_declare_error_indicator = False self.uses_error_indicator = False + self.error_without_exception = False + # safety checks def validate_exit(self): @@ -2332,8 +2335,16 @@ class CCodeWriter(object): if method_noargs in method_flags: # Special NOARGS methods really take no arguments besides 'self', but PyCFunction expects one. func_cname = Naming.method_wrapper_prefix + func_cname - self.putln("static PyObject *%s(PyObject *self, CYTHON_UNUSED PyObject *arg) {return %s(self);}" % ( - func_cname, entry.func_cname)) + self.putln("static PyObject *%s(PyObject *self, CYTHON_UNUSED PyObject *arg) {" % func_cname) + func_call = "%s(self)" % entry.func_cname + if entry.name == "__next__": + self.putln("PyObject *res = %s;" % func_call) + # tp_iternext can return NULL without an exception + self.putln("if (!res && !PyErr_Occurred()) { PyErr_SetNone(PyExc_StopIteration); }") + self.putln("return res;") + else: + self.putln("return %s;" % func_call) + self.putln("}") return func_cname # GIL methods diff --git a/Cython/Compiler/FlowControl.pxd b/Cython/Compiler/FlowControl.pxd index c876ee3b1..4a8ef19c1 100644 --- a/Cython/Compiler/FlowControl.pxd +++ b/Cython/Compiler/FlowControl.pxd @@ -58,6 +58,8 @@ cdef class ControlFlow: cdef public dict assmts + cdef public Py_ssize_t in_try_block + cpdef newblock(self, ControlBlock parent=*) cpdef nextblock(self, ControlBlock parent=*) cpdef bint is_tracked(self, entry) diff --git a/Cython/Compiler/FlowControl.py b/Cython/Compiler/FlowControl.py index 4e0160e41..4018ff851 100644 --- a/Cython/Compiler/FlowControl.py +++ b/Cython/Compiler/FlowControl.py @@ -110,6 +110,7 @@ class ControlFlow(object): entries set 
tracked entries loops list stack for loop descriptors exceptions list stack for exception descriptors + in_try_block int track if we're in a try...except or try...finally block """ def __init__(self): @@ -122,6 +123,7 @@ class ControlFlow(object): self.exit_point = ExitBlock() self.blocks.add(self.exit_point) self.block = self.entry_point + self.in_try_block = 0 def newblock(self, parent=None): """Create floating block linked to `parent` if given. @@ -1166,7 +1168,9 @@ class ControlFlowAnalysis(CythonTransform): ## XXX: children nodes self.flow.block.add_child(entry_point) self.flow.nextblock() + self.flow.in_try_block += 1 self._visit(node.body) + self.flow.in_try_block -= 1 self.flow.exceptions.pop() # After exception @@ -1226,7 +1230,9 @@ class ControlFlowAnalysis(CythonTransform): self.flow.block = body_block body_block.add_child(entry_point) self.flow.nextblock() + self.flow.in_try_block += 1 self._visit(node.body) + self.flow.in_try_block -= 1 self.flow.exceptions.pop() if self.flow.loops: self.flow.loops[-1].exceptions.pop() @@ -1245,6 +1251,8 @@ class ControlFlowAnalysis(CythonTransform): if self.flow.exceptions: self.flow.block.add_child(self.flow.exceptions[-1].entry_point) self.flow.block = None + if self.flow.in_try_block: + node.in_try_block = True return node def visit_ReraiseStatNode(self, node): diff --git a/Cython/Compiler/Naming.py b/Cython/Compiler/Naming.py index 7845e4aa1..96c0b8fbd 100644 --- a/Cython/Compiler/Naming.py +++ b/Cython/Compiler/Naming.py @@ -126,6 +126,7 @@ cur_scope_cname = pyrex_prefix + "cur_scope" enc_scope_cname = pyrex_prefix + "enc_scope" frame_cname = pyrex_prefix + "frame" frame_code_cname = pyrex_prefix + "frame_code" +error_without_exception_cname = pyrex_prefix + "error_without_exception" binding_cfunc = pyrex_prefix + "binding_PyCFunctionType" fused_func_prefix = pyrex_prefix + 'fuse_' quick_temp_cname = pyrex_prefix + "temp" # temp variable for quick'n'dirty temping diff --git a/Cython/Compiler/Nodes.py 
b/Cython/Compiler/Nodes.py index 15c82f571..298cc5705 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -2231,7 +2231,14 @@ class FuncDefNode(StatNode, BlockNode): # code.put_trace_exception() assure_gil('error') + if code.funcstate.error_without_exception: + tempvardecl_code.putln( + "int %s = 0; /* StopIteration */" % Naming.error_without_exception_cname + ) + code.putln("if (!%s) {" % Naming.error_without_exception_cname) code.put_add_traceback(self.entry.qualified_name) + if code.funcstate.error_without_exception: + code.putln("}") else: warning(self.entry.pos, "Unraisable exception in function '%s'." % @@ -6703,11 +6710,15 @@ class RaiseStatNode(StatNode): # exc_value ExprNode or None # exc_tb ExprNode or None # cause ExprNode or None + # + # set in FlowControl + # in_try_block bool child_attrs = ["exc_type", "exc_value", "exc_tb", "cause"] is_terminator = True builtin_exc_name = None wrap_tuple_value = False + in_try_block = False def analyse_expressions(self, env): if self.exc_type: @@ -6736,9 +6747,19 @@ class RaiseStatNode(StatNode): not (exc.args or (exc.arg_tuple is not None and exc.arg_tuple.args))): exc = exc.function # extract the exception type if exc.is_name and exc.entry.is_builtin: + from . import Symtab self.builtin_exc_name = exc.name if self.builtin_exc_name == 'MemoryError': self.exc_type = None # has a separate implementation + elif (self.builtin_exc_name == 'StopIteration' and + env.is_local_scope and env.name == "__next__" and + env.parent_scope and env.parent_scope.is_c_class_scope and + not self.in_try_block): + # tp_iternext is allowed to return NULL without raising StopIteration. 
+ # For the sake of simplicity, only allow this to happen when not in + # a try block + self.exc_type = None + return self nogil_check = Node.gil_error @@ -6749,6 +6770,11 @@ class RaiseStatNode(StatNode): if self.builtin_exc_name == 'MemoryError': code.putln('PyErr_NoMemory(); %s' % code.error_goto(self.pos)) return + elif self.builtin_exc_name == 'StopIteration' and not self.exc_type: + code.putln('%s = 1;' % Naming.error_without_exception_cname) + code.putln('%s;' % code.error_goto(None)) + code.funcstate.error_without_exception = True + return if self.exc_type: self.exc_type.generate_evaluation_code(code) diff --git a/Cython/Compiler/Symtab.py b/Cython/Compiler/Symtab.py index f657e7b7c..1500c7441 100644 --- a/Cython/Compiler/Symtab.py +++ b/Cython/Compiler/Symtab.py @@ -342,6 +342,7 @@ class Scope(object): # is_builtin_scope boolean Is the builtin scope of Python/Cython # is_py_class_scope boolean Is a Python class scope # is_c_class_scope boolean Is an extension type scope + # is_local_scope boolean Is a local (i.e. function/method/generator) scope # is_closure_scope boolean Is a closure scope # is_generator_expression_scope boolean A subset of closure scope used for generator expressions # is_passthrough boolean Outer scope is passed directly @@ -360,6 +361,7 @@ class Scope(object): is_py_class_scope = 0 is_c_class_scope = 0 is_closure_scope = 0 + is_local_scope = False is_generator_expression_scope = 0 is_comprehension_scope = 0 is_passthrough = 0 @@ -1886,6 +1888,7 @@ class ModuleScope(Scope): class LocalScope(Scope): + is_local_scope = True # Does the function have a 'with gil:' block? 
has_with_gil_block = False diff --git a/tests/run/funcexc_iter_T228.pyx b/tests/run/funcexc_iter_T228.pyx index 4b81166f6..40db3afb2 100644 --- a/tests/run/funcexc_iter_T228.pyx +++ b/tests/run/funcexc_iter_T228.pyx @@ -65,3 +65,89 @@ def double_raise(py_iterator): print(sys.exc_info()[0] is ValueError or sys.exc_info()[0]) a = list(cy_iterator()) print(sys.exc_info()[0] is ValueError or sys.exc_info()[0]) + + +###### Tests to do with the optimization of StopIteration to "return NULL" ####### +# we're mainly checking that +# 1. Calling __next__ manually doesn't crash (the wrapper function adds the exception) +# 2. if you raise a value then that value gets raised +# 3. putting the exception in various places try...finally / try...except blocks works + +def call_next_directly(): + """ + >>> call_next_directly() + Traceback (most recent call last): + ... + StopIteration + """ + cy_iterator().__next__() + +cdef class cy_iter_many_options: + cdef what + def __init__(self, what): + self.what = what + + def __iter__(self): + return self + + def __next__(self): + if self.what == "StopIteration in finally no return": + try: + raise StopIteration + finally: + print "Finally..." 
+ elif self.what == "StopIteration in finally return": + try: + raise StopIteration + finally: + self.what = None + return "in finally" # but will stop iterating next time + elif self.what == "StopIteration from finally": + try: + raise ValueError + finally: + raise StopIteration + elif self.what == "catch StopIteration": + try: + raise StopIteration + except StopIteration: + self.what = None + return "in except" # but will stop next time + elif self.what == "don't catch StopIteration": + try: + raise StopIteration + except ValueError: + return 0 + elif self.what == "StopIteration from except": + try: + raise ValueError + except ValueError: + raise StopIteration + elif self.what == "StopIteration with value": + raise StopIteration("I'm a value!") + elif self.what is None: + raise StopIteration + else: + raise ValueError("self.what didn't match anything") + +def test_cy_iter_many_options(option): + """ + >>> test_cy_iter_many_options("StopIteration in finally no return") + Finally... + [] + >>> test_cy_iter_many_options("StopIteration in finally return") + ['in finally'] + >>> test_cy_iter_many_options("StopIteration from finally") + [] + >>> test_cy_iter_many_options("catch StopIteration") + ['in except'] + >>> test_cy_iter_many_options("don't catch StopIteration") + [] + >>> try: + ... cy_iter_many_options("StopIteration with value").__next__() + ... except StopIteration as e: + ... print(e.args) + ("I'm a value!",) + """ + return list(cy_iter_many_options(option)) + -- cgit v1.2.1 From c8df6a37318770ade07645c40e7751b5907348a8 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 13 Jul 2022 13:09:14 +0200 Subject: Update changelog. 
--- CHANGES.rst | 54 ++++++++++++++++++++++++++++++++++++------------------ 1 file changed, 36 insertions(+), 18 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 38dc34781..94abc418e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -11,12 +11,15 @@ Features added * A new decorator ``@cython.dataclasses.dataclass`` was implemented that provides compile time dataclass generation capabilities to ``cdef`` classes (extension types). Patch by David Woods. (Github issue :issue:`2903`). ``kw_only`` dataclasses - added by Yury Sokov (Github issue :issue:`4794`) + added by Yury Sokov. (Github issue :issue:`4794`) * Named expressions (PEP 572) aka. assignment expressions (aka. the walrus operator ``:=``) were implemented. Patch by David Woods. (Github issue :issue:`2636`) +* Cython avoids raising ``StopIteration`` in ``__next__`` methods when possible. + Patch by David Woods. (Github issue :issue:`3447`) + * Some C++ library declarations were extended and fixed. Patches by Max Bachmann, Till Hoffmann, Julien Jerphanion, Wenjun Si. (Github issues :issue:`4530`, :issue:`4528`, :issue:`4710`, :issue:`4746`, @@ -60,6 +63,13 @@ Bugs fixed since it is relevant when passing them e.g. as argument into other fused functions. Patch by David Woods. (Github issue :issue:`4644`) +* The ``__self__`` attribute of fused functions reports its availability correctly + with ``hasattr()``. Patch by David Woods. + (Github issue :issue:`4808`) + +* ``pyximport`` no longer uses the deprecated ``imp`` module. + Patch by Matus Valo. (Github issue :issue:`4560`) + * The generated C code failed to compile in CPython 3.11a4 and later. (Github issue :issue:`4500`) @@ -72,7 +82,7 @@ Bugs fixed (Github issue :issue:`4329`) * Improve conversion between function pointers with non-identical but - compatible exception specifications. Patches by David Woods. + compatible exception specifications. Patches by David Woods. 
(Github issues :issue:`4770`, :issue:`4689`) * Improve compatibility with forthcoming CPython 3.12 release. @@ -80,7 +90,11 @@ Bugs fixed * Limited API C preprocessor warning is compatible with MSVC. Patch by Victor Molina Garcia. (Github issue :issue:`4826`) -* C compiler warnings fixed. Patch by mwtian. (Github issue :issue:`4831`) +* Some C compiler warnings were fixed. + Patch by mwtian. (Github issue :issue:`4831`) + +* The parser allowed some invalid spellings of ``...``. + Patch by 0dminnimda. (Github issue :issue:`4868`) * Includes all bug-fixes from the 0.29 branch up to the :ref:`0.29.31` release. @@ -93,9 +107,13 @@ Other changes for users who did not expect ``None`` to be allowed as input. To allow ``None``, use ``typing.Optional`` as in ``func(x: Optional[list])``. ``None`` is also automatically allowed when it is used as default argument, i.e. ``func(x: list = None)``. - Note that, for backwards compatibility reasons, this does not apply when using Cython's - C notation, as in ``func(list x)``. Here, ``None`` is still allowed, as always. - (Github issues :issue:`3883`, :issue:`2696`, :issue:`4669`) + ``int`` and ``float`` are now also recognised in type annotations and restrict the + value type at runtime. They were previously ignored. + Note that, for backwards compatibility reasons, the new behaviour does not apply when using + Cython's C notation, as in ``func(list x)``. Here, ``None`` is still allowed, as always. + Also, the ``annotation_typing`` directive can now be enabled and disabled more finely + within the module. + (Github issues :issue:`3883`, :issue:`2696`, :issue:`4669`, :issue:`4606`, :issue:`4886`) * The compile-time ``DEF`` and ``IF`` statements are deprecated and generate a warning. They should be replaced with normal constants, code generation or C macros. 
@@ -1008,31 +1026,31 @@ Bugs fixed * Use ``importlib.util.find_spec()`` instead of the deprecated ``importlib.find_loader()`` function when setting up the package path at import-time. Patch by Matti Picus. - (Github issue #4764) + (Github issue :issue:`4764`) * Require the C compiler to support the two-arg form of ``va_start`` on Python 3.10 and higher. Patch by Thomas Caswell. - (Github issue #4820) + (Github issue :issue:`4820`) * Make ``fused_type`` subscriptable in Shadow.py. Patch by Pfebrer. - (Github issue #4842) + (Github issue :issue:`4842`) * Fix the incorrect code generation of the target type in ``bytearray`` loops. Patch by Kenrick Everett. - (Github issue #4108) + (Github issue :issue:`4108`) * Silence some GCC ``-Wconversion`` warnings in C utility code. Patch by Lisandro Dalcin. - (Github issue #4854) + (Github issue :issue:`4854`) * Stop tuple multiplication being ignored in expressions such as ``[*(1,) * 2]``. Patch by David Woods. - (Github issue #4864) + (Github issue :issue:`4864`) * Ensure that object buffers (e.g. ``ndarray[object, ndim=1]``) containing ``NULL`` pointers are safe to use, returning ``None`` instead of the ``NULL`` pointer. Patch by Sebastian Berg. - (Github issue #4859) + (Github issue :issue:`4859`) .. _0.29.30: @@ -1045,7 +1063,7 @@ Bugs fixed * The GIL handling changes in 0.29.29 introduced a regression where objects could be deallocated without holding the GIL. - (Github issue :issue`4796`) + (Github issue :issue:`4796`) .. _0.29.29: @@ -1059,7 +1077,7 @@ Features added * Avoid acquiring the GIL at the end of nogil functions. This change was backported in order to avoid generating wrong C code that would trigger C compiler warnings with tracing support enabled. - Backport by Oleksandr Pavlyk. (Github issue :issue`4637`) + Backport by Oleksandr Pavlyk. 
(Github issue :issue:`4637`) Bugs fixed ---------- @@ -1075,15 +1093,15 @@ Bugs fixed * Cython now correctly generates Python methods for both the provided regular and reversed special numeric methods of extension types. - Patch by David Woods. (Github issue :issue`4750`) + Patch by David Woods. (Github issue :issue:`4750`) * Calling unbound extension type methods without arguments could raise an ``IndexError`` instead of a ``TypeError``. - Patch by David Woods. (Github issue :issue`4779`) + Patch by David Woods. (Github issue :issue:`4779`) * Calling unbound ``.__contains__()`` super class methods on some builtin base types could trigger an infinite recursion. - Patch by David Woods. (Github issue :issue`4785`) + Patch by David Woods. (Github issue :issue:`4785`) * The C union type in pure Python mode mishandled some field names. Patch by Jordan Brière. (Github issue :issue:`4727`) -- cgit v1.2.1 From 565f176f23dd91287d51cd44883af62fa0aaafa0 Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 6 Jul 2022 20:47:04 +0100 Subject: Add tests for NULL objects in memoryviews (GH-4871) Follow up on https://github.com/cython/cython/pull/4859 by adding tests for memoryviews too. Additional refactoring to avoid invalid decref calls on test failures. Instead, the item is safely cleared directly before the access. 
--- tests/buffers/bufaccess.pyx | 15 +++++---------- tests/memoryview/memslice.pyx | 45 ++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 49 insertions(+), 11 deletions(-) diff --git a/tests/buffers/bufaccess.pyx b/tests/buffers/bufaccess.pyx index 764d65db6..3144f613d 100644 --- a/tests/buffers/bufaccess.pyx +++ b/tests/buffers/bufaccess.pyx @@ -10,7 +10,7 @@ from __future__ import unicode_literals from cpython.object cimport PyObject -from cpython.ref cimport Py_INCREF, Py_DECREF +from cpython.ref cimport Py_INCREF, Py_DECREF, Py_CLEAR cimport cython import sys @@ -1013,17 +1013,14 @@ def check_object_nulled_1d(MockBuffer[object, ndim=1] buf, int idx, obj): >>> rc1 = get_refcount(a) >>> A = ObjectMockBuffer(None, [a, a]) >>> check_object_nulled_1d(A, 0, a) - >>> decref(a) # new reference "added" to A >>> check_object_nulled_1d(A, 1, a) - >>> decref(a) >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,)) >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride - >>> decref(a) >>> get_refcount(a) == rc1 True """ - cdef void **data = buf.buffer - data[idx] = NULL + cdef PyObject **data = buf.buffer + Py_CLEAR(data[idx]) res = buf[idx] # takes None buf[idx] = obj return res @@ -1037,14 +1034,12 @@ def check_object_nulled_2d(MockBuffer[object, ndim=2] buf, int idx1, int idx2, o >>> rc1 = get_refcount(a) >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2)) >>> check_object_nulled_2d(A, 0, 0, a) - >>> decref(a) # new reference "added" to A >>> check_object_nulled_2d(A, 1, 1, a) - >>> decref(a) >>> get_refcount(a) == rc1 True """ - cdef void **data = buf.buffer - data[idx1 + 2*idx2] = NULL + cdef PyObject **data = buf.buffer + Py_CLEAR(data[idx1 + 2*idx2]) res = buf[idx1, idx2] # takes None buf[idx1, idx2] = obj return res diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx index 4e06c4f41..5f6134135 100644 --- a/tests/memoryview/memslice.pyx +++ b/tests/memoryview/memslice.pyx @@ -7,7 +7,7 @@ from __future__ import 
unicode_literals from cpython.object cimport PyObject -from cpython.ref cimport Py_INCREF, Py_DECREF +from cpython.ref cimport Py_INCREF, Py_DECREF, Py_CLEAR cimport cython from cython cimport view @@ -1134,6 +1134,49 @@ def assign_temporary_to_object(object[:] buf): """ buf[1] = {3-2: 2+(2*4)-2} +@testcase +def check_object_nulled_1d(object[:] buf, int idx, obj): + """ + See comments on printbuf_object above. + + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a]) + >>> check_object_nulled_1d(A, 0, a) + >>> check_object_nulled_1d(A, 1, a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,)) + >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride + >>> get_refcount(a) == rc1 + True + """ + cdef ObjectMockBuffer omb = buf.base + cdef PyObject **data = (omb.buffer) + Py_CLEAR(data[idx]) + res = buf[idx] # takes None + buf[idx] = obj + return res + +@testcase +def check_object_nulled_2d(object[:, ::1] buf, int idx1, int idx2, obj): + """ + See comments on printbuf_object above. 
+ + >>> a = object() + >>> rc1 = get_refcount(a) + >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2)) + >>> check_object_nulled_2d(A, 0, 0, a) + >>> check_object_nulled_2d(A, 1, 1, a) + >>> get_refcount(a) == rc1 + True + """ + cdef ObjectMockBuffer omb = buf.base + cdef PyObject **data = (omb.buffer) + Py_CLEAR(data[idx1 + 2*idx2]) + res = buf[idx1, idx2] # takes None + buf[idx1, idx2] = obj + return res + + # # Test __cythonbufferdefaults__ # -- cgit v1.2.1 From 905fd831dd8ec6a674076cf2d1559e38887c1547 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 16 Jul 2022 07:19:12 +0100 Subject: Avoid conflict between propery names and function variables (GH-4845) Fixes https://github.com/cython/cython/issues/4836 Bug introduced by https://github.com/cython/cython/commit/8c7b0f3fb745aa7bd0afedfbeb862eecc5fdff0c --- Cython/Compiler/ParseTreeTransforms.py | 6 ++++++ Cython/Compiler/Visitor.py | 8 +++++--- tests/run/decorators.pyx | 23 +++++++++++++++++++++++ 3 files changed, 34 insertions(+), 3 deletions(-) diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py index 7e9207eea..89620cd45 100644 --- a/Cython/Compiler/ParseTreeTransforms.py +++ b/Cython/Compiler/ParseTreeTransforms.py @@ -2293,6 +2293,12 @@ if VALUE is not None: assmt.analyse_declarations(env) return assmt + def visit_func_outer_attrs(self, node): + # any names in the outer attrs should not be looked up in the function "seen_vars_stack" + stack = self.seen_vars_stack.pop() + super(AnalyseDeclarationsTransform, self).visit_func_outer_attrs(node) + self.seen_vars_stack.append(stack) + def visit_ScopedExprNode(self, node): env = self.current_env() node.analyse_declarations(env) diff --git a/Cython/Compiler/Visitor.py b/Cython/Compiler/Visitor.py index 4eabd6b83..d9be14df1 100644 --- a/Cython/Compiler/Visitor.py +++ b/Cython/Compiler/Visitor.py @@ -380,13 +380,15 @@ class EnvTransform(CythonTransform): self.env_stack.pop() def visit_FuncDefNode(self, node): - 
outer_attrs = node.outer_attrs - self.visitchildren(node, attrs=outer_attrs) + self.visit_func_outer_attrs(node) self.enter_scope(node, node.local_scope) - self.visitchildren(node, attrs=None, exclude=outer_attrs) + self.visitchildren(node, attrs=None, exclude=node.outer_attrs) self.exit_scope() return node + def visit_func_outer_attrs(self, node): + self.visitchildren(node, attrs=node.outer_attrs) + def visit_GeneratorBodyDefNode(self, node): self._process_children(node) return node diff --git a/tests/run/decorators.pyx b/tests/run/decorators.pyx index 54623e0cb..fc20235e2 100644 --- a/tests/run/decorators.pyx +++ b/tests/run/decorators.pyx @@ -81,3 +81,26 @@ def outer(arg1, arg2): def method(): return [4] return method() + +class HasProperty(object): + """ + >>> hp = HasProperty() + >>> hp.value + 0 + >>> hp.value = 1 + >>> hp.value + 1 + """ + def __init__(self) -> None: + self._value = 0 + + @property + def value(self) -> int: + return self._value + + # https://github.com/cython/cython/issues/4836 + # The variable tracker was confusing "value" in the decorator + # for "value" in the argument list + @value.setter + def value(self, value: int): + self._value = value -- cgit v1.2.1 From c5e6c183436d7477d15fd1e02c6cd9144bb74708 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 16 Jul 2022 09:25:26 +0100 Subject: Fixed over-zealous optimization of append attribute usage to "__Pyx_PyObject_Append" (GH-4834) Fixes https://github.com/cython/cython/issues/4828 --- Cython/Compiler/ExprNodes.py | 2 +- Cython/Compiler/Optimize.py | 2 +- tests/run/append.pyx | 34 ++++++++++++++++++++++++++++++++++ 3 files changed, 36 insertions(+), 2 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 4c325891a..ab228c552 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -330,8 +330,8 @@ class ExprNode(Node): # is_starred boolean Is a starred expression (e.g. 
'*a') # use_managed_ref boolean use ref-counted temps/assignments/etc. # result_is_used boolean indicates that the result will be dropped and the - # is_numpy_attribute boolean Is a Numpy module attribute # result_code/temp_result can safely be set to None + # is_numpy_attribute boolean Is a Numpy module attribute # annotation ExprNode or None PEP526 annotation for names or expressions result_ctype = None diff --git a/Cython/Compiler/Optimize.py b/Cython/Compiler/Optimize.py index a601d18c9..cea5970f6 100644 --- a/Cython/Compiler/Optimize.py +++ b/Cython/Compiler/Optimize.py @@ -3026,7 +3026,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin, """Optimistic optimisation as X.append() is almost always referring to a list. """ - if len(args) != 2 or node.result_is_used: + if len(args) != 2 or node.result_is_used or node.function.entry: return node return ExprNodes.PythonCapiCallNode( diff --git a/tests/run/append.pyx b/tests/run/append.pyx index 1976780d5..dcc3fe7c9 100644 --- a/tests/run/append.pyx +++ b/tests/run/append.pyx @@ -1,3 +1,5 @@ +cimport cython + class A: def append(self, x): print u"appending", x @@ -94,3 +96,35 @@ def method_name(): 'append' """ return [].append.__name__ + +@cython.test_assert_path_exists( + '//PythonCapiCallNode') +def append_optimized(probably_list): + """ + >>> l = [] + >>> append_optimized(l) + >>> l + [1] + """ + probably_list.append(1) + +cdef class AppendBug: + # https://github.com/cython/cython/issues/4828 + # if the attribute "append" is found it shouldn't be replaced with + # __Pyx_PyObject_Append + cdef object append + def __init__(self, append): + self.append = append + +@cython.test_fail_if_path_exists( + '//PythonCapiCallNode') +def specific_attribute(AppendBug a): + """ + >>> def append_to_default_arg(a, arg=[]): + ... arg.append(a) + ... 
return arg + >>> specific_attribute(AppendBug(append_to_default_arg)) + >>> append_to_default_arg(None) + [1, None] + """ + a.append(1) -- cgit v1.2.1 From 2f1c338ac4e7333823be84cc0d8df80acc5e23f3 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 16 Jul 2022 09:25:26 +0100 Subject: Fixed over-zealous optimization of append attribute usage to "__Pyx_PyObject_Append" (GH-4834) Fixes https://github.com/cython/cython/issues/4828 --- Cython/Compiler/ExprNodes.py | 2 +- Cython/Compiler/Optimize.py | 2 +- tests/run/append.pyx | 34 ++++++++++++++++++++++++++++++++++ 3 files changed, 36 insertions(+), 2 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 69632a4fe..2c5d70936 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -306,8 +306,8 @@ class ExprNode(Node): # Cached result of subexpr_nodes() # use_managed_ref boolean use ref-counted temps/assignments/etc. # result_is_used boolean indicates that the result will be dropped and the - # is_numpy_attribute boolean Is a Numpy module attribute # result_code/temp_result can safely be set to None + # is_numpy_attribute boolean Is a Numpy module attribute # annotation ExprNode or None PEP526 annotation for names or expressions result_ctype = None diff --git a/Cython/Compiler/Optimize.py b/Cython/Compiler/Optimize.py index 3cb77efe2..7e9435ba0 100644 --- a/Cython/Compiler/Optimize.py +++ b/Cython/Compiler/Optimize.py @@ -2860,7 +2860,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin, """Optimistic optimisation as X.append() is almost always referring to a list. 
""" - if len(args) != 2 or node.result_is_used: + if len(args) != 2 or node.result_is_used or node.function.entry: return node return ExprNodes.PythonCapiCallNode( diff --git a/tests/run/append.pyx b/tests/run/append.pyx index 1976780d5..dcc3fe7c9 100644 --- a/tests/run/append.pyx +++ b/tests/run/append.pyx @@ -1,3 +1,5 @@ +cimport cython + class A: def append(self, x): print u"appending", x @@ -94,3 +96,35 @@ def method_name(): 'append' """ return [].append.__name__ + +@cython.test_assert_path_exists( + '//PythonCapiCallNode') +def append_optimized(probably_list): + """ + >>> l = [] + >>> append_optimized(l) + >>> l + [1] + """ + probably_list.append(1) + +cdef class AppendBug: + # https://github.com/cython/cython/issues/4828 + # if the attribute "append" is found it shouldn't be replaced with + # __Pyx_PyObject_Append + cdef object append + def __init__(self, append): + self.append = append + +@cython.test_fail_if_path_exists( + '//PythonCapiCallNode') +def specific_attribute(AppendBug a): + """ + >>> def append_to_default_arg(a, arg=[]): + ... arg.append(a) + ... return arg + >>> specific_attribute(AppendBug(append_to_default_arg)) + >>> append_to_default_arg(None) + [1, None] + """ + a.append(1) -- cgit v1.2.1 From a2e4139993df6bd52a5f3db670dc1ca55fdedc9e Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 16 Jul 2022 09:34:11 +0100 Subject: Give better errors on size changes of PyVarObjects and reduce false positives (GH-4869) Fixes https://github.com/cython/cython/issues/4827 Some of the patch was copied from https://src.fedoraproject.org/rpms/Cython/pull-request/35#request_diff Allows the size of a type to be between basicsize and basicsize+itemsize since anything is this range is a reasonable size for a class to be, subject to implementations details of the object struct. Adds an explicit runtime test when an extern extension type is inherited from to make sure that it isn't a PyVarObject of unexpected size. 
--- Cython/Compiler/Nodes.py | 19 ++++- Cython/Compiler/PyrexTypes.py | 2 +- Cython/Utility/ExtensionTypes.c | 34 ++++++++ Cython/Utility/ImportExport.c | 24 ++++-- runtests.py | 1 + tests/errors/builtin_type_inheritance.pyx | 4 +- tests/pypy_bugs.txt | 3 + tests/run/builtin_type_inheritance_T608.pyx | 38 +-------- .../run/builtin_type_inheritance_T608_py2only.pyx | 42 ++++++++++ tests/run/extern_varobject_extensions.srctree | 94 ++++++++++++++++++++++ 10 files changed, 214 insertions(+), 47 deletions(-) create mode 100644 tests/run/builtin_type_inheritance_T608_py2only.pyx create mode 100644 tests/run/extern_varobject_extensions.srctree diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 298cc5705..751eb31f4 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -5242,7 +5242,8 @@ class CClassDefNode(ClassDefNode): error(base.pos, "Base class '%s' of type '%s' is final" % ( base_type, self.class_name)) elif base_type.is_builtin_type and \ - base_type.name in ('tuple', 'str', 'bytes'): + base_type.name in ('tuple', 'bytes'): + # str in Py2 is also included in this, but now checked at run-time error(base.pos, "inheritance from PyVarObject types like '%s' is not currently supported" % base_type.name) else: @@ -5511,6 +5512,22 @@ class CClassDefNode(ClassDefNode): )) code.putln("#endif") # if CYTHON_USE_TYPE_SPECS + base_type = type.base_type + while base_type: + if base_type.is_external and not base_type.objstruct_cname == "PyTypeObject": + # 'type' is special-cased because it is actually based on PyHeapTypeObject + # Variable length bases are allowed if the current class doesn't grow + code.putln("if (sizeof(%s%s) != sizeof(%s%s)) {" % ( + "" if type.typedef_flag else "struct ", type.objstruct_cname, + "" if base_type.typedef_flag else "struct ", base_type.objstruct_cname)) + code.globalstate.use_utility_code( + UtilityCode.load_cached("ValidateExternBase", "ExtensionTypes.c")) + code.put_error_if_neg(entry.pos, 
"__Pyx_validate_extern_base(%s)" % ( + type.base_type.typeptr_cname)) + code.putln("}") + break + base_type = base_type.base_type + code.putln("#if !CYTHON_COMPILING_IN_LIMITED_API") # FIXME: these still need to get initialised even with the limited-API for slot in TypeSlots.get_slot_table(code.globalstate.directives): diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py index 1316edddc..79e144ed1 100644 --- a/Cython/Compiler/PyrexTypes.py +++ b/Cython/Compiler/PyrexTypes.py @@ -1506,7 +1506,6 @@ class PyExtensionType(PyObjectType): # # name string # scope CClassScope Attribute namespace - # visibility string # typedef_flag boolean # base_type PyExtensionType or None # module_name string or None Qualified name of defining module @@ -1520,6 +1519,7 @@ class PyExtensionType(PyObjectType): # vtable_cname string Name of C method table definition # early_init boolean Whether to initialize early (as opposed to during module execution). # defered_declarations [thunk] Used to declare class hierarchies in order + # is_external boolean Defined in a extern block # check_size 'warn', 'error', 'ignore' What to do if tp_basicsize does not match # dataclass_fields OrderedDict nor None Used for inheriting from dataclasses diff --git a/Cython/Utility/ExtensionTypes.c b/Cython/Utility/ExtensionTypes.c index ec994a367..aa39a860a 100644 --- a/Cython/Utility/ExtensionTypes.c +++ b/Cython/Utility/ExtensionTypes.c @@ -564,3 +564,37 @@ static PyObject *{{func_name}}(PyObject *left, PyObject *right {{extra_arg_decl} } return __Pyx_NewRef(Py_NotImplemented); } + +/////////////// ValidateExternBase.proto /////////////// + +static int __Pyx_validate_extern_base(PyTypeObject *base); /* proto */ + +/////////////// ValidateExternBase /////////////// +//@requires: ObjectHandling.c::FormatTypeName + +static int __Pyx_validate_extern_base(PyTypeObject *base) { + Py_ssize_t itemsize; +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *py_itemsize; +#endif +#if 
!CYTHON_COMPILING_IN_LIMITED_API + itemsize = ((PyTypeObject *)base)->tp_itemsize; +#else + py_itemsize = PyObject_GetAttrString(base, "__itemsize__"); + if (!py_itemsize) + return -1; + itemsize = PyLong_AsSsize_t(py_itemsize); + Py_DECREF(py_itemsize); + py_itemsize = 0; + if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred()) + return -1; +#endif + if (itemsize) { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(base); + PyErr_Format(PyExc_TypeError, + "inheritance from PyVarObject types like '" __Pyx_FMT_TYPENAME "' not currently supported", b_name); + __Pyx_DECREF_TypeName(b_name); + return -1; + } + return 0; +} diff --git a/Cython/Utility/ImportExport.c b/Cython/Utility/ImportExport.c index 6ceba7efb..897657281 100644 --- a/Cython/Utility/ImportExport.c +++ b/Cython/Utility/ImportExport.c @@ -498,8 +498,10 @@ static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, PyObject *result = 0; char warning[200]; Py_ssize_t basicsize; + Py_ssize_t itemsize; #if CYTHON_COMPILING_IN_LIMITED_API PyObject *py_basicsize; + PyObject *py_itemsize; #endif result = PyObject_GetAttrString(module, class_name); @@ -513,6 +515,7 @@ static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, } #if !CYTHON_COMPILING_IN_LIMITED_API basicsize = ((PyTypeObject *)result)->tp_basicsize; + itemsize = ((PyTypeObject *)result)->tp_itemsize; #else py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); if (!py_basicsize) @@ -522,19 +525,30 @@ static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, py_basicsize = 0; if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) goto bad; + py_itemsize = PyObject_GetAttrString(result, "__itemsize__"); + if (!py_itemsize) + goto bad; + itemsize = PyLong_AsSsize_t(py_itemsize); + Py_DECREF(py_itemsize); + py_itemsize = 0; + if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; #endif - if ((size_t)basicsize < size) { + if ((size_t)(basicsize + itemsize) < size) { 
PyErr_Format(PyExc_ValueError, "%.200s.%.200s size changed, may indicate binary incompatibility. " "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); + module_name, class_name, size, basicsize+itemsize); goto bad; } - if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { + // varobjects almost have structs between basicsize and basicsize + itemsize + // but the struct isn't always one of the two limiting values + if (check_size == __Pyx_ImportType_CheckSize_Error && + ((size_t)basicsize > size || (size_t)(basicsize + itemsize) < size)) { PyErr_Format(PyExc_ValueError, "%.200s.%.200s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); + "Expected %zd from C header, got %zd-%zd from PyObject", + module_name, class_name, size, basicsize, basicsize+itemsize); goto bad; } else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { diff --git a/runtests.py b/runtests.py index 72608882a..a5c12e65e 100755 --- a/runtests.py +++ b/runtests.py @@ -467,6 +467,7 @@ VER_DEP_MODULES = { 'compile.extsetslice', 'compile.extdelslice', 'run.special_methods_T561_py2', + 'run.builtin_type_inheritance_T608_py2only', ]), (3,3) : (operator.lt, lambda x: x in ['build.package_compilation', 'build.cythonize_pep420_namespace', diff --git a/tests/errors/builtin_type_inheritance.pyx b/tests/errors/builtin_type_inheritance.pyx index 1c6ad31e1..a85f7a133 100644 --- a/tests/errors/builtin_type_inheritance.pyx +++ b/tests/errors/builtin_type_inheritance.pyx @@ -8,11 +8,9 @@ cdef class MyTuple(tuple): cdef class MyBytes(bytes): pass -cdef class MyStr(str): # only in Py2, but can't know that during compilation - pass +# str is also included in this in Py2, but checked at runtime instead _ERRORS = """ 5:19: inheritance from PyVarObject types like 'tuple' is not currently supported 8:19: inheritance 
from PyVarObject types like 'bytes' is not currently supported -11:17: inheritance from PyVarObject types like 'str' is not currently supported """ diff --git a/tests/pypy_bugs.txt b/tests/pypy_bugs.txt index 1004a93e4..5a27265ee 100644 --- a/tests/pypy_bugs.txt +++ b/tests/pypy_bugs.txt @@ -61,3 +61,6 @@ run.exttype_dealloc # bugs in cpyext run.special_methods_T561 run.special_methods_T561_py2 + +# unicode is a PyVarObject on PyPy3 +run.builtin_type_inheritance_T608 diff --git a/tests/run/builtin_type_inheritance_T608.pyx b/tests/run/builtin_type_inheritance_T608.pyx index 1214b6841..d03558a25 100644 --- a/tests/run/builtin_type_inheritance_T608.pyx +++ b/tests/run/builtin_type_inheritance_T608.pyx @@ -1,42 +1,6 @@ # ticket: t608 -cdef class MyInt(int): - """ - >>> MyInt(2) == 2 - True - >>> MyInt(2).attr is None - True - """ - cdef readonly object attr - -cdef class MyInt2(int): - """ - >>> MyInt2(2) == 2 - True - >>> MyInt2(2).attr is None - True - >>> MyInt2(2).test(3) - 5 - """ - cdef readonly object attr - - def test(self, arg): - return self._test(arg) - - cdef _test(self, arg): - return self + arg - -cdef class MyInt3(MyInt2): - """ - >>> MyInt3(2) == 2 - True - >>> MyInt3(2).attr is None - True - >>> MyInt3(2).test(3) - 6 - """ - cdef _test(self, arg): - return self + arg + 1 +# see "builtin_type_inheritance_T608_py2only.pyx" for inheritance from int cdef class MyFloat(float): """ diff --git a/tests/run/builtin_type_inheritance_T608_py2only.pyx b/tests/run/builtin_type_inheritance_T608_py2only.pyx new file mode 100644 index 000000000..b10a2610a --- /dev/null +++ b/tests/run/builtin_type_inheritance_T608_py2only.pyx @@ -0,0 +1,42 @@ +# ticket: t608 + +# This only works reliably in Python2. In Python3 ints are variable-sized. 
+# You get away with it for small ints but it's a bad idea + +cdef class MyInt(int): + """ + >>> MyInt(2) == 2 + True + >>> MyInt(2).attr is None + True + """ + cdef readonly object attr + +cdef class MyInt2(int): + """ + >>> MyInt2(2) == 2 + True + >>> MyInt2(2).attr is None + True + >>> MyInt2(2).test(3) + 5 + """ + cdef readonly object attr + + def test(self, arg): + return self._test(arg) + + cdef _test(self, arg): + return self + arg + +cdef class MyInt3(MyInt2): + """ + >>> MyInt3(2) == 2 + True + >>> MyInt3(2).attr is None + True + >>> MyInt3(2).test(3) + 6 + """ + cdef _test(self, arg): + return self + arg + 1 diff --git a/tests/run/extern_varobject_extensions.srctree b/tests/run/extern_varobject_extensions.srctree new file mode 100644 index 000000000..c927b8147 --- /dev/null +++ b/tests/run/extern_varobject_extensions.srctree @@ -0,0 +1,94 @@ +# mode: run + +PYTHON setup.py build_ext --inplace +PYTHON -c "import classes" +PYTHON -c "import test_inherit" + +######## setup.py ######## + +from Cython.Build.Dependencies import cythonize + +from distutils.core import setup + +setup( + ext_modules=cythonize("*.pyx"), +) + +###### dummy_module.py ########### + +tpl = tuple +lst = list + +###### classes.pxd ################ + +cdef extern from *: + # apart from list, these are all variable sized types + # and Cython shouldn't trip up about the struct size + ctypedef class dummy_module.tpl [object PyTupleObject]: + pass + ctypedef class dummy_module.lst [object PyListObject]: + pass + ctypedef class types.CodeType [object PyCodeObject]: + pass + # Note that bytes doesn't work here because it further + # the tp_basicsize to save space + +##### classes.pyx ################# + +def check_tuple(tpl x): + assert isinstance(x, tuple) + +def check_list(lst x): + assert isinstance(x, list) + +def check_code(CodeType x): + import types + assert isinstance(x, types.CodeType) + +check_tuple((1, 2)) +check_list([1, 2]) +check_code(eval("lambda: None").__code__) + +##### 
failed_inherit1.pyx ############# + +from classes cimport tpl + +cdef class SuperTuple(tpl): + cdef int a # importing this gives an error message + +##### failed_inherit2.pyx ############# + +from classes cimport tpl + +cdef class SuperTuple(tpl): + # adding a method creates a vtab so should also fail + cdef int func(self): + return 1 + +##### successful_inherit.pyx ######### + +from classes cimport lst, tpl + +cdef class SuperList(lst): + cdef int a # This works OK + +cdef class SuperTuple(tpl): + # This is actually OK because it doesn't add anything + pass + +##### test_inherit.py ################ + +try: + import failed_inherit1 +except TypeError as e: + assert e.args[0] == "inheritance from PyVarObject types like 'tuple' not currently supported", e.args[0] +else: + assert False +try: + import failed_inherit2 +except TypeError as e: + assert e.args[0] == "inheritance from PyVarObject types like 'tuple' not currently supported", e.args[0] +else: + assert False + +import successful_inherit -- cgit v1.2.1 From 6414a07ec88b60d604daf0e51fd850ee974afdcb Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sat, 16 Jul 2022 10:47:05 +0200 Subject: Extend test to make sure that cython.declare(int) interprets "int" as C int and not Python int. 
--- tests/run/pure_py.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/run/pure_py.py b/tests/run/pure_py.py index 93f737453..a8dc5b014 100644 --- a/tests/run/pure_py.py +++ b/tests/run/pure_py.py @@ -33,17 +33,18 @@ def test_sizeof(): def test_declare(n): """ >>> test_declare(100) - (100, 100) + (100, 100, 100) >>> test_declare(100.5) - (100, 100) + (100, 100, 100) """ x = cython.declare(cython.int) y = cython.declare(cython.int, n) + z = cython.declare(int, n) # C int if cython.compiled: cython.declare(xx=cython.int, yy=cython.long) i = cython.sizeof(xx) ptr = cython.declare(cython.p_int, cython.address(y)) - return y, ptr[0] + return y, z, ptr[0] @cython.locals(x=cython.double, n=cython.int) -- cgit v1.2.1 From f236f652c5e9efc2beefb7bb696654fdd6ca30b8 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 16 Jul 2022 16:27:45 +0100 Subject: Refactor parsing of named expressions to bring it closer to CPython's LL parser (GH-4846) I've tried to rewrite it to largely follow the rules from the most recent version of the Python LL parser, so avoiding conditional parameters. 
See https://github.com/cython/cython/issues/4595 --- Cython/Compiler/Parsing.pxd | 13 +++--- Cython/Compiler/Parsing.py | 102 ++++++++++++++++++++++++++++---------------- 2 files changed, 74 insertions(+), 41 deletions(-) diff --git a/Cython/Compiler/Parsing.pxd b/Cython/Compiler/Parsing.pxd index 7f4a1c220..1be718581 100644 --- a/Cython/Compiler/Parsing.pxd +++ b/Cython/Compiler/Parsing.pxd @@ -23,15 +23,17 @@ cdef tuple p_binop_operator(PyrexScanner s) cdef p_binop_expr(PyrexScanner s, ops, p_sub_expr_func p_sub_expr) cdef p_lambdef(PyrexScanner s, bint allow_conditional=*) cdef p_lambdef_nocond(PyrexScanner s) -cdef p_test(PyrexScanner s, bint allow_assignment_expression=*) -cdef p_test_nocond(PyrexScanner s, bint allow_assignment_expression=*) -cdef p_walrus_test(PyrexScanner s, bint allow_assignment_expression=*) +cdef p_test(PyrexScanner s) +cdef p_test_allow_walrus_after(PyrexScanner s) +cdef p_test_nocond(PyrexScanner s) +cdef p_namedexpr_test(PyrexScanner s) cdef p_or_test(PyrexScanner s) cdef p_rassoc_binop_expr(PyrexScanner s, unicode op, p_sub_expr_func p_subexpr) cdef p_and_test(PyrexScanner s) cdef p_not_test(PyrexScanner s) cdef p_comparison(PyrexScanner s) -cdef p_test_or_starred_expr(PyrexScanner s, bint is_expression=*) +cdef p_test_or_starred_expr(PyrexScanner s) +cdef p_namedexpr_test_or_starred_expr(PyrexScanner s) cdef p_starred_expr(PyrexScanner s) cdef p_cascaded_cmp(PyrexScanner s) cdef p_cmp_op(PyrexScanner s) @@ -85,9 +87,10 @@ cdef p_dict_or_set_maker(PyrexScanner s) cdef p_backquote_expr(PyrexScanner s) cdef p_simple_expr_list(PyrexScanner s, expr=*) cdef p_test_or_starred_expr_list(PyrexScanner s, expr=*) +cdef p_namedexpr_test_or_starred_expr_list(s, expr=*) cdef p_testlist(PyrexScanner s) cdef p_testlist_star_expr(PyrexScanner s) -cdef p_testlist_comp(PyrexScanner s, bint is_expression=*) +cdef p_testlist_comp(PyrexScanner s) cdef p_genexp(PyrexScanner s, expr) #------------------------------------------------------- diff --git 
a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 1a31e2697..938e16e99 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -120,9 +120,9 @@ def p_lambdef(s, allow_conditional=True): s, terminator=':', annotated=False) s.expect(':') if allow_conditional: - expr = p_test(s, allow_assignment_expression=False) + expr = p_test(s) else: - expr = p_test_nocond(s, allow_assignment_expression=False) + expr = p_test_nocond(s) return ExprNodes.LambdaNode( pos, args = args, star_arg = star_arg, starstar_arg = starstar_arg, @@ -131,49 +131,64 @@ def p_lambdef(s, allow_conditional=True): #lambdef_nocond: 'lambda' [varargslist] ':' test_nocond def p_lambdef_nocond(s): - return p_lambdef(s, allow_conditional=False) + return p_lambdef(s) #test: or_test ['if' or_test 'else' test] | lambdef -def p_test(s, allow_assignment_expression=True): +def p_test(s): + # The check for a following ':=' is only for error reporting purposes. + # It simply changes a + # expected ')', found ':=' + # message into something a bit more descriptive. + # It is close to what the PEG parser does in CPython, where an expression has + # a lookahead assertion that it isn't followed by ':=' + expr = p_test_allow_walrus_after(s) + if s.sy == ':=': + s.error("invalid syntax: assignment expression not allowed in this context") + return expr + +def p_test_allow_walrus_after(s): if s.sy == 'lambda': return p_lambdef(s) pos = s.position() - expr = p_walrus_test(s, allow_assignment_expression) + expr = p_or_test(s) if s.sy == 'if': s.next() - # Assignment expressions are always allowed here - # even if they wouldn't be allowed in the expression as a whole. 
- test = p_walrus_test(s) + test = p_or_test(s) s.expect('else') other = p_test(s) return ExprNodes.CondExprNode(pos, test=test, true_val=expr, false_val=other) else: return expr + #test_nocond: or_test | lambdef_nocond -def p_test_nocond(s, allow_assignment_expression=True): +def p_test_nocond(s): if s.sy == 'lambda': return p_lambdef_nocond(s) else: - return p_walrus_test(s, allow_assignment_expression) - -# walrurus_test: IDENT := test | or_test - -def p_walrus_test(s, allow_assignment_expression=True): - lhs = p_or_test(s) + return p_or_test(s) + +def p_namedexpr_test(s): + # defined in the LL parser as + # namedexpr_test: test [':=' test] + # The requirement that the LHS is a name is not enforced in the grammar. + # For comparison the PEG parser does: + # 1. look for "name :=", if found it's definitely a named expression + # so look for expression + # 2. Otherwise, look for expression + lhs = p_test_allow_walrus_after(s) if s.sy == ':=': position = s.position() - if not allow_assignment_expression: - s.error("invalid syntax: assignment expression not allowed in this context") - elif not lhs.is_name: - s.error("Left-hand side of assignment expression must be an identifier") + if not lhs.is_name: + s.error("Left-hand side of assignment expression must be an identifier", fatal=False) s.next() rhs = p_test(s) return ExprNodes.AssignmentExpressionNode(position, lhs=lhs, rhs=rhs) return lhs + #or_test: and_test ('or' and_test)* COMMON_BINOP_MISTAKES = {'||': 'or', '&&': 'and'} @@ -227,11 +242,17 @@ def p_comparison(s): n1.cascade = p_cascaded_cmp(s) return n1 -def p_test_or_starred_expr(s, is_expression=False): +def p_test_or_starred_expr(s): + if s.sy == '*': + return p_starred_expr(s) + else: + return p_test(s) + +def p_namedexpr_test_or_starred_expr(s): if s.sy == '*': return p_starred_expr(s) else: - return p_test(s, allow_assignment_expression=is_expression) + return p_namedexpr_test(s) def p_starred_expr(s): pos = s.position() @@ -505,7 +526,7 @@ def 
p_call_parse_args(s, allow_genexp=True): keyword_args.append(p_test(s)) starstar_seen = True else: - arg = p_test(s) + arg = p_namedexpr_test(s) if s.sy == '=': s.next() if not arg.is_name: @@ -514,7 +535,7 @@ def p_call_parse_args(s, allow_genexp=True): encoded_name = s.context.intern_ustring(arg.name) keyword = ExprNodes.IdentifierStringNode( arg.pos, value=encoded_name) - arg = p_test(s, allow_assignment_expression=False) + arg = p_test(s) keyword_args.append((keyword, arg)) else: if keyword_args: @@ -690,7 +711,7 @@ def p_atom(s): elif s.sy == 'yield': result = p_yield_expression(s) else: - result = p_testlist_comp(s, is_expression=True) + result = p_testlist_comp(s) s.expect(')') return result elif sy == '[': @@ -1261,7 +1282,7 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw): # since PEP 448: # list_display ::= "[" [listmaker] "]" -# listmaker ::= (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] ) +# listmaker ::= (named_test|star_expr) ( comp_for | (',' (named_test|star_expr))* [','] ) # comp_iter ::= comp_for | comp_if # comp_for ::= ["async"] "for" expression_list "in" testlist [comp_iter] # comp_if ::= "if" test [comp_iter] @@ -1274,7 +1295,7 @@ def p_list_maker(s): s.expect(']') return ExprNodes.ListNode(pos, args=[]) - expr = p_test_or_starred_expr(s, is_expression=True) + expr = p_namedexpr_test_or_starred_expr(s) if s.sy in ('for', 'async'): if expr.is_starred: s.error("iterable unpacking cannot be used in comprehension") @@ -1289,7 +1310,7 @@ def p_list_maker(s): # (merged) list literal if s.sy == ',': s.next() - exprs = p_test_or_starred_expr_list(s, expr) + exprs = p_namedexpr_test_or_starred_expr_list(s, expr) else: exprs = [expr] s.expect(']') @@ -1474,7 +1495,16 @@ def p_simple_expr_list(s, expr=None): def p_test_or_starred_expr_list(s, expr=None): exprs = expr is not None and [expr] or [] while s.sy not in expr_terminators: - exprs.append(p_test_or_starred_expr(s, is_expression=(expr is not None))) + 
exprs.append(p_test_or_starred_expr(s)) + if s.sy != ',': + break + s.next() + return exprs + +def p_namedexpr_test_or_starred_expr_list(s, expr=None): + exprs = expr is not None and [expr] or [] + while s.sy not in expr_terminators: + exprs.append(p_namedexpr_test_or_starred_expr(s)) if s.sy != ',': break s.next() @@ -1507,12 +1537,12 @@ def p_testlist_star_expr(s): # testlist_comp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] ) -def p_testlist_comp(s, is_expression=False): +def p_testlist_comp(s): pos = s.position() - expr = p_test_or_starred_expr(s, is_expression) + expr = p_namedexpr_test_or_starred_expr(s) if s.sy == ',': s.next() - exprs = p_test_or_starred_expr_list(s, expr) + exprs = p_namedexpr_test_or_starred_expr_list(s, expr) return ExprNodes.TupleNode(pos, args = exprs) elif s.sy in ('for', 'async'): return p_genexp(s, expr) @@ -1900,7 +1930,7 @@ def p_if_statement(s): def p_if_clause(s): pos = s.position() - test = p_test(s) + test = p_namedexpr_test(s) body = p_suite(s) return Nodes.IfClauseNode(pos, condition = test, body = body) @@ -1916,7 +1946,7 @@ def p_while_statement(s): # s.sy == 'while' pos = s.position() s.next() - test = p_test(s) + test = p_namedexpr_test(s) body = p_suite(s) else_clause = p_else_clause(s) return Nodes.WhileStatNode(pos, @@ -3096,11 +3126,11 @@ def p_c_arg_decl(s, ctx, in_pyfunc, cmethod_flag = 0, nonempty = 0, default = ExprNodes.NoneNode(pos) s.next() elif 'inline' in ctx.modifiers: - default = p_test(s, allow_assignment_expression=False) + default = p_test(s) else: error(pos, "default values cannot be specified in pxd files, use ? or *") else: - default = p_test(s, allow_assignment_expression=False) + default = p_test(s) return Nodes.CArgDeclNode(pos, base_type = base_type, declarator = declarator, @@ -3978,5 +4008,5 @@ def p_annotation(s): then it is not a bug. 
""" pos = s.position() - expr = p_test(s, allow_assignment_expression=False) + expr = p_test(s) return ExprNodes.AnnotationNode(pos, expr=expr) -- cgit v1.2.1 From 3de56e25789de3576b5d4e7ff9bb99efea35bdf6 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 16 Jul 2022 16:32:28 +0100 Subject: Make memoryviewslice and cython.array be collections.abc.Sequence (GH-4817) The main reason to do this is so that they'll work in sequence patterns in structural pattern matching in Python 3.10+. Since the builtin "memoryview" type and "array.array" are sequences, I think this is reasonable. --- Cython/Utility/MemoryView.pyx | 33 ++++++++++++++++++++++++++++++ tests/compile/fused_redeclare_T3111.pyx | 12 +++++------ tests/memoryview/cythonarray.pyx | 36 +++++++++++++++++++++++++++++++++ tests/memoryview/memoryview.pyx | 33 ++++++++++++++++++++++++++++++ 4 files changed, 108 insertions(+), 6 deletions(-) diff --git a/Cython/Utility/MemoryView.pyx b/Cython/Utility/MemoryView.pyx index 990319e05..9361249fb 100644 --- a/Cython/Utility/MemoryView.pyx +++ b/Cython/Utility/MemoryView.pyx @@ -93,6 +93,17 @@ cdef extern from "": void free(void *) nogil void *memcpy(void *dest, void *src, size_t n) nogil +# the sequence abstract base class +cdef object __pyx_collections_abc_Sequence "__pyx_collections_abc_Sequence" +try: + if __import__("sys").version_info >= (3, 3): + __pyx_collections_abc_Sequence = __import__("collections.abc").abc.Sequence + else: + __pyx_collections_abc_Sequence = __import__("collections").Sequence +except: + # it isn't a big problem if this fails + __pyx_collections_abc_Sequence = None + # ### cython.array class # @@ -224,6 +235,12 @@ cdef class array: def __setitem__(self, item, value): self.memview[item] = value + # Sequence methods + try: + count = __pyx_collections_abc_Sequence.count + index = __pyx_collections_abc_Sequence.index + except: + pass @cname("__pyx_array_allocate_buffer") cdef int _allocate_buffer(array self) except -1: @@ -970,6 +987,22 @@ cdef 
class _memoryviewslice(memoryview): cdef _get_base(self): return self.from_object + # Sequence methods + try: + count = __pyx_collections_abc_Sequence.count + index = __pyx_collections_abc_Sequence.index + except: + pass + +try: + if __pyx_collections_abc_Sequence: + # The main value of registering _memoryviewslice as a + # Sequence is that it can be used in structural pattern + # matching in Python 3.10+ + __pyx_collections_abc_Sequence.register(_memoryviewslice) + __pyx_collections_abc_Sequence.register(array) +except: + pass # ignore failure, it's a minor issue @cname('__pyx_memoryview_fromslice') cdef memoryview_fromslice({{memviewslice_name}} memviewslice, diff --git a/tests/compile/fused_redeclare_T3111.pyx b/tests/compile/fused_redeclare_T3111.pyx index 04862ae88..d91f1d132 100644 --- a/tests/compile/fused_redeclare_T3111.pyx +++ b/tests/compile/fused_redeclare_T3111.pyx @@ -27,10 +27,10 @@ _WARNINGS = """ 36:10: 'cpdef_cname_method' redeclared # from MemoryView.pyx -958:29: Ambiguous exception value, same as default return value: 0 -958:29: Ambiguous exception value, same as default return value: 0 -983:46: Ambiguous exception value, same as default return value: 0 -983:46: Ambiguous exception value, same as default return value: 0 -1073:29: Ambiguous exception value, same as default return value: 0 -1073:29: Ambiguous exception value, same as default return value: 0 +975:29: Ambiguous exception value, same as default return value: 0 +975:29: Ambiguous exception value, same as default return value: 0 +1016:46: Ambiguous exception value, same as default return value: 0 +1016:46: Ambiguous exception value, same as default return value: 0 +1106:29: Ambiguous exception value, same as default return value: 0 +1106:29: Ambiguous exception value, same as default return value: 0 """ diff --git a/tests/memoryview/cythonarray.pyx b/tests/memoryview/cythonarray.pyx index 0dc823581..6bfd7397e 100644 --- a/tests/memoryview/cythonarray.pyx +++ 
b/tests/memoryview/cythonarray.pyx @@ -286,3 +286,39 @@ def test_char_array_in_python_api(*shape): arr = array(shape=shape, itemsize=sizeof(char), format='c', mode='c') arr[:] = b'x' return arr + +def test_is_Sequence(): + """ + >>> test_is_Sequence() + 1 + 1 + True + """ + import sys + if sys.version_info < (3, 3): + from collections import Sequence + else: + from collections.abc import Sequence + + arr = array(shape=(5,), itemsize=sizeof(char), format='c', mode='c') + for i in range(arr.shape[0]): + arr[i] = f'{i}'.encode('ascii') + print(arr.count(b'1')) # test for presence of added collection method + print(arr.index(b'1')) # test for presence of added collection method + + if sys.version_info >= (3, 10): + # test structural pattern match in Python + # (because Cython hasn't implemented it yet, and because the details + # of what Python considers a sequence are important) + globs = {'arr': arr} + exec(""" +match arr: + case [*_]: + res = True + case _: + res = False +""", globs) + assert globs['res'] + + return isinstance(arr, Sequence) + diff --git a/tests/memoryview/memoryview.pyx b/tests/memoryview/memoryview.pyx index bb8b73780..d2832a0b6 100644 --- a/tests/memoryview/memoryview.pyx +++ b/tests/memoryview/memoryview.pyx @@ -1205,3 +1205,36 @@ def test_conversion_failures(): assert get_refcount(dmb) == dmb_before, "before %s after %s" % (dmb_before, get_refcount(dmb)) else: assert False, "Conversion should fail!" 
+ +def test_is_Sequence(double[:] a): + """ + >>> test_is_Sequence(DoubleMockBuffer(None, range(6), shape=(6,))) + 1 + 1 + True + """ + if sys.version_info < (3, 3): + from collections import Sequence + else: + from collections.abc import Sequence + + for i in range(a.shape[0]): + a[i] = i + print(a.count(1.0)) # test for presence of added collection method + print(a.index(1.0)) # test for presence of added collection method + + if sys.version_info >= (3, 10): + # test structural pattern match in Python + # (because Cython hasn't implemented it yet, and because the details + # of what Python considers a sequence are important) + globs = {'arr': a} + exec(""" +match arr: + case [*_]: + res = True + case _: + res = False +""", globs) + assert globs['res'] + + return isinstance(a, Sequence) -- cgit v1.2.1 From 1777f13461f971d064bd1644b02d92b350e6e7d1 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 16 Jul 2022 16:36:45 +0100 Subject: Make it easier to restore scanner state during parsing phase (GH-4813) Things like match-case (essentially anything that uses Python's new PEG parser capacities) are going to have to be implemented by trying to parse something, failing, then going back and trying to parse something else. This commit gets the initial work done to make this easier to do. Several error positions change in this effort, but this seems to improve the error reporting overall. 
--- Cython/Compiler/Errors.py | 2 +- Cython/Compiler/Nodes.py | 4 +- Cython/Compiler/Parsing.py | 31 +++--- Cython/Compiler/Scanning.pxd | 3 +- Cython/Compiler/Scanning.py | 64 ++++++++++--- Cython/Compiler/Tests/TestScanning.py | 136 +++++++++++++++++++++++++++ Cython/Plex/Scanners.pxd | 6 +- Cython/Plex/Scanners.py | 39 ++++++-- tests/errors/cpp_object_template.pyx | 6 +- tests/errors/cppexc_non_extern.pyx | 4 +- tests/errors/e_argdefault.pyx | 2 +- tests/errors/e_bufaccess.pyx | 2 +- tests/errors/e_cpp_only_features.pyx | 2 +- tests/errors/e_cstruct.pyx | 2 +- tests/errors/e_public_cdef_private_types.pyx | 4 +- tests/errors/e_typing_errors.pyx | 24 ++--- tests/errors/fused_types.pyx | 6 +- tests/memoryview/error_declarations.pyx | 18 ++-- 18 files changed, 279 insertions(+), 76 deletions(-) create mode 100644 Cython/Compiler/Tests/TestScanning.py diff --git a/Cython/Compiler/Errors.py b/Cython/Compiler/Errors.py index 512f05638..bde320732 100644 --- a/Cython/Compiler/Errors.py +++ b/Cython/Compiler/Errors.py @@ -45,7 +45,7 @@ def context(position): s = u"[unprintable code]\n" else: s = u''.join(F[max(0, position[1]-6):position[1]]) - s = u'...\n%s%s^\n' % (s, u' '*(position[2]-1)) + s = u'...\n%s%s^\n' % (s, u' '*(position[2])) s = u'%s\n%s%s\n' % (u'-'*60, s, u'-'*60) return s diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 751eb31f4..4d6d95e79 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -2616,10 +2616,10 @@ class CFuncDefNode(FuncDefNode): # it really is impossible to reason about what the user wants to happens # if they've specified a C++ exception translation function. Therefore, # raise an error. 
- error(self.cfunc_declarator.pos, + error(self.pos, "Only extern functions can throw C++ exceptions.") else: - warning(self.cfunc_declarator.pos, + warning(self.pos, "Only extern functions can throw C++ exceptions.", 2) for formal_arg, type_arg in zip(self.args, typ.args): diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 938e16e99..f81ff22fd 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -2419,13 +2419,14 @@ def p_statement(s, ctx, first_statement = 0): else: if s.sy == 'IDENT' and s.systring == 'async': ident_name = s.systring + ident_pos = s.position() # PEP 492 enables the async/await keywords when it spots "async def ..." s.next() if s.sy == 'def': return p_async_statement(s, ctx, decorators) elif decorators: s.error("Decorators can only be followed by functions or classes") - s.put_back(u'IDENT', ident_name) # re-insert original token + s.put_back(u'IDENT', ident_name, ident_pos) # re-insert original token return p_simple_statement_list(s, ctx, first_statement=first_statement) @@ -2637,20 +2638,22 @@ def p_c_simple_base_type(s, nonempty, templates=None): name = p_ident(s) else: name = s.systring + name_pos = s.position() s.next() if nonempty and s.sy != 'IDENT': # Make sure this is not a declaration of a variable or function. 
if s.sy == '(': + old_pos = s.position() s.next() if (s.sy == '*' or s.sy == '**' or s.sy == '&' or (s.sy == 'IDENT' and s.systring in calling_convention_words)): - s.put_back(u'(', u'(') + s.put_back(u'(', u'(', old_pos) else: - s.put_back(u'(', u'(') - s.put_back(u'IDENT', name) + s.put_back(u'(', u'(', old_pos) + s.put_back(u'IDENT', name, name_pos) name = None elif s.sy not in ('*', '**', '[', '&'): - s.put_back(u'IDENT', name) + s.put_back(u'IDENT', name, name_pos) name = None type_node = Nodes.CSimpleBaseTypeNode(pos, @@ -2724,13 +2727,13 @@ def is_memoryviewslice_access(s): # a memoryview slice declaration is distinguishable from a buffer access # declaration by the first entry in the bracketed list. The buffer will # not have an unnested colon in the first entry; the memoryview slice will. - saved = [(s.sy, s.systring)] + saved = [(s.sy, s.systring, s.position())] s.next() retval = False if s.systring == ':': retval = True elif s.sy == 'INT': - saved.append((s.sy, s.systring)) + saved.append((s.sy, s.systring, s.position())) s.next() if s.sy == ':': retval = True @@ -2765,15 +2768,16 @@ def looking_at_expr(s): elif s.sy == 'IDENT': is_type = False name = s.systring + name_pos = s.position() dotted_path = [] s.next() while s.sy == '.': s.next() - dotted_path.append(s.systring) + dotted_path.append((s.systring, s.position())) s.expect('IDENT') - saved = s.sy, s.systring + saved = s.sy, s.systring, s.position() if s.sy == 'IDENT': is_type = True elif s.sy == '*' or s.sy == '**': @@ -2791,10 +2795,10 @@ def looking_at_expr(s): dotted_path.reverse() for p in dotted_path: - s.put_back(u'IDENT', p) - s.put_back(u'.', u'.') + s.put_back(u'IDENT', *p) + s.put_back(u'.', u'.', p[1]) # gets the position slightly wrong - s.put_back(u'IDENT', name) + s.put_back(u'IDENT', name, name_pos) return not is_type and saved[0] else: return True @@ -2806,9 +2810,10 @@ def looking_at_base_type(s): def looking_at_dotted_name(s): if s.sy == 'IDENT': name = s.systring + name_pos = 
s.position() s.next() result = s.sy == '.' - s.put_back(u'IDENT', name) + s.put_back(u'IDENT', name, name_pos) return result else: return 0 diff --git a/Cython/Compiler/Scanning.pxd b/Cython/Compiler/Scanning.pxd index 96d26c540..2d64565c0 100644 --- a/Cython/Compiler/Scanning.pxd +++ b/Cython/Compiler/Scanning.pxd @@ -36,6 +36,7 @@ cdef class PyrexScanner(Scanner): cdef readonly bint async_enabled cdef public unicode sy cdef public systring # EncodedString + cdef public list put_back_on_failure cdef Py_ssize_t current_level(self) #cpdef commentline(self, text) @@ -51,7 +52,6 @@ cdef class PyrexScanner(Scanner): ##cdef next(self) ##cdef peek(self) #cpdef put_back(self, sy, systring) - #cdef unread(self, token, value) ##cdef bint expect(self, what, message = *) except -2 ##cdef expect_keyword(self, what, message = *) ##cdef expected(self, what, message = *) @@ -60,3 +60,4 @@ cdef class PyrexScanner(Scanner): ##cdef expect_newline(self, message=*, bint ignore_semicolon=*) ##cdef int enter_async(self) except -1 ##cdef int exit_async(self) except -1 + cdef void error_at_scanpos(self, str message) except * diff --git a/Cython/Compiler/Scanning.py b/Cython/Compiler/Scanning.py index 5c5963021..d12d9d305 100644 --- a/Cython/Compiler/Scanning.py +++ b/Cython/Compiler/Scanning.py @@ -13,11 +13,12 @@ cython.declare(make_lexicon=object, lexicon=object, import os import platform from unicodedata import normalize +from contextlib import contextmanager from .. 
import Utils from ..Plex.Scanners import Scanner from ..Plex.Errors import UnrecognizedInput -from .Errors import error, warning +from .Errors import error, warning, hold_errors, release_errors, CompileError from .Lexicon import any_string_prefix, make_lexicon, IDENT from .Future import print_function @@ -300,6 +301,8 @@ class PyrexScanner(Scanner): # compile_time_env dict Environment for conditional compilation # compile_time_eval boolean In a true conditional compilation context # compile_time_expr boolean In a compile-time expression context + # put_back_on_failure list or None If set, this records states so the tentatively_scan + # contextmanager can restore it def __init__(self, file, filename, parent_scanner=None, scope=None, context=None, source_encoding=None, parse_comments=True, initial_pos=None): @@ -338,6 +341,8 @@ class PyrexScanner(Scanner): self.indentation_char = None self.bracket_nesting_level = 0 + self.put_back_on_failure = None + self.begin('INDENT') self.sy = '' self.next() @@ -391,7 +396,7 @@ class PyrexScanner(Scanner): def unclosed_string_action(self, text): self.end_string_action(text) - self.error("Unclosed string literal") + self.error_at_scanpos("Unclosed string literal") def indentation_action(self, text): self.begin('') @@ -407,9 +412,9 @@ class PyrexScanner(Scanner): #print "Scanner.indentation_action: setting indent_char to", repr(c) else: if self.indentation_char != c: - self.error("Mixed use of tabs and spaces") + self.error_at_scanpos("Mixed use of tabs and spaces") if text.replace(c, "") != "": - self.error("Mixed use of tabs and spaces") + self.error_at_scanpos("Mixed use of tabs and spaces") # Figure out how many indents/dedents to do current_level = self.current_level() new_level = len(text) @@ -427,7 +432,7 @@ class PyrexScanner(Scanner): self.produce('DEDENT', '') #print "...current level now", self.current_level() ### if new_level != self.current_level(): - self.error("Inconsistent indentation") + 
self.error_at_scanpos("Inconsistent indentation") def eof_action(self, text): while len(self.indentation_stack) > 1: @@ -439,7 +444,7 @@ class PyrexScanner(Scanner): try: sy, systring = self.read() except UnrecognizedInput: - self.error("Unrecognized character") + self.error_at_scanpos("Unrecognized character") return # just a marker, error() always raises if sy == IDENT: if systring in self.keywords: @@ -450,6 +455,8 @@ class PyrexScanner(Scanner): else: sy = systring systring = self.context.intern_ustring(systring) + if self.put_back_on_failure is not None: + self.put_back_on_failure.append((sy, systring, self.position())) self.sy = sy self.systring = systring if False: # debug_scanner: @@ -462,20 +469,20 @@ class PyrexScanner(Scanner): def peek(self): saved = self.sy, self.systring + saved_pos = self.position() self.next() next = self.sy, self.systring - self.unread(*next) + self.unread(self.sy, self.systring, self.position()) self.sy, self.systring = saved + self.last_token_position_tuple = saved_pos return next - def put_back(self, sy, systring): - self.unread(self.sy, self.systring) + def put_back(self, sy, systring, pos): + self.unread(self.sy, self.systring, self.last_token_position_tuple) self.sy = sy self.systring = systring + self.last_token_position_tuple = pos - def unread(self, token, value): - # This method should be added to Plex - self.queue.insert(0, (token, value)) def error(self, message, pos=None, fatal=True): if pos is None: @@ -485,6 +492,12 @@ class PyrexScanner(Scanner): err = error(pos, message) if fatal: raise err + def error_at_scanpos(self, message): + # Like error(fatal=True), but gets the current scanning position rather than + # the position of the last token read. 
+ pos = self.get_current_scan_pos() + self.error(message, pos, True) + def expect(self, what, message=None): if self.sy == what: self.next() @@ -538,3 +551,30 @@ class PyrexScanner(Scanner): self.keywords.discard('async') if self.sy in ('async', 'await'): self.sy, self.systring = IDENT, self.context.intern_ustring(self.sy) + +@contextmanager +@cython.locals(scanner=Scanner) +def tentatively_scan(scanner): + errors = hold_errors() + try: + put_back_on_failure = scanner.put_back_on_failure + scanner.put_back_on_failure = [] + initial_state = (scanner.sy, scanner.systring, scanner.position()) + try: + yield errors + except CompileError as e: + pass + finally: + if errors: + if scanner.put_back_on_failure: + for put_back in reversed(scanner.put_back_on_failure[:-1]): + scanner.put_back(*put_back) + # we need to restore the initial state too + scanner.put_back(*initial_state) + elif put_back_on_failure is not None: + # the outer "tentatively_scan" block that we're in might still + # want to undo this block + put_back_on_failure.extend(scanner.put_back_on_failure) + scanner.put_back_on_failure = put_back_on_failure + finally: + release_errors(ignore=True) diff --git a/Cython/Compiler/Tests/TestScanning.py b/Cython/Compiler/Tests/TestScanning.py new file mode 100644 index 000000000..e9cac1b47 --- /dev/null +++ b/Cython/Compiler/Tests/TestScanning.py @@ -0,0 +1,136 @@ +from __future__ import unicode_literals + +import unittest +from io import StringIO +import string + +from .. import Scanning +from ..Symtab import ModuleScope +from ..TreeFragment import StringParseContext +from ..Errors import init_thread + +# generate some fake code - just a bunch of lines of the form "a0 a1 ..." 
+code = [] +for ch in string.ascii_lowercase: + line = " ".join(["%s%s" % (ch, n) for n in range(10)]) + code.append(line) +code = "\n".join(code) + +init_thread() + + +class TestScanning(unittest.TestCase): + def make_scanner(self): + source = Scanning.StringSourceDescriptor("fake code", code) + buf = StringIO(code) + context = StringParseContext("fake context") + scope = ModuleScope("fake_module", None, None) + + return Scanning.PyrexScanner(buf, source, scope=scope, context=context) + + def test_put_back_positions(self): + scanner = self.make_scanner() + + self.assertEqual(scanner.sy, "IDENT") + self.assertEqual(scanner.systring, "a0") + scanner.next() + self.assertEqual(scanner.sy, "IDENT") + self.assertEqual(scanner.systring, "a1") + a1pos = scanner.position() + self.assertEqual(a1pos[1:], (1, 3)) + a2peek = scanner.peek() # shouldn't mess up the position + self.assertEqual(a1pos, scanner.position()) + scanner.next() + self.assertEqual(a2peek, (scanner.sy, scanner.systring)) + + # find next line + while scanner.sy != "NEWLINE": + scanner.next() + + line_sy = [] + line_systring = [] + line_pos = [] + + scanner.next() + while scanner.sy != "NEWLINE": + line_sy.append(scanner.sy) + line_systring.append(scanner.systring) + line_pos.append(scanner.position()) + scanner.next() + + for sy, systring, pos in zip( + line_sy[::-1], line_systring[::-1], line_pos[::-1] + ): + scanner.put_back(sy, systring, pos) + + n = 0 + while scanner.sy != "NEWLINE": + self.assertEqual(scanner.sy, line_sy[n]) + self.assertEqual(scanner.systring, line_systring[n]) + self.assertEqual(scanner.position(), line_pos[n]) + scanner.next() + n += 1 + + self.assertEqual(n, len(line_pos)) + + def test_tentatively_scan(self): + scanner = self.make_scanner() + with Scanning.tentatively_scan(scanner) as errors: + while scanner.sy != "NEWLINE": + scanner.next() + self.assertFalse(errors) + + scanner.next() + self.assertEqual(scanner.systring, "b0") + pos = scanner.position() + with 
Scanning.tentatively_scan(scanner) as errors: + while scanner.sy != "NEWLINE": + scanner.next() + if scanner.systring == "b7": + scanner.error("Oh no not b7!") + break + self.assertTrue(errors) + self.assertEqual(scanner.systring, "b0") # state has been restored + self.assertEqual(scanner.position(), pos) + scanner.next() + self.assertEqual(scanner.systring, "b1") # and we can keep going again + scanner.next() + self.assertEqual(scanner.systring, "b2") # and we can keep going again + + with Scanning.tentatively_scan(scanner) as error: + scanner.error("Something has gone wrong with the current symbol") + self.assertEqual(scanner.systring, "b2") + scanner.next() + self.assertEqual(scanner.systring, "b3") + + # test a few combinations of nested scanning + sy1, systring1 = scanner.sy, scanner.systring + pos1 = scanner.position() + with Scanning.tentatively_scan(scanner): + scanner.next() + sy2, systring2 = scanner.sy, scanner.systring + pos2 = scanner.position() + with Scanning.tentatively_scan(scanner): + with Scanning.tentatively_scan(scanner): + scanner.next() + scanner.next() + scanner.error("Ooops") + self.assertEqual((scanner.sy, scanner.systring), (sy2, systring2)) + self.assertEqual((scanner.sy, scanner.systring), (sy2, systring2)) + scanner.error("eee") + self.assertEqual((scanner.sy, scanner.systring), (sy1, systring1)) + with Scanning.tentatively_scan(scanner): + scanner.next() + scanner.next() + with Scanning.tentatively_scan(scanner): + scanner.next() + # no error - but this block should be unwound by the outer block too + scanner.next() + scanner.error("Oooops") + self.assertEqual((scanner.sy, scanner.systring), (sy1, systring1)) + + + + +if __name__ == "__main__": + unittest.main() diff --git a/Cython/Plex/Scanners.pxd b/Cython/Plex/Scanners.pxd index c6cb19b40..664b1a6f0 100644 --- a/Cython/Plex/Scanners.pxd +++ b/Cython/Plex/Scanners.pxd @@ -16,8 +16,8 @@ cdef class Scanner: cdef public Py_ssize_t cur_line cdef public Py_ssize_t cur_line_start cdef 
public Py_ssize_t start_pos - cdef public Py_ssize_t start_line - cdef public Py_ssize_t start_col + cdef tuple current_scanner_position_tuple + cdef public tuple last_token_position_tuple cdef public text cdef public initial_state # int? cdef public state_name @@ -32,6 +32,8 @@ cdef class Scanner: cdef inline next_char(self) @cython.locals(action=Action) cpdef tuple read(self) + cdef inline unread(self, token, value, position) + cdef inline get_current_scan_pos(self) cdef inline tuple scan_a_token(self) ##cdef tuple position(self) # used frequently by Parsing.py diff --git a/Cython/Plex/Scanners.py b/Cython/Plex/Scanners.py index e850e0cc9..5729e3a3f 100644 --- a/Cython/Plex/Scanners.py +++ b/Cython/Plex/Scanners.py @@ -53,18 +53,25 @@ class Scanner(object): # stream = None # file-like object # name = '' # buffer = '' + # + # These positions are used by the scanner to track its internal state: # buf_start_pos = 0 # position in input of start of buffer # next_pos = 0 # position in input of next char to read # cur_pos = 0 # position in input of current char # cur_line = 1 # line number of current char # cur_line_start = 0 # position in input of start of current line # start_pos = 0 # position in input of start of token - # start_line = 0 # line number of start of token - # start_col = 0 # position in line of start of token + # current_scanner_position_tuple = ("", 0, 0) + # tuple of filename, line number and position in line, really mainly for error reporting + # + # These positions are used to track what was read from the queue + # (which may differ from the internal state when tokens are replaced onto the queue) + # last_token_position_tuple = ("", 0, 0) # tuple of filename, line number and position in line + # text = None # text of last token read # initial_state = None # Node # state_name = '' # Name of initial state - # queue = None # list of tokens to be returned + # queue = None # list of tokens and positions to be returned # trace = 0 def __init__(self, 
lexicon, stream, name='', initial_pos=None): @@ -88,8 +95,8 @@ class Scanner(object): self.cur_pos = 0 self.cur_line = 1 self.start_pos = 0 - self.start_line = 0 - self.start_col = 0 + self.current_scanner_position_tuple = ("", 0, 0) + self.last_token_position_tuple = ("", 0, 0) self.text = None self.state_name = None @@ -124,10 +131,17 @@ class Scanner(object): value = action.perform(self, self.text) if value is not None: self.produce(value) - result = queue[0] + result, self.last_token_position_tuple = queue[0] del queue[0] return result + def unread(self, token, value, position): + self.queue.insert(0, ((token, value), position)) + + def get_current_scan_pos(self): + # distinct from the position of the last token due to the queue + return self.current_scanner_position_tuple + def scan_a_token(self): """ Read the next input sequence recognised by the machine @@ -135,8 +149,9 @@ class Scanner(object): file. """ self.start_pos = self.cur_pos - self.start_line = self.cur_line - self.start_col = self.cur_pos - self.cur_line_start + self.current_scanner_position_tuple = ( + self.name, self.cur_line, self.cur_pos - self.cur_line_start + ) action = self.run_machine_inlined() if action is not None: if self.trace: @@ -303,7 +318,7 @@ class Scanner(object): position within the line of the first character of the token (0-based). """ - return (self.name, self.start_line, self.start_col) + return self.last_token_position_tuple def get_position(self): """ @@ -330,7 +345,7 @@ class Scanner(object): """ if text is None: text = self.text - self.queue.append((value, text)) + self.queue.append(((value, text), self.current_scanner_position_tuple)) def eof(self): """ @@ -338,3 +353,7 @@ class Scanner(object): end of file. 
""" pass + + @property + def start_line(self): + return self.last_token_position_tuple[1] diff --git a/tests/errors/cpp_object_template.pyx b/tests/errors/cpp_object_template.pyx index db4381b51..e1a15c905 100644 --- a/tests/errors/cpp_object_template.pyx +++ b/tests/errors/cpp_object_template.pyx @@ -18,7 +18,7 @@ def memview(): vmv.push_back(array.array("i", [1,2,3])) _ERRORS = u""" -10:16: Python object type 'Python object' cannot be used as a template argument -12:16: Python object type 'A' cannot be used as a template argument -17:16: Reference-counted type 'int[:]' cannot be used as a template argument +10:15: Python object type 'Python object' cannot be used as a template argument +12:15: Python object type 'A' cannot be used as a template argument +17:15: Reference-counted type 'int[:]' cannot be used as a template argument """ diff --git a/tests/errors/cppexc_non_extern.pyx b/tests/errors/cppexc_non_extern.pyx index 95427e6e4..f498e398d 100644 --- a/tests/errors/cppexc_non_extern.pyx +++ b/tests/errors/cppexc_non_extern.pyx @@ -14,9 +14,9 @@ cdef test_func2(self) except +: pass _ERRORS = """ -9:16: Only extern functions can throw C++ exceptions. +9:5: Only extern functions can throw C++ exceptions. """ _WARNINGS = """ -13:16: Only extern functions can throw C++ exceptions. +13:5: Only extern functions can throw C++ exceptions. 
""" diff --git a/tests/errors/e_argdefault.pyx b/tests/errors/e_argdefault.pyx index d8828741f..43e69ae6f 100644 --- a/tests/errors/e_argdefault.pyx +++ b/tests/errors/e_argdefault.pyx @@ -12,7 +12,7 @@ cdef class Grail: pass _ERRORS = u""" -3:10: Non-default argument follows default argument +3:9: Non-default argument follows default argument 3:36: Non-default argument following default argument 6:23: Non-default argument following default argument 11:16: This argument cannot have a default value diff --git a/tests/errors/e_bufaccess.pyx b/tests/errors/e_bufaccess.pyx index bc5b9c0f3..5be4876d5 100644 --- a/tests/errors/e_bufaccess.pyx +++ b/tests/errors/e_bufaccess.pyx @@ -17,7 +17,7 @@ def f(): _ERRORS = u""" 3:17: Buffer types only allowed as function local variables 5:21: Buffer types only allowed as function local variables -8:31: "fakeoption" is not a buffer option +8:27: "fakeoption" is not a buffer option """ #TODO: #7:22: "ndim" must be non-negative diff --git a/tests/errors/e_cpp_only_features.pyx b/tests/errors/e_cpp_only_features.pyx index 005e415e6..19b7a6e39 100644 --- a/tests/errors/e_cpp_only_features.pyx +++ b/tests/errors/e_cpp_only_features.pyx @@ -21,6 +21,6 @@ def use_del(): _ERRORS = """ 8:10: typeid operator only allowed in c++ 8:23: typeid operator only allowed in c++ -14:20: Operation only allowed in c++ +14:16: Operation only allowed in c++ 19:4: Operation only allowed in c++ """ diff --git a/tests/errors/e_cstruct.pyx b/tests/errors/e_cstruct.pyx index ad3ca9695..e0a09fbeb 100644 --- a/tests/errors/e_cstruct.pyx +++ b/tests/errors/e_cstruct.pyx @@ -24,7 +24,7 @@ cdef void eggs(Spam s): _ERRORS = u""" -7:39: C struct/union member cannot be a Python object +7:4: C struct/union member cannot be a Python object 17:9: Object of type 'Spam' has no attribute 'k' 18:9: Cannot assign type 'float (*)[42]' to 'int' 19:10: Cannot assign type 'int' to 'float (*)[42]' diff --git a/tests/errors/e_public_cdef_private_types.pyx 
b/tests/errors/e_public_cdef_private_types.pyx index 331d6c04f..9d8f55c87 100644 --- a/tests/errors/e_public_cdef_private_types.pyx +++ b/tests/errors/e_public_cdef_private_types.pyx @@ -38,6 +38,6 @@ e_public_cdef_private_types.pyx:8:22: Function declared public or api may not ha e_public_cdef_private_types.pyx:11:19: Function declared public or api may not have private types e_public_cdef_private_types.pyx:14:5: Function declared public or api may not have private types e_public_cdef_private_types.pyx:17:5: Function declared public or api may not have private types -e_public_cdef_private_types.pyx:20:25: Function with optional arguments may not be declared public or api -e_public_cdef_private_types.pyx:23:22: Function with optional arguments may not be declared public or api +e_public_cdef_private_types.pyx:20:24: Function with optional arguments may not be declared public or api +e_public_cdef_private_types.pyx:23:21: Function with optional arguments may not be declared public or api """ diff --git a/tests/errors/e_typing_errors.pyx b/tests/errors/e_typing_errors.pyx index e11827696..832f68d90 100644 --- a/tests/errors/e_typing_errors.pyx +++ b/tests/errors/e_typing_errors.pyx @@ -40,20 +40,20 @@ cdef class Cls(object): _ERRORS = """ -13:45: typing.Optional[...] cannot be applied to non-Python type int -13:72: typing.Optional[...] cannot be applied to non-Python type double -13:98: typing.Optional[...] cannot be applied to non-Python type float -14:49: typing.Optional[...] cannot be applied to non-Python type double complex -14:74: typing.Optional[...] cannot be applied to non-Python type long -14:103: typing.Optional[...] cannot be applied to non-Python type long long -24:33: typing.Optional[...] cannot be applied to non-Python type int -24:52: typing.Optional[...] cannot be applied to non-Python type float -24:91: typing.Optional[...] cannot be applied to non-Python type long - -20:38: typing.Optional[...] 
cannot be applied to non-Python type MyStruct +13:42: typing.Optional[...] cannot be applied to non-Python type int +13:66: typing.Optional[...] cannot be applied to non-Python type double +13:93: typing.Optional[...] cannot be applied to non-Python type float +14:42: typing.Optional[...] cannot be applied to non-Python type double complex +14:70: typing.Optional[...] cannot be applied to non-Python type long +14:95: typing.Optional[...] cannot be applied to non-Python type long long +24:30: typing.Optional[...] cannot be applied to non-Python type int +24:47: typing.Optional[...] cannot be applied to non-Python type float +24:87: typing.Optional[...] cannot be applied to non-Python type long + +20:30: typing.Optional[...] cannot be applied to non-Python type MyStruct 28:20: Modifier 'typing.ClassVar' is not allowed here. # FIXME: this should be ok :-? -33:53: typing.Optional[...] cannot be applied to non-Python type double[:] +33:52: typing.Optional[...] cannot be applied to non-Python type double[:] """ diff --git a/tests/errors/fused_types.pyx b/tests/errors/fused_types.pyx index 378ac5506..31aa35b86 100644 --- a/tests/errors/fused_types.pyx +++ b/tests/errors/fused_types.pyx @@ -109,7 +109,7 @@ _ERRORS = u""" 86:4: 'z' cannot be specialized since its type is not a fused argument to this function 86:4: 'z' cannot be specialized since its type is not a fused argument to this function 86:4: 'z' cannot be specialized since its type is not a fused argument to this function -87:24: Type cannot be specialized since it is not a fused argument to this function -87:24: Type cannot be specialized since it is not a fused argument to this function -87:24: Type cannot be specialized since it is not a fused argument to this function +87:16: Type cannot be specialized since it is not a fused argument to this function +87:16: Type cannot be specialized since it is not a fused argument to this function +87:16: Type cannot be specialized since it is not a fused argument to this 
function """ diff --git a/tests/memoryview/error_declarations.pyx b/tests/memoryview/error_declarations.pyx index 0f6c52043..8c4f12a56 100644 --- a/tests/memoryview/error_declarations.pyx +++ b/tests/memoryview/error_declarations.pyx @@ -73,24 +73,24 @@ _ERRORS = u''' 13:19: Step must be omitted, 1, or a valid specifier. 14:20: Step must be omitted, 1, or a valid specifier. 15:20: Step must be omitted, 1, or a valid specifier. -16:17: Start must not be given. -17:18: Start must not be given. +16:15: Start must not be given. +17:17: Start must not be given. 18:22: Axis specification only allowed in the 'step' slot. -19:19: Fortran contiguous specifier must follow an indirect dimension +19:18: Fortran contiguous specifier must follow an indirect dimension 20:22: Invalid axis specification. 21:19: Invalid axis specification. 22:22: no expressions allowed in axis spec, only names and literals. 25:37: Memoryview 'object[::1, :]' not conformable to memoryview 'object[:, ::1]'. 28:17: Different base types for memoryviews (int, Python object) -31:9: Dimension may not be contiguous -37:9: Only one direct contiguous axis may be specified. -38:9:Only dimensions 3 and 2 may be contiguous and direct -44:10: Invalid base type for memoryview slice: intp +31:8: Dimension may not be contiguous +37:8: Only one direct contiguous axis may be specified. +38:8:Only dimensions 3 and 2 may be contiguous and direct +44:9: Invalid base type for memoryview slice: intp 46:35: Can only create cython.array from pointer or array 47:24: Cannot assign type 'double' to 'Py_ssize_t' -55:13: Invalid base type for memoryview slice: Invalid +55:12: Invalid base type for memoryview slice: Invalid 58:6: More dimensions than the maximum number of buffer dimensions were used. 59:6: More dimensions than the maximum number of buffer dimensions were used. -61:9: More dimensions than the maximum number of buffer dimensions were used. 
+61:8: More dimensions than the maximum number of buffer dimensions were used. 64:13: Cannot take address of memoryview slice ''' -- cgit v1.2.1 From fe17d9533d13edf873674a946b9cdf788640428e Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 16 Jul 2022 17:18:24 +0100 Subject: Update to reflect named_expr refactor --- Cython/Compiler/Parsing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 30917c463..23fb08dba 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -3979,7 +3979,7 @@ def p_match_statement(s, ctx): pos = s.position() with tentatively_scan(s) as errors: s.next() - subject = p_test(s) + subject = p_namedexpr_test(s) subjects = None if s.sy == ",": subjects = [subject] -- cgit v1.2.1 From 0b4370678e5b00a020cd990f922964d3aba59884 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sat, 16 Jul 2022 22:14:48 +0200 Subject: Reduce overhead in the code writer when writing out simple code without newlines. --- Cython/Compiler/Code.pxd | 1 + Cython/Compiler/Code.py | 19 ++++++++++++------- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/Cython/Compiler/Code.pxd b/Cython/Compiler/Code.pxd index 59779f8bc..c22f7caa2 100644 --- a/Cython/Compiler/Code.pxd +++ b/Cython/Compiler/Code.pxd @@ -110,6 +110,7 @@ cdef class CCodeWriter(object): cdef bint bol cpdef write(self, s) + cpdef write_lines(self, s) cpdef put(self, code) cpdef put_safe(self, code) cpdef putln(self, code=*, bint safe=*) diff --git a/Cython/Compiler/Code.py b/Cython/Compiler/Code.py index 4c67ac400..056bfccfc 100644 --- a/Cython/Compiler/Code.py +++ b/Cython/Compiler/Code.py @@ -1860,10 +1860,15 @@ class CCodeWriter(object): return self.buffer.getvalue() def write(self, s): + if '\n' in s: + self.write_lines(s) + else: + self.buffer.write(s) + + def write_lines(self, s): # Cygdb needs to know which Cython source line corresponds to which C line. 
# Therefore, we write this information into "self.buffer.markers" and then write it from there # into cython_debug/cython_debug_info_* (see ModuleNode._serialize_lineno_map). - filename_line = self.last_marked_pos[:2] if self.last_marked_pos else (None, 0) self.buffer.markers.extend([filename_line] * s.count('\n')) @@ -1970,13 +1975,13 @@ class CCodeWriter(object): self.emit_marker() if self.code_config.emit_linenums and self.last_marked_pos: source_desc, line, _ = self.last_marked_pos - self.write('\n#line %s "%s"\n' % (line, source_desc.get_escaped_description())) + self.write_lines('\n#line %s "%s"\n' % (line, source_desc.get_escaped_description())) if code: if safe: self.put_safe(code) else: self.put(code) - self.write("\n") + self.write_lines("\n") self.bol = 1 def mark_pos(self, pos, trace=True): @@ -1990,13 +1995,13 @@ class CCodeWriter(object): pos, trace = self.last_pos self.last_marked_pos = pos self.last_pos = None - self.write("\n") + self.write_lines("\n") if self.code_config.emit_code_comments: self.indent() - self.write("/* %s */\n" % self._build_marker(pos)) + self.write_lines("/* %s */\n" % self._build_marker(pos)) if trace and self.funcstate and self.funcstate.can_trace and self.globalstate.directives['linetrace']: self.indent() - self.write('__Pyx_TraceLine(%d,%d,%s)\n' % ( + self.write_lines('__Pyx_TraceLine(%d,%d,%s)\n' % ( pos[1], not self.funcstate.gil_owned, self.error_goto(pos))) def _build_marker(self, pos): @@ -2073,7 +2078,7 @@ class CCodeWriter(object): self.putln("}") def indent(self): - self.write(" " * self.level) + self.buffer.write(" " * self.level) def get_py_version_hex(self, pyversion): return "0x%02X%02X%02X%02X" % (tuple(pyversion) + (0,0,0,0))[:4] -- cgit v1.2.1 From 9c6140b373334e9fa661582ccd1ffd816f1b622c Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 17 Jul 2022 09:41:10 +0100 Subject: Enable parenthesized context managers (GH-4814) As described in 
https://docs.python.org/3/whatsnew/3.10.html#parenthesized-context-managers The approach to parsing is largely copied from the CPython parser (with comments to support it) - closer to the PEG approach of "try the bracketed case first, and let it fail silently then try the unbracketed case". --- Cython/Compiler/Parsing.py | 62 +++++++++++++++++++------ tests/run/test_grammar.py | 112 +++++++++++++++++++-------------------------- 2 files changed, 97 insertions(+), 77 deletions(-) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index f81ff22fd..1d636bef9 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -22,7 +22,7 @@ import sys from unicodedata import lookup as lookup_unicodechar, category as unicode_category from functools import partial, reduce -from .Scanning import PyrexScanner, FileSourceDescriptor +from .Scanning import PyrexScanner, FileSourceDescriptor, tentatively_scan from . import Nodes from . import ExprNodes from . import Builtin @@ -2144,6 +2144,52 @@ def p_with_statement(s): def p_with_items(s, is_async=False): + """ + Copied from CPython: + | 'with' '(' a[asdl_withitem_seq*]=','.with_item+ ','? 
')' ':' b=block { + _PyAST_With(a, b, NULL, EXTRA) } + | 'with' a[asdl_withitem_seq*]=','.with_item+ ':' tc=[TYPE_COMMENT] b=block { + _PyAST_With(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) } + Therefore the first thing to try is the bracket-enclosed + version and if that fails try the regular version + """ + brackets_succeeded=False + if s.sy == '(': + items = [] + with tentatively_scan(s) as errors: + s.next() + while True: + items.append(p_with_item(s, is_async)) + if s.sy == ")": + s.next() + break + s.expect(",") + if s.sy == ")": + # trailing commas allowed + s.next() + break + brackets_succeeded = not errors + if not brackets_succeeded: + # try the non-bracket version + items = [] + while True: + items.append(p_with_item(s, is_async)) + if s.sy == ",": + s.next() + else: + break + body = p_suite(s) + for cls, pos, kwds in reversed(items): + # construct the actual nodes now that we know what the body is + body = cls(pos, body=body, **kwds) + return body + + +def p_with_item(s, is_async): + # In contrast to most parsing functions, this returns a tuple of + # class, pos, kwd_dict + # This is because GILStatNode does a reasonable amount of initialization in its + # constructor, and requires "body" to be set, which we don't currently have pos = s.position() if not s.in_python_file and s.sy == 'IDENT' and s.systring in ('nogil', 'gil'): if is_async: @@ -2158,24 +2204,14 @@ def p_with_items(s, is_async=False): condition = p_test(s) s.expect(')') - if s.sy == ',': - s.next() - body = p_with_items(s) - else: - body = p_suite(s) - return Nodes.GILStatNode(pos, state=state, body=body, condition=condition) + return Nodes.GILStatNode, pos, {"state": state, "condition": condition} else: manager = p_test(s) target = None if s.sy == 'IDENT' and s.systring == 'as': s.next() target = p_starred_expr(s) - if s.sy == ',': - s.next() - body = p_with_items(s, is_async=is_async) - else: - body = p_suite(s) - return Nodes.WithStatNode(pos, manager=manager, target=target, body=body, 
is_async=is_async) + return Nodes.WithStatNode, pos, {"manager": manager, "target": target, "is_async": is_async} def p_with_template(s): diff --git a/tests/run/test_grammar.py b/tests/run/test_grammar.py index c41b75f55..bb937348e 100644 --- a/tests/run/test_grammar.py +++ b/tests/run/test_grammar.py @@ -64,8 +64,7 @@ if cython.compiled: def use_old_parser(): - # FIXME: currently disabling new PEG parser tests. - return True + return False import unittest @@ -1869,68 +1868,53 @@ class GrammarTests(unittest.TestCase): with manager() as x, manager(): pass - if not use_old_parser(): - test_cases = [ - """if 1: - with ( - manager() - ): - pass - """, - """if 1: - with ( - manager() as x - ): - pass - """, - """if 1: - with ( - manager() as (x, y), - manager() as z, - ): - pass - """, - """if 1: - with ( - manager(), - manager() - ): - pass - """, - """if 1: - with ( - manager() as x, - manager() as y - ): - pass - """, - """if 1: - with ( - manager() as x, - manager() - ): - pass - """, - """if 1: - with ( - manager() as x, - manager() as y, - manager() as z, - ): - pass - """, - """if 1: - with ( - manager() as x, - manager() as y, - manager(), - ): - pass - """, - ] - for case in test_cases: - with self.subTest(case=case): - compile(case, "", "exec") + with ( + manager() + ): + pass + + with ( + manager() as x + ): + pass + + with ( + manager() as (x, y), + manager() as z, + ): + pass + + with ( + manager(), + manager() + ): + pass + + with ( + manager() as x, + manager() as y + ): + pass + + with ( + manager() as x, + manager() + ): + pass + + with ( + manager() as x, + manager() as y, + manager() as z, + ): + pass + + with ( + manager() as x, + manager() as y, + manager(), + ): + pass def test_if_else_expr(self): -- cgit v1.2.1 From 1103ae152923dd0337fde30a031817290b60fba0 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sun, 17 Jul 2022 10:57:19 +0200 Subject: Refactor "with" parsing code to reduce code duplication. 
--- Cython/Compiler/Parsing.py | 37 ++++++++++++++++++------------------- 1 file changed, 18 insertions(+), 19 deletions(-) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 1d636bef9..60220282b 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -2153,31 +2153,17 @@ def p_with_items(s, is_async=False): Therefore the first thing to try is the bracket-enclosed version and if that fails try the regular version """ - brackets_succeeded=False + brackets_succeeded = False + items = () # unused, but static analysis fails to track that below if s.sy == '(': - items = [] with tentatively_scan(s) as errors: s.next() - while True: - items.append(p_with_item(s, is_async)) - if s.sy == ")": - s.next() - break - s.expect(",") - if s.sy == ")": - # trailing commas allowed - s.next() - break + items = p_with_items_list(s, is_async) + s.expect(")") brackets_succeeded = not errors if not brackets_succeeded: # try the non-bracket version - items = [] - while True: - items.append(p_with_item(s, is_async)) - if s.sy == ",": - s.next() - else: - break + items = p_with_items_list(s, is_async) body = p_suite(s) for cls, pos, kwds in reversed(items): # construct the actual nodes now that we know what the body is @@ -2185,6 +2171,19 @@ def p_with_items(s, is_async=False): return body +def p_with_items_list(s, is_async): + items = [] + while True: + items.append(p_with_item(s, is_async)) + if s.sy != ",": + break + s.next() + if s.sy == ")": + # trailing commas allowed + break + return items + + def p_with_item(s, is_async): # In contrast to most parsing functions, this returns a tuple of # class, pos, kwd_dict -- cgit v1.2.1 From c65db31e2cf22807891bdd52ccc298c6145b3bef Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sun, 17 Jul 2022 10:58:41 +0200 Subject: Add missing .pxd declarations for new parser functions. 
--- Cython/Compiler/Parsing.pxd | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Cython/Compiler/Parsing.pxd b/Cython/Compiler/Parsing.pxd index 1be718581..5fcc735b4 100644 --- a/Cython/Compiler/Parsing.pxd +++ b/Cython/Compiler/Parsing.pxd @@ -132,6 +132,8 @@ cdef p_except_clause(PyrexScanner s) cdef p_include_statement(PyrexScanner s, ctx) cdef p_with_statement(PyrexScanner s) cdef p_with_items(PyrexScanner s, bint is_async=*) +cdef p_with_items_list(s, bint is_async) +cdef tuple p_with_item(s, bint is_async) cdef p_with_template(PyrexScanner s) cdef p_simple_statement(PyrexScanner s, bint first_statement = *) cdef p_simple_statement_list(PyrexScanner s, ctx, bint first_statement = *) -- cgit v1.2.1 From da2732fccdfe38598f45c2a70fbeb6f1faa2df7e Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 17 Jul 2022 10:48:52 +0100 Subject: Apply suggestions from code review Co-authored-by: scoder --- Cython/Compiler/MatchCaseNodes.py | 5 ++--- Cython/Compiler/Parsing.py | 10 ++++------ 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/Cython/Compiler/MatchCaseNodes.py b/Cython/Compiler/MatchCaseNodes.py index 579cf7d94..b4d39e318 100644 --- a/Cython/Compiler/MatchCaseNodes.py +++ b/Cython/Compiler/MatchCaseNodes.py @@ -1,7 +1,6 @@ # Nodes for structural pattern matching. # -# In a separate file because they're unlikely to be useful -# for much else +# In a separate file because they're unlikely to be useful for much else. 
from .Nodes import Node, StatNode from .Errors import error @@ -69,7 +68,7 @@ class PatternNode(Node): def __init__(self, pos, **kwds): super(PatternNode, self).__init__(pos, **kwds) - if not hasattr(self, "as_targets"): + if "as_targets" not in kwds: self.as_targets = [] def is_irrefutable(self): diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 23fb08dba..35f16fb7b 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -4006,7 +4006,7 @@ def p_match_statement(s, ctx): return MatchCaseNodes.MatchNode(pos, subject = subject, cases = cases) def p_case_block(s, ctx): - if not (s.sy=="IDENT" and s.systring == "case"): + if not (s.sy == "IDENT" and s.systring == "case"): s.error("Expected 'case'") s.next() pos = s.position() @@ -4062,8 +4062,8 @@ def p_maybe_star_pattern(s): ) return pattern else: - p = p_pattern(s) - return p + pattern = p_pattern(s) + return pattern def p_pattern(s): # try "as_pattern" then "or_pattern" @@ -4317,9 +4317,7 @@ def p_mapping_pattern(s): break if s.sy=='}': break - if s.sy != '}': - s.error("Expected '}'") - s.next() + s.expect('}') if double_star_set_twice is not None: return Nodes.ErrorNode(double_star_set_twice, what = "Double star capture set twice") return MatchCaseNodes.MatchMappingPatternNode( -- cgit v1.2.1 From 5e8e5a490a0721fbb86f422e366c725051ffc74f Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 17 Jul 2022 10:49:58 +0100 Subject: Apply one more suggestion Co-authored-by: scoder --- Cython/Compiler/Nodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 6fd87673f..27d8ce323 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -10120,7 +10120,7 @@ class CnameDecoratorNode(StatNode): class ErrorNode(Node): """ - Node type for things that we want to get throught the parser + Node type for things that we want to get through the parser (especially for things that are being scanned in 
"tentative_scan" blocks), but should immediately raise and error afterwards. -- cgit v1.2.1 From 20cb27bf1ca1f3b2b45ef4f59fabfc64acb29cf6 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 17 Jul 2022 10:51:11 +0100 Subject: Account for possible extension type struct padding when calculating the itemsize for the "size changed" check (GH-4894) --- Cython/Compiler/ModuleNode.py | 8 ++++---- Cython/Utility/ImportExport.c | 27 +++++++++++++++++++++++++-- 2 files changed, 29 insertions(+), 6 deletions(-) diff --git a/Cython/Compiler/ModuleNode.py b/Cython/Compiler/ModuleNode.py index f83a51706..d140caff1 100644 --- a/Cython/Compiler/ModuleNode.py +++ b/Cython/Compiler/ModuleNode.py @@ -3772,14 +3772,14 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): if not condition: code.putln("") # start in new line code.putln("#if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000") - code.putln('sizeof(%s),' % objstruct) + code.putln('sizeof(%s), __PYX_GET_STRUCT_ALIGNMENT(%s),' % (objstruct, objstruct)) code.putln("#elif CYTHON_COMPILING_IN_LIMITED_API") - code.putln('sizeof(%s),' % objstruct) + code.putln('sizeof(%s), __PYX_GET_STRUCT_ALIGNMENT(%s),' % (objstruct, objstruct)) code.putln("#else") - code.putln('sizeof(%s),' % sizeof_objstruct) + code.putln('sizeof(%s), __PYX_GET_STRUCT_ALIGNMENT(%s),' % (sizeof_objstruct, sizeof_objstruct)) code.putln("#endif") else: - code.put('sizeof(%s), ' % objstruct) + code.putln('sizeof(%s), __PYX_GET_STRUCT_ALIGNMENT(%s),' % (objstruct, objstruct)) # check_size if type.check_size and type.check_size in ('error', 'warn', 'ignore'): diff --git a/Cython/Utility/ImportExport.c b/Cython/Utility/ImportExport.c index 897657281..66e75ea00 100644 --- a/Cython/Utility/ImportExport.c +++ b/Cython/Utility/ImportExport.c @@ -478,13 +478,24 @@ set_path: #ifndef __PYX_HAVE_RT_ImportType_proto #define __PYX_HAVE_RT_ImportType_proto +#if __STDC_VERSION__ >= 201112L +#include +#endif + +#if __STDC_VERSION__ >= 201112L || __cplusplus >= 201103L 
+#define __PYX_GET_STRUCT_ALIGNMENT(s) alignof(s) +#else +// best guess at what the alignment could be since we can't measure it +#define __PYX_GET_STRUCT_ALIGNMENT(s) sizeof(void*) +#endif + enum __Pyx_ImportType_CheckSize { __Pyx_ImportType_CheckSize_Error = 0, __Pyx_ImportType_CheckSize_Warn = 1, __Pyx_ImportType_CheckSize_Ignore = 2 }; -static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); /*proto*/ +static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize check_size); /*proto*/ #endif @@ -493,7 +504,7 @@ static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, #ifndef __PYX_HAVE_RT_ImportType #define __PYX_HAVE_RT_ImportType static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name, - size_t size, enum __Pyx_ImportType_CheckSize check_size) + size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize check_size) { PyObject *result = 0; char warning[200]; @@ -534,6 +545,18 @@ static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred()) goto bad; #endif + if (itemsize) { + // If itemsize is smaller than the alignment the struct can end up with some extra + // padding at the end. In this case we need to work out the maximum size that + // the padding could be when calculating the range of valid struct sizes. + if (size % alignment) { + // if this is true we've probably calculated the alignment wrongly + // (most likely because alignof isn't available) + alignment = size % alignment; + } + if (itemsize < (Py_ssize_t)alignment) + itemsize = (Py_ssize_t)alignment; + } if ((size_t)(basicsize + itemsize) < size) { PyErr_Format(PyExc_ValueError, "%.200s.%.200s size changed, may indicate binary incompatibility. 
" -- cgit v1.2.1 From ef0e7a2ebfae6a32eaf9da5d935750250b8573f5 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sun, 17 Jul 2022 11:56:44 +0200 Subject: Fix parser functions declarations. --- Cython/Compiler/Parsing.pxd | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cython/Compiler/Parsing.pxd b/Cython/Compiler/Parsing.pxd index 5fcc735b4..dbed77415 100644 --- a/Cython/Compiler/Parsing.pxd +++ b/Cython/Compiler/Parsing.pxd @@ -132,8 +132,8 @@ cdef p_except_clause(PyrexScanner s) cdef p_include_statement(PyrexScanner s, ctx) cdef p_with_statement(PyrexScanner s) cdef p_with_items(PyrexScanner s, bint is_async=*) -cdef p_with_items_list(s, bint is_async) -cdef tuple p_with_item(s, bint is_async) +cdef p_with_items_list(PyrexScanner s, bint is_async) +cdef tuple p_with_item(PyrexScanner s, bint is_async) cdef p_with_template(PyrexScanner s) cdef p_simple_statement(PyrexScanner s, bint first_statement = *) cdef p_simple_statement_list(PyrexScanner s, ctx, bint first_statement = *) -- cgit v1.2.1 From b1ffd71c4af5888173fda64b2bca43f709b6893a Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 17 Jul 2022 10:28:32 +0100 Subject: Tiny comment fixes --- tests/run/test_patma.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/run/test_patma.py b/tests/run/test_patma.py index c1b483d54..690cb9086 100644 --- a/tests/run/test_patma.py +++ b/tests/run/test_patma.py @@ -63,7 +63,7 @@ else: # TestCompiler removed - it's very CPython-specific # TestTracing also removed - doesn't seem like a core test -# FIXME - return all the "return"s added to cause code to be dropped +# FIXME - remove all the "return"s added to cause code to be dropped ############## ORIGINAL PART FROM CPYTHON -- cgit v1.2.1 From 9924b689f3f6160f48dbf7a17df0f0b9f277f583 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 17 Jul 2022 11:18:06 +0100 Subject: Updated test_patma to match most recent CPython --- tests/run/test_patma.py | 48 
++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 46 insertions(+), 2 deletions(-) diff --git a/tests/run/test_patma.py b/tests/run/test_patma.py index 690cb9086..d815b7191 100644 --- a/tests/run/test_patma.py +++ b/tests/run/test_patma.py @@ -1,4 +1,4 @@ -### COPIED FROM CPython 3.9 +### COPIED FROM CPython 3.12 alpha (July 2022) ### Original part after ############ # cython: language_level=3 @@ -61,7 +61,36 @@ else: y: int # TestCompiler removed - it's very CPython-specific -# TestTracing also removed - doesn't seem like a core test +# TestTracing also mainly removed - doesn't seem like a core test +# except for one test that seems misplaced in CPython (which is below) + +class TestTracing(unittest.TestCase): + def test_parser_deeply_nested_patterns(self): + # Deeply nested patterns can cause exponential backtracking when parsing. + # See CPython gh-93671 for more information. + # + # DW Cython note - this doesn't break the parser but may cause a + # RecursionError later in the code-generation. I don't believe that's + # easily avoidable + + levels = 100 + + patterns = [ + "A" + "(" * levels + ")" * levels, + "{1:" * levels + "1" + "}" * levels, + "[" * levels + "1" + "]" * levels, + ] + + for pattern in patterns: + with self.subTest(pattern): + code = inspect.cleandoc(""" + if 0: # FIXME remove once pattern matching is fully implemented! 
+ match None: + case {}: + pass + """.format(pattern)) + compile(code, "", "exec") + # FIXME - remove all the "return"s added to cause code to be dropped ############## ORIGINAL PART FROM CPYTHON @@ -2953,6 +2982,21 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 'bar') + def test_patma_249(self): + return + class C: + __attr = "eggs" # mangled to _C__attr + _Outer__attr = "bacon" + class Outer: + def f(self, x): + match x: + # looks up __attr, not _C__attr or _Outer__attr + case C(__attr=y): + return y + c = C() + setattr(c, "__attr", "spam") # setattr is needed because we're in a class scope + self.assertEqual(Outer().f(c), "spam") + class TestSyntaxErrors(unittest.TestCase): -- cgit v1.2.1 From e14e3308b47ed7b4e84646241d21cb3061d422e6 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 17 Jul 2022 11:48:59 +0100 Subject: Make disabling returns easier to find --- tests/run/test_patma.py | 532 ++++++++++++++++++++++++------------------------ 1 file changed, 266 insertions(+), 266 deletions(-) diff --git a/tests/run/test_patma.py b/tests/run/test_patma.py index d815b7191..6956099e2 100644 --- a/tests/run/test_patma.py +++ b/tests/run/test_patma.py @@ -84,7 +84,7 @@ class TestTracing(unittest.TestCase): for pattern in patterns: with self.subTest(pattern): code = inspect.cleandoc(""" - if 0: # FIXME remove once pattern matching is fully implemented! + if 0: # disabled - FIXME remove once pattern matching is fully implemented! 
match None: case {}: pass @@ -100,7 +100,7 @@ class TestInheritance(unittest.TestCase): @staticmethod def check_sequence_then_mapping(x): - return + return # disabled match x: case [*_]: return "seq" @@ -109,7 +109,7 @@ class TestInheritance(unittest.TestCase): @staticmethod def check_mapping_then_sequence(x): - return + return # disabled match x: case {}: return "map" @@ -117,7 +117,7 @@ class TestInheritance(unittest.TestCase): return "seq" def test_multiple_inheritance_mapping(self): - return + return # disabled class C: pass class M1(collections.UserDict, collections.abc.Sequence): @@ -138,7 +138,7 @@ class TestInheritance(unittest.TestCase): self.assertEqual(self.check_mapping_then_sequence(M4()), "map") def test_multiple_inheritance_sequence(self): - return + return # disabled class C: pass class S1(collections.UserList, collections.abc.Mapping): @@ -159,7 +159,7 @@ class TestInheritance(unittest.TestCase): self.assertEqual(self.check_mapping_then_sequence(S4()), "seq") def test_late_registration_mapping(self): - return + return # disabled class Parent: pass class ChildPre(Parent): @@ -183,7 +183,7 @@ class TestInheritance(unittest.TestCase): self.assertEqual(self.check_mapping_then_sequence(GrandchildPost()), "map") def test_late_registration_sequence(self): - return + return # disabled class Parent: pass class ChildPre(Parent): @@ -210,14 +210,14 @@ class TestInheritance(unittest.TestCase): class TestPatma(unittest.TestCase): def test_patma_000(self): - return + return # disabled match 0: case 0: x = True self.assertIs(x, True) def test_patma_001(self): - return + return # disabled match 0: case 0 if False: x = False @@ -226,7 +226,7 @@ class TestPatma(unittest.TestCase): self.assertIs(x, True) def test_patma_002(self): - return + return # disabled match 0: case 0: x = True @@ -235,7 +235,7 @@ class TestPatma(unittest.TestCase): self.assertIs(x, True) def test_patma_003(self): - return + return # disabled x = False match 0: case 0 | 1 | 2 | 3: @@ -243,7 
+243,7 @@ class TestPatma(unittest.TestCase): self.assertIs(x, True) def test_patma_004(self): - return + return # disabled x = False match 1: case 0 | 1 | 2 | 3: @@ -251,7 +251,7 @@ class TestPatma(unittest.TestCase): self.assertIs(x, True) def test_patma_005(self): - return + return # disabled x = False match 2: case 0 | 1 | 2 | 3: @@ -259,7 +259,7 @@ class TestPatma(unittest.TestCase): self.assertIs(x, True) def test_patma_006(self): - return + return # disabled x = False match 3: case 0 | 1 | 2 | 3: @@ -267,7 +267,7 @@ class TestPatma(unittest.TestCase): self.assertIs(x, True) def test_patma_007(self): - return + return # disabled x = False match 4: case 0 | 1 | 2 | 3: @@ -275,7 +275,7 @@ class TestPatma(unittest.TestCase): self.assertIs(x, False) def test_patma_008(self): - return + return # disabled x = 0 class A: y = 1 @@ -286,7 +286,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(A.y, 1) def test_patma_009(self): - return + return # disabled class A: B = 0 match 0: @@ -302,14 +302,14 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 2) def test_patma_010(self): - return + return # disabled match (): case []: x = 0 self.assertEqual(x, 0) def test_patma_011(self): - return + return # disabled match (0, 1, 2): case [*x]: y = 0 @@ -317,7 +317,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_012(self): - return + return # disabled match (0, 1, 2): case [0, *x]: y = 0 @@ -325,7 +325,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_013(self): - return + return # disabled match (0, 1, 2): case [0, 1, *x,]: y = 0 @@ -333,7 +333,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_014(self): - return + return # disabled match (0, 1, 2): case [0, 1, 2, *x]: y = 0 @@ -341,7 +341,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_015(self): - return + return # disabled match (0, 1, 2): case [*x, 2,]: y = 0 @@ -349,7 +349,7 @@ class 
TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_016(self): - return + return # disabled match (0, 1, 2): case [*x, 1, 2]: y = 0 @@ -357,7 +357,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_017(self): - return + return # disabled match (0, 1, 2): case [*x, 0, 1, 2,]: y = 0 @@ -365,7 +365,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_018(self): - return + return # disabled match (0, 1, 2): case [0, *x, 2]: y = 0 @@ -373,7 +373,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_019(self): - return + return # disabled match (0, 1, 2): case [0, 1, *x, 2,]: y = 0 @@ -381,7 +381,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_020(self): - return + return # disabled match (0, 1, 2): case [0, *x, 1, 2]: y = 0 @@ -389,7 +389,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_021(self): - return + return # disabled match (0, 1, 2): case [*x,]: y = 0 @@ -397,7 +397,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_022(self): - return + return # disabled x = {} match x: case {}: @@ -406,7 +406,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_023(self): - return + return # disabled x = {0: 0} match x: case {}: @@ -415,7 +415,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_024(self): - return + return # disabled x = {} y = None match x: @@ -425,7 +425,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_025(self): - return + return # disabled x = {0: 0} match x: case {0: (0 | 1 | 2 as z)}: @@ -435,7 +435,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_026(self): - return + return # disabled x = {0: 1} match x: case {0: (0 | 1 | 2 as z)}: @@ -445,7 +445,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 1) def test_patma_027(self): - return + return # 
disabled x = {0: 2} match x: case {0: (0 | 1 | 2 as z)}: @@ -455,7 +455,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 2) def test_patma_028(self): - return + return # disabled x = {0: 3} y = None match x: @@ -465,7 +465,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_029(self): - return + return # disabled x = {} y = None match x: @@ -479,7 +479,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_030(self): - return + return # disabled x = {False: (True, 2.0, {})} match x: case {0: [1, 2, {}]}: @@ -492,7 +492,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_031(self): - return + return # disabled x = {False: (True, 2.0, {}), 1: [[]], 2: 0} match x: case {0: [1, 2, {}]}: @@ -505,7 +505,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_032(self): - return + return # disabled x = {False: (True, 2.0, {}), 1: [[]], 2: 0} match x: case {0: [1, 2]}: @@ -518,7 +518,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_033(self): - return + return # disabled x = [] match x: case {0: [1, 2, {}]}: @@ -531,7 +531,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 2) def test_patma_034(self): - return + return # disabled x = {0: 0} match x: case {0: [1, 2, {}]}: @@ -544,7 +544,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_035(self): - return + return # disabled x = {0: 0} match x: case {0: [1, 2, {}]}: @@ -557,7 +557,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_036(self): - return + return # disabled x = 0 match x: case 0 | 1 | 2: @@ -566,7 +566,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_037(self): - return + return # disabled x = 1 match x: case 0 | 1 | 2: @@ -575,7 +575,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_038(self): - return + return # disabled x = 2 match x: case 0 | 
1 | 2: @@ -584,7 +584,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_039(self): - return + return # disabled x = 3 y = None match x: @@ -594,7 +594,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_040(self): - return + return # disabled x = 0 match x: case (0 as z) | (1 as z) | (2 as z) if z == x % 2: @@ -604,7 +604,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_041(self): - return + return # disabled x = 1 match x: case (0 as z) | (1 as z) | (2 as z) if z == x % 2: @@ -614,7 +614,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 1) def test_patma_042(self): - return + return # disabled x = 2 y = None match x: @@ -625,7 +625,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 2) def test_patma_043(self): - return + return # disabled x = 3 y = None match x: @@ -635,7 +635,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_044(self): - return + return # disabled x = () match x: case []: @@ -644,7 +644,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_045(self): - return + return # disabled x = () match x: case (): @@ -653,7 +653,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_046(self): - return + return # disabled x = (0,) match x: case [0]: @@ -662,7 +662,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_047(self): - return + return # disabled x = ((),) match x: case [[]]: @@ -671,7 +671,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_048(self): - return + return # disabled x = [0, 1] match x: case [0, 1] | [1, 0]: @@ -680,7 +680,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_049(self): - return + return # disabled x = [1, 0] match x: case [0, 1] | [1, 0]: @@ -689,7 +689,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_050(self): - return + return 
# disabled x = [0, 0] y = None match x: @@ -699,7 +699,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_051(self): - return + return # disabled w = None x = [1, 0] match x: @@ -713,7 +713,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_052(self): - return + return # disabled x = [1, 0] match x: case [0]: @@ -726,7 +726,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 2) def test_patma_053(self): - return + return # disabled x = {0} y = None match x: @@ -736,7 +736,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_054(self): - return + return # disabled x = set() y = None match x: @@ -746,7 +746,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_055(self): - return + return # disabled x = iter([1, 2, 3]) y = None match x: @@ -756,7 +756,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_056(self): - return + return # disabled x = {} y = None match x: @@ -766,7 +766,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_057(self): - return + return # disabled x = {0: False, 1: True} y = None match x: @@ -776,7 +776,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_058(self): - return + return # disabled x = 0 match x: case 0: @@ -785,7 +785,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_059(self): - return + return # disabled x = 0 y = None match x: @@ -795,7 +795,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, None) def test_patma_060(self): - return + return # disabled x = 0 y = None match x: @@ -805,7 +805,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_061(self): - return + return # disabled x = 0 y = None match x: @@ -815,7 +815,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_062(self): - return + return # disabled x = 0 match x: case 0: @@ -826,7 
+826,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_063(self): - return + return # disabled x = 0 y = None match x: @@ -838,7 +838,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_064(self): - return + return # disabled x = "x" match x: case "x": @@ -849,7 +849,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_065(self): - return + return # disabled x = "x" match x: case "y": @@ -860,7 +860,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_066(self): - return + return # disabled x = "x" match x: case "": @@ -871,7 +871,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_067(self): - return + return # disabled x = b"x" match x: case b"y": @@ -882,7 +882,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_068(self): - return + return # disabled x = 0 match x: case 0 if False: @@ -893,7 +893,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_069(self): - return + return # disabled x = 0 y = None match x: @@ -905,7 +905,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_070(self): - return + return # disabled x = 0 match x: case 0 if True: @@ -916,7 +916,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_071(self): - return + return # disabled x = 0 match x: case 0 if 1: @@ -927,7 +927,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_072(self): - return + return # disabled x = 0 match x: case 0 if True: @@ -939,7 +939,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 2) def test_patma_073(self): - return + return # disabled x = 0 match x: case 0 if 0: @@ -951,7 +951,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 2) def test_patma_074(self): - return + return # disabled x = 0 y = None match x: @@ -963,7 +963,7 @@ class TestPatma(unittest.TestCase): 
self.assertIs(y, None) def test_patma_075(self): - return + return # disabled x = "x" match x: case ["x"]: @@ -974,7 +974,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_076(self): - return + return # disabled x = b"x" match x: case [b"x"]: @@ -989,7 +989,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 4) def test_patma_077(self): - return + return # disabled x = bytearray(b"x") y = None match x: @@ -1001,7 +1001,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_078(self): - return + return # disabled x = "" match x: case []: @@ -1014,7 +1014,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 2) def test_patma_079(self): - return + return # disabled x = "xxx" match x: case ["x", "x", "x"]: @@ -1027,7 +1027,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 2) def test_patma_080(self): - return + return # disabled x = b"xxx" match x: case [120, 120, 120]: @@ -1040,7 +1040,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 2) def test_patma_081(self): - return + return # disabled x = 0 match x: case 0 if not (x := 1): @@ -1052,7 +1052,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_082(self): - return + return # disabled x = 0 match x: case (1 as z) if not (x := 1): @@ -1063,7 +1063,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_083(self): - return + return # disabled x = 0 match x: case (0 as z): @@ -1073,7 +1073,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_084(self): - return + return # disabled x = 0 y = None match x: @@ -1083,7 +1083,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_085(self): - return + return # disabled x = 0 y = None match x: @@ -1095,7 +1095,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_086(self): - return + return # disabled x = 0 match x: case ((0 as w) as z): @@ -1106,7 +1106,7 @@ 
class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_087(self): - return + return # disabled x = 0 match x: case (0 | 1) | 2: @@ -1115,7 +1115,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_088(self): - return + return # disabled x = 1 match x: case (0 | 1) | 2: @@ -1124,7 +1124,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_089(self): - return + return # disabled x = 2 match x: case (0 | 1) | 2: @@ -1133,7 +1133,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_090(self): - return + return # disabled x = 3 y = None match x: @@ -1143,7 +1143,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_091(self): - return + return # disabled x = 0 match x: case 0 | (1 | 2): @@ -1152,7 +1152,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_092(self): - return + return # disabled x = 1 match x: case 0 | (1 | 2): @@ -1161,7 +1161,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_093(self): - return + return # disabled x = 2 match x: case 0 | (1 | 2): @@ -1170,7 +1170,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_094(self): - return + return # disabled x = 3 y = None match x: @@ -1180,7 +1180,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_095(self): - return + return # disabled x = 0 match x: case -0: @@ -1189,7 +1189,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_096(self): - return + return # disabled x = 0 match x: case -0.0: @@ -1198,7 +1198,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_097(self): - return + return # disabled x = 0 match x: case -0j: @@ -1207,7 +1207,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_098(self): - return + return # disabled x = 0 match x: case -0.0j: @@ -1216,7 +1216,7 @@ class 
TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_099(self): - return + return # disabled x = -1 match x: case -1: @@ -1225,7 +1225,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_100(self): - return + return # disabled x = -1.5 match x: case -1.5: @@ -1234,7 +1234,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_101(self): - return + return # disabled x = -1j match x: case -1j: @@ -1243,7 +1243,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_102(self): - return + return # disabled x = -1.5j match x: case -1.5j: @@ -1252,7 +1252,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_103(self): - return + return # disabled x = 0 match x: case 0 + 0j: @@ -1261,7 +1261,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_104(self): - return + return # disabled x = 0 match x: case 0 - 0j: @@ -1270,7 +1270,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_105(self): - return + return # disabled x = 0 match x: case -0 + 0j: @@ -1279,7 +1279,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_106(self): - return + return # disabled x = 0 match x: case -0 - 0j: @@ -1288,7 +1288,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_107(self): - return + return # disabled x = 0.25 + 1.75j match x: case 0.25 + 1.75j: @@ -1297,7 +1297,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_108(self): - return + return # disabled x = 0.25 - 1.75j match x: case 0.25 - 1.75j: @@ -1306,7 +1306,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_109(self): - return + return # disabled x = -0.25 + 1.75j match x: case -0.25 + 1.75j: @@ -1315,7 +1315,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_110(self): - return + return # disabled x = -0.25 - 1.75j match x: case 
-0.25 - 1.75j: @@ -1324,7 +1324,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_111(self): - return + return # disabled class A: B = 0 x = 0 @@ -1336,7 +1336,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_112(self): - return + return # disabled class A: class B: C = 0 @@ -1349,7 +1349,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_113(self): - return + return # disabled class A: class B: C = 0 @@ -1366,7 +1366,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_114(self): - return + return # disabled class A: class B: class C: @@ -1380,7 +1380,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_115(self): - return + return # disabled class A: class B: class C: @@ -1398,7 +1398,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_116(self): - return + return # disabled match = case = 0 match match: case case: @@ -1408,7 +1408,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(x, 0) def test_patma_117(self): - return + return # disabled match = case = 0 match case: case match: @@ -1418,7 +1418,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(x, 0) def test_patma_118(self): - return + return # disabled x = [] match x: case [*_, _]: @@ -1429,7 +1429,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_119(self): - return + return # disabled x = collections.defaultdict(int) match x: case {0: 0}: @@ -1440,7 +1440,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_120(self): - return + return # disabled x = collections.defaultdict(int) match x: case {0: 0}: @@ -1452,14 +1452,14 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, {}) def test_patma_121(self): - return + return # disabled match (): case (): x = 0 self.assertEqual(x, 0) def test_patma_122(self): - return + return # disabled match (0, 1, 2): case (*x,): y = 0 @@ 
-1467,7 +1467,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_123(self): - return + return # disabled match (0, 1, 2): case 0, *x: y = 0 @@ -1475,7 +1475,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_124(self): - return + return # disabled match (0, 1, 2): case (0, 1, *x,): y = 0 @@ -1483,7 +1483,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_125(self): - return + return # disabled match (0, 1, 2): case 0, 1, 2, *x: y = 0 @@ -1491,7 +1491,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_126(self): - return + return # disabled match (0, 1, 2): case *x, 2,: y = 0 @@ -1499,7 +1499,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_127(self): - return + return # disabled match (0, 1, 2): case (*x, 1, 2): y = 0 @@ -1507,7 +1507,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_128(self): - return + return # disabled match (0, 1, 2): case *x, 0, 1, 2,: y = 0 @@ -1515,7 +1515,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_129(self): - return + return # disabled match (0, 1, 2): case (0, *x, 2): y = 0 @@ -1523,7 +1523,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_130(self): - return + return # disabled match (0, 1, 2): case 0, 1, *x, 2,: y = 0 @@ -1531,7 +1531,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_131(self): - return + return # disabled match (0, 1, 2): case (0, *x, 1, 2): y = 0 @@ -1539,7 +1539,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_132(self): - return + return # disabled match (0, 1, 2): case *x,: y = 0 @@ -1547,7 +1547,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_133(self): - return + return # disabled x = collections.defaultdict(int, {0: 1}) match x: case {1: 0}: @@ -1560,7 +1560,7 @@ class 
TestPatma(unittest.TestCase): self.assertEqual(y, 2) def test_patma_134(self): - return + return # disabled x = collections.defaultdict(int, {0: 1}) match x: case {1: 0}: @@ -1574,7 +1574,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, {0: 1}) def test_patma_135(self): - return + return # disabled x = collections.defaultdict(int, {0: 1}) match x: case {1: 0}: @@ -1588,7 +1588,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, {}) def test_patma_136(self): - return + return # disabled x = {0: 1} match x: case {1: 0}: @@ -1601,7 +1601,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_137(self): - return + return # disabled x = {0: 1} match x: case {1: 0}: @@ -1615,7 +1615,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, {0: 1}) def test_patma_138(self): - return + return # disabled x = {0: 1} match x: case {1: 0}: @@ -1629,7 +1629,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, {}) def test_patma_139(self): - return + return # disabled x = False match x: case bool(z): @@ -1639,7 +1639,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_140(self): - return + return # disabled x = True match x: case bool(z): @@ -1649,7 +1649,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_141(self): - return + return # disabled x = bytearray() match x: case bytearray(z): @@ -1659,7 +1659,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_142(self): - return + return # disabled x = b"" match x: case bytes(z): @@ -1669,7 +1669,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_143(self): - return + return # disabled x = {} match x: case dict(z): @@ -1679,7 +1679,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_144(self): - return + return # disabled x = 0.0 match x: case float(z): @@ -1689,7 +1689,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_145(self): - 
return + return # disabled x = frozenset() match x: case frozenset(z): @@ -1699,7 +1699,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_146(self): - return + return # disabled x = 0 match x: case int(z): @@ -1709,7 +1709,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_147(self): - return + return # disabled x = [] match x: case list(z): @@ -1719,7 +1719,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_148(self): - return + return # disabled x = set() match x: case set(z): @@ -1729,7 +1729,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_149(self): - return + return # disabled x = "" match x: case str(z): @@ -1739,7 +1739,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_150(self): - return + return # disabled x = () match x: case tuple(z): @@ -1749,7 +1749,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_151(self): - return + return # disabled x = 0 match x,: case y,: @@ -1759,7 +1759,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, 0) def test_patma_152(self): - return + return # disabled w = 0 x = 0 match w, x: @@ -1772,7 +1772,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(v, 0) def test_patma_153(self): - return + return # disabled x = 0 match w := x,: case y as v,: @@ -1784,7 +1784,7 @@ class TestPatma(unittest.TestCase): self.assertIs(v, y) def test_patma_154(self): - return + return # disabled x = 0 y = None match x: @@ -1794,7 +1794,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_155(self): - return + return # disabled x = 0 y = None match x: @@ -1804,7 +1804,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_156(self): - return + return # disabled x = 0 match x: case z: @@ -1814,7 +1814,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_157(self): - return + return # disabled x = 0 y = None 
match x: @@ -1824,7 +1824,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_158(self): - return + return # disabled x = 0 match x: case -1e1000: @@ -1835,7 +1835,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_159(self): - return + return # disabled x = 0 match x: case 0 if not x: @@ -1846,7 +1846,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_160(self): - return + return # disabled x = 0 z = None match x: @@ -1859,7 +1859,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, None) def test_patma_161(self): - return + return # disabled x = 0 match x: case 0: @@ -1870,7 +1870,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_162(self): - return + return # disabled x = 0 match x: case 1 if x: @@ -1881,7 +1881,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_163(self): - return + return # disabled x = 0 y = None match x: @@ -1893,7 +1893,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_164(self): - return + return # disabled x = 0 match x: case 1: @@ -1905,7 +1905,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_165(self): - return + return # disabled x = 0 match x: case 1 if x: @@ -1916,7 +1916,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_166(self): - return + return # disabled x = 0 match x: case z if not z: @@ -1928,7 +1928,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_167(self): - return + return # disabled x = 0 match x: case z if not z: @@ -1940,7 +1940,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_168(self): - return + return # disabled x = 0 match x: case z if not x: @@ -1952,7 +1952,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_169(self): - return + return # disabled x = 0 match x: case z if not z: @@ -1964,7 +1964,7 @@ class 
TestPatma(unittest.TestCase): self.assertIs(z, x) def test_patma_170(self): - return + return # disabled x = 0 match x: case _ if not x: @@ -1975,7 +1975,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_171(self): - return + return # disabled x = 0 y = None match x: @@ -1987,7 +1987,7 @@ class TestPatma(unittest.TestCase): self.assertIs(y, None) def test_patma_172(self): - return + return # disabled x = 0 z = None match x: @@ -2000,7 +2000,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, None) def test_patma_173(self): - return + return # disabled x = 0 match x: case _ if not x: @@ -2011,7 +2011,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_174(self): - return + return # disabled def http_error(status): match status: case 400: @@ -2036,7 +2036,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(http_error(401 | 403 | 404), "Something else") # 407 def test_patma_175(self): - return + return # disabled def http_error(status): match status: case 400: @@ -2055,7 +2055,7 @@ class TestPatma(unittest.TestCase): self.assertIs(http_error(401 | 403 | 404), None) # 407 def test_patma_176(self): - return + return # disabled def whereis(point): match point: case (0, 0): @@ -2075,7 +2075,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(whereis(42), "Not a point") def test_patma_177(self): - return + return # disabled def whereis(point): match point: case Point(0, 0): @@ -2099,7 +2099,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(whereis(42), "Not a point") def test_patma_178(self): - return + return # disabled def whereis(point): match point: case Point(1, var): @@ -2108,7 +2108,7 @@ class TestPatma(unittest.TestCase): self.assertIs(whereis(Point(0, 0)), None) def test_patma_179(self): - return + return # disabled def whereis(point): match point: case Point(1, y=var): @@ -2117,7 +2117,7 @@ class TestPatma(unittest.TestCase): self.assertIs(whereis(Point(0, 0)), None) def 
test_patma_180(self): - return + return # disabled def whereis(point): match point: case Point(x=1, y=var): @@ -2126,7 +2126,7 @@ class TestPatma(unittest.TestCase): self.assertIs(whereis(Point(0, 0)), None) def test_patma_181(self): - return + return # disabled def whereis(point): match point: case Point(y=var, x=1): @@ -2135,7 +2135,7 @@ class TestPatma(unittest.TestCase): self.assertIs(whereis(Point(0, 0)), None) def test_patma_182(self): - return + return # disabled def whereis(points): match points: case []: @@ -2158,7 +2158,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(whereis([Point(0, 1), Point(0, 1), Point(0, 1)]), "Something else") def test_patma_183(self): - return + return # disabled def whereis(point): match point: case Point(x, y) if x == y: @@ -2173,7 +2173,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(whereis(Point("X", "x")), "Not on the diagonal") def test_patma_184(self): - return + return # disabled class Seq(collections.abc.Sequence): __getitem__ = None def __len__(self): @@ -2184,7 +2184,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_185(self): - return + return # disabled class Seq(collections.abc.Sequence): __getitem__ = None def __len__(self): @@ -2195,7 +2195,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_186(self): - return + return # disabled class Seq(collections.abc.Sequence): def __getitem__(self, i): return i @@ -2209,7 +2209,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_187(self): - return + return # disabled w = range(10) match w: case [x, y, *rest]: @@ -2221,7 +2221,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(rest, list(range(2, 10))) def test_patma_188(self): - return + return # disabled w = range(100) match w: case (x, y, *rest): @@ -2233,7 +2233,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(rest, list(range(2, 100))) def test_patma_189(self): - return + return # disabled w = 
range(1000) match w: case x, y, *rest: @@ -2245,7 +2245,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(rest, list(range(2, 1000))) def test_patma_190(self): - return + return # disabled w = range(1 << 10) match w: case [x, y, *_]: @@ -2256,7 +2256,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_191(self): - return + return # disabled w = range(1 << 20) match w: case (x, y, *_): @@ -2267,7 +2267,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_192(self): - return + return # disabled w = range(1 << 30) match w: case x, y, *_: @@ -2278,7 +2278,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_193(self): - return + return # disabled x = {"bandwidth": 0, "latency": 1} match x: case {"bandwidth": b, "latency": l}: @@ -2289,7 +2289,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_194(self): - return + return # disabled x = {"bandwidth": 0, "latency": 1, "key": "value"} match x: case {"latency": l, "bandwidth": b}: @@ -2300,7 +2300,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_195(self): - return + return # disabled x = {"bandwidth": 0, "latency": 1, "key": "value"} match x: case {"bandwidth": b, "latency": l, **rest}: @@ -2312,7 +2312,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_196(self): - return + return # disabled x = {"bandwidth": 0, "latency": 1} match x: case {"latency": l, "bandwidth": b, **rest}: @@ -2324,7 +2324,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_197(self): - return + return # disabled w = [Point(-1, 0), Point(1, 2)] match w: case (Point(x1, y1), Point(x2, y2) as p2): @@ -2338,7 +2338,7 @@ class TestPatma(unittest.TestCase): self.assertIs(z, 0) def test_patma_198(self): - return + return # disabled class Color(enum.Enum): RED = 0 GREEN = 1 @@ -2365,7 +2365,7 @@ class TestPatma(unittest.TestCase): self.assertIs(f(3.0), None) 
def test_patma_199(self): - return + return # disabled class Color(int, enum.Enum): RED = 0 GREEN = 1 @@ -2392,7 +2392,7 @@ class TestPatma(unittest.TestCase): self.assertIs(f(3.0), None) def test_patma_200(self): - return + return # disabled class Class: __match_args__ = ("a", "b") c = Class() @@ -2406,7 +2406,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_201(self): - return + return # disabled class Class: __match_args__ = ("a", "b") c = Class() @@ -2420,7 +2420,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_202(self): - return + return # disabled class Parent: __match_args__ = "a", "b" class Child(Parent): @@ -2436,7 +2436,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_203(self): - return + return # disabled class Parent: __match_args__ = ("a", "b") class Child(Parent): @@ -2452,7 +2452,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_204(self): - return + return # disabled def f(w): match w: case 42: @@ -2465,7 +2465,7 @@ class TestPatma(unittest.TestCase): self.assertIs(f("42"), None) def test_patma_205(self): - return + return # disabled def f(w): match w: case 42.0: @@ -2478,7 +2478,7 @@ class TestPatma(unittest.TestCase): self.assertIs(f(0), None) def test_patma_206(self): - return + return # disabled def f(w): match w: case 1 | 2 | 3: @@ -2494,7 +2494,7 @@ class TestPatma(unittest.TestCase): self.assertIs(f("1"), None) def test_patma_207(self): - return + return # disabled def f(w): match w: case [1, 2] | [3, 4]: @@ -2509,7 +2509,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(f([1, 2.0]), {}) def test_patma_208(self): - return + return # disabled def f(w): match w: case x: @@ -2521,7 +2521,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(f(None), {"x": None}) def test_patma_209(self): - return + return # disabled def f(w): match w: case _: @@ -2533,7 +2533,7 @@ class TestPatma(unittest.TestCase): 
self.assertEqual(f((1, 2)), {}) def test_patma_210(self): - return + return # disabled def f(w): match w: case (x, y, z): @@ -2551,7 +2551,7 @@ class TestPatma(unittest.TestCase): self.assertIs(f(bytearray(b"abc")), None) def test_patma_211(self): - return + return # disabled def f(w): match w: case {"x": x, "y": "y", "z": z}: @@ -2564,7 +2564,7 @@ class TestPatma(unittest.TestCase): self.assertIs(f(({"x": "x", "y": "y"})), None) def test_patma_212(self): - return + return # disabled def f(w): match w: case Point(int(xx), y="hello"): @@ -2574,7 +2574,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(f(Point(42, "hello")), {"xx": 42}) def test_patma_213(self): - return + return # disabled def f(w): match w: case (p, q) as x: @@ -2587,7 +2587,7 @@ class TestPatma(unittest.TestCase): self.assertIs(f((1, 2, 3)), None) def test_patma_214(self): - return + return # disabled def f(): match 42: case 42: @@ -2595,7 +2595,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(set(f()), set()) def test_patma_215(self): - return + return # disabled def f(): match 1: case 1 | 2 | 3: @@ -2603,7 +2603,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(set(f()), set()) def test_patma_216(self): - return + return # disabled def f(): match ...: case _: @@ -2611,7 +2611,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(set(f()), set()) def test_patma_217(self): - return + return # disabled def f(): match ...: case abc: @@ -2619,7 +2619,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(set(f()), {"abc"}) def test_patma_218(self): - return + return # disabled def f(): match ..., ...: case a, b: @@ -2627,7 +2627,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(set(f()), {"a", "b"}) def test_patma_219(self): - return + return # disabled def f(): match {"k": ..., "l": ...}: case {"k": a, "l": b}: @@ -2635,7 +2635,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(set(f()), {"a", "b"}) def test_patma_220(self): - return + return # disabled 
def f(): match Point(..., ...): case Point(x, y=y): @@ -2643,7 +2643,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(set(f()), {"x", "y"}) def test_patma_221(self): - return + return # disabled def f(): match ...: case b as a: @@ -2651,7 +2651,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(set(f()), {"a", "b"}) def test_patma_222(self): - return + return # disabled def f(x): match x: case _: @@ -2662,7 +2662,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(f(3), 0) def test_patma_223(self): - return + return # disabled def f(x): match x: case 0: @@ -2673,7 +2673,7 @@ class TestPatma(unittest.TestCase): self.assertIs(f(3), None) def test_patma_224(self): - return + return # disabled def f(x): match x: case 0: @@ -2686,7 +2686,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(f(3), 1) def test_patma_225(self): - return + return # disabled def f(x): match x: case 0: @@ -2699,7 +2699,7 @@ class TestPatma(unittest.TestCase): self.assertIs(f(3), None) def test_patma_226(self): - return + return # disabled def f(x): match x: case 0: @@ -2714,7 +2714,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(f(3), 2) def test_patma_227(self): - return + return # disabled def f(x): match x: case 0: @@ -2729,14 +2729,14 @@ class TestPatma(unittest.TestCase): self.assertIs(f(3), None) def test_patma_228(self): - return + return # disabled match(): case(): x = 0 self.assertEqual(x, 0) def test_patma_229(self): - return + return # disabled x = 0 match(x): case(x): @@ -2745,7 +2745,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_230(self): - return + return # disabled x = 0 match x: case False: @@ -2756,7 +2756,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_231(self): - return + return # disabled x = 1 match x: case True: @@ -2767,7 +2767,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 1) def test_patma_232(self): - return + return # disabled class Eq: def 
__eq__(self, other): return True @@ -2780,7 +2780,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, None) def test_patma_233(self): - return + return # disabled x = False match x: case False: @@ -2789,7 +2789,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_234(self): - return + return # disabled x = True match x: case True: @@ -2798,7 +2798,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_235(self): - return + return # disabled x = None match x: case None: @@ -2807,7 +2807,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_236(self): - return + return # disabled x = 0 match x: case (0 as w) as z: @@ -2818,7 +2818,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_237(self): - return + return # disabled x = 0 match x: case (0 as w) as z: @@ -2829,7 +2829,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_238(self): - return + return # disabled x = ((0, 1), (2, 3)) match x: case ((a as b, c as d) as e) as w, ((f as g, h) as i) as z: @@ -2849,7 +2849,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, (2, 3)) def test_patma_239(self): - return + return # disabled x = collections.UserDict({0: 1, 2: 3}) match x: case {2: 3}: @@ -2858,7 +2858,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 0) def test_patma_240(self): - return + return # disabled x = collections.UserDict({0: 1, 2: 3}) match x: case {2: 3, **z}: @@ -2868,7 +2868,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, {0: 1}) def test_patma_241(self): - return + return # disabled x = [[{0: 0}]] match x: case list([({-0-0j: int(real=0+0j, imag=0-0j) | (1) as z},)]): @@ -2878,7 +2878,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_242(self): - return + return # disabled x = range(3) match x: case [y, *_, z]: @@ -2889,7 +2889,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 2) def 
test_patma_243(self): - return + return # disabled x = range(3) match x: case [_, *_, y]: @@ -2899,7 +2899,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_244(self): - return + return # disabled x = range(3) match x: case [*_, y]: @@ -2909,7 +2909,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_245(self): - return + return # disabled x = {"y": 1} match x: case {"y": (0 as y) | (1 as y)}: @@ -2919,7 +2919,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(z, 0) def test_patma_246(self): - return + return # disabled def f(x): match x: case ((a, b, c, d, e, f, g, h, i, 9) | @@ -2944,7 +2944,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(f(range(10, 20)), alts[4]) def test_patma_247(self): - return + return # disabled def f(x): match x: case [y, (a, b, c, d, e, f, g, h, i, 9) | @@ -2969,7 +2969,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(f((False, range(10, 20), True)), alts[4]) def test_patma_248(self): - return + return # disabled class C(dict): @staticmethod def get(key, default=None): @@ -2983,7 +2983,7 @@ class TestPatma(unittest.TestCase): self.assertEqual(y, 'bar') def test_patma_249(self): - return + return # disabled class C: __attr = "eggs" # mangled to _C__attr _Outer__attr = "bacon" @@ -3296,7 +3296,7 @@ class TestSyntaxErrors(unittest.TestCase): class TestTypeErrors(unittest.TestCase): def test_accepts_positional_subpatterns_0(self): - return + return # disabled class Class: __match_args__ = () x = Class() @@ -3309,7 +3309,7 @@ class TestTypeErrors(unittest.TestCase): self.assertIs(z, None) def test_accepts_positional_subpatterns_1(self): - return + return # disabled x = range(10) y = None with self.assertRaises(TypeError): @@ -3320,7 +3320,7 @@ class TestTypeErrors(unittest.TestCase): self.assertIs(y, None) def test_got_multiple_subpatterns_for_attribute_0(self): - return + return # disabled class Class: __match_args__ = ("a", "a") a = None @@ -3335,7 +3335,7 @@ 
class TestTypeErrors(unittest.TestCase): self.assertIs(z, None) def test_got_multiple_subpatterns_for_attribute_1(self): - return + return # disabled class Class: __match_args__ = ("a",) a = None @@ -3350,7 +3350,7 @@ class TestTypeErrors(unittest.TestCase): self.assertIs(z, None) def test_match_args_elements_must_be_strings(self): - return + return # disabled class Class: __match_args__ = (None,) x = Class() @@ -3363,7 +3363,7 @@ class TestTypeErrors(unittest.TestCase): self.assertIs(z, None) def test_match_args_must_be_a_tuple_0(self): - return + return # disabled class Class: __match_args__ = None x = Class() @@ -3376,7 +3376,7 @@ class TestTypeErrors(unittest.TestCase): self.assertIs(z, None) def test_match_args_must_be_a_tuple_1(self): - return + return # disabled class Class: __match_args__ = "XYZ" x = Class() @@ -3389,7 +3389,7 @@ class TestTypeErrors(unittest.TestCase): self.assertIs(z, None) def test_match_args_must_be_a_tuple_2(self): - return + return # disabled class Class: __match_args__ = ["spam", "eggs"] spam = 0 @@ -3408,7 +3408,7 @@ class TestTypeErrors(unittest.TestCase): class TestValueErrors(unittest.TestCase): def test_mapping_pattern_checks_duplicate_key_1(self): - return + return # disabled class Keys: KEY = "a" x = {"a": 0, "b": 1} -- cgit v1.2.1 From 4989461fa51c8cb99c94476f8cdc446f1ff82ab8 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 17 Jul 2022 12:02:12 +0100 Subject: A bit of spacing for readability --- Cython/Compiler/Parsing.py | 33 +++++++++++++++++++++++++++++++-- 1 file changed, 31 insertions(+), 2 deletions(-) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 35f16fb7b..4441e1ae3 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -3974,6 +3974,7 @@ def p_cpp_class_attribute(s, ctx): node.decorators = decorators return node + def p_match_statement(s, ctx): assert s.sy == "IDENT" and s.systring == "match" pos = s.position() @@ -3993,6 +3994,7 @@ def p_match_statement(s, ctx): 
s.expect(":") if errors: return None + # at this stage were commited to it being a match block so continue # outside "with tentatively_scan" # (I think this deviates from the PEG parser slightly, and it'd @@ -4005,6 +4007,7 @@ def p_match_statement(s, ctx): s.expect_dedent() return MatchCaseNodes.MatchNode(pos, subject = subject, cases = cases) + def p_case_block(s, ctx): if not (s.sy == "IDENT" and s.systring == "case"): s.error("Expected 'case'") @@ -4019,8 +4022,10 @@ def p_case_block(s, ctx): return MatchCaseNodes.MatchCaseNode(pos, pattern=pattern, body=body, guard=guard) + def p_patterns(s): - # note - in slight contrast to the name, returns a single pattern + # note - in slight contrast to the name (which comes from the Python grammar), + # returns a single pattern patterns = [] seq = False pos = s.position() @@ -4042,11 +4047,13 @@ def p_patterns(s): break # common reasons to break else: break + if seq: return MatchCaseNodes.MatchSequencePatternNode(pos, patterns = patterns) else: return patterns[0] + def p_maybe_star_pattern(s): # For match case. 
Either star_pattern or pattern if s.sy == "*": @@ -4065,6 +4072,7 @@ def p_maybe_star_pattern(s): pattern = p_pattern(s) return pattern + def p_pattern(s): # try "as_pattern" then "or_pattern" # (but practically "as_pattern" starts with "or_pattern" too) @@ -4076,6 +4084,7 @@ def p_pattern(s): s.next() else: break + if len(patterns) > 1: pattern = MatchCaseNodes.OrPatternNode( pos, @@ -4083,6 +4092,7 @@ def p_pattern(s): ) else: pattern = patterns[0] + if s.sy == 'IDENT' and s.systring == 'as': s.next() with tentatively_scan(s) as errors: @@ -4147,6 +4157,7 @@ def p_closed_pattern(s): return result return p_class_pattern(s) + def p_literal_pattern(s): # a lot of duplication in this function with "p_atom" next_must_be_a_number = False @@ -4167,8 +4178,10 @@ def p_literal_pattern(s): value = s.systring s.next() res = ExprNodes.FloatNode(pos, value = value) + if res and sign == "-": res = ExprNodes.UnaryMinusNode(sign_pos, operand=res) + if res and s.sy in ['+', '-']: sign = s.sy s.next() @@ -4230,12 +4243,14 @@ def p_literal_pattern(s): s.error("Failed to match literal") + def p_capture_pattern(s): return MatchCaseNodes.MatchAndAssignPatternNode( s.position(), target = p_pattern_capture_target(s) ) + def p_value_pattern(s): if s.sy != "IDENT": s.error("Expected identifier") @@ -4253,12 +4268,14 @@ def p_value_pattern(s): s.error("Unexpected symbol '%s'" % s.sy) return MatchCaseNodes.MatchValuePatternNode(pos, value = res) + def p_group_pattern(s): s.expect("(") pattern = p_pattern(s) s.expect(")") return pattern + def p_sequence_pattern(s): opener = s.sy pos = s.position() @@ -4285,12 +4302,15 @@ def p_sequence_pattern(s): else: s.error("Expected '[' or '('") + def p_mapping_pattern(s): pos = s.position() s.expect('{') if s.sy == '}': + # trivial empty mapping s.next() return MatchCaseNodes.MatchMappingPatternNode(pos) + double_star_capture_target = None items_patterns = [] double_star_set_twice = None @@ -4318,6 +4338,7 @@ def p_mapping_pattern(s): if s.sy=='}': 
break s.expect('}') + if double_star_set_twice is not None: return Nodes.ErrorNode(double_star_set_twice, what = "Double star capture set twice") return MatchCaseNodes.MatchMappingPatternNode( @@ -4327,8 +4348,9 @@ def p_mapping_pattern(s): double_star_capture_target = double_star_capture_target ) + def p_class_pattern(s): - # name_or_attr + # start by parsing the class as name_or_attr pos = s.position() res = p_name(s, s.systring) s.next() @@ -4338,10 +4360,14 @@ def p_class_pattern(s): attr = p_ident(s) res = ExprNodes.AttributeNode(attr_pos, obj = res, attribute=attr) class_ = res + s.expect("(") if s.sy == ")": + # trivial case with no arguments matched s.next() return MatchCaseNodes.ClassPatternNode(pos, class_=class_) + + # parse the arguments positional_patterns = [] keyword_patterns = [] keyword_patterns_error = None @@ -4361,6 +4387,7 @@ def p_class_pattern(s): else: break s.expect(")") + if keyword_patterns_error is not None: return Nodes.ErrorNode( keyword_patterns_error, @@ -4373,6 +4400,7 @@ def p_class_pattern(s): keyword_pattern_patterns = [kv[1] for kv in keyword_patterns], ) + def p_keyword_pattern(s): if s.sy != "IDENT": s.error("Expected identifier") @@ -4382,6 +4410,7 @@ def p_keyword_pattern(s): value = p_pattern(s) return arg, value + def p_pattern_capture_target(s): # any name but '_', and with some constraints on what follows if s.sy != 'IDENT': -- cgit v1.2.1 From 3a919f5cb689ac0e72d4431515ab27171f2e1d72 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 17 Jul 2022 12:20:14 +0100 Subject: setup enum34 on pypy2.7 --- Tools/ci-run.sh | 2 ++ test-requirements-pypy27.txt | 1 + 2 files changed, 3 insertions(+) create mode 100644 test-requirements-pypy27.txt diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index 4309fd4ad..0585f9a04 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -76,6 +76,8 @@ else if [[ $PYTHON_VERSION != "pypy"* && $PYTHON_VERSION != "3."[1]* ]]; then python -m pip install -r test-requirements-cpython.txt || exit 1 + elif 
[[ $PYTHON_VERSION == "pypy-2.7" ]]; then + python -m pip install -r test-requirements-pypy27.txt || exit 1 fi fi fi diff --git a/test-requirements-pypy27.txt b/test-requirements-pypy27.txt new file mode 100644 index 000000000..369b7225a --- /dev/null +++ b/test-requirements-pypy27.txt @@ -0,0 +1 @@ +enum34==1.1.10 -- cgit v1.2.1 From 1b284442bb9f9ea59dddf069c63c3cc042cd9696 Mon Sep 17 00:00:00 2001 From: Ewout ter Hoeven Date: Sun, 17 Jul 2022 19:46:02 +0200 Subject: CI: Remove "allow_failures" for Python 3.11 to make sure we notice failures during the release phases (GH-4780) --- .github/workflows/ci.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f5f645555..adf05da7c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -39,9 +39,7 @@ jobs: env: [{}] include: - # Temporary - Allow failure on Python 3.11-dev jobs until they are considered stable - - python-version: 3.11-dev - allowed_failure: true + # Temporary - Allow failure on Python 3.12-dev jobs until they are in beta (feature frozen) #- python-version: 3.12-dev # allowed_failure: true -- cgit v1.2.1 From 344389e9be01a34b0d8a79260396c791b1183f69 Mon Sep 17 00:00:00 2001 From: Matti Picus Date: Sun, 17 Jul 2022 20:54:15 +0300 Subject: pyximport: 'cd' into common dir to prevent too-long filenames (mostly for windows) (GH-4630) --- pyximport/_pyximport2.py | 26 ++++++++++++++++++++------ pyximport/_pyximport3.py | 26 ++++++++++++++++++++------ 2 files changed, 40 insertions(+), 12 deletions(-) diff --git a/pyximport/_pyximport2.py b/pyximport/_pyximport2.py index b2077826a..00e88a8ac 100644 --- a/pyximport/_pyximport2.py +++ b/pyximport/_pyximport2.py @@ -185,12 +185,26 @@ def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_l build_in_temp = sargs.pop('build_in_temp',build_in_temp) from . 
import pyxbuild - so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod, - build_in_temp=build_in_temp, - pyxbuild_dir=pyxbuild_dir, - setup_args=sargs, - inplace=inplace, - reload_support=pyxargs.reload_support) + olddir = os.getcwd() + common = '' + if pyxbuild_dir: + # Windows concantenates the pyxbuild_dir to the pyxfilename when + # compiling, and then complains that the filename is too long + common = os.path.commonprefix([pyxbuild_dir, pyxfilename]) + if len(common) > 30: + pyxfilename = os.path.relpath(pyxfilename) + pyxbuild_dir = os.path.relpath(pyxbuild_dir) + os.chdir(common) + try: + so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod, + build_in_temp=build_in_temp, + pyxbuild_dir=pyxbuild_dir, + setup_args=sargs, + inplace=inplace, + reload_support=pyxargs.reload_support) + finally: + os.chdir(olddir) + so_path = os.path.join(common, so_path) assert os.path.exists(so_path), "Cannot find: %s" % so_path junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;) diff --git a/pyximport/_pyximport3.py b/pyximport/_pyximport3.py index dccd1d09e..4fa811f8a 100644 --- a/pyximport/_pyximport3.py +++ b/pyximport/_pyximport3.py @@ -183,12 +183,26 @@ def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_l build_in_temp = sargs.pop('build_in_temp',build_in_temp) from . 
import pyxbuild - so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod, - build_in_temp=build_in_temp, - pyxbuild_dir=pyxbuild_dir, - setup_args=sargs, - inplace=inplace, - reload_support=pyxargs.reload_support) + olddir = os.getcwd() + common = '' + if pyxbuild_dir: + # Windows concantenates the pyxbuild_dir to the pyxfilename when + # compiling, and then complains that the filename is too long + common = os.path.commonprefix([pyxbuild_dir, pyxfilename]) + if len(common) > 30: + pyxfilename = os.path.relpath(pyxfilename) + pyxbuild_dir = os.path.relpath(pyxbuild_dir) + os.chdir(common) + try: + so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod, + build_in_temp=build_in_temp, + pyxbuild_dir=pyxbuild_dir, + setup_args=sargs, + inplace=inplace, + reload_support=pyxargs.reload_support) + finally: + os.chdir(olddir) + so_path = os.path.join(common, so_path) assert os.path.exists(so_path), "Cannot find: %s" % so_path junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? 
yes, indeed, trying to eat my files ;) -- cgit v1.2.1 From 7183d4896804bb7364e296d9146722a41a0a4d56 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 17 Jul 2022 12:20:14 +0100 Subject: Change compile tests to use TreeFragment and fix a few tests --- Cython/Compiler/MatchCaseNodes.py | 2 +- Cython/Compiler/Nodes.py | 2 +- Cython/Compiler/Parsing.py | 13 +++++--- tests/run/test_patma.py | 67 ++++++++++++++++++++------------------- 4 files changed, 44 insertions(+), 40 deletions(-) diff --git a/Cython/Compiler/MatchCaseNodes.py b/Cython/Compiler/MatchCaseNodes.py index b4d39e318..99fa70ccd 100644 --- a/Cython/Compiler/MatchCaseNodes.py +++ b/Cython/Compiler/MatchCaseNodes.py @@ -213,7 +213,7 @@ class MatchMappingPatternNode(PatternNode): value_patterns = [] double_star_capture_target = None - child_atts = PatternNode.child_attrs + [ + child_attrs = PatternNode.child_attrs + [ "keys", "value_patterns", "double_star_capture_target", diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 27d8ce323..476b380a3 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -10126,7 +10126,7 @@ class ErrorNode(Node): what str """ - pass + child_attrs = [] #------------------------------------------------------------------------------------ diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 4441e1ae3..bb89875c4 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -4313,11 +4313,11 @@ def p_mapping_pattern(s): double_star_capture_target = None items_patterns = [] - double_star_set_twice = None + double_star_is_not_final = None while True: + if double_star_capture_target and not double_star_is_not_final: + double_star_is_not_final = s.position() if s.sy == '**': - if double_star_capture_target: - double_star_set_twice = s.position() s.next() double_star_capture_target = p_pattern_capture_target(s) else: @@ -4339,8 +4339,11 @@ def p_mapping_pattern(s): break s.expect('}') - if double_star_set_twice is not 
None: - return Nodes.ErrorNode(double_star_set_twice, what = "Double star capture set twice") + if double_star_is_not_final is not None: + return Nodes.ErrorNode( + double_star_is_not_final, + what = "** pattern must be the final part of a mapping pattern." + ) return MatchCaseNodes.MatchMappingPatternNode( pos, keys = [kv[0] for kv in items_patterns], diff --git a/tests/run/test_patma.py b/tests/run/test_patma.py index 6956099e2..240dbf0d3 100644 --- a/tests/run/test_patma.py +++ b/tests/run/test_patma.py @@ -4,40 +4,34 @@ # new code import cython -from Cython.Compiler.Main import compile as cython_compile, CompileError -from Cython.Build.Inline import cython_inline -import contextlib -from tempfile import NamedTemporaryFile - -@contextlib.contextmanager -def hidden_stderr(): - try: - from StringIO import StringIO - except ImportError: - from io import StringIO - - old_stderr = sys.stderr - try: - sys.stderr = StringIO() - yield - finally: - sys.stderr = old_stderr +from Cython.Compiler.TreeFragment import TreeFragment, StringParseContext +from Cython.Compiler.Errors import local_errors, CompileError +from Cython.Compiler.ParseTreeTransforms import PostParse def _compile(code): - with NamedTemporaryFile(suffix='.py') as f: - f.write(code.encode('utf8')) - f.flush() + context = StringParseContext("test") + # all the errors we care about are in the parsing or postparse stage + try: + with local_errors() as errors: + result = TreeFragment(code, pipeline=[PostParse(context)]) + result = result.substitute() + if errors: + raise errors[0] # compile error, which should get caught + else: + return result + except CompileError as e: + raise SyntaxError(e.message_only) - with hidden_stderr(): - result = cython_compile(f.name, language_level=3) - return result if cython.compiled: def compile(code, name, what): assert what == 'exec' - result = _compile(code) - if not result.c_file: - raise SyntaxError('unexpected EOF') # compile is only used for testing errors + 
_compile(code) + + +def disable(func): + pass + ############## SLIGHTLY MODIFIED ORIGINAL CODE import array @@ -69,9 +63,9 @@ class TestTracing(unittest.TestCase): # Deeply nested patterns can cause exponential backtracking when parsing. # See CPython gh-93671 for more information. # - # DW Cython note - this doesn't break the parser but may cause a + # DW: Cython note - this doesn't break the parser but may cause a # RecursionError later in the code-generation. I don't believe that's - # easily avoidable + # easily avoidable with the way Cython visitors currently work levels = 100 @@ -84,10 +78,9 @@ class TestTracing(unittest.TestCase): for pattern in patterns: with self.subTest(pattern): code = inspect.cleandoc(""" - if 0: # disabled - FIXME remove once pattern matching is fully implemented! - match None: - case {}: - pass + match None: + case {}: + pass """.format(pattern)) compile(code, "", "exec") @@ -3019,6 +3012,7 @@ class TestSyntaxErrors(unittest.TestCase): """) + @disable # validation will be added when class patterns are added def test_attribute_name_repeated_in_class_pattern(self): self.assert_syntax_error(""" match ...: @@ -3117,6 +3111,7 @@ class TestSyntaxErrors(unittest.TestCase): pass """) + @disable # will be implemented as part of sequence patterns def test_multiple_starred_names_in_sequence_pattern_0(self): self.assert_syntax_error(""" match ...: @@ -3124,6 +3119,7 @@ class TestSyntaxErrors(unittest.TestCase): pass """) + @disable # will be implemented as part of sequence patterns def test_multiple_starred_names_in_sequence_pattern_1(self): self.assert_syntax_error(""" match ...: @@ -3258,6 +3254,7 @@ class TestSyntaxErrors(unittest.TestCase): pass """) + @disable # validation will be added when class patterns are added def test_mapping_pattern_duplicate_key(self): self.assert_syntax_error(""" match ...: @@ -3265,6 +3262,7 @@ class TestSyntaxErrors(unittest.TestCase): pass """) + @disable # validation will be added when class patterns are added 
def test_mapping_pattern_duplicate_key_edge_case0(self): self.assert_syntax_error(""" match ...: @@ -3272,6 +3270,7 @@ class TestSyntaxErrors(unittest.TestCase): pass """) + @disable # validation will be added when class patterns are added def test_mapping_pattern_duplicate_key_edge_case1(self): self.assert_syntax_error(""" match ...: @@ -3279,6 +3278,7 @@ class TestSyntaxErrors(unittest.TestCase): pass """) + @disable # validation will be added when class patterns are added def test_mapping_pattern_duplicate_key_edge_case2(self): self.assert_syntax_error(""" match ...: @@ -3286,6 +3286,7 @@ class TestSyntaxErrors(unittest.TestCase): pass """) + @disable # validation will be added when class patterns are added def test_mapping_pattern_duplicate_key_edge_case3(self): self.assert_syntax_error(""" match ...: -- cgit v1.2.1 From 771051ab77593b507b91e8041da0c293a5e9dd13 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 17 Jul 2022 18:59:06 +0100 Subject: Added a NULL test --- tests/run/extra_patma.pyx | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 tests/run/extra_patma.pyx diff --git a/tests/run/extra_patma.pyx b/tests/run/extra_patma.pyx new file mode 100644 index 000000000..b2303f45b --- /dev/null +++ b/tests/run/extra_patma.pyx @@ -0,0 +1,18 @@ +# mode: run + +cdef bint is_null(int* x): + return False # disabled - currently just a parser test + match x: + case NULL: + return True + case _: + return False + +def test_is_null(): + """ + >>> test_is_null() + """ + cdef int some_int = 1 + return # disabled - currently just a parser test + assert is_null(&some_int) == False + assert is_null(NULL) == True -- cgit v1.2.1 From 34b72dc6fc511c900d0e27c415550fd625c4d724 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 17 Jul 2022 21:18:54 +0100 Subject: Substitute subtest of Python 2.7 --- tests/run/test_patma.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/run/test_patma.py b/tests/run/test_patma.py index 
240dbf0d3..61ac6957f 100644 --- a/tests/run/test_patma.py +++ b/tests/run/test_patma.py @@ -59,6 +59,16 @@ else: # except for one test that seems misplaced in CPython (which is below) class TestTracing(unittest.TestCase): + if sys.version_info < (3, 4): + class SubTestClass(object): + def __enter__(self): + return self + def __exit__(self, exc_type, exc_value, traceback): + return + def __call__(self, *args): + return self + subTest = SubTestClass() + def test_parser_deeply_nested_patterns(self): # Deeply nested patterns can cause exponential backtracking when parsing. # See CPython gh-93671 for more information. -- cgit v1.2.1 From 4d626caa314c8d6de741185e3ec11199effb8f22 Mon Sep 17 00:00:00 2001 From: Ewout ter Hoeven Date: Sun, 17 Jul 2022 19:46:02 +0200 Subject: CI: Remove "allow_failures" for Python 3.11 to make sure we notice failures during the release phases (GH-4780) --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c8ae1eb1f..3e972eda1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,9 +29,9 @@ jobs: env: [{}] include: - # Temporary - Allow failure on Python 3.11-dev jobs until they are considered stable - - python-version: 3.11-dev - allowed_failure: true + # Temporary - Allow failure on Python 3.12-dev jobs until they are in beta (feature frozen) + #- python-version: 3.12-dev + # allowed_failure: true # Ubuntu sub-jobs: # ================ -- cgit v1.2.1 From b190c217899638f9d18da2a92cdda6c67d547c14 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Mon, 18 Jul 2022 18:23:25 +0200 Subject: On test failures, list the directory structure in end-to-end tests to aid in remote debugging. 
--- runtests.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/runtests.py b/runtests.py index a5c12e65e..426831a47 100755 --- a/runtests.py +++ b/runtests.py @@ -1966,6 +1966,10 @@ class EndToEndTest(unittest.TestCase): for c, o, e in zip(cmd, out, err): sys.stderr.write("[%d] %s\n%s\n%s\n\n" % ( self.shard_num, c, self._try_decode(o), self._try_decode(e))) + sys.stderr.write("Final directory layout of '%s':\n%s\n\n" % ( + self.name, + '\n'.join(os.path.join(dirpath, filename) for dirpath, dirs, files in os.walk(".") for filename in files), + )) self.assertEqual(0, res, "non-zero exit status, last output was:\n%r\n-- stdout:%s\n-- stderr:%s\n" % ( ' '.join(command), self._try_decode(out[-1]), self._try_decode(err[-1]))) self.success = True -- cgit v1.2.1 From c01c6d508339587935ef4c101663dd2803c70e9e Mon Sep 17 00:00:00 2001 From: Matthew Brett Date: Tue, 19 Jul 2022 07:53:22 +0100 Subject: Add --module-name argument to cython command (GH-4548) It can be useful to specify the module name for the output file directly, rather than working it out from the enclosing file tree - particularly for out of tree build systems, like Meson. 
See background in https://github.com/rgommers/scipy/issues/31#issuecomment-1002662816 --- Cython/Compiler/CmdLine.py | 12 ++++++- Cython/Compiler/Main.py | 8 +++-- Cython/Compiler/Options.py | 1 + Cython/Compiler/Tests/TestCmdLine.py | 65 ++++++++++++++++++++++++++++------- tests/compile/module_name_arg.srctree | 52 ++++++++++++++++++++++++++++ 5 files changed, 123 insertions(+), 15 deletions(-) create mode 100644 tests/compile/module_name_arg.srctree diff --git a/Cython/Compiler/CmdLine.py b/Cython/Compiler/CmdLine.py index ffff6a61c..894eacd0a 100644 --- a/Cython/Compiler/CmdLine.py +++ b/Cython/Compiler/CmdLine.py @@ -145,6 +145,11 @@ def create_cython_argparser(): dest='compile_time_env', type=str, action=ParseCompileTimeEnvAction, help='Provides compile time env like DEF would do.') + parser.add_argument("--module-name", + dest='module_name', type=str, action='store', + help='Fully qualified module name. If not given, is ' + 'deduced from the import path if source file is in ' + 'a package, or equals the filename otherwise.') parser.add_argument('sources', nargs='*', default=[]) # TODO: add help @@ -222,5 +227,10 @@ def parse_command_line(args): if len(sources) == 0 and not options.show_version: parser.error("cython: Need at least one source file\n") if Options.embed and len(sources) > 1: - parser.error("cython: Only one source file allowed when using -embed\n") + parser.error("cython: Only one source file allowed when using --embed\n") + if options.module_name: + if options.timestamps: + parser.error("cython: Cannot use --module-name with --timestamps\n") + if len(sources) > 1: + parser.error("cython: Only one source file allowed when using --module-name\n") return options, sources diff --git a/Cython/Compiler/Main.py b/Cython/Compiler/Main.py index 764d9af21..0ea5f7748 100644 --- a/Cython/Compiler/Main.py +++ b/Cython/Compiler/Main.py @@ -583,6 +583,9 @@ def compile_multiple(sources, options): a CompilationResultSet. 
Performs timestamp checking and/or recursion if these are specified in the options. """ + if len(sources) > 1 and options.module_name: + raise RuntimeError('Full module name can only be set ' + 'for single source compilation') # run_pipeline creates the context # context = Context.from_options(options) sources = [os.path.abspath(source) for source in sources] @@ -601,8 +604,9 @@ def compile_multiple(sources, options): if (not timestamps) or out_of_date: if verbose: sys.stderr.write("Compiling %s\n" % source) - - result = run_pipeline(source, options, context=context) + result = run_pipeline(source, options, + full_module_name=options.module_name, + context=context) results.add(source, result) # Compiling multiple sources in one context doesn't quite # work properly yet. diff --git a/Cython/Compiler/Options.py b/Cython/Compiler/Options.py index 97f288905..ea0b95c90 100644 --- a/Cython/Compiler/Options.py +++ b/Cython/Compiler/Options.py @@ -757,6 +757,7 @@ default_options = dict( formal_grammar=False, gdb_debug=False, compile_time_env=None, + module_name=None, common_utility_include_dir=None, output_dir=None, build_dir=None, diff --git a/Cython/Compiler/Tests/TestCmdLine.py b/Cython/Compiler/Tests/TestCmdLine.py index 5953112dc..6c74fe3a2 100644 --- a/Cython/Compiler/Tests/TestCmdLine.py +++ b/Cython/Compiler/Tests/TestCmdLine.py @@ -1,5 +1,6 @@ import os import sys +import re from unittest import TestCase try: from StringIO import StringIO @@ -495,22 +496,62 @@ class CmdLineParserTest(TestCase): self.check_default_global_options() self.check_default_options(options, ['compiler_directives']) + def test_module_name(self): + options, sources = parse_command_line([ + 'source.pyx' + ]) + self.assertEqual(options.module_name, None) + self.check_default_global_options() + self.check_default_options(options) + options, sources = parse_command_line([ + '--module-name', 'foo.bar', + 'source.pyx' + ]) + self.assertEqual(options.module_name, 'foo.bar') + 
self.check_default_global_options() + self.check_default_options(options, ['module_name']) + def test_errors(self): - def error(*args): + def error(args, regex=None): old_stderr = sys.stderr stderr = sys.stderr = StringIO() try: self.assertRaises(SystemExit, parse_command_line, list(args)) finally: sys.stderr = old_stderr - self.assertTrue(stderr.getvalue()) - - error('-1') - error('-I') - error('--version=-a') - error('--version=--annotate=true') - error('--working') - error('--verbose=1') - error('--verbose=1') - error('--cleanup') - error('--debug-disposal-code-wrong-name', 'file3.pyx') + msg = stderr.getvalue() + err_msg = 'Message "{}"'.format(msg.strip()) + self.assertTrue(msg.startswith('usage: '), + '%s does not start with "usage :"' % err_msg) + self.assertTrue(': error: ' in msg, + '%s does not contain ": error :"' % err_msg) + if regex: + self.assertTrue(re.search(regex, msg), + '%s does not match search "%s"' % + (err_msg, regex)) + + error(['-1'], + 'unknown option -1') + error(['-I'], + 'argument -I/--include-dir: expected one argument') + error(['--version=-a'], + "argument -V/--version: ignored explicit argument '-a'") + error(['--version=--annotate=true'], + "argument -V/--version: ignored explicit argument " + "'--annotate=true'") + error(['--working'], + "argument -w/--working: expected one argument") + error(['--verbose=1'], + "argument -v/--verbose: ignored explicit argument '1'") + error(['--cleanup'], + "argument --cleanup: expected one argument") + error(['--debug-disposal-code-wrong-name', 'file3.pyx'], + "unknown option --debug-disposal-code-wrong-name") + error(['--module-name', 'foo.pyx'], + "Need at least one source file") + error(['--module-name', 'foo.bar'], + "Need at least one source file") + error(['--module-name', 'foo.bar', 'foo.pyx', 'bar.pyx'], + "Only one source file allowed when using --module-name") + error(['--module-name', 'foo.bar', '--timestamps', 'foo.pyx'], + "Cannot use --module-name with --timestamps") diff --git 
a/tests/compile/module_name_arg.srctree b/tests/compile/module_name_arg.srctree new file mode 100644 index 000000000..81e75b008 --- /dev/null +++ b/tests/compile/module_name_arg.srctree @@ -0,0 +1,52 @@ +# Test that we can set module name with --module-name arg to cython +CYTHON a.pyx +CYTHON --module-name w b.pyx +CYTHON --module-name my_module.submod.x c.pyx +PYTHON setup.py build_ext --inplace +PYTHON checks.py + +######## checks.py ######## + +from importlib import import_module + +try: + exc = ModuleNotFoundError +except NameError: + exc = ImportError + +for module_name, should_import in ( + ('a', True), + ('b', False), + ('w', True), + ('my_module.submod.x', True), + ('c', False), + ): + try: + import_module(module_name) + except exc: + if should_import: + assert False, "Cannot import module " + module_name + else: + if not should_import: + assert False, ("Can import module " + module_name + + " but import should not be possible") + + +######## setup.py ######## + +from distutils.core import setup +from distutils.extension import Extension + +setup( + ext_modules = [ + Extension("a", ["a.c"]), + Extension("w", ["b.c"]), + Extension("my_module.submod.x", ["c.c"]), + ], +) + +######## a.pyx ######## +######## b.pyx ######## +######## c.pyx ######## +######## my_module/__init__.py ######## +######## my_module/submod/__init__.py ######## -- cgit v1.2.1 From 15819719f4ae804df92cf58bbb744ccd079e04e4 Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 20 Jul 2022 09:11:15 +0100 Subject: Improve test coverage of special methods with type conversions (#4900) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit https://github.com/cython/cython/issues/4163 ``` if old_type.is_pyobject: if arg.default: # 4325 ↛ 4326 code.putln("if (%s) {" % arg.hdr_cname) else: code.putln("assert(%s); {" % arg.hdr_cname) self.generate_arg_conversion_from_pyobject(arg, code) code.putln("}") elif new_type.is_pyobject: # 4331 ↛ 4334 
self.generate_arg_conversion_to_pyobject(arg, code) else: if new_type.assignable_from(old_type): code.putln("%s = %s;" % (arg.entry.cname, arg.hdr_cname)) else: error(arg.pos, "Cannot convert 1 argument from '%s' to '%s'" % (old_type, new_type)) ``` It doesn't cover the arg.default case (since I don't think any of these methods accept a default argument), or the failed conversion case. But it does cover the pyobject->C, C->pyobject and C->C cases --- tests/run/special_methods_T561.pyx | 41 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/tests/run/special_methods_T561.pyx b/tests/run/special_methods_T561.pyx index 5eb9dddfc..79de744b3 100644 --- a/tests/run/special_methods_T561.pyx +++ b/tests/run/special_methods_T561.pyx @@ -956,3 +956,44 @@ cdef class ReverseMethodsExist: return "radd" def __rsub__(self, other): return "rsub" + + +cdef class ArgumentTypeConversions: + """ + The user can set the signature of special method arguments so that + it doesn't match the C signature. This just tests that a few work + (and fills in a hole in coverage of the Cython source) + + >>> obj = ArgumentTypeConversions() + >>> obj[1] + 1 + >>> obj["not a number!"] + Traceback (most recent call last): + ... + TypeError: an integer is required + >>> obj < obj + In comparison 0 + True + >>> obj == obj + In comparison 2 + False + + Here I'm not sure how reproducible the flags are between Python versions. + Therefore I'm just checking that they end with ".0" + >>> memoryview(obj) # doctest:+ELLIPSIS + Traceback (most recent call last): + ...
+ RuntimeError: From __getbuffer__ with flags ....0 + """ + # force conversion of object to int + def __getitem__(self, int x): + return x + + # force conversion of comparison (int) to object + def __richcmp__(self, other, object comparison): + print "In comparison", comparison + return not bool(comparison) + + # force conversion of flags (int) to double + def __getbuffer__(self, Py_buffer *buffer, double flags): + raise RuntimeError("From __getbuffer__ with flags {}".format(flags)) -- cgit v1.2.1 From 87329f1f9747a5a7e483cf4827d40457f6354c7c Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 20 Jul 2022 09:36:10 +0100 Subject: Remove unhelpful comment in test --- tests/run/special_methods_T561.pyx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/run/special_methods_T561.pyx b/tests/run/special_methods_T561.pyx index 79de744b3..bd68291e7 100644 --- a/tests/run/special_methods_T561.pyx +++ b/tests/run/special_methods_T561.pyx @@ -961,8 +961,8 @@ cdef class ReverseMethodsExist: cdef class ArgumentTypeConversions: """ The user can set the signature of special method arguments so that - it doesn't match the C signature. This just tests that a few work - (and fills in a hole in coverage of the Cython source) + it doesn't match the C signature. This just tests that a few + variations work >>> obj = ArgumentTypeConversions() >>> obj[1] -- cgit v1.2.1 From 3a373e2ccdddc79202d9ed13edc85d4b95616b26 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 20 Jul 2022 18:19:44 +0200 Subject: Make pickle checksum calculation succeed even if one of the hash algorithms is blocked at runtime. Also, pass "usedforsecurity=False" in Py3.9+ to unblock MD5 also on some security constrained systems (FIPS). 
Closes https://github.com/cython/cython/issues/4909 --- Cython/Compiler/ParseTreeTransforms.py | 39 +++++++++++++----------- Cython/Compiler/Tests/TestParseTreeTransforms.py | 9 +++++- 2 files changed, 29 insertions(+), 19 deletions(-) diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py index 8e0cbc303..5799c4945 100644 --- a/Cython/Compiler/ParseTreeTransforms.py +++ b/Cython/Compiler/ParseTreeTransforms.py @@ -8,6 +8,7 @@ cython.declare(PyrexTypes=object, Naming=object, ExprNodes=object, Nodes=object, import copy import hashlib +import sys from . import PyrexTypes from . import Naming @@ -1700,24 +1701,9 @@ if VALUE is not None: if not e.type.is_pyobject: e.type.create_to_py_utility_code(env) e.type.create_from_py_utility_code(env) - all_members_names = sorted([e.name for e in all_members]) - - # Cython 0.x used MD5 for the checksum, which a few Python installations remove for security reasons. - # SHA-256 should be ok for years to come, but early Cython 3.0 alpha releases used SHA-1, - # which may not be. - checksum_algos = [] - try: - checksum_algos.append(hashlib.md5) - except AttributeError: - pass - checksum_algos.append(hashlib.sha256) - checksum_algos.append(hashlib.sha1) - - member_names_string = ' '.join(all_members_names).encode('utf-8') - checksums = [ - '0x' + mkchecksum(member_names_string).hexdigest()[:7] - for mkchecksum in checksum_algos - ] + all_members_names = [e.name for e in all_members] + checksums = _calculate_pickle_checksums(all_members_names) + unpickle_func_name = '__pyx_unpickle_%s' % node.class_name # TODO(robertwb): Move the state into the third argument @@ -2136,6 +2122,23 @@ if VALUE is not None: return property +def _calculate_pickle_checksums(member_names): + # Cython 0.x used MD5 for the checksum, which a few Python installations remove for security reasons. + # SHA-256 should be ok for years to come, but early Cython 3.0 alpha releases used SHA-1, + # which may not be. 
+ member_names_string = ' '.join(member_names).encode('utf-8') + hash_kwargs = {'usedforsecurity': False} if sys.version_info >= (3, 9) else {} + checksums = [] + for algo_name in ['md5', 'sha256', 'sha1']: + try: + mkchecksum = getattr(hashlib, algo_name) + checksums.append('0x' + mkchecksum(member_names_string, **hash_kwargs).hexdigest()[:7]) + except (AttributeError, ValueError): + # The algorithm (i.e. MD5) might not be there at all, or might be blocked at runtime + continue + return checksums + + class CalculateQualifiedNamesTransform(EnvTransform): """ Calculate and store the '__qualname__' and the global diff --git a/Cython/Compiler/Tests/TestParseTreeTransforms.py b/Cython/Compiler/Tests/TestParseTreeTransforms.py index 234b45db5..8a16f98cc 100644 --- a/Cython/Compiler/Tests/TestParseTreeTransforms.py +++ b/Cython/Compiler/Tests/TestParseTreeTransforms.py @@ -1,7 +1,9 @@ -import os +import os.path +import unittest from Cython.TestUtils import TransformTest from Cython.Compiler.ParseTreeTransforms import * +from Cython.Compiler.ParseTreeTransforms import _calculate_pickle_checksums from Cython.Compiler.Nodes import * from Cython.Compiler import Main, Symtab @@ -276,6 +278,11 @@ class TestDebugTransform(DebuggerTestCase): raise +class TestAnalyseDeclarationsTransform(unittest.TestCase): + def test_calculate_pickle_checksums(self): + checksums = _calculate_pickle_checksums(['member1', 'member2', 'member3']) + assert 2 <= len(checksums) <= 3, checksums # expecting ['0xc0af380' (MD5), '0x0c75bd4', '0xa7a7b94'] + if __name__ == "__main__": import unittest -- cgit v1.2.1 From 3cb3c2fd3a1359c46c81675610f7458ac6dcf223 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 20 Jul 2022 20:26:50 +0200 Subject: Minor code cleanups after 3a373e2ccdddc79202d9ed13edc85d4b95616b26. 
--- Cython/Compiler/ParseTreeTransforms.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py index 5799c4945..0e86d5b0e 100644 --- a/Cython/Compiler/ParseTreeTransforms.py +++ b/Cython/Compiler/ParseTreeTransforms.py @@ -4,7 +4,8 @@ import cython cython.declare(PyrexTypes=object, Naming=object, ExprNodes=object, Nodes=object, Options=object, UtilNodes=object, LetNode=object, LetRefNode=object, TreeFragment=object, EncodedString=object, - error=object, warning=object, copy=object, _unicode=object) + error=object, warning=object, copy=object, hashlib=object, sys=object, + _unicode=object) import copy import hashlib @@ -2132,10 +2133,11 @@ def _calculate_pickle_checksums(member_names): for algo_name in ['md5', 'sha256', 'sha1']: try: mkchecksum = getattr(hashlib, algo_name) - checksums.append('0x' + mkchecksum(member_names_string, **hash_kwargs).hexdigest()[:7]) + checksum = mkchecksum(member_names_string, **hash_kwargs).hexdigest() except (AttributeError, ValueError): - # The algorithm (i.e. MD5) might not be there at all, or might be blocked at runtime + # The algorithm (i.e. MD5) might not be there at all, or might be blocked at runtime. 
continue + checksums.append('0x' + checksum[:7]) return checksums -- cgit v1.2.1 From d5835270a42fcedd19805c135fbb8afdd873c362 Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 25 Jul 2022 14:23:27 +0100 Subject: Update macos version for github actions (#4917) See https://github.com/cython/cython/issues/4913 --- .github/workflows/ci.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3e972eda1..60d2a862b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -107,43 +107,43 @@ jobs: # MacOS sub-jobs # ============== # (C-only builds are used to create wheels) - - os: macos-10.15 + - os: macos-11 python-version: 2.7 backend: c env: { MACOSX_DEPLOYMENT_TARGET: 10.14 } - - os: macos-10.15 + - os: macos-11 python-version: 2.7 backend: cpp env: { MACOSX_DEPLOYMENT_TARGET: 10.14 } - - os: macos-10.15 + - os: macos-11 python-version: 3.5 backend: c env: { MACOSX_DEPLOYMENT_TARGET: 10.14 } - - os: macos-10.15 + - os: macos-11 python-version: 3.6 backend: c env: { MACOSX_DEPLOYMENT_TARGET: 10.14 } - - os: macos-10.15 + - os: macos-11 python-version: 3.7 backend: c env: { MACOSX_DEPLOYMENT_TARGET: 10.14 } - - os: macos-10.15 + - os: macos-11 python-version: 3.8 backend: c env: { MACOSX_DEPLOYMENT_TARGET: 10.14 } - - os: macos-10.15 + - os: macos-11 python-version: 3.9 backend: c env: { MACOSX_DEPLOYMENT_TARGET: 10.14 } - - os: macos-10.15 + - os: macos-11 python-version: 3.9 backend: cpp env: { MACOSX_DEPLOYMENT_TARGET: 10.14 } - - os: macos-10.15 + - os: macos-11 python-version: "3.10" backend: c env: { MACOSX_DEPLOYMENT_TARGET: 10.14 } - - os: macos-10.15 + - os: macos-11 python-version: "3.10" backend: cpp env: { MACOSX_DEPLOYMENT_TARGET: 10.14 } -- cgit v1.2.1 From ea38521bf59edef9e6d22cbabf44229848091a76 Mon Sep 17 00:00:00 2001 From: Max Bachmann Date: Tue, 26 Jul 2022 10:45:53 +0200 Subject: Add missing unicode C-APIs (GH-4910) --- 
Cython/Includes/cpython/unicode.pxd | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/Cython/Includes/cpython/unicode.pxd b/Cython/Includes/cpython/unicode.pxd index ba11f5736..6ec77f7b3 100644 --- a/Cython/Includes/cpython/unicode.pxd +++ b/Cython/Includes/cpython/unicode.pxd @@ -1,4 +1,8 @@ + cdef extern from *: + ctypedef unsigned char Py_UCS1 # uint8_t + ctypedef unsigned short Py_UCS2 # uint16_t + # Return true if the object o is a Unicode object or an instance # of a Unicode subtype. Changed in version 2.2: Allowed subtypes # to be accepted. @@ -23,6 +27,21 @@ cdef extern from *: # New in version 3.3. Py_ssize_t PyUnicode_GET_LENGTH(object o) + Py_UCS1 *PyUnicode_1BYTE_DATA(object o) + Py_UCS2 *PyUnicode_2BYTE_DATA(object o) + Py_UCS4 *PyUnicode_4BYTE_DATA(object o) + + int PyUnicode_WCHAR_KIND # Deprecated since Python 3.10, removed in 3.12. + int PyUnicode_1BYTE_KIND + int PyUnicode_2BYTE_KIND + int PyUnicode_4BYTE_KIND + void PyUnicode_WRITE(int kind, void *data, Py_ssize_t index, Py_UCS4 value) + Py_UCS4 PyUnicode_READ(int kind, void *data, Py_ssize_t index) + Py_UCS4 PyUnicode_READ_CHAR(object o, Py_ssize_t index) + + unsigned int PyUnicode_KIND(object o) + void *PyUnicode_DATA(object o) + # Return the size of the object's internal buffer in bytes. o has # to be a PyUnicodeObject (not checked). Py_ssize_t PyUnicode_GET_DATA_SIZE(object o) @@ -35,6 +54,8 @@ cdef extern from *: # be a PyUnicodeObject (not checked). char* PyUnicode_AS_DATA(object o) + bint PyUnicode_IsIdentifier(object o) + # Return 1 or 0 depending on whether ch is a whitespace character. bint Py_UNICODE_ISSPACE(Py_UCS4 ch) @@ -65,6 +86,8 @@ cdef extern from *: # Return 1 or 0 depending on whether ch is an alphanumeric character. bint Py_UNICODE_ISALNUM(Py_UCS4 ch) + bint Py_UNICODE_ISPRINTABLE(Py_UCS4 ch) + # Return the character ch converted to lower case. # Used to return a Py_UNICODE value before Py3.3. 
Py_UCS4 Py_UNICODE_TOLOWER(Py_UCS4 ch) @@ -111,6 +134,18 @@ cdef extern from *: # UTF-8 encoded bytes. The size is determined with strlen(). unicode PyUnicode_FromString(const char *u) + unicode PyUnicode_New(Py_ssize_t size, Py_UCS4 maxchar) + unicode PyUnicode_FromKindAndData(int kind, const void *buffer, Py_ssize_t size) + unicode PyUnicode_FromFormat(const char *format, ...) + Py_ssize_t PyUnicode_GetLength(object unicode) except -1 + Py_ssize_t PyUnicode_CopyCharacters(object to, Py_ssize_t to_start, object from_, Py_ssize_t from_start, Py_ssize_t how_many) except -1 + Py_ssize_t PyUnicode_Fill(object unicode, Py_ssize_t start, Py_ssize_t length, Py_UCS4 fill_char) except -1 + int PyUnicode_WriteChar(object unicode, Py_ssize_t index, Py_UCS4 character) except -1 + Py_UCS4 PyUnicode_ReadChar(object unicode, Py_ssize_t index) except -1 + unicode PyUnicode_Substring(object str, Py_ssize_t start, Py_ssize_t end) + Py_UCS4 *PyUnicode_AsUCS4(object u, Py_UCS4 *buffer, Py_ssize_t buflen, int copy_null) except NULL + Py_UCS4 *PyUnicode_AsUCS4Copy(object u) except NULL + # Create a Unicode Object from the given Unicode code point ordinal. 
# # The ordinal must be in range(0x10000) on narrow Python builds -- cgit v1.2.1 From 3f9fa1a46b90b9763b84ea5ec4319fdb5eba55f0 Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 26 Jul 2022 15:02:32 +0100 Subject: Remove unused function "looking_at_call" from parser (GH-4922) --- Cython/Compiler/Parsing.pxd | 1 - Cython/Compiler/Parsing.py | 10 ---------- 2 files changed, 11 deletions(-) diff --git a/Cython/Compiler/Parsing.pxd b/Cython/Compiler/Parsing.pxd index dbed77415..233ef214e 100644 --- a/Cython/Compiler/Parsing.pxd +++ b/Cython/Compiler/Parsing.pxd @@ -159,7 +159,6 @@ cdef bint looking_at_name(PyrexScanner s) except -2 cdef object looking_at_expr(PyrexScanner s)# except -2 cdef bint looking_at_base_type(PyrexScanner s) except -2 cdef bint looking_at_dotted_name(PyrexScanner s) except -2 -cdef bint looking_at_call(PyrexScanner s) except -2 cdef p_sign_and_longness(PyrexScanner s) cdef p_opt_cname(PyrexScanner s) cpdef p_c_declarator(PyrexScanner s, ctx = *, bint empty = *, bint is_type = *, bint cmethod_flag = *, diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 60220282b..d7c844849 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -2853,16 +2853,6 @@ def looking_at_dotted_name(s): else: return 0 -def looking_at_call(s): - "See if we're looking at a.b.c(" - # Don't mess up the original position, so save and restore it. - # Unfortunately there's no good way to handle this, as a subsequent call - # to next() will not advance the position until it reads a new token. 
- position = s.start_line, s.start_col - result = looking_at_expr(s) == u'(' - if not result: - s.start_line, s.start_col = position - return result basic_c_type_names = cython.declare(frozenset, frozenset(( "void", "char", "int", "float", "double", "bint"))) -- cgit v1.2.1 From f1748bb86bf7edb731728ac366e129c1be9bcb61 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Tue, 26 Jul 2022 21:39:52 +0000 Subject: [0.29] Use atomic reference counting in MemoryView in more cases (GH-4912) (GH-4915) This fixes a few issues in MemoryView_C.c to allow atomic reference counting to be used in more cases. - Enable GNU atomics for `__GNUC__` >= 5. Previously, GCC 5.0, 6.0, X.0 versions used lock-based reference counting due to an incorrect preprocessor check. - Typo in `__GNUC_PATCHLEVEL__` macro (missing underscores) - Enable atomics in MSVC and fix returned values. InterlockedExchangeAdd returns the *initial* value (like __sync_fetch_and_add). InterlockedIncrement returned the *resulting* value (post increment), which would have been incorrect if MSVC atomics had been enabled. Also avoids allocating a lock in MemoryView when atomics are available, which additionally fixes a thread-safety issue in the "nogil" CPython fork. * Use _InterlockedExchangeAdd intrinsic The InterlockedExchangeSubtract function isn't available in older versions of MSVC, while InterlockedExchangeAdd is available since Windows XP. The intrinsic variant (with the underscore prefix) avoids needing to include the entire Windows.h header. * Only use MSVC atomics when compiling for the "nogil" CPython fork to prevent potential breakage of existing Windows setups. 
--- Cython/Utility/MemoryView.pyx | 16 ++++++++------- Cython/Utility/MemoryView_C.c | 35 +++++++++++++-------------------- tests/compile/fused_redeclare_T3111.pyx | 8 ++++---- 3 files changed, 27 insertions(+), 32 deletions(-) diff --git a/Cython/Utility/MemoryView.pyx b/Cython/Utility/MemoryView.pyx index 6ca5fab9b..8fa3c926a 100644 --- a/Cython/Utility/MemoryView.pyx +++ b/Cython/Utility/MemoryView.pyx @@ -23,6 +23,7 @@ cdef extern from "": void *memset(void *b, int c, size_t len) cdef extern from *: + bint CYTHON_ATOMICS int __Pyx_GetBuffer(object, Py_buffer *, int) except -1 void __Pyx_ReleaseBuffer(Py_buffer *) @@ -351,14 +352,15 @@ cdef class memoryview(object): (<__pyx_buffer *> &self.view).obj = Py_None Py_INCREF(Py_None) - global __pyx_memoryview_thread_locks_used - if __pyx_memoryview_thread_locks_used < THREAD_LOCKS_PREALLOCATED: - self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used] - __pyx_memoryview_thread_locks_used += 1 - if self.lock is NULL: - self.lock = PyThread_allocate_lock() + if not CYTHON_ATOMICS: + global __pyx_memoryview_thread_locks_used + if __pyx_memoryview_thread_locks_used < THREAD_LOCKS_PREALLOCATED: + self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used] + __pyx_memoryview_thread_locks_used += 1 if self.lock is NULL: - raise MemoryError + self.lock = PyThread_allocate_lock() + if self.lock is NULL: + raise MemoryError if flags & PyBUF_FORMAT: self.dtype_is_object = (self.view.format[0] == b'O' and self.view.format[1] == b'\0') diff --git a/Cython/Utility/MemoryView_C.c b/Cython/Utility/MemoryView_C.c index 8146c458d..34664f062 100644 --- a/Cython/Utility/MemoryView_C.c +++ b/Cython/Utility/MemoryView_C.c @@ -26,36 +26,29 @@ typedef struct { #endif #define __pyx_atomic_int_type int -// todo: Portland pgcc, maybe OS X's OSAtomicIncrement32, -// libatomic + autotools-like distutils support? Such a pain... 
-#if CYTHON_ATOMICS && __GNUC__ >= 4 && (__GNUC_MINOR__ > 1 || \ - (__GNUC_MINOR__ == 1 && __GNUC_PATCHLEVEL >= 2)) && \ - !defined(__i386__) + +#if CYTHON_ATOMICS && (__GNUC__ >= 5 || (__GNUC__ == 4 && \ + (__GNUC_MINOR__ > 1 || \ + (__GNUC_MINOR__ == 1 && __GNUC_PATCHLEVEL__ >= 2)))) /* gcc >= 4.1.2 */ - #define __pyx_atomic_incr_aligned(value, lock) __sync_fetch_and_add(value, 1) - #define __pyx_atomic_decr_aligned(value, lock) __sync_fetch_and_sub(value, 1) + #define __pyx_atomic_incr_aligned(value) __sync_fetch_and_add(value, 1) + #define __pyx_atomic_decr_aligned(value) __sync_fetch_and_sub(value, 1) #ifdef __PYX_DEBUG_ATOMICS #warning "Using GNU atomics" #endif -#elif CYTHON_ATOMICS && defined(_MSC_VER) && 0 +#elif CYTHON_ATOMICS && defined(_MSC_VER) && CYTHON_COMPILING_IN_NOGIL /* msvc */ - #include + #include #undef __pyx_atomic_int_type - #define __pyx_atomic_int_type LONG - #define __pyx_atomic_incr_aligned(value, lock) InterlockedIncrement(value) - #define __pyx_atomic_decr_aligned(value, lock) InterlockedDecrement(value) + #define __pyx_atomic_int_type long + #pragma intrinsic (_InterlockedExchangeAdd) + #define __pyx_atomic_incr_aligned(value) _InterlockedExchangeAdd(value, 1) + #define __pyx_atomic_decr_aligned(value) _InterlockedExchangeAdd(value, -1) #ifdef __PYX_DEBUG_ATOMICS #pragma message ("Using MSVC atomics") #endif -#elif CYTHON_ATOMICS && (defined(__ICC) || defined(__INTEL_COMPILER)) && 0 - #define __pyx_atomic_incr_aligned(value, lock) _InterlockedIncrement(value) - #define __pyx_atomic_decr_aligned(value, lock) _InterlockedDecrement(value) - - #ifdef __PYX_DEBUG_ATOMICS - #warning "Using Intel atomics" - #endif #else #undef CYTHON_ATOMICS #define CYTHON_ATOMICS 0 @@ -69,9 +62,9 @@ typedef volatile __pyx_atomic_int_type __pyx_atomic_int; #if CYTHON_ATOMICS #define __pyx_add_acquisition_count(memview) \ - __pyx_atomic_incr_aligned(__pyx_get_slice_count_pointer(memview), memview->lock) + 
__pyx_atomic_incr_aligned(__pyx_get_slice_count_pointer(memview)) #define __pyx_sub_acquisition_count(memview) \ - __pyx_atomic_decr_aligned(__pyx_get_slice_count_pointer(memview), memview->lock) + __pyx_atomic_decr_aligned(__pyx_get_slice_count_pointer(memview)) #else #define __pyx_add_acquisition_count(memview) \ __pyx_add_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock) diff --git a/tests/compile/fused_redeclare_T3111.pyx b/tests/compile/fused_redeclare_T3111.pyx index baf932bd4..c7064e7c0 100644 --- a/tests/compile/fused_redeclare_T3111.pyx +++ b/tests/compile/fused_redeclare_T3111.pyx @@ -24,8 +24,8 @@ def foo(dtype_t[:] a, dtype_t_out[:, :] b): _WARNINGS = """ 20:10: 'cpdef_method' redeclared 31:10: 'cpdef_cname_method' redeclared -446:72: Argument evaluation order in C function call is undefined and may not be as expected -446:72: Argument evaluation order in C function call is undefined and may not be as expected -749:34: Argument evaluation order in C function call is undefined and may not be as expected -749:34: Argument evaluation order in C function call is undefined and may not be as expected +448:72: Argument evaluation order in C function call is undefined and may not be as expected +448:72: Argument evaluation order in C function call is undefined and may not be as expected +751:34: Argument evaluation order in C function call is undefined and may not be as expected +751:34: Argument evaluation order in C function call is undefined and may not be as expected """ -- cgit v1.2.1 From 3de4be40a1642ffeec18d783cde9d285622b24ce Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Tue, 26 Jul 2022 21:43:15 +0000 Subject: [0.29] Add configuration for the "nogil" CPython fork (GH-4912) (GH-4914) --- Cython/Utility/ModuleSetupCode.c | 50 ++++++++++++++++++++++++++++++++++++++++ tests/compile/buildenv.pyx | 2 ++ 2 files changed, 52 insertions(+) diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c index 
f7af78bfa..f8bf885bc 100644 --- a/Cython/Utility/ModuleSetupCode.c +++ b/Cython/Utility/ModuleSetupCode.c @@ -46,6 +46,7 @@ #define CYTHON_COMPILING_IN_PYPY 1 #define CYTHON_COMPILING_IN_PYSTON 0 #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_NOGIL 0 #undef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 0 @@ -91,6 +92,7 @@ #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_PYSTON 1 #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_NOGIL 0 #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 @@ -133,10 +135,58 @@ #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif +#elif defined(PY_NOGIL) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_NOGIL 1 + + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #ifndef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 1 + #endif + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + 
#define CYTHON_USE_EXC_INFO_STACK 0 + #else #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_PYSTON 0 #define CYTHON_COMPILING_IN_CPYTHON 1 + #define CYTHON_COMPILING_IN_NOGIL 0 #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 diff --git a/tests/compile/buildenv.pyx b/tests/compile/buildenv.pyx index ec2445c59..f4c48ceca 100644 --- a/tests/compile/buildenv.pyx +++ b/tests/compile/buildenv.pyx @@ -36,6 +36,7 @@ cdef extern from *: cdef int CYTHON_COMPILING_IN_CPYTHON cdef int CYTHON_COMPILING_IN_PYPY cdef int CYTHON_COMPILING_IN_PYSTON + cdef int CYTHON_COMPILING_IN_NOGIL cdef int CYTHON_USE_PYLONG_INTERNALS cdef int CYTHON_USE_PYLIST_INTERNALS cdef int CYTHON_USE_UNICODE_INTERNALS @@ -78,6 +79,7 @@ PY_VERSION_HEX 0x{PY_VERSION_HEX:X} CYTHON_COMPILING_IN_CPYTHON {CYTHON_COMPILING_IN_CPYTHON} CYTHON_COMPILING_IN_PYPY {CYTHON_COMPILING_IN_PYPY} CYTHON_COMPILING_IN_PYSTON {CYTHON_COMPILING_IN_PYSTON} +CYTHON_COMPILING_IN_NOGIL {CYTHON_COMPILING_IN_NOGIL} CYTHON_USE_PYLONG_INTERNALS {CYTHON_USE_PYLONG_INTERNALS} CYTHON_USE_PYLIST_INTERNALS {CYTHON_USE_PYLIST_INTERNALS} -- cgit v1.2.1 From 4b1f45dd5a6a3a95f3b63b50a6a06e0c14334ee3 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 27 Jul 2022 11:08:36 +0200 Subject: Update changelog. --- CHANGES.rst | 65 +++++++++++++++++++++++++++++++++++++------------------------ 1 file changed, 40 insertions(+), 25 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index acc5e3020..a8e02bd8b 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -8,33 +8,48 @@ Cython Changelog Bugs fixed ---------- -* Use ``importlib.util.find_spec()`` instead of the deprecated ``importlib.find_loader()`` - function when setting up the package path at import-time. Patch by Matti Picus. - (Github issue #4764) - -* Require the C compiler to support the two-arg form of ``va_start`` on Python 3.10 - and higher. Patch by Thomas Caswell. - (Github issue #4820) - -* Make ``fused_type`` subscriptable in Shadow.py. Patch by Pfebrer. 
- (Github issue #4842) - -* Fix the incorrect code generation of the target type in ``bytearray`` loops. - Patch by Kenrick Everett. - (Github issue #4108) - +* Use ``importlib.util.find_spec()`` instead of the deprecated ``importlib.find_loader()`` + function when setting up the package path at import-time. + Patch by Matti Picus. (Github issue #4764) + +* Require the C compiler to support the two-arg form of ``va_start`` + on Python 3.10 and higher. + Patch by Thomas Caswell. (Github issue #4820) + +* Make ``fused_type`` subscriptable in Shadow.py. + Patch by Pfebrer. (Github issue #4842) + +* Fix the incorrect code generation of the target type in ``bytearray`` loops. + Patch by Kenrick Everett. (Github issue #4108) + +* Atomic refcounts for memoryviews were not used on some GCC versions by accident. + Patch by Sam Gross. (Github issue #4915) + * Silence some GCC ``-Wconversion`` warnings in C utility code. - Patch by Lisandro Dalcin. - (Github issue #4854) - -* Stop tuple multiplication being ignored in expressions such as ``[*(1,) * 2]``. - Patch by David Woods. - (Github issue #4864) - -* Ensure that object buffers (e.g. ``ndarray[object, ndim=1]``) containing + Patch by Lisandro Dalcin. (Github issue #4854) + +* Tuple multiplication was ignored in expressions such as ``[*(1,) * 2]``. + Patch by David Woods. (Github issue #4864) + +* Calling ``append`` methods on extension types could fail to find the method + in some cases. + Patch by David Woods. (Github issue #4828) + +* Ensure that object buffers (e.g. ``ndarray[object, ndim=1]``) containing ``NULL`` pointers are safe to use, returning ``None`` instead of the ``NULL`` - pointer. Patch by Sebastian Berg. - (Github issue #4859) + pointer. + Patch by Sebastian Berg. (Github issue #4859) + +* Using memoryview typed arguments in inner functions is now rejected as unsupported. + Patch by David Woods. (Github issue #4798) + +* Compilation could fail on systems (e.g. FIPS) that block MD5 checksums at runtime. 
+ (Github issue #4909) + +* Experimental adaptations for the CPython "nogil" fork was added. + Note that there is no official support for this in Cython 0.x. + Patch by Sam Gross. (Github issue #4912) + 0.29.30 (2022-05-16) ==================== -- cgit v1.2.1 From 72848313d8b8f06c14ae171e66d6f09f5312c325 Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 27 Jul 2022 10:19:35 +0100 Subject: Backport "noexcept" function modifier to Cython 0.29.x (GH-4903) As a no-op, but it parses fine. Also add some basic compile tests, and some brief documentation. --- Cython/Compiler/Parsing.py | 8 +++++++- docs/src/userguide/language_basics.rst | 11 +++++++++++ tests/compile/excvaldecl.pyx | 8 ++++++++ 3 files changed, 26 insertions(+), 1 deletion(-) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 82bf82d3d..2aafcb99a 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -2820,6 +2820,8 @@ def p_c_func_declarator(s, pos, ctx, base, cmethod_flag): s.expect(')') nogil = p_nogil(s) exc_val, exc_check = p_exception_value_clause(s) + # TODO - warning to enforce preferred exception specification order + nogil = nogil or p_nogil(s) with_gil = p_with_gil(s) return Nodes.CFuncDeclaratorNode(pos, base = base, args = args, has_varargs = ellipsis, @@ -2938,7 +2940,11 @@ def p_with_gil(s): def p_exception_value_clause(s): exc_val = None exc_check = 0 - if s.sy == 'except': + + if s.sy == 'IDENT' and s.systring == 'noexcept': + s.next() + exc_check = False # No-op in Cython 0.29.x + elif s.sy == 'except': s.next() if s.sy == '*': exc_check = 1 diff --git a/docs/src/userguide/language_basics.rst b/docs/src/userguide/language_basics.rst index 0fdd87783..c3b9f36e4 100644 --- a/docs/src/userguide/language_basics.rst +++ b/docs/src/userguide/language_basics.rst @@ -414,6 +414,17 @@ use this form, since there isn't any error return value to test. 
Otherwise, an explicit error return value allows the C compiler to generate more efficient code and is thus generally preferable. +To explicitly mark a function as not returning an exception use +``noexcept``. + + cdef int spam() noexcept: + ... + +This is worth doing because (a) "explicit is better than implicit", and +(b) the default behaviour for ``cdef`` functions will change in Cython 3.0 +so that functions will propagate exceptions by default. Therefore, it is +best to mark them now if you want them to swallow exceptions in the future. + An external C++ function that may raise an exception can be declared with:: cdef int spam() except + diff --git a/tests/compile/excvaldecl.pyx b/tests/compile/excvaldecl.pyx index 63f3c65dc..06af71ce0 100644 --- a/tests/compile/excvaldecl.pyx +++ b/tests/compile/excvaldecl.pyx @@ -18,9 +18,17 @@ cdef int brian() except? 0: cdef int silly() except -1: pass +cdef int not_so_silly() noexcept: + pass + +cdef int not_so_silly_and_gilless() noexcept nogil: + pass + spam() eggs() grail() tomato() brian() silly() +not_so_silly() +not_so_silly_and_gilless() -- cgit v1.2.1 From 34ce43c8f3dafda734d59345731a61b8200c3b94 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 27 Jul 2022 11:08:36 +0200 Subject: Update changelog. --- CHANGES.rst | 65 +++++++++++++++++++++++++++++++++++++------------------------ 1 file changed, 40 insertions(+), 25 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index acc5e3020..a8e02bd8b 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -8,33 +8,48 @@ Cython Changelog Bugs fixed ---------- -* Use ``importlib.util.find_spec()`` instead of the deprecated ``importlib.find_loader()`` - function when setting up the package path at import-time. Patch by Matti Picus. - (Github issue #4764) - -* Require the C compiler to support the two-arg form of ``va_start`` on Python 3.10 - and higher. Patch by Thomas Caswell. - (Github issue #4820) - -* Make ``fused_type`` subscriptable in Shadow.py. Patch by Pfebrer. 
- (Github issue #4842) - -* Fix the incorrect code generation of the target type in ``bytearray`` loops. - Patch by Kenrick Everett. - (Github issue #4108) - +* Use ``importlib.util.find_spec()`` instead of the deprecated ``importlib.find_loader()`` + function when setting up the package path at import-time. + Patch by Matti Picus. (Github issue #4764) + +* Require the C compiler to support the two-arg form of ``va_start`` + on Python 3.10 and higher. + Patch by Thomas Caswell. (Github issue #4820) + +* Make ``fused_type`` subscriptable in Shadow.py. + Patch by Pfebrer. (Github issue #4842) + +* Fix the incorrect code generation of the target type in ``bytearray`` loops. + Patch by Kenrick Everett. (Github issue #4108) + +* Atomic refcounts for memoryviews were not used on some GCC versions by accident. + Patch by Sam Gross. (Github issue #4915) + * Silence some GCC ``-Wconversion`` warnings in C utility code. - Patch by Lisandro Dalcin. - (Github issue #4854) - -* Stop tuple multiplication being ignored in expressions such as ``[*(1,) * 2]``. - Patch by David Woods. - (Github issue #4864) - -* Ensure that object buffers (e.g. ``ndarray[object, ndim=1]``) containing + Patch by Lisandro Dalcin. (Github issue #4854) + +* Tuple multiplication was ignored in expressions such as ``[*(1,) * 2]``. + Patch by David Woods. (Github issue #4864) + +* Calling ``append`` methods on extension types could fail to find the method + in some cases. + Patch by David Woods. (Github issue #4828) + +* Ensure that object buffers (e.g. ``ndarray[object, ndim=1]``) containing ``NULL`` pointers are safe to use, returning ``None`` instead of the ``NULL`` - pointer. Patch by Sebastian Berg. - (Github issue #4859) + pointer. + Patch by Sebastian Berg. (Github issue #4859) + +* Using memoryview typed arguments in inner functions is now rejected as unsupported. + Patch by David Woods. (Github issue #4798) + +* Compilation could fail on systems (e.g. FIPS) that block MD5 checksums at runtime. 
+ (Github issue #4909) + +* Experimental adaptations for the CPython "nogil" fork was added. + Note that there is no official support for this in Cython 0.x. + Patch by Sam Gross. (Github issue #4912) + 0.29.30 (2022-05-16) ==================== -- cgit v1.2.1 From 6cede008009c6a5bf5c1414591ca79ec62f56256 Mon Sep 17 00:00:00 2001 From: h-vetinari Date: Wed, 27 Jul 2022 16:59:02 +0200 Subject: [0.29] Add --module-name argument to cython command (GH-4906) Backport of https://github.com/cython/cython/pull/4548 It can be useful to specify the module name for the output file directly, rather than working it out from the enclosing file tree - particularly for out of tree build systems, like Meson. See background in https://github.com/rgommers/scipy/issues/31#issuecomment-1002662816 --- Cython/Compiler/CmdLine.py | 17 +++++++- Cython/Compiler/Main.py | 9 ++++- Cython/Compiler/Tests/TestCmdLine.py | 74 +++++++++++++++++++++++++++++------ tests/compile/module_name_arg.srctree | 52 ++++++++++++++++++++++++ 4 files changed, 138 insertions(+), 14 deletions(-) create mode 100644 tests/compile/module_name_arg.srctree diff --git a/Cython/Compiler/CmdLine.py b/Cython/Compiler/CmdLine.py index e89e45ab4..9e2f8beb0 100644 --- a/Cython/Compiler/CmdLine.py +++ b/Cython/Compiler/CmdLine.py @@ -50,6 +50,9 @@ Options: --warning-extra, -Wextra Enable extra warnings -X, --directive =[, 1: sys.stderr.write( - "cython: Only one source file allowed when using -embed\n") + "cython: Only one source file allowed when using --embed\n") sys.exit(1) + if options.module_name: + if options.timestamps: + sys.stderr.write( + "cython: Cannot use --module-name with --timestamps\n") + sys.exit(1) + if len(sources) > 1: + sys.stderr.write( + "cython: Only one source file allowed when using --module-name\n") + sys.exit(1) return options, sources diff --git a/Cython/Compiler/Main.py b/Cython/Compiler/Main.py index dc4add541..128441da6 100644 --- a/Cython/Compiler/Main.py +++ b/Cython/Compiler/Main.py @@ 
-735,6 +735,9 @@ def compile_multiple(sources, options): a CompilationResultSet. Performs timestamp checking and/or recursion if these are specified in the options. """ + if options.module_name and len(sources) > 1: + raise RuntimeError('Full module name can only be set ' + 'for single source compilation') # run_pipeline creates the context # context = options.create_context() sources = [os.path.abspath(source) for source in sources] @@ -753,8 +756,9 @@ def compile_multiple(sources, options): if (not timestamps) or out_of_date: if verbose: sys.stderr.write("Compiling %s\n" % source) - - result = run_pipeline(source, options, context=context) + result = run_pipeline(source, options, + full_module_name=options.module_name, + context=context) results.add(source, result) # Compiling multiple sources in one context doesn't quite # work properly yet. @@ -900,5 +904,6 @@ default_options = dict( build_dir=None, cache=None, create_extension=None, + module_name=None, np_pythran=False ) diff --git a/Cython/Compiler/Tests/TestCmdLine.py b/Cython/Compiler/Tests/TestCmdLine.py index abc7c0a89..bd31da000 100644 --- a/Cython/Compiler/Tests/TestCmdLine.py +++ b/Cython/Compiler/Tests/TestCmdLine.py @@ -1,5 +1,6 @@ import sys +import re from unittest import TestCase try: from StringIO import StringIO @@ -10,6 +11,18 @@ from .. 
import Options from ..CmdLine import parse_command_line +def check_global_options(expected_options, white_list=[]): + """ + returns error message of "" if check Ok + """ + no_value = object() + for name, orig_value in expected_options.items(): + if name not in white_list: + if getattr(Options, name, no_value) != orig_value: + return "error in option " + name + return "" + + class CmdLineParserTest(TestCase): def setUp(self): backup = {} @@ -23,6 +36,17 @@ class CmdLineParserTest(TestCase): if getattr(Options, name, no_value) != orig_value: setattr(Options, name, orig_value) + def check_default_global_options(self, white_list=[]): + self.assertEqual(check_global_options(self._options_backup, white_list), "") + + def check_default_options(self, options, white_list=[]): + from ..Main import CompilationOptions, default_options + default_options = CompilationOptions(default_options) + no_value = object() + for name in default_options.__dict__.keys(): + if name not in white_list: + self.assertEqual(getattr(options, name, no_value), getattr(default_options, name), msg="error in option " + name) + def test_short_options(self): options, sources = parse_command_line([ '-V', '-l', '-+', '-t', '-v', '-v', '-v', '-p', '-D', '-a', '-3', @@ -98,21 +122,49 @@ class CmdLineParserTest(TestCase): self.assertTrue(options.gdb_debug) self.assertEqual(options.output_dir, '/gdb/outdir') + def test_module_name(self): + options, sources = parse_command_line([ + 'source.pyx' + ]) + self.assertEqual(options.module_name, None) + self.check_default_global_options() + self.check_default_options(options) + options, sources = parse_command_line([ + '--module-name', 'foo.bar', + 'source.pyx' + ]) + self.assertEqual(options.module_name, 'foo.bar') + self.check_default_global_options() + self.check_default_options(options, ['module_name']) + def test_errors(self): - def error(*args): + def error(args, regex=None): old_stderr = sys.stderr stderr = sys.stderr = StringIO() try: 
self.assertRaises(SystemExit, parse_command_line, list(args)) finally: sys.stderr = old_stderr - self.assertTrue(stderr.getvalue()) - - error('-1') - error('-I') - error('--version=-a') - error('--version=--annotate=true') - error('--working') - error('--verbose=1') - error('--verbose=1') - error('--cleanup') + msg = stderr.getvalue().strip() + self.assertTrue(msg) + if regex: + self.assertTrue(re.search(regex, msg), + '"%s" does not match search "%s"' % + (msg, regex)) + + error(['-1'], + 'Unknown compiler flag: -1') + error(['-I']) + error(['--version=-a']) + error(['--version=--annotate=true']) + error(['--working']) + error(['--verbose=1']) + error(['--cleanup']) + error(['--debug-disposal-code-wrong-name', 'file3.pyx'], + "Unknown debug flag: debug_disposal_code_wrong_name") + error(['--module-name', 'foo.pyx']) + error(['--module-name', 'foo.bar']) + error(['--module-name', 'foo.bar', 'foo.pyx', 'bar.pyx'], + "Only one source file allowed when using --module-name") + error(['--module-name', 'foo.bar', '--timestamps', 'foo.pyx'], + "Cannot use --module-name with --timestamps") diff --git a/tests/compile/module_name_arg.srctree b/tests/compile/module_name_arg.srctree new file mode 100644 index 000000000..81e75b008 --- /dev/null +++ b/tests/compile/module_name_arg.srctree @@ -0,0 +1,52 @@ +# Test that we can set module name with --module-name arg to cython +CYTHON a.pyx +CYTHON --module-name w b.pyx +CYTHON --module-name my_module.submod.x c.pyx +PYTHON setup.py build_ext --inplace +PYTHON checks.py + +######## checks.py ######## + +from importlib import import_module + +try: + exc = ModuleNotFoundError +except NameError: + exc = ImportError + +for module_name, should_import in ( + ('a', True), + ('b', False), + ('w', True), + ('my_module.submod.x', True), + ('c', False), + ): + try: + import_module(module_name) + except exc: + if should_import: + assert False, "Cannot import module " + module_name + else: + if not should_import: + assert False, ("Can import 
module " + module_name + + " but import should not be possible") + + +######## setup.py ######## + +from distutils.core import setup +from distutils.extension import Extension + +setup( + ext_modules = [ + Extension("a", ["a.c"]), + Extension("w", ["b.c"]), + Extension("my_module.submod.x", ["c.c"]), + ], +) + +######## a.pyx ######## +######## b.pyx ######## +######## c.pyx ######## +######## my_module/__init__.py ######## +######## my_module/submod/__init__.py ######## -- cgit v1.2.1 From 7d59ffaebe08914681067458d8e5c5288e13fe4d Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 27 Jul 2022 16:05:44 +0100 Subject: [docs] Fix typo in embedded.pyx --- docs/examples/tutorial/embedding/embedded.pyx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/examples/tutorial/embedding/embedded.pyx b/docs/examples/tutorial/embedding/embedded.pyx index 26704d45f..719702c75 100644 --- a/docs/examples/tutorial/embedding/embedded.pyx +++ b/docs/examples/tutorial/embedding/embedded.pyx @@ -1,6 +1,6 @@ # embedded.pyx -# The following two lines are for test purposed only, please ignore them. +# The following two lines are for test purposes only, please ignore them. # distutils: sources = embedded_main.c # tag: py3only -- cgit v1.2.1 From b6cb192f29da5af63f252c4f831a0b71f73ac1da Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 27 Jul 2022 17:07:25 +0200 Subject: Prepare release of 0.29.31. --- CHANGES.rst | 9 ++++++++- Cython/Shadow.py | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index a8e02bd8b..9016c1b45 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -2,9 +2,16 @@ Cython Changelog ================ -0.29.31 (2022-??-??) +0.29.31 (2022-07-22) ==================== +Features added +-------------- + +* A new argument ``--module-name`` was added to the ``cython`` command to + provide the (one) exact target module name from the command line. + Patch by Matthew Brett and h-vetinari. 
(Github issue #4906) + Bugs fixed ---------- diff --git a/Cython/Shadow.py b/Cython/Shadow.py index f92f9ebf1..60d9c37fd 100644 --- a/Cython/Shadow.py +++ b/Cython/Shadow.py @@ -1,7 +1,7 @@ # cython.* namespace for pure mode. from __future__ import absolute_import -__version__ = "0.29.30" +__version__ = "0.29.31" try: from __builtin__ import basestring -- cgit v1.2.1 From 29c9e92b3333561b217dc4b96c5a3281c82ae686 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 27 Jul 2022 17:07:25 +0200 Subject: Prepare release of 0.29.31. --- CHANGES.rst | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 477d727a5..ce5525410 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1019,9 +1019,16 @@ Other changes .. _0.29.31: -0.29.31 (2022-??-??) +0.29.31 (2022-07-22) ==================== +Features added +-------------- + +* A new argument ``--module-name`` was added to the ``cython`` command to + provide the (one) exact target module name from the command line. + Patch by Matthew Brett and h-vetinari. (Github issue #4906) + Bugs fixed ---------- -- cgit v1.2.1 From 1f71962268c7fa4d0361b38b172e7376fa904a61 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 27 Jul 2022 17:30:16 +0200 Subject: Fix release date. --- CHANGES.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 9016c1b45..b0eaae423 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -2,7 +2,7 @@ Cython Changelog ================ -0.29.31 (2022-07-22) +0.29.31 (2022-07-27) ==================== Features added -- cgit v1.2.1 From 64701f6e89ab04d2253c3dea38d8ab1cc008e35d Mon Sep 17 00:00:00 2001 From: 0dminnimda <0dminnimda@gmail.com> Date: Thu, 28 Jul 2022 08:58:59 +0300 Subject: Add GitHub Actions build jobs for Windows (GH-4324) There are still test failures, but at least it allows us to see the test results in GHA. 
--- .github/workflows/ci.yml | 93 +++++++++++++++++++++++++++-------------------- .gitignore | 1 + Tools/ci-run.sh | 13 ++++++- runtests.py | 3 +- test-requirements-27.txt | 2 + test-requirements-34.txt | 2 +- tests/windows_bugs_39.txt | 3 ++ 7 files changed, 74 insertions(+), 43 deletions(-) create mode 100644 tests/windows_bugs_39.txt diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a8db37d71..4c2193c5d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: # # FIXME: 'cpp' tests seems to fail due to compilation errors (numpy_pythran_unit) # in all python versions and test failures (builtin_float) in 3.5< - os: [ubuntu-18.04] + os: [windows-2019, ubuntu-18.04] backend: [c, cpp] python-version: - "2.7" @@ -145,12 +145,15 @@ jobs: python-version: pypy-3.7 backend: c env: { NO_CYTHON_COMPILE: 1 } - # Coverage - Disabled due to taking too long to run - # - os: ubuntu-18.04 - # python-version: 3.7 - # backend: "c,cpp" - # env: { COVERAGE: 1 } - # extra_hash: '-coverage' + # Coverage + - os: ubuntu-18.04 + python-version: 3.8 + backend: "c,cpp" + env: { COVERAGE: 1 } + extra_hash: '-coverage' + + - os: windows-2019 + allowed_failure: true # MacOS sub-jobs # ============== @@ -196,15 +199,27 @@ jobs: backend: cpp env: { MACOSX_DEPLOYMENT_TARGET: 10.14 } + exclude: + # fails due to lack of a compatible compiler + - os: windows-2019 + python-version: 2.7 + - os: windows-2019 + python-version: 3.4 + + # cpp specific test fails + - os: windows-2019 + python-version: 3.5 + backend: cpp + + # This defaults to 360 minutes (6h) which is way too long and if a test gets stuck, it can block other pipelines. - # From testing, the runs tend to take ~20/~30 minutes, so a limit of 40 minutes should be enough. This can always be - # changed in the future if needed. - timeout-minutes: 40 + # From testing, the runs tend to take ~20 minutes for ubuntu / macos and ~40 for windows, + # so a limit of 50 minutes should be enough. 
This can always be changed in the future if needed. + timeout-minutes: 50 runs-on: ${{ matrix.os }} env: BACKEND: ${{ matrix.backend }} - OS_NAME: ${{ matrix.os }} PYTHON_VERSION: ${{ matrix.python-version }} GCC_VERSION: 8 USE_CCACHE: 1 @@ -277,31 +292,31 @@ jobs: name: pycoverage_html path: coverage-report-html -# cycoverage: -# runs-on: ubuntu-18.04 -# -# env: -# BACKEND: c,cpp -# OS_NAME: ubuntu-18.04 -# PYTHON_VERSION: 3.9 -# -# steps: -# - name: Checkout repo -# uses: actions/checkout@v2 -# with: -# fetch-depth: 1 -# -# - name: Setup python -# uses: actions/setup-python@v2 -# with: -# python-version: 3.9 -# -# - name: Run Coverage -# env: { COVERAGE: 1 } -# run: bash ./Tools/ci-run.sh -# -# - name: Upload Coverage Report -# uses: actions/upload-artifact@v2 -# with: -# name: cycoverage_html -# path: coverage-report-html + cycoverage: + runs-on: ubuntu-18.04 + + env: + BACKEND: c,cpp + OS_NAME: ubuntu-18.04 + PYTHON_VERSION: 3.9 + + steps: + - name: Checkout repo + uses: actions/checkout@v2 + with: + fetch-depth: 1 + + - name: Setup python + uses: actions/setup-python@v2 + with: + python-version: 3.9 + + - name: Run Coverage + env: { COVERAGE: 1 } + run: bash ./Tools/ci-run.sh + + - name: Upload Coverage Report + uses: actions/upload-artifact@v2 + with: + name: cycoverage_html + path: coverage-report-html diff --git a/.gitignore b/.gitignore index deb4c6fce..979aa41e1 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ *.pyc +*.pyd *.pyo __pycache__ *.so diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index 4309fd4ad..da6a87c1c 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -108,7 +108,16 @@ export PATH="/usr/lib/ccache:$PATH" # Most modern compilers allow the last conflicting option # to override the previous ones, so '-O0 -O3' == '-O3' # This is true for the latest msvc, gcc and clang -CFLAGS="-O0 -ggdb -Wall -Wextra" +if [[ $OSTYPE == "msys" ]]; then # for MSVC cl + # /wd disables warnings + # 4711 warns that function `x` was selected for 
automatic inline expansion + # 4127 warns that a conditional expression is constant, should be fixed here https://github.com/cython/cython/pull/4317 + # (off by default) 5045 warns that the compiler will insert Spectre mitigations for memory load if the /Qspectre switch is specified + # (off by default) 4820 warns about the code in Python\3.9.6\x64\include ... + CFLAGS="-Od /Z7 /W4 /wd4711 /wd4127 /wd5045 /wd4820" +else + CFLAGS="-O0 -ggdb -Wall -Wextra" +fi if [[ $NO_CYTHON_COMPILE != "1" && $PYTHON_VERSION != "pypy"* ]]; then @@ -141,7 +150,7 @@ fi if [[ $TEST_CODE_STYLE == "1" ]]; then make -C docs html || exit 1 -elif [[ $PYTHON_VERSION != "pypy"* ]]; then +elif [[ $PYTHON_VERSION != "pypy"* && $OSTYPE != "msys" ]]; then # Run the debugger tests in python-dbg if available # (but don't fail, because they currently do fail) PYTHON_DBG=$(python -c 'import sys; print("%d.%d" % sys.version_info[:2])') diff --git a/runtests.py b/runtests.py index 426831a47..a3bd397da 100755 --- a/runtests.py +++ b/runtests.py @@ -2706,7 +2706,8 @@ def runtests(options, cmd_args, coverage=None): ('graal_bugs.txt', IS_GRAAL), ('limited_api_bugs.txt', options.limited_api), ('windows_bugs.txt', sys.platform == 'win32'), - ('cygwin_bugs.txt', sys.platform == 'cygwin') + ('cygwin_bugs.txt', sys.platform == 'cygwin'), + ('windows_bugs_39.txt', sys.platform == 'win32' and sys.version_info[:2] == (3, 9)) ] exclude_selectors += [ diff --git a/test-requirements-27.txt b/test-requirements-27.txt index a3ad0439e..efec3bbbf 100644 --- a/test-requirements-27.txt +++ b/test-requirements-27.txt @@ -26,6 +26,7 @@ jupyter-console==5.2.0 jupyter-core==4.6.3 line-profiler==3.1.0 MarkupSafe==1.1.1 +maturin==0.7.6; os_name == "nt" # actually 0.9.4, but it's not available; pywinpty dependency mistune==0.8.4 nbconvert==5.6.1 nbformat==4.4.0 @@ -44,6 +45,7 @@ Pygments==2.5.2 pyparsing==2.4.7 pyrsistent==0.15.7 python-dateutil==2.8.1 +pywinpty==0.5.7 # terminado dependency (pywinpty>=0.5) pyzmq==16.0.4 
qtconsole==4.7.7 QtPy==1.9.0 diff --git a/test-requirements-34.txt b/test-requirements-34.txt index 8697eff4b..8a48d1ae6 100644 --- a/test-requirements-34.txt +++ b/test-requirements-34.txt @@ -1,3 +1,3 @@ -numpy < 1.19.0 +numpy<1.16.0 coverage pycodestyle diff --git a/tests/windows_bugs_39.txt b/tests/windows_bugs_39.txt new file mode 100644 index 000000000..6b56b9d33 --- /dev/null +++ b/tests/windows_bugs_39.txt @@ -0,0 +1,3 @@ +# https://github.com/cython/cython/issues/3450 +TestInline +scanner_trace -- cgit v1.2.1 From a0f63cd4f9318ed65767ea8a6b81cb6f24edf516 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 27 Jul 2022 20:41:10 +0200 Subject: Add manylinux_2_28 wheel builds. --- .github/workflows/wheel-manylinux.yml | 2 ++ Makefile | 2 ++ 2 files changed, 4 insertions(+) diff --git a/.github/workflows/wheel-manylinux.yml b/.github/workflows/wheel-manylinux.yml index 40ef7ad47..67233d9bd 100644 --- a/.github/workflows/wheel-manylinux.yml +++ b/.github/workflows/wheel-manylinux.yml @@ -57,6 +57,8 @@ jobs: - manylinux_2_24_x86_64 - manylinux_2_24_i686 - manylinux_2_24_aarch64 + - manylinux_2_28_x86_64 + - manylinux_2_28_aarch64 runs-on: ubuntu-latest diff --git a/Makefile b/Makefile index 280f30a3a..27051cbe7 100644 --- a/Makefile +++ b/Makefile @@ -15,6 +15,8 @@ MANYLINUX_IMAGES= \ manylinux_2_24_x86_64 \ manylinux_2_24_i686 \ manylinux_2_24_aarch64 \ + manylinux_2_28_x86_64 \ + manylinux_2_28_aarch64 \ # manylinux_2_24_ppc64le \ # manylinux_2_24_s390x -- cgit v1.2.1 From 67c44be9dd01a7a5bdde8df928f0cd4139570e0e Mon Sep 17 00:00:00 2001 From: da-woods Date: Thu, 28 Jul 2022 22:10:59 +0100 Subject: Fix error where "import *" tried to overwrite a macro in utility code (GH-4930) Closes https://github.com/cython/cython/issues/4927 --- Cython/Utility/MemoryView.pyx | 4 ++-- Cython/Utility/MemoryView_C.c | 3 +++ tests/memoryview/memslice.pyx | 6 ++++++ 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/Cython/Utility/MemoryView.pyx 
b/Cython/Utility/MemoryView.pyx index 8fa3c926a..277c0bd87 100644 --- a/Cython/Utility/MemoryView.pyx +++ b/Cython/Utility/MemoryView.pyx @@ -23,7 +23,7 @@ cdef extern from "": void *memset(void *b, int c, size_t len) cdef extern from *: - bint CYTHON_ATOMICS + bint __PYX_CYTHON_ATOMICS_ENABLED() noexcept int __Pyx_GetBuffer(object, Py_buffer *, int) except -1 void __Pyx_ReleaseBuffer(Py_buffer *) @@ -352,7 +352,7 @@ cdef class memoryview(object): (<__pyx_buffer *> &self.view).obj = Py_None Py_INCREF(Py_None) - if not CYTHON_ATOMICS: + if not __PYX_CYTHON_ATOMICS_ENABLED(): global __pyx_memoryview_thread_locks_used if __pyx_memoryview_thread_locks_used < THREAD_LOCKS_PREALLOCATED: self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used] diff --git a/Cython/Utility/MemoryView_C.c b/Cython/Utility/MemoryView_C.c index 34664f062..1b78b2a4e 100644 --- a/Cython/Utility/MemoryView_C.c +++ b/Cython/Utility/MemoryView_C.c @@ -24,6 +24,9 @@ typedef struct { #ifndef CYTHON_ATOMICS #define CYTHON_ATOMICS 1 #endif +// using CYTHON_ATOMICS as a cdef extern bint in the Cython memoryview code +// interacts badly with "import *". Therefore, define a helper function-like macro +#define __PYX_CYTHON_ATOMICS_ENABLED() CYTHON_ATOMICS #define __pyx_atomic_int_type int diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx index ccf760c21..3a5943aa3 100644 --- a/tests/memoryview/memslice.pyx +++ b/tests/memoryview/memslice.pyx @@ -21,6 +21,12 @@ if sys.version_info[0] < 3: else: import builtins +try: + from Cython.Tests.this_module_does_not_exist import * +except ImportError: + # Fails, but the existence of "import *" interacted badly with some utility code + pass + def testcase(func): @wraps(func) -- cgit v1.2.1 From a4be069c6bb0de91c98db5c6378ed2947ff46a45 Mon Sep 17 00:00:00 2001 From: da-woods Date: Fri, 29 Jul 2022 06:32:01 +0100 Subject: Reallow capture of memoryview arguments (GH-4929) It was actually OK in def functions. 
It only looks very dodgy: ``` __Pyx_XDEC_MEMVIEW(closure->arg) ``` This gets called twice and `INC` gets called once. However this is actually OK since XDEC really means "clear" Fixes https://github.com/cython/cython/issues/4798 for 0.29.x (completely I think) --- Cython/Compiler/Nodes.py | 8 -------- tests/memoryview/memslice.pyx | 19 +++++++++++++++++++ 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 743d6959b..c5ddad0a6 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -3237,14 +3237,6 @@ class DefNode(FuncDefNode): # Move arguments into closure if required def put_into_closure(entry): if entry.in_closure: - if entry.type.is_memoryviewslice: - error( - self.pos, - "Referring to a memoryview typed argument directly in a nested closure function " - "is not supported in Cython 0.x. " - "Either upgrade to Cython 3, or assign the argument to a local variable " - "and use that in the nested function." 
- ) code.putln('%s = %s;' % (entry.cname, entry.original_cname)) if entry.xdecref_cleanup: # mostly applies to the starstar arg - this can sometimes be NULL diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx index 3a5943aa3..2d496821f 100644 --- a/tests/memoryview/memslice.pyx +++ b/tests/memoryview/memslice.pyx @@ -2556,6 +2556,25 @@ def test_const_buffer(const int[:] a): print(a[0]) print(c[-1]) + +@testcase +def test_arg_in_closure(int [:] a): + """ + >>> A = IntMockBuffer("A", range(6), shape=(6,)) + >>> inner = test_arg_in_closure(A) + acquired A + >>> inner() + (0, 1) + + The assignment below is just to avoid printing what was collected + >>> del inner; ignore_me = gc.collect() + released A + """ + def inner(): + return (a[0], a[1]) + return inner + + cdef arg_in_closure_cdef(int [:] a): def inner(): return (a[0], a[1]) -- cgit v1.2.1 From c48361d0a0969206e227ec016f654c9d941c2b69 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Fri, 29 Jul 2022 07:43:40 +0200 Subject: Prepare release of 0.29.32. --- CHANGES.rst | 13 +++++++++++++ Cython/Shadow.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index b0eaae423..8b5297166 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -2,6 +2,19 @@ Cython Changelog ================ +0.29.32 (2022-07-29) +==================== + +Bugs fixed +---------- + +* Revert "Using memoryview typed arguments in inner functions is now rejected as unsupported." + Patch by David Woods. (Github issue #4798) + +* ``from module import *`` failed in 0.29.31 when using memoryviews. + Patch by David Woods. (Github issue #4927) + + 0.29.31 (2022-07-27) ==================== diff --git a/Cython/Shadow.py b/Cython/Shadow.py index 60d9c37fd..15b9e4c05 100644 --- a/Cython/Shadow.py +++ b/Cython/Shadow.py @@ -1,7 +1,7 @@ # cython.* namespace for pure mode. 
from __future__ import absolute_import -__version__ = "0.29.31" +__version__ = "0.29.32" try: from __builtin__ import basestring -- cgit v1.2.1 From 6a9b519c3b0d1696c07058eaa4bb1bc628e87bf7 Mon Sep 17 00:00:00 2001 From: da-woods Date: Fri, 29 Jul 2022 07:31:50 +0100 Subject: Fix a crash due to incorrect reference counting of memoryview arguments (GH-4848) Closes https://github.com/cython/cython/issues/4798 Moves reference counting of memoryview arguments entirely into the def function wrapper. New behaviour is: ``` def wrapper(x, ...): # summary of generated code! xview = memoryview(x) # handle rest of args if error_in_rest_of_args: cleanup(xview) ret_value = func(xview, ...) cleanup(xview) return ret_value ``` while old behaviour was ``` def wrapper(x, ...) xview = memoryview(x) # handle rest of args if error_in_rest_of_args: cleanup(xview) return func(xview, ...) def func(xview, ...): # body goes here cleanup(xview) return ... ``` This treats memoryviews in the same way as PyObjects. The next step (not done here) is to unify put_var_incref_memoryviewslice() with put_var_incref() of other types. One observable consequence is that a memoryview argument is released later in functions that reassign their argument. E.g.: ``` def f(double[:] xview, x): # where xview is a view of x... xview = None # In the old version, the memoryview would be released and you # could probably resize x. In the new version the memoryview # would be held until the end of the wrapper function and you # could not resize x x.append(1) ``` I don't think this is a major problem but it's worth mentioning. 
--- Cython/Compiler/Nodes.py | 33 +++++++++++++++++------- tests/memoryview/memslice.pyx | 58 +++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 80 insertions(+), 11 deletions(-) diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 4d6d95e79..50436561b 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -2117,7 +2117,6 @@ class FuncDefNode(StatNode, BlockNode): self.generate_argument_parsing_code(env, code) # If an argument is assigned to in the body, we must # incref it to properly keep track of refcounts. - is_cdef = isinstance(self, CFuncDefNode) for entry in lenv.arg_entries: if not entry.type.is_memoryviewslice: if (acquire_gil or entry.cf_is_reassigned) and not entry.in_closure: @@ -2126,7 +2125,7 @@ class FuncDefNode(StatNode, BlockNode): # we acquire arguments from object conversion, so we have # new references. If we are a cdef function, we need to # incref our arguments - elif is_cdef and entry.cf_is_reassigned: + elif entry.cf_is_reassigned and not entry.in_closure: code.put_var_incref_memoryviewslice(entry, have_gil=code.funcstate.gil_owned) for entry in lenv.var_entries: @@ -2328,14 +2327,14 @@ class FuncDefNode(StatNode, BlockNode): # Decref any increfed args for entry in lenv.arg_entries: + if entry.in_closure: + continue if entry.type.is_memoryviewslice: # decref slices of def functions and acquired slices from cdef # functions, but not borrowed slices from cdef functions. 
- if is_cdef and not entry.cf_is_reassigned: + if not entry.cf_is_reassigned: continue else: - if entry.in_closure: - continue if not acquire_gil and not entry.cf_is_reassigned: continue if entry.type.needs_refcounting: @@ -2881,8 +2880,11 @@ class CFuncDefNode(FuncDefNode): def put_into_closure(entry): if entry.in_closure and not arg.default: code.putln('%s = %s;' % (entry.cname, entry.original_cname)) - code.put_var_incref(entry) - code.put_var_giveref(entry) + if entry.type.is_memoryviewslice: + entry.type.generate_incref_memoryviewslice(code, entry.cname, True) + else: + code.put_var_incref(entry) + code.put_var_giveref(entry) for arg in self.args: put_into_closure(scope.lookup_here(arg.name)) @@ -3512,7 +3514,11 @@ class DefNode(FuncDefNode): def put_into_closure(entry): if entry.in_closure: code.putln('%s = %s;' % (entry.cname, entry.original_cname)) - if entry.xdecref_cleanup: + if entry.type.is_memoryviewslice: + # TODO - at some point reference count of memoryviews should + # genuinely be unified with PyObjects + entry.type.generate_incref_memoryviewslice(code, entry.cname, True) + elif entry.xdecref_cleanup: # mostly applies to the starstar arg - this can sometimes be NULL # so must be xincrefed instead code.put_var_xincref(entry) @@ -3670,11 +3676,20 @@ class DefNodeWrapper(FuncDefNode): # ----- Non-error return cleanup code.put_label(code.return_label) for entry in lenv.var_entries: - if entry.is_arg and entry.type.is_pyobject: + if entry.is_arg: + # mainly captures the star/starstar args if entry.xdecref_cleanup: code.put_var_xdecref(entry) else: code.put_var_decref(entry) + for arg in self.args: + if not arg.type.is_pyobject: + # This captures anything that's been converted from a PyObject. 
+ # Primarily memoryviews at the moment + if arg.entry.xdecref_cleanup: + code.put_var_xdecref(arg.entry) + else: + code.put_var_decref(arg.entry) code.put_finish_refcount_context() if not self.return_type.is_void: diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx index 6a63e6d6e..9d01d36c8 100644 --- a/tests/memoryview/memslice.pyx +++ b/tests/memoryview/memslice.pyx @@ -2622,13 +2622,67 @@ def test_loop_reassign(int[:] a): 3 4 5 - released A 15 + released A """ cdef int sum = 0 for ai in a: sum += ai print(ai) a = None # this should not mess up the loop though! - # release happens here, when the loop temp is released print(sum) + # release happens in the wrapper function + +@testcase +def test_arg_in_closure(int [:] a): + """ + >>> A = IntMockBuffer("A", range(6), shape=(6,)) + >>> inner = test_arg_in_closure(A) + acquired A + >>> inner() + (0, 1) + + The assignment below is just to avoid printing what was collected + >>> del inner; ignore_me = gc.collect() + released A + """ + def inner(): + return (a[0], a[1]) + return inner + +cdef arg_in_closure_cdef(int [:] a): + def inner(): + return (a[0], a[1]) + return inner + +def test_arg_in_closure_cdef(a): + """ + >>> A = IntMockBuffer("A", range(6), shape=(6,)) + >>> inner = test_arg_in_closure_cdef(A) + acquired A + >>> inner() + (0, 1) + + The assignment below is just to avoid printing what was collected + >>> del inner; ignore_me = gc.collect() + released A + """ + return arg_in_closure_cdef(a) + +@testcase +def test_local_in_closure(a): + """ + >>> A = IntMockBuffer("A", range(6), shape=(6,)) + >>> inner = test_local_in_closure(A) + acquired A + >>> inner() + (0, 1) + + The assignment below is just to avoid printing what was collected + >>> del inner; ignore_me = gc.collect() + released A + """ + cdef int[:] a_view = a + def inner(): + return (a_view[0], a_view[1]) + return inner -- cgit v1.2.1 From ccc02e097fc70b8aef43773d77607013d51f9438 Mon Sep 17 00:00:00 2001 From: Stefan Behnel 
Date: Fri, 29 Jul 2022 08:33:36 +0200 Subject: Remove dead test code (replaced by new version further down). --- tests/memoryview/memslice.pyx | 25 ++++--------------------- 1 file changed, 4 insertions(+), 21 deletions(-) diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx index 9d01d36c8..c8c6eb7fa 100644 --- a/tests/memoryview/memslice.pyx +++ b/tests/memoryview/memslice.pyx @@ -2567,27 +2567,6 @@ def test_const_buffer(const int[:] a): print(c[-1]) -''' -# FIXME: currently broken in 3.0 -@testcase -def test_arg_in_closure(int [:] a): - """ - >>> A = IntMockBuffer("A", range(6), shape=(6,)) - >>> inner = test_arg_in_closure(A) - acquired A - >>> inner() - (0, 1) - - The assignment below is just to avoid printing what was collected - >>> del inner; ignore_me = gc.collect() - released A - """ - def inner(): - return (a[0], a[1]) - return inner -''' - - @testcase def test_loop(int[:] a, throw_exception): """ @@ -2610,6 +2589,7 @@ def test_loop(int[:] a, throw_exception): raise ValueError() print(sum) + @testcase def test_loop_reassign(int[:] a): """ @@ -2633,6 +2613,7 @@ def test_loop_reassign(int[:] a): print(sum) # release happens in the wrapper function + @testcase def test_arg_in_closure(int [:] a): """ @@ -2650,6 +2631,7 @@ def test_arg_in_closure(int [:] a): return (a[0], a[1]) return inner + cdef arg_in_closure_cdef(int [:] a): def inner(): return (a[0], a[1]) @@ -2669,6 +2651,7 @@ def test_arg_in_closure_cdef(a): """ return arg_in_closure_cdef(a) + @testcase def test_local_in_closure(a): """ -- cgit v1.2.1 From 820b444ddcbfae7e393298d3a6478e742aa12022 Mon Sep 17 00:00:00 2001 From: Eli Schwartz Date: Fri, 29 Jul 2022 13:51:01 -0400 Subject: Implement the --depfile command-line option for the "cython" tool (GH-4916) * refactor cythonize depfile support to be reusable We would like to use it in the frontend for the cython tool as well. Pull it out of the main loop and make it a utility function. 
* fix depfiles emitting unusable relative paths The paths for files in the source tree are relative instead of absolute. This is not inherently a problem, but they are relative to the containing directory of the source code file but the program that parses these files (make, ninja) computes filenames relative to its working directory, not relative to the source file. Likewise, the final output file had its entire path component trimmed, leaving just a bare filename. Make this actually work by computing all relative paths relative to the current working directory of the cython process itself. When invoked by a build system, this will be the same directory the build system expects files to be based on. This has the obvious drawback of requiring a sensible work directory (i.e. you can't "just run" cython next to the sources), but it's still a reasonable thing to assume and we already make that assumption in other places. --- Cython/Build/Dependencies.py | 18 +------- Cython/Compiler/CmdLine.py | 1 + Cython/Compiler/Main.py | 4 ++ Cython/Compiler/Options.py | 4 ++ Cython/Utils.py | 20 +++++++++ tests/build/depfile_package.srctree | 57 ------------------------- tests/build/depfile_package_cython.srctree | 61 +++++++++++++++++++++++++++ tests/build/depfile_package_cythonize.srctree | 60 ++++++++++++++++++++++++++ 8 files changed, 152 insertions(+), 73 deletions(-) delete mode 100644 tests/build/depfile_package.srctree create mode 100644 tests/build/depfile_package_cython.srctree create mode 100644 tests/build/depfile_package_cythonize.srctree diff --git a/Cython/Build/Dependencies.py b/Cython/Build/Dependencies.py index f14166f7a..e0414f8da 100644 --- a/Cython/Build/Dependencies.py +++ b/Cython/Build/Dependencies.py @@ -43,7 +43,7 @@ except: from .. 
import Utils from ..Utils import (cached_function, cached_method, path_exists, - safe_makedirs, copy_file_to_dir_if_newer, is_package_dir) + safe_makedirs, copy_file_to_dir_if_newer, is_package_dir, write_depfile) from ..Compiler import Errors from ..Compiler.Main import Context from ..Compiler.Options import CompilationOptions, default_options @@ -1049,21 +1049,7 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, # write out the depfile, if requested if depfile: dependencies = deps.all_dependencies(source) - src_base_dir, _ = os.path.split(source) - if not src_base_dir.endswith(os.sep): - src_base_dir += os.sep - # paths below the base_dir are relative, otherwise absolute - paths = [] - for fname in dependencies: - if fname.startswith(src_base_dir): - paths.append(os.path.relpath(fname, src_base_dir)) - else: - paths.append(os.path.abspath(fname)) - - depline = os.path.split(c_file)[1] + ": \\\n " - depline += " \\\n ".join(paths) + "\n" - with open(c_file+'.dep', 'w') as outfile: - outfile.write(depline) + write_depfile(c_file, source, dependencies) # Missing files and those generated by other Cython versions should always be recreated. if Utils.file_generated_by_this_cython(c_file): diff --git a/Cython/Compiler/CmdLine.py b/Cython/Compiler/CmdLine.py index 894eacd0a..80a0cc99d 100644 --- a/Cython/Compiler/CmdLine.py +++ b/Cython/Compiler/CmdLine.py @@ -150,6 +150,7 @@ def create_cython_argparser(): help='Fully qualified module name. 
If not given, is ' 'deduced from the import path if source file is in ' 'a package, or equals the filename otherwise.') + parser.add_argument('-M', '--depfile', action='store_true', help='produce depfiles for the sources') parser.add_argument('sources', nargs='*', default=[]) # TODO: add help diff --git a/Cython/Compiler/Main.py b/Cython/Compiler/Main.py index 0ea5f7748..28a12c8d5 100644 --- a/Cython/Compiler/Main.py +++ b/Cython/Compiler/Main.py @@ -502,6 +502,10 @@ def run_pipeline(source, options, full_module_name=None, context=None): err, enddata = Pipeline.run_pipeline(pipeline, source) context.teardown_errors(err, options, result) + if options.depfile: + from ..Build.Dependencies import create_dependency_tree + dependencies = create_dependency_tree(context).all_dependencies(result.main_source_file) + Utils.write_depfile(result.c_file, result.main_source_file, dependencies) return result diff --git a/Cython/Compiler/Options.py b/Cython/Compiler/Options.py index ea0b95c90..f400e4ba2 100644 --- a/Cython/Compiler/Options.py +++ b/Cython/Compiler/Options.py @@ -661,6 +661,9 @@ class CompilationOptions(object): elif key in ['output_file', 'output_dir']: # ignore the exact name of the output file continue + elif key in ['depfile']: + # external build system dependency tracking file does not influence outputs + continue elif key in ['timestamps']: # the cache cares about the content of files, not about the timestamps of sources continue @@ -739,6 +742,7 @@ default_options = dict( errors_to_stderr=1, cplus=0, output_file=None, + depfile=None, annotate=None, annotate_coverage_xml=None, generate_pxi=0, diff --git a/Cython/Utils.py b/Cython/Utils.py index d6a49f973..0c86ca044 100644 --- a/Cython/Utils.py +++ b/Cython/Utils.py @@ -626,3 +626,23 @@ def build_hex_version(version_string): hexversion = (hexversion << 8) + digit return '0x%08X' % hexversion + + +def write_depfile(target, source, dependencies): + src_base_dir = os.path.dirname(source) + cwd = os.getcwd() + if 
not src_base_dir.endswith(os.sep): + src_base_dir += os.sep + # paths below the base_dir are relative, otherwise absolute + paths = [] + for fname in dependencies: + if fname.startswith(src_base_dir): + paths.append(os.path.relpath(fname, cwd)) + else: + paths.append(os.path.abspath(fname)) + + depline = os.path.relpath(target, cwd) + ": \\\n " + depline += " \\\n ".join(paths) + "\n" + + with open(target+'.dep', 'w') as outfile: + outfile.write(depline) diff --git a/tests/build/depfile_package.srctree b/tests/build/depfile_package.srctree deleted file mode 100644 index d96294b27..000000000 --- a/tests/build/depfile_package.srctree +++ /dev/null @@ -1,57 +0,0 @@ -""" -CYTHONIZE -i pkg --depfile -PYTHON package_test.py -""" - -######## package_test.py ######## - -import os.path - -with open(os.path.join("pkg", "test.c.dep"), "r") as f: - contents = f.read().replace("\\\n", " ").replace("\n", " ") - -assert sorted(contents.split()) == sorted(['test.c:', os.path.join('sub', 'incl.pxi'), 'test.pxd', 'test.pyx']), contents - - -with open(os.path.join("pkg", "sub", "test.c.dep"), "r") as f: - contents = f.read().replace("\\\n", " ").replace("\n", " ") - -contents = [os.path.relpath(entry, '.') - if os.path.isabs(entry) else entry for entry in contents.split()] -assert sorted(contents) == sorted(['test.c:', 'incl.pxi', 'test.pyx', os.path.join('pkg', 'test.pxd')]), contents # last is really one level up - - -######## pkg/__init__.py ######## - - -######## pkg/test.pyx ######## - -TEST = "pkg.test" - -include "sub/incl.pxi" - -cdef object get_str(): - return TEST - - -######## pkg/test.pxd ######## - -cdef object get_str() - - -######## pkg/sub/__init__.py ######## - - -######## pkg/sub/test.pyx ######## -# cython: language_level=3 - -from ..test cimport get_str - -include 'incl.pxi' - -TEST = 'pkg.sub.test' - - -######## pkg/sub/incl.pxi ######## - -pass diff --git a/tests/build/depfile_package_cython.srctree b/tests/build/depfile_package_cython.srctree new file mode 
100644 index 000000000..ccb1dc230 --- /dev/null +++ b/tests/build/depfile_package_cython.srctree @@ -0,0 +1,61 @@ +""" +PYTHON -c 'import os; os.makedirs("builddir/pkg/sub")' +CYTHON -M pkg/test.pyx -o builddir/pkg/test.c +CYTHON --depfile pkg/sub/test.pyx -o builddir/pkg/sub/test.c +PYTHON check.py +""" + +######## check.py ######## + +import os.path + +def pkgpath(*args): + return os.path.join('pkg', *args) + +with open(os.path.join("builddir", "pkg", "test.c.dep"), "r") as f: + contents = f.read().replace("\\\n", " ").replace("\n", " ") + +assert sorted(contents.split()) == sorted([os.path.join('builddir', 'pkg', 'test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('test.pxd'), pkgpath('test.pyx')]), contents + + +with open(os.path.join("builddir", "pkg", "sub", "test.c.dep"), "r") as f: + contents = f.read().replace("\\\n", " ").replace("\n", " ") + +contents = [os.path.relpath(entry, '.') + if os.path.isabs(entry) else entry for entry in contents.split()] +assert sorted(contents) == sorted([os.path.join('builddir', 'pkg', 'sub', 'test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('sub', 'test.pyx'), pkgpath('test.pxd')]), contents # last is really one level up + +######## pkg/__init__.py ######## + + +######## pkg/test.pyx ######## + +TEST = "pkg.test" + +include "sub/incl.pxi" + +cdef object get_str(): + return TEST + + +######## pkg/test.pxd ######## + +cdef object get_str() + + +######## pkg/sub/__init__.py ######## + + +######## pkg/sub/test.pyx ######## +# cython: language_level=3 + +from ..test cimport get_str + +include 'incl.pxi' + +TEST = 'pkg.sub.test' + + +######## pkg/sub/incl.pxi ######## + +pass diff --git a/tests/build/depfile_package_cythonize.srctree b/tests/build/depfile_package_cythonize.srctree new file mode 100644 index 000000000..d68e82ece --- /dev/null +++ b/tests/build/depfile_package_cythonize.srctree @@ -0,0 +1,60 @@ +""" +CYTHONIZE -i pkg --depfile +PYTHON package_test.py +""" + +######## package_test.py ######## + +import os.path + +def 
pkgpath(*args): + return os.path.join('pkg', *args) + +with open(os.path.join("pkg", "test.c.dep"), "r") as f: + contents = f.read().replace("\\\n", " ").replace("\n", " ") + +assert sorted(contents.split()) == sorted([pkgpath('test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('test.pxd'), pkgpath('test.pyx')]), contents + + +with open(os.path.join("pkg", "sub", "test.c.dep"), "r") as f: + contents = f.read().replace("\\\n", " ").replace("\n", " ") + +contents = [os.path.relpath(entry, '.') + if os.path.isabs(entry) else entry for entry in contents.split()] +assert sorted(contents) == sorted([pkgpath('sub', 'test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('sub', 'test.pyx'), pkgpath('test.pxd')]), contents # last is really one level up + + +######## pkg/__init__.py ######## + + +######## pkg/test.pyx ######## + +TEST = "pkg.test" + +include "sub/incl.pxi" + +cdef object get_str(): + return TEST + + +######## pkg/test.pxd ######## + +cdef object get_str() + + +######## pkg/sub/__init__.py ######## + + +######## pkg/sub/test.pyx ######## +# cython: language_level=3 + +from ..test cimport get_str + +include 'incl.pxi' + +TEST = 'pkg.sub.test' + + +######## pkg/sub/incl.pxi ######## + +pass -- cgit v1.2.1 From 11e4b1f11c3849622007481d92c3589ce5cfbec8 Mon Sep 17 00:00:00 2001 From: scoder Date: Fri, 29 Jul 2022 19:56:50 +0200 Subject: Make Cython async functions set CO_COROUTINE and CO_ASYNC_GENERATOR code flags (GH-4902) --- Cython/Compiler/ExprNodes.py | 6 ++++++ Cython/Utility/ModuleSetupCode.c | 7 +++++++ tests/run/test_coroutines_pep492.pyx | 8 +++++--- 3 files changed, 18 insertions(+), 3 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index ab228c552..130c4f1c7 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -9839,6 +9839,12 @@ class CodeObjectNode(ExprNode): flags.append('CO_VARARGS') if self.def_node.starstar_arg: flags.append('CO_VARKEYWORDS') + if self.def_node.is_asyncgen: + 
flags.append('CO_ASYNC_GENERATOR') + elif self.def_node.is_coroutine: + flags.append('CO_COROUTINE') + elif self.def_node.is_generator: + flags.append('CO_GENERATOR') code.putln("%s = (PyObject*)__Pyx_PyCode_New(%d, %d, %d, %d, 0, %s, %s, %s, %s, %s, %s, %s, %s, %s, %d, %s); %s" % ( self.result_code, diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c index cc3f658b0..d54426a4f 100644 --- a/Cython/Utility/ModuleSetupCode.c +++ b/Cython/Utility/ModuleSetupCode.c @@ -705,6 +705,13 @@ class __Pyx_FakeReference { #endif #define __Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj)) +#ifndef CO_COROUTINE + #define CO_COROUTINE 0x80 +#endif +#ifndef CO_ASYNC_GENERATOR + #define CO_ASYNC_GENERATOR 0x200 +#endif + #ifndef Py_TPFLAGS_CHECKTYPES #define Py_TPFLAGS_CHECKTYPES 0 #endif diff --git a/tests/run/test_coroutines_pep492.pyx b/tests/run/test_coroutines_pep492.pyx index 2841d97af..3060ab704 100644 --- a/tests/run/test_coroutines_pep492.pyx +++ b/tests/run/test_coroutines_pep492.pyx @@ -14,7 +14,7 @@ import copy #import types import pickle import os.path -#import inspect +import inspect import unittest import warnings import contextlib @@ -754,7 +754,8 @@ class AsyncBadSyntaxTest(unittest.TestCase): async def g(): pass await z await = 1 - #self.assertTrue(inspect.iscoroutinefunction(f)) + if sys.version_info >= (3,10,6): + self.assertTrue(inspect.iscoroutinefunction(f)) class TokenizerRegrTest(unittest.TestCase): @@ -777,7 +778,8 @@ class TokenizerRegrTest(unittest.TestCase): exec(buf, ns, ns) self.assertEqual(ns['i499'](), 499) self.assertEqual(type(ns['foo']()).__name__, 'coroutine') - #self.assertTrue(inspect.iscoroutinefunction(ns['foo'])) + if sys.version_info >= (3,10,6): + self.assertTrue(inspect.iscoroutinefunction(ns['foo'])) class CoroutineTest(unittest.TestCase): -- cgit v1.2.1 From 9b69292ee4f5340994776cac4ba18c27c9578ea1 Mon Sep 17 00:00:00 2001 From: da-woods Date: Fri, 29 Jul 2022 18:58:31 +0100 Subject: Fix 
initialization of init=False dataclass fields (GH-4908) init=False (in a field name) means that it isn't an argument to `__init__`. It does not mean that the field isn't initialized if a default method of initialization is provided. --- Cython/Compiler/Dataclass.py | 10 +++++---- tests/run/pure_cdef_class_dataclass.py | 41 ++++++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+), 4 deletions(-) diff --git a/Cython/Compiler/Dataclass.py b/Cython/Compiler/Dataclass.py index 0d0bb4768..87ef5e76a 100644 --- a/Cython/Compiler/Dataclass.py +++ b/Cython/Compiler/Dataclass.py @@ -341,8 +341,6 @@ def generate_init_code(init, node, fields, kw_only): seen_default = False for name, field in fields.items(): - if not field.init.value: - continue entry = node.scope.lookup(name) if entry.annotation: annotation = u": %s" % entry.annotation.string.value @@ -357,18 +355,22 @@ def generate_init_code(init, node, fields, kw_only): ph_name = get_placeholder_name() placeholders[ph_name] = field.default # should be a node assignment = u" = %s" % ph_name - elif seen_default and not kw_only: + elif seen_default and not kw_only and field.init.value: error(entry.pos, ("non-default argument '%s' follows default argument " "in dataclass __init__") % name) return "", {}, [] - args.append(u"%s%s%s" % (name, annotation, assignment)) + if field.init.value: + args.append(u"%s%s%s" % (name, annotation, assignment)) if field.is_initvar: continue elif field.default_factory is MISSING: if field.init.value: function_body_code_lines.append(u" %s.%s = %s" % (selfname, name, name)) + elif assignment: + # not an argument to the function, but is still initialized + function_body_code_lines.append(u" %s.%s%s" % (selfname, name, assignment)) else: ph_name = get_placeholder_name() placeholders[ph_name] = field.default_factory diff --git a/tests/run/pure_cdef_class_dataclass.py b/tests/run/pure_cdef_class_dataclass.py index 8a978d36f..b3892586f 100644 --- a/tests/run/pure_cdef_class_dataclass.py +++ 
b/tests/run/pure_cdef_class_dataclass.py @@ -29,3 +29,44 @@ class MyDataclass: a: int = 1 self: list = cython.dataclasses.field(default_factory=list, hash=False) # test that arguments of init don't conflict + + +class DummyObj: + def __repr__(self): + return "DummyObj()" + + +@cython.dataclasses.dataclass +@cython.cclass +class NoInitFields: + """ + >>> NoInitFields() + NoInitFields(has_default=DummyObj(), has_factory='From a lambda', neither=None) + >>> NoInitFields().has_default is NoInitFields().has_default + True + + >>> NoInitFields(1) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + TypeError: NoInitFields.__init__() takes 1 positional argument but 2 were given + + >>> NoInitFields(has_default=1) # doctest: +ELLIPSIS + Traceback (most recent call last): + TypeError: ...has_default... + >>> NoInitFields(has_factory=1) # doctest: +ELLIPSIS + Traceback (most recent call last): + TypeError: ...has_factory... + >>> NoInitFields(neither=1) # doctest: +ELLIPSIS + Traceback (most recent call last): + TypeError: ...neither... + """ + has_default : object = cython.dataclasses.field(default=DummyObj(), init=False) + has_factory : object = cython.dataclasses.field(default_factory=lambda: "From a lambda", init=False) + # Cython will default-initialize to None + neither : object = cython.dataclasses.field(init=False) + + def __post_init__(self): + if not cython.compiled: + # Cython will default-initialize this to None, while Python won't + # and not initializing it will mess up repr + assert not hasattr(self, "neither") + self.neither = None -- cgit v1.2.1 From 84afe5550e619cc1b40e9909e4a35234841ad366 Mon Sep 17 00:00:00 2001 From: scoder Date: Sat, 30 Jul 2022 18:43:10 +0200 Subject: Allow C code assertions in tests by defining regular expressions in module directives. 
(GH-4938) --- Cython/Compiler/Options.py | 11 +++++- Cython/Compiler/Parsing.py | 3 ++ Cython/Compiler/Pipeline.py | 15 ++++---- Cython/TestUtils.py | 70 +++++++++++++++++++++++++++++++++++- tests/compile/c_directives.pyx | 2 ++ tests/run/c_file_validation.srctree | 72 +++++++++++++++++++++++++++++++++++++ 6 files changed, 165 insertions(+), 8 deletions(-) create mode 100644 tests/run/c_file_validation.srctree diff --git a/Cython/Compiler/Options.py b/Cython/Compiler/Options.py index f400e4ba2..bb547e978 100644 --- a/Cython/Compiler/Options.py +++ b/Cython/Compiler/Options.py @@ -171,7 +171,7 @@ def copy_inherited_directives(outer_directives, **new_directives): # For example, test_assert_path_exists and test_fail_if_path_exists should not be inherited # otherwise they can produce very misleading test failures new_directives_out = dict(outer_directives) - for name in ('test_assert_path_exists', 'test_fail_if_path_exists'): + for name in ('test_assert_path_exists', 'test_fail_if_path_exists', 'test_assert_c_code_has', 'test_fail_if_c_code_has'): new_directives_out.pop(name, None) new_directives_out.update(new_directives) return new_directives_out @@ -247,6 +247,8 @@ _directive_defaults = { # test support 'test_assert_path_exists' : [], 'test_fail_if_path_exists' : [], + 'test_assert_c_code_has' : [], + 'test_fail_if_c_code_has' : [], # experimental, subject to change 'formal_grammar': False, @@ -364,6 +366,8 @@ directive_scopes = { # defaults to available everywhere 'set_initial_path' : ('module',), 'test_assert_path_exists' : ('function', 'class', 'cclass'), 'test_fail_if_path_exists' : ('function', 'class', 'cclass'), + 'test_assert_c_code_has' : ('module',), + 'test_fail_if_c_code_has' : ('module',), 'freelist': ('cclass',), 'emit_code_comments': ('module',), # Avoid scope-specific to/from_py_functions for c_string. 
@@ -509,6 +513,11 @@ def parse_directive_list(s, relaxed_bool=False, ignore_unknown=False, result[directive] = parsed_value if not found and not ignore_unknown: raise ValueError('Unknown option: "%s"' % name) + elif directive_types.get(name) is list: + if name in result: + result[name].append(value) + else: + result[name] = [value] else: parsed_value = parse_directive_value(name, value, relaxed_bool=relaxed_bool) result[name] = parsed_value diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index d7c844849..8160149af 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -3853,6 +3853,9 @@ def p_compiler_directive_comments(s): for name in new_directives: if name not in result: pass + elif Options.directive_types.get(name) is list: + result[name] += new_directives[name] + new_directives[name] = result[name] elif new_directives[name] == result[name]: warning(pos, "Duplicate directive found: %s" % (name,)) else: diff --git a/Cython/Compiler/Pipeline.py b/Cython/Compiler/Pipeline.py index 3a5c42352..2fd3a1d3f 100644 --- a/Cython/Compiler/Pipeline.py +++ b/Cython/Compiler/Pipeline.py @@ -231,14 +231,15 @@ def create_pipeline(context, mode, exclude_classes=()): return stages def create_pyx_pipeline(context, options, result, py=False, exclude_classes=()): - if py: - mode = 'py' - else: - mode = 'pyx' + mode = 'py' if py else 'pyx' + test_support = [] + ctest_support = [] if options.evaluate_tree_assertions: from ..TestUtils import TreeAssertVisitor - test_support.append(TreeAssertVisitor()) + test_validator = TreeAssertVisitor() + test_support.append(test_validator) + ctest_support.append(test_validator.create_c_file_validator()) if options.gdb_debug: from ..Debugger import DebugWriter # requires Py2.5+ @@ -257,7 +258,9 @@ def create_pyx_pipeline(context, options, result, py=False, exclude_classes=()): inject_utility_code_stage_factory(context), abort_on_errors], debug_transform, - [generate_pyx_code_stage_factory(options, result)])) 
+ [generate_pyx_code_stage_factory(options, result)], + ctest_support, + )) def create_pxd_pipeline(context, scope, module_name): from .CodeGeneration import ExtractPxdCode diff --git a/Cython/TestUtils.py b/Cython/TestUtils.py index bb2070d39..8328a3d6f 100644 --- a/Cython/TestUtils.py +++ b/Cython/TestUtils.py @@ -1,12 +1,14 @@ from __future__ import absolute_import import os +import re import unittest import shlex import sys import tempfile import textwrap from io import open +from functools import partial from .Compiler import Errors from .CodeWriter import CodeWriter @@ -161,11 +163,64 @@ class TransformTest(CythonTest): return tree +# For the test C code validation, we have to take care that the test directives (and thus +# the match strings) do not just appear in (multiline) C code comments containing the original +# Cython source code. Thus, we discard the comments before matching. +# This seems a prime case for re.VERBOSE, but it seems to match some of the whitespace. +_strip_c_comments = partial(re.compile( + re.sub('\s+', '', r''' + /[*] ( + (?: [^*\n] | [*][^/] )* + [\n] + (?: [^*] | [*][^/] )* + ) [*]/ + ''') +).sub, '') + + class TreeAssertVisitor(VisitorTransform): # actually, a TreeVisitor would be enough, but this needs to run # as part of the compiler pipeline - def visit_CompilerDirectivesNode(self, node): + def __init__(self): + super(TreeAssertVisitor, self).__init__() + self._module_pos = None + self._c_patterns = [] + self._c_antipatterns = [] + + def create_c_file_validator(self): + patterns, antipatterns = self._c_patterns, self._c_antipatterns + + def fail(pos, pattern, found, file_path): + Errors.error(pos, "Pattern '%s' %s found in %s" %( + pattern, + 'was' if found else 'was not', + file_path, + )) + + def validate_c_file(result): + c_file = result.c_file + if not (patterns or antipatterns): + #print("No patterns defined for %s" % c_file) + return result + + with open(c_file, encoding='utf8') as f: + c_content = f.read() + c_content = 
_strip_c_comments(c_content) + + for pattern in patterns: + #print("Searching pattern '%s'" % pattern) + if not re.search(pattern, c_content): + fail(self._module_pos, pattern, found=False, file_path=c_file) + + for antipattern in antipatterns: + #print("Searching antipattern '%s'" % antipattern) + if re.search(antipattern, c_content): + fail(self._module_pos, antipattern, found=True, file_path=c_file) + + return validate_c_file + + def _check_directives(self, node): directives = node.directives if 'test_assert_path_exists' in directives: for path in directives['test_assert_path_exists']: @@ -179,6 +234,19 @@ class TreeAssertVisitor(VisitorTransform): Errors.error( node.pos, "Unexpected path '%s' found in result tree" % path) + if 'test_assert_c_code_has' in directives: + self._c_patterns.extend(directives['test_assert_c_code_has']) + if 'test_fail_if_c_code_has' in directives: + self._c_antipatterns.extend(directives['test_fail_if_c_code_has']) + + def visit_ModuleNode(self, node): + self._module_pos = node.pos + self._check_directives(node) + self.visitchildren(node) + return node + + def visit_CompilerDirectivesNode(self, node): + self._check_directives(node) self.visitchildren(node) return node diff --git a/tests/compile/c_directives.pyx b/tests/compile/c_directives.pyx index 0ede90ba8..ee19e652f 100644 --- a/tests/compile/c_directives.pyx +++ b/tests/compile/c_directives.pyx @@ -2,6 +2,8 @@ # cython: boundscheck = False # cython: ignoreme = OK # cython: warn.undeclared = False +# cython: test_assert_c_code_has = Generated by Cython +# cython: test_fail_if_c_code_has = Generated by Python # This testcase is most useful if you inspect the generated C file diff --git a/tests/run/c_file_validation.srctree b/tests/run/c_file_validation.srctree new file mode 100644 index 000000000..cceb014ac --- /dev/null +++ b/tests/run/c_file_validation.srctree @@ -0,0 +1,72 @@ +""" +PYTHON run_test.py +""" + +######## run_test.py ######## + +import os +from collections import 
defaultdict +from os.path import basename, splitext + +from Cython.Compiler.Options import CompilationOptions +from Cython.Compiler.Main import compile as cython_compile +from Cython.Compiler.Options import default_options + + +def validate_file(filename): + module_name = basename(filename) + c_file = splitext(filename)[0] + '.c' + + options = CompilationOptions( + default_options, + language_level="3", + evaluate_tree_assertions=True, + ) + result = cython_compile(filename, options=options) + return result.num_errors + + +counts = defaultdict(int) +failed = False + +for filename in sorted(os.listdir(".")): + if "run_test" in filename: + continue + + print("Testing '%s'" % filename) + num_errors = validate_file(filename) + print(num_errors, filename) + counts[num_errors] += 1 + + if '_ok' in filename: + if num_errors > 0: + failed = True + print("ERROR: Compilation failed: %s (%s errors)" % (filename, num_errors)) + else: + if num_errors == 0: + failed = True + print("ERROR: Expected failure, but compilation succeeded: %s" % filename) + +assert counts == {0: 2, 1: 2}, counts +assert not failed + + +######## assert_ok.py ######## + +# cython: test_assert_c_code_has = Generated by Cython +# cython: test_assert_c_code_has = CYTHON_HEX_VERSION + + +######## assert_missing.py ######## + +# cython: test_assert_c_code_has = Generated by Python + + +######## fail_if_ok.py ######## + +# cython: test_fail_if_c_code_has = Generated by Python + + +######## fail_if_found.py ######## + +# cython: test_fail_if_c_code_has = Generated by Cython -- cgit v1.2.1 From a864f9b8830fb80a24abb0d82cd9a28289e532a5 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sun, 31 Jul 2022 06:37:05 +0200 Subject: Fix some code style issues: assert is not a function but a statement. 
--- Cython/Compiler/ExprNodes.py | 2 +- tests/run/pure_py.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 130c4f1c7..b2990fdf0 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -544,7 +544,7 @@ class ExprNode(Node): if is_pythran_supported_node_or_none(self): return to_pythran(self) - assert(type_ is not None) + assert type_ is not None return to_pythran(self, type_) def is_c_result_required(self): diff --git a/tests/run/pure_py.py b/tests/run/pure_py.py index a8dc5b014..ae1f820d3 100644 --- a/tests/run/pure_py.py +++ b/tests/run/pure_py.py @@ -549,18 +549,18 @@ def empty_declare(): ] r2.is_integral = True - assert( r2.is_integral == True ) + assert r2.is_integral == True r3.x = 12.3 - assert( r3.x == 12.3 ) + assert r3.x == 12.3 #It generates a correct C code, but raises an exception when interpreted if cython.compiled: r4[0].is_integral = True - assert( r4[0].is_integral == True ) + assert r4[0].is_integral == True r5[0] = 42 - assert ( r5[0] == 42 ) + assert r5[0] == 42 return [i for i, x in enumerate(res) if not x] -- cgit v1.2.1 From 66534408d3d8d6132e5b4e4061481661ba2d190e Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 31 Jul 2022 05:42:21 +0100 Subject: Don't call __Pyx_ImportNumPyArrayTypeIfAvailable() for every fused def function (GH-4935) Cython.Utils.OrderedSet did not have a `__bool__` function and so was always treated as "true". This meant that Cython always generated buffer-handling utility code (`if all_buffer_types:` in FusedNode.py) even when just handling scalar types.
--- Cython/Utils.py | 5 +++++ tests/compile/fused_buffers.pyx | 16 ++++++++++++++++ tests/compile/fused_no_numpy.pyx | 13 +++++++++++++ 3 files changed, 34 insertions(+) create mode 100644 tests/compile/fused_buffers.pyx create mode 100644 tests/compile/fused_no_numpy.pyx diff --git a/Cython/Utils.py b/Cython/Utils.py index 0c86ca044..fa4801731 100644 --- a/Cython/Utils.py +++ b/Cython/Utils.py @@ -579,6 +579,11 @@ class OrderedSet(object): self._list.append(e) self._set.add(e) + def __bool__(self): + return bool(self._set) + + __nonzero__ = __bool__ + # Class decorator that adds a metaclass and recreates the class with it. # Copied from 'six'. diff --git a/tests/compile/fused_buffers.pyx b/tests/compile/fused_buffers.pyx new file mode 100644 index 000000000..73b0315ed --- /dev/null +++ b/tests/compile/fused_buffers.pyx @@ -0,0 +1,16 @@ +# mode: compile + +# cython: test_assert_c_code_has = __Pyx_ImportNumPyArrayTypeIfAvailable +# cython: test_assert_c_code_has = ndarray + +# counterpart test to fused_no_numpy - buffer types are compared against Numpy +# dtypes as a quick test. fused_no_numpy tests that the mechanism isn't +# accidentally generated, while this just confirms that the same mechanism is +# still in use + +ctypedef fused IntOrFloat: + int + float + +def f(IntOrFloat[:] x): + return x diff --git a/tests/compile/fused_no_numpy.pyx b/tests/compile/fused_no_numpy.pyx new file mode 100644 index 000000000..efb49c322 --- /dev/null +++ b/tests/compile/fused_no_numpy.pyx @@ -0,0 +1,13 @@ +# mode: compile + +# cython: test_fail_if_c_code_has = __Pyx_ImportNumPyArrayTypeIfAvailable + +ctypedef fused IntOrFloat: + int + float + +# This function does not use buffers so has no reason to import numpy to +# look up dtypes. 
fused_buffers.pyx is the corresponding test for the case +# where numpy is imported +def f(IntOrFloat x): + return x -- cgit v1.2.1 From cfe1a4eb374c529e946b7684dbfd81a25fbc7eae Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sun, 31 Jul 2022 07:15:46 +0200 Subject: Update changelog. --- CHANGES.rst | 47 ++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 40 insertions(+), 7 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 90e7de05b..83fef5e57 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -17,22 +17,36 @@ Features added ``:=``) were implemented. Patch by David Woods. (Github issue :issue:`2636`) +* Context managers can be written in parentheses. + Patch by David Woods. (Github issue :issue:`4814`) + * Cython avoids raising ``StopIteration`` in ``__next__`` methods when possible. Patch by David Woods. (Github issue :issue:`3447`) -* Some C++ library declarations were extended and fixed. +* Some C++ and CPython library declarations were extended and fixed. Patches by Max Bachmann, Till Hoffmann, Julien Jerphanion, Wenjun Si. (Github issues :issue:`4530`, :issue:`4528`, :issue:`4710`, :issue:`4746`, - :issue:`4751`, :issue:`4818`, :issue:`4762`) + :issue:`4751`, :issue:`4818`, :issue:`4762`, :issue:`4910`) -* The ``cythonize`` command has a new option ``-M`` to generate ``.dep`` dependency - files for the compilation unit. This can be used by external build tools to track - these dependencies. Already available in Cython :ref:`0.29.27`. - Patch by Evgeni Burovski. (Github issue :issue:`1214`) +* The ``cythonize`` and ``cython`` commands have a new option ``-M`` / ``--depfile`` + to generate ``.dep`` dependency files for the compilation unit. This can be used + by external build tools to track these dependencies. + The ``cythonize`` option was already available in Cython :ref:`0.29.27`. + Patches by Evgeni Burovski and Eli Schwartz. 
(Github issue :issue:`1214`) * ``cythonize()`` and the corresponding CLI command now regenerate the output files also when they already exist but were generated by a different Cython version. +* Memory views and the internal Cython array type now identify as ``collections.abc.Sequence``. + Patch by David Woods. (Github issue :issue:`4817`) + +* Cython generators and coroutines now identify as ``CO_ASYNC_GENERATOR``, + ``CO_COROUTINE`` and ``CO_GENERATOR`` accordingly. + (Github issue :issue:`4902`) + +* Memory views can use atomic CPU instructions instead of locks in more cases. + Patch by Sam Gross. (Github issue :issue:`4912`) + * The environment variable ``CYTHON_FORCE_REGEN=1`` can be used to force ``cythonize`` to regenerate the output files regardless of modification times and changes. @@ -53,9 +67,16 @@ Bugs fixed * Exceptions within for-loops that run over memoryviews could lead to a ref-counting error. Patch by David Woods. (Github issue :issue:`4662`) +* Using memoryview arguments in closures of inner functions could lead to ref-counting errors. + Patch by David Woods. (Github issue :issue:`4798`) + * Several optimised string methods failed to accept ``None`` as arguments to their options. Test patch by Kirill Smelkov. (Github issue :issue:`4737`) +* A regression in 3.0.0a10 was resolved that prevented property setter methods from + having the same name as their value argument. + Patch by David Woods. (Github issue :issue:`4836`) + * Typedefs for the ``bint`` type did not always behave like ``bint``. Patch by Nathan Manville and 0dminnimda. (Github issue :issue:`4660`) @@ -70,6 +91,9 @@ Bugs fixed * ``pyximport`` no longer uses the deprecated ``imp`` module. Patch by Matus Valo. (Github issue :issue:`4560`) +* ``pyximport`` failed for long filenames on Windows. + Patch by Matti Picus. (Github issue :issue:`4630`) + * The generated C code failed to compile in CPython 3.11a4 and later. 
(Github issue :issue:`4500`) @@ -85,6 +109,14 @@ Bugs fixed compatible exception specifications. Patches by David Woods. (Github issues :issue:`4770`, :issue:`4689`) +* The runtime size check for imported ``PyVarObject`` types was improved + to reduce false positives and adapt to Python 3.11. + Patch by David Woods. (Github issues :issue:`4827`, :issue:`4894`) + +* The generated modules no longer import NumPy internally when using + fused types but no memoryviews. + Patch by David Woods. (Github issue :issue:`4935`) + * Improve compatibility with forthcoming CPython 3.12 release. * Limited API C preprocessor warning is compatible with MSVC. Patch by @@ -96,7 +128,8 @@ Bugs fixed * The parser allowed some invalid spellings of ``...``. Patch by 0dminnimda. (Github issue :issue:`4868`) -* Includes all bug-fixes from the 0.29 branch up to the :ref:`0.29.31` release. +* Includes all bug-fixes and features from the 0.29 maintenance branch + up to the :ref:`0.29.32` release. Other changes ------------- -- cgit v1.2.1 From 65ff43ed9ce7f90ae80afd83702e84ad98f4241a Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sun, 31 Jul 2022 07:22:37 +0200 Subject: Prepare release of 3.0.0a11. --- CHANGES.rst | 2 +- Cython/Shadow.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 83fef5e57..8c8462e5e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -2,7 +2,7 @@ Cython Changelog ================ -3.0.0 alpha 11 (2022-0?-??) +3.0.0 alpha 11 (2022-07-31) =========================== Features added diff --git a/Cython/Shadow.py b/Cython/Shadow.py index 78d950ce2..097126475 100644 --- a/Cython/Shadow.py +++ b/Cython/Shadow.py @@ -1,7 +1,7 @@ # cython.* namespace for pure mode. 
from __future__ import absolute_import -__version__ = "3.0.0a10" +__version__ = "3.0.0a11" try: from __builtin__ import basestring -- cgit v1.2.1 From 9366abc7d6700da7c98b3b1f169e4f2bfac28c54 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sun, 31 Jul 2022 07:30:07 +0200 Subject: Fix code style. --- Cython/Distutils/old_build_ext.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cython/Distutils/old_build_ext.py b/Cython/Distutils/old_build_ext.py index 3595d80e0..cec54d93d 100644 --- a/Cython/Distutils/old_build_ext.py +++ b/Cython/Distutils/old_build_ext.py @@ -321,8 +321,8 @@ class old_build_ext(_build_ext.build_ext): for source in cython_sources: target = cython_targets[source] depends = [source] + list(extension.depends or ()) - if(source[-4:].lower()==".pyx" and os.path.isfile(source[:-3]+"pxd")): - depends += [source[:-3]+"pxd"] + if source[-4:].lower() == ".pyx" and os.path.isfile(source[:-3] + "pxd"): + depends += [source[:-3] + "pxd"] rebuild = self.force or newer_group(depends, target, 'newer') if not rebuild and newest_dependency is not None: rebuild = newer(newest_dependency, target) -- cgit v1.2.1 From d6c66598353555a42b18095f1027cea4503cfdd6 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sun, 31 Jul 2022 11:13:50 +0200 Subject: Use manylinux2014 instead of the EOL manylinux1 docker images to build legacy wheels. 
--- .github/workflows/wheel-manylinux.yml | 4 ++-- Makefile | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/wheel-manylinux.yml b/.github/workflows/wheel-manylinux.yml index 67233d9bd..7a2ecd926 100644 --- a/.github/workflows/wheel-manylinux.yml +++ b/.github/workflows/wheel-manylinux.yml @@ -50,8 +50,8 @@ jobs: matrix: image: - - manylinux1_x86_64 - - manylinux1_i686 + - manylinux2014_x86_64 + - manylinux2014_i686 - musllinux_1_1_x86_64 - musllinux_1_1_aarch64 - manylinux_2_24_x86_64 diff --git a/Makefile b/Makefile index 27051cbe7..ddc9e3cc0 100644 --- a/Makefile +++ b/Makefile @@ -8,8 +8,8 @@ PARALLEL?=$(shell ${PYTHON} -c 'import sys; print("-j5" if sys.version_info >= ( MANYLINUX_CFLAGS=-O3 -g0 -mtune=generic -pipe -fPIC MANYLINUX_LDFLAGS= MANYLINUX_IMAGES= \ - manylinux1_x86_64 \ - manylinux1_i686 \ + manylinux2014_x86_64 \ + manylinux2014_i686 \ musllinux_1_1_x86_64 \ musllinux_1_1_aarch64 \ manylinux_2_24_x86_64 \ -- cgit v1.2.1 From 6cd64875336b5121e145a3fe85c7621f3db5a64e Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 31 Jul 2022 14:03:19 +0100 Subject: code style in old_build_ext.py --- Cython/Distutils/old_build_ext.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Distutils/old_build_ext.py b/Cython/Distutils/old_build_ext.py index 3595d80e0..f09ee3a1e 100644 --- a/Cython/Distutils/old_build_ext.py +++ b/Cython/Distutils/old_build_ext.py @@ -321,7 +321,7 @@ class old_build_ext(_build_ext.build_ext): for source in cython_sources: target = cython_targets[source] depends = [source] + list(extension.depends or ()) - if(source[-4:].lower()==".pyx" and os.path.isfile(source[:-3]+"pxd")): + if source[-4:].lower()==".pyx" and os.path.isfile(source[:-3]+"pxd"): depends += [source[:-3]+"pxd"] rebuild = self.force or newer_group(depends, target, 'newer') if not rebuild and newest_dependency is not None: -- cgit v1.2.1 From f6edb652db32e12505be8bc4767e64b50790f378 Mon Sep 17 00:00:00 2001 From: 
da-woods Date: Sun, 31 Jul 2022 17:34:43 +0100 Subject: Add var_entry for "special" modules (e.g. typing) (#4603) Allows them to be accessed as regular Python attributes too. (First second or third(?) fix for something that probably should have been in #3400) --- Cython/Compiler/Builtin.py | 23 +++++++++++++++++++---- tests/run/cdef_class_dataclass.pyx | 9 ++++++++- tests/run/pep526_variable_annotations.py | 24 +++++++++++++++++++++++- 3 files changed, 50 insertions(+), 6 deletions(-) diff --git a/Cython/Compiler/Builtin.py b/Cython/Compiler/Builtin.py index 46a4dbb5b..0e904cf81 100644 --- a/Cython/Compiler/Builtin.py +++ b/Cython/Compiler/Builtin.py @@ -5,7 +5,7 @@ from __future__ import absolute_import from .StringEncoding import EncodedString -from .Symtab import BuiltinScope, StructOrUnionScope, ModuleScope +from .Symtab import BuiltinScope, StructOrUnionScope, ModuleScope, Entry from .Code import UtilityCode from .TypeSlots import Signature from . import PyrexTypes @@ -480,16 +480,31 @@ def get_known_standard_library_module_scope(module_name): indexed_type = PyrexTypes.PythonTupleTypeConstructor(EncodedString("typing."+name), tp) else: indexed_type = PyrexTypes.PythonTypeConstructor(EncodedString("typing."+name), tp) - mod.declare_type(EncodedString(name), indexed_type, pos = None) + name = EncodedString(name) + entry = mod.declare_type(name, indexed_type, pos = None) + var_entry = Entry(name, None, PyrexTypes.py_object_type) + var_entry.is_pyglobal = True + var_entry.scope = mod + entry.as_variable = var_entry for name in ['ClassVar', 'Optional']: + name = EncodedString(name) indexed_type = PyrexTypes.SpecialPythonTypeConstructor(EncodedString("typing."+name)) - mod.declare_type(name, indexed_type, pos = None) + entry = mod.declare_type(name, indexed_type, pos = None) + var_entry = Entry(name, None, PyrexTypes.py_object_type) + var_entry.is_pyglobal = True + var_entry.scope = mod + entry.as_variable = var_entry _known_module_scopes[module_name] = mod elif 
module_name == "dataclasses": mod = ModuleScope(module_name, None, None) indexed_type = PyrexTypes.SpecialPythonTypeConstructor(EncodedString("dataclasses.InitVar")) - mod.declare_type(EncodedString("InitVar"), indexed_type, pos = None) + initvar_string = EncodedString("InitVar") + entry = mod.declare_type(initvar_string, indexed_type, pos = None) + var_entry = Entry(initvar_string, None, PyrexTypes.py_object_type) + var_entry.is_pyglobal = True + var_entry.scope = mod + entry.as_variable = var_entry _known_module_scopes[module_name] = mod return mod diff --git a/tests/run/cdef_class_dataclass.pyx b/tests/run/cdef_class_dataclass.pyx index 2f69e0f8f..7be88f695 100644 --- a/tests/run/cdef_class_dataclass.pyx +++ b/tests/run/cdef_class_dataclass.pyx @@ -224,6 +224,10 @@ cdef class TestFrozen: """ a: cython.double = 2.0 +def get_dataclass_initvar(): + return py_dataclasses.InitVar + + @dataclass(kw_only=True) cdef class TestKwOnly: """ @@ -251,10 +255,11 @@ cdef class TestKwOnly: a: cython.double = 2.0 b: cython.long + import sys if sys.version_info >= (3, 7): __doc__ = """ - >>> from dataclasses import Field, is_dataclass, fields + >>> from dataclasses import Field, is_dataclass, fields, InitVar # It uses the types from the standard library where available >>> all(isinstance(v, Field) for v in BasicDataclass.__dataclass_fields__.values()) @@ -275,4 +280,6 @@ if sys.version_info >= (3, 7): ['a', 'b', 'c', 'd'] >>> [ f.name for f in fields(InitClassVars)] ['a'] + >>> get_dataclass_initvar() == InitVar + True """ diff --git a/tests/run/pep526_variable_annotations.py b/tests/run/pep526_variable_annotations.py index 56cb0201b..6f430c0af 100644 --- a/tests/run/pep526_variable_annotations.py +++ b/tests/run/pep526_variable_annotations.py @@ -203,6 +203,29 @@ def test_tuple(a: typing.Tuple[cython.int, cython.float], b: typing.Tuple[cython print(cython.typeof(c) + (" object" if not cython.compiled else "")) +def test_use_typing_attributes_as_non_annotations(): + """ + >>> 
test_use_typing_attributes_as_non_annotations() + typing.Tuple typing.Tuple[int] + typing.Optional True + typing.Optional True + """ + x1 = typing.Tuple + x2 = typing.Tuple[int] + y1 = typing.Optional + y2 = typing.Optional[typing.Dict] + z1 = Optional + z2 = Optional[Dict] + # The result of printing "Optional[type]" is slightly version-dependent + # so accept both possible forms + allowed_optional_strings = [ + "typing.Union[typing.Dict, NoneType]", + "typing.Optional[typing.Dict]" + ] + print(x1, x2) + print(y1, str(y2) in allowed_optional_strings) + print(z1, str(z2) in allowed_optional_strings) + if cython.compiled: __doc__ = """ # passing non-dicts to variables declared as dict now fails @@ -219,6 +242,5 @@ if cython.compiled: TypeError: Expected dict, got D """ - _WARNINGS = """ """ -- cgit v1.2.1 From 6147450d043eca0ee3e47c79558248705e85e41f Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 31 Jul 2022 18:41:59 +0100 Subject: Use standard C or C++ atomics for memoryviews (#4925) Use them in preference to the existing compiler-specific atomic types. In both cases this requires the 2011 edition of the standard. Closes https://github.com/cython/cython/issues/4923 --- Cython/Utility/MemoryView_C.c | 58 ++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 54 insertions(+), 4 deletions(-) diff --git a/Cython/Utility/MemoryView_C.c b/Cython/Utility/MemoryView_C.c index 010066c89..774ec1767 100644 --- a/Cython/Utility/MemoryView_C.c +++ b/Cython/Utility/MemoryView_C.c @@ -29,8 +29,57 @@ typedef struct { #define __PYX_CYTHON_ATOMICS_ENABLED() CYTHON_ATOMICS #define __pyx_atomic_int_type int +#define __pyx_nonatomic_int_type int + +// For standard C/C++ atomics, get the headers first so we have ATOMIC_INT_LOCK_FREE +// defined when we decide to use them. 
+#if CYTHON_ATOMICS && (defined(__STDC_VERSION__) && \ + (__STDC_VERSION__ >= 201112L) && \ + !defined(__STDC_NO_ATOMICS__)) + #include +#elif CYTHON_ATOMICS && (defined(__cplusplus) && ( \ + (__cplusplus >= 201103L) || \ + (defined(_MSC_VER) && _MSC_VER >= 1700))) + #include +#endif -#if CYTHON_ATOMICS && (__GNUC__ >= 5 || (__GNUC__ == 4 && \ +#if CYTHON_ATOMICS && (defined(__STDC_VERSION__) && \ + (__STDC_VERSION__ >= 201112L) && \ + !defined(__STDC_NO_ATOMICS__) && \ + ATOMIC_INT_LOCK_FREE == 2) + // C11 atomics are available. + // Require ATOMIC_INT_LOCK_FREE because I'm nervous about the __pyx_atomic_int[2] + // alignment trick in MemoryView.pyx if it uses mutexes. + #undef __pyx_atomic_int_type + #define __pyx_atomic_int_type atomic_int + // TODO - it might be possible to use a less strict memory ordering here + #define __pyx_atomic_incr_aligned(value) atomic_fetch_add(value, 1) + #define __pyx_atomic_decr_aligned(value) atomic_fetch_sub(value, 1) + #if defined(__PYX_DEBUG_ATOMICS) && defined(_MSC_VER) + #pragma message ("Using standard C atomics") + #elif defined(__PYX_DEBUG_ATOMICS) + #warning "Using standard C atomics" + #endif +#elif CYTHON_ATOMICS && (defined(__cplusplus) && ( \ + (__cplusplus >= 201103L) || \ + /*_MSC_VER 1700 is Visual Studio 2012 */ \ + (defined(_MSC_VER) && _MSC_VER >= 1700)) && \ + ATOMIC_INT_LOCK_FREE == 2) + // C++11 atomics are available. + // Require ATOMIC_INT_LOCK_FREE because I'm nervous about the __pyx_atomic_int[2] + // alignment trick in MemoryView.pyx if it uses mutexes. 
+ #undef __pyx_atomic_int_type + #define __pyx_atomic_int_type std::atomic_int + // TODO - it might be possible to use a less strict memory ordering here + #define __pyx_atomic_incr_aligned(value) std::atomic_fetch_add(value, 1) + #define __pyx_atomic_decr_aligned(value) std::atomic_fetch_sub(value, 1) + + #if defined(__PYX_DEBUG_ATOMICS) && defined(_MSC_VER) + #pragma message ("Using standard C++ atomics") + #elif defined(__PYX_DEBUG_ATOMICS) + #warning "Using standard C++ atomics" + #endif +#elif CYTHON_ATOMICS && (__GNUC__ >= 5 || (__GNUC__ == 4 && \ (__GNUC_MINOR__ > 1 || \ (__GNUC_MINOR__ == 1 && __GNUC_PATCHLEVEL__ >= 2)))) /* gcc >= 4.1.2 */ @@ -40,11 +89,12 @@ typedef struct { #ifdef __PYX_DEBUG_ATOMICS #warning "Using GNU atomics" #endif -#elif CYTHON_ATOMICS && defined(_MSC_VER) && CYTHON_COMPILING_IN_NOGIL +#elif CYTHON_ATOMICS && defined(_MSC_VER) /* msvc */ #include #undef __pyx_atomic_int_type #define __pyx_atomic_int_type long + #define __pyx_nonatomic_int_type long #pragma intrinsic (_InterlockedExchangeAdd) #define __pyx_atomic_incr_aligned(value) _InterlockedExchangeAdd(value, 1) #define __pyx_atomic_decr_aligned(value) _InterlockedExchangeAdd(value, -1) @@ -484,7 +534,7 @@ __pyx_sub_acquisition_count_locked(__pyx_atomic_int *acquisition_count, static CYTHON_INLINE void __Pyx_INC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil, int lineno) { - __pyx_atomic_int_type old_acquisition_count; + __pyx_nonatomic_int_type old_acquisition_count; struct {{memview_struct_name}} *memview = memslice->memview; if (unlikely(!memview || (PyObject *) memview == Py_None)) { // Allow uninitialized memoryview assignment and do not ref-count None. 
@@ -511,7 +561,7 @@ __Pyx_INC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil, int lineno) static CYTHON_INLINE void __Pyx_XCLEAR_MEMVIEW({{memviewslice_name}} *memslice, int have_gil, int lineno) { - __pyx_atomic_int_type old_acquisition_count; + __pyx_nonatomic_int_type old_acquisition_count; struct {{memview_struct_name}} *memview = memslice->memview; if (unlikely(!memview || (PyObject *) memview == Py_None)) { -- cgit v1.2.1 From 23972a4aeaab166a173797c08f371ada8231614d Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 31 Jul 2022 19:59:56 +0100 Subject: Moved "_compile" to shared test util --- Cython/TestUtils.py | 24 +++++++++++++++++++++++- tests/run/test_patma.py | 20 ++------------------ 2 files changed, 25 insertions(+), 19 deletions(-) diff --git a/Cython/TestUtils.py b/Cython/TestUtils.py index bb2070d39..528a828c7 100644 --- a/Cython/TestUtils.py +++ b/Cython/TestUtils.py @@ -10,9 +10,10 @@ from io import open from .Compiler import Errors from .CodeWriter import CodeWriter -from .Compiler.TreeFragment import TreeFragment, strip_common_indent +from .Compiler.TreeFragment import TreeFragment, strip_common_indent, StringParseContext from .Compiler.Visitor import TreeVisitor, VisitorTransform from .Compiler import TreePath +from .Compiler.ParseTreeTransforms import PostParse class NodeTypeWriter(TreeVisitor): @@ -272,3 +273,24 @@ def write_newer_file(file_path, newer_than, content, dedent=False, encoding=None while other_time is None or other_time >= os.path.getmtime(file_path): write_file(file_path, content, dedent=dedent, encoding=encoding) + + +def py_parse_code(code): + """ + Compiles code far enough to get errors from the parser and post-parse stage. + + Is useful for checking for syntax errors, however it doesn't generate runable + code. 
+ """ + context = StringParseContext("test") + # all the errors we care about are in the parsing or postparse stage + try: + with Errors.local_errors() as errors: + result = TreeFragment(code, pipeline=[PostParse(context)]) + result = result.substitute() + if errors: + raise errors[0] # compile error, which should get caught + else: + return result + except Errors.CompileError as e: + raise SyntaxError(e.message_only) diff --git a/tests/run/test_patma.py b/tests/run/test_patma.py index 61ac6957f..e55827f35 100644 --- a/tests/run/test_patma.py +++ b/tests/run/test_patma.py @@ -4,29 +4,13 @@ # new code import cython -from Cython.Compiler.TreeFragment import TreeFragment, StringParseContext -from Cython.Compiler.Errors import local_errors, CompileError -from Cython.Compiler.ParseTreeTransforms import PostParse - -def _compile(code): - context = StringParseContext("test") - # all the errors we care about are in the parsing or postparse stage - try: - with local_errors() as errors: - result = TreeFragment(code, pipeline=[PostParse(context)]) - result = result.substitute() - if errors: - raise errors[0] # compile error, which should get caught - else: - return result - except CompileError as e: - raise SyntaxError(e.message_only) +from Cython.TestUtils import py_parse_code if cython.compiled: def compile(code, name, what): assert what == 'exec' - _compile(code) + py_parse_code(code) def disable(func): -- cgit v1.2.1 From e4ad14fa103dd94d2f1747a9a6b852bbb0a59cfb Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 31 Jul 2022 20:33:00 +0100 Subject: indent Tools/ci-run.sh Co-authored-by: 0dminnimda <0dminnimda@gmail.com> --- Tools/ci-run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index 0585f9a04..818bf68f6 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -77,7 +77,7 @@ else if [[ $PYTHON_VERSION != "pypy"* && $PYTHON_VERSION != "3."[1]* ]]; then python -m pip install -r test-requirements-cpython.txt || exit 1 
elif [[ $PYTHON_VERSION == "pypy-2.7" ]]; then - python -m pip install -r test-requirements-pypy27.txt || exit 1 + python -m pip install -r test-requirements-pypy27.txt || exit 1 fi fi fi -- cgit v1.2.1 From 1bb1210126ed17638e7a73b570bde927a45d2a27 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 31 Jul 2022 20:54:19 +0100 Subject: Rename variable in parser --- Cython/Compiler/Parsing.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index bb89875c4..8999a8e4d 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -4313,10 +4313,10 @@ def p_mapping_pattern(s): double_star_capture_target = None items_patterns = [] - double_star_is_not_final = None + star_star_arg_pos = None while True: - if double_star_capture_target and not double_star_is_not_final: - double_star_is_not_final = s.position() + if double_star_capture_target and not star_star_arg_pos: + star_star_arg_pos = s.position() if s.sy == '**': s.next() double_star_capture_target = p_pattern_capture_target(s) @@ -4339,9 +4339,9 @@ def p_mapping_pattern(s): break s.expect('}') - if double_star_is_not_final is not None: + if star_star_arg_pos is not None: return Nodes.ErrorNode( - double_star_is_not_final, + star_star_arg_pos, what = "** pattern must be the final part of a mapping pattern." ) return MatchCaseNodes.MatchMappingPatternNode( -- cgit v1.2.1 From cc75ab5cea18decdf47c19bc7483193a2d9e8d00 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Mon, 1 Aug 2022 07:27:29 +0200 Subject: Minor code cleanup. 
--- Cython/Build/Inline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Build/Inline.py b/Cython/Build/Inline.py index 15d26dbf8..ddb59585e 100644 --- a/Cython/Build/Inline.py +++ b/Cython/Build/Inline.py @@ -52,7 +52,7 @@ else: class UnboundSymbols(EnvTransform, SkipDeclarations): def __init__(self): - CythonTransform.__init__(self, None) + super(EnvTransform, self).__init__(context=None) self.unbound = set() def visit_NameNode(self, node): if not self.current_env().lookup(node.name): -- cgit v1.2.1 From 1482bb8d159a8da3d2d7a53886a64602603ec11a Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 1 Aug 2022 17:58:24 +0100 Subject: Apply suggestions from code review Co-authored-by: scoder --- Cython/Compiler/MatchCaseNodes.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/Cython/Compiler/MatchCaseNodes.py b/Cython/Compiler/MatchCaseNodes.py index 99fa70ccd..869c361c9 100644 --- a/Cython/Compiler/MatchCaseNodes.py +++ b/Cython/Compiler/MatchCaseNodes.py @@ -56,7 +56,7 @@ class MatchCaseNode(Node): class PatternNode(Node): """ - DW decided that PatternNode shouldn't be an expression because + PatternNode is not an expression because it does several things (evalutating a boolean expression, assignment of targets), and they need to be done at different times. 
@@ -67,23 +67,22 @@ class PatternNode(Node): child_attrs = ["as_targets"] def __init__(self, pos, **kwds): - super(PatternNode, self).__init__(pos, **kwds) if "as_targets" not in kwds: - self.as_targets = [] + kwds["as_targets"] = [] + super(PatternNode, self).__init__(pos, **kwds) def is_irrefutable(self): return False def get_targets(self): targets = self.get_main_pattern_targets() - for t in self.as_targets: - self.add_target_to_targets(targets, t.name) + for target in self.as_targets: + self.add_target_to_targets(targets, target.name) return targets def update_targets_with_targets(self, targets, other_targets): - intersection = targets.intersection(other_targets) - for i in intersection: - error(self.pos, "multiple assignments to name '%s' in pattern" % i) + for name in targets.intersection(other_targets): + error(self.pos, "multiple assignments to name '%s' in pattern" % name) targets.update(other_targets) def add_target_to_targets(self, targets, target): @@ -98,7 +97,7 @@ class PatternNode(Node): def validate_irrefutable(self): for attr in self.child_attrs: child = getattr(self, attr) - if isinstance(child, PatternNode): + if child is not None and isinstance(child, PatternNode): child.validate_irrefutable() -- cgit v1.2.1 From 57eaf627e5cc3276485ea041cb78aa844e812faf Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 1 Aug 2022 18:18:16 +0100 Subject: Changes from review (mainly better names) --- Cython/Compiler/MatchCaseNodes.py | 42 +++++++++++++++++++-------------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/Cython/Compiler/MatchCaseNodes.py b/Cython/Compiler/MatchCaseNodes.py index 869c361c9..dbd5b8770 100644 --- a/Cython/Compiler/MatchCaseNodes.py +++ b/Cython/Compiler/MatchCaseNodes.py @@ -16,7 +16,7 @@ class MatchNode(StatNode): def validate_irrefutable(self): found_irrefutable_case = None - for c in self.cases: + for case in self.cases: if found_irrefutable_case: error( found_irrefutable_case.pos, @@ -26,9 +26,9 @@ class 
MatchNode(StatNode): ), ) break - if c.is_irrefutable(): - found_irrefutable_case = c - c.validate_irrefutable() + if case.is_irrefutable(): + found_irrefutable_case = case + case.validate_irrefutable() def analyse_expressions(self, env): error(self.pos, "Structural pattern match is not yet implemented") @@ -149,9 +149,9 @@ class OrPatternNode(PatternNode): child_attrs = PatternNode.child_attrs + ["alternatives"] def get_first_irrefutable(self): - for a in self.alternatives: - if a.is_irrefutable(): - return a + for alternative in self.alternatives: + if alternative.is_irrefutable(): + return alternative return None def is_irrefutable(self): @@ -162,17 +162,17 @@ class OrPatternNode(PatternNode): def get_main_pattern_targets(self): child_targets = None - for ch in self.alternatives: - ch_targets = ch.get_targets() - if child_targets is not None and child_targets != ch_targets: + for alternative in self.alternatives: + alternative_targets = alternative.get_targets() + if child_targets is not None and child_targets != alternative_targets: error(self.pos, "alternative patterns bind different names") - child_targets = ch_targets + child_targets = alternative_targets return child_targets def validate_irrefutable(self): super(OrPatternNode, self).validate_irrefutable() found_irrefutable_case = None - for a in self.alternatives: + for alternative in self.alternatives: if found_irrefutable_case: error( found_irrefutable_case.pos, @@ -182,9 +182,9 @@ class OrPatternNode(PatternNode): ), ) break - if a.is_irrefutable(): - found_irrefutable_case = a - a.validate_irrefutable() + if alternative.is_irrefutable(): + found_irrefutable_case = alternative + alternative.validate_irrefutable() class MatchSequencePatternNode(PatternNode): @@ -196,8 +196,8 @@ class MatchSequencePatternNode(PatternNode): def get_main_pattern_targets(self): targets = set() - for p in self.patterns: - self.update_targets_with_targets(targets, p.get_targets()) + for pattern in self.patterns: + 
self.update_targets_with_targets(targets, pattern.get_targets()) return targets @@ -220,8 +220,8 @@ class MatchMappingPatternNode(PatternNode): def get_main_pattern_targets(self): targets = set() - for p in self.value_patterns: - self.update_targets_with_targets(targets, p.get_targets()) + for pattern in self.value_patterns: + self.update_targets_with_targets(targets, pattern.get_targets()) if self.double_star_capture_target: self.add_target_to_targets(targets, self.double_star_capture_target.name) return targets @@ -250,6 +250,6 @@ class ClassPatternNode(PatternNode): def get_main_pattern_targets(self): targets = set() - for p in self.positional_patterns + self.keyword_pattern_patterns: - self.update_targets_with_targets(targets, p.get_targets()) + for pattern in self.positional_patterns + self.keyword_pattern_patterns: + self.update_targets_with_targets(targets, pattern.get_targets()) return targets -- cgit v1.2.1 From 2aac3a01bce5edd6bc519d079f96fe8db4556426 Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 1 Aug 2022 18:32:58 +0100 Subject: Improvements to Parsing.py --- Cython/Compiler/Parsing.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 8999a8e4d..94efe2c6b 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -4,7 +4,6 @@ # from __future__ import absolute_import -from ast import Expression # This should be done automatically import cython @@ -20,7 +19,7 @@ cython.declare(Nodes=object, ExprNodes=object, EncodedString=object, from io import StringIO import re import sys -from unicodedata import lookup as lookup_unicodechar, category as unicode_category, name +from unicodedata import lookup as lookup_unicodechar, category as unicode_category from functools import partial, reduce from .Scanning import PyrexScanner, FileSourceDescriptor, tentatively_scan @@ -4139,22 +4138,22 @@ def p_closed_pattern(s): elif s.sy == '(': with 
tentatively_scan(s) as errors: result = p_group_pattern(s) - if not errors: - return result + if not errors: + return result return p_sequence_pattern(s) with tentatively_scan(s) as errors: result = p_literal_pattern(s) - if not errors: - return result + if not errors: + return result with tentatively_scan(s) as errors: result = p_capture_pattern(s) - if not errors: - return result + if not errors: + return result with tentatively_scan(s) as errors: result = p_value_pattern(s) - if not errors: - return result + if not errors: + return result return p_class_pattern(s) @@ -4263,7 +4262,7 @@ def p_value_pattern(s): attr_pos = s.position() s.next() attr = p_ident(s) - res = ExprNodes.AttributeNode(attr_pos, obj = res, attribute=attr) + res = ExprNodes.AttributeNode(attr_pos, obj = res, attribute = attr) if s.sy in ['(', '=']: s.error("Unexpected symbol '%s'" % s.sy) return MatchCaseNodes.MatchValuePatternNode(pos, value = res) -- cgit v1.2.1 From 9f35805dc92b820e32d85600dd08ba3aa13ede03 Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 1 Aug 2022 18:28:03 +0100 Subject: Factor out shared bits of p_atom and p_literal_pattern --- Cython/Compiler/Parsing.pxd | 2 + Cython/Compiler/Parsing.py | 91 ++++++++++++++++++++++----------------------- 2 files changed, 46 insertions(+), 47 deletions(-) diff --git a/Cython/Compiler/Parsing.pxd b/Cython/Compiler/Parsing.pxd index 1be718581..17fe9f769 100644 --- a/Cython/Compiler/Parsing.pxd +++ b/Cython/Compiler/Parsing.pxd @@ -64,6 +64,8 @@ cdef expect_ellipsis(PyrexScanner s) cdef make_slice_nodes(pos, subscripts) cpdef make_slice_node(pos, start, stop = *, step = *) cdef p_atom(PyrexScanner s) +cdef parse_atom_string(pos, PyrexScanner s): +cdef parse_atom_ident_constants(pos, PyrexScanner s): @cython.locals(value=unicode) cdef p_int_literal(PyrexScanner s) cdef p_name(PyrexScanner s, name) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 94efe2c6b..2a6975b0b 100644 --- a/Cython/Compiler/Parsing.py +++ 
b/Cython/Compiler/Parsing.py @@ -735,36 +735,51 @@ def p_atom(s): s.next() return ExprNodes.ImagNode(pos, value = value) elif sy == 'BEGIN_STRING': - kind, bytes_value, unicode_value = p_cat_string_literal(s) - if kind == 'c': - return ExprNodes.CharNode(pos, value = bytes_value) - elif kind == 'u': - return ExprNodes.UnicodeNode(pos, value = unicode_value, bytes_value = bytes_value) - elif kind == 'b': - return ExprNodes.BytesNode(pos, value = bytes_value) - elif kind == 'f': - return ExprNodes.JoinedStrNode(pos, values = unicode_value) - elif kind == '': - return ExprNodes.StringNode(pos, value = bytes_value, unicode_value = unicode_value) - else: - s.error("invalid string kind '%s'" % kind) + return parse_atom_string(pos, s) elif sy == 'IDENT': - name = s.systring - if name == "None": - result = ExprNodes.NoneNode(pos) - elif name == "True": - result = ExprNodes.BoolNode(pos, value=True) - elif name == "False": - result = ExprNodes.BoolNode(pos, value=False) - elif name == "NULL" and not s.in_python_file: - result = ExprNodes.NullNode(pos) - else: - result = p_name(s, name) + result = parse_atom_ident_constants(pos, s) + if result is None: + result = p_name(s, name = s.systring) s.next() return result else: s.error("Expected an identifier or literal") + +def parse_atom_string(pos, s): + kind, bytes_value, unicode_value = p_cat_string_literal(s) + if kind == 'c': + return ExprNodes.CharNode(pos, value = bytes_value) + elif kind == 'u': + return ExprNodes.UnicodeNode(pos, value = unicode_value, bytes_value = bytes_value) + elif kind == 'b': + return ExprNodes.BytesNode(pos, value = bytes_value) + elif kind == 'f': + return ExprNodes.JoinedStrNode(pos, values = unicode_value) + elif kind == '': + return ExprNodes.StringNode(pos, value = bytes_value, unicode_value = unicode_value) + else: + s.error("invalid string kind '%s'" % kind) + + +def parse_atom_ident_constants(pos, s): + """ + Returns None if it isn't one special-cased named constants. 
+ Does not call s.next() + """ + name = s.systring + result = None + if name == "None": + result = ExprNodes.NoneNode(pos) + elif name == "True": + result = ExprNodes.BoolNode(pos, value=True) + elif name == "False": + result = ExprNodes.BoolNode(pos, value=False) + elif name == "NULL" and not s.in_python_file: + result = ExprNodes.NullNode(pos) + return result + + def p_int_literal(s): pos = s.position() value = s.systring @@ -4210,32 +4225,14 @@ def p_literal_pattern(s): if sy == 'BEGIN_STRING': if next_must_be_a_number: s.error("Expected a number") - kind, bytes_value, unicode_value = p_cat_string_literal(s) - if kind == 'c': - res = ExprNodes.CharNode(pos, value = bytes_value) - elif kind == 'u': - res = ExprNodes.UnicodeNode(pos, value = unicode_value, bytes_value = bytes_value) - elif kind == 'b': - res = ExprNodes.BytesNode(pos, value = bytes_value) - elif kind == 'f': + res = parse_atom_string(pos, s) + if isinstance(res, ExprNodes.JoinedStrNode): res = Nodes.ErrorNode(pos, what = "f-strings are not accepted for pattern matching") - elif kind == '': - res = ExprNodes.StringNode(pos, value = bytes_value, unicode_value = unicode_value) - else: - s.error("invalid string kind '%s'" % kind) return MatchCaseNodes.MatchValuePatternNode(pos, value = res) elif sy == 'IDENT': - name = s.systring - result = None - if name == "None": - result = ExprNodes.NoneNode(pos) - elif name == "True": - result = ExprNodes.BoolNode(pos, value=True) - elif name == "False": - result = ExprNodes.BoolNode(pos, value=False) - elif name == "NULL" and not s.in_python_file: - # Included Null as an exactly matched constant here - result = ExprNodes.NullNode(pos) + # Note that p_atom_ident_constants includes NULL. 
+ # This is a deliberate Cython addition to the pattern matching specification + result = parse_atom_ident_constants(pos, s) if result: s.next() return MatchCaseNodes.MatchValuePatternNode(pos, value = result, is_is_check = True) -- cgit v1.2.1 From 28c7ba703ec94181f9924f7f6c13dab3efa88402 Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 1 Aug 2022 19:09:31 +0100 Subject: PEP8 keyword argument spacing --- Cython/Compiler/Parsing.py | 52 +++++++++++++++++++++++----------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 2a6975b0b..3f78e4a79 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -739,7 +739,7 @@ def p_atom(s): elif sy == 'IDENT': result = parse_atom_ident_constants(pos, s) if result is None: - result = p_name(s, name = s.systring) + result = p_name(s, s.systring) s.next() return result else: @@ -749,15 +749,15 @@ def p_atom(s): def parse_atom_string(pos, s): kind, bytes_value, unicode_value = p_cat_string_literal(s) if kind == 'c': - return ExprNodes.CharNode(pos, value = bytes_value) + return ExprNodes.CharNode(pos, value=bytes_value) elif kind == 'u': - return ExprNodes.UnicodeNode(pos, value = unicode_value, bytes_value = bytes_value) + return ExprNodes.UnicodeNode(pos, value=unicode_value, bytes_value=bytes_value) elif kind == 'b': - return ExprNodes.BytesNode(pos, value = bytes_value) + return ExprNodes.BytesNode(pos, value=bytes_value) elif kind == 'f': - return ExprNodes.JoinedStrNode(pos, values = unicode_value) + return ExprNodes.JoinedStrNode(pos, values=unicode_value) elif kind == '': - return ExprNodes.StringNode(pos, value = bytes_value, unicode_value = unicode_value) + return ExprNodes.StringNode(pos, value=bytes_value, unicode_value=unicode_value) else: s.error("invalid string kind '%s'" % kind) @@ -4019,7 +4019,7 @@ def p_match_statement(s, ctx): while s.sy != "DEDENT": cases.append(p_case_block(s, ctx)) s.expect_dedent() - 
return MatchCaseNodes.MatchNode(pos, subject = subject, cases = cases) + return MatchCaseNodes.MatchNode(pos, subject=subject, cases=cases) def p_case_block(s, ctx): @@ -4051,7 +4051,7 @@ def p_patterns(s): break # all is good provided we have at least 1 pattern else: e = errors[0] - s.error(e.args[1], pos = e.args[0]) + s.error(e.args[1], pos=e.args[0]) patterns.append(pattern) if s.sy == ",": @@ -4063,7 +4063,7 @@ def p_patterns(s): break if seq: - return MatchCaseNodes.MatchSequencePatternNode(pos, patterns = patterns) + return MatchCaseNodes.MatchSequencePatternNode(pos, patterns=patterns) else: return patterns[0] @@ -4079,7 +4079,7 @@ def p_maybe_star_pattern(s): else: s.next() pattern = MatchCaseNodes.MatchAndAssignPatternNode( - s.position(), target = target, is_star = True + s.position(), target=target, is_star=True ) return pattern else: @@ -4102,7 +4102,7 @@ def p_pattern(s): if len(patterns) > 1: pattern = MatchCaseNodes.OrPatternNode( pos, - alternatives = patterns + alternatives=patterns ) else: pattern = patterns[0] @@ -4114,11 +4114,11 @@ def p_pattern(s): if errors and s.sy == "_": s.next() # make this a specific error - return Nodes.ErrorNode(errors[0].args[0], what = errors[0].args[1]) + return Nodes.ErrorNode(errors[0].args[0], what=errors[0].args[1]) elif errors: with tentatively_scan(s): expr = p_test(s) - return Nodes.ErrorNode(expr.pos, what = "Invalid pattern target") + return Nodes.ErrorNode(expr.pos, what="Invalid pattern target") s.error(errors[0]) return pattern @@ -4191,7 +4191,7 @@ def p_literal_pattern(s): elif sy == 'FLOAT': value = s.systring s.next() - res = ExprNodes.FloatNode(pos, value = value) + res = ExprNodes.FloatNode(pos, value=value) if res and sign == "-": res = ExprNodes.UnaryMinusNode(sign_pos, operand=res) @@ -4208,34 +4208,34 @@ def p_literal_pattern(s): res = ExprNodes.binop_node( add_pos, sign, - operand1 = res, - operand2 = ExprNodes.ImagNode(s.position(), value = value) + operand1=res, + 
operand2=ExprNodes.ImagNode(s.position(), value=value) ) if not res and sy == 'IMAG': value = s.systring[:-1] s.next() - res = ExprNodes.ImagNode(pos, value = sign+value) + res = ExprNodes.ImagNode(pos, value=sign+value) if sign == "-": res = ExprNodes.UnaryMinusNode(sign_pos, operand=res) if res: - return MatchCaseNodes.MatchValuePatternNode(pos, value = res) + return MatchCaseNodes.MatchValuePatternNode(pos, value=res) if sy == 'BEGIN_STRING': if next_must_be_a_number: s.error("Expected a number") res = parse_atom_string(pos, s) if isinstance(res, ExprNodes.JoinedStrNode): - res = Nodes.ErrorNode(pos, what = "f-strings are not accepted for pattern matching") - return MatchCaseNodes.MatchValuePatternNode(pos, value = res) + res = Nodes.ErrorNode(pos, what="f-strings are not accepted for pattern matching") + return MatchCaseNodes.MatchValuePatternNode(pos, value=res) elif sy == 'IDENT': # Note that p_atom_ident_constants includes NULL. # This is a deliberate Cython addition to the pattern matching specification result = parse_atom_ident_constants(pos, s) if result: s.next() - return MatchCaseNodes.MatchValuePatternNode(pos, value = result, is_is_check = True) + return MatchCaseNodes.MatchValuePatternNode(pos, value=result, is_is_check=True) s.error("Failed to match literal") @@ -4243,7 +4243,7 @@ def p_literal_pattern(s): def p_capture_pattern(s): return MatchCaseNodes.MatchAndAssignPatternNode( s.position(), - target = p_pattern_capture_target(s) + target=p_pattern_capture_target(s) ) @@ -4259,10 +4259,10 @@ def p_value_pattern(s): attr_pos = s.position() s.next() attr = p_ident(s) - res = ExprNodes.AttributeNode(attr_pos, obj = res, attribute = attr) + res = ExprNodes.AttributeNode(attr_pos, obj=res, attribute=attr) if s.sy in ['(', '=']: s.error("Unexpected symbol '%s'" % s.sy) - return MatchCaseNodes.MatchValuePatternNode(pos, value = res) + return MatchCaseNodes.MatchValuePatternNode(pos, value=res) def p_group_pattern(s): @@ -4357,7 +4357,7 @@ def 
p_class_pattern(s): attr_pos = s.position() s.next() attr = p_ident(s) - res = ExprNodes.AttributeNode(attr_pos, obj = res, attribute=attr) + res = ExprNodes.AttributeNode(attr_pos, obj=res, attribute=attr) class_ = res s.expect("(") @@ -4390,7 +4390,7 @@ def p_class_pattern(s): if keyword_patterns_error is not None: return Nodes.ErrorNode( keyword_patterns_error, - what = "Positional patterns follow keyword patterns" + what="Positional patterns follow keyword patterns" ) return MatchCaseNodes.ClassPatternNode( pos, class_ = class_, -- cgit v1.2.1 From 039a2c244361da1a6a16f28dc6478dd78438d0d5 Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 1 Aug 2022 19:29:00 +0100 Subject: Validate f-string in post-parse --- Cython/Compiler/ParseTreeTransforms.py | 13 +++++++++++++ Cython/Compiler/Parsing.pxd | 4 ++-- Cython/Compiler/Parsing.py | 7 +++---- 3 files changed, 18 insertions(+), 6 deletions(-) diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py index 718b5fd85..a9a692cd3 100644 --- a/Cython/Compiler/ParseTreeTransforms.py +++ b/Cython/Compiler/ParseTreeTransforms.py @@ -397,6 +397,19 @@ class PostParse(ScopeTrackingTransform): self.visitchildren(node) return node + def visit_MatchValuePatternNode(self, node): + if isinstance(node.value, ExprNodes.JoinedStrNode): + error(node.value.pos, "f-strings are not accepted for pattern matching") + self.visitchildren(node) + return node + + def visit_MatchMappingPatternNode(self, node): + for key in node.keys: + if isinstance(key, ExprNodes.JoinedStrNode): + error(key.pos, "f-strings are not accepted for pattern matching") + self.visitchildren(node) + return node + class _AssignmentExpressionTargetNameFinder(TreeVisitor): def __init__(self): super(_AssignmentExpressionTargetNameFinder, self).__init__() diff --git a/Cython/Compiler/Parsing.pxd b/Cython/Compiler/Parsing.pxd index 17fe9f769..1df8530cb 100644 --- a/Cython/Compiler/Parsing.pxd +++ b/Cython/Compiler/Parsing.pxd @@ -64,8 +64,8 
@@ cdef expect_ellipsis(PyrexScanner s) cdef make_slice_nodes(pos, subscripts) cpdef make_slice_node(pos, start, stop = *, step = *) cdef p_atom(PyrexScanner s) -cdef parse_atom_string(pos, PyrexScanner s): -cdef parse_atom_ident_constants(pos, PyrexScanner s): +cdef parse_atom_string(pos, PyrexScanner s) +cdef parse_atom_ident_constants(pos, PyrexScanner s) @cython.locals(value=unicode) cdef p_int_literal(PyrexScanner s) cdef p_name(PyrexScanner s, name) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 3f78e4a79..70d4d26b3 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -4222,12 +4222,11 @@ def p_literal_pattern(s): if res: return MatchCaseNodes.MatchValuePatternNode(pos, value=res) + if next_must_be_a_number: + s.error("Expected a number") if sy == 'BEGIN_STRING': - if next_must_be_a_number: - s.error("Expected a number") res = parse_atom_string(pos, s) - if isinstance(res, ExprNodes.JoinedStrNode): - res = Nodes.ErrorNode(pos, what="f-strings are not accepted for pattern matching") + # f-strings not being accepted is validated in PostParse return MatchCaseNodes.MatchValuePatternNode(pos, value=res) elif sy == 'IDENT': # Note that p_atom_ident_constants includes NULL. 
-- cgit v1.2.1 From b4f84a95ef996a7944d63c1e616811f18a1c5779 Mon Sep 17 00:00:00 2001 From: Golden Rockefeller Date: Mon, 1 Aug 2022 18:07:54 -0400 Subject: [docs] Replace some cinit with init (#4642) * Replace unnecessary __cinit__ with __init__ in documentation examples * Use __cinit__ when necessary in the wrapping C++ examples * Modify existing Rectangle instead of creating a new Rectangle in the pointer to Rectangle example --- docs/examples/userguide/extension_types/dict_animal.pyx | 2 +- docs/examples/userguide/extension_types/extendable_animal.pyx | 2 +- docs/examples/userguide/sharing_declarations/shrubbing.pyx | 2 +- docs/examples/userguide/wrapping_CPlusPlus/rect.pyx | 2 +- docs/examples/userguide/wrapping_CPlusPlus/rect_ptr.pyx | 10 ++++++++-- .../userguide/wrapping_CPlusPlus/rect_with_attributes.pyx | 2 +- 6 files changed, 13 insertions(+), 7 deletions(-) diff --git a/docs/examples/userguide/extension_types/dict_animal.pyx b/docs/examples/userguide/extension_types/dict_animal.pyx index ec8cf6f9a..575b835e9 100644 --- a/docs/examples/userguide/extension_types/dict_animal.pyx +++ b/docs/examples/userguide/extension_types/dict_animal.pyx @@ -4,7 +4,7 @@ cdef class Animal: cdef int number_of_legs cdef dict __dict__ - def __cinit__(self, int number_of_legs): + def __init__(self, int number_of_legs): self.number_of_legs = number_of_legs diff --git a/docs/examples/userguide/extension_types/extendable_animal.pyx b/docs/examples/userguide/extension_types/extendable_animal.pyx index 417760efd..2ec165421 100644 --- a/docs/examples/userguide/extension_types/extendable_animal.pyx +++ b/docs/examples/userguide/extension_types/extendable_animal.pyx @@ -3,7 +3,7 @@ cdef class Animal: cdef int number_of_legs - def __cinit__(self, int number_of_legs): + def __init__(self, int number_of_legs): self.number_of_legs = number_of_legs diff --git a/docs/examples/userguide/sharing_declarations/shrubbing.pyx b/docs/examples/userguide/sharing_declarations/shrubbing.pyx index 
8598b5c98..91235e5ec 100644 --- a/docs/examples/userguide/sharing_declarations/shrubbing.pyx +++ b/docs/examples/userguide/sharing_declarations/shrubbing.pyx @@ -2,7 +2,7 @@ cdef class Shrubbery: - def __cinit__(self, int w, int l): + def __init__(self, int w, int l): self.width = w self.length = l diff --git a/docs/examples/userguide/wrapping_CPlusPlus/rect.pyx b/docs/examples/userguide/wrapping_CPlusPlus/rect.pyx index e7c4423ef..d8eec16ef 100644 --- a/docs/examples/userguide/wrapping_CPlusPlus/rect.pyx +++ b/docs/examples/userguide/wrapping_CPlusPlus/rect.pyx @@ -8,7 +8,7 @@ from Rectangle cimport Rectangle cdef class PyRectangle: cdef Rectangle c_rect # Hold a C++ instance which we're wrapping - def __cinit__(self, int x0, int y0, int x1, int y1): + def __init__(self, int x0, int y0, int x1, int y1): self.c_rect = Rectangle(x0, y0, x1, y1) def get_area(self): diff --git a/docs/examples/userguide/wrapping_CPlusPlus/rect_ptr.pyx b/docs/examples/userguide/wrapping_CPlusPlus/rect_ptr.pyx index 0c48689e7..ec4b34ab4 100644 --- a/docs/examples/userguide/wrapping_CPlusPlus/rect_ptr.pyx +++ b/docs/examples/userguide/wrapping_CPlusPlus/rect_ptr.pyx @@ -5,8 +5,14 @@ from Rectangle cimport Rectangle cdef class PyRectangle: cdef Rectangle*c_rect # hold a pointer to the C++ instance which we're wrapping - def __cinit__(self, int x0, int y0, int x1, int y1): - self.c_rect = new Rectangle(x0, y0, x1, y1) + def __cinit__(self): + self.c_rect = new Rectangle() + + def __init__(self, int x0, int y0, int x1, int y1): + self.c_rect.x0 = x0 + self.c_rect.y0 = y0 + self.c_rect.x1 = x1 + self.c_rect.y1 = y1 def __dealloc__(self): del self.c_rect diff --git a/docs/examples/userguide/wrapping_CPlusPlus/rect_with_attributes.pyx b/docs/examples/userguide/wrapping_CPlusPlus/rect_with_attributes.pyx index 1bac30dec..441292ace 100644 --- a/docs/examples/userguide/wrapping_CPlusPlus/rect_with_attributes.pyx +++ b/docs/examples/userguide/wrapping_CPlusPlus/rect_with_attributes.pyx @@ -5,7 +5,7 
@@ from Rectangle cimport Rectangle cdef class PyRectangle: cdef Rectangle c_rect - def __cinit__(self, int x0, int y0, int x1, int y1): + def __init__(self, int x0, int y0, int x1, int y1): self.c_rect = Rectangle(x0, y0, x1, y1) def get_area(self): -- cgit v1.2.1 From 7034c959be052664f557128a9299f537b6632582 Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 2 Aug 2022 07:14:35 +0100 Subject: Apply suggestions from code review Co-authored-by: scoder --- Cython/Compiler/Parsing.py | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 70d4d26b3..d93b4aa64 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -4289,7 +4289,7 @@ def p_sequence_pattern(s): if s.sy == closer: break else: - if opener == ')' and len(patterns)==1: + if opener == ')' and len(patterns) == 1: s.error("tuple-like pattern of length 1 must finish with ','") break s.expect(closer) @@ -4326,12 +4326,11 @@ def p_mapping_pattern(s): s.expect(':') value = p_pattern(s) items_patterns.append((key, value)) - if s.sy==',': - s.next() - else: - break - if s.sy=='}': + if s.sy != ',': break + s.next() + if s.sy == '}': + break # Allow trailing comma. s.expect('}') if star_star_arg_pos is not None: @@ -4378,12 +4377,11 @@ def p_class_pattern(s): else: with tentatively_scan(s) as errors: keyword_patterns.append(p_keyword_pattern(s)) - if s.sy == ",": - s.next() - if s.sy == ")": - break - else: + if s.sy != ",": break + s.next() + if s.sy == ")": + break # Allow trailing comma. 
s.expect(")") if keyword_patterns_error is not None: -- cgit v1.2.1 From bf6362634eecec89f8907e79d7bc68bed0267838 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Tue, 2 Aug 2022 10:38:24 +0200 Subject: IpythonMagic: Replace deprecated imp.load_dynamic() by importlib (GH-4941) --- Cython/Build/Inline.py | 11 ++++++----- Cython/Build/IpythonMagic.py | 7 +++---- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Cython/Build/Inline.py b/Cython/Build/Inline.py index ddb59585e..1120f89fc 100644 --- a/Cython/Build/Inline.py +++ b/Cython/Build/Inline.py @@ -41,11 +41,12 @@ if sys.version_info < (3, 5): def load_dynamic(name, module_path): return imp.load_dynamic(name, module_path) else: - import importlib.util as _importlib_util - def load_dynamic(name, module_path): - spec = _importlib_util.spec_from_file_location(name, module_path) - module = _importlib_util.module_from_spec(spec) - # sys.modules[name] = module + import importlib.util + from importlib.machinery import ExtensionFileLoader + + def load_dynamic(name, path): + spec = importlib.util.spec_from_file_location(name, loader=ExtensionFileLoader(name, path)) + module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) return module diff --git a/Cython/Build/IpythonMagic.py b/Cython/Build/IpythonMagic.py index 36031a78c..3fa43c96d 100644 --- a/Cython/Build/IpythonMagic.py +++ b/Cython/Build/IpythonMagic.py @@ -46,7 +46,6 @@ Parts of this code were taken from Cython.inline. 
from __future__ import absolute_import, print_function -import imp import io import os import re @@ -75,7 +74,7 @@ from IPython.utils.text import dedent from ..Shadow import __version__ as cython_version from ..Compiler.Errors import CompileError -from .Inline import cython_inline +from .Inline import cython_inline, load_dynamic from .Dependencies import cythonize from ..Utils import captured_fd, print_captured @@ -357,7 +356,7 @@ class CythonMagics(Magics): # Build seems ok, but we might still want to show any warnings that occurred print_compiler_output(get_stdout(), get_stderr(), sys.stdout) - module = imp.load_dynamic(module_name, module_path) + module = load_dynamic(module_name, module_path) self._import_all(module) if args.annotate: @@ -420,7 +419,7 @@ class CythonMagics(Magics): # import and execute module code to generate profile so_module_path = os.path.join(lib_dir, pgo_module_name + self.so_ext) - imp.load_dynamic(pgo_module_name, so_module_path) + load_dynamic(pgo_module_name, so_module_path) def _cythonize(self, module_name, code, lib_dir, args, quiet=True): pyx_file = os.path.join(lib_dir, module_name + '.pyx') -- cgit v1.2.1 From 816612e35e05320bf510132c765dde7d4133b2ab Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 2 Aug 2022 21:03:59 +0100 Subject: Parsing function names/signatures, flag for in_pattern_node --- Cython/Compiler/ParseTreeTransforms.py | 15 ++++++++------- Cython/Compiler/Parsing.pxd | 4 ++-- Cython/Compiler/Parsing.py | 21 ++++++++++++--------- 3 files changed, 22 insertions(+), 18 deletions(-) diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py index a9a692cd3..6d362ae31 100644 --- a/Cython/Compiler/ParseTreeTransforms.py +++ b/Cython/Compiler/ParseTreeTransforms.py @@ -186,6 +186,8 @@ class PostParse(ScopeTrackingTransform): - Some invalid uses of := assignment expressions are detected """ + in_pattern_node = False + def __init__(self, context): super(PostParse, self).__init__(context) 
self.specialattribute_handlers = { @@ -397,16 +399,15 @@ class PostParse(ScopeTrackingTransform): self.visitchildren(node) return node - def visit_MatchValuePatternNode(self, node): - if isinstance(node.value, ExprNodes.JoinedStrNode): - error(node.value.pos, "f-strings are not accepted for pattern matching") + def visit_PatternNode(self, node): + in_pattern_node, self.in_pattern_node = self.in_pattern_node, True self.visitchildren(node) + self.in_pattern_node = in_pattern_node return node - def visit_MatchMappingPatternNode(self, node): - for key in node.keys: - if isinstance(key, ExprNodes.JoinedStrNode): - error(key.pos, "f-strings are not accepted for pattern matching") + def visit_JoinedStrNode(self, node): + if self.in_pattern_node: + error(node.pos, "f-strings are not accepted for pattern matching") self.visitchildren(node) return node diff --git a/Cython/Compiler/Parsing.pxd b/Cython/Compiler/Parsing.pxd index 1df8530cb..a25652e2c 100644 --- a/Cython/Compiler/Parsing.pxd +++ b/Cython/Compiler/Parsing.pxd @@ -64,8 +64,8 @@ cdef expect_ellipsis(PyrexScanner s) cdef make_slice_nodes(pos, subscripts) cpdef make_slice_node(pos, start, stop = *, step = *) cdef p_atom(PyrexScanner s) -cdef parse_atom_string(pos, PyrexScanner s) -cdef parse_atom_ident_constants(pos, PyrexScanner s) +cdef p_atom_string(PyrexScanner s) +cdef p_atom_ident_constants(PyrexScanner s) @cython.locals(value=unicode) cdef p_int_literal(PyrexScanner s) cdef p_name(PyrexScanner s, name) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index d93b4aa64..291e1ceea 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -735,18 +735,19 @@ def p_atom(s): s.next() return ExprNodes.ImagNode(pos, value = value) elif sy == 'BEGIN_STRING': - return parse_atom_string(pos, s) + return p_atom_string(s) elif sy == 'IDENT': - result = parse_atom_ident_constants(pos, s) + result = p_atom_ident_constants(s) if result is None: result = p_name(s, s.systring) - s.next() + 
s.next() return result else: s.error("Expected an identifier or literal") -def parse_atom_string(pos, s): +def p_atom_string(s): + pos = s.position() kind, bytes_value, unicode_value = p_cat_string_literal(s) if kind == 'c': return ExprNodes.CharNode(pos, value=bytes_value) @@ -762,11 +763,12 @@ def parse_atom_string(pos, s): s.error("invalid string kind '%s'" % kind) -def parse_atom_ident_constants(pos, s): +def p_atom_ident_constants(s): """ Returns None if it isn't one special-cased named constants. - Does not call s.next() + Only calls s.next() if it successfully matches a matches. """ + pos = s.position() name = s.systring result = None if name == "None": @@ -777,6 +779,8 @@ def parse_atom_ident_constants(pos, s): result = ExprNodes.BoolNode(pos, value=False) elif name == "NULL" and not s.in_python_file: result = ExprNodes.NullNode(pos) + if result: + s.next() return result @@ -4225,15 +4229,14 @@ def p_literal_pattern(s): if next_must_be_a_number: s.error("Expected a number") if sy == 'BEGIN_STRING': - res = parse_atom_string(pos, s) + res = p_atom_string(s) # f-strings not being accepted is validated in PostParse return MatchCaseNodes.MatchValuePatternNode(pos, value=res) elif sy == 'IDENT': # Note that p_atom_ident_constants includes NULL. 
# This is a deliberate Cython addition to the pattern matching specification - result = parse_atom_ident_constants(pos, s) + result = p_atom_ident_constants(s) if result: - s.next() return MatchCaseNodes.MatchValuePatternNode(pos, value=result, is_is_check=True) s.error("Failed to match literal") -- cgit v1.2.1 From f51d258e74ea0ab073507f05e5f1550cc457c8d7 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Tue, 2 Aug 2022 22:45:08 +0200 Subject: Use annotation for pointer in docs (#4942) and remove note about bug --- docs/examples/tutorial/clibraries/queue.py | 2 +- docs/src/tutorial/clibraries.rst | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/docs/examples/tutorial/clibraries/queue.py b/docs/examples/tutorial/clibraries/queue.py index 45529fa94..e99b9b32c 100644 --- a/docs/examples/tutorial/clibraries/queue.py +++ b/docs/examples/tutorial/clibraries/queue.py @@ -2,7 +2,7 @@ from cython.cimports import cqueue @cython.cclass class Queue: - _c_queue = cython.declare(cython.pointer(cqueue.Queue)) + _c_queue: cython.pointer(cqueue.Queue) def __cinit__(self): self._c_queue = cqueue.queue_new() diff --git a/docs/src/tutorial/clibraries.rst b/docs/src/tutorial/clibraries.rst index ddc02f443..5b8c545b8 100644 --- a/docs/src/tutorial/clibraries.rst +++ b/docs/src/tutorial/clibraries.rst @@ -125,9 +125,6 @@ Here is a first start for the Queue class: .. literalinclude:: ../../examples/tutorial/clibraries/queue.py :caption: queue.py - .. note:: Currently, Cython contains a bug not allowing using - annotations with types containing pointers (GitHub issue :issue:`4293`). - .. group-tab:: Cython .. 
literalinclude:: ../../examples/tutorial/clibraries/queue.pyx -- cgit v1.2.1 From 6a71aa89a88f0ce86d2ede1c70cf7bff02f16900 Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 2 Aug 2022 21:49:59 +0100 Subject: Add flag to compiled PostParse --- Cython/Compiler/ParseTreeTransforms.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py index 6d362ae31..8bad4f8bc 100644 --- a/Cython/Compiler/ParseTreeTransforms.py +++ b/Cython/Compiler/ParseTreeTransforms.py @@ -186,13 +186,12 @@ class PostParse(ScopeTrackingTransform): - Some invalid uses of := assignment expressions are detected """ - in_pattern_node = False - def __init__(self, context): super(PostParse, self).__init__(context) self.specialattribute_handlers = { '__cythonbufferdefaults__' : self.handle_bufferdefaults } + self.in_pattern_node = False def visit_LambdaNode(self, node): # unpack a lambda expression into the corresponding DefNode -- cgit v1.2.1 From be4f81be305e14157421a95718557196e4d19abc Mon Sep 17 00:00:00 2001 From: Thomas Li <47963215+lithomas1@users.noreply.github.com> Date: Tue, 2 Aug 2022 22:21:46 -0700 Subject: Add GHA workflow to build wheels using cibuildwheel (GH-4784) Only Python >= 3.6 is supported with this as of now (hopefully that is also the case for Cython 3.1). Closes https://github.com/cython/cython/issues/4736 Closes https://github.com/cython/cython/issues/3916 --- .github/workflows/wheels.yml | 111 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 111 insertions(+) create mode 100644 .github/workflows/wheels.yml diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml new file mode 100644 index 000000000..cacc7b833 --- /dev/null +++ b/.github/workflows/wheels.yml @@ -0,0 +1,111 @@ +# Workflow to build wheels for upload to PyPI. +# While this workflow is not active right now, +# it will be used for Cython 3.1 after Python 2.7 +# support is dropped. 
+# +# In an attempt to save CI resources, wheel builds do +# not run by default. +# Wheel builds can be triggered from the Actions page +# (if you have the perms) on a commit to master. +# +# Alternatively, if you would like to trigger wheel builds +# on a pull request, the labels that trigger builds are: +# - Build System +name: Wheel Builder +on: + schedule: + # ┌───────────── minute (0 - 59) + # │ ┌───────────── hour (0 - 23) + # │ │ ┌───────────── day of the month (1 - 31) + # │ │ │ ┌───────────── month (1 - 12 or JAN-DEC) + # │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT) + # │ │ │ │ │ + - cron: "42 1 * * 4" + pull_request: + types: [labeled, opened, synchronize, reopened] + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + build_wheels: + name: Build wheel for ${{ matrix.python }}-${{ matrix.buildplat[1] }} + if: >- + github.event_name == 'schedule' || + github.event_name == 'workflow_dispatch' || + (github.event_name == 'pull_request' && + contains(github.event.pull_request.labels.*.name, 'Build System')) + runs-on: ${{ matrix.buildplat[0] }} + strategy: + # Ensure that a wheel builder finishes even if another fails + fail-fast: false + matrix: + # Github Actions doesn't support pairing matrix values together, let's improvise + # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026 + buildplat: + - [ubuntu-20.04, manylinux_x86_64] + - [ubuntu-20.04, manylinux_aarch64] + - [ubuntu-20.04, manylinux_i686] + - [ubuntu-20.04, musllinux_x86_64] + - [ubuntu-20.04, musllinux_aarch64] + - [macos-11, macosx_*] + - [windows-2019, win_amd64] + - [windows-2019, win32] + python: ["cp36", "cp37", "cp38", "cp39", "cp310", "cp311"] # Note: Wheels not needed for PyPy + steps: + - name: Checkout Cython + uses: actions/checkout@v3 + + - name: Set up QEMU + if: contains(matrix.buildplat[1], '_aarch64') + uses: docker/setup-qemu-action@v1 + with: + 
platforms: all + + - name: Build wheels + uses: pypa/cibuildwheel@v2.8.1 + env: + # TODO: Build Cython with the compile-all flag? + # Unfortunately, there is no way to modify cibuildwheel's build command + # so there is no way to pass this in directly. + # This would require modifying cython's setup.py to look for these flags + # in env vars. + CIBW_BUILD: ${{ matrix.python }}-${{ matrix.buildplat[1] }} + CIBW_PRERELEASE_PYTHONS: True + CIBW_ARCHS_LINUX: auto aarch64 + CIBW_ENVIRONMENT: CFLAGS='-O3 -g0 -mtune=generic -pipe -fPIC' LDFLAGS='-fPIC' + # TODO: Cython tests take a long time to complete + # consider running a subset in the future? + #CIBW_TEST_COMMAND: python {project}/runtests.py -vv --no-refnanny + + - uses: actions/upload-artifact@v3 + with: + name: ${{ matrix.python }}-${{ startsWith(matrix.buildplat[1], 'macosx') && 'macosx' || matrix.buildplat[1] }} + path: ./wheelhouse/*.whl + build_sdist_pure_wheel: + name: Build sdist and pure wheel + runs-on: ubuntu-latest + steps: + - name: Checkout Cython + uses: actions/checkout@v3 + # Used to push the built wheels + - uses: actions/setup-python@v3 + with: + # Build sdist on lowest supported Python + python-version: '3.8' + - name: Build sdist + run: | + pip install --upgrade wheel setuptools + python setup.py sdist + python setup.py bdist_wheel --no-cython-compile --universal + - uses: actions/upload-artifact@v3 + with: + name: sdist + path: ./dist/*.tar.gz + - uses: actions/upload-artifact@v3 + with: + name: pure-wheel + path: ./dist/*.whl + -- cgit v1.2.1 From 4ef752766d8ac28177996d76ec14165fbac90f4e Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 3 Aug 2022 08:12:37 +0100 Subject: Actually add flag to pxd --- Cython/Compiler/ParseTreeTransforms.pxd | 1 + 1 file changed, 1 insertion(+) diff --git a/Cython/Compiler/ParseTreeTransforms.pxd b/Cython/Compiler/ParseTreeTransforms.pxd index 92f9b0601..b79a2492c 100644 --- a/Cython/Compiler/ParseTreeTransforms.pxd +++ b/Cython/Compiler/ParseTreeTransforms.pxd 
@@ -18,6 +18,7 @@ cdef class PostParse(ScopeTrackingTransform): cdef dict specialattribute_handlers cdef size_t lambda_counter cdef size_t genexpr_counter + cdef bint in_pattern_node cdef _visit_assignment_node(self, node, list expr_list) -- cgit v1.2.1 From fd8a826faecd8d541cc2455c976de21b1df229ff Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 3 Aug 2022 10:32:07 +0200 Subject: Upload wheels to GH-Releases after a release build. --- .github/workflows/wheels.yml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index cacc7b833..902f1a49f 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -13,6 +13,8 @@ # - Build System name: Wheel Builder on: + release: + types: [created] schedule: # ┌───────────── minute (0 - 59) # │ ┌───────────── hour (0 - 23) @@ -33,6 +35,7 @@ jobs: build_wheels: name: Build wheel for ${{ matrix.python }}-${{ matrix.buildplat[1] }} if: >- + github.event_name == 'release' || github.event_name == 'schedule' || github.event_name == 'workflow_dispatch' || (github.event_name == 'pull_request' && @@ -80,32 +83,55 @@ jobs: # consider running a subset in the future? 
#CIBW_TEST_COMMAND: python {project}/runtests.py -vv --no-refnanny + - name: Release + uses: softprops/action-gh-release@v1 + if: startsWith(github.ref, 'refs/tags/') + with: + files: | + dist/*manylinux*.whl + dist/*musllinux*.whl + dist/*macos*.whl + dist/*win32*.whl + dist/*win_amd64*.whl + - uses: actions/upload-artifact@v3 with: name: ${{ matrix.python }}-${{ startsWith(matrix.buildplat[1], 'macosx') && 'macosx' || matrix.buildplat[1] }} path: ./wheelhouse/*.whl + build_sdist_pure_wheel: name: Build sdist and pure wheel runs-on: ubuntu-latest steps: - name: Checkout Cython uses: actions/checkout@v3 + # Used to push the built wheels - uses: actions/setup-python@v3 with: # Build sdist on lowest supported Python python-version: '3.8' + - name: Build sdist run: | pip install --upgrade wheel setuptools python setup.py sdist python setup.py bdist_wheel --no-cython-compile --universal + - uses: actions/upload-artifact@v3 with: name: sdist path: ./dist/*.tar.gz + - uses: actions/upload-artifact@v3 with: name: pure-wheel path: ./dist/*.whl + - name: Release + uses: softprops/action-gh-release@v1 + if: startsWith(github.ref, 'refs/tags/') + with: + files: | + dist/*.tar.gz + dist/*-none-any.whl -- cgit v1.2.1 From d1db86b1e2560afe2e0ec4ce7d6c25628023e16b Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 3 Aug 2022 10:33:31 +0200 Subject: Delete outdated wheel workflow file. Replaced by wheels.yml. 
--- .github/workflows/wheel-manylinux.yml | 93 ----------------------------------- .github/workflows/wheels.yml | 6 +-- 2 files changed, 2 insertions(+), 97 deletions(-) delete mode 100644 .github/workflows/wheel-manylinux.yml diff --git a/.github/workflows/wheel-manylinux.yml b/.github/workflows/wheel-manylinux.yml deleted file mode 100644 index 7a2ecd926..000000000 --- a/.github/workflows/wheel-manylinux.yml +++ /dev/null @@ -1,93 +0,0 @@ -name: Linux wheel build - -on: - release: - types: [created] - -jobs: - python: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - name: Set up Python - uses: actions/setup-python@v3 - with: - python-version: "3.10" - - - name: Install build dependencies - run: pip install -U "setuptools<60" pip wheel - - - name: Make sdist and Python wheel - run: make sdist pywheel - - - name: Release - uses: softprops/action-gh-release@v1 - if: startsWith(github.ref, 'refs/tags/') - with: - files: | - dist/*.tar.gz - dist/*-none-any.whl - - - name: Upload sdist - uses: actions/upload-artifact@v3 - with: - name: sdist - path: dist/*.tar.gz - if-no-files-found: ignore - - - name: Upload Python wheel - uses: actions/upload-artifact@v3 - with: - name: wheel-Python - path: dist/*-none-any.whl - if-no-files-found: ignore - - binary: - strategy: - # Allows for matrix sub-jobs to fail without canceling the rest - fail-fast: false - - matrix: - image: - - manylinux2014_x86_64 - - manylinux2014_i686 - - musllinux_1_1_x86_64 - - musllinux_1_1_aarch64 - - manylinux_2_24_x86_64 - - manylinux_2_24_i686 - - manylinux_2_24_aarch64 - - manylinux_2_28_x86_64 - - manylinux_2_28_aarch64 - - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - - name: Set up Python 3.10 - uses: actions/setup-python@v3 - with: - python-version: "3.10" - - - name: Building wheel - run: | - make sdist wheel_${{ matrix.image }} - - - name: Copy wheels in dist - run: cp wheelhouse*/*.whl dist/ - - - name: Release - uses: softprops/action-gh-release@v1 
- if: startsWith(github.ref, 'refs/tags/') - with: - files: | - dist/*manylinux*.whl - dist/*musllinux*.whl - - - name: Archive Wheels - uses: actions/upload-artifact@v3 - with: - name: ${{ matrix.image }} - path: dist/*m[au][ns][yl]linux*.whl - if-no-files-found: ignore diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 902f1a49f..58266f96d 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -1,16 +1,14 @@ # Workflow to build wheels for upload to PyPI. -# While this workflow is not active right now, -# it will be used for Cython 3.1 after Python 2.7 -# support is dropped. # # In an attempt to save CI resources, wheel builds do -# not run by default. +# not run on each push but only weekly and for releases. # Wheel builds can be triggered from the Actions page # (if you have the perms) on a commit to master. # # Alternatively, if you would like to trigger wheel builds # on a pull request, the labels that trigger builds are: # - Build System + name: Wheel Builder on: release: -- cgit v1.2.1 From 5fbac16da26e1cbddeafd9fa9c98d31ce8b5e19d Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 3 Aug 2022 10:53:28 +0200 Subject: Allow passing Cython build options via env vars, not only via CLI. --- .github/workflows/wheels.yml | 4 ---- setup.py | 42 ++++++++++++++++-------------------------- 2 files changed, 16 insertions(+), 30 deletions(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 58266f96d..a6ce603d6 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -69,10 +69,6 @@ jobs: uses: pypa/cibuildwheel@v2.8.1 env: # TODO: Build Cython with the compile-all flag? - # Unfortunately, there is no way to modify cibuildwheel's build command - # so there is no way to pass this in directly. - # This would require modifying cython's setup.py to look for these flags - # in env vars. 
CIBW_BUILD: ${{ matrix.python }}-${{ matrix.buildplat[1] }} CIBW_PRERELEASE_PYTHONS: True CIBW_ARCHS_LINUX: auto aarch64 diff --git a/setup.py b/setup.py index 8d5089c18..c67aba9fc 100755 --- a/setup.py +++ b/setup.py @@ -183,37 +183,27 @@ def compile_cython_modules(profile=False, coverage=False, compile_minimal=False, setup_args['ext_modules'] = extensions -cython_profile = '--cython-profile' in sys.argv -if cython_profile: - sys.argv.remove('--cython-profile') +def check_option(name): + cli_arg = "--" + name + if cli_arg in sys.argv: + sys.argv.remove(cli_arg) + return True -cython_coverage = '--cython-coverage' in sys.argv -if cython_coverage: - sys.argv.remove('--cython-coverage') + env_var = name.replace("-", "_").upper() + if os.environ.get(env_var) == "true": + return True -try: - sys.argv.remove("--cython-compile-all") - cython_compile_more = True -except ValueError: - cython_compile_more = False + return False -try: - sys.argv.remove("--cython-compile-minimal") - cython_compile_minimal = True -except ValueError: - cython_compile_minimal = False -try: - sys.argv.remove("--cython-with-refnanny") - cython_with_refnanny = True -except ValueError: - cython_with_refnanny = False +cython_profile = check_option('cython-profile') +cython_coverage = check_option('cython-coverage') +cython_with_refnanny = check_option('cython-with-refnanny') -try: - sys.argv.remove("--no-cython-compile") - compile_cython_itself = False -except ValueError: - compile_cython_itself = True +compile_cython_itself = not check_option('no-cython-compile') +if compile_cython_itself: + cython_compile_more = check_option('cython-compile-all') + cython_compile_minimal = check_option('cython-compile-minimal') setup_args.update(setuptools_extra_args) -- cgit v1.2.1 From c1af54fb1d20cc14adca8c0f4fe60160ed6a1868 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 3 Aug 2022 10:55:17 +0200 Subject: Use latest Py3.10 for the "compile all" CI builds instead of 3.9. 
--- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4c2193c5d..a03a417cf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -68,12 +68,12 @@ jobs: env: { CYTHON_COMPILE_ALL: 1 } extra_hash: "-all" - os: ubuntu-18.04 - python-version: 3.9 + python-version: 3.10 backend: c env: { CYTHON_COMPILE_ALL: 1 } extra_hash: "-all" - os: ubuntu-18.04 - python-version: 3.9 + python-version: 3.10 backend: cpp env: { CYTHON_COMPILE_ALL: 1 } extra_hash: "-all" -- cgit v1.2.1 From a7dbdebdc076b18c7a4b442ee2523411184ac085 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 3 Aug 2022 11:04:40 +0200 Subject: Use "compile all" CI builds to create generic wheels to allow comparing their size to the standard wheels. --- .github/workflows/ci.yml | 2 +- Tools/ci-run.sh | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a03a417cf..a119d39a3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -260,7 +260,7 @@ jobs: - name: Upload wheels uses: actions/upload-artifact@v3 with: - name: wheels-${{ runner.os }} + name: wheels-${{ runner.os }}${{ matrix.extra_hash }} path: dist/*.whl if-no-files-found: ignore diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index da6a87c1c..b022eadb3 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -115,6 +115,8 @@ if [[ $OSTYPE == "msys" ]]; then # for MSVC cl # (off by default) 5045 warns that the compiler will insert Spectre mitigations for memory load if the /Qspectre switch is specified # (off by default) 4820 warns about the code in Python\3.9.6\x64\include ... 
CFLAGS="-Od /Z7 /W4 /wd4711 /wd4127 /wd5045 /wd4820" +elif [[ $CYTHON_COMPILE_ALL == "1" ]]; then + CFLAGS="-O3 -g0 -mtune=generic -Wall -Wextra" # make wheel sizes comparable to standard wheel build else CFLAGS="-O0 -ggdb -Wall -Wextra" fi @@ -143,8 +145,9 @@ if [[ $NO_CYTHON_COMPILE != "1" && $PYTHON_VERSION != "pypy"* ]]; then # STACKLESS can be either "" (empty or not set) or "true" (when we set it) # CYTHON_COMPILE_ALL can be either "" (empty or not set) or "1" (when we set it) if [[ $COVERAGE != "1" && $STACKLESS != "true" && $BACKEND != *"cpp"* && - $CYTHON_COMPILE_ALL != "1" && $LIMITED_API == "" && $EXTRA_CFLAGS == "" ]]; then + $LIMITED_API == "" && $EXTRA_CFLAGS == "" ]]; then python setup.py bdist_wheel || exit 1 + ls -l dist/ || true fi fi -- cgit v1.2.1 From ca889e1ebab3aa9ce549e574fd84e8b018ca1351 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 3 Aug 2022 11:11:18 +0200 Subject: Print ext module sizes in CI build. --- Tools/ci-run.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index b022eadb3..c97b9c87c 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -140,6 +140,7 @@ if [[ $NO_CYTHON_COMPILE != "1" && $PYTHON_VERSION != "pypy"* ]]; then CFLAGS=$BUILD_CFLAGS \ python setup.py build_ext -i $SETUP_ARGS || exit 1 + find Cython -name "*.so" -ls # COVERAGE can be either "" (empty or not set) or "1" (when we set it) # STACKLESS can be either "" (empty or not set) or "true" (when we set it) -- cgit v1.2.1 From baa0543cb105ca4a92a8d366b47b3e2282d02e1f Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 3 Aug 2022 11:16:02 +0200 Subject: CI: Fix Py3.10 version reference to prevent it from coming out as 3.1. 
--- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a119d39a3..ec8dc83e6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -68,12 +68,12 @@ jobs: env: { CYTHON_COMPILE_ALL: 1 } extra_hash: "-all" - os: ubuntu-18.04 - python-version: 3.10 + python-version: "3.10" backend: c env: { CYTHON_COMPILE_ALL: 1 } extra_hash: "-all" - os: ubuntu-18.04 - python-version: 3.10 + python-version: "3.10" backend: cpp env: { CYTHON_COMPILE_ALL: 1 } extra_hash: "-all" -- cgit v1.2.1 From 441869f51503d4f6ff9184d063e595d150a867fd Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 3 Aug 2022 11:38:55 +0200 Subject: CI: log how long it takes to build Cython for each of the configurations. --- Tools/ci-run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index c97b9c87c..0f6b6c88d 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -139,7 +139,7 @@ if [[ $NO_CYTHON_COMPILE != "1" && $PYTHON_VERSION != "pypy"* ]]; then $(python -c 'import sys; print("-j5" if sys.version_info >= (3,5) else "")')" CFLAGS=$BUILD_CFLAGS \ - python setup.py build_ext -i $SETUP_ARGS || exit 1 + time python setup.py build_ext -i $SETUP_ARGS || exit 1 find Cython -name "*.so" -ls # COVERAGE can be either "" (empty or not set) or "1" (when we set it) -- cgit v1.2.1 From 88a46f1184ffed326cafe5d7e879c664acbd2bf8 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 3 Aug 2022 11:44:45 +0200 Subject: Sort build log output to make the list of Cython's extension modules easier to compare. 
--- Tools/ci-run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index 0f6b6c88d..c40917e1b 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -140,7 +140,7 @@ if [[ $NO_CYTHON_COMPILE != "1" && $PYTHON_VERSION != "pypy"* ]]; then CFLAGS=$BUILD_CFLAGS \ time python setup.py build_ext -i $SETUP_ARGS || exit 1 - find Cython -name "*.so" -ls + find Cython -name "*.so" -ls | sort -k11 # COVERAGE can be either "" (empty or not set) or "1" (when we set it) # STACKLESS can be either "" (empty or not set) or "true" (when we set it) -- cgit v1.2.1 From a919d570bd463ab7a0d6c32836c3e41adf2a9810 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 3 Aug 2022 11:59:08 +0200 Subject: CI: Bring all file size output together at the end of the build part to help with finding it in the logs. --- Tools/ci-run.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index c40917e1b..0aea7902f 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -140,16 +140,17 @@ if [[ $NO_CYTHON_COMPILE != "1" && $PYTHON_VERSION != "pypy"* ]]; then CFLAGS=$BUILD_CFLAGS \ time python setup.py build_ext -i $SETUP_ARGS || exit 1 - find Cython -name "*.so" -ls | sort -k11 # COVERAGE can be either "" (empty or not set) or "1" (when we set it) # STACKLESS can be either "" (empty or not set) or "true" (when we set it) - # CYTHON_COMPILE_ALL can be either "" (empty or not set) or "1" (when we set it) if [[ $COVERAGE != "1" && $STACKLESS != "true" && $BACKEND != *"cpp"* && $LIMITED_API == "" && $EXTRA_CFLAGS == "" ]]; then python setup.py bdist_wheel || exit 1 ls -l dist/ || true fi + + echo "Extension modules created during the build:" + find Cython -name "*.so" -ls | sort -k11 fi if [[ $TEST_CODE_STYLE == "1" ]]; then -- cgit v1.2.1 From 51749704b10b65fe4a77b5f7df69e4270bf60d76 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 3 Aug 2022 12:06:36 +0200 Subject: Use -O3 C compiler flags only 
for the Cython build, not for the test file compilation. --- Tools/ci-run.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index 0aea7902f..f5f4c2d9e 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -115,8 +115,6 @@ if [[ $OSTYPE == "msys" ]]; then # for MSVC cl # (off by default) 5045 warns that the compiler will insert Spectre mitigations for memory load if the /Qspectre switch is specified # (off by default) 4820 warns about the code in Python\3.9.6\x64\include ... CFLAGS="-Od /Z7 /W4 /wd4711 /wd4127 /wd5045 /wd4820" -elif [[ $CYTHON_COMPILE_ALL == "1" ]]; then - CFLAGS="-O3 -g0 -mtune=generic -Wall -Wextra" # make wheel sizes comparable to standard wheel build else CFLAGS="-O0 -ggdb -Wall -Wextra" fi @@ -124,6 +122,9 @@ fi if [[ $NO_CYTHON_COMPILE != "1" && $PYTHON_VERSION != "pypy"* ]]; then BUILD_CFLAGS="$CFLAGS -O2" + if [[ $CYTHON_COMPILE_ALL == "1" -a $OSTYPE != "msys" ]]; then + BUILD_CFLAGS="$CFLAGS -O3 -g0 -mtune=generic" # make wheel sizes comparable to standard wheel build + fi if [[ $PYTHON_SYS_VERSION == "2"* ]]; then BUILD_CFLAGS="$BUILD_CFLAGS -fno-strict-aliasing" fi -- cgit v1.2.1 From 9d5787523d8110c4d7204df08baef1138a1f2259 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 3 Aug 2022 12:14:29 +0200 Subject: Minor code simplification. 
--- Cython/Compiler/ParseTreeTransforms.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py index 34fbce72f..fa9c3c571 100644 --- a/Cython/Compiler/ParseTreeTransforms.py +++ b/Cython/Compiler/ParseTreeTransforms.py @@ -2866,8 +2866,7 @@ class RemoveUnreachableCode(CythonTransform): if not self.current_directives['remove_unreachable']: return node self.visitchildren(node) - for idx, stat in enumerate(node.stats): - idx += 1 + for idx, stat in enumerate(node.stats, 1): if stat.is_terminator: if idx < len(node.stats): if self.current_directives['warn.unreachable']: -- cgit v1.2.1 From 16811213c6e21459582e29eb33f4a4fb80322b86 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 3 Aug 2022 12:42:46 +0200 Subject: CI: Fix syntax problem in build script. --- Tools/ci-run.sh | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index f5f4c2d9e..93282bc30 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -122,8 +122,10 @@ fi if [[ $NO_CYTHON_COMPILE != "1" && $PYTHON_VERSION != "pypy"* ]]; then BUILD_CFLAGS="$CFLAGS -O2" - if [[ $CYTHON_COMPILE_ALL == "1" -a $OSTYPE != "msys" ]]; then - BUILD_CFLAGS="$CFLAGS -O3 -g0 -mtune=generic" # make wheel sizes comparable to standard wheel build + if [[ $CYTHON_COMPILE_ALL == "1" ]]; then + if [[ $OSTYPE != "msys" ]]; then + BUILD_CFLAGS="$CFLAGS -O3 -g0 -mtune=generic" # make wheel sizes comparable to standard wheel build + fi fi if [[ $PYTHON_SYS_VERSION == "2"* ]]; then BUILD_CFLAGS="$BUILD_CFLAGS -fno-strict-aliasing" -- cgit v1.2.1 From 287a11e55259ffe01cf43a450159be7f946f1cfb Mon Sep 17 00:00:00 2001 From: Xavier <48094549+xavth@users.noreply.github.com> Date: Wed, 3 Aug 2022 16:23:10 +0200 Subject: Fix bug in cpp iteration over rvalue-dependant attribute (#3828) The implementation of the Python-style `for ... 
in` loop over an iterable cpp sequence (using `begin()` and `end()`) needs to determine whether the sequence should be stored in a temporary variable or if it can be iterated over directly. The previous code assumed that attributes are always safe to be iterated directly, but attributes can depend on an rvalue, as in: for i in returns_object_with_vector_attribute().vector: ... In such a case we need to store the `in` operand in a temporary value on which `begin()` and `end()` can be safely called. --- Cython/Compiler/ExprNodes.py | 26 +++++++++++++++-- tests/run/cpp_iterators.pyx | 33 ++++++++++++++++++++++ ...pp_iterators_over_attribute_of_rvalue_support.h | 11 ++++++++ 3 files changed, 68 insertions(+), 2 deletions(-) create mode 100644 tests/run/cpp_iterators_over_attribute_of_rvalue_support.h diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index b2990fdf0..bfe065171 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -3043,8 +3043,8 @@ class CppIteratorNode(ExprNode): def generate_result_code(self, code): sequence_type = self.sequence.type # essentially 3 options: - if self.sequence.is_name or self.sequence.is_attribute: - # 1) is a name and can be accessed directly; + if self.sequence.is_simple(): + # 1) Sequence can be accessed directly, like a name; # assigning to it may break the container, but that's the responsibility # of the user code.putln("%s = %s%sbegin();" % (self.result(), @@ -3084,6 +3084,28 @@ class CppIteratorNode(ExprNode): self.result())) code.putln("++%s%s;" % (self.extra_dereference, self.result())) + def generate_subexpr_disposal_code(self, code): + if not self.cpp_sequence_cname: + # the sequence is accessed directly so any temporary result in its + # subexpressions must remain available until the iterator is not needed + return + ExprNode.generate_subexpr_disposal_code(self, code) + + def free_subexpr_temps(self, code): + if not self.cpp_sequence_cname: + # the sequence is accessed 
directly so any temporary result in its + # subexpressions must remain available until the iterator is not needed + return + ExprNode.free_subexpr_temps(self, code) + + def generate_disposal_code(self, code): + if not self.cpp_sequence_cname: + # postponed from CppIteratorNode.generate_subexpr_disposal_code + # and CppIteratorNode.free_subexpr_temps + ExprNode.generate_subexpr_disposal_code(self, code) + ExprNode.free_subexpr_temps(self, code) + ExprNode.generate_disposal_code(self, code) + def free_temps(self, code): if self.cpp_sequence_cname: code.funcstate.release_temp(self.cpp_sequence_cname) diff --git a/tests/run/cpp_iterators.pyx b/tests/run/cpp_iterators.pyx index 850632581..f2171aae5 100644 --- a/tests/run/cpp_iterators.pyx +++ b/tests/run/cpp_iterators.pyx @@ -201,3 +201,36 @@ def test_const_iterator_calculations(py_v): first == clast, last == cfirst ] + +cdef extern from "cpp_iterators_over_attribute_of_rvalue_support.h": + cdef cppclass HasIterableAttribute: + vector[int] vec + HasIterableAttribute() + HasIterableAttribute(vector[int]) + +cdef HasIterableAttribute get_object_with_iterable_attribute(): + return HasIterableAttribute() + +def test_iteration_over_attribute_of_call(): + """ + >>> test_iteration_over_attribute_of_call() + 1 + 2 + 3 + 42 + 43 + 44 + 1 + 2 + 3 + """ + for i in HasIterableAttribute().vec: + print(i) + cdef vector[int] vec + for i in range(42, 45): + vec.push_back(i) + for i in HasIterableAttribute(vec).vec: + print(i) + for i in get_object_with_iterable_attribute().vec: + print(i) + diff --git a/tests/run/cpp_iterators_over_attribute_of_rvalue_support.h b/tests/run/cpp_iterators_over_attribute_of_rvalue_support.h new file mode 100644 index 000000000..b4a10b5be --- /dev/null +++ b/tests/run/cpp_iterators_over_attribute_of_rvalue_support.h @@ -0,0 +1,11 @@ +#include + +class HasIterableAttribute { +public: + std::vector vec; + HasIterableAttribute() { + for (int i = 1; i<=3; i++) + vec.push_back(i); + } + 
HasIterableAttribute(std::vector vec) : vec(vec) {} +}; -- cgit v1.2.1 From 8afd932c28d08428d45bba03d6b642093e4c973b Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Thu, 4 Aug 2022 10:34:45 +0200 Subject: Fix missing newlines in HTML annotation file, introduced in 0b4370678e5b00a020cd990f922964d3aba59884. Closes https://github.com/cython/cython/issues/4945 --- Cython/Compiler/Annotate.py | 4 ++-- Cython/Compiler/Code.pxd | 1 + Cython/Compiler/Code.py | 9 ++++++--- Cython/TestUtils.py | 43 ++++++++++++++++++++++++++++++------------- tests/run/annotate_html.pyx | 3 +++ 5 files changed, 42 insertions(+), 18 deletions(-) diff --git a/Cython/Compiler/Annotate.py b/Cython/Compiler/Annotate.py index 48e73f853..27564c21a 100644 --- a/Cython/Compiler/Annotate.py +++ b/Cython/Compiler/Annotate.py @@ -49,8 +49,8 @@ class AnnotationCCodeWriter(CCodeWriter): def create_new(self, create_from, buffer, copy_formatting): return AnnotationCCodeWriter(create_from, buffer, copy_formatting) - def write(self, s): - CCodeWriter.write(self, s) + def _write_to_buffer(self, s): + self.buffer.write(s) self.annotation_buffer.write(s) def mark_pos(self, pos, trace=True): diff --git a/Cython/Compiler/Code.pxd b/Cython/Compiler/Code.pxd index c22f7caa2..2887b44bc 100644 --- a/Cython/Compiler/Code.pxd +++ b/Cython/Compiler/Code.pxd @@ -111,6 +111,7 @@ cdef class CCodeWriter(object): cpdef write(self, s) cpdef write_lines(self, s) + cpdef _write_to_buffer(self, s) cpdef put(self, code) cpdef put_safe(self, code) cpdef putln(self, code=*, bint safe=*) diff --git a/Cython/Compiler/Code.py b/Cython/Compiler/Code.py index 056bfccfc..e0048b84a 100644 --- a/Cython/Compiler/Code.py +++ b/Cython/Compiler/Code.py @@ -1859,11 +1859,14 @@ class CCodeWriter(object): def getvalue(self): return self.buffer.getvalue() + def _write_to_buffer(self, s): + self.buffer.write(s) + def write(self, s): if '\n' in s: self.write_lines(s) else: - self.buffer.write(s) + self._write_to_buffer(s) def write_lines(self, s): 
# Cygdb needs to know which Cython source line corresponds to which C line. @@ -1872,7 +1875,7 @@ class CCodeWriter(object): filename_line = self.last_marked_pos[:2] if self.last_marked_pos else (None, 0) self.buffer.markers.extend([filename_line] * s.count('\n')) - self.buffer.write(s) + self._write_to_buffer(s) def insertion_point(self): other = self.create_new(create_from=self, buffer=self.buffer.insertion_point(), copy_formatting=True) @@ -2078,7 +2081,7 @@ class CCodeWriter(object): self.putln("}") def indent(self): - self.buffer.write(" " * self.level) + self._write_to_buffer(" " * self.level) def get_py_version_hex(self, pyversion): return "0x%02X%02X%02X%02X" % (tuple(pyversion) + (0,0,0,0))[:4] diff --git a/Cython/TestUtils.py b/Cython/TestUtils.py index 8328a3d6f..8bcd26b6f 100644 --- a/Cython/TestUtils.py +++ b/Cython/TestUtils.py @@ -168,7 +168,7 @@ class TransformTest(CythonTest): # Cython source code. Thus, we discard the comments before matching. # This seems a prime case for re.VERBOSE, but it seems to match some of the whitespace. _strip_c_comments = partial(re.compile( - re.sub('\s+', '', r''' + re.sub(r'\s+', '', r''' /[*] ( (?: [^*\n] | [*][^/] )* [\n] @@ -177,6 +177,14 @@ _strip_c_comments = partial(re.compile( ''') ).sub, '') +_strip_cython_code_from_html = partial(re.compile( + re.sub(r'\s\s+', '', r''' +
+        (?:[^<]|<(?!/pre))+
+        
+ ''') +).sub, '') + class TreeAssertVisitor(VisitorTransform): # actually, a TreeVisitor would be enough, but this needs to run @@ -198,6 +206,17 @@ class TreeAssertVisitor(VisitorTransform): file_path, )) + def validate_file_content(file_path, content): + for pattern in patterns: + #print("Searching pattern '%s'" % pattern) + if not re.search(pattern, content): + fail(self._module_pos, pattern, found=False, file_path=file_path) + + for antipattern in antipatterns: + #print("Searching antipattern '%s'" % antipattern) + if re.search(antipattern, content): + fail(self._module_pos, antipattern, found=True, file_path=file_path) + def validate_c_file(result): c_file = result.c_file if not (patterns or antipatterns): @@ -205,18 +224,16 @@ class TreeAssertVisitor(VisitorTransform): return result with open(c_file, encoding='utf8') as f: - c_content = f.read() - c_content = _strip_c_comments(c_content) - - for pattern in patterns: - #print("Searching pattern '%s'" % pattern) - if not re.search(pattern, c_content): - fail(self._module_pos, pattern, found=False, file_path=c_file) - - for antipattern in antipatterns: - #print("Searching antipattern '%s'" % antipattern) - if re.search(antipattern, c_content): - fail(self._module_pos, antipattern, found=True, file_path=c_file) + content = f.read() + content = _strip_c_comments(content) + validate_file_content(c_file, content) + + html_file = os.path.splitext(c_file)[0] + ".html" + if os.path.exists(html_file) and os.path.getmtime(c_file) <= os.path.getmtime(html_file): + with open(html_file, encoding='utf8') as f: + content = f.read() + content = _strip_cython_code_from_html(content) + validate_file_content(html_file, content) return validate_c_file diff --git a/tests/run/annotate_html.pyx b/tests/run/annotate_html.pyx index 3db7bf190..e98891b4f 100644 --- a/tests/run/annotate_html.pyx +++ b/tests/run/annotate_html.pyx @@ -1,3 +1,6 @@ +# cython: test_assert_c_code_has = Generated by Cython +# cython: test_assert_c_code_has = 
goto __pyx_L0;\n + """ >>> from codecs import open >>> import os.path as os_path -- cgit v1.2.1 From a97d91e8f78dc7adacd8cd6c786347cb9d0c584a Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Thu, 4 Aug 2022 19:48:52 +0200 Subject: Validate imported "cython.*" module names and reject unknown module names. Closes https://github.com/cython/cython/issues/4947 --- Cython/Compiler/ParseTreeTransforms.py | 44 ++++++++++++++++++++++-- tests/errors/e_invalid_special_cython_modules.py | 42 ++++++++++++++++++++++ tests/errors/e_pure_cimports.pyx | 3 +- 3 files changed, 86 insertions(+), 3 deletions(-) create mode 100644 tests/errors/e_invalid_special_cython_modules.py diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py index fa9c3c571..ae271094a 100644 --- a/Cython/Compiler/ParseTreeTransforms.py +++ b/Cython/Compiler/ParseTreeTransforms.py @@ -832,6 +832,14 @@ class InterpretCompilerDirectives(CythonTransform): } special_methods.update(unop_method_nodes) + valid_cython_submodules = { + 'cimports', + 'dataclasses', + 'operator', + 'parallel', + 'view', + } + valid_parallel_directives = { "parallel", "prange", @@ -860,6 +868,34 @@ class InterpretCompilerDirectives(CythonTransform): error(pos, "Invalid directive: '%s'." % (directive,)) return True + def _check_valid_cython_module(self, pos, module_name): + if not module_name.startswith("cython."): + return + if module_name.split('.', 2)[1] in self.valid_cython_submodules: + return + + extra = "" + # This is very rarely used, so don't waste space on static tuples. + hints = [ + line.split() for line in """\ + imp cimports + cimp cimports + para parallel + parra parallel + dataclass dataclasses + """.splitlines()[:-1] + ] + for wrong, correct in hints: + if module_name.startswith("cython." + wrong): + extra = "Did you mean 'cython.%s' ?" % correct + break + + error(pos, "'%s' is not a valid cython.* module%s%s" % ( + module_name, + ". 
" if extra else "", + extra, + )) + # Set up processing and handle the cython: comments. def visit_ModuleNode(self, node): for key in sorted(node.directive_comments): @@ -930,6 +966,9 @@ class InterpretCompilerDirectives(CythonTransform): elif module_name.startswith(u"cython."): if module_name.startswith(u"cython.parallel."): error(node.pos, node.module_name + " is not a module") + else: + self._check_valid_cython_module(node.pos, module_name) + if module_name == u"cython.parallel": if node.as_name and node.as_name != u"cython": self.parallel_directives[node.as_name] = module_name @@ -956,6 +995,7 @@ class InterpretCompilerDirectives(CythonTransform): node.pos, module_name, node.relative_level, node.imported_names) elif not node.relative_level and ( module_name == u"cython" or module_name.startswith(u"cython.")): + self._check_valid_cython_module(node.pos, module_name) submodule = (module_name + u".")[7:] newimp = [] @@ -995,6 +1035,7 @@ class InterpretCompilerDirectives(CythonTransform): return self._create_cimport_from_import( node.pos, module_name, import_node.level, imported_names) elif module_name == u"cython" or module_name.startswith(u"cython."): + self._check_valid_cython_module(import_node.module_name.pos, module_name) submodule = (module_name + u".")[7:] newimp = [] for name, name_node in node.items: @@ -1035,8 +1076,7 @@ class InterpretCompilerDirectives(CythonTransform): def visit_SingleAssignmentNode(self, node): if isinstance(node.rhs, ExprNodes.ImportNode): module_name = node.rhs.module_name.value - is_special_module = (module_name + u".").startswith((u"cython.parallel.", u"cython.cimports.")) - if module_name != u"cython" and not is_special_module: + if module_name != u"cython" and not module_name.startswith("cython."): return node node = Nodes.CImportStatNode(node.pos, module_name=module_name, as_name=node.lhs.name) diff --git a/tests/errors/e_invalid_special_cython_modules.py b/tests/errors/e_invalid_special_cython_modules.py new file mode 100644 
index 000000000..950df5c1c --- /dev/null +++ b/tests/errors/e_invalid_special_cython_modules.py @@ -0,0 +1,42 @@ +# mode: error +# tag: pure, import, cimport + +# nok + +import cython.imports.libc as libc_import +import cython.cimports.labc as labc_cimport + +from cython.imports import libc +from cython.cimport.libc import math +from cython.imports.libc import math +from cython.cimports.labc import math + +import cython.paralel +import cython.parrallel + +import cython.dataclass + +# ok +from cython.cimports.libc import math +from cython.cimports.libc.math import ceil + + +def libc_math_ceil(x): + """ + >>> libc_math_ceil(1.5) + [2, 2] + """ + return [int(n) for n in [ceil(x), math.ceil(x)]] + + +_ERRORS = """ +6:7: 'cython.imports.libc' is not a valid cython.* module. Did you mean 'cython.cimports' ? +7:7: 'labc.pxd' not found +9:0: 'cython.imports' is not a valid cython.* module. Did you mean 'cython.cimports' ? +10:0: 'cython.cimport.libc' is not a valid cython.* module. Did you mean 'cython.cimports' ? +11:0: 'cython.imports.libc' is not a valid cython.* module. Did you mean 'cython.cimports' ? +12:0: 'labc/math.pxd' not found +14:7: 'cython.paralel' is not a valid cython.* module. Did you mean 'cython.parallel' ? +15:7: 'cython.parrallel' is not a valid cython.* module. Did you mean 'cython.parallel' ? +17:7: 'cython.dataclass' is not a valid cython.* module. Did you mean 'cython.dataclasses' ? +""" diff --git a/tests/errors/e_pure_cimports.pyx b/tests/errors/e_pure_cimports.pyx index 231a95959..ef81182ad 100644 --- a/tests/errors/e_pure_cimports.pyx +++ b/tests/errors/e_pure_cimports.pyx @@ -1,7 +1,7 @@ # mode: error # tag: pure, import, cimport -import cython.cimportsy # FIXME: not currently an error? +import cython.cimportsy import cython.cimports import cython.cimports.libc @@ -20,6 +20,7 @@ from cython.cimports cimport libc _ERRORS = """ +4:7: 'cython.cimportsy' is not a valid cython.* module. Did you mean 'cython.cimports' ? 
6:7: Cannot cimport the 'cython.cimports' package directly, only submodules. 7:7: Python cimports must use 'from cython.cimports... import ...' or 'import ... as ...', not just 'import ...' 8:7: Cannot cimport the 'cython.cimports' package directly, only submodules. -- cgit v1.2.1 From 189f6684474b9d3c5e27696f2ed961657b951204 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Thu, 4 Aug 2022 21:36:18 +0200 Subject: Hide a C compiler "unused argument" warning in Py3.11 where the "fast thread state" usage is disabled. Closes https://github.com/cython/cython/issues/4948 --- Cython/Utility/Exceptions.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/Exceptions.c b/Cython/Utility/Exceptions.c index 2cd4b604e..7896d40dc 100644 --- a/Cython/Utility/Exceptions.c +++ b/Cython/Utility/Exceptions.c @@ -647,7 +647,7 @@ static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);/*proto*/ //@substitute: naming #ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) { +static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_line) { PyObject *use_cline; PyObject *ptype, *pvalue, *ptraceback; #if CYTHON_COMPILING_IN_CPYTHON -- cgit v1.2.1 From 0528b4c783bd46a6ab96b0bc92d6fd7da8ce0431 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Fri, 5 Aug 2022 08:15:29 +0200 Subject: CI: Disable the pure Python wheel builder unless we actually want wheels. 
--- .github/workflows/wheels.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index a6ce603d6..bd2aacf6a 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -95,6 +95,12 @@ jobs: build_sdist_pure_wheel: name: Build sdist and pure wheel + if: >- + github.event_name == 'release' || + github.event_name == 'schedule' || + github.event_name == 'workflow_dispatch' || + (github.event_name == 'pull_request' && + contains(github.event.pull_request.labels.*.name, 'Build System')) runs-on: ubuntu-latest steps: - name: Checkout Cython -- cgit v1.2.1 From 007f3869346817401204ab2b0129d2a7b4ba1843 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Fri, 5 Aug 2022 08:42:47 +0200 Subject: Prevent the wheel builder from triggering on unrelated PR changes. --- .github/workflows/wheels.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index bd2aacf6a..67913ff48 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -23,6 +23,11 @@ on: - cron: "42 1 * * 4" pull_request: types: [labeled, opened, synchronize, reopened] + paths: + #- Cython/Build/** + - .github/workflows/wheels.yml + - MANIFEST.in + - setup.* workflow_dispatch: concurrency: -- cgit v1.2.1 From cedf48e5b121b59146f8643e43f6a56fa1c3fb2d Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Fri, 5 Aug 2022 11:28:41 +0200 Subject: Let "cythonize --help" output the supported environment variables (GH-4952) Closes https://github.com/cython/cython/issues/1711 --- Cython/Build/Cythonize.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/Cython/Build/Cythonize.py b/Cython/Build/Cythonize.py index 1f79589f8..ab18a12bc 100644 --- a/Cython/Build/Cythonize.py +++ b/Cython/Build/Cythonize.py @@ -121,10 +121,18 @@ def run_distutils(args): def create_args_parser(): - from argparse import ArgumentParser + from argparse 
import ArgumentParser, RawDescriptionHelpFormatter from ..Compiler.CmdLine import ParseDirectivesAction, ParseOptionsAction, ParseCompileTimeEnvAction - parser = ArgumentParser() + parser = ArgumentParser( + formatter_class=RawDescriptionHelpFormatter, + epilog="""\ +Environment variables: + CYTHON_FORCE_REGEN: if set to 1, forces cythonize to regenerate the output files regardless + of modification times and changes. + Environment variables accepted by setuptools are supported to configure the C compiler and build: + https://setuptools.pypa.io/en/latest/userguide/ext_modules.html#compiler-and-linker-options""" + ) parser.add_argument('-X', '--directive', metavar='NAME=VALUE,...', dest='directives', default={}, type=str, -- cgit v1.2.1 From c656624a4fc638e4c0cc098dab0f59a6f8331b1c Mon Sep 17 00:00:00 2001 From: Eli Schwartz Date: Fri, 5 Aug 2022 05:49:39 -0400 Subject: [0.29] implement the --depfile command-line option for the "cython" tool (GH-4949) Backports https://github.com/cython/cython/pull/4916 --- Cython/Build/Dependencies.py | 19 +-------- Cython/Compiler/CmdLine.py | 5 ++- Cython/Compiler/Main.py | 5 +++ Cython/Utils.py | 21 +++++++++ tests/build/depfile_package.srctree | 57 ------------------------- tests/build/depfile_package_cython.srctree | 61 +++++++++++++++++++++++++++ tests/build/depfile_package_cythonize.srctree | 60 ++++++++++++++++++++++++++ 7 files changed, 152 insertions(+), 76 deletions(-) delete mode 100644 tests/build/depfile_package.srctree create mode 100644 tests/build/depfile_package_cython.srctree create mode 100644 tests/build/depfile_package_cythonize.srctree diff --git a/Cython/Build/Dependencies.py b/Cython/Build/Dependencies.py index 7eb55e260..1ba574d52 100644 --- a/Cython/Build/Dependencies.py +++ b/Cython/Build/Dependencies.py @@ -43,7 +43,7 @@ except: pythran = None from .. 
import Utils -from ..Utils import (cached_function, cached_method, path_exists, +from ..Utils import (cached_function, cached_method, path_exists, write_depfile, safe_makedirs, copy_file_to_dir_if_newer, is_package_dir, replace_suffix) from ..Compiler.Main import Context, CompilationOptions, default_options @@ -1030,22 +1030,7 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, # write out the depfile, if requested if depfile: dependencies = deps.all_dependencies(source) - src_base_dir, _ = os.path.split(source) - if not src_base_dir.endswith(os.sep): - src_base_dir += os.sep - # paths below the base_dir are relative, otherwise absolute - paths = [] - for fname in dependencies: - if (fname.startswith(src_base_dir) or - fname.startswith('.' + os.path.sep)): - paths.append(os.path.relpath(fname, src_base_dir)) - else: - paths.append(os.path.abspath(fname)) - - depline = os.path.split(c_file)[1] + ": \\\n " - depline += " \\\n ".join(paths) + "\n" - with open(c_file+'.dep', 'w') as outfile: - outfile.write(depline) + write_depfile(c_file, source, dependencies) if os.path.exists(c_file): c_timestamp = os.path.getmtime(c_file) diff --git a/Cython/Compiler/CmdLine.py b/Cython/Compiler/CmdLine.py index 9e2f8beb0..470fe6bd4 100644 --- a/Cython/Compiler/CmdLine.py +++ b/Cython/Compiler/CmdLine.py @@ -53,6 +53,7 @@ Options: --module-name Fully qualified module name. If not given, it is deduced from the import path if source file is in a package, or equals the filename otherwise. 
+ -M, --depfile Produce depfiles for the sources """ @@ -66,7 +67,6 @@ def bad_usage(): sys.stderr.write(usage) sys.exit(1) - def parse_command_line(args): from .Main import CompilationOptions, default_options @@ -195,6 +195,8 @@ def parse_command_line(args): sys.exit(1) elif option == "--module-name": options.module_name = pop_value() + elif option in ('-M', '--depfile'): + options.depfile = True elif option.startswith('--debug'): option = option[2:].replace('-', '_') from . import DebugFlags @@ -236,4 +238,3 @@ def parse_command_line(args): "cython: Only one source file allowed when using --module-name\n") sys.exit(1) return options, sources - diff --git a/Cython/Compiler/Main.py b/Cython/Compiler/Main.py index 128441da6..9c57452ba 100644 --- a/Cython/Compiler/Main.py +++ b/Cython/Compiler/Main.py @@ -514,6 +514,10 @@ def run_pipeline(source, options, full_module_name=None, context=None): context.setup_errors(options, result) err, enddata = Pipeline.run_pipeline(pipeline, source) context.teardown_errors(err, options, result) + if options.depfile: + from ..Build.Dependencies import create_dependency_tree + dependencies = create_dependency_tree(context).all_dependencies(result.main_source_file) + Utils.write_depfile(result.c_file, result.main_source_file, dependencies) return result @@ -881,6 +885,7 @@ default_options = dict( errors_to_stderr = 1, cplus = 0, output_file = None, + depfile = None, annotate = None, annotate_coverage_xml = None, generate_pxi = 0, diff --git a/Cython/Utils.py b/Cython/Utils.py index d59d67d78..69563794c 100644 --- a/Cython/Utils.py +++ b/Cython/Utils.py @@ -447,3 +447,24 @@ def build_hex_version(version_string): hexversion = (hexversion << 8) + digit return '0x%08X' % hexversion + + +def write_depfile(target, source, dependencies): + src_base_dir = os.path.dirname(source) + cwd = os.getcwd() + if not src_base_dir.endswith(os.sep): + src_base_dir += os.sep + # paths below the base_dir are relative, otherwise absolute + paths = [] + for 
fname in dependencies: + fname = os.path.abspath(fname) + if fname.startswith(src_base_dir): + paths.append(os.path.relpath(fname, cwd)) + else: + paths.append(fname) + + depline = os.path.relpath(target, cwd) + ": \\\n " + depline += " \\\n ".join(paths) + "\n" + + with open(target+'.dep', 'w') as outfile: + outfile.write(depline) diff --git a/tests/build/depfile_package.srctree b/tests/build/depfile_package.srctree deleted file mode 100644 index c1de7b868..000000000 --- a/tests/build/depfile_package.srctree +++ /dev/null @@ -1,57 +0,0 @@ -""" -PYTHON -m Cython.Build.Cythonize -i pkg --depfile -PYTHON package_test.py -""" - -######## package_test.py ######## - -import os.path - -with open(os.path.join("pkg", "test.c.dep"), "r") as f: - contents = f.read().replace("\\\n", " ").replace("\n", " ") - -assert sorted(contents.split()) == sorted(['test.c:', os.path.join('sub', 'incl.pxi'), 'test.pxd', 'test.pyx']), contents - - -with open(os.path.join("pkg", "sub", "test.c.dep"), "r") as f: - contents = f.read().replace("\\\n", " ").replace("\n", " ") - -contents = [os.path.relpath(entry, '.') - if os.path.isabs(entry) else entry for entry in contents.split()] -assert sorted(contents) == sorted(['test.c:', 'incl.pxi', 'test.pyx', os.path.join('..', 'test.pxd')]), contents - - -######## pkg/__init__.py ######## - - -######## pkg/test.pyx ######## - -TEST = "pkg.test" - -include "sub/incl.pxi" - -cdef object get_str(): - return TEST - - -######## pkg/test.pxd ######## - -cdef object get_str() - - -######## pkg/sub/__init__.py ######## - - -######## pkg/sub/test.pyx ######## -# cython: language_level=3 - -from ..test cimport get_str - -include 'incl.pxi' - -TEST = 'pkg.sub.test' - - -######## pkg/sub/incl.pxi ######## - -pass diff --git a/tests/build/depfile_package_cython.srctree b/tests/build/depfile_package_cython.srctree new file mode 100644 index 000000000..5a6674a3d --- /dev/null +++ b/tests/build/depfile_package_cython.srctree @@ -0,0 +1,61 @@ +""" +PYTHON -c 'import 
os; os.makedirs("builddir/pkg/sub")' +CYTHON -M pkg/test.pyx -o builddir/pkg/test.c +CYTHON --depfile pkg/sub/test.pyx -o builddir/pkg/sub/test.c +PYTHON check.py +""" + +######## check.py ######## + +import os.path + +def pkgpath(*args): + return os.path.join('pkg', *args) + +with open(os.path.join("builddir", "pkg", "test.c.dep"), "r") as f: + contents = f.read().replace("\\\n", " ").replace("\n", " ") + +assert sorted(contents.split()) == sorted([os.path.join('builddir', 'pkg', 'test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('test.pxd'), pkgpath('test.pyx')]), contents + + +with open(os.path.join("builddir", "pkg", "sub", "test.c.dep"), "r") as f: + contents = f.read().replace("\\\n", " ").replace("\n", " ") + +contents = [os.path.relpath(entry, '.') + if os.path.isabs(entry) else entry for entry in contents.split()] +assert sorted(contents) == sorted([os.path.join('builddir', 'pkg', 'sub', 'test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('sub', 'test.pyx')]), contents # last is really one level up + +######## pkg/__init__.py ######## + + +######## pkg/test.pyx ######## + +TEST = "pkg.test" + +include "sub/incl.pxi" + +cdef object get_str(): + return TEST + + +######## pkg/test.pxd ######## + +cdef object get_str() + + +######## pkg/sub/__init__.py ######## + + +######## pkg/sub/test.pyx ######## +# cython: language_level=3 + +from ..test cimport get_str + +include 'incl.pxi' + +TEST = 'pkg.sub.test' + + +######## pkg/sub/incl.pxi ######## + +pass diff --git a/tests/build/depfile_package_cythonize.srctree b/tests/build/depfile_package_cythonize.srctree new file mode 100644 index 000000000..0ad4cab78 --- /dev/null +++ b/tests/build/depfile_package_cythonize.srctree @@ -0,0 +1,60 @@ +""" +PYTHON -m Cython.Build.Cythonize -i pkg --depfile +PYTHON package_test.py +""" + +######## package_test.py ######## + +import os.path + +def pkgpath(*args): + return os.path.join('pkg', *args) + +with open(os.path.join("pkg", "test.c.dep"), "r") as f: + contents = 
f.read().replace("\\\n", " ").replace("\n", " ") + +assert sorted(contents.split()) == sorted([pkgpath('test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('test.pxd'), pkgpath('test.pyx')]), contents + + +with open(os.path.join("pkg", "sub", "test.c.dep"), "r") as f: + contents = f.read().replace("\\\n", " ").replace("\n", " ") + +contents = [os.path.relpath(entry, '.') + if os.path.isabs(entry) else entry for entry in contents.split()] +assert sorted(contents) == sorted([pkgpath('sub', 'test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('sub', 'test.pyx'), pkgpath('test.pxd')]), contents # last is really one level up + + +######## pkg/__init__.py ######## + + +######## pkg/test.pyx ######## + +TEST = "pkg.test" + +include "sub/incl.pxi" + +cdef object get_str(): + return TEST + + +######## pkg/test.pxd ######## + +cdef object get_str() + + +######## pkg/sub/__init__.py ######## + + +######## pkg/sub/test.pyx ######## +# cython: language_level=3 + +from ..test cimport get_str + +include 'incl.pxi' + +TEST = 'pkg.sub.test' + + +######## pkg/sub/incl.pxi ######## + +pass -- cgit v1.2.1 From ac97b861a8507f477e0dcce52d6f6332555b843a Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Fri, 5 Aug 2022 16:09:59 +0200 Subject: Reduce unnecessary code generation a little. --- Cython/Compiler/Visitor.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cython/Compiler/Visitor.py b/Cython/Compiler/Visitor.py index d9be14df1..23ff467db 100644 --- a/Cython/Compiler/Visitor.py +++ b/Cython/Compiler/Visitor.py @@ -306,8 +306,8 @@ class CythonTransform(VisitorTransform): self.context = context def __call__(self, node): - from . 
import ModuleNode - if isinstance(node, ModuleNode.ModuleNode): + from .ModuleNode import ModuleNode + if isinstance(node, ModuleNode): self.current_directives = node.directives return super(CythonTransform, self).__call__(node) -- cgit v1.2.1 From 9d2ba1611b28999663ab71657f4938b0ba92fe07 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Fri, 5 Aug 2022 22:54:19 +0200 Subject: CI: Remove "time" command that does not exist on Windows. --- Tools/ci-run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index 93282bc30..a6f45909b 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -142,7 +142,7 @@ if [[ $NO_CYTHON_COMPILE != "1" && $PYTHON_VERSION != "pypy"* ]]; then $(python -c 'import sys; print("-j5" if sys.version_info >= (3,5) else "")')" CFLAGS=$BUILD_CFLAGS \ - time python setup.py build_ext -i $SETUP_ARGS || exit 1 + python setup.py build_ext -i $SETUP_ARGS || exit 1 # COVERAGE can be either "" (empty or not set) or "1" (when we set it) # STACKLESS can be either "" (empty or not set) or "true" (when we set it) -- cgit v1.2.1 From bcff552078cc5b91c75a231e7823604e27809872 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Fri, 5 Aug 2022 23:08:20 +0200 Subject: Reformat and clarify a floating point calculation that didn't really look like one. 
--- Cython/Compiler/Annotate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Compiler/Annotate.py b/Cython/Compiler/Annotate.py index 27564c21a..8e8d2c4a8 100644 --- a/Cython/Compiler/Annotate.py +++ b/Cython/Compiler/Annotate.py @@ -73,7 +73,7 @@ class AnnotationCCodeWriter(CCodeWriter): """css template will later allow to choose a colormap""" css = [self._css_template] for i in range(255): - color = u"FFFF%02x" % int(255/(1+i/10.0)) + color = u"FFFF%02x" % int(255.0 // (1.0 + i/10.0)) css.append('.cython.score-%d {background-color: #%s;}' % (i, color)) try: from pygments.formatters import HtmlFormatter -- cgit v1.2.1 From 3c9fc5d23f457bca473252a2e79d4538830ea559 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sat, 6 Aug 2022 11:10:45 +0200 Subject: Simplify code. --- Cython/Compiler/MemoryView.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Compiler/MemoryView.py b/Cython/Compiler/MemoryView.py index 6df53dcb6..5ebd396be 100644 --- a/Cython/Compiler/MemoryView.py +++ b/Cython/Compiler/MemoryView.py @@ -295,7 +295,7 @@ class MemoryViewSliceBufferEntry(Buffer.BufferEntry): dim += 1 access, packing = self.type.axes[dim] - if isinstance(index, ExprNodes.SliceNode): + if index.is_slice: # slice, unspecified dimension, or part of ellipsis d = dict(locals()) for s in "start stop step".split(): -- cgit v1.2.1 From 65cd5eabed156ead3602ee5b96b4a0c7879483c8 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sat, 6 Aug 2022 11:11:12 +0200 Subject: Turn some often used CCodeWriter methods into (final) cdef methods to reduce their call overhead. 
--- Cython/Compiler/Code.pxd | 5 ++++- Cython/Compiler/Code.py | 20 ++++++++++---------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/Cython/Compiler/Code.pxd b/Cython/Compiler/Code.pxd index 2887b44bc..4601474b2 100644 --- a/Cython/Compiler/Code.pxd +++ b/Cython/Compiler/Code.pxd @@ -110,7 +110,8 @@ cdef class CCodeWriter(object): cdef bint bol cpdef write(self, s) - cpdef write_lines(self, s) + @cython.final + cdef _write_lines(self, s) cpdef _write_to_buffer(self, s) cpdef put(self, code) cpdef put_safe(self, code) @@ -119,6 +120,8 @@ cdef class CCodeWriter(object): cdef increase_indent(self) @cython.final cdef decrease_indent(self) + @cython.final + cdef indent(self) cdef class PyrexCodeWriter: diff --git a/Cython/Compiler/Code.py b/Cython/Compiler/Code.py index e0048b84a..d5db13c50 100644 --- a/Cython/Compiler/Code.py +++ b/Cython/Compiler/Code.py @@ -1859,16 +1859,13 @@ class CCodeWriter(object): def getvalue(self): return self.buffer.getvalue() - def _write_to_buffer(self, s): - self.buffer.write(s) - def write(self, s): if '\n' in s: - self.write_lines(s) + self._write_lines(s) else: self._write_to_buffer(s) - def write_lines(self, s): + def _write_lines(self, s): # Cygdb needs to know which Cython source line corresponds to which C line. # Therefore, we write this information into "self.buffer.markers" and then write it from there # into cython_debug/cython_debug_info_* (see ModuleNode._serialize_lineno_map). 
@@ -1877,6 +1874,9 @@ class CCodeWriter(object): self._write_to_buffer(s) + def _write_to_buffer(self, s): + self.buffer.write(s) + def insertion_point(self): other = self.create_new(create_from=self, buffer=self.buffer.insertion_point(), copy_formatting=True) return other @@ -1978,13 +1978,13 @@ class CCodeWriter(object): self.emit_marker() if self.code_config.emit_linenums and self.last_marked_pos: source_desc, line, _ = self.last_marked_pos - self.write_lines('\n#line %s "%s"\n' % (line, source_desc.get_escaped_description())) + self._write_lines('\n#line %s "%s"\n' % (line, source_desc.get_escaped_description())) if code: if safe: self.put_safe(code) else: self.put(code) - self.write_lines("\n") + self._write_lines("\n") self.bol = 1 def mark_pos(self, pos, trace=True): @@ -1998,13 +1998,13 @@ class CCodeWriter(object): pos, trace = self.last_pos self.last_marked_pos = pos self.last_pos = None - self.write_lines("\n") + self._write_lines("\n") if self.code_config.emit_code_comments: self.indent() - self.write_lines("/* %s */\n" % self._build_marker(pos)) + self._write_lines("/* %s */\n" % self._build_marker(pos)) if trace and self.funcstate and self.funcstate.can_trace and self.globalstate.directives['linetrace']: self.indent() - self.write_lines('__Pyx_TraceLine(%d,%d,%s)\n' % ( + self._write_lines('__Pyx_TraceLine(%d,%d,%s)\n' % ( pos[1], not self.funcstate.gil_owned, self.error_goto(pos))) def _build_marker(self, pos): -- cgit v1.2.1 From ff502b17d1ed96976b3593b44da273794fd713fa Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sat, 6 Aug 2022 11:11:29 +0200 Subject: Go back to compile Parsing.py and Code.py in the default binary wheel packages since it really brings some 10% improvement. Wheels are often cached, whereas CPU time is not. 
--- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index c67aba9fc..d0d862382 100755 --- a/setup.py +++ b/setup.py @@ -94,16 +94,16 @@ def compile_cython_modules(profile=False, coverage=False, compile_minimal=False, "Cython.Plex.Machines", "Cython.Plex.Transitions", "Cython.Plex.DFA", + "Cython.Compiler.Code", "Cython.Compiler.FusedNode", + "Cython.Compiler.Parsing", "Cython.Tempita._tempita", "Cython.StringIOTree", "Cython.Utils", ]) if compile_more and not compile_minimal: compiled_modules.extend([ - "Cython.Compiler.Code", "Cython.Compiler.Lexicon", - "Cython.Compiler.Parsing", "Cython.Compiler.Pythran", "Cython.Build.Dependencies", "Cython.Compiler.ParseTreeTransforms", -- cgit v1.2.1 From a0b5e955ccb1ee2a4d37f2f4896446932cf9aae2 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sat, 6 Aug 2022 11:41:10 +0200 Subject: Fix usage of undeclared variable that should have been a plain empty string (because there is no "__eq__" and thus no code to generate). --- Cython/Compiler/Dataclass.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Compiler/Dataclass.py b/Cython/Compiler/Dataclass.py index 87ef5e76a..ee9825565 100644 --- a/Cython/Compiler/Dataclass.py +++ b/Cython/Compiler/Dataclass.py @@ -470,7 +470,7 @@ def generate_cmp_code(op, funcname, node, fields): def generate_eq_code(eq, node, fields): if not eq: - return code_lines, {}, [] + return "", {}, [] return generate_cmp_code("==", "__eq__", node, fields) -- cgit v1.2.1 From 71db9f7f236f975ce6ea068317fefd25186e4bd8 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sat, 6 Aug 2022 11:45:51 +0200 Subject: Generate simpler Cython code that handles an invalid case first, before continuing with the normal cases. 
--- Cython/Compiler/Dataclass.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/Cython/Compiler/Dataclass.py b/Cython/Compiler/Dataclass.py index ee9825565..91ea7482d 100644 --- a/Cython/Compiler/Dataclass.py +++ b/Cython/Compiler/Dataclass.py @@ -433,11 +433,10 @@ def generate_cmp_code(op, funcname, node, fields): code_lines = [ "def %s(self, other):" % funcname, - " cdef %s other_cast" % node.class_name, - " if isinstance(other, %s):" % node.class_name, - " other_cast = <%s>other" % node.class_name, - " else:", - " return NotImplemented" + " if not isinstance(other, %s):" % node.class_name, + " return NotImplemented", + # + " cdef %s other_cast = <%s>other" % (node.class_name, node.class_name), ] # The Python implementation of dataclasses.py does a tuple comparison -- cgit v1.2.1 From 78adcf4645fa76388c9f75fb64d801196921c737 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sat, 6 Aug 2022 13:18:00 +0200 Subject: Fix last commit: splitting declaration and initialisation of local variables happens later than TreeFragment's tree normalisation. --- Cython/Compiler/Dataclass.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Cython/Compiler/Dataclass.py b/Cython/Compiler/Dataclass.py index 91ea7482d..82ea40724 100644 --- a/Cython/Compiler/Dataclass.py +++ b/Cython/Compiler/Dataclass.py @@ -436,7 +436,8 @@ def generate_cmp_code(op, funcname, node, fields): " if not isinstance(other, %s):" % node.class_name, " return NotImplemented", # - " cdef %s other_cast = <%s>other" % (node.class_name, node.class_name), + " cdef %s other_cast" % node.class_name, + " other_cast = <%s>other" % node.class_name, ] # The Python implementation of dataclasses.py does a tuple comparison -- cgit v1.2.1 From 9cb28fb273b9bb9b2c08d4f6131e08f5a92f951c Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sat, 6 Aug 2022 13:57:42 +0200 Subject: Refactor the dataclass code generation to use a dedicated TemplateCode class instead of an opaque tuple. 
That allows moving more functionality into the class that was previously spread over multiple places in the code. --- Cython/Compiler/Dataclass.py | 224 +++++++++++++++++++++++-------------------- 1 file changed, 122 insertions(+), 102 deletions(-) diff --git a/Cython/Compiler/Dataclass.py b/Cython/Compiler/Dataclass.py index 82ea40724..1a3e2bf5c 100644 --- a/Cython/Compiler/Dataclass.py +++ b/Cython/Compiler/Dataclass.py @@ -81,6 +81,59 @@ class RemoveAssignmentsToNames(VisitorTransform, SkipDeclarations): return node +class TemplateCode(object): + _placeholder_count = 0 + + def __init__(self): + self.code_lines = [] + self.placeholders = {} + self.extra_stats = [] + + def insertion_point(self): + return len(self.code_lines) + + def insert_code_line(self, insertion_point, code_line): + self.code_lines.insert(insertion_point, code_line) + + def reset(self, insertion_point=0): + del self.code_lines[insertion_point:] + + def add_code_line(self, code_line): + self.code_lines.append(code_line) + + def add_code_lines(self, code_lines): + self.code_lines.extend(code_lines) + + def add_placeholder(self, field_names, value): + name = self._new_placeholder_name(field_names) + self.placeholders[name] = value + return name + + def add_extra_statements(self, statements): + self.extra_stats.extend(statements) + + def _new_placeholder_name(self, field_names): + while True: + name = "INIT_PLACEHOLDER_%d" % self._placeholder_count + if (name not in self.placeholders + and name not in field_names): + # make sure name isn't already used and doesn't + # conflict with a variable name (which is unlikely but possible) + break + self._placeholder_count += 1 + return name + + def generate_tree(self, level='c_class'): + stat_list_node = TreeFragment( + "\n".join(self.code_lines), + level=level, + pipeline=[NormalizeTree(None)], + ).substitute(self.placeholders) + + stat_list_node.stats += self.extra_stats + return stat_list_node + + class _MISSING_TYPE(object): pass MISSING = 
_MISSING_TYPE() @@ -249,23 +302,14 @@ def handle_cclass_dataclass(node, dataclass_args, analyse_decs_transform): stats = Nodes.StatListNode(node.pos, stats=[dataclass_params_assignment] + dataclass_fields_stats) - code_lines = [] - placeholders = {} - extra_stats = [] - for cl, ph, es in [ generate_init_code(kwargs['init'], node, fields, kw_only), - generate_repr_code(kwargs['repr'], node, fields), - generate_eq_code(kwargs['eq'], node, fields), - generate_order_code(kwargs['order'], node, fields), - generate_hash_code(kwargs['unsafe_hash'], kwargs['eq'], kwargs['frozen'], node, fields) ]: - code_lines.append(cl) - placeholders.update(ph) - extra_stats.extend(extra_stats) - - code_lines = "\n".join(code_lines) - code_tree = TreeFragment(code_lines, level='c_class', pipeline=[NormalizeTree(node.scope)] - ).substitute(placeholders) - - stats.stats += (code_tree.stats + extra_stats) + code = TemplateCode() + generate_init_code(code, kwargs['init'], node, fields, kw_only) + generate_repr_code(code, kwargs['repr'], node, fields) + generate_eq_code(code, kwargs['eq'], node, fields) + generate_order_code(code, kwargs['order'], node, fields) + generate_hash_code(code, kwargs['unsafe_hash'], kwargs['eq'], kwargs['frozen'], node, fields) + + stats.stats += code.generate_tree().stats # turn off annotation typing, so all arguments to __init__ are accepted as # generic objects and thus can accept _HAS_DEFAULT_FACTORY. @@ -283,7 +327,7 @@ def handle_cclass_dataclass(node, dataclass_args, analyse_decs_transform): node.body.stats.append(comp_directives) -def generate_init_code(init, node, fields, kw_only): +def generate_init_code(code, init, node, fields, kw_only): """ All of these "generate_*_code" functions return a tuple of: - code string @@ -304,7 +348,8 @@ def generate_init_code(init, node, fields, kw_only): CPython. 
""" if not init or node.scope.lookup_here("__init__"): - return "", {}, [] + return + # selfname behaviour copied from the cpython module selfname = "__dataclass_self__" if "self" in fields else "self" args = [selfname] @@ -312,8 +357,7 @@ def generate_init_code(init, node, fields, kw_only): if kw_only: args.append("*") - placeholders = {} - placeholder_count = [0] + function_start_point = code.insertion_point() # create a temp to get _HAS_DEFAULT_FACTORY dataclass_module = make_dataclasses_module_callnode(node.pos) @@ -323,21 +367,7 @@ def generate_init_code(init, node, fields, kw_only): attribute=EncodedString("_HAS_DEFAULT_FACTORY") ) - def get_placeholder_name(): - while True: - name = "INIT_PLACEHOLDER_%d" % placeholder_count[0] - if (name not in placeholders - and name not in fields): - # make sure name isn't already used and doesn't - # conflict with a variable name (which is unlikely but possible) - break - placeholder_count[0] += 1 - return name - - default_factory_placeholder = get_placeholder_name() - placeholders[default_factory_placeholder] = has_default_factory - - function_body_code_lines = [] + default_factory_placeholder = code.add_placeholder(fields, has_default_factory) seen_default = False for name, field in fields.items(): @@ -352,13 +382,13 @@ def generate_init_code(init, node, fields, kw_only): if field.default_factory is not MISSING: ph_name = default_factory_placeholder else: - ph_name = get_placeholder_name() - placeholders[ph_name] = field.default # should be a node + ph_name = code.add_placeholder(fields, field.default) # 'default' should be a node assignment = u" = %s" % ph_name elif seen_default and not kw_only and field.init.value: error(entry.pos, ("non-default argument '%s' follows default argument " "in dataclass __init__") % name) - return "", {}, [] + code.reset(function_start_point) + return if field.init.value: args.append(u"%s%s%s" % (name, annotation, assignment)) @@ -367,37 +397,36 @@ def generate_init_code(init, node, 
fields, kw_only): continue elif field.default_factory is MISSING: if field.init.value: - function_body_code_lines.append(u" %s.%s = %s" % (selfname, name, name)) + code.add_code_line(u" %s.%s = %s" % (selfname, name, name)) elif assignment: # not an argument to the function, but is still initialized - function_body_code_lines.append(u" %s.%s%s" % (selfname, name, assignment)) + code.add_code_line(u" %s.%s%s" % (selfname, name, assignment)) else: - ph_name = get_placeholder_name() - placeholders[ph_name] = field.default_factory + ph_name = code.add_placeholder(fields, field.default_factory) if field.init.value: # close to: # def __init__(self, name=_PLACEHOLDER_VALUE): # self.name = name_default_factory() if name is _PLACEHOLDER_VALUE else name - function_body_code_lines.append(u" %s.%s = %s() if %s is %s else %s" % ( + code.add_code_line(u" %s.%s = %s() if %s is %s else %s" % ( selfname, name, ph_name, name, default_factory_placeholder, name)) else: # still need to use the default factory to initialize - function_body_code_lines.append(u" %s.%s = %s()" - % (selfname, name, ph_name)) - - args = u", ".join(args) - func_def = u"def __init__(%s):" % args - - code_lines = [func_def] + (function_body_code_lines or ["pass"]) + code.add_code_line(u" %s.%s = %s()" % ( + selfname, name, ph_name)) if node.scope.lookup("__post_init__"): post_init_vars = ", ".join(name for name, field in fields.items() if field.is_initvar) - code_lines.append(" %s.__post_init__(%s)" % (selfname, post_init_vars)) - return u"\n".join(code_lines), placeholders, [] + code.add_code_line(" %s.__post_init__(%s)" % (selfname, post_init_vars)) + + if function_start_point == code.insertion_point(): + code.add_code_line(" pass") + args = u", ".join(args) + code.insert_code_line(function_start_point, u"def __init__(%s):" % args) -def generate_repr_code(repr, node, fields): + +def generate_repr_code(code, repr, node, fields): """ The CPython implementation is just: ['return self.__class__.__qualname__ + 
f"(' + @@ -409,36 +438,35 @@ def generate_repr_code(repr, node, fields): which is because Cython currently supports Python 2. """ if not repr or node.scope.lookup("__repr__"): - return "", {}, [] - code_lines = ["def __repr__(self):"] + return + + code.add_code_line("def __repr__(self):") strs = [u"%s={self.%s!r}" % (name, name) for name, field in fields.items() if field.repr.value and not field.is_initvar] format_string = u", ".join(strs) - code_lines.append(u' name = getattr(type(self), "__qualname__", type(self).__name__)') - code_lines.append(u" return f'{name}(%s)'" % format_string) - code_lines = u"\n".join(code_lines) - return code_lines, {}, [] + code.add_code_line(u' name = getattr(type(self), "__qualname__", type(self).__name__)') + code.add_code_line(u" return f'{name}(%s)'" % format_string) -def generate_cmp_code(op, funcname, node, fields): +def generate_cmp_code(code, op, funcname, node, fields): if node.scope.lookup_here(funcname): - return "", {}, [] + return names = [name for name, field in fields.items() if (field.compare.value and not field.is_initvar)] if not names: - return "", {}, [] # no comparable types + return # no comparable types - code_lines = [ + code.add_code_lines([ "def %s(self, other):" % funcname, " if not isinstance(other, %s):" % node.class_name, " return NotImplemented", # " cdef %s other_cast" % node.class_name, " other_cast = <%s>other" % node.class_name, - ] + ]) # The Python implementation of dataclasses.py does a tuple comparison # (roughly): @@ -456,42 +484,32 @@ def generate_cmp_code(op, funcname, node, fields): name, op, name)) if checks: - code_lines.append(" return " + " and ".join(checks)) + code.add_code_line(" return " + " and ".join(checks)) else: if "=" in op: - code_lines.append(" return True") # "() == ()" is True + code.add_code_line(" return True") # "() == ()" is True else: - code_lines.append(" return False") + code.add_code_line(" return False") - code_lines = u"\n".join(code_lines) - return code_lines, 
{}, [] - - -def generate_eq_code(eq, node, fields): +def generate_eq_code(code, eq, node, fields): if not eq: - return "", {}, [] - return generate_cmp_code("==", "__eq__", node, fields) + return + generate_cmp_code(code, "==", "__eq__", node, fields) -def generate_order_code(order, node, fields): +def generate_order_code(code, order, node, fields): if not order: - return "", {}, [] - code_lines = [] - placeholders = {} - stats = [] + return + for op, name in [("<", "__lt__"), ("<=", "__le__"), (">", "__gt__"), (">=", "__ge__")]: - res = generate_cmp_code(op, name, node, fields) - code_lines.append(res[0]) - placeholders.update(res[1]) - stats.extend(res[2]) - return "\n".join(code_lines), placeholders, stats + generate_cmp_code(code, op, name, node, fields) -def generate_hash_code(unsafe_hash, eq, frozen, node, fields): +def generate_hash_code(code, unsafe_hash, eq, frozen, node, fields): """ Copied from CPython implementation - the intention is to follow this as far as is possible: @@ -536,35 +554,37 @@ def generate_hash_code(unsafe_hash, eq, frozen, node, fields): if unsafe_hash: # error message taken from CPython dataclasses module error(node.pos, "Cannot overwrite attribute __hash__ in class %s" % node.class_name) - return "", {}, [] + return + if not unsafe_hash: if not eq: return if not frozen: - return "", {}, [Nodes.SingleAssignmentNode( - node.pos, - lhs=ExprNodes.NameNode(node.pos, name=EncodedString("__hash__")), - rhs=ExprNodes.NoneNode(node.pos), - )] + code.add_extra_statements([ + Nodes.SingleAssignmentNode( + node.pos, + lhs=ExprNodes.NameNode(node.pos, name=EncodedString("__hash__")), + rhs=ExprNodes.NoneNode(node.pos), + ) + ]) + return names = [ name for name, field in fields.items() - if (not field.is_initvar and - (field.compare.value if field.hash.value is None else field.hash.value)) + if not field.is_initvar and ( + field.compare.value if field.hash.value is None else field.hash.value) ] if not names: - return "", {}, [] # nothing to hash + 
return # nothing to hash # make a tuple of the hashes - tpl = u", ".join(u"hash(self.%s)" % name for name in names ) + hash_tuple_items = u", ".join(u"hash(self.%s)" % name for name in names) # if we're here we want to generate a hash - code_lines = dedent(u"""\ - def __hash__(self): - return hash((%s)) - """) % tpl - - return code_lines, {}, [] + code.add_code_lines([ + "def __hash__(self):", + " return hash((%s))" % hash_tuple_items, + ]) def get_field_type(pos, entry): -- cgit v1.2.1 From 5b4bc497ad04880d85c7cb64b8c6e6aa919ffdde Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sat, 6 Aug 2022 14:08:07 +0200 Subject: Rename a method to distinguish it from the "add_*()" methods that do not return anything. --- Cython/Compiler/Dataclass.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cython/Compiler/Dataclass.py b/Cython/Compiler/Dataclass.py index 1a3e2bf5c..88e147c58 100644 --- a/Cython/Compiler/Dataclass.py +++ b/Cython/Compiler/Dataclass.py @@ -104,7 +104,7 @@ class TemplateCode(object): def add_code_lines(self, code_lines): self.code_lines.extend(code_lines) - def add_placeholder(self, field_names, value): + def new_placeholder(self, field_names, value): name = self._new_placeholder_name(field_names) self.placeholders[name] = value return name @@ -367,7 +367,7 @@ def generate_init_code(code, init, node, fields, kw_only): attribute=EncodedString("_HAS_DEFAULT_FACTORY") ) - default_factory_placeholder = code.add_placeholder(fields, has_default_factory) + default_factory_placeholder = code.new_placeholder(fields, has_default_factory) seen_default = False for name, field in fields.items(): @@ -382,7 +382,7 @@ def generate_init_code(code, init, node, fields, kw_only): if field.default_factory is not MISSING: ph_name = default_factory_placeholder else: - ph_name = code.add_placeholder(fields, field.default) # 'default' should be a node + ph_name = code.new_placeholder(fields, field.default) # 'default' should be a node assignment = u" 
= %s" % ph_name elif seen_default and not kw_only and field.init.value: error(entry.pos, ("non-default argument '%s' follows default argument " @@ -402,7 +402,7 @@ def generate_init_code(code, init, node, fields, kw_only): # not an argument to the function, but is still initialized code.add_code_line(u" %s.%s%s" % (selfname, name, assignment)) else: - ph_name = code.add_placeholder(fields, field.default_factory) + ph_name = code.new_placeholder(fields, field.default_factory) if field.init.value: # close to: # def __init__(self, name=_PLACEHOLDER_VALUE): -- cgit v1.2.1 From db0af9cd99e0ea06f9755c3ec8fe084b53c6ae9f Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sat, 6 Aug 2022 23:45:59 +0200 Subject: Add a longer docstring comment to explain how the coverage plugin maps trace calls to source code. --- Cython/Coverage.py | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/Cython/Coverage.py b/Cython/Coverage.py index 7acd54c1f..bf1f0034b 100644 --- a/Cython/Coverage.py +++ b/Cython/Coverage.py @@ -6,6 +6,44 @@ Requires the coverage package at least in version 4.0 (which added the plugin AP This plugin requires the generated C sources to be available, next to the extension module. It parses the C file and reads the original source files from it, which are stored in C comments. It then reports a source file to coverage.py when it hits one of its lines during line tracing. + +Basically, Cython can (on request) emit explicit trace calls into the C code that it generates, +and as a general human debugging helper, it always copies the current source code line +(and its surrounding context) into the C files before it generates code for that line, e.g. 
+ +:: + + /* "line_trace.pyx":147 + * def cy_add_with_nogil(a,b): + * cdef int z, x=a, y=b # 1 + * with nogil: # 2 # <<<<<<<<<<<<<< + * z = 0 # 3 + * z += cy_add_nogil(x, y) # 4 + */ + __Pyx_TraceLine(147,1,__PYX_ERR(0, 147, __pyx_L4_error)) + [C code generated for file line_trace.pyx, line 147, follows here] + +The crux is that multiple source files can contribute code to a single C (or C++) file +(and thus, to a single extension module) besides the main module source file (.py/.pyx), +usually shared declaration files (.pxd) but also literally included files (.pxi). + +Therefore, the coverage plugin doesn't actually try to look at the file that happened +to contribute the current source line for the trace call, but simply looks up the single +.c file from which the extension was compiled (which usually lies right next to it after +the build, having the same name), and parses the code copy comments from that .c file +to recover the original source files and their code as a line-to-file mapping. + +That mapping is then used to report the ``__Pyx_TraceLine()`` calls to the coverage tool. +The plugin also reports the line of source code that it found in the C file to the coverage +tool to support annotated source representations. For this, again, it does not look at the +actual source files but only reports the source code that it found in the C code comments. + +Apart from simplicity (read one file instead of finding and parsing many), part of the +reasoning here is that any line in the original sources for which there is no comment line +(and trace call) in the generated C code cannot count as executed, really, so the C code +comments are a very good source for coverage reporting. They already filter out purely +declarative code lines that do not contribute executable code, and such (missing) lines +can then be marked as excluded from coverage analysis. 
""" from __future__ import absolute_import -- cgit v1.2.1 From 5ddbe05d419d48161e5938e8de227d41913312ee Mon Sep 17 00:00:00 2001 From: 0dminnimda <0dminnimda@gmail.com> Date: Sun, 7 Aug 2022 18:12:05 +0300 Subject: ci-run.sh: Simplify by compacting two conditions into one (GH-4961) --- Tools/ci-run.sh | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index a6f45909b..0a5921d64 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -122,10 +122,8 @@ fi if [[ $NO_CYTHON_COMPILE != "1" && $PYTHON_VERSION != "pypy"* ]]; then BUILD_CFLAGS="$CFLAGS -O2" - if [[ $CYTHON_COMPILE_ALL == "1" ]]; then - if [[ $OSTYPE != "msys" ]]; then - BUILD_CFLAGS="$CFLAGS -O3 -g0 -mtune=generic" # make wheel sizes comparable to standard wheel build - fi + if [[ $CYTHON_COMPILE_ALL == "1" && $OSTYPE != "msys" ]]; then + BUILD_CFLAGS="$CFLAGS -O3 -g0 -mtune=generic" # make wheel sizes comparable to standard wheel build fi if [[ $PYTHON_SYS_VERSION == "2"* ]]; then BUILD_CFLAGS="$BUILD_CFLAGS -fno-strict-aliasing" -- cgit v1.2.1 From 23023e991ab3aa2b7a304aa53511460c7f1b67b6 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Sun, 7 Aug 2022 21:40:24 +0200 Subject: [Doc] Mention explicitely cython.NULL language basics (#4964) --- docs/src/userguide/language_basics.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/userguide/language_basics.rst b/docs/src/userguide/language_basics.rst index 7d056bdfb..a0450b785 100644 --- a/docs/src/userguide/language_basics.rst +++ b/docs/src/userguide/language_basics.rst @@ -1094,7 +1094,7 @@ direct equivalent in Python. * There is an ``&`` operator in Cython, with the same semantics as in C. In pure python mode, use the ``cython.address()`` function instead. * The null C pointer is called ``NULL``, not ``0``. ``NULL`` is a reserved word in Cython - and special object in pure python mode. + and ``cython.NULL`` is a special object in pure python mode. 
* Type casts are written ``value`` or ``cast(type, value)``, for example, .. tabs:: -- cgit v1.2.1 From 20c1432c9343afe46a0421ef54f9edae2377d475 Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 8 Aug 2022 04:59:42 +0100 Subject: Fix handling of annotations in functions in dataclasses (#4954) Fixes #4953. Before this change the annotations were being treated as "dataclass-related" annotations, and thus being made "public". --- Cython/Compiler/ExprNodes.py | 11 +++-------- Cython/Compiler/Nodes.py | 12 +++++++++--- Cython/Compiler/Options.py | 2 +- Cython/Compiler/ParseTreeTransforms.py | 3 +-- Cython/Compiler/Symtab.py | 4 +++- tests/run/pure_cdef_class_dataclass.py | 6 ++++++ 6 files changed, 23 insertions(+), 15 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index bfe065171..7eec59542 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -2057,15 +2057,10 @@ class NameNode(AtomicExprNode): atype = error_type visibility = 'private' - if 'dataclasses.dataclass' in env.directives: + if env.is_c_dataclass_scope: # handle "frozen" directive - full inspection of the dataclass directives happens # in Dataclass.py - frozen_directive = None - dataclass_directive = env.directives['dataclasses.dataclass'] - if dataclass_directive: - dataclass_directive_kwds = dataclass_directive[1] - frozen_directive = dataclass_directive_kwds.get('frozen', None) - is_frozen = frozen_directive and frozen_directive.is_literal and frozen_directive.value + is_frozen = env.is_c_dataclass_scope == "frozen" if atype.is_pyobject or atype.can_coerce_to_pyobject(env): visibility = 'readonly' if is_frozen else 'public' # If the object can't be coerced that's fine - we just don't create a property @@ -2160,7 +2155,7 @@ class NameNode(AtomicExprNode): self.entry.known_standard_library_import = "" # already exists somewhere and so is now ambiguous if not self.entry and self.annotation is not None: # name : type = ... 
- is_dataclass = 'dataclasses.dataclass' in env.directives + is_dataclass = env.is_c_dataclass_scope # In a dataclass, an assignment should not prevent a name from becoming an instance attribute. # Hence, "as_target = not is_dataclass". self.declare_from_annotation(env, as_target=not is_dataclass) diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 50436561b..3ff6b9d6f 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -5169,7 +5169,6 @@ class CClassDefNode(ClassDefNode): check_size = None decorators = None shadow = False - is_dataclass = False @property def punycode_class_name(self): @@ -5219,8 +5218,6 @@ class CClassDefNode(ClassDefNode): if env.in_cinclude and not self.objstruct_name: error(self.pos, "Object struct name specification required for C class defined in 'extern from' block") - if "dataclasses.dataclass" in env.directives: - self.is_dataclass = True if self.decorators: error(self.pos, "Decorators not allowed on cdef classes (used on type '%s')" % self.class_name) self.base_type = None @@ -5310,6 +5307,15 @@ class CClassDefNode(ClassDefNode): self.scope = scope = self.entry.type.scope if scope is not None: scope.directives = env.directives + if "dataclasses.dataclass" in env.directives: + is_frozen = False + # Retrieve the @dataclass config (args, kwargs), as passed into the decorator. 
+ dataclass_config = env.directives["dataclasses.dataclass"] + if dataclass_config: + decorator_kwargs = dataclass_config[1] + frozen_flag = decorator_kwargs.get('frozen') + is_frozen = frozen_flag and frozen_flag.is_literal and frozen_flag.value + scope.is_c_dataclass_scope = "frozen" if is_frozen else True if self.doc and Options.docstrings: scope.doc = embed_position(self.pos, self.doc) diff --git a/Cython/Compiler/Options.py b/Cython/Compiler/Options.py index bb547e978..73778aaf9 100644 --- a/Cython/Compiler/Options.py +++ b/Cython/Compiler/Options.py @@ -391,7 +391,7 @@ directive_scopes = { # defaults to available everywhere # a list of directives that (when used as a decorator) are only applied to # the object they decorate and not to its children. immediate_decorator_directives = { - 'cfunc', 'ccall', 'cclass', + 'cfunc', 'ccall', 'cclass', 'dataclasses.dataclass', # function signature directives 'inline', 'exceptval', 'returns', # class directives diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py index ae271094a..f17af31f1 100644 --- a/Cython/Compiler/ParseTreeTransforms.py +++ b/Cython/Compiler/ParseTreeTransforms.py @@ -1318,8 +1318,7 @@ class InterpretCompilerDirectives(CythonTransform): name, value = directive if self.directives.get(name, object()) != value: directives.append(directive) - if (directive[0] == 'staticmethod' or - (directive[0] == 'dataclasses.dataclass' and scope_name == 'class')): + if directive[0] == 'staticmethod': both.append(dec) # Adapt scope type based on decorators that change it. 
if directive[0] == 'cclass' and scope_name == 'class': diff --git a/Cython/Compiler/Symtab.py b/Cython/Compiler/Symtab.py index 1500c7441..7a76ecde9 100644 --- a/Cython/Compiler/Symtab.py +++ b/Cython/Compiler/Symtab.py @@ -348,6 +348,7 @@ class Scope(object): # is_passthrough boolean Outer scope is passed directly # is_cpp_class_scope boolean Is a C++ class scope # is_property_scope boolean Is a extension type property scope + # is_c_dataclass_scope boolean or "frozen" is a cython.dataclasses.dataclass # scope_prefix string Disambiguator for C names # in_cinclude boolean Suppress C declaration code # qualified_name string "modname" or "modname.classname" @@ -368,6 +369,7 @@ class Scope(object): is_cpp_class_scope = 0 is_property_scope = 0 is_module_scope = 0 + is_c_dataclass_scope = False is_internal = 0 scope_prefix = "" in_cinclude = 0 @@ -2345,7 +2347,7 @@ class CClassScope(ClassScope): type = py_object_type else: type = type.equivalent_type - if "dataclasses.InitVar" in pytyping_modifiers and 'dataclasses.dataclass' not in self.directives: + if "dataclasses.InitVar" in pytyping_modifiers and not self.is_c_dataclass_scope: error(pos, "Use of cython.dataclasses.InitVar does not make sense outside a dataclass") if is_cdef: diff --git a/tests/run/pure_cdef_class_dataclass.py b/tests/run/pure_cdef_class_dataclass.py index b3892586f..e5c4bcd32 100644 --- a/tests/run/pure_cdef_class_dataclass.py +++ b/tests/run/pure_cdef_class_dataclass.py @@ -25,11 +25,17 @@ class MyDataclass: True >>> hash(inst1) != id(inst1) True + >>> inst1.func_with_annotations(2.0) + 4.0 """ a: int = 1 self: list = cython.dataclasses.field(default_factory=list, hash=False) # test that arguments of init don't conflict + def func_with_annotations(self, b: float): + c: float = b + return self.a * c + class DummyObj: def __repr__(self): -- cgit v1.2.1 From 0be7a37283bf6f9f9b563f22659d8aa9d353db73 Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 8 Aug 2022 07:57:39 +0100 Subject: Let C++ 
references work in fused types (#4719) Fixes #4717 --- Cython/Compiler/PyrexTypes.py | 2 ++ tests/run/fused_cpp.pyx | 10 ++++++++++ 2 files changed, 12 insertions(+) diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py index 79e144ed1..fdac8412c 100644 --- a/Cython/Compiler/PyrexTypes.py +++ b/Cython/Compiler/PyrexTypes.py @@ -2790,6 +2790,8 @@ class CReferenceBaseType(BaseType): # Common base type for C reference and C++ rvalue reference types. + subtypes = ['ref_base_type'] + def __init__(self, base_type): self.ref_base_type = base_type diff --git a/tests/run/fused_cpp.pyx b/tests/run/fused_cpp.pyx index 9f3bb5104..206ec01e7 100644 --- a/tests/run/fused_cpp.pyx +++ b/tests/run/fused_cpp.pyx @@ -41,3 +41,13 @@ def typeid_call2(cython.integral x): """ cdef const type_info* a = &typeid(cython.integral) return a[0] == tidint[0] + +cdef fused_ref(cython.integral& x): + return x*2 + +def test_fused_ref(int x): + """ + >>> test_fused_ref(5) + (10, 10) + """ + return fused_ref(x), fused_ref[int](x) -- cgit v1.2.1 From f946fe22fff32077dc58beeb64ec1ebc85d37632 Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 8 Aug 2022 19:29:48 +0100 Subject: Generator expression and comprehension scope (#4254) Fixes #1159. This should be a simpler way of dealing with the long-standing generator and comprehension scoping issues. Unlike the previous PR it is fairly non-intrusive (i.e. doesn't wrap everything in `ResultRefNodes`) and therefore the changes should be more reasonable. Instead it: * Gives `IteratorNode` a scope (pointed at the outer scope rather than the scope of the generator/comprehension) * Tags each `ExprNode` inside the generator iterator sequence with a number, and uses those tags later to work out what needs to be converted to an argument for the generator. If an `ExprNode` is optimized out then that's fine - one of its children will have been tagged. 
--- Cython/Compiler/ExprNodes.py | 222 +++++++++++++++++------------ Cython/Compiler/FlowControl.pxd | 8 +- Cython/Compiler/FlowControl.py | 73 ++++++---- Cython/Compiler/Naming.py | 1 + Cython/Compiler/Nodes.py | 10 +- Cython/Compiler/Optimize.py | 3 +- Cython/Compiler/ParseTreeTransforms.py | 142 +++++++++++++++++- Cython/Compiler/TypeInference.py | 7 +- tests/bugs.txt | 1 - tests/run/cpp_iterators.pyx | 34 +++++ tests/run/cpp_iterators_simple.h | 11 ++ tests/run/generators_py.py | 17 +++ tests/run/genexpr_arg_order.py | 181 +++++++++++++++++++++++ tests/run/genexpr_iterable_lookup_T600.pyx | 18 +++ tests/run/locals.pyx | 10 ++ 15 files changed, 606 insertions(+), 132 deletions(-) create mode 100644 tests/run/genexpr_arg_order.py diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 7eec59542..5df8d8084 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -333,6 +333,8 @@ class ExprNode(Node): # result_code/temp_result can safely be set to None # is_numpy_attribute boolean Is a Numpy module attribute # annotation ExprNode or None PEP526 annotation for names or expressions + # generator_arg_tag None or Node A tag to mark ExprNodes that potentially need to + # be changed to a generator argument result_ctype = None type = None @@ -342,6 +344,7 @@ class ExprNode(Node): use_managed_ref = True # can be set by optimisation transforms result_is_used = True is_numpy_attribute = False + generator_arg_tag = None # The Analyse Expressions phase for expressions is split # into two sub-phases: @@ -2785,7 +2788,98 @@ class ImportNode(ExprNode): return self.module_name.value -class IteratorNode(ExprNode): +class ScopedExprNode(ExprNode): + # Abstract base class for ExprNodes that have their own local + # scope, such as generator expressions. + # + # expr_scope Scope the inner scope of the expression + + subexprs = [] + expr_scope = None + + # does this node really have a local scope, e.g. does it leak loop + # variables or not? 
non-leaking Py3 behaviour is default, except + # for list comprehensions where the behaviour differs in Py2 and + # Py3 (set in Parsing.py based on parser context) + has_local_scope = True + + def init_scope(self, outer_scope, expr_scope=None): + if expr_scope is not None: + self.expr_scope = expr_scope + elif self.has_local_scope: + self.expr_scope = Symtab.ComprehensionScope(outer_scope) + elif not self.expr_scope: # don't unset if it's already been set + self.expr_scope = None + + def analyse_declarations(self, env): + self.init_scope(env) + + def analyse_scoped_declarations(self, env): + # this is called with the expr_scope as env + pass + + def analyse_types(self, env): + # no recursion here, the children will be analysed separately below + return self + + def analyse_scoped_expressions(self, env): + # this is called with the expr_scope as env + return self + + def generate_evaluation_code(self, code): + # set up local variables and free their references on exit + generate_inner_evaluation_code = super(ScopedExprNode, self).generate_evaluation_code + if not self.has_local_scope or not self.expr_scope.var_entries: + # no local variables => delegate, done + generate_inner_evaluation_code(code) + return + + code.putln('{ /* enter inner scope */') + py_entries = [] + for _, entry in sorted(item for item in self.expr_scope.entries.items() if item[0]): + if not entry.in_closure: + if entry.type.is_pyobject and entry.used: + py_entries.append(entry) + if not py_entries: + # no local Python references => no cleanup required + generate_inner_evaluation_code(code) + code.putln('} /* exit inner scope */') + return + + # must free all local Python references at each exit point + old_loop_labels = code.new_loop_labels() + old_error_label = code.new_error_label() + + generate_inner_evaluation_code(code) + + # normal (non-error) exit + self._generate_vars_cleanup(code, py_entries) + + # error/loop body exit points + exit_scope = code.new_label('exit_scope') + 
code.put_goto(exit_scope) + for label, old_label in ([(code.error_label, old_error_label)] + + list(zip(code.get_loop_labels(), old_loop_labels))): + if code.label_used(label): + code.put_label(label) + self._generate_vars_cleanup(code, py_entries) + code.put_goto(old_label) + code.put_label(exit_scope) + code.putln('} /* exit inner scope */') + + code.set_loop_labels(old_loop_labels) + code.error_label = old_error_label + + def _generate_vars_cleanup(self, code, py_entries): + for entry in py_entries: + if entry.is_cglobal: + code.put_var_gotref(entry) + code.put_var_decref_set(entry, "Py_None") + else: + code.put_var_xdecref_clear(entry) + + +class IteratorNode(ScopedExprNode): # Used as part of for statement implementation. # # Implements result = iter(sequence) @@ -2797,10 +2891,13 @@ class IteratorNode(ExprNode): counter_cname = None reversed = False # currently only used for list/tuple types (see Optimize.py) is_async = False + has_local_scope = False subexprs = ['sequence'] def analyse_types(self, env): + if self.expr_scope: + env = self.expr_scope # actually evaluate sequence in this scope instead self.sequence = self.sequence.analyse_types(env) if (self.sequence.type.is_array or self.sequence.type.is_ptr) and \ not self.sequence.type.is_string: @@ -2823,7 +2920,7 @@ class IteratorNode(ExprNode): ])) def type_dependencies(self, env): - return self.sequence.type_dependencies(env) + return self.sequence.type_dependencies(self.expr_scope or env) def infer_type(self, env): sequence_type = self.sequence.infer_type(env) @@ -3157,7 +3254,7 @@ class NextNode(AtomicExprNode): self.iterator.generate_iter_next_result_code(self.result(), code) -class AsyncIteratorNode(ExprNode): +class AsyncIteratorNode(ScopedExprNode): # Used as part of 'async for' statement implementation. 
# # Implements result = sequence.__aiter__() @@ -3169,11 +3266,14 @@ class AsyncIteratorNode(ExprNode): is_async = True type = py_object_type is_temp = 1 + has_local_scope = False def infer_type(self, env): return py_object_type def analyse_types(self, env): + if self.expr_scope: + env = self.expr_scope self.sequence = self.sequence.analyse_types(env) if not self.sequence.type.is_pyobject: error(self.pos, "async for loops not allowed on C/C++ types") @@ -8483,97 +8583,6 @@ class ListNode(SequenceNode): raise InternalError("List type never specified") -class ScopedExprNode(ExprNode): - # Abstract base class for ExprNodes that have their own local - # scope, such as generator expressions. - # - # expr_scope Scope the inner scope of the expression - - subexprs = [] - expr_scope = None - - # does this node really have a local scope, e.g. does it leak loop - # variables or not? non-leaking Py3 behaviour is default, except - # for list comprehensions where the behaviour differs in Py2 and - # Py3 (set in Parsing.py based on parser context) - has_local_scope = True - - def init_scope(self, outer_scope, expr_scope=None): - if expr_scope is not None: - self.expr_scope = expr_scope - elif self.has_local_scope: - self.expr_scope = Symtab.ComprehensionScope(outer_scope) - else: - self.expr_scope = None - - def analyse_declarations(self, env): - self.init_scope(env) - - def analyse_scoped_declarations(self, env): - # this is called with the expr_scope as env - pass - - def analyse_types(self, env): - # no recursion here, the children will be analysed separately below - return self - - def analyse_scoped_expressions(self, env): - # this is called with the expr_scope as env - return self - - def generate_evaluation_code(self, code): - # set up local variables and free their references on exit - generate_inner_evaluation_code = super(ScopedExprNode, self).generate_evaluation_code - if not self.has_local_scope or not self.expr_scope.var_entries: - # no local variables => delegate, 
done - generate_inner_evaluation_code(code) - return - - code.putln('{ /* enter inner scope */') - py_entries = [] - for _, entry in sorted(item for item in self.expr_scope.entries.items() if item[0]): - if not entry.in_closure: - if entry.type.is_pyobject and entry.used: - py_entries.append(entry) - if not py_entries: - # no local Python references => no cleanup required - generate_inner_evaluation_code(code) - code.putln('} /* exit inner scope */') - return - - # must free all local Python references at each exit point - old_loop_labels = code.new_loop_labels() - old_error_label = code.new_error_label() - - generate_inner_evaluation_code(code) - - # normal (non-error) exit - self._generate_vars_cleanup(code, py_entries) - - # error/loop body exit points - exit_scope = code.new_label('exit_scope') - code.put_goto(exit_scope) - for label, old_label in ([(code.error_label, old_error_label)] + - list(zip(code.get_loop_labels(), old_loop_labels))): - if code.label_used(label): - code.put_label(label) - self._generate_vars_cleanup(code, py_entries) - code.put_goto(old_label) - code.put_label(exit_scope) - code.putln('} /* exit inner scope */') - - code.set_loop_labels(old_loop_labels) - code.error_label = old_error_label - - def _generate_vars_cleanup(self, code, py_entries): - for entry in py_entries: - if entry.is_cglobal: - code.put_var_gotref(entry) - code.put_var_decref_set(entry, "Py_None") - else: - code.put_var_xdecref_clear(entry) - - class ComprehensionNode(ScopedExprNode): # A list/set/dict comprehension @@ -8588,6 +8597,12 @@ class ComprehensionNode(ScopedExprNode): def analyse_declarations(self, env): self.append.target = self # this is used in the PyList_Append of the inner loop self.init_scope(env) + # setup loop scope + if isinstance(self.loop, Nodes._ForInStatNode): + assert isinstance(self.loop.iterator, ScopedExprNode), self.loop.iterator + self.loop.iterator.init_scope(None, env) + else: + assert isinstance(self.loop, Nodes.ForFromStatNode), 
self.loop def analyse_scoped_declarations(self, env): self.loop.analyse_declarations(env) @@ -10012,10 +10027,18 @@ class GeneratorExpressionNode(LambdaNode): # # loop ForStatNode the for-loop, containing a YieldExprNode # def_node DefNode the underlying generator 'def' node + # call_parameters [ExprNode] (Internal) parameters passed to the DefNode call name = StringEncoding.EncodedString('genexpr') binding = False + child_attrs = LambdaNode.child_attrs + ["call_parameters"] + subexprs = LambdaNode.subexprs + ["call_parameters"] + + def __init__(self, pos, *args, **kwds): + super(GeneratorExpressionNode, self).__init__(pos, *args, **kwds) + self.call_parameters = [] + def analyse_declarations(self, env): if hasattr(self, "genexpr_name"): # this if-statement makes it safe to run twice @@ -10028,13 +10051,22 @@ class GeneratorExpressionNode(LambdaNode): self.def_node.is_cyfunction = False # Force genexpr signature self.def_node.entry.signature = TypeSlots.pyfunction_noargs + # setup loop scope + if isinstance(self.loop, Nodes._ForInStatNode): + assert isinstance(self.loop.iterator, ScopedExprNode) + self.loop.iterator.init_scope(None, env) + else: + assert isinstance(self.loop, Nodes.ForFromStatNode) def generate_result_code(self, code): + args_to_call = ([self.closure_result_code()] + + [ cp.result() for cp in self.call_parameters ]) + args_to_call = ", ".join(args_to_call) code.putln( '%s = %s(%s); %s' % ( self.result(), self.def_node.entry.pyfunc_cname, - self.closure_result_code(), + args_to_call, code.error_goto_if_null(self.result(), self.pos))) self.generate_gotref(code) diff --git a/Cython/Compiler/FlowControl.pxd b/Cython/Compiler/FlowControl.pxd index 4a8ef19c1..a15f86cf6 100644 --- a/Cython/Compiler/FlowControl.pxd +++ b/Cython/Compiler/FlowControl.pxd @@ -36,6 +36,7 @@ cdef class NameAssignment: cdef public set refs cdef public object bit cdef public object inferred_type + cdef public object rhs_scope cdef class AssignmentList: cdef public object bit @@ 
-65,7 +66,7 @@ cdef class ControlFlow: cpdef bint is_tracked(self, entry) cpdef bint is_statically_assigned(self, entry) cpdef mark_position(self, node) - cpdef mark_assignment(self, lhs, rhs, entry) + cpdef mark_assignment(self, lhs, rhs, entry, rhs_scope=*) cpdef mark_argument(self, lhs, rhs, entry) cpdef mark_deletion(self, node, entry) cpdef mark_reference(self, node, entry) @@ -103,12 +104,11 @@ cdef class ControlFlowAnalysis(CythonTransform): cdef object gv_ctx cdef object constant_folder cdef set reductions - cdef list env_stack - cdef list stack + cdef list stack # a stack of (env, flow) tuples cdef object env cdef ControlFlow flow cdef object object_expr cdef bint in_inplace_assignment - cpdef mark_assignment(self, lhs, rhs=*) + cpdef mark_assignment(self, lhs, rhs=*, rhs_scope=*) cpdef mark_position(self, node) diff --git a/Cython/Compiler/FlowControl.py b/Cython/Compiler/FlowControl.py index 4018ff851..294bce9ee 100644 --- a/Cython/Compiler/FlowControl.py +++ b/Cython/Compiler/FlowControl.py @@ -172,9 +172,9 @@ class ControlFlow(object): if self.block: self.block.positions.add(node.pos[:2]) - def mark_assignment(self, lhs, rhs, entry): + def mark_assignment(self, lhs, rhs, entry, rhs_scope=None): if self.block and self.is_tracked(entry): - assignment = NameAssignment(lhs, rhs, entry) + assignment = NameAssignment(lhs, rhs, entry, rhs_scope=rhs_scope) self.block.stats.append(assignment) self.block.gen[entry] = assignment self.entries.add(entry) @@ -315,7 +315,7 @@ class ExceptionDescr(object): class NameAssignment(object): - def __init__(self, lhs, rhs, entry): + def __init__(self, lhs, rhs, entry, rhs_scope=None): if lhs.cf_state is None: lhs.cf_state = set() self.lhs = lhs @@ -326,16 +326,18 @@ class NameAssignment(object): self.is_arg = False self.is_deletion = False self.inferred_type = None + # For generator expression targets, the rhs can have a different scope than the lhs. 
+ self.rhs_scope = rhs_scope def __repr__(self): return '%s(entry=%r)' % (self.__class__.__name__, self.entry) def infer_type(self): - self.inferred_type = self.rhs.infer_type(self.entry.scope) + self.inferred_type = self.rhs.infer_type(self.rhs_scope or self.entry.scope) return self.inferred_type def type_dependencies(self): - return self.rhs.type_dependencies(self.entry.scope) + return self.rhs.type_dependencies(self.rhs_scope or self.entry.scope) @property def type(self): @@ -677,6 +679,14 @@ class AssignmentCollector(TreeVisitor): class ControlFlowAnalysis(CythonTransform): + def find_in_stack(self, env): + if env == self.env: + return self.flow + for e, flow in reversed(self.stack): + if e is env: + return flow + assert False + def visit_ModuleNode(self, node): dot_output = self.current_directives['control_flow.dot_output'] self.gv_ctx = GVContext() if dot_output else None @@ -688,10 +698,9 @@ class ControlFlowAnalysis(CythonTransform): self.reductions = set() self.in_inplace_assignment = False - self.env_stack = [] self.env = node.scope - self.stack = [] self.flow = ControlFlow() + self.stack = [] # a stack of (env, flow) tuples self.object_expr = TypedExprNode(PyrexTypes.py_object_type, may_be_none=True) self.visitchildren(node) @@ -708,9 +717,8 @@ class ControlFlowAnalysis(CythonTransform): if arg.default: self.visitchildren(arg) self.visitchildren(node, ('decorators',)) - self.env_stack.append(self.env) + self.stack.append((self.env, self.flow)) self.env = node.local_scope - self.stack.append(self.flow) self.flow = ControlFlow() # Collect all entries @@ -751,8 +759,7 @@ class ControlFlowAnalysis(CythonTransform): if self.gv_ctx is not None: self.gv_ctx.add(GV(node.local_scope.name, self.flow)) - self.flow = self.stack.pop() - self.env = self.env_stack.pop() + self.env, self.flow = self.stack.pop() return node def visit_DefNode(self, node): @@ -765,7 +772,7 @@ class ControlFlowAnalysis(CythonTransform): def visit_CTypeDefNode(self, node): return node - def 
mark_assignment(self, lhs, rhs=None): + def mark_assignment(self, lhs, rhs=None, rhs_scope=None): if not self.flow.block: return if self.flow.exceptions: @@ -782,7 +789,7 @@ class ControlFlowAnalysis(CythonTransform): entry = self.env.lookup(lhs.name) if entry is None: # TODO: This shouldn't happen... return - self.flow.mark_assignment(lhs, rhs, entry) + self.flow.mark_assignment(lhs, rhs, entry, rhs_scope=rhs_scope) elif lhs.is_sequence_constructor: for i, arg in enumerate(lhs.args): if arg.is_starred: @@ -979,10 +986,11 @@ class ControlFlowAnalysis(CythonTransform): is_special = False sequence = node.iterator.sequence target = node.target + env = node.iterator.expr_scope or self.env if isinstance(sequence, ExprNodes.SimpleCallNode): function = sequence.function if sequence.self is None and function.is_name: - entry = self.env.lookup(function.name) + entry = env.lookup(function.name) if not entry or entry.is_builtin: if function.name == 'reversed' and len(sequence.args) == 1: sequence = sequence.args[0] @@ -990,30 +998,32 @@ class ControlFlowAnalysis(CythonTransform): if target.is_sequence_constructor and len(target.args) == 2: iterator = sequence.args[0] if iterator.is_name: - iterator_type = iterator.infer_type(self.env) + iterator_type = iterator.infer_type(env) if iterator_type.is_builtin_type: # assume that builtin types have a length within Py_ssize_t self.mark_assignment( target.args[0], ExprNodes.IntNode(target.pos, value='PY_SSIZE_T_MAX', - type=PyrexTypes.c_py_ssize_t_type)) + type=PyrexTypes.c_py_ssize_t_type), + rhs_scope=node.iterator.expr_scope) target = target.args[1] sequence = sequence.args[0] if isinstance(sequence, ExprNodes.SimpleCallNode): function = sequence.function if sequence.self is None and function.is_name: - entry = self.env.lookup(function.name) + entry = env.lookup(function.name) if not entry or entry.is_builtin: if function.name in ('range', 'xrange'): is_special = True for arg in sequence.args[:2]: - self.mark_assignment(target, 
arg) + self.mark_assignment(target, arg, rhs_scope=node.iterator.expr_scope) if len(sequence.args) > 2: self.mark_assignment(target, self.constant_folder( ExprNodes.binop_node(node.pos, '+', sequence.args[0], - sequence.args[2]))) + sequence.args[2])), + rhs_scope=node.iterator.expr_scope) if not is_special: # A for-loop basically translates to subsequent calls to @@ -1022,7 +1032,7 @@ class ControlFlowAnalysis(CythonTransform): # Python strings, etc., while correctly falling back to an # object type when the base type cannot be handled. - self.mark_assignment(target, node.item) + self.mark_assignment(target, node.item, rhs_scope=node.iterator.expr_scope) def visit_AsyncForStatNode(self, node): return self.visit_ForInStatNode(node) @@ -1321,21 +1331,25 @@ class ControlFlowAnalysis(CythonTransform): def visit_ComprehensionNode(self, node): if node.expr_scope: - self.env_stack.append(self.env) + self.stack.append((self.env, self.flow)) self.env = node.expr_scope # Skip append node here self._visit(node.loop) if node.expr_scope: - self.env = self.env_stack.pop() + self.env, _ = self.stack.pop() return node def visit_ScopedExprNode(self, node): + # currently this is written to deal with these two types + # (with comprehensions covered in their own function) + assert isinstance(node, (ExprNodes.IteratorNode, ExprNodes.AsyncIteratorNode)), node if node.expr_scope: - self.env_stack.append(self.env) + self.stack.append((self.env, self.flow)) + self.flow = self.find_in_stack(node.expr_scope) self.env = node.expr_scope self.visitchildren(node) if node.expr_scope: - self.env = self.env_stack.pop() + self.env, self.flow = self.stack.pop() return node def visit_PyClassDefNode(self, node): @@ -1343,14 +1357,21 @@ class ControlFlowAnalysis(CythonTransform): 'mkw', 'bases', 'class_result')) self.flow.mark_assignment(node.target, node.classobj, self.env.lookup(node.target.name)) - self.env_stack.append(self.env) + self.stack.append((self.env, self.flow)) self.env = node.scope 
self.flow.nextblock() if node.doc_node: self.flow.mark_assignment(node.doc_node, fake_rhs_expr, node.doc_node.entry) self.visitchildren(node, ('body',)) self.flow.nextblock() - self.env = self.env_stack.pop() + self.env, _ = self.stack.pop() + return node + + def visit_CClassDefNode(self, node): + # just make sure the nodes scope is findable in-case there is a list comprehension in it + self.stack.append((node.scope, self.flow)) + self.visitchildren(node) + self.stack.pop() return node def visit_AmpersandNode(self, node): diff --git a/Cython/Compiler/Naming.py b/Cython/Compiler/Naming.py index 96c0b8fbd..1931e5976 100644 --- a/Cython/Compiler/Naming.py +++ b/Cython/Compiler/Naming.py @@ -17,6 +17,7 @@ pyunicode_identifier_prefix = pyrex_prefix + 'U' builtin_prefix = pyrex_prefix + "builtin_" arg_prefix = pyrex_prefix + "arg_" +genexpr_arg_prefix = pyrex_prefix + "genexpr_arg_" funcdoc_prefix = pyrex_prefix + "doc_" enum_prefix = pyrex_prefix + "e_" func_prefix = pyrex_prefix + "f_" diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 3ff6b9d6f..2127d9c54 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -3513,7 +3513,15 @@ class DefNode(FuncDefNode): # Move arguments into closure if required def put_into_closure(entry): if entry.in_closure: - code.putln('%s = %s;' % (entry.cname, entry.original_cname)) + if entry.type.is_array: + # This applies to generator expressions that iterate over C arrays (and need to + # capture them by value), under most other circumstances C array arguments are dropped to + # pointers so this copy isn't used + assert entry.type.size is not None + code.globalstate.use_utility_code(UtilityCode.load_cached("IncludeStringH", "StringTools.c")) + code.putln("memcpy({0}, {1}, sizeof({0}));".format(entry.cname, entry.original_cname)) + else: + code.putln('%s = %s;' % (entry.cname, entry.original_cname)) if entry.type.is_memoryviewslice: # TODO - at some point reference count of memoryviews should # 
genuinely be unified with PyObjects diff --git a/Cython/Compiler/Optimize.py b/Cython/Compiler/Optimize.py index cea5970f6..076eb2f69 100644 --- a/Cython/Compiler/Optimize.py +++ b/Cython/Compiler/Optimize.py @@ -2105,7 +2105,8 @@ class InlineDefNodeCalls(Visitor.NodeRefCleanupMixin, Visitor.EnvTransform): return node inlined = ExprNodes.InlinedDefNodeCallNode( node.pos, function_name=function_name, - function=function, args=node.args) + function=function, args=node.args, + generator_arg_tag=node.generator_arg_tag) if inlined.can_be_inlined(): return self.replace(node, inlined) return node diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py index f17af31f1..d7a8d13e4 100644 --- a/Cython/Compiler/ParseTreeTransforms.py +++ b/Cython/Compiler/ParseTreeTransforms.py @@ -1621,6 +1621,126 @@ class WithTransform(VisitorTransform, SkipDeclarations): visit_Node = VisitorTransform.recurse_to_children +class _GeneratorExpressionArgumentsMarker(TreeVisitor, SkipDeclarations): + # called from "MarkClosureVisitor" + def __init__(self, gen_expr): + super(_GeneratorExpressionArgumentsMarker, self).__init__() + self.gen_expr = gen_expr + + def visit_ExprNode(self, node): + if not node.is_literal: + # Don't bother tagging literal nodes + assert (not node.generator_arg_tag) # nobody has tagged this first + node.generator_arg_tag = self.gen_expr + self.visitchildren(node) + + def visit_Node(self, node): + # We're only interested in the expressions that make up the iterator sequence, + # so don't go beyond ExprNodes (e.g. into ForFromStatNode). 
+ return + + def visit_GeneratorExpressionNode(self, node): + node.generator_arg_tag = self.gen_expr + # don't visit children, can't handle overlapping tags + # (and assume generator expressions don't end up optimized out in a way + # that would require overlapping tags) + + +class _HandleGeneratorArguments(VisitorTransform, SkipDeclarations): + # used from within CreateClosureClasses + + def __call__(self, node): + from . import Visitor + assert isinstance(node, ExprNodes.GeneratorExpressionNode) + self.gen_node = node + + self.args = list(node.def_node.args) + self.call_parameters = list(node.call_parameters) + self.tag_count = 0 + self.substitutions = {} + + self.visitchildren(node) + + for k, v in self.substitutions.items(): + # doing another search for replacements here (at the end) allows us to sweep up + # CloneNodes too (which are often generated by the optimizer) + # (it could arguably be done more efficiently with a single traversal though) + Visitor.recursively_replace_node(node, k, v) + + node.def_node.args = self.args + node.call_parameters = self.call_parameters + return node + + def visit_GeneratorExpressionNode(self, node): + # a generator can also be substituted itself, so handle that case + new_node = self._handle_ExprNode(node, do_visit_children=False) + # However do not traverse into it. A new _HandleGeneratorArguments visitor will be used + # elsewhere to do that. + return node + + def _handle_ExprNode(self, node, do_visit_children): + if (node.generator_arg_tag is not None and self.gen_node is not None and + self.gen_node == node.generator_arg_tag): + pos = node.pos + # The reason for using ".x" as the name is that this is how CPython + # tracks internal variables in loops (e.g. + # { locals() for v in range(10) } + # will produce "v" and ".0"). 
We don't replicate this behaviour completely + # but use it as a starting point + name_source = self.tag_count + self.tag_count += 1 + name = EncodedString(".{0}".format(name_source)) + def_node = self.gen_node.def_node + if not def_node.local_scope.lookup_here(name): + from . import Symtab + cname = EncodedString(Naming.genexpr_arg_prefix + Symtab.punycodify_name(str(name_source))) + name_decl = Nodes.CNameDeclaratorNode(pos=pos, name=name) + type = node.type + if type.is_reference and not type.is_fake_reference: + # It isn't obvious whether the right thing to do would be to capture by reference or by + # value (C++ itself doesn't know either for lambda functions and forces a choice). + # However, capture by reference involves converting to FakeReference which would require + # re-analysing AttributeNodes. Therefore I've picked capture-by-value out of convenience + # TODO - could probably be optimized by making the arg a reference but the closure not + # (see https://github.com/cython/cython/issues/2468) + type = type.ref_base_type + + name_decl.type = type + new_arg = Nodes.CArgDeclNode(pos=pos, declarator=name_decl, + base_type=None, default=None, annotation=None) + new_arg.name = name_decl.name + new_arg.type = type + + self.args.append(new_arg) + node.generator_arg_tag = None # avoid the possibility of this being caught again + self.call_parameters.append(node) + new_arg.entry = def_node.declare_argument(def_node.local_scope, new_arg) + new_arg.entry.cname = cname + new_arg.entry.in_closure = True + + if do_visit_children: + # now visit the Nodes's children (but remove self.gen_node to not to further + # argument substitution) + gen_node, self.gen_node = self.gen_node, None + self.visitchildren(node) + self.gen_node = gen_node + + # replace the node inside the generator with a looked-up name + name_node = ExprNodes.NameNode(pos=pos, name=name) + name_node.entry = self.gen_node.def_node.gbody.local_scope.lookup(name_node.name) + name_node.type = 
name_node.entry.type + self.substitutions[node] = name_node + return name_node + if do_visit_children: + self.visitchildren(node) + return node + + def visit_ExprNode(self, node): + return self._handle_ExprNode(node, True) + + visit_Node = VisitorTransform.recurse_to_children + + class DecoratorTransform(ScopeTrackingTransform, SkipDeclarations): """ Transforms method decorators in cdef classes into nested calls or properties. @@ -2332,7 +2452,7 @@ if VALUE is not None: env = self.current_env() node.analyse_declarations(env) # the node may or may not have a local scope - if node.has_local_scope: + if node.expr_scope: self.seen_vars_stack.append(set(self.seen_vars_stack[-1])) self.enter_scope(node, node.expr_scope) node.analyse_scoped_declarations(node.expr_scope) @@ -2340,6 +2460,7 @@ if VALUE is not None: self.exit_scope() self.seen_vars_stack.pop() else: + node.analyse_scoped_declarations(env) self.visitchildren(node) return node @@ -3004,6 +3125,8 @@ class YieldNodeCollector(TreeVisitor): class MarkClosureVisitor(CythonTransform): + # In addition to marking closures this is also responsible to finding parts of the + # generator iterable and marking them def visit_ModuleNode(self, node): self.needs_closure = False @@ -3074,6 +3197,19 @@ class MarkClosureVisitor(CythonTransform): self.needs_closure = True return node + def visit_GeneratorExpressionNode(self, node): + node = self.visit_LambdaNode(node) + if not isinstance(node.loop, Nodes._ForInStatNode): + # Possibly should handle ForFromStatNode + # but for now do nothing + return node + itseq = node.loop.iterator.sequence + # literals do not need replacing with an argument + if itseq.is_literal: + return node + _GeneratorExpressionArgumentsMarker(node).visit(itseq) + return node + class CreateClosureClasses(CythonTransform): # Output closure classes in module scope for all functions @@ -3218,6 +3354,10 @@ class CreateClosureClasses(CythonTransform): self.visitchildren(node) return node + def 
visit_GeneratorExpressionNode(self, node): + node = _HandleGeneratorArguments()(node) + return self.visit_LambdaNode(node) + class InjectGilHandling(VisitorTransform, SkipDeclarations): """ diff --git a/Cython/Compiler/TypeInference.py b/Cython/Compiler/TypeInference.py index 4ae3ab155..0ef651d24 100644 --- a/Cython/Compiler/TypeInference.py +++ b/Cython/Compiler/TypeInference.py @@ -104,10 +104,11 @@ class MarkParallelAssignments(EnvTransform): is_special = False sequence = node.iterator.sequence target = node.target + iterator_scope = node.iterator.expr_scope or self.current_env() if isinstance(sequence, ExprNodes.SimpleCallNode): function = sequence.function if sequence.self is None and function.is_name: - entry = self.current_env().lookup(function.name) + entry = iterator_scope.lookup(function.name) if not entry or entry.is_builtin: if function.name == 'reversed' and len(sequence.args) == 1: sequence = sequence.args[0] @@ -115,7 +116,7 @@ class MarkParallelAssignments(EnvTransform): if target.is_sequence_constructor and len(target.args) == 2: iterator = sequence.args[0] if iterator.is_name: - iterator_type = iterator.infer_type(self.current_env()) + iterator_type = iterator.infer_type(iterator_scope) if iterator_type.is_builtin_type: # assume that builtin types have a length within Py_ssize_t self.mark_assignment( @@ -127,7 +128,7 @@ class MarkParallelAssignments(EnvTransform): if isinstance(sequence, ExprNodes.SimpleCallNode): function = sequence.function if sequence.self is None and function.is_name: - entry = self.current_env().lookup(function.name) + entry = iterator_scope.lookup(function.name) if not entry or entry.is_builtin: if function.name in ('range', 'xrange'): is_special = True diff --git a/tests/bugs.txt b/tests/bugs.txt index e853b4526..27458889c 100644 --- a/tests/bugs.txt +++ b/tests/bugs.txt @@ -6,7 +6,6 @@ class_attribute_init_values_T18 unsignedbehaviour_T184 missing_baseclass_in_predecl_T262 cfunc_call_tuple_args_T408 
-genexpr_iterable_lookup_T600 generator_expressions_in_class for_from_pyvar_loop_T601 temp_sideeffects_T654 # not really a bug, Cython warns about it diff --git a/tests/run/cpp_iterators.pyx b/tests/run/cpp_iterators.pyx index f2171aae5..cc1e185cc 100644 --- a/tests/run/cpp_iterators.pyx +++ b/tests/run/cpp_iterators.pyx @@ -10,6 +10,11 @@ cdef extern from "cpp_iterators_simple.h": DoublePointerIter(double* start, int len) double* begin() double* end() + cdef cppclass DoublePointerIterDefaultConstructible: + DoublePointerIterDefaultConstructible() + DoublePointerIterDefaultConstructible(double* start, int len) + double* begin() + double* end() def test_vector(py_v): """ @@ -98,6 +103,35 @@ def test_custom(): finally: del iter +def test_custom_deref(): + """ + >>> test_custom_deref() + [1.0, 2.0, 3.0] + """ + cdef double* values = [1, 2, 3] + cdef DoublePointerIter* iter + try: + iter = new DoublePointerIter(values, 3) + return [x for x in deref(iter)] + finally: + del iter + +def test_custom_genexp(): + """ + >>> test_custom_genexp() + [1.0, 2.0, 3.0] + """ + def to_list(g): # function to hide the intent to avoid inlined-generator expression optimization + return list(g) + cdef double* values = [1, 2, 3] + cdef DoublePointerIterDefaultConstructible* iter + try: + iter = new DoublePointerIterDefaultConstructible(values, 3) + # TODO: Only needs to copy once - currently copies twice + return to_list(x for x in iter[0]) + finally: + del iter + def test_iteration_over_heap_vector(L): """ >>> test_iteration_over_heap_vector([1,2]) diff --git a/tests/run/cpp_iterators_simple.h b/tests/run/cpp_iterators_simple.h index 3a4b50e3c..8373237d8 100644 --- a/tests/run/cpp_iterators_simple.h +++ b/tests/run/cpp_iterators_simple.h @@ -8,3 +8,14 @@ private: int len_; }; +class DoublePointerIterDefaultConstructible: public DoublePointerIter { + // an alternate version that is default-constructible +public: + DoublePointerIterDefaultConstructible() : + DoublePointerIter(0, 0) + {} + 
DoublePointerIterDefaultConstructible(double* start, int len) : + DoublePointerIter(start, len) + {} + +}; diff --git a/tests/run/generators_py.py b/tests/run/generators_py.py index 914252bf4..9ec6991cf 100644 --- a/tests/run/generators_py.py +++ b/tests/run/generators_py.py @@ -387,3 +387,20 @@ def test_yield_in_const_conditional_true(): """ if True: print((yield 1)) + + +def test_generator_scope(): + """ + Tests that the function is run at the correct time + (i.e. when the generator is created, not when it's run) + >>> list(test_generator_scope()) + inner running + generator created + [0, 10] + """ + def inner(val): + print("inner running") + return [0, val] + gen = (a for a in inner(10)) + print("generator created") + return gen diff --git a/tests/run/genexpr_arg_order.py b/tests/run/genexpr_arg_order.py new file mode 100644 index 000000000..5b9e27238 --- /dev/null +++ b/tests/run/genexpr_arg_order.py @@ -0,0 +1,181 @@ +# mode: run +# tag: genexpr, py3, py2 + +from __future__ import print_function + +# Tests that function arguments to generator expressions are +# evaluated in the correct order (even after optimization) +# WARNING: there may be an amount of luck in this working correctly (since it +# isn't strictly enforced). 
Therefore perhaps be prepared to disable these +# tests if they stop working and aren't easily fixed + +import cython + +@cython.cfunc +@cython.returns(cython.int) +def zero(): + print("In zero") + return 0 + +@cython.cfunc +@cython.returns(cython.int) +def five(): + print("In five") + return 5 + +@cython.cfunc +@cython.returns(cython.int) +def one(): + print("In one") + return 1 + +# FIXME - I don't think this is easy to enforce unfortunately, but it is slightly wrong +#@cython.test_assert_path_exists("//ForFromStatNode") +#def genexp_range_argument_order(): +# """ +# >>> list(genexp_range_argument_order()) +# In zero +# In five +# [0, 1, 2, 3, 4] +# """ +# return (a for a in range(zero(), five())) +# +#@cython.test_assert_path_exists("//ForFromStatNode") +#@cython.test_assert_path_exists( +# "//InlinedGeneratorExpressionNode", +# "//ComprehensionAppendNode") +#def list_range_argument_order(): +# """ +# >>> list_range_argument_order() +# In zero +# In five +# [0, 1, 2, 3, 4] +# """ +# return list(a for a in range(zero(), five())) + +@cython.test_assert_path_exists("//ForFromStatNode") +def genexp_array_slice_order(): + """ + >>> list(genexp_array_slice_order()) + In zero + In five + [0, 1, 2, 3, 4] + """ + # TODO ideally find a way to add the evaluation of x to this test too + x = cython.declare(cython.int[20]) + x = list(range(20)) + return (a for a in x[zero():five()]) + +@cython.test_assert_path_exists("//ForFromStatNode") +@cython.test_assert_path_exists( + "//InlinedGeneratorExpressionNode", + "//ComprehensionAppendNode") +def list_array_slice_order(): + """ + >>> list(list_array_slice_order()) + In zero + In five + [0, 1, 2, 3, 4] + """ + # TODO ideally find a way to add the evaluation of x to this test too + x = cython.declare(cython.int[20]) + x = list(range(20)) + return list(a for a in x[zero():five()]) + +class IndexableClass: + def __getitem__(self, idx): + print("In indexer") + return [ idx.start, idx.stop, idx.step ] + +class NoisyAttributeLookup: + 
@property + def indexer(self): + print("Getting indexer") + return IndexableClass() + + @property + def function(self): + print("Getting function") + def func(a, b, c): + print("In func") + return [a, b, c] + return func + +def genexp_index_order(): + """ + >>> list(genexp_index_order()) + Getting indexer + In zero + In five + In one + In indexer + Made generator expression + [0, 5, 1] + """ + obj = NoisyAttributeLookup() + ret = (a for a in obj.indexer[zero():five():one()]) + print("Made generator expression") + return ret + +@cython.test_assert_path_exists("//InlinedGeneratorExpressionNode") +def list_index_order(): + """ + >>> list_index_order() + Getting indexer + In zero + In five + In one + In indexer + [0, 5, 1] + """ + obj = NoisyAttributeLookup() + return list(a for a in obj.indexer[zero():five():one()]) + + +def genexpr_fcall_order(): + """ + >>> list(genexpr_fcall_order()) + Getting function + In zero + In five + In one + In func + Made generator expression + [0, 5, 1] + """ + obj = NoisyAttributeLookup() + ret = (a for a in obj.function(zero(), five(), one())) + print("Made generator expression") + return ret + +@cython.test_assert_path_exists("//InlinedGeneratorExpressionNode") +def list_fcall_order(): + """ + >>> list_fcall_order() + Getting function + In zero + In five + In one + In func + [0, 5, 1] + """ + obj = NoisyAttributeLookup() + return list(a for a in obj.function(zero(), five(), one())) + +def call1(): + print("In call1") + return ["a"] +def call2(): + print("In call2") + return ["b"] + +def multiple_genexps_to_call_order(): + """ + >>> multiple_genexps_to_call_order() + In call1 + In call2 + """ + def takes_two_genexps(a, b): + pass + + return takes_two_genexps((x for x in call1()), (x for x in call2())) diff --git a/tests/run/genexpr_iterable_lookup_T600.pyx b/tests/run/genexpr_iterable_lookup_T600.pyx index 945652717..c288993a6 100644 --- a/tests/run/genexpr_iterable_lookup_T600.pyx +++ b/tests/run/genexpr_iterable_lookup_T600.pyx @@ 
-35,6 +35,11 @@ def genexpr_iterable_in_closure(): result = list( x*2 for x in x if x != 'b' ) assert x == 'abc' # don't leak in Py3 code assert f() == 'abc' # don't leak in Py3 code + + # Py2 cleanup (pretty irrelevant to the actual test!) + import sys + if sys.version_info[0] == 2: + result = map(bytes, result) return result @@ -51,6 +56,7 @@ def genexpr_over_complex_arg(func, L): def listcomp(): """ >>> listcomp() + [0, 1, 5, 8] """ data = [('red', 5), ('blue', 1), ('yellow', 8), ('black', 0)] data.sort(key=lambda r: r[1]) @@ -84,3 +90,15 @@ def genexpr_in_dictcomp_dictiter(): """ d = {1:2, 3:4, 5:6} return {k:d for k,d in d.iteritems() if d != 4} + + +def genexpr_over_array_slice(): + """ + >>> list(genexpr_over_array_slice()) + [0.0, 1.0, 2.0, 3.0, 4.0, 5.0] + """ + cdef double x[10] + for i in range(10): + x[i] = i + cdef int n = 5 + return (n for n in x[:n+1]) diff --git a/tests/run/locals.pyx b/tests/run/locals.pyx index f343fe1cb..9473ad01e 100644 --- a/tests/run/locals.pyx +++ b/tests/run/locals.pyx @@ -113,3 +113,13 @@ def buffers_in_locals(object[char, ndim=1] a): cdef object[unsigned char, ndim=1] b = a return locals() + +def set_comp_scope(): + """ + locals should be evaluated in the outer scope + >>> list(set_comp_scope()) + ['something'] + """ + something = 1 + return { b for b in locals().keys() } + -- cgit v1.2.1 From 301abe70ba363fae4d9c3a800cf7aecc50d30387 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Mon, 8 Aug 2022 22:46:08 +0200 Subject: runtests.py: Generalise the C++XX extension setup to support also newer C++ standard versions. 
--- runtests.py | 87 +++++++++++++++++++++++++++---------------------------------- 1 file changed, 39 insertions(+), 48 deletions(-) diff --git a/runtests.py b/runtests.py index a3bd397da..d8b3992a9 100755 --- a/runtests.py +++ b/runtests.py @@ -294,57 +294,46 @@ def update_openmp_extension(ext): return EXCLUDE_EXT -def update_cpp11_extension(ext): - """ - update cpp11 extensions that will run on versions of gcc >4.8 - """ - gcc_version = get_gcc_version(ext.language) - already_has_std = any(ca for ca in ext.extra_compile_args if "-std" in ca) - if gcc_version: - compiler_version = gcc_version.group(1) - if float(compiler_version) > 4.8 and not already_has_std: - ext.extra_compile_args.append("-std=c++11") - return ext +def update_cpp_extension(cpp_std, min_gcc_version=None, min_macos_version=None): + def _update_cpp_extension(ext): + """ + Update cpp[cpp_std] extensions that will run on minimum versions of gcc / clang / macos. + """ + already_has_std = any( + ca for ca in ext.extra_compile_args + if "-std" in ca and "-stdlib" not in ca + ) - clang_version = get_clang_version(ext.language) - if clang_version: - if not already_has_std: - ext.extra_compile_args.append("-std=c++11") - if sys.platform == "darwin": - ext.extra_compile_args.append("-stdlib=libc++") - ext.extra_compile_args.append("-mmacosx-version-min=10.7") - return ext + if min_gcc_version is not None: + gcc_version = get_gcc_version(ext.language) + if gcc_version: + if cpp_std >= 17 and sys.version_info[0] < 3: + # The Python 2.7 headers contain the 'register' modifier + # which gcc warns about in C++17 mode. 
+ ext.extra_compile_args.append('-Wno-register') + compiler_version = gcc_version.group(1) + if float(compiler_version) > float(min_gcc_version) and not already_has_std: + ext.extra_compile_args.append("-std=c++%s" % cpp_std) + return ext - return EXCLUDE_EXT + clang_version = get_clang_version(ext.language) + if clang_version: + if not already_has_std: + ext.extra_compile_args.append("-std=c++%s" % cpp_std) + if cpp_std >= 17 and sys.version_info[0] < 3: + # The Python 2.7 headers contain the 'register' modifier + # which clang warns about in C++17 mode. + ext.extra_compile_args.append('-Wno-register') + if sys.platform == "darwin": + ext.extra_compile_args.append("-stdlib=libc++") + if min_macos_version is not None: + ext.extra_compile_args.append("-mmacosx-version-min=" + min_macos_version) + return ext -def update_cpp17_extension(ext): - """ - update cpp17 extensions that will run on versions of gcc >=5.0 - """ - gcc_version = get_gcc_version(ext.language) - if gcc_version: - compiler_version = gcc_version.group(1) - if sys.version_info[0] < 3: - # The Python 2.7 headers contain the 'register' modifier - # which gcc warns about in C++17 mode. - ext.extra_compile_args.append('-Wno-register') - if float(compiler_version) >= 5.0: - ext.extra_compile_args.append("-std=c++17") - return ext + return EXCLUDE_EXT - clang_version = get_clang_version(ext.language) - if clang_version: - ext.extra_compile_args.append("-std=c++17") - if sys.version_info[0] < 3: - # The Python 2.7 headers contain the 'register' modifier - # which clang warns about in C++17 mode. 
- ext.extra_compile_args.append('-Wno-register') - if sys.platform == "darwin": - ext.extra_compile_args.append("-stdlib=libc++") - ext.extra_compile_args.append("-mmacosx-version-min=10.13") - return ext + return _update_cpp_extension - return EXCLUDE_EXT def require_gcc(version): def check(ext): @@ -438,8 +427,8 @@ EXT_EXTRAS = { 'tag:numpy' : update_numpy_extension, 'tag:openmp': update_openmp_extension, 'tag:gdb': update_gdb_extension, - 'tag:cpp11': update_cpp11_extension, - 'tag:cpp17': update_cpp17_extension, + 'tag:cpp11': update_cpp_extension(11, min_gcc_version="4.8", min_macos_version="10.7"), + 'tag:cpp17': update_cpp_extension(17, min_gcc_version="5.0", min_macos_version="10.13"), 'tag:trace' : update_linetrace_extension, 'tag:bytesformat': exclude_extension_in_pyver((3, 3), (3, 4)), # no %-bytes formatting 'tag:no-macos': exclude_extension_on_platform('darwin'), @@ -1927,6 +1916,8 @@ class EndToEndTest(unittest.TestCase): os.chdir(self.old_dir) def _try_decode(self, content): + if not isinstance(content, bytes): + return content try: return content.decode() except UnicodeDecodeError: -- cgit v1.2.1 From c0ba04283ebfb906028d542b52da352c8a326b71 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Mon, 8 Aug 2022 22:58:24 +0200 Subject: runtests.py: Also allow defining a minimum clang version in the C++XX extension setup. --- runtests.py | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/runtests.py b/runtests.py index d8b3992a9..5dd5f6598 100755 --- a/runtests.py +++ b/runtests.py @@ -294,7 +294,7 @@ def update_openmp_extension(ext): return EXCLUDE_EXT -def update_cpp_extension(cpp_std, min_gcc_version=None, min_macos_version=None): +def update_cpp_extension(cpp_std, min_gcc_version=None, min_clang_version=None, min_macos_version=None): def _update_cpp_extension(ext): """ Update cpp[cpp_std] extensions that will run on minimum versions of gcc / clang / macos. 
@@ -304,26 +304,28 @@ def update_cpp_extension(cpp_std, min_gcc_version=None, min_macos_version=None): if "-std" in ca and "-stdlib" not in ca ) - if min_gcc_version is not None: - gcc_version = get_gcc_version(ext.language) - if gcc_version: - if cpp_std >= 17 and sys.version_info[0] < 3: - # The Python 2.7 headers contain the 'register' modifier - # which gcc warns about in C++17 mode. - ext.extra_compile_args.append('-Wno-register') + gcc_version = get_gcc_version(ext.language) + if gcc_version: + if cpp_std >= 17 and sys.version_info[0] < 3: + # The Python 2.7 headers contain the 'register' modifier + # which gcc warns about in C++17 mode. + ext.extra_compile_args.append('-Wno-register') + if not already_has_std: compiler_version = gcc_version.group(1) - if float(compiler_version) > float(min_gcc_version) and not already_has_std: + if not min_gcc_version or float(compiler_version) >= float(min_gcc_version): ext.extra_compile_args.append("-std=c++%s" % cpp_std) - return ext + return ext clang_version = get_clang_version(ext.language) if clang_version: - if not already_has_std: - ext.extra_compile_args.append("-std=c++%s" % cpp_std) if cpp_std >= 17 and sys.version_info[0] < 3: # The Python 2.7 headers contain the 'register' modifier # which clang warns about in C++17 mode. ext.extra_compile_args.append('-Wno-register') + if not already_has_std: + compiler_version = clang_version.group(1) + if not min_clang_version or float(compiler_version) >= float(min_clang_version): + ext.extra_compile_args.append("-std=c++%s" % cpp_std) if sys.platform == "darwin": ext.extra_compile_args.append("-stdlib=libc++") if min_macos_version is not None: -- cgit v1.2.1 From da32c448814b6f78b5972a5265a0a9d979c9b847 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Mon, 8 Aug 2022 23:15:13 +0200 Subject: runtests.py: Let the decision whether a C++XX test can run at all really depend on the C compiler version. Not just whether the "-std=..." option should be set or not. 
--- runtests.py | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/runtests.py b/runtests.py index 5dd5f6598..94217c94d 100755 --- a/runtests.py +++ b/runtests.py @@ -299,11 +299,15 @@ def update_cpp_extension(cpp_std, min_gcc_version=None, min_clang_version=None, """ Update cpp[cpp_std] extensions that will run on minimum versions of gcc / clang / macos. """ + # If the extension provides a -std=... option, assume that whatever C compiler we use + # will probably be ok with it. already_has_std = any( ca for ca in ext.extra_compile_args if "-std" in ca and "-stdlib" not in ca ) + use_gcc = use_clang = already_has_std + # check for a usable gcc version gcc_version = get_gcc_version(ext.language) if gcc_version: if cpp_std >= 17 and sys.version_info[0] < 3: @@ -313,9 +317,13 @@ def update_cpp_extension(cpp_std, min_gcc_version=None, min_clang_version=None, if not already_has_std: compiler_version = gcc_version.group(1) if not min_gcc_version or float(compiler_version) >= float(min_gcc_version): + use_gcc = True ext.extra_compile_args.append("-std=c++%s" % cpp_std) - return ext + if use_gcc: + return ext + + # check for a usable clang version clang_version = get_clang_version(ext.language) if clang_version: if cpp_std >= 17 and sys.version_info[0] < 3: @@ -325,13 +333,17 @@ def update_cpp_extension(cpp_std, min_gcc_version=None, min_clang_version=None, if not already_has_std: compiler_version = clang_version.group(1) if not min_clang_version or float(compiler_version) >= float(min_clang_version): + use_clang = True ext.extra_compile_args.append("-std=c++%s" % cpp_std) if sys.platform == "darwin": ext.extra_compile_args.append("-stdlib=libc++") if min_macos_version is not None: ext.extra_compile_args.append("-mmacosx-version-min=" + min_macos_version) - return ext + if use_clang: + return ext + + # no usable C compiler found => exclude the extension return EXCLUDE_EXT return _update_cpp_extension @@ -429,7 +441,7 @@ EXT_EXTRAS = { 
'tag:numpy' : update_numpy_extension, 'tag:openmp': update_openmp_extension, 'tag:gdb': update_gdb_extension, - 'tag:cpp11': update_cpp_extension(11, min_gcc_version="4.8", min_macos_version="10.7"), + 'tag:cpp11': update_cpp_extension(11, min_gcc_version="4.9", min_macos_version="10.7"), 'tag:cpp17': update_cpp_extension(17, min_gcc_version="5.0", min_macos_version="10.13"), 'tag:trace' : update_linetrace_extension, 'tag:bytesformat': exclude_extension_in_pyver((3, 3), (3, 4)), # no %-bytes formatting -- cgit v1.2.1 From 3e93b8ed48f76616e24b989809fd0585b4a86ffb Mon Sep 17 00:00:00 2001 From: Jonathan Helgert Date: Tue, 9 Aug 2022 15:42:38 +0200 Subject: Add declarations for C++20's Bit manipulation functions (GH-4962) --- Cython/Includes/libcpp/bit.pxd | 31 ++++++++++ runtests.py | 1 + tests/macos_cpp_bugs.txt | 1 + tests/run/cpp_stl_bit_cpp20.pyx | 131 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 164 insertions(+) create mode 100644 Cython/Includes/libcpp/bit.pxd create mode 100644 tests/run/cpp_stl_bit_cpp20.pyx diff --git a/Cython/Includes/libcpp/bit.pxd b/Cython/Includes/libcpp/bit.pxd new file mode 100644 index 000000000..cac12ea4f --- /dev/null +++ b/Cython/Includes/libcpp/bit.pxd @@ -0,0 +1,31 @@ +cdef extern from "" namespace "std" nogil: + # bit_cast (gcc >= 11.0, clang >= 14.0) + cdef To bit_cast[To, From](From&) + + # byteswap (C++23) + #cdef T byteswap[T](T) + + # integral powers of 2 (gcc >= 10.0, clang >= 12.0) + cdef bint has_single_bit[T](T) + cdef T bit_ceil[T](T) + cdef T bit_floor[T](T) + cdef int bit_width[T](T) + + # rotating (gcc >= 9.0, clang >= 9.0) + cdef T rotl[T](T, int shift) + cdef T rotr[T](T, int shift) + + # counting (gcc >= 9.0, clang >= 9.0) + cdef int countl_zero[T](T) + cdef int countl_one[T](T) + cdef int countr_zero[T](T) + cdef int countr_one[T](T) + cdef int popcount[T](T) + + # endian + cpdef enum class endian(int): + little, + big, + native + + diff --git a/runtests.py b/runtests.py index 94217c94d..7cbc6e585 
100755 --- a/runtests.py +++ b/runtests.py @@ -443,6 +443,7 @@ EXT_EXTRAS = { 'tag:gdb': update_gdb_extension, 'tag:cpp11': update_cpp_extension(11, min_gcc_version="4.9", min_macos_version="10.7"), 'tag:cpp17': update_cpp_extension(17, min_gcc_version="5.0", min_macos_version="10.13"), + 'tag:cpp20': update_cpp_extension(20, min_gcc_version="11.0", min_clang_version="13.0", min_macos_version="10.13"), 'tag:trace' : update_linetrace_extension, 'tag:bytesformat': exclude_extension_in_pyver((3, 3), (3, 4)), # no %-bytes formatting 'tag:no-macos': exclude_extension_on_platform('darwin'), diff --git a/tests/macos_cpp_bugs.txt b/tests/macos_cpp_bugs.txt index e5be6475a..e4c4cc608 100644 --- a/tests/macos_cpp_bugs.txt +++ b/tests/macos_cpp_bugs.txt @@ -12,3 +12,4 @@ cpp_stl_algo_comparison_ops cpp_stl_algo_permutation_ops cpp_stl_algo_sorted_ranges_set_ops cpp_stl_algo_sorted_ranges_other_ops +cpp_stl_bit_cpp20 diff --git a/tests/run/cpp_stl_bit_cpp20.pyx b/tests/run/cpp_stl_bit_cpp20.pyx new file mode 100644 index 000000000..5aae8326a --- /dev/null +++ b/tests/run/cpp_stl_bit_cpp20.pyx @@ -0,0 +1,131 @@ +# mode: run +# tag: cpp, werror, cpp20 + +from libcpp cimport bool +from libc.stdint cimport uint8_t, int8_t +from libcpp.bit cimport (bit_cast, has_single_bit, bit_ceil, bit_floor, + bit_width, rotr, rotl, countl_zero, countl_one, countr_zero, + countr_one, popcount) + +def test_bit_cast(): + """ + Test bit_cast with a signed 8bit wide integer type. + -127U = 0b1000'0001U + >>> test_bit_cast() + 129 + """ + cdef int8_t x = -127 + cdef result = bit_cast[uint8_t, int8_t](x) + return result + +def test_has_single_bit(): + """ + Test has_single_bit with a unsigned 8bit wide integer type. + >>> test_has_single_bit() + True + """ + cdef uint8_t x = 1 + cdef bint res = has_single_bit[uint8_t](x) + return res + +def test_bit_ceil(): + """ + Test bit_ceil with a unsigned 8bit wide integer type. 
+ >>> test_bit_ceil() + 4 + """ + cdef uint8_t x = 3 + cdef uint8_t res = bit_ceil[uint8_t](x) + return res + +def test_bit_floor(): + """ + Test bit_floor with a unsigned 8bit wide integer type. + >>> test_bit_floor() + 4 + """ + cdef uint8_t x = 5 + cdef uint8_t res = bit_floor[uint8_t](x) + return res + +def test_bit_width(): + """ + Test bit_width with a unsigned 8bit wide integer type. + >>> test_bit_width() + 3 + """ + cdef uint8_t x = 5 + cdef int res = bit_width[uint8_t](x) + return res + +def test_rotl(): + """ + Test rotl with a unsigned 8bit wide integer type. + >>> test_rotl() + 209 + """ + cdef uint8_t x = 29 + cdef int s = 4 + cdef uint8_t res = rotl[uint8_t](x, s) + return res + +def test_rotr(): + """ + Test rotr with a unsigned 8bit wide integer type. + >>> test_rotr() + 142 + """ + cdef uint8_t x = 29 + cdef int s = 1 + cdef uint8_t res = rotr[uint8_t](x, s) + return res + +def test_countl_zero(): + """ + Test countl_zero with a unsigned 8bit wide integer type. + >>> test_countl_zero() + 3 + """ + cdef uint8_t x = 24 + cdef int res = countl_zero[uint8_t](x) + return res + +def test_countr_zero(): + """ + Test countr_zero with a unsigned 8bit wide integer type. + >>> test_countr_zero() + 3 + """ + cdef uint8_t x = 24 + cdef int res = countr_zero[uint8_t](x) + return res + +def test_countl_one(): + """ + Test countl_one with a unsigned 8bit wide integer type. + >>> test_countl_one() + 3 + """ + cdef uint8_t x = 231 + cdef int res = countl_one[uint8_t](x) + return res + +def test_countr_one(): + """ + Test countr_one with a unsigned 8bit wide integer type. + >>> test_countr_one() + 3 + """ + cdef uint8_t x = 231 + cdef int res = countr_one[uint8_t](x) + return res + +def test_popcount(): + """ + Test popcount with a unsigned 8bit wide integer type. 
+ >>> test_popcount() + 8 + """ + cdef uint8_t x = 255 + cdef int res = popcount[uint8_t](x) + return res -- cgit v1.2.1 From d9e205077e8770cee6e0aa8a61cbfd23b8813847 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Tue, 9 Aug 2022 10:12:28 +0200 Subject: Move some declarations next to the optimisation method that uses them. They used to be there before another method was added. --- Cython/Compiler/Optimize.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/Cython/Compiler/Optimize.py b/Cython/Compiler/Optimize.py index 076eb2f69..231d23419 100644 --- a/Cython/Compiler/Optimize.py +++ b/Cython/Compiler/Optimize.py @@ -319,16 +319,6 @@ class IterationTransform(Visitor.EnvTransform): return self._optimise_for_loop(node, arg, reversed=True) - PyBytes_AS_STRING_func_type = PyrexTypes.CFuncType( - PyrexTypes.c_char_ptr_type, [ - PyrexTypes.CFuncTypeArg("s", Builtin.bytes_type, None) - ]) - - PyBytes_GET_SIZE_func_type = PyrexTypes.CFuncType( - PyrexTypes.c_py_ssize_t_type, [ - PyrexTypes.CFuncTypeArg("s", Builtin.bytes_type, None) - ]) - def _transform_indexable_iteration(self, node, slice_node, is_mutable, reversed=False): """In principle can handle any iterable that Cython has a len() for and knows how to index""" unpack_temp_node = UtilNodes.LetRefNode( @@ -415,6 +405,16 @@ class IterationTransform(Visitor.EnvTransform): body.stats.insert(1, node.body) return ret + PyBytes_AS_STRING_func_type = PyrexTypes.CFuncType( + PyrexTypes.c_char_ptr_type, [ + PyrexTypes.CFuncTypeArg("s", Builtin.bytes_type, None) + ]) + + PyBytes_GET_SIZE_func_type = PyrexTypes.CFuncType( + PyrexTypes.c_py_ssize_t_type, [ + PyrexTypes.CFuncTypeArg("s", Builtin.bytes_type, None) + ]) + def _transform_bytes_iteration(self, node, slice_node, reversed=False): target_type = node.target.type if not target_type.is_int and target_type is not Builtin.bytes_type: -- cgit v1.2.1 From dc2b1dea0a5e0be66ecee3642a83fccd478acc16 Mon Sep 17 00:00:00 2001 From: Stefan 
Behnel Date: Tue, 9 Aug 2022 15:50:42 +0200 Subject: Remove outdated CI build configurations for Travis and Appveyor. --- .travis.yml | 48 --------------------- appveyor.yml | 138 ----------------------------------------------------------- 2 files changed, 186 deletions(-) delete mode 100644 .travis.yml delete mode 100644 appveyor.yml diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 551a38cb7..000000000 --- a/.travis.yml +++ /dev/null @@ -1,48 +0,0 @@ -os: linux -language: python - -addons: - apt: - packages: - - gdb - - python-dbg - - python3-dbg - - libzmq-dev # needed by IPython/Tornado - #- gcc-8 - #- g++-8 - -cache: - pip: true - directories: - - $HOME/.ccache - -env: - global: - - USE_CCACHE=1 - - CCACHE_SLOPPINESS=pch_defines,time_macros - - CCACHE_COMPRESS=1 - - CCACHE_MAXSIZE=250M - - PATH="/usr/lib/ccache:$PATH" - - PYTHON_VERSION=3.8 - - OS_NAME=ubuntu - -python: 3.8 - -matrix: - include: - - arch: arm64 - env: BACKEND=c - - arch: arm64 - env: BACKEND=cpp - - arch: ppc64le - env: BACKEND=c - - arch: ppc64le - env: BACKEND=cpp - # Disabled due to test errors - # - arch: s390x - # env: BACKEND=c - # - arch: s390x - # env: BACKEND=cpp - -script: - - bash ./Tools/ci-run.sh diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 6787b98cc..000000000 --- a/appveyor.yml +++ /dev/null @@ -1,138 +0,0 @@ -# https://ci.appveyor.com/project/cython/cython - -environment: - - global: - # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the - # /E:ON and /V:ON options are not enabled in the batch script interpreter - # See: https://stackoverflow.com/questions/11267463/compiling-python-modules-on-windows-x64/13751649#13751649 - WITH_ENV: "cmd /E:ON /V:ON /C .\\appveyor\\run_with_env.cmd" - BACKEND: c - PARALLEL: "-j4" - EXTRA_CFLAGS: "" - - matrix: - - PYTHON: "C:\\Python27" - PYTHON_VERSION: "2.7" - PYTHON_ARCH: "32" - PYTHONIOENCODING: "utf-8" - PARALLEL: "" - - - PYTHON: "C:\\Python27-x64" - PYTHON_VERSION: "2.7" - 
PYTHON_ARCH: "64" - PYTHONIOENCODING: "utf-8" - PARALLEL: "" - - - PYTHON: "C:\\Python39" - PYTHON_VERSION: "3.9" - PYTHON_ARCH: "32" - - - PYTHON: "C:\\Python39-x64" - PYTHON_VERSION: "3.9" - PYTHON_ARCH: "64" - - - PYTHON: "C:\\Python38" - PYTHON_VERSION: "3.8" - PYTHON_ARCH: "32" - - - PYTHON: "C:\\Python38-x64" - PYTHON_VERSION: "3.8" - PYTHON_ARCH: "64" - EXTRA_CFLAGS: "-DCYTHON_USE_TYPE_SPECS=1" - - - PYTHON: "C:\\Python38-x64" - PYTHON_VERSION: "3.8" - PYTHON_ARCH: "64" - BACKEND: c,cpp - - - PYTHON: "C:\\Python37" - PYTHON_VERSION: "3.7" - PYTHON_ARCH: "32" - BACKEND: c,cpp - - - PYTHON: "C:\\Python37-x64" - PYTHON_VERSION: "3.7" - PYTHON_ARCH: "64" - - - PYTHON: "C:\\Python37-x64" - PYTHON_VERSION: "3.7" - PYTHON_ARCH: "64" - EXTRA_CFLAGS: "-DCYTHON_USE_TYPE_SPECS=1" - - - PYTHON: "C:\\Python37-x64" - PYTHON_VERSION: "3.7" - PYTHON_ARCH: "64" - BACKEND: cpp - - - PYTHON: "C:\\Python36" - PYTHON_VERSION: "3.6" - PYTHON_ARCH: "32" - - - PYTHON: "C:\\Python36-x64" - PYTHON_VERSION: "3.6" - PYTHON_ARCH: "64" - - - PYTHON: "C:\\Python35" - PYTHON_VERSION: "3.5" - PYTHON_ARCH: "32" - - - PYTHON: "C:\\Python35-x64" - PYTHON_VERSION: "3.5" - PYTHON_ARCH: "64" - - - PYTHON: "C:\\Python34" - PYTHON_VERSION: "3.4" - PYTHON_ARCH: "32" - PARALLEL: "" - - - PYTHON: "C:\\Python34-x64" - PYTHON_VERSION: "3.4" - PYTHON_ARCH: "64" - PARALLEL: "" - - - PYTHON: "C:\\Python27-x64" - PYTHON_VERSION: "2.7" - PYTHON_ARCH: "64" - BACKEND: cpp - PYTHONIOENCODING: "utf-8" - PARALLEL: "" - -clone_depth: 5 - -branches: - only: - - master - - release - - 0.29.x - -init: - - "ECHO Python %PYTHON_VERSION% (%PYTHON_ARCH%bit) from %PYTHON%" - -install: - - "powershell appveyor\\install.ps1" - - "%PYTHON%\\python.exe --version" - - "%PYTHON%\\Scripts\\pip.exe --version" - - "%PYTHON%\\Scripts\\wheel.exe version" - -build: off -build_script: - - "%WITH_ENV% %PYTHON%\\python.exe setup.py build_ext %PARALLEL%" - - "%WITH_ENV% %PYTHON%\\python.exe setup.py build_ext --inplace" - - "%WITH_ENV% 
%PYTHON%\\python.exe setup.py bdist_wheel" - -test: off -test_script: - - "%PYTHON%\\Scripts\\pip.exe install -r test-requirements.txt" - - "%PYTHON%\\Scripts\\pip.exe install win_unicode_console" - - "set CFLAGS=/Od /W3 %EXTRA_CFLAGS%" - - "%WITH_ENV% %PYTHON%\\python.exe runtests.py -vv --backend=%BACKEND% --no-code-style -j5" - -artifacts: - - path: dist\* - -cache: - - C:\Downloads\Cython -> appveyor\install.ps1 - -#on_success: -# - TODO: upload the content of dist/*.whl to a public wheelhouse -- cgit v1.2.1 From b721a96d13b43f7018ed6f697715e829d0e5a9af Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 9 Aug 2022 14:55:52 +0100 Subject: Fix memoryview iteration in generator expressions (GH-4968) As a result of f946fe22fff32077dc58beeb64ec1ebc85d37632 And fix at least one pre-existing reference counting leak for memoryview references in things like `any(i > 0 for i in memview)` --- Cython/Compiler/Code.py | 16 ++++++++-------- Cython/Compiler/ExprNodes.py | 2 ++ Cython/Compiler/ParseTreeTransforms.py | 4 +++- tests/memoryview/memoryview.pyx | 4 +++- tests/memoryview/memslice.pyx | 29 +++++++++++++++++++++++++++++ 5 files changed, 45 insertions(+), 10 deletions(-) diff --git a/Cython/Compiler/Code.py b/Cython/Compiler/Code.py index d5db13c50..e66e0129d 100644 --- a/Cython/Compiler/Code.py +++ b/Cython/Compiler/Code.py @@ -831,14 +831,14 @@ class FunctionState(object): allocated and released one of the same type). Type is simply registered and handed back, but will usually be a PyrexType. - If type.is_pyobject, manage_ref comes into play. If manage_ref is set to + If type.needs_refcounting, manage_ref comes into play. If manage_ref is set to True, the temp will be decref-ed on return statements and in exception handling clauses. Otherwise the caller has to deal with any reference counting of the variable. 
- If not type.is_pyobject, then manage_ref will be ignored, but it + If not type.needs_refcounting, then manage_ref will be ignored, but it still has to be passed. It is recommended to pass False by convention - if it is known that type will never be a Python object. + if it is known that type will never be a reference counted type. static=True marks the temporary declaration with "static". This is only used when allocating backing store for a module-level @@ -857,7 +857,7 @@ class FunctionState(object): type = PyrexTypes.c_ptr_type(type) # A function itself isn't an l-value elif type.is_cpp_class and not type.is_fake_reference and self.scope.directives['cpp_locals']: self.scope.use_utility_code(UtilityCode.load_cached("OptionalLocals", "CppSupport.cpp")) - if not type.is_pyobject and not type.is_memoryviewslice: + if not type.needs_refcounting: # Make manage_ref canonical, so that manage_ref will always mean # a decref is needed. manage_ref = False @@ -910,17 +910,17 @@ class FunctionState(object): for name, type, manage_ref, static in self.temps_allocated: freelist = self.temps_free.get((type, manage_ref)) if freelist is None or name not in freelist[1]: - used.append((name, type, manage_ref and type.is_pyobject)) + used.append((name, type, manage_ref and type.needs_refcounting)) return used def temps_holding_reference(self): """Return a list of (cname,type) tuples of temp names and their type - that are currently in use. This includes only temps of a - Python object type which owns its reference. + that are currently in use. This includes only temps + with a reference counted type which owns its reference. """ return [(name, type) for name, type, manage_ref in self.temps_in_use() - if manage_ref and type.is_pyobject] + if manage_ref and type.needs_refcounting] def all_managed_temps(self): """Return a list of (cname, type) tuples of refcount-managed Python objects. 
diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 5df8d8084..f6de976f7 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -10170,6 +10170,8 @@ class YieldExprNode(ExprNode): if type.is_pyobject: code.putln('%s = 0;' % save_cname) code.put_xgotref(cname, type) + elif type.is_memoryviewslice: + code.putln('%s.memview = NULL; %s.data = NULL;' % (save_cname, save_cname)) self.generate_sent_value_handling_code(code, Naming.sent_value_cname) if self.result_is_used: self.allocate_temp_result(code) diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py index d7a8d13e4..baf5b4ef7 100644 --- a/Cython/Compiler/ParseTreeTransforms.py +++ b/Cython/Compiler/ParseTreeTransforms.py @@ -1726,7 +1726,9 @@ class _HandleGeneratorArguments(VisitorTransform, SkipDeclarations): self.gen_node = gen_node # replace the node inside the generator with a looked-up name - name_node = ExprNodes.NameNode(pos=pos, name=name) + # (initialized_check can safely be False because the source variable will be checked + # before it is captured if the check is required) + name_node = ExprNodes.NameNode(pos, name=name, initialized_check=False) name_node.entry = self.gen_node.def_node.gbody.local_scope.lookup(name_node.name) name_node.type = name_node.entry.type self.substitutions[node] = name_node diff --git a/tests/memoryview/memoryview.pyx b/tests/memoryview/memoryview.pyx index d2832a0b6..2c5de40b5 100644 --- a/tests/memoryview/memoryview.pyx +++ b/tests/memoryview/memoryview.pyx @@ -443,7 +443,9 @@ def type_infer(double[:, :] arg): @cython.test_fail_if_path_exists("//CoerceToPyTypeNode") def memview_iter(double[:, :] arg): """ - memview_iter(DoubleMockBuffer("C", range(6), (2,3))) + >>> memview_iter(DoubleMockBuffer("C", range(6), (2,3))) + acquired C + released C True """ cdef double total = 0 diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx index c8c6eb7fa..773c2ec95 100644 --- 
a/tests/memoryview/memslice.pyx +++ b/tests/memoryview/memslice.pyx @@ -2669,3 +2669,32 @@ def test_local_in_closure(a): def inner(): return (a_view[0], a_view[1]) return inner + +@testcase +def test_local_in_generator_expression(a, initialize, execute_now): + """ + >>> A1 = IntMockBuffer("A1", range(6), shape=(6,)) + >>> A2 = IntMockBuffer("A2", range(6), shape=(6,)) + >>> test_local_in_generator_expression(A1, initialize=False, execute_now=False) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + UnboundLocalError... + + >>> test_local_in_generator_expression(A1, initialize=True, execute_now=True) + acquired A1 + released A1 + True + + >>> genexp = test_local_in_generator_expression(A2, initialize=True, execute_now=False) + acquired A2 + >>> sum(genexp) + released A2 + 2 + """ + cdef int[:] a_view + if initialize: + a_view = a + if execute_now: + return any(ai > 3 for ai in a_view) + else: + return (ai > 3 for ai in a_view) -- cgit v1.2.1 From 68ce046a823d8d5efc9572ce6510326972da1864 Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 10 Aug 2022 08:11:53 +0100 Subject: Set is_variable on "special" entries (#4965) Fixes a small omission in https://github.com/cython/cython/commit/f6edb652db32e12505be8bc4767e64b50790f378 --- Cython/Compiler/Builtin.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Cython/Compiler/Builtin.py b/Cython/Compiler/Builtin.py index 0e904cf81..3843f12df 100644 --- a/Cython/Compiler/Builtin.py +++ b/Cython/Compiler/Builtin.py @@ -484,6 +484,7 @@ def get_known_standard_library_module_scope(module_name): entry = mod.declare_type(name, indexed_type, pos = None) var_entry = Entry(name, None, PyrexTypes.py_object_type) var_entry.is_pyglobal = True + var_entry.is_variable = True var_entry.scope = mod entry.as_variable = var_entry @@ -493,6 +494,7 @@ def get_known_standard_library_module_scope(module_name): entry = mod.declare_type(name, indexed_type, pos = None) var_entry = Entry(name, None, PyrexTypes.py_object_type) 
var_entry.is_pyglobal = True + var_entry.is_variable = True var_entry.scope = mod entry.as_variable = var_entry _known_module_scopes[module_name] = mod -- cgit v1.2.1 From 278d07603e9ae9ce167f489c3c9855cb424cd5e7 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 10 Aug 2022 16:05:59 +0200 Subject: Update the GH issue template to hint more at trying out 3.0 first. --- .github/ISSUE_TEMPLATE/bug_report.md | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index be0b183dc..0bcfa708d 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -4,7 +4,6 @@ about: Create a report to help us improve title: "[BUG] " labels: '' assignees: '' - --- **Describe the bug** -A clear and concise description of what the bug is. +[A clear and concise description of what the bug is.] -**To Reproduce** -Code to reproduce the behaviour: +**Code to reproduce the behaviour:** ```cython +... ``` -**Expected behavior** -A clear and concise description of what you expected to happen. +**Expected behaviour** +[A clear and concise description of what you expected to happen.] **Environment (please complete the following information):** - OS: [e.g. Linux, Windows, macOS] - - Python version [e.g. 3.8.4] - - Cython version [e.g. 0.29.18] + - Python version [e.g. 3.10.2] + - Cython version [e.g. 3.0.0a11] **Additional context** -Add any other context about the problem here. +[Add any other context about the problem here.] -- cgit v1.2.1 From 78a3af71a95c2f70ef0f65dab39ab64a6d427a8d Mon Sep 17 00:00:00 2001 From: da-woods Date: Thu, 11 Aug 2022 10:01:06 +0100 Subject: Switch to YAML issue templates (#4971) I think it's fairly hard to read the markdown forms and pick out instructional comments, bits that you're supposed to be modifying, etc. 
(even when you're trying to do it right and not just click through to submit your support question as quickly as possible). Hopefully the YAML forms are a little clearer - they at least separate "instructions" from "user input" clearly. I've also added a 3rd "other" free-form form for anything that doesn't quite fit into the two categories. We may regret that but it seemed sensible to have it as an option first. --- .github/ISSUE_TEMPLATE/bug_report.md | 40 ---------------------- .github/ISSUE_TEMPLATE/bug_report.yml | 55 ++++++++++++++++++++++++++++++ .github/ISSUE_TEMPLATE/feature_request.md | 41 ---------------------- .github/ISSUE_TEMPLATE/feature_request.yml | 46 +++++++++++++++++++++++++ .github/ISSUE_TEMPLATE/other.md | 20 +++++++++++ 5 files changed, 121 insertions(+), 81 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/bug_report.yml delete mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.yml create mode 100644 .github/ISSUE_TEMPLATE/other.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 0bcfa708d..000000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: "[BUG] " -labels: '' -assignees: '' ---- - - - -**Describe the bug** -[A clear and concise description of what the bug is.] - -**Code to reproduce the behaviour:** -```cython -... -``` - -**Expected behaviour** -[A clear and concise description of what you expected to happen.] - -**Environment (please complete the following information):** - - OS: [e.g. Linux, Windows, macOS] - - Python version [e.g. 3.10.2] - - Cython version [e.g. 3.0.0a11] - -**Additional context** -[Add any other context about the problem here.] 
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 000000000..1c2f8aa83 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,55 @@ +name: Bug Report +description: File a bug report +title: "[BUG] " +body: + - type: markdown + attributes: + value: | + **PLEASE READ THIS FIRST:** + - DO NOT use the bug and feature tracker for general questions and support requests. + Use the [`cython-users`](https://groups.google.com/g/cython-users) mailing list instead. + It has a wider audience, so you get more and better answers. + - Did you search for SIMILAR ISSUES already? + Please do, it helps to save us precious time that we otherwise could not invest into development. + - Did you try the LATEST MASTER BRANCH or pre-release? + It might already have what you want to report. + Specifically, the legacy stable 0.29.x release series receives only important low-risk bug fixes. + Also see the [Changelog](https://github.com/cython/cython/blob/master/CHANGES.rst) regarding recent changes + - type: textarea + id: describe + attributes: + label: "Describe the bug" + description: "A clear and concise description of what the bug is." + placeholder: "Tell us what you see!" + validations: + required: true + - type: textarea + id: reproduce + attributes: + label: "Code to reproduce the behaviour:" + value: | + ```cython + # example code + ``` + - type: textarea + id: expected + attributes: + label: "Expected behaviour" + description: "A clear and concise description of what you expected to happen." + - type: textarea + id: environment + attributes: + label: Environment + description: "please complete the following information" + value: | + OS: [e.g. Linux, Windows, macOS] + Python version [e.g. 3.10.2] + Cython version [e.g. 3.0.0a11] + validations: + required: true + - type: textarea + id: context + attributes: + label: Additional context + description: Add any other context about the problem here. 
+ diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index c35dfae51..000000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,41 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this project -title: "[ENH] " -labels: '' -assignees: '' - ---- - - - -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. In my code, I would like to do [...] -```cython -# add use case related code here -``` - -**Describe the solution you'd like** -A clear and concise description of what you want to happen, including code examples if applicable. -```cython -# add a proposed code/syntax example here -``` - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. -```cython -# add alternative code/syntax proposals here -``` - -**Additional context** -Add any other context about the feature request here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 000000000..3d46fe3bc --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,46 @@ +name: Feature request +description: Suggest an idea for this project +title: "[ENH] " +body: + - type: markdown + attributes: + value: | + **PLEASE READ THIS FIRST:** + - DO NOT use the bug and feature tracker for general questions and support requests. + Use the [`cython-users`](https://groups.google.com/g/cython-users) mailing list instead. + It has a wider audience, so you get more and better answers. + - Did you search for SIMILAR ISSUES already? + Please do, it helps to save us precious time that we otherwise could not invest into development. + - Did you try the LATEST MASTER BRANCH or pre-release? + It might already have what you want to report. 
+ Specifically, the legacy stable 0.29.x release series receives only important low-risk bug fixes. + Also see the [Changelog](https://github.com/cython/cython/blob/master/CHANGES.rst) regarding recent changes + - type: textarea + id: problem + attributes: + label: "Is your feature request related to a problem? Please describe." + description: "A clear and concise description of what the problem is." + value: | + In my code, I would like to do [...] + ```cython + # add use case related code here + ``` + validations: + required: true + - type: textarea + id: solution + attributes: + label: "Describe the solution you'd like." + description: "A clear and concise description of what you want to happen, including code examples if applicable." + placeholder: add a proposed code/syntax example here + - type: textarea + id: alternatives + attributes: + label: "Describe alternatives you've considered." + description: "A clear and concise description of any alternative solutions or features you've considered." + placeholder: "add alternative code/syntax proposals here" + - type: textarea + id: context + attributes: + label: "Additional context" + description: "Add any other context about the feature request here." diff --git a/.github/ISSUE_TEMPLATE/other.md b/.github/ISSUE_TEMPLATE/other.md new file mode 100644 index 000000000..95aa5153e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/other.md @@ -0,0 +1,20 @@ +--- +name: Other +about: Anything that does not qualify as either "bug" or "feature request". DO NOT post support requests here. +title: "" +labels: '' +assignees: '' + +--- + + -- cgit v1.2.1 From 13e5024dc18a47d87f8347b76aa071c8750212f4 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Thu, 11 Aug 2022 14:23:32 +0200 Subject: Add some more gitignores, in case we start (or try) compiling more of Cython's modules. 
--- .gitignore | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/.gitignore b/.gitignore index 979aa41e1..18940cd9a 100644 --- a/.gitignore +++ b/.gitignore @@ -10,17 +10,21 @@ __pycache__ .*cache*/ *venv*/ -Cython/Compiler/*.c -Cython/Plex/*.c -Cython/Runtime/refnanny.c -Cython/Tempita/*.c -Cython/*.c -Cython/*.html -Cython/*/*.html - -Tools/*.elc -Demos/*.html -Demos/*/*.html +/Cython/Build/*.c +/Cython/Compiler/*.c +/Cython/Debugger/*.c +/Cython/Distutils/*.c +/Cython/Parser/*.c +/Cython/Plex/*.c +/Cython/Runtime/refnanny.c +/Cython/Tempita/*.c +/Cython/*.c +/Cython/*.html +/Cython/*/*.html + +/Tools/*.elc +/Demos/*.html +/Demos/*/*.html /TEST_TMP/ /build/ -- cgit v1.2.1 From a30eba6f6ea2fffe4a9670c3403f17e416bf2227 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Thu, 11 Aug 2022 14:23:54 +0200 Subject: CI: Prevent changes to irrelevant files from triggering a CI run. --- .github/workflows/ci.yml | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ec8dc83e6..09644b52b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,6 +1,17 @@ name: CI -on: [push, pull_request, workflow_dispatch] +on: + push: + paths: + - '**' + - '!.github/**' + - '.github/workflows/ci.yml' + pull_request: + paths: + - '**' + - '!.github/**' + - '.github/workflows/ci.yml' + workflow_dispatch: jobs: ci: -- cgit v1.2.1 From 6006a7e8ad4be5f9fcd269b472ee955a3ef931a4 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Thu, 11 Aug 2022 22:18:31 +0200 Subject: [Doc] Document final C methods (#4960) --- docs/src/userguide/extension_types.rst | 141 ++++++++++++++++++--------------- 1 file changed, 79 insertions(+), 62 deletions(-) diff --git a/docs/src/userguide/extension_types.rst b/docs/src/userguide/extension_types.rst index b2690dc49..3d0c2257f 100644 --- a/docs/src/userguide/extension_types.rst +++ b/docs/src/userguide/extension_types.rst @@ 
-479,6 +479,64 @@ when it is deleted: We don't have: ['camembert', 'cheddar'] We don't have: [] + +C methods +========= + +Extension types can have C methods as well as Python methods. Like C +functions, C methods are declared using + +* :keyword:`cdef` instead of :keyword:`def` or ``@cfunc`` decorator for *C methods*, or +* :keyword:`cpdef` instead of :keyword:`def` or ``@ccall`` decorator for *hybrid methods*. + +C methods are "virtual", and may be overridden in derived extension types. +In addition, :keyword:`cpdef`/``@ccall`` methods can even be overridden by Python +methods when called as C method. This adds a little to their calling overhead +compared to a :keyword:`cdef`/``@cfunc`` method: + +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/pets.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/extension_types/pets.pyx + +.. code-block:: text + + # Output + p1: + This parrot is resting. + p2: + This parrot is resting. + Lovely plumage! + +The above example also illustrates that a C method can call an inherited C +method using the usual Python technique, i.e.:: + + Parrot.describe(self) + +:keyword:`cdef`/``@ccall`` methods can be declared static by using the ``@staticmethod`` decorator. +This can be especially useful for constructing classes that take non-Python compatible types: + +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.pyx + +.. note:: + + Cython currently does not support decorating :keyword:`cdef`/``@ccall`` methods with + the ``@classmethod`` decorator. + + .. _subclassing: Subclassing @@ -519,7 +577,7 @@ extern extension type. 
If the base type is defined in another Cython module, it must either be declared as an extern extension type or imported using the :keyword:`cimport` statement or importing from the special ``cython.cimports`` package. -Multiple inheritance is supported, however the second and subsequent base +Multiple inheritance is supported, however the second and subsequent base classes must be an ordinary Python class (not an extension type or a built-in type). @@ -530,7 +588,7 @@ must be compatible). There is a way to prevent extension types from being subtyped in Python. This is done via the ``final`` directive, -usually set on an extension type using a decorator: +usually set on an extension type or C method using a decorator: .. tabs:: @@ -543,6 +601,13 @@ usually set on an extension type using a decorator: @cython.final @cython.cclass class Parrot: + def describe(self): pass + + @cython.cclass + class Lizard: + + @cython.final + @cython.cfunc def done(self): pass .. group-tab:: Cython @@ -553,72 +618,24 @@ usually set on an extension type using a decorator: @cython.final cdef class Parrot: - def done(self): pass - -Trying to create a Python subclass from this type will raise a -:class:`TypeError` at runtime. Cython will also prevent subtyping a -final type inside of the same module, i.e. creating an extension type -that uses a final type as its base type will fail at compile time. -Note, however, that this restriction does not currently propagate to -other extension modules, so even final extension types can still be -subtyped at the C level by foreign code. - - -C methods -========= + def describe(self): pass -Extension types can have C methods as well as Python methods. Like C -functions, C methods are declared using - -* :keyword:`cdef` instead of :keyword:`def` or ``@cfunc`` decorator for *C methods*, or -* :keyword:`cpdef` instead of :keyword:`def` or ``@ccall`` decorator for *hybrid methods*. 
- -C methods are "virtual", and may be overridden in derived -extension types. In addition, :keyword:`cpdef`/``@ccall`` methods can even be overridden by Python -methods when called as C method. This adds a little to their calling overhead -compared to a :keyword:`cdef`/``@cfunc`` method: - -.. tabs:: - - .. group-tab:: Pure Python - - .. literalinclude:: ../../examples/userguide/extension_types/pets.py - - .. group-tab:: Cython - - .. literalinclude:: ../../examples/userguide/extension_types/pets.pyx -.. code-block:: text - - # Output - p1: - This parrot is resting. - p2: - This parrot is resting. - Lovely plumage! - -The above example also illustrates that a C method can call an inherited C -method using the usual Python technique, i.e.:: - - Parrot.describe(self) - -:keyword:`cdef`/``@ccall`` methods can be declared static by using the ``@staticmethod`` decorator. -This can be especially useful for constructing classes that take non-Python -compatible types: - -.. tabs:: - - .. group-tab:: Pure Python - - .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.py + @cython.cclass + cdef class Lizard: - .. group-tab:: Cython - .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.pyx + @cython.final + cdef done(self): pass -.. note:: +Trying to create a Python subclass from a final type or overriding a final method will raise +a :class:`TypeError` at runtime. Cython will also prevent subtyping a +final type or overriding a final method inside of the same module, i.e. creating +an extension type that uses a final type as its base type will fail at compile time. +Note, however, that this restriction does not currently propagate to +other extension modules, so Cython is unable to prevent final extension types +from being subtyped at the C level by foreign code. - Cython currently does not support decorating :keyword:`cdef`/``@ccall`` methods with ``@classmethod`` decorator. .. 
_forward_declaring_extension_types: -- cgit v1.2.1 From 07c03d343983c2df07ad0288fc64583776acc9fb Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Fri, 12 Aug 2022 12:27:25 +0200 Subject: CI: Silence an annoying C compiler command line warning. --- Tools/ci-run.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index 0a5921d64..7328dc034 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -114,7 +114,8 @@ if [[ $OSTYPE == "msys" ]]; then # for MSVC cl # 4127 warns that a conditional expression is constant, should be fixed here https://github.com/cython/cython/pull/4317 # (off by default) 5045 warns that the compiler will insert Spectre mitigations for memory load if the /Qspectre switch is specified # (off by default) 4820 warns about the code in Python\3.9.6\x64\include ... - CFLAGS="-Od /Z7 /W4 /wd4711 /wd4127 /wd5045 /wd4820" + # 9025 warns that we override prior command line arguments with our own ones + CFLAGS="-Od /Z7 /W4 /wd4711 /wd4127 /wd5045 /wd4820 /wd9025" else CFLAGS="-O0 -ggdb -Wall -Wextra" fi -- cgit v1.2.1 From 18a3413283a9fcdf921acfd9d5b5dee00eeb2d69 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Fri, 12 Aug 2022 12:28:57 +0200 Subject: Test and fix extended_iglob() with Windows paths (as used by "cythonize" when compiling whole packages). --- Cython/Build/Dependencies.py | 11 ++- Cython/Build/Tests/TestDependencies.py | 142 +++++++++++++++++++++++++++++++++ 2 files changed, 149 insertions(+), 4 deletions(-) create mode 100644 Cython/Build/Tests/TestDependencies.py diff --git a/Cython/Build/Dependencies.py b/Cython/Build/Dependencies.py index e0414f8da..c34f32792 100644 --- a/Cython/Build/Dependencies.py +++ b/Cython/Build/Dependencies.py @@ -85,11 +85,14 @@ def extended_iglob(pattern): for path in extended_iglob(before + case + after): yield path return - if '**/' in pattern: + + # We always accept '/' and also '\' on Windows, + # because '/' is generally common for relative paths. 
+ if '**/' in pattern or os.sep == '\\' and '**\\' in pattern: seen = set() - first, rest = pattern.split('**/', 1) + first, rest = re.split(r'\*\*[%s]' % ('/\\\\' if os.sep == '\\' else '/'), pattern, 1) if first: - first = iglob(first+'/') + first = iglob(first + os.sep) else: first = [''] for root in first: @@ -97,7 +100,7 @@ def extended_iglob(pattern): if path not in seen: seen.add(path) yield path - for path in extended_iglob(join_path(root, '*', '**/' + rest)): + for path in extended_iglob(join_path(root, '*', '**', rest)): if path not in seen: seen.add(path) yield path diff --git a/Cython/Build/Tests/TestDependencies.py b/Cython/Build/Tests/TestDependencies.py new file mode 100644 index 000000000..d3888117d --- /dev/null +++ b/Cython/Build/Tests/TestDependencies.py @@ -0,0 +1,142 @@ +import contextlib +import os.path +import sys +import tempfile +import unittest +from io import open +from os.path import join as pjoin + +from ..Dependencies import extended_iglob + + +@contextlib.contextmanager +def writable_file(dir_path, filename): + with open(pjoin(dir_path, filename), "w", encoding="utf8") as f: + yield f + + +class TestGlobbing(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls._orig_dir = os.getcwd() + if sys.version_info[0] < 3: + temp_path = cls._tmpdir = tempfile.mkdtemp() + else: + cls._tmpdir = tempfile.TemporaryDirectory() + temp_path = cls._tmpdir.name + os.chdir(temp_path) + + for dir1 in "abcd": + for dir1x in [dir1, dir1 + 'x']: + for dir2 in "xyz": + dir_path = pjoin(dir1x, dir2) + os.makedirs(dir_path) + with writable_file(dir_path, "file2_pyx.pyx") as f: + f.write(u'""" PYX """') + with writable_file(dir_path, "file2_py.py") as f: + f.write(u'""" PY """') + + with writable_file(dir1x, "file1_pyx.pyx") as f: + f.write(u'""" PYX """') + with writable_file(dir1x, "file1_py.py") as f: + f.write(u'""" PY """') + + @classmethod + def tearDownClass(cls): + os.chdir(cls._orig_dir) + if sys.version_info[0] < 3: + import shutil + 
shutil.rmtree(cls._tmpdir) + else: + cls._tmpdir.cleanup() + + def files_equal(self, pattern, expected_files): + expected_files = sorted(expected_files) + # It's the users's choice whether '/' will appear on Windows. + matched_files = sorted(path.replace('/', os.sep) for path in extended_iglob(pattern)) + self.assertListEqual(matched_files, expected_files) # / + + # Special case for Windows: also support '\' in patterns. + if os.sep == '\\' and '/' in pattern: + matched_files = sorted(extended_iglob(pattern.replace('/', '\\'))) + self.assertListEqual(matched_files, expected_files) # \ + + def test_extended_iglob_simple(self): + ax_files = [pjoin("a", "x", "file2_pyx.pyx"), pjoin("a", "x", "file2_py.py")] + self.files_equal("a/x/*", ax_files) + self.files_equal("a/x/*.c12", []) + self.files_equal("a/x/*.{py,pyx,c12}", ax_files) + self.files_equal("a/x/*.{py,pyx}", ax_files) + self.files_equal("a/x/*.{pyx}", ax_files[:1]) + self.files_equal("a/x/*.pyx", ax_files[:1]) + self.files_equal("a/x/*.{py}", ax_files[1:]) + self.files_equal("a/x/*.py", ax_files[1:]) + + def test_extended_iglob_simple_star(self): + for basedir in "ad": + files = [ + pjoin(basedir, dirname, filename) + for dirname in "xyz" + for filename in ["file2_pyx.pyx", "file2_py.py"] + ] + self.files_equal(basedir + "/*/*", files) + self.files_equal(basedir + "/*/*.c12", []) + self.files_equal(basedir + "/*/*.{py,pyx,c12}", files) + self.files_equal(basedir + "/*/*.{py,pyx}", files) + self.files_equal(basedir + "/*/*.{pyx}", files[::2]) + self.files_equal(basedir + "/*/*.pyx", files[::2]) + self.files_equal(basedir + "/*/*.{py}", files[1::2]) + self.files_equal(basedir + "/*/*.py", files[1::2]) + + for subdir in "xy*": + files = [ + pjoin(basedir, dirname, filename) + for dirname in "xyz" + if subdir in ('*', dirname) + for filename in ["file2_pyx.pyx", "file2_py.py"] + ] + path = basedir + '/' + subdir + '/' + self.files_equal(path + "*", files) + self.files_equal(path + "*.{py,pyx}", files) + 
self.files_equal(path + "*.{pyx}", files[::2]) + self.files_equal(path + "*.pyx", files[::2]) + self.files_equal(path + "*.{py}", files[1::2]) + self.files_equal(path + "*.py", files[1::2]) + + def test_extended_iglob_double_star(self): + basedirs = os.listdir(".") + files = [ + pjoin(basedir, dirname, filename) + for basedir in basedirs + for dirname in "xyz" + for filename in ["file2_pyx.pyx", "file2_py.py"] + ] + all_files = [ + pjoin(basedir, filename) + for basedir in basedirs + for filename in ["file1_pyx.pyx", "file1_py.py"] + ] + files + self.files_equal("*/*/*", files) + self.files_equal("*/*/**/*", files) + self.files_equal("*/**/*.*", all_files) + self.files_equal("**/*.*", all_files) + self.files_equal("*/**/*.c12", []) + self.files_equal("**/*.c12", []) + self.files_equal("*/*/*.{py,pyx,c12}", files) + self.files_equal("*/*/**/*.{py,pyx,c12}", files) + self.files_equal("*/**/*/*.{py,pyx,c12}", files) + self.files_equal("**/*/*/*.{py,pyx,c12}", files) + self.files_equal("**/*.{py,pyx,c12}", all_files) + self.files_equal("*/*/*.{py,pyx}", files) + self.files_equal("**/*/*/*.{py,pyx}", files) + self.files_equal("*/**/*/*.{py,pyx}", files) + self.files_equal("**/*.{py,pyx}", all_files) + self.files_equal("*/*/*.{pyx}", files[::2]) + self.files_equal("**/*.{pyx}", all_files[::2]) + self.files_equal("*/**/*/*.pyx", files[::2]) + self.files_equal("*/*/*.pyx", files[::2]) + self.files_equal("**/*.pyx", all_files[::2]) + self.files_equal("*/*/*.{py}", files[1::2]) + self.files_equal("**/*.{py}", all_files[1::2]) + self.files_equal("*/*/*.py", files[1::2]) + self.files_equal("**/*.py", all_files[1::2]) -- cgit v1.2.1 From 0509398738ccf970ef9863a7198fdd814660169c Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Fri, 12 Aug 2022 12:48:29 +0200 Subject: Allow Windows path names also in test error/warning output. 
--- runtests.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/runtests.py b/runtests.py index 7cbc6e585..be32838c2 100755 --- a/runtests.py +++ b/runtests.py @@ -1431,6 +1431,8 @@ class CythonCompileTestCase(unittest.TestCase): def _match_output(self, expected_output, actual_output, write): try: for expected, actual in zip(expected_output, actual_output): + if expected != actual and '\\' in actual and os.sep == '\\' and '/' in expected and '\\' not in expected: + expected = expected.replace('/', '\\') self.assertEqual(expected, actual) if len(actual_output) < len(expected_output): expected = expected_output[len(actual_output)] -- cgit v1.2.1 From a7e8d60afa928831862c1997d3c4bf8c85d6f3b6 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Sat, 13 Aug 2022 12:43:37 +0200 Subject: Revert "CI: Silence an annoying C compiler command line warning." MSCV does not seem to support silencing warnings regarding the command line. This reverts commit 07c03d343983c2df07ad0288fc64583776acc9fb. --- Tools/ci-run.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index 7328dc034..0a5921d64 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -114,8 +114,7 @@ if [[ $OSTYPE == "msys" ]]; then # for MSVC cl # 4127 warns that a conditional expression is constant, should be fixed here https://github.com/cython/cython/pull/4317 # (off by default) 5045 warns that the compiler will insert Spectre mitigations for memory load if the /Qspectre switch is specified # (off by default) 4820 warns about the code in Python\3.9.6\x64\include ... 
- # 9025 warns that we override prior command line arguments with our own ones - CFLAGS="-Od /Z7 /W4 /wd4711 /wd4127 /wd5045 /wd4820 /wd9025" + CFLAGS="-Od /Z7 /W4 /wd4711 /wd4127 /wd5045 /wd4820" else CFLAGS="-O0 -ggdb -Wall -Wextra" fi -- cgit v1.2.1 From 14a818772974232d46c56aff916ed0311e9f2dd9 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Tue, 16 Aug 2022 18:17:10 +0200 Subject: [Doc] Add pure python mode to parallelism.rst (#4974) --- .../userguide/parallelism/breaking_loop.py | 15 ++++ .../userguide/parallelism/breaking_loop.pyx | 2 + .../userguide/parallelism/cimport_openmp.py | 11 +++ .../userguide/parallelism/cimport_openmp.pyx | 2 - .../userguide/parallelism/memoryview_sum.py | 7 ++ .../userguide/parallelism/memoryview_sum.pyx | 7 ++ docs/examples/userguide/parallelism/parallel.py | 30 ++++++++ docs/examples/userguide/parallelism/parallel.pyx | 30 ++++++++ docs/examples/userguide/parallelism/setup.py | 16 ---- docs/examples/userguide/parallelism/setup_py.py | 16 ++++ docs/examples/userguide/parallelism/setup_pyx.py | 16 ++++ docs/examples/userguide/parallelism/simple_sum.py | 10 +++ docs/src/userguide/parallelism.rst | 89 ++++++++++++++-------- 13 files changed, 202 insertions(+), 49 deletions(-) create mode 100644 docs/examples/userguide/parallelism/breaking_loop.py create mode 100644 docs/examples/userguide/parallelism/cimport_openmp.py create mode 100644 docs/examples/userguide/parallelism/memoryview_sum.py create mode 100644 docs/examples/userguide/parallelism/memoryview_sum.pyx create mode 100644 docs/examples/userguide/parallelism/parallel.py create mode 100644 docs/examples/userguide/parallelism/parallel.pyx delete mode 100644 docs/examples/userguide/parallelism/setup.py create mode 100644 docs/examples/userguide/parallelism/setup_py.py create mode 100644 docs/examples/userguide/parallelism/setup_pyx.py create mode 100644 docs/examples/userguide/parallelism/simple_sum.py diff --git a/docs/examples/userguide/parallelism/breaking_loop.py 
b/docs/examples/userguide/parallelism/breaking_loop.py new file mode 100644 index 000000000..00d0225b5 --- /dev/null +++ b/docs/examples/userguide/parallelism/breaking_loop.py @@ -0,0 +1,15 @@ +from cython.parallel import prange + +@cython.exceptval(-1) +@cython.cfunc +def func(n: cython.Py_ssize_t) -> cython.int: + i: cython.Py_ssize_t + + for i in prange(n, nogil=True): + if i == 8: + with cython.gil: + raise Exception() + elif i == 4: + break + elif i == 2: + return i diff --git a/docs/examples/userguide/parallelism/breaking_loop.pyx b/docs/examples/userguide/parallelism/breaking_loop.pyx index 2cf562edf..e7445082d 100644 --- a/docs/examples/userguide/parallelism/breaking_loop.pyx +++ b/docs/examples/userguide/parallelism/breaking_loop.pyx @@ -1,5 +1,7 @@ from cython.parallel import prange + + cdef int func(Py_ssize_t n) except -1: cdef Py_ssize_t i diff --git a/docs/examples/userguide/parallelism/cimport_openmp.py b/docs/examples/userguide/parallelism/cimport_openmp.py new file mode 100644 index 000000000..9288a4381 --- /dev/null +++ b/docs/examples/userguide/parallelism/cimport_openmp.py @@ -0,0 +1,11 @@ +# tag: openmp + +from cython.parallel import parallel +from cython.cimports.openmp import omp_set_dynamic, omp_get_num_threads + +num_threads = cython.declare(cython.int) + +omp_set_dynamic(1) +with cython.nogil, parallel(): + num_threads = omp_get_num_threads() + # ... diff --git a/docs/examples/userguide/parallelism/cimport_openmp.pyx b/docs/examples/userguide/parallelism/cimport_openmp.pyx index 797936fe7..54d5f18db 100644 --- a/docs/examples/userguide/parallelism/cimport_openmp.pyx +++ b/docs/examples/userguide/parallelism/cimport_openmp.pyx @@ -1,6 +1,4 @@ # tag: openmp -# You can ignore the previous line. -# It's for internal testing of the Cython documentation. 
from cython.parallel cimport parallel cimport openmp diff --git a/docs/examples/userguide/parallelism/memoryview_sum.py b/docs/examples/userguide/parallelism/memoryview_sum.py new file mode 100644 index 000000000..6cff5d587 --- /dev/null +++ b/docs/examples/userguide/parallelism/memoryview_sum.py @@ -0,0 +1,7 @@ +from cython.parallel import prange + +def func(x: cython.double[:], alpha: cython.double): + i: cython.Py_ssize_t + + for i in prange(x.shape[0], nogil=True): + x[i] = alpha * x[i] diff --git a/docs/examples/userguide/parallelism/memoryview_sum.pyx b/docs/examples/userguide/parallelism/memoryview_sum.pyx new file mode 100644 index 000000000..bdc1c9feb --- /dev/null +++ b/docs/examples/userguide/parallelism/memoryview_sum.pyx @@ -0,0 +1,7 @@ +from cython.parallel import prange + +def func(double[:] x, double alpha): + cdef Py_ssize_t i + + for i in prange(x.shape[0], nogil=True): + x[i] = alpha * x[i] diff --git a/docs/examples/userguide/parallelism/parallel.py b/docs/examples/userguide/parallelism/parallel.py new file mode 100644 index 000000000..0fb62d10f --- /dev/null +++ b/docs/examples/userguide/parallelism/parallel.py @@ -0,0 +1,30 @@ +from cython.parallel import parallel, prange +from cython.cimports.libc.stdlib import abort, malloc, free + +@cython.nogil +@cython.cfunc +def func(buf: cython.p_int) -> cython.void: + pass + # ... 
+ +idx = cython.declare(cython.Py_ssize_t) +i = cython.declare(cython.Py_ssize_t) +j = cython.declare(cython.Py_ssize_t) +n = cython.declare(cython.Py_ssize_t, 100) +local_buf = cython.declare(p_int) +size = cython.declare(cython.size_t, 10) + +with cython.nogil, parallel(): + local_buf: cython.p_int = cython.cast(cython.p_int, malloc(cython.sizeof(cython.int) * size)) + if local_buf is cython.NULL: + abort() + + # populate our local buffer in a sequential loop + for i in range(size): + local_buf[i] = i * 2 + + # share the work using the thread-local buffer(s) + for j in prange(n, schedule='guided'): + func(local_buf) + + free(local_buf) diff --git a/docs/examples/userguide/parallelism/parallel.pyx b/docs/examples/userguide/parallelism/parallel.pyx new file mode 100644 index 000000000..2a952d537 --- /dev/null +++ b/docs/examples/userguide/parallelism/parallel.pyx @@ -0,0 +1,30 @@ +from cython.parallel import parallel, prange +from libc.stdlib cimport abort, malloc, free + + + +cdef void func(int *buf) nogil: + pass + # ... 
+ +cdef Py_ssize_t idx, i, j, n = 100 +cdef int * local_buf +cdef size_t size = 10 + + + + +with nogil, parallel(): + local_buf = malloc(sizeof(int) * size) + if local_buf is NULL: + abort() + + # populate our local buffer in a sequential loop + for i in range(size): + local_buf[i] = i * 2 + + # share the work using the thread-local buffer(s) + for j in prange(n, schedule='guided'): + func(local_buf) + + free(local_buf) diff --git a/docs/examples/userguide/parallelism/setup.py b/docs/examples/userguide/parallelism/setup.py deleted file mode 100644 index fe6d0a64c..000000000 --- a/docs/examples/userguide/parallelism/setup.py +++ /dev/null @@ -1,16 +0,0 @@ -from setuptools import Extension, setup -from Cython.Build import cythonize - -ext_modules = [ - Extension( - "hello", - ["hello.pyx"], - extra_compile_args=['-fopenmp'], - extra_link_args=['-fopenmp'], - ) -] - -setup( - name='hello-parallel-world', - ext_modules=cythonize(ext_modules), -) diff --git a/docs/examples/userguide/parallelism/setup_py.py b/docs/examples/userguide/parallelism/setup_py.py new file mode 100644 index 000000000..85a037dc5 --- /dev/null +++ b/docs/examples/userguide/parallelism/setup_py.py @@ -0,0 +1,16 @@ +from setuptools import Extension, setup +from Cython.Build import cythonize + +ext_modules = [ + Extension( + "hello", + ["hello.py"], + extra_compile_args=['-fopenmp'], + extra_link_args=['-fopenmp'], + ) +] + +setup( + name='hello-parallel-world', + ext_modules=cythonize(ext_modules), +) diff --git a/docs/examples/userguide/parallelism/setup_pyx.py b/docs/examples/userguide/parallelism/setup_pyx.py new file mode 100644 index 000000000..fe6d0a64c --- /dev/null +++ b/docs/examples/userguide/parallelism/setup_pyx.py @@ -0,0 +1,16 @@ +from setuptools import Extension, setup +from Cython.Build import cythonize + +ext_modules = [ + Extension( + "hello", + ["hello.pyx"], + extra_compile_args=['-fopenmp'], + extra_link_args=['-fopenmp'], + ) +] + +setup( + name='hello-parallel-world', + 
ext_modules=cythonize(ext_modules), +) diff --git a/docs/examples/userguide/parallelism/simple_sum.py b/docs/examples/userguide/parallelism/simple_sum.py new file mode 100644 index 000000000..f952a8556 --- /dev/null +++ b/docs/examples/userguide/parallelism/simple_sum.py @@ -0,0 +1,10 @@ +from cython.parallel import prange + +i = cython.declare(cython.int) +n = cython.declare(cython.int, 30) +sum = cython.declare(cython.int, 0) + +for i in prange(n, nogil=True): + sum += i + +print(sum) diff --git a/docs/src/userguide/parallelism.rst b/docs/src/userguide/parallelism.rst index e9d473e66..7cdae95b3 100644 --- a/docs/src/userguide/parallelism.rst +++ b/docs/src/userguide/parallelism.rst @@ -8,6 +8,9 @@ Using Parallelism ********************************** +.. include:: + ../two-syntax-variants-used + Cython supports native parallelism through the :py:mod:`cython.parallel` module. To use this kind of parallelism, the GIL must be released (see :ref:`Releasing the GIL `). @@ -87,7 +90,7 @@ It currently supports OpenMP, but later on more backends might be supported. runtime: The schedule and chunk size are taken from the runtime scheduling variable, which can be set through the ``openmp.omp_set_schedule()`` - function call, or the OMP_SCHEDULE environment variable. Note that + function call, or the ``OMP_SCHEDULE`` environment variable. Note that this essentially disables any static compile time optimisations of the scheduling code itself and may therefore show a slightly worse performance than when the same scheduling policy is statically @@ -116,17 +119,27 @@ It currently supports OpenMP, but later on more backends might be supported. Example with a reduction: -.. literalinclude:: ../../examples/userguide/parallelism/simple_sum.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/parallelism/simple_sum.py + + .. group-tab:: Cython + + .. 
literalinclude:: ../../examples/userguide/parallelism/simple_sum.pyx -Example with a :term:`typed memoryview` (e.g. a NumPy array):: +Example with a :term:`typed memoryview` (e.g. a NumPy array) - from cython.parallel import prange +.. tabs:: - def func(double[:] x, double alpha): - cdef Py_ssize_t i + .. group-tab:: Pure Python - for i in prange(x.shape[0]): - x[i] = alpha * x[i] + .. literalinclude:: ../../examples/userguide/parallelism/memoryview_sum.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/parallelism/memoryview_sum.pyx .. function:: parallel(num_threads=None) @@ -137,29 +150,17 @@ Example with a :term:`typed memoryview` (e.g. a NumPy array):: is also private to the prange. Variables that are private in the parallel block are unavailable after the parallel block. - Example with thread-local buffers:: - - from cython.parallel import parallel, prange - from libc.stdlib cimport abort, malloc, free + Example with thread-local buffers - cdef Py_ssize_t idx, i, n = 100 - cdef int * local_buf - cdef size_t size = 10 + .. tabs:: - with nogil, parallel(): - local_buf = malloc(sizeof(int) * size) - if local_buf is NULL: - abort() + .. group-tab:: Pure Python - # populate our local buffer in a sequential loop - for i in xrange(size): - local_buf[i] = i * 2 + .. literalinclude:: ../../examples/userguide/parallelism/parallel.py - # share the work using the thread-local buffer(s) - for i in prange(n, schedule='guided'): - func(local_buf) + .. group-tab:: Cython - free(local_buf) + .. literalinclude:: ../../examples/userguide/parallelism/parallel.pyx Later on sections might be supported in parallel blocks, to distribute code sections of work among threads. @@ -174,9 +175,17 @@ Compiling ========= To actually use the OpenMP support, you need to tell the C or C++ compiler to -enable OpenMP. For gcc this can be done as follows in a setup.py: +enable OpenMP. For gcc this can be done as follows in a ``setup.py``: + +.. tabs:: + + .. 
group-tab:: Pure Python -.. literalinclude:: ../../examples/userguide/parallelism/setup.py + .. literalinclude:: ../../examples/userguide/parallelism/setup_py.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/parallelism/setup_pyx.py For Microsoft Visual C++ compiler, use ``'/openmp'`` instead of ``'-fopenmp'``. @@ -188,13 +197,21 @@ The parallel with and prange blocks support the statements break, continue and return in nogil mode. Additionally, it is valid to use a ``with gil`` block inside these blocks, and have exceptions propagate from them. However, because the blocks use OpenMP, they can not just be left, so the -exiting procedure is best-effort. For prange() this means that the loop +exiting procedure is best-effort. For ``prange()`` this means that the loop body is skipped after the first break, return or exception for any subsequent iteration in any thread. It is undefined which value shall be returned if multiple different values may be returned, as the iterations are in no particular order: -.. literalinclude:: ../../examples/userguide/parallelism/breaking_loop.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/parallelism/breaking_loop.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/parallelism/breaking_loop.pyx In the example above it is undefined whether an exception shall be raised, whether it will simply break or whether it will return 2. @@ -203,7 +220,17 @@ Using OpenMP Functions ====================== OpenMP functions can be used by cimporting ``openmp``: -.. literalinclude:: ../../examples/userguide/parallelism/cimport_openmp.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/parallelism/cimport_openmp.py + :lines: 3- + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/parallelism/cimport_openmp.pyx + :lines: 3- .. 
rubric:: References -- cgit v1.2.1 From 322aaaf412aea8d008fdbff015731d07492d2052 Mon Sep 17 00:00:00 2001 From: Emmanuel Leblond Date: Tue, 16 Aug 2022 20:18:35 +0200 Subject: Remove cclass decorator from cython snippet in userguide (#4979) --- docs/src/userguide/extension_types.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/userguide/extension_types.rst b/docs/src/userguide/extension_types.rst index 3d0c2257f..42d77c378 100644 --- a/docs/src/userguide/extension_types.rst +++ b/docs/src/userguide/extension_types.rst @@ -621,7 +621,7 @@ usually set on an extension type or C method using a decorator: def describe(self): pass - @cython.cclass + cdef class Lizard: -- cgit v1.2.1 From cdecd15e6f52d7b9364fc5c6eb12aeda89ff7437 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Fri, 19 Aug 2022 11:52:35 +0200 Subject: Do not make SkipDeclarations a cdef class since it is only used as a mixin, also together with a main cdef base class. Making it an extension type has no apparent advantages when mixing it with Python transform classes, but risks introducing struct layout conflicts when combined with cdef transforms. Closes https://github.com/cython/cython/pull/4986 See https://github.com/cython/cython/issues/4350 See https://github.com/cython/cython/issues/4351 --- Cython/Compiler/ParseTreeTransforms.pxd | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cython/Compiler/ParseTreeTransforms.pxd b/Cython/Compiler/ParseTreeTransforms.pxd index 92f9b0601..efbb14f70 100644 --- a/Cython/Compiler/ParseTreeTransforms.pxd +++ b/Cython/Compiler/ParseTreeTransforms.pxd @@ -6,8 +6,8 @@ from .Visitor cimport ( CythonTransform, VisitorTransform, TreeVisitor, ScopeTrackingTransform, EnvTransform) -cdef class SkipDeclarations: # (object): - pass +# Don't include mixins, only the main classes. 
+#cdef class SkipDeclarations: cdef class NormalizeTree(CythonTransform): cdef bint is_in_statlist -- cgit v1.2.1 From 3424926e9c8f03061b55516d2516a9f98999399e Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 20 Aug 2022 12:10:33 +0100 Subject: Handle fused types containing indirect fused types (#4725) For example, a fused typedef that contains a vector of a fused type. This needs to do a more complex iteration to discover the range of fused types that its subtype can contain. --- Cython/Compiler/PyrexTypes.py | 23 ++++++++++++++++++++++- tests/run/fused_cpp.pyx | 37 +++++++++++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+), 1 deletion(-) diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py index fdac8412c..e8a87b42d 100644 --- a/Cython/Compiler/PyrexTypes.py +++ b/Cython/Compiler/PyrexTypes.py @@ -13,6 +13,7 @@ try: except NameError: from functools import reduce from functools import partial +from itertools import product from Cython.Utils import cached_function from .Code import UtilityCode, LazyUtilityCode, TempitaUtilityCode @@ -1835,7 +1836,27 @@ class FusedType(CType): for t in types: if t.is_fused: # recursively merge in subtypes - for subtype in t.types: + if isinstance(t, FusedType): + t_types = t.types + else: + # handle types that aren't a fused type themselves but contain fused types + # for example a C++ template where the template type is fused. 
+ t_fused_types = t.get_fused_types() + t_types = [] + for substitution in product( + *[fused_type.types for fused_type in t_fused_types] + ): + t_types.append( + t.specialize( + { + fused_type: sub + for fused_type, sub in zip( + t_fused_types, substitution + ) + } + ) + ) + for subtype in t_types: if subtype not in flattened_types: flattened_types.append(subtype) elif t not in flattened_types: diff --git a/tests/run/fused_cpp.pyx b/tests/run/fused_cpp.pyx index 206ec01e7..95b326904 100644 --- a/tests/run/fused_cpp.pyx +++ b/tests/run/fused_cpp.pyx @@ -2,6 +2,7 @@ cimport cython from libcpp.vector cimport vector +from libcpp.map cimport map from libcpp.typeinfo cimport type_info from cython.operator cimport typeid @@ -51,3 +52,39 @@ def test_fused_ref(int x): (10, 10) """ return fused_ref(x), fused_ref[int](x) + +ctypedef fused nested_fused: + vector[cython.integral] + +cdef vec_of_fused(nested_fused v): + x = v[0] + return cython.typeof(x) + +def test_nested_fused(): + """ + >>> test_nested_fused() + int + long + """ + cdef vector[int] vi = [0,1] + cdef vector[long] vl = [0,1] + print vec_of_fused(vi) + print vec_of_fused(vl) + +ctypedef fused nested_fused2: + map[cython.integral, cython.floating] + +cdef map_of_fused(nested_fused2 m): + for pair in m: + return cython.typeof(pair.first), cython.typeof(pair.second) + +def test_nested_fused2(): + """ + >>> test_nested_fused2() + ('int', 'float') + ('long', 'double') + """ + cdef map[int, float] mif = { 0: 0.0 } + cdef map[long, double] mld = { 0: 0.0 } + print map_of_fused(mif) + print map_of_fused(mld) -- cgit v1.2.1 From 380d9ef98b22be168ba790409f09c4a9a688edb3 Mon Sep 17 00:00:00 2001 From: Max Bachmann Date: Sun, 4 Sep 2022 11:22:57 +0200 Subject: cancel unfinished CI run to save CI time (#5006) This cancels the ci for previous commits on the same branch and previous commits in a PR, which should save CI time since the CI builds do no longer stack up when pushing multiple times while the CI is still running. 
--- .github/workflows/ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 09644b52b..a29fef787 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,6 +13,10 @@ on: - '.github/workflows/ci.yml' workflow_dispatch: +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: ci: strategy: -- cgit v1.2.1 From 261536b3b929fc37dd9e7e160c8f209a3e19a5f6 Mon Sep 17 00:00:00 2001 From: Max Bachmann Date: Sun, 4 Sep 2022 11:22:57 +0200 Subject: cancel unfinished CI run to save CI time (#5006) This cancels the ci for previous commits on the same branch and previous commits in a PR, which should save CI time since the CI builds do no longer stack up when pushing multiple times while the CI is still running. --- .github/workflows/ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 60d2a862b..8b63eceae 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,6 +2,10 @@ name: CI on: [push, pull_request] +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: ci: strategy: -- cgit v1.2.1 From 5eeb363ea5117a44e687953d1ff770d8179e457a Mon Sep 17 00:00:00 2001 From: Max Bachmann Date: Sun, 4 Sep 2022 11:40:41 +0200 Subject: only use [[fallthrough]] starting C++17 (#5005) This led to warnings when building with clang and the -pedantic warnings when building for C++ versions below C++17 --- Cython/Utility/ModuleSetupCode.c | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c index d54426a4f..a03654734 100644 --- a/Cython/Utility/ModuleSetupCode.c +++ b/Cython/Utility/ModuleSetupCode.c @@ -496,13 +496,21 @@ #ifndef CYTHON_FALLTHROUGH - #if defined(__cplusplus) 
&& __cplusplus >= 201103L - #if __has_cpp_attribute(fallthrough) - #define CYTHON_FALLTHROUGH [[fallthrough]] - #elif __has_cpp_attribute(clang::fallthrough) - #define CYTHON_FALLTHROUGH [[clang::fallthrough]] - #elif __has_cpp_attribute(gnu::fallthrough) - #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #if defined(__cplusplus) + /* for clang __has_cpp_attribute(fallthrough) is true even before C++17 + * but leads to warnings with -pedantic, since it is a C++17 feature */ + #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #endif + #endif + + #ifndef CYTHON_FALLTHROUGH + #if __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif #endif #endif @@ -514,7 +522,7 @@ #endif #endif - #if defined(__clang__ ) && defined(__apple_build_version__) + #if defined(__clang__) && defined(__apple_build_version__) #if __apple_build_version__ < 7000000 /* Xcode < 7.0 */ #undef CYTHON_FALLTHROUGH #define CYTHON_FALLTHROUGH -- cgit v1.2.1 From f0a31b720ee20407976a16efd7a9cb4368fb8936 Mon Sep 17 00:00:00 2001 From: Max Bachmann Date: Sun, 4 Sep 2022 15:19:25 +0200 Subject: only cancel successive runs for pull requests (#5008) This should skip workflows only in pull requests, since the commit sha should be unique. In case the ci is run for the same commit sha (e.g. when creating new branches), I think it is fine to cancel the CI. 
--- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a29fef787..c945ff5eb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,7 +14,7 @@ on: workflow_dispatch: concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} cancel-in-progress: true jobs: -- cgit v1.2.1 From 9b989f4300f13c068d3e1486b86bb78d2d54ac19 Mon Sep 17 00:00:00 2001 From: Max Bachmann Date: Sun, 4 Sep 2022 15:19:25 +0200 Subject: only cancel successive runs for pull requests (#5008) This should skip workflows only in pull requests, since the commit sha should be unique. In case the ci is run for the same commit sha (e.g. when creating new branches), I think it is fine to cancel the CI. --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8b63eceae..ccad88a9b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,7 +3,7 @@ name: CI on: [push, pull_request] concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} cancel-in-progress: true jobs: -- cgit v1.2.1 From f5e14787a25cdceeb4d2424590edac4d0e9494a2 Mon Sep 17 00:00:00 2001 From: Max Bachmann Date: Mon, 5 Sep 2022 12:40:42 +0200 Subject: Prevent more "unused variable" C compiler warnings (GH-5004) See https://github.com/cython/cython/pull/4693 --- Cython/Compiler/ExprNodes.py | 2 +- Cython/Utility/AsyncGen.c | 2 +- Cython/Utility/CommonStructures.c | 2 +- Cython/Utility/Coroutine.c | 11 +++++------ Cython/Utility/CythonFunction.c | 6 +++--- Cython/Utility/Exceptions.c | 4 +++- Cython/Utility/ExtensionTypes.c | 6 +++--- Cython/Utility/ModuleSetupCode.c | 11 
+++++++++++ Cython/Utility/Optimize.c | 15 ++++++++------- 9 files changed, 36 insertions(+), 23 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index f6de976f7..f9e854183 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -8270,7 +8270,7 @@ class SequenceNode(ExprNode): code.put_decref(target_list, py_object_type) code.putln('%s = %s; %s = NULL;' % (target_list, sublist_temp, sublist_temp)) code.putln('#else') - code.putln('(void)%s;' % sublist_temp) # avoid warning about unused variable + code.putln('CYTHON_UNUSED_VAR(%s);' % sublist_temp) code.funcstate.release_temp(sublist_temp) code.putln('#endif') diff --git a/Cython/Utility/AsyncGen.c b/Cython/Utility/AsyncGen.c index 4b8c8f678..fa374525f 100644 --- a/Cython/Utility/AsyncGen.c +++ b/Cython/Utility/AsyncGen.c @@ -1245,7 +1245,7 @@ static int __pyx_AsyncGen_init(PyObject *module) { #if CYTHON_USE_TYPE_SPECS __pyx_AsyncGenType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_AsyncGenType_spec, NULL); #else - (void) module; + CYTHON_MAYBE_UNUSED_VAR(module); // on Windows, C-API functions can't be used in slots statically __pyx_AsyncGenType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; __pyx_AsyncGenType = __Pyx_FetchCommonType(&__pyx_AsyncGenType_type); diff --git a/Cython/Utility/CommonStructures.c b/Cython/Utility/CommonStructures.c index 5449e6902..f39f3d70d 100644 --- a/Cython/Utility/CommonStructures.c +++ b/Cython/Utility/CommonStructures.c @@ -121,7 +121,7 @@ static PyTypeObject *__Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; PyErr_Clear(); // We pass the ABI module reference to avoid keeping the user module alive by foreign type usages. 
- (void) module; + CYTHON_UNUSED_VAR(module); cached_type = __Pyx_PyType_FromModuleAndSpec(abi_module, spec, bases); if (unlikely(!cached_type)) goto bad; if (unlikely(__Pyx_fix_up_extension_type_from_spec(spec, (PyTypeObject *) cached_type) < 0)) goto bad; diff --git a/Cython/Utility/Coroutine.c b/Cython/Utility/Coroutine.c index 15ed61cc4..0c478fdbe 100644 --- a/Cython/Utility/Coroutine.c +++ b/Cython/Utility/Coroutine.c @@ -256,7 +256,7 @@ static PyObject *__Pyx_Coroutine_GetAsyncIter_Generic(PyObject *obj) { } #else // avoid C warning about 'unused function' - if ((0)) (void) __Pyx_PyObject_CallMethod0(obj, PYIDENT("__aiter__")); + (void)&__Pyx_PyObject_CallMethod0; #endif obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); @@ -1860,11 +1860,11 @@ static PyTypeObject __pyx_CoroutineType_type = { #endif /* CYTHON_USE_TYPE_SPECS */ static int __pyx_Coroutine_init(PyObject *module) { + CYTHON_MAYBE_UNUSED_VAR(module); // on Windows, C-API functions can't be used in slots statically #if CYTHON_USE_TYPE_SPECS __pyx_CoroutineType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CoroutineType_spec, NULL); #else - (void) module; __pyx_CoroutineType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; __pyx_CoroutineType = __Pyx_FetchCommonType(&__pyx_CoroutineType_type); #endif @@ -2014,7 +2014,7 @@ static int __pyx_IterableCoroutine_init(PyObject *module) { #if CYTHON_USE_TYPE_SPECS __pyx_IterableCoroutineType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_IterableCoroutineType_spec, NULL); #else - (void) module; + CYTHON_UNUSED_VAR(module); __pyx_IterableCoroutineType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; __pyx_IterableCoroutineType = __Pyx_FetchCommonType(&__pyx_IterableCoroutineType_type); #endif @@ -2159,7 +2159,7 @@ static int __pyx_Generator_init(PyObject *module) { #if CYTHON_USE_TYPE_SPECS __pyx_GeneratorType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_GeneratorType_spec, NULL); #else - (void) module; + CYTHON_UNUSED_VAR(module); // on 
Windows, C-API functions can't be used in slots statically __pyx_GeneratorType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; __pyx_GeneratorType_type.tp_iter = PyObject_SelfIter; @@ -2565,8 +2565,8 @@ static PyTypeObject __Pyx__PyExc_StopAsyncIteration_type = { #endif static int __pyx_StopAsyncIteration_init(PyObject *module) { + CYTHON_UNUSED_VAR(module); #if PY_VERSION_HEX >= 0x030500B1 - (void) module; __Pyx_PyExc_StopAsyncIteration = PyExc_StopAsyncIteration; #else PyObject *builtins = PyEval_GetBuiltins(); @@ -2584,7 +2584,6 @@ static int __pyx_StopAsyncIteration_init(PyObject *module) { __Pyx__PyExc_StopAsyncIteration_type.tp_dictoffset = ((PyTypeObject*)PyExc_BaseException)->tp_dictoffset; __Pyx__PyExc_StopAsyncIteration_type.tp_base = (PyTypeObject*)PyExc_Exception; - (void) module; __Pyx_PyExc_StopAsyncIteration = (PyObject*) __Pyx_FetchCommonType(&__Pyx__PyExc_StopAsyncIteration_type); if (unlikely(!__Pyx_PyExc_StopAsyncIteration)) return -1; diff --git a/Cython/Utility/CythonFunction.c b/Cython/Utility/CythonFunction.c index 870dcf620..435b4ee18 100644 --- a/Cython/Utility/CythonFunction.c +++ b/Cython/Utility/CythonFunction.c @@ -1055,7 +1055,7 @@ static int __pyx_CyFunction_init(PyObject *module) { #if CYTHON_USE_TYPE_SPECS __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL); #else - (void) module; + CYTHON_UNUSED_VAR(module); __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); #endif if (unlikely(__pyx_CyFunctionType == NULL)) { @@ -1587,7 +1587,7 @@ static int __pyx_FusedFunction_init(PyObject *module) { __pyx_FusedFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_FusedFunctionType_spec, bases); Py_DECREF(bases); #else - (void) module; + CYTHON_UNUSED_VAR(module); // Set base from __Pyx_FetchCommonTypeFromSpec, in case it's different from the local static value. 
__pyx_FusedFunctionType_type.tp_base = __pyx_CyFunctionType; __pyx_FusedFunctionType = __Pyx_FetchCommonType(&__pyx_FusedFunctionType_type); @@ -1601,7 +1601,7 @@ static int __pyx_FusedFunction_init(PyObject *module) { //////////////////// ClassMethod.proto //////////////////// #include "descrobject.h" -static CYTHON_UNUSED PyObject* __Pyx_Method_ClassMethod(PyObject *method); /*proto*/ +CYTHON_UNUSED static PyObject* __Pyx_Method_ClassMethod(PyObject *method); /*proto*/ //////////////////// ClassMethod //////////////////// diff --git a/Cython/Utility/Exceptions.c b/Cython/Utility/Exceptions.c index dce82348b..f39ca988d 100644 --- a/Cython/Utility/Exceptions.c +++ b/Cython/Utility/Exceptions.c @@ -725,13 +725,15 @@ static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);/*proto*/ //@substitute: naming #ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_line) { +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { PyObject *use_cline; PyObject *ptype, *pvalue, *ptraceback; #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif + CYTHON_MAYBE_UNUSED_VAR(tstate); + if (unlikely(!${cython_runtime_cname})) { // Very early error where the runtime module is not set up yet. 
return c_line; diff --git a/Cython/Utility/ExtensionTypes.c b/Cython/Utility/ExtensionTypes.c index aa39a860a..700bf1468 100644 --- a/Cython/Utility/ExtensionTypes.c +++ b/Cython/Utility/ExtensionTypes.c @@ -11,8 +11,8 @@ static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject #if CYTHON_USE_TYPE_SPECS static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type) { #if PY_VERSION_HEX > 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - (void) spec; - (void) type; + CYTHON_UNUSED_VAR(spec); + CYTHON_UNUSED_VAR(type); #else // Set tp_weakreflist, tp_dictoffset, tp_vectorcalloffset // Copied and adapted from https://bugs.python.org/issue38140 @@ -156,7 +156,7 @@ static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffs /////////////// PyType_Ready.proto /////////////// // unused when using type specs -static CYTHON_UNUSED int __Pyx_PyType_Ready(PyTypeObject *t);/*proto*/ +CYTHON_UNUSED static int __Pyx_PyType_Ready(PyTypeObject *t);/*proto*/ /////////////// PyType_Ready /////////////// //@requires: ObjectHandling.c::PyObjectCallMethod0 diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c index a03654734..3e209dd87 100644 --- a/Cython/Utility/ModuleSetupCode.c +++ b/Cython/Utility/ModuleSetupCode.c @@ -428,6 +428,17 @@ #endif // unused attribute +#ifndef CYTHON_UNUSED + #if defined(__cplusplus) + /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17 + * but leads to warnings with -pedantic, since it is a C++17 feature */ + #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) + #if __has_cpp_attribute(maybe_unused) + #define CYTHON_UNUSED [[maybe_unused]] + #endif + #endif + #endif +#endif #ifndef CYTHON_UNUSED # if defined(__GNUC__) # if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) diff --git a/Cython/Utility/Optimize.c b/Cython/Utility/Optimize.c index 81aeb316e..7a3e3cd3d 100644 --- 
a/Cython/Utility/Optimize.c +++ b/Cython/Utility/Optimize.c @@ -445,7 +445,7 @@ static CYTHON_INLINE PyObject* __Pyx_set_iterator(PyObject* iterable, int is_set return iterable; } #else - (void)is_set; + CYTHON_UNUSED_VAR(is_set); *p_source_is_set = 0; #endif *p_orig_length = 0; @@ -461,8 +461,8 @@ static CYTHON_INLINE int __Pyx_set_iter_next( if (unlikely(!*value)) { return __Pyx_IterFinish(); } - (void)orig_length; - (void)ppos; + CYTHON_UNUSED_VAR(orig_length); + CYTHON_UNUSED_VAR(ppos); return 1; } #if CYTHON_COMPILING_IN_CPYTHON @@ -904,7 +904,7 @@ static CYTHON_INLINE int __Pyx__PyBytes_AsDouble_IsSpace(char ch) { return (ch == 0x20) | !((ch < 0x9) | (ch > 0xd)); } -static CYTHON_UNUSED double __Pyx__PyBytes_AsDouble(PyObject *obj, const char* start, Py_ssize_t length) { +CYTHON_UNUSED static double __Pyx__PyBytes_AsDouble(PyObject *obj, const char* start, Py_ssize_t length) { double value; Py_ssize_t i, digits; const char *last = start + length; @@ -1318,7 +1318,8 @@ static {{c_ret_type}} {{cfunc_name}}(PyObject *op1, PyObject *op2, long intval, } {{else}} {{if c_op == '*'}} - (void)a; (void)b; + CYTHON_UNUSED_VAR(a); + CYTHON_UNUSED_VAR(b); #ifdef HAVE_LONG_LONG ll{{ival}} = {{ival}}; goto long_long; @@ -1464,8 +1465,8 @@ def zerodiv_check(operand, _is_mod=op == 'Remainder', _needs_check=(order == 'CO static {{c_ret_type}} {{cfunc_name}}(PyObject *op1, PyObject *op2, double floatval, int inplace, int zerodivision_check) { const double {{'a' if order == 'CObj' else 'b'}} = floatval; double {{fval}}{{if op not in ('Eq', 'Ne')}}, result{{endif}}; - // Prevent "unused" warnings. - (void)inplace; (void)zerodivision_check; + CYTHON_UNUSED_VAR(inplace); + CYTHON_UNUSED_VAR(zerodivision_check); {{if op in ('Eq', 'Ne')}} if (op1 == op2) { -- cgit v1.2.1 From 85a29fbd4d74be8560d379a0e443ed819d2c0534 Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 5 Sep 2022 11:59:18 +0100 Subject: Remove `p_lambdef_nocond` from the parser, following Python 3.9+. 
(GH-4992) Note that it wasn't correct before since it didn't pass the correct flag to `p_lambdef` and thus was equivalent to just using `p_lambdef`. Note also that there's a difference in behaviour between Python3.9+ and before. Python <3.9 allowed `[i for i in range(10) if lambda: i]` while Python >=3.9 disallows this. Arguably it's pointless because the lambda always evaluates to True. See https://github.com/python/cpython/issues/86014 for the Python issue. With this change Cython will follow the Python 3.9 behaviour at the cost of potentially breaking some code that does use the pattern above. Part of the cleanup in https://github.com/cython/cython/issues/4595 --- Cython/Compiler/Parsing.pxd | 4 +--- Cython/Compiler/Parsing.py | 28 ++++++++-------------------- 2 files changed, 9 insertions(+), 23 deletions(-) diff --git a/Cython/Compiler/Parsing.pxd b/Cython/Compiler/Parsing.pxd index 233ef214e..038dc9c85 100644 --- a/Cython/Compiler/Parsing.pxd +++ b/Cython/Compiler/Parsing.pxd @@ -21,11 +21,9 @@ cdef p_ident_list(PyrexScanner s) cdef tuple p_binop_operator(PyrexScanner s) cdef p_binop_expr(PyrexScanner s, ops, p_sub_expr_func p_sub_expr) -cdef p_lambdef(PyrexScanner s, bint allow_conditional=*) -cdef p_lambdef_nocond(PyrexScanner s) +cdef p_lambdef(PyrexScanner s) cdef p_test(PyrexScanner s) cdef p_test_allow_walrus_after(PyrexScanner s) -cdef p_test_nocond(PyrexScanner s) cdef p_namedexpr_test(PyrexScanner s) cdef p_or_test(PyrexScanner s) cdef p_rassoc_binop_expr(PyrexScanner s, unicode op, p_sub_expr_func p_subexpr) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 8160149af..0d94ae1a9 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -108,7 +108,7 @@ def p_binop_expr(s, ops, p_sub_expr): #lambdef: 'lambda' [varargslist] ':' test -def p_lambdef(s, allow_conditional=True): +def p_lambdef(s): # s.sy == 'lambda' pos = s.position() s.next() @@ -119,20 +119,12 @@ def p_lambdef(s, allow_conditional=True): args, 
star_arg, starstar_arg = p_varargslist( s, terminator=':', annotated=False) s.expect(':') - if allow_conditional: - expr = p_test(s) - else: - expr = p_test_nocond(s) + expr = p_test(s) return ExprNodes.LambdaNode( pos, args = args, star_arg = star_arg, starstar_arg = starstar_arg, result_expr = expr) -#lambdef_nocond: 'lambda' [varargslist] ':' test_nocond - -def p_lambdef_nocond(s): - return p_lambdef(s) - #test: or_test ['if' or_test 'else' test] | lambdef def p_test(s): @@ -161,15 +153,6 @@ def p_test_allow_walrus_after(s): else: return expr - -#test_nocond: or_test | lambdef_nocond - -def p_test_nocond(s): - if s.sy == 'lambda': - return p_lambdef_nocond(s) - else: - return p_or_test(s) - def p_namedexpr_test(s): # defined in the LL parser as # namedexpr_test: test [':=' test] @@ -1344,7 +1327,12 @@ def p_comp_if(s, body): # s.sy == 'if' pos = s.position() s.next() - test = p_test_nocond(s) + # Note that Python 3.9+ is actually more restrictive here and Cython now follows + # the Python 3.9+ behaviour: https://github.com/python/cpython/issues/86014 + # On Python <3.9 `[i for i in range(10) if lambda: i if True else 1]` was disallowed + # but `[i for i in range(10) if lambda: i]` was allowed. + # On Python >=3.9 they're both disallowed. 
+ test = p_or_test(s) return Nodes.IfStatNode(pos, if_clauses = [Nodes.IfClauseNode(pos, condition = test, body = p_comp_iter(s, body))], -- cgit v1.2.1 From de2ca1746ba7c8a4c373005d4c6ed02aba3c1978 Mon Sep 17 00:00:00 2001 From: 0dminnimda <0dminnimda@gmail.com> Date: Mon, 5 Sep 2022 14:05:00 +0300 Subject: ci-run.sh: run msvc build with multiple processes (GH-4977) --- Tools/ci-run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index 0a5921d64..bd8ef0c8b 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -114,7 +114,7 @@ if [[ $OSTYPE == "msys" ]]; then # for MSVC cl # 4127 warns that a conditional expression is constant, should be fixed here https://github.com/cython/cython/pull/4317 # (off by default) 5045 warns that the compiler will insert Spectre mitigations for memory load if the /Qspectre switch is specified # (off by default) 4820 warns about the code in Python\3.9.6\x64\include ... - CFLAGS="-Od /Z7 /W4 /wd4711 /wd4127 /wd5045 /wd4820" + CFLAGS="-Od /Z7 /MP /W4 /wd4711 /wd4127 /wd5045 /wd4820" else CFLAGS="-O0 -ggdb -Wall -Wextra" fi -- cgit v1.2.1 From 0c7f5534923284ba42f6d5140f17f303a4f4cd8f Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 5 Sep 2022 18:02:17 +0100 Subject: Allow bound cfuncs to be coerced to object (#4988) Fixes https://github.com/cython/cython/issues/4890 (although likely doesn't generate the optimized code they were really after). This is an initial pass at the problem that just uses functools. It's likely that Cython could generate more efficient code in future. 
--- Cython/Compiler/ExprNodes.py | 28 ++++++++++++++++++++++++++++ tests/run/cfunc_convert.pyx | 25 +++++++++++++++++++++++++ 2 files changed, 53 insertions(+) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index f9e854183..c6f5feab1 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -7157,6 +7157,34 @@ class AttributeNode(ExprNode): self.entry = entry.as_variable self.analyse_as_python_attribute(env) return self + elif entry and entry.is_cfunction and self.obj.type is not Builtin.type_type: + # "bound" cdef function. + # This implementation is likely a little inefficient and could be improved. + # Essentially it does: + # __import__("functools").partial(coerce_to_object(self), self.obj) + from .UtilNodes import EvalWithTempExprNode, ResultRefNode + # take self.obj out to a temp because it's used twice + obj_node = ResultRefNode(self.obj, type=self.obj.type) + obj_node.result_ctype = self.obj.result_ctype + self.obj = obj_node + unbound_node = ExprNode.coerce_to(self, dst_type, env) + functools = SimpleCallNode( + self.pos, + function=NameNode(self.pos, name=StringEncoding.EncodedString("__import__")), + args=[StringNode(self.pos, value=StringEncoding.EncodedString("functools"))], + ) + partial = AttributeNode( + self.pos, + obj=functools, + attribute=StringEncoding.EncodedString("partial"), + ) + partial_call = SimpleCallNode( + self.pos, + function=partial, + args=[unbound_node, obj_node], + ) + complete_call = EvalWithTempExprNode(obj_node, partial_call) + return complete_call.analyse_types(env) return ExprNode.coerce_to(self, dst_type, env) def calculate_constant_result(self): diff --git a/tests/run/cfunc_convert.pyx b/tests/run/cfunc_convert.pyx index 6db0765d4..7e41a5371 100644 --- a/tests/run/cfunc_convert.pyx +++ b/tests/run/cfunc_convert.pyx @@ -266,3 +266,28 @@ def make_map(): "f2": cfunc_dup_f2, } return map + + +cdef class HasCdefFunc: + cdef int x + def __init__(self, x): + self.x = x + + cdef int 
func(self, int y): + return self.x + y + +def test_unbound_methods(): + """ + >>> f = test_unbound_methods() + >>> f(HasCdefFunc(1), 2) + 3 + """ + return HasCdefFunc.func + +def test_bound_methods(): + """ + >>> f = test_bound_methods() + >>> f(2) + 3 + """ + return HasCdefFunc(1).func -- cgit v1.2.1 From 866e1a01fab85ef9ae5cf0be9a75ffaac9b9e104 Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 5 Sep 2022 18:23:37 +0100 Subject: Make sure we call __del__ for final types (#4996) including final types that inherit __del__ from elsewhere (either known or unknown). Fixes #4995 --- Cython/Compiler/ModuleNode.py | 3 +- Cython/Compiler/Nodes.py | 4 ++ Cython/Compiler/PyrexTypes.py | 2 + Cython/Compiler/Symtab.py | 19 +++++++ Cython/Compiler/TypeSlots.py | 3 +- runtests.py | 1 + tests/run/pep442_tp_finalize.pyx | 79 ++++++++++++++++++++++++++++ tests/run/pep442_tp_finalize_cimport.srctree | 67 +++++++++++++++++++++++ 8 files changed, 174 insertions(+), 4 deletions(-) create mode 100644 tests/run/pep442_tp_finalize_cimport.srctree diff --git a/Cython/Compiler/ModuleNode.py b/Cython/Compiler/ModuleNode.py index d140caff1..8b5fdd16e 100644 --- a/Cython/Compiler/ModuleNode.py +++ b/Cython/Compiler/ModuleNode.py @@ -1632,7 +1632,6 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): entry = scope.lookup_here("__del__") if entry is None or not entry.is_special: return # nothing to wrap - slot_func_cname = scope.mangle_internal("tp_finalize") code.putln("") if tp_slot.used_ifdef: @@ -1677,7 +1676,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): if py_attrs or cpp_destructable_attrs or memoryview_slices or weakref_slot or dict_slot: self.generate_self_cast(scope, code) - if not is_final_type: + if not is_final_type or scope.may_have_finalize(): # in Py3.4+, call tp_finalize() as early as possible code.putln("#if CYTHON_USE_TP_FINALIZE") if needs_gc: diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 2127d9c54..21fb6334e 100644 --- a/Cython/Compiler/Nodes.py 
+++ b/Cython/Compiler/Nodes.py @@ -5218,6 +5218,8 @@ class CClassDefNode(ClassDefNode): api=self.api, buffer_defaults=self.buffer_defaults(env), shadow=self.shadow) + if self.bases and len(self.bases.args) > 1: + self.entry.type.multiple_bases = True def analyse_declarations(self, env): #print "CClassDefNode.analyse_declarations:", self.class_name @@ -5307,6 +5309,8 @@ class CClassDefNode(ClassDefNode): api=self.api, buffer_defaults=self.buffer_defaults(env), shadow=self.shadow) + if self.bases and len(self.bases.args) > 1: + self.entry.type.multiple_bases = True if self.shadow: home_scope.lookup(self.class_name).as_variable = self.entry diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py index e8a87b42d..89d8bfac9 100644 --- a/Cython/Compiler/PyrexTypes.py +++ b/Cython/Compiler/PyrexTypes.py @@ -1523,6 +1523,7 @@ class PyExtensionType(PyObjectType): # is_external boolean Defined in a extern block # check_size 'warn', 'error', 'ignore' What to do if tp_basicsize does not match # dataclass_fields OrderedDict nor None Used for inheriting from dataclasses + # multiple_bases boolean Does this class have multiple bases is_extension_type = 1 has_attributes = 1 @@ -1530,6 +1531,7 @@ class PyExtensionType(PyObjectType): objtypedef_cname = None dataclass_fields = None + multiple_bases = False def __init__(self, name, typedef_flag, base_type, is_external=0, check_size=None): self.name = name diff --git a/Cython/Compiler/Symtab.py b/Cython/Compiler/Symtab.py index 7a76ecde9..92afd8779 100644 --- a/Cython/Compiler/Symtab.py +++ b/Cython/Compiler/Symtab.py @@ -2314,6 +2314,25 @@ class CClassScope(ClassScope): """ return self.needs_gc() and not self.directives.get('no_gc_clear', False) + def may_have_finalize(self): + """ + This covers cases where we definitely have a __del__ function + and also cases where one of the base classes could have a __del__ + function but we don't know. 
+ """ + current_type_scope = self + while current_type_scope: + del_entry = current_type_scope.lookup_here("__del__") + if del_entry and del_entry.is_special: + return True + if (current_type_scope.parent_type.is_extern or not current_type_scope.implemented or + current_type_scope.parent_type.multiple_bases): + # we don't know if we have __del__, so assume we do and call it + return True + current_base_type = current_type_scope.parent_type.base_type + current_type_scope = current_base_type.scope if current_base_type else None + return False + def get_refcounted_entries(self, include_weakref=False, include_gc_simple=True): py_attrs = [] diff --git a/Cython/Compiler/TypeSlots.py b/Cython/Compiler/TypeSlots.py index ea310a6d3..fe3867f9d 100644 --- a/Cython/Compiler/TypeSlots.py +++ b/Cython/Compiler/TypeSlots.py @@ -556,8 +556,7 @@ class TypeFlagsSlot(SlotDescriptor): value += "|Py_TPFLAGS_BASETYPE" if scope.needs_gc(): value += "|Py_TPFLAGS_HAVE_GC" - entry = scope.lookup("__del__") - if entry and entry.is_special: + if scope.may_have_finalize(): value += "|Py_TPFLAGS_HAVE_FINALIZE" return value diff --git a/runtests.py b/runtests.py index be32838c2..8a938094e 100755 --- a/runtests.py +++ b/runtests.py @@ -482,6 +482,7 @@ VER_DEP_MODULES = { (3,4): (operator.lt, lambda x: x in ['run.py34_signature', 'run.test_unicode', # taken from Py3.7, difficult to backport 'run.pep442_tp_finalize', + 'run.pep442_tp_finalize_cimport', ]), (3,4,999): (operator.gt, lambda x: x in ['run.initial_file_path', ]), diff --git a/tests/run/pep442_tp_finalize.pyx b/tests/run/pep442_tp_finalize.pyx index 49bed3268..6532757f9 100644 --- a/tests/run/pep442_tp_finalize.pyx +++ b/tests/run/pep442_tp_finalize.pyx @@ -1,5 +1,9 @@ # mode: run +from __future__ import print_function + +cimport cython + import gc cdef class nontrivial_del: @@ -49,6 +53,80 @@ def test_del_and_dealloc(): gc.collect() print("finish") +@cython.final +cdef class FinalClass: + def __init__(self): + print("init") + def 
__del__(self): + print("del") + +def test_final_class(): + """ + >>> test_final_class() + start + init + del + finish + """ + print("start") + d = FinalClass() + d = None + gc.collect() + print("finish") + +@cython.final +cdef class FinalInherits(nontrivial_del): + def __init__(self): + super().__init__() + print("FinalInherits init") + # no __del__ but nontrivial_del should still be called + def __dealloc__(self): + pass # define __dealloc__ so as not to fall back on base __dealloc__ + +def test_final_inherited(): + """ + >>> test_final_inherited() + start + init + FinalInherits init + del + finish + """ + print("start") + d = FinalInherits() + d = None + gc.collect() + print("finish") + +cdef class DummyBase: + pass + +class RegularClass: + __slots__ = () + def __del__(self): + print("del") + +@cython.final +cdef class FinalMultipleInheritance(DummyBase, RegularClass): + def __init__(self): + super().__init__() + print("init") + def __dealloc__(self): + pass + +def test_final_multiple_inheritance(): + """ + >>> test_final_multiple_inheritance() + start + init + del + finish + """ + print("start") + d = FinalMultipleInheritance() + d = None + gc.collect() + print("finish") cdef class del_with_exception: def __init__(self): @@ -301,3 +379,4 @@ class derived_python_child(cdef_nontrivial_parent): raise RuntimeError("End function") func(derived_python_child) + diff --git a/tests/run/pep442_tp_finalize_cimport.srctree b/tests/run/pep442_tp_finalize_cimport.srctree new file mode 100644 index 000000000..8a257177f --- /dev/null +++ b/tests/run/pep442_tp_finalize_cimport.srctree @@ -0,0 +1,67 @@ +""" +PYTHON setup.py build_ext -i +PYTHON runtests.py +""" + +####### runtests.py ####### + +import gc +from testclasses import * +import baseclasses + +def test_has_del(): + inst = HasIndirectDel() + inst = None + gc.collect() + assert baseclasses.HasDel_del_called_count + +def test_no_del(): + inst = NoIndirectDel() + inst = None + gc.collect() + # The test here is that it 
doesn't crash + +test_has_del() +test_no_del() + +######## setup.py ######## + +from setuptools import setup +from Cython.Build import cythonize + +setup(ext_modules = cythonize('*.pyx')) + +####### baseclasses.pxd ###### + +cdef class HasDel: + pass + +cdef class DoesntHaveDel: + pass + +####### baseclasses.pyx ###### + +HasDel_del_called_count = 0 + +cdef class HasDel: + def __del__(self): + global HasDel_del_called_count + HasDel_del_called_count += 1 + +cdef class DoesntHaveDel: + pass + +######## testclasses.pyx ###### + +cimport cython +from baseclasses cimport HasDel, DoesntHaveDel + +@cython.final +cdef class HasIndirectDel(HasDel): + pass + +@cython.final +cdef class NoIndirectDel(DoesntHaveDel): + # But Cython can't tell that we don't have __del__ until runtime, + # so has to generate code to call it (and not crash!) + pass -- cgit v1.2.1 From 606bd8cf235149c3be6876d0f5ae60032c8aab6c Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 10 Sep 2022 07:57:07 +0100 Subject: Try to use test_dataclass from CPython (#4955) It needs a bit of translation to get it Cythonable (since Cython dataclasses are extension types and so must be at the global scope). At the moment bugs are identified but not fixed --- Tools/dataclass_test_data/test_dataclasses.py | 4266 +++++++++++++++++++++++++ Tools/make_dataclass_tests.py | 451 +++ runtests.py | 1 + tests/run/test_dataclasses.pxi | 19 + tests/run/test_dataclasses.pyx | 966 ++++++ 5 files changed, 5703 insertions(+) create mode 100644 Tools/dataclass_test_data/test_dataclasses.py create mode 100644 Tools/make_dataclass_tests.py create mode 100644 tests/run/test_dataclasses.pxi create mode 100644 tests/run/test_dataclasses.pyx diff --git a/Tools/dataclass_test_data/test_dataclasses.py b/Tools/dataclass_test_data/test_dataclasses.py new file mode 100644 index 000000000..e2eab6957 --- /dev/null +++ b/Tools/dataclass_test_data/test_dataclasses.py @@ -0,0 +1,4266 @@ +# Deliberately use "from dataclasses import *". 
Every name in __all__ +# is tested, so they all must be present. This is a way to catch +# missing ones. + +from dataclasses import * + +import abc +import pickle +import inspect +import builtins +import types +import weakref +import unittest +from unittest.mock import Mock +from typing import ClassVar, Any, List, Union, Tuple, Dict, Generic, TypeVar, Optional, Protocol +from typing import get_type_hints +from collections import deque, OrderedDict, namedtuple +from functools import total_ordering + +import typing # Needed for the string "typing.ClassVar[int]" to work as an annotation. +import dataclasses # Needed for the string "dataclasses.InitVar[int]" to work as an annotation. + +# Just any custom exception we can catch. +class CustomError(Exception): pass + +class TestCase(unittest.TestCase): + def test_no_fields(self): + @dataclass + class C: + pass + + o = C() + self.assertEqual(len(fields(C)), 0) + + def test_no_fields_but_member_variable(self): + @dataclass + class C: + i = 0 + + o = C() + self.assertEqual(len(fields(C)), 0) + + def test_one_field_no_default(self): + @dataclass + class C: + x: int + + o = C(42) + self.assertEqual(o.x, 42) + + def test_field_default_default_factory_error(self): + msg = "cannot specify both default and default_factory" + with self.assertRaisesRegex(ValueError, msg): + @dataclass + class C: + x: int = field(default=1, default_factory=int) + + def test_field_repr(self): + int_field = field(default=1, init=True, repr=False) + int_field.name = "id" + repr_output = repr(int_field) + expected_output = "Field(name='id',type=None," \ + f"default=1,default_factory={MISSING!r}," \ + "init=True,repr=False,hash=None," \ + "compare=True,metadata=mappingproxy({})," \ + f"kw_only={MISSING!r}," \ + "_field_type=None)" + + self.assertEqual(repr_output, expected_output) + + def test_named_init_params(self): + @dataclass + class C: + x: int + + o = C(x=32) + self.assertEqual(o.x, 32) + + def test_two_fields_one_default(self): + @dataclass + 
class C: + x: int + y: int = 0 + + o = C(3) + self.assertEqual((o.x, o.y), (3, 0)) + + # Non-defaults following defaults. + with self.assertRaisesRegex(TypeError, + "non-default argument 'y' follows " + "default argument"): + @dataclass + class C: + x: int = 0 + y: int + + # A derived class adds a non-default field after a default one. + with self.assertRaisesRegex(TypeError, + "non-default argument 'y' follows " + "default argument"): + @dataclass + class B: + x: int = 0 + + @dataclass + class C(B): + y: int + + # Override a base class field and add a default to + # a field which didn't use to have a default. + with self.assertRaisesRegex(TypeError, + "non-default argument 'y' follows " + "default argument"): + @dataclass + class B: + x: int + y: int + + @dataclass + class C(B): + x: int = 0 + + def test_overwrite_hash(self): + # Test that declaring this class isn't an error. It should + # use the user-provided __hash__. + @dataclass(frozen=True) + class C: + x: int + def __hash__(self): + return 301 + self.assertEqual(hash(C(100)), 301) + + # Test that declaring this class isn't an error. It should + # use the generated __hash__. + @dataclass(frozen=True) + class C: + x: int + def __eq__(self, other): + return False + self.assertEqual(hash(C(100)), hash((100,))) + + # But this one should generate an exception, because with + # unsafe_hash=True, it's an error to have a __hash__ defined. + with self.assertRaisesRegex(TypeError, + 'Cannot overwrite attribute __hash__'): + @dataclass(unsafe_hash=True) + class C: + def __hash__(self): + pass + + # Creating this class should not generate an exception, + # because even though __hash__ exists before @dataclass is + # called, (due to __eq__ being defined), since it's None + # that's okay. + @dataclass(unsafe_hash=True) + class C: + x: int + def __eq__(self): + pass + # The generated hash function works as we'd expect. 
+ self.assertEqual(hash(C(10)), hash((10,))) + + # Creating this class should generate an exception, because + # __hash__ exists and is not None, which it would be if it + # had been auto-generated due to __eq__ being defined. + with self.assertRaisesRegex(TypeError, + 'Cannot overwrite attribute __hash__'): + @dataclass(unsafe_hash=True) + class C: + x: int + def __eq__(self): + pass + def __hash__(self): + pass + + def test_overwrite_fields_in_derived_class(self): + # Note that x from C1 replaces x in Base, but the order remains + # the same as defined in Base. + @dataclass + class Base: + x: Any = 15.0 + y: int = 0 + + @dataclass + class C1(Base): + z: int = 10 + x: int = 15 + + o = Base() + self.assertEqual(repr(o), 'TestCase.test_overwrite_fields_in_derived_class..Base(x=15.0, y=0)') + + o = C1() + self.assertEqual(repr(o), 'TestCase.test_overwrite_fields_in_derived_class..C1(x=15, y=0, z=10)') + + o = C1(x=5) + self.assertEqual(repr(o), 'TestCase.test_overwrite_fields_in_derived_class..C1(x=5, y=0, z=10)') + + def test_field_named_self(self): + @dataclass + class C: + self: str + c=C('foo') + self.assertEqual(c.self, 'foo') + + # Make sure the first parameter is not named 'self'. + sig = inspect.signature(C.__init__) + first = next(iter(sig.parameters)) + self.assertNotEqual('self', first) + + # But we do use 'self' if no field named self. + @dataclass + class C: + selfx: str + + # Make sure the first parameter is named 'self'. + sig = inspect.signature(C.__init__) + first = next(iter(sig.parameters)) + self.assertEqual('self', first) + + def test_field_named_object(self): + @dataclass + class C: + object: str + c = C('foo') + self.assertEqual(c.object, 'foo') + + def test_field_named_object_frozen(self): + @dataclass(frozen=True) + class C: + object: str + c = C('foo') + self.assertEqual(c.object, 'foo') + + def test_field_named_like_builtin(self): + # Attribute names can shadow built-in names + # since code generation is used. 
+ # Ensure that this is not happening. + exclusions = {'None', 'True', 'False'} + builtins_names = sorted( + b for b in builtins.__dict__.keys() + if not b.startswith('__') and b not in exclusions + ) + attributes = [(name, str) for name in builtins_names] + C = make_dataclass('C', attributes) + + c = C(*[name for name in builtins_names]) + + for name in builtins_names: + self.assertEqual(getattr(c, name), name) + + def test_field_named_like_builtin_frozen(self): + # Attribute names can shadow built-in names + # since code generation is used. + # Ensure that this is not happening + # for frozen data classes. + exclusions = {'None', 'True', 'False'} + builtins_names = sorted( + b for b in builtins.__dict__.keys() + if not b.startswith('__') and b not in exclusions + ) + attributes = [(name, str) for name in builtins_names] + C = make_dataclass('C', attributes, frozen=True) + + c = C(*[name for name in builtins_names]) + + for name in builtins_names: + self.assertEqual(getattr(c, name), name) + + def test_0_field_compare(self): + # Ensure that order=False is the default. + @dataclass + class C0: + pass + + @dataclass(order=False) + class C1: + pass + + for cls in [C0, C1]: + with self.subTest(cls=cls): + self.assertEqual(cls(), cls()) + for idx, fn in enumerate([lambda a, b: a < b, + lambda a, b: a <= b, + lambda a, b: a > b, + lambda a, b: a >= b]): + with self.subTest(idx=idx): + with self.assertRaisesRegex(TypeError, + f"not supported between instances of '{cls.__name__}' and '{cls.__name__}'"): + fn(cls(), cls()) + + @dataclass(order=True) + class C: + pass + self.assertLessEqual(C(), C()) + self.assertGreaterEqual(C(), C()) + + def test_1_field_compare(self): + # Ensure that order=False is the default. 
+ @dataclass + class C0: + x: int + + @dataclass(order=False) + class C1: + x: int + + for cls in [C0, C1]: + with self.subTest(cls=cls): + self.assertEqual(cls(1), cls(1)) + self.assertNotEqual(cls(0), cls(1)) + for idx, fn in enumerate([lambda a, b: a < b, + lambda a, b: a <= b, + lambda a, b: a > b, + lambda a, b: a >= b]): + with self.subTest(idx=idx): + with self.assertRaisesRegex(TypeError, + f"not supported between instances of '{cls.__name__}' and '{cls.__name__}'"): + fn(cls(0), cls(0)) + + @dataclass(order=True) + class C: + x: int + self.assertLess(C(0), C(1)) + self.assertLessEqual(C(0), C(1)) + self.assertLessEqual(C(1), C(1)) + self.assertGreater(C(1), C(0)) + self.assertGreaterEqual(C(1), C(0)) + self.assertGreaterEqual(C(1), C(1)) + + def test_simple_compare(self): + # Ensure that order=False is the default. + @dataclass + class C0: + x: int + y: int + + @dataclass(order=False) + class C1: + x: int + y: int + + for cls in [C0, C1]: + with self.subTest(cls=cls): + self.assertEqual(cls(0, 0), cls(0, 0)) + self.assertEqual(cls(1, 2), cls(1, 2)) + self.assertNotEqual(cls(1, 0), cls(0, 0)) + self.assertNotEqual(cls(1, 0), cls(1, 1)) + for idx, fn in enumerate([lambda a, b: a < b, + lambda a, b: a <= b, + lambda a, b: a > b, + lambda a, b: a >= b]): + with self.subTest(idx=idx): + with self.assertRaisesRegex(TypeError, + f"not supported between instances of '{cls.__name__}' and '{cls.__name__}'"): + fn(cls(0, 0), cls(0, 0)) + + @dataclass(order=True) + class C: + x: int + y: int + + for idx, fn in enumerate([lambda a, b: a == b, + lambda a, b: a <= b, + lambda a, b: a >= b]): + with self.subTest(idx=idx): + self.assertTrue(fn(C(0, 0), C(0, 0))) + + for idx, fn in enumerate([lambda a, b: a < b, + lambda a, b: a <= b, + lambda a, b: a != b]): + with self.subTest(idx=idx): + self.assertTrue(fn(C(0, 0), C(0, 1))) + self.assertTrue(fn(C(0, 1), C(1, 0))) + self.assertTrue(fn(C(1, 0), C(1, 1))) + + for idx, fn in enumerate([lambda a, b: a > b, + lambda a, b: a 
>= b, + lambda a, b: a != b]): + with self.subTest(idx=idx): + self.assertTrue(fn(C(0, 1), C(0, 0))) + self.assertTrue(fn(C(1, 0), C(0, 1))) + self.assertTrue(fn(C(1, 1), C(1, 0))) + + def test_compare_subclasses(self): + # Comparisons fail for subclasses, even if no fields + # are added. + @dataclass + class B: + i: int + + @dataclass + class C(B): + pass + + for idx, (fn, expected) in enumerate([(lambda a, b: a == b, False), + (lambda a, b: a != b, True)]): + with self.subTest(idx=idx): + self.assertEqual(fn(B(0), C(0)), expected) + + for idx, fn in enumerate([lambda a, b: a < b, + lambda a, b: a <= b, + lambda a, b: a > b, + lambda a, b: a >= b]): + with self.subTest(idx=idx): + with self.assertRaisesRegex(TypeError, + "not supported between instances of 'B' and 'C'"): + fn(B(0), C(0)) + + def test_eq_order(self): + # Test combining eq and order. + for (eq, order, result ) in [ + (False, False, 'neither'), + (False, True, 'exception'), + (True, False, 'eq_only'), + (True, True, 'both'), + ]: + with self.subTest(eq=eq, order=order): + if result == 'exception': + with self.assertRaisesRegex(ValueError, 'eq must be true if order is true'): + @dataclass(eq=eq, order=order) + class C: + pass + else: + @dataclass(eq=eq, order=order) + class C: + pass + + if result == 'neither': + self.assertNotIn('__eq__', C.__dict__) + self.assertNotIn('__lt__', C.__dict__) + self.assertNotIn('__le__', C.__dict__) + self.assertNotIn('__gt__', C.__dict__) + self.assertNotIn('__ge__', C.__dict__) + elif result == 'both': + self.assertIn('__eq__', C.__dict__) + self.assertIn('__lt__', C.__dict__) + self.assertIn('__le__', C.__dict__) + self.assertIn('__gt__', C.__dict__) + self.assertIn('__ge__', C.__dict__) + elif result == 'eq_only': + self.assertIn('__eq__', C.__dict__) + self.assertNotIn('__lt__', C.__dict__) + self.assertNotIn('__le__', C.__dict__) + self.assertNotIn('__gt__', C.__dict__) + self.assertNotIn('__ge__', C.__dict__) + else: + assert False, f'unknown result {result!r}' 
+ + def test_field_no_default(self): + @dataclass + class C: + x: int = field() + + self.assertEqual(C(5).x, 5) + + with self.assertRaisesRegex(TypeError, + r"__init__\(\) missing 1 required " + "positional argument: 'x'"): + C() + + def test_field_default(self): + default = object() + @dataclass + class C: + x: object = field(default=default) + + self.assertIs(C.x, default) + c = C(10) + self.assertEqual(c.x, 10) + + # If we delete the instance attribute, we should then see the + # class attribute. + del c.x + self.assertIs(c.x, default) + + self.assertIs(C().x, default) + + def test_not_in_repr(self): + @dataclass + class C: + x: int = field(repr=False) + with self.assertRaises(TypeError): + C() + c = C(10) + self.assertEqual(repr(c), 'TestCase.test_not_in_repr..C()') + + @dataclass + class C: + x: int = field(repr=False) + y: int + c = C(10, 20) + self.assertEqual(repr(c), 'TestCase.test_not_in_repr..C(y=20)') + + def test_not_in_compare(self): + @dataclass + class C: + x: int = 0 + y: int = field(compare=False, default=4) + + self.assertEqual(C(), C(0, 20)) + self.assertEqual(C(1, 10), C(1, 20)) + self.assertNotEqual(C(3), C(4, 10)) + self.assertNotEqual(C(3, 10), C(4, 10)) + + def test_no_unhashable_default(self): + # See bpo-44674. + class Unhashable: + __hash__ = None + + unhashable_re = 'mutable default .* for field a is not allowed' + with self.assertRaisesRegex(ValueError, unhashable_re): + @dataclass + class A: + a: dict = {} + + with self.assertRaisesRegex(ValueError, unhashable_re): + @dataclass + class A: + a: Any = Unhashable() + + # Make sure that the machinery looking for hashability is using the + # class's __hash__, not the instance's __hash__. + with self.assertRaisesRegex(ValueError, unhashable_re): + unhashable = Unhashable() + # This shouldn't make the variable hashable. 
+ unhashable.__hash__ = lambda: 0 + @dataclass + class A: + a: Any = unhashable + + def test_hash_field_rules(self): + # Test all 6 cases of: + # hash=True/False/None + # compare=True/False + for (hash_, compare, result ) in [ + (True, False, 'field' ), + (True, True, 'field' ), + (False, False, 'absent'), + (False, True, 'absent'), + (None, False, 'absent'), + (None, True, 'field' ), + ]: + with self.subTest(hash=hash_, compare=compare): + @dataclass(unsafe_hash=True) + class C: + x: int = field(compare=compare, hash=hash_, default=5) + + if result == 'field': + # __hash__ contains the field. + self.assertEqual(hash(C(5)), hash((5,))) + elif result == 'absent': + # The field is not present in the hash. + self.assertEqual(hash(C(5)), hash(())) + else: + assert False, f'unknown result {result!r}' + + def test_init_false_no_default(self): + # If init=False and no default value, then the field won't be + # present in the instance. + @dataclass + class C: + x: int = field(init=False) + + self.assertNotIn('x', C().__dict__) + + @dataclass + class C: + x: int + y: int = 0 + z: int = field(init=False) + t: int = 10 + + self.assertNotIn('z', C(0).__dict__) + self.assertEqual(vars(C(5)), {'t': 10, 'x': 5, 'y': 0}) + + def test_class_marker(self): + @dataclass + class C: + x: int + y: str = field(init=False, default=None) + z: str = field(repr=False) + + the_fields = fields(C) + # the_fields is a tuple of 3 items, each value + # is in __annotations__. 
+ self.assertIsInstance(the_fields, tuple) + for f in the_fields: + self.assertIs(type(f), Field) + self.assertIn(f.name, C.__annotations__) + + self.assertEqual(len(the_fields), 3) + + self.assertEqual(the_fields[0].name, 'x') + self.assertEqual(the_fields[0].type, int) + self.assertFalse(hasattr(C, 'x')) + self.assertTrue (the_fields[0].init) + self.assertTrue (the_fields[0].repr) + self.assertEqual(the_fields[1].name, 'y') + self.assertEqual(the_fields[1].type, str) + self.assertIsNone(getattr(C, 'y')) + self.assertFalse(the_fields[1].init) + self.assertTrue (the_fields[1].repr) + self.assertEqual(the_fields[2].name, 'z') + self.assertEqual(the_fields[2].type, str) + self.assertFalse(hasattr(C, 'z')) + self.assertTrue (the_fields[2].init) + self.assertFalse(the_fields[2].repr) + + def test_field_order(self): + @dataclass + class B: + a: str = 'B:a' + b: str = 'B:b' + c: str = 'B:c' + + @dataclass + class C(B): + b: str = 'C:b' + + self.assertEqual([(f.name, f.default) for f in fields(C)], + [('a', 'B:a'), + ('b', 'C:b'), + ('c', 'B:c')]) + + @dataclass + class D(B): + c: str = 'D:c' + + self.assertEqual([(f.name, f.default) for f in fields(D)], + [('a', 'B:a'), + ('b', 'B:b'), + ('c', 'D:c')]) + + @dataclass + class E(D): + a: str = 'E:a' + d: str = 'E:d' + + self.assertEqual([(f.name, f.default) for f in fields(E)], + [('a', 'E:a'), + ('b', 'B:b'), + ('c', 'D:c'), + ('d', 'E:d')]) + + def test_class_attrs(self): + # We only have a class attribute if a default value is + # specified, either directly or via a field with a default. + default = object() + @dataclass + class C: + x: int + y: int = field(repr=False) + z: object = default + t: int = field(default=100) + + self.assertFalse(hasattr(C, 'x')) + self.assertFalse(hasattr(C, 'y')) + self.assertIs (C.z, default) + self.assertEqual(C.t, 100) + + def test_disallowed_mutable_defaults(self): + # For the known types, don't allow mutable default values. 
+ for typ, empty, non_empty in [(list, [], [1]), + (dict, {}, {0:1}), + (set, set(), set([1])), + ]: + with self.subTest(typ=typ): + # Can't use a zero-length value. + with self.assertRaisesRegex(ValueError, + f'mutable default {typ} for field ' + 'x is not allowed'): + @dataclass + class Point: + x: typ = empty + + + # Nor a non-zero-length value + with self.assertRaisesRegex(ValueError, + f'mutable default {typ} for field ' + 'y is not allowed'): + @dataclass + class Point: + y: typ = non_empty + + # Check subtypes also fail. + class Subclass(typ): pass + + with self.assertRaisesRegex(ValueError, + f"mutable default .*Subclass'>" + ' for field z is not allowed' + ): + @dataclass + class Point: + z: typ = Subclass() + + # Because this is a ClassVar, it can be mutable. + @dataclass + class C: + z: ClassVar[typ] = typ() + + # Because this is a ClassVar, it can be mutable. + @dataclass + class C: + x: ClassVar[typ] = Subclass() + + def test_deliberately_mutable_defaults(self): + # If a mutable default isn't in the known list of + # (list, dict, set), then it's okay. + class Mutable: + def __init__(self): + self.l = [] + + @dataclass + class C: + x: Mutable + + # These 2 instances will share this value of x. + lst = Mutable() + o1 = C(lst) + o2 = C(lst) + self.assertEqual(o1, o2) + o1.x.l.extend([1, 2]) + self.assertEqual(o1, o2) + self.assertEqual(o1.x.l, [1, 2]) + self.assertIs(o1.x, o2.x) + + def test_no_options(self): + # Call with dataclass(). + @dataclass() + class C: + x: int + + self.assertEqual(C(42).x, 42) + + def test_not_tuple(self): + # Make sure we can't be compared to a tuple. + @dataclass + class Point: + x: int + y: int + self.assertNotEqual(Point(1, 2), (1, 2)) + + # And that we can't compare to another unrelated dataclass. + @dataclass + class C: + x: int + y: int + self.assertNotEqual(Point(1, 3), C(1, 3)) + + def test_not_other_dataclass(self): + # Test that some of the problems with namedtuple don't happen + # here. 
+ @dataclass + class Point3D: + x: int + y: int + z: int + + @dataclass + class Date: + year: int + month: int + day: int + + self.assertNotEqual(Point3D(2017, 6, 3), Date(2017, 6, 3)) + self.assertNotEqual(Point3D(1, 2, 3), (1, 2, 3)) + + # Make sure we can't unpack. + with self.assertRaisesRegex(TypeError, 'unpack'): + x, y, z = Point3D(4, 5, 6) + + # Make sure another class with the same field names isn't + # equal. + @dataclass + class Point3Dv1: + x: int = 0 + y: int = 0 + z: int = 0 + self.assertNotEqual(Point3D(0, 0, 0), Point3Dv1()) + + def test_function_annotations(self): + # Some dummy class and instance to use as a default. + class F: + pass + f = F() + + def validate_class(cls): + # First, check __annotations__, even though they're not + # function annotations. + self.assertEqual(cls.__annotations__['i'], int) + self.assertEqual(cls.__annotations__['j'], str) + self.assertEqual(cls.__annotations__['k'], F) + self.assertEqual(cls.__annotations__['l'], float) + self.assertEqual(cls.__annotations__['z'], complex) + + # Verify __init__. + + signature = inspect.signature(cls.__init__) + # Check the return type, should be None. + self.assertIs(signature.return_annotation, None) + + # Check each parameter. + params = iter(signature.parameters.values()) + param = next(params) + # This is testing an internal name, and probably shouldn't be tested. 
+ self.assertEqual(param.name, 'self') + param = next(params) + self.assertEqual(param.name, 'i') + self.assertIs (param.annotation, int) + self.assertEqual(param.default, inspect.Parameter.empty) + self.assertEqual(param.kind, inspect.Parameter.POSITIONAL_OR_KEYWORD) + param = next(params) + self.assertEqual(param.name, 'j') + self.assertIs (param.annotation, str) + self.assertEqual(param.default, inspect.Parameter.empty) + self.assertEqual(param.kind, inspect.Parameter.POSITIONAL_OR_KEYWORD) + param = next(params) + self.assertEqual(param.name, 'k') + self.assertIs (param.annotation, F) + # Don't test for the default, since it's set to MISSING. + self.assertEqual(param.kind, inspect.Parameter.POSITIONAL_OR_KEYWORD) + param = next(params) + self.assertEqual(param.name, 'l') + self.assertIs (param.annotation, float) + # Don't test for the default, since it's set to MISSING. + self.assertEqual(param.kind, inspect.Parameter.POSITIONAL_OR_KEYWORD) + self.assertRaises(StopIteration, next, params) + + + @dataclass + class C: + i: int + j: str + k: F = f + l: float=field(default=None) + z: complex=field(default=3+4j, init=False) + + validate_class(C) + + # Now repeat with __hash__. + @dataclass(frozen=True, unsafe_hash=True) + class C: + i: int + j: str + k: F = f + l: float=field(default=None) + z: complex=field(default=3+4j, init=False) + + validate_class(C) + + def test_missing_default(self): + # Test that MISSING works the same as a default not being + # specified. 
+ @dataclass + class C: + x: int=field(default=MISSING) + with self.assertRaisesRegex(TypeError, + r'__init__\(\) missing 1 required ' + 'positional argument'): + C() + self.assertNotIn('x', C.__dict__) + + @dataclass + class D: + x: int + with self.assertRaisesRegex(TypeError, + r'__init__\(\) missing 1 required ' + 'positional argument'): + D() + self.assertNotIn('x', D.__dict__) + + def test_missing_default_factory(self): + # Test that MISSING works the same as a default factory not + # being specified (which is really the same as a default not + # being specified, too). + @dataclass + class C: + x: int=field(default_factory=MISSING) + with self.assertRaisesRegex(TypeError, + r'__init__\(\) missing 1 required ' + 'positional argument'): + C() + self.assertNotIn('x', C.__dict__) + + @dataclass + class D: + x: int=field(default=MISSING, default_factory=MISSING) + with self.assertRaisesRegex(TypeError, + r'__init__\(\) missing 1 required ' + 'positional argument'): + D() + self.assertNotIn('x', D.__dict__) + + def test_missing_repr(self): + self.assertIn('MISSING_TYPE object', repr(MISSING)) + + def test_dont_include_other_annotations(self): + @dataclass + class C: + i: int + def foo(self) -> int: + return 4 + @property + def bar(self) -> int: + return 5 + self.assertEqual(list(C.__annotations__), ['i']) + self.assertEqual(C(10).foo(), 4) + self.assertEqual(C(10).bar, 5) + self.assertEqual(C(10).i, 10) + + def test_post_init(self): + # Just make sure it gets called + @dataclass + class C: + def __post_init__(self): + raise CustomError() + with self.assertRaises(CustomError): + C() + + @dataclass + class C: + i: int = 10 + def __post_init__(self): + if self.i == 10: + raise CustomError() + with self.assertRaises(CustomError): + C() + # post-init gets called, but doesn't raise. This is just + # checking that self is used correctly. + C(5) + + # If there's not an __init__, then post-init won't get called. 
+ @dataclass(init=False) + class C: + def __post_init__(self): + raise CustomError() + # Creating the class won't raise + C() + + @dataclass + class C: + x: int = 0 + def __post_init__(self): + self.x *= 2 + self.assertEqual(C().x, 0) + self.assertEqual(C(2).x, 4) + + # Make sure that if we're frozen, post-init can't set + # attributes. + @dataclass(frozen=True) + class C: + x: int = 0 + def __post_init__(self): + self.x *= 2 + with self.assertRaises(FrozenInstanceError): + C() + + def test_post_init_super(self): + # Make sure super() post-init isn't called by default. + class B: + def __post_init__(self): + raise CustomError() + + @dataclass + class C(B): + def __post_init__(self): + self.x = 5 + + self.assertEqual(C().x, 5) + + # Now call super(), and it will raise. + @dataclass + class C(B): + def __post_init__(self): + super().__post_init__() + + with self.assertRaises(CustomError): + C() + + # Make sure post-init is called, even if not defined in our + # class. + @dataclass + class C(B): + pass + + with self.assertRaises(CustomError): + C() + + def test_post_init_staticmethod(self): + flag = False + @dataclass + class C: + x: int + y: int + @staticmethod + def __post_init__(): + nonlocal flag + flag = True + + self.assertFalse(flag) + c = C(3, 4) + self.assertEqual((c.x, c.y), (3, 4)) + self.assertTrue(flag) + + def test_post_init_classmethod(self): + @dataclass + class C: + flag = False + x: int + y: int + @classmethod + def __post_init__(cls): + cls.flag = True + + self.assertFalse(C.flag) + c = C(3, 4) + self.assertEqual((c.x, c.y), (3, 4)) + self.assertTrue(C.flag) + + def test_post_init_not_auto_added(self): + # See bpo-46757, which had proposed always adding __post_init__. As + # Raymond Hettinger pointed out, that would be a breaking change. So, + # add a test to make sure that the current behavior doesn't change. 
+ + @dataclass + class A0: + pass + + @dataclass + class B0: + b_called: bool = False + def __post_init__(self): + self.b_called = True + + @dataclass + class C0(A0, B0): + c_called: bool = False + def __post_init__(self): + super().__post_init__() + self.c_called = True + + # Since A0 has no __post_init__, and one wasn't automatically added + # (because that's the rule: it's never added by @dataclass, it's only + # the class author that can add it), then B0.__post_init__ is called. + # Verify that. + c = C0() + self.assertTrue(c.b_called) + self.assertTrue(c.c_called) + + ###################################### + # Now, the same thing, except A1 defines __post_init__. + @dataclass + class A1: + def __post_init__(self): + pass + + @dataclass + class B1: + b_called: bool = False + def __post_init__(self): + self.b_called = True + + @dataclass + class C1(A1, B1): + c_called: bool = False + def __post_init__(self): + super().__post_init__() + self.c_called = True + + # This time, B1.__post_init__ isn't being called. This mimics what + # would happen if A1.__post_init__ had been automatically added, + # instead of manually added as we see here. This test isn't really + # needed, but I'm including it just to demonstrate the changed + # behavior when A1 does define __post_init__. + c = C1() + self.assertFalse(c.b_called) + self.assertTrue(c.c_called) + + def test_class_var(self): + # Make sure ClassVars are ignored in __init__, __repr__, etc. + @dataclass + class C: + x: int + y: int = 10 + z: ClassVar[int] = 1000 + w: ClassVar[int] = 2000 + t: ClassVar[int] = 3000 + s: ClassVar = 4000 + + c = C(5) + self.assertEqual(repr(c), 'TestCase.test_class_var..C(x=5, y=10)') + self.assertEqual(len(fields(C)), 2) # We have 2 fields. + self.assertEqual(len(C.__annotations__), 6) # And 4 ClassVars. 
+ self.assertEqual(c.z, 1000) + self.assertEqual(c.w, 2000) + self.assertEqual(c.t, 3000) + self.assertEqual(c.s, 4000) + C.z += 1 + self.assertEqual(c.z, 1001) + c = C(20) + self.assertEqual((c.x, c.y), (20, 10)) + self.assertEqual(c.z, 1001) + self.assertEqual(c.w, 2000) + self.assertEqual(c.t, 3000) + self.assertEqual(c.s, 4000) + + def test_class_var_no_default(self): + # If a ClassVar has no default value, it should not be set on the class. + @dataclass + class C: + x: ClassVar[int] + + self.assertNotIn('x', C.__dict__) + + def test_class_var_default_factory(self): + # It makes no sense for a ClassVar to have a default factory. When + # would it be called? Call it yourself, since it's class-wide. + with self.assertRaisesRegex(TypeError, + 'cannot have a default factory'): + @dataclass + class C: + x: ClassVar[int] = field(default_factory=int) + + self.assertNotIn('x', C.__dict__) + + def test_class_var_with_default(self): + # If a ClassVar has a default value, it should be set on the class. + @dataclass + class C: + x: ClassVar[int] = 10 + self.assertEqual(C.x, 10) + + @dataclass + class C: + x: ClassVar[int] = field(default=10) + self.assertEqual(C.x, 10) + + def test_class_var_frozen(self): + # Make sure ClassVars work even if we're frozen. + @dataclass(frozen=True) + class C: + x: int + y: int = 10 + z: ClassVar[int] = 1000 + w: ClassVar[int] = 2000 + t: ClassVar[int] = 3000 + + c = C(5) + self.assertEqual(repr(C(5)), 'TestCase.test_class_var_frozen..C(x=5, y=10)') + self.assertEqual(len(fields(C)), 2) # We have 2 fields + self.assertEqual(len(C.__annotations__), 5) # And 3 ClassVars + self.assertEqual(c.z, 1000) + self.assertEqual(c.w, 2000) + self.assertEqual(c.t, 3000) + # We can still modify the ClassVar, it's only instances that are + # frozen. 
+ C.z += 1 + self.assertEqual(c.z, 1001) + c = C(20) + self.assertEqual((c.x, c.y), (20, 10)) + self.assertEqual(c.z, 1001) + self.assertEqual(c.w, 2000) + self.assertEqual(c.t, 3000) + + def test_init_var_no_default(self): + # If an InitVar has no default value, it should not be set on the class. + @dataclass + class C: + x: InitVar[int] + + self.assertNotIn('x', C.__dict__) + + def test_init_var_default_factory(self): + # It makes no sense for an InitVar to have a default factory. When + # would it be called? Call it yourself, since it's class-wide. + with self.assertRaisesRegex(TypeError, + 'cannot have a default factory'): + @dataclass + class C: + x: InitVar[int] = field(default_factory=int) + + self.assertNotIn('x', C.__dict__) + + def test_init_var_with_default(self): + # If an InitVar has a default value, it should be set on the class. + @dataclass + class C: + x: InitVar[int] = 10 + self.assertEqual(C.x, 10) + + @dataclass + class C: + x: InitVar[int] = field(default=10) + self.assertEqual(C.x, 10) + + def test_init_var(self): + @dataclass + class C: + x: int = None + init_param: InitVar[int] = None + + def __post_init__(self, init_param): + if self.x is None: + self.x = init_param*2 + + c = C(init_param=10) + self.assertEqual(c.x, 20) + + def test_init_var_preserve_type(self): + self.assertEqual(InitVar[int].type, int) + + # Make sure the repr is correct. + self.assertEqual(repr(InitVar[int]), 'dataclasses.InitVar[int]') + self.assertEqual(repr(InitVar[List[int]]), + 'dataclasses.InitVar[typing.List[int]]') + self.assertEqual(repr(InitVar[list[int]]), + 'dataclasses.InitVar[list[int]]') + self.assertEqual(repr(InitVar[int|str]), + 'dataclasses.InitVar[int | str]') + + def test_init_var_inheritance(self): + # Note that this deliberately tests that a dataclass need not + # have a __post_init__ function if it has an InitVar field. + # It could just be used in a derived class, as shown here. 
+ @dataclass + class Base: + x: int + init_base: InitVar[int] + + # We can instantiate by passing the InitVar, even though + # it's not used. + b = Base(0, 10) + self.assertEqual(vars(b), {'x': 0}) + + @dataclass + class C(Base): + y: int + init_derived: InitVar[int] + + def __post_init__(self, init_base, init_derived): + self.x = self.x + init_base + self.y = self.y + init_derived + + c = C(10, 11, 50, 51) + self.assertEqual(vars(c), {'x': 21, 'y': 101}) + + def test_default_factory(self): + # Test a factory that returns a new list. + @dataclass + class C: + x: int + y: list = field(default_factory=list) + + c0 = C(3) + c1 = C(3) + self.assertEqual(c0.x, 3) + self.assertEqual(c0.y, []) + self.assertEqual(c0, c1) + self.assertIsNot(c0.y, c1.y) + self.assertEqual(astuple(C(5, [1])), (5, [1])) + + # Test a factory that returns a shared list. + l = [] + @dataclass + class C: + x: int + y: list = field(default_factory=lambda: l) + + c0 = C(3) + c1 = C(3) + self.assertEqual(c0.x, 3) + self.assertEqual(c0.y, []) + self.assertEqual(c0, c1) + self.assertIs(c0.y, c1.y) + self.assertEqual(astuple(C(5, [1])), (5, [1])) + + # Test various other field flags. + # repr + @dataclass + class C: + x: list = field(default_factory=list, repr=False) + self.assertEqual(repr(C()), 'TestCase.test_default_factory..C()') + self.assertEqual(C().x, []) + + # hash + @dataclass(unsafe_hash=True) + class C: + x: list = field(default_factory=list, hash=False) + self.assertEqual(astuple(C()), ([],)) + self.assertEqual(hash(C()), hash(())) + + # init (see also test_default_factory_with_no_init) + @dataclass + class C: + x: list = field(default_factory=list, init=False) + self.assertEqual(astuple(C()), ([],)) + + # compare + @dataclass + class C: + x: list = field(default_factory=list, compare=False) + self.assertEqual(C(), C([1])) + + def test_default_factory_with_no_init(self): + # We need a factory with a side effect. 
+ factory = Mock() + + @dataclass + class C: + x: list = field(default_factory=factory, init=False) + + # Make sure the default factory is called for each new instance. + C().x + self.assertEqual(factory.call_count, 1) + C().x + self.assertEqual(factory.call_count, 2) + + def test_default_factory_not_called_if_value_given(self): + # We need a factory that we can test if it's been called. + factory = Mock() + + @dataclass + class C: + x: int = field(default_factory=factory) + + # Make sure that if a field has a default factory function, + # it's not called if a value is specified. + C().x + self.assertEqual(factory.call_count, 1) + self.assertEqual(C(10).x, 10) + self.assertEqual(factory.call_count, 1) + C().x + self.assertEqual(factory.call_count, 2) + + def test_default_factory_derived(self): + # See bpo-32896. + @dataclass + class Foo: + x: dict = field(default_factory=dict) + + @dataclass + class Bar(Foo): + y: int = 1 + + self.assertEqual(Foo().x, {}) + self.assertEqual(Bar().x, {}) + self.assertEqual(Bar().y, 1) + + @dataclass + class Baz(Foo): + pass + self.assertEqual(Baz().x, {}) + + def test_intermediate_non_dataclass(self): + # Test that an intermediate class that defines + # annotations does not define fields. + + @dataclass + class A: + x: int + + class B(A): + y: int + + @dataclass + class C(B): + z: int + + c = C(1, 3) + self.assertEqual((c.x, c.z), (1, 3)) + + # .y was not initialized. + with self.assertRaisesRegex(AttributeError, + 'object has no attribute'): + c.y + + # And if we again derive a non-dataclass, no fields are added. + class D(C): + t: int + d = D(4, 5) + self.assertEqual((d.x, d.z), (4, 5)) + + def test_classvar_default_factory(self): + # It's an error for a ClassVar to have a factory function. 
+ with self.assertRaisesRegex(TypeError, + 'cannot have a default factory'): + @dataclass + class C: + x: ClassVar[int] = field(default_factory=int) + + def test_is_dataclass(self): + class NotDataClass: + pass + + self.assertFalse(is_dataclass(0)) + self.assertFalse(is_dataclass(int)) + self.assertFalse(is_dataclass(NotDataClass)) + self.assertFalse(is_dataclass(NotDataClass())) + + @dataclass + class C: + x: int + + @dataclass + class D: + d: C + e: int + + c = C(10) + d = D(c, 4) + + self.assertTrue(is_dataclass(C)) + self.assertTrue(is_dataclass(c)) + self.assertFalse(is_dataclass(c.x)) + self.assertTrue(is_dataclass(d.d)) + self.assertFalse(is_dataclass(d.e)) + + def test_is_dataclass_when_getattr_always_returns(self): + # See bpo-37868. + class A: + def __getattr__(self, key): + return 0 + self.assertFalse(is_dataclass(A)) + a = A() + + # Also test for an instance attribute. + class B: + pass + b = B() + b.__dataclass_fields__ = [] + + for obj in a, b: + with self.subTest(obj=obj): + self.assertFalse(is_dataclass(obj)) + + # Indirect tests for _is_dataclass_instance(). + with self.assertRaisesRegex(TypeError, 'should be called on dataclass instances'): + asdict(obj) + with self.assertRaisesRegex(TypeError, 'should be called on dataclass instances'): + astuple(obj) + with self.assertRaisesRegex(TypeError, 'should be called on dataclass instances'): + replace(obj, x=0) + + def test_is_dataclass_genericalias(self): + @dataclass + class A(types.GenericAlias): + origin: type + args: type + self.assertTrue(is_dataclass(A)) + a = A(list, int) + self.assertTrue(is_dataclass(type(a))) + self.assertTrue(is_dataclass(a)) + + + def test_helper_fields_with_class_instance(self): + # Check that we can call fields() on either a class or instance, + # and get back the same thing. 
+ @dataclass + class C: + x: int + y: float + + self.assertEqual(fields(C), fields(C(0, 0.0))) + + def test_helper_fields_exception(self): + # Check that TypeError is raised if not passed a dataclass or + # instance. + with self.assertRaisesRegex(TypeError, 'dataclass type or instance'): + fields(0) + + class C: pass + with self.assertRaisesRegex(TypeError, 'dataclass type or instance'): + fields(C) + with self.assertRaisesRegex(TypeError, 'dataclass type or instance'): + fields(C()) + + def test_helper_asdict(self): + # Basic tests for asdict(), it should return a new dictionary. + @dataclass + class C: + x: int + y: int + c = C(1, 2) + + self.assertEqual(asdict(c), {'x': 1, 'y': 2}) + self.assertEqual(asdict(c), asdict(c)) + self.assertIsNot(asdict(c), asdict(c)) + c.x = 42 + self.assertEqual(asdict(c), {'x': 42, 'y': 2}) + self.assertIs(type(asdict(c)), dict) + + def test_helper_asdict_raises_on_classes(self): + # asdict() should raise on a class object. + @dataclass + class C: + x: int + y: int + with self.assertRaisesRegex(TypeError, 'dataclass instance'): + asdict(C) + with self.assertRaisesRegex(TypeError, 'dataclass instance'): + asdict(int) + + def test_helper_asdict_copy_values(self): + @dataclass + class C: + x: int + y: List[int] = field(default_factory=list) + initial = [] + c = C(1, initial) + d = asdict(c) + self.assertEqual(d['y'], initial) + self.assertIsNot(d['y'], initial) + c = C(1) + d = asdict(c) + d['y'].append(1) + self.assertEqual(c.y, []) + + def test_helper_asdict_nested(self): + @dataclass + class UserId: + token: int + group: int + @dataclass + class User: + name: str + id: UserId + u = User('Joe', UserId(123, 1)) + d = asdict(u) + self.assertEqual(d, {'name': 'Joe', 'id': {'token': 123, 'group': 1}}) + self.assertIsNot(asdict(u), asdict(u)) + u.id.group = 2 + self.assertEqual(asdict(u), {'name': 'Joe', + 'id': {'token': 123, 'group': 2}}) + + def test_helper_asdict_builtin_containers(self): + @dataclass + class User: + name: str + id: 
int + @dataclass + class GroupList: + id: int + users: List[User] + @dataclass + class GroupTuple: + id: int + users: Tuple[User, ...] + @dataclass + class GroupDict: + id: int + users: Dict[str, User] + a = User('Alice', 1) + b = User('Bob', 2) + gl = GroupList(0, [a, b]) + gt = GroupTuple(0, (a, b)) + gd = GroupDict(0, {'first': a, 'second': b}) + self.assertEqual(asdict(gl), {'id': 0, 'users': [{'name': 'Alice', 'id': 1}, + {'name': 'Bob', 'id': 2}]}) + self.assertEqual(asdict(gt), {'id': 0, 'users': ({'name': 'Alice', 'id': 1}, + {'name': 'Bob', 'id': 2})}) + self.assertEqual(asdict(gd), {'id': 0, 'users': {'first': {'name': 'Alice', 'id': 1}, + 'second': {'name': 'Bob', 'id': 2}}}) + + def test_helper_asdict_builtin_object_containers(self): + @dataclass + class Child: + d: object + + @dataclass + class Parent: + child: Child + + self.assertEqual(asdict(Parent(Child([1]))), {'child': {'d': [1]}}) + self.assertEqual(asdict(Parent(Child({1: 2}))), {'child': {'d': {1: 2}}}) + + def test_helper_asdict_factory(self): + @dataclass + class C: + x: int + y: int + c = C(1, 2) + d = asdict(c, dict_factory=OrderedDict) + self.assertEqual(d, OrderedDict([('x', 1), ('y', 2)])) + self.assertIsNot(d, asdict(c, dict_factory=OrderedDict)) + c.x = 42 + d = asdict(c, dict_factory=OrderedDict) + self.assertEqual(d, OrderedDict([('x', 42), ('y', 2)])) + self.assertIs(type(d), OrderedDict) + + def test_helper_asdict_namedtuple(self): + T = namedtuple('T', 'a b c') + @dataclass + class C: + x: str + y: T + c = C('outer', T(1, C('inner', T(11, 12, 13)), 2)) + + d = asdict(c) + self.assertEqual(d, {'x': 'outer', + 'y': T(1, + {'x': 'inner', + 'y': T(11, 12, 13)}, + 2), + } + ) + + # Now with a dict_factory. OrderedDict is convenient, but + # since it compares to dicts, we also need to have separate + # assertIs tests. 
+ d = asdict(c, dict_factory=OrderedDict) + self.assertEqual(d, {'x': 'outer', + 'y': T(1, + {'x': 'inner', + 'y': T(11, 12, 13)}, + 2), + } + ) + + # Make sure that the returned dicts are actually OrderedDicts. + self.assertIs(type(d), OrderedDict) + self.assertIs(type(d['y'][1]), OrderedDict) + + def test_helper_asdict_namedtuple_key(self): + # Ensure that a field that contains a dict which has a + # namedtuple as a key works with asdict(). + + @dataclass + class C: + f: dict + T = namedtuple('T', 'a') + + c = C({T('an a'): 0}) + + self.assertEqual(asdict(c), {'f': {T(a='an a'): 0}}) + + def test_helper_asdict_namedtuple_derived(self): + class T(namedtuple('Tbase', 'a')): + def my_a(self): + return self.a + + @dataclass + class C: + f: T + + t = T(6) + c = C(t) + + d = asdict(c) + self.assertEqual(d, {'f': T(a=6)}) + # Make sure that t has been copied, not used directly. + self.assertIsNot(d['f'], t) + self.assertEqual(d['f'].my_a(), 6) + + def test_helper_astuple(self): + # Basic tests for astuple(), it should return a new tuple. + @dataclass + class C: + x: int + y: int = 0 + c = C(1) + + self.assertEqual(astuple(c), (1, 0)) + self.assertEqual(astuple(c), astuple(c)) + self.assertIsNot(astuple(c), astuple(c)) + c.y = 42 + self.assertEqual(astuple(c), (1, 42)) + self.assertIs(type(astuple(c)), tuple) + + def test_helper_astuple_raises_on_classes(self): + # astuple() should raise on a class object. 
+ @dataclass + class C: + x: int + y: int + with self.assertRaisesRegex(TypeError, 'dataclass instance'): + astuple(C) + with self.assertRaisesRegex(TypeError, 'dataclass instance'): + astuple(int) + + def test_helper_astuple_copy_values(self): + @dataclass + class C: + x: int + y: List[int] = field(default_factory=list) + initial = [] + c = C(1, initial) + t = astuple(c) + self.assertEqual(t[1], initial) + self.assertIsNot(t[1], initial) + c = C(1) + t = astuple(c) + t[1].append(1) + self.assertEqual(c.y, []) + + def test_helper_astuple_nested(self): + @dataclass + class UserId: + token: int + group: int + @dataclass + class User: + name: str + id: UserId + u = User('Joe', UserId(123, 1)) + t = astuple(u) + self.assertEqual(t, ('Joe', (123, 1))) + self.assertIsNot(astuple(u), astuple(u)) + u.id.group = 2 + self.assertEqual(astuple(u), ('Joe', (123, 2))) + + def test_helper_astuple_builtin_containers(self): + @dataclass + class User: + name: str + id: int + @dataclass + class GroupList: + id: int + users: List[User] + @dataclass + class GroupTuple: + id: int + users: Tuple[User, ...] 
+ @dataclass + class GroupDict: + id: int + users: Dict[str, User] + a = User('Alice', 1) + b = User('Bob', 2) + gl = GroupList(0, [a, b]) + gt = GroupTuple(0, (a, b)) + gd = GroupDict(0, {'first': a, 'second': b}) + self.assertEqual(astuple(gl), (0, [('Alice', 1), ('Bob', 2)])) + self.assertEqual(astuple(gt), (0, (('Alice', 1), ('Bob', 2)))) + self.assertEqual(astuple(gd), (0, {'first': ('Alice', 1), 'second': ('Bob', 2)})) + + def test_helper_astuple_builtin_object_containers(self): + @dataclass + class Child: + d: object + + @dataclass + class Parent: + child: Child + + self.assertEqual(astuple(Parent(Child([1]))), (([1],),)) + self.assertEqual(astuple(Parent(Child({1: 2}))), (({1: 2},),)) + + def test_helper_astuple_factory(self): + @dataclass + class C: + x: int + y: int + NT = namedtuple('NT', 'x y') + def nt(lst): + return NT(*lst) + c = C(1, 2) + t = astuple(c, tuple_factory=nt) + self.assertEqual(t, NT(1, 2)) + self.assertIsNot(t, astuple(c, tuple_factory=nt)) + c.x = 42 + t = astuple(c, tuple_factory=nt) + self.assertEqual(t, NT(42, 2)) + self.assertIs(type(t), NT) + + def test_helper_astuple_namedtuple(self): + T = namedtuple('T', 'a b c') + @dataclass + class C: + x: str + y: T + c = C('outer', T(1, C('inner', T(11, 12, 13)), 2)) + + t = astuple(c) + self.assertEqual(t, ('outer', T(1, ('inner', (11, 12, 13)), 2))) + + # Now, using a tuple_factory. list is convenient here. + t = astuple(c, tuple_factory=list) + self.assertEqual(t, ['outer', T(1, ['inner', T(11, 12, 13)], 2)]) + + def test_dynamic_class_creation(self): + cls_dict = {'__annotations__': {'x': int, 'y': int}, + } + + # Create the class. + cls = type('C', (), cls_dict) + + # Make it a dataclass. + cls1 = dataclass(cls) + + self.assertEqual(cls1, cls) + self.assertEqual(asdict(cls(1, 2)), {'x': 1, 'y': 2}) + + def test_dynamic_class_creation_using_field(self): + cls_dict = {'__annotations__': {'x': int, 'y': int}, + 'y': field(default=5), + } + + # Create the class. 
+ cls = type('C', (), cls_dict) + + # Make it a dataclass. + cls1 = dataclass(cls) + + self.assertEqual(cls1, cls) + self.assertEqual(asdict(cls1(1)), {'x': 1, 'y': 5}) + + def test_init_in_order(self): + @dataclass + class C: + a: int + b: int = field() + c: list = field(default_factory=list, init=False) + d: list = field(default_factory=list) + e: int = field(default=4, init=False) + f: int = 4 + + calls = [] + def setattr(self, name, value): + calls.append((name, value)) + + C.__setattr__ = setattr + c = C(0, 1) + self.assertEqual(('a', 0), calls[0]) + self.assertEqual(('b', 1), calls[1]) + self.assertEqual(('c', []), calls[2]) + self.assertEqual(('d', []), calls[3]) + self.assertNotIn(('e', 4), calls) + self.assertEqual(('f', 4), calls[4]) + + def test_items_in_dicts(self): + @dataclass + class C: + a: int + b: list = field(default_factory=list, init=False) + c: list = field(default_factory=list) + d: int = field(default=4, init=False) + e: int = 0 + + c = C(0) + # Class dict + self.assertNotIn('a', C.__dict__) + self.assertNotIn('b', C.__dict__) + self.assertNotIn('c', C.__dict__) + self.assertIn('d', C.__dict__) + self.assertEqual(C.d, 4) + self.assertIn('e', C.__dict__) + self.assertEqual(C.e, 0) + # Instance dict + self.assertIn('a', c.__dict__) + self.assertEqual(c.a, 0) + self.assertIn('b', c.__dict__) + self.assertEqual(c.b, []) + self.assertIn('c', c.__dict__) + self.assertEqual(c.c, []) + self.assertNotIn('d', c.__dict__) + self.assertIn('e', c.__dict__) + self.assertEqual(c.e, 0) + + def test_alternate_classmethod_constructor(self): + # Since __post_init__ can't take params, use a classmethod + # alternate constructor. This is mostly an example to show + # how to use this technique. + @dataclass + class C: + x: int + @classmethod + def from_file(cls, filename): + # In a real example, create a new instance + # and populate 'x' from contents of a file. 
+ value_in_file = 20 + return cls(value_in_file) + + self.assertEqual(C.from_file('filename').x, 20) + + def test_field_metadata_default(self): + # Make sure the default metadata is read-only and of + # zero length. + @dataclass + class C: + i: int + + self.assertFalse(fields(C)[0].metadata) + self.assertEqual(len(fields(C)[0].metadata), 0) + with self.assertRaisesRegex(TypeError, + 'does not support item assignment'): + fields(C)[0].metadata['test'] = 3 + + def test_field_metadata_mapping(self): + # Make sure only a mapping can be passed as metadata + # zero length. + with self.assertRaises(TypeError): + @dataclass + class C: + i: int = field(metadata=0) + + # Make sure an empty dict works. + d = {} + @dataclass + class C: + i: int = field(metadata=d) + self.assertFalse(fields(C)[0].metadata) + self.assertEqual(len(fields(C)[0].metadata), 0) + # Update should work (see bpo-35960). + d['foo'] = 1 + self.assertEqual(len(fields(C)[0].metadata), 1) + self.assertEqual(fields(C)[0].metadata['foo'], 1) + with self.assertRaisesRegex(TypeError, + 'does not support item assignment'): + fields(C)[0].metadata['test'] = 3 + + # Make sure a non-empty dict works. + d = {'test': 10, 'bar': '42', 3: 'three'} + @dataclass + class C: + i: int = field(metadata=d) + self.assertEqual(len(fields(C)[0].metadata), 3) + self.assertEqual(fields(C)[0].metadata['test'], 10) + self.assertEqual(fields(C)[0].metadata['bar'], '42') + self.assertEqual(fields(C)[0].metadata[3], 'three') + # Update should work. + d['foo'] = 1 + self.assertEqual(len(fields(C)[0].metadata), 4) + self.assertEqual(fields(C)[0].metadata['foo'], 1) + with self.assertRaises(KeyError): + # Non-existent key. + fields(C)[0].metadata['baz'] + with self.assertRaisesRegex(TypeError, + 'does not support item assignment'): + fields(C)[0].metadata['test'] = 3 + + def test_field_metadata_custom_mapping(self): + # Try a custom mapping. 
+ class SimpleNameSpace: + def __init__(self, **kw): + self.__dict__.update(kw) + + def __getitem__(self, item): + if item == 'xyzzy': + return 'plugh' + return getattr(self, item) + + def __len__(self): + return self.__dict__.__len__() + + @dataclass + class C: + i: int = field(metadata=SimpleNameSpace(a=10)) + + self.assertEqual(len(fields(C)[0].metadata), 1) + self.assertEqual(fields(C)[0].metadata['a'], 10) + with self.assertRaises(AttributeError): + fields(C)[0].metadata['b'] + # Make sure we're still talking to our custom mapping. + self.assertEqual(fields(C)[0].metadata['xyzzy'], 'plugh') + + def test_generic_dataclasses(self): + T = TypeVar('T') + + @dataclass + class LabeledBox(Generic[T]): + content: T + label: str = '' + + box = LabeledBox(42) + self.assertEqual(box.content, 42) + self.assertEqual(box.label, '') + + # Subscripting the resulting class should work, etc. + Alias = List[LabeledBox[int]] + + def test_generic_extending(self): + S = TypeVar('S') + T = TypeVar('T') + + @dataclass + class Base(Generic[T, S]): + x: T + y: S + + @dataclass + class DataDerived(Base[int, T]): + new_field: str + Alias = DataDerived[str] + c = Alias(0, 'test1', 'test2') + self.assertEqual(astuple(c), (0, 'test1', 'test2')) + + class NonDataDerived(Base[int, T]): + def new_method(self): + return self.y + Alias = NonDataDerived[float] + c = Alias(10, 1.0) + self.assertEqual(c.new_method(), 1.0) + + def test_generic_dynamic(self): + T = TypeVar('T') + + @dataclass + class Parent(Generic[T]): + x: T + Child = make_dataclass('Child', [('y', T), ('z', Optional[T], None)], + bases=(Parent[int], Generic[T]), namespace={'other': 42}) + self.assertIs(Child[int](1, 2).z, None) + self.assertEqual(Child[int](1, 2, 3).z, 3) + self.assertEqual(Child[int](1, 2, 3).other, 42) + # Check that type aliases work correctly. + Alias = Child[T] + self.assertEqual(Alias[int](1, 2).x, 1) + # Check MRO resolution. 
+ self.assertEqual(Child.__mro__, (Child, Parent, Generic, object)) + + def test_dataclasses_pickleable(self): + global P, Q, R + @dataclass + class P: + x: int + y: int = 0 + @dataclass + class Q: + x: int + y: int = field(default=0, init=False) + @dataclass + class R: + x: int + y: List[int] = field(default_factory=list) + q = Q(1) + q.y = 2 + samples = [P(1), P(1, 2), Q(1), q, R(1), R(1, [2, 3, 4])] + for sample in samples: + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.subTest(sample=sample, proto=proto): + new_sample = pickle.loads(pickle.dumps(sample, proto)) + self.assertEqual(sample.x, new_sample.x) + self.assertEqual(sample.y, new_sample.y) + self.assertIsNot(sample, new_sample) + new_sample.x = 42 + another_new_sample = pickle.loads(pickle.dumps(new_sample, proto)) + self.assertEqual(new_sample.x, another_new_sample.x) + self.assertEqual(sample.y, another_new_sample.y) + + def test_dataclasses_qualnames(self): + @dataclass(order=True, unsafe_hash=True, frozen=True) + class A: + x: int + y: int + + self.assertEqual(A.__init__.__name__, "__init__") + for function in ( + '__eq__', + '__lt__', + '__le__', + '__gt__', + '__ge__', + '__hash__', + '__init__', + '__repr__', + '__setattr__', + '__delattr__', + ): + self.assertEqual(getattr(A, function).__qualname__, f"TestCase.test_dataclasses_qualnames..A.{function}") + + with self.assertRaisesRegex(TypeError, r"A\.__init__\(\) missing"): + A() + + +class TestFieldNoAnnotation(unittest.TestCase): + def test_field_without_annotation(self): + with self.assertRaisesRegex(TypeError, + "'f' is a field but has no type annotation"): + @dataclass + class C: + f = field() + + def test_field_without_annotation_but_annotation_in_base(self): + @dataclass + class B: + f: int + + with self.assertRaisesRegex(TypeError, + "'f' is a field but has no type annotation"): + # This is still an error: make sure we don't pick up the + # type annotation in the base class. 
+ @dataclass + class C(B): + f = field() + + def test_field_without_annotation_but_annotation_in_base_not_dataclass(self): + # Same test, but with the base class not a dataclass. + class B: + f: int + + with self.assertRaisesRegex(TypeError, + "'f' is a field but has no type annotation"): + # This is still an error: make sure we don't pick up the + # type annotation in the base class. + @dataclass + class C(B): + f = field() + + +class TestDocString(unittest.TestCase): + def assertDocStrEqual(self, a, b): + # Because 3.6 and 3.7 differ in how inspect.signature work + # (see bpo #32108), for the time being just compare them with + # whitespace stripped. + self.assertEqual(a.replace(' ', ''), b.replace(' ', '')) + + def test_existing_docstring_not_overridden(self): + @dataclass + class C: + """Lorem ipsum""" + x: int + + self.assertEqual(C.__doc__, "Lorem ipsum") + + def test_docstring_no_fields(self): + @dataclass + class C: + pass + + self.assertDocStrEqual(C.__doc__, "C()") + + def test_docstring_one_field(self): + @dataclass + class C: + x: int + + self.assertDocStrEqual(C.__doc__, "C(x:int)") + + def test_docstring_two_fields(self): + @dataclass + class C: + x: int + y: int + + self.assertDocStrEqual(C.__doc__, "C(x:int, y:int)") + + def test_docstring_three_fields(self): + @dataclass + class C: + x: int + y: int + z: str + + self.assertDocStrEqual(C.__doc__, "C(x:int, y:int, z:str)") + + def test_docstring_one_field_with_default(self): + @dataclass + class C: + x: int = 3 + + self.assertDocStrEqual(C.__doc__, "C(x:int=3)") + + def test_docstring_one_field_with_default_none(self): + @dataclass + class C: + x: Union[int, type(None)] = None + + self.assertDocStrEqual(C.__doc__, "C(x:Optional[int]=None)") + + def test_docstring_list_field(self): + @dataclass + class C: + x: List[int] + + self.assertDocStrEqual(C.__doc__, "C(x:List[int])") + + def test_docstring_list_field_with_default_factory(self): + @dataclass + class C: + x: List[int] = 
field(default_factory=list) + + self.assertDocStrEqual(C.__doc__, "C(x:List[int]=)") + + def test_docstring_deque_field(self): + @dataclass + class C: + x: deque + + self.assertDocStrEqual(C.__doc__, "C(x:collections.deque)") + + def test_docstring_deque_field_with_default_factory(self): + @dataclass + class C: + x: deque = field(default_factory=deque) + + self.assertDocStrEqual(C.__doc__, "C(x:collections.deque=)") + + +class TestInit(unittest.TestCase): + def test_base_has_init(self): + class B: + def __init__(self): + self.z = 100 + pass + + # Make sure that declaring this class doesn't raise an error. + # The issue is that we can't override __init__ in our class, + # but it should be okay to add __init__ to us if our base has + # an __init__. + @dataclass + class C(B): + x: int = 0 + c = C(10) + self.assertEqual(c.x, 10) + self.assertNotIn('z', vars(c)) + + # Make sure that if we don't add an init, the base __init__ + # gets called. + @dataclass(init=False) + class C(B): + x: int = 10 + c = C() + self.assertEqual(c.x, 10) + self.assertEqual(c.z, 100) + + def test_no_init(self): + @dataclass(init=False) + class C: + i: int = 0 + self.assertEqual(C().i, 0) + + @dataclass(init=False) + class C: + i: int = 2 + def __init__(self): + self.i = 3 + self.assertEqual(C().i, 3) + + def test_overwriting_init(self): + # If the class has __init__, use it no matter the value of + # init=. + + @dataclass + class C: + x: int + def __init__(self, x): + self.x = 2 * x + self.assertEqual(C(3).x, 6) + + @dataclass(init=True) + class C: + x: int + def __init__(self, x): + self.x = 2 * x + self.assertEqual(C(4).x, 8) + + @dataclass(init=False) + class C: + x: int + def __init__(self, x): + self.x = 2 * x + self.assertEqual(C(5).x, 10) + + def test_inherit_from_protocol(self): + # Dataclasses inheriting from protocol should preserve their own `__init__`. + # See bpo-45081. 
+ + class P(Protocol): + a: int + + @dataclass + class C(P): + a: int + + self.assertEqual(C(5).a, 5) + + @dataclass + class D(P): + def __init__(self, a): + self.a = a * 2 + + self.assertEqual(D(5).a, 10) + + +class TestRepr(unittest.TestCase): + def test_repr(self): + @dataclass + class B: + x: int + + @dataclass + class C(B): + y: int = 10 + + o = C(4) + self.assertEqual(repr(o), 'TestRepr.test_repr..C(x=4, y=10)') + + @dataclass + class D(C): + x: int = 20 + self.assertEqual(repr(D()), 'TestRepr.test_repr..D(x=20, y=10)') + + @dataclass + class C: + @dataclass + class D: + i: int + @dataclass + class E: + pass + self.assertEqual(repr(C.D(0)), 'TestRepr.test_repr..C.D(i=0)') + self.assertEqual(repr(C.E()), 'TestRepr.test_repr..C.E()') + + def test_no_repr(self): + # Test a class with no __repr__ and repr=False. + @dataclass(repr=False) + class C: + x: int + self.assertIn(f'{__name__}.TestRepr.test_no_repr..C object at', + repr(C(3))) + + # Test a class with a __repr__ and repr=False. + @dataclass(repr=False) + class C: + x: int + def __repr__(self): + return 'C-class' + self.assertEqual(repr(C(3)), 'C-class') + + def test_overwriting_repr(self): + # If the class has __repr__, use it no matter the value of + # repr=. + + @dataclass + class C: + x: int + def __repr__(self): + return 'x' + self.assertEqual(repr(C(0)), 'x') + + @dataclass(repr=True) + class C: + x: int + def __repr__(self): + return 'x' + self.assertEqual(repr(C(0)), 'x') + + @dataclass(repr=False) + class C: + x: int + def __repr__(self): + return 'x' + self.assertEqual(repr(C(0)), 'x') + + +class TestEq(unittest.TestCase): + def test_no_eq(self): + # Test a class with no __eq__ and eq=False. + @dataclass(eq=False) + class C: + x: int + self.assertNotEqual(C(0), C(0)) + c = C(3) + self.assertEqual(c, c) + + # Test a class with an __eq__ and eq=False. 
+ @dataclass(eq=False) + class C: + x: int + def __eq__(self, other): + return other == 10 + self.assertEqual(C(3), 10) + + def test_overwriting_eq(self): + # If the class has __eq__, use it no matter the value of + # eq=. + + @dataclass + class C: + x: int + def __eq__(self, other): + return other == 3 + self.assertEqual(C(1), 3) + self.assertNotEqual(C(1), 1) + + @dataclass(eq=True) + class C: + x: int + def __eq__(self, other): + return other == 4 + self.assertEqual(C(1), 4) + self.assertNotEqual(C(1), 1) + + @dataclass(eq=False) + class C: + x: int + def __eq__(self, other): + return other == 5 + self.assertEqual(C(1), 5) + self.assertNotEqual(C(1), 1) + + +class TestOrdering(unittest.TestCase): + def test_functools_total_ordering(self): + # Test that functools.total_ordering works with this class. + @total_ordering + @dataclass + class C: + x: int + def __lt__(self, other): + # Perform the test "backward", just to make + # sure this is being called. + return self.x >= other + + self.assertLess(C(0), -1) + self.assertLessEqual(C(0), -1) + self.assertGreater(C(0), 1) + self.assertGreaterEqual(C(0), 1) + + def test_no_order(self): + # Test that no ordering functions are added by default. + @dataclass(order=False) + class C: + x: int + # Make sure no order methods are added. + self.assertNotIn('__le__', C.__dict__) + self.assertNotIn('__lt__', C.__dict__) + self.assertNotIn('__ge__', C.__dict__) + self.assertNotIn('__gt__', C.__dict__) + + # Test that __lt__ is still called + @dataclass(order=False) + class C: + x: int + def __lt__(self, other): + return False + # Make sure other methods aren't added. 
+ self.assertNotIn('__le__', C.__dict__) + self.assertNotIn('__ge__', C.__dict__) + self.assertNotIn('__gt__', C.__dict__) + + def test_overwriting_order(self): + with self.assertRaisesRegex(TypeError, + 'Cannot overwrite attribute __lt__' + '.*using functools.total_ordering'): + @dataclass(order=True) + class C: + x: int + def __lt__(self): + pass + + with self.assertRaisesRegex(TypeError, + 'Cannot overwrite attribute __le__' + '.*using functools.total_ordering'): + @dataclass(order=True) + class C: + x: int + def __le__(self): + pass + + with self.assertRaisesRegex(TypeError, + 'Cannot overwrite attribute __gt__' + '.*using functools.total_ordering'): + @dataclass(order=True) + class C: + x: int + def __gt__(self): + pass + + with self.assertRaisesRegex(TypeError, + 'Cannot overwrite attribute __ge__' + '.*using functools.total_ordering'): + @dataclass(order=True) + class C: + x: int + def __ge__(self): + pass + +class TestHash(unittest.TestCase): + def test_unsafe_hash(self): + @dataclass(unsafe_hash=True) + class C: + x: int + y: str + self.assertEqual(hash(C(1, 'foo')), hash((1, 'foo'))) + + def test_hash_rules(self): + def non_bool(value): + # Map to something else that's True, but not a bool. + if value is None: + return None + if value: + return (3,) + return 0 + + def test(case, unsafe_hash, eq, frozen, with_hash, result): + with self.subTest(case=case, unsafe_hash=unsafe_hash, eq=eq, + frozen=frozen): + if result != 'exception': + if with_hash: + @dataclass(unsafe_hash=unsafe_hash, eq=eq, frozen=frozen) + class C: + def __hash__(self): + return 0 + else: + @dataclass(unsafe_hash=unsafe_hash, eq=eq, frozen=frozen) + class C: + pass + + # See if the result matches what's expected. + if result == 'fn': + # __hash__ contains the function we generated. + self.assertIn('__hash__', C.__dict__) + self.assertIsNotNone(C.__dict__['__hash__']) + + elif result == '': + # __hash__ is not present in our class. 
+ if not with_hash: + self.assertNotIn('__hash__', C.__dict__) + + elif result == 'none': + # __hash__ is set to None. + self.assertIn('__hash__', C.__dict__) + self.assertIsNone(C.__dict__['__hash__']) + + elif result == 'exception': + # Creating the class should cause an exception. + # This only happens with with_hash==True. + assert(with_hash) + with self.assertRaisesRegex(TypeError, 'Cannot overwrite attribute __hash__'): + @dataclass(unsafe_hash=unsafe_hash, eq=eq, frozen=frozen) + class C: + def __hash__(self): + return 0 + + else: + assert False, f'unknown result {result!r}' + + # There are 8 cases of: + # unsafe_hash=True/False + # eq=True/False + # frozen=True/False + # And for each of these, a different result if + # __hash__ is defined or not. + for case, (unsafe_hash, eq, frozen, res_no_defined_hash, res_defined_hash) in enumerate([ + (False, False, False, '', ''), + (False, False, True, '', ''), + (False, True, False, 'none', ''), + (False, True, True, 'fn', ''), + (True, False, False, 'fn', 'exception'), + (True, False, True, 'fn', 'exception'), + (True, True, False, 'fn', 'exception'), + (True, True, True, 'fn', 'exception'), + ], 1): + test(case, unsafe_hash, eq, frozen, False, res_no_defined_hash) + test(case, unsafe_hash, eq, frozen, True, res_defined_hash) + + # Test non-bool truth values, too. This is just to + # make sure the data-driven table in the decorator + # handles non-bool values. + test(case, non_bool(unsafe_hash), non_bool(eq), non_bool(frozen), False, res_no_defined_hash) + test(case, non_bool(unsafe_hash), non_bool(eq), non_bool(frozen), True, res_defined_hash) + + + def test_eq_only(self): + # If a class defines __eq__, __hash__ is automatically added + # and set to None. This is normal Python behavior, not + # related to dataclasses. Make sure we don't interfere with + # that (see bpo=32546). 
+ + @dataclass + class C: + i: int + def __eq__(self, other): + return self.i == other.i + self.assertEqual(C(1), C(1)) + self.assertNotEqual(C(1), C(4)) + + # And make sure things work in this case if we specify + # unsafe_hash=True. + @dataclass(unsafe_hash=True) + class C: + i: int + def __eq__(self, other): + return self.i == other.i + self.assertEqual(C(1), C(1.0)) + self.assertEqual(hash(C(1)), hash(C(1.0))) + + # And check that the classes __eq__ is being used, despite + # specifying eq=True. + @dataclass(unsafe_hash=True, eq=True) + class C: + i: int + def __eq__(self, other): + return self.i == 3 and self.i == other.i + self.assertEqual(C(3), C(3)) + self.assertNotEqual(C(1), C(1)) + self.assertEqual(hash(C(1)), hash(C(1.0))) + + def test_0_field_hash(self): + @dataclass(frozen=True) + class C: + pass + self.assertEqual(hash(C()), hash(())) + + @dataclass(unsafe_hash=True) + class C: + pass + self.assertEqual(hash(C()), hash(())) + + def test_1_field_hash(self): + @dataclass(frozen=True) + class C: + x: int + self.assertEqual(hash(C(4)), hash((4,))) + self.assertEqual(hash(C(42)), hash((42,))) + + @dataclass(unsafe_hash=True) + class C: + x: int + self.assertEqual(hash(C(4)), hash((4,))) + self.assertEqual(hash(C(42)), hash((42,))) + + def test_hash_no_args(self): + # Test dataclasses with no hash= argument. This exists to + # make sure that if the @dataclass parameter name is changed + # or the non-default hashing behavior changes, the default + # hashability keeps working the same way. + + class Base: + def __hash__(self): + return 301 + + # If frozen or eq is None, then use the default value (do not + # specify any value in the decorator). 
+ for frozen, eq, base, expected in [ + (None, None, object, 'unhashable'), + (None, None, Base, 'unhashable'), + (None, False, object, 'object'), + (None, False, Base, 'base'), + (None, True, object, 'unhashable'), + (None, True, Base, 'unhashable'), + (False, None, object, 'unhashable'), + (False, None, Base, 'unhashable'), + (False, False, object, 'object'), + (False, False, Base, 'base'), + (False, True, object, 'unhashable'), + (False, True, Base, 'unhashable'), + (True, None, object, 'tuple'), + (True, None, Base, 'tuple'), + (True, False, object, 'object'), + (True, False, Base, 'base'), + (True, True, object, 'tuple'), + (True, True, Base, 'tuple'), + ]: + + with self.subTest(frozen=frozen, eq=eq, base=base, expected=expected): + # First, create the class. + if frozen is None and eq is None: + @dataclass + class C(base): + i: int + elif frozen is None: + @dataclass(eq=eq) + class C(base): + i: int + elif eq is None: + @dataclass(frozen=frozen) + class C(base): + i: int + else: + @dataclass(frozen=frozen, eq=eq) + class C(base): + i: int + + # Now, make sure it hashes as expected. + if expected == 'unhashable': + c = C(10) + with self.assertRaisesRegex(TypeError, 'unhashable type'): + hash(c) + + elif expected == 'base': + self.assertEqual(hash(C(10)), 301) + + elif expected == 'object': + # I'm not sure what test to use here. object's + # hash isn't based on id(), so calling hash() + # won't tell us much. So, just check the + # function used is object's. 
+ self.assertIs(C.__hash__, object.__hash__) + + elif expected == 'tuple': + self.assertEqual(hash(C(42)), hash((42,))) + + else: + assert False, f'unknown value for expected={expected!r}' + + +class TestFrozen(unittest.TestCase): + def test_frozen(self): + @dataclass(frozen=True) + class C: + i: int + + c = C(10) + self.assertEqual(c.i, 10) + with self.assertRaises(FrozenInstanceError): + c.i = 5 + self.assertEqual(c.i, 10) + + def test_inherit(self): + @dataclass(frozen=True) + class C: + i: int + + @dataclass(frozen=True) + class D(C): + j: int + + d = D(0, 10) + with self.assertRaises(FrozenInstanceError): + d.i = 5 + with self.assertRaises(FrozenInstanceError): + d.j = 6 + self.assertEqual(d.i, 0) + self.assertEqual(d.j, 10) + + def test_inherit_nonfrozen_from_empty_frozen(self): + @dataclass(frozen=True) + class C: + pass + + with self.assertRaisesRegex(TypeError, + 'cannot inherit non-frozen dataclass from a frozen one'): + @dataclass + class D(C): + j: int + + def test_inherit_nonfrozen_from_empty(self): + @dataclass + class C: + pass + + @dataclass + class D(C): + j: int + + d = D(3) + self.assertEqual(d.j, 3) + self.assertIsInstance(d, C) + + # Test both ways: with an intermediate normal (non-dataclass) + # class and without an intermediate class. 
+ def test_inherit_nonfrozen_from_frozen(self): + for intermediate_class in [True, False]: + with self.subTest(intermediate_class=intermediate_class): + @dataclass(frozen=True) + class C: + i: int + + if intermediate_class: + class I(C): pass + else: + I = C + + with self.assertRaisesRegex(TypeError, + 'cannot inherit non-frozen dataclass from a frozen one'): + @dataclass + class D(I): + pass + + def test_inherit_frozen_from_nonfrozen(self): + for intermediate_class in [True, False]: + with self.subTest(intermediate_class=intermediate_class): + @dataclass + class C: + i: int + + if intermediate_class: + class I(C): pass + else: + I = C + + with self.assertRaisesRegex(TypeError, + 'cannot inherit frozen dataclass from a non-frozen one'): + @dataclass(frozen=True) + class D(I): + pass + + def test_inherit_from_normal_class(self): + for intermediate_class in [True, False]: + with self.subTest(intermediate_class=intermediate_class): + class C: + pass + + if intermediate_class: + class I(C): pass + else: + I = C + + @dataclass(frozen=True) + class D(I): + i: int + + d = D(10) + with self.assertRaises(FrozenInstanceError): + d.i = 5 + + def test_non_frozen_normal_derived(self): + # See bpo-32953. + + @dataclass(frozen=True) + class D: + x: int + y: int = 10 + + class S(D): + pass + + s = S(3) + self.assertEqual(s.x, 3) + self.assertEqual(s.y, 10) + s.cached = True + + # But can't change the frozen attributes. + with self.assertRaises(FrozenInstanceError): + s.x = 5 + with self.assertRaises(FrozenInstanceError): + s.y = 5 + self.assertEqual(s.x, 3) + self.assertEqual(s.y, 10) + self.assertEqual(s.cached, True) + + def test_overwriting_frozen(self): + # frozen uses __setattr__ and __delattr__. 
+ with self.assertRaisesRegex(TypeError, + 'Cannot overwrite attribute __setattr__'): + @dataclass(frozen=True) + class C: + x: int + def __setattr__(self): + pass + + with self.assertRaisesRegex(TypeError, + 'Cannot overwrite attribute __delattr__'): + @dataclass(frozen=True) + class C: + x: int + def __delattr__(self): + pass + + @dataclass(frozen=False) + class C: + x: int + def __setattr__(self, name, value): + self.__dict__['x'] = value * 2 + self.assertEqual(C(10).x, 20) + + def test_frozen_hash(self): + @dataclass(frozen=True) + class C: + x: Any + + # If x is immutable, we can compute the hash. No exception is + # raised. + hash(C(3)) + + # If x is mutable, computing the hash is an error. + with self.assertRaisesRegex(TypeError, 'unhashable type'): + hash(C({})) + + +class TestSlots(unittest.TestCase): + def test_simple(self): + @dataclass + class C: + __slots__ = ('x',) + x: Any + + # There was a bug where a variable in a slot was assumed to + # also have a default value (of type + # types.MemberDescriptorType). + with self.assertRaisesRegex(TypeError, + r"__init__\(\) missing 1 required positional argument: 'x'"): + C() + + # We can create an instance, and assign to x. + c = C(10) + self.assertEqual(c.x, 10) + c.x = 5 + self.assertEqual(c.x, 5) + + # We can't assign to anything else. + with self.assertRaisesRegex(AttributeError, "'C' object has no attribute 'y'"): + c.y = 5 + + def test_derived_added_field(self): + # See bpo-33100. + @dataclass + class Base: + __slots__ = ('x',) + x: Any + + @dataclass + class Derived(Base): + x: int + y: int + + d = Derived(1, 2) + self.assertEqual((d.x, d.y), (1, 2)) + + # We can add a new field to the derived instance. 
+ d.z = 10 + + def test_generated_slots(self): + @dataclass(slots=True) + class C: + x: int + y: int + + c = C(1, 2) + self.assertEqual((c.x, c.y), (1, 2)) + + c.x = 3 + c.y = 4 + self.assertEqual((c.x, c.y), (3, 4)) + + with self.assertRaisesRegex(AttributeError, "'C' object has no attribute 'z'"): + c.z = 5 + + def test_add_slots_when_slots_exists(self): + with self.assertRaisesRegex(TypeError, '^C already specifies __slots__$'): + @dataclass(slots=True) + class C: + __slots__ = ('x',) + x: int + + def test_generated_slots_value(self): + + class Root: + __slots__ = {'x'} + + class Root2(Root): + __slots__ = {'k': '...', 'j': ''} + + class Root3(Root2): + __slots__ = ['h'] + + class Root4(Root3): + __slots__ = 'aa' + + @dataclass(slots=True) + class Base(Root4): + y: int + j: str + h: str + + self.assertEqual(Base.__slots__, ('y', )) + + @dataclass(slots=True) + class Derived(Base): + aa: float + x: str + z: int + k: str + h: str + + self.assertEqual(Derived.__slots__, ('z', )) + + @dataclass + class AnotherDerived(Base): + z: int + + self.assertNotIn('__slots__', AnotherDerived.__dict__) + + def test_cant_inherit_from_iterator_slots(self): + + class Root: + __slots__ = iter(['a']) + + class Root2(Root): + __slots__ = ('b', ) + + with self.assertRaisesRegex( + TypeError, + "^Slots of 'Root' cannot be determined" + ): + @dataclass(slots=True) + class C(Root2): + x: int + + def test_returns_new_class(self): + class A: + x: int + + B = dataclass(A, slots=True) + self.assertIsNot(A, B) + + self.assertFalse(hasattr(A, "__slots__")) + self.assertTrue(hasattr(B, "__slots__")) + + # Can't be local to test_frozen_pickle. 
+ @dataclass(frozen=True, slots=True) + class FrozenSlotsClass: + foo: str + bar: int + + @dataclass(frozen=True) + class FrozenWithoutSlotsClass: + foo: str + bar: int + + def test_frozen_pickle(self): + # bpo-43999 + + self.assertEqual(self.FrozenSlotsClass.__slots__, ("foo", "bar")) + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.subTest(proto=proto): + obj = self.FrozenSlotsClass("a", 1) + p = pickle.loads(pickle.dumps(obj, protocol=proto)) + self.assertIsNot(obj, p) + self.assertEqual(obj, p) + + obj = self.FrozenWithoutSlotsClass("a", 1) + p = pickle.loads(pickle.dumps(obj, protocol=proto)) + self.assertIsNot(obj, p) + self.assertEqual(obj, p) + + def test_slots_with_default_no_init(self): + # Originally reported in bpo-44649. + @dataclass(slots=True) + class A: + a: str + b: str = field(default='b', init=False) + + obj = A("a") + self.assertEqual(obj.a, 'a') + self.assertEqual(obj.b, 'b') + + def test_slots_with_default_factory_no_init(self): + # Originally reported in bpo-44649. + @dataclass(slots=True) + class A: + a: str + b: str = field(default_factory=lambda:'b', init=False) + + obj = A("a") + self.assertEqual(obj.a, 'a') + self.assertEqual(obj.b, 'b') + + def test_slots_no_weakref(self): + @dataclass(slots=True) + class A: + # No weakref. + pass + + self.assertNotIn("__weakref__", A.__slots__) + a = A() + with self.assertRaisesRegex(TypeError, + "cannot create weak reference"): + weakref.ref(a) + + def test_slots_weakref(self): + @dataclass(slots=True, weakref_slot=True) + class A: + a: int + + self.assertIn("__weakref__", A.__slots__) + a = A(1) + weakref.ref(a) + + def test_slots_weakref_base_str(self): + class Base: + __slots__ = '__weakref__' + + @dataclass(slots=True) + class A(Base): + a: int + + # __weakref__ is in the base class, not A. But an A is still weakref-able. 
+ self.assertIn("__weakref__", Base.__slots__) + self.assertNotIn("__weakref__", A.__slots__) + a = A(1) + weakref.ref(a) + + def test_slots_weakref_base_tuple(self): + # Same as test_slots_weakref_base, but use a tuple instead of a string + # in the base class. + class Base: + __slots__ = ('__weakref__',) + + @dataclass(slots=True) + class A(Base): + a: int + + # __weakref__ is in the base class, not A. But an A is still + # weakref-able. + self.assertIn("__weakref__", Base.__slots__) + self.assertNotIn("__weakref__", A.__slots__) + a = A(1) + weakref.ref(a) + + def test_weakref_slot_without_slot(self): + with self.assertRaisesRegex(TypeError, + "weakref_slot is True but slots is False"): + @dataclass(weakref_slot=True) + class A: + a: int + + def test_weakref_slot_make_dataclass(self): + A = make_dataclass('A', [('a', int),], slots=True, weakref_slot=True) + self.assertIn("__weakref__", A.__slots__) + a = A(1) + weakref.ref(a) + + # And make sure if raises if slots=True is not given. + with self.assertRaisesRegex(TypeError, + "weakref_slot is True but slots is False"): + B = make_dataclass('B', [('a', int),], weakref_slot=True) + + def test_weakref_slot_subclass_weakref_slot(self): + @dataclass(slots=True, weakref_slot=True) + class Base: + field: int + + # A *can* also specify weakref_slot=True if it wants to (gh-93521) + @dataclass(slots=True, weakref_slot=True) + class A(Base): + ... + + # __weakref__ is in the base class, not A. But an instance of A + # is still weakref-able. + self.assertIn("__weakref__", Base.__slots__) + self.assertNotIn("__weakref__", A.__slots__) + a = A(1) + weakref.ref(a) + + def test_weakref_slot_subclass_no_weakref_slot(self): + @dataclass(slots=True, weakref_slot=True) + class Base: + field: int + + @dataclass(slots=True) + class A(Base): + ... + + # __weakref__ is in the base class, not A. Even though A doesn't + # specify weakref_slot, it should still be weakref-able. 
+ self.assertIn("__weakref__", Base.__slots__) + self.assertNotIn("__weakref__", A.__slots__) + a = A(1) + weakref.ref(a) + + def test_weakref_slot_normal_base_weakref_slot(self): + class Base: + __slots__ = ('__weakref__',) + + @dataclass(slots=True, weakref_slot=True) + class A(Base): + field: int + + # __weakref__ is in the base class, not A. But an instance of + # A is still weakref-able. + self.assertIn("__weakref__", Base.__slots__) + self.assertNotIn("__weakref__", A.__slots__) + a = A(1) + weakref.ref(a) + + +class TestDescriptors(unittest.TestCase): + def test_set_name(self): + # See bpo-33141. + + # Create a descriptor. + class D: + def __set_name__(self, owner, name): + self.name = name + 'x' + def __get__(self, instance, owner): + if instance is not None: + return 1 + return self + + # This is the case of just normal descriptor behavior, no + # dataclass code is involved in initializing the descriptor. + @dataclass + class C: + c: int=D() + self.assertEqual(C.c.name, 'cx') + + # Now test with a default value and init=False, which is the + # only time this is really meaningful. If not using + # init=False, then the descriptor will be overwritten, anyway. + @dataclass + class C: + c: int=field(default=D(), init=False) + self.assertEqual(C.c.name, 'cx') + self.assertEqual(C().c, 1) + + def test_non_descriptor(self): + # PEP 487 says __set_name__ should work on non-descriptors. + # Create a descriptor. + + class D: + def __set_name__(self, owner, name): + self.name = name + 'x' + + @dataclass + class C: + c: int=field(default=D(), init=False) + self.assertEqual(C.c.name, 'cx') + + def test_lookup_on_instance(self): + # See bpo-33175. + class D: + pass + + d = D() + # Create an attribute on the instance, not type. + d.__set_name__ = Mock() + + # Make sure d.__set_name__ is not called. + @dataclass + class C: + i: int=field(default=d, init=False) + + self.assertEqual(d.__set_name__.call_count, 0) + + def test_lookup_on_class(self): + # See bpo-33175. 
+ class D: + pass + D.__set_name__ = Mock() + + # Make sure D.__set_name__ is called. + @dataclass + class C: + i: int=field(default=D(), init=False) + + self.assertEqual(D.__set_name__.call_count, 1) + + def test_init_calls_set(self): + class D: + pass + + D.__set__ = Mock() + + @dataclass + class C: + i: D = D() + + # Make sure D.__set__ is called. + D.__set__.reset_mock() + c = C(5) + self.assertEqual(D.__set__.call_count, 1) + + def test_getting_field_calls_get(self): + class D: + pass + + D.__set__ = Mock() + D.__get__ = Mock() + + @dataclass + class C: + i: D = D() + + c = C(5) + + # Make sure D.__get__ is called. + D.__get__.reset_mock() + value = c.i + self.assertEqual(D.__get__.call_count, 1) + + def test_setting_field_calls_set(self): + class D: + pass + + D.__set__ = Mock() + + @dataclass + class C: + i: D = D() + + c = C(5) + + # Make sure D.__set__ is called. + D.__set__.reset_mock() + c.i = 10 + self.assertEqual(D.__set__.call_count, 1) + + def test_setting_uninitialized_descriptor_field(self): + class D: + pass + + D.__set__ = Mock() + + @dataclass + class C: + i: D + + # D.__set__ is not called because there's no D instance to call it on + D.__set__.reset_mock() + c = C(5) + self.assertEqual(D.__set__.call_count, 0) + + # D.__set__ still isn't called after setting i to an instance of D + # because descriptors don't behave like that when stored as instance vars + c.i = D() + c.i = 5 + self.assertEqual(D.__set__.call_count, 0) + + def test_default_value(self): + class D: + def __get__(self, instance: Any, owner: object) -> int: + if instance is None: + return 100 + + return instance._x + + def __set__(self, instance: Any, value: int) -> None: + instance._x = value + + @dataclass + class C: + i: D = D() + + c = C() + self.assertEqual(c.i, 100) + + c = C(5) + self.assertEqual(c.i, 5) + + def test_no_default_value(self): + class D: + def __get__(self, instance: Any, owner: object) -> int: + if instance is None: + raise AttributeError() + + return 
instance._x + + def __set__(self, instance: Any, value: int) -> None: + instance._x = value + + @dataclass + class C: + i: D = D() + + with self.assertRaisesRegex(TypeError, 'missing 1 required positional argument'): + c = C() + +class TestStringAnnotations(unittest.TestCase): + def test_classvar(self): + # Some expressions recognized as ClassVar really aren't. But + # if you're using string annotations, it's not an exact + # science. + # These tests assume that both "import typing" and "from + # typing import *" have been run in this file. + for typestr in ('ClassVar[int]', + 'ClassVar [int]', + ' ClassVar [int]', + 'ClassVar', + ' ClassVar ', + 'typing.ClassVar[int]', + 'typing.ClassVar[str]', + ' typing.ClassVar[str]', + 'typing .ClassVar[str]', + 'typing. ClassVar[str]', + 'typing.ClassVar [str]', + 'typing.ClassVar [ str]', + + # Not syntactically valid, but these will + # be treated as ClassVars. + 'typing.ClassVar.[int]', + 'typing.ClassVar+', + ): + with self.subTest(typestr=typestr): + @dataclass + class C: + x: typestr + + # x is a ClassVar, so C() takes no args. + C() + + # And it won't appear in the class's dict because it doesn't + # have a default. + self.assertNotIn('x', C.__dict__) + + def test_isnt_classvar(self): + for typestr in ('CV', + 't.ClassVar', + 't.ClassVar[int]', + 'typing..ClassVar[int]', + 'Classvar', + 'Classvar[int]', + 'typing.ClassVarx[int]', + 'typong.ClassVar[int]', + 'dataclasses.ClassVar[int]', + 'typingxClassVar[str]', + ): + with self.subTest(typestr=typestr): + @dataclass + class C: + x: typestr + + # x is not a ClassVar, so C() takes one arg. + self.assertEqual(C(10).x, 10) + + def test_initvar(self): + # These tests assume that both "import dataclasses" and "from + # dataclasses import *" have been run in this file. 
+ for typestr in ('InitVar[int]', + 'InitVar [int]' + ' InitVar [int]', + 'InitVar', + ' InitVar ', + 'dataclasses.InitVar[int]', + 'dataclasses.InitVar[str]', + ' dataclasses.InitVar[str]', + 'dataclasses .InitVar[str]', + 'dataclasses. InitVar[str]', + 'dataclasses.InitVar [str]', + 'dataclasses.InitVar [ str]', + + # Not syntactically valid, but these will + # be treated as InitVars. + 'dataclasses.InitVar.[int]', + 'dataclasses.InitVar+', + ): + with self.subTest(typestr=typestr): + @dataclass + class C: + x: typestr + + # x is an InitVar, so doesn't create a member. + with self.assertRaisesRegex(AttributeError, + "object has no attribute 'x'"): + C(1).x + + def test_isnt_initvar(self): + for typestr in ('IV', + 'dc.InitVar', + 'xdataclasses.xInitVar', + 'typing.xInitVar[int]', + ): + with self.subTest(typestr=typestr): + @dataclass + class C: + x: typestr + + # x is not an InitVar, so there will be a member x. + self.assertEqual(C(10).x, 10) + + def test_classvar_module_level_import(self): + from test import dataclass_module_1 + from test import dataclass_module_1_str + from test import dataclass_module_2 + from test import dataclass_module_2_str + + for m in (dataclass_module_1, dataclass_module_1_str, + dataclass_module_2, dataclass_module_2_str, + ): + with self.subTest(m=m): + # There's a difference in how the ClassVars are + # interpreted when using string annotations or + # not. See the imported modules for details. + if m.USING_STRINGS: + c = m.CV(10) + else: + c = m.CV() + self.assertEqual(c.cv0, 20) + + + # There's a difference in how the InitVars are + # interpreted when using string annotations or + # not. See the imported modules for details. + c = m.IV(0, 1, 2, 3, 4) + + for field_name in ('iv0', 'iv1', 'iv2', 'iv3'): + with self.subTest(field_name=field_name): + with self.assertRaisesRegex(AttributeError, f"object has no attribute '{field_name}'"): + # Since field_name is an InitVar, it's + # not an instance field. 
+ getattr(c, field_name) + + if m.USING_STRINGS: + # iv4 is interpreted as a normal field. + self.assertIn('not_iv4', c.__dict__) + self.assertEqual(c.not_iv4, 4) + else: + # iv4 is interpreted as an InitVar, so it + # won't exist on the instance. + self.assertNotIn('not_iv4', c.__dict__) + + def test_text_annotations(self): + from test import dataclass_textanno + + self.assertEqual( + get_type_hints(dataclass_textanno.Bar), + {'foo': dataclass_textanno.Foo}) + self.assertEqual( + get_type_hints(dataclass_textanno.Bar.__init__), + {'foo': dataclass_textanno.Foo, + 'return': type(None)}) + + +class TestMakeDataclass(unittest.TestCase): + def test_simple(self): + C = make_dataclass('C', + [('x', int), + ('y', int, field(default=5))], + namespace={'add_one': lambda self: self.x + 1}) + c = C(10) + self.assertEqual((c.x, c.y), (10, 5)) + self.assertEqual(c.add_one(), 11) + + + def test_no_mutate_namespace(self): + # Make sure a provided namespace isn't mutated. + ns = {} + C = make_dataclass('C', + [('x', int), + ('y', int, field(default=5))], + namespace=ns) + self.assertEqual(ns, {}) + + def test_base(self): + class Base1: + pass + class Base2: + pass + C = make_dataclass('C', + [('x', int)], + bases=(Base1, Base2)) + c = C(2) + self.assertIsInstance(c, C) + self.assertIsInstance(c, Base1) + self.assertIsInstance(c, Base2) + + def test_base_dataclass(self): + @dataclass + class Base1: + x: int + class Base2: + pass + C = make_dataclass('C', + [('y', int)], + bases=(Base1, Base2)) + with self.assertRaisesRegex(TypeError, 'required positional'): + c = C(2) + c = C(1, 2) + self.assertIsInstance(c, C) + self.assertIsInstance(c, Base1) + self.assertIsInstance(c, Base2) + + self.assertEqual((c.x, c.y), (1, 2)) + + def test_init_var(self): + def post_init(self, y): + self.x *= y + + C = make_dataclass('C', + [('x', int), + ('y', InitVar[int]), + ], + namespace={'__post_init__': post_init}, + ) + c = C(2, 3) + self.assertEqual(vars(c), {'x': 6}) + 
self.assertEqual(len(fields(c)), 1) + + def test_class_var(self): + C = make_dataclass('C', + [('x', int), + ('y', ClassVar[int], 10), + ('z', ClassVar[int], field(default=20)), + ]) + c = C(1) + self.assertEqual(vars(c), {'x': 1}) + self.assertEqual(len(fields(c)), 1) + self.assertEqual(C.y, 10) + self.assertEqual(C.z, 20) + + def test_other_params(self): + C = make_dataclass('C', + [('x', int), + ('y', ClassVar[int], 10), + ('z', ClassVar[int], field(default=20)), + ], + init=False) + # Make sure we have a repr, but no init. + self.assertNotIn('__init__', vars(C)) + self.assertIn('__repr__', vars(C)) + + # Make sure random other params don't work. + with self.assertRaisesRegex(TypeError, 'unexpected keyword argument'): + C = make_dataclass('C', + [], + xxinit=False) + + def test_no_types(self): + C = make_dataclass('Point', ['x', 'y', 'z']) + c = C(1, 2, 3) + self.assertEqual(vars(c), {'x': 1, 'y': 2, 'z': 3}) + self.assertEqual(C.__annotations__, {'x': 'typing.Any', + 'y': 'typing.Any', + 'z': 'typing.Any'}) + + C = make_dataclass('Point', ['x', ('y', int), 'z']) + c = C(1, 2, 3) + self.assertEqual(vars(c), {'x': 1, 'y': 2, 'z': 3}) + self.assertEqual(C.__annotations__, {'x': 'typing.Any', + 'y': int, + 'z': 'typing.Any'}) + + def test_invalid_type_specification(self): + for bad_field in [(), + (1, 2, 3, 4), + ]: + with self.subTest(bad_field=bad_field): + with self.assertRaisesRegex(TypeError, r'Invalid field: '): + make_dataclass('C', ['a', bad_field]) + + # And test for things with no len(). 
+ for bad_field in [float, + lambda x:x, + ]: + with self.subTest(bad_field=bad_field): + with self.assertRaisesRegex(TypeError, r'has no len\(\)'): + make_dataclass('C', ['a', bad_field]) + + def test_duplicate_field_names(self): + for field in ['a', 'ab']: + with self.subTest(field=field): + with self.assertRaisesRegex(TypeError, 'Field name duplicated'): + make_dataclass('C', [field, 'a', field]) + + def test_keyword_field_names(self): + for field in ['for', 'async', 'await', 'as']: + with self.subTest(field=field): + with self.assertRaisesRegex(TypeError, 'must not be keywords'): + make_dataclass('C', ['a', field]) + with self.assertRaisesRegex(TypeError, 'must not be keywords'): + make_dataclass('C', [field]) + with self.assertRaisesRegex(TypeError, 'must not be keywords'): + make_dataclass('C', [field, 'a']) + + def test_non_identifier_field_names(self): + for field in ['()', 'x,y', '*', '2@3', '', 'little johnny tables']: + with self.subTest(field=field): + with self.assertRaisesRegex(TypeError, 'must be valid identifiers'): + make_dataclass('C', ['a', field]) + with self.assertRaisesRegex(TypeError, 'must be valid identifiers'): + make_dataclass('C', [field]) + with self.assertRaisesRegex(TypeError, 'must be valid identifiers'): + make_dataclass('C', [field, 'a']) + + def test_underscore_field_names(self): + # Unlike namedtuple, it's okay if dataclass field names have + # an underscore. + make_dataclass('C', ['_', '_a', 'a_a', 'a_']) + + def test_funny_class_names_names(self): + # No reason to prevent weird class names, since + # types.new_class allows them. 
+ for classname in ['()', 'x,y', '*', '2@3', '']: + with self.subTest(classname=classname): + C = make_dataclass(classname, ['a', 'b']) + self.assertEqual(C.__name__, classname) + +class TestReplace(unittest.TestCase): + def test(self): + @dataclass(frozen=True) + class C: + x: int + y: int + + c = C(1, 2) + c1 = replace(c, x=3) + self.assertEqual(c1.x, 3) + self.assertEqual(c1.y, 2) + + def test_frozen(self): + @dataclass(frozen=True) + class C: + x: int + y: int + z: int = field(init=False, default=10) + t: int = field(init=False, default=100) + + c = C(1, 2) + c1 = replace(c, x=3) + self.assertEqual((c.x, c.y, c.z, c.t), (1, 2, 10, 100)) + self.assertEqual((c1.x, c1.y, c1.z, c1.t), (3, 2, 10, 100)) + + + with self.assertRaisesRegex(ValueError, 'init=False'): + replace(c, x=3, z=20, t=50) + with self.assertRaisesRegex(ValueError, 'init=False'): + replace(c, z=20) + replace(c, x=3, z=20, t=50) + + # Make sure the result is still frozen. + with self.assertRaisesRegex(FrozenInstanceError, "cannot assign to field 'x'"): + c1.x = 3 + + # Make sure we can't replace an attribute that doesn't exist, + # if we're also replacing one that does exist. Test this + # here, because setting attributes on frozen instances is + # handled slightly differently from non-frozen ones. 
+ with self.assertRaisesRegex(TypeError, r"__init__\(\) got an unexpected " + "keyword argument 'a'"): + c1 = replace(c, x=20, a=5) + + def test_invalid_field_name(self): + @dataclass(frozen=True) + class C: + x: int + y: int + + c = C(1, 2) + with self.assertRaisesRegex(TypeError, r"__init__\(\) got an unexpected " + "keyword argument 'z'"): + c1 = replace(c, z=3) + + def test_invalid_object(self): + @dataclass(frozen=True) + class C: + x: int + y: int + + with self.assertRaisesRegex(TypeError, 'dataclass instance'): + replace(C, x=3) + + with self.assertRaisesRegex(TypeError, 'dataclass instance'): + replace(0, x=3) + + def test_no_init(self): + @dataclass + class C: + x: int + y: int = field(init=False, default=10) + + c = C(1) + c.y = 20 + + # Make sure y gets the default value. + c1 = replace(c, x=5) + self.assertEqual((c1.x, c1.y), (5, 10)) + + # Trying to replace y is an error. + with self.assertRaisesRegex(ValueError, 'init=False'): + replace(c, x=2, y=30) + + with self.assertRaisesRegex(ValueError, 'init=False'): + replace(c, y=30) + + def test_classvar(self): + @dataclass + class C: + x: int + y: ClassVar[int] = 1000 + + c = C(1) + d = C(2) + + self.assertIs(c.y, d.y) + self.assertEqual(c.y, 1000) + + # Trying to replace y is an error: can't replace ClassVars. 
+ with self.assertRaisesRegex(TypeError, r"__init__\(\) got an " + "unexpected keyword argument 'y'"): + replace(c, y=30) + + replace(c, x=5) + + def test_initvar_is_specified(self): + @dataclass + class C: + x: int + y: InitVar[int] + + def __post_init__(self, y): + self.x *= y + + c = C(1, 10) + self.assertEqual(c.x, 10) + with self.assertRaisesRegex(ValueError, r"InitVar 'y' must be " + "specified with replace()"): + replace(c, x=3) + c = replace(c, x=3, y=5) + self.assertEqual(c.x, 15) + + def test_initvar_with_default_value(self): + @dataclass + class C: + x: int + y: InitVar[int] = None + z: InitVar[int] = 42 + + def __post_init__(self, y, z): + if y is not None: + self.x += y + if z is not None: + self.x += z + + c = C(x=1, y=10, z=1) + self.assertEqual(replace(c), C(x=12)) + self.assertEqual(replace(c, y=4), C(x=12, y=4, z=42)) + self.assertEqual(replace(c, y=4, z=1), C(x=12, y=4, z=1)) + + def test_recursive_repr(self): + @dataclass + class C: + f: "C" + + c = C(None) + c.f = c + self.assertEqual(repr(c), "TestReplace.test_recursive_repr..C(f=...)") + + def test_recursive_repr_two_attrs(self): + @dataclass + class C: + f: "C" + g: "C" + + c = C(None, None) + c.f = c + c.g = c + self.assertEqual(repr(c), "TestReplace.test_recursive_repr_two_attrs" + "..C(f=..., g=...)") + + def test_recursive_repr_indirection(self): + @dataclass + class C: + f: "D" + + @dataclass + class D: + f: "C" + + c = C(None) + d = D(None) + c.f = d + d.f = c + self.assertEqual(repr(c), "TestReplace.test_recursive_repr_indirection" + "..C(f=TestReplace.test_recursive_repr_indirection" + "..D(f=...))") + + def test_recursive_repr_indirection_two(self): + @dataclass + class C: + f: "D" + + @dataclass + class D: + f: "E" + + @dataclass + class E: + f: "C" + + c = C(None) + d = D(None) + e = E(None) + c.f = d + d.f = e + e.f = c + self.assertEqual(repr(c), "TestReplace.test_recursive_repr_indirection_two" + "..C(f=TestReplace.test_recursive_repr_indirection_two" + 
"..D(f=TestReplace.test_recursive_repr_indirection_two" + "..E(f=...)))") + + def test_recursive_repr_misc_attrs(self): + @dataclass + class C: + f: "C" + g: int + + c = C(None, 1) + c.f = c + self.assertEqual(repr(c), "TestReplace.test_recursive_repr_misc_attrs" + "..C(f=..., g=1)") + + ## def test_initvar(self): + ## @dataclass + ## class C: + ## x: int + ## y: InitVar[int] + + ## c = C(1, 10) + ## d = C(2, 20) + + ## # In our case, replacing an InitVar is a no-op + ## self.assertEqual(c, replace(c, y=5)) + + ## replace(c, x=5) + +class TestAbstract(unittest.TestCase): + def test_abc_implementation(self): + class Ordered(abc.ABC): + @abc.abstractmethod + def __lt__(self, other): + pass + + @abc.abstractmethod + def __le__(self, other): + pass + + @dataclass(order=True) + class Date(Ordered): + year: int + month: 'Month' + day: 'int' + + self.assertFalse(inspect.isabstract(Date)) + self.assertGreater(Date(2020,12,25), Date(2020,8,31)) + + def test_maintain_abc(self): + class A(abc.ABC): + @abc.abstractmethod + def foo(self): + pass + + @dataclass + class Date(A): + year: int + month: 'Month' + day: 'int' + + self.assertTrue(inspect.isabstract(Date)) + msg = 'class Date without an implementation for abstract method foo' + self.assertRaisesRegex(TypeError, msg, Date) + + +class TestMatchArgs(unittest.TestCase): + def test_match_args(self): + @dataclass + class C: + a: int + self.assertEqual(C(42).__match_args__, ('a',)) + + def test_explicit_match_args(self): + ma = () + @dataclass + class C: + a: int + __match_args__ = ma + self.assertIs(C(42).__match_args__, ma) + + def test_bpo_43764(self): + @dataclass(repr=False, eq=False, init=False) + class X: + a: int + b: int + c: int + self.assertEqual(X.__match_args__, ("a", "b", "c")) + + def test_match_args_argument(self): + @dataclass(match_args=False) + class X: + a: int + self.assertNotIn('__match_args__', X.__dict__) + + @dataclass(match_args=False) + class Y: + a: int + __match_args__ = ('b',) + 
self.assertEqual(Y.__match_args__, ('b',)) + + @dataclass(match_args=False) + class Z(Y): + z: int + self.assertEqual(Z.__match_args__, ('b',)) + + # Ensure parent dataclass __match_args__ is seen, if child class + # specifies match_args=False. + @dataclass + class A: + a: int + z: int + @dataclass(match_args=False) + class B(A): + b: int + self.assertEqual(B.__match_args__, ('a', 'z')) + + def test_make_dataclasses(self): + C = make_dataclass('C', [('x', int), ('y', int)]) + self.assertEqual(C.__match_args__, ('x', 'y')) + + C = make_dataclass('C', [('x', int), ('y', int)], match_args=True) + self.assertEqual(C.__match_args__, ('x', 'y')) + + C = make_dataclass('C', [('x', int), ('y', int)], match_args=False) + self.assertNotIn('__match__args__', C.__dict__) + + C = make_dataclass('C', [('x', int), ('y', int)], namespace={'__match_args__': ('z',)}) + self.assertEqual(C.__match_args__, ('z',)) + + +class TestKeywordArgs(unittest.TestCase): + def test_no_classvar_kwarg(self): + msg = 'field a is a ClassVar but specifies kw_only' + with self.assertRaisesRegex(TypeError, msg): + @dataclass + class A: + a: ClassVar[int] = field(kw_only=True) + + with self.assertRaisesRegex(TypeError, msg): + @dataclass + class A: + a: ClassVar[int] = field(kw_only=False) + + with self.assertRaisesRegex(TypeError, msg): + @dataclass(kw_only=True) + class A: + a: ClassVar[int] = field(kw_only=False) + + def test_field_marked_as_kwonly(self): + ####################### + # Using dataclass(kw_only=True) + @dataclass(kw_only=True) + class A: + a: int + self.assertTrue(fields(A)[0].kw_only) + + @dataclass(kw_only=True) + class A: + a: int = field(kw_only=True) + self.assertTrue(fields(A)[0].kw_only) + + @dataclass(kw_only=True) + class A: + a: int = field(kw_only=False) + self.assertFalse(fields(A)[0].kw_only) + + ####################### + # Using dataclass(kw_only=False) + @dataclass(kw_only=False) + class A: + a: int + self.assertFalse(fields(A)[0].kw_only) + + @dataclass(kw_only=False) + 
class A: + a: int = field(kw_only=True) + self.assertTrue(fields(A)[0].kw_only) + + @dataclass(kw_only=False) + class A: + a: int = field(kw_only=False) + self.assertFalse(fields(A)[0].kw_only) + + ####################### + # Not specifying dataclass(kw_only) + @dataclass + class A: + a: int + self.assertFalse(fields(A)[0].kw_only) + + @dataclass + class A: + a: int = field(kw_only=True) + self.assertTrue(fields(A)[0].kw_only) + + @dataclass + class A: + a: int = field(kw_only=False) + self.assertFalse(fields(A)[0].kw_only) + + def test_match_args(self): + # kw fields don't show up in __match_args__. + @dataclass(kw_only=True) + class C: + a: int + self.assertEqual(C(a=42).__match_args__, ()) + + @dataclass + class C: + a: int + b: int = field(kw_only=True) + self.assertEqual(C(42, b=10).__match_args__, ('a',)) + + def test_KW_ONLY(self): + @dataclass + class A: + a: int + _: KW_ONLY + b: int + c: int + A(3, c=5, b=4) + msg = "takes 2 positional arguments but 4 were given" + with self.assertRaisesRegex(TypeError, msg): + A(3, 4, 5) + + + @dataclass(kw_only=True) + class B: + a: int + _: KW_ONLY + b: int + c: int + B(a=3, b=4, c=5) + msg = "takes 1 positional argument but 4 were given" + with self.assertRaisesRegex(TypeError, msg): + B(3, 4, 5) + + # Explicitly make a field that follows KW_ONLY be non-keyword-only. 
+ @dataclass + class C: + a: int + _: KW_ONLY + b: int + c: int = field(kw_only=False) + c = C(1, 2, b=3) + self.assertEqual(c.a, 1) + self.assertEqual(c.b, 3) + self.assertEqual(c.c, 2) + c = C(1, b=3, c=2) + self.assertEqual(c.a, 1) + self.assertEqual(c.b, 3) + self.assertEqual(c.c, 2) + c = C(1, b=3, c=2) + self.assertEqual(c.a, 1) + self.assertEqual(c.b, 3) + self.assertEqual(c.c, 2) + c = C(c=2, b=3, a=1) + self.assertEqual(c.a, 1) + self.assertEqual(c.b, 3) + self.assertEqual(c.c, 2) + + def test_KW_ONLY_as_string(self): + @dataclass + class A: + a: int + _: 'dataclasses.KW_ONLY' + b: int + c: int + A(3, c=5, b=4) + msg = "takes 2 positional arguments but 4 were given" + with self.assertRaisesRegex(TypeError, msg): + A(3, 4, 5) + + def test_KW_ONLY_twice(self): + msg = "'Y' is KW_ONLY, but KW_ONLY has already been specified" + + with self.assertRaisesRegex(TypeError, msg): + @dataclass + class A: + a: int + X: KW_ONLY + Y: KW_ONLY + b: int + c: int + + with self.assertRaisesRegex(TypeError, msg): + @dataclass + class A: + a: int + X: KW_ONLY + b: int + Y: KW_ONLY + c: int + + with self.assertRaisesRegex(TypeError, msg): + @dataclass + class A: + a: int + X: KW_ONLY + b: int + c: int + Y: KW_ONLY + + # But this usage is okay, since it's not using KW_ONLY. + @dataclass + class A: + a: int + _: KW_ONLY + b: int + c: int = field(kw_only=True) + + # And if inheriting, it's okay. + @dataclass + class A: + a: int + _: KW_ONLY + b: int + c: int + @dataclass + class B(A): + _: KW_ONLY + d: int + + # Make sure the error is raised in a derived class. 
+ with self.assertRaisesRegex(TypeError, msg): + @dataclass + class A: + a: int + _: KW_ONLY + b: int + c: int + @dataclass + class B(A): + X: KW_ONLY + d: int + Y: KW_ONLY + + + def test_post_init(self): + @dataclass + class A: + a: int + _: KW_ONLY + b: InitVar[int] + c: int + d: InitVar[int] + def __post_init__(self, b, d): + raise CustomError(f'{b=} {d=}') + with self.assertRaisesRegex(CustomError, 'b=3 d=4'): + A(1, c=2, b=3, d=4) + + @dataclass + class B: + a: int + _: KW_ONLY + b: InitVar[int] + c: int + d: InitVar[int] + def __post_init__(self, b, d): + self.a = b + self.c = d + b = B(1, c=2, b=3, d=4) + self.assertEqual(asdict(b), {'a': 3, 'c': 4}) + + def test_defaults(self): + # For kwargs, make sure we can have defaults after non-defaults. + @dataclass + class A: + a: int = 0 + _: KW_ONLY + b: int + c: int = 1 + d: int + + a = A(d=4, b=3) + self.assertEqual(a.a, 0) + self.assertEqual(a.b, 3) + self.assertEqual(a.c, 1) + self.assertEqual(a.d, 4) + + # Make sure we still check for non-kwarg non-defaults not following + # defaults. + err_regex = "non-default argument 'z' follows default argument" + with self.assertRaisesRegex(TypeError, err_regex): + @dataclass + class A: + a: int = 0 + z: int + _: KW_ONLY + b: int + c: int = 1 + d: int + + def test_make_dataclass(self): + A = make_dataclass("A", ['a'], kw_only=True) + self.assertTrue(fields(A)[0].kw_only) + + B = make_dataclass("B", + ['a', ('b', int, field(kw_only=False))], + kw_only=True) + self.assertTrue(fields(B)[0].kw_only) + self.assertFalse(fields(B)[1].kw_only) + + +if __name__ == '__main__': + unittest.main() diff --git a/Tools/make_dataclass_tests.py b/Tools/make_dataclass_tests.py new file mode 100644 index 000000000..63cf0a657 --- /dev/null +++ b/Tools/make_dataclass_tests.py @@ -0,0 +1,451 @@ +# Used to generate tests/run/test_dataclasses.pyx but translating the CPython test suite +# dataclass file. 
Initially run using Python 3.10 - this file is not designed to be +# backwards compatible since it will be run manually and infrequently. + +import ast +import os.path +import sys + +unavailable_functions = frozenset( + { + "dataclass_textanno", # part of CPython test module + "dataclass_module_1", # part of CPython test module + "make_dataclass", # not implemented in Cython dataclasses (probably won't be implemented) + } +) + +skip_tests = frozenset( + { + # needs Cython compile + ("TestCase", "test_field_default_default_factory_error"), + ("TestCase", "test_two_fields_one_default"), + ("TestCase", "test_overwrite_hash"), + ("TestCase", "test_eq_order"), + ("TestCase", "test_no_unhashable_default"), + ("TestCase", "test_disallowed_mutable_defaults"), + ("TestCase", "test_classvar_default_factory"), + ("TestCase", "test_field_metadata_mapping"), + ("TestFieldNoAnnotation", "test_field_without_annotation"), + ( + "TestFieldNoAnnotation", + "test_field_without_annotation_but_annotation_in_base", + ), + ( + "TestFieldNoAnnotation", + "test_field_without_annotation_but_annotation_in_base_not_dataclass", + ), + ("TestOrdering", "test_overwriting_order"), + ("TestHash", "test_hash_rules"), + ("TestHash", "test_hash_no_args"), + ("TestFrozen", "test_inherit_nonfrozen_from_empty_frozen"), + ("TestFrozen", "test_inherit_nonfrozen_from_frozen"), + ("TestFrozen", "test_inherit_frozen_from_nonfrozen"), + ("TestFrozen", "test_overwriting_frozen"), + ("TestSlots", "test_add_slots_when_slots_exists"), + ("TestSlots", "test_cant_inherit_from_iterator_slots"), + ("TestSlots", "test_weakref_slot_without_slot"), + ("TestKeywordArgs", "test_no_classvar_kwarg"), + ("TestKeywordArgs", "test_KW_ONLY_twice"), + ("TestKeywordArgs", "test_defaults"), + # uses local variable in class definition + ("TestCase", "test_default_factory"), + ("TestCase", "test_default_factory_with_no_init"), + ("TestCase", "test_field_default"), + ("TestCase", "test_function_annotations"), + ("TestDescriptors", 
"test_lookup_on_instance"), + ("TestCase", "test_default_factory_not_called_if_value_given"), + ("TestCase", "test_class_attrs"), + ("TestStringAnnotations",), # almost all the texts here use local variables + # Currently unsupported + ( + "TestOrdering", + "test_functools_total_ordering", + ), # combination of cython dataclass and total_ordering + ("TestCase", "test_missing_default_factory"), # we're MISSING MISSING + ("TestCase", "test_missing_default"), # MISSING + ("TestCase", "test_missing_repr"), # MISSING + ("TestSlots",), # __slots__ isn't understood + ("TestMatchArgs",), + ("TestKeywordArgs", "test_field_marked_as_kwonly"), + ("TestKeywordArgs", "test_match_args"), + ("TestKeywordArgs", "test_KW_ONLY"), + ("TestKeywordArgs", "test_KW_ONLY_as_string"), + ("TestKeywordArgs", "test_post_init"), + ( + "TestCase", + "test_class_var_frozen", + ), # __annotations__ not present on cdef classes https://github.com/cython/cython/issues/4519 + ("TestCase", "test_dont_include_other_annotations"), # __annotations__ + ("TestDocString",), # don't think cython dataclasses currently set __doc__ + # either cython.dataclasses.field or cython.dataclasses.dataclass called directly as functions + # (will probably never be supported) + ("TestCase", "test_field_repr"), + ("TestCase", "test_dynamic_class_creation"), + ("TestCase", "test_dynamic_class_creation_using_field"), + # Requires inheritance from non-cdef class + ("TestCase", "test_is_dataclass_genericalias"), + ("TestCase", "test_generic_extending"), + ("TestCase", "test_generic_dataclasses"), + ("TestCase", "test_generic_dynamic"), + ("TestInit", "test_inherit_from_protocol"), + ("TestAbstract", "test_abc_implementation"), + ("TestAbstract", "test_maintain_abc"), + # Requires multiple inheritance from extension types + ("TestCase", "test_post_init_not_auto_added"), + # Refers to nonlocal from enclosing function + ( + "TestCase", + "test_post_init_staticmethod", + ), # TODO replicate the gist of the test elsewhere + # 
PEP487 isn't support in Cython + ("TestDescriptors", "test_non_descriptor"), + ("TestDescriptors", "test_set_name"), + ("TestDescriptors", "test_setting_field_calls_set"), + ("TestDescriptors", "test_setting_uninitialized_descriptor_field"), + # Looks up __dict__, which cdef classes don't typically have + ("TestCase", "test_init_false_no_default"), + ("TestCase", "test_init_var_inheritance"), # __dict__ again + ("TestCase", "test_base_has_init"), + ("TestInit", "test_base_has_init"), # needs __dict__ for vars + # Requires arbitrary attributes to be writeable + ("TestCase", "test_post_init_super"), + # Cython being strict about argument types - expected difference + ("TestDescriptors", "test_getting_field_calls_get"), + ("TestDescriptors", "test_init_calls_set"), + ("TestHash", "test_eq_only"), + # I think an expected difference with cdef classes - the property will be in the dict + ("TestCase", "test_items_in_dicts"), + # These tests are probably fine, but the string substitution in this file doesn't get it right + ("TestRepr", "test_repr"), + ("TestCase", "test_not_in_repr"), + # Bugs + ("TestCase", "test_no_options"), # @dataclass() + ("TestCase", "test_field_no_default"), # field() + ("TestCase", "test_init_in_order"), # field() + ("TestCase", "test_hash_field_rules"), # compiler crash + ("TestCase", "test_class_var"), # not sure but compiler crash + ("TestCase", "test_field_order"), # invalid C code (__pyx_base?) + ( + "TestCase", + "test_overwrite_fields_in_derived_class", + ), # invalid C code (__pyx_base?) 
+ ("TestReplace", "test_recursive_repr"), # recursion error + ("TestReplace", "test_recursive_repr_two_attrs"), # recursion error + ("TestReplace", "test_recursive_repr_misc_attrs"), # recursion error + ("TestReplace", "test_recursive_repr_indirection"), # recursion error + ("TestReplace", "test_recursive_repr_indirection_two"), # recursion error + ("TestCase", "test_0_field_compare"), # should return False + ("TestCase", "test_1_field_compare"), # order=False is apparently ignored + ("TestOrdering", "test_no_order"), # probably order=False being ignored + ("TestRepr", "test_no_repr"), # turning off repr doesn't work + ( + "TestCase", + "test_intermediate_non_dataclass", + ), # issue with propagating through intermediate class + ("TestCase", "test_post_init"), # init=False being ignored + ( + "TestFrozen", + ), # raises AttributeError, not FrozenInstanceError (may be hard to fix) + ("TestReplace", "test_frozen"), # AttributeError not FrozenInstanceError + ( + "TestCase", + "test_dataclasses_qualnames", + ), # doesn't define __setattr__ and just relies on Cython to enforce readonly properties + ("TestCase", "test_compare_subclasses"), # wrong comparison + ("TestCase", "test_simple_compare"), # wrong comparison + ("TestEq", "test_no_eq"), # wrong comparison (probably eq=False being ignored) + ( + "TestCase", + "test_field_named_self", + ), # I think just an error in inspecting the signature + ( + "TestCase", + "test_init_var_default_factory", + ), # should be raising a compile error + ("TestCase", "test_init_var_no_default"), # should be raising a compile error + ("TestCase", "test_init_var_with_default"), # not sure... + ("TestReplace", "test_initvar_with_default_value"), # needs investigating + # Maybe bugs? + # non-default argument 'z' follows default argument in dataclass __init__ - this message looks right to me! + ("TestCase", "test_class_marker"), + # cython.dataclasses.field parameter 'metadata' must be a literal value - possibly not something we can support? 
+ ("TestCase", "test_field_metadata_custom_mapping"), + ( + "TestCase", + "test_class_var_default_factory", + ), # possibly to do with ClassVar being assigned a field + ( + "TestCase", + "test_class_var_with_default", + ), # possibly to do with ClassVar being assigned a field + ( + "TestHash", + "test_unsafe_hash", + ), # not sure if it's a bug or just a difference in how the hash is calculated + ( + "TestHash", + "test_1_field_hash", + ), # not sure if it's a bug or just a difference in how the hash is calculated + ( + "TestHash", + "test_0_field_hash", + ), # not sure if it's a bug or just a difference in how the hash is calculated + ( + "TestDescriptors", + ), # mostly don't work - I think this may be a limitation of cdef classes but needs investigating + } +) + +version_specific_skips = { + # The version numbers are the first version that the test should be run on + ("TestCase", "test_init_var_preserve_type"): ( + 3, + 10, + ), # needs language support for | operator on types + ("TestCase", "test_post_init_classmethod"): ( + 3, + 10, + ), # not possible to add attributes on extension types +} + + +class DataclassInDecorators(ast.NodeVisitor): + found = False + + def visit_Name(self, node): + if node.id == "dataclass": + self.found = True + return self.generic_visit(node) + + def generic_visit(self, node): + if self.found: + return # skip + return super().generic_visit(node) + + +def dataclass_in_decorators(decorator_list): + finder = DataclassInDecorators() + for dec in decorator_list: + finder.visit(dec) + if finder.found: + return True + return False + + +class SubstituteNameString(ast.NodeTransformer): + def __init__(self, substitutions): + super().__init__() + self.substitutions = substitutions + + def visit_Constant(self, node): + # attempt to handle some difference in class names + # (note: requires Python>=3.8) + if isinstance(node.value, str): + if node.value.find("") != -1: + import re + + new_value = re.sub("[\w.]*", "", node.value) + for key, value 
in self.substitutions.items(): + new_value2 = re.sub(f"(?= version: + return func + return decorator diff --git a/tests/run/test_dataclasses.pyx b/tests/run/test_dataclasses.pyx new file mode 100644 index 000000000..2fb8f3f64 --- /dev/null +++ b/tests/run/test_dataclasses.pyx @@ -0,0 +1,966 @@ +# AUTO-GENERATED BY Tools/make_dataclass_tests.py +# DO NOT EDIT + +# cython: language_level=3 +include "test_dataclasses.pxi" + +@dataclass +@cclass +class C_TestCase_test_no_fields: + pass + +@dataclass +@cclass +class C_TestCase_test_no_fields_but_member_variable: + i = 0 + +@dataclass +@cclass +class C_TestCase_test_one_field_no_default: + x: int + +@dataclass +@cclass +class C_TestCase_test_named_init_params: + x: int + +@dataclass +@cclass +class C_TestCase_test_field_named_object: + object: str + +@dataclass(frozen=True) +@cclass +class C_TestCase_test_field_named_object_frozen: + object: str + +@dataclass +@cclass +class C_TestCase_test_not_in_compare: + x: int = 0 + y: int = field(compare=False, default=4) + +class Mutable_TestCase_test_deliberately_mutable_defaults: + + def __init__(self): + self.l = [] + +@dataclass +@cclass +class C_TestCase_test_deliberately_mutable_defaults: + x: Mutable_TestCase_test_deliberately_mutable_defaults + +@dataclass +@cclass +class Point_TestCase_test_not_tuple: + x: int + y: int + +@dataclass +@cclass +class C_TestCase_test_not_tuple: + x: int + y: int + +@dataclass +@cclass +class Point3D_TestCase_test_not_other_dataclass: + x: int + y: int + z: int + +@dataclass +@cclass +class Date_TestCase_test_not_other_dataclass: + year: int + month: int + day: int + +@dataclass +@cclass +class Point3Dv1_TestCase_test_not_other_dataclass: + x: int = 0 + y: int = 0 + z: int = 0 + +@dataclass +@cclass +class C_TestCase_test_post_init_classmethod: + flag = False + x: int + y: int + + @classmethod + def __post_init__(cls): + cls.flag = True + +@dataclass +@cclass +class C_TestCase_test_class_var_no_default: + x: ClassVar[int] + +@dataclass 
+@cclass +class C_TestCase_test_init_var: + x: int = None + init_param: InitVar[int] = None + + def __post_init__(self, init_param): + if self.x is None: + self.x = init_param * 2 + +@dataclass +@cclass +class Foo_TestCase_test_default_factory_derived: + x: dict = field(default_factory=dict) + +@dataclass +@cclass +class Bar_TestCase_test_default_factory_derived(Foo_TestCase_test_default_factory_derived): + y: int = 1 + +@dataclass +@cclass +class Baz_TestCase_test_default_factory_derived(Foo_TestCase_test_default_factory_derived): + pass + +class NotDataClass_TestCase_test_is_dataclass: + pass + +@dataclass +@cclass +class C_TestCase_test_is_dataclass: + x: int + +@dataclass +@cclass +class D_TestCase_test_is_dataclass: + d: C_TestCase_test_is_dataclass + e: int + +class A_TestCase_test_is_dataclass_when_getattr_always_returns: + + def __getattr__(self, key): + return 0 + +class B_TestCase_test_is_dataclass_when_getattr_always_returns: + pass + +@dataclass +@cclass +class C_TestCase_test_helper_fields_with_class_instance: + x: int + y: float + +class C_TestCase_test_helper_fields_exception: + pass + +@dataclass +@cclass +class C_TestCase_test_helper_asdict: + x: int + y: int + +@dataclass +@cclass +class C_TestCase_test_helper_asdict_raises_on_classes: + x: int + y: int + +@dataclass +@cclass +class C_TestCase_test_helper_asdict_copy_values: + x: int + y: List[int] = field(default_factory=list) + +@dataclass +@cclass +class UserId_TestCase_test_helper_asdict_nested: + token: int + group: int + +@dataclass +@cclass +class User_TestCase_test_helper_asdict_nested: + name: str + id: UserId_TestCase_test_helper_asdict_nested + +@dataclass +@cclass +class User_TestCase_test_helper_asdict_builtin_containers: + name: str + id: int + +@dataclass +@cclass +class GroupList_TestCase_test_helper_asdict_builtin_containers: + id: int + users: List[User_TestCase_test_helper_asdict_builtin_containers] + +@dataclass +@cclass +class 
GroupTuple_TestCase_test_helper_asdict_builtin_containers: + id: int + users: Tuple[User_TestCase_test_helper_asdict_builtin_containers, ...] + +@dataclass +@cclass +class GroupDict_TestCase_test_helper_asdict_builtin_containers: + id: int + users: Dict[str, User_TestCase_test_helper_asdict_builtin_containers] + +@dataclass +@cclass +class Child_TestCase_test_helper_asdict_builtin_object_containers: + d: object + +@dataclass +@cclass +class Parent_TestCase_test_helper_asdict_builtin_object_containers: + child: Child_TestCase_test_helper_asdict_builtin_object_containers + +@dataclass +@cclass +class C_TestCase_test_helper_asdict_factory: + x: int + y: int + +@dataclass +@cclass +class C_TestCase_test_helper_asdict_namedtuple: + x: str + y: T + +@dataclass +@cclass +class C_TestCase_test_helper_asdict_namedtuple_key: + f: dict + +class T_TestCase_test_helper_asdict_namedtuple_derived(namedtuple('Tbase', 'a')): + + def my_a(self): + return self.a + +@dataclass +@cclass +class C_TestCase_test_helper_asdict_namedtuple_derived: + f: T_TestCase_test_helper_asdict_namedtuple_derived + +@dataclass +@cclass +class C_TestCase_test_helper_astuple: + x: int + y: int = 0 + +@dataclass +@cclass +class C_TestCase_test_helper_astuple_raises_on_classes: + x: int + y: int + +@dataclass +@cclass +class C_TestCase_test_helper_astuple_copy_values: + x: int + y: List[int] = field(default_factory=list) + +@dataclass +@cclass +class UserId_TestCase_test_helper_astuple_nested: + token: int + group: int + +@dataclass +@cclass +class User_TestCase_test_helper_astuple_nested: + name: str + id: UserId_TestCase_test_helper_astuple_nested + +@dataclass +@cclass +class User_TestCase_test_helper_astuple_builtin_containers: + name: str + id: int + +@dataclass +@cclass +class GroupList_TestCase_test_helper_astuple_builtin_containers: + id: int + users: List[User_TestCase_test_helper_astuple_builtin_containers] + +@dataclass +@cclass +class GroupTuple_TestCase_test_helper_astuple_builtin_containers: + 
id: int + users: Tuple[User_TestCase_test_helper_astuple_builtin_containers, ...] + +@dataclass +@cclass +class GroupDict_TestCase_test_helper_astuple_builtin_containers: + id: int + users: Dict[str, User_TestCase_test_helper_astuple_builtin_containers] + +@dataclass +@cclass +class Child_TestCase_test_helper_astuple_builtin_object_containers: + d: object + +@dataclass +@cclass +class Parent_TestCase_test_helper_astuple_builtin_object_containers: + child: Child_TestCase_test_helper_astuple_builtin_object_containers + +@dataclass +@cclass +class C_TestCase_test_helper_astuple_factory: + x: int + y: int + +@dataclass +@cclass +class C_TestCase_test_helper_astuple_namedtuple: + x: str + y: T + +@dataclass +@cclass +class C_TestCase_test_alternate_classmethod_constructor: + x: int + + @classmethod + def from_file(cls, filename): + value_in_file = 20 + return cls(value_in_file) + +@dataclass +@cclass +class C_TestCase_test_field_metadata_default: + i: int + +@dataclass +@cclass +class P_TestCase_test_dataclasses_pickleable: + x: int + y: int = 0 + +@dataclass +@cclass +class Q_TestCase_test_dataclasses_pickleable: + x: int + y: int = field(default=0, init=False) + +@dataclass +@cclass +class R_TestCase_test_dataclasses_pickleable: + x: int + y: List[int] = field(default_factory=list) + +@dataclass(init=False) +@cclass +class C_TestInit_test_no_init: + i: int = 0 + +@dataclass(init=False) +@cclass +class C_TestInit_test_no_init_: + i: int = 2 + + def __init__(self): + self.i = 3 + +@dataclass +@cclass +class C_TestInit_test_overwriting_init: + x: int + + def __init__(self, x): + self.x = 2 * x + +@dataclass(init=True) +@cclass +class C_TestInit_test_overwriting_init_: + x: int + + def __init__(self, x): + self.x = 2 * x + +@dataclass(init=False) +@cclass +class C_TestInit_test_overwriting_init__: + x: int + + def __init__(self, x): + self.x = 2 * x + +@dataclass +@cclass +class C_TestRepr_test_overwriting_repr: + x: int + + def __repr__(self): + return 'x' + 
+@dataclass(repr=True) +@cclass +class C_TestRepr_test_overwriting_repr_: + x: int + + def __repr__(self): + return 'x' + +@dataclass(repr=False) +@cclass +class C_TestRepr_test_overwriting_repr__: + x: int + + def __repr__(self): + return 'x' + +@dataclass +@cclass +class C_TestEq_test_overwriting_eq: + x: int + + def __eq__(self, other): + return other == 3 + +@dataclass(eq=True) +@cclass +class C_TestEq_test_overwriting_eq_: + x: int + + def __eq__(self, other): + return other == 4 + +@dataclass(eq=False) +@cclass +class C_TestEq_test_overwriting_eq__: + x: int + + def __eq__(self, other): + return other == 5 + +class Base1_TestMakeDataclass_test_base: + pass + +class Base2_TestMakeDataclass_test_base: + pass + +@dataclass +@cclass +class Base1_TestMakeDataclass_test_base_dataclass: + x: int + +class Base2_TestMakeDataclass_test_base_dataclass: + pass + +@dataclass(frozen=True) +@cclass +class C_TestReplace_test: + x: int + y: int + +@dataclass(frozen=True) +@cclass +class C_TestReplace_test_invalid_field_name: + x: int + y: int + +@dataclass(frozen=True) +@cclass +class C_TestReplace_test_invalid_object: + x: int + y: int + +@dataclass +@cclass +class C_TestReplace_test_no_init: + x: int + y: int = field(init=False, default=10) + +@dataclass +@cclass +class C_TestReplace_test_classvar: + x: int + y: ClassVar[int] = 1000 + +@dataclass +@cclass +class C_TestReplace_test_initvar_is_specified: + x: int + y: InitVar[int] + + def __post_init__(self, y): + self.x *= y + +class CustomError(Exception): + pass + +class TestCase(unittest.TestCase): + + def test_no_fields(self): + C = C_TestCase_test_no_fields + o = C() + self.assertEqual(len(fields(C)), 0) + + def test_no_fields_but_member_variable(self): + C = C_TestCase_test_no_fields_but_member_variable + o = C() + self.assertEqual(len(fields(C)), 0) + + def test_one_field_no_default(self): + C = C_TestCase_test_one_field_no_default + o = C(42) + self.assertEqual(o.x, 42) + + def test_named_init_params(self): + C = 
C_TestCase_test_named_init_params + o = C(x=32) + self.assertEqual(o.x, 32) + + def test_field_named_object(self): + C = C_TestCase_test_field_named_object + c = C('foo') + self.assertEqual(c.object, 'foo') + + def test_field_named_object_frozen(self): + C = C_TestCase_test_field_named_object_frozen + c = C('foo') + self.assertEqual(c.object, 'foo') + + def test_not_in_compare(self): + C = C_TestCase_test_not_in_compare + self.assertEqual(C(), C(0, 20)) + self.assertEqual(C(1, 10), C(1, 20)) + self.assertNotEqual(C(3), C(4, 10)) + self.assertNotEqual(C(3, 10), C(4, 10)) + + def test_deliberately_mutable_defaults(self): + Mutable = Mutable_TestCase_test_deliberately_mutable_defaults + C = C_TestCase_test_deliberately_mutable_defaults + lst = Mutable() + o1 = C(lst) + o2 = C(lst) + self.assertEqual(o1, o2) + o1.x.l.extend([1, 2]) + self.assertEqual(o1, o2) + self.assertEqual(o1.x.l, [1, 2]) + self.assertIs(o1.x, o2.x) + + def test_not_tuple(self): + Point = Point_TestCase_test_not_tuple + self.assertNotEqual(Point(1, 2), (1, 2)) + C = C_TestCase_test_not_tuple + self.assertNotEqual(Point(1, 3), C(1, 3)) + + def test_not_other_dataclass(self): + Point3D = Point3D_TestCase_test_not_other_dataclass + Date = Date_TestCase_test_not_other_dataclass + self.assertNotEqual(Point3D(2017, 6, 3), Date(2017, 6, 3)) + self.assertNotEqual(Point3D(1, 2, 3), (1, 2, 3)) + with self.assertRaises(TypeError): + (x, y, z) = Point3D(4, 5, 6) + Point3Dv1 = Point3Dv1_TestCase_test_not_other_dataclass + self.assertNotEqual(Point3D(0, 0, 0), Point3Dv1()) + + @skip_on_versions_below((3, 10)) + def test_post_init_classmethod(self): + C = C_TestCase_test_post_init_classmethod + self.assertFalse(C.flag) + c = C(3, 4) + self.assertEqual((c.x, c.y), (3, 4)) + self.assertTrue(C.flag) + + def test_class_var_no_default(self): + C = C_TestCase_test_class_var_no_default + self.assertNotIn('x', C.__dict__) + + def test_init_var(self): + C = C_TestCase_test_init_var + c = C(init_param=10) + 
self.assertEqual(c.x, 20) + + @skip_on_versions_below((3, 10)) + def test_init_var_preserve_type(self): + self.assertEqual(InitVar[int].type, int) + self.assertEqual(repr(InitVar[int]), 'dataclasses.InitVar[int]') + self.assertEqual(repr(InitVar[List[int]]), 'dataclasses.InitVar[typing.List[int]]') + self.assertEqual(repr(InitVar[list[int]]), 'dataclasses.InitVar[list[int]]') + self.assertEqual(repr(InitVar[int | str]), 'dataclasses.InitVar[int | str]') + + def test_default_factory_derived(self): + Foo = Foo_TestCase_test_default_factory_derived + Bar = Bar_TestCase_test_default_factory_derived + self.assertEqual(Foo().x, {}) + self.assertEqual(Bar().x, {}) + self.assertEqual(Bar().y, 1) + Baz = Baz_TestCase_test_default_factory_derived + self.assertEqual(Baz().x, {}) + + def test_is_dataclass(self): + NotDataClass = NotDataClass_TestCase_test_is_dataclass + self.assertFalse(is_dataclass(0)) + self.assertFalse(is_dataclass(int)) + self.assertFalse(is_dataclass(NotDataClass)) + self.assertFalse(is_dataclass(NotDataClass())) + C = C_TestCase_test_is_dataclass + D = D_TestCase_test_is_dataclass + c = C(10) + d = D(c, 4) + self.assertTrue(is_dataclass(C)) + self.assertTrue(is_dataclass(c)) + self.assertFalse(is_dataclass(c.x)) + self.assertTrue(is_dataclass(d.d)) + self.assertFalse(is_dataclass(d.e)) + + def test_is_dataclass_when_getattr_always_returns(self): + A = A_TestCase_test_is_dataclass_when_getattr_always_returns + self.assertFalse(is_dataclass(A)) + a = A() + B = B_TestCase_test_is_dataclass_when_getattr_always_returns + b = B() + b.__dataclass_fields__ = [] + for obj in (a, b): + with self.subTest(obj=obj): + self.assertFalse(is_dataclass(obj)) + with self.assertRaises(TypeError): + asdict(obj) + with self.assertRaises(TypeError): + astuple(obj) + with self.assertRaises(TypeError): + replace(obj, x=0) + + def test_helper_fields_with_class_instance(self): + C = C_TestCase_test_helper_fields_with_class_instance + self.assertEqual(fields(C), fields(C(0, 0.0))) 
+ + def test_helper_fields_exception(self): + with self.assertRaises(TypeError): + fields(0) + C = C_TestCase_test_helper_fields_exception + with self.assertRaises(TypeError): + fields(C) + with self.assertRaises(TypeError): + fields(C()) + + def test_helper_asdict(self): + C = C_TestCase_test_helper_asdict + c = C(1, 2) + self.assertEqual(asdict(c), {'x': 1, 'y': 2}) + self.assertEqual(asdict(c), asdict(c)) + self.assertIsNot(asdict(c), asdict(c)) + c.x = 42 + self.assertEqual(asdict(c), {'x': 42, 'y': 2}) + self.assertIs(type(asdict(c)), dict) + + def test_helper_asdict_raises_on_classes(self): + C = C_TestCase_test_helper_asdict_raises_on_classes + with self.assertRaises(TypeError): + asdict(C) + with self.assertRaises(TypeError): + asdict(int) + + def test_helper_asdict_copy_values(self): + C = C_TestCase_test_helper_asdict_copy_values + initial = [] + c = C(1, initial) + d = asdict(c) + self.assertEqual(d['y'], initial) + self.assertIsNot(d['y'], initial) + c = C(1) + d = asdict(c) + d['y'].append(1) + self.assertEqual(c.y, []) + + def test_helper_asdict_nested(self): + UserId = UserId_TestCase_test_helper_asdict_nested + User = User_TestCase_test_helper_asdict_nested + u = User('Joe', UserId(123, 1)) + d = asdict(u) + self.assertEqual(d, {'name': 'Joe', 'id': {'token': 123, 'group': 1}}) + self.assertIsNot(asdict(u), asdict(u)) + u.id.group = 2 + self.assertEqual(asdict(u), {'name': 'Joe', 'id': {'token': 123, 'group': 2}}) + + def test_helper_asdict_builtin_containers(self): + User = User_TestCase_test_helper_asdict_builtin_containers + GroupList = GroupList_TestCase_test_helper_asdict_builtin_containers + GroupTuple = GroupTuple_TestCase_test_helper_asdict_builtin_containers + GroupDict = GroupDict_TestCase_test_helper_asdict_builtin_containers + a = User('Alice', 1) + b = User('Bob', 2) + gl = GroupList(0, [a, b]) + gt = GroupTuple(0, (a, b)) + gd = GroupDict(0, {'first': a, 'second': b}) + self.assertEqual(asdict(gl), {'id': 0, 'users': [{'name': 'Alice', 
'id': 1}, {'name': 'Bob', 'id': 2}]}) + self.assertEqual(asdict(gt), {'id': 0, 'users': ({'name': 'Alice', 'id': 1}, {'name': 'Bob', 'id': 2})}) + self.assertEqual(asdict(gd), {'id': 0, 'users': {'first': {'name': 'Alice', 'id': 1}, 'second': {'name': 'Bob', 'id': 2}}}) + + def test_helper_asdict_builtin_object_containers(self): + Child = Child_TestCase_test_helper_asdict_builtin_object_containers + Parent = Parent_TestCase_test_helper_asdict_builtin_object_containers + self.assertEqual(asdict(Parent(Child([1]))), {'child': {'d': [1]}}) + self.assertEqual(asdict(Parent(Child({1: 2}))), {'child': {'d': {1: 2}}}) + + def test_helper_asdict_factory(self): + C = C_TestCase_test_helper_asdict_factory + c = C(1, 2) + d = asdict(c, dict_factory=OrderedDict) + self.assertEqual(d, OrderedDict([('x', 1), ('y', 2)])) + self.assertIsNot(d, asdict(c, dict_factory=OrderedDict)) + c.x = 42 + d = asdict(c, dict_factory=OrderedDict) + self.assertEqual(d, OrderedDict([('x', 42), ('y', 2)])) + self.assertIs(type(d), OrderedDict) + + def test_helper_asdict_namedtuple(self): + T = namedtuple('T', 'a b c') + C = C_TestCase_test_helper_asdict_namedtuple + c = C('outer', T(1, C('inner', T(11, 12, 13)), 2)) + d = asdict(c) + self.assertEqual(d, {'x': 'outer', 'y': T(1, {'x': 'inner', 'y': T(11, 12, 13)}, 2)}) + d = asdict(c, dict_factory=OrderedDict) + self.assertEqual(d, {'x': 'outer', 'y': T(1, {'x': 'inner', 'y': T(11, 12, 13)}, 2)}) + self.assertIs(type(d), OrderedDict) + self.assertIs(type(d['y'][1]), OrderedDict) + + def test_helper_asdict_namedtuple_key(self): + C = C_TestCase_test_helper_asdict_namedtuple_key + T = namedtuple('T', 'a') + c = C({T('an a'): 0}) + self.assertEqual(asdict(c), {'f': {T(a='an a'): 0}}) + + def test_helper_asdict_namedtuple_derived(self): + T = T_TestCase_test_helper_asdict_namedtuple_derived + C = C_TestCase_test_helper_asdict_namedtuple_derived + t = T(6) + c = C(t) + d = asdict(c) + self.assertEqual(d, {'f': T(a=6)}) + self.assertIsNot(d['f'], t) + 
self.assertEqual(d['f'].my_a(), 6) + + def test_helper_astuple(self): + C = C_TestCase_test_helper_astuple + c = C(1) + self.assertEqual(astuple(c), (1, 0)) + self.assertEqual(astuple(c), astuple(c)) + self.assertIsNot(astuple(c), astuple(c)) + c.y = 42 + self.assertEqual(astuple(c), (1, 42)) + self.assertIs(type(astuple(c)), tuple) + + def test_helper_astuple_raises_on_classes(self): + C = C_TestCase_test_helper_astuple_raises_on_classes + with self.assertRaises(TypeError): + astuple(C) + with self.assertRaises(TypeError): + astuple(int) + + def test_helper_astuple_copy_values(self): + C = C_TestCase_test_helper_astuple_copy_values + initial = [] + c = C(1, initial) + t = astuple(c) + self.assertEqual(t[1], initial) + self.assertIsNot(t[1], initial) + c = C(1) + t = astuple(c) + t[1].append(1) + self.assertEqual(c.y, []) + + def test_helper_astuple_nested(self): + UserId = UserId_TestCase_test_helper_astuple_nested + User = User_TestCase_test_helper_astuple_nested + u = User('Joe', UserId(123, 1)) + t = astuple(u) + self.assertEqual(t, ('Joe', (123, 1))) + self.assertIsNot(astuple(u), astuple(u)) + u.id.group = 2 + self.assertEqual(astuple(u), ('Joe', (123, 2))) + + def test_helper_astuple_builtin_containers(self): + User = User_TestCase_test_helper_astuple_builtin_containers + GroupList = GroupList_TestCase_test_helper_astuple_builtin_containers + GroupTuple = GroupTuple_TestCase_test_helper_astuple_builtin_containers + GroupDict = GroupDict_TestCase_test_helper_astuple_builtin_containers + a = User('Alice', 1) + b = User('Bob', 2) + gl = GroupList(0, [a, b]) + gt = GroupTuple(0, (a, b)) + gd = GroupDict(0, {'first': a, 'second': b}) + self.assertEqual(astuple(gl), (0, [('Alice', 1), ('Bob', 2)])) + self.assertEqual(astuple(gt), (0, (('Alice', 1), ('Bob', 2)))) + self.assertEqual(astuple(gd), (0, {'first': ('Alice', 1), 'second': ('Bob', 2)})) + + def test_helper_astuple_builtin_object_containers(self): + Child = 
Child_TestCase_test_helper_astuple_builtin_object_containers + Parent = Parent_TestCase_test_helper_astuple_builtin_object_containers + self.assertEqual(astuple(Parent(Child([1]))), (([1],),)) + self.assertEqual(astuple(Parent(Child({1: 2}))), (({1: 2},),)) + + def test_helper_astuple_factory(self): + C = C_TestCase_test_helper_astuple_factory + NT = namedtuple('NT', 'x y') + + def nt(lst): + return NT(*lst) + c = C(1, 2) + t = astuple(c, tuple_factory=nt) + self.assertEqual(t, NT(1, 2)) + self.assertIsNot(t, astuple(c, tuple_factory=nt)) + c.x = 42 + t = astuple(c, tuple_factory=nt) + self.assertEqual(t, NT(42, 2)) + self.assertIs(type(t), NT) + + def test_helper_astuple_namedtuple(self): + T = namedtuple('T', 'a b c') + C = C_TestCase_test_helper_astuple_namedtuple + c = C('outer', T(1, C('inner', T(11, 12, 13)), 2)) + t = astuple(c) + self.assertEqual(t, ('outer', T(1, ('inner', (11, 12, 13)), 2))) + t = astuple(c, tuple_factory=list) + self.assertEqual(t, ['outer', T(1, ['inner', T(11, 12, 13)], 2)]) + + def test_alternate_classmethod_constructor(self): + C = C_TestCase_test_alternate_classmethod_constructor + self.assertEqual(C.from_file('filename').x, 20) + + def test_field_metadata_default(self): + C = C_TestCase_test_field_metadata_default + self.assertFalse(fields(C)[0].metadata) + self.assertEqual(len(fields(C)[0].metadata), 0) + with self.assertRaises(TypeError): + fields(C)[0].metadata['test'] = 3 + + def test_dataclasses_pickleable(self): + global P, Q, R + P = P_TestCase_test_dataclasses_pickleable + Q = Q_TestCase_test_dataclasses_pickleable + R = R_TestCase_test_dataclasses_pickleable + q = Q(1) + q.y = 2 + samples = [P(1), P(1, 2), Q(1), q, R(1), R(1, [2, 3, 4])] + for sample in samples: + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.subTest(sample=sample, proto=proto): + new_sample = pickle.loads(pickle.dumps(sample, proto)) + self.assertEqual(sample.x, new_sample.x) + self.assertEqual(sample.y, new_sample.y) + 
self.assertIsNot(sample, new_sample) + new_sample.x = 42 + another_new_sample = pickle.loads(pickle.dumps(new_sample, proto)) + self.assertEqual(new_sample.x, another_new_sample.x) + self.assertEqual(sample.y, another_new_sample.y) + +class TestFieldNoAnnotation(unittest.TestCase): + pass + +class TestInit(unittest.TestCase): + + def test_no_init(self): + C = C_TestInit_test_no_init + self.assertEqual(C().i, 0) + C = C_TestInit_test_no_init_ + self.assertEqual(C().i, 3) + + def test_overwriting_init(self): + C = C_TestInit_test_overwriting_init + self.assertEqual(C(3).x, 6) + C = C_TestInit_test_overwriting_init_ + self.assertEqual(C(4).x, 8) + C = C_TestInit_test_overwriting_init__ + self.assertEqual(C(5).x, 10) + +class TestRepr(unittest.TestCase): + + def test_overwriting_repr(self): + C = C_TestRepr_test_overwriting_repr + self.assertEqual(repr(C(0)), 'x') + C = C_TestRepr_test_overwriting_repr_ + self.assertEqual(repr(C(0)), 'x') + C = C_TestRepr_test_overwriting_repr__ + self.assertEqual(repr(C(0)), 'x') + +class TestEq(unittest.TestCase): + + def test_overwriting_eq(self): + C = C_TestEq_test_overwriting_eq + self.assertEqual(C(1), 3) + self.assertNotEqual(C(1), 1) + C = C_TestEq_test_overwriting_eq_ + self.assertEqual(C(1), 4) + self.assertNotEqual(C(1), 1) + C = C_TestEq_test_overwriting_eq__ + self.assertEqual(C(1), 5) + self.assertNotEqual(C(1), 1) + +class TestOrdering(unittest.TestCase): + pass + +class TestHash(unittest.TestCase): + pass + +class TestMakeDataclass(unittest.TestCase): + pass + +class TestReplace(unittest.TestCase): + + def test(self): + C = C_TestReplace_test + c = C(1, 2) + c1 = replace(c, x=3) + self.assertEqual(c1.x, 3) + self.assertEqual(c1.y, 2) + + def test_invalid_field_name(self): + C = C_TestReplace_test_invalid_field_name + c = C(1, 2) + with self.assertRaises(TypeError): + c1 = replace(c, z=3) + + def test_invalid_object(self): + C = C_TestReplace_test_invalid_object + with self.assertRaises(TypeError): + replace(C, x=3) + 
with self.assertRaises(TypeError): + replace(0, x=3) + + def test_no_init(self): + C = C_TestReplace_test_no_init + c = C(1) + c.y = 20 + c1 = replace(c, x=5) + self.assertEqual((c1.x, c1.y), (5, 10)) + with self.assertRaises(ValueError): + replace(c, x=2, y=30) + with self.assertRaises(ValueError): + replace(c, y=30) + + def test_classvar(self): + C = C_TestReplace_test_classvar + c = C(1) + d = C(2) + self.assertIs(c.y, d.y) + self.assertEqual(c.y, 1000) + with self.assertRaises(TypeError): + replace(c, y=30) + replace(c, x=5) + + def test_initvar_is_specified(self): + C = C_TestReplace_test_initvar_is_specified + c = C(1, 10) + self.assertEqual(c.x, 10) + with self.assertRaises(ValueError): + replace(c, x=3) + c = replace(c, x=3, y=5) + self.assertEqual(c.x, 15) + +class TestAbstract(unittest.TestCase): + pass + +class TestKeywordArgs(unittest.TestCase): + pass +if __name__ == '__main__': + unittest.main() -- cgit v1.2.1 From 77918c57b5f36bee708f625e2499bf05b23a87f1 Mon Sep 17 00:00:00 2001 From: Ashwin Srinath <3190405+shwina@users.noreply.github.com> Date: Sat, 10 Sep 2022 06:55:15 -0700 Subject: [ENH] Propagate exceptions from `cdef` functions by default (#4670) Change the default behavior to always check for exceptions after a call to a cdef function defined in Cython. Calls to extern cdef functions are not checked by default. 
--- Cython/Compiler/Nodes.py | 16 ++++--- Cython/Compiler/Parsing.pxd | 2 +- Cython/Compiler/Parsing.py | 65 +++++++++++++++++++++++---- Cython/Compiler/PyrexTypes.py | 4 +- Cython/Includes/cpython/time.pxd | 2 +- Cython/Utility/MemoryView.pyx | 16 +++---- docs/src/userguide/language_basics.rst | 33 ++++++++++---- docs/src/userguide/migrating_to_cy30.rst | 37 +++++++++++++++ tests/build/cythonize_options.srctree | 2 +- tests/compile/branch_hints.pyx | 2 +- tests/compile/cpp_nogil.pyx | 2 +- tests/compile/declarations.srctree | 2 +- tests/compile/publicapi_pxd_mix.pxd | 2 +- tests/compile/publicapi_pxd_mix.pyx | 2 +- tests/errors/cfuncptr.pyx | 4 +- tests/errors/e_excvalfunctype.pyx | 2 +- tests/errors/e_nogilfunctype.pyx | 2 +- tests/errors/nogil.pyx | 2 +- tests/errors/nogilfunctype.pyx | 2 +- tests/memoryview/cythonarray.pyx | 2 +- tests/memoryview/memoryview_acq_count.srctree | 2 +- tests/memoryview/memslice.pyx | 4 +- tests/memoryview/numpy_memoryview.pyx | 2 +- tests/run/builtin_abs.pyx | 6 +-- tests/run/cfunc_convert.pyx | 2 +- tests/run/cpdef_void_return.pyx | 2 +- tests/run/cpp_classes.pyx | 2 +- tests/run/cpp_classes_def.pyx | 2 +- tests/run/cpp_exceptions_nogil.pyx | 2 +- tests/run/cpp_function_lib.pxd | 6 +-- tests/run/cpp_stl_function.pyx | 18 ++++---- tests/run/exceptionpropagation.pyx | 24 +++++++++- tests/run/exceptions_nogil.pyx | 2 +- tests/run/line_trace.pyx | 2 +- tests/run/nogil.pyx | 4 +- tests/run/nogil_conditional.pyx | 4 +- tests/run/parallel.pyx | 2 +- tests/run/sequential_parallel.pyx | 14 +++--- tests/run/trace_nogil.pyx | 2 +- tests/run/type_inference.pyx | 4 +- tests/run/with_gil.pyx | 6 +-- tests/run/with_gil_automatic.pyx | 6 +-- tests/run/withnogil.pyx | 2 +- tests/testsupport/cythonarrayutil.pxi | 2 +- 44 files changed, 225 insertions(+), 98 deletions(-) diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 21fb6334e..316a013db 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -730,13 +730,15 
@@ class CFuncDeclaratorNode(CDeclaratorNode): # Use an explicit exception return value to speed up exception checks. # Even if it is not declared, we can use the default exception value of the return type, # unless the function is some kind of external function that we do not control. - if (return_type.exception_value is not None and (visibility != 'extern' and not in_pxd) - # Ideally the function-pointer test would be better after self.base is analysed - # however that is hard to do with the current implementation so it lives here - # for now - and not isinstance(self.base, CPtrDeclaratorNode)): - # Extension types are more difficult because the signature must match the base type signature. - if not env.is_c_class_scope: + if (return_type.exception_value is not None and (visibility != 'extern' and not in_pxd)): + # - We skip this optimization for extension types; they are more difficult because + # the signature must match the base type signature. + # - Same for function pointers, as we want them to be able to match functions + # with any exception value. + # - Ideally the function-pointer test would be better after self.base is analysed + # however that is hard to do with the current implementation so it lives here + # for now. 
+ if not env.is_c_class_scope and not isinstance(self.base, CPtrDeclaratorNode): from .ExprNodes import ConstNode self.exception_value = ConstNode( self.pos, value=return_type.exception_value, type=return_type) diff --git a/Cython/Compiler/Parsing.pxd b/Cython/Compiler/Parsing.pxd index 038dc9c85..0c58df3f7 100644 --- a/Cython/Compiler/Parsing.pxd +++ b/Cython/Compiler/Parsing.pxd @@ -168,7 +168,7 @@ cdef p_c_simple_declarator(PyrexScanner s, ctx, bint empty, bint is_type, bint c bint assignable, bint nonempty) cdef p_nogil(PyrexScanner s) cdef p_with_gil(PyrexScanner s) -cdef p_exception_value_clause(PyrexScanner s) +cdef p_exception_value_clause(PyrexScanner s, ctx) cpdef p_c_arg_list(PyrexScanner s, ctx = *, bint in_pyfunc = *, bint cmethod_flag = *, bint nonempty_declarators = *, bint kw_only = *, bint annotated = *) cdef p_optional_ellipsis(PyrexScanner s) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 0d94ae1a9..6aa1da975 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -2945,7 +2945,17 @@ def p_c_func_declarator(s, pos, ctx, base, cmethod_flag): ellipsis = p_optional_ellipsis(s) s.expect(')') nogil = p_nogil(s) - exc_val, exc_check = p_exception_value_clause(s) + exc_val, exc_check, exc_clause = p_exception_value_clause(s, ctx) + if nogil and exc_clause: + warning( + s.position(), + "The keyword 'nogil' should appear at the end of the " + "function signature line. Placing it before 'except' " + "or 'noexcept' will be disallowed in a future version " + "of Cython.", + level=2 + ) + nogil = nogil or p_nogil(s) with_gil = p_with_gil(s) return Nodes.CFuncDeclaratorNode(pos, base = base, args = args, has_varargs = ellipsis, @@ -3055,18 +3065,54 @@ def p_with_gil(s): else: return 0 -def p_exception_value_clause(s): +def p_exception_value_clause(s, ctx): + """ + Parse exception value clause. 
+ + Maps clauses to exc_check / exc_value / exc_clause as follows: + ______________________________________________________________________ + | | | | | + | Clause | exc_check | exc_value | exc_clause | + | ___________________________ | ___________ | ___________ | __________ | + | | | | | + | (default func.) | True | None | False | + | (cdef extern) | False | None | False | + | noexcept | False | None | True | + | except | False | | True | + | except? | True | | True | + | except * | True | None | True | + | except + | '+' | None | True | + | except +* | '+' | '*' | True | + | except + | '+' | | True | + | ___________________________ | ___________ | ___________ | __________ | + + Note that the only reason we need `exc_clause` is to raise a + warning when `'except'` or `'noexcept'` is placed after the + `'nogil'` keyword. + """ + exc_clause = False exc_val = None - exc_check = 0 - if s.sy == 'except': + if ctx.visibility == 'extern': + exc_check = False + else: + exc_check = True + + if s.sy == 'IDENT' and s.systring == 'noexcept': + exc_clause = True + s.next() + exc_check = False + elif s.sy == 'except': + exc_clause = True s.next() if s.sy == '*': - exc_check = 1 + exc_check = True s.next() elif s.sy == '+': exc_check = '+' s.next() - if s.sy == 'IDENT': + if p_nogil(s): + ctx.nogil = True + elif s.sy == 'IDENT': name = s.systring s.next() exc_val = p_name(s, name) @@ -3075,10 +3121,13 @@ def p_exception_value_clause(s): s.next() else: if s.sy == '?': - exc_check = 1 + exc_check = True s.next() + else: + exc_check = False + # exc_val can be non-None even if exc_check is False, c.f. 
"except -1" exc_val = p_test(s) - return exc_val, exc_check + return exc_val, exc_check, exc_clause c_arg_list_terminators = cython.declare(frozenset, frozenset(( '*', '**', '...', ')', ':', '/'))) diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py index 89d8bfac9..da30809a3 100644 --- a/Cython/Compiler/PyrexTypes.py +++ b/Cython/Compiler/PyrexTypes.py @@ -3159,8 +3159,10 @@ class CFuncType(CType): if (pyrex or for_display) and not self.return_type.is_pyobject: if self.exception_value and self.exception_check: trailer = " except? %s" % self.exception_value - elif self.exception_value: + elif self.exception_value and not self.exception_check: trailer = " except %s" % self.exception_value + elif not self.exception_value and not self.exception_check: + trailer = " noexcept" elif self.exception_check == '+': trailer = " except +" elif self.exception_check and for_display: diff --git a/Cython/Includes/cpython/time.pxd b/Cython/Includes/cpython/time.pxd index 076abd931..7f20095a1 100644 --- a/Cython/Includes/cpython/time.pxd +++ b/Cython/Includes/cpython/time.pxd @@ -30,7 +30,7 @@ cdef inline int _raise_from_errno() except -1 with gil: return -1 # Let the C compiler know that this function always raises. -cdef inline tm localtime() nogil except *: +cdef inline tm localtime() except * nogil: """ Analogue to the stdlib time.localtime. 
The returned struct has some entries that the stdlib version does not: tm_gmtoff, tm_zone diff --git a/Cython/Utility/MemoryView.pyx b/Cython/Utility/MemoryView.pyx index 9b54fb78f..d36e7f60c 100644 --- a/Cython/Utility/MemoryView.pyx +++ b/Cython/Utility/MemoryView.pyx @@ -81,7 +81,7 @@ cdef extern from *: __Pyx_memviewslice *from_mvs, char *mode, int ndim, size_t sizeof_dtype, int contig_flag, - bint dtype_is_object) nogil except * + bint dtype_is_object) except * nogil bint slice_is_contig "__pyx_memviewslice_is_contig" ( {{memviewslice_name}} mvs, char order, int ndim) nogil bint slices_overlap "__pyx_slices_overlap" ({{memviewslice_name}} *slice1, @@ -122,7 +122,7 @@ cdef class array: Py_ssize_t itemsize unicode mode # FIXME: this should have been a simple 'char' bytes _format - void (*callback_free_data)(void *data) + void (*callback_free_data)(void *data) noexcept # cdef object _memview cdef bint free_data cdef bint dtype_is_object @@ -808,7 +808,7 @@ cdef int slice_memviewslice( int dim, int new_ndim, int *suboffset_dim, Py_ssize_t start, Py_ssize_t stop, Py_ssize_t step, int have_start, int have_stop, int have_step, - bint is_slice) nogil except -1: + bint is_slice) except -1 nogil: """ Create a new slice dst given slice src. 
@@ -938,7 +938,7 @@ cdef char *pybuffer_index(Py_buffer *view, char *bufp, Py_ssize_t index, ### Transposing a memoryviewslice # @cname('__pyx_memslice_transpose') -cdef int transpose_memslice({{memviewslice_name}} *memslice) nogil except -1: +cdef int transpose_memslice({{memviewslice_name}} *memslice) except -1 nogil: cdef int ndim = memslice.memview.view.ndim cdef Py_ssize_t *shape = memslice.shape @@ -1182,7 +1182,7 @@ cdef void copy_strided_to_strided({{memviewslice_name}} *src, src.shape, dst.shape, ndim, itemsize) @cname('__pyx_memoryview_slice_get_size') -cdef Py_ssize_t slice_get_size({{memviewslice_name}} *src, int ndim) nogil: +cdef Py_ssize_t slice_get_size({{memviewslice_name}} *src, int ndim) noexcept nogil: "Return the size of the memory occupied by the slice in number of bytes" cdef Py_ssize_t shape, size = src.memview.view.itemsize @@ -1216,7 +1216,7 @@ cdef Py_ssize_t fill_contig_strides_array( cdef void *copy_data_to_temp({{memviewslice_name}} *src, {{memviewslice_name}} *tmpslice, char order, - int ndim) nogil except NULL: + int ndim) except NULL nogil: """ Copy a direct slice to temporary contiguous memory. The caller should free the result when done. @@ -1276,7 +1276,7 @@ cdef int _err_no_memory() except -1 with gil: cdef int memoryview_copy_contents({{memviewslice_name}} src, {{memviewslice_name}} dst, int src_ndim, int dst_ndim, - bint dtype_is_object) nogil except -1: + bint dtype_is_object) except -1 nogil: """ Copy memory from slice src to slice dst. Check for overlapping memory and verify the shapes. 
@@ -1380,7 +1380,7 @@ cdef void refcount_objects_in_slice_with_gil(char *data, Py_ssize_t *shape, @cname('__pyx_memoryview_refcount_objects_in_slice') cdef void refcount_objects_in_slice(char *data, Py_ssize_t *shape, - Py_ssize_t *strides, int ndim, bint inc): + Py_ssize_t *strides, int ndim, bint inc) noexcept: cdef Py_ssize_t i cdef Py_ssize_t stride = strides[0] diff --git a/docs/src/userguide/language_basics.rst b/docs/src/userguide/language_basics.rst index a0450b785..ad8e9d85d 100644 --- a/docs/src/userguide/language_basics.rst +++ b/docs/src/userguide/language_basics.rst @@ -48,7 +48,7 @@ the use of ‘early binding’ programming techniques. C variable and type definitions =============================== -C variables can be declared by +C variables can be declared by * using the Cython specific :keyword:`cdef` statement, * using PEP-484/526 type annotations with C data types or @@ -459,7 +459,7 @@ passed in directly using a normal C function call. C Functions declared using :keyword:`cdef` or the ``@cfunc`` decorator with a Python object return type, like Python functions, will return a :keyword:`None` value when execution leaves the function body without an explicit return value. This is in -contrast to C/C++, which leaves the return value undefined. +contrast to C/C++, which leaves the return value undefined. In the case of non-Python object return types, the equivalent of zero is returned, for example, 0 for ``int``, :keyword:`False` for ``bint`` and :keyword:`NULL` for pointer types. A more complete comparison of the pros and cons of these different method @@ -654,14 +654,12 @@ error return value. While this is always the case for Python functions, functions defined as C functions or ``cpdef``/``@ccall`` functions can return arbitrary C types, -which do not have such a well-defined error return value. 
Thus, if an -exception is detected in such a function, a warning message is printed, -the exception is ignored, and the function returns immediately without -propagating the exception to its caller. +which do not have such a well-defined error return value. +Extra care must be taken to ensure Python exceptions are correctly +propagated from such functions. -If you want such a C function to be able to propagate exceptions, you need -to declare an exception return value for it as a contract with the caller. -Here is an example +A ``cdef`` function may be declared with an exception return value for it +as a contract with the caller. Here is an example: .. tabs:: @@ -760,12 +758,29 @@ An external C++ function that may raise an exception can be declared with:: See :ref:`wrapping-cplusplus` for more details. +Finally, if you are certain that your function should not raise an exception, (e.g., it +does not use Python objects at all, or you plan to use it as a callback in C code that +is unaware of Python exceptions), you can declare it as such using ``noexcept``:: + + cdef int spam() noexcept + +If a ``noexcept`` function *does* finish with an exception then it will print a warning message but not allow the exception to propagate further. Some things to note: +* ``cdef`` functions that are also ``extern`` are implicitly declared ``noexcept``. + In the uncommon case of external C/C++ functions that _can_ raise Python exceptions, + e.g., external functions that use the Python C API, you should explicitly declare + them with an exception value. + +* ``cdef`` functions that are *not* ``extern`` are implicitly declared with a suitable + exception specification for the return type (e.g. ``except *`` for a ``void`` return + type, ``except? -1`` for an ``int`` return type). + * Exception values can only be declared for functions returning a C integer, enum, float or pointer type, and the value must be a constant expression. 
Functions that return ``void``, or a struct/union by value, can only use the ``except *`` or ``exceptval(check=True)`` form. + * The exception value specification is part of the signature of the function. If you're passing a pointer to a function as a parameter or assigning it to a variable, the declared type of the parameter or variable must have diff --git a/docs/src/userguide/migrating_to_cy30.rst b/docs/src/userguide/migrating_to_cy30.rst index 1105ee15d..50b0b6734 100644 --- a/docs/src/userguide/migrating_to_cy30.rst +++ b/docs/src/userguide/migrating_to_cy30.rst @@ -173,6 +173,43 @@ The old behaviour can be restored with the :ref:`directive ` ``c_api_binop_methods=True``. More details are given in :ref:`arithmetic_methods`. +Exception values and ``noexcept`` +================================= + +``cdef`` functions that are not ``extern`` now propagate Python +exceptions by default, where previously they needed to explicitly be +declated with an :ref:`exception value ` in order +for them to do so. A new ``noexcept`` modifier can be used to declare +``cdef`` functions that will not raise exceptions. + +In existing code, you should mainly look out for ``cdef`` functions +that are declared without an exception value:: + + cdef int spam(int x): + pass + +If you left out the exception value by mistake, i.e., the function +should propagate Python exceptions, then the new behaviour will take +care of this for you, and correctly propagate any exceptions. +This was a common mistake in Cython code and the main reason to change the behaviour. + +On the other hand, if you didn't declare an exception value because +you want to avoid exceptions propagating out of this function, the new behaviour +will result in slightly less efficient code being generated, now involving an exception check. 
+To prevent that, you must declare the function explicitly as being +``noexcept``:: + + cdef int spam(int x) noexcept: + pass + +The behaviour for ``cdef`` functions that are also ``extern`` is +unchanged as ``extern`` functions are less likely to raise Python +exceptions + +The behaviour for any ``cdef`` function that is declared with an +explicit exception value (e.g., ``cdef int spam(int x) except -1``) is +also unchanged. + Annotation typing ================= diff --git a/tests/build/cythonize_options.srctree b/tests/build/cythonize_options.srctree index fcef9645b..0dc7f724f 100644 --- a/tests/build/cythonize_options.srctree +++ b/tests/build/cythonize_options.srctree @@ -49,5 +49,5 @@ def mod_int_c(int a, int b): assert mod_int_c(-1, 10) < 0 # unraisable exceptions should produce a warning -cdef int no_exc_propagate(): +cdef int no_exc_propagate() noexcept: raise TypeError() diff --git a/tests/compile/branch_hints.pyx b/tests/compile/branch_hints.pyx index 575ee6cba..e6bd0b5c3 100644 --- a/tests/compile/branch_hints.pyx +++ b/tests/compile/branch_hints.pyx @@ -82,7 +82,7 @@ def if_elif_raise_else_raise(x): "//IfClauseNode[@branch_hint = 'likely']", "//IfClauseNode[not(@branch_hint)]", ) -cpdef int nogil_if_raise(int x) nogil except -1: +cpdef int nogil_if_raise(int x) except -1 nogil: if x: raise TypeError() elif not x: diff --git a/tests/compile/cpp_nogil.pyx b/tests/compile/cpp_nogil.pyx index 1007054dc..658dc37cb 100644 --- a/tests/compile/cpp_nogil.pyx +++ b/tests/compile/cpp_nogil.pyx @@ -19,5 +19,5 @@ with nogil: # We can override nogil methods as with gil methods. 
cdef cppclass WithGilSubclass(NoGilTest1): - void doSomething() with gil: + void doSomething() noexcept with gil: print "have the gil" diff --git a/tests/compile/declarations.srctree b/tests/compile/declarations.srctree index babf2e4e3..bfbbcd4b3 100644 --- a/tests/compile/declarations.srctree +++ b/tests/compile/declarations.srctree @@ -40,7 +40,7 @@ cdef extern int a(int[][3], int[][3][5]) cdef void f(): cdef void *p=NULL global ifnp, cpa - ifnp = p + ifnp = p cdef char *g(): pass diff --git a/tests/compile/publicapi_pxd_mix.pxd b/tests/compile/publicapi_pxd_mix.pxd index 09452f116..414274d45 100644 --- a/tests/compile/publicapi_pxd_mix.pxd +++ b/tests/compile/publicapi_pxd_mix.pxd @@ -61,7 +61,7 @@ cdef public api void bar3() cdef inline void* spam (object o) except NULL: return NULL cdef void* spam0(object o) except NULL cdef public void* spam1(object o) except NULL -cdef api void* spam2(object o) nogil except NULL +cdef api void* spam2(object o) except NULL nogil cdef public api void* spam3(object o) except NULL with gil # -- diff --git a/tests/compile/publicapi_pxd_mix.pyx b/tests/compile/publicapi_pxd_mix.pyx index 588f6b79c..dd748053f 100644 --- a/tests/compile/publicapi_pxd_mix.pyx +++ b/tests/compile/publicapi_pxd_mix.pyx @@ -15,7 +15,7 @@ cdef public api void bar3(): pass cdef void* spam0(object o) except NULL: return NULL cdef public void* spam1(object o) except NULL: return NULL -cdef api void* spam2(object o) nogil except NULL: return NULL +cdef api void* spam2(object o) except NULL nogil: return NULL cdef public api void* spam3(object o) except NULL with gil: return NULL cdef int i0 = 0 # XXX This should not be required! 
diff --git a/tests/errors/cfuncptr.pyx b/tests/errors/cfuncptr.pyx index e05efa519..f07ef2167 100644 --- a/tests/errors/cfuncptr.pyx +++ b/tests/errors/cfuncptr.pyx @@ -19,7 +19,7 @@ cdef extern from *: cdef int exceptstar(int bad) except * def fail_exceptstar(bad): - cdef int (*fptr_a)(int) # noexcept + cdef int (*fptr_a)(int) noexcept cdef int (*fptr_b)(int) except -1 cdef int (*fptr_c)(int) except ?-1 fptr_a = exceptstar @@ -30,7 +30,7 @@ _ERRORS = """ 13:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except -2' 14:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except -1' 15:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except? -1' -25:13: Cannot assign type 'int (int) except *' to 'int (*)(int)' +25:13: Cannot assign type 'int (int) except *' to 'int (*)(int) noexcept' 26:13: Cannot assign type 'int (int) except *' to 'int (*)(int) except -1' 27:13: Cannot assign type 'int (int) except *' to 'int (*)(int) except? -1' """ diff --git a/tests/errors/e_excvalfunctype.pyx b/tests/errors/e_excvalfunctype.pyx index a1d978322..25cae47c6 100644 --- a/tests/errors/e_excvalfunctype.pyx +++ b/tests/errors/e_excvalfunctype.pyx @@ -1,7 +1,7 @@ # mode: error ctypedef int (*spamfunc)(int, char *) except 42 -ctypedef int (*grailfunc)(int, char *) +ctypedef int (*grailfunc)(int, char *) noexcept cdef grailfunc grail cdef spamfunc spam diff --git a/tests/errors/e_nogilfunctype.pyx b/tests/errors/e_nogilfunctype.pyx index ccac37b7e..ac06af27e 100644 --- a/tests/errors/e_nogilfunctype.pyx +++ b/tests/errors/e_nogilfunctype.pyx @@ -10,7 +10,7 @@ fp = f fp = f _ERRORS = u""" -9:5: Cannot assign type 'void (void)' to 'void (*)(void) nogil' +9:5: Cannot assign type 'void (void) noexcept' to 'void (*)(void) noexcept nogil' """ _WARNINGS = """ diff --git a/tests/errors/nogil.pyx b/tests/errors/nogil.pyx index aa3011d00..dfdebeebd 100644 --- a/tests/errors/nogil.pyx +++ b/tests/errors/nogil.pyx @@ -90,7 +90,7 @@ def bare_pyvar_name(object 
x): with nogil: x -cdef int fstrings(int x, object obj) nogil except -1: +cdef int fstrings(int x, object obj) except -1 nogil: f"" # allowed f"a" # allowed f"a"f"b" # allowed diff --git a/tests/errors/nogilfunctype.pyx b/tests/errors/nogilfunctype.pyx index 91127bee4..c0ca2bb15 100644 --- a/tests/errors/nogilfunctype.pyx +++ b/tests/errors/nogilfunctype.pyx @@ -12,5 +12,5 @@ gp = g fp = f _ERRORS = u""" -12:5: Cannot assign type 'void (void)' to 'void (*)(void) nogil' +12:5: Cannot assign type 'void (void) noexcept' to 'void (*)(void) noexcept nogil' """ diff --git a/tests/memoryview/cythonarray.pyx b/tests/memoryview/cythonarray.pyx index 6bfd7397e..15d61d086 100644 --- a/tests/memoryview/cythonarray.pyx +++ b/tests/memoryview/cythonarray.pyx @@ -130,7 +130,7 @@ cdef int *getp(int dim1=10, int dim2=10, dim3=1) except NULL: return p -cdef void callback_free_data(void *p): +cdef void callback_free_data(void *p) noexcept: print 'callback free data called' free(p) diff --git a/tests/memoryview/memoryview_acq_count.srctree b/tests/memoryview/memoryview_acq_count.srctree index e7e6dfc69..3bc2f1cc9 100644 --- a/tests/memoryview/memoryview_acq_count.srctree +++ b/tests/memoryview/memoryview_acq_count.srctree @@ -35,7 +35,7 @@ cdef Py_ssize_t i for i in prange(1000000, nogil=True, num_threads=16): use_slice(m[::2]) -cdef int use_slice(int[:] m) nogil except -1: +cdef int use_slice(int[:] m) except -1 nogil: cdef int[:] m2 = m[1:] m = m2[:-1] del m, m2 diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx index 773c2ec95..0de47d9b6 100644 --- a/tests/memoryview/memslice.pyx +++ b/tests/memoryview/memslice.pyx @@ -1728,7 +1728,7 @@ def test_oob(): print a[:, 20] -cdef int nogil_oob(int[:, :] a) nogil except 0: +cdef int nogil_oob(int[:, :] a) except 0 nogil: a[100, 9:] return 1 @@ -1772,7 +1772,7 @@ def test_nogil_oob2(): a[100, 9:] @cython.boundscheck(False) -cdef int cdef_nogil(int[:, :] a) nogil except 0: +cdef int cdef_nogil(int[:, :] a) except 0 
nogil: cdef int i, j cdef int[:, :] b = a[::-1, 3:10:2] for i in range(b.shape[0]): diff --git a/tests/memoryview/numpy_memoryview.pyx b/tests/memoryview/numpy_memoryview.pyx index 350e94489..2af6bfea4 100644 --- a/tests/memoryview/numpy_memoryview.pyx +++ b/tests/memoryview/numpy_memoryview.pyx @@ -248,7 +248,7 @@ cdef extern from "bufaccess.h": ctypedef unsigned int td_h_ushort # Defined as unsigned short ctypedef td_h_short td_h_cy_short -cdef void dealloc_callback(void *data): +cdef void dealloc_callback(void *data) noexcept: print "deallocating..." def build_numarray(array array): diff --git a/tests/run/builtin_abs.pyx b/tests/run/builtin_abs.pyx index 59f3a93c4..e0b31b7e1 100644 --- a/tests/run/builtin_abs.pyx +++ b/tests/run/builtin_abs.pyx @@ -63,7 +63,7 @@ def int_abs(int a): @cython.overflowcheck(True) @cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']", "//ReturnStatNode//NameNode[@entry.cname = 'abs']") -cdef int c_int_abs(int a) nogil except *: +cdef int c_int_abs(int a) except * nogil: return abs(a) def test_c_int_abs(int a): @@ -125,7 +125,7 @@ def long_abs(long a): @cython.overflowcheck(True) @cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']", "//ReturnStatNode//NameNode[@entry.cname = 'labs']") -cdef long c_long_abs(long a) nogil except *: +cdef long c_long_abs(long a) except * nogil: return abs(a) def test_c_long_abs(long a): @@ -189,7 +189,7 @@ def long_long_abs(long long a): @cython.overflowcheck(True) @cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']", "//ReturnStatNode//NameNode[@entry.cname = '__Pyx_abs_longlong']") -cdef long long c_long_long_abs(long long a) nogil except *: +cdef long long c_long_long_abs(long long a) except * nogil: return abs(a) def test_c_long_long_abs(long long a): diff --git a/tests/run/cfunc_convert.pyx b/tests/run/cfunc_convert.pyx index 7e41a5371..89e09ea36 100644 --- a/tests/run/cfunc_convert.pyx +++ 
b/tests/run/cfunc_convert.pyx @@ -74,7 +74,7 @@ def test_global(): >>> global_csqrt.__doc__ 'wrap(x: float) -> float' >>> test_global() - double (double) nogil + double (double) noexcept nogil Python object """ print cython.typeof(sqrt) diff --git a/tests/run/cpdef_void_return.pyx b/tests/run/cpdef_void_return.pyx index e15448505..7943c3466 100644 --- a/tests/run/cpdef_void_return.pyx +++ b/tests/run/cpdef_void_return.pyx @@ -1,4 +1,4 @@ -cpdef void unraisable(): +cpdef void unraisable() noexcept: """ >>> unraisable() here diff --git a/tests/run/cpp_classes.pyx b/tests/run/cpp_classes.pyx index d2babdac3..1a1110b91 100644 --- a/tests/run/cpp_classes.pyx +++ b/tests/run/cpp_classes.pyx @@ -9,7 +9,7 @@ cdef extern from "shapes.h" namespace "shapes": float area() cdef cppclass Ellipse(Shape): - Ellipse(int a, int b) nogil except + + Ellipse(int a, int b) except + nogil cdef cppclass Circle(Ellipse): int radius diff --git a/tests/run/cpp_classes_def.pyx b/tests/run/cpp_classes_def.pyx index e36fc4fbd..855de7051 100644 --- a/tests/run/cpp_classes_def.pyx +++ b/tests/run/cpp_classes_def.pyx @@ -21,7 +21,7 @@ cdef cppclass RegularPolygon(Shape): __init__(int n, float radius): this.n = n this.radius = radius - float area() const: + float area() noexcept const: cdef double theta = pi / this.n return this.radius * this.radius * sin(theta) * cos(theta) * this.n void do_with() except *: diff --git a/tests/run/cpp_exceptions_nogil.pyx b/tests/run/cpp_exceptions_nogil.pyx index 1d21d40f9..5c6315323 100644 --- a/tests/run/cpp_exceptions_nogil.pyx +++ b/tests/run/cpp_exceptions_nogil.pyx @@ -9,7 +9,7 @@ cdef extern from "cpp_exceptions_nogil_helper.h" nogil: cdef void bar "foo"(int i) except +ValueError cdef void spam"foo"(int i) except +raise_TypeError -cdef int foo_nogil(int i) nogil except *: +cdef int foo_nogil(int i) except * nogil: foo(i) def test_foo_nogil(): diff --git a/tests/run/cpp_function_lib.pxd b/tests/run/cpp_function_lib.pxd index 2a5d72886..ba6694cb9 100644 --- 
a/tests/run/cpp_function_lib.pxd +++ b/tests/run/cpp_function_lib.pxd @@ -13,7 +13,7 @@ cdef extern from "cpp_function_lib.h": double call "operator()"(double a, int b) cdef cppclass FunctionKeeper: - FunctionKeeper(function[double(double, int)] user_function) - void set_function(function[double(double, int)] user_function) - function[double(double, int)] get_function() + FunctionKeeper(function[double(double, int) noexcept] user_function) + void set_function(function[double(double, int) noexcept] user_function) + function[double(double, int) noexcept] get_function() double call_function(double a, int b) except + diff --git a/tests/run/cpp_stl_function.pyx b/tests/run/cpp_stl_function.pyx index 723773481..14a92c586 100644 --- a/tests/run/cpp_stl_function.pyx +++ b/tests/run/cpp_stl_function.pyx @@ -49,25 +49,25 @@ cdef class FunctionKeeper: """ cdef cpp_function_lib.FunctionKeeper* function_keeper - cdef function[double(double, int)]* _get_function_ptr_from_name(self, function_name): - cdef function[double(double, int)] *f + cdef function[double(double, int) noexcept]* _get_function_ptr_from_name(self, function_name): + cdef function[double(double, int) noexcept] *f if function_name == 'add_one': - f = new function[double(double, int)](cpp_function_lib.add_one) + f = new function[double(double, int) noexcept](cpp_function_lib.add_one) elif function_name == 'add_two': - f = new function[double(double, int)](cpp_function_lib.add_two) + f = new function[double(double, int) noexcept](cpp_function_lib.add_two) elif function_name == 'AddAnotherFunctor5': - f = new function[double(double, int)]() + f = new function[double(double, int) noexcept]() f[0] = cpp_function_lib.AddAnotherFunctor(5.0) elif function_name == 'NULL': - f = new function[double(double, int)](NULL) + f = new function[double(double, int) noexcept](NULL) elif function_name == 'default': - f = new function[double(double, int)]() + f = new function[double(double, int) noexcept]() return f def 
__cinit__(self, function_name): - cdef function[double(double, int)] *f = self._get_function_ptr_from_name(function_name) + cdef function[double(double, int) noexcept] *f = self._get_function_ptr_from_name(function_name) self.function_keeper = new cpp_function_lib.FunctionKeeper(f[0]) del f @@ -81,6 +81,6 @@ cdef class FunctionKeeper: return self.function_keeper.get_function() def set_function(self, function_name): - cdef function[double(double, int)] *f = self._get_function_ptr_from_name(function_name) + cdef function[double(double, int) noexcept] *f = self._get_function_ptr_from_name(function_name) self.function_keeper.set_function(f[0]) del f diff --git a/tests/run/exceptionpropagation.pyx b/tests/run/exceptionpropagation.pyx index 2c79bf26e..2466550d5 100644 --- a/tests/run/exceptionpropagation.pyx +++ b/tests/run/exceptionpropagation.pyx @@ -56,4 +56,26 @@ def test_except_promotion_compare(bint fire): ... RuntimeError """ - except_promotion_compare(fire) \ No newline at end of file + except_promotion_compare(fire) + + +cdef int cdef_function_that_raises(): + raise RuntimeError + +cdef int cdef_noexcept_function_that_raises() noexcept: + raise RuntimeError + +def test_except_raise_by_default(): + """ + >>> test_except_raise_by_default() + Traceback (most recent call last): + ... 
+ RuntimeError + """ + cdef_function_that_raises() + +def test_noexcept(): + """ + >>> test_noexcept() + """ + cdef_noexcept_function_that_raises() diff --git a/tests/run/exceptions_nogil.pyx b/tests/run/exceptions_nogil.pyx index 2bcedd9ed..31af84ae2 100644 --- a/tests/run/exceptions_nogil.pyx +++ b/tests/run/exceptions_nogil.pyx @@ -1,7 +1,7 @@ # mode: run # tag: nogil, withgil, exceptions -cdef void foo_nogil(int i) nogil except *: +cdef void foo_nogil(int i) except * nogil: if i != 0: raise ValueError("huhu !") diff --git a/tests/run/line_trace.pyx b/tests/run/line_trace.pyx index 32579aff7..0a3dc13fa 100644 --- a/tests/run/line_trace.pyx +++ b/tests/run/line_trace.pyx @@ -155,7 +155,7 @@ def global_name(global_name): return global_name + 321 -cdef int cy_add_nogil(int a, int b) nogil except -1: +cdef int cy_add_nogil(int a, int b) except -1 nogil: x = a + b # 1 return x # 2 diff --git a/tests/run/nogil.pyx b/tests/run/nogil.pyx index efaee4ff6..356021149 100644 --- a/tests/run/nogil.pyx +++ b/tests/run/nogil.pyx @@ -71,7 +71,7 @@ def test_get_gil_in_nogil(): cdef int with_gil_func() except -1 with gil: raise Exception("error!") -cdef int nogil_func() nogil except -1: +cdef int nogil_func() except -1 nogil: with_gil_func() def test_nogil_exception_propagation(): @@ -85,7 +85,7 @@ def test_nogil_exception_propagation(): nogil_func() -cdef int write_unraisable() nogil: +cdef int write_unraisable() noexcept nogil: with gil: raise ValueError() diff --git a/tests/run/nogil_conditional.pyx b/tests/run/nogil_conditional.pyx index eba22d5b2..92eff0853 100644 --- a/tests/run/nogil_conditional.pyx +++ b/tests/run/nogil_conditional.pyx @@ -34,7 +34,7 @@ cdef int with_gil_func() except? -1 with gil: raise Exception("error!") -cdef int nogil_func() nogil except? -1: +cdef int nogil_func() except? 
-1 nogil: with_gil_func() @@ -51,7 +51,7 @@ def test_nogil_exception_propagation(): nogil_func() -cdef int write_unraisable() nogil: +cdef int write_unraisable() noexcept nogil: with gil: raise ValueError() diff --git a/tests/run/parallel.pyx b/tests/run/parallel.pyx index c3739b10b..40d7ac10d 100644 --- a/tests/run/parallel.pyx +++ b/tests/run/parallel.pyx @@ -32,7 +32,7 @@ def test_parallel(): free(buf) -cdef int get_num_threads() with gil: +cdef int get_num_threads() noexcept with gil: print "get_num_threads called" return 3 diff --git a/tests/run/sequential_parallel.pyx b/tests/run/sequential_parallel.pyx index 3d8e1efff..cd4bbd6bc 100644 --- a/tests/run/sequential_parallel.pyx +++ b/tests/run/sequential_parallel.pyx @@ -315,7 +315,7 @@ def test_nan_init(): c1 = 16 -cdef void nogil_print(char *s) with gil: +cdef void nogil_print(char *s) noexcept with gil: print s.decode('ascii') def test_else_clause(): @@ -406,7 +406,7 @@ def test_nested_break_continue(): print i -cdef int parallel_return() nogil: +cdef int parallel_return() noexcept nogil: cdef int i for i in prange(10): @@ -640,7 +640,7 @@ def test_parallel_with_gil_continue_unnested(): print sum -cdef int inner_parallel_section() nogil: +cdef int inner_parallel_section() noexcept nogil: cdef int j, sum = 0 for j in prange(10): sum += j @@ -656,10 +656,10 @@ def outer_parallel_section(): sum += inner_parallel_section() return sum -cdef int nogil_cdef_except_clause() nogil except -1: +cdef int nogil_cdef_except_clause() except -1 nogil: return 1 -cdef void nogil_cdef_except_star() nogil except *: +cdef void nogil_cdef_except_star() except * nogil: pass def test_nogil_cdef_except_clause(): @@ -683,7 +683,7 @@ def test_num_threads_compile(): for i in prange(10): pass -cdef int chunksize() nogil: +cdef int chunksize() noexcept nogil: return 3 def test_chunksize(): @@ -784,7 +784,7 @@ cdef extern from *: """ void address_of_temp(...) nogil void address_of_temp2(...) 
nogil - double get_value() nogil except -1.0 # will generate a temp for exception checking + double get_value() except -1.0 nogil # will generate a temp for exception checking def test_inner_private(): """ diff --git a/tests/run/trace_nogil.pyx b/tests/run/trace_nogil.pyx index dee443e5b..175935ced 100644 --- a/tests/run/trace_nogil.pyx +++ b/tests/run/trace_nogil.pyx @@ -1,6 +1,6 @@ # cython: linetrace=True -cdef void foo(int err) nogil except *: +cdef void foo(int err) except * nogil: with gil: raise ValueError(err) diff --git a/tests/run/type_inference.pyx b/tests/run/type_inference.pyx index df77f6bd9..9a72022b2 100644 --- a/tests/run/type_inference.pyx +++ b/tests/run/type_inference.pyx @@ -242,7 +242,7 @@ def c_functions(): >>> c_functions() """ f = cfunc - assert typeof(f) == 'int (*)(int)', typeof(f) + assert typeof(f) == 'int (*)(int) except? -1', typeof(f) assert 2 == f(1) def builtin_functions(): @@ -537,7 +537,7 @@ def safe_c_functions(): >>> safe_c_functions() """ f = cfunc - assert typeof(f) == 'int (*)(int)', typeof(f) + assert typeof(f) == 'int (*)(int) except? 
-1', typeof(f) assert 2 == f(1) @infer_types(None) diff --git a/tests/run/with_gil.pyx b/tests/run/with_gil.pyx index 6fee3f192..2eed27eac 100644 --- a/tests/run/with_gil.pyx +++ b/tests/run/with_gil.pyx @@ -259,7 +259,7 @@ cpdef test_cpdef(): # Now test some cdef functions with different return types -cdef void void_nogil_ignore_exception() nogil: +cdef void void_nogil_ignore_exception() noexcept nogil: with gil: raise ExceptionWithMsg("This is swallowed") @@ -267,7 +267,7 @@ cdef void void_nogil_ignore_exception() nogil: with gil: print "unreachable" -cdef void void_nogil_nested_gil() nogil: +cdef void void_nogil_nested_gil() noexcept nogil: with gil: with nogil: with gil: @@ -304,7 +304,7 @@ def test_nogil_void_funcs_with_nogil(): void_nogil_nested_gil() -cdef PyObject *nogil_propagate_exception() nogil except NULL: +cdef PyObject *nogil_propagate_exception() except NULL nogil: with nogil: with gil: raise Exception("This exception propagates!") diff --git a/tests/run/with_gil_automatic.pyx b/tests/run/with_gil_automatic.pyx index 425dbbce7..954ed6d47 100644 --- a/tests/run/with_gil_automatic.pyx +++ b/tests/run/with_gil_automatic.pyx @@ -28,7 +28,7 @@ def test_print_in_nogil_section(x): @cython.test_fail_if_path_exists( "//GILStatNode//GILStatNode", ) -cpdef int test_print_in_nogil_func(x) nogil except -1: +cpdef int test_print_in_nogil_func(x) except -1 nogil: """ >>> _ = test_print_in_nogil_func(123) --123-- @@ -61,7 +61,7 @@ def test_raise_in_nogil_section(x): @cython.test_fail_if_path_exists( "//GILStatNode//GILStatNode", ) -cpdef int test_raise_in_nogil_func(x) nogil except -1: +cpdef int test_raise_in_nogil_func(x) except -1 nogil: """ >>> test_raise_in_nogil_func(123) Traceback (most recent call last): @@ -128,7 +128,7 @@ def assert_in_nogil_section_string(int x): "//AssertStatNode//GILStatNode", "//AssertStatNode//GILStatNode//RaiseStatNode", ) -cpdef int assert_in_nogil_func(int x) nogil except -1: +cpdef int assert_in_nogil_func(int x) except -1 nogil: 
""" >>> _ = assert_in_nogil_func(123) >>> assert_in_nogil_func(0) diff --git a/tests/run/withnogil.pyx b/tests/run/withnogil.pyx index 55b7896a7..a64779dfe 100644 --- a/tests/run/withnogil.pyx +++ b/tests/run/withnogil.pyx @@ -19,5 +19,5 @@ def g(): h() return 1 -cdef int h() nogil except -1: +cdef int h() except -1 nogil: pass diff --git a/tests/testsupport/cythonarrayutil.pxi b/tests/testsupport/cythonarrayutil.pxi index 50d764acd..683dc4b71 100644 --- a/tests/testsupport/cythonarrayutil.pxi +++ b/tests/testsupport/cythonarrayutil.pxi @@ -2,7 +2,7 @@ from libc.stdlib cimport malloc, free cimport cython from cython.view cimport array -cdef void callback(void *data): +cdef void callback(void *data) noexcept: print "callback called" free(data) -- cgit v1.2.1 From 9fcf55fa86488bba666e918ba6550e976557411f Mon Sep 17 00:00:00 2001 From: Max Bachmann Date: Mon, 12 Sep 2022 07:56:12 +0200 Subject: fix PyPy unused variable warning in Coroutines.c (#5019) --- Cython/Utility/Coroutine.c | 1 + 1 file changed, 1 insertion(+) diff --git a/Cython/Utility/Coroutine.c b/Cython/Utility/Coroutine.c index 0c478fdbe..1a4a78ff5 100644 --- a/Cython/Utility/Coroutine.c +++ b/Cython/Utility/Coroutine.c @@ -800,6 +800,7 @@ static CYTHON_INLINE void __Pyx_Coroutine_ResetFrameBackpointer(__Pyx_ExcInfoStr // cycle. #if CYTHON_COMPILING_IN_PYPY // FIXME: what to do in PyPy? + CYTHON_UNUSED_VAR(exc_state); #else PyObject *exc_tb; -- cgit v1.2.1 From a6e7218aae9ec3b2ca7a9d8be4276e802a480da1 Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 13 Sep 2022 18:26:06 +0100 Subject: Remove dataclass tests that used mutability (#5026) We accidently made cdef classes mutable in Python 3.10. @maxbachmann has shown that it's trivially easy to crash them (e.g. by trying to change the name). Therefore we should make them immutable. I included this test from the CPython test-suite because it seemed to work. However, it relies on a feature that's unsafe and unintended. 
--- Tools/make_dataclass_tests.py | 10 ++++++---- tests/run/test_dataclasses.pyx | 19 ------------------- 2 files changed, 6 insertions(+), 23 deletions(-) diff --git a/Tools/make_dataclass_tests.py b/Tools/make_dataclass_tests.py index 63cf0a657..0c02f3d14 100644 --- a/Tools/make_dataclass_tests.py +++ b/Tools/make_dataclass_tests.py @@ -17,6 +17,7 @@ unavailable_functions = frozenset( skip_tests = frozenset( { # needs Cython compile + # ==================== ("TestCase", "test_field_default_default_factory_error"), ("TestCase", "test_two_fields_one_default"), ("TestCase", "test_overwrite_hash"), @@ -57,6 +58,7 @@ skip_tests = frozenset( ("TestCase", "test_class_attrs"), ("TestStringAnnotations",), # almost all the texts here use local variables # Currently unsupported + # ===================== ( "TestOrdering", "test_functools_total_ordering", @@ -118,7 +120,10 @@ skip_tests = frozenset( # These tests are probably fine, but the string substitution in this file doesn't get it right ("TestRepr", "test_repr"), ("TestCase", "test_not_in_repr"), + # not possible to add attributes on extension types + ("TestCase", "test_post_init_classmethod"), # Bugs + # ==== ("TestCase", "test_no_options"), # @dataclass() ("TestCase", "test_field_no_default"), # field() ("TestCase", "test_init_in_order"), # field() @@ -166,6 +171,7 @@ skip_tests = frozenset( ("TestCase", "test_init_var_with_default"), # not sure... ("TestReplace", "test_initvar_with_default_value"), # needs investigating # Maybe bugs? + # ========== # non-default argument 'z' follows default argument in dataclass __init__ - this message looks right to me! ("TestCase", "test_class_marker"), # cython.dataclasses.field parameter 'metadata' must be a literal value - possibly not something we can support? 
@@ -202,10 +208,6 @@ version_specific_skips = { 3, 10, ), # needs language support for | operator on types - ("TestCase", "test_post_init_classmethod"): ( - 3, - 10, - ), # not possible to add attributes on extension types } diff --git a/tests/run/test_dataclasses.pyx b/tests/run/test_dataclasses.pyx index 2fb8f3f64..d9e8fd2b0 100644 --- a/tests/run/test_dataclasses.pyx +++ b/tests/run/test_dataclasses.pyx @@ -83,17 +83,6 @@ class Point3Dv1_TestCase_test_not_other_dataclass: y: int = 0 z: int = 0 -@dataclass -@cclass -class C_TestCase_test_post_init_classmethod: - flag = False - x: int - y: int - - @classmethod - def __post_init__(cls): - cls.flag = True - @dataclass @cclass class C_TestCase_test_class_var_no_default: @@ -563,14 +552,6 @@ class TestCase(unittest.TestCase): Point3Dv1 = Point3Dv1_TestCase_test_not_other_dataclass self.assertNotEqual(Point3D(0, 0, 0), Point3Dv1()) - @skip_on_versions_below((3, 10)) - def test_post_init_classmethod(self): - C = C_TestCase_test_post_init_classmethod - self.assertFalse(C.flag) - c = C(3, 4) - self.assertEqual((c.x, c.y), (3, 4)) - self.assertTrue(C.flag) - def test_class_var_no_default(self): C = C_TestCase_test_class_var_no_default self.assertNotIn('x', C.__dict__) -- cgit v1.2.1 From ba2269e1d40bdfe13cee35b79ebc7175b8648ebf Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 14 Sep 2022 17:58:42 +0100 Subject: Fix C++ error from Cython unused (#5029) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I was getting a whole lot on errors along the lines of ``` cfunc_convert_with_memoryview.cpp:11097:8: note: in expansion of macro ‘CYTHON_UNUSED’ 11097 | static CYTHON_UNUSED int __pyx_memoryview_getbuffer(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { | ^~~~~~~~~~~~~ cfunc_convert_with_memoryview.cpp:407:31: note: an attribute that appertains to a type-specifier is ignored 407 | #define CYTHON_UNUSED [[maybe_unused]] ``` This swaps the order of static and 
CYTHON_UNUSED. I think whether the error appears is probably dependent on the exact compiler version. --- Cython/Compiler/Code.py | 4 ++-- Cython/Compiler/Nodes.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cython/Compiler/Code.py b/Cython/Compiler/Code.py index e66e0129d..06f38936d 100644 --- a/Cython/Compiler/Code.py +++ b/Cython/Compiler/Code.py @@ -2103,10 +2103,10 @@ class CCodeWriter(object): if entry.visibility == "private" and not entry.used: #print "...private and not used, skipping", entry.cname ### return - if storage_class: - self.put("%s " % storage_class) if not entry.cf_used: self.put('CYTHON_UNUSED ') + if storage_class: + self.put("%s " % storage_class) if entry.is_cpp_optional: self.put(entry.type.cpp_optional_declaration_code( entry.cname, dll_linkage=dll_linkage)) diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 316a013db..99d6e837e 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -3752,7 +3752,7 @@ class DefNodeWrapper(FuncDefNode): with_pymethdef = False dc = self.return_type.declaration_code(entry.func_cname) - header = "static %s%s(%s)" % (mf, dc, arg_code) + header = "%sstatic %s(%s)" % (mf, dc, arg_code) code.putln("%s; /*proto*/" % header) if proto_only: -- cgit v1.2.1 From 490d3ebaf17fb3ad369cfd913d31de902324f184 Mon Sep 17 00:00:00 2001 From: Maximilian Date: Fri, 16 Sep 2022 12:39:03 +0200 Subject: Use proper SPDX identifier (GH-5032) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index d0d862382..1398c1574 100755 --- a/setup.py +++ b/setup.py @@ -273,7 +273,7 @@ def run_build(): .. 
_Pyrex: https://www.cosc.canterbury.ac.nz/greg.ewing/python/Pyrex/ """), - license='Apache', + license='Apache-2.0', classifiers=[ dev_status(version), "Intended Audience :: Developers", -- cgit v1.2.1 From e0dda4555d1a1fae9d3f5ffc91e601710f7a7744 Mon Sep 17 00:00:00 2001 From: Max Bachmann Date: Sun, 18 Sep 2022 10:06:40 +0200 Subject: Fix incorrect operator lookup for postincrement (#4536) Fix incorrect operator lookup for postincrement. Before this Cython always called the c++ "preincrement" operator. --- Cython/Compiler/ExprNodes.py | 24 +++++++++++++++++++++-- docs/src/userguide/migrating_to_cy30.rst | 13 +++++++++++++ tests/errors/cpp_increment.pyx | 33 ++++++++++++++++++++++++++++++++ 3 files changed, 68 insertions(+), 2 deletions(-) create mode 100644 tests/errors/cpp_increment.pyx diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index c6f5feab1..050bc614e 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -10423,6 +10423,7 @@ class UnopNode(ExprNode): subexprs = ['operand'] infix = True + is_inc_dec_op = False def calculate_constant_result(self): func = compile_time_unary_operators[self.operator] @@ -10534,7 +10535,10 @@ class UnopNode(ExprNode): self.type = PyrexTypes.error_type def analyse_cpp_operation(self, env, overload_check=True): - entry = env.lookup_operator(self.operator, [self.operand]) + operand_types = [self.operand.type] + if self.is_inc_dec_op and not self.is_prefix: + operand_types.append(PyrexTypes.c_int_type) + entry = env.lookup_operator_for_types(self.pos, self.operator, operand_types) if overload_check and not entry: self.type_error() return @@ -10548,7 +10552,12 @@ class UnopNode(ExprNode): else: self.exception_check = '' self.exception_value = '' - cpp_type = self.operand.type.find_cpp_operation_type(self.operator) + if self.is_inc_dec_op and not self.is_prefix: + cpp_type = self.operand.type.find_cpp_operation_type( + self.operator, operand_type=PyrexTypes.c_int_type + ) + else: + 
cpp_type = self.operand.type.find_cpp_operation_type(self.operator) if overload_check and cpp_type is None: error(self.pos, "'%s' operator not defined for %s" % ( self.operator, type)) @@ -10690,6 +10699,17 @@ class DereferenceNode(CUnopNode): class DecrementIncrementNode(CUnopNode): # unary ++/-- operator + is_inc_dec_op = True + + def type_error(self): + if not self.operand.type.is_error: + if self.is_prefix: + error(self.pos, "No match for 'operator%s' (operand type is '%s')" % + (self.operator, self.operand.type)) + else: + error(self.pos, "No 'operator%s(int)' declared for postfix '%s' (operand type is '%s')" % + (self.operator, self.operator, self.operand.type)) + self.type = PyrexTypes.error_type def analyse_c_operation(self, env): if self.operand.type.is_numeric: diff --git a/docs/src/userguide/migrating_to_cy30.rst b/docs/src/userguide/migrating_to_cy30.rst index 50b0b6734..292a2e943 100644 --- a/docs/src/userguide/migrating_to_cy30.rst +++ b/docs/src/userguide/migrating_to_cy30.rst @@ -226,3 +226,16 @@ To make it easier to handle cases where your interpretation of type annotations differs from Cython's, Cython 3 now supports setting the ``annotation_typing`` :ref:`directive ` on a per-class or per-function level. + +C++ postincrement/postdecrement operator +======================================== + +Cython 3 differentiates between pre/post-increment and pre/post-decrement +operators (Cython 0.29 implemented both as pre(in/de)crement operator). +This only has an effect when using ``cython.operator.postdecrement`` / ``cython.operator.postincrement``. 
+When running into an error it is required to add the corresponding operator:: + + cdef cppclass Example: + Example operator++(int) + Example operator--(int) + diff --git a/tests/errors/cpp_increment.pyx b/tests/errors/cpp_increment.pyx new file mode 100644 index 000000000..45e978d95 --- /dev/null +++ b/tests/errors/cpp_increment.pyx @@ -0,0 +1,33 @@ +# mode: error + +cimport cython + +cdef extern from *: + cdef cppclass Foo: + Foo operator++() + Foo operator--() + + cdef cppclass Bar: + Bar operator++(int) + Bar operator--(int) + +cdef void foo(): + cdef Foo f + cdef Bar b + cython.operator.postincrement(f) + cython.operator.postincrement(b) + cython.operator.postdecrement(f) + cython.operator.postdecrement(b) + + cython.operator.preincrement(f) + cython.operator.preincrement(b) + cython.operator.predecrement(f) + cython.operator.predecrement(b) + + +_ERRORS = u""" +17:19: No 'operator++(int)' declared for postfix '++' (operand type is 'Foo') +19:19: No 'operator--(int)' declared for postfix '--' (operand type is 'Foo') +23:19: No match for 'operator++' (operand type is 'Bar') +25:19: No match for 'operator--' (operand type is 'Bar') +""" -- cgit v1.2.1 From 31b41b3ad45e061be7d1c63624727c8abf1d605c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=A0=D0=BE=D0=BC=D0=B0=D0=BD=20=D0=94=D0=BE=D0=BD=D1=87?= =?UTF-8?q?=D0=B5=D0=BD=D0=BA=D0=BE?= Date: Sun, 18 Sep 2022 16:00:43 +0300 Subject: Mark new GitHub releases as pre-releases depending on the tag name (#5015) --- .github/workflows/wheels.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 67913ff48..42d4ee5c1 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -92,6 +92,9 @@ jobs: dist/*macos*.whl dist/*win32*.whl dist/*win_amd64*.whl + prerelease: >- + ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') + || contains(github.ref_name, 'rc') || contains(github.ref_name, 'dev') }} - uses: 
actions/upload-artifact@v3 with: @@ -140,3 +143,6 @@ jobs: files: | dist/*.tar.gz dist/*-none-any.whl + prerelease: >- + ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') + || contains(github.ref_name, 'rc') || contains(github.ref_name, 'dev') }} -- cgit v1.2.1 From 553f6b9a9917f3e37bb7d677ccc2b114609033e2 Mon Sep 17 00:00:00 2001 From: EpigeneMax <111890372+EpigeneMax@users.noreply.github.com> Date: Mon, 19 Sep 2022 13:14:28 +0200 Subject: Fix mangling for .pxd cdef public functions (#5025) This commit fixes a typo that caused `cdef public` functions from .pxd files to be erroneously mangled. The unmangled name is chosen when the definition from the .py file is processed, but on the condition that the cname determined while processing the declaration from the .pxd file was the mangled one (instead of a user-defined one). The commit also adds a test for the ticket. --- Cython/Compiler/Symtab.py | 2 +- tests/compile/pxd_mangling_names.srctree | 46 ++++++++++++++++++++++++++++++++ tests/pypy2_bugs.txt | 3 ++- 3 files changed, 49 insertions(+), 2 deletions(-) create mode 100644 tests/compile/pxd_mangling_names.srctree diff --git a/Cython/Compiler/Symtab.py b/Cython/Compiler/Symtab.py index 92afd8779..4d834d896 100644 --- a/Cython/Compiler/Symtab.py +++ b/Cython/Compiler/Symtab.py @@ -1617,7 +1617,7 @@ class ModuleScope(Scope): entry = self.lookup_here(name) if entry and entry.defined_in_pxd: if entry.visibility != "private": - mangled_cname = self.mangle(Naming.var_prefix, name) + mangled_cname = self.mangle(Naming.func_prefix, name) if entry.cname == mangled_cname: cname = name entry.cname = cname diff --git a/tests/compile/pxd_mangling_names.srctree b/tests/compile/pxd_mangling_names.srctree new file mode 100644 index 000000000..3797fc0f9 --- /dev/null +++ b/tests/compile/pxd_mangling_names.srctree @@ -0,0 +1,46 @@ +# mode: compile +# ticket: 2940 + +PYTHON setup.py build_ext --inplace +PYTHON -c "import a; a.test()" + +######## setup.py ######## 
+ +from Cython.Build import cythonize +from Cython.Distutils.extension import Extension +from distutils.core import setup + +setup( + ext_modules=cythonize([Extension("a", ["a.py", "b.c"])]), +) + +######## a.pxd ######## + +cdef public int foo() + +cdef extern from "b.h": + cpdef int bar() + +######## a.py ######## + +def foo(): + return 42 + +def test(): + assert bar() == 42 + +######## b.h ######## + +#ifndef B_H +#define B_H + +int bar(); + +#endif + +######## b.c ######## + +#include "a.h" + +int bar() { return foo(); } + diff --git a/tests/pypy2_bugs.txt b/tests/pypy2_bugs.txt index 200f0dcf3..1ac25918f 100644 --- a/tests/pypy2_bugs.txt +++ b/tests/pypy2_bugs.txt @@ -16,8 +16,9 @@ run.partial_circular_import # https://foss.heptapod.net/pypy/pypy/issues/3185 run.language_level run.pure_pxd +compile.pxd_mangling_names -# Silly error with doctest matching slightly different string outputs rather than +# Silly error with doctest matching slightly different string outputs rather than # an actual bug but one I can't easily resolve run.with_gil -- cgit v1.2.1 From d0b95046274f6d800b808e479a818cd1d03fba51 Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 19 Sep 2022 12:17:06 +0100 Subject: Fix bug with complex powers of negative numbers (#5014) * Fix bug with complex powers of negative numbers A shortcut was incorrectly applied that returned NaN instead of an imaginary number * Add stress test --- Cython/Utility/Complex.c | 2 +- tests/run/complex_numbers_T305.pyx | 86 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 87 insertions(+), 1 deletion(-) diff --git a/Cython/Utility/Complex.c b/Cython/Utility/Complex.c index 28062a061..15d5f544d 100644 --- a/Cython/Utility/Complex.c +++ b/Cython/Utility/Complex.c @@ -265,7 +265,7 @@ static {{type}} __Pyx_PyComplex_As_{{type_name}}(PyObject* o) { if (a.imag == 0) { if (a.real == 0) { return a; - } else if (b.imag == 0) { + } else if ((b.imag == 0) && (a.real >= 0)) { z.real = pow{{m}}(a.real, b.real); z.imag = 0; 
return z; diff --git a/tests/run/complex_numbers_T305.pyx b/tests/run/complex_numbers_T305.pyx index 310b7233e..2cfc8a0d4 100644 --- a/tests/run/complex_numbers_T305.pyx +++ b/tests/run/complex_numbers_T305.pyx @@ -80,6 +80,8 @@ def test_pow(double complex z, double complex w, tol=None): True >>> test_pow(-0.5, 1j, tol=1e-15) True + >>> test_pow(-1, 0.5, tol=1e-15) + True """ if tol is None: return z**w @@ -264,3 +266,87 @@ cpdef double complex complex_retval(): 1j """ return 1j + +def stress_test(): + """ + Run the main operations on 1000 pseudo-random numbers to + try to spot anything accidentally missed from the test cases + (doesn't cover inf and NaN as inputs though) + >>> stress_test() + """ + cdef double complex x + cdef double complex y + + from random import Random + from math import ldexp + r = Random() + r.seed("I'm a seed") # try to make the test somewhat reproducible + + # copied from https://docs.python.org/3/library/random.html#recipes + # gets evenly distributed random numbers + def full_random(): + mantissa = 0x10_0000_0000_0000 | r.getrandbits(52) + exponent = -53 + x = 0 + while not x: + x = r.getrandbits(32) + exponent += x.bit_length() - 32 + return ldexp(mantissa, exponent) + + for n in range(1, 1001): + if n % 50 == 0: + # strategical insert some 0 values + a = 0 + else: + a = full_random() + if n % 51 == 0: + b = 0 + else: + b = full_random() + if n % 52 == 0: + c = 0 + else: + c = full_random() + if n % 53 == 0: + d = 0 + else: + d = full_random() + + x= a+1j*b + y = c+1j*d + py_dict = dict(x=x, y=y) + + sum_ = x+y + sum_py = eval("x+y", py_dict) + delta_sum = abs(sum_/sum_py - 1) + assert delta_sum < 1e-15, f"{x} {y} {sum_} {sum_py} {delta_sum}" + + minus = x-y + minus_py = eval("x-y", py_dict) + delta_minus = abs(minus/minus_py - 1) + assert delta_minus < 1e-15, f"{x} {y} {minus} {minus_py} {delta_minus}" + + times = x*y + times_py = eval("x*y", py_dict) + delta_times = abs(times/times_py - 1) + assert delta_times < 1e-15, f"{x} {y} 
{times} {times_py} {delta_times}" + + divide = x/y + divide_py = eval("x/y", py_dict) + delta_divide = abs(divide/divide_py - 1) + assert delta_divide < 1e-15, f"{x} {y} {divide} {divide_py} {delta_divide}" + + divide2 = y/x + divide2_py = eval("y/x", py_dict) + delta_divide2 = abs(divide2/divide2_py - 1) + assert delta_divide2 < 1e-15, f"{x} {y} {divide2} {divide2_py} {delta_divide2}" + + pow_ = x**y + pow_py = eval("x**y", py_dict) + delta_pow = abs(pow_/pow_py - 1) + assert delta_pow < 1e-15, f"{x} {y} {pow_} {pow_py} {delta_pow}" + + pow2 = y**x + pow2_py = eval("y**x", py_dict) + delta_pow2 = abs(pow2/pow2_py - 1) + assert delta_pow2 < 1e-15, f"{x} {y} {pow2} {pow2_py} {delta_pow2}" -- cgit v1.2.1 From 44b64546e6c90801bf8dc279d6cf2e33b02e4ed2 Mon Sep 17 00:00:00 2001 From: Alex Date: Fri, 23 Sep 2022 10:09:03 +0200 Subject: Set explicit permissions for GitHub Workflows (GH-5038) --- .github/workflows/ci.yml | 3 +++ .github/workflows/wheels.yml | 3 +++ 2 files changed, 6 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c945ff5eb..00593d0a8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,6 +17,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} cancel-in-progress: true +permissions: + contents: read # to fetch code (actions/checkout) + jobs: ci: strategy: diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 42d4ee5c1..4cf308590 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -34,6 +34,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true +permissions: + contents: write # to create GitHub release (softprops/action-gh-release) + jobs: build_wheels: name: Build wheel for ${{ matrix.python }}-${{ matrix.buildplat[1] }} -- cgit v1.2.1 From 1f74f750fddf202ed2030b5715821e9205f7f5c1 Mon Sep 17 00:00:00 2001 From: da-woods Date: Fri, 23 Sep 
2022 18:17:35 +0100 Subject: Unpack methods in non-dict mapping Minor optimization --- Cython/Utility/MatchCase.c | 24 +++++++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/Cython/Utility/MatchCase.c b/Cython/Utility/MatchCase.c index f76950335..1b92d4312 100644 --- a/Cython/Utility/MatchCase.c +++ b/Cython/Utility/MatchCase.c @@ -564,6 +564,9 @@ static int __Pyx__MatchCase_Mapping_ExtractNonDict(void *__pyx_refnanny, PyObjec PyObject *dummy=NULL, *get=NULL; Py_ssize_t i; int result = 0; +#if CYTHON_UNPACK_METHODS && CYTHON_VECTORCALL + PyObject *get_method = NULL, *get_self = NULL; +#endif dummy = PyObject_CallObject((PyObject *)&PyBaseObject_Type, NULL); if (!dummy) { @@ -574,16 +577,32 @@ static int __Pyx__MatchCase_Mapping_ExtractNonDict(void *__pyx_refnanny, PyObjec result = -1; goto end; } +#if CYTHON_UNPACK_METHODS && CYTHON_VECTORCALL + if (likely(PyMethod_Check(get))) { + // both of these are borrowed + get_method = PyMethod_GET_FUNCTION(get); + get_self = PyMethod_GET_SELF(get); + } +#endif for (i=0; i Date: Fri, 23 Sep 2022 22:10:56 +0100 Subject: Update Nuitka vs cython_freeze in the readme It's genuinely much more appropriate for this. I think I've answered enough support questions about making self-contained executable. 
So if we can point people to the right tool that's a good thing --- README.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 0f56f5661..27d7d8150 100644 --- a/README.rst +++ b/README.rst @@ -102,7 +102,9 @@ Similar projects that have a relevance today include: * Pros: highly language compliant, reasonable performance gains, support for static application linking (similar to - `cython_freeze `_) + `cython_freeze `_ + but with the ability to bundle library dependencies into a self-contained + executable) * Cons: no support for low-level optimisations and typing In comparison to the above, Cython provides -- cgit v1.2.1 From 230d5083704d8d7fe32f1998dcc375b18752b8f8 Mon Sep 17 00:00:00 2001 From: Ruben Vorderman Date: Sat, 24 Sep 2022 09:08:39 +0200 Subject: Fix coverage with packages in "src" layout (#3831) This change makes the coverage plugin match paths like `src/pkg/mod.c` with `src/pkg/mod.pyx`. Co-authored-by: Sviatoslav Sydorenko --- Cython/Coverage.py | 17 +++ tests/run/coverage_cmd_src_pkg_layout.srctree | 177 ++++++++++++++++++++++++++ 2 files changed, 194 insertions(+) create mode 100644 tests/run/coverage_cmd_src_pkg_layout.srctree diff --git a/Cython/Coverage.py b/Cython/Coverage.py index bf1f0034b..147df8050 100644 --- a/Cython/Coverage.py +++ b/Cython/Coverage.py @@ -83,6 +83,23 @@ def _find_dep_file_path(main_file, file_path, relative_path_search=False): rel_file_path = os.path.join(os.path.dirname(main_file), file_path) if os.path.exists(rel_file_path): abs_path = os.path.abspath(rel_file_path) + + abs_no_ext = os.path.splitext(abs_path)[0] + file_no_ext, extension = os.path.splitext(file_path) + # We check if the paths match by matching the directories in reverse order. + # pkg/module.pyx /long/absolute_path/bla/bla/site-packages/pkg/module.c should match. + # this will match the pairs: module-module and pkg-pkg. After which there is nothing left to zip. 
+ abs_no_ext = os.path.normpath(abs_no_ext) + file_no_ext = os.path.normpath(file_no_ext) + matching_paths = zip(reversed(abs_no_ext.split(os.sep)), reversed(file_no_ext.split(os.sep))) + for one, other in matching_paths: + if one != other: + break + else: # No mismatches detected + matching_abs_path = os.path.splitext(main_file)[0] + extension + if os.path.exists(matching_abs_path): + return canonical_filename(matching_abs_path) + # search sys.path for external locations if a valid file hasn't been found if not os.path.exists(abs_path): for sys_path in sys.path: diff --git a/tests/run/coverage_cmd_src_pkg_layout.srctree b/tests/run/coverage_cmd_src_pkg_layout.srctree new file mode 100644 index 000000000..e2c58691a --- /dev/null +++ b/tests/run/coverage_cmd_src_pkg_layout.srctree @@ -0,0 +1,177 @@ +# mode: run +# tag: coverage,trace + +""" +PYTHON -m pip install . +PYTHON setup.py build_ext --inplace +PYTHON -m coverage run --source=pkg coverage_test.py +PYTHON collect_coverage.py +""" + +######## setup.py ######## + +from setuptools import Extension, find_packages, setup +from Cython.Build import cythonize + +MODULES = [ + Extension("pkg.module1", ["src/pkg/module1.pyx"]), + ] + +setup( + name="pkg", + zip_safe=False, + packages=find_packages('src'), + package_data={'pkg': ['*.pxd', '*.pyx']}, + package_dir={'': 'src'}, + ext_modules= cythonize(MODULES) + ) + + +######## .coveragerc ######## +[run] +plugins = Cython.Coverage + +######## src/pkg/__init__.py ######## + +######## src/pkg/module1.pyx ######## +# cython: linetrace=True +# distutils: define_macros=CYTHON_TRACE=1 + +def func1(int a, int b): + cdef int x = 1 # 5 + c = func2(a) + b # 6 + return x + c # 7 + + +def func2(int a): + return a * 2 # 11 + +######## coverage_test.py ######## + +import os.path +from pkg import module1 + + +assert not any( + module1.__file__.endswith(ext) + for ext in '.py .pyc .pyo .pyw .pyx .pxi'.split() +), module.__file__ + + +def run_coverage(module): + assert module.func1(1, 
2) == (1 * 2) + 2 + 1 + assert module.func2(2) == 2 * 2 + + +if __name__ == '__main__': + run_coverage(module1) + + +######## collect_coverage.py ######## + +import re +import sys +import os +import os.path +import subprocess +from glob import iglob + + +def run_coverage_command(*command): + env = dict(os.environ, LANG='', LC_ALL='C') + process = subprocess.Popen( + [sys.executable, '-m', 'coverage'] + list(command), + stdout=subprocess.PIPE, env=env) + stdout, _ = process.communicate() + return stdout + + +def run_report(): + stdout = run_coverage_command('report', '--show-missing') + stdout = stdout.decode('iso8859-1') # 'safe' decoding + lines = stdout.splitlines() + print(stdout) + + module_path = 'module1.pyx' + assert any(module_path in line for line in lines), ( + "'%s' not found in coverage report:\n\n%s" % (module_path, stdout)) + + files = {} + line_iter = iter(lines) + for line in line_iter: + if line.startswith('---'): + break + extend = [''] * 2 + for line in line_iter: + if not line or line.startswith('---'): + continue + name, statements, missed, covered, _missing = (line.split(None, 4) + extend)[:5] + missing = [] + for start, end in re.findall('([0-9]+)(?:-([0-9]+))?', _missing): + if end: + missing.extend(range(int(start), int(end)+1)) + else: + missing.append(int(start)) + files[os.path.basename(name)] = (statements, missed, covered, missing) + assert 5 not in files[module_path][-1], files[module_path] + assert 6 not in files[module_path][-1], files[module_path] + assert 7 not in files[module_path][-1], files[module_path] + assert 11 not in files[module_path][-1], files[module_path] + + +def run_xml_report(): + stdout = run_coverage_command('xml', '-o', '-') + print(stdout) + + import xml.etree.ElementTree as etree + data = etree.fromstring(stdout) + + files = {} + for module in data.iterfind('.//class'): + files[module.get('filename').replace('\\', '/')] = dict( + (int(line.get('number')), int(line.get('hits'))) + for line in 
module.findall('lines/line') + ) + + module_path = 'src/pkg/module1.pyx' + + assert files[module_path][5] > 0, files[module_path] + assert files[module_path][6] > 0, files[module_path] + assert files[module_path][7] > 0, files[module_path] + assert files[module_path][11] > 0, files[module_path] + + +def run_html_report(): + from collections import defaultdict + + stdout = run_coverage_command('html', '-d', 'html') + # coverage 6.1+ changed the order of the attributes => need to parse them separately + _parse_id = re.compile(r'id=["\'][^0-9"\']*(?P[0-9]+)[^0-9"\']*["\']').search + _parse_state = re.compile(r'class=["\'][^"\']*(?Pmis|run|exc)[^"\']*["\']').search + + files = {} + for file_path in iglob('html/*.html'): + with open(file_path) as f: + page = f.read() + report = defaultdict(set) + for line in re.split(r'id=["\']source["\']', page)[-1].splitlines(): + lineno = _parse_id(line) + state = _parse_state(line) + if not lineno or not state: + continue + report[state.group('state')].add(int(lineno.group('id'))) + files[file_path] = report + + file_report = [data for path, data in files.items() if 'module1' in path][0] + executed, missing = file_report["run"], file_report["mis"] + assert executed + assert 5 in executed, executed + assert 6 in executed, executed + assert 7 in executed, executed + assert 11 in executed, executed + + +if __name__ == '__main__': + run_report() + run_xml_report() + run_html_report() -- cgit v1.2.1 From ab1053b2b1171664038488cb6721b9e407fe5679 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 24 Sep 2022 11:24:38 +0100 Subject: Fix arguments like `init=False` being ignored in dataclasses (#4958) Fixes some of https://github.com/cython/cython/issues/4956 --- Cython/Compiler/Dataclass.py | 16 +++---- Tools/make_dataclass_tests.py | 14 +++--- tests/run/test_dataclasses.pyx | 103 +++++++++++++++++++++++++++++++++-------- 3 files changed, 97 insertions(+), 36 deletions(-) diff --git a/Cython/Compiler/Dataclass.py 
b/Cython/Compiler/Dataclass.py index 88e147c58..609520004 100644 --- a/Cython/Compiler/Dataclass.py +++ b/Cython/Compiler/Dataclass.py @@ -269,7 +269,7 @@ def handle_cclass_dataclass(node, dataclass_args, analyse_decs_transform): if not isinstance(v, ExprNodes.BoolNode): error(node.pos, "Arguments passed to cython.dataclasses.dataclass must be True or False") - kwargs[k] = v + kwargs[k] = v.value # remove everything that does not belong into _DataclassParams() kw_only = kwargs.pop("kw_only") @@ -329,12 +329,6 @@ def handle_cclass_dataclass(node, dataclass_args, analyse_decs_transform): def generate_init_code(code, init, node, fields, kw_only): """ - All of these "generate_*_code" functions return a tuple of: - - code string - - placeholder dict (often empty) - - stat list (often empty) - which can then be combined later and processed once. - Notes on CPython generated "__init__": * Implemented in `_init_fn`. * The use of the `dataclasses._HAS_DEFAULT_FACTORY` sentinel value as @@ -346,6 +340,11 @@ def generate_init_code(code, init, node, fields, kw_only): * seen_default and the associated error message are copied directly from Python * Call to user-defined __post_init__ function (if it exists) is copied from CPython. + + Cython behaviour deviates a little here (to be decided if this is right...) + Because the class variable from the assignment does not exist Cython fields will + return None (or whatever their type default is) if not initialized while Python + dataclasses will fall back to looking up the class variable. 
""" if not init or node.scope.lookup_here("__init__"): return @@ -456,9 +455,6 @@ def generate_cmp_code(code, op, funcname, node, fields): names = [name for name, field in fields.items() if (field.compare.value and not field.is_initvar)] - if not names: - return # no comparable types - code.add_code_lines([ "def %s(self, other):" % funcname, " if not isinstance(other, %s):" % node.class_name, diff --git a/Tools/make_dataclass_tests.py b/Tools/make_dataclass_tests.py index 0c02f3d14..6a3cee7ac 100644 --- a/Tools/make_dataclass_tests.py +++ b/Tools/make_dataclass_tests.py @@ -120,6 +120,12 @@ skip_tests = frozenset( # These tests are probably fine, but the string substitution in this file doesn't get it right ("TestRepr", "test_repr"), ("TestCase", "test_not_in_repr"), + ('TestRepr', 'test_no_repr'), + # class variable doesn't exist in Cython so uninitialized variable appears differently - for now this is deliberate + ('TestInit', 'test_no_init'), + # I believe the test works but the ordering functions do appear in the class dict (and default slot wrappers which + # just raise NotImplementedError + ('TestOrdering', 'test_no_order'), # not possible to add attributes on extension types ("TestCase", "test_post_init_classmethod"), # Bugs @@ -139,18 +145,14 @@ skip_tests = frozenset( ("TestReplace", "test_recursive_repr_misc_attrs"), # recursion error ("TestReplace", "test_recursive_repr_indirection"), # recursion error ("TestReplace", "test_recursive_repr_indirection_two"), # recursion error - ("TestCase", "test_0_field_compare"), # should return False - ("TestCase", "test_1_field_compare"), # order=False is apparently ignored - ("TestOrdering", "test_no_order"), # probably order=False being ignored - ("TestRepr", "test_no_repr"), # turning off repr doesn't work ( "TestCase", "test_intermediate_non_dataclass", ), # issue with propagating through intermediate class - ("TestCase", "test_post_init"), # init=False being ignored ( "TestFrozen", ), # raises AttributeError, not 
FrozenInstanceError (may be hard to fix) + ('TestCase', 'test_post_init'), # Works except for AttributeError instead of FrozenInstanceError ("TestReplace", "test_frozen"), # AttributeError not FrozenInstanceError ( "TestCase", @@ -158,7 +160,6 @@ skip_tests = frozenset( ), # doesn't define __setattr__ and just relies on Cython to enforce readonly properties ("TestCase", "test_compare_subclasses"), # wrong comparison ("TestCase", "test_simple_compare"), # wrong comparison - ("TestEq", "test_no_eq"), # wrong comparison (probably eq=False being ignored) ( "TestCase", "test_field_named_self", @@ -210,7 +211,6 @@ version_specific_skips = { ), # needs language support for | operator on types } - class DataclassInDecorators(ast.NodeVisitor): found = False diff --git a/tests/run/test_dataclasses.pyx b/tests/run/test_dataclasses.pyx index d9e8fd2b0..8321b9de0 100644 --- a/tests/run/test_dataclasses.pyx +++ b/tests/run/test_dataclasses.pyx @@ -34,6 +34,36 @@ class C_TestCase_test_field_named_object: class C_TestCase_test_field_named_object_frozen: object: str +@dataclass +@cclass +class C0_TestCase_test_0_field_compare: + pass + +@dataclass(order=False) +@cclass +class C1_TestCase_test_0_field_compare: + pass + +@dataclass(order=True) +@cclass +class C_TestCase_test_0_field_compare: + pass + +@dataclass +@cclass +class C0_TestCase_test_1_field_compare: + x: int + +@dataclass(order=False) +@cclass +class C1_TestCase_test_1_field_compare: + x: int + +@dataclass(order=True) +@cclass +class C_TestCase_test_1_field_compare: + x: int + @dataclass @cclass class C_TestCase_test_not_in_compare: @@ -344,19 +374,6 @@ class R_TestCase_test_dataclasses_pickleable: x: int y: List[int] = field(default_factory=list) -@dataclass(init=False) -@cclass -class C_TestInit_test_no_init: - i: int = 0 - -@dataclass(init=False) -@cclass -class C_TestInit_test_no_init_: - i: int = 2 - - def __init__(self): - self.i = 3 - @dataclass @cclass class C_TestInit_test_overwriting_init: @@ -405,6 +422,19 @@ 
class C_TestRepr_test_overwriting_repr__: def __repr__(self): return 'x' +@dataclass(eq=False) +@cclass +class C_TestEq_test_no_eq: + x: int + +@dataclass(eq=False) +@cclass +class C_TestEq_test_no_eq_: + x: int + + def __eq__(self, other): + return other == 10 + @dataclass @cclass class C_TestEq_test_overwriting_eq: @@ -517,6 +547,39 @@ class TestCase(unittest.TestCase): c = C('foo') self.assertEqual(c.object, 'foo') + def test_0_field_compare(self): + C0 = C0_TestCase_test_0_field_compare + C1 = C1_TestCase_test_0_field_compare + for cls in [C0, C1]: + with self.subTest(cls=cls): + self.assertEqual(cls(), cls()) + for (idx, fn) in enumerate([lambda a, b: a < b, lambda a, b: a <= b, lambda a, b: a > b, lambda a, b: a >= b]): + with self.subTest(idx=idx): + with self.assertRaises(TypeError): + fn(cls(), cls()) + C = C_TestCase_test_0_field_compare + self.assertLessEqual(C(), C()) + self.assertGreaterEqual(C(), C()) + + def test_1_field_compare(self): + C0 = C0_TestCase_test_1_field_compare + C1 = C1_TestCase_test_1_field_compare + for cls in [C0, C1]: + with self.subTest(cls=cls): + self.assertEqual(cls(1), cls(1)) + self.assertNotEqual(cls(0), cls(1)) + for (idx, fn) in enumerate([lambda a, b: a < b, lambda a, b: a <= b, lambda a, b: a > b, lambda a, b: a >= b]): + with self.subTest(idx=idx): + with self.assertRaises(TypeError): + fn(cls(0), cls(0)) + C = C_TestCase_test_1_field_compare + self.assertLess(C(0), C(1)) + self.assertLessEqual(C(0), C(1)) + self.assertLessEqual(C(1), C(1)) + self.assertGreater(C(1), C(0)) + self.assertGreaterEqual(C(1), C(0)) + self.assertGreaterEqual(C(1), C(1)) + def test_not_in_compare(self): C = C_TestCase_test_not_in_compare self.assertEqual(C(), C(0, 20)) @@ -840,12 +903,6 @@ class TestFieldNoAnnotation(unittest.TestCase): class TestInit(unittest.TestCase): - def test_no_init(self): - C = C_TestInit_test_no_init - self.assertEqual(C().i, 0) - C = C_TestInit_test_no_init_ - self.assertEqual(C().i, 3) - def 
test_overwriting_init(self): C = C_TestInit_test_overwriting_init self.assertEqual(C(3).x, 6) @@ -866,6 +923,14 @@ class TestRepr(unittest.TestCase): class TestEq(unittest.TestCase): + def test_no_eq(self): + C = C_TestEq_test_no_eq + self.assertNotEqual(C(0), C(0)) + c = C(3) + self.assertEqual(c, c) + C = C_TestEq_test_no_eq_ + self.assertEqual(C(3), 10) + def test_overwriting_eq(self): C = C_TestEq_test_overwriting_eq self.assertEqual(C(1), 3) -- cgit v1.2.1 From c2a54864217a6d4295c7f3748c15943da898b1c2 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 24 Sep 2022 13:35:25 +0100 Subject: Allow empty args to dataclass and field directives (#4957) Part of the bug fixes in https://github.com/cython/cython/issues/4956 --- Cython/Compiler/Dataclass.py | 12 +++++++----- Cython/Compiler/ParseTreeTransforms.py | 2 +- Tools/make_dataclass_tests.py | 5 +---- tests/run/test_dataclasses.pyx | 20 ++++++++++++++++++++ 4 files changed, 29 insertions(+), 10 deletions(-) diff --git a/Cython/Compiler/Dataclass.py b/Cython/Compiler/Dataclass.py index 609520004..327f11e19 100644 --- a/Cython/Compiler/Dataclass.py +++ b/Cython/Compiler/Dataclass.py @@ -217,14 +217,16 @@ def process_class_get_fields(node): and assignment.function.as_cython_attribute() == "dataclasses.field"): # I believe most of this is well-enforced when it's treated as a directive # but it doesn't hurt to make sure - if (not isinstance(assignment, ExprNodes.GeneralCallNode) - or not isinstance(assignment.positional_args, ExprNodes.TupleNode) - or assignment.positional_args.args - or not isinstance(assignment.keyword_args, ExprNodes.DictNode)): + valid_general_call = (isinstance(assignment, ExprNodes.GeneralCallNode) + and isinstance(assignment.positional_args, ExprNodes.TupleNode) + and not assignment.positional_args.args + and (assignment.keyword_args is None or isinstance(assignment.keyword_args, ExprNodes.DictNode))) + valid_simple_call = (isinstance(assignment, ExprNodes.SimpleCallNode) and not 
assignment.args) + if not (valid_general_call or valid_simple_call): error(assignment.pos, "Call to 'cython.dataclasses.field' must only consist " "of compile-time keyword arguments") continue - keyword_args = assignment.keyword_args.as_python_dict() + keyword_args = assignment.keyword_args.as_python_dict() if valid_general_call and assignment.keyword_args else {} if 'default' in keyword_args and 'default_factory' in keyword_args: error(assignment.pos, "cannot specify both default and default_factory") continue diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py index baf5b4ef7..54d861d8a 100644 --- a/Cython/Compiler/ParseTreeTransforms.py +++ b/Cython/Compiler/ParseTreeTransforms.py @@ -1225,7 +1225,7 @@ class InterpretCompilerDirectives(CythonTransform): return (optname, directivetype(optname, str(args[0].value))) elif directivetype is Options.DEFER_ANALYSIS_OF_ARGUMENTS: # signal to pass things on without processing - return (optname, (args, kwds.as_python_dict())) + return (optname, (args, kwds.as_python_dict() if kwds else {})) else: assert False diff --git a/Tools/make_dataclass_tests.py b/Tools/make_dataclass_tests.py index 6a3cee7ac..25f43cc2d 100644 --- a/Tools/make_dataclass_tests.py +++ b/Tools/make_dataclass_tests.py @@ -111,6 +111,7 @@ skip_tests = frozenset( ("TestInit", "test_base_has_init"), # needs __dict__ for vars # Requires arbitrary attributes to be writeable ("TestCase", "test_post_init_super"), + ('TestCase', 'test_init_in_order'), # Cython being strict about argument types - expected difference ("TestDescriptors", "test_getting_field_calls_get"), ("TestDescriptors", "test_init_calls_set"), @@ -129,10 +130,6 @@ skip_tests = frozenset( # not possible to add attributes on extension types ("TestCase", "test_post_init_classmethod"), # Bugs - # ==== - ("TestCase", "test_no_options"), # @dataclass() - ("TestCase", "test_field_no_default"), # field() - ("TestCase", "test_init_in_order"), # field() 
("TestCase", "test_hash_field_rules"), # compiler crash ("TestCase", "test_class_var"), # not sure but compiler crash ("TestCase", "test_field_order"), # invalid C code (__pyx_base?) diff --git a/tests/run/test_dataclasses.pyx b/tests/run/test_dataclasses.pyx index 8321b9de0..5ea83f82a 100644 --- a/tests/run/test_dataclasses.pyx +++ b/tests/run/test_dataclasses.pyx @@ -64,6 +64,11 @@ class C1_TestCase_test_1_field_compare: class C_TestCase_test_1_field_compare: x: int +@dataclass +@cclass +class C_TestCase_test_field_no_default: + x: int = field() + @dataclass @cclass class C_TestCase_test_not_in_compare: @@ -80,6 +85,11 @@ class Mutable_TestCase_test_deliberately_mutable_defaults: class C_TestCase_test_deliberately_mutable_defaults: x: Mutable_TestCase_test_deliberately_mutable_defaults +@dataclass() +@cclass +class C_TestCase_test_no_options: + x: int + @dataclass @cclass class Point_TestCase_test_not_tuple: @@ -580,6 +590,12 @@ class TestCase(unittest.TestCase): self.assertGreaterEqual(C(1), C(0)) self.assertGreaterEqual(C(1), C(1)) + def test_field_no_default(self): + C = C_TestCase_test_field_no_default + self.assertEqual(C(5).x, 5) + with self.assertRaises(TypeError): + C() + def test_not_in_compare(self): C = C_TestCase_test_not_in_compare self.assertEqual(C(), C(0, 20)) @@ -599,6 +615,10 @@ class TestCase(unittest.TestCase): self.assertEqual(o1.x.l, [1, 2]) self.assertIs(o1.x, o2.x) + def test_no_options(self): + C = C_TestCase_test_no_options + self.assertEqual(C(42).x, 42) + def test_not_tuple(self): Point = Point_TestCase_test_not_tuple self.assertNotEqual(Point(1, 2), (1, 2)) -- cgit v1.2.1 From 61d98ad972d3e2d61ed4c9c86688420cc078a63e Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 24 Sep 2022 14:22:05 +0100 Subject: Handle dataclass 0 and 1 item hashes consistently with Python (#4959) The issue was that hash wasn't generated for a 0 field class, and for a 1 field class it hashed the field rather than a len-1 tuple containing the field. 
I don't think compliance here is strictly necessary, but it's fairly simple and allows us to enable a few more tests. Part of #4956 --- Cython/Compiler/Dataclass.py | 6 +++--- Tools/make_dataclass_tests.py | 12 ----------- tests/run/test_dataclasses.pyx | 45 +++++++++++++++++++++++++++++++++++++++++- 3 files changed, 47 insertions(+), 16 deletions(-) diff --git a/Cython/Compiler/Dataclass.py b/Cython/Compiler/Dataclass.py index 327f11e19..74a5fe01b 100644 --- a/Cython/Compiler/Dataclass.py +++ b/Cython/Compiler/Dataclass.py @@ -572,11 +572,11 @@ def generate_hash_code(code, unsafe_hash, eq, frozen, node, fields): if not field.is_initvar and ( field.compare.value if field.hash.value is None else field.hash.value) ] - if not names: - return # nothing to hash # make a tuple of the hashes - hash_tuple_items = u", ".join(u"hash(self.%s)" % name for name in names) + hash_tuple_items = u", ".join(u"self.%s" % name for name in names) + if hash_tuple_items: + hash_tuple_items += u"," # ensure that one arg form is a tuple # if we're here we want to generate a hash code.add_code_lines([ diff --git a/Tools/make_dataclass_tests.py b/Tools/make_dataclass_tests.py index 25f43cc2d..1d8b8b8d0 100644 --- a/Tools/make_dataclass_tests.py +++ b/Tools/make_dataclass_tests.py @@ -182,18 +182,6 @@ skip_tests = frozenset( "TestCase", "test_class_var_with_default", ), # possibly to do with ClassVar being assigned a field - ( - "TestHash", - "test_unsafe_hash", - ), # not sure if it's a bug or just a difference in how the hash is calculated - ( - "TestHash", - "test_1_field_hash", - ), # not sure if it's a bug or just a difference in how the hash is calculated - ( - "TestHash", - "test_0_field_hash", - ), # not sure if it's a bug or just a difference in how the hash is calculated ( "TestDescriptors", ), # mostly don't work - I think this may be a limitation of cdef classes but needs investigating diff --git a/tests/run/test_dataclasses.pyx b/tests/run/test_dataclasses.pyx index 
5ea83f82a..3f5545fb3 100644 --- a/tests/run/test_dataclasses.pyx +++ b/tests/run/test_dataclasses.pyx @@ -469,6 +469,32 @@ class C_TestEq_test_overwriting_eq__: def __eq__(self, other): return other == 5 +@dataclass(unsafe_hash=True) +@cclass +class C_TestHash_test_unsafe_hash: + x: int + y: str + +@dataclass(frozen=True) +@cclass +class C_TestHash_test_0_field_hash: + pass + +@dataclass(unsafe_hash=True) +@cclass +class C_TestHash_test_0_field_hash_: + pass + +@dataclass(frozen=True) +@cclass +class C_TestHash_test_1_field_hash: + x: int + +@dataclass(unsafe_hash=True) +@cclass +class C_TestHash_test_1_field_hash_: + x: int + class Base1_TestMakeDataclass_test_base: pass @@ -966,7 +992,24 @@ class TestOrdering(unittest.TestCase): pass class TestHash(unittest.TestCase): - pass + + def test_unsafe_hash(self): + C = C_TestHash_test_unsafe_hash + self.assertEqual(hash(C(1, 'foo')), hash((1, 'foo'))) + + def test_0_field_hash(self): + C = C_TestHash_test_0_field_hash + self.assertEqual(hash(C()), hash(())) + C = C_TestHash_test_0_field_hash_ + self.assertEqual(hash(C()), hash(())) + + def test_1_field_hash(self): + C = C_TestHash_test_1_field_hash + self.assertEqual(hash(C(4)), hash((4,))) + self.assertEqual(hash(C(42)), hash((42,))) + C = C_TestHash_test_1_field_hash_ + self.assertEqual(hash(C(4)), hash((4,))) + self.assertEqual(hash(C(42)), hash((42,))) class TestMakeDataclass(unittest.TestCase): pass -- cgit v1.2.1 From 7b5fc0b13a086b2c03708db2e82e8df482d36803 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 24 Sep 2022 17:29:54 +0100 Subject: Recategorise a few dataclass tests --- Tools/make_dataclass_tests.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/Tools/make_dataclass_tests.py b/Tools/make_dataclass_tests.py index 1d8b8b8d0..e8bd1b188 100644 --- a/Tools/make_dataclass_tests.py +++ b/Tools/make_dataclass_tests.py @@ -56,6 +56,7 @@ skip_tests = frozenset( ("TestDescriptors", "test_lookup_on_instance"), ("TestCase", 
"test_default_factory_not_called_if_value_given"), ("TestCase", "test_class_attrs"), + ("TestCase", "test_hash_field_rules"), ("TestStringAnnotations",), # almost all the texts here use local variables # Currently unsupported # ===================== @@ -130,8 +131,9 @@ skip_tests = frozenset( # not possible to add attributes on extension types ("TestCase", "test_post_init_classmethod"), # Bugs - ("TestCase", "test_hash_field_rules"), # compiler crash - ("TestCase", "test_class_var"), # not sure but compiler crash + #====== + # not specifically a dataclass issue - a C int crashes classvar + ("TestCase", "test_class_var"), ("TestCase", "test_field_order"), # invalid C code (__pyx_base?) ( "TestCase", -- cgit v1.2.1 From d6ab97c7027451bc6db64618020ef3a20f1e569f Mon Sep 17 00:00:00 2001 From: da-woods Date: Mon, 26 Sep 2022 17:53:39 +0100 Subject: Update pyxcodewriter.indenter (#5048) It was written before `with` was universally available. Update the interface to make it harder to get wrong. --- Cython/Compiler/Code.py | 22 +++++++--------------- Cython/Compiler/FusedNode.py | 18 ++++++------------ 2 files changed, 13 insertions(+), 27 deletions(-) diff --git a/Cython/Compiler/Code.py b/Cython/Compiler/Code.py index 06f38936d..74060733d 100644 --- a/Cython/Compiler/Code.py +++ b/Cython/Compiler/Code.py @@ -21,7 +21,7 @@ import shutil import textwrap from string import Template from functools import partial -from contextlib import closing +from contextlib import closing, contextmanager from collections import defaultdict from . import Naming @@ -2619,9 +2619,7 @@ class PyrexCodeWriter(object): class PyxCodeWriter(object): """ - Can be used for writing out some Cython code. To use the indenter - functionality, the Cython.Compiler.Importer module will have to be used - to load the code to support python 2.4 + Can be used for writing out some Cython code. 
""" def __init__(self, buffer=None, indent_level=0, context=None, encoding='ascii'): @@ -2637,22 +2635,16 @@ class PyxCodeWriter(object): def dedent(self, levels=1): self.level -= levels + @contextmanager def indenter(self, line): """ - Instead of - - with pyx_code.indenter("for i in range(10):"): - pyx_code.putln("print i") - - write - - if pyx_code.indenter("for i in range(10);"): - pyx_code.putln("print i") - pyx_code.dedent() + with pyx_code.indenter("for i in range(10):"): + pyx_code.putln("print i") """ self.putln(line) self.indent() - return True + yield + self.dedent() def getvalue(self): result = self.buffer.getvalue() diff --git a/Cython/Compiler/FusedNode.py b/Cython/Compiler/FusedNode.py index 5639cdf28..4643cfb65 100644 --- a/Cython/Compiler/FusedNode.py +++ b/Cython/Compiler/FusedNode.py @@ -321,25 +321,21 @@ class FusedCFuncDefNode(StatListNode): def _buffer_check_numpy_dtype_setup_cases(self, pyx_code): "Setup some common cases to match dtypes against specializations" - if pyx_code.indenter("if kind in b'iu':"): + with pyx_code.indenter("if kind in b'iu':"): pyx_code.putln("pass") pyx_code.named_insertion_point("dtype_int") - pyx_code.dedent() - if pyx_code.indenter("elif kind == b'f':"): + with pyx_code.indenter("elif kind == b'f':"): pyx_code.putln("pass") pyx_code.named_insertion_point("dtype_float") - pyx_code.dedent() - if pyx_code.indenter("elif kind == b'c':"): + with pyx_code.indenter("elif kind == b'c':"): pyx_code.putln("pass") pyx_code.named_insertion_point("dtype_complex") - pyx_code.dedent() - if pyx_code.indenter("elif kind == b'O':"): + with pyx_code.indenter("elif kind == b'O':"): pyx_code.putln("pass") pyx_code.named_insertion_point("dtype_object") - pyx_code.dedent() match = "dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'" no_match = "dest_sig[{{dest_sig_idx}}] = None" @@ -376,11 +372,10 @@ class FusedCFuncDefNode(StatListNode): if final_type.is_pythran_expr: cond += ' and arg_is_pythran_compatible' - if 
codewriter.indenter("if %s:" % cond): + with codewriter.indenter("if %s:" % cond): #codewriter.putln("print 'buffer match found based on numpy dtype'") codewriter.putln(self.match) codewriter.putln("break") - codewriter.dedent() def _buffer_parse_format_string_check(self, pyx_code, decl_code, specialized_type, env): @@ -697,7 +692,7 @@ class FusedCFuncDefNode(StatListNode): self._unpack_argument(pyx_code) # 'unrolled' loop, first match breaks out of it - if pyx_code.indenter("while 1:"): + with pyx_code.indenter("while 1:"): if normal_types: self._fused_instance_checks(normal_types, pyx_code, env) if buffer_types or pythran_types: @@ -709,7 +704,6 @@ class FusedCFuncDefNode(StatListNode): else: pyx_code.putln(self.no_match) pyx_code.putln("break") - pyx_code.dedent() fused_index += 1 all_buffer_types.update(buffer_types) -- cgit v1.2.1 From 673dab20eea77b86273d850c8c94b6d3fe3c5a48 Mon Sep 17 00:00:00 2001 From: Ralf Gommers Date: Mon, 26 Sep 2022 23:25:24 +0200 Subject: Don't use unsupported GCC pragma's with Intel compilers The Intel compilers define `__GNUC__` but do not implement the warning pragma's used by Cython in `TypeConversion.c`. That results in a large amount of warnings like: ``` scipy/special/_ufuncs_cxx.cpython-310-x86_64-linux-gnu.so.p/_ufuncs_cxx.cpp(4940): warning #2282: unrecognized GCC pragma #pragma GCC diagnostic push ^ scipy/special/_ufuncs_cxx.cpython-310-x86_64-linux-gnu.so.p/_ufuncs_cxx.cpp(4941): warning #2282: unrecognized GCC pragma #pragma GCC diagnostic ignored "-Wconversion" ^ scipy/special/_ufuncs_cxx.cpython-310-x86_64-linux-gnu.so.p/_ufuncs_cxx.cpp(4945): warning #2282: unrecognized GCC pragma #pragma GCC diagnostic pop ``` Hence disable the use of these pragma's when we see an Intel compiler is used. 
--- Cython/Utility/TypeConversion.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/TypeConversion.c b/Cython/Utility/TypeConversion.c index 7a7bf0f79..a76cfde06 100644 --- a/Cython/Utility/TypeConversion.c +++ b/Cython/Utility/TypeConversion.c @@ -454,7 +454,7 @@ static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { // GCC diagnostic pragmas were introduced in GCC 4.6 // Used to silence conversion warnings that are ok but cannot be avoided. -#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) +#if !defined(__INTEL_COMPILER) && defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) #define __Pyx_HAS_GCC_DIAGNOSTIC #endif -- cgit v1.2.1 From 1ba6a55579e57aa9f104d01d1f962886d447ec9a Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 27 Sep 2022 18:04:07 +0100 Subject: Fix recursive repr on cdef dataclasses (#5045) The dataclass module specifically guards repr from being invoked recursively. I use a slightly different method here to do the same thing. 
Part of https://github.com/cython/cython/issues/4956 --- Cython/Compiler/Dataclass.py | 37 +++++++++++++++++-- Tools/make_dataclass_tests.py | 26 ++++++++------ tests/run/test_dataclasses.pyx | 82 ++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 132 insertions(+), 13 deletions(-) diff --git a/Cython/Compiler/Dataclass.py b/Cython/Compiler/Dataclass.py index 74a5fe01b..e202ed6eb 100644 --- a/Cython/Compiler/Dataclass.py +++ b/Cython/Compiler/Dataclass.py @@ -429,7 +429,7 @@ def generate_init_code(code, init, node, fields, kw_only): def generate_repr_code(code, repr, node, fields): """ - The CPython implementation is just: + The core of the CPython implementation is just: ['return self.__class__.__qualname__ + f"(' + ', '.join([f"{f.name}={{self.{f.name}!r}}" for f in fields]) + @@ -437,18 +437,49 @@ def generate_repr_code(code, repr, node, fields): The only notable difference here is self.__class__.__qualname__ -> type(self).__name__ which is because Cython currently supports Python 2. + + However, it also has some guards for recursive repr invokations. In the standard + library implementation they're done with a wrapper decorator that captures a set + (with the set keyed by id and thread). Here we create a set as a thread local + variable and key only by id. """ if not repr or node.scope.lookup("__repr__"): return + # The recursive guard is likely a little costly, so skip it if possible. 
+ # is_gc_simple defines where it can contain recursive objects + needs_recursive_guard = False + for name in fields.keys(): + entry = node.scope.lookup(name) + type_ = entry.type + if type_.is_memoryviewslice: + type_ = type_.dtype + if not type_.is_pyobject: + continue # no GC + if not type_.is_gc_simple: + needs_recursive_guard = True + break + + if needs_recursive_guard: + code.add_code_line("__pyx_recursive_repr_guard = __import__('threading').local()") + code.add_code_line("__pyx_recursive_repr_guard.running = set()") code.add_code_line("def __repr__(self):") + if needs_recursive_guard: + code.add_code_line(" key = id(self)") + code.add_code_line(" guard_set = self.__pyx_recursive_repr_guard.running") + code.add_code_line(" if key in guard_set: return '...'") + code.add_code_line(" guard_set.add(key)") + code.add_code_line(" try:") strs = [u"%s={self.%s!r}" % (name, name) for name, field in fields.items() if field.repr.value and not field.is_initvar] format_string = u", ".join(strs) - code.add_code_line(u' name = getattr(type(self), "__qualname__", type(self).__name__)') - code.add_code_line(u" return f'{name}(%s)'" % format_string) + code.add_code_line(u' name = getattr(type(self), "__qualname__", type(self).__name__)') + code.add_code_line(u" return f'{name}(%s)'" % format_string) + if needs_recursive_guard: + code.add_code_line(" finally:") + code.add_code_line(" guard_set.remove(key)") def generate_cmp_code(code, op, funcname, node, fields): diff --git a/Tools/make_dataclass_tests.py b/Tools/make_dataclass_tests.py index e8bd1b188..c39a4b2db 100644 --- a/Tools/make_dataclass_tests.py +++ b/Tools/make_dataclass_tests.py @@ -139,11 +139,6 @@ skip_tests = frozenset( "TestCase", "test_overwrite_fields_in_derived_class", ), # invalid C code (__pyx_base?) 
- ("TestReplace", "test_recursive_repr"), # recursion error - ("TestReplace", "test_recursive_repr_two_attrs"), # recursion error - ("TestReplace", "test_recursive_repr_misc_attrs"), # recursion error - ("TestReplace", "test_recursive_repr_indirection"), # recursion error - ("TestReplace", "test_recursive_repr_indirection_two"), # recursion error ( "TestCase", "test_intermediate_non_dataclass", @@ -233,12 +228,11 @@ class SubstituteNameString(ast.NodeTransformer): if node.value.find("") != -1: import re - new_value = re.sub("[\w.]*", "", node.value) + new_value = new_value2 = re.sub("[\w.]*", "", node.value) for key, value in self.substitutions.items(): - new_value2 = re.sub(f"(? Date: Sat, 1 Oct 2022 20:47:41 +0800 Subject: Implement reversed C++ iteration (#5002) * support C++ classes with rbegin/rend implemented for reversed iteration, and * solve the compilation error when the type of temporary variable used to store the result of iterator has 'constness'. One example of 2. is when iterating std::map, Cython uses std::map::value_type as the type of temporary variable to store the value of an iterator. However, std::map::value_type will deduce to std::pair, which cannot be reused within a loop as its first component is const. 
--- Cython/Compiler/ExprNodes.py | 94 ++++++++++++++++++++++++++++++++++++-------- tests/run/cpp_iterators.pyx | 87 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 165 insertions(+), 16 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 050bc614e..7a98429d4 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -2905,6 +2905,9 @@ class IteratorNode(ScopedExprNode): self.type = self.sequence.type elif self.sequence.type.is_cpp_class: return CppIteratorNode(self.pos, sequence=self.sequence).analyse_types(env) + elif self.is_reversed_cpp_iteration(): + sequence = self.sequence.arg_tuple.args[0].arg + return CppIteratorNode(self.pos, sequence=sequence, reversed=True).analyse_types(env) else: self.sequence = self.sequence.coerce_to_pyobject(env) if self.sequence.type in (list_type, tuple_type): @@ -2919,6 +2922,25 @@ class IteratorNode(ScopedExprNode): PyrexTypes.CFuncTypeArg("it", PyrexTypes.py_object_type, None), ])) + def is_reversed_cpp_iteration(self): + """ + Returns True if the 'reversed' function is applied to a C++ iterable. + + This supports C++ classes with reverse_iterator implemented. + """ + if not (isinstance(self.sequence, SimpleCallNode) and + self.sequence.arg_tuple and len(self.sequence.arg_tuple.args) == 1): + return False + func = self.sequence.function + if func.is_name and func.name == "reversed": + if not func.entry.is_builtin: + return False + arg = self.sequence.arg_tuple.args[0] + if isinstance(arg, CoercionNode) and arg.arg.is_name: + arg = arg.arg.entry + return arg.type.is_cpp_class + return False + def type_dependencies(self, env): return self.sequence.type_dependencies(self.expr_scope or env) @@ -3082,25 +3104,30 @@ class CppIteratorNode(ExprNode): cpp_attribute_op = "." 
extra_dereference = "" is_temp = True + reversed = False subexprs = ['sequence'] + def get_iterator_func_names(self): + return ("begin", "end") if not self.reversed else ("rbegin", "rend") + def analyse_types(self, env): sequence_type = self.sequence.type if sequence_type.is_ptr: sequence_type = sequence_type.base_type - begin = sequence_type.scope.lookup("begin") - end = sequence_type.scope.lookup("end") + begin_name, end_name = self.get_iterator_func_names() + begin = sequence_type.scope.lookup(begin_name) + end = sequence_type.scope.lookup(end_name) if (begin is None or not begin.type.is_cfunction or begin.type.args): - error(self.pos, "missing begin() on %s" % self.sequence.type) + error(self.pos, "missing %s() on %s" % (begin_name, self.sequence.type)) self.type = error_type return self if (end is None or not end.type.is_cfunction or end.type.args): - error(self.pos, "missing end() on %s" % self.sequence.type) + error(self.pos, "missing %s() on %s" % (end_name, self.sequence.type)) self.type = error_type return self iter_type = begin.type.return_type @@ -3111,37 +3138,40 @@ class CppIteratorNode(ExprNode): self.pos, "!=", [iter_type, end.type.return_type]) is None: - error(self.pos, "missing operator!= on result of begin() on %s" % self.sequence.type) + error(self.pos, "missing operator!= on result of %s() on %s" % (begin_name, self.sequence.type)) self.type = error_type return self if env.lookup_operator_for_types(self.pos, '++', [iter_type]) is None: - error(self.pos, "missing operator++ on result of begin() on %s" % self.sequence.type) + error(self.pos, "missing operator++ on result of %s() on %s" % (begin_name, self.sequence.type)) self.type = error_type return self if env.lookup_operator_for_types(self.pos, '*', [iter_type]) is None: - error(self.pos, "missing operator* on result of begin() on %s" % self.sequence.type) + error(self.pos, "missing operator* on result of %s() on %s" % (begin_name, self.sequence.type)) self.type = error_type return self 
self.type = iter_type elif iter_type.is_ptr: if not (iter_type == end.type.return_type): - error(self.pos, "incompatible types for begin() and end()") + error(self.pos, "incompatible types for %s() and %s()" % (begin_name, end_name)) self.type = iter_type else: - error(self.pos, "result type of begin() on %s must be a C++ class or pointer" % self.sequence.type) + error(self.pos, "result type of %s() on %s must be a C++ class or pointer" % (begin_name, self.sequence.type)) self.type = error_type return self def generate_result_code(self, code): sequence_type = self.sequence.type + begin_name, _ = self.get_iterator_func_names() # essentially 3 options: if self.sequence.is_simple(): # 1) Sequence can be accessed directly, like a name; # assigning to it may break the container, but that's the responsibility # of the user - code.putln("%s = %s%sbegin();" % (self.result(), - self.sequence.result(), - self.cpp_attribute_op)) + code.putln("%s = %s%s%s();" % ( + self.result(), + self.sequence.result(), + self.cpp_attribute_op, + begin_name)) else: # (while it'd be nice to limit the scope of the loop temp, it's essentially # impossible to do while supporting generators) @@ -3159,17 +3189,22 @@ class CppIteratorNode(ExprNode): code.putln("%s = %s%s;" % (self.cpp_sequence_cname, "&" if temp_type.is_ptr else "", self.sequence.move_result_rhs())) - code.putln("%s = %s%sbegin();" % (self.result(), self.cpp_sequence_cname, - self.cpp_attribute_op)) + code.putln("%s = %s%s%s();" % ( + self.result(), + self.cpp_sequence_cname, + self.cpp_attribute_op, + begin_name)) def generate_iter_next_result_code(self, result_name, code): # end call isn't cached to support containers that allow adding while iterating # (much as this is usually a bad idea) - code.putln("if (!(%s%s != %s%send())) break;" % ( + _, end_name = self.get_iterator_func_names() + code.putln("if (!(%s%s != %s%s%s())) break;" % ( self.extra_dereference, self.result(), self.cpp_sequence_cname or self.sequence.result(), - 
self.cpp_attribute_op)) + self.cpp_attribute_op, + end_name)) code.putln("%s = *%s%s;" % ( result_name, self.extra_dereference, @@ -3205,6 +3240,32 @@ class CppIteratorNode(ExprNode): ExprNode.free_temps(self, code) +def remove_const(item_type): + """ + Removes the constness of a given type and its underlying templates + if any. + + This is to solve the compilation error when the temporary variable used to + store the result of an iterator cannot be changed due to its constness. + For example, the value_type of std::map, which will also be the type of + the temporarry variable, is std::pair. This means the first + component of the variable cannot be reused to store the result of each + iteration, which leads to a compilation error. + """ + if item_type.is_const: + item_type = item_type.cv_base_type + if item_type.is_typedef: + item_type = remove_const(item_type.typedef_base_type) + if item_type.is_cpp_class and item_type.templates: + templates = [remove_const(t) if t.is_const else t for t in item_type.templates] + template_type = item_type.template_type + item_type = PyrexTypes.CppClassType( + template_type.name, template_type.scope, + template_type.cname, template_type.base_classes, + templates, template_type) + return item_type + + class NextNode(AtomicExprNode): # Used as part of for statement implementation. 
# Implements result = next(iterator) @@ -3247,6 +3308,7 @@ class NextNode(AtomicExprNode): def analyse_types(self, env): self.type = self.infer_type(env, self.iterator.type) + self.type = remove_const(self.type) self.is_temp = 1 return self diff --git a/tests/run/cpp_iterators.pyx b/tests/run/cpp_iterators.pyx index cc1e185cc..81048d0b3 100644 --- a/tests/run/cpp_iterators.pyx +++ b/tests/run/cpp_iterators.pyx @@ -2,6 +2,10 @@ # tag: cpp, werror, no-cpp-locals from libcpp.deque cimport deque +from libcpp.list cimport list as stdlist +from libcpp.map cimport map as stdmap +from libcpp.set cimport set as stdset +from libcpp.string cimport string from libcpp.vector cimport vector from cython.operator cimport dereference as deref @@ -268,3 +272,86 @@ def test_iteration_over_attribute_of_call(): for i in get_object_with_iterable_attribute().vec: print(i) +def test_iteration_over_reversed_list(py_v): + """ + >>> test_iteration_over_reversed_list([2, 4, 6]) + 6 + 4 + 2 + """ + cdef stdlist[int] lint + for e in py_v: + lint.push_back(e) + for e in reversed(lint): + print(e) + +def test_iteration_over_reversed_map(py_v): + """ + >>> test_iteration_over_reversed_map([(1, 10), (2, 20), (3, 30)]) + 3 30 + 2 20 + 1 10 + """ + cdef stdmap[int, int] m + for k, v in py_v: + m[k] = v + for k, v in reversed(m): + print("%s %s" % (k, v)) + +def test_iteration_over_reversed_set(py_v): + """ + >>> test_iteration_over_reversed_set([1, 2, 3]) + 3 + 2 + 1 + """ + cdef stdset[int] s + for e in py_v: + s.insert(e) + for e in reversed(s): + print(e) + +def test_iteration_over_reversed_string(): + """ + >>> test_iteration_over_reversed_string() + n + o + h + t + y + c + """ + cdef string cppstr = "cython" + for c in reversed(cppstr): + print(chr(c)) + +def test_iteration_over_reversed_vector(py_v): + """ + >>> test_iteration_over_reversed_vector([1, 2, 3]) + 3 + 2 + 1 + """ + cdef vector[int] vint + for e in py_v: + vint.push_back(e) + for e in reversed(vint): + print(e) + +def 
test_non_built_in_reversed_function(py_v): + """ + >>> test_non_built_in_reversed_function([1, 3, 5]) + Non-built-in reversed called. + 5 + 3 + 1 + """ + def reversed(arg): + print("Non-built-in reversed called.") + return arg[::-1] + + cdef vector[int] vint + for e in py_v: + vint.push_back(e) + for e in reversed(vint): + print(e) -- cgit v1.2.1 From 0c8dea13946833606ab7e290d51cd12aee077c04 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Sat, 1 Oct 2022 15:49:29 +0200 Subject: Fix failing relative imports in __init__ files. (#4552) Fixes lookup of from `. cimport modulename` when called in `__init__.*` files - previously it looked in the parent directory of the package while now it looks in the package. --- Cython/Compiler/CythonScope.py | 2 +- Cython/Compiler/Main.py | 35 ++- Cython/Compiler/Nodes.py | 13 +- Cython/Compiler/Symtab.py | 27 +-- tests/errors/e_relative_cimport.pyx | 4 +- tests/run/relative_cimport_compare.srctree | 327 +++++++++++++++++++++++++++++ 6 files changed, 383 insertions(+), 25 deletions(-) create mode 100644 tests/run/relative_cimport_compare.srctree diff --git a/Cython/Compiler/CythonScope.py b/Cython/Compiler/CythonScope.py index 08f3da9eb..f73be0070 100644 --- a/Cython/Compiler/CythonScope.py +++ b/Cython/Compiler/CythonScope.py @@ -51,7 +51,7 @@ class CythonScope(ModuleScope): def find_module(self, module_name, pos): error("cython.%s is not available" % module_name, pos) - def find_submodule(self, module_name): + def find_submodule(self, module_name, as_package=False): entry = self.entries.get(module_name, None) if not entry: self.load_cythonscope() diff --git a/Cython/Compiler/Main.py b/Cython/Compiler/Main.py index 28a12c8d5..eecd49feb 100644 --- a/Cython/Compiler/Main.py +++ b/Cython/Compiler/Main.py @@ -143,6 +143,29 @@ class Context(object): def nonfatal_error(self, exc): return Errors.report_error(exc) + def _split_qualified_name(self, qualified_name): + # Splits qualified_name into parts in form of 2-tuples: (PART_NAME, 
IS_PACKAGE). + qualified_name_parts = qualified_name.split('.') + last_part = qualified_name_parts.pop() + qualified_name_parts = [(p, True) for p in qualified_name_parts] + if last_part != '__init__': + # If Last part is __init__, then it is omitted. Otherwise, we need to check whether we can find + # __init__.pyx/__init__.py file to determine if last part is package or not. + is_package = False + for suffix in ('.py', '.pyx'): + path = self.search_include_directories( + qualified_name, suffix=suffix, source_pos=None, source_file_path=None) + if path: + is_package = self._is_init_file(path) + break + + qualified_name_parts.append((last_part, is_package)) + return qualified_name_parts + + @staticmethod + def _is_init_file(path): + return os.path.basename(path) in ('__init__.pyx', '__init__.py', '__init__.pxd') if path else False + def find_module(self, module_name, relative_to=None, pos=None, need_pxd=1, absolute_fallback=True): # Finds and returns the module scope corresponding to @@ -182,16 +205,16 @@ class Context(object): if not scope: pxd_pathname = self.find_pxd_file(qualified_name, pos) if pxd_pathname: - scope = relative_to.find_submodule(module_name) + is_package = self._is_init_file(pxd_pathname) + scope = relative_to.find_submodule(module_name, as_package=is_package) if not scope: if debug_find_module: print("...trying absolute import") if absolute_fallback: qualified_name = module_name scope = self - for name in qualified_name.split("."): - scope = scope.find_submodule(name) - + for name, is_package in self._split_qualified_name(qualified_name): + scope = scope.find_submodule(name, as_package=is_package) if debug_find_module: print("...scope = %s" % scope) if not scope.pxd_file_loaded: @@ -321,12 +344,12 @@ class Context(object): # Look up a top-level module. Returns None if not found. 
return self.modules.get(name, None) - def find_submodule(self, name): + def find_submodule(self, name, as_package=False): # Find a top-level module, creating a new one if needed. scope = self.lookup_submodule(name) if not scope: scope = ModuleScope(name, - parent_module = None, context = self) + parent_module = None, context = self, is_package=as_package) self.modules[name] = scope return scope diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 99d6e837e..92a0cfea9 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -8748,9 +8748,16 @@ class FromCImportStatNode(StatNode): if not env.is_module_scope: error(self.pos, "cimport only allowed at module level") return - if self.relative_level and self.relative_level > env.qualified_name.count('.'): - error(self.pos, "relative cimport beyond main package is not allowed") - return + qualified_name_components = env.qualified_name.count('.') + 1 + if self.relative_level: + if self.relative_level > qualified_name_components: + # 1. case: importing beyond package: from .. import pkg + error(self.pos, "relative cimport beyond main package is not allowed") + return + elif self.relative_level == qualified_name_components and not env.is_package: + # 2. case: importing from same level but current dir is not package: from . import module + error(self.pos, "relative cimport from non-package directory is not allowed") + return module_scope = env.find_module(self.module_name, self.pos, relative_level=self.relative_level) module_name = module_scope.qualified_name env.add_imported_module(module_scope) diff --git a/Cython/Compiler/Symtab.py b/Cython/Compiler/Symtab.py index 4d834d896..765652d0d 100644 --- a/Cython/Compiler/Symtab.py +++ b/Cython/Compiler/Symtab.py @@ -1298,19 +1298,13 @@ class ModuleScope(Scope): is_cython_builtin = 0 old_style_globals = 0 - def __init__(self, name, parent_module, context): + def __init__(self, name, parent_module, context, is_package=False): from . 
import Builtin self.parent_module = parent_module outer_scope = Builtin.builtin_scope Scope.__init__(self, name, outer_scope, parent_module) - if name == "__init__": - # Treat Spam/__init__.pyx specially, so that when Python loads - # Spam/__init__.so, initSpam() is defined. - self.module_name = parent_module.module_name - self.is_package = True - else: - self.module_name = name - self.is_package = False + self.is_package = is_package + self.module_name = name self.module_name = EncodedString(self.module_name) self.context = context self.module_cname = Naming.module_cname @@ -1423,9 +1417,16 @@ class ModuleScope(Scope): # explicit relative cimport # error of going beyond top-level is handled in cimport node relative_to = self - while relative_level > 0 and relative_to: + + top_level = 1 if self.is_package else 0 + # * top_level == 1 when file is __init__.pyx, current package (relative_to) is the current module + # i.e. dot in `from . import ...` points to the current package + # * top_level == 0 when file is regular module, current package (relative_to) is parent module + # i.e. dot in `from . import ...` points to the package where module is placed + while relative_level > top_level and relative_to: relative_to = relative_to.parent_module relative_level -= 1 + elif relative_level != 0: # -1 or None: try relative cimport first, then absolute relative_to = self.parent_module @@ -1435,7 +1436,7 @@ class ModuleScope(Scope): return module_scope.context.find_module( module_name, relative_to=relative_to, pos=pos, absolute_fallback=absolute_fallback) - def find_submodule(self, name): + def find_submodule(self, name, as_package=False): # Find and return scope for a submodule of this module, # creating a new empty one if necessary. Doesn't parse .pxd. if '.' 
in name: @@ -1444,10 +1445,10 @@ class ModuleScope(Scope): submodule = None scope = self.lookup_submodule(name) if not scope: - scope = ModuleScope(name, parent_module=self, context=self.context) + scope = ModuleScope(name, parent_module=self, context=self.context, is_package=True if submodule else as_package) self.module_entries[name] = scope if submodule: - scope = scope.find_submodule(submodule) + scope = scope.find_submodule(submodule, as_package=as_package) return scope def lookup_submodule(self, name): diff --git a/tests/errors/e_relative_cimport.pyx b/tests/errors/e_relative_cimport.pyx index 36a134411..709cbd71d 100644 --- a/tests/errors/e_relative_cimport.pyx +++ b/tests/errors/e_relative_cimport.pyx @@ -9,7 +9,7 @@ from . cimport e_relative_cimport _ERRORS=""" 4:0: relative cimport beyond main package is not allowed -5:0: relative cimport beyond main package is not allowed +5:0: relative cimport from non-package directory is not allowed 6:0: relative cimport beyond main package is not allowed -7:0: relative cimport beyond main package is not allowed +7:0: relative cimport from non-package directory is not allowed """ diff --git a/tests/run/relative_cimport_compare.srctree b/tests/run/relative_cimport_compare.srctree new file mode 100644 index 000000000..77b6fb22e --- /dev/null +++ b/tests/run/relative_cimport_compare.srctree @@ -0,0 +1,327 @@ +# mode: run +# tag: cimport, pep489 + +PYTHON setup.py build_ext --inplace +PYTHON -c "import test_import" +PYTHON -c "import test_cimport" + + +######## setup.py ######## + +from distutils.core import setup +from Cython.Build import cythonize +from Cython.Distutils.extension import Extension + +setup( + ext_modules=cythonize('**/*.pyx'), +) + +######## test_import.py ######## +import sys +SUPPORTS_PEP_489 = sys.version_info > (3, 5) +if SUPPORTS_PEP_489: + import cypkg.sub.submodule + import cypkg.sub.sub2.sub2module + import pypkg.module + import pypkg.sub.submodule + import pypkg.sub.sub2.sub2module + +######## 
test_cimport.py ######## +import sys +SUPPORTS_PEP_489 = sys.version_info > (3, 5) +if SUPPORTS_PEP_489: + import module + + +######## module.pyx ######## +cimport cypkg + +cdef cypkg.a_type a1 = 3 +assert a1 == 3 +cdef cypkg.a.a_type a2 = 3 +assert a2 == 3 +cdef cypkg.b_type b1 = 4 +assert b1 == 4 +cdef cypkg.b.b_type b2 = 4 +assert b2 == 4 + + +cimport cypkg.sub +cdef cypkg.sub.a_type a3 = 3 +assert a3 == 3 +cdef cypkg.sub.a.a_type a4 = 3 +assert a4 == 3 +cdef cypkg.sub.b_type b3 = 4 +assert b3 == 4 +cdef cypkg.sub.b.b_type b4 = 4 +assert b4 == 4 + + +cimport cypkg.sub.sub2 +cdef cypkg.sub.sub2.a_type a5 = 3 +assert a5 == 3 +cdef cypkg.sub.sub2.a.a_type a6 = 3 +assert a6 == 3 +cdef cypkg.sub.sub2.b_type b5 = 4 +assert b5 == 4 +cdef cypkg.sub.sub2.b.b_type b6 = 4 +assert b6 == 4 + +import pypkg +assert pypkg.a_value == 3 +assert pypkg.a.a_value == 3 +assert pypkg.b_value == 4 +assert pypkg.b.b_value == 4 + + +import pypkg.sub +assert pypkg.sub.a_value == 3 +assert pypkg.sub.a.a_value == 3 +assert pypkg.sub.b_value == 4 +assert pypkg.sub.b.b_value == 4 + + +import cypkg.sub.sub2 +assert pypkg.sub.sub2.a_value == 3 +assert pypkg.sub.sub2.a.a_value == 3 +assert pypkg.sub.sub2.b_value == 4 +assert pypkg.sub.sub2.b.b_value == 4 + + +######## cypkg/__init__.pxd ######## + +cimport cypkg.sub +cimport cypkg.sub.sub2 + +from cypkg.sub cimport a +from cypkg.sub.a cimport a_type +from cypkg.sub.sub2 cimport b +from cypkg.sub.sub2.b cimport b_type + +from . cimport sub +from .sub cimport a +from .sub.a cimport a_type +from .sub.sub2 cimport b +from .sub.sub2.b cimport b_type + +######## cypkg/__init__.pyx ######## + + +######## cypkg/module.pyx ######## + +cimport cypkg +cimport cypkg.sub +cimport cypkg.sub.sub2 +from cypkg.sub cimport a +from cypkg.sub.a cimport a_type +from cypkg.sub.sub2 cimport b +from cypkg.sub.sub2.b cimport b_type + +from . 
cimport sub +from .sub cimport a +from .sub.a cimport a_type +from .sub.sub2 cimport b +from .sub.sub2.b cimport b_type + + +######## cypkg/sub/__init__.pxd ######## + +cimport cypkg +from cypkg.sub cimport a +from cypkg.sub.a cimport a_type +from cypkg.sub.sub2 cimport b +from cypkg.sub.sub2.b cimport b_type + +from . cimport a +from .a cimport a_type + +from .. cimport sub +from ..sub cimport a +from ..sub.a cimport a_type +from ..sub.sub2 cimport b +from ..sub.sub2.b cimport b_type + +######## cypkg/sub/__init__.pyx ######## + +######## cypkg/sub/a.pxd ######## + +ctypedef int a_type + +######## cypkg/sub/submodule.pyx ######## + +cimport cypkg +cimport cypkg.sub +from cypkg.sub cimport a +from cypkg.sub.a cimport a_type +from cypkg.sub.sub2 cimport b +from cypkg.sub.sub2.b cimport b_type + +from . cimport a +from .a cimport a_type + +from .. cimport sub +from ..sub cimport a +from ..sub.a cimport a_type +from ..sub.sub2 cimport b +from ..sub.sub2.b cimport b_type + +######## cypkg/sub/sub2/__init__.pxd ######## + +cimport cypkg +cimport cypkg.sub +from cypkg.sub cimport a +from cypkg.sub.a cimport a_type +from cypkg.sub.sub2 cimport b +from cypkg.sub.sub2.b cimport b_type + +from ..sub2 cimport b +from ..sub2.b cimport b_type + +from ...sub cimport a +from ...sub.a cimport a_type + +from ... cimport sub +from ...sub.sub2 cimport b +from ...sub.sub2.b cimport b_type + +######## cypkg/sub/sub2/__init__.pyx ######## + +######## cypkg/sub/sub2/b.pxd ######## + +ctypedef int b_type + + +######## cypkg/sub/sub2/sub2module.pyx ######## + +cimport cypkg +cimport cypkg.sub +from cypkg.sub cimport a +from cypkg.sub.a cimport a_type +from cypkg.sub.sub2 cimport b +from cypkg.sub.sub2.b cimport b_type + +from .. cimport sub2 +from ..sub2 cimport b +from ..sub2.b cimport b_type + +from ...sub cimport a +from ...sub.a cimport a_type + +from ... 
cimport sub +from ...sub.sub2 cimport b +from ...sub.sub2.b cimport b_type + +######## pypkg/__init__.py ######## + +import pypkg.sub +import pypkg.sub.sub2 + +from pypkg.sub import a +from pypkg.sub.a import a_value +from pypkg.sub.sub2 import b +from pypkg.sub.sub2.b import b_value + +from . import sub +from .sub import a +from .sub.a import a_value +from .sub.sub2 import b +from .sub.sub2.b import b_value + +######## pypkg/module.py ######## + +import pypkg +import pypkg.sub +import pypkg.sub.sub2 +from pypkg.sub import a +from pypkg.sub.a import a_value +from pypkg.sub.sub2 import b +from pypkg.sub.sub2.b import b_value + +from . import sub +from .sub import a +from .sub.a import a_value +from .sub.sub2 import b +from .sub.sub2.b import b_value + +######## pypkg/sub/__init__.py ######## + +import pypkg +from pypkg.sub import a +from pypkg.sub.a import a_value +from pypkg.sub.sub2 import b +from pypkg.sub.sub2.b import b_value + +from . import a +from .a import a_value + +from .. import sub +from ..sub import a +from ..sub.a import a_value +from ..sub.sub2 import b +from ..sub.sub2.b import b_value + +######## pypkg/sub/a.py ######## + +a_value = 3 + +######## pypkg/sub/submodule.py ######## + +import pypkg +import pypkg.sub +from pypkg.sub import a +from pypkg.sub.a import a_value +from pypkg.sub.sub2 import b +from pypkg.sub.sub2.b import b_value + +from . import a +from .a import a_value + +from .. import sub +from ..sub import a +from ..sub.a import a_value +from ..sub.sub2 import b +from ..sub.sub2.b import b_value + +######## pypkg/sub/sub2/__init__.py ######## + +import pypkg +import pypkg.sub +from pypkg.sub import a +from pypkg.sub.a import a_value +from pypkg.sub.sub2 import b +from pypkg.sub.sub2.b import b_value + +from ..sub2 import b +from ..sub2.b import b_value + +from ...sub import a +from ...sub.a import a_value + +from ... 
import sub +from ...sub.sub2 import b +from ...sub.sub2.b import b_value + +######## pypkg/sub/sub2/b.py ######## + +b_value = 4 + + +######## pypkg/sub/sub2/sub2module.py ######## + +import pypkg +import pypkg.sub +from pypkg.sub import a +from pypkg.sub.a import a_value +from pypkg.sub.sub2 import b +from pypkg.sub.sub2.b import b_value + +from .. import sub2 +from ..sub2 import b +from ..sub2.b import b_value + +from ...sub import a +from ...sub.a import a_value + +from ... import sub +from ...sub.sub2 import b +from ...sub.sub2.b import b_value -- cgit v1.2.1 From c0a5ab2de754eec8787693073207aff30b22a318 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 1 Oct 2022 15:05:22 +0100 Subject: PEP614 decorators (#4991) Relaxes the grammar restrictions on decorators --- Cython/Compiler/ParseTreeTransforms.py | 3 ++- Cython/Compiler/Parsing.py | 9 +-------- tests/errors/e_decorators.pyx | 12 ------------ tests/errors/w_uninitialized.pyx | 4 ++-- tests/run/decorators.pyx | 33 +++++++++++++++++++++++++++++++++ tests/run/test_grammar.py | 6 ------ 6 files changed, 38 insertions(+), 29 deletions(-) delete mode 100644 tests/errors/e_decorators.pyx diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py index 54d861d8a..388123020 100644 --- a/Cython/Compiler/ParseTreeTransforms.py +++ b/Cython/Compiler/ParseTreeTransforms.py @@ -1328,10 +1328,11 @@ class InterpretCompilerDirectives(CythonTransform): if realdecs and (scope_name == 'cclass' or isinstance(node, (Nodes.CClassDefNode, Nodes.CVarDefNode))): for realdec in realdecs: + dec_pos = realdec.pos realdec = realdec.decorator if ((realdec.is_name and realdec.name == "dataclass") or (realdec.is_attribute and realdec.attribute == "dataclass")): - error(realdec.pos, + error(dec_pos, "Use '@cython.dataclasses.dataclass' on cdef classes to create a dataclass") # Note - arbitrary C function decorators are caught later in DecoratorTransform raise PostParseError(realdecs[0].pos, "Cdef 
functions/classes cannot take arbitrary decorators.") diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 6aa1da975..408f9483a 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -3566,14 +3566,7 @@ def p_decorators(s): while s.sy == '@': pos = s.position() s.next() - decstring = p_dotted_name(s, as_allowed=0)[2] - names = decstring.split('.') - decorator = ExprNodes.NameNode(pos, name=s.context.intern_ustring(names[0])) - for name in names[1:]: - decorator = ExprNodes.AttributeNode( - pos, attribute=s.context.intern_ustring(name), obj=decorator) - if s.sy == '(': - decorator = p_call(s, decorator) + decorator = p_namedexpr_test(s) decorators.append(Nodes.DecoratorNode(pos, decorator=decorator)) s.expect_newline("Expected a newline after decorator") return decorators diff --git a/tests/errors/e_decorators.pyx b/tests/errors/e_decorators.pyx deleted file mode 100644 index 33ef2355d..000000000 --- a/tests/errors/e_decorators.pyx +++ /dev/null @@ -1,12 +0,0 @@ -# mode: error - -class A: - pass - -@A().a -def f(): - pass - -_ERRORS = u""" -6:4: Expected a newline after decorator -""" diff --git a/tests/errors/w_uninitialized.pyx b/tests/errors/w_uninitialized.pyx index c2046ce19..066f9ed5b 100644 --- a/tests/errors/w_uninitialized.pyx +++ b/tests/errors/w_uninitialized.pyx @@ -127,10 +127,10 @@ _ERRORS = """ 66:10: local variable 'foo' referenced before assignment 71:14: local variable 'exc' referenced before assignment 71:19: local variable 'msg' referenced before assignment -78:4: local variable 'decorator' referenced before assignment +78:5: local variable 'decorator' referenced before assignment 85:16: local variable 'default' referenced before assignment 91:14: local variable 'bar' referenced before assignment -97:4: local variable 'decorator' referenced before assignment +97:5: local variable 'decorator' referenced before assignment 104:24: local variable 'Meta' referenced before assignment 110:15: local variable 
'args' referenced before assignment 110:23: local variable 'kwargs' referenced before assignment diff --git a/tests/run/decorators.pyx b/tests/run/decorators.pyx index fc20235e2..64b0f0e20 100644 --- a/tests/run/decorators.pyx +++ b/tests/run/decorators.pyx @@ -17,6 +17,10 @@ __doc__ = u""" 3 >>> i.HERE 1 + >>> i_called_directly(4) + 3 + >>> i_called_directly.HERE + 1 """ class wrap: @@ -62,6 +66,35 @@ a = A() def i(x): return x - 1 +@A().decorate +def i_called_directly(x): + # PEP 614 means this now works + return x - 1 + +list_of_decorators = [decorate, decorate2] + +@list_of_decorators[0] +def test_index_from_decorator_list0(a, b): + """ + PEP 614 means this now works + >>> test_index_from_decorator_list0(1, 2) + 4 + >>> test_index_from_decorator_list0.HERE + 1 + """ + return a+b+1 + +@list_of_decorators[1](1,2) +def test_index_from_decorator_list1(a, b): + """ + PEP 614 means this now works + >>> test_index_from_decorator_list1(1, 2) + 4 + >>> test_index_from_decorator_list1.HERE + 1 + """ + return a+b+1 + def append_to_list_decorator(lst): def do_append_to_list_dec(func): def new_func(): diff --git a/tests/run/test_grammar.py b/tests/run/test_grammar.py index bb937348e..dfa11e087 100644 --- a/tests/run/test_grammar.py +++ b/tests/run/test_grammar.py @@ -797,8 +797,6 @@ class GrammarTests(unittest.TestCase): self.assertEqual(f.__annotations__, {'return': 'list'}) # Test expressions as decorators (PEP 614): - # FIXME: implement PEP 614 - """ @False or null def f(x): pass @d := null @@ -811,7 +809,6 @@ class GrammarTests(unittest.TestCase): def f(x): pass @[null][0].__call__.__call__ def f(x): pass - """ # test closures with a variety of opargs closure = 1 @@ -1705,8 +1702,6 @@ class GrammarTests(unittest.TestCase): class G: pass # Test expressions as decorators (PEP 614): - # FIXME: implement PEP 614 - """ @False or class_decorator class H: pass @d := class_decorator @@ -1719,7 +1714,6 @@ class GrammarTests(unittest.TestCase): class L: pass 
@[class_decorator][0].__call__.__call__ class M: pass - """ def test_dictcomps(self): # dictorsetmaker: ( (test ':' test (comp_for | -- cgit v1.2.1 From 5266b9c929a3e8348998b70df7561c0d7c3a0758 Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 4 Oct 2022 19:55:28 +0100 Subject: "Fix" an exception formatting issue on Py2 (#5018) --- Cython/Utility/CythonFunction.c | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/Cython/Utility/CythonFunction.c b/Cython/Utility/CythonFunction.c index b2c67dcce..dbe9a0a9d 100644 --- a/Cython/Utility/CythonFunction.c +++ b/Cython/Utility/CythonFunction.c @@ -660,9 +660,17 @@ static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, P self = PyTuple_GetItem(args, 0); if (unlikely(!self)) { Py_DECREF(new_args); +#if PY_MAJOR_VERSION > 2 PyErr_Format(PyExc_TypeError, "unbound method %.200S() needs an argument", cyfunc->func_qualname); +#else + // %S doesn't work in PyErr_Format on Py2 and replicating + // the formatting seems more trouble than it's worth + // (so produce a less useful error message). 
+ PyErr_SetString(PyExc_TypeError, + "unbound method needs an argument"); +#endif return NULL; } -- cgit v1.2.1 From 25c0aa4b3536b6307a3b66f65cad702ed0f33e06 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Wed, 5 Oct 2022 20:00:35 +0200 Subject: Docs: Migrate early binding for speed userguide to pure python (#5061) --- .../userguide/early_binding_for_speed/rectangle.py | 22 +++++++++++++ .../early_binding_for_speed/rectangle.pyx | 3 ++ .../early_binding_for_speed/rectangle_cdef.py | 26 +++++++++++++++ .../early_binding_for_speed/rectangle_cdef.pyx | 4 +++ .../early_binding_for_speed/rectangle_cpdef.py | 23 ++++++++++++++ .../early_binding_for_speed/rectangle_cpdef.pyx | 6 +++- docs/src/userguide/early_binding_for_speed.rst | 37 +++++++++++++++++++--- 7 files changed, 115 insertions(+), 6 deletions(-) create mode 100644 docs/examples/userguide/early_binding_for_speed/rectangle.py create mode 100644 docs/examples/userguide/early_binding_for_speed/rectangle_cdef.py create mode 100644 docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.py diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle.py b/docs/examples/userguide/early_binding_for_speed/rectangle.py new file mode 100644 index 000000000..cd534d051 --- /dev/null +++ b/docs/examples/userguide/early_binding_for_speed/rectangle.py @@ -0,0 +1,22 @@ +@cython.cclass +class Rectangle: + x0: cython.int + y0: cython.int + x1: cython.int + y1: cython.int + + def __init__(self, x0: cython.int, y0: cython.int, x1: cython.int, y1: cython.int): + self.x0 = x0 + self.y0 = y0 + self.x1 = x1 + self.y1 = y1 + + def area(self): + area = (self.x1 - self.x0) * (self.y1 - self.y0) + if area < 0: + area = -area + return area + +def rectArea(x0, y0, x1, y1): + rect = Rectangle(x0, y0, x1, y1) + return rect.area() diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle.pyx b/docs/examples/userguide/early_binding_for_speed/rectangle.pyx index de70b0263..b58f6534b 100644 --- 
a/docs/examples/userguide/early_binding_for_speed/rectangle.pyx +++ b/docs/examples/userguide/early_binding_for_speed/rectangle.pyx @@ -1,7 +1,10 @@ + cdef class Rectangle: cdef int x0, y0 cdef int x1, y1 + + def __init__(self, int x0, int y0, int x1, int y1): self.x0 = x0 self.y0 = y0 diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.py b/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.py new file mode 100644 index 000000000..ee2a14fb8 --- /dev/null +++ b/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.py @@ -0,0 +1,26 @@ +@cython.cclass +class Rectangle: + x0: cython.int + y0: cython.int + x1: cython.int + y1: cython.int + + def __init__(self, x0: cython.int, y0: cython.int, x1: cython.int, y1: cython.int): + self.x0 = x0 + self.y0 = y0 + self.x1 = x1 + self.y1 = y1 + + @cython.cfunc + def _area(self) -> cython.int: + area: cython.int = (self.x1 - self.x0) * (self.y1 - self.y0) + if area < 0: + area = -area + return area + + def area(self): + return self._area() + +def rectArea(x0, y0, x1, y1): + rect: Rectangle = Rectangle(x0, y0, x1, y1) + return rect._area() diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.pyx b/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.pyx index 1933326d2..3b64d766b 100644 --- a/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.pyx +++ b/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.pyx @@ -1,13 +1,17 @@ + cdef class Rectangle: cdef int x0, y0 cdef int x1, y1 + + def __init__(self, int x0, int y0, int x1, int y1): self.x0 = x0 self.y0 = y0 self.x1 = x1 self.y1 = y1 + cdef int _area(self): cdef int area = (self.x1 - self.x0) * (self.y1 - self.y0) if area < 0: diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.py b/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.py new file mode 100644 index 000000000..670f340a4 --- /dev/null +++ 
b/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.py @@ -0,0 +1,23 @@ +@cython.cclass +class Rectangle: + x0: cython.int + y0: cython.int + x1: cython.int + y1: cython.int + + def __init__(self, x0: cython.int, y0: cython.int, x1: cython.int, y1: cython.int): + self.x0 = x0 + self.y0 = y0 + self.x1 = x1 + self.y1 = y1 + + @cython.ccall + def area(self)-> cython.int: + area: cython.int = (self.x1 - self.x0) * (self.y1 - self.y0) + if area < 0: + area = -area + return area + +def rectArea(x0, y0, x1, y1): + rect: Rectangle = Rectangle(x0, y0, x1, y1) + return rect.area() diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx b/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx index f8b7d86a8..53f2a8ad2 100644 --- a/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx +++ b/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx @@ -1,15 +1,19 @@ + cdef class Rectangle: cdef int x0, y0 cdef int x1, y1 + + def __init__(self, int x0, int y0, int x1, int y1): self.x0 = x0 self.y0 = y0 self.x1 = x1 self.y1 = y1 + cpdef int area(self): - area = (self.x1 - self.x0) * (self.y1 - self.y0) + cdef int area = (self.x1 - self.x0) * (self.y1 - self.y0) if area < 0: area = -area return area diff --git a/docs/src/userguide/early_binding_for_speed.rst b/docs/src/userguide/early_binding_for_speed.rst index 9bb8cf724..4a442d973 100644 --- a/docs/src/userguide/early_binding_for_speed.rst +++ b/docs/src/userguide/early_binding_for_speed.rst @@ -6,6 +6,9 @@ Early Binding for Speed ************************** +.. include:: + ../two-syntax-variants-used + As a dynamic language, Python encourages a programming style of considering classes and objects in terms of their methods and attributes, more than where they fit into the class hierarchy. @@ -22,7 +25,15 @@ use of 'early binding' programming techniques. For example, consider the following (silly) code example: -.. 
literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle.pyx In the :func:`rectArea` method, the call to :meth:`rect.area` and the :meth:`.area` method contain a lot of Python overhead. @@ -30,7 +41,15 @@ In the :func:`rectArea` method, the call to :meth:`rect.area` and the However, in Cython, it is possible to eliminate a lot of this overhead in cases where calls occur within Cython code. For example: -.. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cdef.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cdef.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cdef.pyx Here, in the Rectangle extension class, we have defined two different area calculation methods, the efficient :meth:`_area` C method, and the @@ -46,10 +65,18 @@ dual-access methods - methods that can be efficiently called at C level, but can also be accessed from pure Python code at the cost of the Python access overheads. Consider this code: -.. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cpdef.py + + .. group-tab:: Cython + + .. 
literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx -Here, we just have a single area method, declared as :keyword:`cpdef` to make it -efficiently callable as a C function, but still accessible from pure Python +Here, we just have a single area method, declared as :keyword:`cpdef` or with ``@ccall`` decorator +to make it efficiently callable as a C function, but still accessible from pure Python (or late-binding Cython) code. If within Cython code, we have a variable already 'early-bound' (ie, declared -- cgit v1.2.1 From 4215f740ab84975078fc0e98d33c55cc5b891f2e Mon Sep 17 00:00:00 2001 From: Ewout ter Hoeven Date: Wed, 5 Oct 2022 20:10:51 +0200 Subject: setup.py: Add project_urls for PyPI (#5064) Add project_urls dictionary to setup.py with URLs to the Cython documentation, funding, source code and bug tracker. This will add those URLs under the "Project links" section on https://pypi.org/project/cython, making it easy to find these resources directly from PyPI. --- setup.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/setup.py b/setup.py index 1398c1574..26beca2f4 100755 --- a/setup.py +++ b/setup.py @@ -298,6 +298,13 @@ def run_build(): "Topic :: Software Development :: Compilers", "Topic :: Software Development :: Libraries :: Python Modules" ], + project_urls={ + "Documentation": "https://cython.readthedocs.io/", + "Donate": "https://cython.readthedocs.io/en/latest/src/donating.html", + "Source Code": "https://github.com/cython/cython", + "Bug Tracker": "https://github.com/cython/cython/issues", + "User Group": "https://groups.google.com/g/cython-users", + }, scripts=scripts, packages=packages, -- cgit v1.2.1 From f2e8b2f3dca7436630b1b4a4d42305adcf173ece Mon Sep 17 00:00:00 2001 From: Ewout ter Hoeven Date: Wed, 5 Oct 2022 20:10:51 +0200 Subject: setup.py: Add project_urls for PyPI (#5064) Add project_urls dictionary to setup.py with URLs to the Cython documentation, funding, source code and bug tracker. 
This will add those URLs under the "Project links" section on https://pypi.org/project/cython, making it easy to find these resources directly from PyPI. --- setup.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/setup.py b/setup.py index b7043e5e7..1b10c5bd2 100755 --- a/setup.py +++ b/setup.py @@ -282,6 +282,13 @@ setup( "Topic :: Software Development :: Compilers", "Topic :: Software Development :: Libraries :: Python Modules" ], + project_urls={ + "Documentation": "https://cython.readthedocs.io/", + "Donate": "https://cython.readthedocs.io/en/latest/src/donating.html", + "Source Code": "https://github.com/cython/cython", + "Bug Tracker": "https://github.com/cython/cython/issues", + "User Group": "https://groups.google.com/g/cython-users", + }, scripts=scripts, packages=packages, -- cgit v1.2.1 From 3c4f009dbd039bcd4f74a0133e132d0b96ce1335 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Thu, 6 Oct 2022 09:25:54 +0200 Subject: Docs: Fix example in buffer user guide (#5065) To make sure it works right with Python 3 semantics. --- docs/examples/userguide/buffer/matrix_with_buffer.pyx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/examples/userguide/buffer/matrix_with_buffer.pyx b/docs/examples/userguide/buffer/matrix_with_buffer.pyx index c355f0fe8..46af91b07 100644 --- a/docs/examples/userguide/buffer/matrix_with_buffer.pyx +++ b/docs/examples/userguide/buffer/matrix_with_buffer.pyx @@ -19,7 +19,7 @@ cdef class Matrix: def __getbuffer__(self, Py_buffer *buffer, int flags): cdef Py_ssize_t itemsize = sizeof(self.v[0]) - self.shape[0] = self.v.size() / self.ncols + self.shape[0] = self.v.size() // self.ncols self.shape[1] = self.ncols # Stride 1 is the distance, in bytes, between two items in a row; -- cgit v1.2.1 From 009b00b413258d9324675f31e838a75651f2e08c Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Thu, 6 Oct 2022 13:21:26 +0200 Subject: Update changelog. 
--- CHANGES.rst | 109 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 109 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index 8c8462e5e..096391806 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -2,6 +2,115 @@ Cython Changelog ================ +3.0.0 alpha 12 (2022-??-??) +=========================== + +Features added +-------------- + +* Cython implemented C functions now propagate exceptions by default, rather than + swallowing them in non-object returning function if the user forgot to add an + ``except`` declaration to the signature. This was a long-standing source of bugs, + but can require adding the ``noexcept`` declaration to existing functions if + exception propagation is really undesired. + (Github issue :issue:`4280`) + +* `PEP-614 `_: + decorators can now be arbitrary Python expressions. + (Github issue :issue:`4570`) + +* Bound C methods can now coerce to Python objects. + (Github issue :issue:`4890`) + +* ``reversed()`` can now be used together with C++ iteration. + Patch by Chia-Hsiang Cheng. (Github issue :issue:`5002`) + +* Standard C/C++ atomic operations are now used for memory views, if available. + (Github issue :issue:`4925`) + +* ``cythonize --help`` now also prints information about the supported environment variables. + Patch by Matus Valo. (Github issue :issue:`1711`) + +* Declarations were added for the bit operations in C++20. + Patch by Jonathan Helgert. (Github issue :issue:`4962`) + +Bugs fixed +---------- + +* Generator expressions and comprehensions now look up their outer-most iterable + on creation, as Python does, and not later on start, as they did previously. + (Github issue :issue:`1159`) + +* Iterating over memoryviews in generator expressions could leak a buffer reference. + (Github issue :issue:`4968`) + +* ``__del__`` finaliser methods were not always called if they were only inherited. 
+ (Github issue :issue:`4995`) + +* ``cdef public`` functions declared in .pxd files could use an incorrectly mangled C name. + Patch by EpigeneMax. (Github issue :issue:`2940`) + +* C++ post-increment/-decrement operators were not correctly looked up on declared C++ + classes, thus allowing Cython declarations to be missing for them and incorrect C++ + code to be generated. + Patch by Max Bachmann. (Github issue :issue:`4536`) + +* C++ iteration more safely stores the iterable in temporary variables. + Patch by Xavier. (Github issue :issue:`3828`) + +* C++ references did not work on fused types. + (Github issue :issue:`4717`) + +* Nesting fused types in other fused types could fail to specialise the inner type. + (Github issue :issue:`4725`) + +* Coverage analysis failed in projects with a separate source subdirectory. + Patch by Sviatoslav Sydorenko and Ruben Vorderman. (Github issue :issue:`3636`) + +* The ``@dataclass`` directive was accidentally inherited by methods and subclasses. + (Github issue :issue:`4953`) + +* Some issues with Cython ``@dataclass`` arguments, hashing and ``repr()`` were resolved. + (Github issue :issue:`4956`) + +* cdef data classes (not their instances) were accidentally modifiable after creation, + which lead to potential problems and crashes. They are now immutable. + (Github issue :issue:`5026`) + +* Relative imports failed in compiled ``__init__.py`` package modules. + Patches by Matus Valo. (Github issues :issue:`4941`, :issue:``) + +* Some old usages of the deprecated Python ``imp`` module were replaced with ``importlib``. + Patches by Matus Valo. (Github issues :issue:`4941`, :issue:``) + +* Invalid and misspelled ``cython.*`` module names were not reported as errors. + (Github issue :issue:`4947`) + +* Extended glob paths with ``/**/`` and ``\**\`` failed on Windows. + +* Annotated HTML generation was missing newlines in 3.0.0a11. + (Github issue :issue:`4945`) + +* Some parser issues were resolved. 
+ (Github issue :issue:`4992`) + +* Some C/C++ warnings were resolved. + Patches by Max Bachmann at al. + (Github issues :issue:`5004`, :issue:`5005`, :issue:`5019`, :issue:`5029`) + +* Intel C compilers could complain about unsupported gcc pragmas. + Patch by Ralf Gommers. (Github issue :issue:`5052`) + +Other changes +------------- + +* The wheel building process was migrated to use the ``cibuildwheel`` tool. + Patch by Thomas Li. (Github issue :issue:`4736`) + +* Wheels now include a compiled parser again, which increases their size a little + but gives about a 10% speed-up when running Cython. + + 3.0.0 alpha 11 (2022-07-31) =========================== -- cgit v1.2.1 From a1e06d038594929aece02ba171b84f586bf20ead Mon Sep 17 00:00:00 2001 From: da-woods Date: Thu, 6 Oct 2022 19:11:39 +0100 Subject: Clean up empty issue numbers in changelog --- CHANGES.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 096391806..3146736d0 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -78,10 +78,10 @@ Bugs fixed (Github issue :issue:`5026`) * Relative imports failed in compiled ``__init__.py`` package modules. - Patches by Matus Valo. (Github issues :issue:`4941`, :issue:``) + Patches by Matus Valo. (Github issue :issue:`4941`) * Some old usages of the deprecated Python ``imp`` module were replaced with ``importlib``. - Patches by Matus Valo. (Github issues :issue:`4941`, :issue:``) + Patches by Matus Valo. (Github issue :issue:`4941`) * Invalid and misspelled ``cython.*`` module names were not reported as errors. (Github issue :issue:`4947`) -- cgit v1.2.1 From 1e84f30a4a397cecb888997fc97f5c2604af0733 Mon Sep 17 00:00:00 2001 From: da-woods Date: Fri, 7 Oct 2022 12:21:25 +0100 Subject: Fix type slot definitions of some binop self types (#5068) `__matmul__`, `__truediv__`, `__floordiv__` all forgot to set the type of `self`. 
Fixes #5067 --- Cython/Compiler/TypeSlots.py | 6 ++-- tests/run/binop_reverse_methods_GH2056.pyx | 44 ++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+), 3 deletions(-) diff --git a/Cython/Compiler/TypeSlots.py b/Cython/Compiler/TypeSlots.py index fe3867f9d..0bd550d8c 100644 --- a/Cython/Compiler/TypeSlots.py +++ b/Cython/Compiler/TypeSlots.py @@ -965,8 +965,8 @@ class SlotTable(object): # Added in release 2.2 # The following require the Py_TPFLAGS_HAVE_CLASS flag - BinopSlot(binaryfunc, "nb_floor_divide", "__floordiv__", method_name_to_slot), - BinopSlot(binaryfunc, "nb_true_divide", "__truediv__", method_name_to_slot), + BinopSlot(bf, "nb_floor_divide", "__floordiv__", method_name_to_slot), + BinopSlot(bf, "nb_true_divide", "__truediv__", method_name_to_slot), MethodSlot(ibinaryfunc, "nb_inplace_floor_divide", "__ifloordiv__", method_name_to_slot), MethodSlot(ibinaryfunc, "nb_inplace_true_divide", "__itruediv__", method_name_to_slot), @@ -974,7 +974,7 @@ class SlotTable(object): MethodSlot(unaryfunc, "nb_index", "__index__", method_name_to_slot), # Added in release 3.5 - BinopSlot(binaryfunc, "nb_matrix_multiply", "__matmul__", method_name_to_slot, + BinopSlot(bf, "nb_matrix_multiply", "__matmul__", method_name_to_slot, ifdef="PY_VERSION_HEX >= 0x03050000"), MethodSlot(ibinaryfunc, "nb_inplace_matrix_multiply", "__imatmul__", method_name_to_slot, ifdef="PY_VERSION_HEX >= 0x03050000"), diff --git a/tests/run/binop_reverse_methods_GH2056.pyx b/tests/run/binop_reverse_methods_GH2056.pyx index 4938f0d15..43bfcde86 100644 --- a/tests/run/binop_reverse_methods_GH2056.pyx +++ b/tests/run/binop_reverse_methods_GH2056.pyx @@ -30,6 +30,12 @@ class Base(object): 'Base.__rpow__(Base(), 2, None)' >>> pow(Base(), 2, 100) 'Base.__pow__(Base(), 2, 100)' + >>> Base() // 1 + True + >>> set() // Base() + True + + # version dependent tests for @ and / are external """ implemented: cython.bint @@ -67,6 +73,44 @@ class Base(object): def __repr__(self): return "%s()" % 
(self.__class__.__name__) + # The following methods were missed from the initial implementation + # that typed 'self'. These tests are a quick test to confirm that + # but not the full binop behaviour + def __matmul__(self, other): + return cython.typeof(self) == 'Base' + + def __rmatmul__(self, other): + return cython.typeof(self) == 'Base' + + def __truediv__(self, other): + return cython.typeof(self) == 'Base' + + def __rtruediv__(self, other): + return cython.typeof(self) == 'Base' + + def __floordiv__(self, other): + return cython.typeof(self) == 'Base' + + def __rfloordiv__(self, other): + return cython.typeof(self) == 'Base' + + +if sys.version_info >= (3, 5): + __doc__ += """ + >>> Base() @ 1 + True + >>> set() @ Base() + True + """ + +if sys.version_info >= (3, 0): + __doc__ += """ + >>> Base() / 1 + True + >>> set() / Base() + True + """ + @cython.c_api_binop_methods(False) @cython.cclass -- cgit v1.2.1 From e1364b5a4b546be34829360d0d1471bc33c01262 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Sat, 8 Oct 2022 09:30:49 +0200 Subject: Docs: Migrate buffer userguide to pure python (#5069) --- docs/examples/userguide/buffer/matrix.py | 15 +++++++ docs/examples/userguide/buffer/matrix.pyx | 3 +- .../userguide/buffer/matrix_with_buffer.py | 48 ++++++++++++++++++++++ .../userguide/buffer/matrix_with_buffer.pyx | 5 ++- docs/examples/userguide/buffer/view_count.py | 30 ++++++++++++++ docs/examples/userguide/buffer/view_count.pyx | 3 +- docs/src/userguide/buffer.rst | 34 +++++++++++++-- 7 files changed, 131 insertions(+), 7 deletions(-) create mode 100644 docs/examples/userguide/buffer/matrix.py create mode 100644 docs/examples/userguide/buffer/matrix_with_buffer.py create mode 100644 docs/examples/userguide/buffer/view_count.py diff --git a/docs/examples/userguide/buffer/matrix.py b/docs/examples/userguide/buffer/matrix.py new file mode 100644 index 000000000..79a3d3f12 --- /dev/null +++ b/docs/examples/userguide/buffer/matrix.py @@ -0,0 +1,15 @@ +# distutils: 
language = c++ + +from cython.cimports.libcpp.vector import vector + +@cython.cclass +class Matrix: + ncols: cython.unsigned + v: vector[cython.float] + + def __cinit__(self, ncols: cython.unsigned): + self.ncols = ncols + + def add_row(self): + """Adds a row, initially zero-filled.""" + self.v.resize(self.v.size() + self.ncols) diff --git a/docs/examples/userguide/buffer/matrix.pyx b/docs/examples/userguide/buffer/matrix.pyx index ca597c2f2..f2547f6c3 100644 --- a/docs/examples/userguide/buffer/matrix.pyx +++ b/docs/examples/userguide/buffer/matrix.pyx @@ -1,9 +1,8 @@ # distutils: language = c++ -# matrix.pyx - from libcpp.vector cimport vector + cdef class Matrix: cdef unsigned ncols cdef vector[float] v diff --git a/docs/examples/userguide/buffer/matrix_with_buffer.py b/docs/examples/userguide/buffer/matrix_with_buffer.py new file mode 100644 index 000000000..34ccc6591 --- /dev/null +++ b/docs/examples/userguide/buffer/matrix_with_buffer.py @@ -0,0 +1,48 @@ +# distutils: language = c++ +from cython.cimports.cpython import Py_buffer +from cython.cimports.libcpp.vector import vector + +@cython.cclass +class Matrix: + ncols: cython.Py_ssize_t + shape: cython.Py_ssize_t[2] + strides: cython.Py_ssize_t[2] + v: vector[cython.float] + + def __cinit__(self, ncols: cython.Py_ssize_t): + self.ncols = ncols + + def add_row(self): + """Adds a row, initially zero-filled.""" + self.v.resize(self.v.size() + self.ncols) + + def __getbuffer__(self, buffer: cython.pointer(Py_buffer), flags: cython.int): + itemsize: cython.Py_ssize_t = cython.sizeof(self.v[0]) + + self.shape[0] = self.v.size() // self.ncols + self.shape[1] = self.ncols + + # Stride 1 is the distance, in bytes, between two items in a row; + # this is the distance between two adjacent items in the vector. + # Stride 0 is the distance between the first elements of adjacent rows. 
+ self.strides[1] = cython.cast(cython.Py_ssize_t, ( + cython.cast(cython.p_char, cython.address(self.v[1])) + - cython.cast(cython.p_char, cython.address(self.v[0])) + ) + ) + self.strides[0] = self.ncols * self.strides[1] + + buffer.buf = cython.cast(cython.p_char, cython.address(self.v[0])) + buffer.format = 'f' # float + buffer.internal = cython.NULL # see References + buffer.itemsize = itemsize + buffer.len = self.v.size() * itemsize # product(shape) * itemsize + buffer.ndim = 2 + buffer.obj = self + buffer.readonly = 0 + buffer.shape = self.shape + buffer.strides = self.strides + buffer.suboffsets = cython.NULL # for pointer arrays only + + def __releasebuffer__(self, buffer: cython.pointer(Py_buffer)): + pass diff --git a/docs/examples/userguide/buffer/matrix_with_buffer.pyx b/docs/examples/userguide/buffer/matrix_with_buffer.pyx index 46af91b07..16239d199 100644 --- a/docs/examples/userguide/buffer/matrix_with_buffer.pyx +++ b/docs/examples/userguide/buffer/matrix_with_buffer.pyx @@ -1,8 +1,8 @@ # distutils: language = c++ - from cpython cimport Py_buffer from libcpp.vector cimport vector + cdef class Matrix: cdef Py_ssize_t ncols cdef Py_ssize_t shape[2] @@ -27,6 +27,9 @@ cdef class Matrix: # Stride 0 is the distance between the first elements of adjacent rows. self.strides[1] = ( &(self.v[1]) - &(self.v[0])) + + + self.strides[0] = self.ncols * self.strides[1] buffer.buf = &(self.v[0]) diff --git a/docs/examples/userguide/buffer/view_count.py b/docs/examples/userguide/buffer/view_count.py new file mode 100644 index 000000000..6a0554abc --- /dev/null +++ b/docs/examples/userguide/buffer/view_count.py @@ -0,0 +1,30 @@ +# distutils: language = c++ + +from cython.cimports.cpython import Py_buffer +from cython.cimports.libcpp.vector import vector + +@cython.cclass +class Matrix: + + view_count: cython.int + + ncols: cython.Py_ssize_t + v: vector[cython.float] + # ... 
+ + def __cinit__(self, ncols: cython.Py_ssize_t): + self.ncols = ncols + self.view_count = 0 + + def add_row(self): + if self.view_count > 0: + raise ValueError("can't add row while being viewed") + self.v.resize(self.v.size() + self.ncols) + + def __getbuffer__(self, buffer: cython.pointer(Py_buffer), flags: cython.int): + # ... as before + + self.view_count += 1 + + def __releasebuffer__(self, buffer: cython.pointer(Py_buffer)): + self.view_count -= 1 diff --git a/docs/examples/userguide/buffer/view_count.pyx b/docs/examples/userguide/buffer/view_count.pyx index 8027f3ee9..8c4b1d524 100644 --- a/docs/examples/userguide/buffer/view_count.pyx +++ b/docs/examples/userguide/buffer/view_count.pyx @@ -3,6 +3,7 @@ from cpython cimport Py_buffer from libcpp.vector cimport vector + cdef class Matrix: cdef int view_count @@ -26,4 +27,4 @@ cdef class Matrix: self.view_count += 1 def __releasebuffer__(self, Py_buffer *buffer): - self.view_count -= 1 \ No newline at end of file + self.view_count -= 1 diff --git a/docs/src/userguide/buffer.rst b/docs/src/userguide/buffer.rst index 08661a184..3687cf2fd 100644 --- a/docs/src/userguide/buffer.rst +++ b/docs/src/userguide/buffer.rst @@ -3,6 +3,10 @@ Implementing the buffer protocol ================================ +.. include:: + ../two-syntax-variants-used + + Cython objects can expose memory buffers to Python code by implementing the "buffer protocol". This chapter shows how to implement the protocol @@ -16,7 +20,15 @@ The following Cython/C++ code implements a matrix of floats, where the number of columns is fixed at construction time but rows can be added dynamically. -.. literalinclude:: ../../examples/userguide/buffer/matrix.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/buffer/matrix.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/buffer/matrix.pyx There are no methods to do anything productive with the matrices' contents. 
We could implement custom ``__getitem__``, ``__setitem__``, etc. for this, @@ -27,7 +39,15 @@ Implementing the buffer protocol requires adding two methods, ``__getbuffer__`` and ``__releasebuffer__``, which Cython handles specially. -.. literalinclude:: ../../examples/userguide/buffer/matrix_with_buffer.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/buffer/matrix_with_buffer.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/buffer/matrix_with_buffer.pyx The method ``Matrix.__getbuffer__`` fills a descriptor structure, called a ``Py_buffer``, that is defined by the Python C-API. @@ -75,7 +95,15 @@ This is where ``__releasebuffer__`` comes in. We can add a reference count to each matrix, and lock it for mutation whenever a view exists. -.. literalinclude:: ../../examples/userguide/buffer/view_count.pyx +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/buffer/view_count.py + + .. group-tab:: Cython + + .. 
literalinclude:: ../../examples/userguide/buffer/view_count.pyx Flags ----- -- cgit v1.2.1 From 08d1e0a2f407d418f88f5fc1ca5ac482ed2fdb23 Mon Sep 17 00:00:00 2001 From: Matti Picus Date: Sun, 9 Oct 2022 19:05:47 +0300 Subject: Fix PYPY_VERSION_HEX/PYPY_VERSION_NUM typo (#5074) Fix a typo: s/PYPY_VERSION_HEX/PYPY_VERSION_NUM --- Cython/Utility/ModuleSetupCode.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c index 3e209dd87..57b761af7 100644 --- a/Cython/Utility/ModuleSetupCode.c +++ b/Cython/Utility/ModuleSetupCode.c @@ -169,7 +169,7 @@ #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_HEX >= 0x07030900) + #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_NUM >= 0x07030900) #endif #elif defined(CYTHON_LIMITED_API) -- cgit v1.2.1 From 3a063cb43a94786261032266fb02c7234a7c2c07 Mon Sep 17 00:00:00 2001 From: Matti Picus Date: Sun, 9 Oct 2022 19:05:47 +0300 Subject: Fix PYPY_VERSION_HEX/PYPY_VERSION_NUM typo (#5074) Fix a typo: s/PYPY_VERSION_HEX/PYPY_VERSION_NUM --- Cython/Utility/ModuleSetupCode.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c index f8bf885bc..8471bf692 100644 --- a/Cython/Utility/ModuleSetupCode.c +++ b/Cython/Utility/ModuleSetupCode.c @@ -85,7 +85,7 @@ #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_HEX >= 0x07030900) + #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_NUM >= 0x07030900) #endif #elif defined(PYSTON_VERSION) -- cgit v1.2.1 From 1741bac96ce49d7e31b2434dd798fa38dad660dd Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 9 Oct 2022 22:15:13 +0100 Subject: Revert "Fix PYPY_VERSION_HEX/PYPY_VERSION_NUM typo (#5074)" This reverts commit 
08d1e0a2f407d418f88f5fc1ca5ac482ed2fdb23. It looks to cause some segmentation faults in PyPy3 --- Cython/Utility/ModuleSetupCode.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c index 57b761af7..3e209dd87 100644 --- a/Cython/Utility/ModuleSetupCode.c +++ b/Cython/Utility/ModuleSetupCode.c @@ -169,7 +169,7 @@ #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_NUM >= 0x07030900) + #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_HEX >= 0x07030900) #endif #elif defined(CYTHON_LIMITED_API) -- cgit v1.2.1 From bcafbfb19093da9068dc854c2df3897f72902770 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 9 Oct 2022 22:16:01 +0100 Subject: Revert "Fix PYPY_VERSION_HEX/PYPY_VERSION_NUM typo (#5074)" This reverts commit 3a063cb43a94786261032266fb02c7234a7c2c07. At least on master it was causing segmentation faults on PyPy3 --- Cython/Utility/ModuleSetupCode.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c index 8471bf692..f8bf885bc 100644 --- a/Cython/Utility/ModuleSetupCode.c +++ b/Cython/Utility/ModuleSetupCode.c @@ -85,7 +85,7 @@ #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_NUM >= 0x07030900) + #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_HEX >= 0x07030900) #endif #elif defined(PYSTON_VERSION) -- cgit v1.2.1 From 0976aa3e2dd1ad96a3ce9be5521b951f7d5e4390 Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 11 Oct 2022 08:22:40 +0100 Subject: Remove "from x cimport class C" (#4904) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit (and "struct S"/"union U") This appears to be an old Pyrex feature introduced in 
https://github.com/cython/cython/commit/9bac9c2e014ed63c2ac435e9b431fae124fba668 to provide a way of "forward cimporting" a class. I originally tried to make a test for it, but couldn't actually come up with a useful way of using it in the intended way, where the name would be unavailable initially but avaialble later. It looks to be completely untested, and responsible for some missing coverage in Nodes.py (https://github.com/cython/cython/issues/4163). The large section containing ``` if kind == 'struct' or kind == 'union': # 8479 ↛ 8480 ``` I propose to fix the missing coverage by killing off the feature. The only people I could find using this syntax were H5Py, who look to have removed their sole use of it 3 years ago https://github.com/h5py/h5py/commit/8d2498c7f5e3fec884ff56e9aca905c325d82484 Therefore it seems a good candidate to go in Cython 3 --- Cython/Compiler/Nodes.py | 24 ++++++++---------------- Cython/Compiler/ParseTreeTransforms.py | 14 +++++--------- Cython/Compiler/Parsing.pxd | 2 +- Cython/Compiler/Parsing.py | 23 ++++++++--------------- 4 files changed, 22 insertions(+), 41 deletions(-) diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index 92a0cfea9..c3cde85f3 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -8737,7 +8737,7 @@ class FromCImportStatNode(StatNode): # # module_name string Qualified name of module # relative_level int or None Relative import: number of dots before module_name - # imported_names [(pos, name, as_name, kind)] Names to be imported + # imported_names [(pos, name, as_name)] Names to be imported child_attrs = [] module_name = None @@ -8761,29 +8761,21 @@ class FromCImportStatNode(StatNode): module_scope = env.find_module(self.module_name, self.pos, relative_level=self.relative_level) module_name = module_scope.qualified_name env.add_imported_module(module_scope) - for pos, name, as_name, kind in self.imported_names: + for pos, name, as_name in self.imported_names: if name == "*": for 
local_name, entry in list(module_scope.entries.items()): env.add_imported_entry(local_name, entry, pos) else: entry = module_scope.lookup(name) if entry: - if kind and not self.declaration_matches(entry, kind): - entry.redeclared(pos) entry.used = 1 else: - if kind == 'struct' or kind == 'union': - entry = module_scope.declare_struct_or_union( - name, kind=kind, scope=None, typedef_flag=0, pos=pos) - elif kind == 'class': - entry = module_scope.declare_c_class(name, pos=pos, module_name=module_name) + submodule_scope = env.context.find_module( + name, relative_to=module_scope, pos=self.pos, absolute_fallback=False) + if submodule_scope.parent_module is module_scope: + env.declare_module(as_name or name, submodule_scope, self.pos) else: - submodule_scope = env.context.find_module( - name, relative_to=module_scope, pos=self.pos, absolute_fallback=False) - if submodule_scope.parent_module is module_scope: - env.declare_module(as_name or name, submodule_scope, self.pos) - else: - error(pos, "Name '%s' not declared in module '%s'" % (name, module_name)) + error(pos, "Name '%s' not declared in module '%s'" % (name, module_name)) if entry: local_name = as_name or name @@ -8792,7 +8784,7 @@ class FromCImportStatNode(StatNode): if module_name.startswith('cpython') or module_name.startswith('cython'): # enough for now if module_name in utility_code_for_cimports: env.use_utility_code(utility_code_for_cimports[module_name]()) - for _, name, _, _ in self.imported_names: + for _, name, _ in self.imported_names: fqname = '%s.%s' % (module_name, name) if fqname in utility_code_for_cimports: env.use_utility_code(utility_code_for_cimports[fqname]()) diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py index 388123020..bc4943b79 100644 --- a/Cython/Compiler/ParseTreeTransforms.py +++ b/Cython/Compiler/ParseTreeTransforms.py @@ -998,8 +998,7 @@ class InterpretCompilerDirectives(CythonTransform): self._check_valid_cython_module(node.pos, 
module_name) submodule = (module_name + u".")[7:] newimp = [] - - for pos, name, as_name, kind in node.imported_names: + for pos, name, as_name in node.imported_names: full_name = submodule + name qualified_name = u"cython." + full_name if self.is_parallel_directive(qualified_name, node.pos): @@ -1008,15 +1007,12 @@ class InterpretCompilerDirectives(CythonTransform): self.parallel_directives[as_name or name] = qualified_name elif self.is_cython_directive(full_name): self.directive_names[as_name or name] = full_name - if kind is not None: - self.context.nonfatal_error(PostParseError(pos, - "Compiler directive imports must be plain imports")) elif full_name in ['dataclasses', 'typing']: self.directive_names[as_name or name] = full_name # unlike many directives, still treat it as a regular module - newimp.append((pos, name, as_name, kind)) + newimp.append((pos, name, as_name)) else: - newimp.append((pos, name, as_name, kind)) + newimp.append((pos, name, as_name)) if not newimp: return None @@ -1031,7 +1027,7 @@ class InterpretCompilerDirectives(CythonTransform): imported_names = [] for name, name_node in node.items: imported_names.append( - (name_node.pos, name, None if name == name_node.name else name_node.name, None)) + (name_node.pos, name, None if name == name_node.name else name_node.name)) return self._create_cimport_from_import( node.pos, module_name, import_node.level, imported_names) elif module_name == u"cython" or module_name.startswith(u"cython."): @@ -1070,7 +1066,7 @@ class InterpretCompilerDirectives(CythonTransform): module_name=dotted_name, as_name=as_name, is_absolute=level == 0) - for pos, dotted_name, as_name, _ in imported_names + for pos, dotted_name, as_name in imported_names ] def visit_SingleAssignmentNode(self, node): diff --git a/Cython/Compiler/Parsing.pxd b/Cython/Compiler/Parsing.pxd index 0c58df3f7..72a855fd4 100644 --- a/Cython/Compiler/Parsing.pxd +++ b/Cython/Compiler/Parsing.pxd @@ -110,7 +110,7 @@ cdef 
p_return_statement(PyrexScanner s) cdef p_raise_statement(PyrexScanner s) cdef p_import_statement(PyrexScanner s) cdef p_from_import_statement(PyrexScanner s, bint first_statement = *) -cdef p_imported_name(PyrexScanner s, bint is_cimport) +cdef p_imported_name(PyrexScanner s) cdef p_dotted_name(PyrexScanner s, bint as_allowed) cdef p_as_name(PyrexScanner s) cdef p_assert_statement(PyrexScanner s) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 408f9483a..30d73588d 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -1799,18 +1799,18 @@ def p_from_import_statement(s, first_statement = 0): is_cimport = kind == 'cimport' is_parenthesized = False if s.sy == '*': - imported_names = [(s.position(), s.context.intern_ustring("*"), None, None)] + imported_names = [(s.position(), s.context.intern_ustring("*"), None)] s.next() else: if s.sy == '(': is_parenthesized = True s.next() - imported_names = [p_imported_name(s, is_cimport)] + imported_names = [p_imported_name(s)] while s.sy == ',': s.next() if is_parenthesized and s.sy == ')': break - imported_names.append(p_imported_name(s, is_cimport)) + imported_names.append(p_imported_name(s)) if is_parenthesized: s.expect(')') if dotted_name == '__future__': @@ -1819,7 +1819,7 @@ def p_from_import_statement(s, first_statement = 0): elif level: s.error("invalid syntax") else: - for (name_pos, name, as_name, kind) in imported_names: + for (name_pos, name, as_name) in imported_names: if name == "braces": s.error("not a chance", name_pos) break @@ -1830,7 +1830,7 @@ def p_from_import_statement(s, first_statement = 0): break s.context.future_directives.add(directive) return Nodes.PassStatNode(pos) - elif kind == 'cimport': + elif is_cimport: return Nodes.FromCImportStatNode( pos, module_name=dotted_name, relative_level=level, @@ -1838,7 +1838,7 @@ def p_from_import_statement(s, first_statement = 0): else: imported_name_strings = [] items = [] - for (name_pos, name, as_name, kind) in 
imported_names: + for (name_pos, name, as_name) in imported_names: imported_name_strings.append( ExprNodes.IdentifierStringNode(name_pos, value=name)) items.append( @@ -1853,18 +1853,11 @@ def p_from_import_statement(s, first_statement = 0): items = items) -imported_name_kinds = cython.declare(frozenset, frozenset(( - 'class', 'struct', 'union'))) - -def p_imported_name(s, is_cimport): +def p_imported_name(s): pos = s.position() - kind = None - if is_cimport and s.systring in imported_name_kinds: - kind = s.systring - s.next() name = p_ident(s) as_name = p_as_name(s) - return (pos, name, as_name, kind) + return (pos, name, as_name) def p_dotted_name(s, as_allowed): -- cgit v1.2.1 From 877167351d2b1da993ea14ec4001406850ba7371 Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 11 Oct 2022 08:24:01 +0100 Subject: [0.29] deprecation warning for "from x cimport class A" syntax (#4905) Unused/rarely used syntax which we plan to remove in Cython 3. See https://github.com/cython/cython/pull/4904 --- Cython/Compiler/Parsing.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 2aafcb99a..1f20b4c95 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -1795,6 +1795,8 @@ def p_imported_name(s, is_cimport): kind = None if is_cimport and s.systring in imported_name_kinds: kind = s.systring + warning(pos, 'the "from module cimport %s name" syntax is deprecated and ' + 'will be removed in Cython 3.0' % kind, 2) s.next() name = p_ident(s) as_name = p_as_name(s) -- cgit v1.2.1 From 4eacb83cbb4ff496f4fc1bc37b98aaf424823a27 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Wed, 12 Oct 2022 18:48:59 +0200 Subject: [Doc] Improve documentation of disabling annotations in pure mode (#5080) --- .../examples/tutorial/pure/disabled_annotations.py | 33 ++++++++++++++++++++++ docs/src/tutorial/pure.rst | 15 ++++++++-- 2 files changed, 46 insertions(+), 2 deletions(-) create mode 100644 
docs/examples/tutorial/pure/disabled_annotations.py diff --git a/docs/examples/tutorial/pure/disabled_annotations.py b/docs/examples/tutorial/pure/disabled_annotations.py new file mode 100644 index 000000000..c92b4cf8e --- /dev/null +++ b/docs/examples/tutorial/pure/disabled_annotations.py @@ -0,0 +1,33 @@ +import cython + +@cython.annotation_typing(False) +def function_without_typing(a: int, b: int) -> int: + """Cython is ignoring annotations in this function""" + c: int = a + b + return c * a + + +@cython.annotation_typing(False) +@cython.cclass +class NotAnnotatedClass: + """Cython is ignoring annotatons in this class except annotated_method""" + d: dict + + def __init__(self, dictionary: dict): + self.d = dictionary + + @cython.annotation_typing(True) + def annotated_method(self, key: str, a: cython.int, b: cython.int): + prefixed_key: str = 'prefix_' + key + self.d[prefixed_key] = a + b + + +def annotated_function(a: cython.int, b: cython.int): + s: cython.int = a + b + with cython.annotation_typing(False): + # Cython is ignoring annotations within this code block + c: list = [] + c.append(a) + c.append(b) + c.append(s) + return c diff --git a/docs/src/tutorial/pure.rst b/docs/src/tutorial/pure.rst index 417b7d1b2..91a381d1a 100644 --- a/docs/src/tutorial/pure.rst +++ b/docs/src/tutorial/pure.rst @@ -347,8 +347,7 @@ PEP-484 type annotations Python `type hints `_ can be used to declare argument types, as shown in the -following example. To avoid conflicts with other kinds of annotation -usages, this can be disabled with the directive ``annotation_typing=False``. +following example: .. literalinclude:: ../../examples/tutorial/pure/annotations.py @@ -378,6 +377,18 @@ declare types of variables in a Python 3.6 compatible way as follows: There is currently no way to express the visibility of object attributes. 
+Disabling annotations +^^^^^^^^^^^^^^^^^^^^^ + +To avoid conflicts with other kinds of annotation +usages, Cython's use of annotations to specify types can be disabled with the +``annotation_typing`` :ref:`compiler directive`. From Cython 3 +you can use this as a decorator or a with statement, as shown in the following example: + +.. literalinclude:: ../../examples/tutorial/pure/disabled_annotations.py + + + ``typing`` Module ^^^^^^^^^^^^^^^^^ -- cgit v1.2.1 From c900b6a587801aa3efc918809e53d6c33bc8ed73 Mon Sep 17 00:00:00 2001 From: Matti Picus Date: Wed, 12 Oct 2022 23:48:08 +0300 Subject: always set CYTHON_UPDATE_DESCRIPTOR_DOC to 0 on PyPy (#5083) It was previously set to 0 via a typo. --- Cython/Utility/ModuleSetupCode.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c index 3e209dd87..fa16485b5 100644 --- a/Cython/Utility/ModuleSetupCode.c +++ b/Cython/Utility/ModuleSetupCode.c @@ -169,7 +169,7 @@ #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_HEX >= 0x07030900) + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif #elif defined(CYTHON_LIMITED_API) -- cgit v1.2.1 From 95009ba67a4c0e4cbe2285d59cde5029574c6fc3 Mon Sep 17 00:00:00 2001 From: Matti Picus Date: Wed, 12 Oct 2022 23:48:08 +0300 Subject: always set CYTHON_UPDATE_DESCRIPTOR_DOC to 0 on PyPy (#5083) It was previously set to 0 via a typo. 
--- Cython/Utility/ModuleSetupCode.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c index f8bf885bc..463d2b7ac 100644 --- a/Cython/Utility/ModuleSetupCode.c +++ b/Cython/Utility/ModuleSetupCode.c @@ -85,7 +85,7 @@ #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_HEX >= 0x07030900) + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif #elif defined(PYSTON_VERSION) -- cgit v1.2.1 From b37cfc9a7028f79025c83a9315544b22c9b43d31 Mon Sep 17 00:00:00 2001 From: Lisandro Dalcin Date: Mon, 17 Oct 2022 22:42:51 +0300 Subject: Fix various clang warnings (#5086) * Fix clang -Wcomma * Fix clang -Wextra-semi-stmt * Fix clang -Wconditional-uninitialized * Fix clang -Wunreachable-code-return --- Cython/Compiler/Code.py | 2 +- Cython/Compiler/ModuleNode.py | 4 ++-- Cython/Compiler/PyrexTypes.py | 2 +- Cython/Utility/Exceptions.c | 6 ++---- Cython/Utility/ObjectHandling.c | 21 ++++++++++----------- 5 files changed, 16 insertions(+), 19 deletions(-) diff --git a/Cython/Compiler/Code.py b/Cython/Compiler/Code.py index 45c5a325c..d0b4756e5 100644 --- a/Cython/Compiler/Code.py +++ b/Cython/Compiler/Code.py @@ -1559,7 +1559,7 @@ class GlobalState(object): init_globals = self.parts['init_globals'] init_globals.putln( - "if (__Pyx_InitStrings(%s) < 0) %s;" % ( + "if (__Pyx_InitStrings(%s) < 0) %s" % ( Naming.stringtab_cname, init_globals.error_goto(self.module_pos))) diff --git a/Cython/Compiler/ModuleNode.py b/Cython/Compiler/ModuleNode.py index bb5aa66fb..56845330d 100644 --- a/Cython/Compiler/ModuleNode.py +++ b/Cython/Compiler/ModuleNode.py @@ -2491,7 +2491,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): if Options.generate_cleanup_code: code.globalstate.use_utility_code( UtilityCode.load_cached("RegisterModuleCleanup", "ModuleSetupCode.c")) - code.putln("if (__Pyx_RegisterCleanup()) %s;" % 
code.error_goto(self.pos)) + code.putln("if (__Pyx_RegisterCleanup()) %s" % code.error_goto(self.pos)) code.put_goto(code.return_label) code.put_label(code.error_label) @@ -2835,7 +2835,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): code.error_goto_if_null(Naming.cython_runtime_cname, self.pos))) code.put_incref(Naming.cython_runtime_cname, py_object_type, nanny=False) code.putln( - 'if (PyObject_SetAttrString(%s, "__builtins__", %s) < 0) %s;' % ( + 'if (PyObject_SetAttrString(%s, "__builtins__", %s) < 0) %s' % ( env.module_cname, Naming.builtins_cname, code.error_goto(self.pos))) diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py index dcb51fe34..922c39367 100644 --- a/Cython/Compiler/PyrexTypes.py +++ b/Cython/Compiler/PyrexTypes.py @@ -1297,7 +1297,7 @@ class BuiltinObjectType(PyObjectType): name = '"%s"' % self.name # avoid wasting too much space but limit number of different format strings space_for_name = (len(self.name) // 16 + 1) * 16 - error = '(PyErr_Format(PyExc_TypeError, "Expected %%.%ds, got %%.200s", %s, Py_TYPE(%s)->tp_name), 0)' % ( + error = '((void)PyErr_Format(PyExc_TypeError, "Expected %%.%ds, got %%.200s", %s, Py_TYPE(%s)->tp_name), 0)' % ( space_for_name, name, arg) return check + '||' + error diff --git a/Cython/Utility/Exceptions.c b/Cython/Utility/Exceptions.c index 7896d40dc..2c87f720c 100644 --- a/Cython/Utility/Exceptions.c +++ b/Cython/Utility/Exceptions.c @@ -600,10 +600,8 @@ static void __Pyx_WriteUnraisable(const char *name, CYTHON_UNUSED int clineno, PyGILState_STATE state; if (nogil) state = PyGILState_Ensure(); -#ifdef _MSC_VER - /* arbitrary, to suppress warning */ - else state = (PyGILState_STATE)-1; -#endif + /* initalize to suppress warning */ + else state = (PyGILState_STATE)0; #endif __Pyx_PyThreadState_assign __Pyx_ErrFetch(&old_exc, &old_val, &old_tb); diff --git a/Cython/Utility/ObjectHandling.c b/Cython/Utility/ObjectHandling.c index 864b658f7..8b572ca6f 100644 --- 
a/Cython/Utility/ObjectHandling.c +++ b/Cython/Utility/ObjectHandling.c @@ -72,10 +72,9 @@ static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { Py_DECREF(retval); __Pyx_RaiseTooManyValuesError(expected); return -1; - } else { - return __Pyx_IterFinish(); } - return 0; + + return __Pyx_IterFinish(); } /////////////// UnpackTuple2.proto /////////////// @@ -285,7 +284,7 @@ static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* k #if CYTHON_USE_TYPE_SLOTS static PyObject *__Pyx_PyObject_GetIndex(PyObject *obj, PyObject* index) { - PyObject *runerr; + PyObject *runerr = NULL; Py_ssize_t key_value; PySequenceMethods *m = Py_TYPE(obj)->tp_as_sequence; if (unlikely(!(m && m->sq_item))) { @@ -1169,18 +1168,18 @@ static PyObject *__Pyx__GetNameInClass(PyObject *nmspace, PyObject *name) { //@substitute: naming #if CYTHON_USE_DICT_VERSIONS -#define __Pyx_GetModuleGlobalName(var, name) { \ +#define __Pyx_GetModuleGlobalName(var, name) do { \ static PY_UINT64_T __pyx_dict_version = 0; \ static PyObject *__pyx_dict_cached_value = NULL; \ (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION($moddict_cname))) ? \ (likely(__pyx_dict_cached_value) ? 
__Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) : \ __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value); \ -} -#define __Pyx_GetModuleGlobalNameUncached(var, name) { \ +} while(0) +#define __Pyx_GetModuleGlobalNameUncached(var, name) do { \ PY_UINT64_T __pyx_dict_version; \ PyObject *__pyx_dict_cached_value; \ (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value); \ -} +} while(0) static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); /*proto*/ #else #define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) @@ -1668,11 +1667,11 @@ static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* if (flag == METH_O) { return (*(cfunc->func))(self, arg); } else if (PY_VERSION_HEX >= 0x030600B1 && flag == METH_FASTCALL) { - if (PY_VERSION_HEX >= 0x030700A0) { + #if PY_VERSION_HEX >= 0x030700A0 return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, &arg, 1); - } else { + #else return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); - } + #endif } else if (PY_VERSION_HEX >= 0x030700A0 && flag == (METH_FASTCALL | METH_KEYWORDS)) { return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); } -- cgit v1.2.1 From 2e62a1560ac2241975534a8aaec4a3b4049591ba Mon Sep 17 00:00:00 2001 From: da-woods Date: Fri, 21 Oct 2022 13:23:03 +0100 Subject: docs: Recommend Cython 3 for pure-Python syntax (GH-5093) While this is officially in the "Cython 3" documentation, I think most users end up with this documentation. It'd be good to link the annotation typing mainly to Cython 3. 
--- docs/src/two-syntax-variants-used | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/src/two-syntax-variants-used b/docs/src/two-syntax-variants-used index af583a0a9..c5cd02cb1 100644 --- a/docs/src/two-syntax-variants-used +++ b/docs/src/two-syntax-variants-used @@ -16,3 +16,7 @@ .. code-block:: python import cython + + If you use the pure Python syntax we strongly recommend you use a recent + Cython 3 release, since significant improvements have been made here + compared to the 0.29.x releases. -- cgit v1.2.1 From b5b481132f6fa26c707504133b932e6bd0778c4f Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Sat, 22 Oct 2022 16:15:02 +0200 Subject: [Docs] Refactor and extend structs, unions, enums, function pointer documentation in language basics userguide (#5082) --- docs/examples/userguide/language_basics/enum.pyx | 11 ++ .../userguide/language_basics/function_pointer.pyx | 8 ++ .../language_basics/function_pointer_struct.pyx | 9 ++ docs/examples/userguide/language_basics/struct.py | 7 + docs/examples/userguide/language_basics/struct.pyx | 7 + .../userguide/language_basics/struct_union_enum.py | 7 - .../language_basics/struct_union_enum.pyx | 16 --- docs/examples/userguide/language_basics/union.py | 9 ++ docs/examples/userguide/language_basics/union.pyx | 9 ++ docs/src/userguide/language_basics.rst | 149 ++++++++++++--------- docs/src/userguide/memoryviews.rst | 1 + 11 files changed, 144 insertions(+), 89 deletions(-) create mode 100644 docs/examples/userguide/language_basics/enum.pyx create mode 100644 docs/examples/userguide/language_basics/function_pointer.pyx create mode 100644 docs/examples/userguide/language_basics/function_pointer_struct.pyx create mode 100644 docs/examples/userguide/language_basics/struct.py create mode 100644 docs/examples/userguide/language_basics/struct.pyx delete mode 100644 docs/examples/userguide/language_basics/struct_union_enum.py delete mode 100644 docs/examples/userguide/language_basics/struct_union_enum.pyx create mode 
100644 docs/examples/userguide/language_basics/union.py create mode 100644 docs/examples/userguide/language_basics/union.pyx diff --git a/docs/examples/userguide/language_basics/enum.pyx b/docs/examples/userguide/language_basics/enum.pyx new file mode 100644 index 000000000..1b5f5d614 --- /dev/null +++ b/docs/examples/userguide/language_basics/enum.pyx @@ -0,0 +1,11 @@ +cdef enum CheeseType: + cheddar, edam, + camembert + +cdef enum CheeseState: + hard = 1 + soft = 2 + runny = 3 + +print(CheeseType.cheddar) +print(CheeseState.hard) diff --git a/docs/examples/userguide/language_basics/function_pointer.pyx b/docs/examples/userguide/language_basics/function_pointer.pyx new file mode 100644 index 000000000..b345c62b4 --- /dev/null +++ b/docs/examples/userguide/language_basics/function_pointer.pyx @@ -0,0 +1,8 @@ +cdef int(*ptr_add)(int, int) + +cdef int add(int a, int b): + return a + b + +ptr_add = add + +print(ptr_add(1, 3)) diff --git a/docs/examples/userguide/language_basics/function_pointer_struct.pyx b/docs/examples/userguide/language_basics/function_pointer_struct.pyx new file mode 100644 index 000000000..5ef618961 --- /dev/null +++ b/docs/examples/userguide/language_basics/function_pointer_struct.pyx @@ -0,0 +1,9 @@ +cdef struct Bar: + int sum(int a, int b) + +cdef int add(int a, int b): + return a + b + +cdef Bar bar = Bar(add) + +print(bar.sum(1, 2)) diff --git a/docs/examples/userguide/language_basics/struct.py b/docs/examples/userguide/language_basics/struct.py new file mode 100644 index 000000000..32b6b252a --- /dev/null +++ b/docs/examples/userguide/language_basics/struct.py @@ -0,0 +1,7 @@ +Grail = cython.struct( + age=cython.int, + volume=cython.float) + +def main(): + grail: Grail = Grail(5, 3.0) + print(grail.age, grail.volume) diff --git a/docs/examples/userguide/language_basics/struct.pyx b/docs/examples/userguide/language_basics/struct.pyx new file mode 100644 index 000000000..3ef79172b --- /dev/null +++ 
b/docs/examples/userguide/language_basics/struct.pyx @@ -0,0 +1,7 @@ +cdef struct Grail: + int age + float volume + +def main(): + cdef Grail grail = Grail(5, 3.0) + print(grail.age, grail.volume) diff --git a/docs/examples/userguide/language_basics/struct_union_enum.py b/docs/examples/userguide/language_basics/struct_union_enum.py deleted file mode 100644 index b78c0aa02..000000000 --- a/docs/examples/userguide/language_basics/struct_union_enum.py +++ /dev/null @@ -1,7 +0,0 @@ -Grail = cython.struct( - age=cython.int, - volume=cython.float) - -Food = cython.union( - spam=cython.p_char, - eggs=cython.p_float) diff --git a/docs/examples/userguide/language_basics/struct_union_enum.pyx b/docs/examples/userguide/language_basics/struct_union_enum.pyx deleted file mode 100644 index af9b06d9a..000000000 --- a/docs/examples/userguide/language_basics/struct_union_enum.pyx +++ /dev/null @@ -1,16 +0,0 @@ -cdef struct Grail: - int age - float volume - -cdef union Food: - char *spam - float *eggs - -cdef enum CheeseType: - cheddar, edam, - camembert - -cdef enum CheeseState: - hard = 1 - soft = 2 - runny = 3 diff --git a/docs/examples/userguide/language_basics/union.py b/docs/examples/userguide/language_basics/union.py new file mode 100644 index 000000000..efcda358b --- /dev/null +++ b/docs/examples/userguide/language_basics/union.py @@ -0,0 +1,9 @@ +Food = cython.union( + spam=cython.p_char, + eggs=cython.p_float) + +def main(): + arr: cython.p_float = [1.0, 2.0] + spam: Food = Food(spam='b') + eggs: Food = Food(eggs=arr) + print(spam.spam, eggs.eggs[0]) diff --git a/docs/examples/userguide/language_basics/union.pyx b/docs/examples/userguide/language_basics/union.pyx new file mode 100644 index 000000000..e05f63fcc --- /dev/null +++ b/docs/examples/userguide/language_basics/union.pyx @@ -0,0 +1,9 @@ +cdef union Food: + char *spam + float *eggs + +def main(): + cdef float *arr = [1.0, 2.0] + cdef Food spam = Food(spam='b') + cdef Food eggs = Food(eggs=arr) + print(spam.spam, 
eggs.eggs[0]) diff --git a/docs/src/userguide/language_basics.rst b/docs/src/userguide/language_basics.rst index ad8e9d85d..98553f542 100644 --- a/docs/src/userguide/language_basics.rst +++ b/docs/src/userguide/language_basics.rst @@ -128,51 +128,6 @@ the declaration in most cases: cdef float *g = [1, 2, 3, 4] cdef float *h = &f -In addition to the basic types, C :keyword:`struct`, :keyword:`union` and :keyword:`enum` -are supported: - -.. tabs:: - - .. group-tab:: Pure Python - - .. literalinclude:: ../../examples/userguide/language_basics/struct_union_enum.py - - .. note:: Currently, Pure Python mode does not support enums. (GitHub issue :issue:`4252`) - - .. group-tab:: Cython - - .. literalinclude:: ../../examples/userguide/language_basics/struct_union_enum.pyx - - See also :ref:`struct-union-enum-styles` - - .. note:: - - Structs can be declared as ``cdef packed struct``, which has - the same effect as the C directive ``#pragma pack(1)``. - - Declaring an enum as ``cpdef`` will create a :pep:`435`-style Python wrapper:: - - cpdef enum CheeseState: - hard = 1 - soft = 2 - runny = 3 - - There is currently no special syntax for defining a constant, but you can use - an anonymous :keyword:`enum` declaration for this purpose, for example,:: - - cdef enum: - tons_of_spam = 3 - - .. note:: - the words ``struct``, ``union`` and ``enum`` are used only when - defining a type, not when referring to it. For example, to declare a variable - pointing to a ``Grail`` struct, you would write:: - - cdef Grail *gp - - and not:: - - cdef struct Grail *gp # WRONG .. note:: @@ -197,46 +152,82 @@ are supported: ctypedef int* IntPtr +.. _structs: -You can create a C function by declaring it with :keyword:`cdef` or by decorating a Python function with ``@cfunc``: +Structs, Unions, Enums +---------------------- + +In addition to the basic types, C :keyword:`struct`, :keyword:`union` and :keyword:`enum` +are supported: .. tabs:: .. group-tab:: Pure Python - .. 
code-block:: python - - @cython.cfunc - def eggs(l: cython.ulong, f: cython.float) -> cython.int: - ... + .. literalinclude:: ../../examples/userguide/language_basics/struct.py .. group-tab:: Cython - .. code-block:: cython + .. literalinclude:: ../../examples/userguide/language_basics/struct.pyx - cdef int eggs(unsigned long l, float f): - ... +Structs can be declared as ``cdef packed struct``, which has +the same effect as the C directive ``#pragma pack(1)``:: -You can read more about them in :ref:`python_functions_vs_c_functions`. + cdef packed struct StructArray: + int spam[4] + signed char eggs[5] -Classes can be declared as :ref:`extension-types`. Those will -have a behavior very close to python classes, but are faster because they use a ``struct`` -internally to store attributes. -They are declared with the :keyword:`cdef` keyword or the ``@cclass`` class decorator. +.. note:: + This declaration removes the empty + space between members that C automatically to ensure that they're aligned in memory + (see `Wikipedia article `_ for more details). + The main use is that numpy structured arrays store their data in packed form, so a ``cdef packed struct`` + can be :ref:`used in a memoryview` to match that. -Here is a simple example: + Pure python mode does not support packed structs. + +The following example shows a declaration of unions: .. tabs:: .. group-tab:: Pure Python - .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.py + .. literalinclude:: ../../examples/userguide/language_basics/union.py .. group-tab:: Cython - .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.pyx + .. literalinclude:: ../../examples/userguide/language_basics/union.pyx -You can read more about them in :ref:`extension-types`. +Enums are created by ``cdef enum`` statement: + +.. literalinclude:: ../../examples/userguide/language_basics/enum.pyx + + +.. note:: Currently, Pure Python mode does not support enums. 
(GitHub issue :issue:`4252`) + +Declaring an enum as ``cpdef`` will create a :pep:`435`-style Python wrapper:: + + cpdef enum CheeseState: + hard = 1 + soft = 2 + runny = 3 + +There is currently no special syntax for defining a constant, but you can use +an anonymous :keyword:`enum` declaration for this purpose, for example,:: + + cdef enum: + tons_of_spam = 3 + +.. note:: + In the Cython syntax, the words ``struct``, ``union`` and ``enum`` are used only when + defining a type, not when referring to it. For example, to declare a variable + pointing to a ``Grail`` struct, you would write:: + + cdef Grail *gp + + and not:: + + cdef struct Grail *gp # WRONG .. _typing_types: @@ -326,12 +317,30 @@ and is typically what one wants). If you want to use these numeric Python types simply omit the type declaration and let them be objects. +Extension Types +--------------- + It is also possible to declare :ref:`extension-types` (declared with ``cdef class`` or the ``@cclass`` decorator). -This does allow subclasses. This typing is mostly used to access -``cdef``/``@cfunc`` methods and attributes of the extension type. +Those will have a behaviour very close to python classes (e.g. creating subclasses), +but access to their members is faster from Cython code. Typing a variable +as extension type is mostly used to access ``cdef``/``@cfunc`` methods and attributes of the extension type. The C code uses a variable which is a pointer to a structure of the specific type, something like ``struct MyExtensionTypeObject*``. +Here is a simple example: + +.. tabs:: + + .. group-tab:: Pure Python + + .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.py + + .. group-tab:: Cython + + .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.pyx + +You can read more about them in :ref:`extension-types`. + Grouping multiple C declarations -------------------------------- @@ -634,7 +643,15 @@ parameters and has two required keyword parameters. 
Function Pointers ----------------- -Functions declared in a ``struct`` are automatically converted to function pointers. +.. note:: Pointers to functions are currently not supported by pure Python mode. (GitHub issue :issue:`4279`) + +The following example shows declaring a ``ptr_add`` function pointer and assigning the ``add`` function to it: + +.. literalinclude:: ../../examples/userguide/language_basics/function_pointer.pyx + +Functions declared in a ``struct`` are automatically converted to function pointers: + +.. literalinclude:: ../../examples/userguide/language_basics/function_pointer_struct.pyx For using error return values with function pointers, see the note at the bottom of :ref:`error_return_values`. diff --git a/docs/src/userguide/memoryviews.rst b/docs/src/userguide/memoryviews.rst index 1a0a0b282..285cc67ea 100644 --- a/docs/src/userguide/memoryviews.rst +++ b/docs/src/userguide/memoryviews.rst @@ -42,6 +42,7 @@ This code should give the following output:: Memoryview sum of Cython array is 1351 Memoryview sum of C memoryview is 451 +.. _using_memoryviews: Using memoryviews ================= -- cgit v1.2.1 From 1f65318a60660a2c4f07852e1341c1d48e19ea28 Mon Sep 17 00:00:00 2001 From: da-woods Date: Wed, 26 Oct 2022 09:01:39 +0100 Subject: Restore 'int'->'object' in type annotations for now (GH-5057) a44bbd363029aa9ba16fefcb485c68162f8ab663 changed an `int` annotation so that it was interpreted as a Python `int` instead of a Python object, which potentially breaks the usage of large (long) integers on Python 2. I think this was unintentional so I've added the special-cases for this in. (I'm happy with this PR being rejected though if it was intentional since I don't *really* care that much about Python 2). 
See https://github.com/cython/cython/issues/4944 --- Cython/Compiler/Builtin.py | 3 +++ Cython/Compiler/ExprNodes.py | 9 +++++++ docs/src/userguide/migrating_to_cy30.rst | 4 ---- tests/run/annotation_typing.pyx | 41 +++++++++++++++++++------------- 4 files changed, 37 insertions(+), 20 deletions(-) diff --git a/Cython/Compiler/Builtin.py b/Cython/Compiler/Builtin.py index 3843f12df..26fd68ff6 100644 --- a/Cython/Compiler/Builtin.py +++ b/Cython/Compiler/Builtin.py @@ -427,6 +427,7 @@ def init_builtins(): global list_type, tuple_type, dict_type, set_type, frozenset_type global bytes_type, str_type, unicode_type, basestring_type, slice_type global float_type, long_type, bool_type, type_type, complex_type, bytearray_type + global int_type type_type = builtin_scope.lookup('type').type list_type = builtin_scope.lookup('list').type tuple_type = builtin_scope.lookup('tuple').type @@ -443,6 +444,8 @@ def init_builtins(): long_type = builtin_scope.lookup('long').type bool_type = builtin_scope.lookup('bool').type complex_type = builtin_scope.lookup('complex').type + # Be careful with int type while Py2 is still supported + int_type = builtin_scope.lookup('int').type # Set up type inference links between equivalent Python/C types bool_type.equivalent_type = PyrexTypes.c_bint_type diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 7a98429d4..bb7629496 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -1538,6 +1538,11 @@ def _analyse_name_as_type(name, pos, env): global_entry = env.global_scope().lookup(name) if global_entry and global_entry.is_type: type = global_entry.type + if (not env.in_c_type_context and + name == 'int' and type is Builtin.int_type): + # While we still support Python2 this needs to be downgraded + # to a generic Python object to include both int and long + type = py_object_type if type and (type.is_pyobject or env.in_c_type_context): return type ctype = ctype or type @@ -2119,6 +2124,10 @@ class 
NameNode(AtomicExprNode): type = py_object_type elif type.is_pyobject and type.equivalent_type: type = type.equivalent_type + elif type is Builtin.int_type: + # while we still support Python 2 this must be an object + # so that it can be either int or long + type = py_object_type return type if self.name == 'object': # This is normally parsed as "simple C type", but not if we don't parse C types. diff --git a/docs/src/userguide/migrating_to_cy30.rst b/docs/src/userguide/migrating_to_cy30.rst index 292a2e943..4576ce864 100644 --- a/docs/src/userguide/migrating_to_cy30.rst +++ b/docs/src/userguide/migrating_to_cy30.rst @@ -218,10 +218,6 @@ annotations and it is well worth reading :ref:`the pure Python tutorial` to understand some of the improvements. -A notable backwards-compatible change is that ``x: int`` is now typed -such that ``x`` is an exact Python ``int`` (Cython 0.29 would accept -any Python object for ``x``). - To make it easier to handle cases where your interpretation of type annotations differs from Cython's, Cython 3 now supports setting the ``annotation_typing`` :ref:`directive ` on a diff --git a/tests/run/annotation_typing.pyx b/tests/run/annotation_typing.pyx index ce74ef1dd..4b6b2da16 100644 --- a/tests/run/annotation_typing.pyx +++ b/tests/run/annotation_typing.pyx @@ -14,10 +14,10 @@ except ImportError: def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'type': 'long int'} = 4) -> list: """ >>> old_dict_syntax([1]) - ('list object', 'int object', 'long', 'long') + ('list object', 'Python object', 'long', 'long') [1, 2, 3, 4] >>> old_dict_syntax([1], 3) - ('list object', 'int object', 'long', 'long') + ('list object', 'Python object', 'long', 'long') [1, 3, 3, 4] >>> old_dict_syntax(123) Traceback (most recent call last): @@ -36,13 +36,13 @@ def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'ty def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None, o: Optional[tuple] = 
()) -> list: """ >>> pytypes_def([1]) - ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') [1, 2, 3, 4.0, None, ()] >>> pytypes_def([1], 3) - ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') [1, 3, 3, 4.0, None, ()] >>> pytypes_def([1], 3, 2, 1, [], None) - ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') [1, 3, 2, 1.0, [], None] >>> pytypes_def(123) Traceback (most recent call last): @@ -63,13 +63,13 @@ def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None cpdef pytypes_cpdef(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None, o: Optional[tuple] = ()): """ >>> pytypes_cpdef([1]) - ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') [1, 2, 3, 4.0, None, ()] >>> pytypes_cpdef([1], 3) - ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') [1, 3, 3, 4.0, None, ()] >>> pytypes_cpdef([1], 3, 2, 1, [], None) - ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object') + ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object') [1, 3, 2, 1.0, [], None] >>> pytypes_cpdef(123) Traceback (most recent call last): @@ -99,10 +99,10 @@ cdef c_pytypes_cdef(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = def pytypes_cdef(a, b=2, c=3, d=4): """ >>> pytypes_cdef([1]) - ('list object', 'int object', 'Python object', 'double', 'list 
object') + ('list object', 'Python object', 'Python object', 'double', 'list object') [1, 2, 3, 4.0, None] >>> pytypes_cdef([1], 3) - ('list object', 'int object', 'Python object', 'double', 'list object') + ('list object', 'Python object', 'Python object', 'double', 'list object') [1, 3, 3, 4.0, None] >>> pytypes_cdef(123) # doctest: +ELLIPSIS Traceback (most recent call last): @@ -111,6 +111,15 @@ def pytypes_cdef(a, b=2, c=3, d=4): return c_pytypes_cdef(a, b, c, d) +def pyint(a: int): + """ + >>> large_int = eval('0x'+'F'*64) # definitely bigger than C int64 + >>> pyint(large_int) == large_int + True + """ + return a + + def ctypes_def(a: list, b: cython.int = 2, c: cython.long = 3, d: cython.float = 4) -> list: """ >>> ctypes_def([1]) @@ -372,14 +381,14 @@ _WARNINGS = """ 63:70: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None. 90:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'? 90:70: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None. -274:44: Unknown type declaration in annotation, ignoring -302:15: Annotation ignored since class-level attributes must be Python objects. Were you trying to set up an instance attribute? +283:44: Unknown type declaration in annotation, ignoring +311:15: Annotation ignored since class-level attributes must be Python objects. Were you trying to set up an instance attribute? # DUPLICATE: 63:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'? 
# BUG: 63:6: 'pytypes_cpdef' redeclared -146:0: 'struct_io' redeclared -181:0: 'struct_convert' redeclared -200:0: 'exception_default' redeclared -231:0: 'exception_default_uint' redeclared +155:0: 'struct_io' redeclared +190:0: 'struct_convert' redeclared +209:0: 'exception_default' redeclared +240:0: 'exception_default_uint' redeclared """ -- cgit v1.2.1 From a9f914969f3057b8695bfc6218a1e434d9119447 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Fri, 21 Oct 2022 09:07:21 +0200 Subject: Update changelog. --- CHANGES.rst | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 3146736d0..15e01ac39 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -64,6 +64,10 @@ Bugs fixed * Nesting fused types in other fused types could fail to specialise the inner type. (Github issue :issue:`4725`) +* The special methods ``__matmul__``, ``__truediv__``, ``__floordiv__`` failed to type + their ``self`` argument. + (Github issue :issue:`5067`) + * Coverage analysis failed in projects with a separate source subdirectory. Patch by Sviatoslav Sydorenko and Ruben Vorderman. (Github issue :issue:`3636`) @@ -78,10 +82,10 @@ Bugs fixed (Github issue :issue:`5026`) * Relative imports failed in compiled ``__init__.py`` package modules. - Patches by Matus Valo. (Github issue :issue:`4941`) + Patch by Matus Valo. (Github issue :issue:`3442`) * Some old usages of the deprecated Python ``imp`` module were replaced with ``importlib``. - Patches by Matus Valo. (Github issue :issue:`4941`) + Patch by Matus Valo. (Github issue :issue:`4640`) * Invalid and misspelled ``cython.*`` module names were not reported as errors. (Github issue :issue:`4947`) @@ -104,6 +108,11 @@ Bugs fixed Other changes ------------- +* The undocumented, untested and apparently useless syntax + ``from somemodule cimport class/struct/union somename`` was removed. The type + modifier is not needed here and a plain ``cimport`` of the name will do. 
+ (Github issue :issue:`4904`) + * The wheel building process was migrated to use the ``cibuildwheel`` tool. Patch by Thomas Li. (Github issue :issue:`4736`) -- cgit v1.2.1 From 2aa219d6268618b2b56879d70e9f09544d3c48fe Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Fri, 21 Oct 2022 09:25:29 +0200 Subject: Add missing transitive test dependency for IPython/Jupyter integration. --- test-requirements-cpython.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/test-requirements-cpython.txt b/test-requirements-cpython.txt index 1cfae040b..28db037b2 100644 --- a/test-requirements-cpython.txt +++ b/test-requirements-cpython.txt @@ -1,3 +1,4 @@ jupyter +pytest # needed by IPython/Jupyter integration tests line_profiler setuptools<60 -- cgit v1.2.1 From c19f0b872bc2a469777a7e65118a428ab0adffd3 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 26 Oct 2022 10:15:44 +0200 Subject: Change the type check for 'x: int' to 'isinstance(x, (int, long))' in Py2, to also accept large integers there (which is probably intended, given Py3 annotation semantics). --- Cython/Compiler/PyrexTypes.py | 3 +++ Cython/Utility/ModuleSetupCode.c | 5 +++++ 2 files changed, 8 insertions(+) diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py index da30809a3..4061b3b0b 100644 --- a/Cython/Compiler/PyrexTypes.py +++ b/Cython/Compiler/PyrexTypes.py @@ -1456,6 +1456,9 @@ class BuiltinObjectType(PyObjectType): type_check = 'PyByteArray_Check' elif type_name == 'frozenset': type_check = 'PyFrozenSet_Check' + elif type_name == 'int': + # For backwards compatibility of (Py3) 'x: int' annotations in Py2, we also allow 'long' there. 
+ type_check = '__Pyx_Py3Int_Check' else: type_check = 'Py%s_Check' % type_name.capitalize() if exact and type_name not in ('bool', 'slice', 'Exception'): diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c index fa16485b5..af1680bd6 100644 --- a/Cython/Utility/ModuleSetupCode.c +++ b/Cython/Utility/ModuleSetupCode.c @@ -1142,6 +1142,8 @@ static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, #define PyInt_Type PyLong_Type #define PyInt_Check(op) PyLong_Check(op) #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define __Pyx_Py3Int_Check(op) PyLong_Check(op) + #define __Pyx_Py3Int_CheckExact(op) PyLong_CheckExact(op) #define PyInt_FromString PyLong_FromString #define PyInt_FromUnicode PyLong_FromUnicode #define PyInt_FromLong PyLong_FromLong @@ -1153,6 +1155,9 @@ static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask #define PyNumber_Int PyNumber_Long +#else + #define __Pyx_Py3Int_Check(op) (PyLong_Check(op) || PyInt_Check(op)) + #define __Pyx_Py3Int_CheckExact(op) (PyLong_CheckExact(op) || PyInt_CheckExact(op)) #endif #if PY_MAJOR_VERSION >= 3 -- cgit v1.2.1 From 0e1d4bc61225bf382abb5d27399e3fe339552f6e Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Wed, 26 Oct 2022 13:06:44 +0200 Subject: Revert "Change the type check for 'x: int' to 'isinstance(x, (int, long))' in Py2, to also accept large integers there (which is probably intended, given Py3 annotation semantics)." This reverts commit c19f0b872bc2a469777a7e65118a428ab0adffd3. 
--- Cython/Compiler/PyrexTypes.py | 3 --- Cython/Utility/ModuleSetupCode.c | 5 ----- 2 files changed, 8 deletions(-) diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py index 4061b3b0b..da30809a3 100644 --- a/Cython/Compiler/PyrexTypes.py +++ b/Cython/Compiler/PyrexTypes.py @@ -1456,9 +1456,6 @@ class BuiltinObjectType(PyObjectType): type_check = 'PyByteArray_Check' elif type_name == 'frozenset': type_check = 'PyFrozenSet_Check' - elif type_name == 'int': - # For backwards compatibility of (Py3) 'x: int' annotations in Py2, we also allow 'long' there. - type_check = '__Pyx_Py3Int_Check' else: type_check = 'Py%s_Check' % type_name.capitalize() if exact and type_name not in ('bool', 'slice', 'Exception'): diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c index af1680bd6..fa16485b5 100644 --- a/Cython/Utility/ModuleSetupCode.c +++ b/Cython/Utility/ModuleSetupCode.c @@ -1142,8 +1142,6 @@ static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, #define PyInt_Type PyLong_Type #define PyInt_Check(op) PyLong_Check(op) #define PyInt_CheckExact(op) PyLong_CheckExact(op) - #define __Pyx_Py3Int_Check(op) PyLong_Check(op) - #define __Pyx_Py3Int_CheckExact(op) PyLong_CheckExact(op) #define PyInt_FromString PyLong_FromString #define PyInt_FromUnicode PyLong_FromUnicode #define PyInt_FromLong PyLong_FromLong @@ -1155,9 +1153,6 @@ static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask #define PyNumber_Int PyNumber_Long -#else - #define __Pyx_Py3Int_Check(op) (PyLong_Check(op) || PyInt_Check(op)) - #define __Pyx_Py3Int_CheckExact(op) (PyLong_CheckExact(op) || PyInt_CheckExact(op)) #endif #if PY_MAJOR_VERSION >= 3 -- cgit v1.2.1 From 1d957b2b33ab9a6b86d1fdad75cda705fb8d9e4f Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Thu, 27 Oct 2022 09:24:32 +0200 
Subject: Provide better debug output on crashes in CI. --- runtests.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/runtests.py b/runtests.py index b12eec328..7b0b74c30 100755 --- a/runtests.py +++ b/runtests.py @@ -2546,12 +2546,17 @@ def configure_cython(options): CompilationOptions, \ default_options as pyrex_default_options from Cython.Compiler.Options import _directive_defaults as directive_defaults + from Cython.Compiler import Errors Errors.LEVEL = 0 # show all warnings + from Cython.Compiler import Options Options.generate_cleanup_code = 3 # complete cleanup code + from Cython.Compiler import DebugFlags DebugFlags.debug_temp_code_comments = 1 + DebugFlags.debug_no_exception_intercept = 1 # provide better crash output in CI runs + pyrex_default_options['formal_grammar'] = options.use_formal_grammar if options.profile: directive_defaults['profile'] = True -- cgit v1.2.1 From 7b0a8a921fec5ec2dec087a9aabfe3be1722c6a0 Mon Sep 17 00:00:00 2001 From: Jonathan Helgert Date: Sat, 29 Oct 2022 14:08:57 +0200 Subject: Add missing C++17/20 STL functions and methods (#5101) * Add .contains() method for associated containers * Add string::starts_with and ends_with methods * Add missing C++17/20 numeric functions --- Cython/Includes/libcpp/map.pxd | 3 + Cython/Includes/libcpp/numeric.pxd | 7 ++ Cython/Includes/libcpp/set.pxd | 4 + Cython/Includes/libcpp/string.pxd | 9 ++ Cython/Includes/libcpp/unordered_map.pxd | 4 + Cython/Includes/libcpp/unordered_set.pxd | 4 + ...pp_stl_associated_containers_contains_cpp20.pyx | 106 +++++++++++++++++++++ tests/run/cpp_stl_numeric_ops_cpp17.pyx | 18 +++- tests/run/cpp_stl_numeric_ops_cpp20.pyx | 23 +++++ tests/run/cpp_stl_string_cpp20.pyx | 61 ++++++++++++ 10 files changed, 238 insertions(+), 1 deletion(-) create mode 100644 tests/run/cpp_stl_associated_containers_contains_cpp20.pyx create mode 100644 tests/run/cpp_stl_numeric_ops_cpp20.pyx create mode 100644 tests/run/cpp_stl_string_cpp20.pyx diff --git 
a/Cython/Includes/libcpp/map.pxd b/Cython/Includes/libcpp/map.pxd index 2f8238f14..d81af66e0 100644 --- a/Cython/Includes/libcpp/map.pxd +++ b/Cython/Includes/libcpp/map.pxd @@ -121,6 +121,8 @@ cdef extern from "" namespace "std" nogil: iterator upper_bound(const T&) const_iterator const_upper_bound "upper_bound"(const T&) #value_compare value_comp() + # C++20 + bint contains(const T&) cdef cppclass multimap[T, U, COMPARE=*, ALLOCATOR=*]: ctypedef T key_type @@ -239,3 +241,4 @@ cdef extern from "" namespace "std" nogil: iterator upper_bound(const T&) const_iterator const_upper_bound "upper_bound"(const T&) #value_compare value_comp() + bint contains(const T&) diff --git a/Cython/Includes/libcpp/numeric.pxd b/Cython/Includes/libcpp/numeric.pxd index 670c6cfe8..0335a0bac 100644 --- a/Cython/Includes/libcpp/numeric.pxd +++ b/Cython/Includes/libcpp/numeric.pxd @@ -122,3 +122,10 @@ cdef extern from "" namespace "std" nogil: ForwardIt2 transform_exclusive_scan[ExecutionPolicy, ForwardIt1, ForwardIt2, T, BinaryOperation, UnaryOperation]( ExecutionPolicy&& policy, ForwardIt1 first, ForwardIt1 last, ForwardIt2 d_first, T init, BinaryOperation binary_op, UnaryOperation unary_op) + + # C++17 + T gcd[T](T a, T b) + T lcm[T](T a, T b) + + # C++20 + T midpoint[T](T a, T b) except + \ No newline at end of file diff --git a/Cython/Includes/libcpp/set.pxd b/Cython/Includes/libcpp/set.pxd index 8ba47cb7f..7e6449ca2 100644 --- a/Cython/Includes/libcpp/set.pxd +++ b/Cython/Includes/libcpp/set.pxd @@ -112,6 +112,8 @@ cdef extern from "" namespace "std" nogil: iterator upper_bound(const T&) const_iterator const_upper_bound "upper_bound"(const T&) #value_compare value_comp() + # C++20 + bint contains(const T&) cdef cppclass multiset[T]: ctypedef T value_type @@ -222,3 +224,5 @@ cdef extern from "" namespace "std" nogil: void swap(multiset&) iterator upper_bound(const T&) const_iterator const_upper_bound "upper_bound"(const T&) + # C++20 + bint contains(const T&) diff --git 
a/Cython/Includes/libcpp/string.pxd b/Cython/Includes/libcpp/string.pxd index 0fee703ea..23518806a 100644 --- a/Cython/Includes/libcpp/string.pxd +++ b/Cython/Includes/libcpp/string.pxd @@ -251,6 +251,15 @@ cdef extern from "" namespace "std" nogil: string substr(size_t pos) except + string substr() + # C++20 + bint starts_with(char c) except + + bint starts_with(const char* s) + bint ends_with(char c) except + + bint ends_with(const char* s) + # C++23 + bint contains(char c) except + + bint contains(const char* s) + #string& operator= (const string&) #string& operator= (const char*) #string& operator= (char) diff --git a/Cython/Includes/libcpp/unordered_map.pxd b/Cython/Includes/libcpp/unordered_map.pxd index 05f3338fa..61d11b0be 100644 --- a/Cython/Includes/libcpp/unordered_map.pxd +++ b/Cython/Includes/libcpp/unordered_map.pxd @@ -95,6 +95,8 @@ cdef extern from "" namespace "std" nogil: size_t max_bucket_count() size_t bucket_size(size_t) size_t bucket(const T&) + # C++20 + bint contains(const T&) cdef cppclass unordered_multimap[T, U, HASH=*, PRED=*, ALLOCATOR=*]: ctypedef T key_type @@ -186,3 +188,5 @@ cdef extern from "" namespace "std" nogil: size_t max_bucket_count() size_t bucket_size(size_t) size_t bucket(const T&) + # C++20 + bint contains(const T&) diff --git a/Cython/Includes/libcpp/unordered_set.pxd b/Cython/Includes/libcpp/unordered_set.pxd index f3fdfb56e..6aae890d9 100644 --- a/Cython/Includes/libcpp/unordered_set.pxd +++ b/Cython/Includes/libcpp/unordered_set.pxd @@ -75,6 +75,8 @@ cdef extern from "" namespace "std" nogil: size_t max_bucket_count() size_t bucket_size(size_t) size_t bucket(const T&) + # C++20 + bint contains(const T&) cdef cppclass unordered_multiset[T,HASH=*,PRED=*,ALLOCATOR=*]: ctypedef T value_type @@ -146,3 +148,5 @@ cdef extern from "" namespace "std" nogil: size_t max_bucket_count() size_t bucket_size(size_t) size_t bucket(const T&) + # C++20 + bint contains(const T&) diff --git 
a/tests/run/cpp_stl_associated_containers_contains_cpp20.pyx b/tests/run/cpp_stl_associated_containers_contains_cpp20.pyx new file mode 100644 index 000000000..ebe8d8fa8 --- /dev/null +++ b/tests/run/cpp_stl_associated_containers_contains_cpp20.pyx @@ -0,0 +1,106 @@ +# mode: run +# tag: cpp, cpp20 + +# cython: language_level=3 + +from libcpp.map cimport map, multimap +from libcpp.set cimport set, multiset +from libcpp.unordered_map cimport unordered_map, unordered_multimap +from libcpp.unordered_set cimport unordered_set, unordered_multiset + +def test_map_contains(vals, int key_to_find): + """ + >>> test_map_contains([(1,100),(2,200),(3,300)], 3) + True + >>> test_map_contains([(1,100),(2,200),(3,300)], 4) + False + """ + cdef map[int,int] m = map[int, int]() + for v in vals: + m.insert(v) + return m.contains(key_to_find) + +def test_unordered_map_contains(vals, int key_to_find): + """ + >>> test_unordered_map_contains([(1,100),(2,200),(3,300)], 3) + True + >>> test_unordered_map_contains([(1,100),(2,200),(3,300)], 4) + False + """ + cdef unordered_map[int,int] um = unordered_map[int, int]() + for v in vals: + um.insert(v) + return um.contains(key_to_find) + +def test_multimap_contains(vals, int key_to_find): + """ + >>> test_multimap_contains([(1,100),(2,200),(3,300)], 3) + True + >>> test_multimap_contains([(1,100),(2,200),(3,300)], 4) + False + """ + cdef multimap[int,int] mm = multimap[int, int]() + for v in vals: + mm.insert(v) + return mm.contains(key_to_find) + +def test_unordered_multimap_contains(vals, int key_to_find): + """ + >>> test_unordered_multimap_contains([(1,100),(2,200),(3,300)], 3) + True + >>> test_unordered_multimap_contains([(1,100),(2,200),(3,300)], 4) + False + """ + cdef unordered_multimap[int,int] umm = unordered_multimap[int, int]() + for v in vals: + umm.insert(v) + return umm.contains(key_to_find) + + +def test_set_contains(vals, int val_to_find): + """ + >>> test_set_contains([1, 2, 3], 3) + True + >>> test_set_contains([1, 2, 3], 
4) + False + """ + cdef set[int] s = set[int]() + for v in vals: + s.insert(v) + return s.contains(val_to_find) + +def test_unordered_set_contains(vals, int val_to_find): + """ + >>> test_unordered_set_contains([1, 2, 3], 3) + True + >>> test_unordered_set_contains([1, 2, 3], 4) + False + """ + cdef unordered_set[int] us = unordered_set[int]() + for v in vals: + us.insert(v) + return us.contains(val_to_find) + +def test_multiset_contains(vals, int val_to_find): + """ + >>> test_multiset_contains([1, 2, 3], 3) + True + >>> test_multiset_contains([1, 2, 3], 4) + False + """ + cdef multiset[int] ms = multiset[int]() + for v in vals: + ms.insert(v) + return ms.contains(val_to_find) + +def test_unordered_multiset_contains(vals, int val_to_find): + """ + >>> test_unordered_multiset_contains([1, 2, 3], 3) + True + >>> test_unordered_multiset_contains([1, 2, 3], 4) + False + """ + cdef unordered_multiset[int] ums = unordered_multiset[int]() + for v in vals: + ums.insert(v) + return ums.contains(val_to_find) diff --git a/tests/run/cpp_stl_numeric_ops_cpp17.pyx b/tests/run/cpp_stl_numeric_ops_cpp17.pyx index eba4d2beb..e89540d35 100644 --- a/tests/run/cpp_stl_numeric_ops_cpp17.pyx +++ b/tests/run/cpp_stl_numeric_ops_cpp17.pyx @@ -3,7 +3,7 @@ from libcpp.numeric cimport (reduce, transform_reduce, inclusive_scan, exclusive_scan, transform_inclusive_scan, - transform_exclusive_scan) + transform_exclusive_scan, gcd, lcm) from libcpp.execution cimport seq from libcpp.vector cimport vector @@ -275,3 +275,19 @@ def test_transform_exclusive_scan_with_execpolicy(vector[int] v, int init): cdef vector[int] out = vector[int](v.size()) transform_exclusive_scan(seq, v.begin(), v.end(), out.begin(), init, add_integers, multiply_with_2) return out + +def test_gcd(int a, int b): + """ + Test gcd + >>> test_gcd(12, 18) + 6 + """ + return gcd[int](a, b) + +def test_lcm(int a, int b): + """ + Test lcm + >>> test_lcm(45, 75) + 225 + """ + return lcm[int](a, b) \ No newline at end of file diff 
--git a/tests/run/cpp_stl_numeric_ops_cpp20.pyx b/tests/run/cpp_stl_numeric_ops_cpp20.pyx new file mode 100644 index 000000000..e3a8c01df --- /dev/null +++ b/tests/run/cpp_stl_numeric_ops_cpp20.pyx @@ -0,0 +1,23 @@ +# mode: run +# tag: cpp, werror, cpp20 + +from libcpp.numeric cimport midpoint + +def test_midpoint_integer(int a, int b): + """ + Test midpoint for integer types + >>> test_midpoint_integer(2, 6) + 4 + """ + cdef int res = midpoint[int](a, b) + return res + + +def test_midpoint_float(float a, float b): + """ + Test midpoint for float + >>> test_midpoint_float(2, 6) + 4.0 + """ + cdef float res = midpoint[float](a, b) + return res diff --git a/tests/run/cpp_stl_string_cpp20.pyx b/tests/run/cpp_stl_string_cpp20.pyx new file mode 100644 index 000000000..f3a2b80d1 --- /dev/null +++ b/tests/run/cpp_stl_string_cpp20.pyx @@ -0,0 +1,61 @@ +# mode: run +# tag: cpp, werror, cpp20 + +from libcpp cimport bool +from libcpp.string cimport string + +b_A = b'A' +b_F = b'F' +b_abc = b"ABC" +b_def = b"DEF" + +def test_string_starts_with_char(bytes py_str): + """ + Test std::string.starts_with() with char type argument + >>> test_string_starts_with_char(b'A') + True + >>> test_string_starts_with_char(b'F') + False + """ + cdef char c = py_str[0] + cdef string s = b"ABCDEF" + return s.starts_with(c) + + +def test_string_starts_with_cstr(bytes py_str): + """ + Test std::string.starts_with() with c str type argument (char*) + >>> test_string_starts_with_cstr(b"ABC") + True + >>> test_string_starts_with_cstr(b"DEF") + False + """ + cdef char* c = py_str + cdef string s = b"ABCDEF" + return s.starts_with(c) + + +def test_string_ends_with_char(bytes py_str): + """ + Test std::string.ends_with() with char type argument + >>> test_string_ends_with_char(b'F') + True + >>> test_string_ends_with_char(b'A') + False + """ + cdef char c = py_str[0] + cdef string s = b"ABCDEF" + return s.ends_with(c) + + +def test_string_ends_with_cstr(bytes py_str): + """ + Test 
std::string.ends_with() with c str type argument (char*) + >>> test_string_ends_with_cstr(b"DEF") + True + >>> test_string_ends_with_cstr(b"ABC") + False + """ + cdef char* c = py_str + cdef string s = b"ABCDEF" + return s.ends_with(c) \ No newline at end of file -- cgit v1.2.1 From 67b7149d4a6ef9c4b8679cd52efca71913eec044 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 29 Oct 2022 13:10:27 +0100 Subject: Fix faulty calls to Context (#5098) which were mistakenly passing options instead of compiler directives. --- Cython/Build/Dependencies.py | 6 ++++-- Cython/Build/Inline.py | 12 +++++++++--- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/Cython/Build/Dependencies.py b/Cython/Build/Dependencies.py index c34f32792..c60cbf34a 100644 --- a/Cython/Build/Dependencies.py +++ b/Cython/Build/Dependencies.py @@ -46,7 +46,8 @@ from ..Utils import (cached_function, cached_method, path_exists, safe_makedirs, copy_file_to_dir_if_newer, is_package_dir, write_depfile) from ..Compiler import Errors from ..Compiler.Main import Context -from ..Compiler.Options import CompilationOptions, default_options +from ..Compiler.Options import (CompilationOptions, default_options, + get_directive_defaults) join_path = cached_function(os.path.join) copy_once_if_newer = cached_function(copy_file_to_dir_if_newer) @@ -731,7 +732,8 @@ def create_dependency_tree(ctx=None, quiet=False): global _dep_tree if _dep_tree is None: if ctx is None: - ctx = Context(["."], CompilationOptions(default_options)) + ctx = Context(["."], get_directive_defaults(), + options=CompilationOptions(default_options)) _dep_tree = DependencyTree(ctx, quiet=quiet) return _dep_tree diff --git a/Cython/Build/Inline.py b/Cython/Build/Inline.py index 1120f89fc..abb891265 100644 --- a/Cython/Build/Inline.py +++ b/Cython/Build/Inline.py @@ -11,7 +11,8 @@ from distutils.command.build_ext import build_ext import Cython from ..Compiler.Main import Context -from ..Compiler.Options import default_options +from 
..Compiler.Options import (default_options, CompilationOptions, + get_directive_defaults) from ..Compiler.Visitor import CythonTransform, EnvTransform from ..Compiler.ParseTreeTransforms import SkipDeclarations @@ -68,7 +69,8 @@ class UnboundSymbols(EnvTransform, SkipDeclarations): def unbound_symbols(code, context=None): code = to_unicode(code) if context is None: - context = Context([], default_options) + context = Context([], get_directive_defaults(), + options=CompilationOptions(default_options)) from ..Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform tree = parse_from_strings('(tree fragment)', code) for phase in Pipeline.create_pipeline(context, 'pyx'): @@ -129,7 +131,11 @@ def _get_build_extension(): @cached_function def _create_context(cython_include_dirs): - return Context(list(cython_include_dirs), default_options) + return Context( + list(cython_include_dirs), + get_directive_defaults(), + options=CompilationOptions(default_options) + ) _cython_inline_cache = {} -- cgit v1.2.1 From 9020af1d06aeab12012492fb90afc3ba02a3cf7d Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Sat, 29 Oct 2022 15:00:44 +0200 Subject: Docs: Update documentation after introduction of noexcept (#5095) * Update language basics after introduction of noexcept * Update clibraries tutorial --- docs/src/tutorial/clibraries.rst | 34 ++++++++++------------------------ docs/src/userguide/language_basics.rst | 32 ++++++++++++++++++++++++-------- 2 files changed, 34 insertions(+), 32 deletions(-) diff --git a/docs/src/tutorial/clibraries.rst b/docs/src/tutorial/clibraries.rst index 5b8c545b8..3542dbe8e 100644 --- a/docs/src/tutorial/clibraries.rst +++ b/docs/src/tutorial/clibraries.rst @@ -581,7 +581,6 @@ and check if the queue really is empty or not: .. 
code-block:: python @cython.cfunc - @cython.exceptval(-1, check=True) def peek(self) -> cython.int: value: cython.int = cython.cast(cython.Py_ssize_t, cqueue.queue_peek_head(self._c_queue)) if value == 0: @@ -595,7 +594,7 @@ and check if the queue really is empty or not: .. code-block:: cython - cdef int peek(self) except? -1: + cdef int peek(self): cdef int value = cqueue.queue_peek_head(self._c_queue) if value == 0: # this may mean that the queue is empty, or @@ -608,39 +607,27 @@ Note how we have effectively created a fast path through the method in the hopefully common cases that the return value is not ``0``. Only that specific case needs an additional check if the queue is empty. -The ``except? -1`` or ``@cython.exceptval(-1, check=True)`` declaration -in the method signature falls into the -same category. If the function was a Python function returning a +If the ``peek`` function was a Python function returning a Python object value, CPython would simply return ``NULL`` internally instead of a Python object to indicate an exception, which would immediately be propagated by the surrounding code. The problem is that the return type is ``int`` and any ``int`` value is a valid queue item value, so there is no way to explicitly signal an error to the -calling code. In fact, without such a declaration, there is no -obvious way for Cython to know what to return on exceptions and for -calling code to even know that this method *may* exit with an -exception. +calling code. The only way calling code can deal with this situation is to call ``PyErr_Occurred()`` when returning from a function to check if an exception was raised, and if so, propagate the exception. This -obviously has a performance penalty. Cython therefore allows you to -declare which value it should implicitly return in the case of an +obviously has a performance penalty. 
Cython therefore uses a dedicated value +that it implicitly returns in the case of an exception, so that the surrounding code only needs to check for an exception when receiving this exact value. -We chose to use ``-1`` as the exception return value as we expect it -to be an unlikely value to be put into the queue. The question mark -in the ``except? -1`` declaration and ``check=True`` in ``@cython.exceptval`` -indicates that the return value is -ambiguous (there *may* be a ``-1`` value in the queue, after all) and -that an additional exception check using ``PyErr_Occurred()`` is -needed in calling code. Without it, Cython code that calls this -method and receives the exception return value would silently (and -sometimes incorrectly) assume that an exception has been raised. In -any case, all other return values will be passed through almost +By default, the value ``-1`` is used as the exception return value. +All other return values will be passed through almost without a penalty, thus again creating a fast path for 'normal' -values. +values. See :ref:`error_return_values` for more details. + Now that the ``peek()`` method is implemented, the ``pop()`` method also needs adaptation. Since it removes a value from the queue, @@ -654,7 +641,6 @@ removal. Instead, we must test it on entry: .. code-block:: python @cython.cfunc - @cython.exceptval(-1, check=True) def pop(self) -> cython.int: if cqueue.queue_is_empty(self._c_queue): raise IndexError("Queue is empty") @@ -664,7 +650,7 @@ removal. Instead, we must test it on entry: .. code-block:: cython - cdef int pop(self) except? 
-1: + cdef int pop(self): if cqueue.queue_is_empty(self._c_queue): raise IndexError("Queue is empty") return cqueue.queue_pop_head(self._c_queue) diff --git a/docs/src/userguide/language_basics.rst b/docs/src/userguide/language_basics.rst index 98553f542..9635fe5f1 100644 --- a/docs/src/userguide/language_basics.rst +++ b/docs/src/userguide/language_basics.rst @@ -725,7 +725,7 @@ form of exception value declaration def spam() -> cython.int: ... - The keyword argument ``check=True`` indicates that the value ``-1`` _may_ signal an error. + The keyword argument ``check=True`` indicates that the value ``-1`` **may** signal an error. .. group-tab:: Cython @@ -734,7 +734,7 @@ form of exception value declaration cdef int spam() except? -1: ... - The ``?`` indicates that the value ``-1`` _may_ signal an error. + The ``?`` indicates that the value ``-1`` **may** signal an error. In this case, Cython generates a call to :c:func:`PyErr_Occurred` if the exception value is returned, to make sure it really received an exception and not just a normal @@ -777,21 +777,37 @@ See :ref:`wrapping-cplusplus` for more details. Finally, if you are certain that your function should not raise an exception, (e.g., it does not use Python objects at all, or you plan to use it as a callback in C code that -is unaware of Python exceptions), you can declare it as such using ``noexcept``:: +is unaware of Python exceptions), you can declare it as such using ``noexcept`` or by ``@cython.exceptval(check=False)``: - cdef int spam() noexcept +.. tabs:: + + .. group-tab:: Pure Python + + .. code-block:: python + + @cython.cfunc + @cython.exceptval(check=False) + def spam() -> cython.int: + ... + + .. group-tab:: Cython + + .. code-block:: cython + + cdef int spam() noexcept: + ... If a ``noexcept`` function *does* finish with an exception then it will print a warning message but not allow the exception to propagate further. 
Some things to note: -* ``cdef`` functions that are also ``extern`` are implicitly declared ``noexcept``. - In the uncommon case of external C/C++ functions that _can_ raise Python exceptions, +* ``cdef`` functions that are also ``extern`` are implicitly declared ``noexcept`` or ``@cython.exceptval(check=False)``. + In the uncommon case of external C/C++ functions that **can** raise Python exceptions, e.g., external functions that use the Python C API, you should explicitly declare them with an exception value. * ``cdef`` functions that are *not* ``extern`` are implicitly declared with a suitable - exception specification for the return type (e.g. ``except *`` for a ``void`` return - type, ``except? -1`` for an ``int`` return type). + exception specification for the return type (e.g. ``except *`` or ``@cython.exceptval(check=True)`` for a ``void`` return + type, ``except? -1`` or ``@cython.exceptval(-1, check=True)`` for an ``int`` return type). * Exception values can only be declared for functions returning a C integer, enum, float or pointer type, and the value must be a constant expression. 
-- cgit v1.2.1 From a4f2c4c26509391c21bbae4ae20ed59986752a28 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 30 Oct 2022 15:24:19 +0000 Subject: Update CCache script version (#5106) Hopefully fixes #5103 --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 00593d0a8..ce62f36f3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -257,7 +257,7 @@ jobs: python-version: ${{ matrix.python-version }} - name: Cache [ccache] - uses: pat-s/always-upload-cache@v3.0.1 + uses: pat-s/always-upload-cache@v3.0.11 if: startsWith(runner.os, 'Linux') with: path: ~/.ccache -- cgit v1.2.1 From 48b5a957e4c681d363f3bc7020e3213cdabdb601 Mon Sep 17 00:00:00 2001 From: Tortar <68152031+Tortar@users.noreply.github.com> Date: Sun, 30 Oct 2022 17:26:35 +0100 Subject: [docs] Use double quotes for arguments in timeit (#5107) --- docs/src/tutorial/cython_tutorial.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/tutorial/cython_tutorial.rst b/docs/src/tutorial/cython_tutorial.rst index 647ec62b2..e3ab46005 100644 --- a/docs/src/tutorial/cython_tutorial.rst +++ b/docs/src/tutorial/cython_tutorial.rst @@ -390,13 +390,13 @@ Now we can ensure that those two programs output the same values:: It's possible to compare the speed now:: - python -m timeit -s 'from primes_python import primes' 'primes(1000)' + python -m timeit -s "from primes_python import primes" "primes(1000)" 10 loops, best of 3: 23 msec per loop - python -m timeit -s 'from primes_python_compiled import primes' 'primes(1000)' + python -m timeit -s "from primes_python_compiled import primes" "primes(1000)" 100 loops, best of 3: 11.9 msec per loop - python -m timeit -s 'from primes import primes' 'primes(1000)' + python -m timeit -s "from primes import primes" "primes(1000)" 1000 loops, best of 3: 1.65 msec per loop The cythonize version of ``primes_python`` is 2 times faster than 
the Python one, -- cgit v1.2.1 From 66225e8e83a3a40f177e7c6db4018791a248e031 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Tue, 1 Nov 2022 11:19:06 +0100 Subject: docs: Add a note about the "cython-lint" tool. --- docs/src/userguide/source_files_and_compilation.rst | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/docs/src/userguide/source_files_and_compilation.rst b/docs/src/userguide/source_files_and_compilation.rst index a833c61ed..d1c8f696c 100644 --- a/docs/src/userguide/source_files_and_compilation.rst +++ b/docs/src/userguide/source_files_and_compilation.rst @@ -12,17 +12,21 @@ file named :file:`primes.pyx`. Cython code, unlike Python, must be compiled. This happens in two stages: - * A ``.pyx`` file is compiled by Cython to a ``.c`` file. + * A ``.pyx`` (or ``.py``) file is compiled by Cython to a ``.c`` file. * The ``.c`` file is compiled by a C compiler to a ``.so`` file (or a ``.pyd`` file on Windows) -Once you have written your ``.pyx`` file, there are a couple of ways of turning it -into an extension module. +Once you have written your ``.pyx``/``.py`` file, there are a couple of ways +how to turn it into an extension module. The following sub-sections describe several ways to build your extension modules, and how to pass directives to the Cython compiler. +There are also a number of tools that process ``.pyx`` files apart from Cython, e.g. + +- Linting: https://pypi.org/project/cython-lint/ + .. _compiling_command_line: -- cgit v1.2.1 From d317c588a19a2597571d21a3b78beef9d836045e Mon Sep 17 00:00:00 2001 From: Ewout ter Hoeven Date: Mon, 7 Nov 2022 11:54:35 +0100 Subject: Wheel CI: Update cibuildwheel to v2.11.2 (#5115) Updates cibuildwheel from v.2.8.1 to v2.11.2. The v2.8.1 release still used a Python 3.11 beta to build wheels, v2.11.2 uses the stable Python 3.11.0 release. 
--- .github/workflows/wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 4cf308590..910d86a4e 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -74,7 +74,7 @@ jobs: platforms: all - name: Build wheels - uses: pypa/cibuildwheel@v2.8.1 + uses: pypa/cibuildwheel@v2.11.2 env: # TODO: Build Cython with the compile-all flag? CIBW_BUILD: ${{ matrix.python }}-${{ matrix.buildplat[1] }} -- cgit v1.2.1 From 5bc34cd4cbe5641aa191cdfe099f02850f7b1472 Mon Sep 17 00:00:00 2001 From: Stefan Behnel Date: Mon, 7 Nov 2022 12:13:59 +0100 Subject: Remove a bit of overhead from the Visitor implementation. --- Cython/Compiler/Visitor.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/Cython/Compiler/Visitor.py b/Cython/Compiler/Visitor.py index 23ff467db..92e2eb9c0 100644 --- a/Cython/Compiler/Visitor.py +++ b/Cython/Compiler/Visitor.py @@ -1,5 +1,5 @@ # cython: infer_types=True -# cython: language_level=3 +# cython: language_level=3str # cython: auto_pickle=False # @@ -80,7 +80,7 @@ class TreeVisitor(object): def dump_node(self, node): ignored = list(node.child_attrs or []) + [ - u'child_attrs', u'pos', u'gil_message', u'cpp_message', u'subexprs'] + 'child_attrs', 'pos', 'gil_message', 'cpp_message', 'subexprs'] values = [] pos = getattr(node, 'pos', None) if pos: @@ -116,7 +116,7 @@ class TreeVisitor(object): nodes = [] while hasattr(stacktrace, 'tb_frame'): frame = stacktrace.tb_frame - node = frame.f_locals.get(u'self') + node = frame.f_locals.get('self') if isinstance(node, Nodes.Node): code = frame.f_code method_name = code.co_name @@ -153,12 +153,12 @@ class TreeVisitor(object): def find_handler(self, obj): # to resolve, try entire hierarchy cls = type(obj) - pattern = "visit_%s" mro = inspect.getmro(cls) for mro_cls in mro: - handler_method = getattr(self, pattern % mro_cls.__name__, None) + handler_method = getattr(self, 
"visit_" + mro_cls.__name__, None) if handler_method is not None: return handler_method + print(type(self), cls) if self.access_path: print(self.access_path) @@ -594,7 +594,7 @@ class MethodDispatcherTransform(EnvTransform): # Python 2 and 3 return None - call_type = has_kwargs and 'general' or 'simple' + call_type = 'general' if has_kwargs else 'simple' handler = getattr(self, '_handle_%s_%s' % (call_type, match_name), None) if handler is None: handler = getattr(self, '_handle_any_%s' % match_name, None) -- cgit v1.2.1 From 612ab66d720917e39fee2d845c0415fccae7cd59 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Tue, 8 Nov 2022 22:16:35 +0100 Subject: Docs: Improve documentation about error return values (#5108) --- docs/src/userguide/language_basics.rst | 34 +++++++++++++++++++++++++++++----- 1 file changed, 29 insertions(+), 5 deletions(-) diff --git a/docs/src/userguide/language_basics.rst b/docs/src/userguide/language_basics.rst index 9635fe5f1..11561e1ee 100644 --- a/docs/src/userguide/language_basics.rst +++ b/docs/src/userguide/language_basics.rst @@ -699,7 +699,12 @@ as a contract with the caller. Here is an example: With this declaration, whenever an exception occurs inside ``spam``, it will immediately return with the value ``-1``. From the caller's side, whenever a call to spam returns ``-1``, the caller will assume that an exception has -occurred and can now process or propagate it. +occurred and can now process or propagate it. Calling ``spam()`` is roughly translated to the following C code: + +.. code-block:: C + + ret_val = spam(); + if (ret_val == -1) goto error_handler; When you declare an exception value for a function, you should never explicitly or implicitly return that value. 
This includes empty :keyword:`return` @@ -738,7 +743,13 @@ form of exception value declaration In this case, Cython generates a call to :c:func:`PyErr_Occurred` if the exception value is returned, to make sure it really received an exception and not just a normal -result. +result. Calling ``spam()`` is roughly translated to the following C code: + + +.. code-block:: C + + ret_val = spam(); + if (ret_val == -1 && PyErr_Occurred()) goto error_handler; There is also a third form of exception value declaration @@ -750,18 +761,25 @@ There is also a third form of exception value declaration @cython.cfunc @cython.exceptval(check=True) - def spam() -> cython.int: + def spam() -> cython.void: ... .. group-tab:: Cython .. code-block:: cython - cdef int spam() except *: + cdef void spam() except *: ... This form causes Cython to generate a call to :c:func:`PyErr_Occurred` after -*every* call to spam, regardless of what value it returns. If you have a +*every* call to spam, regardless of what value it returns. Calling ``spam()`` is roughly translated to the following C code: + +.. code-block:: C + + spam() + if (PyErr_Occurred()) goto error_handler; + +If you have a function returning ``void`` that needs to propagate errors, you will have to use this form, since there isn't any error return value to test. Otherwise, an explicit error return value allows the C compiler to generate @@ -798,6 +816,8 @@ is unaware of Python exceptions), you can declare it as such using ``noexcept`` ... If a ``noexcept`` function *does* finish with an exception then it will print a warning message but not allow the exception to propagate further. +On the other hand, calling a ``noexcept`` function has zero overhead related to managing exceptions, unlike the previous declarations. + Some things to note: * ``cdef`` functions that are also ``extern`` are implicitly declared ``noexcept`` or ``@cython.exceptval(check=False)``. @@ -824,6 +844,10 @@ Some things to note: .. 
note:: Pointers to functions are currently not supported by pure Python mode. (GitHub issue :issue:`4279`) +* If the returning type of a ``cdef`` function with ``except *`` or ``@cython.exceptval(check=True)`` is C integer, + enum, float or pointer type, Cython calls :c:func:`PyErr_Occurred` only when + dedicated value is returned instead of checking after every call of the function. + * You don't need to (and shouldn't) declare exception values for functions which return Python objects. Remember that a function with no declared return type implicitly returns a Python object. (Exceptions on such -- cgit v1.2.1 From e14f241cecf5d37b29a057b93779737f81c5a68f Mon Sep 17 00:00:00 2001 From: da-woods Date: Tue, 8 Nov 2022 21:24:46 +0000 Subject: Optimized implementation of bound c methods (#5105) Fixes #5062 --- Cython/Compiler/ExprNodes.py | 23 ++++++++++++----------- Cython/Utility/ObjectHandling.c | 9 +++++++++ 2 files changed, 21 insertions(+), 11 deletions(-) diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index bb7629496..1ce459541 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -7239,22 +7239,23 @@ class AttributeNode(ExprNode): obj_node.result_ctype = self.obj.result_ctype self.obj = obj_node unbound_node = ExprNode.coerce_to(self, dst_type, env) - functools = SimpleCallNode( - self.pos, - function=NameNode(self.pos, name=StringEncoding.EncodedString("__import__")), - args=[StringNode(self.pos, value=StringEncoding.EncodedString("functools"))], + utility_code=UtilityCode.load_cached( + "PyMethodNew2Arg", "ObjectHandling.c" ) - partial = AttributeNode( - self.pos, - obj=functools, - attribute=StringEncoding.EncodedString("partial"), + func_type = PyrexTypes.CFuncType( + PyrexTypes.py_object_type, [ + PyrexTypes.CFuncTypeArg("func", PyrexTypes.py_object_type, None), + PyrexTypes.CFuncTypeArg("self", PyrexTypes.py_object_type, None) + ], ) - partial_call = SimpleCallNode( + binding_call = PythonCapiCallNode( 
self.pos, - function=partial, + function_name="__Pyx_PyMethod_New2Arg", + func_type=func_type, args=[unbound_node, obj_node], + utility_code=utility_code, ) - complete_call = EvalWithTempExprNode(obj_node, partial_call) + complete_call = EvalWithTempExprNode(obj_node, binding_call) return complete_call.analyse_types(env) return ExprNode.coerce_to(self, dst_type, env) diff --git a/Cython/Utility/ObjectHandling.c b/Cython/Utility/ObjectHandling.c index 43e13bde9..6b212ca79 100644 --- a/Cython/Utility/ObjectHandling.c +++ b/Cython/Utility/ObjectHandling.c @@ -2900,6 +2900,15 @@ static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *ty #define __Pyx_PyMethod_New PyMethod_New #endif +///////////// PyMethodNew2Arg.proto ///////////// + +// Another wrapping of PyMethod_New that matches the Python3 signature +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyMethod_New2Arg PyMethod_New +#else +#define __Pyx_PyMethod_New2Arg(func, self) PyMethod_New(func, self, (PyObject*)Py_TYPE(self)) +#endif + /////////////// UnicodeConcatInPlace.proto //////////////// # if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 -- cgit v1.2.1 From 2d40828375f2a25a4b931e582bf0ad5161926e42 Mon Sep 17 00:00:00 2001 From: Maximilien Colange <111890372+EpigeneMax@users.noreply.github.com> Date: Tue, 8 Nov 2022 22:30:07 +0100 Subject: Fix linkage for `cdef public` functions in C++ mode (#5040) Fixes #1839 cdef public functions should be declared with the appropriate linkage: * in C mode, either extern or extern "C", depending on whether the header file is included in (resp. object code is linked against) a C or a C++ compilation unit. Choice is made at compile-time through #ifdef __cplusplus macros. NB: This is the current behavior. * in C++ mode, extern "C++" is the only option, as C code cannot call C++ code. Note that extern "C++" should be preferred over extern to allow users to #include the C++ header inside a extern "C" block (which is legal, although barely used). 
Note that the current behavior is OK for C mode, but is incorrect for the C++ mode. As described in #1839, this incorrect behavior is diagnosed by compilers emitting warnings when cdef public functions return a C++ type (e.g. std::vector). The test introduced in this PR checks that the current behavior for C mode (with both C and C++ compatibility) is preserved, and that the behavior for C++ mode is fixed. --- Cython/Compiler/ModuleNode.py | 19 ++-- docs/examples/tutorial/embedding/embedded.pyx | 1 + tests/compile/excvalcheck.h | 6 - tests/compile/nogil.h | 12 -- tests/run/cpp_extern.srctree | 151 ++++++++++++++++++++++++++ 5 files changed, 163 insertions(+), 26 deletions(-) create mode 100644 tests/run/cpp_extern.srctree diff --git a/Cython/Compiler/ModuleNode.py b/Cython/Compiler/ModuleNode.py index a6c594cc4..4cf125cae 100644 --- a/Cython/Compiler/ModuleNode.py +++ b/Cython/Compiler/ModuleNode.py @@ -262,7 +262,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): api_guard = self.api_name(Naming.api_guard_prefix, env) h_code_start.putln("#ifndef %s" % api_guard) h_code_start.putln("") - self.generate_extern_c_macro_definition(h_code_start) + self.generate_extern_c_macro_definition(h_code_start, env.is_cpp()) h_code_start.putln("") self.generate_dl_import_macro(h_code_start) if h_extension_types: @@ -804,7 +804,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): code.putln(" { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; }") code.putln("") - self.generate_extern_c_macro_definition(code) + self.generate_extern_c_macro_definition(code, env.is_cpp()) code.putln("") code.putln("#define %s" % self.api_name(Naming.h_guard_prefix, env)) @@ -876,14 +876,17 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): if has_np_pythran(env): env.use_utility_code(UtilityCode.load_cached("PythranConversion", "CppSupport.cpp")) - def generate_extern_c_macro_definition(self, code): + def generate_extern_c_macro_definition(self, code, is_cpp): name = Naming.extern_c_macro 
code.putln("#ifndef %s" % name) - code.putln(" #ifdef __cplusplus") - code.putln(' #define %s extern "C"' % name) - code.putln(" #else") - code.putln(" #define %s extern" % name) - code.putln(" #endif") + if is_cpp: + code.putln(' #define %s extern "C++"' % name) + else: + code.putln(" #ifdef __cplusplus") + code.putln(' #define %s extern "C"' % name) + code.putln(" #else") + code.putln(" #define %s extern" % name) + code.putln(" #endif") code.putln("#endif") def generate_dl_import_macro(self, code): diff --git a/docs/examples/tutorial/embedding/embedded.pyx b/docs/examples/tutorial/embedding/embedded.pyx index 719702c75..2ed823945 100644 --- a/docs/examples/tutorial/embedding/embedded.pyx +++ b/docs/examples/tutorial/embedding/embedded.pyx @@ -3,6 +3,7 @@ # The following two lines are for test purposes only, please ignore them. # distutils: sources = embedded_main.c # tag: py3only +# tag: no-cpp TEXT_TO_SAY = 'Hello from Python!' diff --git a/tests/compile/excvalcheck.h b/tests/compile/excvalcheck.h index 4c92acd2b..ba7a760e1 100644 --- a/tests/compile/excvalcheck.h +++ b/tests/compile/excvalcheck.h @@ -1,12 +1,6 @@ -#ifdef __cplusplus -extern "C" { -#endif extern DL_EXPORT(int) spam(void); extern DL_EXPORT(void) grail(void); extern DL_EXPORT(char *)tomato(void); -#ifdef __cplusplus -} -#endif int spam(void) {return 0;} void grail(void) {return;} diff --git a/tests/compile/nogil.h b/tests/compile/nogil.h index 42878109b..764a3fc8a 100644 --- a/tests/compile/nogil.h +++ b/tests/compile/nogil.h @@ -1,25 +1,13 @@ -#ifdef __cplusplus -extern "C" { -#endif extern DL_EXPORT(void) e1(void); extern DL_EXPORT(int*) e2(void); -#ifdef __cplusplus -} -#endif void e1(void) {return;} int* e2(void) {return 0;} -#ifdef __cplusplus -extern "C" { -#endif extern DL_EXPORT(PyObject *) g(PyObject*); extern DL_EXPORT(void) g2(PyObject*); -#ifdef __cplusplus -} -#endif PyObject *g(PyObject* o) {if (o) {}; return 0;} void g2(PyObject* o) {if (o) {}; return;} diff --git 
a/tests/run/cpp_extern.srctree b/tests/run/cpp_extern.srctree new file mode 100644 index 000000000..d2c11bb5f --- /dev/null +++ b/tests/run/cpp_extern.srctree @@ -0,0 +1,151 @@ +# mode: run +# tag: cpp +# ticket: 1839 + +""" +PYTHON setup.py build_ext --inplace +PYTHON -c "from foo import test; test()" +PYTHON -c "from bar import test; test()" +PYTHON -c "from baz import test; test()" +""" + +######## setup.py ######## + +from Cython.Build import cythonize +from Cython.Distutils.extension import Extension +from distutils.core import setup + +foo = Extension( + "foo", + ["foo.pyx", "foo1.cpp", "foo2.cpp"], +) +bar = Extension( + "bar", + ["bar.pyx", "bar1.c", "bar2.cpp"], +) +baz = Extension( + "baz", + ["baz.pyx", "baz1.c", "baz2.cpp"], + define_macros = [("__PYX_EXTERN_C", 'extern "C"')], +) + +setup( + ext_modules=cythonize([foo, bar, baz]), +) + +######## foo.pyx ######## + +# distutils: language = c++ + +from libcpp cimport vector + +cdef public vector.vector[int] get_vector(): + return [1,2,3] + +cdef extern from "foo_header.h": + cdef size_t size_vector1() + cdef size_t size_vector2() + +def test(): + assert size_vector1() == 3 + assert size_vector2() == 3 + +######## foo_header.h ######## + +size_t size_vector1(); +size_t size_vector2(); + +######## foo1.cpp ######## + +#include +#include "foo.h" + +size_t size_vector1() { + return get_vector().size(); +} + +######## foo2.cpp ######## + +#include +extern "C" { +// #include within `extern "C"` is legal. +// We want to make sure here that Cython C++ functions are flagged as `extern "C++"`. +// Otherwise they would be interpreted with C-linkage if the header is include within a `extern "C"` block. 
+#include "foo.h" +} + +size_t size_vector2() { + return get_vector().size(); +} + +######## bar.pyx ######## + +cdef public char get_char(): + return 42 + +cdef extern from "bar_header.h": + cdef int get_int1() + cdef int get_int2() + +def test(): + assert get_int1() == 42 + assert get_int2() == 42 + +######## bar_header.h ######## + +int get_int1(); +int get_int2(); + +######## bar1.c ######## + +#include "bar.h" + +int get_int1() { return (int)get_char(); } + +######## bar2.cpp ######## + +extern "C" { +#include "bar.h" +} + +extern "C" int get_int2() { return (int)get_char(); } + +######## baz.pyx ######## + +# distutils: language = c++ + +cdef public char get_char(): + return 42 + +cdef extern from "baz_header.h": + cdef int get_int1() + cdef int get_int2() + +def test(): + assert get_int1() == 42 + assert get_int2() == 42 + +######## baz_header.h ######## + +#ifdef __cplusplus + #define BAZ_EXTERN_C extern "C" +#else + #define BAZ_EXTERN_C +#endif + +BAZ_EXTERN_C int get_int1(); +int get_int2(); + +######## baz1.c ######## + +#undef __PYX_EXTERN_C +#define __PYX_EXTERN_C +#include "baz.h" + +int get_int1() { return (int)get_char(); } + +######## baz2.cpp ######## + +#include "baz.h" + +int get_int2() { return (int)get_char(); } -- cgit v1.2.1 From 327b87a240b8e03fb333c7ff8ad36b576fd774f0 Mon Sep 17 00:00:00 2001 From: Matus Valo Date: Tue, 8 Nov 2022 22:35:59 +0100 Subject: cython, cythonize commands print a specific error when file does not exist (#4629) --- Cython/Build/Cythonize.py | 17 +++++++++++++---- Cython/Compiler/CmdLine.py | 10 ++++++++++ Cython/Compiler/Main.py | 13 +++++++++++-- Cython/Compiler/Tests/TestCmdLine.py | 12 ++++++++++++ Tools/ci-run.sh | 4 +++- test-requirements-27.txt | 1 + test-requirements-pypy27.txt | 2 ++ tests/run/cython_no_files.srctree | 34 ++++++++++++++++++++++++++++++++++ 8 files changed, 86 insertions(+), 7 deletions(-) create mode 100644 test-requirements-pypy27.txt create mode 100644 tests/run/cython_no_files.srctree 
diff --git a/Cython/Build/Cythonize.py b/Cython/Build/Cythonize.py index ab18a12bc..179c04060 100644 --- a/Cython/Build/Cythonize.py +++ b/Cython/Build/Cythonize.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -from __future__ import absolute_import +from __future__ import absolute_import, print_function import os import shutil @@ -45,10 +45,12 @@ def find_package_base(path): package_path = '%s/%s' % (parent, package_path) return base_dir, package_path - def cython_compile(path_pattern, options): - pool = None all_paths = map(os.path.abspath, extended_iglob(path_pattern)) + _cython_compile_files(all_paths, options) + +def _cython_compile_files(all_paths, options): + pool = None try: for path in all_paths: if options.build_inplace: @@ -230,8 +232,15 @@ def parse_args(args): def main(args=None): options, paths = parse_args(args) + all_paths = [] for path in paths: - cython_compile(path, options) + expanded_path = [os.path.abspath(p) for p in extended_iglob(path)] + if not expanded_path: + import sys + print("{}: No such file or directory: '{}'".format(sys.argv[0], path), file=sys.stderr) + sys.exit(1) + all_paths.extend(expanded_path) + _cython_compile_files(all_paths, options) if __name__ == '__main__': diff --git a/Cython/Compiler/CmdLine.py b/Cython/Compiler/CmdLine.py index 80a0cc99d..c330fcc05 100644 --- a/Cython/Compiler/CmdLine.py +++ b/Cython/Compiler/CmdLine.py @@ -4,11 +4,17 @@ from __future__ import absolute_import +import sys import os from argparse import ArgumentParser, Action, SUPPRESS from . 
import Options +if sys.version_info < (3, 3): + # TODO: This workaround can be removed in Cython 3.1 + FileNotFoundError = IOError + + class ParseDirectivesAction(Action): def __call__(self, parser, namespace, values, option_string=None): old_directives = dict(getattr(namespace, self.dest, @@ -209,6 +215,10 @@ def parse_command_line_raw(parser, args): def parse_command_line(args): parser = create_cython_argparser() arguments, sources = parse_command_line_raw(parser, args) + for source in sources: + if not os.path.exists(source): + import errno + raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), source) options = Options.CompilationOptions(Options.default_options) for name, value in vars(arguments).items(): diff --git a/Cython/Compiler/Main.py b/Cython/Compiler/Main.py index eecd49feb..d5985457d 100644 --- a/Cython/Compiler/Main.py +++ b/Cython/Compiler/Main.py @@ -2,7 +2,7 @@ # Cython Top Level # -from __future__ import absolute_import +from __future__ import absolute_import, print_function import os import re @@ -747,7 +747,16 @@ def main(command_line = 0): args = sys.argv[1:] any_failures = 0 if command_line: - options, sources = parse_command_line(args) + try: + options, sources = parse_command_line(args) + except IOError as e: + # TODO: IOError can be replaced with FileNotFoundError in Cython 3.1 + import errno + if errno.ENOENT != e.errno: + # Raised IOError is not caused by missing file. 
+ raise + print("{}: No such file or directory: '{}'".format(sys.argv[0], e.filename), file=sys.stderr) + sys.exit(1) else: options = CompilationOptions(default_options) sources = args diff --git a/Cython/Compiler/Tests/TestCmdLine.py b/Cython/Compiler/Tests/TestCmdLine.py index 6c74fe3a2..0961dfa03 100644 --- a/Cython/Compiler/Tests/TestCmdLine.py +++ b/Cython/Compiler/Tests/TestCmdLine.py @@ -2,6 +2,10 @@ import os import sys import re from unittest import TestCase +try: + from unittest.mock import patch, Mock +except ImportError: # Py2 + from mock import patch, Mock try: from StringIO import StringIO except ImportError: @@ -12,7 +16,15 @@ from ..CmdLine import parse_command_line from .Utils import backup_Options, restore_Options, check_global_options +unpatched_exists = os.path.exists + +def patched_exists(path): + # avoid the Cython command raising a file not found error + if path in ('source.pyx', 'file.pyx', 'file1.pyx', 'file2.pyx', 'file3.pyx', 'foo.pyx', 'bar.pyx'): + return True + return unpatched_exists(path) +@patch('os.path.exists', new=Mock(side_effect=patched_exists)) class CmdLineParserTest(TestCase): def setUp(self): self._options_backup = backup_Options() diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh index bd8ef0c8b..f25041415 100644 --- a/Tools/ci-run.sh +++ b/Tools/ci-run.sh @@ -68,12 +68,14 @@ if [[ $PYTHON_VERSION == "2.7"* ]]; then elif [[ $PYTHON_VERSION == "3."[45]* ]]; then python -m pip install wheel || exit 1 python -m pip install -r test-requirements-34.txt || exit 1 +elif [[ $PYTHON_VERSION == "pypy-2.7" ]]; then + pip install wheel || exit 1 + pip install -r test-requirements-pypy27.txt || exit 1 else python -m pip install -U pip "setuptools<60" wheel || exit 1 if [[ $PYTHON_VERSION != *"-dev" || $COVERAGE == "1" ]]; then python -m pip install -r test-requirements.txt || exit 1 - if [[ $PYTHON_VERSION != "pypy"* && $PYTHON_VERSION != "3."[1]* ]]; then python -m pip install -r test-requirements-cpython.txt || exit 1 fi diff --git 
a/test-requirements-27.txt b/test-requirements-27.txt index efec3bbbf..b518c2570 100644 --- a/test-requirements-27.txt +++ b/test-requirements-27.txt @@ -62,3 +62,4 @@ wcwidth==0.2.5 webencodings==0.5.1 widgetsnbextension==3.5.1 zipp==1.2.0 +mock==3.0.5 diff --git a/test-requirements-pypy27.txt b/test-requirements-pypy27.txt new file mode 100644 index 000000000..9f9505240 --- /dev/null +++ b/test-requirements-pypy27.txt @@ -0,0 +1,2 @@ +-r test-requirements.txt +mock==3.0.5 diff --git a/tests/run/cython_no_files.srctree b/tests/run/cython_no_files.srctree new file mode 100644 index 000000000..455258c03 --- /dev/null +++ b/tests/run/cython_no_files.srctree @@ -0,0 +1,34 @@ +PYTHON test_cythonize_no_files.py +PYTHON test_cython_no_files.py + +######## a.py ########### +a = 1 + +######## b.py ########### +b = 2 + +######## c.pyx ########### +c = 3 + +######## d.pyx ########### +d = 4 + +######## test_cythonize_no_files.py ########### +import subprocess +import sys + +cmd = [sys.executable, '-c', 'from Cython.Build.Cythonize import main; main()', 'a.py', 'b.py', 'c.py', '*.pyx'] +proc = subprocess.Popen(cmd, stderr=subprocess.PIPE) +_, err = proc.communicate() +assert proc.returncode == 1, proc.returncode +assert b"No such file or directory: 'c.py'" in err, err + +######## test_cython_no_files.py ########### +import subprocess +import sys + +cmd = [sys.executable, '-c', 'from Cython.Compiler.Main import main; main(command_line = 1)', 'a.py', 'b.py', 'c.py', '*.pyx'] +proc = subprocess.Popen(cmd, stderr=subprocess.PIPE) +_, err = proc.communicate() +assert proc.returncode == 1, proc.returncode +assert b"No such file or directory: 'c.py'" in err, err -- cgit v1.2.1 From f97aa82ce5fb89138ad8c55e0802069774c73b76 Mon Sep 17 00:00:00 2001 From: da-woods Date: Fri, 11 Nov 2022 20:23:05 +0000 Subject: Remove CHANGES comment about immutable cdef dataclasses They aren't (yet) immutable - I just removed a test that relied on them being mutable. 
--- CHANGES.rst | 4 ---- 1 file changed, 4 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 15e01ac39..b18c43d4c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -77,10 +77,6 @@ Bugs fixed * Some issues with Cython ``@dataclass`` arguments, hashing and ``repr()`` were resolved. (Github issue :issue:`4956`) -* cdef data classes (not their instances) were accidentally modifiable after creation, - which lead to potential problems and crashes. They are now immutable. - (Github issue :issue:`5026`) - * Relative imports failed in compiled ``__init__.py`` package modules. Patch by Matus Valo. (Github issue :issue:`3442`) -- cgit v1.2.1 From 323e5c8c7583b6d8be7f3c2540a63283a21c27e6 Mon Sep 17 00:00:00 2001 From: da-woods Date: Fri, 11 Nov 2022 20:32:38 +0000 Subject: Run GCC 11 tests with the most recent language standard (#5078) It doesn't look like we're testing with a compiler using up-to-date language standards, and this meant we missed at least one bug recently: https://github.com/cython/cython/pull/5029. 
It seems like the GCC11 tests may as well double as "GCC11 with up-to-date language standards" * Try to fix fake reference copy-constructor issue --- .github/workflows/ci.yml | 6 +++--- Cython/Utility/ModuleSetupCode.c | 6 ++++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ce62f36f3..4dd771ad9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -63,16 +63,16 @@ jobs: # Ubuntu sub-jobs: # ================ - # GCC 11 + # GCC 11 (with latest language standards) - os: ubuntu-18.04 python-version: 3.9 backend: c - env: { GCC_VERSION: 11 } + env: { GCC_VERSION: 11, EXTRA_CFLAGS: "-std=c17" } extra_hash: "-gcc11" - os: ubuntu-18.04 python-version: 3.9 backend: cpp - env: { GCC_VERSION: 11 } + env: { GCC_VERSION: 11, EXTRA_CFLAGS: "-std=c++20" } extra_hash: "-gcc11" # compile all modules - os: ubuntu-18.04 diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c index fa16485b5..df2a4ee4a 100644 --- a/Cython/Utility/ModuleSetupCode.c +++ b/Cython/Utility/ModuleSetupCode.c @@ -592,8 +592,10 @@ class __Pyx_FakeReference { T *operator&() { return ptr; } operator T&() { return *ptr; } // TODO(robertwb): Delegate all operators (or auto-generate unwrapping code where needed). 
- template bool operator ==(U other) { return *ptr == other; } - template bool operator !=(U other) { return *ptr != other; } + template bool operator ==(const U& other) const { return *ptr == other; } + template bool operator !=(const U& other) const { return *ptr != other; } + template bool operator==(const __Pyx_FakeReference& other) const { return *ptr == *other.ptr; } + template bool operator!=(const __Pyx_FakeReference& other) const { return *ptr != *other.ptr; } private: T *ptr; }; -- cgit v1.2.1 From 9a55e5924c45e452e05ccfca4453fb5629e29f47 Mon Sep 17 00:00:00 2001 From: samaingw <31617584+samaingw@users.noreply.github.com> Date: Sat, 12 Nov 2022 10:22:52 +0100 Subject: Allow nested cppclass definitions (#3221) This is a subset of PR #2951, containing the code concerning nested class definition support. For the records, you'll find below the original #2951 description of theses changes: 4 - Nested classes ------------------------ This is not about wrapping nested C++ classes, because it is already present and functional (and there's a test which can prove it). This is about being able to generate C++ classes definitions with nested classes. So it is exactly like the batch about method overloading: being able to define and not just declare. If I have understood it properly, this addresses issue #1218 a) Make cppclass generate code for nested definitions (for example nested cppclass) b) Fix Symtab for the inherited nested types: this is basically the same thing than for other attributes, have a separate "inherited types" array, to be able to have them in scope but ignore them during code generation. 
c) Amend the nested classes test --- Cython/Compiler/ModuleNode.py | 4 +++- Cython/Compiler/Nodes.py | 3 +++ Cython/Compiler/Symtab.py | 2 +- tests/run/cpp_nested_classes.pyx | 44 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 51 insertions(+), 2 deletions(-) diff --git a/Cython/Compiler/ModuleNode.py b/Cython/Compiler/ModuleNode.py index 4cf125cae..53aaf026e 100644 --- a/Cython/Compiler/ModuleNode.py +++ b/Cython/Compiler/ModuleNode.py @@ -975,7 +975,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): def generate_typedef(self, entry, code): base_type = entry.type.typedef_base_type - if base_type.is_numeric: + enclosing_scope = entry.scope + if base_type.is_numeric and not enclosing_scope.is_cpp_class_scope: try: writer = code.globalstate['numeric_typedefs'] except KeyError: @@ -1051,6 +1052,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): [base_class.empty_declaration_code() for base_class in type.base_classes]) code.put(" : public %s" % base_class_decl) code.putln(" {") + self.generate_type_header_code(scope.type_entries, code) py_attrs = [e for e in scope.entries.values() if e.type.is_pyobject and not e.is_inherited] has_virtual_methods = False diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py index c3cde85f3..7a9192234 100644 --- a/Cython/Compiler/Nodes.py +++ b/Cython/Compiler/Nodes.py @@ -1635,6 +1635,9 @@ class CppClassNode(CStructOrUnionDefNode, BlockNode): elif isinstance(attr, CompilerDirectivesNode): for sub_attr in func_attributes(attr.body.stats): yield sub_attr + elif isinstance(attr, CppClassNode) and attr.attributes is not None: + for sub_attr in func_attributes(attr.attributes): + yield sub_attr if self.attributes is not None: if self.in_pxd and not env.in_cinclude: self.entry.defined_in_pxd = 1 diff --git a/Cython/Compiler/Symtab.py b/Cython/Compiler/Symtab.py index 765652d0d..0950e1cc2 100644 --- a/Cython/Compiler/Symtab.py +++ b/Cython/Compiler/Symtab.py @@ -2768,7 +2768,7 @@ class CppClassScope(Scope): if 
base_entry.name not in base_templates: entry = self.declare_type(base_entry.name, base_entry.type, base_entry.pos, base_entry.cname, - base_entry.visibility) + base_entry.visibility, defining=False) entry.is_inherited = 1 def specialize(self, values, type_entry): diff --git a/tests/run/cpp_nested_classes.pyx b/tests/run/cpp_nested_classes.pyx index b50f79936..8877c0440 100644 --- a/tests/run/cpp_nested_classes.pyx +++ b/tests/run/cpp_nested_classes.pyx @@ -25,6 +25,22 @@ cdef extern from "cpp_nested_classes_support.h": cdef cppclass SpecializedTypedClass(TypedClass[double]): pass +cdef cppclass AA: + cppclass BB: + int square(int x): + return x * x + cppclass CC: + int cube(int x): + return x * x * x + BB* createB(): + return new BB() + ctypedef int my_int + @staticmethod + my_int negate(my_int x): + return -x + +cdef cppclass DD(AA): + ctypedef int my_other_int ctypedef A AliasA1 ctypedef AliasA1 AliasA2 @@ -44,6 +60,27 @@ def test_nested_classes(): assert b_ptr.square(4) == 16 del b_ptr +def test_nested_defined_classes(): + """ + >>> test_nested_defined_classes() + """ + cdef AA a + cdef AA.BB b + assert b.square(3) == 9 + cdef AA.BB.CC c + assert c.cube(3) == 27 + + cdef AA.BB *b_ptr = a.createB() + assert b_ptr.square(4) == 16 + del b_ptr + +def test_nested_inherited_classes(): + """ + >>> test_nested_inherited_classes() + """ + cdef DD.BB b + assert b.square(3) == 9 + def test_nested_typedef(py_x): """ >>> test_nested_typedef(5) @@ -51,6 +88,13 @@ def test_nested_typedef(py_x): cdef A.my_int x = py_x assert A.negate(x) == -py_x +def test_nested_defined_typedef(py_x): + """ + >>> test_nested_typedef(5) + """ + cdef AA.my_int x = py_x + assert AA.negate(x) == -py_x + def test_typedef_for_nested(py_x): """ >>> test_typedef_for_nested(5) -- cgit v1.2.1 From 7a667e69e178ebcc4917d6ca3f102d0e3c729891 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 12 Nov 2022 09:34:40 +0000 Subject: Remove "assignable" requirement from cpp_locals (#4692) Fixes #4558 by making the 
assignment operation of a cpp_locals variable actually use "emplace". I've done this by creating a class that implements __Pyx_Optional_Type rather than justing using std::optional directly (and rewriting operator= for that). This seemed less intrusive that changing the code generation to call "emplace" directly, but possibly less efficient in some cases. --- Cython/Utility/CppSupport.cpp | 39 ++++++++++++++++++++++++++++++++++++-- tests/run/cpp_locals_directive.pyx | 10 +++------- 2 files changed, 40 insertions(+), 9 deletions(-) diff --git a/Cython/Utility/CppSupport.cpp b/Cython/Utility/CppSupport.cpp index ca5579918..ba0002c94 100644 --- a/Cython/Utility/CppSupport.cpp +++ b/Cython/Utility/CppSupport.cpp @@ -84,15 +84,50 @@ auto __Pyx_pythran_to_python(T &&value) -> decltype(to_python( ////////////// OptionalLocals.proto //////////////// //@proto_block: utility_code_proto_before_types +#include #if defined(CYTHON_USE_BOOST_OPTIONAL) // fallback mode - std::optional is preferred but this gives // people with a less up-to-date compiler a chance #include - #define __Pyx_Optional_Type boost::optional + #define __Pyx_Optional_BaseType boost::optional #else #include // since std::optional is a C++17 features, a templated using declaration should be safe // (although it could be replaced with a define) template - using __Pyx_Optional_Type = std::optional; + using __Pyx_Optional_BaseType = std::optional; #endif + +// This class reuses as much of the implementation of std::optional as possible. +// The only place it differs significantly is the assignment operators, which use +// "emplace" (thus requiring move/copy constructors, but not move/copy +// assignment operators). 
This is preferred because it lets us work with assignable +// types (for example those with const members) +template +class __Pyx_Optional_Type : private __Pyx_Optional_BaseType { +public: + using __Pyx_Optional_BaseType::__Pyx_Optional_BaseType; + using __Pyx_Optional_BaseType::has_value; + using __Pyx_Optional_BaseType::operator*; + using __Pyx_Optional_BaseType::operator->; +#if __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600) + __Pyx_Optional_Type& operator=(const __Pyx_Optional_Type& rhs) { + this->emplace(*rhs); + return *this; + } + __Pyx_Optional_Type& operator=(__Pyx_Optional_Type&& rhs) { + this->emplace(std::move(*rhs)); + return *this; + } + template + __Pyx_Optional_Type& operator=(U&& rhs) { + this->emplace(std::forward(rhs)); + return *this; + } +#else + // Note - the "cpp_locals" feature is designed to require C++14. + // This pre-c++11 fallback is largely untested, and definitely won't work + // in all the cases that the more modern version does + using __Pyx_Optional_BaseType::operator=; // the chances are emplace can't work... 
+#endif +}; diff --git a/tests/run/cpp_locals_directive.pyx b/tests/run/cpp_locals_directive.pyx index 6c9c89ba5..359ae0b10 100644 --- a/tests/run/cpp_locals_directive.pyx +++ b/tests/run/cpp_locals_directive.pyx @@ -19,13 +19,9 @@ cdef extern from *: C(C&& rhs) : x(rhs.x), print_destructor(rhs.print_destructor) { rhs.print_destructor = false; // moved-from instances are deleted silently } - C& operator=(C&& rhs) { - x=rhs.x; - print_destructor=rhs.print_destructor; - rhs.print_destructor = false; // moved-from instances are deleted silently - return *this; - } - C(const C& rhs) = default; + // also test that we don't require the assignment operator + C& operator=(C&& rhs) = delete; + C(const C& rhs) = delete; C& operator=(const C& rhs) = default; ~C() { if (print_destructor) print_C_destructor(); -- cgit v1.2.1 From 2fdb74a02667d4c42d0280e03edd65ff3c934a82 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 12 Nov 2022 11:29:54 +0000 Subject: Clean up issues with dataclasses and inheritance (#5046) Ban overriding of inherited fields - it's very hard to make this work so better just go give an error. Note limitation that dataclass decorator can't see into cimported base class. 
Doesn't fix but helps people avoid #4799 Part of https://github.com/cython/cython/issues/4956 --- Cython/Compiler/Dataclass.py | 14 ++++++++++---- Cython/Compiler/ExprNodes.py | 3 +++ Cython/Compiler/Symtab.py | 1 + Tools/make_dataclass_tests.py | 15 ++++++--------- tests/errors/dataclass_e6.pyx | 23 +++++++++++++++++++++++ tests/errors/dataclass_w1.pyx | 13 +++++++++++++ tests/errors/dataclass_w1_othermod.pxd | 3 +++ tests/run/test_dataclasses.pyx | 29 +++++++++++++++++++++++++++++ 8 files changed, 88 insertions(+), 13 deletions(-) create mode 100644 tests/errors/dataclass_e6.pyx create mode 100644 tests/errors/dataclass_w1.pyx create mode 100644 tests/errors/dataclass_w1_othermod.pxd diff --git a/Cython/Compiler/Dataclass.py b/Cython/Compiler/Dataclass.py index e202ed6eb..7cbbab954 100644 --- a/Cython/Compiler/Dataclass.py +++ b/Cython/Compiler/Dataclass.py @@ -200,10 +200,16 @@ def process_class_get_fields(node): transform(node) default_value_assignments = transform.removed_assignments - if node.base_type and node.base_type.dataclass_fields: - fields = node.base_type.dataclass_fields.copy() - else: - fields = OrderedDict() + base_type = node.base_type + fields = OrderedDict() + while base_type: + if base_type.is_external or not base_type.scope.implemented: + warning(node.pos, "Cannot reliably handle Cython dataclasses with base types " + "in external modules since it is not possible to tell what fields they have", 2) + if base_type.dataclass_fields: + fields = base_type.dataclass_fields.copy() + break + base_type = base_type.base_type for entry in var_entries: name = entry.name diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 1ce459541..4f8207ee8 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -2171,6 +2171,9 @@ class NameNode(AtomicExprNode): # In a dataclass, an assignment should not prevent a name from becoming an instance attribute. # Hence, "as_target = not is_dataclass". 
self.declare_from_annotation(env, as_target=not is_dataclass) + elif (self.entry and self.entry.is_inherited and + self.annotation and env.is_c_dataclass_scope): + error(self.pos, "Cannot redeclare inherited fields in Cython dataclasses") if not self.entry: if env.directives['warn.undeclared']: warning(self.pos, "implicit declaration of '%s'" % self.name, 1) diff --git a/Cython/Compiler/Symtab.py b/Cython/Compiler/Symtab.py index 0950e1cc2..984e10f05 100644 --- a/Cython/Compiler/Symtab.py +++ b/Cython/Compiler/Symtab.py @@ -2618,6 +2618,7 @@ class CClassScope(ClassScope): base_entry.name, adapt(base_entry.cname), base_entry.type, None, 'private') entry.is_variable = 1 + entry.is_inherited = True entry.annotation = base_entry.annotation self.inherited_var_entries.append(entry) diff --git a/Tools/make_dataclass_tests.py b/Tools/make_dataclass_tests.py index c39a4b2db..dc38eee70 100644 --- a/Tools/make_dataclass_tests.py +++ b/Tools/make_dataclass_tests.py @@ -130,19 +130,16 @@ skip_tests = frozenset( ('TestOrdering', 'test_no_order'), # not possible to add attributes on extension types ("TestCase", "test_post_init_classmethod"), + # Cannot redefine the same field in a base dataclass (tested in dataclass_e6) + ("TestCase", "test_field_order"), + ( + "TestCase", + "test_overwrite_fields_in_derived_class", + ), # Bugs #====== # not specifically a dataclass issue - a C int crashes classvar ("TestCase", "test_class_var"), - ("TestCase", "test_field_order"), # invalid C code (__pyx_base?) - ( - "TestCase", - "test_overwrite_fields_in_derived_class", - ), # invalid C code (__pyx_base?) 
- ( - "TestCase", - "test_intermediate_non_dataclass", - ), # issue with propagating through intermediate class ( "TestFrozen", ), # raises AttributeError, not FrozenInstanceError (may be hard to fix) diff --git a/tests/errors/dataclass_e6.pyx b/tests/errors/dataclass_e6.pyx new file mode 100644 index 000000000..64dc1ae05 --- /dev/null +++ b/tests/errors/dataclass_e6.pyx @@ -0,0 +1,23 @@ +# mode: error + +from cython.dataclasses cimport dataclass + +@dataclass +cdef class BaseDataclass: + a: str = "value" + +@dataclass +cdef class MainDataclass(BaseDataclass): + a: str = "new value" + +cdef class Intermediate(BaseDataclass): + pass + +@dataclass +cdef class AnotherDataclass(Intermediate): + a: str = "ooops" + +_ERRORS = """ +11:4: Cannot redeclare inherited fields in Cython dataclasses +18:4: Cannot redeclare inherited fields in Cython dataclasses +""" diff --git a/tests/errors/dataclass_w1.pyx b/tests/errors/dataclass_w1.pyx new file mode 100644 index 000000000..c0d9790e2 --- /dev/null +++ b/tests/errors/dataclass_w1.pyx @@ -0,0 +1,13 @@ +# mode: compile +# tag: warnings + +from dataclass_w1_othermod cimport SomeBase +from cython.dataclasses cimport dataclass + +@dataclass +cdef class DC(SomeBase): + a: str = "" + +_WARNINGS = """ +8:5: Cannot reliably handle Cython dataclasses with base types in external modules since it is not possible to tell what fields they have +""" diff --git a/tests/errors/dataclass_w1_othermod.pxd b/tests/errors/dataclass_w1_othermod.pxd new file mode 100644 index 000000000..02dddf492 --- /dev/null +++ b/tests/errors/dataclass_w1_othermod.pxd @@ -0,0 +1,3 @@ +# Extern class for test "dataclass_w1" +cdef class SomeBase: + pass diff --git a/tests/run/test_dataclasses.pyx b/tests/run/test_dataclasses.pyx index 59e1f0a49..4daf62cf8 100644 --- a/tests/run/test_dataclasses.pyx +++ b/tests/run/test_dataclasses.pyx @@ -153,6 +153,23 @@ class Bar_TestCase_test_default_factory_derived(Foo_TestCase_test_default_factor class 
Baz_TestCase_test_default_factory_derived(Foo_TestCase_test_default_factory_derived): pass +@dataclass +@cclass +class A_TestCase_test_intermediate_non_dataclass: + x: int + +@cclass +class B_TestCase_test_intermediate_non_dataclass(A_TestCase_test_intermediate_non_dataclass): + y: int + +@dataclass +@cclass +class C_TestCase_test_intermediate_non_dataclass(B_TestCase_test_intermediate_non_dataclass): + z: int + +class D_TestCase_test_intermediate_non_dataclass(C_TestCase_test_intermediate_non_dataclass): + t: int + class NotDataClass_TestCase_test_is_dataclass: pass @@ -729,6 +746,18 @@ class TestCase(unittest.TestCase): Baz = Baz_TestCase_test_default_factory_derived self.assertEqual(Baz().x, {}) + def test_intermediate_non_dataclass(self): + A = A_TestCase_test_intermediate_non_dataclass + B = B_TestCase_test_intermediate_non_dataclass + C = C_TestCase_test_intermediate_non_dataclass + c = C(1, 3) + self.assertEqual((c.x, c.z), (1, 3)) + with self.assertRaises(AttributeError): + c.y + D = D_TestCase_test_intermediate_non_dataclass + d = D(4, 5) + self.assertEqual((d.x, d.z), (4, 5)) + def test_is_dataclass(self): NotDataClass = NotDataClass_TestCase_test_is_dataclass self.assertFalse(is_dataclass(0)) -- cgit v1.2.1 From d71d855614542d22535c9cc121f1c1c2e1730087 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 12 Nov 2022 13:27:52 +0000 Subject: Update Cython 0.29.33 changelog --- CHANGES.rst | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index 8b5297166..873cfdaeb 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -2,6 +2,44 @@ Cython Changelog ================ +0.29.33 (????-??-??) +==================== + +Features added +-------------- + +* The ``cythonize`` and ``cython`` commands have a new option ``-M`` / ``--depfile`` + to generate ``.dep`` dependency files for the compilation unit. This can be used + by external build tools to track these dependencies. 
+ The ``cythonize`` option was already available in Cython :ref:`0.29.27`. + Patches by Evgeni Burovski and Eli Schwartz. (Github issue :issue:`1214`) + +Bugs fixed +---------- + +* Fixed various compiler warnings. One patch by Lisandro Dalcin. + (Github issues :issue:`4948`, :issue:`5086`) + +* Fixed error when calculating complex powers of negative numbers. + (Github issue :issue:`5014`) + +* Corrected a small mis-formatting of exception messages on Python 2. + (Github issue :issue:`5018`) + +Other changes +------------- + +* The undocumented, untested and apparently useless syntax + ``from somemodule cimport class/struct/union somename`` was deprecated + in anticipation of its removal in Cython 3. The type + modifier is not needed here and a plain ``cimport`` of the name will do. + (Github issue :issue:`4905`) + +* Properly disable generation of descriptor docstrings on PyPy since + they cause crashes. It was previously disabled, but only accidentally + via a typo. Patch by Matti Picus. + (Github issue :issue:`5083`) + 0.29.32 (2022-07-29) ==================== -- cgit v1.2.1 From 363780535e7bfe8adb1b858feb3fccbdf3434d97 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 12 Nov 2022 13:35:08 +0000 Subject: Fix whitespace issue Introduced in 2fdb74a02667d4c42d0280e03edd65ff3c934a82 --- CHANGES.rst | 3 +++ Cython/Compiler/ExprNodes.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 9f4884947..41fdc1aac 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -100,6 +100,9 @@ Bugs fixed * Intel C compilers could complain about unsupported gcc pragmas. Patch by Ralf Gommers. (Github issue :issue:`5052`) + +* Includes all bug-fixes and features from the 0.29 maintenance branch + up to the :ref:`0.29.33` release. 
Other changes ------------- diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py index 4f8207ee8..881851535 100644 --- a/Cython/Compiler/ExprNodes.py +++ b/Cython/Compiler/ExprNodes.py @@ -2171,7 +2171,7 @@ class NameNode(AtomicExprNode): # In a dataclass, an assignment should not prevent a name from becoming an instance attribute. # Hence, "as_target = not is_dataclass". self.declare_from_annotation(env, as_target=not is_dataclass) - elif (self.entry and self.entry.is_inherited and + elif (self.entry and self.entry.is_inherited and self.annotation and env.is_c_dataclass_scope): error(self.pos, "Cannot redeclare inherited fields in Cython dataclasses") if not self.entry: -- cgit v1.2.1 From 1b028d34d6a3ec82f4687df1e0db598721c7a695 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sun, 13 Nov 2022 09:03:10 +0000 Subject: Add note about PyMSVC to the docs --- docs/src/quickstart/install.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/src/quickstart/install.rst b/docs/src/quickstart/install.rst index 04a47afdc..979d0f178 100644 --- a/docs/src/quickstart/install.rst +++ b/docs/src/quickstart/install.rst @@ -29,7 +29,11 @@ according to the system used: built with. This is usually a specific version of Microsoft Visual C/C++ (MSVC) - see https://wiki.python.org/moin/WindowsCompilers. MSVC is the only compiler that Cython is currently tested with on - Windows. A possible alternative is the open source MinGW (a + Windows. If you're having difficulty making setuptools detect + MSVC then `PyMSVC `_ + aims to solve this. + + A possible alternative is the open source MinGW (a Windows distribution of gcc). See the appendix for instructions for setting up MinGW manually. 
Enthought Canopy and Python(x,y) bundle MinGW, but some of the configuration steps in the appendix might -- cgit v1.2.1 From 82f7e9bfca66144712e2b0f8f05c268737595385 Mon Sep 17 00:00:00 2001 From: da-woods Date: Sat, 19 Nov 2022 17:35:24 +0000 Subject: Avoid crashing due to ErrorNodes --- Cython/Compiler/MatchCaseNodes.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/Cython/Compiler/MatchCaseNodes.py b/Cython/Compiler/MatchCaseNodes.py index dbd5b8770..96d297bc7 100644 --- a/Cython/Compiler/MatchCaseNodes.py +++ b/Cython/Compiler/MatchCaseNodes.py @@ -2,7 +2,7 @@ # # In a separate file because they're unlikely to be useful for much else. -from .Nodes import Node, StatNode +from .Nodes import Node, StatNode, ErrorNode from .Errors import error @@ -17,6 +17,10 @@ class MatchNode(StatNode): def validate_irrefutable(self): found_irrefutable_case = None for case in self.cases: + if isinstance(case, ErrorNode): + # This validation happens before error nodes have been + # transformed into actual errors, so we need to ignore them + continue if found_irrefutable_case: error( found_irrefutable_case.pos, @@ -45,12 +49,18 @@ class MatchCaseNode(Node): child_attrs = ["pattern", "body", "guard"] def is_irrefutable(self): + if isinstance(self.pattern, ErrorNode): + return True # value doesn't really matter return self.pattern.is_irrefutable() and not self.guard def validate_targets(self): + if isinstance(self.pattern, ErrorNode): + return self.pattern.get_targets() def validate_irrefutable(self): + if isinstance(self.pattern, ErrorNode): + return self.pattern.validate_irrefutable() -- cgit v1.2.1 From f9825ddec9178095760c83fc80430a6c477790ac Mon Sep 17 00:00:00 2001 From: da-woods Date: Thu, 8 Dec 2022 20:40:55 +0000 Subject: Type bools to bool for pattern matching --- Cython/Compiler/Parsing.pxd | 2 +- Cython/Compiler/Parsing.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/Cython/Compiler/Parsing.pxd 
b/Cython/Compiler/Parsing.pxd index 997cdf513..fc3e2749f 100644 --- a/Cython/Compiler/Parsing.pxd +++ b/Cython/Compiler/Parsing.pxd @@ -63,7 +63,7 @@ cdef make_slice_nodes(pos, subscripts) cpdef make_slice_node(pos, start, stop = *, step = *) cdef p_atom(PyrexScanner s) cdef p_atom_string(PyrexScanner s) -cdef p_atom_ident_constants(PyrexScanner s) +cdef p_atom_ident_constants(PyrexScanner s, bint bools_are_pybool = *) @cython.locals(value=unicode) cdef p_int_literal(PyrexScanner s) cdef p_name(PyrexScanner s, name) diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py index 94fc2eca1..3fc6c3f26 100644 --- a/Cython/Compiler/Parsing.py +++ b/Cython/Compiler/Parsing.py @@ -746,7 +746,7 @@ def p_atom_string(s): s.error("invalid string kind '%s'" % kind) -def p_atom_ident_constants(s): +def p_atom_ident_constants(s, bools_are_pybool=False): """ Returns None if it isn't one special-cased named constants. Only calls s.next() if it successfully matches a matches. @@ -754,12 +754,16 @@ def p_atom_ident_constants(s): pos = s.position() name = s.systring result = None + if bools_are_pybool: + extra_kwds = {'type': Builtin.bool_type} + else: + extra_kwds = {} if name == "None": result = ExprNodes.NoneNode(pos) elif name == "True": - result = ExprNodes.BoolNode(pos, value=True) + result = ExprNodes.BoolNode(pos, value=True, **extra_kwds) elif name == "False": - result = ExprNodes.BoolNode(pos, value=False) + result = ExprNodes.BoolNode(pos, value=False, **extra_kwds) elif name == "NULL" and not s.in_python_file: result = ExprNodes.NullNode(pos) if result: @@ -4286,7 +4290,7 @@ def p_literal_pattern(s): elif sy == 'IDENT': # Note that p_atom_ident_constants includes NULL. # This is a deliberate Cython addition to the pattern matching specification - result = p_atom_ident_constants(s) + result = p_atom_ident_constants(s, bools_are_pybool=True) if result: return MatchCaseNodes.MatchValuePatternNode(pos, value=result, is_is_check=True) -- cgit v1.2.1