summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorda-woods <dw-git@d-woods.co.uk>2022-11-17 18:04:15 +0000
committerGitHub <noreply@github.com>2022-11-17 18:04:15 +0000
commit9299a3fefc6c93bdb11f7fc35025615f5c1c0d56 (patch)
tree40c2a7ce6c3d2bf9576374706280449bc84c93db
parent4ef752766d8ac28177996d76ec14165fbac90f4e (diff)
parent1b028d34d6a3ec82f4687df1e0db598721c7a695 (diff)
downloadcython-9299a3fefc6c93bdb11f7fc35025615f5c1c0d56.tar.gz
Merge branch 'master' into parse-match
-rw-r--r--.github/ISSUE_TEMPLATE/bug_report.yml55
-rw-r--r--.github/ISSUE_TEMPLATE/feature_request.md41
-rw-r--r--.github/ISSUE_TEMPLATE/feature_request.yml46
-rw-r--r--.github/ISSUE_TEMPLATE/other.md (renamed from .github/ISSUE_TEMPLATE/bug_report.md)27
-rw-r--r--.github/workflows/ci.yml151
-rw-r--r--.github/workflows/wheel-manylinux.yml91
-rw-r--r--.github/workflows/wheels.yml151
-rw-r--r--.gitignore27
-rw-r--r--.travis.yml48
-rw-r--r--CHANGES.rst292
-rw-r--r--Cython/Build/Cythonize.py29
-rw-r--r--Cython/Build/Dependencies.py35
-rw-r--r--Cython/Build/Inline.py25
-rw-r--r--Cython/Build/IpythonMagic.py7
-rw-r--r--Cython/Build/Tests/TestDependencies.py142
-rw-r--r--Cython/Compiler/Annotate.py6
-rw-r--r--Cython/Compiler/Builtin.py28
-rw-r--r--Cython/Compiler/CmdLine.py23
-rw-r--r--Cython/Compiler/Code.pxd5
-rw-r--r--Cython/Compiler/Code.py66
-rw-r--r--Cython/Compiler/CythonScope.py2
-rw-r--r--Cython/Compiler/Dataclass.py315
-rw-r--r--Cython/Compiler/ExprNodes.py430
-rw-r--r--Cython/Compiler/FlowControl.pxd8
-rw-r--r--Cython/Compiler/FlowControl.py73
-rw-r--r--Cython/Compiler/FusedNode.py18
-rw-r--r--Cython/Compiler/Main.py60
-rw-r--r--Cython/Compiler/MemoryView.py2
-rw-r--r--Cython/Compiler/ModuleNode.py38
-rw-r--r--Cython/Compiler/Naming.py1
-rw-r--r--Cython/Compiler/Nodes.py117
-rw-r--r--Cython/Compiler/Optimize.py23
-rw-r--r--Cython/Compiler/Options.py18
-rw-r--r--Cython/Compiler/ParseTreeTransforms.pxd4
-rw-r--r--Cython/Compiler/ParseTreeTransforms.py253
-rw-r--r--Cython/Compiler/Parsing.pxd11
-rw-r--r--Cython/Compiler/Parsing.py197
-rw-r--r--Cython/Compiler/Pipeline.py15
-rw-r--r--Cython/Compiler/PyrexTypes.py31
-rw-r--r--Cython/Compiler/Symtab.py55
-rw-r--r--Cython/Compiler/Tests/TestCmdLine.py77
-rw-r--r--Cython/Compiler/Tests/TestParseTreeTransforms.py9
-rw-r--r--Cython/Compiler/TypeInference.py7
-rw-r--r--Cython/Compiler/TypeSlots.py9
-rw-r--r--Cython/Compiler/Visitor.py16
-rw-r--r--Cython/Coverage.py55
-rw-r--r--Cython/Distutils/old_build_ext.py4
-rw-r--r--Cython/Includes/cpython/time.pxd2
-rw-r--r--Cython/Includes/cpython/unicode.pxd35
-rw-r--r--Cython/Includes/libcpp/bit.pxd31
-rw-r--r--Cython/Includes/libcpp/map.pxd3
-rw-r--r--Cython/Includes/libcpp/numeric.pxd7
-rw-r--r--Cython/Includes/libcpp/set.pxd4
-rw-r--r--Cython/Includes/libcpp/string.pxd9
-rw-r--r--Cython/Includes/libcpp/unordered_map.pxd4
-rw-r--r--Cython/Includes/libcpp/unordered_set.pxd4
-rw-r--r--Cython/Shadow.py2
-rw-r--r--Cython/TestUtils.py87
-rw-r--r--Cython/Utility/AsyncGen.c2
-rw-r--r--Cython/Utility/CommonStructures.c2
-rw-r--r--Cython/Utility/Complex.c2
-rw-r--r--Cython/Utility/Coroutine.c12
-rw-r--r--Cython/Utility/CppSupport.cpp39
-rw-r--r--Cython/Utility/CythonFunction.c14
-rw-r--r--Cython/Utility/Exceptions.c6
-rw-r--r--Cython/Utility/ExtensionTypes.c6
-rw-r--r--Cython/Utility/ImportExport.c27
-rw-r--r--Cython/Utility/MemoryView.pyx32
-rw-r--r--Cython/Utility/MemoryView_C.c92
-rw-r--r--Cython/Utility/ModuleSetupCode.c105
-rw-r--r--Cython/Utility/ObjectHandling.c25
-rw-r--r--Cython/Utility/Optimize.c15
-rw-r--r--Cython/Utility/TypeConversion.c2
-rw-r--r--Cython/Utils.py25
-rw-r--r--Makefile6
-rw-r--r--README.rst4
-rw-r--r--Tools/ci-run.sh27
-rw-r--r--Tools/dataclass_test_data/test_dataclasses.py4266
-rw-r--r--Tools/make_dataclass_tests.py443
-rw-r--r--appveyor.yml138
-rw-r--r--docs/examples/tutorial/clibraries/queue.py2
-rw-r--r--docs/examples/tutorial/embedding/embedded.pyx3
-rw-r--r--docs/examples/tutorial/pure/disabled_annotations.py33
-rw-r--r--docs/examples/userguide/buffer/matrix.py15
-rw-r--r--docs/examples/userguide/buffer/matrix.pyx3
-rw-r--r--docs/examples/userguide/buffer/matrix_with_buffer.py48
-rw-r--r--docs/examples/userguide/buffer/matrix_with_buffer.pyx7
-rw-r--r--docs/examples/userguide/buffer/view_count.py30
-rw-r--r--docs/examples/userguide/buffer/view_count.pyx3
-rw-r--r--docs/examples/userguide/early_binding_for_speed/rectangle.py22
-rw-r--r--docs/examples/userguide/early_binding_for_speed/rectangle.pyx3
-rw-r--r--docs/examples/userguide/early_binding_for_speed/rectangle_cdef.py26
-rw-r--r--docs/examples/userguide/early_binding_for_speed/rectangle_cdef.pyx4
-rw-r--r--docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.py23
-rw-r--r--docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx6
-rw-r--r--docs/examples/userguide/extension_types/dict_animal.pyx2
-rw-r--r--docs/examples/userguide/extension_types/extendable_animal.pyx2
-rw-r--r--docs/examples/userguide/language_basics/enum.pyx (renamed from docs/examples/userguide/language_basics/struct_union_enum.pyx)11
-rw-r--r--docs/examples/userguide/language_basics/function_pointer.pyx8
-rw-r--r--docs/examples/userguide/language_basics/function_pointer_struct.pyx9
-rw-r--r--docs/examples/userguide/language_basics/struct.py7
-rw-r--r--docs/examples/userguide/language_basics/struct.pyx7
-rw-r--r--docs/examples/userguide/language_basics/struct_union_enum.py7
-rw-r--r--docs/examples/userguide/language_basics/union.py9
-rw-r--r--docs/examples/userguide/language_basics/union.pyx9
-rw-r--r--docs/examples/userguide/parallelism/breaking_loop.py15
-rw-r--r--docs/examples/userguide/parallelism/breaking_loop.pyx2
-rw-r--r--docs/examples/userguide/parallelism/cimport_openmp.py11
-rw-r--r--docs/examples/userguide/parallelism/cimport_openmp.pyx2
-rw-r--r--docs/examples/userguide/parallelism/memoryview_sum.py7
-rw-r--r--docs/examples/userguide/parallelism/memoryview_sum.pyx7
-rw-r--r--docs/examples/userguide/parallelism/parallel.py30
-rw-r--r--docs/examples/userguide/parallelism/parallel.pyx30
-rw-r--r--docs/examples/userguide/parallelism/setup_py.py16
-rw-r--r--docs/examples/userguide/parallelism/setup_pyx.py (renamed from docs/examples/userguide/parallelism/setup.py)0
-rw-r--r--docs/examples/userguide/parallelism/simple_sum.py10
-rw-r--r--docs/examples/userguide/sharing_declarations/shrubbing.pyx2
-rw-r--r--docs/examples/userguide/wrapping_CPlusPlus/rect.pyx2
-rw-r--r--docs/examples/userguide/wrapping_CPlusPlus/rect_ptr.pyx10
-rw-r--r--docs/examples/userguide/wrapping_CPlusPlus/rect_with_attributes.pyx2
-rw-r--r--docs/src/quickstart/install.rst6
-rw-r--r--docs/src/tutorial/clibraries.rst37
-rw-r--r--docs/src/tutorial/cython_tutorial.rst6
-rw-r--r--docs/src/tutorial/pure.rst15
-rw-r--r--docs/src/two-syntax-variants-used4
-rw-r--r--docs/src/userguide/buffer.rst34
-rw-r--r--docs/src/userguide/early_binding_for_speed.rst37
-rw-r--r--docs/src/userguide/extension_types.rst139
-rw-r--r--docs/src/userguide/language_basics.rst238
-rw-r--r--docs/src/userguide/memoryviews.rst1
-rw-r--r--docs/src/userguide/migrating_to_cy30.rst54
-rw-r--r--docs/src/userguide/parallelism.rst89
-rw-r--r--docs/src/userguide/source_files_and_compilation.rst10
-rw-r--r--pyximport/_pyximport2.py26
-rw-r--r--pyximport/_pyximport3.py26
-rwxr-xr-xruntests.py118
-rwxr-xr-xsetup.py55
-rw-r--r--test-requirements-27.txt3
-rw-r--r--test-requirements-34.txt2
-rw-r--r--test-requirements-cpython.txt1
-rw-r--r--test-requirements-pypy27.txt2
-rw-r--r--tests/bugs.txt1
-rw-r--r--tests/build/cythonize_options.srctree2
-rw-r--r--tests/build/depfile_package_cython.srctree61
-rw-r--r--tests/build/depfile_package_cythonize.srctree (renamed from tests/build/depfile_package.srctree)7
-rw-r--r--tests/compile/branch_hints.pyx2
-rw-r--r--tests/compile/buildenv.pyx4
-rw-r--r--tests/compile/c_directives.pyx2
-rw-r--r--tests/compile/cpp_nogil.pyx2
-rw-r--r--tests/compile/declarations.srctree2
-rw-r--r--tests/compile/excvalcheck.h6
-rw-r--r--tests/compile/fused_buffers.pyx16
-rw-r--r--tests/compile/fused_no_numpy.pyx13
-rw-r--r--tests/compile/fused_redeclare_T3111.pyx12
-rw-r--r--tests/compile/module_name_arg.srctree52
-rw-r--r--tests/compile/nogil.h12
-rw-r--r--tests/compile/publicapi_pxd_mix.pxd2
-rw-r--r--tests/compile/publicapi_pxd_mix.pyx2
-rw-r--r--tests/compile/pxd_mangling_names.srctree46
-rw-r--r--tests/errors/cfuncptr.pyx4
-rw-r--r--tests/errors/cpp_increment.pyx33
-rw-r--r--tests/errors/dataclass_e6.pyx23
-rw-r--r--tests/errors/dataclass_w1.pyx13
-rw-r--r--tests/errors/dataclass_w1_othermod.pxd3
-rw-r--r--tests/errors/e_decorators.pyx12
-rw-r--r--tests/errors/e_excvalfunctype.pyx2
-rw-r--r--tests/errors/e_invalid_special_cython_modules.py42
-rw-r--r--tests/errors/e_nogilfunctype.pyx2
-rw-r--r--tests/errors/e_pure_cimports.pyx3
-rw-r--r--tests/errors/e_relative_cimport.pyx4
-rw-r--r--tests/errors/nogil.pyx2
-rw-r--r--tests/errors/nogilfunctype.pyx2
-rw-r--r--tests/errors/w_uninitialized.pyx4
-rw-r--r--tests/macos_cpp_bugs.txt1
-rw-r--r--tests/memoryview/cythonarray.pyx2
-rw-r--r--tests/memoryview/memoryview.pyx4
-rw-r--r--tests/memoryview/memoryview_acq_count.srctree2
-rw-r--r--tests/memoryview/memslice.pyx102
-rw-r--r--tests/memoryview/numpy_memoryview.pyx2
-rw-r--r--tests/pypy2_bugs.txt3
-rw-r--r--tests/run/annotate_html.pyx3
-rw-r--r--tests/run/annotation_typing.pyx41
-rw-r--r--tests/run/binop_reverse_methods_GH2056.pyx44
-rw-r--r--tests/run/builtin_abs.pyx6
-rw-r--r--tests/run/c_file_validation.srctree72
-rw-r--r--tests/run/cdef_class_dataclass.pyx9
-rw-r--r--tests/run/cfunc_convert.pyx27
-rw-r--r--tests/run/complex_numbers_T305.pyx86
-rw-r--r--tests/run/coverage_cmd_src_pkg_layout.srctree177
-rw-r--r--tests/run/cpdef_void_return.pyx2
-rw-r--r--tests/run/cpp_classes.pyx2
-rw-r--r--tests/run/cpp_classes_def.pyx2
-rw-r--r--tests/run/cpp_exceptions_nogil.pyx2
-rw-r--r--tests/run/cpp_extern.srctree151
-rw-r--r--tests/run/cpp_function_lib.pxd6
-rw-r--r--tests/run/cpp_iterators.pyx154
-rw-r--r--tests/run/cpp_iterators_over_attribute_of_rvalue_support.h11
-rw-r--r--tests/run/cpp_iterators_simple.h11
-rw-r--r--tests/run/cpp_locals_directive.pyx10
-rw-r--r--tests/run/cpp_nested_classes.pyx44
-rw-r--r--tests/run/cpp_stl_associated_containers_contains_cpp20.pyx106
-rw-r--r--tests/run/cpp_stl_bit_cpp20.pyx131
-rw-r--r--tests/run/cpp_stl_function.pyx18
-rw-r--r--tests/run/cpp_stl_numeric_ops_cpp17.pyx18
-rw-r--r--tests/run/cpp_stl_numeric_ops_cpp20.pyx23
-rw-r--r--tests/run/cpp_stl_string_cpp20.pyx61
-rw-r--r--tests/run/cython_no_files.srctree34
-rw-r--r--tests/run/decorators.pyx33
-rw-r--r--tests/run/exceptionpropagation.pyx24
-rw-r--r--tests/run/exceptions_nogil.pyx2
-rw-r--r--tests/run/fused_cpp.pyx47
-rw-r--r--tests/run/generators_py.py17
-rw-r--r--tests/run/genexpr_arg_order.py181
-rw-r--r--tests/run/genexpr_iterable_lookup_T600.pyx18
-rw-r--r--tests/run/line_trace.pyx2
-rw-r--r--tests/run/locals.pyx10
-rw-r--r--tests/run/nogil.pyx4
-rw-r--r--tests/run/nogil_conditional.pyx4
-rw-r--r--tests/run/parallel.pyx2
-rw-r--r--tests/run/pep442_tp_finalize.pyx79
-rw-r--r--tests/run/pep442_tp_finalize_cimport.srctree67
-rw-r--r--tests/run/pep526_variable_annotations.py24
-rw-r--r--tests/run/pure_cdef_class_dataclass.py47
-rw-r--r--tests/run/pure_py.py8
-rw-r--r--tests/run/relative_cimport_compare.srctree327
-rw-r--r--tests/run/sequential_parallel.pyx14
-rw-r--r--tests/run/special_methods_T561.pyx41
-rw-r--r--tests/run/test_coroutines_pep492.pyx8
-rw-r--r--tests/run/test_dataclasses.pxi19
-rw-r--r--tests/run/test_dataclasses.pyx1186
-rw-r--r--tests/run/test_grammar.py118
-rw-r--r--tests/run/trace_nogil.pyx2
-rw-r--r--tests/run/type_inference.pyx4
-rw-r--r--tests/run/with_gil.pyx6
-rw-r--r--tests/run/with_gil_automatic.pyx6
-rw-r--r--tests/run/withnogil.pyx2
-rw-r--r--tests/testsupport/cythonarrayutil.pxi2
-rw-r--r--tests/windows_bugs_39.txt3
238 files changed, 12305 insertions, 1731 deletions
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
new file mode 100644
index 000000000..1c2f8aa83
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -0,0 +1,55 @@
+name: Bug Report
+description: File a bug report
+title: "[BUG] "
+body:
+ - type: markdown
+ attributes:
+ value: |
+ **PLEASE READ THIS FIRST:**
+ - DO NOT use the bug and feature tracker for general questions and support requests.
+ Use the [`cython-users`](https://groups.google.com/g/cython-users) mailing list instead.
+ It has a wider audience, so you get more and better answers.
+ - Did you search for SIMILAR ISSUES already?
+ Please do, it helps to save us precious time that we otherwise could not invest into development.
+ - Did you try the LATEST MASTER BRANCH or pre-release?
+ It might already have what you want to report.
+ Specifically, the legacy stable 0.29.x release series receives only important low-risk bug fixes.
+ Also see the [Changelog](https://github.com/cython/cython/blob/master/CHANGES.rst) regarding recent changes
+ - type: textarea
+ id: describe
+ attributes:
+ label: "Describe the bug"
+ description: "A clear and concise description of what the bug is."
+ placeholder: "Tell us what you see!"
+ validations:
+ required: true
+ - type: textarea
+ id: reproduce
+ attributes:
+ label: "Code to reproduce the behaviour:"
+ value: |
+ ```cython
+ # example code
+ ```
+ - type: textarea
+ id: expected
+ attributes:
+ label: "Expected behaviour"
+ description: "A clear and concise description of what you expected to happen."
+ - type: textarea
+ id: environment
+ attributes:
+ label: Environment
+ description: "please complete the following information"
+ value: |
+ OS: [e.g. Linux, Windows, macOS]
+ Python version [e.g. 3.10.2]
+ Cython version [e.g. 3.0.0a11]
+ validations:
+ required: true
+ - type: textarea
+ id: context
+ attributes:
+ label: Additional context
+ description: Add any other context about the problem here.
+
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
deleted file mode 100644
index c35dfae51..000000000
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ /dev/null
@@ -1,41 +0,0 @@
----
-name: Feature request
-about: Suggest an idea for this project
-title: "[ENH] "
-labels: ''
-assignees: ''
-
----
-
-<!--
-**Note:**
-- DO NOT use the bug and feature tracker for general questions and support requests.
- Use the `cython-users` mailing list instead.
- It has a wider audience, so you get more and better answers.
-- Did you search for similar issues already?
- Please do, it helps to save us precious time that we otherwise could not invest into development.
-- Did you try the latest master branch or pre-release?
- It might already have what you want to report.
- Also see the [Changelog](https://github.com/cython/cython/blob/master/CHANGES.rst) regarding recent changes.
--->
-
-**Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. In my code, I would like to do [...]
-```cython
-# add use case related code here
-```
-
-**Describe the solution you'd like**
-A clear and concise description of what you want to happen, including code examples if applicable.
-```cython
-# add a proposed code/syntax example here
-```
-
-**Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
-```cython
-# add alternative code/syntax proposals here
-```
-
-**Additional context**
-Add any other context about the feature request here.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
new file mode 100644
index 000000000..3d46fe3bc
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -0,0 +1,46 @@
+name: Feature request
+description: Suggest an idea for this project
+title: "[ENH] "
+body:
+ - type: markdown
+ attributes:
+ value: |
+ **PLEASE READ THIS FIRST:**
+ - DO NOT use the bug and feature tracker for general questions and support requests.
+ Use the [`cython-users`](https://groups.google.com/g/cython-users) mailing list instead.
+ It has a wider audience, so you get more and better answers.
+ - Did you search for SIMILAR ISSUES already?
+ Please do, it helps to save us precious time that we otherwise could not invest into development.
+ - Did you try the LATEST MASTER BRANCH or pre-release?
+ It might already have what you want to report.
+ Specifically, the legacy stable 0.29.x release series receives only important low-risk bug fixes.
+ Also see the [Changelog](https://github.com/cython/cython/blob/master/CHANGES.rst) regarding recent changes
+ - type: textarea
+ id: problem
+ attributes:
+ label: "Is your feature request related to a problem? Please describe."
+ description: "A clear and concise description of what the problem is."
+ value: |
+ In my code, I would like to do [...]
+ ```cython
+ # add use case related code here
+ ```
+ validations:
+ required: true
+ - type: textarea
+ id: solution
+ attributes:
+ label: "Describe the solution you'd like."
+ description: "A clear and concise description of what you want to happen, including code examples if applicable."
+ placeholder: add a proposed code/syntax example here
+ - type: textarea
+ id: alternatives
+ attributes:
+ label: "Describe alternatives you've considered."
+ description: "A clear and concise description of any alternative solutions or features you've considered."
+ placeholder: "add alternative code/syntax proposals here"
+ - type: textarea
+ id: context
+ attributes:
+ label: "Additional context"
+ description: "Add any other context about the feature request here."
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/other.md
index be0b183dc..95aa5153e 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/other.md
@@ -1,14 +1,14 @@
---
-name: Bug report
-about: Create a report to help us improve
-title: "[BUG] "
+name: Other
+about: Anything that does not qualify as either "bug" or "feature request". DO NOT post support requests here.
+title: ""
labels: ''
assignees: ''
---
<!--
-**PLEASE READ THIS FIRST:**
+**Note:**
- DO NOT use the bug and feature tracker for general questions and support requests.
Use the `cython-users` mailing list instead.
It has a wider audience, so you get more and better answers.
@@ -18,22 +18,3 @@ assignees: ''
It might already have what you want to report.
Also see the [Changelog](https://github.com/cython/cython/blob/master/CHANGES.rst) regarding recent changes.
-->
-
-**Describe the bug**
-A clear and concise description of what the bug is.
-
-**To Reproduce**
-Code to reproduce the behaviour:
-```cython
-```
-
-**Expected behavior**
-A clear and concise description of what you expected to happen.
-
-**Environment (please complete the following information):**
- - OS: [e.g. Linux, Windows, macOS]
- - Python version [e.g. 3.8.4]
- - Cython version [e.g. 0.29.18]
-
-**Additional context**
-Add any other context about the problem here.
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index f5f645555..4dd771ad9 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,6 +1,24 @@
name: CI
-on: [push, pull_request, workflow_dispatch]
+on:
+ push:
+ paths:
+ - '**'
+ - '!.github/**'
+ - '.github/workflows/ci.yml'
+ pull_request:
+ paths:
+ - '**'
+ - '!.github/**'
+ - '.github/workflows/ci.yml'
+ workflow_dispatch:
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
+ cancel-in-progress: true
+
+permissions:
+ contents: read # to fetch code (actions/checkout)
jobs:
ci:
@@ -23,7 +41,7 @@ jobs:
#
# FIXME: 'cpp' tests seems to fail due to compilation errors (numpy_pythran_unit)
# in all python versions and test failures (builtin_float) in 3.5<
- os: [ubuntu-18.04]
+ os: [windows-2019, ubuntu-18.04]
backend: [c, cpp]
python-version:
- "2.7"
@@ -39,24 +57,22 @@ jobs:
env: [{}]
include:
- # Temporary - Allow failure on Python 3.11-dev jobs until they are considered stable
- - python-version: 3.11-dev
- allowed_failure: true
+ # Temporary - Allow failure on Python 3.12-dev jobs until they are in beta (feature frozen)
#- python-version: 3.12-dev
# allowed_failure: true
# Ubuntu sub-jobs:
# ================
- # GCC 11
+ # GCC 11 (with latest language standards)
- os: ubuntu-18.04
python-version: 3.9
backend: c
- env: { GCC_VERSION: 11 }
+ env: { GCC_VERSION: 11, EXTRA_CFLAGS: "-std=c17" }
extra_hash: "-gcc11"
- os: ubuntu-18.04
python-version: 3.9
backend: cpp
- env: { GCC_VERSION: 11 }
+ env: { GCC_VERSION: 11, EXTRA_CFLAGS: "-std=c++20" }
extra_hash: "-gcc11"
# compile all modules
- os: ubuntu-18.04
@@ -70,12 +86,12 @@ jobs:
env: { CYTHON_COMPILE_ALL: 1 }
extra_hash: "-all"
- os: ubuntu-18.04
- python-version: 3.9
+ python-version: "3.10"
backend: c
env: { CYTHON_COMPILE_ALL: 1 }
extra_hash: "-all"
- os: ubuntu-18.04
- python-version: 3.9
+ python-version: "3.10"
backend: cpp
env: { CYTHON_COMPILE_ALL: 1 }
extra_hash: "-all"
@@ -147,66 +163,81 @@ jobs:
python-version: pypy-3.7
backend: c
env: { NO_CYTHON_COMPILE: 1 }
- # Coverage - Disabled due to taking too long to run
- # - os: ubuntu-18.04
- # python-version: 3.7
- # backend: "c,cpp"
- # env: { COVERAGE: 1 }
- # extra_hash: '-coverage'
+ # Coverage
+ - os: ubuntu-18.04
+ python-version: 3.8
+ backend: "c,cpp"
+ env: { COVERAGE: 1 }
+ extra_hash: '-coverage'
+
+ - os: windows-2019
+ allowed_failure: true
# MacOS sub-jobs
# ==============
# (C-only builds are used to create wheels)
- - os: macos-10.15
+ - os: macos-11
python-version: 2.7
backend: c
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
- - os: macos-10.15
+ - os: macos-11
python-version: 2.7
backend: cpp
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
- - os: macos-10.15
+ - os: macos-11
python-version: 3.5
backend: c
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
- - os: macos-10.15
+ - os: macos-11
python-version: 3.6
backend: c
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
- - os: macos-10.15
+ - os: macos-11
python-version: 3.7
backend: c
env: { MACOSX_DEPLOYMENT_TARGET: 10.15 }
- - os: macos-10.15
+ - os: macos-11
python-version: 3.8
backend: c
env: { MACOSX_DEPLOYMENT_TARGET: 10.15 }
- - os: macos-10.15
+ - os: macos-11
python-version: 3.9
backend: c
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
- - os: macos-10.15
+ - os: macos-11
python-version: 3.9
backend: cpp
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
- - os: macos-10.15
+ - os: macos-11
python-version: "3.10"
backend: c
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
- - os: macos-10.15
+ - os: macos-11
python-version: "3.10"
backend: cpp
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
+ exclude:
+ # fails due to lack of a compatible compiler
+ - os: windows-2019
+ python-version: 2.7
+ - os: windows-2019
+ python-version: 3.4
+
+ # cpp specific test fails
+ - os: windows-2019
+ python-version: 3.5
+ backend: cpp
+
+
# This defaults to 360 minutes (6h) which is way too long and if a test gets stuck, it can block other pipelines.
- # From testing, the runs tend to take ~20/~30 minutes, so a limit of 40 minutes should be enough. This can always be
- # changed in the future if needed.
- timeout-minutes: 40
+ # From testing, the runs tend to take ~20 minutes for ubuntu / macos and ~40 for windows,
+ # so a limit of 50 minutes should be enough. This can always be changed in the future if needed.
+ timeout-minutes: 50
runs-on: ${{ matrix.os }}
env:
BACKEND: ${{ matrix.backend }}
- OS_NAME: ${{ matrix.os }}
PYTHON_VERSION: ${{ matrix.python-version }}
GCC_VERSION: 8
USE_CCACHE: 1
@@ -226,7 +257,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Cache [ccache]
- uses: pat-s/always-upload-cache@v3.0.1
+ uses: pat-s/always-upload-cache@v3.0.11
if: startsWith(runner.os, 'Linux')
with:
path: ~/.ccache
@@ -247,7 +278,7 @@ jobs:
- name: Upload wheels
uses: actions/upload-artifact@v3
with:
- name: wheels-${{ runner.os }}
+ name: wheels-${{ runner.os }}${{ matrix.extra_hash }}
path: dist/*.whl
if-no-files-found: ignore
@@ -279,31 +310,31 @@ jobs:
name: pycoverage_html
path: coverage-report-html
-# cycoverage:
-# runs-on: ubuntu-18.04
-#
-# env:
-# BACKEND: c,cpp
-# OS_NAME: ubuntu-18.04
-# PYTHON_VERSION: 3.9
-#
-# steps:
-# - name: Checkout repo
-# uses: actions/checkout@v2
-# with:
-# fetch-depth: 1
-#
-# - name: Setup python
-# uses: actions/setup-python@v2
-# with:
-# python-version: 3.9
-#
-# - name: Run Coverage
-# env: { COVERAGE: 1 }
-# run: bash ./Tools/ci-run.sh
-#
-# - name: Upload Coverage Report
-# uses: actions/upload-artifact@v2
-# with:
-# name: cycoverage_html
-# path: coverage-report-html
+ cycoverage:
+ runs-on: ubuntu-18.04
+
+ env:
+ BACKEND: c,cpp
+ OS_NAME: ubuntu-18.04
+ PYTHON_VERSION: 3.9
+
+ steps:
+ - name: Checkout repo
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 1
+
+ - name: Setup python
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+
+ - name: Run Coverage
+ env: { COVERAGE: 1 }
+ run: bash ./Tools/ci-run.sh
+
+ - name: Upload Coverage Report
+ uses: actions/upload-artifact@v2
+ with:
+ name: cycoverage_html
+ path: coverage-report-html
diff --git a/.github/workflows/wheel-manylinux.yml b/.github/workflows/wheel-manylinux.yml
deleted file mode 100644
index 40ef7ad47..000000000
--- a/.github/workflows/wheel-manylinux.yml
+++ /dev/null
@@ -1,91 +0,0 @@
-name: Linux wheel build
-
-on:
- release:
- types: [created]
-
-jobs:
- python:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
-
- - name: Set up Python
- uses: actions/setup-python@v3
- with:
- python-version: "3.10"
-
- - name: Install build dependencies
- run: pip install -U "setuptools<60" pip wheel
-
- - name: Make sdist and Python wheel
- run: make sdist pywheel
-
- - name: Release
- uses: softprops/action-gh-release@v1
- if: startsWith(github.ref, 'refs/tags/')
- with:
- files: |
- dist/*.tar.gz
- dist/*-none-any.whl
-
- - name: Upload sdist
- uses: actions/upload-artifact@v3
- with:
- name: sdist
- path: dist/*.tar.gz
- if-no-files-found: ignore
-
- - name: Upload Python wheel
- uses: actions/upload-artifact@v3
- with:
- name: wheel-Python
- path: dist/*-none-any.whl
- if-no-files-found: ignore
-
- binary:
- strategy:
- # Allows for matrix sub-jobs to fail without canceling the rest
- fail-fast: false
-
- matrix:
- image:
- - manylinux1_x86_64
- - manylinux1_i686
- - musllinux_1_1_x86_64
- - musllinux_1_1_aarch64
- - manylinux_2_24_x86_64
- - manylinux_2_24_i686
- - manylinux_2_24_aarch64
-
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v3
-
- - name: Set up Python 3.10
- uses: actions/setup-python@v3
- with:
- python-version: "3.10"
-
- - name: Building wheel
- run: |
- make sdist wheel_${{ matrix.image }}
-
- - name: Copy wheels in dist
- run: cp wheelhouse*/*.whl dist/
-
- - name: Release
- uses: softprops/action-gh-release@v1
- if: startsWith(github.ref, 'refs/tags/')
- with:
- files: |
- dist/*manylinux*.whl
- dist/*musllinux*.whl
-
- - name: Archive Wheels
- uses: actions/upload-artifact@v3
- with:
- name: ${{ matrix.image }}
- path: dist/*m[au][ns][yl]linux*.whl
- if-no-files-found: ignore
diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml
new file mode 100644
index 000000000..910d86a4e
--- /dev/null
+++ b/.github/workflows/wheels.yml
@@ -0,0 +1,151 @@
+# Workflow to build wheels for upload to PyPI.
+#
+# In an attempt to save CI resources, wheel builds do
+# not run on each push but only weekly and for releases.
+# Wheel builds can be triggered from the Actions page
+# (if you have the perms) on a commit to master.
+#
+# Alternatively, if you would like to trigger wheel builds
+# on a pull request, the labels that trigger builds are:
+# - Build System
+
+name: Wheel Builder
+on:
+ release:
+ types: [created]
+ schedule:
+ # ┌───────────── minute (0 - 59)
+ # │ ┌───────────── hour (0 - 23)
+ # │ │ ┌───────────── day of the month (1 - 31)
+ # │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
+ # │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
+ # │ │ │ │ │
+ - cron: "42 1 * * 4"
+ pull_request:
+ types: [labeled, opened, synchronize, reopened]
+ paths:
+ #- Cython/Build/**
+ - .github/workflows/wheels.yml
+ - MANIFEST.in
+ - setup.*
+ workflow_dispatch:
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+ cancel-in-progress: true
+
+permissions:
+ contents: write # to create GitHub release (softprops/action-gh-release)
+
+jobs:
+ build_wheels:
+ name: Build wheel for ${{ matrix.python }}-${{ matrix.buildplat[1] }}
+ if: >-
+ github.event_name == 'release' ||
+ github.event_name == 'schedule' ||
+ github.event_name == 'workflow_dispatch' ||
+ (github.event_name == 'pull_request' &&
+ contains(github.event.pull_request.labels.*.name, 'Build System'))
+ runs-on: ${{ matrix.buildplat[0] }}
+ strategy:
+ # Ensure that a wheel builder finishes even if another fails
+ fail-fast: false
+ matrix:
+ # Github Actions doesn't support pairing matrix values together, let's improvise
+ # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026
+ buildplat:
+ - [ubuntu-20.04, manylinux_x86_64]
+ - [ubuntu-20.04, manylinux_aarch64]
+ - [ubuntu-20.04, manylinux_i686]
+ - [ubuntu-20.04, musllinux_x86_64]
+ - [ubuntu-20.04, musllinux_aarch64]
+ - [macos-11, macosx_*]
+ - [windows-2019, win_amd64]
+ - [windows-2019, win32]
+ python: ["cp36", "cp37", "cp38", "cp39", "cp310", "cp311"] # Note: Wheels not needed for PyPy
+ steps:
+ - name: Checkout Cython
+ uses: actions/checkout@v3
+
+ - name: Set up QEMU
+ if: contains(matrix.buildplat[1], '_aarch64')
+ uses: docker/setup-qemu-action@v1
+ with:
+ platforms: all
+
+ - name: Build wheels
+ uses: pypa/cibuildwheel@v2.11.2
+ env:
+ # TODO: Build Cython with the compile-all flag?
+ CIBW_BUILD: ${{ matrix.python }}-${{ matrix.buildplat[1] }}
+ CIBW_PRERELEASE_PYTHONS: True
+ CIBW_ARCHS_LINUX: auto aarch64
+ CIBW_ENVIRONMENT: CFLAGS='-O3 -g0 -mtune=generic -pipe -fPIC' LDFLAGS='-fPIC'
+ # TODO: Cython tests take a long time to complete
+ # consider running a subset in the future?
+ #CIBW_TEST_COMMAND: python {project}/runtests.py -vv --no-refnanny
+
+ - name: Release
+ uses: softprops/action-gh-release@v1
+ if: startsWith(github.ref, 'refs/tags/')
+ with:
+ files: |
+ dist/*manylinux*.whl
+ dist/*musllinux*.whl
+ dist/*macos*.whl
+ dist/*win32*.whl
+ dist/*win_amd64*.whl
+ prerelease: >-
+ ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b')
+ || contains(github.ref_name, 'rc') || contains(github.ref_name, 'dev') }}
+
+ - uses: actions/upload-artifact@v3
+ with:
+ name: ${{ matrix.python }}-${{ startsWith(matrix.buildplat[1], 'macosx') && 'macosx' || matrix.buildplat[1] }}
+ path: ./wheelhouse/*.whl
+
+ build_sdist_pure_wheel:
+ name: Build sdist and pure wheel
+ if: >-
+ github.event_name == 'release' ||
+ github.event_name == 'schedule' ||
+ github.event_name == 'workflow_dispatch' ||
+ (github.event_name == 'pull_request' &&
+ contains(github.event.pull_request.labels.*.name, 'Build System'))
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout Cython
+ uses: actions/checkout@v3
+
+ # Used to push the built wheels
+ - uses: actions/setup-python@v3
+ with:
+ # Build sdist on lowest supported Python
+ python-version: '3.8'
+
+ - name: Build sdist
+ run: |
+ pip install --upgrade wheel setuptools
+ python setup.py sdist
+ python setup.py bdist_wheel --no-cython-compile --universal
+
+ - uses: actions/upload-artifact@v3
+ with:
+ name: sdist
+ path: ./dist/*.tar.gz
+
+ - uses: actions/upload-artifact@v3
+ with:
+ name: pure-wheel
+ path: ./dist/*.whl
+
+ - name: Release
+ uses: softprops/action-gh-release@v1
+ if: startsWith(github.ref, 'refs/tags/')
+ with:
+ files: |
+ dist/*.tar.gz
+ dist/*-none-any.whl
+ prerelease: >-
+ ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b')
+ || contains(github.ref_name, 'rc') || contains(github.ref_name, 'dev') }}
diff --git a/.gitignore b/.gitignore
index deb4c6fce..18940cd9a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
*.pyc
+*.pyd
*.pyo
__pycache__
*.so
@@ -9,17 +10,21 @@ __pycache__
.*cache*/
*venv*/
-Cython/Compiler/*.c
-Cython/Plex/*.c
-Cython/Runtime/refnanny.c
-Cython/Tempita/*.c
-Cython/*.c
-Cython/*.html
-Cython/*/*.html
-
-Tools/*.elc
-Demos/*.html
-Demos/*/*.html
+/Cython/Build/*.c
+/Cython/Compiler/*.c
+/Cython/Debugger/*.c
+/Cython/Distutils/*.c
+/Cython/Parser/*.c
+/Cython/Plex/*.c
+/Cython/Runtime/refnanny.c
+/Cython/Tempita/*.c
+/Cython/*.c
+/Cython/*.html
+/Cython/*/*.html
+
+/Tools/*.elc
+/Demos/*.html
+/Demos/*/*.html
/TEST_TMP/
/build/
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 551a38cb7..000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-os: linux
-language: python
-
-addons:
- apt:
- packages:
- - gdb
- - python-dbg
- - python3-dbg
- - libzmq-dev # needed by IPython/Tornado
- #- gcc-8
- #- g++-8
-
-cache:
- pip: true
- directories:
- - $HOME/.ccache
-
-env:
- global:
- - USE_CCACHE=1
- - CCACHE_SLOPPINESS=pch_defines,time_macros
- - CCACHE_COMPRESS=1
- - CCACHE_MAXSIZE=250M
- - PATH="/usr/lib/ccache:$PATH"
- - PYTHON_VERSION=3.8
- - OS_NAME=ubuntu
-
-python: 3.8
-
-matrix:
- include:
- - arch: arm64
- env: BACKEND=c
- - arch: arm64
- env: BACKEND=cpp
- - arch: ppc64le
- env: BACKEND=c
- - arch: ppc64le
- env: BACKEND=cpp
- # Disabled due to test errors
- # - arch: s390x
- # env: BACKEND=c
- # - arch: s390x
- # env: BACKEND=cpp
-
-script:
- - bash ./Tools/ci-run.sh
diff --git a/CHANGES.rst b/CHANGES.rst
index 94abc418e..41fdc1aac 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -2,7 +2,124 @@
Cython Changelog
================
-3.0.0 alpha 11 (2022-0?-??)
+3.0.0 alpha 12 (2022-??-??)
+===========================
+
+Features added
+--------------
+
+* Cython implemented C functions now propagate exceptions by default, rather than
+ swallowing them in non-object returning function if the user forgot to add an
+ ``except`` declaration to the signature. This was a long-standing source of bugs,
+ but can require adding the ``noexcept`` declaration to existing functions if
+ exception propagation is really undesired.
+ (Github issue :issue:`4280`)
+
+* `PEP-614 <https://peps.python.org/pep-0614/>`_:
+ decorators can now be arbitrary Python expressions.
+ (Github issue :issue:`4570`)
+
+* Bound C methods can now coerce to Python objects.
+ (Github issue :issue:`4890`)
+
+* ``reversed()`` can now be used together with C++ iteration.
+ Patch by Chia-Hsiang Cheng. (Github issue :issue:`5002`)
+
+* Standard C/C++ atomic operations are now used for memory views, if available.
+ (Github issue :issue:`4925`)
+
+* ``cythonize --help`` now also prints information about the supported environment variables.
+ Patch by Matus Valo. (Github issue :issue:`1711`)
+
+* Declarations were added for the bit operations in C++20.
+ Patch by Jonathan Helgert. (Github issue :issue:`4962`)
+
+Bugs fixed
+----------
+
+* Generator expressions and comprehensions now look up their outer-most iterable
+ on creation, as Python does, and not later on start, as they did previously.
+ (Github issue :issue:`1159`)
+
+* Iterating over memoryviews in generator expressions could leak a buffer reference.
+ (Github issue :issue:`4968`)
+
+* ``__del__`` finaliser methods were not always called if they were only inherited.
+ (Github issue :issue:`4995`)
+
+* ``cdef public`` functions declared in .pxd files could use an incorrectly mangled C name.
+ Patch by EpigeneMax. (Github issue :issue:`2940`)
+
+* C++ post-increment/-decrement operators were not correctly looked up on declared C++
+ classes, thus allowing Cython declarations to be missing for them and incorrect C++
+ code to be generated.
+ Patch by Max Bachmann. (Github issue :issue:`4536`)
+
+* C++ iteration more safely stores the iterable in temporary variables.
+ Patch by Xavier. (Github issue :issue:`3828`)
+
+* C++ references did not work on fused types.
+ (Github issue :issue:`4717`)
+
+* Nesting fused types in other fused types could fail to specialise the inner type.
+ (Github issue :issue:`4725`)
+
+* The special methods ``__matmul__``, ``__truediv__``, ``__floordiv__`` failed to type
+ their ``self`` argument.
+ (Github issue :issue:`5067`)
+
+* Coverage analysis failed in projects with a separate source subdirectory.
+ Patch by Sviatoslav Sydorenko and Ruben Vorderman. (Github issue :issue:`3636`)
+
+* The ``@dataclass`` directive was accidentally inherited by methods and subclasses.
+ (Github issue :issue:`4953`)
+
+* Some issues with Cython ``@dataclass`` arguments, hashing and ``repr()`` were resolved.
+ (Github issue :issue:`4956`)
+
+* Relative imports failed in compiled ``__init__.py`` package modules.
+ Patch by Matus Valo. (Github issue :issue:`3442`)
+
+* Some old usages of the deprecated Python ``imp`` module were replaced with ``importlib``.
+ Patch by Matus Valo. (Github issue :issue:`4640`)
+
+* Invalid and misspelled ``cython.*`` module names were not reported as errors.
+ (Github issue :issue:`4947`)
+
+* Extended glob paths with ``/**/`` and ``\**\`` failed on Windows.
+
+* Annotated HTML generation was missing newlines in 3.0.0a11.
+ (Github issue :issue:`4945`)
+
+* Some parser issues were resolved.
+ (Github issue :issue:`4992`)
+
+* Some C/C++ warnings were resolved.
+ Patches by Max Bachmann et al.
+ (Github issues :issue:`5004`, :issue:`5005`, :issue:`5019`, :issue:`5029`)
+
+* Intel C compilers could complain about unsupported gcc pragmas.
+ Patch by Ralf Gommers. (Github issue :issue:`5052`)
+
+* Includes all bug-fixes and features from the 0.29 maintenance branch
+ up to the :ref:`0.29.33` release.
+
+Other changes
+-------------
+
+* The undocumented, untested and apparently useless syntax
+ ``from somemodule cimport class/struct/union somename`` was removed. The type
+ modifier is not needed here and a plain ``cimport`` of the name will do.
+ (Github issue :issue:`4904`)
+
+* The wheel building process was migrated to use the ``cibuildwheel`` tool.
+ Patch by Thomas Li. (Github issue :issue:`4736`)
+
+* Wheels now include a compiled parser again, which increases their size a little
+ but gives about a 10% speed-up when running Cython.
+
+
+3.0.0 alpha 11 (2022-07-31)
===========================
Features added
@@ -17,22 +134,36 @@ Features added
``:=``) were implemented.
Patch by David Woods. (Github issue :issue:`2636`)
+* Context managers can be written in parentheses.
+ Patch by David Woods. (Github issue :issue:`4814`)
+
* Cython avoids raising ``StopIteration`` in ``__next__`` methods when possible.
Patch by David Woods. (Github issue :issue:`3447`)
-* Some C++ library declarations were extended and fixed.
+* Some C++ and CPython library declarations were extended and fixed.
Patches by Max Bachmann, Till Hoffmann, Julien Jerphanion, Wenjun Si.
(Github issues :issue:`4530`, :issue:`4528`, :issue:`4710`, :issue:`4746`,
- :issue:`4751`, :issue:`4818`, :issue:`4762`)
+ :issue:`4751`, :issue:`4818`, :issue:`4762`, :issue:`4910`)
-* The ``cythonize`` command has a new option ``-M`` to generate ``.dep`` dependency
- files for the compilation unit. This can be used by external build tools to track
- these dependencies. Already available in Cython :ref:`0.29.27`.
- Patch by Evgeni Burovski. (Github issue :issue:`1214`)
+* The ``cythonize`` and ``cython`` commands have a new option ``-M`` / ``--depfile``
+ to generate ``.dep`` dependency files for the compilation unit. This can be used
+ by external build tools to track these dependencies.
+ The ``cythonize`` option was already available in Cython :ref:`0.29.27`.
+ Patches by Evgeni Burovski and Eli Schwartz. (Github issue :issue:`1214`)
* ``cythonize()`` and the corresponding CLI command now regenerate the output files
also when they already exist but were generated by a different Cython version.
+* Memory views and the internal Cython array type now identify as ``collections.abc.Sequence``.
+ Patch by David Woods. (Github issue :issue:`4817`)
+
+* Cython generators and coroutines now identify as ``CO_ASYNC_GENERATOR``,
+ ``CO_COROUTINE`` and ``CO_GENERATOR`` accordingly.
+ (Github issue :issue:`4902`)
+
+* Memory views can use atomic CPU instructions instead of locks in more cases.
+ Patch by Sam Gross. (Github issue :issue:`4912`)
+
* The environment variable ``CYTHON_FORCE_REGEN=1`` can be used to force ``cythonize``
to regenerate the output files regardless of modification times and changes.
@@ -53,9 +184,16 @@ Bugs fixed
* Exceptions within for-loops that run over memoryviews could lead to a ref-counting error.
Patch by David Woods. (Github issue :issue:`4662`)
+* Using memoryview arguments in closures of inner functions could lead to ref-counting errors.
+ Patch by David Woods. (Github issue :issue:`4798`)
+
* Several optimised string methods failed to accept ``None`` as arguments to their options.
Test patch by Kirill Smelkov. (Github issue :issue:`4737`)
+* A regression in 3.0.0a10 was resolved that prevented property setter methods from
+ having the same name as their value argument.
+ Patch by David Woods. (Github issue :issue:`4836`)
+
* Typedefs for the ``bint`` type did not always behave like ``bint``.
Patch by Nathan Manville and 0dminnimda. (Github issue :issue:`4660`)
@@ -70,6 +208,9 @@ Bugs fixed
* ``pyximport`` no longer uses the deprecated ``imp`` module.
Patch by Matus Valo. (Github issue :issue:`4560`)
+* ``pyximport`` failed for long filenames on Windows.
+ Patch by Matti Picus. (Github issue :issue:`4630`)
+
* The generated C code failed to compile in CPython 3.11a4 and later.
(Github issue :issue:`4500`)
@@ -85,6 +226,14 @@ Bugs fixed
compatible exception specifications. Patches by David Woods.
(Github issues :issue:`4770`, :issue:`4689`)
+* The runtime size check for imported ``PyVarObject`` types was improved
+ to reduce false positives and adapt to Python 3.11.
+ Patch by David Woods. (Github issues :issue:`4827`, :issue:`4894`)
+
+* The generated modules no longer import NumPy internally when using
+ fused types but no memoryviews.
+ Patch by David Woods. (Github issue :issue:`4935`)
+
* Improve compatibility with forthcoming CPython 3.12 release.
* Limited API C preprocessor warning is compatible with MSVC. Patch by
@@ -96,7 +245,8 @@ Bugs fixed
* The parser allowed some invalid spellings of ``...``.
Patch by 0dminnimda. (Github issue :issue:`4868`)
-* Includes all bug-fixes from the 0.29 branch up to the :ref:`0.29.31` release.
+* Includes all bug-fixes and features from the 0.29 maintenance branch
+ up to the :ref:`0.29.32` release.
Other changes
-------------
@@ -1016,41 +1166,119 @@ Other changes
.. _`PEP-563`: https://www.python.org/dev/peps/pep-0563
.. _`PEP-479`: https://www.python.org/dev/peps/pep-0479
-.. _0.29.31:
+.. _0.29.33:
-0.29.31 (2022-??-??)
+0.29.33 (????-??-??)
====================
+Features added
+--------------
+
+* The ``cythonize`` and ``cython`` commands have a new option ``-M`` / ``--depfile``
+ to generate ``.dep`` dependency files for the compilation unit. This can be used
+ by external build tools to track these dependencies.
+ The ``cythonize`` option was already available in Cython :ref:`0.29.27`.
+ Patches by Evgeni Burovski and Eli Schwartz. (Github issue :issue:`1214`)
+
Bugs fixed
----------
-* Use ``importlib.util.find_spec()`` instead of the deprecated ``importlib.find_loader()``
- function when setting up the package path at import-time. Patch by Matti Picus.
- (Github issue :issue:`4764`)
-
-* Require the C compiler to support the two-arg form of ``va_start`` on Python 3.10
- and higher. Patch by Thomas Caswell.
- (Github issue :issue:`4820`)
+* Fixed various compiler warnings. One patch by Lisandro Dalcin.
+ (Github issues :issue:`4948`, :issue:`5086`)
+
+* Fixed error when calculating complex powers of negative numbers.
+ (Github issue :issue:`5014`)
-* Make ``fused_type`` subscriptable in Shadow.py. Patch by Pfebrer.
- (Github issue :issue:`4842`)
+* Corrected a small mis-formatting of exception messages on Python 2.
+ (Github issue :issue:`5018`)
+
+Other changes
+-------------
-* Fix the incorrect code generation of the target type in ``bytearray`` loops.
- Patch by Kenrick Everett.
- (Github issue :issue:`4108`)
+* The undocumented, untested and apparently useless syntax
+ ``from somemodule cimport class/struct/union somename`` was deprecated
+ in anticipation of its removal in Cython 3. The type
+ modifier is not needed here and a plain ``cimport`` of the name will do.
+ (Github issue :issue:`4905`)
+* Properly disable generation of descriptor docstrings on PyPy since
+ they cause crashes. It was previously disabled, but only accidentally
+ via a typo. Patch by Matti Picus.
+ (Github issue :issue:`5083`)
+
+
+
+.. _0.29.32:
+
+0.29.32 (2022-07-29)
+====================
+
+Bugs fixed
+----------
+
+* Revert "Using memoryview typed arguments in inner functions is now rejected as unsupported."
+ Patch by David Woods. (Github issue :issue:`4798`)
+
+* ``from module import *`` failed in 0.29.31 when using memoryviews.
+ Patch by David Woods. (Github issue :issue:`4927`)
+
+
+.. _0.29.31:
+
+0.29.31 (2022-07-27)
+====================
+
+Features added
+--------------
+
+* A new argument ``--module-name`` was added to the ``cython`` command to
+ provide the (one) exact target module name from the command line.
+ Patch by Matthew Brett and h-vetinari. (Github issue :issue:`4906`)
+
+Bugs fixed
+----------
+
+* Use ``importlib.util.find_spec()`` instead of the deprecated ``importlib.find_loader()``
+ function when setting up the package path at import-time.
+ Patch by Matti Picus. (Github issue :issue:`4764`)
+
+* Require the C compiler to support the two-arg form of ``va_start``
+ on Python 3.10 and higher.
+ Patch by Thomas Caswell. (Github issue :issue:`4820`)
+
+* Make ``fused_type`` subscriptable in Shadow.py.
+ Patch by Pfebrer. (Github issue :issue:`4842`)
+
+* Fix the incorrect code generation of the target type in ``bytearray`` loops.
+ Patch by Kenrick Everett. (Github issue :issue:`4108`)
+
+* Atomic refcounts for memoryviews were not used on some GCC versions by accident.
+ Patch by Sam Gross. (Github issue :issue:`4915`)
+
* Silence some GCC ``-Wconversion`` warnings in C utility code.
- Patch by Lisandro Dalcin.
- (Github issue :issue:`4854`)
-
-* Stop tuple multiplication being ignored in expressions such as ``[*(1,) * 2]``.
- Patch by David Woods.
- (Github issue :issue:`4864`)
-
-* Ensure that object buffers (e.g. ``ndarray[object, ndim=1]``) containing
+ Patch by Lisandro Dalcin. (Github issue :issue:`4854`)
+
+* Tuple multiplication was ignored in expressions such as ``[*(1,) * 2]``.
+ Patch by David Woods. (Github issue :issue:`4864`)
+
+* Calling ``append`` methods on extension types could fail to find the method
+ in some cases.
+ Patch by David Woods. (Github issue :issue:`4828`)
+
+* Ensure that object buffers (e.g. ``ndarray[object, ndim=1]``) containing
``NULL`` pointers are safe to use, returning ``None`` instead of the ``NULL``
- pointer. Patch by Sebastian Berg.
- (Github issue :issue:`4859`)
+ pointer.
+ Patch by Sebastian Berg. (Github issue :issue:`4859`)
+
+* Using memoryview typed arguments in inner functions is now rejected as unsupported.
+ Patch by David Woods. (Github issue :issue:`4798`)
+
+* Compilation could fail on systems (e.g. FIPS) that block MD5 checksums at runtime.
+ (Github issue :issue:`4909`)
+
+* Experimental adaptations for the CPython "nogil" fork were added.
+ Note that there is no official support for this in Cython 0.x.
+ Patch by Sam Gross. (Github issue :issue:`4912`)
.. _0.29.30:
diff --git a/Cython/Build/Cythonize.py b/Cython/Build/Cythonize.py
index 1f79589f8..179c04060 100644
--- a/Cython/Build/Cythonize.py
+++ b/Cython/Build/Cythonize.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
import os
import shutil
@@ -45,10 +45,12 @@ def find_package_base(path):
package_path = '%s/%s' % (parent, package_path)
return base_dir, package_path
-
def cython_compile(path_pattern, options):
- pool = None
all_paths = map(os.path.abspath, extended_iglob(path_pattern))
+ _cython_compile_files(all_paths, options)
+
+def _cython_compile_files(all_paths, options):
+ pool = None
try:
for path in all_paths:
if options.build_inplace:
@@ -121,10 +123,18 @@ def run_distutils(args):
def create_args_parser():
- from argparse import ArgumentParser
+ from argparse import ArgumentParser, RawDescriptionHelpFormatter
from ..Compiler.CmdLine import ParseDirectivesAction, ParseOptionsAction, ParseCompileTimeEnvAction
- parser = ArgumentParser()
+ parser = ArgumentParser(
+ formatter_class=RawDescriptionHelpFormatter,
+ epilog="""\
+Environment variables:
+ CYTHON_FORCE_REGEN: if set to 1, forces cythonize to regenerate the output files regardless
+ of modification times and changes.
+ Environment variables accepted by setuptools are supported to configure the C compiler and build:
+ https://setuptools.pypa.io/en/latest/userguide/ext_modules.html#compiler-and-linker-options"""
+ )
parser.add_argument('-X', '--directive', metavar='NAME=VALUE,...',
dest='directives', default={}, type=str,
@@ -222,8 +232,15 @@ def parse_args(args):
def main(args=None):
options, paths = parse_args(args)
+ all_paths = []
for path in paths:
- cython_compile(path, options)
+ expanded_path = [os.path.abspath(p) for p in extended_iglob(path)]
+ if not expanded_path:
+ import sys
+ print("{}: No such file or directory: '{}'".format(sys.argv[0], path), file=sys.stderr)
+ sys.exit(1)
+ all_paths.extend(expanded_path)
+ _cython_compile_files(all_paths, options)
if __name__ == '__main__':
diff --git a/Cython/Build/Dependencies.py b/Cython/Build/Dependencies.py
index f14166f7a..c60cbf34a 100644
--- a/Cython/Build/Dependencies.py
+++ b/Cython/Build/Dependencies.py
@@ -43,10 +43,11 @@ except:
from .. import Utils
from ..Utils import (cached_function, cached_method, path_exists,
- safe_makedirs, copy_file_to_dir_if_newer, is_package_dir)
+ safe_makedirs, copy_file_to_dir_if_newer, is_package_dir, write_depfile)
from ..Compiler import Errors
from ..Compiler.Main import Context
-from ..Compiler.Options import CompilationOptions, default_options
+from ..Compiler.Options import (CompilationOptions, default_options,
+ get_directive_defaults)
join_path = cached_function(os.path.join)
copy_once_if_newer = cached_function(copy_file_to_dir_if_newer)
@@ -85,11 +86,14 @@ def extended_iglob(pattern):
for path in extended_iglob(before + case + after):
yield path
return
- if '**/' in pattern:
+
+ # We always accept '/' and also '\' on Windows,
+ # because '/' is generally common for relative paths.
+ if '**/' in pattern or os.sep == '\\' and '**\\' in pattern:
seen = set()
- first, rest = pattern.split('**/', 1)
+ first, rest = re.split(r'\*\*[%s]' % ('/\\\\' if os.sep == '\\' else '/'), pattern, 1)
if first:
- first = iglob(first+'/')
+ first = iglob(first + os.sep)
else:
first = ['']
for root in first:
@@ -97,7 +101,7 @@ def extended_iglob(pattern):
if path not in seen:
seen.add(path)
yield path
- for path in extended_iglob(join_path(root, '*', '**/' + rest)):
+ for path in extended_iglob(join_path(root, '*', '**', rest)):
if path not in seen:
seen.add(path)
yield path
@@ -728,7 +732,8 @@ def create_dependency_tree(ctx=None, quiet=False):
global _dep_tree
if _dep_tree is None:
if ctx is None:
- ctx = Context(["."], CompilationOptions(default_options))
+ ctx = Context(["."], get_directive_defaults(),
+ options=CompilationOptions(default_options))
_dep_tree = DependencyTree(ctx, quiet=quiet)
return _dep_tree
@@ -1049,21 +1054,7 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
# write out the depfile, if requested
if depfile:
dependencies = deps.all_dependencies(source)
- src_base_dir, _ = os.path.split(source)
- if not src_base_dir.endswith(os.sep):
- src_base_dir += os.sep
- # paths below the base_dir are relative, otherwise absolute
- paths = []
- for fname in dependencies:
- if fname.startswith(src_base_dir):
- paths.append(os.path.relpath(fname, src_base_dir))
- else:
- paths.append(os.path.abspath(fname))
-
- depline = os.path.split(c_file)[1] + ": \\\n "
- depline += " \\\n ".join(paths) + "\n"
- with open(c_file+'.dep', 'w') as outfile:
- outfile.write(depline)
+ write_depfile(c_file, source, dependencies)
# Missing files and those generated by other Cython versions should always be recreated.
if Utils.file_generated_by_this_cython(c_file):
diff --git a/Cython/Build/Inline.py b/Cython/Build/Inline.py
index 15d26dbf8..abb891265 100644
--- a/Cython/Build/Inline.py
+++ b/Cython/Build/Inline.py
@@ -11,7 +11,8 @@ from distutils.command.build_ext import build_ext
import Cython
from ..Compiler.Main import Context
-from ..Compiler.Options import default_options
+from ..Compiler.Options import (default_options, CompilationOptions,
+ get_directive_defaults)
from ..Compiler.Visitor import CythonTransform, EnvTransform
from ..Compiler.ParseTreeTransforms import SkipDeclarations
@@ -41,18 +42,19 @@ if sys.version_info < (3, 5):
def load_dynamic(name, module_path):
return imp.load_dynamic(name, module_path)
else:
- import importlib.util as _importlib_util
- def load_dynamic(name, module_path):
- spec = _importlib_util.spec_from_file_location(name, module_path)
- module = _importlib_util.module_from_spec(spec)
- # sys.modules[name] = module
+ import importlib.util
+ from importlib.machinery import ExtensionFileLoader
+
+ def load_dynamic(name, path):
+ spec = importlib.util.spec_from_file_location(name, loader=ExtensionFileLoader(name, path))
+ module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
class UnboundSymbols(EnvTransform, SkipDeclarations):
def __init__(self):
- CythonTransform.__init__(self, None)
+ super(EnvTransform, self).__init__(context=None)
self.unbound = set()
def visit_NameNode(self, node):
if not self.current_env().lookup(node.name):
@@ -67,7 +69,8 @@ class UnboundSymbols(EnvTransform, SkipDeclarations):
def unbound_symbols(code, context=None):
code = to_unicode(code)
if context is None:
- context = Context([], default_options)
+ context = Context([], get_directive_defaults(),
+ options=CompilationOptions(default_options))
from ..Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform
tree = parse_from_strings('(tree fragment)', code)
for phase in Pipeline.create_pipeline(context, 'pyx'):
@@ -128,7 +131,11 @@ def _get_build_extension():
@cached_function
def _create_context(cython_include_dirs):
- return Context(list(cython_include_dirs), default_options)
+ return Context(
+ list(cython_include_dirs),
+ get_directive_defaults(),
+ options=CompilationOptions(default_options)
+ )
_cython_inline_cache = {}
diff --git a/Cython/Build/IpythonMagic.py b/Cython/Build/IpythonMagic.py
index 36031a78c..3fa43c96d 100644
--- a/Cython/Build/IpythonMagic.py
+++ b/Cython/Build/IpythonMagic.py
@@ -46,7 +46,6 @@ Parts of this code were taken from Cython.inline.
from __future__ import absolute_import, print_function
-import imp
import io
import os
import re
@@ -75,7 +74,7 @@ from IPython.utils.text import dedent
from ..Shadow import __version__ as cython_version
from ..Compiler.Errors import CompileError
-from .Inline import cython_inline
+from .Inline import cython_inline, load_dynamic
from .Dependencies import cythonize
from ..Utils import captured_fd, print_captured
@@ -357,7 +356,7 @@ class CythonMagics(Magics):
# Build seems ok, but we might still want to show any warnings that occurred
print_compiler_output(get_stdout(), get_stderr(), sys.stdout)
- module = imp.load_dynamic(module_name, module_path)
+ module = load_dynamic(module_name, module_path)
self._import_all(module)
if args.annotate:
@@ -420,7 +419,7 @@ class CythonMagics(Magics):
# import and execute module code to generate profile
so_module_path = os.path.join(lib_dir, pgo_module_name + self.so_ext)
- imp.load_dynamic(pgo_module_name, so_module_path)
+ load_dynamic(pgo_module_name, so_module_path)
def _cythonize(self, module_name, code, lib_dir, args, quiet=True):
pyx_file = os.path.join(lib_dir, module_name + '.pyx')
diff --git a/Cython/Build/Tests/TestDependencies.py b/Cython/Build/Tests/TestDependencies.py
new file mode 100644
index 000000000..d3888117d
--- /dev/null
+++ b/Cython/Build/Tests/TestDependencies.py
@@ -0,0 +1,142 @@
+import contextlib
+import os.path
+import sys
+import tempfile
+import unittest
+from io import open
+from os.path import join as pjoin
+
+from ..Dependencies import extended_iglob
+
+
+@contextlib.contextmanager
+def writable_file(dir_path, filename):
+ with open(pjoin(dir_path, filename), "w", encoding="utf8") as f:
+ yield f
+
+
+class TestGlobbing(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls._orig_dir = os.getcwd()
+ if sys.version_info[0] < 3:
+ temp_path = cls._tmpdir = tempfile.mkdtemp()
+ else:
+ cls._tmpdir = tempfile.TemporaryDirectory()
+ temp_path = cls._tmpdir.name
+ os.chdir(temp_path)
+
+ for dir1 in "abcd":
+ for dir1x in [dir1, dir1 + 'x']:
+ for dir2 in "xyz":
+ dir_path = pjoin(dir1x, dir2)
+ os.makedirs(dir_path)
+ with writable_file(dir_path, "file2_pyx.pyx") as f:
+ f.write(u'""" PYX """')
+ with writable_file(dir_path, "file2_py.py") as f:
+ f.write(u'""" PY """')
+
+ with writable_file(dir1x, "file1_pyx.pyx") as f:
+ f.write(u'""" PYX """')
+ with writable_file(dir1x, "file1_py.py") as f:
+ f.write(u'""" PY """')
+
+ @classmethod
+ def tearDownClass(cls):
+ os.chdir(cls._orig_dir)
+ if sys.version_info[0] < 3:
+ import shutil
+ shutil.rmtree(cls._tmpdir)
+ else:
+ cls._tmpdir.cleanup()
+
+ def files_equal(self, pattern, expected_files):
+ expected_files = sorted(expected_files)
+ # It's the user's choice whether '/' will appear on Windows.
+ matched_files = sorted(path.replace('/', os.sep) for path in extended_iglob(pattern))
+ self.assertListEqual(matched_files, expected_files) # /
+
+ # Special case for Windows: also support '\' in patterns.
+ if os.sep == '\\' and '/' in pattern:
+ matched_files = sorted(extended_iglob(pattern.replace('/', '\\')))
+ self.assertListEqual(matched_files, expected_files) # \
+
+ def test_extended_iglob_simple(self):
+ ax_files = [pjoin("a", "x", "file2_pyx.pyx"), pjoin("a", "x", "file2_py.py")]
+ self.files_equal("a/x/*", ax_files)
+ self.files_equal("a/x/*.c12", [])
+ self.files_equal("a/x/*.{py,pyx,c12}", ax_files)
+ self.files_equal("a/x/*.{py,pyx}", ax_files)
+ self.files_equal("a/x/*.{pyx}", ax_files[:1])
+ self.files_equal("a/x/*.pyx", ax_files[:1])
+ self.files_equal("a/x/*.{py}", ax_files[1:])
+ self.files_equal("a/x/*.py", ax_files[1:])
+
+ def test_extended_iglob_simple_star(self):
+ for basedir in "ad":
+ files = [
+ pjoin(basedir, dirname, filename)
+ for dirname in "xyz"
+ for filename in ["file2_pyx.pyx", "file2_py.py"]
+ ]
+ self.files_equal(basedir + "/*/*", files)
+ self.files_equal(basedir + "/*/*.c12", [])
+ self.files_equal(basedir + "/*/*.{py,pyx,c12}", files)
+ self.files_equal(basedir + "/*/*.{py,pyx}", files)
+ self.files_equal(basedir + "/*/*.{pyx}", files[::2])
+ self.files_equal(basedir + "/*/*.pyx", files[::2])
+ self.files_equal(basedir + "/*/*.{py}", files[1::2])
+ self.files_equal(basedir + "/*/*.py", files[1::2])
+
+ for subdir in "xy*":
+ files = [
+ pjoin(basedir, dirname, filename)
+ for dirname in "xyz"
+ if subdir in ('*', dirname)
+ for filename in ["file2_pyx.pyx", "file2_py.py"]
+ ]
+ path = basedir + '/' + subdir + '/'
+ self.files_equal(path + "*", files)
+ self.files_equal(path + "*.{py,pyx}", files)
+ self.files_equal(path + "*.{pyx}", files[::2])
+ self.files_equal(path + "*.pyx", files[::2])
+ self.files_equal(path + "*.{py}", files[1::2])
+ self.files_equal(path + "*.py", files[1::2])
+
+ def test_extended_iglob_double_star(self):
+ basedirs = os.listdir(".")
+ files = [
+ pjoin(basedir, dirname, filename)
+ for basedir in basedirs
+ for dirname in "xyz"
+ for filename in ["file2_pyx.pyx", "file2_py.py"]
+ ]
+ all_files = [
+ pjoin(basedir, filename)
+ for basedir in basedirs
+ for filename in ["file1_pyx.pyx", "file1_py.py"]
+ ] + files
+ self.files_equal("*/*/*", files)
+ self.files_equal("*/*/**/*", files)
+ self.files_equal("*/**/*.*", all_files)
+ self.files_equal("**/*.*", all_files)
+ self.files_equal("*/**/*.c12", [])
+ self.files_equal("**/*.c12", [])
+ self.files_equal("*/*/*.{py,pyx,c12}", files)
+ self.files_equal("*/*/**/*.{py,pyx,c12}", files)
+ self.files_equal("*/**/*/*.{py,pyx,c12}", files)
+ self.files_equal("**/*/*/*.{py,pyx,c12}", files)
+ self.files_equal("**/*.{py,pyx,c12}", all_files)
+ self.files_equal("*/*/*.{py,pyx}", files)
+ self.files_equal("**/*/*/*.{py,pyx}", files)
+ self.files_equal("*/**/*/*.{py,pyx}", files)
+ self.files_equal("**/*.{py,pyx}", all_files)
+ self.files_equal("*/*/*.{pyx}", files[::2])
+ self.files_equal("**/*.{pyx}", all_files[::2])
+ self.files_equal("*/**/*/*.pyx", files[::2])
+ self.files_equal("*/*/*.pyx", files[::2])
+ self.files_equal("**/*.pyx", all_files[::2])
+ self.files_equal("*/*/*.{py}", files[1::2])
+ self.files_equal("**/*.{py}", all_files[1::2])
+ self.files_equal("*/*/*.py", files[1::2])
+ self.files_equal("**/*.py", all_files[1::2])
diff --git a/Cython/Compiler/Annotate.py b/Cython/Compiler/Annotate.py
index 48e73f853..8e8d2c4a8 100644
--- a/Cython/Compiler/Annotate.py
+++ b/Cython/Compiler/Annotate.py
@@ -49,8 +49,8 @@ class AnnotationCCodeWriter(CCodeWriter):
def create_new(self, create_from, buffer, copy_formatting):
return AnnotationCCodeWriter(create_from, buffer, copy_formatting)
- def write(self, s):
- CCodeWriter.write(self, s)
+ def _write_to_buffer(self, s):
+ self.buffer.write(s)
self.annotation_buffer.write(s)
def mark_pos(self, pos, trace=True):
@@ -73,7 +73,7 @@ class AnnotationCCodeWriter(CCodeWriter):
"""css template will later allow to choose a colormap"""
css = [self._css_template]
for i in range(255):
- color = u"FFFF%02x" % int(255/(1+i/10.0))
+ color = u"FFFF%02x" % int(255.0 // (1.0 + i/10.0))
css.append('.cython.score-%d {background-color: #%s;}' % (i, color))
try:
from pygments.formatters import HtmlFormatter
diff --git a/Cython/Compiler/Builtin.py b/Cython/Compiler/Builtin.py
index 46a4dbb5b..26fd68ff6 100644
--- a/Cython/Compiler/Builtin.py
+++ b/Cython/Compiler/Builtin.py
@@ -5,7 +5,7 @@
from __future__ import absolute_import
from .StringEncoding import EncodedString
-from .Symtab import BuiltinScope, StructOrUnionScope, ModuleScope
+from .Symtab import BuiltinScope, StructOrUnionScope, ModuleScope, Entry
from .Code import UtilityCode
from .TypeSlots import Signature
from . import PyrexTypes
@@ -427,6 +427,7 @@ def init_builtins():
global list_type, tuple_type, dict_type, set_type, frozenset_type
global bytes_type, str_type, unicode_type, basestring_type, slice_type
global float_type, long_type, bool_type, type_type, complex_type, bytearray_type
+ global int_type
type_type = builtin_scope.lookup('type').type
list_type = builtin_scope.lookup('list').type
tuple_type = builtin_scope.lookup('tuple').type
@@ -443,6 +444,8 @@ def init_builtins():
long_type = builtin_scope.lookup('long').type
bool_type = builtin_scope.lookup('bool').type
complex_type = builtin_scope.lookup('complex').type
+ # Be careful with int type while Py2 is still supported
+ int_type = builtin_scope.lookup('int').type
# Set up type inference links between equivalent Python/C types
bool_type.equivalent_type = PyrexTypes.c_bint_type
@@ -480,16 +483,33 @@ def get_known_standard_library_module_scope(module_name):
indexed_type = PyrexTypes.PythonTupleTypeConstructor(EncodedString("typing."+name), tp)
else:
indexed_type = PyrexTypes.PythonTypeConstructor(EncodedString("typing."+name), tp)
- mod.declare_type(EncodedString(name), indexed_type, pos = None)
+ name = EncodedString(name)
+ entry = mod.declare_type(name, indexed_type, pos = None)
+ var_entry = Entry(name, None, PyrexTypes.py_object_type)
+ var_entry.is_pyglobal = True
+ var_entry.is_variable = True
+ var_entry.scope = mod
+ entry.as_variable = var_entry
for name in ['ClassVar', 'Optional']:
+ name = EncodedString(name)
indexed_type = PyrexTypes.SpecialPythonTypeConstructor(EncodedString("typing."+name))
- mod.declare_type(name, indexed_type, pos = None)
+ entry = mod.declare_type(name, indexed_type, pos = None)
+ var_entry = Entry(name, None, PyrexTypes.py_object_type)
+ var_entry.is_pyglobal = True
+ var_entry.is_variable = True
+ var_entry.scope = mod
+ entry.as_variable = var_entry
_known_module_scopes[module_name] = mod
elif module_name == "dataclasses":
mod = ModuleScope(module_name, None, None)
indexed_type = PyrexTypes.SpecialPythonTypeConstructor(EncodedString("dataclasses.InitVar"))
- mod.declare_type(EncodedString("InitVar"), indexed_type, pos = None)
+ initvar_string = EncodedString("InitVar")
+ entry = mod.declare_type(initvar_string, indexed_type, pos = None)
+ var_entry = Entry(initvar_string, None, PyrexTypes.py_object_type)
+ var_entry.is_pyglobal = True
+ var_entry.scope = mod
+ entry.as_variable = var_entry
_known_module_scopes[module_name] = mod
return mod
diff --git a/Cython/Compiler/CmdLine.py b/Cython/Compiler/CmdLine.py
index ffff6a61c..c330fcc05 100644
--- a/Cython/Compiler/CmdLine.py
+++ b/Cython/Compiler/CmdLine.py
@@ -4,11 +4,17 @@
from __future__ import absolute_import
+import sys
import os
from argparse import ArgumentParser, Action, SUPPRESS
from . import Options
+if sys.version_info < (3, 3):
+ # TODO: This workaround can be removed in Cython 3.1
+ FileNotFoundError = IOError
+
+
class ParseDirectivesAction(Action):
def __call__(self, parser, namespace, values, option_string=None):
old_directives = dict(getattr(namespace, self.dest,
@@ -145,6 +151,12 @@ def create_cython_argparser():
dest='compile_time_env', type=str,
action=ParseCompileTimeEnvAction,
help='Provides compile time env like DEF would do.')
+ parser.add_argument("--module-name",
+ dest='module_name', type=str, action='store',
+ help='Fully qualified module name. If not given, is '
+ 'deduced from the import path if source file is in '
+ 'a package, or equals the filename otherwise.')
+ parser.add_argument('-M', '--depfile', action='store_true', help='produce depfiles for the sources')
parser.add_argument('sources', nargs='*', default=[])
# TODO: add help
@@ -203,6 +215,10 @@ def parse_command_line_raw(parser, args):
def parse_command_line(args):
parser = create_cython_argparser()
arguments, sources = parse_command_line_raw(parser, args)
+ for source in sources:
+ if not os.path.exists(source):
+ import errno
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), source)
options = Options.CompilationOptions(Options.default_options)
for name, value in vars(arguments).items():
@@ -222,5 +238,10 @@ def parse_command_line(args):
if len(sources) == 0 and not options.show_version:
parser.error("cython: Need at least one source file\n")
if Options.embed and len(sources) > 1:
- parser.error("cython: Only one source file allowed when using -embed\n")
+ parser.error("cython: Only one source file allowed when using --embed\n")
+ if options.module_name:
+ if options.timestamps:
+ parser.error("cython: Cannot use --module-name with --timestamps\n")
+ if len(sources) > 1:
+ parser.error("cython: Only one source file allowed when using --module-name\n")
return options, sources
diff --git a/Cython/Compiler/Code.pxd b/Cython/Compiler/Code.pxd
index 59779f8bc..4601474b2 100644
--- a/Cython/Compiler/Code.pxd
+++ b/Cython/Compiler/Code.pxd
@@ -110,6 +110,9 @@ cdef class CCodeWriter(object):
cdef bint bol
cpdef write(self, s)
+ @cython.final
+ cdef _write_lines(self, s)
+ cpdef _write_to_buffer(self, s)
cpdef put(self, code)
cpdef put_safe(self, code)
cpdef putln(self, code=*, bint safe=*)
@@ -117,6 +120,8 @@ cdef class CCodeWriter(object):
cdef increase_indent(self)
@cython.final
cdef decrease_indent(self)
+ @cython.final
+ cdef indent(self)
cdef class PyrexCodeWriter:
diff --git a/Cython/Compiler/Code.py b/Cython/Compiler/Code.py
index 4c67ac400..1f561da02 100644
--- a/Cython/Compiler/Code.py
+++ b/Cython/Compiler/Code.py
@@ -21,7 +21,7 @@ import shutil
import textwrap
from string import Template
from functools import partial
-from contextlib import closing
+from contextlib import closing, contextmanager
from collections import defaultdict
from . import Naming
@@ -831,14 +831,14 @@ class FunctionState(object):
allocated and released one of the same type). Type is simply registered
and handed back, but will usually be a PyrexType.
- If type.is_pyobject, manage_ref comes into play. If manage_ref is set to
+ If type.needs_refcounting, manage_ref comes into play. If manage_ref is set to
True, the temp will be decref-ed on return statements and in exception
handling clauses. Otherwise the caller has to deal with any reference
counting of the variable.
- If not type.is_pyobject, then manage_ref will be ignored, but it
+ If not type.needs_refcounting, then manage_ref will be ignored, but it
still has to be passed. It is recommended to pass False by convention
- if it is known that type will never be a Python object.
+ if it is known that type will never be a reference counted type.
static=True marks the temporary declaration with "static".
This is only used when allocating backing store for a module-level
@@ -857,7 +857,7 @@ class FunctionState(object):
type = PyrexTypes.c_ptr_type(type) # A function itself isn't an l-value
elif type.is_cpp_class and not type.is_fake_reference and self.scope.directives['cpp_locals']:
self.scope.use_utility_code(UtilityCode.load_cached("OptionalLocals", "CppSupport.cpp"))
- if not type.is_pyobject and not type.is_memoryviewslice:
+ if not type.needs_refcounting:
# Make manage_ref canonical, so that manage_ref will always mean
# a decref is needed.
manage_ref = False
@@ -910,17 +910,17 @@ class FunctionState(object):
for name, type, manage_ref, static in self.temps_allocated:
freelist = self.temps_free.get((type, manage_ref))
if freelist is None or name not in freelist[1]:
- used.append((name, type, manage_ref and type.is_pyobject))
+ used.append((name, type, manage_ref and type.needs_refcounting))
return used
def temps_holding_reference(self):
"""Return a list of (cname,type) tuples of temp names and their type
- that are currently in use. This includes only temps of a
- Python object type which owns its reference.
+ that are currently in use. This includes only temps
+ with a reference counted type which owns its reference.
"""
return [(name, type)
for name, type, manage_ref in self.temps_in_use()
- if manage_ref and type.is_pyobject]
+ if manage_ref and type.needs_refcounting]
def all_managed_temps(self):
"""Return a list of (cname, type) tuples of refcount-managed Python objects.
@@ -1650,7 +1650,7 @@ class GlobalState(object):
init_constants.putln("#if !CYTHON_USE_MODULE_STATE")
init_constants.putln(
- "if (__Pyx_InitStrings(%s) < 0) %s;" % (
+ "if (__Pyx_InitStrings(%s) < 0) %s" % (
Naming.stringtab_cname,
init_constants.error_goto(self.module_pos)))
init_constants.putln("#endif")
@@ -1860,13 +1860,21 @@ class CCodeWriter(object):
return self.buffer.getvalue()
def write(self, s):
+ if '\n' in s:
+ self._write_lines(s)
+ else:
+ self._write_to_buffer(s)
+
+ def _write_lines(self, s):
# Cygdb needs to know which Cython source line corresponds to which C line.
# Therefore, we write this information into "self.buffer.markers" and then write it from there
# into cython_debug/cython_debug_info_* (see ModuleNode._serialize_lineno_map).
-
filename_line = self.last_marked_pos[:2] if self.last_marked_pos else (None, 0)
self.buffer.markers.extend([filename_line] * s.count('\n'))
+ self._write_to_buffer(s)
+
+ def _write_to_buffer(self, s):
self.buffer.write(s)
def insertion_point(self):
@@ -1970,13 +1978,13 @@ class CCodeWriter(object):
self.emit_marker()
if self.code_config.emit_linenums and self.last_marked_pos:
source_desc, line, _ = self.last_marked_pos
- self.write('\n#line %s "%s"\n' % (line, source_desc.get_escaped_description()))
+ self._write_lines('\n#line %s "%s"\n' % (line, source_desc.get_escaped_description()))
if code:
if safe:
self.put_safe(code)
else:
self.put(code)
- self.write("\n")
+ self._write_lines("\n")
self.bol = 1
def mark_pos(self, pos, trace=True):
@@ -1990,13 +1998,13 @@ class CCodeWriter(object):
pos, trace = self.last_pos
self.last_marked_pos = pos
self.last_pos = None
- self.write("\n")
+ self._write_lines("\n")
if self.code_config.emit_code_comments:
self.indent()
- self.write("/* %s */\n" % self._build_marker(pos))
+ self._write_lines("/* %s */\n" % self._build_marker(pos))
if trace and self.funcstate and self.funcstate.can_trace and self.globalstate.directives['linetrace']:
self.indent()
- self.write('__Pyx_TraceLine(%d,%d,%s)\n' % (
+ self._write_lines('__Pyx_TraceLine(%d,%d,%s)\n' % (
pos[1], not self.funcstate.gil_owned, self.error_goto(pos)))
def _build_marker(self, pos):
@@ -2073,7 +2081,7 @@ class CCodeWriter(object):
self.putln("}")
def indent(self):
- self.write(" " * self.level)
+ self._write_to_buffer(" " * self.level)
def get_py_version_hex(self, pyversion):
return "0x%02X%02X%02X%02X" % (tuple(pyversion) + (0,0,0,0))[:4]
@@ -2095,10 +2103,10 @@ class CCodeWriter(object):
if entry.visibility == "private" and not entry.used:
#print "...private and not used, skipping", entry.cname ###
return
- if storage_class:
- self.put("%s " % storage_class)
if not entry.cf_used:
self.put('CYTHON_UNUSED ')
+ if storage_class:
+ self.put("%s " % storage_class)
if entry.is_cpp_optional:
self.put(entry.type.cpp_optional_declaration_code(
entry.cname, dll_linkage=dll_linkage))
@@ -2611,9 +2619,7 @@ class PyrexCodeWriter(object):
class PyxCodeWriter(object):
"""
- Can be used for writing out some Cython code. To use the indenter
- functionality, the Cython.Compiler.Importer module will have to be used
- to load the code to support python 2.4
+ Can be used for writing out some Cython code.
"""
def __init__(self, buffer=None, indent_level=0, context=None, encoding='ascii'):
@@ -2629,22 +2635,16 @@ class PyxCodeWriter(object):
def dedent(self, levels=1):
self.level -= levels
+ @contextmanager
def indenter(self, line):
"""
- Instead of
-
- with pyx_code.indenter("for i in range(10):"):
- pyx_code.putln("print i")
-
- write
-
- if pyx_code.indenter("for i in range(10);"):
- pyx_code.putln("print i")
- pyx_code.dedent()
+ with pyx_code.indenter("for i in range(10):"):
+ pyx_code.putln("print i")
"""
self.putln(line)
self.indent()
- return True
+ yield
+ self.dedent()
def getvalue(self):
result = self.buffer.getvalue()
diff --git a/Cython/Compiler/CythonScope.py b/Cython/Compiler/CythonScope.py
index 08f3da9eb..f73be0070 100644
--- a/Cython/Compiler/CythonScope.py
+++ b/Cython/Compiler/CythonScope.py
@@ -51,7 +51,7 @@ class CythonScope(ModuleScope):
def find_module(self, module_name, pos):
error("cython.%s is not available" % module_name, pos)
- def find_submodule(self, module_name):
+ def find_submodule(self, module_name, as_package=False):
entry = self.entries.get(module_name, None)
if not entry:
self.load_cythonscope()
diff --git a/Cython/Compiler/Dataclass.py b/Cython/Compiler/Dataclass.py
index 0d0bb4768..7cbbab954 100644
--- a/Cython/Compiler/Dataclass.py
+++ b/Cython/Compiler/Dataclass.py
@@ -81,6 +81,59 @@ class RemoveAssignmentsToNames(VisitorTransform, SkipDeclarations):
return node
+class TemplateCode(object):
+ _placeholder_count = 0
+
+ def __init__(self):
+ self.code_lines = []
+ self.placeholders = {}
+ self.extra_stats = []
+
+ def insertion_point(self):
+ return len(self.code_lines)
+
+ def insert_code_line(self, insertion_point, code_line):
+ self.code_lines.insert(insertion_point, code_line)
+
+ def reset(self, insertion_point=0):
+ del self.code_lines[insertion_point:]
+
+ def add_code_line(self, code_line):
+ self.code_lines.append(code_line)
+
+ def add_code_lines(self, code_lines):
+ self.code_lines.extend(code_lines)
+
+ def new_placeholder(self, field_names, value):
+ name = self._new_placeholder_name(field_names)
+ self.placeholders[name] = value
+ return name
+
+ def add_extra_statements(self, statements):
+ self.extra_stats.extend(statements)
+
+ def _new_placeholder_name(self, field_names):
+ while True:
+ name = "INIT_PLACEHOLDER_%d" % self._placeholder_count
+ if (name not in self.placeholders
+ and name not in field_names):
+ # make sure name isn't already used and doesn't
+ # conflict with a variable name (which is unlikely but possible)
+ break
+ self._placeholder_count += 1
+ return name
+
+ def generate_tree(self, level='c_class'):
+ stat_list_node = TreeFragment(
+ "\n".join(self.code_lines),
+ level=level,
+ pipeline=[NormalizeTree(None)],
+ ).substitute(self.placeholders)
+
+ stat_list_node.stats += self.extra_stats
+ return stat_list_node
+
+
class _MISSING_TYPE(object):
pass
MISSING = _MISSING_TYPE()
@@ -147,10 +200,16 @@ def process_class_get_fields(node):
transform(node)
default_value_assignments = transform.removed_assignments
- if node.base_type and node.base_type.dataclass_fields:
- fields = node.base_type.dataclass_fields.copy()
- else:
- fields = OrderedDict()
+ base_type = node.base_type
+ fields = OrderedDict()
+ while base_type:
+ if base_type.is_external or not base_type.scope.implemented:
+ warning(node.pos, "Cannot reliably handle Cython dataclasses with base types "
+ "in external modules since it is not possible to tell what fields they have", 2)
+ if base_type.dataclass_fields:
+ fields = base_type.dataclass_fields.copy()
+ break
+ base_type = base_type.base_type
for entry in var_entries:
name = entry.name
@@ -164,14 +223,16 @@ def process_class_get_fields(node):
and assignment.function.as_cython_attribute() == "dataclasses.field"):
# I believe most of this is well-enforced when it's treated as a directive
# but it doesn't hurt to make sure
- if (not isinstance(assignment, ExprNodes.GeneralCallNode)
- or not isinstance(assignment.positional_args, ExprNodes.TupleNode)
- or assignment.positional_args.args
- or not isinstance(assignment.keyword_args, ExprNodes.DictNode)):
+ valid_general_call = (isinstance(assignment, ExprNodes.GeneralCallNode)
+ and isinstance(assignment.positional_args, ExprNodes.TupleNode)
+ and not assignment.positional_args.args
+ and (assignment.keyword_args is None or isinstance(assignment.keyword_args, ExprNodes.DictNode)))
+ valid_simple_call = (isinstance(assignment, ExprNodes.SimpleCallNode) and not assignment.args)
+ if not (valid_general_call or valid_simple_call):
error(assignment.pos, "Call to 'cython.dataclasses.field' must only consist "
"of compile-time keyword arguments")
continue
- keyword_args = assignment.keyword_args.as_python_dict()
+ keyword_args = assignment.keyword_args.as_python_dict() if valid_general_call and assignment.keyword_args else {}
if 'default' in keyword_args and 'default_factory' in keyword_args:
error(assignment.pos, "cannot specify both default and default_factory")
continue
@@ -216,7 +277,7 @@ def handle_cclass_dataclass(node, dataclass_args, analyse_decs_transform):
if not isinstance(v, ExprNodes.BoolNode):
error(node.pos,
"Arguments passed to cython.dataclasses.dataclass must be True or False")
- kwargs[k] = v
+ kwargs[k] = v.value
# remove everything that does not belong into _DataclassParams()
kw_only = kwargs.pop("kw_only")
@@ -249,23 +310,14 @@ def handle_cclass_dataclass(node, dataclass_args, analyse_decs_transform):
stats = Nodes.StatListNode(node.pos,
stats=[dataclass_params_assignment] + dataclass_fields_stats)
- code_lines = []
- placeholders = {}
- extra_stats = []
- for cl, ph, es in [ generate_init_code(kwargs['init'], node, fields, kw_only),
- generate_repr_code(kwargs['repr'], node, fields),
- generate_eq_code(kwargs['eq'], node, fields),
- generate_order_code(kwargs['order'], node, fields),
- generate_hash_code(kwargs['unsafe_hash'], kwargs['eq'], kwargs['frozen'], node, fields) ]:
- code_lines.append(cl)
- placeholders.update(ph)
- extra_stats.extend(extra_stats)
-
- code_lines = "\n".join(code_lines)
- code_tree = TreeFragment(code_lines, level='c_class', pipeline=[NormalizeTree(node.scope)]
- ).substitute(placeholders)
-
- stats.stats += (code_tree.stats + extra_stats)
+ code = TemplateCode()
+ generate_init_code(code, kwargs['init'], node, fields, kw_only)
+ generate_repr_code(code, kwargs['repr'], node, fields)
+ generate_eq_code(code, kwargs['eq'], node, fields)
+ generate_order_code(code, kwargs['order'], node, fields)
+ generate_hash_code(code, kwargs['unsafe_hash'], kwargs['eq'], kwargs['frozen'], node, fields)
+
+ stats.stats += code.generate_tree().stats
# turn off annotation typing, so all arguments to __init__ are accepted as
# generic objects and thus can accept _HAS_DEFAULT_FACTORY.
@@ -283,14 +335,8 @@ def handle_cclass_dataclass(node, dataclass_args, analyse_decs_transform):
node.body.stats.append(comp_directives)
-def generate_init_code(init, node, fields, kw_only):
+def generate_init_code(code, init, node, fields, kw_only):
"""
- All of these "generate_*_code" functions return a tuple of:
- - code string
- - placeholder dict (often empty)
- - stat list (often empty)
- which can then be combined later and processed once.
-
Notes on CPython generated "__init__":
* Implemented in `_init_fn`.
* The use of the `dataclasses._HAS_DEFAULT_FACTORY` sentinel value as
@@ -302,9 +348,15 @@ def generate_init_code(init, node, fields, kw_only):
* seen_default and the associated error message are copied directly from Python
* Call to user-defined __post_init__ function (if it exists) is copied from
CPython.
+
+ Cython behaviour deviates a little here (to be decided if this is right...)
+ Because the class variable from the assignment does not exist Cython fields will
+ return None (or whatever their type default is) if not initialized while Python
+ dataclasses will fall back to looking up the class variable.
"""
if not init or node.scope.lookup_here("__init__"):
- return "", {}, []
+ return
+
# selfname behaviour copied from the cpython module
selfname = "__dataclass_self__" if "self" in fields else "self"
args = [selfname]
@@ -312,8 +364,7 @@ def generate_init_code(init, node, fields, kw_only):
if kw_only:
args.append("*")
- placeholders = {}
- placeholder_count = [0]
+ function_start_point = code.insertion_point()
# create a temp to get _HAS_DEFAULT_FACTORY
dataclass_module = make_dataclasses_module_callnode(node.pos)
@@ -323,26 +374,10 @@ def generate_init_code(init, node, fields, kw_only):
attribute=EncodedString("_HAS_DEFAULT_FACTORY")
)
- def get_placeholder_name():
- while True:
- name = "INIT_PLACEHOLDER_%d" % placeholder_count[0]
- if (name not in placeholders
- and name not in fields):
- # make sure name isn't already used and doesn't
- # conflict with a variable name (which is unlikely but possible)
- break
- placeholder_count[0] += 1
- return name
-
- default_factory_placeholder = get_placeholder_name()
- placeholders[default_factory_placeholder] = has_default_factory
-
- function_body_code_lines = []
+ default_factory_placeholder = code.new_placeholder(fields, has_default_factory)
seen_default = False
for name, field in fields.items():
- if not field.init.value:
- continue
entry = node.scope.lookup(name)
if entry.annotation:
annotation = u": %s" % entry.annotation.string.value
@@ -354,50 +389,53 @@ def generate_init_code(init, node, fields, kw_only):
if field.default_factory is not MISSING:
ph_name = default_factory_placeholder
else:
- ph_name = get_placeholder_name()
- placeholders[ph_name] = field.default # should be a node
+ ph_name = code.new_placeholder(fields, field.default) # 'default' should be a node
assignment = u" = %s" % ph_name
- elif seen_default and not kw_only:
+ elif seen_default and not kw_only and field.init.value:
error(entry.pos, ("non-default argument '%s' follows default argument "
"in dataclass __init__") % name)
- return "", {}, []
+ code.reset(function_start_point)
+ return
- args.append(u"%s%s%s" % (name, annotation, assignment))
+ if field.init.value:
+ args.append(u"%s%s%s" % (name, annotation, assignment))
if field.is_initvar:
continue
elif field.default_factory is MISSING:
if field.init.value:
- function_body_code_lines.append(u" %s.%s = %s" % (selfname, name, name))
+ code.add_code_line(u" %s.%s = %s" % (selfname, name, name))
+ elif assignment:
+ # not an argument to the function, but is still initialized
+ code.add_code_line(u" %s.%s%s" % (selfname, name, assignment))
else:
- ph_name = get_placeholder_name()
- placeholders[ph_name] = field.default_factory
+ ph_name = code.new_placeholder(fields, field.default_factory)
if field.init.value:
# close to:
# def __init__(self, name=_PLACEHOLDER_VALUE):
# self.name = name_default_factory() if name is _PLACEHOLDER_VALUE else name
- function_body_code_lines.append(u" %s.%s = %s() if %s is %s else %s" % (
+ code.add_code_line(u" %s.%s = %s() if %s is %s else %s" % (
selfname, name, ph_name, name, default_factory_placeholder, name))
else:
# still need to use the default factory to initialize
- function_body_code_lines.append(u" %s.%s = %s()"
- % (selfname, name, ph_name))
-
- args = u", ".join(args)
- func_def = u"def __init__(%s):" % args
-
- code_lines = [func_def] + (function_body_code_lines or ["pass"])
+ code.add_code_line(u" %s.%s = %s()" % (
+ selfname, name, ph_name))
if node.scope.lookup("__post_init__"):
post_init_vars = ", ".join(name for name, field in fields.items()
if field.is_initvar)
- code_lines.append(" %s.__post_init__(%s)" % (selfname, post_init_vars))
- return u"\n".join(code_lines), placeholders, []
+ code.add_code_line(" %s.__post_init__(%s)" % (selfname, post_init_vars))
+
+ if function_start_point == code.insertion_point():
+ code.add_code_line(" pass")
+
+ args = u", ".join(args)
+ code.insert_code_line(function_start_point, u"def __init__(%s):" % args)
-def generate_repr_code(repr, node, fields):
+def generate_repr_code(code, repr, node, fields):
"""
- The CPython implementation is just:
+ The core of the CPython implementation is just:
['return self.__class__.__qualname__ + f"(' +
', '.join([f"{f.name}={{self.{f.name}!r}}"
for f in fields]) +
@@ -405,38 +443,65 @@ def generate_repr_code(repr, node, fields):
The only notable difference here is self.__class__.__qualname__ -> type(self).__name__
which is because Cython currently supports Python 2.
+
+ However, it also has some guards for recursive repr invokations. In the standard
+ library implementation they're done with a wrapper decorator that captures a set
+ (with the set keyed by id and thread). Here we create a set as a thread local
+ variable and key only by id.
"""
if not repr or node.scope.lookup("__repr__"):
- return "", {}, []
- code_lines = ["def __repr__(self):"]
+ return
+
+ # The recursive guard is likely a little costly, so skip it if possible.
+ # is_gc_simple defines where it can contain recursive objects
+ needs_recursive_guard = False
+ for name in fields.keys():
+ entry = node.scope.lookup(name)
+ type_ = entry.type
+ if type_.is_memoryviewslice:
+ type_ = type_.dtype
+ if not type_.is_pyobject:
+ continue # no GC
+ if not type_.is_gc_simple:
+ needs_recursive_guard = True
+ break
+
+ if needs_recursive_guard:
+ code.add_code_line("__pyx_recursive_repr_guard = __import__('threading').local()")
+ code.add_code_line("__pyx_recursive_repr_guard.running = set()")
+ code.add_code_line("def __repr__(self):")
+ if needs_recursive_guard:
+ code.add_code_line(" key = id(self)")
+ code.add_code_line(" guard_set = self.__pyx_recursive_repr_guard.running")
+ code.add_code_line(" if key in guard_set: return '...'")
+ code.add_code_line(" guard_set.add(key)")
+ code.add_code_line(" try:")
strs = [u"%s={self.%s!r}" % (name, name)
for name, field in fields.items()
if field.repr.value and not field.is_initvar]
format_string = u", ".join(strs)
- code_lines.append(u' name = getattr(type(self), "__qualname__", type(self).__name__)')
- code_lines.append(u" return f'{name}(%s)'" % format_string)
- code_lines = u"\n".join(code_lines)
- return code_lines, {}, []
+ code.add_code_line(u' name = getattr(type(self), "__qualname__", type(self).__name__)')
+ code.add_code_line(u" return f'{name}(%s)'" % format_string)
+ if needs_recursive_guard:
+ code.add_code_line(" finally:")
+ code.add_code_line(" guard_set.remove(key)")
-def generate_cmp_code(op, funcname, node, fields):
+def generate_cmp_code(code, op, funcname, node, fields):
if node.scope.lookup_here(funcname):
- return "", {}, []
+ return
names = [name for name, field in fields.items() if (field.compare.value and not field.is_initvar)]
- if not names:
- return "", {}, [] # no comparable types
-
- code_lines = [
+ code.add_code_lines([
"def %s(self, other):" % funcname,
+ " if not isinstance(other, %s):" % node.class_name,
+ " return NotImplemented",
+ #
" cdef %s other_cast" % node.class_name,
- " if isinstance(other, %s):" % node.class_name,
- " other_cast = <%s>other" % node.class_name,
- " else:",
- " return NotImplemented"
- ]
+ " other_cast = <%s>other" % node.class_name,
+ ])
# The Python implementation of dataclasses.py does a tuple comparison
# (roughly):
@@ -454,42 +519,32 @@ def generate_cmp_code(op, funcname, node, fields):
name, op, name))
if checks:
- code_lines.append(" return " + " and ".join(checks))
+ code.add_code_line(" return " + " and ".join(checks))
else:
if "=" in op:
- code_lines.append(" return True") # "() == ()" is True
+ code.add_code_line(" return True") # "() == ()" is True
else:
- code_lines.append(" return False")
-
- code_lines = u"\n".join(code_lines)
-
- return code_lines, {}, []
+ code.add_code_line(" return False")
-def generate_eq_code(eq, node, fields):
+def generate_eq_code(code, eq, node, fields):
if not eq:
- return code_lines, {}, []
- return generate_cmp_code("==", "__eq__", node, fields)
+ return
+ generate_cmp_code(code, "==", "__eq__", node, fields)
-def generate_order_code(order, node, fields):
+def generate_order_code(code, order, node, fields):
if not order:
- return "", {}, []
- code_lines = []
- placeholders = {}
- stats = []
+ return
+
for op, name in [("<", "__lt__"),
("<=", "__le__"),
(">", "__gt__"),
(">=", "__ge__")]:
- res = generate_cmp_code(op, name, node, fields)
- code_lines.append(res[0])
- placeholders.update(res[1])
- stats.extend(res[2])
- return "\n".join(code_lines), placeholders, stats
+ generate_cmp_code(code, op, name, node, fields)
-def generate_hash_code(unsafe_hash, eq, frozen, node, fields):
+def generate_hash_code(code, unsafe_hash, eq, frozen, node, fields):
"""
Copied from CPython implementation - the intention is to follow this as far as
is possible:
@@ -534,35 +589,37 @@ def generate_hash_code(unsafe_hash, eq, frozen, node, fields):
if unsafe_hash:
# error message taken from CPython dataclasses module
error(node.pos, "Cannot overwrite attribute __hash__ in class %s" % node.class_name)
- return "", {}, []
+ return
+
if not unsafe_hash:
if not eq:
return
if not frozen:
- return "", {}, [Nodes.SingleAssignmentNode(
- node.pos,
- lhs=ExprNodes.NameNode(node.pos, name=EncodedString("__hash__")),
- rhs=ExprNodes.NoneNode(node.pos),
- )]
+ code.add_extra_statements([
+ Nodes.SingleAssignmentNode(
+ node.pos,
+ lhs=ExprNodes.NameNode(node.pos, name=EncodedString("__hash__")),
+ rhs=ExprNodes.NoneNode(node.pos),
+ )
+ ])
+ return
names = [
name for name, field in fields.items()
- if (not field.is_initvar and
- (field.compare.value if field.hash.value is None else field.hash.value))
+ if not field.is_initvar and (
+ field.compare.value if field.hash.value is None else field.hash.value)
]
- if not names:
- return "", {}, [] # nothing to hash
# make a tuple of the hashes
- tpl = u", ".join(u"hash(self.%s)" % name for name in names )
+ hash_tuple_items = u", ".join(u"self.%s" % name for name in names)
+ if hash_tuple_items:
+ hash_tuple_items += u"," # ensure that one arg form is a tuple
# if we're here we want to generate a hash
- code_lines = dedent(u"""\
- def __hash__(self):
- return hash((%s))
- """) % tpl
-
- return code_lines, {}, []
+ code.add_code_lines([
+ "def __hash__(self):",
+ " return hash((%s))" % hash_tuple_items,
+ ])
def get_field_type(pos, entry):
diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py
index ab228c552..881851535 100644
--- a/Cython/Compiler/ExprNodes.py
+++ b/Cython/Compiler/ExprNodes.py
@@ -333,6 +333,8 @@ class ExprNode(Node):
# result_code/temp_result can safely be set to None
# is_numpy_attribute boolean Is a Numpy module attribute
# annotation ExprNode or None PEP526 annotation for names or expressions
+ # generator_arg_tag None or Node A tag to mark ExprNodes that potentially need to
+ # be changed to a generator argument
result_ctype = None
type = None
@@ -342,6 +344,7 @@ class ExprNode(Node):
use_managed_ref = True # can be set by optimisation transforms
result_is_used = True
is_numpy_attribute = False
+ generator_arg_tag = None
# The Analyse Expressions phase for expressions is split
# into two sub-phases:
@@ -544,7 +547,7 @@ class ExprNode(Node):
if is_pythran_supported_node_or_none(self):
return to_pythran(self)
- assert(type_ is not None)
+ assert type_ is not None
return to_pythran(self, type_)
def is_c_result_required(self):
@@ -1535,6 +1538,11 @@ def _analyse_name_as_type(name, pos, env):
global_entry = env.global_scope().lookup(name)
if global_entry and global_entry.is_type:
type = global_entry.type
+ if (not env.in_c_type_context and
+ name == 'int' and type is Builtin.int_type):
+ # While we still support Python2 this needs to be downgraded
+ # to a generic Python object to include both int and long
+ type = py_object_type
if type and (type.is_pyobject or env.in_c_type_context):
return type
ctype = ctype or type
@@ -2057,15 +2065,10 @@ class NameNode(AtomicExprNode):
atype = error_type
visibility = 'private'
- if 'dataclasses.dataclass' in env.directives:
+ if env.is_c_dataclass_scope:
# handle "frozen" directive - full inspection of the dataclass directives happens
# in Dataclass.py
- frozen_directive = None
- dataclass_directive = env.directives['dataclasses.dataclass']
- if dataclass_directive:
- dataclass_directive_kwds = dataclass_directive[1]
- frozen_directive = dataclass_directive_kwds.get('frozen', None)
- is_frozen = frozen_directive and frozen_directive.is_literal and frozen_directive.value
+ is_frozen = env.is_c_dataclass_scope == "frozen"
if atype.is_pyobject or atype.can_coerce_to_pyobject(env):
visibility = 'readonly' if is_frozen else 'public'
# If the object can't be coerced that's fine - we just don't create a property
@@ -2121,6 +2124,10 @@ class NameNode(AtomicExprNode):
type = py_object_type
elif type.is_pyobject and type.equivalent_type:
type = type.equivalent_type
+ elif type is Builtin.int_type:
+ # while we still support Python 2 this must be an object
+ # so that it can be either int or long
+ type = py_object_type
return type
if self.name == 'object':
# This is normally parsed as "simple C type", but not if we don't parse C types.
@@ -2160,10 +2167,13 @@ class NameNode(AtomicExprNode):
self.entry.known_standard_library_import = "" # already exists somewhere and so is now ambiguous
if not self.entry and self.annotation is not None:
# name : type = ...
- is_dataclass = 'dataclasses.dataclass' in env.directives
+ is_dataclass = env.is_c_dataclass_scope
# In a dataclass, an assignment should not prevent a name from becoming an instance attribute.
# Hence, "as_target = not is_dataclass".
self.declare_from_annotation(env, as_target=not is_dataclass)
+ elif (self.entry and self.entry.is_inherited and
+ self.annotation and env.is_c_dataclass_scope):
+ error(self.pos, "Cannot redeclare inherited fields in Cython dataclasses")
if not self.entry:
if env.directives['warn.undeclared']:
warning(self.pos, "implicit declaration of '%s'" % self.name, 1)
@@ -2790,7 +2800,98 @@ class ImportNode(ExprNode):
return self.module_name.value
-class IteratorNode(ExprNode):
+class ScopedExprNode(ExprNode):
+ # Abstract base class for ExprNodes that have their own local
+ # scope, such as generator expressions.
+ #
+ # expr_scope Scope the inner scope of the expression
+
+ subexprs = []
+ expr_scope = None
+
+ # does this node really have a local scope, e.g. does it leak loop
+ # variables or not? non-leaking Py3 behaviour is default, except
+ # for list comprehensions where the behaviour differs in Py2 and
+ # Py3 (set in Parsing.py based on parser context)
+ has_local_scope = True
+
+ def init_scope(self, outer_scope, expr_scope=None):
+ if expr_scope is not None:
+ self.expr_scope = expr_scope
+ elif self.has_local_scope:
+ self.expr_scope = Symtab.ComprehensionScope(outer_scope)
+ elif not self.expr_scope: # don't unset if it's already been set
+ self.expr_scope = None
+
+ def analyse_declarations(self, env):
+ self.init_scope(env)
+
+ def analyse_scoped_declarations(self, env):
+ # this is called with the expr_scope as env
+ pass
+
+ def analyse_types(self, env):
+ # no recursion here, the children will be analysed separately below
+ return self
+
+ def analyse_scoped_expressions(self, env):
+ # this is called with the expr_scope as env
+ return self
+
+ def generate_evaluation_code(self, code):
+ # set up local variables and free their references on exit
+ generate_inner_evaluation_code = super(ScopedExprNode, self).generate_evaluation_code
+ if not self.has_local_scope or not self.expr_scope.var_entries:
+ # no local variables => delegate, done
+ generate_inner_evaluation_code(code)
+ return
+
+ code.putln('{ /* enter inner scope */')
+ py_entries = []
+ for _, entry in sorted(item for item in self.expr_scope.entries.items() if item[0]):
+ if not entry.in_closure:
+ if entry.type.is_pyobject and entry.used:
+ py_entries.append(entry)
+ if not py_entries:
+ # no local Python references => no cleanup required
+ generate_inner_evaluation_code(code)
+ code.putln('} /* exit inner scope */')
+ return
+
+ # must free all local Python references at each exit point
+ old_loop_labels = code.new_loop_labels()
+ old_error_label = code.new_error_label()
+
+ generate_inner_evaluation_code(code)
+
+ # normal (non-error) exit
+ self._generate_vars_cleanup(code, py_entries)
+
+ # error/loop body exit points
+ exit_scope = code.new_label('exit_scope')
+ code.put_goto(exit_scope)
+ for label, old_label in ([(code.error_label, old_error_label)] +
+ list(zip(code.get_loop_labels(), old_loop_labels))):
+ if code.label_used(label):
+ code.put_label(label)
+ self._generate_vars_cleanup(code, py_entries)
+ code.put_goto(old_label)
+ code.put_label(exit_scope)
+ code.putln('} /* exit inner scope */')
+
+ code.set_loop_labels(old_loop_labels)
+ code.error_label = old_error_label
+
+ def _generate_vars_cleanup(self, code, py_entries):
+ for entry in py_entries:
+ if entry.is_cglobal:
+ code.put_var_gotref(entry)
+ code.put_var_decref_set(entry, "Py_None")
+ else:
+ code.put_var_xdecref_clear(entry)
+
+
+class IteratorNode(ScopedExprNode):
# Used as part of for statement implementation.
#
# Implements result = iter(sequence)
@@ -2802,10 +2903,13 @@ class IteratorNode(ExprNode):
counter_cname = None
reversed = False # currently only used for list/tuple types (see Optimize.py)
is_async = False
+ has_local_scope = False
subexprs = ['sequence']
def analyse_types(self, env):
+ if self.expr_scope:
+ env = self.expr_scope # actually evaluate sequence in this scope instead
self.sequence = self.sequence.analyse_types(env)
if (self.sequence.type.is_array or self.sequence.type.is_ptr) and \
not self.sequence.type.is_string:
@@ -2813,6 +2917,9 @@ class IteratorNode(ExprNode):
self.type = self.sequence.type
elif self.sequence.type.is_cpp_class:
return CppIteratorNode(self.pos, sequence=self.sequence).analyse_types(env)
+ elif self.is_reversed_cpp_iteration():
+ sequence = self.sequence.arg_tuple.args[0].arg
+ return CppIteratorNode(self.pos, sequence=sequence, reversed=True).analyse_types(env)
else:
self.sequence = self.sequence.coerce_to_pyobject(env)
if self.sequence.type in (list_type, tuple_type):
@@ -2827,8 +2934,27 @@ class IteratorNode(ExprNode):
PyrexTypes.CFuncTypeArg("it", PyrexTypes.py_object_type, None),
]))
+ def is_reversed_cpp_iteration(self):
+ """
+ Returns True if the 'reversed' function is applied to a C++ iterable.
+
+ This supports C++ classes with reverse_iterator implemented.
+ """
+ if not (isinstance(self.sequence, SimpleCallNode) and
+ self.sequence.arg_tuple and len(self.sequence.arg_tuple.args) == 1):
+ return False
+ func = self.sequence.function
+ if func.is_name and func.name == "reversed":
+ if not func.entry.is_builtin:
+ return False
+ arg = self.sequence.arg_tuple.args[0]
+ if isinstance(arg, CoercionNode) and arg.arg.is_name:
+ arg = arg.arg.entry
+ return arg.type.is_cpp_class
+ return False
+
def type_dependencies(self, env):
- return self.sequence.type_dependencies(env)
+ return self.sequence.type_dependencies(self.expr_scope or env)
def infer_type(self, env):
sequence_type = self.sequence.infer_type(env)
@@ -2990,25 +3116,30 @@ class CppIteratorNode(ExprNode):
cpp_attribute_op = "."
extra_dereference = ""
is_temp = True
+ reversed = False
subexprs = ['sequence']
+ def get_iterator_func_names(self):
+ return ("begin", "end") if not self.reversed else ("rbegin", "rend")
+
def analyse_types(self, env):
sequence_type = self.sequence.type
if sequence_type.is_ptr:
sequence_type = sequence_type.base_type
- begin = sequence_type.scope.lookup("begin")
- end = sequence_type.scope.lookup("end")
+ begin_name, end_name = self.get_iterator_func_names()
+ begin = sequence_type.scope.lookup(begin_name)
+ end = sequence_type.scope.lookup(end_name)
if (begin is None
or not begin.type.is_cfunction
or begin.type.args):
- error(self.pos, "missing begin() on %s" % self.sequence.type)
+ error(self.pos, "missing %s() on %s" % (begin_name, self.sequence.type))
self.type = error_type
return self
if (end is None
or not end.type.is_cfunction
or end.type.args):
- error(self.pos, "missing end() on %s" % self.sequence.type)
+ error(self.pos, "missing %s() on %s" % (end_name, self.sequence.type))
self.type = error_type
return self
iter_type = begin.type.return_type
@@ -3019,37 +3150,40 @@ class CppIteratorNode(ExprNode):
self.pos,
"!=",
[iter_type, end.type.return_type]) is None:
- error(self.pos, "missing operator!= on result of begin() on %s" % self.sequence.type)
+ error(self.pos, "missing operator!= on result of %s() on %s" % (begin_name, self.sequence.type))
self.type = error_type
return self
if env.lookup_operator_for_types(self.pos, '++', [iter_type]) is None:
- error(self.pos, "missing operator++ on result of begin() on %s" % self.sequence.type)
+ error(self.pos, "missing operator++ on result of %s() on %s" % (begin_name, self.sequence.type))
self.type = error_type
return self
if env.lookup_operator_for_types(self.pos, '*', [iter_type]) is None:
- error(self.pos, "missing operator* on result of begin() on %s" % self.sequence.type)
+ error(self.pos, "missing operator* on result of %s() on %s" % (begin_name, self.sequence.type))
self.type = error_type
return self
self.type = iter_type
elif iter_type.is_ptr:
if not (iter_type == end.type.return_type):
- error(self.pos, "incompatible types for begin() and end()")
+ error(self.pos, "incompatible types for %s() and %s()" % (begin_name, end_name))
self.type = iter_type
else:
- error(self.pos, "result type of begin() on %s must be a C++ class or pointer" % self.sequence.type)
+ error(self.pos, "result type of %s() on %s must be a C++ class or pointer" % (begin_name, self.sequence.type))
self.type = error_type
return self
def generate_result_code(self, code):
sequence_type = self.sequence.type
+ begin_name, _ = self.get_iterator_func_names()
# essentially 3 options:
- if self.sequence.is_name or self.sequence.is_attribute:
- # 1) is a name and can be accessed directly;
+ if self.sequence.is_simple():
+ # 1) Sequence can be accessed directly, like a name;
# assigning to it may break the container, but that's the responsibility
# of the user
- code.putln("%s = %s%sbegin();" % (self.result(),
- self.sequence.result(),
- self.cpp_attribute_op))
+ code.putln("%s = %s%s%s();" % (
+ self.result(),
+ self.sequence.result(),
+ self.cpp_attribute_op,
+ begin_name))
else:
# (while it'd be nice to limit the scope of the loop temp, it's essentially
# impossible to do while supporting generators)
@@ -3067,23 +3201,50 @@ class CppIteratorNode(ExprNode):
code.putln("%s = %s%s;" % (self.cpp_sequence_cname,
"&" if temp_type.is_ptr else "",
self.sequence.move_result_rhs()))
- code.putln("%s = %s%sbegin();" % (self.result(), self.cpp_sequence_cname,
- self.cpp_attribute_op))
+ code.putln("%s = %s%s%s();" % (
+ self.result(),
+ self.cpp_sequence_cname,
+ self.cpp_attribute_op,
+ begin_name))
def generate_iter_next_result_code(self, result_name, code):
# end call isn't cached to support containers that allow adding while iterating
# (much as this is usually a bad idea)
- code.putln("if (!(%s%s != %s%send())) break;" % (
+ _, end_name = self.get_iterator_func_names()
+ code.putln("if (!(%s%s != %s%s%s())) break;" % (
self.extra_dereference,
self.result(),
self.cpp_sequence_cname or self.sequence.result(),
- self.cpp_attribute_op))
+ self.cpp_attribute_op,
+ end_name))
code.putln("%s = *%s%s;" % (
result_name,
self.extra_dereference,
self.result()))
code.putln("++%s%s;" % (self.extra_dereference, self.result()))
+ def generate_subexpr_disposal_code(self, code):
+ if not self.cpp_sequence_cname:
+ # the sequence is accessed directly so any temporary result in its
+ # subexpressions must remain available until the iterator is not needed
+ return
+ ExprNode.generate_subexpr_disposal_code(self, code)
+
+ def free_subexpr_temps(self, code):
+ if not self.cpp_sequence_cname:
+ # the sequence is accessed directly so any temporary result in its
+ # subexpressions must remain available until the iterator is not needed
+ return
+ ExprNode.free_subexpr_temps(self, code)
+
+ def generate_disposal_code(self, code):
+ if not self.cpp_sequence_cname:
+ # postponed from CppIteratorNode.generate_subexpr_disposal_code
+ # and CppIteratorNode.free_subexpr_temps
+ ExprNode.generate_subexpr_disposal_code(self, code)
+ ExprNode.free_subexpr_temps(self, code)
+ ExprNode.generate_disposal_code(self, code)
+
def free_temps(self, code):
if self.cpp_sequence_cname:
code.funcstate.release_temp(self.cpp_sequence_cname)
@@ -3091,6 +3252,32 @@ class CppIteratorNode(ExprNode):
ExprNode.free_temps(self, code)
+def remove_const(item_type):
+ """
+ Removes the constness of a given type and its underlying templates
+ if any.
+
+ This is to solve the compilation error when the temporary variable used to
+ store the result of an iterator cannot be changed due to its constness.
+ For example, the value_type of std::map, which will also be the type of
+ the temporarry variable, is std::pair<const Key, T>. This means the first
+ component of the variable cannot be reused to store the result of each
+ iteration, which leads to a compilation error.
+ """
+ if item_type.is_const:
+ item_type = item_type.cv_base_type
+ if item_type.is_typedef:
+ item_type = remove_const(item_type.typedef_base_type)
+ if item_type.is_cpp_class and item_type.templates:
+ templates = [remove_const(t) if t.is_const else t for t in item_type.templates]
+ template_type = item_type.template_type
+ item_type = PyrexTypes.CppClassType(
+ template_type.name, template_type.scope,
+ template_type.cname, template_type.base_classes,
+ templates, template_type)
+ return item_type
+
+
class NextNode(AtomicExprNode):
# Used as part of for statement implementation.
# Implements result = next(iterator)
@@ -3133,6 +3320,7 @@ class NextNode(AtomicExprNode):
def analyse_types(self, env):
self.type = self.infer_type(env, self.iterator.type)
+ self.type = remove_const(self.type)
self.is_temp = 1
return self
@@ -3140,7 +3328,7 @@ class NextNode(AtomicExprNode):
self.iterator.generate_iter_next_result_code(self.result(), code)
-class AsyncIteratorNode(ExprNode):
+class AsyncIteratorNode(ScopedExprNode):
# Used as part of 'async for' statement implementation.
#
# Implements result = sequence.__aiter__()
@@ -3152,11 +3340,14 @@ class AsyncIteratorNode(ExprNode):
is_async = True
type = py_object_type
is_temp = 1
+ has_local_scope = False
def infer_type(self, env):
return py_object_type
def analyse_types(self, env):
+ if self.expr_scope:
+ env = self.expr_scope
self.sequence = self.sequence.analyse_types(env)
if not self.sequence.type.is_pyobject:
error(self.pos, "async for loops not allowed on C/C++ types")
@@ -7040,6 +7231,35 @@ class AttributeNode(ExprNode):
self.entry = entry.as_variable
self.analyse_as_python_attribute(env)
return self
+ elif entry and entry.is_cfunction and self.obj.type is not Builtin.type_type:
+ # "bound" cdef function.
+ # This implementation is likely a little inefficient and could be improved.
+ # Essentially it does:
+ # __import__("functools").partial(coerce_to_object(self), self.obj)
+ from .UtilNodes import EvalWithTempExprNode, ResultRefNode
+ # take self.obj out to a temp because it's used twice
+ obj_node = ResultRefNode(self.obj, type=self.obj.type)
+ obj_node.result_ctype = self.obj.result_ctype
+ self.obj = obj_node
+ unbound_node = ExprNode.coerce_to(self, dst_type, env)
+ utility_code=UtilityCode.load_cached(
+ "PyMethodNew2Arg", "ObjectHandling.c"
+ )
+ func_type = PyrexTypes.CFuncType(
+ PyrexTypes.py_object_type, [
+ PyrexTypes.CFuncTypeArg("func", PyrexTypes.py_object_type, None),
+ PyrexTypes.CFuncTypeArg("self", PyrexTypes.py_object_type, None)
+ ],
+ )
+ binding_call = PythonCapiCallNode(
+ self.pos,
+ function_name="__Pyx_PyMethod_New2Arg",
+ func_type=func_type,
+ args=[unbound_node, obj_node],
+ utility_code=utility_code,
+ )
+ complete_call = EvalWithTempExprNode(obj_node, binding_call)
+ return complete_call.analyse_types(env)
return ExprNode.coerce_to(self, dst_type, env)
def calculate_constant_result(self):
@@ -8153,7 +8373,7 @@ class SequenceNode(ExprNode):
code.put_decref(target_list, py_object_type)
code.putln('%s = %s; %s = NULL;' % (target_list, sublist_temp, sublist_temp))
code.putln('#else')
- code.putln('(void)%s;' % sublist_temp) # avoid warning about unused variable
+ code.putln('CYTHON_UNUSED_VAR(%s);' % sublist_temp)
code.funcstate.release_temp(sublist_temp)
code.putln('#endif')
@@ -8466,97 +8686,6 @@ class ListNode(SequenceNode):
raise InternalError("List type never specified")
-class ScopedExprNode(ExprNode):
- # Abstract base class for ExprNodes that have their own local
- # scope, such as generator expressions.
- #
- # expr_scope Scope the inner scope of the expression
-
- subexprs = []
- expr_scope = None
-
- # does this node really have a local scope, e.g. does it leak loop
- # variables or not? non-leaking Py3 behaviour is default, except
- # for list comprehensions where the behaviour differs in Py2 and
- # Py3 (set in Parsing.py based on parser context)
- has_local_scope = True
-
- def init_scope(self, outer_scope, expr_scope=None):
- if expr_scope is not None:
- self.expr_scope = expr_scope
- elif self.has_local_scope:
- self.expr_scope = Symtab.ComprehensionScope(outer_scope)
- else:
- self.expr_scope = None
-
- def analyse_declarations(self, env):
- self.init_scope(env)
-
- def analyse_scoped_declarations(self, env):
- # this is called with the expr_scope as env
- pass
-
- def analyse_types(self, env):
- # no recursion here, the children will be analysed separately below
- return self
-
- def analyse_scoped_expressions(self, env):
- # this is called with the expr_scope as env
- return self
-
- def generate_evaluation_code(self, code):
- # set up local variables and free their references on exit
- generate_inner_evaluation_code = super(ScopedExprNode, self).generate_evaluation_code
- if not self.has_local_scope or not self.expr_scope.var_entries:
- # no local variables => delegate, done
- generate_inner_evaluation_code(code)
- return
-
- code.putln('{ /* enter inner scope */')
- py_entries = []
- for _, entry in sorted(item for item in self.expr_scope.entries.items() if item[0]):
- if not entry.in_closure:
- if entry.type.is_pyobject and entry.used:
- py_entries.append(entry)
- if not py_entries:
- # no local Python references => no cleanup required
- generate_inner_evaluation_code(code)
- code.putln('} /* exit inner scope */')
- return
-
- # must free all local Python references at each exit point
- old_loop_labels = code.new_loop_labels()
- old_error_label = code.new_error_label()
-
- generate_inner_evaluation_code(code)
-
- # normal (non-error) exit
- self._generate_vars_cleanup(code, py_entries)
-
- # error/loop body exit points
- exit_scope = code.new_label('exit_scope')
- code.put_goto(exit_scope)
- for label, old_label in ([(code.error_label, old_error_label)] +
- list(zip(code.get_loop_labels(), old_loop_labels))):
- if code.label_used(label):
- code.put_label(label)
- self._generate_vars_cleanup(code, py_entries)
- code.put_goto(old_label)
- code.put_label(exit_scope)
- code.putln('} /* exit inner scope */')
-
- code.set_loop_labels(old_loop_labels)
- code.error_label = old_error_label
-
- def _generate_vars_cleanup(self, code, py_entries):
- for entry in py_entries:
- if entry.is_cglobal:
- code.put_var_gotref(entry)
- code.put_var_decref_set(entry, "Py_None")
- else:
- code.put_var_xdecref_clear(entry)
-
-
class ComprehensionNode(ScopedExprNode):
# A list/set/dict comprehension
@@ -8571,6 +8700,12 @@ class ComprehensionNode(ScopedExprNode):
def analyse_declarations(self, env):
self.append.target = self # this is used in the PyList_Append of the inner loop
self.init_scope(env)
+ # setup loop scope
+ if isinstance(self.loop, Nodes._ForInStatNode):
+ assert isinstance(self.loop.iterator, ScopedExprNode), self.loop.iterator
+ self.loop.iterator.init_scope(None, env)
+ else:
+ assert isinstance(self.loop, Nodes.ForFromStatNode), self.loop
def analyse_scoped_declarations(self, env):
self.loop.analyse_declarations(env)
@@ -9839,6 +9974,12 @@ class CodeObjectNode(ExprNode):
flags.append('CO_VARARGS')
if self.def_node.starstar_arg:
flags.append('CO_VARKEYWORDS')
+ if self.def_node.is_asyncgen:
+ flags.append('CO_ASYNC_GENERATOR')
+ elif self.def_node.is_coroutine:
+ flags.append('CO_COROUTINE')
+ elif self.def_node.is_generator:
+ flags.append('CO_GENERATOR')
code.putln("%s = (PyObject*)__Pyx_PyCode_New(%d, %d, %d, %d, 0, %s, %s, %s, %s, %s, %s, %s, %s, %s, %d, %s); %s" % (
self.result_code,
@@ -9989,10 +10130,18 @@ class GeneratorExpressionNode(LambdaNode):
#
# loop ForStatNode the for-loop, containing a YieldExprNode
# def_node DefNode the underlying generator 'def' node
+ # call_parameters [ExprNode] (Internal) parameters passed to the DefNode call
name = StringEncoding.EncodedString('genexpr')
binding = False
+ child_attrs = LambdaNode.child_attrs + ["call_parameters"]
+ subexprs = LambdaNode.subexprs + ["call_parameters"]
+
+ def __init__(self, pos, *args, **kwds):
+ super(GeneratorExpressionNode, self).__init__(pos, *args, **kwds)
+ self.call_parameters = []
+
def analyse_declarations(self, env):
if hasattr(self, "genexpr_name"):
# this if-statement makes it safe to run twice
@@ -10005,13 +10154,22 @@ class GeneratorExpressionNode(LambdaNode):
self.def_node.is_cyfunction = False
# Force genexpr signature
self.def_node.entry.signature = TypeSlots.pyfunction_noargs
+ # setup loop scope
+ if isinstance(self.loop, Nodes._ForInStatNode):
+ assert isinstance(self.loop.iterator, ScopedExprNode)
+ self.loop.iterator.init_scope(None, env)
+ else:
+ assert isinstance(self.loop, Nodes.ForFromStatNode)
def generate_result_code(self, code):
+ args_to_call = ([self.closure_result_code()] +
+ [ cp.result() for cp in self.call_parameters ])
+ args_to_call = ", ".join(args_to_call)
code.putln(
'%s = %s(%s); %s' % (
self.result(),
self.def_node.entry.pyfunc_cname,
- self.closure_result_code(),
+ args_to_call,
code.error_goto_if_null(self.result(), self.pos)))
self.generate_gotref(code)
@@ -10115,6 +10273,8 @@ class YieldExprNode(ExprNode):
if type.is_pyobject:
code.putln('%s = 0;' % save_cname)
code.put_xgotref(cname, type)
+ elif type.is_memoryviewslice:
+ code.putln('%s.memview = NULL; %s.data = NULL;' % (save_cname, save_cname))
self.generate_sent_value_handling_code(code, Naming.sent_value_cname)
if self.result_is_used:
self.allocate_temp_result(code)
@@ -10338,6 +10498,7 @@ class UnopNode(ExprNode):
subexprs = ['operand']
infix = True
+ is_inc_dec_op = False
def calculate_constant_result(self):
func = compile_time_unary_operators[self.operator]
@@ -10449,7 +10610,10 @@ class UnopNode(ExprNode):
self.type = PyrexTypes.error_type
def analyse_cpp_operation(self, env, overload_check=True):
- entry = env.lookup_operator(self.operator, [self.operand])
+ operand_types = [self.operand.type]
+ if self.is_inc_dec_op and not self.is_prefix:
+ operand_types.append(PyrexTypes.c_int_type)
+ entry = env.lookup_operator_for_types(self.pos, self.operator, operand_types)
if overload_check and not entry:
self.type_error()
return
@@ -10463,7 +10627,12 @@ class UnopNode(ExprNode):
else:
self.exception_check = ''
self.exception_value = ''
- cpp_type = self.operand.type.find_cpp_operation_type(self.operator)
+ if self.is_inc_dec_op and not self.is_prefix:
+ cpp_type = self.operand.type.find_cpp_operation_type(
+ self.operator, operand_type=PyrexTypes.c_int_type
+ )
+ else:
+ cpp_type = self.operand.type.find_cpp_operation_type(self.operator)
if overload_check and cpp_type is None:
error(self.pos, "'%s' operator not defined for %s" % (
self.operator, type))
@@ -10605,6 +10774,17 @@ class DereferenceNode(CUnopNode):
class DecrementIncrementNode(CUnopNode):
# unary ++/-- operator
+ is_inc_dec_op = True
+
+ def type_error(self):
+ if not self.operand.type.is_error:
+ if self.is_prefix:
+ error(self.pos, "No match for 'operator%s' (operand type is '%s')" %
+ (self.operator, self.operand.type))
+ else:
+ error(self.pos, "No 'operator%s(int)' declared for postfix '%s' (operand type is '%s')" %
+ (self.operator, self.operator, self.operand.type))
+ self.type = PyrexTypes.error_type
def analyse_c_operation(self, env):
if self.operand.type.is_numeric:
diff --git a/Cython/Compiler/FlowControl.pxd b/Cython/Compiler/FlowControl.pxd
index 4a8ef19c1..a15f86cf6 100644
--- a/Cython/Compiler/FlowControl.pxd
+++ b/Cython/Compiler/FlowControl.pxd
@@ -36,6 +36,7 @@ cdef class NameAssignment:
cdef public set refs
cdef public object bit
cdef public object inferred_type
+ cdef public object rhs_scope
cdef class AssignmentList:
cdef public object bit
@@ -65,7 +66,7 @@ cdef class ControlFlow:
cpdef bint is_tracked(self, entry)
cpdef bint is_statically_assigned(self, entry)
cpdef mark_position(self, node)
- cpdef mark_assignment(self, lhs, rhs, entry)
+ cpdef mark_assignment(self, lhs, rhs, entry, rhs_scope=*)
cpdef mark_argument(self, lhs, rhs, entry)
cpdef mark_deletion(self, node, entry)
cpdef mark_reference(self, node, entry)
@@ -103,12 +104,11 @@ cdef class ControlFlowAnalysis(CythonTransform):
cdef object gv_ctx
cdef object constant_folder
cdef set reductions
- cdef list env_stack
- cdef list stack
+ cdef list stack # a stack of (env, flow) tuples
cdef object env
cdef ControlFlow flow
cdef object object_expr
cdef bint in_inplace_assignment
- cpdef mark_assignment(self, lhs, rhs=*)
+ cpdef mark_assignment(self, lhs, rhs=*, rhs_scope=*)
cpdef mark_position(self, node)
diff --git a/Cython/Compiler/FlowControl.py b/Cython/Compiler/FlowControl.py
index 4018ff851..294bce9ee 100644
--- a/Cython/Compiler/FlowControl.py
+++ b/Cython/Compiler/FlowControl.py
@@ -172,9 +172,9 @@ class ControlFlow(object):
if self.block:
self.block.positions.add(node.pos[:2])
- def mark_assignment(self, lhs, rhs, entry):
+ def mark_assignment(self, lhs, rhs, entry, rhs_scope=None):
if self.block and self.is_tracked(entry):
- assignment = NameAssignment(lhs, rhs, entry)
+ assignment = NameAssignment(lhs, rhs, entry, rhs_scope=rhs_scope)
self.block.stats.append(assignment)
self.block.gen[entry] = assignment
self.entries.add(entry)
@@ -315,7 +315,7 @@ class ExceptionDescr(object):
class NameAssignment(object):
- def __init__(self, lhs, rhs, entry):
+ def __init__(self, lhs, rhs, entry, rhs_scope=None):
if lhs.cf_state is None:
lhs.cf_state = set()
self.lhs = lhs
@@ -326,16 +326,18 @@ class NameAssignment(object):
self.is_arg = False
self.is_deletion = False
self.inferred_type = None
+ # For generator expression targets, the rhs can have a different scope than the lhs.
+ self.rhs_scope = rhs_scope
def __repr__(self):
return '%s(entry=%r)' % (self.__class__.__name__, self.entry)
def infer_type(self):
- self.inferred_type = self.rhs.infer_type(self.entry.scope)
+ self.inferred_type = self.rhs.infer_type(self.rhs_scope or self.entry.scope)
return self.inferred_type
def type_dependencies(self):
- return self.rhs.type_dependencies(self.entry.scope)
+ return self.rhs.type_dependencies(self.rhs_scope or self.entry.scope)
@property
def type(self):
@@ -677,6 +679,14 @@ class AssignmentCollector(TreeVisitor):
class ControlFlowAnalysis(CythonTransform):
+ def find_in_stack(self, env):
+ if env == self.env:
+ return self.flow
+ for e, flow in reversed(self.stack):
+ if e is env:
+ return flow
+ assert False
+
def visit_ModuleNode(self, node):
dot_output = self.current_directives['control_flow.dot_output']
self.gv_ctx = GVContext() if dot_output else None
@@ -688,10 +698,9 @@ class ControlFlowAnalysis(CythonTransform):
self.reductions = set()
self.in_inplace_assignment = False
- self.env_stack = []
self.env = node.scope
- self.stack = []
self.flow = ControlFlow()
+ self.stack = [] # a stack of (env, flow) tuples
self.object_expr = TypedExprNode(PyrexTypes.py_object_type, may_be_none=True)
self.visitchildren(node)
@@ -708,9 +717,8 @@ class ControlFlowAnalysis(CythonTransform):
if arg.default:
self.visitchildren(arg)
self.visitchildren(node, ('decorators',))
- self.env_stack.append(self.env)
+ self.stack.append((self.env, self.flow))
self.env = node.local_scope
- self.stack.append(self.flow)
self.flow = ControlFlow()
# Collect all entries
@@ -751,8 +759,7 @@ class ControlFlowAnalysis(CythonTransform):
if self.gv_ctx is not None:
self.gv_ctx.add(GV(node.local_scope.name, self.flow))
- self.flow = self.stack.pop()
- self.env = self.env_stack.pop()
+ self.env, self.flow = self.stack.pop()
return node
def visit_DefNode(self, node):
@@ -765,7 +772,7 @@ class ControlFlowAnalysis(CythonTransform):
def visit_CTypeDefNode(self, node):
return node
- def mark_assignment(self, lhs, rhs=None):
+ def mark_assignment(self, lhs, rhs=None, rhs_scope=None):
if not self.flow.block:
return
if self.flow.exceptions:
@@ -782,7 +789,7 @@ class ControlFlowAnalysis(CythonTransform):
entry = self.env.lookup(lhs.name)
if entry is None: # TODO: This shouldn't happen...
return
- self.flow.mark_assignment(lhs, rhs, entry)
+ self.flow.mark_assignment(lhs, rhs, entry, rhs_scope=rhs_scope)
elif lhs.is_sequence_constructor:
for i, arg in enumerate(lhs.args):
if arg.is_starred:
@@ -979,10 +986,11 @@ class ControlFlowAnalysis(CythonTransform):
is_special = False
sequence = node.iterator.sequence
target = node.target
+ env = node.iterator.expr_scope or self.env
if isinstance(sequence, ExprNodes.SimpleCallNode):
function = sequence.function
if sequence.self is None and function.is_name:
- entry = self.env.lookup(function.name)
+ entry = env.lookup(function.name)
if not entry or entry.is_builtin:
if function.name == 'reversed' and len(sequence.args) == 1:
sequence = sequence.args[0]
@@ -990,30 +998,32 @@ class ControlFlowAnalysis(CythonTransform):
if target.is_sequence_constructor and len(target.args) == 2:
iterator = sequence.args[0]
if iterator.is_name:
- iterator_type = iterator.infer_type(self.env)
+ iterator_type = iterator.infer_type(env)
if iterator_type.is_builtin_type:
# assume that builtin types have a length within Py_ssize_t
self.mark_assignment(
target.args[0],
ExprNodes.IntNode(target.pos, value='PY_SSIZE_T_MAX',
- type=PyrexTypes.c_py_ssize_t_type))
+ type=PyrexTypes.c_py_ssize_t_type),
+ rhs_scope=node.iterator.expr_scope)
target = target.args[1]
sequence = sequence.args[0]
if isinstance(sequence, ExprNodes.SimpleCallNode):
function = sequence.function
if sequence.self is None and function.is_name:
- entry = self.env.lookup(function.name)
+ entry = env.lookup(function.name)
if not entry or entry.is_builtin:
if function.name in ('range', 'xrange'):
is_special = True
for arg in sequence.args[:2]:
- self.mark_assignment(target, arg)
+ self.mark_assignment(target, arg, rhs_scope=node.iterator.expr_scope)
if len(sequence.args) > 2:
self.mark_assignment(target, self.constant_folder(
ExprNodes.binop_node(node.pos,
'+',
sequence.args[0],
- sequence.args[2])))
+ sequence.args[2])),
+ rhs_scope=node.iterator.expr_scope)
if not is_special:
# A for-loop basically translates to subsequent calls to
@@ -1022,7 +1032,7 @@ class ControlFlowAnalysis(CythonTransform):
# Python strings, etc., while correctly falling back to an
# object type when the base type cannot be handled.
- self.mark_assignment(target, node.item)
+ self.mark_assignment(target, node.item, rhs_scope=node.iterator.expr_scope)
def visit_AsyncForStatNode(self, node):
return self.visit_ForInStatNode(node)
@@ -1321,21 +1331,25 @@ class ControlFlowAnalysis(CythonTransform):
def visit_ComprehensionNode(self, node):
if node.expr_scope:
- self.env_stack.append(self.env)
+ self.stack.append((self.env, self.flow))
self.env = node.expr_scope
# Skip append node here
self._visit(node.loop)
if node.expr_scope:
- self.env = self.env_stack.pop()
+ self.env, _ = self.stack.pop()
return node
def visit_ScopedExprNode(self, node):
+ # currently this is written to deal with these two types
+ # (with comprehensions covered in their own function)
+ assert isinstance(node, (ExprNodes.IteratorNode, ExprNodes.AsyncIteratorNode)), node
if node.expr_scope:
- self.env_stack.append(self.env)
+ self.stack.append((self.env, self.flow))
+ self.flow = self.find_in_stack(node.expr_scope)
self.env = node.expr_scope
self.visitchildren(node)
if node.expr_scope:
- self.env = self.env_stack.pop()
+ self.env, self.flow = self.stack.pop()
return node
def visit_PyClassDefNode(self, node):
@@ -1343,14 +1357,21 @@ class ControlFlowAnalysis(CythonTransform):
'mkw', 'bases', 'class_result'))
self.flow.mark_assignment(node.target, node.classobj,
self.env.lookup(node.target.name))
- self.env_stack.append(self.env)
+ self.stack.append((self.env, self.flow))
self.env = node.scope
self.flow.nextblock()
if node.doc_node:
self.flow.mark_assignment(node.doc_node, fake_rhs_expr, node.doc_node.entry)
self.visitchildren(node, ('body',))
self.flow.nextblock()
- self.env = self.env_stack.pop()
+ self.env, _ = self.stack.pop()
+ return node
+
+ def visit_CClassDefNode(self, node):
+ # just make sure the nodes scope is findable in-case there is a list comprehension in it
+ self.stack.append((node.scope, self.flow))
+ self.visitchildren(node)
+ self.stack.pop()
return node
def visit_AmpersandNode(self, node):
diff --git a/Cython/Compiler/FusedNode.py b/Cython/Compiler/FusedNode.py
index 5639cdf28..4643cfb65 100644
--- a/Cython/Compiler/FusedNode.py
+++ b/Cython/Compiler/FusedNode.py
@@ -321,25 +321,21 @@ class FusedCFuncDefNode(StatListNode):
def _buffer_check_numpy_dtype_setup_cases(self, pyx_code):
"Setup some common cases to match dtypes against specializations"
- if pyx_code.indenter("if kind in b'iu':"):
+ with pyx_code.indenter("if kind in b'iu':"):
pyx_code.putln("pass")
pyx_code.named_insertion_point("dtype_int")
- pyx_code.dedent()
- if pyx_code.indenter("elif kind == b'f':"):
+ with pyx_code.indenter("elif kind == b'f':"):
pyx_code.putln("pass")
pyx_code.named_insertion_point("dtype_float")
- pyx_code.dedent()
- if pyx_code.indenter("elif kind == b'c':"):
+ with pyx_code.indenter("elif kind == b'c':"):
pyx_code.putln("pass")
pyx_code.named_insertion_point("dtype_complex")
- pyx_code.dedent()
- if pyx_code.indenter("elif kind == b'O':"):
+ with pyx_code.indenter("elif kind == b'O':"):
pyx_code.putln("pass")
pyx_code.named_insertion_point("dtype_object")
- pyx_code.dedent()
match = "dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'"
no_match = "dest_sig[{{dest_sig_idx}}] = None"
@@ -376,11 +372,10 @@ class FusedCFuncDefNode(StatListNode):
if final_type.is_pythran_expr:
cond += ' and arg_is_pythran_compatible'
- if codewriter.indenter("if %s:" % cond):
+ with codewriter.indenter("if %s:" % cond):
#codewriter.putln("print 'buffer match found based on numpy dtype'")
codewriter.putln(self.match)
codewriter.putln("break")
- codewriter.dedent()
def _buffer_parse_format_string_check(self, pyx_code, decl_code,
specialized_type, env):
@@ -697,7 +692,7 @@ class FusedCFuncDefNode(StatListNode):
self._unpack_argument(pyx_code)
# 'unrolled' loop, first match breaks out of it
- if pyx_code.indenter("while 1:"):
+ with pyx_code.indenter("while 1:"):
if normal_types:
self._fused_instance_checks(normal_types, pyx_code, env)
if buffer_types or pythran_types:
@@ -709,7 +704,6 @@ class FusedCFuncDefNode(StatListNode):
else:
pyx_code.putln(self.no_match)
pyx_code.putln("break")
- pyx_code.dedent()
fused_index += 1
all_buffer_types.update(buffer_types)
diff --git a/Cython/Compiler/Main.py b/Cython/Compiler/Main.py
index 764d9af21..d5985457d 100644
--- a/Cython/Compiler/Main.py
+++ b/Cython/Compiler/Main.py
@@ -2,7 +2,7 @@
# Cython Top Level
#
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
import os
import re
@@ -143,6 +143,29 @@ class Context(object):
def nonfatal_error(self, exc):
return Errors.report_error(exc)
+ def _split_qualified_name(self, qualified_name):
+        # Splits qualified_name into parts in the form of 2-tuples: (PART_NAME, IS_PACKAGE).
+ qualified_name_parts = qualified_name.split('.')
+ last_part = qualified_name_parts.pop()
+ qualified_name_parts = [(p, True) for p in qualified_name_parts]
+ if last_part != '__init__':
+            # If the last part is __init__, then it is omitted. Otherwise, we need to check whether we can find
+            # an __init__.pyx/__init__.py file to determine if the last part is a package or not.
+ is_package = False
+ for suffix in ('.py', '.pyx'):
+ path = self.search_include_directories(
+ qualified_name, suffix=suffix, source_pos=None, source_file_path=None)
+ if path:
+ is_package = self._is_init_file(path)
+ break
+
+ qualified_name_parts.append((last_part, is_package))
+ return qualified_name_parts
+
+ @staticmethod
+ def _is_init_file(path):
+ return os.path.basename(path) in ('__init__.pyx', '__init__.py', '__init__.pxd') if path else False
+
def find_module(self, module_name, relative_to=None, pos=None, need_pxd=1,
absolute_fallback=True):
# Finds and returns the module scope corresponding to
@@ -182,16 +205,16 @@ class Context(object):
if not scope:
pxd_pathname = self.find_pxd_file(qualified_name, pos)
if pxd_pathname:
- scope = relative_to.find_submodule(module_name)
+ is_package = self._is_init_file(pxd_pathname)
+ scope = relative_to.find_submodule(module_name, as_package=is_package)
if not scope:
if debug_find_module:
print("...trying absolute import")
if absolute_fallback:
qualified_name = module_name
scope = self
- for name in qualified_name.split("."):
- scope = scope.find_submodule(name)
-
+ for name, is_package in self._split_qualified_name(qualified_name):
+ scope = scope.find_submodule(name, as_package=is_package)
if debug_find_module:
print("...scope = %s" % scope)
if not scope.pxd_file_loaded:
@@ -321,12 +344,12 @@ class Context(object):
# Look up a top-level module. Returns None if not found.
return self.modules.get(name, None)
- def find_submodule(self, name):
+ def find_submodule(self, name, as_package=False):
# Find a top-level module, creating a new one if needed.
scope = self.lookup_submodule(name)
if not scope:
scope = ModuleScope(name,
- parent_module = None, context = self)
+ parent_module = None, context = self, is_package=as_package)
self.modules[name] = scope
return scope
@@ -502,6 +525,10 @@ def run_pipeline(source, options, full_module_name=None, context=None):
err, enddata = Pipeline.run_pipeline(pipeline, source)
context.teardown_errors(err, options, result)
+ if options.depfile:
+ from ..Build.Dependencies import create_dependency_tree
+ dependencies = create_dependency_tree(context).all_dependencies(result.main_source_file)
+ Utils.write_depfile(result.c_file, result.main_source_file, dependencies)
return result
@@ -583,6 +610,9 @@ def compile_multiple(sources, options):
a CompilationResultSet. Performs timestamp checking and/or recursion
if these are specified in the options.
"""
+ if len(sources) > 1 and options.module_name:
+ raise RuntimeError('Full module name can only be set '
+ 'for single source compilation')
# run_pipeline creates the context
# context = Context.from_options(options)
sources = [os.path.abspath(source) for source in sources]
@@ -601,8 +631,9 @@ def compile_multiple(sources, options):
if (not timestamps) or out_of_date:
if verbose:
sys.stderr.write("Compiling %s\n" % source)
-
- result = run_pipeline(source, options, context=context)
+ result = run_pipeline(source, options,
+ full_module_name=options.module_name,
+ context=context)
results.add(source, result)
# Compiling multiple sources in one context doesn't quite
# work properly yet.
@@ -716,7 +747,16 @@ def main(command_line = 0):
args = sys.argv[1:]
any_failures = 0
if command_line:
- options, sources = parse_command_line(args)
+ try:
+ options, sources = parse_command_line(args)
+ except IOError as e:
+ # TODO: IOError can be replaced with FileNotFoundError in Cython 3.1
+ import errno
+ if errno.ENOENT != e.errno:
+ # Raised IOError is not caused by missing file.
+ raise
+ print("{}: No such file or directory: '{}'".format(sys.argv[0], e.filename), file=sys.stderr)
+ sys.exit(1)
else:
options = CompilationOptions(default_options)
sources = args
diff --git a/Cython/Compiler/MemoryView.py b/Cython/Compiler/MemoryView.py
index 6df53dcb6..5ebd396be 100644
--- a/Cython/Compiler/MemoryView.py
+++ b/Cython/Compiler/MemoryView.py
@@ -295,7 +295,7 @@ class MemoryViewSliceBufferEntry(Buffer.BufferEntry):
dim += 1
access, packing = self.type.axes[dim]
- if isinstance(index, ExprNodes.SliceNode):
+ if index.is_slice:
# slice, unspecified dimension, or part of ellipsis
d = dict(locals())
for s in "start stop step".split():
diff --git a/Cython/Compiler/ModuleNode.py b/Cython/Compiler/ModuleNode.py
index f83a51706..53aaf026e 100644
--- a/Cython/Compiler/ModuleNode.py
+++ b/Cython/Compiler/ModuleNode.py
@@ -262,7 +262,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
api_guard = self.api_name(Naming.api_guard_prefix, env)
h_code_start.putln("#ifndef %s" % api_guard)
h_code_start.putln("")
- self.generate_extern_c_macro_definition(h_code_start)
+ self.generate_extern_c_macro_definition(h_code_start, env.is_cpp())
h_code_start.putln("")
self.generate_dl_import_macro(h_code_start)
if h_extension_types:
@@ -804,7 +804,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln(" { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; }")
code.putln("")
- self.generate_extern_c_macro_definition(code)
+ self.generate_extern_c_macro_definition(code, env.is_cpp())
code.putln("")
code.putln("#define %s" % self.api_name(Naming.h_guard_prefix, env))
@@ -876,14 +876,17 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if has_np_pythran(env):
env.use_utility_code(UtilityCode.load_cached("PythranConversion", "CppSupport.cpp"))
- def generate_extern_c_macro_definition(self, code):
+ def generate_extern_c_macro_definition(self, code, is_cpp):
name = Naming.extern_c_macro
code.putln("#ifndef %s" % name)
- code.putln(" #ifdef __cplusplus")
- code.putln(' #define %s extern "C"' % name)
- code.putln(" #else")
- code.putln(" #define %s extern" % name)
- code.putln(" #endif")
+ if is_cpp:
+ code.putln(' #define %s extern "C++"' % name)
+ else:
+ code.putln(" #ifdef __cplusplus")
+ code.putln(' #define %s extern "C"' % name)
+ code.putln(" #else")
+ code.putln(" #define %s extern" % name)
+ code.putln(" #endif")
code.putln("#endif")
def generate_dl_import_macro(self, code):
@@ -972,7 +975,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
def generate_typedef(self, entry, code):
base_type = entry.type.typedef_base_type
- if base_type.is_numeric:
+ enclosing_scope = entry.scope
+ if base_type.is_numeric and not enclosing_scope.is_cpp_class_scope:
try:
writer = code.globalstate['numeric_typedefs']
except KeyError:
@@ -1048,6 +1052,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
[base_class.empty_declaration_code() for base_class in type.base_classes])
code.put(" : public %s" % base_class_decl)
code.putln(" {")
+ self.generate_type_header_code(scope.type_entries, code)
py_attrs = [e for e in scope.entries.values()
if e.type.is_pyobject and not e.is_inherited]
has_virtual_methods = False
@@ -1632,7 +1637,6 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
entry = scope.lookup_here("__del__")
if entry is None or not entry.is_special:
return # nothing to wrap
- slot_func_cname = scope.mangle_internal("tp_finalize")
code.putln("")
if tp_slot.used_ifdef:
@@ -1677,7 +1681,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if py_attrs or cpp_destructable_attrs or memoryview_slices or weakref_slot or dict_slot:
self.generate_self_cast(scope, code)
- if not is_final_type:
+ if not is_final_type or scope.may_have_finalize():
# in Py3.4+, call tp_finalize() as early as possible
code.putln("#if CYTHON_USE_TP_FINALIZE")
if needs_gc:
@@ -3112,7 +3116,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if Options.generate_cleanup_code:
code.globalstate.use_utility_code(
UtilityCode.load_cached("RegisterModuleCleanup", "ModuleSetupCode.c"))
- code.putln("if (__Pyx_RegisterCleanup()) %s;" % code.error_goto(self.pos))
+ code.putln("if (__Pyx_RegisterCleanup()) %s" % code.error_goto(self.pos))
code.put_goto(code.return_label)
code.put_label(code.error_label)
@@ -3526,7 +3530,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.error_goto_if_null(Naming.cython_runtime_cname, self.pos)))
code.put_incref(Naming.cython_runtime_cname, py_object_type, nanny=False)
code.putln(
- 'if (PyObject_SetAttrString(%s, "__builtins__", %s) < 0) %s;' % (
+ 'if (PyObject_SetAttrString(%s, "__builtins__", %s) < 0) %s' % (
env.module_cname,
Naming.builtins_cname,
code.error_goto(self.pos)))
@@ -3772,14 +3776,14 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if not condition:
code.putln("") # start in new line
code.putln("#if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000")
- code.putln('sizeof(%s),' % objstruct)
+ code.putln('sizeof(%s), __PYX_GET_STRUCT_ALIGNMENT(%s),' % (objstruct, objstruct))
code.putln("#elif CYTHON_COMPILING_IN_LIMITED_API")
- code.putln('sizeof(%s),' % objstruct)
+ code.putln('sizeof(%s), __PYX_GET_STRUCT_ALIGNMENT(%s),' % (objstruct, objstruct))
code.putln("#else")
- code.putln('sizeof(%s),' % sizeof_objstruct)
+ code.putln('sizeof(%s), __PYX_GET_STRUCT_ALIGNMENT(%s),' % (sizeof_objstruct, sizeof_objstruct))
code.putln("#endif")
else:
- code.put('sizeof(%s), ' % objstruct)
+ code.putln('sizeof(%s), __PYX_GET_STRUCT_ALIGNMENT(%s),' % (objstruct, objstruct))
# check_size
if type.check_size and type.check_size in ('error', 'warn', 'ignore'):
diff --git a/Cython/Compiler/Naming.py b/Cython/Compiler/Naming.py
index 96c0b8fbd..1931e5976 100644
--- a/Cython/Compiler/Naming.py
+++ b/Cython/Compiler/Naming.py
@@ -17,6 +17,7 @@ pyunicode_identifier_prefix = pyrex_prefix + 'U'
builtin_prefix = pyrex_prefix + "builtin_"
arg_prefix = pyrex_prefix + "arg_"
+genexpr_arg_prefix = pyrex_prefix + "genexpr_arg_"
funcdoc_prefix = pyrex_prefix + "doc_"
enum_prefix = pyrex_prefix + "e_"
func_prefix = pyrex_prefix + "f_"
diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py
index 476b380a3..5c3321326 100644
--- a/Cython/Compiler/Nodes.py
+++ b/Cython/Compiler/Nodes.py
@@ -730,13 +730,15 @@ class CFuncDeclaratorNode(CDeclaratorNode):
# Use an explicit exception return value to speed up exception checks.
# Even if it is not declared, we can use the default exception value of the return type,
# unless the function is some kind of external function that we do not control.
- if (return_type.exception_value is not None and (visibility != 'extern' and not in_pxd)
- # Ideally the function-pointer test would be better after self.base is analysed
- # however that is hard to do with the current implementation so it lives here
- # for now
- and not isinstance(self.base, CPtrDeclaratorNode)):
- # Extension types are more difficult because the signature must match the base type signature.
- if not env.is_c_class_scope:
+ if (return_type.exception_value is not None and (visibility != 'extern' and not in_pxd)):
+ # - We skip this optimization for extension types; they are more difficult because
+ # the signature must match the base type signature.
+ # - Same for function pointers, as we want them to be able to match functions
+ # with any exception value.
+ # - Ideally the function-pointer test would be better after self.base is analysed
+ # however that is hard to do with the current implementation so it lives here
+ # for now.
+ if not env.is_c_class_scope and not isinstance(self.base, CPtrDeclaratorNode):
from .ExprNodes import ConstNode
self.exception_value = ConstNode(
self.pos, value=return_type.exception_value, type=return_type)
@@ -1633,6 +1635,9 @@ class CppClassNode(CStructOrUnionDefNode, BlockNode):
elif isinstance(attr, CompilerDirectivesNode):
for sub_attr in func_attributes(attr.body.stats):
yield sub_attr
+ elif isinstance(attr, CppClassNode) and attr.attributes is not None:
+ for sub_attr in func_attributes(attr.attributes):
+ yield sub_attr
if self.attributes is not None:
if self.in_pxd and not env.in_cinclude:
self.entry.defined_in_pxd = 1
@@ -2117,7 +2122,6 @@ class FuncDefNode(StatNode, BlockNode):
self.generate_argument_parsing_code(env, code)
# If an argument is assigned to in the body, we must
# incref it to properly keep track of refcounts.
- is_cdef = isinstance(self, CFuncDefNode)
for entry in lenv.arg_entries:
if not entry.type.is_memoryviewslice:
if (acquire_gil or entry.cf_is_reassigned) and not entry.in_closure:
@@ -2126,7 +2130,7 @@ class FuncDefNode(StatNode, BlockNode):
# we acquire arguments from object conversion, so we have
# new references. If we are a cdef function, we need to
# incref our arguments
- elif is_cdef and entry.cf_is_reassigned:
+ elif entry.cf_is_reassigned and not entry.in_closure:
code.put_var_incref_memoryviewslice(entry,
have_gil=code.funcstate.gil_owned)
for entry in lenv.var_entries:
@@ -2328,14 +2332,14 @@ class FuncDefNode(StatNode, BlockNode):
# Decref any increfed args
for entry in lenv.arg_entries:
+ if entry.in_closure:
+ continue
if entry.type.is_memoryviewslice:
# decref slices of def functions and acquired slices from cdef
# functions, but not borrowed slices from cdef functions.
- if is_cdef and not entry.cf_is_reassigned:
+ if not entry.cf_is_reassigned:
continue
else:
- if entry.in_closure:
- continue
if not acquire_gil and not entry.cf_is_reassigned:
continue
if entry.type.needs_refcounting:
@@ -2881,8 +2885,11 @@ class CFuncDefNode(FuncDefNode):
def put_into_closure(entry):
if entry.in_closure and not arg.default:
code.putln('%s = %s;' % (entry.cname, entry.original_cname))
- code.put_var_incref(entry)
- code.put_var_giveref(entry)
+ if entry.type.is_memoryviewslice:
+ entry.type.generate_incref_memoryviewslice(code, entry.cname, True)
+ else:
+ code.put_var_incref(entry)
+ code.put_var_giveref(entry)
for arg in self.args:
put_into_closure(scope.lookup_here(arg.name))
@@ -3511,8 +3518,20 @@ class DefNode(FuncDefNode):
# Move arguments into closure if required
def put_into_closure(entry):
if entry.in_closure:
- code.putln('%s = %s;' % (entry.cname, entry.original_cname))
- if entry.xdecref_cleanup:
+ if entry.type.is_array:
+                    # This applies to generator expressions that iterate over C arrays (and need to
+                    # capture them by value); under most other circumstances, C array arguments are dropped
+                    # to pointers, so this copy isn't used
+ assert entry.type.size is not None
+ code.globalstate.use_utility_code(UtilityCode.load_cached("IncludeStringH", "StringTools.c"))
+ code.putln("memcpy({0}, {1}, sizeof({0}));".format(entry.cname, entry.original_cname))
+ else:
+ code.putln('%s = %s;' % (entry.cname, entry.original_cname))
+ if entry.type.is_memoryviewslice:
+ # TODO - at some point reference count of memoryviews should
+ # genuinely be unified with PyObjects
+ entry.type.generate_incref_memoryviewslice(code, entry.cname, True)
+ elif entry.xdecref_cleanup:
# mostly applies to the starstar arg - this can sometimes be NULL
# so must be xincrefed instead
code.put_var_xincref(entry)
@@ -3670,11 +3689,20 @@ class DefNodeWrapper(FuncDefNode):
# ----- Non-error return cleanup
code.put_label(code.return_label)
for entry in lenv.var_entries:
- if entry.is_arg and entry.type.is_pyobject:
+ if entry.is_arg:
+ # mainly captures the star/starstar args
if entry.xdecref_cleanup:
code.put_var_xdecref(entry)
else:
code.put_var_decref(entry)
+ for arg in self.args:
+ if not arg.type.is_pyobject:
+ # This captures anything that's been converted from a PyObject.
+ # Primarily memoryviews at the moment
+ if arg.entry.xdecref_cleanup:
+ code.put_var_xdecref(arg.entry)
+ else:
+ code.put_var_decref(arg.entry)
code.put_finish_refcount_context()
if not self.return_type.is_void:
@@ -3727,7 +3755,7 @@ class DefNodeWrapper(FuncDefNode):
with_pymethdef = False
dc = self.return_type.declaration_code(entry.func_cname)
- header = "static %s%s(%s)" % (mf, dc, arg_code)
+ header = "%sstatic %s(%s)" % (mf, dc, arg_code)
code.putln("%s; /*proto*/" % header)
if proto_only:
@@ -5154,7 +5182,6 @@ class CClassDefNode(ClassDefNode):
check_size = None
decorators = None
shadow = False
- is_dataclass = False
@property
def punycode_class_name(self):
@@ -5196,6 +5223,8 @@ class CClassDefNode(ClassDefNode):
api=self.api,
buffer_defaults=self.buffer_defaults(env),
shadow=self.shadow)
+ if self.bases and len(self.bases.args) > 1:
+ self.entry.type.multiple_bases = True
def analyse_declarations(self, env):
#print "CClassDefNode.analyse_declarations:", self.class_name
@@ -5204,8 +5233,6 @@ class CClassDefNode(ClassDefNode):
if env.in_cinclude and not self.objstruct_name:
error(self.pos, "Object struct name specification required for C class defined in 'extern from' block")
- if "dataclasses.dataclass" in env.directives:
- self.is_dataclass = True
if self.decorators:
error(self.pos, "Decorators not allowed on cdef classes (used on type '%s')" % self.class_name)
self.base_type = None
@@ -5287,6 +5314,8 @@ class CClassDefNode(ClassDefNode):
api=self.api,
buffer_defaults=self.buffer_defaults(env),
shadow=self.shadow)
+ if self.bases and len(self.bases.args) > 1:
+ self.entry.type.multiple_bases = True
if self.shadow:
home_scope.lookup(self.class_name).as_variable = self.entry
@@ -5295,6 +5324,15 @@ class CClassDefNode(ClassDefNode):
self.scope = scope = self.entry.type.scope
if scope is not None:
scope.directives = env.directives
+ if "dataclasses.dataclass" in env.directives:
+ is_frozen = False
+ # Retrieve the @dataclass config (args, kwargs), as passed into the decorator.
+ dataclass_config = env.directives["dataclasses.dataclass"]
+ if dataclass_config:
+ decorator_kwargs = dataclass_config[1]
+ frozen_flag = decorator_kwargs.get('frozen')
+ is_frozen = frozen_flag and frozen_flag.is_literal and frozen_flag.value
+ scope.is_c_dataclass_scope = "frozen" if is_frozen else True
if self.doc and Options.docstrings:
scope.doc = embed_position(self.pos, self.doc)
@@ -8702,7 +8740,7 @@ class FromCImportStatNode(StatNode):
#
# module_name string Qualified name of module
# relative_level int or None Relative import: number of dots before module_name
- # imported_names [(pos, name, as_name, kind)] Names to be imported
+ # imported_names [(pos, name, as_name)] Names to be imported
child_attrs = []
module_name = None
@@ -8713,35 +8751,34 @@ class FromCImportStatNode(StatNode):
if not env.is_module_scope:
error(self.pos, "cimport only allowed at module level")
return
- if self.relative_level and self.relative_level > env.qualified_name.count('.'):
- error(self.pos, "relative cimport beyond main package is not allowed")
- return
+ qualified_name_components = env.qualified_name.count('.') + 1
+ if self.relative_level:
+ if self.relative_level > qualified_name_components:
+                # Case 1: importing beyond package: from .. import pkg
+ error(self.pos, "relative cimport beyond main package is not allowed")
+ return
+ elif self.relative_level == qualified_name_components and not env.is_package:
+ # 2. case: importing from same level but current dir is not package: from . import module
+ error(self.pos, "relative cimport from non-package directory is not allowed")
+ return
module_scope = env.find_module(self.module_name, self.pos, relative_level=self.relative_level)
module_name = module_scope.qualified_name
env.add_imported_module(module_scope)
- for pos, name, as_name, kind in self.imported_names:
+ for pos, name, as_name in self.imported_names:
if name == "*":
for local_name, entry in list(module_scope.entries.items()):
env.add_imported_entry(local_name, entry, pos)
else:
entry = module_scope.lookup(name)
if entry:
- if kind and not self.declaration_matches(entry, kind):
- entry.redeclared(pos)
entry.used = 1
else:
- if kind == 'struct' or kind == 'union':
- entry = module_scope.declare_struct_or_union(
- name, kind=kind, scope=None, typedef_flag=0, pos=pos)
- elif kind == 'class':
- entry = module_scope.declare_c_class(name, pos=pos, module_name=module_name)
+ submodule_scope = env.context.find_module(
+ name, relative_to=module_scope, pos=self.pos, absolute_fallback=False)
+ if submodule_scope.parent_module is module_scope:
+ env.declare_module(as_name or name, submodule_scope, self.pos)
else:
- submodule_scope = env.context.find_module(
- name, relative_to=module_scope, pos=self.pos, absolute_fallback=False)
- if submodule_scope.parent_module is module_scope:
- env.declare_module(as_name or name, submodule_scope, self.pos)
- else:
- error(pos, "Name '%s' not declared in module '%s'" % (name, module_name))
+ error(pos, "Name '%s' not declared in module '%s'" % (name, module_name))
if entry:
local_name = as_name or name
@@ -8750,7 +8787,7 @@ class FromCImportStatNode(StatNode):
if module_name.startswith('cpython') or module_name.startswith('cython'): # enough for now
if module_name in utility_code_for_cimports:
env.use_utility_code(utility_code_for_cimports[module_name]())
- for _, name, _, _ in self.imported_names:
+ for _, name, _ in self.imported_names:
fqname = '%s.%s' % (module_name, name)
if fqname in utility_code_for_cimports:
env.use_utility_code(utility_code_for_cimports[fqname]())
diff --git a/Cython/Compiler/Optimize.py b/Cython/Compiler/Optimize.py
index cea5970f6..231d23419 100644
--- a/Cython/Compiler/Optimize.py
+++ b/Cython/Compiler/Optimize.py
@@ -319,16 +319,6 @@ class IterationTransform(Visitor.EnvTransform):
return self._optimise_for_loop(node, arg, reversed=True)
- PyBytes_AS_STRING_func_type = PyrexTypes.CFuncType(
- PyrexTypes.c_char_ptr_type, [
- PyrexTypes.CFuncTypeArg("s", Builtin.bytes_type, None)
- ])
-
- PyBytes_GET_SIZE_func_type = PyrexTypes.CFuncType(
- PyrexTypes.c_py_ssize_t_type, [
- PyrexTypes.CFuncTypeArg("s", Builtin.bytes_type, None)
- ])
-
def _transform_indexable_iteration(self, node, slice_node, is_mutable, reversed=False):
"""In principle can handle any iterable that Cython has a len() for and knows how to index"""
unpack_temp_node = UtilNodes.LetRefNode(
@@ -415,6 +405,16 @@ class IterationTransform(Visitor.EnvTransform):
body.stats.insert(1, node.body)
return ret
+ PyBytes_AS_STRING_func_type = PyrexTypes.CFuncType(
+ PyrexTypes.c_char_ptr_type, [
+ PyrexTypes.CFuncTypeArg("s", Builtin.bytes_type, None)
+ ])
+
+ PyBytes_GET_SIZE_func_type = PyrexTypes.CFuncType(
+ PyrexTypes.c_py_ssize_t_type, [
+ PyrexTypes.CFuncTypeArg("s", Builtin.bytes_type, None)
+ ])
+
def _transform_bytes_iteration(self, node, slice_node, reversed=False):
target_type = node.target.type
if not target_type.is_int and target_type is not Builtin.bytes_type:
@@ -2105,7 +2105,8 @@ class InlineDefNodeCalls(Visitor.NodeRefCleanupMixin, Visitor.EnvTransform):
return node
inlined = ExprNodes.InlinedDefNodeCallNode(
node.pos, function_name=function_name,
- function=function, args=node.args)
+ function=function, args=node.args,
+ generator_arg_tag=node.generator_arg_tag)
if inlined.can_be_inlined():
return self.replace(node, inlined)
return node
diff --git a/Cython/Compiler/Options.py b/Cython/Compiler/Options.py
index 97f288905..73778aaf9 100644
--- a/Cython/Compiler/Options.py
+++ b/Cython/Compiler/Options.py
@@ -171,7 +171,7 @@ def copy_inherited_directives(outer_directives, **new_directives):
# For example, test_assert_path_exists and test_fail_if_path_exists should not be inherited
# otherwise they can produce very misleading test failures
new_directives_out = dict(outer_directives)
- for name in ('test_assert_path_exists', 'test_fail_if_path_exists'):
+ for name in ('test_assert_path_exists', 'test_fail_if_path_exists', 'test_assert_c_code_has', 'test_fail_if_c_code_has'):
new_directives_out.pop(name, None)
new_directives_out.update(new_directives)
return new_directives_out
@@ -247,6 +247,8 @@ _directive_defaults = {
# test support
'test_assert_path_exists' : [],
'test_fail_if_path_exists' : [],
+ 'test_assert_c_code_has' : [],
+ 'test_fail_if_c_code_has' : [],
# experimental, subject to change
'formal_grammar': False,
@@ -364,6 +366,8 @@ directive_scopes = { # defaults to available everywhere
'set_initial_path' : ('module',),
'test_assert_path_exists' : ('function', 'class', 'cclass'),
'test_fail_if_path_exists' : ('function', 'class', 'cclass'),
+ 'test_assert_c_code_has' : ('module',),
+ 'test_fail_if_c_code_has' : ('module',),
'freelist': ('cclass',),
'emit_code_comments': ('module',),
# Avoid scope-specific to/from_py_functions for c_string.
@@ -387,7 +391,7 @@ directive_scopes = { # defaults to available everywhere
# a list of directives that (when used as a decorator) are only applied to
# the object they decorate and not to its children.
immediate_decorator_directives = {
- 'cfunc', 'ccall', 'cclass',
+ 'cfunc', 'ccall', 'cclass', 'dataclasses.dataclass',
# function signature directives
'inline', 'exceptval', 'returns',
# class directives
@@ -509,6 +513,11 @@ def parse_directive_list(s, relaxed_bool=False, ignore_unknown=False,
result[directive] = parsed_value
if not found and not ignore_unknown:
raise ValueError('Unknown option: "%s"' % name)
+ elif directive_types.get(name) is list:
+ if name in result:
+ result[name].append(value)
+ else:
+ result[name] = [value]
else:
parsed_value = parse_directive_value(name, value, relaxed_bool=relaxed_bool)
result[name] = parsed_value
@@ -661,6 +670,9 @@ class CompilationOptions(object):
elif key in ['output_file', 'output_dir']:
# ignore the exact name of the output file
continue
+ elif key in ['depfile']:
+ # external build system dependency tracking file does not influence outputs
+ continue
elif key in ['timestamps']:
# the cache cares about the content of files, not about the timestamps of sources
continue
@@ -739,6 +751,7 @@ default_options = dict(
errors_to_stderr=1,
cplus=0,
output_file=None,
+ depfile=None,
annotate=None,
annotate_coverage_xml=None,
generate_pxi=0,
@@ -757,6 +770,7 @@ default_options = dict(
formal_grammar=False,
gdb_debug=False,
compile_time_env=None,
+ module_name=None,
common_utility_include_dir=None,
output_dir=None,
build_dir=None,
diff --git a/Cython/Compiler/ParseTreeTransforms.pxd b/Cython/Compiler/ParseTreeTransforms.pxd
index b79a2492c..2778be4ef 100644
--- a/Cython/Compiler/ParseTreeTransforms.pxd
+++ b/Cython/Compiler/ParseTreeTransforms.pxd
@@ -6,8 +6,8 @@ from .Visitor cimport (
CythonTransform, VisitorTransform, TreeVisitor,
ScopeTrackingTransform, EnvTransform)
-cdef class SkipDeclarations: # (object):
- pass
+# Don't include mixins, only the main classes.
+#cdef class SkipDeclarations:
cdef class NormalizeTree(CythonTransform):
cdef bint is_in_statlist
diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py
index 8bad4f8bc..5301578c3 100644
--- a/Cython/Compiler/ParseTreeTransforms.py
+++ b/Cython/Compiler/ParseTreeTransforms.py
@@ -6,10 +6,12 @@ import cython
cython.declare(PyrexTypes=object, Naming=object, ExprNodes=object, Nodes=object,
Options=object, UtilNodes=object, LetNode=object,
LetRefNode=object, TreeFragment=object, EncodedString=object,
- error=object, warning=object, copy=object, _unicode=object)
+ error=object, warning=object, copy=object, hashlib=object, sys=object,
+ _unicode=object)
import copy
import hashlib
+import sys
from . import PyrexTypes
from . import Naming
@@ -857,6 +859,14 @@ class InterpretCompilerDirectives(CythonTransform):
}
special_methods.update(unop_method_nodes)
+ valid_cython_submodules = {
+ 'cimports',
+ 'dataclasses',
+ 'operator',
+ 'parallel',
+ 'view',
+ }
+
valid_parallel_directives = {
"parallel",
"prange",
@@ -885,6 +895,34 @@ class InterpretCompilerDirectives(CythonTransform):
error(pos, "Invalid directive: '%s'." % (directive,))
return True
+ def _check_valid_cython_module(self, pos, module_name):
+ if not module_name.startswith("cython."):
+ return
+ if module_name.split('.', 2)[1] in self.valid_cython_submodules:
+ return
+
+ extra = ""
+ # This is very rarely used, so don't waste space on static tuples.
+ hints = [
+ line.split() for line in """\
+ imp cimports
+ cimp cimports
+ para parallel
+ parra parallel
+ dataclass dataclasses
+ """.splitlines()[:-1]
+ ]
+ for wrong, correct in hints:
+ if module_name.startswith("cython." + wrong):
+ extra = "Did you mean 'cython.%s' ?" % correct
+ break
+
+ error(pos, "'%s' is not a valid cython.* module%s%s" % (
+ module_name,
+ ". " if extra else "",
+ extra,
+ ))
+
# Set up processing and handle the cython: comments.
def visit_ModuleNode(self, node):
for key in sorted(node.directive_comments):
@@ -955,6 +993,9 @@ class InterpretCompilerDirectives(CythonTransform):
elif module_name.startswith(u"cython."):
if module_name.startswith(u"cython.parallel."):
error(node.pos, node.module_name + " is not a module")
+ else:
+ self._check_valid_cython_module(node.pos, module_name)
+
if module_name == u"cython.parallel":
if node.as_name and node.as_name != u"cython":
self.parallel_directives[node.as_name] = module_name
@@ -981,10 +1022,10 @@ class InterpretCompilerDirectives(CythonTransform):
node.pos, module_name, node.relative_level, node.imported_names)
elif not node.relative_level and (
module_name == u"cython" or module_name.startswith(u"cython.")):
+ self._check_valid_cython_module(node.pos, module_name)
submodule = (module_name + u".")[7:]
newimp = []
-
- for pos, name, as_name, kind in node.imported_names:
+ for pos, name, as_name in node.imported_names:
full_name = submodule + name
qualified_name = u"cython." + full_name
if self.is_parallel_directive(qualified_name, node.pos):
@@ -993,15 +1034,12 @@ class InterpretCompilerDirectives(CythonTransform):
self.parallel_directives[as_name or name] = qualified_name
elif self.is_cython_directive(full_name):
self.directive_names[as_name or name] = full_name
- if kind is not None:
- self.context.nonfatal_error(PostParseError(pos,
- "Compiler directive imports must be plain imports"))
elif full_name in ['dataclasses', 'typing']:
self.directive_names[as_name or name] = full_name
# unlike many directives, still treat it as a regular module
- newimp.append((pos, name, as_name, kind))
+ newimp.append((pos, name, as_name))
else:
- newimp.append((pos, name, as_name, kind))
+ newimp.append((pos, name, as_name))
if not newimp:
return None
@@ -1016,10 +1054,11 @@ class InterpretCompilerDirectives(CythonTransform):
imported_names = []
for name, name_node in node.items:
imported_names.append(
- (name_node.pos, name, None if name == name_node.name else name_node.name, None))
+ (name_node.pos, name, None if name == name_node.name else name_node.name))
return self._create_cimport_from_import(
node.pos, module_name, import_node.level, imported_names)
elif module_name == u"cython" or module_name.startswith(u"cython."):
+ self._check_valid_cython_module(import_node.module_name.pos, module_name)
submodule = (module_name + u".")[7:]
newimp = []
for name, name_node in node.items:
@@ -1054,14 +1093,13 @@ class InterpretCompilerDirectives(CythonTransform):
module_name=dotted_name,
as_name=as_name,
is_absolute=level == 0)
- for pos, dotted_name, as_name, _ in imported_names
+ for pos, dotted_name, as_name in imported_names
]
def visit_SingleAssignmentNode(self, node):
if isinstance(node.rhs, ExprNodes.ImportNode):
module_name = node.rhs.module_name.value
- is_special_module = (module_name + u".").startswith((u"cython.parallel.", u"cython.cimports."))
- if module_name != u"cython" and not is_special_module:
+ if module_name != u"cython" and not module_name.startswith("cython."):
return node
node = Nodes.CImportStatNode(node.pos, module_name=module_name, as_name=node.lhs.name)
@@ -1210,7 +1248,7 @@ class InterpretCompilerDirectives(CythonTransform):
return (optname, directivetype(optname, str(args[0].value)))
elif directivetype is Options.DEFER_ANALYSIS_OF_ARGUMENTS:
# signal to pass things on without processing
- return (optname, (args, kwds.as_python_dict()))
+ return (optname, (args, kwds.as_python_dict() if kwds else {}))
else:
assert False
@@ -1303,8 +1341,7 @@ class InterpretCompilerDirectives(CythonTransform):
name, value = directive
if self.directives.get(name, object()) != value:
directives.append(directive)
- if (directive[0] == 'staticmethod' or
- (directive[0] == 'dataclasses.dataclass' and scope_name == 'class')):
+ if directive[0] == 'staticmethod':
both.append(dec)
# Adapt scope type based on decorators that change it.
if directive[0] == 'cclass' and scope_name == 'class':
@@ -1314,10 +1351,11 @@ class InterpretCompilerDirectives(CythonTransform):
if realdecs and (scope_name == 'cclass' or
isinstance(node, (Nodes.CClassDefNode, Nodes.CVarDefNode))):
for realdec in realdecs:
+ dec_pos = realdec.pos
realdec = realdec.decorator
if ((realdec.is_name and realdec.name == "dataclass") or
(realdec.is_attribute and realdec.attribute == "dataclass")):
- error(realdec.pos,
+ error(dec_pos,
"Use '@cython.dataclasses.dataclass' on cdef classes to create a dataclass")
# Note - arbitrary C function decorators are caught later in DecoratorTransform
raise PostParseError(realdecs[0].pos, "Cdef functions/classes cannot take arbitrary decorators.")
@@ -1607,6 +1645,128 @@ class WithTransform(VisitorTransform, SkipDeclarations):
visit_Node = VisitorTransform.recurse_to_children
+class _GeneratorExpressionArgumentsMarker(TreeVisitor, SkipDeclarations):
+ # called from "MarkClosureVisitor"
+ def __init__(self, gen_expr):
+ super(_GeneratorExpressionArgumentsMarker, self).__init__()
+ self.gen_expr = gen_expr
+
+ def visit_ExprNode(self, node):
+ if not node.is_literal:
+ # Don't bother tagging literal nodes
+ assert (not node.generator_arg_tag) # nobody has tagged this first
+ node.generator_arg_tag = self.gen_expr
+ self.visitchildren(node)
+
+ def visit_Node(self, node):
+ # We're only interested in the expressions that make up the iterator sequence,
+ # so don't go beyond ExprNodes (e.g. into ForFromStatNode).
+ return
+
+ def visit_GeneratorExpressionNode(self, node):
+ node.generator_arg_tag = self.gen_expr
+ # don't visit children, can't handle overlapping tags
+ # (and assume generator expressions don't end up optimized out in a way
+ # that would require overlapping tags)
+
+
+class _HandleGeneratorArguments(VisitorTransform, SkipDeclarations):
+ # used from within CreateClosureClasses
+
+ def __call__(self, node):
+ from . import Visitor
+ assert isinstance(node, ExprNodes.GeneratorExpressionNode)
+ self.gen_node = node
+
+ self.args = list(node.def_node.args)
+ self.call_parameters = list(node.call_parameters)
+ self.tag_count = 0
+ self.substitutions = {}
+
+ self.visitchildren(node)
+
+ for k, v in self.substitutions.items():
+ # doing another search for replacements here (at the end) allows us to sweep up
+ # CloneNodes too (which are often generated by the optimizer)
+ # (it could arguably be done more efficiently with a single traversal though)
+ Visitor.recursively_replace_node(node, k, v)
+
+ node.def_node.args = self.args
+ node.call_parameters = self.call_parameters
+ return node
+
+ def visit_GeneratorExpressionNode(self, node):
+ # a generator can also be substituted itself, so handle that case
+ new_node = self._handle_ExprNode(node, do_visit_children=False)
+ # However do not traverse into it. A new _HandleGeneratorArguments visitor will be used
+ # elsewhere to do that.
+ return node
+
+ def _handle_ExprNode(self, node, do_visit_children):
+ if (node.generator_arg_tag is not None and self.gen_node is not None and
+ self.gen_node == node.generator_arg_tag):
+ pos = node.pos
+ # The reason for using ".x" as the name is that this is how CPython
+ # tracks internal variables in loops (e.g.
+ # { locals() for v in range(10) }
+ # will produce "v" and ".0"). We don't replicate this behaviour completely
+ # but use it as a starting point
+ name_source = self.tag_count
+ self.tag_count += 1
+ name = EncodedString(".{0}".format(name_source))
+ def_node = self.gen_node.def_node
+ if not def_node.local_scope.lookup_here(name):
+ from . import Symtab
+ cname = EncodedString(Naming.genexpr_arg_prefix + Symtab.punycodify_name(str(name_source)))
+ name_decl = Nodes.CNameDeclaratorNode(pos=pos, name=name)
+ type = node.type
+ if type.is_reference and not type.is_fake_reference:
+ # It isn't obvious whether the right thing to do would be to capture by reference or by
+ # value (C++ itself doesn't know either for lambda functions and forces a choice).
+ # However, capture by reference involves converting to FakeReference which would require
+ # re-analysing AttributeNodes. Therefore I've picked capture-by-value out of convenience
+ # TODO - could probably be optimized by making the arg a reference but the closure not
+ # (see https://github.com/cython/cython/issues/2468)
+ type = type.ref_base_type
+
+ name_decl.type = type
+ new_arg = Nodes.CArgDeclNode(pos=pos, declarator=name_decl,
+ base_type=None, default=None, annotation=None)
+ new_arg.name = name_decl.name
+ new_arg.type = type
+
+ self.args.append(new_arg)
+ node.generator_arg_tag = None # avoid the possibility of this being caught again
+ self.call_parameters.append(node)
+ new_arg.entry = def_node.declare_argument(def_node.local_scope, new_arg)
+ new_arg.entry.cname = cname
+ new_arg.entry.in_closure = True
+
+ if do_visit_children:
+            # now visit the node's children (but clear self.gen_node to prevent further
+            # argument substitution)
+ gen_node, self.gen_node = self.gen_node, None
+ self.visitchildren(node)
+ self.gen_node = gen_node
+
+ # replace the node inside the generator with a looked-up name
+ # (initialized_check can safely be False because the source variable will be checked
+ # before it is captured if the check is required)
+ name_node = ExprNodes.NameNode(pos, name=name, initialized_check=False)
+ name_node.entry = self.gen_node.def_node.gbody.local_scope.lookup(name_node.name)
+ name_node.type = name_node.entry.type
+ self.substitutions[node] = name_node
+ return name_node
+ if do_visit_children:
+ self.visitchildren(node)
+ return node
+
+ def visit_ExprNode(self, node):
+ return self._handle_ExprNode(node, True)
+
+ visit_Node = VisitorTransform.recurse_to_children
+
+
class DecoratorTransform(ScopeTrackingTransform, SkipDeclarations):
"""
Transforms method decorators in cdef classes into nested calls or properties.
@@ -2062,22 +2222,10 @@ if VALUE is not None:
if not e.type.is_pyobject:
e.type.create_to_py_utility_code(env)
e.type.create_from_py_utility_code(env)
- all_members_names = sorted([e.name for e in all_members])
-
- # Cython 0.x used MD5 for the checksum, which a few Python installations remove for security reasons.
- # SHA-256 should be ok for years to come, but early Cython 3.0 alpha releases used SHA-1,
- # which may not be.
- checksum_algos = [hashlib.sha256, hashlib.sha1]
- try:
- checksum_algos.append(hashlib.md5)
- except AttributeError:
- pass
- member_names_string = ' '.join(all_members_names).encode('utf-8')
- checksums = [
- '0x' + mkchecksum(member_names_string).hexdigest()[:7]
- for mkchecksum in checksum_algos
- ]
+ all_members_names = [e.name for e in all_members]
+ checksums = _calculate_pickle_checksums(all_members_names)
+
unpickle_func_name = '__pyx_unpickle_%s' % node.punycode_class_name
# TODO(robertwb): Move the state into the third argument
@@ -2330,7 +2478,7 @@ if VALUE is not None:
env = self.current_env()
node.analyse_declarations(env)
# the node may or may not have a local scope
- if node.has_local_scope:
+ if node.expr_scope:
self.seen_vars_stack.append(set(self.seen_vars_stack[-1]))
self.enter_scope(node, node.expr_scope)
node.analyse_scoped_declarations(node.expr_scope)
@@ -2338,6 +2486,7 @@ if VALUE is not None:
self.exit_scope()
self.seen_vars_stack.pop()
else:
+
node.analyse_scoped_declarations(env)
self.visitchildren(node)
return node
@@ -2494,6 +2643,24 @@ if VALUE is not None:
return node
+def _calculate_pickle_checksums(member_names):
+ # Cython 0.x used MD5 for the checksum, which a few Python installations remove for security reasons.
+ # SHA-256 should be ok for years to come, but early Cython 3.0 alpha releases used SHA-1,
+ # which may not be.
+ member_names_string = ' '.join(member_names).encode('utf-8')
+ hash_kwargs = {'usedforsecurity': False} if sys.version_info >= (3, 9) else {}
+ checksums = []
+ for algo_name in ['sha256', 'sha1', 'md5']:
+ try:
+ mkchecksum = getattr(hashlib, algo_name)
+ checksum = mkchecksum(member_names_string, **hash_kwargs).hexdigest()
+ except (AttributeError, ValueError):
+ # The algorithm (i.e. MD5) might not be there at all, or might be blocked at runtime.
+ continue
+ checksums.append('0x' + checksum[:7])
+ return checksums
+
+
class CalculateQualifiedNamesTransform(EnvTransform):
"""
Calculate and store the '__qualname__' and the global
@@ -2885,8 +3052,7 @@ class RemoveUnreachableCode(CythonTransform):
if not self.current_directives['remove_unreachable']:
return node
self.visitchildren(node)
- for idx, stat in enumerate(node.stats):
- idx += 1
+ for idx, stat in enumerate(node.stats, 1):
if stat.is_terminator:
if idx < len(node.stats):
if self.current_directives['warn.unreachable']:
@@ -2985,6 +3151,8 @@ class YieldNodeCollector(TreeVisitor):
class MarkClosureVisitor(CythonTransform):
+    # In addition to marking closures this is also responsible for finding parts of the
+    # generator iterable and marking them
def visit_ModuleNode(self, node):
self.needs_closure = False
@@ -3055,6 +3223,19 @@ class MarkClosureVisitor(CythonTransform):
self.needs_closure = True
return node
+ def visit_GeneratorExpressionNode(self, node):
+ node = self.visit_LambdaNode(node)
+ if not isinstance(node.loop, Nodes._ForInStatNode):
+ # Possibly should handle ForFromStatNode
+ # but for now do nothing
+ return node
+ itseq = node.loop.iterator.sequence
+ # literals do not need replacing with an argument
+ if itseq.is_literal:
+ return node
+ _GeneratorExpressionArgumentsMarker(node).visit(itseq)
+ return node
+
class CreateClosureClasses(CythonTransform):
# Output closure classes in module scope for all functions
@@ -3199,6 +3380,10 @@ class CreateClosureClasses(CythonTransform):
self.visitchildren(node)
return node
+ def visit_GeneratorExpressionNode(self, node):
+ node = _HandleGeneratorArguments()(node)
+ return self.visit_LambdaNode(node)
+
class InjectGilHandling(VisitorTransform, SkipDeclarations):
"""
diff --git a/Cython/Compiler/Parsing.pxd b/Cython/Compiler/Parsing.pxd
index a25652e2c..997cdf513 100644
--- a/Cython/Compiler/Parsing.pxd
+++ b/Cython/Compiler/Parsing.pxd
@@ -21,11 +21,9 @@ cdef p_ident_list(PyrexScanner s)
cdef tuple p_binop_operator(PyrexScanner s)
cdef p_binop_expr(PyrexScanner s, ops, p_sub_expr_func p_sub_expr)
-cdef p_lambdef(PyrexScanner s, bint allow_conditional=*)
-cdef p_lambdef_nocond(PyrexScanner s)
+cdef p_lambdef(PyrexScanner s)
cdef p_test(PyrexScanner s)
cdef p_test_allow_walrus_after(PyrexScanner s)
-cdef p_test_nocond(PyrexScanner s)
cdef p_namedexpr_test(PyrexScanner s)
cdef p_or_test(PyrexScanner s)
cdef p_rassoc_binop_expr(PyrexScanner s, unicode op, p_sub_expr_func p_subexpr)
@@ -114,7 +112,7 @@ cdef p_return_statement(PyrexScanner s)
cdef p_raise_statement(PyrexScanner s)
cdef p_import_statement(PyrexScanner s)
cdef p_from_import_statement(PyrexScanner s, bint first_statement = *)
-cdef p_imported_name(PyrexScanner s, bint is_cimport)
+cdef p_imported_name(PyrexScanner s)
cdef p_dotted_name(PyrexScanner s, bint as_allowed)
cdef p_as_name(PyrexScanner s)
cdef p_assert_statement(PyrexScanner s)
@@ -134,6 +132,8 @@ cdef p_except_clause(PyrexScanner s)
cdef p_include_statement(PyrexScanner s, ctx)
cdef p_with_statement(PyrexScanner s)
cdef p_with_items(PyrexScanner s, bint is_async=*)
+cdef p_with_items_list(PyrexScanner s, bint is_async)
+cdef tuple p_with_item(PyrexScanner s, bint is_async)
cdef p_with_template(PyrexScanner s)
cdef p_simple_statement(PyrexScanner s, bint first_statement = *)
cdef p_simple_statement_list(PyrexScanner s, ctx, bint first_statement = *)
@@ -159,7 +159,6 @@ cdef bint looking_at_name(PyrexScanner s) except -2
cdef object looking_at_expr(PyrexScanner s)# except -2
cdef bint looking_at_base_type(PyrexScanner s) except -2
cdef bint looking_at_dotted_name(PyrexScanner s) except -2
-cdef bint looking_at_call(PyrexScanner s) except -2
cdef p_sign_and_longness(PyrexScanner s)
cdef p_opt_cname(PyrexScanner s)
cpdef p_c_declarator(PyrexScanner s, ctx = *, bint empty = *, bint is_type = *, bint cmethod_flag = *,
@@ -171,7 +170,7 @@ cdef p_c_simple_declarator(PyrexScanner s, ctx, bint empty, bint is_type, bint c
bint assignable, bint nonempty)
cdef p_nogil(PyrexScanner s)
cdef p_with_gil(PyrexScanner s)
-cdef p_exception_value_clause(PyrexScanner s)
+cdef p_exception_value_clause(PyrexScanner s, ctx)
cpdef p_c_arg_list(PyrexScanner s, ctx = *, bint in_pyfunc = *, bint cmethod_flag = *,
bint nonempty_declarators = *, bint kw_only = *, bint annotated = *)
cdef p_optional_ellipsis(PyrexScanner s)
diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py
index 291e1ceea..94fc2eca1 100644
--- a/Cython/Compiler/Parsing.py
+++ b/Cython/Compiler/Parsing.py
@@ -109,7 +109,7 @@ def p_binop_expr(s, ops, p_sub_expr):
#lambdef: 'lambda' [varargslist] ':' test
-def p_lambdef(s, allow_conditional=True):
+def p_lambdef(s):
# s.sy == 'lambda'
pos = s.position()
s.next()
@@ -120,20 +120,12 @@ def p_lambdef(s, allow_conditional=True):
args, star_arg, starstar_arg = p_varargslist(
s, terminator=':', annotated=False)
s.expect(':')
- if allow_conditional:
- expr = p_test(s)
- else:
- expr = p_test_nocond(s)
+ expr = p_test(s)
return ExprNodes.LambdaNode(
pos, args = args,
star_arg = star_arg, starstar_arg = starstar_arg,
result_expr = expr)
-#lambdef_nocond: 'lambda' [varargslist] ':' test_nocond
-
-def p_lambdef_nocond(s):
- return p_lambdef(s)
-
#test: or_test ['if' or_test 'else' test] | lambdef
def p_test(s):
@@ -162,15 +154,6 @@ def p_test_allow_walrus_after(s):
else:
return expr
-
-#test_nocond: or_test | lambdef_nocond
-
-def p_test_nocond(s):
- if s.sy == 'lambda':
- return p_lambdef_nocond(s)
- else:
- return p_or_test(s)
-
def p_namedexpr_test(s):
# defined in the LL parser as
# namedexpr_test: test [':=' test]
@@ -1364,7 +1347,12 @@ def p_comp_if(s, body):
# s.sy == 'if'
pos = s.position()
s.next()
- test = p_test_nocond(s)
+ # Note that Python 3.9+ is actually more restrictive here and Cython now follows
+ # the Python 3.9+ behaviour: https://github.com/python/cpython/issues/86014
+ # On Python <3.9 `[i for i in range(10) if lambda: i if True else 1]` was disallowed
+ # but `[i for i in range(10) if lambda: i]` was allowed.
+ # On Python >=3.9 they're both disallowed.
+ test = p_or_test(s)
return Nodes.IfStatNode(pos,
if_clauses = [Nodes.IfClauseNode(pos, condition = test,
body = p_comp_iter(s, body))],
@@ -1831,18 +1819,18 @@ def p_from_import_statement(s, first_statement = 0):
is_cimport = kind == 'cimport'
is_parenthesized = False
if s.sy == '*':
- imported_names = [(s.position(), s.context.intern_ustring("*"), None, None)]
+ imported_names = [(s.position(), s.context.intern_ustring("*"), None)]
s.next()
else:
if s.sy == '(':
is_parenthesized = True
s.next()
- imported_names = [p_imported_name(s, is_cimport)]
+ imported_names = [p_imported_name(s)]
while s.sy == ',':
s.next()
if is_parenthesized and s.sy == ')':
break
- imported_names.append(p_imported_name(s, is_cimport))
+ imported_names.append(p_imported_name(s))
if is_parenthesized:
s.expect(')')
if dotted_name == '__future__':
@@ -1851,7 +1839,7 @@ def p_from_import_statement(s, first_statement = 0):
elif level:
s.error("invalid syntax")
else:
- for (name_pos, name, as_name, kind) in imported_names:
+ for (name_pos, name, as_name) in imported_names:
if name == "braces":
s.error("not a chance", name_pos)
break
@@ -1862,7 +1850,7 @@ def p_from_import_statement(s, first_statement = 0):
break
s.context.future_directives.add(directive)
return Nodes.PassStatNode(pos)
- elif kind == 'cimport':
+ elif is_cimport:
return Nodes.FromCImportStatNode(
pos, module_name=dotted_name,
relative_level=level,
@@ -1870,7 +1858,7 @@ def p_from_import_statement(s, first_statement = 0):
else:
imported_name_strings = []
items = []
- for (name_pos, name, as_name, kind) in imported_names:
+ for (name_pos, name, as_name) in imported_names:
imported_name_strings.append(
ExprNodes.IdentifierStringNode(name_pos, value=name))
items.append(
@@ -1885,18 +1873,11 @@ def p_from_import_statement(s, first_statement = 0):
items = items)
-imported_name_kinds = cython.declare(frozenset, frozenset((
- 'class', 'struct', 'union')))
-
-def p_imported_name(s, is_cimport):
+def p_imported_name(s):
pos = s.position()
- kind = None
- if is_cimport and s.systring in imported_name_kinds:
- kind = s.systring
- s.next()
name = p_ident(s)
as_name = p_as_name(s)
- return (pos, name, as_name, kind)
+ return (pos, name, as_name)
def p_dotted_name(s, as_allowed):
@@ -2164,6 +2145,51 @@ def p_with_statement(s):
def p_with_items(s, is_async=False):
+ """
+ Copied from CPython:
+ | 'with' '(' a[asdl_withitem_seq*]=','.with_item+ ','? ')' ':' b=block {
+ _PyAST_With(a, b, NULL, EXTRA) }
+ | 'with' a[asdl_withitem_seq*]=','.with_item+ ':' tc=[TYPE_COMMENT] b=block {
+ _PyAST_With(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) }
+ Therefore the first thing to try is the bracket-enclosed
+ version and if that fails try the regular version
+ """
+ brackets_succeeded = False
+ items = () # unused, but static analysis fails to track that below
+ if s.sy == '(':
+ with tentatively_scan(s) as errors:
+ s.next()
+ items = p_with_items_list(s, is_async)
+ s.expect(")")
+ brackets_succeeded = not errors
+ if not brackets_succeeded:
+ # try the non-bracket version
+ items = p_with_items_list(s, is_async)
+ body = p_suite(s)
+ for cls, pos, kwds in reversed(items):
+ # construct the actual nodes now that we know what the body is
+ body = cls(pos, body=body, **kwds)
+ return body
+
+
+def p_with_items_list(s, is_async):
+ items = []
+ while True:
+ items.append(p_with_item(s, is_async))
+ if s.sy != ",":
+ break
+ s.next()
+ if s.sy == ")":
+ # trailing commas allowed
+ break
+ return items
+
+
+def p_with_item(s, is_async):
+ # In contrast to most parsing functions, this returns a tuple of
+ # class, pos, kwd_dict
+ # This is because GILStatNode does a reasonable amount of initialization in its
+ # constructor, and requires "body" to be set, which we don't currently have
pos = s.position()
if not s.in_python_file and s.sy == 'IDENT' and s.systring in ('nogil', 'gil'):
if is_async:
@@ -2178,24 +2204,14 @@ def p_with_items(s, is_async=False):
condition = p_test(s)
s.expect(')')
- if s.sy == ',':
- s.next()
- body = p_with_items(s)
- else:
- body = p_suite(s)
- return Nodes.GILStatNode(pos, state=state, body=body, condition=condition)
+ return Nodes.GILStatNode, pos, {"state": state, "condition": condition}
else:
manager = p_test(s)
target = None
if s.sy == 'IDENT' and s.systring == 'as':
s.next()
target = p_starred_expr(s)
- if s.sy == ',':
- s.next()
- body = p_with_items(s, is_async=is_async)
- else:
- body = p_suite(s)
- return Nodes.WithStatNode(pos, manager=manager, target=target, body=body, is_async=is_async)
+ return Nodes.WithStatNode, pos, {"manager": manager, "target": target, "is_async": is_async}
def p_with_template(s):
@@ -2843,16 +2859,6 @@ def looking_at_dotted_name(s):
else:
return 0
-def looking_at_call(s):
- "See if we're looking at a.b.c("
- # Don't mess up the original position, so save and restore it.
- # Unfortunately there's no good way to handle this, as a subsequent call
- # to next() will not advance the position until it reads a new token.
- position = s.start_line, s.start_col
- result = looking_at_expr(s) == u'('
- if not result:
- s.start_line, s.start_col = position
- return result
basic_c_type_names = cython.declare(frozenset, frozenset((
"void", "char", "int", "float", "double", "bint")))
@@ -2957,7 +2963,17 @@ def p_c_func_declarator(s, pos, ctx, base, cmethod_flag):
ellipsis = p_optional_ellipsis(s)
s.expect(')')
nogil = p_nogil(s)
- exc_val, exc_check = p_exception_value_clause(s)
+ exc_val, exc_check, exc_clause = p_exception_value_clause(s, ctx)
+ if nogil and exc_clause:
+ warning(
+ s.position(),
+ "The keyword 'nogil' should appear at the end of the "
+ "function signature line. Placing it before 'except' "
+ "or 'noexcept' will be disallowed in a future version "
+ "of Cython.",
+ level=2
+ )
+ nogil = nogil or p_nogil(s)
with_gil = p_with_gil(s)
return Nodes.CFuncDeclaratorNode(pos,
base = base, args = args, has_varargs = ellipsis,
@@ -3067,18 +3083,54 @@ def p_with_gil(s):
else:
return 0
-def p_exception_value_clause(s):
+def p_exception_value_clause(s, ctx):
+ """
+ Parse exception value clause.
+
+ Maps clauses to exc_check / exc_value / exc_clause as follows:
+ ______________________________________________________________________
+ | | | | |
+ | Clause | exc_check | exc_value | exc_clause |
+ | ___________________________ | ___________ | ___________ | __________ |
+ | | | | |
+ | <nothing> (default func.) | True | None | False |
+ | <nothing> (cdef extern) | False | None | False |
+ | noexcept | False | None | True |
+ | except <val> | False | <val> | True |
+ | except? <val> | True | <val> | True |
+ | except * | True | None | True |
+ | except + | '+' | None | True |
+ | except +* | '+' | '*' | True |
+ | except +<PyErr> | '+' | <PyErr> | True |
+ | ___________________________ | ___________ | ___________ | __________ |
+
+ Note that the only reason we need `exc_clause` is to raise a
+ warning when `'except'` or `'noexcept'` is placed after the
+ `'nogil'` keyword.
+ """
+ exc_clause = False
exc_val = None
- exc_check = 0
- if s.sy == 'except':
+ if ctx.visibility == 'extern':
+ exc_check = False
+ else:
+ exc_check = True
+
+ if s.sy == 'IDENT' and s.systring == 'noexcept':
+ exc_clause = True
+ s.next()
+ exc_check = False
+ elif s.sy == 'except':
+ exc_clause = True
s.next()
if s.sy == '*':
- exc_check = 1
+ exc_check = True
s.next()
elif s.sy == '+':
exc_check = '+'
s.next()
- if s.sy == 'IDENT':
+ if p_nogil(s):
+ ctx.nogil = True
+ elif s.sy == 'IDENT':
name = s.systring
s.next()
exc_val = p_name(s, name)
@@ -3087,10 +3139,13 @@ def p_exception_value_clause(s):
s.next()
else:
if s.sy == '?':
- exc_check = 1
+ exc_check = True
s.next()
+ else:
+ exc_check = False
+ # exc_val can be non-None even if exc_check is False, c.f. "except -1"
exc_val = p_test(s)
- return exc_val, exc_check
+ return exc_val, exc_check, exc_clause
c_arg_list_terminators = cython.declare(frozenset, frozenset((
'*', '**', '...', ')', ':', '/')))
@@ -3529,14 +3584,7 @@ def p_decorators(s):
while s.sy == '@':
pos = s.position()
s.next()
- decstring = p_dotted_name(s, as_allowed=0)[2]
- names = decstring.split('.')
- decorator = ExprNodes.NameNode(pos, name=s.context.intern_ustring(names[0]))
- for name in names[1:]:
- decorator = ExprNodes.AttributeNode(
- pos, attribute=s.context.intern_ustring(name), obj=decorator)
- if s.sy == '(':
- decorator = p_call(s, decorator)
+ decorator = p_namedexpr_test(s)
decorators.append(Nodes.DecoratorNode(pos, decorator=decorator))
s.expect_newline("Expected a newline after decorator")
return decorators
@@ -3853,6 +3901,9 @@ def p_compiler_directive_comments(s):
for name in new_directives:
if name not in result:
pass
+ elif Options.directive_types.get(name) is list:
+ result[name] += new_directives[name]
+ new_directives[name] = result[name]
elif new_directives[name] == result[name]:
warning(pos, "Duplicate directive found: %s" % (name,))
else:
diff --git a/Cython/Compiler/Pipeline.py b/Cython/Compiler/Pipeline.py
index 3a5c42352..2fd3a1d3f 100644
--- a/Cython/Compiler/Pipeline.py
+++ b/Cython/Compiler/Pipeline.py
@@ -231,14 +231,15 @@ def create_pipeline(context, mode, exclude_classes=()):
return stages
def create_pyx_pipeline(context, options, result, py=False, exclude_classes=()):
- if py:
- mode = 'py'
- else:
- mode = 'pyx'
+ mode = 'py' if py else 'pyx'
+
test_support = []
+ ctest_support = []
if options.evaluate_tree_assertions:
from ..TestUtils import TreeAssertVisitor
- test_support.append(TreeAssertVisitor())
+ test_validator = TreeAssertVisitor()
+ test_support.append(test_validator)
+ ctest_support.append(test_validator.create_c_file_validator())
if options.gdb_debug:
from ..Debugger import DebugWriter # requires Py2.5+
@@ -257,7 +258,9 @@ def create_pyx_pipeline(context, options, result, py=False, exclude_classes=()):
inject_utility_code_stage_factory(context),
abort_on_errors],
debug_transform,
- [generate_pyx_code_stage_factory(options, result)]))
+ [generate_pyx_code_stage_factory(options, result)],
+ ctest_support,
+ ))
def create_pxd_pipeline(context, scope, module_name):
from .CodeGeneration import ExtractPxdCode
diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py
index 79e144ed1..da30809a3 100644
--- a/Cython/Compiler/PyrexTypes.py
+++ b/Cython/Compiler/PyrexTypes.py
@@ -13,6 +13,7 @@ try:
except NameError:
from functools import reduce
from functools import partial
+from itertools import product
from Cython.Utils import cached_function
from .Code import UtilityCode, LazyUtilityCode, TempitaUtilityCode
@@ -1522,6 +1523,7 @@ class PyExtensionType(PyObjectType):
# is_external boolean Defined in a extern block
# check_size 'warn', 'error', 'ignore' What to do if tp_basicsize does not match
# dataclass_fields OrderedDict nor None Used for inheriting from dataclasses
+ # multiple_bases boolean Does this class have multiple bases
is_extension_type = 1
has_attributes = 1
@@ -1529,6 +1531,7 @@ class PyExtensionType(PyObjectType):
objtypedef_cname = None
dataclass_fields = None
+ multiple_bases = False
def __init__(self, name, typedef_flag, base_type, is_external=0, check_size=None):
self.name = name
@@ -1835,7 +1838,27 @@ class FusedType(CType):
for t in types:
if t.is_fused:
# recursively merge in subtypes
- for subtype in t.types:
+ if isinstance(t, FusedType):
+ t_types = t.types
+ else:
+ # handle types that aren't a fused type themselves but contain fused types
+ # for example a C++ template where the template type is fused.
+ t_fused_types = t.get_fused_types()
+ t_types = []
+ for substitution in product(
+ *[fused_type.types for fused_type in t_fused_types]
+ ):
+ t_types.append(
+ t.specialize(
+ {
+ fused_type: sub
+ for fused_type, sub in zip(
+ t_fused_types, substitution
+ )
+ }
+ )
+ )
+ for subtype in t_types:
if subtype not in flattened_types:
flattened_types.append(subtype)
elif t not in flattened_types:
@@ -2790,6 +2813,8 @@ class CReferenceBaseType(BaseType):
# Common base type for C reference and C++ rvalue reference types.
+ subtypes = ['ref_base_type']
+
def __init__(self, base_type):
self.ref_base_type = base_type
@@ -3134,8 +3159,10 @@ class CFuncType(CType):
if (pyrex or for_display) and not self.return_type.is_pyobject:
if self.exception_value and self.exception_check:
trailer = " except? %s" % self.exception_value
- elif self.exception_value:
+ elif self.exception_value and not self.exception_check:
trailer = " except %s" % self.exception_value
+ elif not self.exception_value and not self.exception_check:
+ trailer = " noexcept"
elif self.exception_check == '+':
trailer = " except +"
elif self.exception_check and for_display:
diff --git a/Cython/Compiler/Symtab.py b/Cython/Compiler/Symtab.py
index 1500c7441..984e10f05 100644
--- a/Cython/Compiler/Symtab.py
+++ b/Cython/Compiler/Symtab.py
@@ -348,6 +348,7 @@ class Scope(object):
# is_passthrough boolean Outer scope is passed directly
# is_cpp_class_scope boolean Is a C++ class scope
# is_property_scope boolean Is a extension type property scope
+    #  is_c_dataclass_scope  boolean or "frozen"  Is a cython.dataclasses.dataclass
# scope_prefix string Disambiguator for C names
# in_cinclude boolean Suppress C declaration code
# qualified_name string "modname" or "modname.classname"
@@ -368,6 +369,7 @@ class Scope(object):
is_cpp_class_scope = 0
is_property_scope = 0
is_module_scope = 0
+ is_c_dataclass_scope = False
is_internal = 0
scope_prefix = ""
in_cinclude = 0
@@ -1296,19 +1298,13 @@ class ModuleScope(Scope):
is_cython_builtin = 0
old_style_globals = 0
- def __init__(self, name, parent_module, context):
+ def __init__(self, name, parent_module, context, is_package=False):
from . import Builtin
self.parent_module = parent_module
outer_scope = Builtin.builtin_scope
Scope.__init__(self, name, outer_scope, parent_module)
- if name == "__init__":
- # Treat Spam/__init__.pyx specially, so that when Python loads
- # Spam/__init__.so, initSpam() is defined.
- self.module_name = parent_module.module_name
- self.is_package = True
- else:
- self.module_name = name
- self.is_package = False
+ self.is_package = is_package
+ self.module_name = name
self.module_name = EncodedString(self.module_name)
self.context = context
self.module_cname = Naming.module_cname
@@ -1421,9 +1417,16 @@ class ModuleScope(Scope):
# explicit relative cimport
# error of going beyond top-level is handled in cimport node
relative_to = self
- while relative_level > 0 and relative_to:
+
+ top_level = 1 if self.is_package else 0
+ # * top_level == 1 when file is __init__.pyx, current package (relative_to) is the current module
+ # i.e. dot in `from . import ...` points to the current package
+ # * top_level == 0 when file is regular module, current package (relative_to) is parent module
+ # i.e. dot in `from . import ...` points to the package where module is placed
+ while relative_level > top_level and relative_to:
relative_to = relative_to.parent_module
relative_level -= 1
+
elif relative_level != 0:
# -1 or None: try relative cimport first, then absolute
relative_to = self.parent_module
@@ -1433,7 +1436,7 @@ class ModuleScope(Scope):
return module_scope.context.find_module(
module_name, relative_to=relative_to, pos=pos, absolute_fallback=absolute_fallback)
- def find_submodule(self, name):
+ def find_submodule(self, name, as_package=False):
# Find and return scope for a submodule of this module,
# creating a new empty one if necessary. Doesn't parse .pxd.
if '.' in name:
@@ -1442,10 +1445,10 @@ class ModuleScope(Scope):
submodule = None
scope = self.lookup_submodule(name)
if not scope:
- scope = ModuleScope(name, parent_module=self, context=self.context)
+ scope = ModuleScope(name, parent_module=self, context=self.context, is_package=True if submodule else as_package)
self.module_entries[name] = scope
if submodule:
- scope = scope.find_submodule(submodule)
+ scope = scope.find_submodule(submodule, as_package=as_package)
return scope
def lookup_submodule(self, name):
@@ -1615,7 +1618,7 @@ class ModuleScope(Scope):
entry = self.lookup_here(name)
if entry and entry.defined_in_pxd:
if entry.visibility != "private":
- mangled_cname = self.mangle(Naming.var_prefix, name)
+ mangled_cname = self.mangle(Naming.func_prefix, name)
if entry.cname == mangled_cname:
cname = name
entry.cname = cname
@@ -2312,6 +2315,25 @@ class CClassScope(ClassScope):
"""
return self.needs_gc() and not self.directives.get('no_gc_clear', False)
+ def may_have_finalize(self):
+ """
+ This covers cases where we definitely have a __del__ function
+ and also cases where one of the base classes could have a __del__
+ function but we don't know.
+ """
+ current_type_scope = self
+ while current_type_scope:
+ del_entry = current_type_scope.lookup_here("__del__")
+ if del_entry and del_entry.is_special:
+ return True
+ if (current_type_scope.parent_type.is_extern or not current_type_scope.implemented or
+ current_type_scope.parent_type.multiple_bases):
+ # we don't know if we have __del__, so assume we do and call it
+ return True
+ current_base_type = current_type_scope.parent_type.base_type
+ current_type_scope = current_base_type.scope if current_base_type else None
+ return False
+
def get_refcounted_entries(self, include_weakref=False,
include_gc_simple=True):
py_attrs = []
@@ -2345,7 +2367,7 @@ class CClassScope(ClassScope):
type = py_object_type
else:
type = type.equivalent_type
- if "dataclasses.InitVar" in pytyping_modifiers and 'dataclasses.dataclass' not in self.directives:
+ if "dataclasses.InitVar" in pytyping_modifiers and not self.is_c_dataclass_scope:
error(pos, "Use of cython.dataclasses.InitVar does not make sense outside a dataclass")
if is_cdef:
@@ -2596,6 +2618,7 @@ class CClassScope(ClassScope):
base_entry.name, adapt(base_entry.cname),
base_entry.type, None, 'private')
entry.is_variable = 1
+ entry.is_inherited = True
entry.annotation = base_entry.annotation
self.inherited_var_entries.append(entry)
@@ -2746,7 +2769,7 @@ class CppClassScope(Scope):
if base_entry.name not in base_templates:
entry = self.declare_type(base_entry.name, base_entry.type,
base_entry.pos, base_entry.cname,
- base_entry.visibility)
+ base_entry.visibility, defining=False)
entry.is_inherited = 1
def specialize(self, values, type_entry):
diff --git a/Cython/Compiler/Tests/TestCmdLine.py b/Cython/Compiler/Tests/TestCmdLine.py
index 5953112dc..0961dfa03 100644
--- a/Cython/Compiler/Tests/TestCmdLine.py
+++ b/Cython/Compiler/Tests/TestCmdLine.py
@@ -1,7 +1,12 @@
import os
import sys
+import re
from unittest import TestCase
try:
+ from unittest.mock import patch, Mock
+except ImportError: # Py2
+ from mock import patch, Mock
+try:
from StringIO import StringIO
except ImportError:
from io import StringIO # doesn't accept 'str' in Py2
@@ -11,7 +16,15 @@ from ..CmdLine import parse_command_line
from .Utils import backup_Options, restore_Options, check_global_options
+unpatched_exists = os.path.exists
+
+def patched_exists(path):
+ # avoid the Cython command raising a file not found error
+ if path in ('source.pyx', 'file.pyx', 'file1.pyx', 'file2.pyx', 'file3.pyx', 'foo.pyx', 'bar.pyx'):
+ return True
+ return unpatched_exists(path)
+@patch('os.path.exists', new=Mock(side_effect=patched_exists))
class CmdLineParserTest(TestCase):
def setUp(self):
self._options_backup = backup_Options()
@@ -495,22 +508,62 @@ class CmdLineParserTest(TestCase):
self.check_default_global_options()
self.check_default_options(options, ['compiler_directives'])
+ def test_module_name(self):
+ options, sources = parse_command_line([
+ 'source.pyx'
+ ])
+ self.assertEqual(options.module_name, None)
+ self.check_default_global_options()
+ self.check_default_options(options)
+ options, sources = parse_command_line([
+ '--module-name', 'foo.bar',
+ 'source.pyx'
+ ])
+ self.assertEqual(options.module_name, 'foo.bar')
+ self.check_default_global_options()
+ self.check_default_options(options, ['module_name'])
+
def test_errors(self):
- def error(*args):
+ def error(args, regex=None):
old_stderr = sys.stderr
stderr = sys.stderr = StringIO()
try:
self.assertRaises(SystemExit, parse_command_line, list(args))
finally:
sys.stderr = old_stderr
- self.assertTrue(stderr.getvalue())
-
- error('-1')
- error('-I')
- error('--version=-a')
- error('--version=--annotate=true')
- error('--working')
- error('--verbose=1')
- error('--verbose=1')
- error('--cleanup')
- error('--debug-disposal-code-wrong-name', 'file3.pyx')
+ msg = stderr.getvalue()
+ err_msg = 'Message "{}"'.format(msg.strip())
+ self.assertTrue(msg.startswith('usage: '),
+                            '%s does not start with "usage: "' % err_msg)
+ self.assertTrue(': error: ' in msg,
+                            '%s does not contain ": error: "' % err_msg)
+ if regex:
+ self.assertTrue(re.search(regex, msg),
+ '%s does not match search "%s"' %
+ (err_msg, regex))
+
+ error(['-1'],
+ 'unknown option -1')
+ error(['-I'],
+ 'argument -I/--include-dir: expected one argument')
+ error(['--version=-a'],
+ "argument -V/--version: ignored explicit argument '-a'")
+ error(['--version=--annotate=true'],
+ "argument -V/--version: ignored explicit argument "
+ "'--annotate=true'")
+ error(['--working'],
+ "argument -w/--working: expected one argument")
+ error(['--verbose=1'],
+ "argument -v/--verbose: ignored explicit argument '1'")
+ error(['--cleanup'],
+ "argument --cleanup: expected one argument")
+ error(['--debug-disposal-code-wrong-name', 'file3.pyx'],
+ "unknown option --debug-disposal-code-wrong-name")
+ error(['--module-name', 'foo.pyx'],
+ "Need at least one source file")
+ error(['--module-name', 'foo.bar'],
+ "Need at least one source file")
+ error(['--module-name', 'foo.bar', 'foo.pyx', 'bar.pyx'],
+ "Only one source file allowed when using --module-name")
+ error(['--module-name', 'foo.bar', '--timestamps', 'foo.pyx'],
+ "Cannot use --module-name with --timestamps")
diff --git a/Cython/Compiler/Tests/TestParseTreeTransforms.py b/Cython/Compiler/Tests/TestParseTreeTransforms.py
index e6889f8f2..6e29263e5 100644
--- a/Cython/Compiler/Tests/TestParseTreeTransforms.py
+++ b/Cython/Compiler/Tests/TestParseTreeTransforms.py
@@ -1,7 +1,9 @@
-import os
+import os.path
+import unittest
from Cython.TestUtils import TransformTest
from Cython.Compiler.ParseTreeTransforms import *
+from Cython.Compiler.ParseTreeTransforms import _calculate_pickle_checksums
from Cython.Compiler.Nodes import *
from Cython.Compiler import Main, Symtab, Options
@@ -276,6 +278,11 @@ class TestDebugTransform(DebuggerTestCase):
raise
+class TestAnalyseDeclarationsTransform(unittest.TestCase):
+ def test_calculate_pickle_checksums(self):
+ checksums = _calculate_pickle_checksums(['member1', 'member2', 'member3'])
+ assert 2 <= len(checksums) <= 3, checksums # expecting ['0xc0af380' (MD5), '0x0c75bd4', '0xa7a7b94']
+
if __name__ == "__main__":
import unittest
diff --git a/Cython/Compiler/TypeInference.py b/Cython/Compiler/TypeInference.py
index 4ae3ab155..0ef651d24 100644
--- a/Cython/Compiler/TypeInference.py
+++ b/Cython/Compiler/TypeInference.py
@@ -104,10 +104,11 @@ class MarkParallelAssignments(EnvTransform):
is_special = False
sequence = node.iterator.sequence
target = node.target
+ iterator_scope = node.iterator.expr_scope or self.current_env()
if isinstance(sequence, ExprNodes.SimpleCallNode):
function = sequence.function
if sequence.self is None and function.is_name:
- entry = self.current_env().lookup(function.name)
+ entry = iterator_scope.lookup(function.name)
if not entry or entry.is_builtin:
if function.name == 'reversed' and len(sequence.args) == 1:
sequence = sequence.args[0]
@@ -115,7 +116,7 @@ class MarkParallelAssignments(EnvTransform):
if target.is_sequence_constructor and len(target.args) == 2:
iterator = sequence.args[0]
if iterator.is_name:
- iterator_type = iterator.infer_type(self.current_env())
+ iterator_type = iterator.infer_type(iterator_scope)
if iterator_type.is_builtin_type:
# assume that builtin types have a length within Py_ssize_t
self.mark_assignment(
@@ -127,7 +128,7 @@ class MarkParallelAssignments(EnvTransform):
if isinstance(sequence, ExprNodes.SimpleCallNode):
function = sequence.function
if sequence.self is None and function.is_name:
- entry = self.current_env().lookup(function.name)
+ entry = iterator_scope.lookup(function.name)
if not entry or entry.is_builtin:
if function.name in ('range', 'xrange'):
is_special = True
diff --git a/Cython/Compiler/TypeSlots.py b/Cython/Compiler/TypeSlots.py
index ea310a6d3..0bd550d8c 100644
--- a/Cython/Compiler/TypeSlots.py
+++ b/Cython/Compiler/TypeSlots.py
@@ -556,8 +556,7 @@ class TypeFlagsSlot(SlotDescriptor):
value += "|Py_TPFLAGS_BASETYPE"
if scope.needs_gc():
value += "|Py_TPFLAGS_HAVE_GC"
- entry = scope.lookup("__del__")
- if entry and entry.is_special:
+ if scope.may_have_finalize():
value += "|Py_TPFLAGS_HAVE_FINALIZE"
return value
@@ -966,8 +965,8 @@ class SlotTable(object):
# Added in release 2.2
# The following require the Py_TPFLAGS_HAVE_CLASS flag
- BinopSlot(binaryfunc, "nb_floor_divide", "__floordiv__", method_name_to_slot),
- BinopSlot(binaryfunc, "nb_true_divide", "__truediv__", method_name_to_slot),
+ BinopSlot(bf, "nb_floor_divide", "__floordiv__", method_name_to_slot),
+ BinopSlot(bf, "nb_true_divide", "__truediv__", method_name_to_slot),
MethodSlot(ibinaryfunc, "nb_inplace_floor_divide", "__ifloordiv__", method_name_to_slot),
MethodSlot(ibinaryfunc, "nb_inplace_true_divide", "__itruediv__", method_name_to_slot),
@@ -975,7 +974,7 @@ class SlotTable(object):
MethodSlot(unaryfunc, "nb_index", "__index__", method_name_to_slot),
# Added in release 3.5
- BinopSlot(binaryfunc, "nb_matrix_multiply", "__matmul__", method_name_to_slot,
+ BinopSlot(bf, "nb_matrix_multiply", "__matmul__", method_name_to_slot,
ifdef="PY_VERSION_HEX >= 0x03050000"),
MethodSlot(ibinaryfunc, "nb_inplace_matrix_multiply", "__imatmul__", method_name_to_slot,
ifdef="PY_VERSION_HEX >= 0x03050000"),
diff --git a/Cython/Compiler/Visitor.py b/Cython/Compiler/Visitor.py
index d9be14df1..92e2eb9c0 100644
--- a/Cython/Compiler/Visitor.py
+++ b/Cython/Compiler/Visitor.py
@@ -1,5 +1,5 @@
# cython: infer_types=True
-# cython: language_level=3
+# cython: language_level=3str
# cython: auto_pickle=False
#
@@ -80,7 +80,7 @@ class TreeVisitor(object):
def dump_node(self, node):
ignored = list(node.child_attrs or []) + [
- u'child_attrs', u'pos', u'gil_message', u'cpp_message', u'subexprs']
+ 'child_attrs', 'pos', 'gil_message', 'cpp_message', 'subexprs']
values = []
pos = getattr(node, 'pos', None)
if pos:
@@ -116,7 +116,7 @@ class TreeVisitor(object):
nodes = []
while hasattr(stacktrace, 'tb_frame'):
frame = stacktrace.tb_frame
- node = frame.f_locals.get(u'self')
+ node = frame.f_locals.get('self')
if isinstance(node, Nodes.Node):
code = frame.f_code
method_name = code.co_name
@@ -153,12 +153,12 @@ class TreeVisitor(object):
def find_handler(self, obj):
# to resolve, try entire hierarchy
cls = type(obj)
- pattern = "visit_%s"
mro = inspect.getmro(cls)
for mro_cls in mro:
- handler_method = getattr(self, pattern % mro_cls.__name__, None)
+ handler_method = getattr(self, "visit_" + mro_cls.__name__, None)
if handler_method is not None:
return handler_method
+
print(type(self), cls)
if self.access_path:
print(self.access_path)
@@ -306,8 +306,8 @@ class CythonTransform(VisitorTransform):
self.context = context
def __call__(self, node):
- from . import ModuleNode
- if isinstance(node, ModuleNode.ModuleNode):
+ from .ModuleNode import ModuleNode
+ if isinstance(node, ModuleNode):
self.current_directives = node.directives
return super(CythonTransform, self).__call__(node)
@@ -594,7 +594,7 @@ class MethodDispatcherTransform(EnvTransform):
# Python 2 and 3
return None
- call_type = has_kwargs and 'general' or 'simple'
+ call_type = 'general' if has_kwargs else 'simple'
handler = getattr(self, '_handle_%s_%s' % (call_type, match_name), None)
if handler is None:
handler = getattr(self, '_handle_any_%s' % match_name, None)
diff --git a/Cython/Coverage.py b/Cython/Coverage.py
index 7acd54c1f..147df8050 100644
--- a/Cython/Coverage.py
+++ b/Cython/Coverage.py
@@ -6,6 +6,44 @@ Requires the coverage package at least in version 4.0 (which added the plugin AP
This plugin requires the generated C sources to be available, next to the extension module.
It parses the C file and reads the original source files from it, which are stored in C comments.
It then reports a source file to coverage.py when it hits one of its lines during line tracing.
+
+Basically, Cython can (on request) emit explicit trace calls into the C code that it generates,
+and as a general human debugging helper, it always copies the current source code line
+(and its surrounding context) into the C files before it generates code for that line, e.g.
+
+::
+
+ /* "line_trace.pyx":147
+ * def cy_add_with_nogil(a,b):
+ * cdef int z, x=a, y=b # 1
+ * with nogil: # 2 # <<<<<<<<<<<<<<
+ * z = 0 # 3
+ * z += cy_add_nogil(x, y) # 4
+ */
+ __Pyx_TraceLine(147,1,__PYX_ERR(0, 147, __pyx_L4_error))
+ [C code generated for file line_trace.pyx, line 147, follows here]
+
+The crux is that multiple source files can contribute code to a single C (or C++) file
+(and thus, to a single extension module) besides the main module source file (.py/.pyx),
+usually shared declaration files (.pxd) but also literally included files (.pxi).
+
+Therefore, the coverage plugin doesn't actually try to look at the file that happened
+to contribute the current source line for the trace call, but simply looks up the single
+.c file from which the extension was compiled (which usually lies right next to it after
+the build, having the same name), and parses the code copy comments from that .c file
+to recover the original source files and their code as a line-to-file mapping.
+
+That mapping is then used to report the ``__Pyx_TraceLine()`` calls to the coverage tool.
+The plugin also reports the line of source code that it found in the C file to the coverage
+tool to support annotated source representations. For this, again, it does not look at the
+actual source files but only reports the source code that it found in the C code comments.
+
+Apart from simplicity (read one file instead of finding and parsing many), part of the
+reasoning here is that any line in the original sources for which there is no comment line
+(and trace call) in the generated C code cannot count as executed, really, so the C code
+comments are a very good source for coverage reporting. They already filter out purely
+declarative code lines that do not contribute executable code, and such (missing) lines
+can then be marked as excluded from coverage analysis.
"""
from __future__ import absolute_import
@@ -45,6 +83,23 @@ def _find_dep_file_path(main_file, file_path, relative_path_search=False):
rel_file_path = os.path.join(os.path.dirname(main_file), file_path)
if os.path.exists(rel_file_path):
abs_path = os.path.abspath(rel_file_path)
+
+ abs_no_ext = os.path.splitext(abs_path)[0]
+ file_no_ext, extension = os.path.splitext(file_path)
+ # We check if the paths match by matching the directories in reverse order.
+ # pkg/module.pyx /long/absolute_path/bla/bla/site-packages/pkg/module.c should match.
+ # this will match the pairs: module-module and pkg-pkg. After which there is nothing left to zip.
+ abs_no_ext = os.path.normpath(abs_no_ext)
+ file_no_ext = os.path.normpath(file_no_ext)
+ matching_paths = zip(reversed(abs_no_ext.split(os.sep)), reversed(file_no_ext.split(os.sep)))
+ for one, other in matching_paths:
+ if one != other:
+ break
+ else: # No mismatches detected
+ matching_abs_path = os.path.splitext(main_file)[0] + extension
+ if os.path.exists(matching_abs_path):
+ return canonical_filename(matching_abs_path)
+
# search sys.path for external locations if a valid file hasn't been found
if not os.path.exists(abs_path):
for sys_path in sys.path:
diff --git a/Cython/Distutils/old_build_ext.py b/Cython/Distutils/old_build_ext.py
index 3595d80e0..cec54d93d 100644
--- a/Cython/Distutils/old_build_ext.py
+++ b/Cython/Distutils/old_build_ext.py
@@ -321,8 +321,8 @@ class old_build_ext(_build_ext.build_ext):
for source in cython_sources:
target = cython_targets[source]
depends = [source] + list(extension.depends or ())
- if(source[-4:].lower()==".pyx" and os.path.isfile(source[:-3]+"pxd")):
- depends += [source[:-3]+"pxd"]
+ if source[-4:].lower() == ".pyx" and os.path.isfile(source[:-3] + "pxd"):
+ depends += [source[:-3] + "pxd"]
rebuild = self.force or newer_group(depends, target, 'newer')
if not rebuild and newest_dependency is not None:
rebuild = newer(newest_dependency, target)
diff --git a/Cython/Includes/cpython/time.pxd b/Cython/Includes/cpython/time.pxd
index 076abd931..7f20095a1 100644
--- a/Cython/Includes/cpython/time.pxd
+++ b/Cython/Includes/cpython/time.pxd
@@ -30,7 +30,7 @@ cdef inline int _raise_from_errno() except -1 with gil:
return <int> -1 # Let the C compiler know that this function always raises.
-cdef inline tm localtime() nogil except *:
+cdef inline tm localtime() except * nogil:
"""
Analogue to the stdlib time.localtime. The returned struct
has some entries that the stdlib version does not: tm_gmtoff, tm_zone
diff --git a/Cython/Includes/cpython/unicode.pxd b/Cython/Includes/cpython/unicode.pxd
index ba11f5736..6ec77f7b3 100644
--- a/Cython/Includes/cpython/unicode.pxd
+++ b/Cython/Includes/cpython/unicode.pxd
@@ -1,4 +1,8 @@
+
cdef extern from *:
+ ctypedef unsigned char Py_UCS1 # uint8_t
+ ctypedef unsigned short Py_UCS2 # uint16_t
+
# Return true if the object o is a Unicode object or an instance
# of a Unicode subtype. Changed in version 2.2: Allowed subtypes
# to be accepted.
@@ -23,6 +27,21 @@ cdef extern from *:
# New in version 3.3.
Py_ssize_t PyUnicode_GET_LENGTH(object o)
+ Py_UCS1 *PyUnicode_1BYTE_DATA(object o)
+ Py_UCS2 *PyUnicode_2BYTE_DATA(object o)
+ Py_UCS4 *PyUnicode_4BYTE_DATA(object o)
+
+ int PyUnicode_WCHAR_KIND # Deprecated since Python 3.10, removed in 3.12.
+ int PyUnicode_1BYTE_KIND
+ int PyUnicode_2BYTE_KIND
+ int PyUnicode_4BYTE_KIND
+ void PyUnicode_WRITE(int kind, void *data, Py_ssize_t index, Py_UCS4 value)
+ Py_UCS4 PyUnicode_READ(int kind, void *data, Py_ssize_t index)
+ Py_UCS4 PyUnicode_READ_CHAR(object o, Py_ssize_t index)
+
+ unsigned int PyUnicode_KIND(object o)
+ void *PyUnicode_DATA(object o)
+
# Return the size of the object's internal buffer in bytes. o has
# to be a PyUnicodeObject (not checked).
Py_ssize_t PyUnicode_GET_DATA_SIZE(object o)
@@ -35,6 +54,8 @@ cdef extern from *:
# be a PyUnicodeObject (not checked).
char* PyUnicode_AS_DATA(object o)
+ bint PyUnicode_IsIdentifier(object o)
+
# Return 1 or 0 depending on whether ch is a whitespace character.
bint Py_UNICODE_ISSPACE(Py_UCS4 ch)
@@ -65,6 +86,8 @@ cdef extern from *:
# Return 1 or 0 depending on whether ch is an alphanumeric character.
bint Py_UNICODE_ISALNUM(Py_UCS4 ch)
+ bint Py_UNICODE_ISPRINTABLE(Py_UCS4 ch)
+
# Return the character ch converted to lower case.
# Used to return a Py_UNICODE value before Py3.3.
Py_UCS4 Py_UNICODE_TOLOWER(Py_UCS4 ch)
@@ -111,6 +134,18 @@ cdef extern from *:
# UTF-8 encoded bytes. The size is determined with strlen().
unicode PyUnicode_FromString(const char *u)
+ unicode PyUnicode_New(Py_ssize_t size, Py_UCS4 maxchar)
+ unicode PyUnicode_FromKindAndData(int kind, const void *buffer, Py_ssize_t size)
+ unicode PyUnicode_FromFormat(const char *format, ...)
+ Py_ssize_t PyUnicode_GetLength(object unicode) except -1
+ Py_ssize_t PyUnicode_CopyCharacters(object to, Py_ssize_t to_start, object from_, Py_ssize_t from_start, Py_ssize_t how_many) except -1
+ Py_ssize_t PyUnicode_Fill(object unicode, Py_ssize_t start, Py_ssize_t length, Py_UCS4 fill_char) except -1
+ int PyUnicode_WriteChar(object unicode, Py_ssize_t index, Py_UCS4 character) except -1
+ Py_UCS4 PyUnicode_ReadChar(object unicode, Py_ssize_t index) except -1
+ unicode PyUnicode_Substring(object str, Py_ssize_t start, Py_ssize_t end)
+ Py_UCS4 *PyUnicode_AsUCS4(object u, Py_UCS4 *buffer, Py_ssize_t buflen, int copy_null) except NULL
+ Py_UCS4 *PyUnicode_AsUCS4Copy(object u) except NULL
+
# Create a Unicode Object from the given Unicode code point ordinal.
#
# The ordinal must be in range(0x10000) on narrow Python builds
diff --git a/Cython/Includes/libcpp/bit.pxd b/Cython/Includes/libcpp/bit.pxd
new file mode 100644
index 000000000..cac12ea4f
--- /dev/null
+++ b/Cython/Includes/libcpp/bit.pxd
@@ -0,0 +1,31 @@
+cdef extern from "<bit>" namespace "std" nogil:
+ # bit_cast (gcc >= 11.0, clang >= 14.0)
+ cdef To bit_cast[To, From](From&)
+
+ # byteswap (C++23)
+ #cdef T byteswap[T](T)
+
+ # integral powers of 2 (gcc >= 10.0, clang >= 12.0)
+ cdef bint has_single_bit[T](T)
+ cdef T bit_ceil[T](T)
+ cdef T bit_floor[T](T)
+ cdef int bit_width[T](T)
+
+ # rotating (gcc >= 9.0, clang >= 9.0)
+ cdef T rotl[T](T, int shift)
+ cdef T rotr[T](T, int shift)
+
+ # counting (gcc >= 9.0, clang >= 9.0)
+ cdef int countl_zero[T](T)
+ cdef int countl_one[T](T)
+ cdef int countr_zero[T](T)
+ cdef int countr_one[T](T)
+ cdef int popcount[T](T)
+
+ # endian
+ cpdef enum class endian(int):
+ little,
+ big,
+ native
+
+
diff --git a/Cython/Includes/libcpp/map.pxd b/Cython/Includes/libcpp/map.pxd
index 2f8238f14..d81af66e0 100644
--- a/Cython/Includes/libcpp/map.pxd
+++ b/Cython/Includes/libcpp/map.pxd
@@ -121,6 +121,8 @@ cdef extern from "<map>" namespace "std" nogil:
iterator upper_bound(const T&)
const_iterator const_upper_bound "upper_bound"(const T&)
#value_compare value_comp()
+ # C++20
+ bint contains(const T&)
cdef cppclass multimap[T, U, COMPARE=*, ALLOCATOR=*]:
ctypedef T key_type
@@ -239,3 +241,4 @@ cdef extern from "<map>" namespace "std" nogil:
iterator upper_bound(const T&)
const_iterator const_upper_bound "upper_bound"(const T&)
#value_compare value_comp()
+ bint contains(const T&)
diff --git a/Cython/Includes/libcpp/numeric.pxd b/Cython/Includes/libcpp/numeric.pxd
index 670c6cfe8..0335a0bac 100644
--- a/Cython/Includes/libcpp/numeric.pxd
+++ b/Cython/Includes/libcpp/numeric.pxd
@@ -122,3 +122,10 @@ cdef extern from "<numeric>" namespace "std" nogil:
ForwardIt2 transform_exclusive_scan[ExecutionPolicy, ForwardIt1, ForwardIt2, T, BinaryOperation, UnaryOperation](
ExecutionPolicy&& policy, ForwardIt1 first, ForwardIt1 last, ForwardIt2 d_first,
T init, BinaryOperation binary_op, UnaryOperation unary_op)
+
+ # C++17
+ T gcd[T](T a, T b)
+ T lcm[T](T a, T b)
+
+ # C++20
+ T midpoint[T](T a, T b) except + \ No newline at end of file
diff --git a/Cython/Includes/libcpp/set.pxd b/Cython/Includes/libcpp/set.pxd
index 8ba47cb7f..7e6449ca2 100644
--- a/Cython/Includes/libcpp/set.pxd
+++ b/Cython/Includes/libcpp/set.pxd
@@ -112,6 +112,8 @@ cdef extern from "<set>" namespace "std" nogil:
iterator upper_bound(const T&)
const_iterator const_upper_bound "upper_bound"(const T&)
#value_compare value_comp()
+ # C++20
+ bint contains(const T&)
cdef cppclass multiset[T]:
ctypedef T value_type
@@ -222,3 +224,5 @@ cdef extern from "<set>" namespace "std" nogil:
void swap(multiset&)
iterator upper_bound(const T&)
const_iterator const_upper_bound "upper_bound"(const T&)
+ # C++20
+ bint contains(const T&)
diff --git a/Cython/Includes/libcpp/string.pxd b/Cython/Includes/libcpp/string.pxd
index 0fee703ea..23518806a 100644
--- a/Cython/Includes/libcpp/string.pxd
+++ b/Cython/Includes/libcpp/string.pxd
@@ -251,6 +251,15 @@ cdef extern from "<string>" namespace "std" nogil:
string substr(size_t pos) except +
string substr()
+ # C++20
+ bint starts_with(char c) except +
+ bint starts_with(const char* s)
+ bint ends_with(char c) except +
+ bint ends_with(const char* s)
+ # C++23
+ bint contains(char c) except +
+ bint contains(const char* s)
+
#string& operator= (const string&)
#string& operator= (const char*)
#string& operator= (char)
diff --git a/Cython/Includes/libcpp/unordered_map.pxd b/Cython/Includes/libcpp/unordered_map.pxd
index 05f3338fa..61d11b0be 100644
--- a/Cython/Includes/libcpp/unordered_map.pxd
+++ b/Cython/Includes/libcpp/unordered_map.pxd
@@ -95,6 +95,8 @@ cdef extern from "<unordered_map>" namespace "std" nogil:
size_t max_bucket_count()
size_t bucket_size(size_t)
size_t bucket(const T&)
+ # C++20
+ bint contains(const T&)
cdef cppclass unordered_multimap[T, U, HASH=*, PRED=*, ALLOCATOR=*]:
ctypedef T key_type
@@ -186,3 +188,5 @@ cdef extern from "<unordered_map>" namespace "std" nogil:
size_t max_bucket_count()
size_t bucket_size(size_t)
size_t bucket(const T&)
+ # C++20
+ bint contains(const T&)
diff --git a/Cython/Includes/libcpp/unordered_set.pxd b/Cython/Includes/libcpp/unordered_set.pxd
index f3fdfb56e..6aae890d9 100644
--- a/Cython/Includes/libcpp/unordered_set.pxd
+++ b/Cython/Includes/libcpp/unordered_set.pxd
@@ -75,6 +75,8 @@ cdef extern from "<unordered_set>" namespace "std" nogil:
size_t max_bucket_count()
size_t bucket_size(size_t)
size_t bucket(const T&)
+ # C++20
+ bint contains(const T&)
cdef cppclass unordered_multiset[T,HASH=*,PRED=*,ALLOCATOR=*]:
ctypedef T value_type
@@ -146,3 +148,5 @@ cdef extern from "<unordered_set>" namespace "std" nogil:
size_t max_bucket_count()
size_t bucket_size(size_t)
size_t bucket(const T&)
+ # C++20
+ bint contains(const T&)
diff --git a/Cython/Shadow.py b/Cython/Shadow.py
index 78d950ce2..097126475 100644
--- a/Cython/Shadow.py
+++ b/Cython/Shadow.py
@@ -1,7 +1,7 @@
# cython.* namespace for pure mode.
from __future__ import absolute_import
-__version__ = "3.0.0a10"
+__version__ = "3.0.0a11"
try:
from __builtin__ import basestring
diff --git a/Cython/TestUtils.py b/Cython/TestUtils.py
index 528a828c7..45a8e6f59 100644
--- a/Cython/TestUtils.py
+++ b/Cython/TestUtils.py
@@ -1,12 +1,14 @@
from __future__ import absolute_import
import os
+import re
import unittest
import shlex
import sys
import tempfile
import textwrap
from io import open
+from functools import partial
from .Compiler import Errors
from .CodeWriter import CodeWriter
@@ -162,11 +164,81 @@ class TransformTest(CythonTest):
return tree
+# For the test C code validation, we have to take care that the test directives (and thus
+# the match strings) do not just appear in (multiline) C code comments containing the original
+# Cython source code. Thus, we discard the comments before matching.
+# This seems a prime case for re.VERBOSE, but it seems to match some of the whitespace.
+_strip_c_comments = partial(re.compile(
+ re.sub(r'\s+', '', r'''
+ /[*] (
+ (?: [^*\n] | [*][^/] )*
+ [\n]
+ (?: [^*] | [*][^/] )*
+ ) [*]/
+ ''')
+).sub, '')
+
+_strip_cython_code_from_html = partial(re.compile(
+ re.sub(r'\s\s+', '', r'''
+ <pre class=["'][^"']*cython\s+line[^"']*["']\s*>
+ (?:[^<]|<(?!/pre))+
+ </pre>
+ ''')
+).sub, '')
+
+
class TreeAssertVisitor(VisitorTransform):
# actually, a TreeVisitor would be enough, but this needs to run
# as part of the compiler pipeline
- def visit_CompilerDirectivesNode(self, node):
+ def __init__(self):
+ super(TreeAssertVisitor, self).__init__()
+ self._module_pos = None
+ self._c_patterns = []
+ self._c_antipatterns = []
+
+ def create_c_file_validator(self):
+ patterns, antipatterns = self._c_patterns, self._c_antipatterns
+
+ def fail(pos, pattern, found, file_path):
+ Errors.error(pos, "Pattern '%s' %s found in %s" %(
+ pattern,
+ 'was' if found else 'was not',
+ file_path,
+ ))
+
+ def validate_file_content(file_path, content):
+ for pattern in patterns:
+ #print("Searching pattern '%s'" % pattern)
+ if not re.search(pattern, content):
+ fail(self._module_pos, pattern, found=False, file_path=file_path)
+
+ for antipattern in antipatterns:
+ #print("Searching antipattern '%s'" % antipattern)
+ if re.search(antipattern, content):
+ fail(self._module_pos, antipattern, found=True, file_path=file_path)
+
+ def validate_c_file(result):
+ c_file = result.c_file
+ if not (patterns or antipatterns):
+ #print("No patterns defined for %s" % c_file)
+ return result
+
+ with open(c_file, encoding='utf8') as f:
+ content = f.read()
+ content = _strip_c_comments(content)
+ validate_file_content(c_file, content)
+
+ html_file = os.path.splitext(c_file)[0] + ".html"
+ if os.path.exists(html_file) and os.path.getmtime(c_file) <= os.path.getmtime(html_file):
+ with open(html_file, encoding='utf8') as f:
+ content = f.read()
+ content = _strip_cython_code_from_html(content)
+ validate_file_content(html_file, content)
+
+ return validate_c_file
+
+ def _check_directives(self, node):
directives = node.directives
if 'test_assert_path_exists' in directives:
for path in directives['test_assert_path_exists']:
@@ -180,6 +252,19 @@ class TreeAssertVisitor(VisitorTransform):
Errors.error(
node.pos,
"Unexpected path '%s' found in result tree" % path)
+ if 'test_assert_c_code_has' in directives:
+ self._c_patterns.extend(directives['test_assert_c_code_has'])
+ if 'test_fail_if_c_code_has' in directives:
+ self._c_antipatterns.extend(directives['test_fail_if_c_code_has'])
+
+ def visit_ModuleNode(self, node):
+ self._module_pos = node.pos
+ self._check_directives(node)
+ self.visitchildren(node)
+ return node
+
+ def visit_CompilerDirectivesNode(self, node):
+ self._check_directives(node)
self.visitchildren(node)
return node
diff --git a/Cython/Utility/AsyncGen.c b/Cython/Utility/AsyncGen.c
index 4b8c8f678..fa374525f 100644
--- a/Cython/Utility/AsyncGen.c
+++ b/Cython/Utility/AsyncGen.c
@@ -1245,7 +1245,7 @@ static int __pyx_AsyncGen_init(PyObject *module) {
#if CYTHON_USE_TYPE_SPECS
__pyx_AsyncGenType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_AsyncGenType_spec, NULL);
#else
- (void) module;
+ CYTHON_MAYBE_UNUSED_VAR(module);
// on Windows, C-API functions can't be used in slots statically
__pyx_AsyncGenType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
__pyx_AsyncGenType = __Pyx_FetchCommonType(&__pyx_AsyncGenType_type);
diff --git a/Cython/Utility/CommonStructures.c b/Cython/Utility/CommonStructures.c
index 5449e6902..f39f3d70d 100644
--- a/Cython/Utility/CommonStructures.c
+++ b/Cython/Utility/CommonStructures.c
@@ -121,7 +121,7 @@ static PyTypeObject *__Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec
if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad;
PyErr_Clear();
// We pass the ABI module reference to avoid keeping the user module alive by foreign type usages.
- (void) module;
+ CYTHON_UNUSED_VAR(module);
cached_type = __Pyx_PyType_FromModuleAndSpec(abi_module, spec, bases);
if (unlikely(!cached_type)) goto bad;
if (unlikely(__Pyx_fix_up_extension_type_from_spec(spec, (PyTypeObject *) cached_type) < 0)) goto bad;
diff --git a/Cython/Utility/Complex.c b/Cython/Utility/Complex.c
index 28062a061..15d5f544d 100644
--- a/Cython/Utility/Complex.c
+++ b/Cython/Utility/Complex.c
@@ -265,7 +265,7 @@ static {{type}} __Pyx_PyComplex_As_{{type_name}}(PyObject* o) {
if (a.imag == 0) {
if (a.real == 0) {
return a;
- } else if (b.imag == 0) {
+ } else if ((b.imag == 0) && (a.real >= 0)) {
z.real = pow{{m}}(a.real, b.real);
z.imag = 0;
return z;
diff --git a/Cython/Utility/Coroutine.c b/Cython/Utility/Coroutine.c
index 15ed61cc4..1a4a78ff5 100644
--- a/Cython/Utility/Coroutine.c
+++ b/Cython/Utility/Coroutine.c
@@ -256,7 +256,7 @@ static PyObject *__Pyx_Coroutine_GetAsyncIter_Generic(PyObject *obj) {
}
#else
// avoid C warning about 'unused function'
- if ((0)) (void) __Pyx_PyObject_CallMethod0(obj, PYIDENT("__aiter__"));
+ (void)&__Pyx_PyObject_CallMethod0;
#endif
obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj));
@@ -800,6 +800,7 @@ static CYTHON_INLINE void __Pyx_Coroutine_ResetFrameBackpointer(__Pyx_ExcInfoStr
// cycle.
#if CYTHON_COMPILING_IN_PYPY
// FIXME: what to do in PyPy?
+ CYTHON_UNUSED_VAR(exc_state);
#else
PyObject *exc_tb;
@@ -1860,11 +1861,11 @@ static PyTypeObject __pyx_CoroutineType_type = {
#endif /* CYTHON_USE_TYPE_SPECS */
static int __pyx_Coroutine_init(PyObject *module) {
+ CYTHON_MAYBE_UNUSED_VAR(module);
// on Windows, C-API functions can't be used in slots statically
#if CYTHON_USE_TYPE_SPECS
__pyx_CoroutineType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CoroutineType_spec, NULL);
#else
- (void) module;
__pyx_CoroutineType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
__pyx_CoroutineType = __Pyx_FetchCommonType(&__pyx_CoroutineType_type);
#endif
@@ -2014,7 +2015,7 @@ static int __pyx_IterableCoroutine_init(PyObject *module) {
#if CYTHON_USE_TYPE_SPECS
__pyx_IterableCoroutineType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_IterableCoroutineType_spec, NULL);
#else
- (void) module;
+ CYTHON_UNUSED_VAR(module);
__pyx_IterableCoroutineType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
__pyx_IterableCoroutineType = __Pyx_FetchCommonType(&__pyx_IterableCoroutineType_type);
#endif
@@ -2159,7 +2160,7 @@ static int __pyx_Generator_init(PyObject *module) {
#if CYTHON_USE_TYPE_SPECS
__pyx_GeneratorType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_GeneratorType_spec, NULL);
#else
- (void) module;
+ CYTHON_UNUSED_VAR(module);
// on Windows, C-API functions can't be used in slots statically
__pyx_GeneratorType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
__pyx_GeneratorType_type.tp_iter = PyObject_SelfIter;
@@ -2565,8 +2566,8 @@ static PyTypeObject __Pyx__PyExc_StopAsyncIteration_type = {
#endif
static int __pyx_StopAsyncIteration_init(PyObject *module) {
+ CYTHON_UNUSED_VAR(module);
#if PY_VERSION_HEX >= 0x030500B1
- (void) module;
__Pyx_PyExc_StopAsyncIteration = PyExc_StopAsyncIteration;
#else
PyObject *builtins = PyEval_GetBuiltins();
@@ -2584,7 +2585,6 @@ static int __pyx_StopAsyncIteration_init(PyObject *module) {
__Pyx__PyExc_StopAsyncIteration_type.tp_dictoffset = ((PyTypeObject*)PyExc_BaseException)->tp_dictoffset;
__Pyx__PyExc_StopAsyncIteration_type.tp_base = (PyTypeObject*)PyExc_Exception;
- (void) module;
__Pyx_PyExc_StopAsyncIteration = (PyObject*) __Pyx_FetchCommonType(&__Pyx__PyExc_StopAsyncIteration_type);
if (unlikely(!__Pyx_PyExc_StopAsyncIteration))
return -1;
diff --git a/Cython/Utility/CppSupport.cpp b/Cython/Utility/CppSupport.cpp
index ca5579918..ba0002c94 100644
--- a/Cython/Utility/CppSupport.cpp
+++ b/Cython/Utility/CppSupport.cpp
@@ -84,15 +84,50 @@ auto __Pyx_pythran_to_python(T &&value) -> decltype(to_python(
////////////// OptionalLocals.proto ////////////////
//@proto_block: utility_code_proto_before_types
+#include <utility>
#if defined(CYTHON_USE_BOOST_OPTIONAL)
// fallback mode - std::optional is preferred but this gives
// people with a less up-to-date compiler a chance
#include <boost/optional.hpp>
- #define __Pyx_Optional_Type boost::optional
+ #define __Pyx_Optional_BaseType boost::optional
#else
#include <optional>
// since std::optional is a C++17 features, a templated using declaration should be safe
// (although it could be replaced with a define)
template <typename T>
- using __Pyx_Optional_Type = std::optional<T>;
+ using __Pyx_Optional_BaseType = std::optional<T>;
#endif
+
+// This class reuses as much of the implementation of std::optional as possible.
+// The only place it differs significantly is the assignment operators, which use
+// "emplace" (thus requiring move/copy constructors, but not move/copy
+// assignment operators). This is preferred because it lets us work with assignable
+// types (for example those with const members)
+template <typename T>
+class __Pyx_Optional_Type : private __Pyx_Optional_BaseType<T> {
+public:
+ using __Pyx_Optional_BaseType<T>::__Pyx_Optional_BaseType;
+ using __Pyx_Optional_BaseType<T>::has_value;
+ using __Pyx_Optional_BaseType<T>::operator*;
+ using __Pyx_Optional_BaseType<T>::operator->;
+#if __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)
+ __Pyx_Optional_Type& operator=(const __Pyx_Optional_Type& rhs) {
+ this->emplace(*rhs);
+ return *this;
+ }
+ __Pyx_Optional_Type& operator=(__Pyx_Optional_Type&& rhs) {
+ this->emplace(std::move(*rhs));
+ return *this;
+ }
+ template <typename U=T>
+ __Pyx_Optional_Type& operator=(U&& rhs) {
+ this->emplace(std::forward<U>(rhs));
+ return *this;
+ }
+#else
+ // Note - the "cpp_locals" feature is designed to require C++14.
+ // This pre-c++11 fallback is largely untested, and definitely won't work
+ // in all the cases that the more modern version does
+ using __Pyx_Optional_BaseType<T>::operator=; // the chances are emplace can't work...
+#endif
+};
diff --git a/Cython/Utility/CythonFunction.c b/Cython/Utility/CythonFunction.c
index 870dcf620..226019cee 100644
--- a/Cython/Utility/CythonFunction.c
+++ b/Cython/Utility/CythonFunction.c
@@ -780,9 +780,17 @@ static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, P
self = PyTuple_GetItem(args, 0);
if (unlikely(!self)) {
Py_DECREF(new_args);
+#if PY_MAJOR_VERSION > 2
PyErr_Format(PyExc_TypeError,
"unbound method %.200S() needs an argument",
cyfunc->func_qualname);
+#else
+ // %S doesn't work in PyErr_Format on Py2 and replicating
+ // the formatting seems more trouble than it's worth
+ // (so produce a less useful error message).
+ PyErr_SetString(PyExc_TypeError,
+ "unbound method needs an argument");
+#endif
return NULL;
}
@@ -1055,7 +1063,7 @@ static int __pyx_CyFunction_init(PyObject *module) {
#if CYTHON_USE_TYPE_SPECS
__pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL);
#else
- (void) module;
+ CYTHON_UNUSED_VAR(module);
__pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type);
#endif
if (unlikely(__pyx_CyFunctionType == NULL)) {
@@ -1587,7 +1595,7 @@ static int __pyx_FusedFunction_init(PyObject *module) {
__pyx_FusedFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_FusedFunctionType_spec, bases);
Py_DECREF(bases);
#else
- (void) module;
+ CYTHON_UNUSED_VAR(module);
// Set base from __Pyx_FetchCommonTypeFromSpec, in case it's different from the local static value.
__pyx_FusedFunctionType_type.tp_base = __pyx_CyFunctionType;
__pyx_FusedFunctionType = __Pyx_FetchCommonType(&__pyx_FusedFunctionType_type);
@@ -1601,7 +1609,7 @@ static int __pyx_FusedFunction_init(PyObject *module) {
//////////////////// ClassMethod.proto ////////////////////
#include "descrobject.h"
-static CYTHON_UNUSED PyObject* __Pyx_Method_ClassMethod(PyObject *method); /*proto*/
+CYTHON_UNUSED static PyObject* __Pyx_Method_ClassMethod(PyObject *method); /*proto*/
//////////////////// ClassMethod ////////////////////
diff --git a/Cython/Utility/Exceptions.c b/Cython/Utility/Exceptions.c
index 9f96225d1..3c9784f88 100644
--- a/Cython/Utility/Exceptions.c
+++ b/Cython/Utility/Exceptions.c
@@ -676,7 +676,7 @@ static void __Pyx_WriteUnraisable(const char *name, int clineno,
if (nogil)
state = PyGILState_Ensure();
/* arbitrary, to suppress warning */
- else state = (PyGILState_STATE)-1;
+ else state = (PyGILState_STATE)0;
#endif
CYTHON_UNUSED_VAR(clineno);
CYTHON_UNUSED_VAR(lineno);
@@ -725,13 +725,15 @@ static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);/*proto*/
//@substitute: naming
#ifndef CYTHON_CLINE_IN_TRACEBACK
-static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) {
+static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) {
PyObject *use_cline;
PyObject *ptype, *pvalue, *ptraceback;
#if CYTHON_COMPILING_IN_CPYTHON
PyObject **cython_runtime_dict;
#endif
+ CYTHON_MAYBE_UNUSED_VAR(tstate);
+
if (unlikely(!${cython_runtime_cname})) {
// Very early error where the runtime module is not set up yet.
return c_line;
diff --git a/Cython/Utility/ExtensionTypes.c b/Cython/Utility/ExtensionTypes.c
index aa39a860a..700bf1468 100644
--- a/Cython/Utility/ExtensionTypes.c
+++ b/Cython/Utility/ExtensionTypes.c
@@ -11,8 +11,8 @@ static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject
#if CYTHON_USE_TYPE_SPECS
static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type) {
#if PY_VERSION_HEX > 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API
- (void) spec;
- (void) type;
+ CYTHON_UNUSED_VAR(spec);
+ CYTHON_UNUSED_VAR(type);
#else
// Set tp_weakreflist, tp_dictoffset, tp_vectorcalloffset
// Copied and adapted from https://bugs.python.org/issue38140
@@ -156,7 +156,7 @@ static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffs
/////////////// PyType_Ready.proto ///////////////
// unused when using type specs
-static CYTHON_UNUSED int __Pyx_PyType_Ready(PyTypeObject *t);/*proto*/
+CYTHON_UNUSED static int __Pyx_PyType_Ready(PyTypeObject *t);/*proto*/
/////////////// PyType_Ready ///////////////
//@requires: ObjectHandling.c::PyObjectCallMethod0
diff --git a/Cython/Utility/ImportExport.c b/Cython/Utility/ImportExport.c
index 897657281..66e75ea00 100644
--- a/Cython/Utility/ImportExport.c
+++ b/Cython/Utility/ImportExport.c
@@ -478,13 +478,24 @@ set_path:
#ifndef __PYX_HAVE_RT_ImportType_proto
#define __PYX_HAVE_RT_ImportType_proto
+#if __STDC_VERSION__ >= 201112L
+#include <stdalign.h>
+#endif
+
+#if __STDC_VERSION__ >= 201112L || __cplusplus >= 201103L
+#define __PYX_GET_STRUCT_ALIGNMENT(s) alignof(s)
+#else
+// best guess at what the alignment could be since we can't measure it
+#define __PYX_GET_STRUCT_ALIGNMENT(s) sizeof(void*)
+#endif
+
enum __Pyx_ImportType_CheckSize {
__Pyx_ImportType_CheckSize_Error = 0,
__Pyx_ImportType_CheckSize_Warn = 1,
__Pyx_ImportType_CheckSize_Ignore = 2
};
-static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); /*proto*/
+static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize check_size); /*proto*/
#endif
@@ -493,7 +504,7 @@ static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name,
#ifndef __PYX_HAVE_RT_ImportType
#define __PYX_HAVE_RT_ImportType
static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name,
- size_t size, enum __Pyx_ImportType_CheckSize check_size)
+ size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize check_size)
{
PyObject *result = 0;
char warning[200];
@@ -534,6 +545,18 @@ static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name,
if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred())
goto bad;
#endif
+ if (itemsize) {
+ // If itemsize is smaller than the alignment the struct can end up with some extra
+ // padding at the end. In this case we need to work out the maximum size that
+ // the padding could be when calculating the range of valid struct sizes.
+ if (size % alignment) {
+ // if this is true we've probably calculated the alignment wrongly
+ // (most likely because alignof isn't available)
+ alignment = size % alignment;
+ }
+ if (itemsize < (Py_ssize_t)alignment)
+ itemsize = (Py_ssize_t)alignment;
+ }
if ((size_t)(basicsize + itemsize) < size) {
PyErr_Format(PyExc_ValueError,
"%.200s.%.200s size changed, may indicate binary incompatibility. "
diff --git a/Cython/Utility/MemoryView.pyx b/Cython/Utility/MemoryView.pyx
index 9361249fb..d36e7f60c 100644
--- a/Cython/Utility/MemoryView.pyx
+++ b/Cython/Utility/MemoryView.pyx
@@ -26,6 +26,7 @@ cdef extern from "<string.h>":
void *memset(void *b, int c, size_t len)
cdef extern from *:
+ bint __PYX_CYTHON_ATOMICS_ENABLED()
int __Pyx_GetBuffer(object, Py_buffer *, int) except -1
void __Pyx_ReleaseBuffer(Py_buffer *)
@@ -80,7 +81,7 @@ cdef extern from *:
__Pyx_memviewslice *from_mvs,
char *mode, int ndim,
size_t sizeof_dtype, int contig_flag,
- bint dtype_is_object) nogil except *
+ bint dtype_is_object) except * nogil
bint slice_is_contig "__pyx_memviewslice_is_contig" (
{{memviewslice_name}} mvs, char order, int ndim) nogil
bint slices_overlap "__pyx_slices_overlap" ({{memviewslice_name}} *slice1,
@@ -121,7 +122,7 @@ cdef class array:
Py_ssize_t itemsize
unicode mode # FIXME: this should have been a simple 'char'
bytes _format
- void (*callback_free_data)(void *data)
+ void (*callback_free_data)(void *data) noexcept
# cdef object _memview
cdef bint free_data
cdef bint dtype_is_object
@@ -366,14 +367,15 @@ cdef class memoryview:
(<__pyx_buffer *> &self.view).obj = Py_None
Py_INCREF(Py_None)
- global __pyx_memoryview_thread_locks_used
- if __pyx_memoryview_thread_locks_used < {{THREAD_LOCKS_PREALLOCATED}}:
- self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used]
- __pyx_memoryview_thread_locks_used += 1
- if self.lock is NULL:
- self.lock = PyThread_allocate_lock()
+ if not __PYX_CYTHON_ATOMICS_ENABLED():
+ global __pyx_memoryview_thread_locks_used
+ if __pyx_memoryview_thread_locks_used < {{THREAD_LOCKS_PREALLOCATED}}:
+ self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used]
+ __pyx_memoryview_thread_locks_used += 1
if self.lock is NULL:
- raise MemoryError
+ self.lock = PyThread_allocate_lock()
+ if self.lock is NULL:
+ raise MemoryError
if flags & PyBUF_FORMAT:
self.dtype_is_object = (self.view.format[0] == b'O' and self.view.format[1] == b'\0')
@@ -806,7 +808,7 @@ cdef int slice_memviewslice(
int dim, int new_ndim, int *suboffset_dim,
Py_ssize_t start, Py_ssize_t stop, Py_ssize_t step,
int have_start, int have_stop, int have_step,
- bint is_slice) nogil except -1:
+ bint is_slice) except -1 nogil:
"""
Create a new slice dst given slice src.
@@ -936,7 +938,7 @@ cdef char *pybuffer_index(Py_buffer *view, char *bufp, Py_ssize_t index,
### Transposing a memoryviewslice
#
@cname('__pyx_memslice_transpose')
-cdef int transpose_memslice({{memviewslice_name}} *memslice) nogil except -1:
+cdef int transpose_memslice({{memviewslice_name}} *memslice) except -1 nogil:
cdef int ndim = memslice.memview.view.ndim
cdef Py_ssize_t *shape = memslice.shape
@@ -1180,7 +1182,7 @@ cdef void copy_strided_to_strided({{memviewslice_name}} *src,
src.shape, dst.shape, ndim, itemsize)
@cname('__pyx_memoryview_slice_get_size')
-cdef Py_ssize_t slice_get_size({{memviewslice_name}} *src, int ndim) nogil:
+cdef Py_ssize_t slice_get_size({{memviewslice_name}} *src, int ndim) noexcept nogil:
"Return the size of the memory occupied by the slice in number of bytes"
cdef Py_ssize_t shape, size = src.memview.view.itemsize
@@ -1214,7 +1216,7 @@ cdef Py_ssize_t fill_contig_strides_array(
cdef void *copy_data_to_temp({{memviewslice_name}} *src,
{{memviewslice_name}} *tmpslice,
char order,
- int ndim) nogil except NULL:
+ int ndim) except NULL nogil:
"""
Copy a direct slice to temporary contiguous memory. The caller should free
the result when done.
@@ -1274,7 +1276,7 @@ cdef int _err_no_memory() except -1 with gil:
cdef int memoryview_copy_contents({{memviewslice_name}} src,
{{memviewslice_name}} dst,
int src_ndim, int dst_ndim,
- bint dtype_is_object) nogil except -1:
+ bint dtype_is_object) except -1 nogil:
"""
Copy memory from slice src to slice dst.
Check for overlapping memory and verify the shapes.
@@ -1378,7 +1380,7 @@ cdef void refcount_objects_in_slice_with_gil(char *data, Py_ssize_t *shape,
@cname('__pyx_memoryview_refcount_objects_in_slice')
cdef void refcount_objects_in_slice(char *data, Py_ssize_t *shape,
- Py_ssize_t *strides, int ndim, bint inc):
+ Py_ssize_t *strides, int ndim, bint inc) noexcept:
cdef Py_ssize_t i
cdef Py_ssize_t stride = strides[0]
diff --git a/Cython/Utility/MemoryView_C.c b/Cython/Utility/MemoryView_C.c
index de003a2ee..774ec1767 100644
--- a/Cython/Utility/MemoryView_C.c
+++ b/Cython/Utility/MemoryView_C.c
@@ -24,38 +24,84 @@ typedef struct {
#ifndef CYTHON_ATOMICS
#define CYTHON_ATOMICS 1
#endif
+// using CYTHON_ATOMICS as a cdef extern bint in the Cython memoryview code
+// interacts badly with "import *". Therefore, define a helper function-like macro
+#define __PYX_CYTHON_ATOMICS_ENABLED() CYTHON_ATOMICS
#define __pyx_atomic_int_type int
-// todo: Portland pgcc, maybe OS X's OSAtomicIncrement32,
-// libatomic + autotools-like distutils support? Such a pain...
-#if CYTHON_ATOMICS && __GNUC__ >= 4 && (__GNUC_MINOR__ > 1 || \
- (__GNUC_MINOR__ == 1 && __GNUC_PATCHLEVEL >= 2)) && \
- !defined(__i386__)
+#define __pyx_nonatomic_int_type int
+
+// For standard C/C++ atomics, get the headers first so we have ATOMIC_INT_LOCK_FREE
+// defined when we decide to use them.
+#if CYTHON_ATOMICS && (defined(__STDC_VERSION__) && \
+ (__STDC_VERSION__ >= 201112L) && \
+ !defined(__STDC_NO_ATOMICS__))
+ #include <stdatomic.h>
+#elif CYTHON_ATOMICS && (defined(__cplusplus) && ( \
+ (__cplusplus >= 201103L) || \
+ (defined(_MSC_VER) && _MSC_VER >= 1700)))
+ #include <atomic>
+#endif
+
+#if CYTHON_ATOMICS && (defined(__STDC_VERSION__) && \
+ (__STDC_VERSION__ >= 201112L) && \
+ !defined(__STDC_NO_ATOMICS__) && \
+ ATOMIC_INT_LOCK_FREE == 2)
+ // C11 atomics are available.
+ // Require ATOMIC_INT_LOCK_FREE because I'm nervous about the __pyx_atomic_int[2]
+ // alignment trick in MemoryView.pyx if it uses mutexes.
+ #undef __pyx_atomic_int_type
+ #define __pyx_atomic_int_type atomic_int
+ // TODO - it might be possible to use a less strict memory ordering here
+ #define __pyx_atomic_incr_aligned(value) atomic_fetch_add(value, 1)
+ #define __pyx_atomic_decr_aligned(value) atomic_fetch_sub(value, 1)
+ #if defined(__PYX_DEBUG_ATOMICS) && defined(_MSC_VER)
+ #pragma message ("Using standard C atomics")
+ #elif defined(__PYX_DEBUG_ATOMICS)
+ #warning "Using standard C atomics"
+ #endif
+#elif CYTHON_ATOMICS && (defined(__cplusplus) && ( \
+ (__cplusplus >= 201103L) || \
+ /*_MSC_VER 1700 is Visual Studio 2012 */ \
+ (defined(_MSC_VER) && _MSC_VER >= 1700)) && \
+ ATOMIC_INT_LOCK_FREE == 2)
+ // C++11 atomics are available.
+ // Require ATOMIC_INT_LOCK_FREE because I'm nervous about the __pyx_atomic_int[2]
+ // alignment trick in MemoryView.pyx if it uses mutexes.
+ #undef __pyx_atomic_int_type
+ #define __pyx_atomic_int_type std::atomic_int
+ // TODO - it might be possible to use a less strict memory ordering here
+ #define __pyx_atomic_incr_aligned(value) std::atomic_fetch_add(value, 1)
+ #define __pyx_atomic_decr_aligned(value) std::atomic_fetch_sub(value, 1)
+
+ #if defined(__PYX_DEBUG_ATOMICS) && defined(_MSC_VER)
+ #pragma message ("Using standard C++ atomics")
+ #elif defined(__PYX_DEBUG_ATOMICS)
+ #warning "Using standard C++ atomics"
+ #endif
+#elif CYTHON_ATOMICS && (__GNUC__ >= 5 || (__GNUC__ == 4 && \
+ (__GNUC_MINOR__ > 1 || \
+ (__GNUC_MINOR__ == 1 && __GNUC_PATCHLEVEL__ >= 2))))
/* gcc >= 4.1.2 */
- #define __pyx_atomic_incr_aligned(value, lock) __sync_fetch_and_add(value, 1)
- #define __pyx_atomic_decr_aligned(value, lock) __sync_fetch_and_sub(value, 1)
+ #define __pyx_atomic_incr_aligned(value) __sync_fetch_and_add(value, 1)
+ #define __pyx_atomic_decr_aligned(value) __sync_fetch_and_sub(value, 1)
#ifdef __PYX_DEBUG_ATOMICS
#warning "Using GNU atomics"
#endif
-#elif CYTHON_ATOMICS && defined(_MSC_VER) && 0
+#elif CYTHON_ATOMICS && defined(_MSC_VER)
/* msvc */
- #include <Windows.h>
+ #include <intrin.h>
#undef __pyx_atomic_int_type
- #define __pyx_atomic_int_type LONG
- #define __pyx_atomic_incr_aligned(value, lock) InterlockedIncrement(value)
- #define __pyx_atomic_decr_aligned(value, lock) InterlockedDecrement(value)
+ #define __pyx_atomic_int_type long
+ #define __pyx_nonatomic_int_type long
+ #pragma intrinsic (_InterlockedExchangeAdd)
+ #define __pyx_atomic_incr_aligned(value) _InterlockedExchangeAdd(value, 1)
+ #define __pyx_atomic_decr_aligned(value) _InterlockedExchangeAdd(value, -1)
#ifdef __PYX_DEBUG_ATOMICS
#pragma message ("Using MSVC atomics")
#endif
-#elif CYTHON_ATOMICS && (defined(__ICC) || defined(__INTEL_COMPILER)) && 0
- #define __pyx_atomic_incr_aligned(value, lock) _InterlockedIncrement(value)
- #define __pyx_atomic_decr_aligned(value, lock) _InterlockedDecrement(value)
-
- #ifdef __PYX_DEBUG_ATOMICS
- #warning "Using Intel atomics"
- #endif
#else
#undef CYTHON_ATOMICS
#define CYTHON_ATOMICS 0
@@ -69,9 +115,9 @@ typedef volatile __pyx_atomic_int_type __pyx_atomic_int;
#if CYTHON_ATOMICS
#define __pyx_add_acquisition_count(memview) \
- __pyx_atomic_incr_aligned(__pyx_get_slice_count_pointer(memview), memview->lock)
+ __pyx_atomic_incr_aligned(__pyx_get_slice_count_pointer(memview))
#define __pyx_sub_acquisition_count(memview) \
- __pyx_atomic_decr_aligned(__pyx_get_slice_count_pointer(memview), memview->lock)
+ __pyx_atomic_decr_aligned(__pyx_get_slice_count_pointer(memview))
#else
#define __pyx_add_acquisition_count(memview) \
__pyx_add_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock)
@@ -488,7 +534,7 @@ __pyx_sub_acquisition_count_locked(__pyx_atomic_int *acquisition_count,
static CYTHON_INLINE void
__Pyx_INC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil, int lineno)
{
- __pyx_atomic_int_type old_acquisition_count;
+ __pyx_nonatomic_int_type old_acquisition_count;
struct {{memview_struct_name}} *memview = memslice->memview;
if (unlikely(!memview || (PyObject *) memview == Py_None)) {
// Allow uninitialized memoryview assignment and do not ref-count None.
@@ -515,7 +561,7 @@ __Pyx_INC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil, int lineno)
static CYTHON_INLINE void __Pyx_XCLEAR_MEMVIEW({{memviewslice_name}} *memslice,
int have_gil, int lineno) {
- __pyx_atomic_int_type old_acquisition_count;
+ __pyx_nonatomic_int_type old_acquisition_count;
struct {{memview_struct_name}} *memview = memslice->memview;
if (unlikely(!memview || (PyObject *) memview == Py_None)) {
diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c
index 533689788..df2a4ee4a 100644
--- a/Cython/Utility/ModuleSetupCode.c
+++ b/Cython/Utility/ModuleSetupCode.c
@@ -61,6 +61,7 @@
#define CYTHON_COMPILING_IN_CPYTHON 0
#define CYTHON_COMPILING_IN_LIMITED_API 0
#define CYTHON_COMPILING_IN_GRAAL 1
+ #define CYTHON_COMPILING_IN_NOGIL 0
#undef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 0
@@ -112,11 +113,13 @@
#ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
#define CYTHON_UPDATE_DESCRIPTOR_DOC 0
#endif
+
#elif defined(PYPY_VERSION)
#define CYTHON_COMPILING_IN_PYPY 1
#define CYTHON_COMPILING_IN_CPYTHON 0
#define CYTHON_COMPILING_IN_LIMITED_API 0
#define CYTHON_COMPILING_IN_GRAAL 0
+ #define CYTHON_COMPILING_IN_NOGIL 0
#undef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 0
@@ -166,14 +169,16 @@
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
- #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_HEX >= 0x07030900)
+ #define CYTHON_UPDATE_DESCRIPTOR_DOC 0
#endif
+
#elif defined(CYTHON_LIMITED_API)
// EXPERIMENTAL !!
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_CPYTHON 0
#define CYTHON_COMPILING_IN_LIMITED_API 1
#define CYTHON_COMPILING_IN_GRAAL 0
+ #define CYTHON_COMPILING_IN_NOGIL 0
// CYTHON_CLINE_IN_TRACEBACK is currently disabled for the Limited API
#undef CYTHON_CLINE_IN_TRACEBACK
@@ -228,11 +233,61 @@
#ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
#define CYTHON_UPDATE_DESCRIPTOR_DOC 0
#endif
+
+#elif defined(PY_NOGIL)
+ #define CYTHON_COMPILING_IN_PYPY 0
+ #define CYTHON_COMPILING_IN_CPYTHON 0
+ #define CYTHON_COMPILING_IN_LIMITED_API 0
+ #define CYTHON_COMPILING_IN_GRAAL 0
+ #define CYTHON_COMPILING_IN_NOGIL 1
+
+ #ifndef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #ifndef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 0
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 1
+ #endif
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #ifndef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 0
+ #endif
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 1
+ #endif
+ #ifndef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 1
+ #endif
+ #undef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 0
+ #undef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 0
+ #ifndef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 1
+ #endif
+ #ifndef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 1
+ #endif
+ #undef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS 0
+ #undef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK 0
+
#else
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_CPYTHON 1
#define CYTHON_COMPILING_IN_LIMITED_API 0
#define CYTHON_COMPILING_IN_GRAAL 0
+ #define CYTHON_COMPILING_IN_NOGIL 0
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
@@ -374,6 +429,17 @@
// unused attribute
#ifndef CYTHON_UNUSED
+ #if defined(__cplusplus)
+ /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17
+ * but leads to warnings with -pedantic, since it is a C++17 feature */
+ #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L)
+ #if __has_cpp_attribute(maybe_unused)
+ #define CYTHON_UNUSED [[maybe_unused]]
+ #endif
+ #endif
+ #endif
+#endif
+#ifndef CYTHON_UNUSED
# if defined(__GNUC__)
# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
# define CYTHON_UNUSED __attribute__ ((__unused__))
@@ -441,13 +507,21 @@
#ifndef CYTHON_FALLTHROUGH
- #if defined(__cplusplus) && __cplusplus >= 201103L
- #if __has_cpp_attribute(fallthrough)
- #define CYTHON_FALLTHROUGH [[fallthrough]]
- #elif __has_cpp_attribute(clang::fallthrough)
- #define CYTHON_FALLTHROUGH [[clang::fallthrough]]
- #elif __has_cpp_attribute(gnu::fallthrough)
- #define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
+ #if defined(__cplusplus)
+ /* for clang __has_cpp_attribute(fallthrough) is true even before C++17
+ * but leads to warnings with -pedantic, since it is a C++17 feature */
+ #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L)
+ #if __has_cpp_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH [[fallthrough]]
+ #endif
+ #endif
+
+ #ifndef CYTHON_FALLTHROUGH
+ #if __has_cpp_attribute(clang::fallthrough)
+ #define CYTHON_FALLTHROUGH [[clang::fallthrough]]
+ #elif __has_cpp_attribute(gnu::fallthrough)
+ #define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
+ #endif
#endif
#endif
@@ -459,7 +533,7 @@
#endif
#endif
- #if defined(__clang__ ) && defined(__apple_build_version__)
+ #if defined(__clang__) && defined(__apple_build_version__)
#if __apple_build_version__ < 7000000 /* Xcode < 7.0 */
#undef CYTHON_FALLTHROUGH
#define CYTHON_FALLTHROUGH
@@ -518,8 +592,10 @@ class __Pyx_FakeReference {
T *operator&() { return ptr; }
operator T&() { return *ptr; }
// TODO(robertwb): Delegate all operators (or auto-generate unwrapping code where needed).
- template<typename U> bool operator ==(U other) { return *ptr == other; }
- template<typename U> bool operator !=(U other) { return *ptr != other; }
+ template<typename U> bool operator ==(const U& other) const { return *ptr == other; }
+ template<typename U> bool operator !=(const U& other) const { return *ptr != other; }
+ template<typename U=T> bool operator==(const __Pyx_FakeReference<U>& other) const { return *ptr == *other.ptr; }
+ template<typename U=T> bool operator!=(const __Pyx_FakeReference<U>& other) const { return *ptr != *other.ptr; }
private:
T *ptr;
};
@@ -650,6 +726,13 @@ class __Pyx_FakeReference {
#endif
#define __Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj))
+#ifndef CO_COROUTINE
+ #define CO_COROUTINE 0x80
+#endif
+#ifndef CO_ASYNC_GENERATOR
+ #define CO_ASYNC_GENERATOR 0x200
+#endif
+
#ifndef Py_TPFLAGS_CHECKTYPES
#define Py_TPFLAGS_CHECKTYPES 0
#endif
diff --git a/Cython/Utility/ObjectHandling.c b/Cython/Utility/ObjectHandling.c
index 56be4ea27..6b212ca79 100644
--- a/Cython/Utility/ObjectHandling.c
+++ b/Cython/Utility/ObjectHandling.c
@@ -287,7 +287,7 @@ static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject *k
#if CYTHON_USE_TYPE_SLOTS
static PyObject *__Pyx_PyObject_GetIndex(PyObject *obj, PyObject *index) {
// Get element from sequence object `obj` at index `index`.
- PyObject *runerr;
+ PyObject *runerr = NULL;
Py_ssize_t key_value;
key_value = __Pyx_PyIndex_AsSsize_t(index);
if (likely(key_value != -1 || !(runerr = PyErr_Occurred()))) {
@@ -1553,18 +1553,18 @@ static int __Pyx_SetNewInClass(PyObject *ns, PyObject *name, PyObject *value) {
//@substitute: naming
#if CYTHON_USE_DICT_VERSIONS
-#define __Pyx_GetModuleGlobalName(var, name) { \
+#define __Pyx_GetModuleGlobalName(var, name) do { \
static PY_UINT64_T __pyx_dict_version = 0; \
static PyObject *__pyx_dict_cached_value = NULL; \
(var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION($moddict_cname))) ? \
(likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) : \
__Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value); \
-}
-#define __Pyx_GetModuleGlobalNameUncached(var, name) { \
+} while(0)
+#define __Pyx_GetModuleGlobalNameUncached(var, name) do { \
PY_UINT64_T __pyx_dict_version; \
PyObject *__pyx_dict_cached_value; \
(var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value); \
-}
+} while(0)
static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); /*proto*/
#else
#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name)
@@ -2081,11 +2081,11 @@ static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction*
if (flag == METH_O) {
return (*(cfunc->func))(self, arg);
} else if ((PY_VERSION_HEX >= 0x030600B1) && flag == METH_FASTCALL) {
- if ((PY_VERSION_HEX >= 0x030700A0)) {
+ #if PY_VERSION_HEX >= 0x030700A0
return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, &arg, 1);
- } else {
+ #else
return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL);
- }
+ #endif
} else if ((PY_VERSION_HEX >= 0x030700A0) && flag == (METH_FASTCALL | METH_KEYWORDS)) {
return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL);
}
@@ -2900,6 +2900,15 @@ static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *ty
#define __Pyx_PyMethod_New PyMethod_New
#endif
+///////////// PyMethodNew2Arg.proto /////////////
+
+// Another wrapping of PyMethod_New that matches the Python3 signature
+#if PY_MAJOR_VERSION >= 3
+#define __Pyx_PyMethod_New2Arg PyMethod_New
+#else
+#define __Pyx_PyMethod_New2Arg(func, self) PyMethod_New(func, self, (PyObject*)Py_TYPE(self))
+#endif
+
/////////////// UnicodeConcatInPlace.proto ////////////////
# if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3
diff --git a/Cython/Utility/Optimize.c b/Cython/Utility/Optimize.c
index 81aeb316e..7a3e3cd3d 100644
--- a/Cython/Utility/Optimize.c
+++ b/Cython/Utility/Optimize.c
@@ -445,7 +445,7 @@ static CYTHON_INLINE PyObject* __Pyx_set_iterator(PyObject* iterable, int is_set
return iterable;
}
#else
- (void)is_set;
+ CYTHON_UNUSED_VAR(is_set);
*p_source_is_set = 0;
#endif
*p_orig_length = 0;
@@ -461,8 +461,8 @@ static CYTHON_INLINE int __Pyx_set_iter_next(
if (unlikely(!*value)) {
return __Pyx_IterFinish();
}
- (void)orig_length;
- (void)ppos;
+ CYTHON_UNUSED_VAR(orig_length);
+ CYTHON_UNUSED_VAR(ppos);
return 1;
}
#if CYTHON_COMPILING_IN_CPYTHON
@@ -904,7 +904,7 @@ static CYTHON_INLINE int __Pyx__PyBytes_AsDouble_IsSpace(char ch) {
return (ch == 0x20) | !((ch < 0x9) | (ch > 0xd));
}
-static CYTHON_UNUSED double __Pyx__PyBytes_AsDouble(PyObject *obj, const char* start, Py_ssize_t length) {
+CYTHON_UNUSED static double __Pyx__PyBytes_AsDouble(PyObject *obj, const char* start, Py_ssize_t length) {
double value;
Py_ssize_t i, digits;
const char *last = start + length;
@@ -1318,7 +1318,8 @@ static {{c_ret_type}} {{cfunc_name}}(PyObject *op1, PyObject *op2, long intval,
}
{{else}}
{{if c_op == '*'}}
- (void)a; (void)b;
+ CYTHON_UNUSED_VAR(a);
+ CYTHON_UNUSED_VAR(b);
#ifdef HAVE_LONG_LONG
ll{{ival}} = {{ival}};
goto long_long;
@@ -1464,8 +1465,8 @@ def zerodiv_check(operand, _is_mod=op == 'Remainder', _needs_check=(order == 'CO
static {{c_ret_type}} {{cfunc_name}}(PyObject *op1, PyObject *op2, double floatval, int inplace, int zerodivision_check) {
const double {{'a' if order == 'CObj' else 'b'}} = floatval;
double {{fval}}{{if op not in ('Eq', 'Ne')}}, result{{endif}};
- // Prevent "unused" warnings.
- (void)inplace; (void)zerodivision_check;
+ CYTHON_UNUSED_VAR(inplace);
+ CYTHON_UNUSED_VAR(zerodivision_check);
{{if op in ('Eq', 'Ne')}}
if (op1 == op2) {
diff --git a/Cython/Utility/TypeConversion.c b/Cython/Utility/TypeConversion.c
index cf190f401..a4befa79f 100644
--- a/Cython/Utility/TypeConversion.c
+++ b/Cython/Utility/TypeConversion.c
@@ -519,7 +519,7 @@ no_error:
// GCC diagnostic pragmas were introduced in GCC 4.6
// Used to silence conversion warnings that are ok but cannot be avoided.
-#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
+#if !defined(__INTEL_COMPILER) && defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
#define __Pyx_HAS_GCC_DIAGNOSTIC
#endif
diff --git a/Cython/Utils.py b/Cython/Utils.py
index d6a49f973..fa4801731 100644
--- a/Cython/Utils.py
+++ b/Cython/Utils.py
@@ -579,6 +579,11 @@ class OrderedSet(object):
self._list.append(e)
self._set.add(e)
+ def __bool__(self):
+ return bool(self._set)
+
+ __nonzero__ = __bool__
+
# Class decorator that adds a metaclass and recreates the class with it.
# Copied from 'six'.
@@ -626,3 +631,23 @@ def build_hex_version(version_string):
hexversion = (hexversion << 8) + digit
return '0x%08X' % hexversion
+
+
+def write_depfile(target, source, dependencies):
+ src_base_dir = os.path.dirname(source)
+ cwd = os.getcwd()
+ if not src_base_dir.endswith(os.sep):
+ src_base_dir += os.sep
+ # paths below the base_dir are relative, otherwise absolute
+ paths = []
+ for fname in dependencies:
+ if fname.startswith(src_base_dir):
+ paths.append(os.path.relpath(fname, cwd))
+ else:
+ paths.append(os.path.abspath(fname))
+
+ depline = os.path.relpath(target, cwd) + ": \\\n "
+ depline += " \\\n ".join(paths) + "\n"
+
+ with open(target+'.dep', 'w') as outfile:
+ outfile.write(depline)
diff --git a/Makefile b/Makefile
index 280f30a3a..ddc9e3cc0 100644
--- a/Makefile
+++ b/Makefile
@@ -8,13 +8,15 @@ PARALLEL?=$(shell ${PYTHON} -c 'import sys; print("-j5" if sys.version_info >= (
MANYLINUX_CFLAGS=-O3 -g0 -mtune=generic -pipe -fPIC
MANYLINUX_LDFLAGS=
MANYLINUX_IMAGES= \
- manylinux1_x86_64 \
- manylinux1_i686 \
+ manylinux2014_x86_64 \
+ manylinux2014_i686 \
musllinux_1_1_x86_64 \
musllinux_1_1_aarch64 \
manylinux_2_24_x86_64 \
manylinux_2_24_i686 \
manylinux_2_24_aarch64 \
+ manylinux_2_28_x86_64 \
+ manylinux_2_28_aarch64 \
# manylinux_2_24_ppc64le \
# manylinux_2_24_s390x
diff --git a/README.rst b/README.rst
index 0f56f5661..27d7d8150 100644
--- a/README.rst
+++ b/README.rst
@@ -102,7 +102,9 @@ Similar projects that have a relevance today include:
* Pros: highly language compliant, reasonable performance gains,
support for static application linking (similar to
- `cython_freeze <https://github.com/cython/cython/blob/master/bin/cython_freeze>`_)
+ `cython_freeze <https://github.com/cython/cython/blob/master/bin/cython_freeze>`_
+ but with the ability to bundle library dependencies into a self-contained
+ executable)
* Cons: no support for low-level optimisations and typing
In comparison to the above, Cython provides
diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh
index 818bf68f6..0fde602fd 100644
--- a/Tools/ci-run.sh
+++ b/Tools/ci-run.sh
@@ -68,12 +68,14 @@ if [[ $PYTHON_VERSION == "2.7"* ]]; then
elif [[ $PYTHON_VERSION == "3."[45]* ]]; then
python -m pip install wheel || exit 1
python -m pip install -r test-requirements-34.txt || exit 1
+elif [[ $PYTHON_VERSION == "pypy-2.7" ]]; then
+ pip install wheel || exit 1
+ pip install -r test-requirements-pypy27.txt || exit 1
else
python -m pip install -U pip "setuptools<60" wheel || exit 1
if [[ $PYTHON_VERSION != *"-dev" || $COVERAGE == "1" ]]; then
python -m pip install -r test-requirements.txt || exit 1
-
if [[ $PYTHON_VERSION != "pypy"* && $PYTHON_VERSION != "3."[1]* ]]; then
python -m pip install -r test-requirements-cpython.txt || exit 1
elif [[ $PYTHON_VERSION == "pypy-2.7" ]]; then
@@ -110,11 +112,23 @@ export PATH="/usr/lib/ccache:$PATH"
# Most modern compilers allow the last conflicting option
# to override the previous ones, so '-O0 -O3' == '-O3'
# This is true for the latest msvc, gcc and clang
-CFLAGS="-O0 -ggdb -Wall -Wextra"
+if [[ $OSTYPE == "msys" ]]; then # for MSVC cl
+ # /wd disables warnings
+ # 4711 warns that function `x` was selected for automatic inline expansion
+ # 4127 warns that a conditional expression is constant, should be fixed here https://github.com/cython/cython/pull/4317
+ # (off by default) 5045 warns that the compiler will insert Spectre mitigations for memory load if the /Qspectre switch is specified
+ # (off by default) 4820 warns about the code in Python\3.9.6\x64\include ...
+ CFLAGS="-Od /Z7 /MP /W4 /wd4711 /wd4127 /wd5045 /wd4820"
+else
+ CFLAGS="-O0 -ggdb -Wall -Wextra"
+fi
if [[ $NO_CYTHON_COMPILE != "1" && $PYTHON_VERSION != "pypy"* ]]; then
BUILD_CFLAGS="$CFLAGS -O2"
+ if [[ $CYTHON_COMPILE_ALL == "1" && $OSTYPE != "msys" ]]; then
+ BUILD_CFLAGS="$CFLAGS -O3 -g0 -mtune=generic" # make wheel sizes comparable to standard wheel build
+ fi
if [[ $PYTHON_SYS_VERSION == "2"* ]]; then
BUILD_CFLAGS="$BUILD_CFLAGS -fno-strict-aliasing"
fi
@@ -134,16 +148,19 @@ if [[ $NO_CYTHON_COMPILE != "1" && $PYTHON_VERSION != "pypy"* ]]; then
# COVERAGE can be either "" (empty or not set) or "1" (when we set it)
# STACKLESS can be either "" (empty or not set) or "true" (when we set it)
- # CYTHON_COMPILE_ALL can be either "" (empty or not set) or "1" (when we set it)
if [[ $COVERAGE != "1" && $STACKLESS != "true" && $BACKEND != *"cpp"* &&
- $CYTHON_COMPILE_ALL != "1" && $LIMITED_API == "" && $EXTRA_CFLAGS == "" ]]; then
+ $LIMITED_API == "" && $EXTRA_CFLAGS == "" ]]; then
python setup.py bdist_wheel || exit 1
+ ls -l dist/ || true
fi
+
+ echo "Extension modules created during the build:"
+ find Cython -name "*.so" -ls | sort -k11
fi
if [[ $TEST_CODE_STYLE == "1" ]]; then
make -C docs html || exit 1
-elif [[ $PYTHON_VERSION != "pypy"* ]]; then
+elif [[ $PYTHON_VERSION != "pypy"* && $OSTYPE != "msys" ]]; then
# Run the debugger tests in python-dbg if available
# (but don't fail, because they currently do fail)
PYTHON_DBG=$(python -c 'import sys; print("%d.%d" % sys.version_info[:2])')
diff --git a/Tools/dataclass_test_data/test_dataclasses.py b/Tools/dataclass_test_data/test_dataclasses.py
new file mode 100644
index 000000000..e2eab6957
--- /dev/null
+++ b/Tools/dataclass_test_data/test_dataclasses.py
@@ -0,0 +1,4266 @@
+# Deliberately use "from dataclasses import *". Every name in __all__
+# is tested, so they all must be present. This is a way to catch
+# missing ones.
+
+from dataclasses import *
+
+import abc
+import pickle
+import inspect
+import builtins
+import types
+import weakref
+import unittest
+from unittest.mock import Mock
+from typing import ClassVar, Any, List, Union, Tuple, Dict, Generic, TypeVar, Optional, Protocol
+from typing import get_type_hints
+from collections import deque, OrderedDict, namedtuple
+from functools import total_ordering
+
+import typing # Needed for the string "typing.ClassVar[int]" to work as an annotation.
+import dataclasses # Needed for the string "dataclasses.InitVar[int]" to work as an annotation.
+
+# Just any custom exception we can catch.
+class CustomError(Exception): pass
+
+class TestCase(unittest.TestCase):
+ def test_no_fields(self):
+ @dataclass
+ class C:
+ pass
+
+ o = C()
+ self.assertEqual(len(fields(C)), 0)
+
+ def test_no_fields_but_member_variable(self):
+ @dataclass
+ class C:
+ i = 0
+
+ o = C()
+ self.assertEqual(len(fields(C)), 0)
+
+ def test_one_field_no_default(self):
+ @dataclass
+ class C:
+ x: int
+
+ o = C(42)
+ self.assertEqual(o.x, 42)
+
+ def test_field_default_default_factory_error(self):
+ msg = "cannot specify both default and default_factory"
+ with self.assertRaisesRegex(ValueError, msg):
+ @dataclass
+ class C:
+ x: int = field(default=1, default_factory=int)
+
+ def test_field_repr(self):
+ int_field = field(default=1, init=True, repr=False)
+ int_field.name = "id"
+ repr_output = repr(int_field)
+ expected_output = "Field(name='id',type=None," \
+ f"default=1,default_factory={MISSING!r}," \
+ "init=True,repr=False,hash=None," \
+ "compare=True,metadata=mappingproxy({})," \
+ f"kw_only={MISSING!r}," \
+ "_field_type=None)"
+
+ self.assertEqual(repr_output, expected_output)
+
+ def test_named_init_params(self):
+ @dataclass
+ class C:
+ x: int
+
+ o = C(x=32)
+ self.assertEqual(o.x, 32)
+
+ def test_two_fields_one_default(self):
+ @dataclass
+ class C:
+ x: int
+ y: int = 0
+
+ o = C(3)
+ self.assertEqual((o.x, o.y), (3, 0))
+
+ # Non-defaults following defaults.
+ with self.assertRaisesRegex(TypeError,
+ "non-default argument 'y' follows "
+ "default argument"):
+ @dataclass
+ class C:
+ x: int = 0
+ y: int
+
+ # A derived class adds a non-default field after a default one.
+ with self.assertRaisesRegex(TypeError,
+ "non-default argument 'y' follows "
+ "default argument"):
+ @dataclass
+ class B:
+ x: int = 0
+
+ @dataclass
+ class C(B):
+ y: int
+
+ # Override a base class field and add a default to
+ # a field which didn't use to have a default.
+ with self.assertRaisesRegex(TypeError,
+ "non-default argument 'y' follows "
+ "default argument"):
+ @dataclass
+ class B:
+ x: int
+ y: int
+
+ @dataclass
+ class C(B):
+ x: int = 0
+
+ def test_overwrite_hash(self):
+ # Test that declaring this class isn't an error. It should
+ # use the user-provided __hash__.
+ @dataclass(frozen=True)
+ class C:
+ x: int
+ def __hash__(self):
+ return 301
+ self.assertEqual(hash(C(100)), 301)
+
+ # Test that declaring this class isn't an error. It should
+ # use the generated __hash__.
+ @dataclass(frozen=True)
+ class C:
+ x: int
+ def __eq__(self, other):
+ return False
+ self.assertEqual(hash(C(100)), hash((100,)))
+
+ # But this one should generate an exception, because with
+ # unsafe_hash=True, it's an error to have a __hash__ defined.
+ with self.assertRaisesRegex(TypeError,
+ 'Cannot overwrite attribute __hash__'):
+ @dataclass(unsafe_hash=True)
+ class C:
+ def __hash__(self):
+ pass
+
+ # Creating this class should not generate an exception,
+ # because even though __hash__ exists before @dataclass is
+ # called, (due to __eq__ being defined), since it's None
+ # that's okay.
+ @dataclass(unsafe_hash=True)
+ class C:
+ x: int
+ def __eq__(self):
+ pass
+ # The generated hash function works as we'd expect.
+ self.assertEqual(hash(C(10)), hash((10,)))
+
+ # Creating this class should generate an exception, because
+ # __hash__ exists and is not None, which it would be if it
+ # had been auto-generated due to __eq__ being defined.
+ with self.assertRaisesRegex(TypeError,
+ 'Cannot overwrite attribute __hash__'):
+ @dataclass(unsafe_hash=True)
+ class C:
+ x: int
+ def __eq__(self):
+ pass
+ def __hash__(self):
+ pass
+
+ def test_overwrite_fields_in_derived_class(self):
+ # Note that x from C1 replaces x in Base, but the order remains
+ # the same as defined in Base.
+ @dataclass
+ class Base:
+ x: Any = 15.0
+ y: int = 0
+
+ @dataclass
+ class C1(Base):
+ z: int = 10
+ x: int = 15
+
+ o = Base()
+ self.assertEqual(repr(o), 'TestCase.test_overwrite_fields_in_derived_class.<locals>.Base(x=15.0, y=0)')
+
+ o = C1()
+ self.assertEqual(repr(o), 'TestCase.test_overwrite_fields_in_derived_class.<locals>.C1(x=15, y=0, z=10)')
+
+ o = C1(x=5)
+ self.assertEqual(repr(o), 'TestCase.test_overwrite_fields_in_derived_class.<locals>.C1(x=5, y=0, z=10)')
+
+ def test_field_named_self(self):
+ @dataclass
+ class C:
+ self: str
+ c=C('foo')
+ self.assertEqual(c.self, 'foo')
+
+ # Make sure the first parameter is not named 'self'.
+ sig = inspect.signature(C.__init__)
+ first = next(iter(sig.parameters))
+ self.assertNotEqual('self', first)
+
+ # But we do use 'self' if no field named self.
+ @dataclass
+ class C:
+ selfx: str
+
+ # Make sure the first parameter is named 'self'.
+ sig = inspect.signature(C.__init__)
+ first = next(iter(sig.parameters))
+ self.assertEqual('self', first)
+
+ def test_field_named_object(self):
+ @dataclass
+ class C:
+ object: str
+ c = C('foo')
+ self.assertEqual(c.object, 'foo')
+
+ def test_field_named_object_frozen(self):
+ @dataclass(frozen=True)
+ class C:
+ object: str
+ c = C('foo')
+ self.assertEqual(c.object, 'foo')
+
+ def test_field_named_like_builtin(self):
+ # Attribute names can shadow built-in names
+ # since code generation is used.
+ # Ensure that this is not happening.
+ exclusions = {'None', 'True', 'False'}
+ builtins_names = sorted(
+ b for b in builtins.__dict__.keys()
+ if not b.startswith('__') and b not in exclusions
+ )
+ attributes = [(name, str) for name in builtins_names]
+ C = make_dataclass('C', attributes)
+
+ c = C(*[name for name in builtins_names])
+
+ for name in builtins_names:
+ self.assertEqual(getattr(c, name), name)
+
+ def test_field_named_like_builtin_frozen(self):
+ # Attribute names can shadow built-in names
+ # since code generation is used.
+ # Ensure that this is not happening
+ # for frozen data classes.
+ exclusions = {'None', 'True', 'False'}
+ builtins_names = sorted(
+ b for b in builtins.__dict__.keys()
+ if not b.startswith('__') and b not in exclusions
+ )
+ attributes = [(name, str) for name in builtins_names]
+ C = make_dataclass('C', attributes, frozen=True)
+
+ c = C(*[name for name in builtins_names])
+
+ for name in builtins_names:
+ self.assertEqual(getattr(c, name), name)
+
+ def test_0_field_compare(self):
+ # Ensure that order=False is the default.
+ @dataclass
+ class C0:
+ pass
+
+ @dataclass(order=False)
+ class C1:
+ pass
+
+ for cls in [C0, C1]:
+ with self.subTest(cls=cls):
+ self.assertEqual(cls(), cls())
+ for idx, fn in enumerate([lambda a, b: a < b,
+ lambda a, b: a <= b,
+ lambda a, b: a > b,
+ lambda a, b: a >= b]):
+ with self.subTest(idx=idx):
+ with self.assertRaisesRegex(TypeError,
+ f"not supported between instances of '{cls.__name__}' and '{cls.__name__}'"):
+ fn(cls(), cls())
+
+ @dataclass(order=True)
+ class C:
+ pass
+ self.assertLessEqual(C(), C())
+ self.assertGreaterEqual(C(), C())
+
+ def test_1_field_compare(self):
+ # Ensure that order=False is the default.
+ @dataclass
+ class C0:
+ x: int
+
+ @dataclass(order=False)
+ class C1:
+ x: int
+
+ for cls in [C0, C1]:
+ with self.subTest(cls=cls):
+ self.assertEqual(cls(1), cls(1))
+ self.assertNotEqual(cls(0), cls(1))
+ for idx, fn in enumerate([lambda a, b: a < b,
+ lambda a, b: a <= b,
+ lambda a, b: a > b,
+ lambda a, b: a >= b]):
+ with self.subTest(idx=idx):
+ with self.assertRaisesRegex(TypeError,
+ f"not supported between instances of '{cls.__name__}' and '{cls.__name__}'"):
+ fn(cls(0), cls(0))
+
+ @dataclass(order=True)
+ class C:
+ x: int
+ self.assertLess(C(0), C(1))
+ self.assertLessEqual(C(0), C(1))
+ self.assertLessEqual(C(1), C(1))
+ self.assertGreater(C(1), C(0))
+ self.assertGreaterEqual(C(1), C(0))
+ self.assertGreaterEqual(C(1), C(1))
+
+ def test_simple_compare(self):
+ # Ensure that order=False is the default.
+ @dataclass
+ class C0:
+ x: int
+ y: int
+
+ @dataclass(order=False)
+ class C1:
+ x: int
+ y: int
+
+ for cls in [C0, C1]:
+ with self.subTest(cls=cls):
+ self.assertEqual(cls(0, 0), cls(0, 0))
+ self.assertEqual(cls(1, 2), cls(1, 2))
+ self.assertNotEqual(cls(1, 0), cls(0, 0))
+ self.assertNotEqual(cls(1, 0), cls(1, 1))
+ for idx, fn in enumerate([lambda a, b: a < b,
+ lambda a, b: a <= b,
+ lambda a, b: a > b,
+ lambda a, b: a >= b]):
+ with self.subTest(idx=idx):
+ with self.assertRaisesRegex(TypeError,
+ f"not supported between instances of '{cls.__name__}' and '{cls.__name__}'"):
+ fn(cls(0, 0), cls(0, 0))
+
+ @dataclass(order=True)
+ class C:
+ x: int
+ y: int
+
+ for idx, fn in enumerate([lambda a, b: a == b,
+ lambda a, b: a <= b,
+ lambda a, b: a >= b]):
+ with self.subTest(idx=idx):
+ self.assertTrue(fn(C(0, 0), C(0, 0)))
+
+ for idx, fn in enumerate([lambda a, b: a < b,
+ lambda a, b: a <= b,
+ lambda a, b: a != b]):
+ with self.subTest(idx=idx):
+ self.assertTrue(fn(C(0, 0), C(0, 1)))
+ self.assertTrue(fn(C(0, 1), C(1, 0)))
+ self.assertTrue(fn(C(1, 0), C(1, 1)))
+
+ for idx, fn in enumerate([lambda a, b: a > b,
+ lambda a, b: a >= b,
+ lambda a, b: a != b]):
+ with self.subTest(idx=idx):
+ self.assertTrue(fn(C(0, 1), C(0, 0)))
+ self.assertTrue(fn(C(1, 0), C(0, 1)))
+ self.assertTrue(fn(C(1, 1), C(1, 0)))
+
+ def test_compare_subclasses(self):
+ # Comparisons fail for subclasses, even if no fields
+ # are added.
+ @dataclass
+ class B:
+ i: int
+
+ @dataclass
+ class C(B):
+ pass
+
+ for idx, (fn, expected) in enumerate([(lambda a, b: a == b, False),
+ (lambda a, b: a != b, True)]):
+ with self.subTest(idx=idx):
+ self.assertEqual(fn(B(0), C(0)), expected)
+
+ for idx, fn in enumerate([lambda a, b: a < b,
+ lambda a, b: a <= b,
+ lambda a, b: a > b,
+ lambda a, b: a >= b]):
+ with self.subTest(idx=idx):
+ with self.assertRaisesRegex(TypeError,
+ "not supported between instances of 'B' and 'C'"):
+ fn(B(0), C(0))
+
+ def test_eq_order(self):
+ # Test combining eq and order.
+ for (eq, order, result ) in [
+ (False, False, 'neither'),
+ (False, True, 'exception'),
+ (True, False, 'eq_only'),
+ (True, True, 'both'),
+ ]:
+ with self.subTest(eq=eq, order=order):
+ if result == 'exception':
+ with self.assertRaisesRegex(ValueError, 'eq must be true if order is true'):
+ @dataclass(eq=eq, order=order)
+ class C:
+ pass
+ else:
+ @dataclass(eq=eq, order=order)
+ class C:
+ pass
+
+ if result == 'neither':
+ self.assertNotIn('__eq__', C.__dict__)
+ self.assertNotIn('__lt__', C.__dict__)
+ self.assertNotIn('__le__', C.__dict__)
+ self.assertNotIn('__gt__', C.__dict__)
+ self.assertNotIn('__ge__', C.__dict__)
+ elif result == 'both':
+ self.assertIn('__eq__', C.__dict__)
+ self.assertIn('__lt__', C.__dict__)
+ self.assertIn('__le__', C.__dict__)
+ self.assertIn('__gt__', C.__dict__)
+ self.assertIn('__ge__', C.__dict__)
+ elif result == 'eq_only':
+ self.assertIn('__eq__', C.__dict__)
+ self.assertNotIn('__lt__', C.__dict__)
+ self.assertNotIn('__le__', C.__dict__)
+ self.assertNotIn('__gt__', C.__dict__)
+ self.assertNotIn('__ge__', C.__dict__)
+ else:
+ assert False, f'unknown result {result!r}'
+
+ def test_field_no_default(self):
+ @dataclass
+ class C:
+ x: int = field()
+
+ self.assertEqual(C(5).x, 5)
+
+ with self.assertRaisesRegex(TypeError,
+ r"__init__\(\) missing 1 required "
+ "positional argument: 'x'"):
+ C()
+
+ def test_field_default(self):
+ default = object()
+ @dataclass
+ class C:
+ x: object = field(default=default)
+
+ self.assertIs(C.x, default)
+ c = C(10)
+ self.assertEqual(c.x, 10)
+
+ # If we delete the instance attribute, we should then see the
+ # class attribute.
+ del c.x
+ self.assertIs(c.x, default)
+
+ self.assertIs(C().x, default)
+
+ def test_not_in_repr(self):
+ @dataclass
+ class C:
+ x: int = field(repr=False)
+ with self.assertRaises(TypeError):
+ C()
+ c = C(10)
+ self.assertEqual(repr(c), 'TestCase.test_not_in_repr.<locals>.C()')
+
+ @dataclass
+ class C:
+ x: int = field(repr=False)
+ y: int
+ c = C(10, 20)
+ self.assertEqual(repr(c), 'TestCase.test_not_in_repr.<locals>.C(y=20)')
+
+ def test_not_in_compare(self):
+ @dataclass
+ class C:
+ x: int = 0
+ y: int = field(compare=False, default=4)
+
+ self.assertEqual(C(), C(0, 20))
+ self.assertEqual(C(1, 10), C(1, 20))
+ self.assertNotEqual(C(3), C(4, 10))
+ self.assertNotEqual(C(3, 10), C(4, 10))
+
+ def test_no_unhashable_default(self):
+ # See bpo-44674.
+ class Unhashable:
+ __hash__ = None
+
+ unhashable_re = 'mutable default .* for field a is not allowed'
+ with self.assertRaisesRegex(ValueError, unhashable_re):
+ @dataclass
+ class A:
+ a: dict = {}
+
+ with self.assertRaisesRegex(ValueError, unhashable_re):
+ @dataclass
+ class A:
+ a: Any = Unhashable()
+
+ # Make sure that the machinery looking for hashability is using the
+ # class's __hash__, not the instance's __hash__.
+ with self.assertRaisesRegex(ValueError, unhashable_re):
+ unhashable = Unhashable()
+ # This shouldn't make the variable hashable.
+ unhashable.__hash__ = lambda: 0
+ @dataclass
+ class A:
+ a: Any = unhashable
+
+ def test_hash_field_rules(self):
+ # Test all 6 cases of:
+ # hash=True/False/None
+ # compare=True/False
+ for (hash_, compare, result ) in [
+ (True, False, 'field' ),
+ (True, True, 'field' ),
+ (False, False, 'absent'),
+ (False, True, 'absent'),
+ (None, False, 'absent'),
+ (None, True, 'field' ),
+ ]:
+ with self.subTest(hash=hash_, compare=compare):
+ @dataclass(unsafe_hash=True)
+ class C:
+ x: int = field(compare=compare, hash=hash_, default=5)
+
+ if result == 'field':
+ # __hash__ contains the field.
+ self.assertEqual(hash(C(5)), hash((5,)))
+ elif result == 'absent':
+ # The field is not present in the hash.
+ self.assertEqual(hash(C(5)), hash(()))
+ else:
+ assert False, f'unknown result {result!r}'
+
+ def test_init_false_no_default(self):
+ # If init=False and no default value, then the field won't be
+ # present in the instance.
+ @dataclass
+ class C:
+ x: int = field(init=False)
+
+ self.assertNotIn('x', C().__dict__)
+
+ @dataclass
+ class C:
+ x: int
+ y: int = 0
+ z: int = field(init=False)
+ t: int = 10
+
+ self.assertNotIn('z', C(0).__dict__)
+ self.assertEqual(vars(C(5)), {'t': 10, 'x': 5, 'y': 0})
+
+ def test_class_marker(self):
+ @dataclass
+ class C:
+ x: int
+ y: str = field(init=False, default=None)
+ z: str = field(repr=False)
+
+ the_fields = fields(C)
+ # the_fields is a tuple of 3 items, each value
+ # is in __annotations__.
+ self.assertIsInstance(the_fields, tuple)
+ for f in the_fields:
+ self.assertIs(type(f), Field)
+ self.assertIn(f.name, C.__annotations__)
+
+ self.assertEqual(len(the_fields), 3)
+
+ self.assertEqual(the_fields[0].name, 'x')
+ self.assertEqual(the_fields[0].type, int)
+ self.assertFalse(hasattr(C, 'x'))
+ self.assertTrue (the_fields[0].init)
+ self.assertTrue (the_fields[0].repr)
+ self.assertEqual(the_fields[1].name, 'y')
+ self.assertEqual(the_fields[1].type, str)
+ self.assertIsNone(getattr(C, 'y'))
+ self.assertFalse(the_fields[1].init)
+ self.assertTrue (the_fields[1].repr)
+ self.assertEqual(the_fields[2].name, 'z')
+ self.assertEqual(the_fields[2].type, str)
+ self.assertFalse(hasattr(C, 'z'))
+ self.assertTrue (the_fields[2].init)
+ self.assertFalse(the_fields[2].repr)
+
+ def test_field_order(self):
+ @dataclass
+ class B:
+ a: str = 'B:a'
+ b: str = 'B:b'
+ c: str = 'B:c'
+
+ @dataclass
+ class C(B):
+ b: str = 'C:b'
+
+ self.assertEqual([(f.name, f.default) for f in fields(C)],
+ [('a', 'B:a'),
+ ('b', 'C:b'),
+ ('c', 'B:c')])
+
+ @dataclass
+ class D(B):
+ c: str = 'D:c'
+
+ self.assertEqual([(f.name, f.default) for f in fields(D)],
+ [('a', 'B:a'),
+ ('b', 'B:b'),
+ ('c', 'D:c')])
+
+ @dataclass
+ class E(D):
+ a: str = 'E:a'
+ d: str = 'E:d'
+
+ self.assertEqual([(f.name, f.default) for f in fields(E)],
+ [('a', 'E:a'),
+ ('b', 'B:b'),
+ ('c', 'D:c'),
+ ('d', 'E:d')])
+
+ def test_class_attrs(self):
+ # We only have a class attribute if a default value is
+ # specified, either directly or via a field with a default.
+ default = object()
+ @dataclass
+ class C:
+ x: int
+ y: int = field(repr=False)
+ z: object = default
+ t: int = field(default=100)
+
+ self.assertFalse(hasattr(C, 'x'))
+ self.assertFalse(hasattr(C, 'y'))
+ self.assertIs (C.z, default)
+ self.assertEqual(C.t, 100)
+
+ def test_disallowed_mutable_defaults(self):
+ # For the known types, don't allow mutable default values.
+ for typ, empty, non_empty in [(list, [], [1]),
+ (dict, {}, {0:1}),
+ (set, set(), set([1])),
+ ]:
+ with self.subTest(typ=typ):
+ # Can't use a zero-length value.
+ with self.assertRaisesRegex(ValueError,
+ f'mutable default {typ} for field '
+ 'x is not allowed'):
+ @dataclass
+ class Point:
+ x: typ = empty
+
+
+ # Nor a non-zero-length value
+ with self.assertRaisesRegex(ValueError,
+ f'mutable default {typ} for field '
+ 'y is not allowed'):
+ @dataclass
+ class Point:
+ y: typ = non_empty
+
+ # Check subtypes also fail.
+ class Subclass(typ): pass
+
+ with self.assertRaisesRegex(ValueError,
+ f"mutable default .*Subclass'>"
+ ' for field z is not allowed'
+ ):
+ @dataclass
+ class Point:
+ z: typ = Subclass()
+
+ # Because this is a ClassVar, it can be mutable.
+ @dataclass
+ class C:
+ z: ClassVar[typ] = typ()
+
+ # Because this is a ClassVar, it can be mutable.
+ @dataclass
+ class C:
+ x: ClassVar[typ] = Subclass()
+
+ def test_deliberately_mutable_defaults(self):
+ # If a mutable default isn't in the known list of
+ # (list, dict, set), then it's okay.
+ class Mutable:
+ def __init__(self):
+ self.l = []
+
+ @dataclass
+ class C:
+ x: Mutable
+
+ # These 2 instances will share this value of x.
+ lst = Mutable()
+ o1 = C(lst)
+ o2 = C(lst)
+ self.assertEqual(o1, o2)
+ o1.x.l.extend([1, 2])
+ self.assertEqual(o1, o2)
+ self.assertEqual(o1.x.l, [1, 2])
+ self.assertIs(o1.x, o2.x)
+
+ def test_no_options(self):
+ # Call with dataclass().
+ @dataclass()
+ class C:
+ x: int
+
+ self.assertEqual(C(42).x, 42)
+
+ def test_not_tuple(self):
+ # Make sure we can't be compared to a tuple.
+ @dataclass
+ class Point:
+ x: int
+ y: int
+ self.assertNotEqual(Point(1, 2), (1, 2))
+
+ # And that we can't compare to another unrelated dataclass.
+ @dataclass
+ class C:
+ x: int
+ y: int
+ self.assertNotEqual(Point(1, 3), C(1, 3))
+
+ def test_not_other_dataclass(self):
+ # Test that some of the problems with namedtuple don't happen
+ # here.
+ @dataclass
+ class Point3D:
+ x: int
+ y: int
+ z: int
+
+ @dataclass
+ class Date:
+ year: int
+ month: int
+ day: int
+
+ self.assertNotEqual(Point3D(2017, 6, 3), Date(2017, 6, 3))
+ self.assertNotEqual(Point3D(1, 2, 3), (1, 2, 3))
+
+ # Make sure we can't unpack.
+ with self.assertRaisesRegex(TypeError, 'unpack'):
+ x, y, z = Point3D(4, 5, 6)
+
+ # Make sure another class with the same field names isn't
+ # equal.
+ @dataclass
+ class Point3Dv1:
+ x: int = 0
+ y: int = 0
+ z: int = 0
+ self.assertNotEqual(Point3D(0, 0, 0), Point3Dv1())
+
+ def test_function_annotations(self):
+ # Some dummy class and instance to use as a default.
+ class F:
+ pass
+ f = F()
+
+ def validate_class(cls):
+ # First, check __annotations__, even though they're not
+ # function annotations.
+ self.assertEqual(cls.__annotations__['i'], int)
+ self.assertEqual(cls.__annotations__['j'], str)
+ self.assertEqual(cls.__annotations__['k'], F)
+ self.assertEqual(cls.__annotations__['l'], float)
+ self.assertEqual(cls.__annotations__['z'], complex)
+
+ # Verify __init__.
+
+ signature = inspect.signature(cls.__init__)
+ # Check the return type, should be None.
+ self.assertIs(signature.return_annotation, None)
+
+ # Check each parameter.
+ params = iter(signature.parameters.values())
+ param = next(params)
+ # This is testing an internal name, and probably shouldn't be tested.
+ self.assertEqual(param.name, 'self')
+ param = next(params)
+ self.assertEqual(param.name, 'i')
+ self.assertIs (param.annotation, int)
+ self.assertEqual(param.default, inspect.Parameter.empty)
+ self.assertEqual(param.kind, inspect.Parameter.POSITIONAL_OR_KEYWORD)
+ param = next(params)
+ self.assertEqual(param.name, 'j')
+ self.assertIs (param.annotation, str)
+ self.assertEqual(param.default, inspect.Parameter.empty)
+ self.assertEqual(param.kind, inspect.Parameter.POSITIONAL_OR_KEYWORD)
+ param = next(params)
+ self.assertEqual(param.name, 'k')
+ self.assertIs (param.annotation, F)
+ # Don't test for the default, since it's set to MISSING.
+ self.assertEqual(param.kind, inspect.Parameter.POSITIONAL_OR_KEYWORD)
+ param = next(params)
+ self.assertEqual(param.name, 'l')
+ self.assertIs (param.annotation, float)
+ # Don't test for the default, since it's set to MISSING.
+ self.assertEqual(param.kind, inspect.Parameter.POSITIONAL_OR_KEYWORD)
+ self.assertRaises(StopIteration, next, params)
+
+
+ @dataclass
+ class C:
+ i: int
+ j: str
+ k: F = f
+ l: float=field(default=None)
+ z: complex=field(default=3+4j, init=False)
+
+ validate_class(C)
+
+ # Now repeat with __hash__.
+ @dataclass(frozen=True, unsafe_hash=True)
+ class C:
+ i: int
+ j: str
+ k: F = f
+ l: float=field(default=None)
+ z: complex=field(default=3+4j, init=False)
+
+ validate_class(C)
+
+ def test_missing_default(self):
+ # Test that MISSING works the same as a default not being
+ # specified.
+ @dataclass
+ class C:
+ x: int=field(default=MISSING)
+ with self.assertRaisesRegex(TypeError,
+ r'__init__\(\) missing 1 required '
+ 'positional argument'):
+ C()
+ self.assertNotIn('x', C.__dict__)
+
+ @dataclass
+ class D:
+ x: int
+ with self.assertRaisesRegex(TypeError,
+ r'__init__\(\) missing 1 required '
+ 'positional argument'):
+ D()
+ self.assertNotIn('x', D.__dict__)
+
+ def test_missing_default_factory(self):
+ # Test that MISSING works the same as a default factory not
+ # being specified (which is really the same as a default not
+ # being specified, too).
+ @dataclass
+ class C:
+ x: int=field(default_factory=MISSING)
+ with self.assertRaisesRegex(TypeError,
+ r'__init__\(\) missing 1 required '
+ 'positional argument'):
+ C()
+ self.assertNotIn('x', C.__dict__)
+
+ @dataclass
+ class D:
+ x: int=field(default=MISSING, default_factory=MISSING)
+ with self.assertRaisesRegex(TypeError,
+ r'__init__\(\) missing 1 required '
+ 'positional argument'):
+ D()
+ self.assertNotIn('x', D.__dict__)
+
+ def test_missing_repr(self):
+ self.assertIn('MISSING_TYPE object', repr(MISSING))
+
+ def test_dont_include_other_annotations(self):
+ @dataclass
+ class C:
+ i: int
+ def foo(self) -> int:
+ return 4
+ @property
+ def bar(self) -> int:
+ return 5
+ self.assertEqual(list(C.__annotations__), ['i'])
+ self.assertEqual(C(10).foo(), 4)
+ self.assertEqual(C(10).bar, 5)
+ self.assertEqual(C(10).i, 10)
+
+ def test_post_init(self):
+ # Just make sure it gets called
+ @dataclass
+ class C:
+ def __post_init__(self):
+ raise CustomError()
+ with self.assertRaises(CustomError):
+ C()
+
+ @dataclass
+ class C:
+ i: int = 10
+ def __post_init__(self):
+ if self.i == 10:
+ raise CustomError()
+ with self.assertRaises(CustomError):
+ C()
+ # post-init gets called, but doesn't raise. This is just
+ # checking that self is used correctly.
+ C(5)
+
+ # If there's not an __init__, then post-init won't get called.
+ @dataclass(init=False)
+ class C:
+ def __post_init__(self):
+ raise CustomError()
+ # Creating the class won't raise
+ C()
+
+ @dataclass
+ class C:
+ x: int = 0
+ def __post_init__(self):
+ self.x *= 2
+ self.assertEqual(C().x, 0)
+ self.assertEqual(C(2).x, 4)
+
+ # Make sure that if we're frozen, post-init can't set
+ # attributes.
+ @dataclass(frozen=True)
+ class C:
+ x: int = 0
+ def __post_init__(self):
+ self.x *= 2
+ with self.assertRaises(FrozenInstanceError):
+ C()
+
+ def test_post_init_super(self):
+ # Make sure super() post-init isn't called by default.
+ class B:
+ def __post_init__(self):
+ raise CustomError()
+
+ @dataclass
+ class C(B):
+ def __post_init__(self):
+ self.x = 5
+
+ self.assertEqual(C().x, 5)
+
+ # Now call super(), and it will raise.
+ @dataclass
+ class C(B):
+ def __post_init__(self):
+ super().__post_init__()
+
+ with self.assertRaises(CustomError):
+ C()
+
+ # Make sure post-init is called, even if not defined in our
+ # class.
+ @dataclass
+ class C(B):
+ pass
+
+ with self.assertRaises(CustomError):
+ C()
+
+ def test_post_init_staticmethod(self):
+ flag = False
+ @dataclass
+ class C:
+ x: int
+ y: int
+ @staticmethod
+ def __post_init__():
+ nonlocal flag
+ flag = True
+
+ self.assertFalse(flag)
+ c = C(3, 4)
+ self.assertEqual((c.x, c.y), (3, 4))
+ self.assertTrue(flag)
+
+ def test_post_init_classmethod(self):
+ @dataclass
+ class C:
+ flag = False
+ x: int
+ y: int
+ @classmethod
+ def __post_init__(cls):
+ cls.flag = True
+
+ self.assertFalse(C.flag)
+ c = C(3, 4)
+ self.assertEqual((c.x, c.y), (3, 4))
+ self.assertTrue(C.flag)
+
+ def test_post_init_not_auto_added(self):
+ # See bpo-46757, which had proposed always adding __post_init__. As
+ # Raymond Hettinger pointed out, that would be a breaking change. So,
+ # add a test to make sure that the current behavior doesn't change.
+
+ @dataclass
+ class A0:
+ pass
+
+ @dataclass
+ class B0:
+ b_called: bool = False
+ def __post_init__(self):
+ self.b_called = True
+
+ @dataclass
+ class C0(A0, B0):
+ c_called: bool = False
+ def __post_init__(self):
+ super().__post_init__()
+ self.c_called = True
+
+ # Since A0 has no __post_init__, and one wasn't automatically added
+ # (because that's the rule: it's never added by @dataclass, it's only
+ # the class author that can add it), then B0.__post_init__ is called.
+ # Verify that.
+ c = C0()
+ self.assertTrue(c.b_called)
+ self.assertTrue(c.c_called)
+
+ ######################################
+ # Now, the same thing, except A1 defines __post_init__.
+ @dataclass
+ class A1:
+ def __post_init__(self):
+ pass
+
+ @dataclass
+ class B1:
+ b_called: bool = False
+ def __post_init__(self):
+ self.b_called = True
+
+ @dataclass
+ class C1(A1, B1):
+ c_called: bool = False
+ def __post_init__(self):
+ super().__post_init__()
+ self.c_called = True
+
+ # This time, B1.__post_init__ isn't being called. This mimics what
+ # would happen if A1.__post_init__ had been automatically added,
+ # instead of manually added as we see here. This test isn't really
+ # needed, but I'm including it just to demonstrate the changed
+ # behavior when A1 does define __post_init__.
+ c = C1()
+ self.assertFalse(c.b_called)
+ self.assertTrue(c.c_called)
+
+ def test_class_var(self):
+ # Make sure ClassVars are ignored in __init__, __repr__, etc.
+ @dataclass
+ class C:
+ x: int
+ y: int = 10
+ z: ClassVar[int] = 1000
+ w: ClassVar[int] = 2000
+ t: ClassVar[int] = 3000
+ s: ClassVar = 4000
+
+ c = C(5)
+ self.assertEqual(repr(c), 'TestCase.test_class_var.<locals>.C(x=5, y=10)')
+ self.assertEqual(len(fields(C)), 2) # We have 2 fields.
+ self.assertEqual(len(C.__annotations__), 6) # And 4 ClassVars.
+ self.assertEqual(c.z, 1000)
+ self.assertEqual(c.w, 2000)
+ self.assertEqual(c.t, 3000)
+ self.assertEqual(c.s, 4000)
+ C.z += 1
+ self.assertEqual(c.z, 1001)
+ c = C(20)
+ self.assertEqual((c.x, c.y), (20, 10))
+ self.assertEqual(c.z, 1001)
+ self.assertEqual(c.w, 2000)
+ self.assertEqual(c.t, 3000)
+ self.assertEqual(c.s, 4000)
+
+ def test_class_var_no_default(self):
+ # If a ClassVar has no default value, it should not be set on the class.
+ @dataclass
+ class C:
+ x: ClassVar[int]
+
+ self.assertNotIn('x', C.__dict__)
+
+ def test_class_var_default_factory(self):
+ # It makes no sense for a ClassVar to have a default factory. When
+ # would it be called? Call it yourself, since it's class-wide.
+ with self.assertRaisesRegex(TypeError,
+ 'cannot have a default factory'):
+ @dataclass
+ class C:
+ x: ClassVar[int] = field(default_factory=int)
+
+ self.assertNotIn('x', C.__dict__)
+
+ def test_class_var_with_default(self):
+ # If a ClassVar has a default value, it should be set on the class.
+ @dataclass
+ class C:
+ x: ClassVar[int] = 10
+ self.assertEqual(C.x, 10)
+
+ @dataclass
+ class C:
+ x: ClassVar[int] = field(default=10)
+ self.assertEqual(C.x, 10)
+
+ def test_class_var_frozen(self):
+ # Make sure ClassVars work even if we're frozen.
+ @dataclass(frozen=True)
+ class C:
+ x: int
+ y: int = 10
+ z: ClassVar[int] = 1000
+ w: ClassVar[int] = 2000
+ t: ClassVar[int] = 3000
+
+ c = C(5)
+ self.assertEqual(repr(C(5)), 'TestCase.test_class_var_frozen.<locals>.C(x=5, y=10)')
+ self.assertEqual(len(fields(C)), 2) # We have 2 fields
+ self.assertEqual(len(C.__annotations__), 5) # And 3 ClassVars
+ self.assertEqual(c.z, 1000)
+ self.assertEqual(c.w, 2000)
+ self.assertEqual(c.t, 3000)
+ # We can still modify the ClassVar, it's only instances that are
+ # frozen.
+ C.z += 1
+ self.assertEqual(c.z, 1001)
+ c = C(20)
+ self.assertEqual((c.x, c.y), (20, 10))
+ self.assertEqual(c.z, 1001)
+ self.assertEqual(c.w, 2000)
+ self.assertEqual(c.t, 3000)
+
+ def test_init_var_no_default(self):
+ # If an InitVar has no default value, it should not be set on the class.
+ @dataclass
+ class C:
+ x: InitVar[int]
+
+ self.assertNotIn('x', C.__dict__)
+
+ def test_init_var_default_factory(self):
+ # It makes no sense for an InitVar to have a default factory. When
+ # would it be called? Call it yourself instead.
+ with self.assertRaisesRegex(TypeError,
+ 'cannot have a default factory'):
+ @dataclass
+ class C:
+ x: InitVar[int] = field(default_factory=int)
+
+ self.assertNotIn('x', C.__dict__)
+
+ def test_init_var_with_default(self):
+ # If an InitVar has a default value, it should be set on the class.
+ @dataclass
+ class C:
+ x: InitVar[int] = 10
+ self.assertEqual(C.x, 10)
+
+ @dataclass
+ class C:
+ x: InitVar[int] = field(default=10)
+ self.assertEqual(C.x, 10)
+
+ def test_init_var(self):
+ @dataclass
+ class C:
+ x: int = None
+ init_param: InitVar[int] = None
+
+ def __post_init__(self, init_param):
+ if self.x is None:
+ self.x = init_param*2
+
+ c = C(init_param=10)
+ self.assertEqual(c.x, 20)
+
+ def test_init_var_preserve_type(self):
+ self.assertEqual(InitVar[int].type, int)
+
+ # Make sure the repr is correct.
+ self.assertEqual(repr(InitVar[int]), 'dataclasses.InitVar[int]')
+ self.assertEqual(repr(InitVar[List[int]]),
+ 'dataclasses.InitVar[typing.List[int]]')
+ self.assertEqual(repr(InitVar[list[int]]),
+ 'dataclasses.InitVar[list[int]]')
+ self.assertEqual(repr(InitVar[int|str]),
+ 'dataclasses.InitVar[int | str]')
+
+ def test_init_var_inheritance(self):
+ # Note that this deliberately tests that a dataclass need not
+ # have a __post_init__ function if it has an InitVar field.
+ # It could just be used in a derived class, as shown here.
+ @dataclass
+ class Base:
+ x: int
+ init_base: InitVar[int]
+
+ # We can instantiate by passing the InitVar, even though
+ # it's not used.
+ b = Base(0, 10)
+ self.assertEqual(vars(b), {'x': 0})
+
+ @dataclass
+ class C(Base):
+ y: int
+ init_derived: InitVar[int]
+
+ def __post_init__(self, init_base, init_derived):
+ self.x = self.x + init_base
+ self.y = self.y + init_derived
+
+ c = C(10, 11, 50, 51)
+ self.assertEqual(vars(c), {'x': 21, 'y': 101})
+
+ def test_default_factory(self):
+ # Test a factory that returns a new list.
+ @dataclass
+ class C:
+ x: int
+ y: list = field(default_factory=list)
+
+ c0 = C(3)
+ c1 = C(3)
+ self.assertEqual(c0.x, 3)
+ self.assertEqual(c0.y, [])
+ self.assertEqual(c0, c1)
+ self.assertIsNot(c0.y, c1.y)
+ self.assertEqual(astuple(C(5, [1])), (5, [1]))
+
+ # Test a factory that returns a shared list.
+ l = []
+ @dataclass
+ class C:
+ x: int
+ y: list = field(default_factory=lambda: l)
+
+ c0 = C(3)
+ c1 = C(3)
+ self.assertEqual(c0.x, 3)
+ self.assertEqual(c0.y, [])
+ self.assertEqual(c0, c1)
+ self.assertIs(c0.y, c1.y)
+ self.assertEqual(astuple(C(5, [1])), (5, [1]))
+
+ # Test various other field flags.
+ # repr
+ @dataclass
+ class C:
+ x: list = field(default_factory=list, repr=False)
+ self.assertEqual(repr(C()), 'TestCase.test_default_factory.<locals>.C()')
+ self.assertEqual(C().x, [])
+
+ # hash
+ @dataclass(unsafe_hash=True)
+ class C:
+ x: list = field(default_factory=list, hash=False)
+ self.assertEqual(astuple(C()), ([],))
+ self.assertEqual(hash(C()), hash(()))
+
+ # init (see also test_default_factory_with_no_init)
+ @dataclass
+ class C:
+ x: list = field(default_factory=list, init=False)
+ self.assertEqual(astuple(C()), ([],))
+
+ # compare
+ @dataclass
+ class C:
+ x: list = field(default_factory=list, compare=False)
+ self.assertEqual(C(), C([1]))
+
+ def test_default_factory_with_no_init(self):
+ # We need a factory with a side effect.
+ factory = Mock()
+
+ @dataclass
+ class C:
+ x: list = field(default_factory=factory, init=False)
+
+ # Make sure the default factory is called for each new instance.
+ C().x
+ self.assertEqual(factory.call_count, 1)
+ C().x
+ self.assertEqual(factory.call_count, 2)
+
+ def test_default_factory_not_called_if_value_given(self):
+ # We need a factory that we can test if it's been called.
+ factory = Mock()
+
+ @dataclass
+ class C:
+ x: int = field(default_factory=factory)
+
+ # Make sure that if a field has a default factory function,
+ # it's not called if a value is specified.
+ C().x
+ self.assertEqual(factory.call_count, 1)
+ self.assertEqual(C(10).x, 10)
+ self.assertEqual(factory.call_count, 1)
+ C().x
+ self.assertEqual(factory.call_count, 2)
+
+ def test_default_factory_derived(self):
+ # See bpo-32896.
+ @dataclass
+ class Foo:
+ x: dict = field(default_factory=dict)
+
+ @dataclass
+ class Bar(Foo):
+ y: int = 1
+
+ self.assertEqual(Foo().x, {})
+ self.assertEqual(Bar().x, {})
+ self.assertEqual(Bar().y, 1)
+
+ @dataclass
+ class Baz(Foo):
+ pass
+ self.assertEqual(Baz().x, {})
+
+ def test_intermediate_non_dataclass(self):
+ # Test that an intermediate class that defines
+ # annotations does not define fields.
+
+ @dataclass
+ class A:
+ x: int
+
+ class B(A):
+ y: int
+
+ @dataclass
+ class C(B):
+ z: int
+
+ c = C(1, 3)
+ self.assertEqual((c.x, c.z), (1, 3))
+
+ # .y was not initialized.
+ with self.assertRaisesRegex(AttributeError,
+ 'object has no attribute'):
+ c.y
+
+ # And if we again derive a non-dataclass, no fields are added.
+ class D(C):
+ t: int
+ d = D(4, 5)
+ self.assertEqual((d.x, d.z), (4, 5))
+
+ def test_classvar_default_factory(self):
+ # It's an error for a ClassVar to have a factory function.
+ with self.assertRaisesRegex(TypeError,
+ 'cannot have a default factory'):
+ @dataclass
+ class C:
+ x: ClassVar[int] = field(default_factory=int)
+
+ def test_is_dataclass(self):
+ class NotDataClass:
+ pass
+
+ self.assertFalse(is_dataclass(0))
+ self.assertFalse(is_dataclass(int))
+ self.assertFalse(is_dataclass(NotDataClass))
+ self.assertFalse(is_dataclass(NotDataClass()))
+
+ @dataclass
+ class C:
+ x: int
+
+ @dataclass
+ class D:
+ d: C
+ e: int
+
+ c = C(10)
+ d = D(c, 4)
+
+ self.assertTrue(is_dataclass(C))
+ self.assertTrue(is_dataclass(c))
+ self.assertFalse(is_dataclass(c.x))
+ self.assertTrue(is_dataclass(d.d))
+ self.assertFalse(is_dataclass(d.e))
+
+ def test_is_dataclass_when_getattr_always_returns(self):
+ # See bpo-37868.
+ class A:
+ def __getattr__(self, key):
+ return 0
+ self.assertFalse(is_dataclass(A))
+ a = A()
+
+ # Also test for an instance attribute.
+ class B:
+ pass
+ b = B()
+ b.__dataclass_fields__ = []
+
+ for obj in a, b:
+ with self.subTest(obj=obj):
+ self.assertFalse(is_dataclass(obj))
+
+ # Indirect tests for _is_dataclass_instance().
+ with self.assertRaisesRegex(TypeError, 'should be called on dataclass instances'):
+ asdict(obj)
+ with self.assertRaisesRegex(TypeError, 'should be called on dataclass instances'):
+ astuple(obj)
+ with self.assertRaisesRegex(TypeError, 'should be called on dataclass instances'):
+ replace(obj, x=0)
+
+ def test_is_dataclass_genericalias(self):
+ @dataclass
+ class A(types.GenericAlias):
+ origin: type
+ args: type
+ self.assertTrue(is_dataclass(A))
+ a = A(list, int)
+ self.assertTrue(is_dataclass(type(a)))
+ self.assertTrue(is_dataclass(a))
+
+
+ def test_helper_fields_with_class_instance(self):
+ # Check that we can call fields() on either a class or instance,
+ # and get back the same thing.
+ @dataclass
+ class C:
+ x: int
+ y: float
+
+ self.assertEqual(fields(C), fields(C(0, 0.0)))
+
+ def test_helper_fields_exception(self):
+ # Check that TypeError is raised if not passed a dataclass or
+ # instance.
+ with self.assertRaisesRegex(TypeError, 'dataclass type or instance'):
+ fields(0)
+
+ class C: pass
+ with self.assertRaisesRegex(TypeError, 'dataclass type or instance'):
+ fields(C)
+ with self.assertRaisesRegex(TypeError, 'dataclass type or instance'):
+ fields(C())
+
+ def test_helper_asdict(self):
+ # Basic tests for asdict(), it should return a new dictionary.
+ @dataclass
+ class C:
+ x: int
+ y: int
+ c = C(1, 2)
+
+ self.assertEqual(asdict(c), {'x': 1, 'y': 2})
+ self.assertEqual(asdict(c), asdict(c))
+ self.assertIsNot(asdict(c), asdict(c))
+ c.x = 42
+ self.assertEqual(asdict(c), {'x': 42, 'y': 2})
+ self.assertIs(type(asdict(c)), dict)
+
+ def test_helper_asdict_raises_on_classes(self):
+ # asdict() should raise on a class object.
+ @dataclass
+ class C:
+ x: int
+ y: int
+ with self.assertRaisesRegex(TypeError, 'dataclass instance'):
+ asdict(C)
+ with self.assertRaisesRegex(TypeError, 'dataclass instance'):
+ asdict(int)
+
+ def test_helper_asdict_copy_values(self):
+ @dataclass
+ class C:
+ x: int
+ y: List[int] = field(default_factory=list)
+ initial = []
+ c = C(1, initial)
+ d = asdict(c)
+ self.assertEqual(d['y'], initial)
+ self.assertIsNot(d['y'], initial)
+ c = C(1)
+ d = asdict(c)
+ d['y'].append(1)
+ self.assertEqual(c.y, [])
+
+ def test_helper_asdict_nested(self):
+ @dataclass
+ class UserId:
+ token: int
+ group: int
+ @dataclass
+ class User:
+ name: str
+ id: UserId
+ u = User('Joe', UserId(123, 1))
+ d = asdict(u)
+ self.assertEqual(d, {'name': 'Joe', 'id': {'token': 123, 'group': 1}})
+ self.assertIsNot(asdict(u), asdict(u))
+ u.id.group = 2
+ self.assertEqual(asdict(u), {'name': 'Joe',
+ 'id': {'token': 123, 'group': 2}})
+
+ def test_helper_asdict_builtin_containers(self):
+ @dataclass
+ class User:
+ name: str
+ id: int
+ @dataclass
+ class GroupList:
+ id: int
+ users: List[User]
+ @dataclass
+ class GroupTuple:
+ id: int
+ users: Tuple[User, ...]
+ @dataclass
+ class GroupDict:
+ id: int
+ users: Dict[str, User]
+ a = User('Alice', 1)
+ b = User('Bob', 2)
+ gl = GroupList(0, [a, b])
+ gt = GroupTuple(0, (a, b))
+ gd = GroupDict(0, {'first': a, 'second': b})
+ self.assertEqual(asdict(gl), {'id': 0, 'users': [{'name': 'Alice', 'id': 1},
+ {'name': 'Bob', 'id': 2}]})
+ self.assertEqual(asdict(gt), {'id': 0, 'users': ({'name': 'Alice', 'id': 1},
+ {'name': 'Bob', 'id': 2})})
+ self.assertEqual(asdict(gd), {'id': 0, 'users': {'first': {'name': 'Alice', 'id': 1},
+ 'second': {'name': 'Bob', 'id': 2}}})
+
+ def test_helper_asdict_builtin_object_containers(self):
+ @dataclass
+ class Child:
+ d: object
+
+ @dataclass
+ class Parent:
+ child: Child
+
+ self.assertEqual(asdict(Parent(Child([1]))), {'child': {'d': [1]}})
+ self.assertEqual(asdict(Parent(Child({1: 2}))), {'child': {'d': {1: 2}}})
+
+ def test_helper_asdict_factory(self):
+ @dataclass
+ class C:
+ x: int
+ y: int
+ c = C(1, 2)
+ d = asdict(c, dict_factory=OrderedDict)
+ self.assertEqual(d, OrderedDict([('x', 1), ('y', 2)]))
+ self.assertIsNot(d, asdict(c, dict_factory=OrderedDict))
+ c.x = 42
+ d = asdict(c, dict_factory=OrderedDict)
+ self.assertEqual(d, OrderedDict([('x', 42), ('y', 2)]))
+ self.assertIs(type(d), OrderedDict)
+
+ def test_helper_asdict_namedtuple(self):
+ T = namedtuple('T', 'a b c')
+ @dataclass
+ class C:
+ x: str
+ y: T
+ c = C('outer', T(1, C('inner', T(11, 12, 13)), 2))
+
+ d = asdict(c)
+ self.assertEqual(d, {'x': 'outer',
+ 'y': T(1,
+ {'x': 'inner',
+ 'y': T(11, 12, 13)},
+ 2),
+ }
+ )
+
+ # Now with a dict_factory. OrderedDict is convenient, but
+ # since it compares to dicts, we also need to have separate
+ # assertIs tests.
+ d = asdict(c, dict_factory=OrderedDict)
+ self.assertEqual(d, {'x': 'outer',
+ 'y': T(1,
+ {'x': 'inner',
+ 'y': T(11, 12, 13)},
+ 2),
+ }
+ )
+
+ # Make sure that the returned dicts are actually OrderedDicts.
+ self.assertIs(type(d), OrderedDict)
+ self.assertIs(type(d['y'][1]), OrderedDict)
+
+ def test_helper_asdict_namedtuple_key(self):
+ # Ensure that a field that contains a dict which has a
+ # namedtuple as a key works with asdict().
+
+ @dataclass
+ class C:
+ f: dict
+ T = namedtuple('T', 'a')
+
+ c = C({T('an a'): 0})
+
+ self.assertEqual(asdict(c), {'f': {T(a='an a'): 0}})
+
+ def test_helper_asdict_namedtuple_derived(self):
+ class T(namedtuple('Tbase', 'a')):
+ def my_a(self):
+ return self.a
+
+ @dataclass
+ class C:
+ f: T
+
+ t = T(6)
+ c = C(t)
+
+ d = asdict(c)
+ self.assertEqual(d, {'f': T(a=6)})
+ # Make sure that t has been copied, not used directly.
+ self.assertIsNot(d['f'], t)
+ self.assertEqual(d['f'].my_a(), 6)
+
+ def test_helper_astuple(self):
+ # Basic tests for astuple(), it should return a new tuple.
+ @dataclass
+ class C:
+ x: int
+ y: int = 0
+ c = C(1)
+
+ self.assertEqual(astuple(c), (1, 0))
+ self.assertEqual(astuple(c), astuple(c))
+ self.assertIsNot(astuple(c), astuple(c))
+ c.y = 42
+ self.assertEqual(astuple(c), (1, 42))
+ self.assertIs(type(astuple(c)), tuple)
+
+ def test_helper_astuple_raises_on_classes(self):
+ # astuple() should raise on a class object.
+ @dataclass
+ class C:
+ x: int
+ y: int
+ with self.assertRaisesRegex(TypeError, 'dataclass instance'):
+ astuple(C)
+ with self.assertRaisesRegex(TypeError, 'dataclass instance'):
+ astuple(int)
+
+ def test_helper_astuple_copy_values(self):
+ @dataclass
+ class C:
+ x: int
+ y: List[int] = field(default_factory=list)
+ initial = []
+ c = C(1, initial)
+ t = astuple(c)
+ self.assertEqual(t[1], initial)
+ self.assertIsNot(t[1], initial)
+ c = C(1)
+ t = astuple(c)
+ t[1].append(1)
+ self.assertEqual(c.y, [])
+
+ def test_helper_astuple_nested(self):
+ @dataclass
+ class UserId:
+ token: int
+ group: int
+ @dataclass
+ class User:
+ name: str
+ id: UserId
+ u = User('Joe', UserId(123, 1))
+ t = astuple(u)
+ self.assertEqual(t, ('Joe', (123, 1)))
+ self.assertIsNot(astuple(u), astuple(u))
+ u.id.group = 2
+ self.assertEqual(astuple(u), ('Joe', (123, 2)))
+
+ def test_helper_astuple_builtin_containers(self):
+ @dataclass
+ class User:
+ name: str
+ id: int
+ @dataclass
+ class GroupList:
+ id: int
+ users: List[User]
+ @dataclass
+ class GroupTuple:
+ id: int
+ users: Tuple[User, ...]
+ @dataclass
+ class GroupDict:
+ id: int
+ users: Dict[str, User]
+ a = User('Alice', 1)
+ b = User('Bob', 2)
+ gl = GroupList(0, [a, b])
+ gt = GroupTuple(0, (a, b))
+ gd = GroupDict(0, {'first': a, 'second': b})
+ self.assertEqual(astuple(gl), (0, [('Alice', 1), ('Bob', 2)]))
+ self.assertEqual(astuple(gt), (0, (('Alice', 1), ('Bob', 2))))
+ self.assertEqual(astuple(gd), (0, {'first': ('Alice', 1), 'second': ('Bob', 2)}))
+
+ def test_helper_astuple_builtin_object_containers(self):
+ @dataclass
+ class Child:
+ d: object
+
+ @dataclass
+ class Parent:
+ child: Child
+
+ self.assertEqual(astuple(Parent(Child([1]))), (([1],),))
+ self.assertEqual(astuple(Parent(Child({1: 2}))), (({1: 2},),))
+
+ def test_helper_astuple_factory(self):
+ @dataclass
+ class C:
+ x: int
+ y: int
+ NT = namedtuple('NT', 'x y')
+ def nt(lst):
+ return NT(*lst)
+ c = C(1, 2)
+ t = astuple(c, tuple_factory=nt)
+ self.assertEqual(t, NT(1, 2))
+ self.assertIsNot(t, astuple(c, tuple_factory=nt))
+ c.x = 42
+ t = astuple(c, tuple_factory=nt)
+ self.assertEqual(t, NT(42, 2))
+ self.assertIs(type(t), NT)
+
+ def test_helper_astuple_namedtuple(self):
+ T = namedtuple('T', 'a b c')
+ @dataclass
+ class C:
+ x: str
+ y: T
+ c = C('outer', T(1, C('inner', T(11, 12, 13)), 2))
+
+ t = astuple(c)
+ self.assertEqual(t, ('outer', T(1, ('inner', (11, 12, 13)), 2)))
+
+ # Now, using a tuple_factory. list is convenient here.
+ t = astuple(c, tuple_factory=list)
+ self.assertEqual(t, ['outer', T(1, ['inner', T(11, 12, 13)], 2)])
+
+ def test_dynamic_class_creation(self):
+ cls_dict = {'__annotations__': {'x': int, 'y': int},
+ }
+
+ # Create the class.
+ cls = type('C', (), cls_dict)
+
+ # Make it a dataclass.
+ cls1 = dataclass(cls)
+
+ self.assertEqual(cls1, cls)
+ self.assertEqual(asdict(cls(1, 2)), {'x': 1, 'y': 2})
+
+ def test_dynamic_class_creation_using_field(self):
+ cls_dict = {'__annotations__': {'x': int, 'y': int},
+ 'y': field(default=5),
+ }
+
+ # Create the class.
+ cls = type('C', (), cls_dict)
+
+ # Make it a dataclass.
+ cls1 = dataclass(cls)
+
+ self.assertEqual(cls1, cls)
+ self.assertEqual(asdict(cls1(1)), {'x': 1, 'y': 5})
+
+ def test_init_in_order(self):
+ @dataclass
+ class C:
+ a: int
+ b: int = field()
+ c: list = field(default_factory=list, init=False)
+ d: list = field(default_factory=list)
+ e: int = field(default=4, init=False)
+ f: int = 4
+
+ calls = []
+ def setattr(self, name, value):
+ calls.append((name, value))
+
+ C.__setattr__ = setattr
+ c = C(0, 1)
+ self.assertEqual(('a', 0), calls[0])
+ self.assertEqual(('b', 1), calls[1])
+ self.assertEqual(('c', []), calls[2])
+ self.assertEqual(('d', []), calls[3])
+ self.assertNotIn(('e', 4), calls)
+ self.assertEqual(('f', 4), calls[4])
+
+ def test_items_in_dicts(self):
+ @dataclass
+ class C:
+ a: int
+ b: list = field(default_factory=list, init=False)
+ c: list = field(default_factory=list)
+ d: int = field(default=4, init=False)
+ e: int = 0
+
+ c = C(0)
+ # Class dict
+ self.assertNotIn('a', C.__dict__)
+ self.assertNotIn('b', C.__dict__)
+ self.assertNotIn('c', C.__dict__)
+ self.assertIn('d', C.__dict__)
+ self.assertEqual(C.d, 4)
+ self.assertIn('e', C.__dict__)
+ self.assertEqual(C.e, 0)
+ # Instance dict
+ self.assertIn('a', c.__dict__)
+ self.assertEqual(c.a, 0)
+ self.assertIn('b', c.__dict__)
+ self.assertEqual(c.b, [])
+ self.assertIn('c', c.__dict__)
+ self.assertEqual(c.c, [])
+ self.assertNotIn('d', c.__dict__)
+ self.assertIn('e', c.__dict__)
+ self.assertEqual(c.e, 0)
+
+ def test_alternate_classmethod_constructor(self):
+ # Since __post_init__ can't take params, use a classmethod
+ # alternate constructor. This is mostly an example to show
+ # how to use this technique.
+ @dataclass
+ class C:
+ x: int
+ @classmethod
+ def from_file(cls, filename):
+ # In a real example, create a new instance
+ # and populate 'x' from contents of a file.
+ value_in_file = 20
+ return cls(value_in_file)
+
+ self.assertEqual(C.from_file('filename').x, 20)
+
+ def test_field_metadata_default(self):
+ # Make sure the default metadata is read-only and of
+ # zero length.
+ @dataclass
+ class C:
+ i: int
+
+ self.assertFalse(fields(C)[0].metadata)
+ self.assertEqual(len(fields(C)[0].metadata), 0)
+ with self.assertRaisesRegex(TypeError,
+ 'does not support item assignment'):
+ fields(C)[0].metadata['test'] = 3
+
+ def test_field_metadata_mapping(self):
+ # Make sure only a mapping can be passed as metadata.
+ with self.assertRaises(TypeError):
+ @dataclass
+ class C:
+ i: int = field(metadata=0)
+
+ # Make sure an empty dict works.
+ d = {}
+ @dataclass
+ class C:
+ i: int = field(metadata=d)
+ self.assertFalse(fields(C)[0].metadata)
+ self.assertEqual(len(fields(C)[0].metadata), 0)
+ # Update should work (see bpo-35960).
+ d['foo'] = 1
+ self.assertEqual(len(fields(C)[0].metadata), 1)
+ self.assertEqual(fields(C)[0].metadata['foo'], 1)
+ with self.assertRaisesRegex(TypeError,
+ 'does not support item assignment'):
+ fields(C)[0].metadata['test'] = 3
+
+ # Make sure a non-empty dict works.
+ d = {'test': 10, 'bar': '42', 3: 'three'}
+ @dataclass
+ class C:
+ i: int = field(metadata=d)
+ self.assertEqual(len(fields(C)[0].metadata), 3)
+ self.assertEqual(fields(C)[0].metadata['test'], 10)
+ self.assertEqual(fields(C)[0].metadata['bar'], '42')
+ self.assertEqual(fields(C)[0].metadata[3], 'three')
+ # Update should work.
+ d['foo'] = 1
+ self.assertEqual(len(fields(C)[0].metadata), 4)
+ self.assertEqual(fields(C)[0].metadata['foo'], 1)
+ with self.assertRaises(KeyError):
+ # Non-existent key.
+ fields(C)[0].metadata['baz']
+ with self.assertRaisesRegex(TypeError,
+ 'does not support item assignment'):
+ fields(C)[0].metadata['test'] = 3
+
+ def test_field_metadata_custom_mapping(self):
+ # Try a custom mapping.
+ class SimpleNameSpace:
+ def __init__(self, **kw):
+ self.__dict__.update(kw)
+
+ def __getitem__(self, item):
+ if item == 'xyzzy':
+ return 'plugh'
+ return getattr(self, item)
+
+ def __len__(self):
+ return self.__dict__.__len__()
+
+ @dataclass
+ class C:
+ i: int = field(metadata=SimpleNameSpace(a=10))
+
+ self.assertEqual(len(fields(C)[0].metadata), 1)
+ self.assertEqual(fields(C)[0].metadata['a'], 10)
+ with self.assertRaises(AttributeError):
+ fields(C)[0].metadata['b']
+ # Make sure we're still talking to our custom mapping.
+ self.assertEqual(fields(C)[0].metadata['xyzzy'], 'plugh')
+
+ def test_generic_dataclasses(self):
+ T = TypeVar('T')
+
+ @dataclass
+ class LabeledBox(Generic[T]):
+ content: T
+ label: str = '<unknown>'
+
+ box = LabeledBox(42)
+ self.assertEqual(box.content, 42)
+ self.assertEqual(box.label, '<unknown>')
+
+ # Subscripting the resulting class should work, etc.
+ Alias = List[LabeledBox[int]]
+
+ def test_generic_extending(self):
+ S = TypeVar('S')
+ T = TypeVar('T')
+
+ @dataclass
+ class Base(Generic[T, S]):
+ x: T
+ y: S
+
+ @dataclass
+ class DataDerived(Base[int, T]):
+ new_field: str
+ Alias = DataDerived[str]
+ c = Alias(0, 'test1', 'test2')
+ self.assertEqual(astuple(c), (0, 'test1', 'test2'))
+
+ class NonDataDerived(Base[int, T]):
+ def new_method(self):
+ return self.y
+ Alias = NonDataDerived[float]
+ c = Alias(10, 1.0)
+ self.assertEqual(c.new_method(), 1.0)
+
+ def test_generic_dynamic(self):
+ T = TypeVar('T')
+
+ @dataclass
+ class Parent(Generic[T]):
+ x: T
+ Child = make_dataclass('Child', [('y', T), ('z', Optional[T], None)],
+ bases=(Parent[int], Generic[T]), namespace={'other': 42})
+ self.assertIs(Child[int](1, 2).z, None)
+ self.assertEqual(Child[int](1, 2, 3).z, 3)
+ self.assertEqual(Child[int](1, 2, 3).other, 42)
+ # Check that type aliases work correctly.
+ Alias = Child[T]
+ self.assertEqual(Alias[int](1, 2).x, 1)
+ # Check MRO resolution.
+ self.assertEqual(Child.__mro__, (Child, Parent, Generic, object))
+
+ def test_dataclasses_pickleable(self):
+ global P, Q, R
+ @dataclass
+ class P:
+ x: int
+ y: int = 0
+ @dataclass
+ class Q:
+ x: int
+ y: int = field(default=0, init=False)
+ @dataclass
+ class R:
+ x: int
+ y: List[int] = field(default_factory=list)
+ q = Q(1)
+ q.y = 2
+ samples = [P(1), P(1, 2), Q(1), q, R(1), R(1, [2, 3, 4])]
+ for sample in samples:
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.subTest(sample=sample, proto=proto):
+ new_sample = pickle.loads(pickle.dumps(sample, proto))
+ self.assertEqual(sample.x, new_sample.x)
+ self.assertEqual(sample.y, new_sample.y)
+ self.assertIsNot(sample, new_sample)
+ new_sample.x = 42
+ another_new_sample = pickle.loads(pickle.dumps(new_sample, proto))
+ self.assertEqual(new_sample.x, another_new_sample.x)
+ self.assertEqual(sample.y, another_new_sample.y)
+
+ def test_dataclasses_qualnames(self):
+ @dataclass(order=True, unsafe_hash=True, frozen=True)
+ class A:
+ x: int
+ y: int
+
+ self.assertEqual(A.__init__.__name__, "__init__")
+ for function in (
+ '__eq__',
+ '__lt__',
+ '__le__',
+ '__gt__',
+ '__ge__',
+ '__hash__',
+ '__init__',
+ '__repr__',
+ '__setattr__',
+ '__delattr__',
+ ):
+ self.assertEqual(getattr(A, function).__qualname__, f"TestCase.test_dataclasses_qualnames.<locals>.A.{function}")
+
+ with self.assertRaisesRegex(TypeError, r"A\.__init__\(\) missing"):
+ A()
+
+
+class TestFieldNoAnnotation(unittest.TestCase):
+ # A field() assigned to a name without a PEP 526 annotation must raise
+ # TypeError, and annotations in base classes must not be picked up.
+ def test_field_without_annotation(self):
+ with self.assertRaisesRegex(TypeError,
+ "'f' is a field but has no type annotation"):
+ @dataclass
+ class C:
+ f = field()
+
+ def test_field_without_annotation_but_annotation_in_base(self):
+ @dataclass
+ class B:
+ f: int
+
+ with self.assertRaisesRegex(TypeError,
+ "'f' is a field but has no type annotation"):
+ # This is still an error: make sure we don't pick up the
+ # type annotation in the base class.
+ @dataclass
+ class C(B):
+ f = field()
+
+ def test_field_without_annotation_but_annotation_in_base_not_dataclass(self):
+ # Same test, but with the base class not a dataclass.
+ class B:
+ f: int
+
+ with self.assertRaisesRegex(TypeError,
+ "'f' is a field but has no type annotation"):
+ # This is still an error: make sure we don't pick up the
+ # type annotation in the base class.
+ @dataclass
+ class C(B):
+ f = field()
+
+
+class TestDocString(unittest.TestCase):
+ # The generated __doc__ mirrors the __init__ signature; an existing
+ # docstring must never be overwritten.
+ def assertDocStrEqual(self, a, b):
+ # Because 3.6 and 3.7 differ in how inspect.signature works
+ # (see bpo #32108), for the time being just compare them with
+ # whitespace stripped.
+ self.assertEqual(a.replace(' ', ''), b.replace(' ', ''))
+
+ def test_existing_docstring_not_overridden(self):
+ @dataclass
+ class C:
+ """Lorem ipsum"""
+ x: int
+
+ self.assertEqual(C.__doc__, "Lorem ipsum")
+
+ def test_docstring_no_fields(self):
+ @dataclass
+ class C:
+ pass
+
+ self.assertDocStrEqual(C.__doc__, "C()")
+
+ def test_docstring_one_field(self):
+ @dataclass
+ class C:
+ x: int
+
+ self.assertDocStrEqual(C.__doc__, "C(x:int)")
+
+ def test_docstring_two_fields(self):
+ @dataclass
+ class C:
+ x: int
+ y: int
+
+ self.assertDocStrEqual(C.__doc__, "C(x:int, y:int)")
+
+ def test_docstring_three_fields(self):
+ @dataclass
+ class C:
+ x: int
+ y: int
+ z: str
+
+ self.assertDocStrEqual(C.__doc__, "C(x:int, y:int, z:str)")
+
+ def test_docstring_one_field_with_default(self):
+ @dataclass
+ class C:
+ x: int = 3
+
+ self.assertDocStrEqual(C.__doc__, "C(x:int=3)")
+
+ def test_docstring_one_field_with_default_none(self):
+ @dataclass
+ class C:
+ x: Union[int, type(None)] = None
+
+ self.assertDocStrEqual(C.__doc__, "C(x:Optional[int]=None)")
+
+ def test_docstring_list_field(self):
+ @dataclass
+ class C:
+ x: List[int]
+
+ self.assertDocStrEqual(C.__doc__, "C(x:List[int])")
+
+ def test_docstring_list_field_with_default_factory(self):
+ @dataclass
+ class C:
+ x: List[int] = field(default_factory=list)
+
+ self.assertDocStrEqual(C.__doc__, "C(x:List[int]=<factory>)")
+
+ def test_docstring_deque_field(self):
+ @dataclass
+ class C:
+ x: deque
+
+ self.assertDocStrEqual(C.__doc__, "C(x:collections.deque)")
+
+ def test_docstring_deque_field_with_default_factory(self):
+ @dataclass
+ class C:
+ x: deque = field(default_factory=deque)
+
+ self.assertDocStrEqual(C.__doc__, "C(x:collections.deque=<factory>)")
+
+
+class TestInit(unittest.TestCase):
+ # Tests for generated __init__: interaction with base-class __init__,
+ # the init= flag, and a user-defined __init__ always taking precedence.
+ def test_base_has_init(self):
+ class B:
+ def __init__(self):
+ self.z = 100
+ pass
+
+ # Make sure that declaring this class doesn't raise an error.
+ # The issue is that we can't override __init__ in our class,
+ # but it should be okay to add __init__ to us if our base has
+ # an __init__.
+ @dataclass
+ class C(B):
+ x: int = 0
+ c = C(10)
+ self.assertEqual(c.x, 10)
+ self.assertNotIn('z', vars(c))
+
+ # Make sure that if we don't add an init, the base __init__
+ # gets called.
+ @dataclass(init=False)
+ class C(B):
+ x: int = 10
+ c = C()
+ self.assertEqual(c.x, 10)
+ self.assertEqual(c.z, 100)
+
+ def test_no_init(self):
+ @dataclass(init=False)
+ class C:
+ i: int = 0
+ self.assertEqual(C().i, 0)
+
+ @dataclass(init=False)
+ class C:
+ i: int = 2
+ def __init__(self):
+ self.i = 3
+ self.assertEqual(C().i, 3)
+
+ def test_overwriting_init(self):
+ # If the class has __init__, use it no matter the value of
+ # init=.
+
+ @dataclass
+ class C:
+ x: int
+ def __init__(self, x):
+ self.x = 2 * x
+ self.assertEqual(C(3).x, 6)
+
+ @dataclass(init=True)
+ class C:
+ x: int
+ def __init__(self, x):
+ self.x = 2 * x
+ self.assertEqual(C(4).x, 8)
+
+ @dataclass(init=False)
+ class C:
+ x: int
+ def __init__(self, x):
+ self.x = 2 * x
+ self.assertEqual(C(5).x, 10)
+
+ def test_inherit_from_protocol(self):
+ # Dataclasses inheriting from protocol should preserve their own `__init__`.
+ # See bpo-45081.
+
+ class P(Protocol):
+ a: int
+
+ @dataclass
+ class C(P):
+ a: int
+
+ self.assertEqual(C(5).a, 5)
+
+ @dataclass
+ class D(P):
+ def __init__(self, a):
+ self.a = a * 2
+
+ self.assertEqual(D(5).a, 10)
+
+
+class TestRepr(unittest.TestCase):
+ # Tests for generated __repr__: qualname-based output, inheritance,
+ # the repr= flag, and a user-defined __repr__ taking precedence.
+ def test_repr(self):
+ @dataclass
+ class B:
+ x: int
+
+ @dataclass
+ class C(B):
+ y: int = 10
+
+ o = C(4)
+ self.assertEqual(repr(o), 'TestRepr.test_repr.<locals>.C(x=4, y=10)')
+
+ @dataclass
+ class D(C):
+ x: int = 20
+ self.assertEqual(repr(D()), 'TestRepr.test_repr.<locals>.D(x=20, y=10)')
+
+ @dataclass
+ class C:
+ @dataclass
+ class D:
+ i: int
+ @dataclass
+ class E:
+ pass
+ self.assertEqual(repr(C.D(0)), 'TestRepr.test_repr.<locals>.C.D(i=0)')
+ self.assertEqual(repr(C.E()), 'TestRepr.test_repr.<locals>.C.E()')
+
+ def test_no_repr(self):
+ # Test a class with no __repr__ and repr=False.
+ @dataclass(repr=False)
+ class C:
+ x: int
+ self.assertIn(f'{__name__}.TestRepr.test_no_repr.<locals>.C object at',
+ repr(C(3)))
+
+ # Test a class with a __repr__ and repr=False.
+ @dataclass(repr=False)
+ class C:
+ x: int
+ def __repr__(self):
+ return 'C-class'
+ self.assertEqual(repr(C(3)), 'C-class')
+
+ def test_overwriting_repr(self):
+ # If the class has __repr__, use it no matter the value of
+ # repr=.
+
+ @dataclass
+ class C:
+ x: int
+ def __repr__(self):
+ return 'x'
+ self.assertEqual(repr(C(0)), 'x')
+
+ @dataclass(repr=True)
+ class C:
+ x: int
+ def __repr__(self):
+ return 'x'
+ self.assertEqual(repr(C(0)), 'x')
+
+ @dataclass(repr=False)
+ class C:
+ x: int
+ def __repr__(self):
+ return 'x'
+ self.assertEqual(repr(C(0)), 'x')
+
+
class TestEq(unittest.TestCase):
    """Behavior of the generated __eq__ and of the eq= flag."""

    def test_no_eq(self):
        # eq=False: no __eq__ is generated, so comparison falls back
        # to object identity.
        @dataclass(eq=False)
        class C:
            x: int
        self.assertNotEqual(C(0), C(0))
        obj = C(3)
        self.assertEqual(obj, obj)

        # eq=False with a user-supplied __eq__: the user's version is kept.
        @dataclass(eq=False)
        class C:
            x: int
            def __eq__(self, other):
                return other == 10
        self.assertEqual(C(3), 10)

    def test_overwriting_eq(self):
        # An __eq__ defined in the class body always wins, regardless of
        # the eq= flag.  Each variant compares equal only to its sentinel.
        for deco, sentinel in ((dataclass, 3),
                               (dataclass(eq=True), 4),
                               (dataclass(eq=False), 5)):
            @deco
            class C:
                x: int
                def __eq__(self, other):
                    return other == sentinel
            self.assertEqual(C(1), sentinel)
            self.assertNotEqual(C(1), 1)
+
+
class TestOrdering(unittest.TestCase):
    """Behavior of the order= flag and interaction with total_ordering."""

    def test_functools_total_ordering(self):
        # functools.total_ordering can wrap a dataclass and derive the
        # remaining comparisons from a user-defined __lt__.
        @total_ordering
        @dataclass
        class C:
            x: int
            def __lt__(self, other):
                # Deliberately reversed, to prove that this
                # implementation is the one being invoked.
                return self.x >= other

        self.assertLess(C(0), -1)
        self.assertLessEqual(C(0), -1)
        self.assertGreater(C(0), 1)
        self.assertGreaterEqual(C(0), 1)

    def test_no_order(self):
        # order=False must not generate any comparison methods.
        @dataclass(order=False)
        class C:
            x: int
        for dunder in ('__le__', '__lt__', '__ge__', '__gt__'):
            self.assertNotIn(dunder, C.__dict__)

        # A user-defined __lt__ survives, and nothing else is added.
        @dataclass(order=False)
        class C:
            x: int
            def __lt__(self, other):
                return False
        for dunder in ('__le__', '__ge__', '__gt__'):
            self.assertNotIn(dunder, C.__dict__)

    def test_overwriting_order(self):
        # order=True refuses to clobber user-defined comparison methods
        # and points the user at functools.total_ordering instead.
        with self.assertRaisesRegex(TypeError,
                                    'Cannot overwrite attribute __lt__'
                                    '.*using functools.total_ordering'):
            @dataclass(order=True)
            class C:
                x: int
                def __lt__(self):
                    pass

        with self.assertRaisesRegex(TypeError,
                                    'Cannot overwrite attribute __le__'
                                    '.*using functools.total_ordering'):
            @dataclass(order=True)
            class C:
                x: int
                def __le__(self):
                    pass

        with self.assertRaisesRegex(TypeError,
                                    'Cannot overwrite attribute __gt__'
                                    '.*using functools.total_ordering'):
            @dataclass(order=True)
            class C:
                x: int
                def __gt__(self):
                    pass

        with self.assertRaisesRegex(TypeError,
                                    'Cannot overwrite attribute __ge__'
                                    '.*using functools.total_ordering'):
            @dataclass(order=True)
            class C:
                x: int
                def __ge__(self):
                    pass
+
class TestHash(unittest.TestCase):
    """Hashing rules across unsafe_hash=, eq= and frozen= combinations."""

    def test_unsafe_hash(self):
        @dataclass(unsafe_hash=True)
        class C:
            x: int
            y: str
        # The generated hash matches the tuple-of-fields hash.
        self.assertEqual(hash(C(1, 'foo')), hash((1, 'foo')))

    def test_hash_rules(self):
        def non_bool(value):
            # Map True/False to other truthy/falsy objects; None stays None.
            if value is None:
                return None
            return (3,) if value else 0

        def check(case, unsafe_hash, eq, frozen, with_hash, result):
            with self.subTest(case=case, unsafe_hash=unsafe_hash, eq=eq,
                              frozen=frozen):
                if result != 'exception':
                    if with_hash:
                        @dataclass(unsafe_hash=unsafe_hash, eq=eq, frozen=frozen)
                        class C:
                            def __hash__(self):
                                return 0
                    else:
                        @dataclass(unsafe_hash=unsafe_hash, eq=eq, frozen=frozen)
                        class C:
                            pass

                # Compare the outcome against the expectation.
                if result == 'fn':
                    # A generated __hash__ must be present on the class.
                    self.assertIn('__hash__', C.__dict__)
                    self.assertIsNotNone(C.__dict__['__hash__'])

                elif result == '':
                    # No __hash__ entry of our own (unless the user wrote one).
                    if not with_hash:
                        self.assertNotIn('__hash__', C.__dict__)

                elif result == 'none':
                    # __hash__ is explicitly disabled.
                    self.assertIn('__hash__', C.__dict__)
                    self.assertIsNone(C.__dict__['__hash__'])

                elif result == 'exception':
                    # Class creation must fail; only reachable when the
                    # user defined __hash__ themselves.
                    assert with_hash
                    with self.assertRaisesRegex(TypeError, 'Cannot overwrite attribute __hash__'):
                        @dataclass(unsafe_hash=unsafe_hash, eq=eq, frozen=frozen)
                        class C:
                            def __hash__(self):
                                return 0

                else:
                    assert False, f'unknown result {result!r}'

        # All 8 combinations of unsafe_hash/eq/frozen, each with the
        # expected result without and with a user-defined __hash__.
        for case, (unsafe_hash, eq, frozen,
                   res_no_defined_hash, res_defined_hash) in enumerate([
                    (False, False, False, '',     ''),
                    (False, False, True,  '',     ''),
                    (False, True,  False, 'none', ''),
                    (False, True,  True,  'fn',   ''),
                    (True,  False, False, 'fn',   'exception'),
                    (True,  False, True,  'fn',   'exception'),
                    (True,  True,  False, 'fn',   'exception'),
                    (True,  True,  True,  'fn',   'exception'),
                ], 1):
            check(case, unsafe_hash, eq, frozen, False, res_no_defined_hash)
            check(case, unsafe_hash, eq, frozen, True, res_defined_hash)

            # Repeat with non-bool truth values, to make sure the
            # decorator's data-driven table copes with them too.
            check(case, non_bool(unsafe_hash), non_bool(eq), non_bool(frozen),
                  False, res_no_defined_hash)
            check(case, non_bool(unsafe_hash), non_bool(eq), non_bool(frozen),
                  True, res_defined_hash)

    def test_eq_only(self):
        # Defining __eq__ alone sets __hash__ to None — ordinary Python
        # behavior that dataclasses must not interfere with (bpo-32546).
        @dataclass
        class C:
            i: int
            def __eq__(self, other):
                return self.i == other.i
        self.assertEqual(C(1), C(1))
        self.assertNotEqual(C(1), C(4))

        # unsafe_hash=True restores hashability alongside the user __eq__.
        @dataclass(unsafe_hash=True)
        class C:
            i: int
            def __eq__(self, other):
                return self.i == other.i
        self.assertEqual(C(1), C(1.0))
        self.assertEqual(hash(C(1)), hash(C(1.0)))

        # The user's __eq__ wins even with an explicit eq=True.
        @dataclass(unsafe_hash=True, eq=True)
        class C:
            i: int
            def __eq__(self, other):
                return self.i == 3 and self.i == other.i
        self.assertEqual(C(3), C(3))
        self.assertNotEqual(C(1), C(1))
        self.assertEqual(hash(C(1)), hash(C(1.0)))

    def test_0_field_hash(self):
        # With no fields, both hashing modes reduce to hash(()).
        for deco in (dataclass(frozen=True), dataclass(unsafe_hash=True)):
            @deco
            class C:
                pass
            self.assertEqual(hash(C()), hash(()))

    def test_1_field_hash(self):
        # With one field, both hashing modes reduce to the 1-tuple hash.
        for deco in (dataclass(frozen=True), dataclass(unsafe_hash=True)):
            @deco
            class C:
                x: int
            self.assertEqual(hash(C(4)), hash((4,)))
            self.assertEqual(hash(C(42)), hash((42,)))

    def test_hash_no_args(self):
        # Pin down the default hashing behavior when hash-related
        # arguments are omitted from the decorator entirely, so that a
        # future rename or default change is caught.

        class Base:
            def __hash__(self):
                return 301

        # frozen/eq of None means "leave that argument out of the call".
        for frozen, eq, base, expected in [
            (None,  None,  object, 'unhashable'),
            (None,  None,  Base,   'unhashable'),
            (None,  False, object, 'object'),
            (None,  False, Base,   'base'),
            (None,  True,  object, 'unhashable'),
            (None,  True,  Base,   'unhashable'),
            (False, None,  object, 'unhashable'),
            (False, None,  Base,   'unhashable'),
            (False, False, object, 'object'),
            (False, False, Base,   'base'),
            (False, True,  object, 'unhashable'),
            (False, True,  Base,   'unhashable'),
            (True,  None,  object, 'tuple'),
            (True,  None,  Base,   'tuple'),
            (True,  False, object, 'object'),
            (True,  False, Base,   'base'),
            (True,  True,  object, 'tuple'),
            (True,  True,  Base,   'tuple'),
        ]:

            with self.subTest(frozen=frozen, eq=eq, base=base, expected=expected):
                # Build the class with only the specified arguments.
                if frozen is None and eq is None:
                    @dataclass
                    class C(base):
                        i: int
                elif frozen is None:
                    @dataclass(eq=eq)
                    class C(base):
                        i: int
                elif eq is None:
                    @dataclass(frozen=frozen)
                    class C(base):
                        i: int
                else:
                    @dataclass(frozen=frozen, eq=eq)
                    class C(base):
                        i: int

                # Verify the resulting hash behavior.
                if expected == 'unhashable':
                    obj = C(10)
                    with self.assertRaisesRegex(TypeError, 'unhashable type'):
                        hash(obj)

                elif expected == 'base':
                    self.assertEqual(hash(C(10)), 301)

                elif expected == 'object':
                    # object's hash isn't id()-based, so comparing hash
                    # values proves little; check function identity instead.
                    self.assertIs(C.__hash__, object.__hash__)

                elif expected == 'tuple':
                    self.assertEqual(hash(C(42)), hash((42,)))

                else:
                    assert False, f'unknown value for expected={expected!r}'
+
+
class TestFrozen(unittest.TestCase):
    """frozen=True: immutability, inheritance constraints, hashing."""

    def test_frozen(self):
        @dataclass(frozen=True)
        class C:
            i: int

        obj = C(10)
        self.assertEqual(obj.i, 10)
        with self.assertRaises(FrozenInstanceError):
            obj.i = 5
        # The failed assignment must not have changed anything.
        self.assertEqual(obj.i, 10)

    def test_inherit(self):
        @dataclass(frozen=True)
        class C:
            i: int

        @dataclass(frozen=True)
        class D(C):
            j: int

        inst = D(0, 10)
        # Both the inherited field and the new one are immutable.
        with self.assertRaises(FrozenInstanceError):
            inst.i = 5
        with self.assertRaises(FrozenInstanceError):
            inst.j = 6
        self.assertEqual(inst.i, 0)
        self.assertEqual(inst.j, 10)

    def test_inherit_nonfrozen_from_empty_frozen(self):
        # Even a field-less frozen base forbids a non-frozen subclass.
        @dataclass(frozen=True)
        class C:
            pass

        with self.assertRaisesRegex(TypeError,
                                    'cannot inherit non-frozen dataclass from a frozen one'):
            @dataclass
            class D(C):
                j: int

    def test_inherit_nonfrozen_from_empty(self):
        @dataclass
        class C:
            pass

        @dataclass
        class D(C):
            j: int

        inst = D(3)
        self.assertEqual(inst.j, 3)
        self.assertIsInstance(inst, C)

    # The next two tests exercise both hierarchy shapes: with and
    # without an intervening plain (non-dataclass) class.
    def test_inherit_nonfrozen_from_frozen(self):
        for intermediate_class in [True, False]:
            with self.subTest(intermediate_class=intermediate_class):
                @dataclass(frozen=True)
                class C:
                    i: int

                if intermediate_class:
                    class I(C): pass
                else:
                    I = C

                with self.assertRaisesRegex(TypeError,
                                            'cannot inherit non-frozen dataclass from a frozen one'):
                    @dataclass
                    class D(I):
                        pass

    def test_inherit_frozen_from_nonfrozen(self):
        for intermediate_class in [True, False]:
            with self.subTest(intermediate_class=intermediate_class):
                @dataclass
                class C:
                    i: int

                if intermediate_class:
                    class I(C): pass
                else:
                    I = C

                with self.assertRaisesRegex(TypeError,
                                            'cannot inherit frozen dataclass from a non-frozen one'):
                    @dataclass(frozen=True)
                    class D(I):
                        pass

    def test_inherit_from_normal_class(self):
        # Non-dataclass bases place no frozen/non-frozen constraint.
        for intermediate_class in [True, False]:
            with self.subTest(intermediate_class=intermediate_class):
                class C:
                    pass

                if intermediate_class:
                    class I(C): pass
                else:
                    I = C

                @dataclass(frozen=True)
                class D(I):
                    i: int

                inst = D(10)
                with self.assertRaises(FrozenInstanceError):
                    inst.i = 5

    def test_non_frozen_normal_derived(self):
        # bpo-32953: a plain subclass of a frozen dataclass may add and
        # mutate its own attributes; the frozen fields stay immutable.
        @dataclass(frozen=True)
        class D:
            x: int
            y: int = 10

        class S(D):
            pass

        inst = S(3)
        self.assertEqual(inst.x, 3)
        self.assertEqual(inst.y, 10)
        inst.cached = True

        with self.assertRaises(FrozenInstanceError):
            inst.x = 5
        with self.assertRaises(FrozenInstanceError):
            inst.y = 5
        self.assertEqual(inst.x, 3)
        self.assertEqual(inst.y, 10)
        self.assertEqual(inst.cached, True)

    def test_overwriting_frozen(self):
        # frozen=True installs __setattr__ and __delattr__ and refuses
        # to clobber user-supplied versions of either.
        with self.assertRaisesRegex(TypeError,
                                    'Cannot overwrite attribute __setattr__'):
            @dataclass(frozen=True)
            class C:
                x: int
                def __setattr__(self):
                    pass

        with self.assertRaisesRegex(TypeError,
                                    'Cannot overwrite attribute __delattr__'):
            @dataclass(frozen=True)
            class C:
                x: int
                def __delattr__(self):
                    pass

        # Without frozen=True a user __setattr__ is left alone.
        @dataclass(frozen=False)
        class C:
            x: int
            def __setattr__(self, name, value):
                self.__dict__['x'] = value * 2
        self.assertEqual(C(10).x, 20)

    def test_frozen_hash(self):
        @dataclass(frozen=True)
        class C:
            x: Any

        # Hashable when the field value is hashable...
        hash(C(3))

        # ...and a TypeError when it is not.
        with self.assertRaisesRegex(TypeError, 'unhashable type'):
            hash(C({}))
+
+
+class TestSlots(unittest.TestCase):
+ def test_simple(self):
+ @dataclass
+ class C:
+ __slots__ = ('x',)
+ x: Any
+
+ # There was a bug where a variable in a slot was assumed to
+ # also have a default value (of type
+ # types.MemberDescriptorType).
+ with self.assertRaisesRegex(TypeError,
+ r"__init__\(\) missing 1 required positional argument: 'x'"):
+ C()
+
+ # We can create an instance, and assign to x.
+ c = C(10)
+ self.assertEqual(c.x, 10)
+ c.x = 5
+ self.assertEqual(c.x, 5)
+
+ # We can't assign to anything else.
+ with self.assertRaisesRegex(AttributeError, "'C' object has no attribute 'y'"):
+ c.y = 5
+
+ def test_derived_added_field(self):
+ # See bpo-33100.
+ @dataclass
+ class Base:
+ __slots__ = ('x',)
+ x: Any
+
+ @dataclass
+ class Derived(Base):
+ x: int
+ y: int
+
+ d = Derived(1, 2)
+ self.assertEqual((d.x, d.y), (1, 2))
+
+ # We can add a new field to the derived instance.
+ d.z = 10
+
+ def test_generated_slots(self):
+ @dataclass(slots=True)
+ class C:
+ x: int
+ y: int
+
+ c = C(1, 2)
+ self.assertEqual((c.x, c.y), (1, 2))
+
+ c.x = 3
+ c.y = 4
+ self.assertEqual((c.x, c.y), (3, 4))
+
+ with self.assertRaisesRegex(AttributeError, "'C' object has no attribute 'z'"):
+ c.z = 5
+
+ def test_add_slots_when_slots_exists(self):
+ with self.assertRaisesRegex(TypeError, '^C already specifies __slots__$'):
+ @dataclass(slots=True)
+ class C:
+ __slots__ = ('x',)
+ x: int
+
+ def test_generated_slots_value(self):
+
+ class Root:
+ __slots__ = {'x'}
+
+ class Root2(Root):
+ __slots__ = {'k': '...', 'j': ''}
+
+ class Root3(Root2):
+ __slots__ = ['h']
+
+ class Root4(Root3):
+ __slots__ = 'aa'
+
+ @dataclass(slots=True)
+ class Base(Root4):
+ y: int
+ j: str
+ h: str
+
+ self.assertEqual(Base.__slots__, ('y', ))
+
+ @dataclass(slots=True)
+ class Derived(Base):
+ aa: float
+ x: str
+ z: int
+ k: str
+ h: str
+
+ self.assertEqual(Derived.__slots__, ('z', ))
+
+ @dataclass
+ class AnotherDerived(Base):
+ z: int
+
+ self.assertNotIn('__slots__', AnotherDerived.__dict__)
+
+ def test_cant_inherit_from_iterator_slots(self):
+
+ class Root:
+ __slots__ = iter(['a'])
+
+ class Root2(Root):
+ __slots__ = ('b', )
+
+ with self.assertRaisesRegex(
+ TypeError,
+ "^Slots of 'Root' cannot be determined"
+ ):
+ @dataclass(slots=True)
+ class C(Root2):
+ x: int
+
+ def test_returns_new_class(self):
+ class A:
+ x: int
+
+ B = dataclass(A, slots=True)
+ self.assertIsNot(A, B)
+
+ self.assertFalse(hasattr(A, "__slots__"))
+ self.assertTrue(hasattr(B, "__slots__"))
+
+ # Can't be local to test_frozen_pickle.
+ @dataclass(frozen=True, slots=True)
+ class FrozenSlotsClass:
+ foo: str
+ bar: int
+
+ @dataclass(frozen=True)
+ class FrozenWithoutSlotsClass:
+ foo: str
+ bar: int
+
+ def test_frozen_pickle(self):
+ # bpo-43999
+
+ self.assertEqual(self.FrozenSlotsClass.__slots__, ("foo", "bar"))
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.subTest(proto=proto):
+ obj = self.FrozenSlotsClass("a", 1)
+ p = pickle.loads(pickle.dumps(obj, protocol=proto))
+ self.assertIsNot(obj, p)
+ self.assertEqual(obj, p)
+
+ obj = self.FrozenWithoutSlotsClass("a", 1)
+ p = pickle.loads(pickle.dumps(obj, protocol=proto))
+ self.assertIsNot(obj, p)
+ self.assertEqual(obj, p)
+
+ def test_slots_with_default_no_init(self):
+ # Originally reported in bpo-44649.
+ @dataclass(slots=True)
+ class A:
+ a: str
+ b: str = field(default='b', init=False)
+
+ obj = A("a")
+ self.assertEqual(obj.a, 'a')
+ self.assertEqual(obj.b, 'b')
+
+ def test_slots_with_default_factory_no_init(self):
+ # Originally reported in bpo-44649.
+ @dataclass(slots=True)
+ class A:
+ a: str
+ b: str = field(default_factory=lambda:'b', init=False)
+
+ obj = A("a")
+ self.assertEqual(obj.a, 'a')
+ self.assertEqual(obj.b, 'b')
+
+ def test_slots_no_weakref(self):
+ @dataclass(slots=True)
+ class A:
+ # No weakref.
+ pass
+
+ self.assertNotIn("__weakref__", A.__slots__)
+ a = A()
+ with self.assertRaisesRegex(TypeError,
+ "cannot create weak reference"):
+ weakref.ref(a)
+
+ def test_slots_weakref(self):
+ @dataclass(slots=True, weakref_slot=True)
+ class A:
+ a: int
+
+ self.assertIn("__weakref__", A.__slots__)
+ a = A(1)
+ weakref.ref(a)
+
+ def test_slots_weakref_base_str(self):
+ class Base:
+ __slots__ = '__weakref__'
+
+ @dataclass(slots=True)
+ class A(Base):
+ a: int
+
+ # __weakref__ is in the base class, not A. But an A is still weakref-able.
+ self.assertIn("__weakref__", Base.__slots__)
+ self.assertNotIn("__weakref__", A.__slots__)
+ a = A(1)
+ weakref.ref(a)
+
+ def test_slots_weakref_base_tuple(self):
+ # Same as test_slots_weakref_base, but use a tuple instead of a string
+ # in the base class.
+ class Base:
+ __slots__ = ('__weakref__',)
+
+ @dataclass(slots=True)
+ class A(Base):
+ a: int
+
+ # __weakref__ is in the base class, not A. But an A is still
+ # weakref-able.
+ self.assertIn("__weakref__", Base.__slots__)
+ self.assertNotIn("__weakref__", A.__slots__)
+ a = A(1)
+ weakref.ref(a)
+
+ def test_weakref_slot_without_slot(self):
+ with self.assertRaisesRegex(TypeError,
+ "weakref_slot is True but slots is False"):
+ @dataclass(weakref_slot=True)
+ class A:
+ a: int
+
+ def test_weakref_slot_make_dataclass(self):
+ A = make_dataclass('A', [('a', int),], slots=True, weakref_slot=True)
+ self.assertIn("__weakref__", A.__slots__)
+ a = A(1)
+ weakref.ref(a)
+
+ # And make sure if raises if slots=True is not given.
+ with self.assertRaisesRegex(TypeError,
+ "weakref_slot is True but slots is False"):
+ B = make_dataclass('B', [('a', int),], weakref_slot=True)
+
+ def test_weakref_slot_subclass_weakref_slot(self):
+ @dataclass(slots=True, weakref_slot=True)
+ class Base:
+ field: int
+
+ # A *can* also specify weakref_slot=True if it wants to (gh-93521)
+ @dataclass(slots=True, weakref_slot=True)
+ class A(Base):
+ ...
+
+ # __weakref__ is in the base class, not A. But an instance of A
+ # is still weakref-able.
+ self.assertIn("__weakref__", Base.__slots__)
+ self.assertNotIn("__weakref__", A.__slots__)
+ a = A(1)
+ weakref.ref(a)
+
+ def test_weakref_slot_subclass_no_weakref_slot(self):
+ @dataclass(slots=True, weakref_slot=True)
+ class Base:
+ field: int
+
+ @dataclass(slots=True)
+ class A(Base):
+ ...
+
+ # __weakref__ is in the base class, not A. Even though A doesn't
+ # specify weakref_slot, it should still be weakref-able.
+ self.assertIn("__weakref__", Base.__slots__)
+ self.assertNotIn("__weakref__", A.__slots__)
+ a = A(1)
+ weakref.ref(a)
+
+ def test_weakref_slot_normal_base_weakref_slot(self):
+ class Base:
+ __slots__ = ('__weakref__',)
+
+ @dataclass(slots=True, weakref_slot=True)
+ class A(Base):
+ field: int
+
+ # __weakref__ is in the base class, not A. But an instance of
+ # A is still weakref-able.
+ self.assertIn("__weakref__", Base.__slots__)
+ self.assertNotIn("__weakref__", A.__slots__)
+ a = A(1)
+ weakref.ref(a)
+
+
class TestDescriptors(unittest.TestCase):
    """Interaction between dataclass fields and descriptor defaults."""

    def test_set_name(self):
        # bpo-33141: __set_name__ must fire for descriptor defaults.

        class D:
            def __set_name__(self, owner, name):
                self.name = name + 'x'
            def __get__(self, instance, owner):
                if instance is not None:
                    return 1
                return self

        # Plain descriptor behavior; no dataclass machinery is involved
        # in initializing the descriptor itself.
        @dataclass
        class C:
            c: int=D()
        self.assertEqual(C.c.name, 'cx')

        # With a default and init=False — the only really meaningful
        # combination, since otherwise __init__ overwrites the descriptor.
        @dataclass
        class C:
            c: int=field(default=D(), init=False)
        self.assertEqual(C.c.name, 'cx')
        self.assertEqual(C().c, 1)

    def test_non_descriptor(self):
        # PEP 487: __set_name__ also fires on non-descriptor objects.

        class D:
            def __set_name__(self, owner, name):
                self.name = name + 'x'

        @dataclass
        class C:
            c: int=field(default=D(), init=False)
        self.assertEqual(C.c.name, 'cx')

    def test_lookup_on_instance(self):
        # bpo-33175: __set_name__ must be looked up on the type only.
        class D:
            pass

        default = D()
        # Attach __set_name__ to the instance, not the type.
        default.__set_name__ = Mock()

        @dataclass
        class C:
            i: int=field(default=default, init=False)

        # The instance-level __set_name__ is ignored.
        self.assertEqual(default.__set_name__.call_count, 0)

    def test_lookup_on_class(self):
        # bpo-33175: a type-level __set_name__ is found and called.
        class D:
            pass
        D.__set_name__ = Mock()

        @dataclass
        class C:
            i: int=field(default=D(), init=False)

        self.assertEqual(D.__set_name__.call_count, 1)

    def test_init_calls_set(self):
        class D:
            pass

        D.__set__ = Mock()

        @dataclass
        class C:
            i: D = D()

        # Initializing the field goes through the descriptor's __set__.
        D.__set__.reset_mock()
        obj = C(5)
        self.assertEqual(D.__set__.call_count, 1)

    def test_getting_field_calls_get(self):
        class D:
            pass

        D.__set__ = Mock()
        D.__get__ = Mock()

        @dataclass
        class C:
            i: D = D()

        obj = C(5)

        # Reading the field goes through the descriptor's __get__.
        D.__get__.reset_mock()
        value = obj.i
        self.assertEqual(D.__get__.call_count, 1)

    def test_setting_field_calls_set(self):
        class D:
            pass

        D.__set__ = Mock()

        @dataclass
        class C:
            i: D = D()

        obj = C(5)

        # Assigning the field goes through the descriptor's __set__.
        D.__set__.reset_mock()
        obj.i = 10
        self.assertEqual(D.__set__.call_count, 1)

    def test_setting_uninitialized_descriptor_field(self):
        class D:
            pass

        D.__set__ = Mock()

        @dataclass
        class C:
            i: D

        # No descriptor instance exists on the class, so __set__ has
        # nothing to be called on.
        D.__set__.reset_mock()
        obj = C(5)
        self.assertEqual(D.__set__.call_count, 0)

        # Storing a D on the instance afterwards still bypasses __set__:
        # descriptors only trigger when found on the type.
        obj.i = D()
        obj.i = 5
        self.assertEqual(D.__set__.call_count, 0)

    def test_default_value(self):
        class D:
            def __get__(self, instance: Any, owner: object) -> int:
                if instance is None:
                    # Accessed on the class: supply the default.
                    return 100

                return instance._x

            def __set__(self, instance: Any, value: int) -> None:
                instance._x = value

        @dataclass
        class C:
            i: D = D()

        # Omitted argument: the descriptor's class-level value is used.
        obj = C()
        self.assertEqual(obj.i, 100)

        obj = C(5)
        self.assertEqual(obj.i, 5)

    def test_no_default_value(self):
        class D:
            def __get__(self, instance: Any, owner: object) -> int:
                if instance is None:
                    # Accessed on the class: no default available.
                    raise AttributeError()

                return instance._x

            def __set__(self, instance: Any, value: int) -> None:
                instance._x = value

        @dataclass
        class C:
            i: D = D()

        # With no class-level value, the field has no default at all.
        with self.assertRaisesRegex(TypeError, 'missing 1 required positional argument'):
            obj = C()
+
class TestStringAnnotations(unittest.TestCase):
    """Recognition of ClassVar/InitVar in string (stringized) annotations.

    With string annotations the check is textual rather than semantic,
    so some spellings that aren't valid Python are still recognized.
    These tests assume that both "import typing" and
    "from typing import *" (and the dataclasses equivalents) have been
    executed in this file.
    """

    def test_classvar(self):
        # Spellings that must be detected as ClassVar.
        for typestr in ('ClassVar[int]',
                        'ClassVar [int]',
                        ' ClassVar [int]',
                        'ClassVar',
                        ' ClassVar ',
                        'typing.ClassVar[int]',
                        'typing.ClassVar[str]',
                        ' typing.ClassVar[str]',
                        'typing .ClassVar[str]',
                        'typing. ClassVar[str]',
                        'typing.ClassVar [str]',
                        'typing.ClassVar [ str]',

                        # Not syntactically valid, but these will
                        # be treated as ClassVars.
                        'typing.ClassVar.[int]',
                        'typing.ClassVar+',
                        ):
            with self.subTest(typestr=typestr):
                @dataclass
                class C:
                    x: typestr

                # x is a ClassVar, so C() takes no args.
                C()

                # And it won't appear in the class's dict because it
                # doesn't have a default.
                self.assertNotIn('x', C.__dict__)

    def test_isnt_classvar(self):
        # Near-misses that must NOT be detected as ClassVar.
        for typestr in ('CV',
                        't.ClassVar',
                        't.ClassVar[int]',
                        'typing..ClassVar[int]',
                        'Classvar',
                        'Classvar[int]',
                        'typing.ClassVarx[int]',
                        'typong.ClassVar[int]',
                        'dataclasses.ClassVar[int]',
                        'typingxClassVar[str]',
                        ):
            with self.subTest(typestr=typestr):
                @dataclass
                class C:
                    x: typestr

                # x is not a ClassVar, so C() takes one arg.
                self.assertEqual(C(10).x, 10)

    def test_initvar(self):
        # Spellings that must be detected as InitVar.  Fixed: the comma
        # after 'InitVar [int]' was missing, so implicit string
        # concatenation merged it with the next entry and the
        # 'InitVar [int]' spelling was never actually tested (compare
        # the ClassVar list above, which has both spellings).
        for typestr in ('InitVar[int]',
                        'InitVar [int]',
                        ' InitVar [int]',
                        'InitVar',
                        ' InitVar ',
                        'dataclasses.InitVar[int]',
                        'dataclasses.InitVar[str]',
                        ' dataclasses.InitVar[str]',
                        'dataclasses .InitVar[str]',
                        'dataclasses. InitVar[str]',
                        'dataclasses.InitVar [str]',
                        'dataclasses.InitVar [ str]',

                        # Not syntactically valid, but these will
                        # be treated as InitVars.
                        'dataclasses.InitVar.[int]',
                        'dataclasses.InitVar+',
                        ):
            with self.subTest(typestr=typestr):
                @dataclass
                class C:
                    x: typestr

                # x is an InitVar, so it doesn't create a member.
                with self.assertRaisesRegex(AttributeError,
                                            "object has no attribute 'x'"):
                    C(1).x

    def test_isnt_initvar(self):
        # Near-misses that must NOT be detected as InitVar.
        for typestr in ('IV',
                        'dc.InitVar',
                        'xdataclasses.xInitVar',
                        'typing.xInitVar[int]',
                        ):
            with self.subTest(typestr=typestr):
                @dataclass
                class C:
                    x: typestr

                # x is not an InitVar, so there will be a member x.
                self.assertEqual(C(10).x, 10)

    def test_classvar_module_level_import(self):
        # The helper modules live in the (optionally installed) 'test'
        # package; skip rather than error when it is not shipped.
        try:
            from test import dataclass_module_1
            from test import dataclass_module_1_str
            from test import dataclass_module_2
            from test import dataclass_module_2_str
        except ImportError:
            self.skipTest('test.dataclass_module_* helpers not available')

        for m in (dataclass_module_1, dataclass_module_1_str,
                  dataclass_module_2, dataclass_module_2_str,
                  ):
            with self.subTest(m=m):
                # ClassVars are interpreted differently depending on
                # whether string annotations are used; see the imported
                # modules for details.
                if m.USING_STRINGS:
                    c = m.CV(10)
                else:
                    c = m.CV()
                self.assertEqual(c.cv0, 20)

                # The same applies to InitVars.
                c = m.IV(0, 1, 2, 3, 4)

                for field_name in ('iv0', 'iv1', 'iv2', 'iv3'):
                    with self.subTest(field_name=field_name):
                        with self.assertRaisesRegex(AttributeError, f"object has no attribute '{field_name}'"):
                            # Since field_name is an InitVar, it's
                            # not an instance field.
                            getattr(c, field_name)

                if m.USING_STRINGS:
                    # iv4 is interpreted as a normal field.
                    self.assertIn('not_iv4', c.__dict__)
                    self.assertEqual(c.not_iv4, 4)
                else:
                    # iv4 is interpreted as an InitVar, so it
                    # won't exist on the instance.
                    self.assertNotIn('not_iv4', c.__dict__)

    def test_text_annotations(self):
        # get_type_hints() must resolve stringized annotations both on
        # the class and on the generated __init__.
        try:
            from test import dataclass_textanno
        except ImportError:
            self.skipTest('test.dataclass_textanno helper not available')

        self.assertEqual(
            get_type_hints(dataclass_textanno.Bar),
            {'foo': dataclass_textanno.Foo})
        self.assertEqual(
            get_type_hints(dataclass_textanno.Bar.__init__),
            {'foo': dataclass_textanno.Foo,
             'return': type(None)})
+
+
class TestMakeDataclass(unittest.TestCase):
    """dataclasses.make_dataclass: field specs, bases, validation."""

    def test_simple(self):
        C = make_dataclass('C',
                           [('x', int),
                            ('y', int, field(default=5))],
                           namespace={'add_one': lambda self: self.x + 1})
        inst = C(10)
        self.assertEqual((inst.x, inst.y), (10, 5))
        # Entries from namespace= become ordinary class attributes.
        self.assertEqual(inst.add_one(), 11)

    def test_no_mutate_namespace(self):
        # The namespace= dict passed in must not be modified.
        ns = {}
        C = make_dataclass('C',
                           [('x', int),
                            ('y', int, field(default=5))],
                           namespace=ns)
        self.assertEqual(ns, {})

    def test_base(self):
        class Base1:
            pass
        class Base2:
            pass
        C = make_dataclass('C',
                           [('x', int)],
                           bases=(Base1, Base2))
        inst = C(2)
        for klass in (C, Base1, Base2):
            self.assertIsInstance(inst, klass)

    def test_base_dataclass(self):
        @dataclass
        class Base1:
            x: int
        class Base2:
            pass
        C = make_dataclass('C',
                           [('y', int)],
                           bases=(Base1, Base2))
        # The base dataclass's field is a required __init__ argument too.
        with self.assertRaisesRegex(TypeError, 'required positional'):
            inst = C(2)
        inst = C(1, 2)
        for klass in (C, Base1, Base2):
            self.assertIsInstance(inst, klass)

        self.assertEqual((inst.x, inst.y), (1, 2))

    def test_init_var(self):
        def post_init(self, y):
            self.x *= y

        C = make_dataclass('C',
                           [('x', int),
                            ('y', InitVar[int]),
                            ],
                           namespace={'__post_init__': post_init},
                           )
        inst = C(2, 3)
        # y is an InitVar: consumed by __post_init__, never stored.
        self.assertEqual(vars(inst), {'x': 6})
        self.assertEqual(len(fields(inst)), 1)

    def test_class_var(self):
        C = make_dataclass('C',
                           [('x', int),
                            ('y', ClassVar[int], 10),
                            ('z', ClassVar[int], field(default=20)),
                            ])
        inst = C(1)
        # ClassVars are class attributes, not instance fields.
        self.assertEqual(vars(inst), {'x': 1})
        self.assertEqual(len(fields(inst)), 1)
        self.assertEqual(C.y, 10)
        self.assertEqual(C.z, 20)

    def test_other_params(self):
        C = make_dataclass('C',
                           [('x', int),
                            ('y', ClassVar[int], 10),
                            ('z', ClassVar[int], field(default=20)),
                            ],
                           init=False)
        # init=False is forwarded: a repr exists, __init__ does not.
        self.assertNotIn('__init__', vars(C))
        self.assertIn('__repr__', vars(C))

        # Unknown keyword arguments are rejected.
        with self.assertRaisesRegex(TypeError, 'unexpected keyword argument'):
            C = make_dataclass('C',
                               [],
                               xxinit=False)

    def test_no_types(self):
        # Bare names get the string annotation 'typing.Any'.
        C = make_dataclass('Point', ['x', 'y', 'z'])
        inst = C(1, 2, 3)
        self.assertEqual(vars(inst), {'x': 1, 'y': 2, 'z': 3})
        self.assertEqual(C.__annotations__, {'x': 'typing.Any',
                                             'y': 'typing.Any',
                                             'z': 'typing.Any'})

        # Typed and untyped specs can be mixed freely.
        C = make_dataclass('Point', ['x', ('y', int), 'z'])
        inst = C(1, 2, 3)
        self.assertEqual(vars(inst), {'x': 1, 'y': 2, 'z': 3})
        self.assertEqual(C.__annotations__, {'x': 'typing.Any',
                                             'y': int,
                                             'z': 'typing.Any'})

    def test_invalid_type_specification(self):
        # Tuples of the wrong arity are rejected...
        for bad_field in [(),
                          (1, 2, 3, 4),
                          ]:
            with self.subTest(bad_field=bad_field):
                with self.assertRaisesRegex(TypeError, r'Invalid field: '):
                    make_dataclass('C', ['a', bad_field])

        # ...and specs with no len() fail even earlier.
        for bad_field in [float,
                          lambda x:x,
                          ]:
            with self.subTest(bad_field=bad_field):
                with self.assertRaisesRegex(TypeError, r'has no len\(\)'):
                    make_dataclass('C', ['a', bad_field])

    def test_duplicate_field_names(self):
        for fname in ['a', 'ab']:
            with self.subTest(field=fname):
                with self.assertRaisesRegex(TypeError, 'Field name duplicated'):
                    make_dataclass('C', [fname, 'a', fname])

    def test_keyword_field_names(self):
        # Keywords are rejected in any position of the field list.
        for fname in ['for', 'async', 'await', 'as']:
            with self.subTest(field=fname):
                for spec in (['a', fname], [fname], [fname, 'a']):
                    with self.assertRaisesRegex(TypeError, 'must not be keywords'):
                        make_dataclass('C', spec)

    def test_non_identifier_field_names(self):
        # Non-identifiers are rejected in any position of the field list.
        for fname in ['()', 'x,y', '*', '2@3', '', 'little johnny tables']:
            with self.subTest(field=fname):
                for spec in (['a', fname], [fname], [fname, 'a']):
                    with self.assertRaisesRegex(TypeError, 'must be valid identifiers'):
                        make_dataclass('C', spec)

    def test_underscore_field_names(self):
        # Unlike namedtuple, underscored field names are fine.
        make_dataclass('C', ['_', '_a', 'a_a', 'a_'])

    def test_funny_class_names_names(self):
        # Weird class names are allowed, mirroring types.new_class.
        for classname in ['()', 'x,y', '*', '2@3', '']:
            with self.subTest(classname=classname):
                C = make_dataclass(classname, ['a', 'b'])
                self.assertEqual(C.__name__, classname)
+
+class TestReplace(unittest.TestCase):
+ def test(self):
+ @dataclass(frozen=True)
+ class C:
+ x: int
+ y: int
+
+ c = C(1, 2)
+ c1 = replace(c, x=3)
+ self.assertEqual(c1.x, 3)
+ self.assertEqual(c1.y, 2)
+
+ def test_frozen(self):
+ @dataclass(frozen=True)
+ class C:
+ x: int
+ y: int
+ z: int = field(init=False, default=10)
+ t: int = field(init=False, default=100)
+
+ c = C(1, 2)
+ c1 = replace(c, x=3)
+ self.assertEqual((c.x, c.y, c.z, c.t), (1, 2, 10, 100))
+ self.assertEqual((c1.x, c1.y, c1.z, c1.t), (3, 2, 10, 100))
+
+
+ with self.assertRaisesRegex(ValueError, 'init=False'):
+ replace(c, x=3, z=20, t=50)
+ with self.assertRaisesRegex(ValueError, 'init=False'):
+ replace(c, z=20)
+ replace(c, x=3, z=20, t=50)
+
+ # Make sure the result is still frozen.
+ with self.assertRaisesRegex(FrozenInstanceError, "cannot assign to field 'x'"):
+ c1.x = 3
+
+ # Make sure we can't replace an attribute that doesn't exist,
+ # if we're also replacing one that does exist. Test this
+ # here, because setting attributes on frozen instances is
+ # handled slightly differently from non-frozen ones.
+ with self.assertRaisesRegex(TypeError, r"__init__\(\) got an unexpected "
+ "keyword argument 'a'"):
+ c1 = replace(c, x=20, a=5)
+
+ def test_invalid_field_name(self):
+ @dataclass(frozen=True)
+ class C:
+ x: int
+ y: int
+
+ c = C(1, 2)
+ with self.assertRaisesRegex(TypeError, r"__init__\(\) got an unexpected "
+ "keyword argument 'z'"):
+ c1 = replace(c, z=3)
+
+ def test_invalid_object(self):
+ @dataclass(frozen=True)
+ class C:
+ x: int
+ y: int
+
+ with self.assertRaisesRegex(TypeError, 'dataclass instance'):
+ replace(C, x=3)
+
+ with self.assertRaisesRegex(TypeError, 'dataclass instance'):
+ replace(0, x=3)
+
+ def test_no_init(self):
+ @dataclass
+ class C:
+ x: int
+ y: int = field(init=False, default=10)
+
+ c = C(1)
+ c.y = 20
+
+ # Make sure y gets the default value.
+ c1 = replace(c, x=5)
+ self.assertEqual((c1.x, c1.y), (5, 10))
+
+ # Trying to replace y is an error.
+ with self.assertRaisesRegex(ValueError, 'init=False'):
+ replace(c, x=2, y=30)
+
+ with self.assertRaisesRegex(ValueError, 'init=False'):
+ replace(c, y=30)
+
+ def test_classvar(self):
+ @dataclass
+ class C:
+ x: int
+ y: ClassVar[int] = 1000
+
+ c = C(1)
+ d = C(2)
+
+ self.assertIs(c.y, d.y)
+ self.assertEqual(c.y, 1000)
+
+ # Trying to replace y is an error: can't replace ClassVars.
+ with self.assertRaisesRegex(TypeError, r"__init__\(\) got an "
+ "unexpected keyword argument 'y'"):
+ replace(c, y=30)
+
+ replace(c, x=5)
+
+ def test_initvar_is_specified(self):
+ @dataclass
+ class C:
+ x: int
+ y: InitVar[int]
+
+ def __post_init__(self, y):
+ self.x *= y
+
+ c = C(1, 10)
+ self.assertEqual(c.x, 10)
+ with self.assertRaisesRegex(ValueError, r"InitVar 'y' must be "
+ "specified with replace()"):
+ replace(c, x=3)
+ c = replace(c, x=3, y=5)
+ self.assertEqual(c.x, 15)
+
+ def test_initvar_with_default_value(self):
+ @dataclass
+ class C:
+ x: int
+ y: InitVar[int] = None
+ z: InitVar[int] = 42
+
+ def __post_init__(self, y, z):
+ if y is not None:
+ self.x += y
+ if z is not None:
+ self.x += z
+
+ c = C(x=1, y=10, z=1)
+ self.assertEqual(replace(c), C(x=12))
+ self.assertEqual(replace(c, y=4), C(x=12, y=4, z=42))
+ self.assertEqual(replace(c, y=4, z=1), C(x=12, y=4, z=1))
+
+ def test_recursive_repr(self):
+ @dataclass
+ class C:
+ f: "C"
+
+ c = C(None)
+ c.f = c
+ self.assertEqual(repr(c), "TestReplace.test_recursive_repr.<locals>.C(f=...)")
+
+ def test_recursive_repr_two_attrs(self):
+ @dataclass
+ class C:
+ f: "C"
+ g: "C"
+
+ c = C(None, None)
+ c.f = c
+ c.g = c
+ self.assertEqual(repr(c), "TestReplace.test_recursive_repr_two_attrs"
+ ".<locals>.C(f=..., g=...)")
+
+ def test_recursive_repr_indirection(self):
+ @dataclass
+ class C:
+ f: "D"
+
+ @dataclass
+ class D:
+ f: "C"
+
+ c = C(None)
+ d = D(None)
+ c.f = d
+ d.f = c
+ self.assertEqual(repr(c), "TestReplace.test_recursive_repr_indirection"
+ ".<locals>.C(f=TestReplace.test_recursive_repr_indirection"
+ ".<locals>.D(f=...))")
+
+ def test_recursive_repr_indirection_two(self):
+ @dataclass
+ class C:
+ f: "D"
+
+ @dataclass
+ class D:
+ f: "E"
+
+ @dataclass
+ class E:
+ f: "C"
+
+ c = C(None)
+ d = D(None)
+ e = E(None)
+ c.f = d
+ d.f = e
+ e.f = c
+ self.assertEqual(repr(c), "TestReplace.test_recursive_repr_indirection_two"
+ ".<locals>.C(f=TestReplace.test_recursive_repr_indirection_two"
+ ".<locals>.D(f=TestReplace.test_recursive_repr_indirection_two"
+ ".<locals>.E(f=...)))")
+
+ def test_recursive_repr_misc_attrs(self):
+ @dataclass
+ class C:
+ f: "C"
+ g: int
+
+ c = C(None, 1)
+ c.f = c
+ self.assertEqual(repr(c), "TestReplace.test_recursive_repr_misc_attrs"
+ ".<locals>.C(f=..., g=1)")
+
+ ## def test_initvar(self):
+ ## @dataclass
+ ## class C:
+ ## x: int
+ ## y: InitVar[int]
+
+ ## c = C(1, 10)
+ ## d = C(2, 20)
+
+ ## # In our case, replacing an InitVar is a no-op
+ ## self.assertEqual(c, replace(c, y=5))
+
+ ## replace(c, x=5)
+
+class TestAbstract(unittest.TestCase):
+ def test_abc_implementation(self):
+ class Ordered(abc.ABC):
+ @abc.abstractmethod
+ def __lt__(self, other):
+ pass
+
+ @abc.abstractmethod
+ def __le__(self, other):
+ pass
+
+ @dataclass(order=True)
+ class Date(Ordered):
+ year: int
+ month: 'Month'
+ day: 'int'
+
+ self.assertFalse(inspect.isabstract(Date))
+ self.assertGreater(Date(2020,12,25), Date(2020,8,31))
+
+ def test_maintain_abc(self):
+ class A(abc.ABC):
+ @abc.abstractmethod
+ def foo(self):
+ pass
+
+ @dataclass
+ class Date(A):
+ year: int
+ month: 'Month'
+ day: 'int'
+
+ self.assertTrue(inspect.isabstract(Date))
+ msg = 'class Date without an implementation for abstract method foo'
+ self.assertRaisesRegex(TypeError, msg, Date)
+
+
+class TestMatchArgs(unittest.TestCase):
+ def test_match_args(self):
+ @dataclass
+ class C:
+ a: int
+ self.assertEqual(C(42).__match_args__, ('a',))
+
+ def test_explicit_match_args(self):
+ ma = ()
+ @dataclass
+ class C:
+ a: int
+ __match_args__ = ma
+ self.assertIs(C(42).__match_args__, ma)
+
+ def test_bpo_43764(self):
+ @dataclass(repr=False, eq=False, init=False)
+ class X:
+ a: int
+ b: int
+ c: int
+ self.assertEqual(X.__match_args__, ("a", "b", "c"))
+
+ def test_match_args_argument(self):
+ @dataclass(match_args=False)
+ class X:
+ a: int
+ self.assertNotIn('__match_args__', X.__dict__)
+
+ @dataclass(match_args=False)
+ class Y:
+ a: int
+ __match_args__ = ('b',)
+ self.assertEqual(Y.__match_args__, ('b',))
+
+ @dataclass(match_args=False)
+ class Z(Y):
+ z: int
+ self.assertEqual(Z.__match_args__, ('b',))
+
+ # Ensure parent dataclass __match_args__ is seen, if child class
+ # specifies match_args=False.
+ @dataclass
+ class A:
+ a: int
+ z: int
+ @dataclass(match_args=False)
+ class B(A):
+ b: int
+ self.assertEqual(B.__match_args__, ('a', 'z'))
+
+ def test_make_dataclasses(self):
+ C = make_dataclass('C', [('x', int), ('y', int)])
+ self.assertEqual(C.__match_args__, ('x', 'y'))
+
+ C = make_dataclass('C', [('x', int), ('y', int)], match_args=True)
+ self.assertEqual(C.__match_args__, ('x', 'y'))
+
+ C = make_dataclass('C', [('x', int), ('y', int)], match_args=False)
+        self.assertNotIn('__match_args__', C.__dict__)
+
+ C = make_dataclass('C', [('x', int), ('y', int)], namespace={'__match_args__': ('z',)})
+ self.assertEqual(C.__match_args__, ('z',))
+
+
+class TestKeywordArgs(unittest.TestCase):
+ def test_no_classvar_kwarg(self):
+ msg = 'field a is a ClassVar but specifies kw_only'
+ with self.assertRaisesRegex(TypeError, msg):
+ @dataclass
+ class A:
+ a: ClassVar[int] = field(kw_only=True)
+
+ with self.assertRaisesRegex(TypeError, msg):
+ @dataclass
+ class A:
+ a: ClassVar[int] = field(kw_only=False)
+
+ with self.assertRaisesRegex(TypeError, msg):
+ @dataclass(kw_only=True)
+ class A:
+ a: ClassVar[int] = field(kw_only=False)
+
+ def test_field_marked_as_kwonly(self):
+ #######################
+ # Using dataclass(kw_only=True)
+ @dataclass(kw_only=True)
+ class A:
+ a: int
+ self.assertTrue(fields(A)[0].kw_only)
+
+ @dataclass(kw_only=True)
+ class A:
+ a: int = field(kw_only=True)
+ self.assertTrue(fields(A)[0].kw_only)
+
+ @dataclass(kw_only=True)
+ class A:
+ a: int = field(kw_only=False)
+ self.assertFalse(fields(A)[0].kw_only)
+
+ #######################
+ # Using dataclass(kw_only=False)
+ @dataclass(kw_only=False)
+ class A:
+ a: int
+ self.assertFalse(fields(A)[0].kw_only)
+
+ @dataclass(kw_only=False)
+ class A:
+ a: int = field(kw_only=True)
+ self.assertTrue(fields(A)[0].kw_only)
+
+ @dataclass(kw_only=False)
+ class A:
+ a: int = field(kw_only=False)
+ self.assertFalse(fields(A)[0].kw_only)
+
+ #######################
+ # Not specifying dataclass(kw_only)
+ @dataclass
+ class A:
+ a: int
+ self.assertFalse(fields(A)[0].kw_only)
+
+ @dataclass
+ class A:
+ a: int = field(kw_only=True)
+ self.assertTrue(fields(A)[0].kw_only)
+
+ @dataclass
+ class A:
+ a: int = field(kw_only=False)
+ self.assertFalse(fields(A)[0].kw_only)
+
+ def test_match_args(self):
+ # kw fields don't show up in __match_args__.
+ @dataclass(kw_only=True)
+ class C:
+ a: int
+ self.assertEqual(C(a=42).__match_args__, ())
+
+ @dataclass
+ class C:
+ a: int
+ b: int = field(kw_only=True)
+ self.assertEqual(C(42, b=10).__match_args__, ('a',))
+
+ def test_KW_ONLY(self):
+ @dataclass
+ class A:
+ a: int
+ _: KW_ONLY
+ b: int
+ c: int
+ A(3, c=5, b=4)
+ msg = "takes 2 positional arguments but 4 were given"
+ with self.assertRaisesRegex(TypeError, msg):
+ A(3, 4, 5)
+
+
+ @dataclass(kw_only=True)
+ class B:
+ a: int
+ _: KW_ONLY
+ b: int
+ c: int
+ B(a=3, b=4, c=5)
+ msg = "takes 1 positional argument but 4 were given"
+ with self.assertRaisesRegex(TypeError, msg):
+ B(3, 4, 5)
+
+ # Explicitly make a field that follows KW_ONLY be non-keyword-only.
+ @dataclass
+ class C:
+ a: int
+ _: KW_ONLY
+ b: int
+ c: int = field(kw_only=False)
+ c = C(1, 2, b=3)
+ self.assertEqual(c.a, 1)
+ self.assertEqual(c.b, 3)
+ self.assertEqual(c.c, 2)
+ c = C(1, b=3, c=2)
+ self.assertEqual(c.a, 1)
+ self.assertEqual(c.b, 3)
+ self.assertEqual(c.c, 2)
+ c = C(1, b=3, c=2)
+ self.assertEqual(c.a, 1)
+ self.assertEqual(c.b, 3)
+ self.assertEqual(c.c, 2)
+ c = C(c=2, b=3, a=1)
+ self.assertEqual(c.a, 1)
+ self.assertEqual(c.b, 3)
+ self.assertEqual(c.c, 2)
+
+ def test_KW_ONLY_as_string(self):
+ @dataclass
+ class A:
+ a: int
+ _: 'dataclasses.KW_ONLY'
+ b: int
+ c: int
+ A(3, c=5, b=4)
+ msg = "takes 2 positional arguments but 4 were given"
+ with self.assertRaisesRegex(TypeError, msg):
+ A(3, 4, 5)
+
+ def test_KW_ONLY_twice(self):
+ msg = "'Y' is KW_ONLY, but KW_ONLY has already been specified"
+
+ with self.assertRaisesRegex(TypeError, msg):
+ @dataclass
+ class A:
+ a: int
+ X: KW_ONLY
+ Y: KW_ONLY
+ b: int
+ c: int
+
+ with self.assertRaisesRegex(TypeError, msg):
+ @dataclass
+ class A:
+ a: int
+ X: KW_ONLY
+ b: int
+ Y: KW_ONLY
+ c: int
+
+ with self.assertRaisesRegex(TypeError, msg):
+ @dataclass
+ class A:
+ a: int
+ X: KW_ONLY
+ b: int
+ c: int
+ Y: KW_ONLY
+
+ # But this usage is okay, since it's not using KW_ONLY.
+ @dataclass
+ class A:
+ a: int
+ _: KW_ONLY
+ b: int
+ c: int = field(kw_only=True)
+
+ # And if inheriting, it's okay.
+ @dataclass
+ class A:
+ a: int
+ _: KW_ONLY
+ b: int
+ c: int
+ @dataclass
+ class B(A):
+ _: KW_ONLY
+ d: int
+
+ # Make sure the error is raised in a derived class.
+ with self.assertRaisesRegex(TypeError, msg):
+ @dataclass
+ class A:
+ a: int
+ _: KW_ONLY
+ b: int
+ c: int
+ @dataclass
+ class B(A):
+ X: KW_ONLY
+ d: int
+ Y: KW_ONLY
+
+
+ def test_post_init(self):
+ @dataclass
+ class A:
+ a: int
+ _: KW_ONLY
+ b: InitVar[int]
+ c: int
+ d: InitVar[int]
+ def __post_init__(self, b, d):
+ raise CustomError(f'{b=} {d=}')
+ with self.assertRaisesRegex(CustomError, 'b=3 d=4'):
+ A(1, c=2, b=3, d=4)
+
+ @dataclass
+ class B:
+ a: int
+ _: KW_ONLY
+ b: InitVar[int]
+ c: int
+ d: InitVar[int]
+ def __post_init__(self, b, d):
+ self.a = b
+ self.c = d
+ b = B(1, c=2, b=3, d=4)
+ self.assertEqual(asdict(b), {'a': 3, 'c': 4})
+
+ def test_defaults(self):
+ # For kwargs, make sure we can have defaults after non-defaults.
+ @dataclass
+ class A:
+ a: int = 0
+ _: KW_ONLY
+ b: int
+ c: int = 1
+ d: int
+
+ a = A(d=4, b=3)
+ self.assertEqual(a.a, 0)
+ self.assertEqual(a.b, 3)
+ self.assertEqual(a.c, 1)
+ self.assertEqual(a.d, 4)
+
+ # Make sure we still check for non-kwarg non-defaults not following
+ # defaults.
+ err_regex = "non-default argument 'z' follows default argument"
+ with self.assertRaisesRegex(TypeError, err_regex):
+ @dataclass
+ class A:
+ a: int = 0
+ z: int
+ _: KW_ONLY
+ b: int
+ c: int = 1
+ d: int
+
+ def test_make_dataclass(self):
+ A = make_dataclass("A", ['a'], kw_only=True)
+ self.assertTrue(fields(A)[0].kw_only)
+
+ B = make_dataclass("B",
+ ['a', ('b', int, field(kw_only=False))],
+ kw_only=True)
+ self.assertTrue(fields(B)[0].kw_only)
+ self.assertFalse(fields(B)[1].kw_only)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/Tools/make_dataclass_tests.py b/Tools/make_dataclass_tests.py
new file mode 100644
index 000000000..dc38eee70
--- /dev/null
+++ b/Tools/make_dataclass_tests.py
@@ -0,0 +1,443 @@
+# Used to generate tests/run/test_dataclasses.pyx by translating the CPython test suite
+# dataclass file. Initially run using Python 3.10 - this file is not designed to be
+# backwards compatible since it will be run manually and infrequently.
+
+import ast
+import os.path
+import sys
+
+unavailable_functions = frozenset(
+ {
+ "dataclass_textanno", # part of CPython test module
+ "dataclass_module_1", # part of CPython test module
+ "make_dataclass", # not implemented in Cython dataclasses (probably won't be implemented)
+ }
+)
+
+skip_tests = frozenset(
+ {
+ # needs Cython compile
+ # ====================
+ ("TestCase", "test_field_default_default_factory_error"),
+ ("TestCase", "test_two_fields_one_default"),
+ ("TestCase", "test_overwrite_hash"),
+ ("TestCase", "test_eq_order"),
+ ("TestCase", "test_no_unhashable_default"),
+ ("TestCase", "test_disallowed_mutable_defaults"),
+ ("TestCase", "test_classvar_default_factory"),
+ ("TestCase", "test_field_metadata_mapping"),
+ ("TestFieldNoAnnotation", "test_field_without_annotation"),
+ (
+ "TestFieldNoAnnotation",
+ "test_field_without_annotation_but_annotation_in_base",
+ ),
+ (
+ "TestFieldNoAnnotation",
+ "test_field_without_annotation_but_annotation_in_base_not_dataclass",
+ ),
+ ("TestOrdering", "test_overwriting_order"),
+ ("TestHash", "test_hash_rules"),
+ ("TestHash", "test_hash_no_args"),
+ ("TestFrozen", "test_inherit_nonfrozen_from_empty_frozen"),
+ ("TestFrozen", "test_inherit_nonfrozen_from_frozen"),
+ ("TestFrozen", "test_inherit_frozen_from_nonfrozen"),
+ ("TestFrozen", "test_overwriting_frozen"),
+ ("TestSlots", "test_add_slots_when_slots_exists"),
+ ("TestSlots", "test_cant_inherit_from_iterator_slots"),
+ ("TestSlots", "test_weakref_slot_without_slot"),
+ ("TestKeywordArgs", "test_no_classvar_kwarg"),
+ ("TestKeywordArgs", "test_KW_ONLY_twice"),
+ ("TestKeywordArgs", "test_defaults"),
+ # uses local variable in class definition
+ ("TestCase", "test_default_factory"),
+ ("TestCase", "test_default_factory_with_no_init"),
+ ("TestCase", "test_field_default"),
+ ("TestCase", "test_function_annotations"),
+ ("TestDescriptors", "test_lookup_on_instance"),
+ ("TestCase", "test_default_factory_not_called_if_value_given"),
+ ("TestCase", "test_class_attrs"),
+ ("TestCase", "test_hash_field_rules"),
+        ("TestStringAnnotations",),  # almost all the tests here use local variables
+ # Currently unsupported
+ # =====================
+ (
+ "TestOrdering",
+ "test_functools_total_ordering",
+ ), # combination of cython dataclass and total_ordering
+ ("TestCase", "test_missing_default_factory"), # we're MISSING MISSING
+ ("TestCase", "test_missing_default"), # MISSING
+ ("TestCase", "test_missing_repr"), # MISSING
+ ("TestSlots",), # __slots__ isn't understood
+ ("TestMatchArgs",),
+ ("TestKeywordArgs", "test_field_marked_as_kwonly"),
+ ("TestKeywordArgs", "test_match_args"),
+ ("TestKeywordArgs", "test_KW_ONLY"),
+ ("TestKeywordArgs", "test_KW_ONLY_as_string"),
+ ("TestKeywordArgs", "test_post_init"),
+ (
+ "TestCase",
+ "test_class_var_frozen",
+ ), # __annotations__ not present on cdef classes https://github.com/cython/cython/issues/4519
+ ("TestCase", "test_dont_include_other_annotations"), # __annotations__
+ ("TestDocString",), # don't think cython dataclasses currently set __doc__
+ # either cython.dataclasses.field or cython.dataclasses.dataclass called directly as functions
+ # (will probably never be supported)
+ ("TestCase", "test_field_repr"),
+ ("TestCase", "test_dynamic_class_creation"),
+ ("TestCase", "test_dynamic_class_creation_using_field"),
+ # Requires inheritance from non-cdef class
+ ("TestCase", "test_is_dataclass_genericalias"),
+ ("TestCase", "test_generic_extending"),
+ ("TestCase", "test_generic_dataclasses"),
+ ("TestCase", "test_generic_dynamic"),
+ ("TestInit", "test_inherit_from_protocol"),
+ ("TestAbstract", "test_abc_implementation"),
+ ("TestAbstract", "test_maintain_abc"),
+ # Requires multiple inheritance from extension types
+ ("TestCase", "test_post_init_not_auto_added"),
+ # Refers to nonlocal from enclosing function
+ (
+ "TestCase",
+ "test_post_init_staticmethod",
+ ), # TODO replicate the gist of the test elsewhere
+        # PEP487 isn't supported in Cython
+ ("TestDescriptors", "test_non_descriptor"),
+ ("TestDescriptors", "test_set_name"),
+ ("TestDescriptors", "test_setting_field_calls_set"),
+ ("TestDescriptors", "test_setting_uninitialized_descriptor_field"),
+ # Looks up __dict__, which cdef classes don't typically have
+ ("TestCase", "test_init_false_no_default"),
+ ("TestCase", "test_init_var_inheritance"), # __dict__ again
+ ("TestCase", "test_base_has_init"),
+ ("TestInit", "test_base_has_init"), # needs __dict__ for vars
+ # Requires arbitrary attributes to be writeable
+ ("TestCase", "test_post_init_super"),
+ ('TestCase', 'test_init_in_order'),
+ # Cython being strict about argument types - expected difference
+ ("TestDescriptors", "test_getting_field_calls_get"),
+ ("TestDescriptors", "test_init_calls_set"),
+ ("TestHash", "test_eq_only"),
+ # I think an expected difference with cdef classes - the property will be in the dict
+ ("TestCase", "test_items_in_dicts"),
+ # These tests are probably fine, but the string substitution in this file doesn't get it right
+ ("TestRepr", "test_repr"),
+ ("TestCase", "test_not_in_repr"),
+ ('TestRepr', 'test_no_repr'),
+ # class variable doesn't exist in Cython so uninitialized variable appears differently - for now this is deliberate
+ ('TestInit', 'test_no_init'),
+ # I believe the test works but the ordering functions do appear in the class dict (and default slot wrappers which
+ # just raise NotImplementedError
+ ('TestOrdering', 'test_no_order'),
+ # not possible to add attributes on extension types
+ ("TestCase", "test_post_init_classmethod"),
+ # Cannot redefine the same field in a base dataclass (tested in dataclass_e6)
+ ("TestCase", "test_field_order"),
+ (
+ "TestCase",
+ "test_overwrite_fields_in_derived_class",
+ ),
+ # Bugs
+ #======
+ # not specifically a dataclass issue - a C int crashes classvar
+ ("TestCase", "test_class_var"),
+ (
+ "TestFrozen",
+ ), # raises AttributeError, not FrozenInstanceError (may be hard to fix)
+ ('TestCase', 'test_post_init'), # Works except for AttributeError instead of FrozenInstanceError
+ ("TestReplace", "test_frozen"), # AttributeError not FrozenInstanceError
+ (
+ "TestCase",
+ "test_dataclasses_qualnames",
+ ), # doesn't define __setattr__ and just relies on Cython to enforce readonly properties
+ ("TestCase", "test_compare_subclasses"), # wrong comparison
+ ("TestCase", "test_simple_compare"), # wrong comparison
+ (
+ "TestCase",
+ "test_field_named_self",
+ ), # I think just an error in inspecting the signature
+ (
+ "TestCase",
+ "test_init_var_default_factory",
+ ), # should be raising a compile error
+ ("TestCase", "test_init_var_no_default"), # should be raising a compile error
+ ("TestCase", "test_init_var_with_default"), # not sure...
+ ("TestReplace", "test_initvar_with_default_value"), # needs investigating
+ # Maybe bugs?
+ # ==========
+ # non-default argument 'z' follows default argument in dataclass __init__ - this message looks right to me!
+ ("TestCase", "test_class_marker"),
+ # cython.dataclasses.field parameter 'metadata' must be a literal value - possibly not something we can support?
+ ("TestCase", "test_field_metadata_custom_mapping"),
+ (
+ "TestCase",
+ "test_class_var_default_factory",
+ ), # possibly to do with ClassVar being assigned a field
+ (
+ "TestCase",
+ "test_class_var_with_default",
+ ), # possibly to do with ClassVar being assigned a field
+ (
+ "TestDescriptors",
+ ), # mostly don't work - I think this may be a limitation of cdef classes but needs investigating
+ }
+)
+
+version_specific_skips = {
+ # The version numbers are the first version that the test should be run on
+ ("TestCase", "test_init_var_preserve_type"): (
+ 3,
+ 10,
+ ), # needs language support for | operator on types
+}
+
+class DataclassInDecorators(ast.NodeVisitor):
+ found = False
+
+ def visit_Name(self, node):
+ if node.id == "dataclass":
+ self.found = True
+ return self.generic_visit(node)
+
+ def generic_visit(self, node):
+ if self.found:
+ return # skip
+ return super().generic_visit(node)
+
+
+def dataclass_in_decorators(decorator_list):
+ finder = DataclassInDecorators()
+ for dec in decorator_list:
+ finder.visit(dec)
+ if finder.found:
+ return True
+ return False
+
+
+class SubstituteNameString(ast.NodeTransformer):
+ def __init__(self, substitutions):
+ super().__init__()
+ self.substitutions = substitutions
+
+ def visit_Constant(self, node):
+ # attempt to handle some difference in class names
+ # (note: requires Python>=3.8)
+ if isinstance(node.value, str):
+ if node.value.find("<locals>") != -1:
+ import re
+
+                new_value = new_value2 = re.sub(r"[\w.]*<locals>", "", node.value)
+ for key, value in self.substitutions.items():
+                    new_value2 = re.sub(rf"(?<![\w])[.]{key}(?![\w])", value, new_value2)
+ if new_value != new_value2:
+ node.value = new_value2
+ return node
+
+
+class SubstituteName(SubstituteNameString):
+ def visit_Name(self, node):
+ if isinstance(node.ctx, ast.Store): # don't reassign lhs
+ return node
+ replacement = self.substitutions.get(node.id, None)
+ if replacement is not None:
+ return ast.Name(id=replacement, ctx=node.ctx)
+ else:
+ return node
+
+
+class IdentifyCdefClasses(ast.NodeVisitor):
+ def __init__(self):
+ super().__init__()
+ self.top_level_class = True
+ self.classes = {}
+ self.cdef_classes = set()
+
+ def visit_ClassDef(self, node):
+ top_level_class, self.top_level_class = self.top_level_class, False
+ try:
+ if not top_level_class:
+ self.classes[node.name] = node
+ if dataclass_in_decorators(node.decorator_list):
+ self.handle_cdef_class(node)
+ self.generic_visit(node) # any nested classes in it?
+ else:
+ self.generic_visit(node)
+ finally:
+ self.top_level_class = top_level_class
+
+ def visit_FunctionDef(self, node):
+ classes, self.classes = self.classes, {}
+ self.generic_visit(node)
+ self.classes = classes
+
+ def handle_cdef_class(self, cls_node):
+ if cls_node not in self.cdef_classes:
+ self.cdef_classes.add(cls_node)
+ # go back through previous classes we've seen and pick out any first bases
+ if cls_node.bases and isinstance(cls_node.bases[0], ast.Name):
+ base0_node = self.classes.get(cls_node.bases[0].id)
+ if base0_node:
+ self.handle_cdef_class(base0_node)
+
+
+class ExtractDataclassesToTopLevel(ast.NodeTransformer):
+ def __init__(self, cdef_classes_set):
+ super().__init__()
+ self.nested_name = []
+ self.current_function_global_classes = []
+ self.global_classes = []
+ self.cdef_classes_set = cdef_classes_set
+ self.used_names = set()
+ self.collected_substitutions = {}
+ self.uses_unavailable_name = False
+ self.top_level_class = True
+
+ def visit_ClassDef(self, node):
+ if not self.top_level_class:
+ # Include any non-toplevel class in this to be able
+ # to test inheritance.
+
+ self.generic_visit(node) # any nested classes in it?
+ if not node.body:
+                node.body.append(ast.Pass())
+
+ # First, make it a C class.
+ if node in self.cdef_classes_set:
+ node.decorator_list.append(ast.Name(id="cclass", ctx=ast.Load()))
+ # otherwise move it to the global scope, but don't make it cdef
+ # change the name
+ old_name = node.name
+ new_name = "_".join([node.name] + self.nested_name)
+ while new_name in self.used_names:
+ new_name = new_name + "_"
+ node.name = new_name
+ self.current_function_global_classes.append(node)
+ self.used_names.add(new_name)
+ # hmmmm... possibly there's a few cases where there's more than one name?
+ self.collected_substitutions[old_name] = node.name
+
+ return ast.Assign(
+ targets=[ast.Name(id=old_name, ctx=ast.Store())],
+ value=ast.Name(id=new_name, ctx=ast.Load()),
+ lineno=-1,
+ )
+ else:
+ top_level_class, self.top_level_class = self.top_level_class, False
+ self.nested_name.append(node.name)
+ if tuple(self.nested_name) in skip_tests:
+ self.top_level_class = top_level_class
+ self.nested_name.pop()
+ return None
+ self.generic_visit(node)
+ self.nested_name.pop()
+ if not node.body:
+ node.body.append(ast.Pass())
+ self.top_level_class = top_level_class
+ return node
+
+ def visit_FunctionDef(self, node):
+ self.nested_name.append(node.name)
+ if tuple(self.nested_name) in skip_tests:
+ self.nested_name.pop()
+ return None
+ if tuple(self.nested_name) in version_specific_skips:
+ version = version_specific_skips[tuple(self.nested_name)]
+ decorator = ast.parse(
+ f"skip_on_versions_below({version})", mode="eval"
+ ).body
+ node.decorator_list.append(decorator)
+ collected_subs, self.collected_substitutions = self.collected_substitutions, {}
+ uses_unavailable_name, self.uses_unavailable_name = (
+ self.uses_unavailable_name,
+ False,
+ )
+ current_func_globs, self.current_function_global_classes = (
+ self.current_function_global_classes,
+ [],
+ )
+
+ # visit once to work out what the substitutions should be
+ self.generic_visit(node)
+ if self.collected_substitutions:
+ # replace strings in this function
+ node = SubstituteNameString(self.collected_substitutions).visit(node)
+ replacer = SubstituteName(self.collected_substitutions)
+ # replace any base classes
+ for global_class in self.current_function_global_classes:
+ global_class = replacer.visit(global_class)
+ self.global_classes.append(self.current_function_global_classes)
+
+ self.nested_name.pop()
+ self.collected_substitutions = collected_subs
+ if self.uses_unavailable_name:
+ node = None
+ self.uses_unavailable_name = uses_unavailable_name
+ self.current_function_global_classes = current_func_globs
+ return node
+
+ def visit_Name(self, node):
+ if node.id in unavailable_functions:
+ self.uses_unavailable_name = True
+ return self.generic_visit(node)
+
+ def visit_Import(self, node):
+ return None # drop imports, we add these into the text ourself
+
+ def visit_ImportFrom(self, node):
+ return None # drop imports, we add these into the text ourself
+
+ def visit_Call(self, node):
+ if (
+ isinstance(node.func, ast.Attribute)
+ and node.func.attr == "assertRaisesRegex"
+ ):
+ # we end up with a bunch of subtle name changes that are very hard to correct for
+ # therefore, replace with "assertRaises"
+ node.func.attr = "assertRaises"
+ node.args.pop()
+ return self.generic_visit(node)
+
+ def visit_Module(self, node):
+ self.generic_visit(node)
+ node.body[0:0] = self.global_classes
+ return node
+
+ def visit_AnnAssign(self, node):
+ # string annotations are forward declarations but the string will be wrong
+ # (because we're renaming the class)
+ if (isinstance(node.annotation, ast.Constant) and
+ isinstance(node.annotation.value, str)):
+ # although it'd be good to resolve these declarations, for the
+ # sake of the tests they only need to be "object"
+            node.annotation = ast.Name(id="object", ctx=ast.Load())
+
+ return node
+
+
+def main():
+ script_path = os.path.split(sys.argv[0])[0]
+ filename = "test_dataclasses.py"
+ py_module_path = os.path.join(script_path, "dataclass_test_data", filename)
+ with open(py_module_path, "r") as f:
+ tree = ast.parse(f.read(), filename)
+
+ cdef_class_finder = IdentifyCdefClasses()
+ cdef_class_finder.visit(tree)
+ transformer = ExtractDataclassesToTopLevel(cdef_class_finder.cdef_classes)
+ tree = transformer.visit(tree)
+
+ output_path = os.path.join(script_path, "..", "tests", "run", filename + "x")
+ with open(output_path, "w") as f:
+ print("# AUTO-GENERATED BY Tools/make_dataclass_tests.py", file=f)
+ print("# DO NOT EDIT", file=f)
+ print(file=f)
+ # the directive doesn't get applied outside the include if it's put
+ # in the pxi file
+ print("# cython: language_level=3", file=f)
+ # any extras Cython needs to add go in this include file
+ print('include "test_dataclasses.pxi"', file=f)
+ print(file=f)
+ print(ast.unparse(tree), file=f)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/appveyor.yml b/appveyor.yml
deleted file mode 100644
index 6787b98cc..000000000
--- a/appveyor.yml
+++ /dev/null
@@ -1,138 +0,0 @@
-# https://ci.appveyor.com/project/cython/cython
-
-environment:
-
- global:
- # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the
- # /E:ON and /V:ON options are not enabled in the batch script interpreter
- # See: https://stackoverflow.com/questions/11267463/compiling-python-modules-on-windows-x64/13751649#13751649
- WITH_ENV: "cmd /E:ON /V:ON /C .\\appveyor\\run_with_env.cmd"
- BACKEND: c
- PARALLEL: "-j4"
- EXTRA_CFLAGS: ""
-
- matrix:
- - PYTHON: "C:\\Python27"
- PYTHON_VERSION: "2.7"
- PYTHON_ARCH: "32"
- PYTHONIOENCODING: "utf-8"
- PARALLEL: ""
-
- - PYTHON: "C:\\Python27-x64"
- PYTHON_VERSION: "2.7"
- PYTHON_ARCH: "64"
- PYTHONIOENCODING: "utf-8"
- PARALLEL: ""
-
- - PYTHON: "C:\\Python39"
- PYTHON_VERSION: "3.9"
- PYTHON_ARCH: "32"
-
- - PYTHON: "C:\\Python39-x64"
- PYTHON_VERSION: "3.9"
- PYTHON_ARCH: "64"
-
- - PYTHON: "C:\\Python38"
- PYTHON_VERSION: "3.8"
- PYTHON_ARCH: "32"
-
- - PYTHON: "C:\\Python38-x64"
- PYTHON_VERSION: "3.8"
- PYTHON_ARCH: "64"
- EXTRA_CFLAGS: "-DCYTHON_USE_TYPE_SPECS=1"
-
- - PYTHON: "C:\\Python38-x64"
- PYTHON_VERSION: "3.8"
- PYTHON_ARCH: "64"
- BACKEND: c,cpp
-
- - PYTHON: "C:\\Python37"
- PYTHON_VERSION: "3.7"
- PYTHON_ARCH: "32"
- BACKEND: c,cpp
-
- - PYTHON: "C:\\Python37-x64"
- PYTHON_VERSION: "3.7"
- PYTHON_ARCH: "64"
-
- - PYTHON: "C:\\Python37-x64"
- PYTHON_VERSION: "3.7"
- PYTHON_ARCH: "64"
- EXTRA_CFLAGS: "-DCYTHON_USE_TYPE_SPECS=1"
-
- - PYTHON: "C:\\Python37-x64"
- PYTHON_VERSION: "3.7"
- PYTHON_ARCH: "64"
- BACKEND: cpp
-
- - PYTHON: "C:\\Python36"
- PYTHON_VERSION: "3.6"
- PYTHON_ARCH: "32"
-
- - PYTHON: "C:\\Python36-x64"
- PYTHON_VERSION: "3.6"
- PYTHON_ARCH: "64"
-
- - PYTHON: "C:\\Python35"
- PYTHON_VERSION: "3.5"
- PYTHON_ARCH: "32"
-
- - PYTHON: "C:\\Python35-x64"
- PYTHON_VERSION: "3.5"
- PYTHON_ARCH: "64"
-
- - PYTHON: "C:\\Python34"
- PYTHON_VERSION: "3.4"
- PYTHON_ARCH: "32"
- PARALLEL: ""
-
- - PYTHON: "C:\\Python34-x64"
- PYTHON_VERSION: "3.4"
- PYTHON_ARCH: "64"
- PARALLEL: ""
-
- - PYTHON: "C:\\Python27-x64"
- PYTHON_VERSION: "2.7"
- PYTHON_ARCH: "64"
- BACKEND: cpp
- PYTHONIOENCODING: "utf-8"
- PARALLEL: ""
-
-clone_depth: 5
-
-branches:
- only:
- - master
- - release
- - 0.29.x
-
-init:
- - "ECHO Python %PYTHON_VERSION% (%PYTHON_ARCH%bit) from %PYTHON%"
-
-install:
- - "powershell appveyor\\install.ps1"
- - "%PYTHON%\\python.exe --version"
- - "%PYTHON%\\Scripts\\pip.exe --version"
- - "%PYTHON%\\Scripts\\wheel.exe version"
-
-build: off
-build_script:
- - "%WITH_ENV% %PYTHON%\\python.exe setup.py build_ext %PARALLEL%"
- - "%WITH_ENV% %PYTHON%\\python.exe setup.py build_ext --inplace"
- - "%WITH_ENV% %PYTHON%\\python.exe setup.py bdist_wheel"
-
-test: off
-test_script:
- - "%PYTHON%\\Scripts\\pip.exe install -r test-requirements.txt"
- - "%PYTHON%\\Scripts\\pip.exe install win_unicode_console"
- - "set CFLAGS=/Od /W3 %EXTRA_CFLAGS%"
- - "%WITH_ENV% %PYTHON%\\python.exe runtests.py -vv --backend=%BACKEND% --no-code-style -j5"
-
-artifacts:
- - path: dist\*
-
-cache:
- - C:\Downloads\Cython -> appveyor\install.ps1
-
-#on_success:
-# - TODO: upload the content of dist/*.whl to a public wheelhouse
diff --git a/docs/examples/tutorial/clibraries/queue.py b/docs/examples/tutorial/clibraries/queue.py
index 45529fa94..e99b9b32c 100644
--- a/docs/examples/tutorial/clibraries/queue.py
+++ b/docs/examples/tutorial/clibraries/queue.py
@@ -2,7 +2,7 @@ from cython.cimports import cqueue
@cython.cclass
class Queue:
- _c_queue = cython.declare(cython.pointer(cqueue.Queue))
+ _c_queue: cython.pointer(cqueue.Queue)
def __cinit__(self):
self._c_queue = cqueue.queue_new()
diff --git a/docs/examples/tutorial/embedding/embedded.pyx b/docs/examples/tutorial/embedding/embedded.pyx
index 26704d45f..2ed823945 100644
--- a/docs/examples/tutorial/embedding/embedded.pyx
+++ b/docs/examples/tutorial/embedding/embedded.pyx
@@ -1,8 +1,9 @@
# embedded.pyx
-# The following two lines are for test purposed only, please ignore them.
+# The following two lines are for test purposes only, please ignore them.
# distutils: sources = embedded_main.c
# tag: py3only
+# tag: no-cpp
TEXT_TO_SAY = 'Hello from Python!'
diff --git a/docs/examples/tutorial/pure/disabled_annotations.py b/docs/examples/tutorial/pure/disabled_annotations.py
new file mode 100644
index 000000000..c92b4cf8e
--- /dev/null
+++ b/docs/examples/tutorial/pure/disabled_annotations.py
@@ -0,0 +1,33 @@
+import cython
+
+@cython.annotation_typing(False)
+def function_without_typing(a: int, b: int) -> int:
+ """Cython is ignoring annotations in this function"""
+ c: int = a + b
+ return c * a
+
+
+@cython.annotation_typing(False)
+@cython.cclass
+class NotAnnotatedClass:
+    """Cython is ignoring annotations in this class except annotated_method"""
+ d: dict
+
+ def __init__(self, dictionary: dict):
+ self.d = dictionary
+
+ @cython.annotation_typing(True)
+ def annotated_method(self, key: str, a: cython.int, b: cython.int):
+ prefixed_key: str = 'prefix_' + key
+ self.d[prefixed_key] = a + b
+
+
+def annotated_function(a: cython.int, b: cython.int):
+ s: cython.int = a + b
+ with cython.annotation_typing(False):
+ # Cython is ignoring annotations within this code block
+ c: list = []
+ c.append(a)
+ c.append(b)
+ c.append(s)
+ return c
diff --git a/docs/examples/userguide/buffer/matrix.py b/docs/examples/userguide/buffer/matrix.py
new file mode 100644
index 000000000..79a3d3f12
--- /dev/null
+++ b/docs/examples/userguide/buffer/matrix.py
@@ -0,0 +1,15 @@
+# distutils: language = c++
+
+from cython.cimports.libcpp.vector import vector
+
+@cython.cclass
+class Matrix:
+ ncols: cython.unsigned
+ v: vector[cython.float]
+
+ def __cinit__(self, ncols: cython.unsigned):
+ self.ncols = ncols
+
+ def add_row(self):
+ """Adds a row, initially zero-filled."""
+ self.v.resize(self.v.size() + self.ncols)
diff --git a/docs/examples/userguide/buffer/matrix.pyx b/docs/examples/userguide/buffer/matrix.pyx
index ca597c2f2..f2547f6c3 100644
--- a/docs/examples/userguide/buffer/matrix.pyx
+++ b/docs/examples/userguide/buffer/matrix.pyx
@@ -1,9 +1,8 @@
# distutils: language = c++
-# matrix.pyx
-
from libcpp.vector cimport vector
+
cdef class Matrix:
cdef unsigned ncols
cdef vector[float] v
diff --git a/docs/examples/userguide/buffer/matrix_with_buffer.py b/docs/examples/userguide/buffer/matrix_with_buffer.py
new file mode 100644
index 000000000..34ccc6591
--- /dev/null
+++ b/docs/examples/userguide/buffer/matrix_with_buffer.py
@@ -0,0 +1,48 @@
+# distutils: language = c++
+from cython.cimports.cpython import Py_buffer
+from cython.cimports.libcpp.vector import vector
+
+@cython.cclass
+class Matrix:
+ ncols: cython.Py_ssize_t
+ shape: cython.Py_ssize_t[2]
+ strides: cython.Py_ssize_t[2]
+ v: vector[cython.float]
+
+ def __cinit__(self, ncols: cython.Py_ssize_t):
+ self.ncols = ncols
+
+ def add_row(self):
+ """Adds a row, initially zero-filled."""
+ self.v.resize(self.v.size() + self.ncols)
+
+ def __getbuffer__(self, buffer: cython.pointer(Py_buffer), flags: cython.int):
+ itemsize: cython.Py_ssize_t = cython.sizeof(self.v[0])
+
+ self.shape[0] = self.v.size() // self.ncols
+ self.shape[1] = self.ncols
+
+ # Stride 1 is the distance, in bytes, between two items in a row;
+ # this is the distance between two adjacent items in the vector.
+ # Stride 0 is the distance between the first elements of adjacent rows.
+ self.strides[1] = cython.cast(cython.Py_ssize_t, (
+ cython.cast(cython.p_char, cython.address(self.v[1]))
+ - cython.cast(cython.p_char, cython.address(self.v[0]))
+ )
+ )
+ self.strides[0] = self.ncols * self.strides[1]
+
+ buffer.buf = cython.cast(cython.p_char, cython.address(self.v[0]))
+ buffer.format = 'f' # float
+ buffer.internal = cython.NULL # see References
+ buffer.itemsize = itemsize
+ buffer.len = self.v.size() * itemsize # product(shape) * itemsize
+ buffer.ndim = 2
+ buffer.obj = self
+ buffer.readonly = 0
+ buffer.shape = self.shape
+ buffer.strides = self.strides
+ buffer.suboffsets = cython.NULL # for pointer arrays only
+
+ def __releasebuffer__(self, buffer: cython.pointer(Py_buffer)):
+ pass
diff --git a/docs/examples/userguide/buffer/matrix_with_buffer.pyx b/docs/examples/userguide/buffer/matrix_with_buffer.pyx
index c355f0fe8..16239d199 100644
--- a/docs/examples/userguide/buffer/matrix_with_buffer.pyx
+++ b/docs/examples/userguide/buffer/matrix_with_buffer.pyx
@@ -1,8 +1,8 @@
# distutils: language = c++
-
from cpython cimport Py_buffer
from libcpp.vector cimport vector
+
cdef class Matrix:
cdef Py_ssize_t ncols
cdef Py_ssize_t shape[2]
@@ -19,7 +19,7 @@ cdef class Matrix:
def __getbuffer__(self, Py_buffer *buffer, int flags):
cdef Py_ssize_t itemsize = sizeof(self.v[0])
- self.shape[0] = self.v.size() / self.ncols
+ self.shape[0] = self.v.size() // self.ncols
self.shape[1] = self.ncols
# Stride 1 is the distance, in bytes, between two items in a row;
@@ -27,6 +27,9 @@ cdef class Matrix:
# Stride 0 is the distance between the first elements of adjacent rows.
self.strides[1] = <Py_ssize_t>( <char *>&(self.v[1])
- <char *>&(self.v[0]))
+
+
+
self.strides[0] = self.ncols * self.strides[1]
buffer.buf = <char *>&(self.v[0])
diff --git a/docs/examples/userguide/buffer/view_count.py b/docs/examples/userguide/buffer/view_count.py
new file mode 100644
index 000000000..6a0554abc
--- /dev/null
+++ b/docs/examples/userguide/buffer/view_count.py
@@ -0,0 +1,30 @@
+# distutils: language = c++
+
+from cython.cimports.cpython import Py_buffer
+from cython.cimports.libcpp.vector import vector
+
+@cython.cclass
+class Matrix:
+
+ view_count: cython.int
+
+ ncols: cython.Py_ssize_t
+ v: vector[cython.float]
+ # ...
+
+ def __cinit__(self, ncols: cython.Py_ssize_t):
+ self.ncols = ncols
+ self.view_count = 0
+
+ def add_row(self):
+ if self.view_count > 0:
+ raise ValueError("can't add row while being viewed")
+ self.v.resize(self.v.size() + self.ncols)
+
+ def __getbuffer__(self, buffer: cython.pointer(Py_buffer), flags: cython.int):
+ # ... as before
+
+ self.view_count += 1
+
+ def __releasebuffer__(self, buffer: cython.pointer(Py_buffer)):
+ self.view_count -= 1
diff --git a/docs/examples/userguide/buffer/view_count.pyx b/docs/examples/userguide/buffer/view_count.pyx
index 8027f3ee9..8c4b1d524 100644
--- a/docs/examples/userguide/buffer/view_count.pyx
+++ b/docs/examples/userguide/buffer/view_count.pyx
@@ -3,6 +3,7 @@
from cpython cimport Py_buffer
from libcpp.vector cimport vector
+
cdef class Matrix:
cdef int view_count
@@ -26,4 +27,4 @@ cdef class Matrix:
self.view_count += 1
def __releasebuffer__(self, Py_buffer *buffer):
- self.view_count -= 1 \ No newline at end of file
+ self.view_count -= 1
diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle.py b/docs/examples/userguide/early_binding_for_speed/rectangle.py
new file mode 100644
index 000000000..cd534d051
--- /dev/null
+++ b/docs/examples/userguide/early_binding_for_speed/rectangle.py
@@ -0,0 +1,22 @@
+@cython.cclass
+class Rectangle:
+ x0: cython.int
+ y0: cython.int
+ x1: cython.int
+ y1: cython.int
+
+ def __init__(self, x0: cython.int, y0: cython.int, x1: cython.int, y1: cython.int):
+ self.x0 = x0
+ self.y0 = y0
+ self.x1 = x1
+ self.y1 = y1
+
+ def area(self):
+ area = (self.x1 - self.x0) * (self.y1 - self.y0)
+ if area < 0:
+ area = -area
+ return area
+
+def rectArea(x0, y0, x1, y1):
+ rect = Rectangle(x0, y0, x1, y1)
+ return rect.area()
diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle.pyx b/docs/examples/userguide/early_binding_for_speed/rectangle.pyx
index de70b0263..b58f6534b 100644
--- a/docs/examples/userguide/early_binding_for_speed/rectangle.pyx
+++ b/docs/examples/userguide/early_binding_for_speed/rectangle.pyx
@@ -1,7 +1,10 @@
+
cdef class Rectangle:
cdef int x0, y0
cdef int x1, y1
+
+
def __init__(self, int x0, int y0, int x1, int y1):
self.x0 = x0
self.y0 = y0
diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.py b/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.py
new file mode 100644
index 000000000..ee2a14fb8
--- /dev/null
+++ b/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.py
@@ -0,0 +1,26 @@
+@cython.cclass
+class Rectangle:
+ x0: cython.int
+ y0: cython.int
+ x1: cython.int
+ y1: cython.int
+
+ def __init__(self, x0: cython.int, y0: cython.int, x1: cython.int, y1: cython.int):
+ self.x0 = x0
+ self.y0 = y0
+ self.x1 = x1
+ self.y1 = y1
+
+ @cython.cfunc
+ def _area(self) -> cython.int:
+ area: cython.int = (self.x1 - self.x0) * (self.y1 - self.y0)
+ if area < 0:
+ area = -area
+ return area
+
+ def area(self):
+ return self._area()
+
+def rectArea(x0, y0, x1, y1):
+ rect: Rectangle = Rectangle(x0, y0, x1, y1)
+ return rect._area()
diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.pyx b/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.pyx
index 1933326d2..3b64d766b 100644
--- a/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.pyx
+++ b/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.pyx
@@ -1,13 +1,17 @@
+
cdef class Rectangle:
cdef int x0, y0
cdef int x1, y1
+
+
def __init__(self, int x0, int y0, int x1, int y1):
self.x0 = x0
self.y0 = y0
self.x1 = x1
self.y1 = y1
+
cdef int _area(self):
cdef int area = (self.x1 - self.x0) * (self.y1 - self.y0)
if area < 0:
diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.py b/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.py
new file mode 100644
index 000000000..670f340a4
--- /dev/null
+++ b/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.py
@@ -0,0 +1,23 @@
+@cython.cclass
+class Rectangle:
+ x0: cython.int
+ y0: cython.int
+ x1: cython.int
+ y1: cython.int
+
+ def __init__(self, x0: cython.int, y0: cython.int, x1: cython.int, y1: cython.int):
+ self.x0 = x0
+ self.y0 = y0
+ self.x1 = x1
+ self.y1 = y1
+
+ @cython.ccall
+    def area(self) -> cython.int:
+ area: cython.int = (self.x1 - self.x0) * (self.y1 - self.y0)
+ if area < 0:
+ area = -area
+ return area
+
+def rectArea(x0, y0, x1, y1):
+ rect: Rectangle = Rectangle(x0, y0, x1, y1)
+ return rect.area()
diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx b/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx
index f8b7d86a8..53f2a8ad2 100644
--- a/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx
+++ b/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx
@@ -1,15 +1,19 @@
+
cdef class Rectangle:
cdef int x0, y0
cdef int x1, y1
+
+
def __init__(self, int x0, int y0, int x1, int y1):
self.x0 = x0
self.y0 = y0
self.x1 = x1
self.y1 = y1
+
cpdef int area(self):
- area = (self.x1 - self.x0) * (self.y1 - self.y0)
+ cdef int area = (self.x1 - self.x0) * (self.y1 - self.y0)
if area < 0:
area = -area
return area
diff --git a/docs/examples/userguide/extension_types/dict_animal.pyx b/docs/examples/userguide/extension_types/dict_animal.pyx
index ec8cf6f9a..575b835e9 100644
--- a/docs/examples/userguide/extension_types/dict_animal.pyx
+++ b/docs/examples/userguide/extension_types/dict_animal.pyx
@@ -4,7 +4,7 @@ cdef class Animal:
cdef int number_of_legs
cdef dict __dict__
- def __cinit__(self, int number_of_legs):
+ def __init__(self, int number_of_legs):
self.number_of_legs = number_of_legs
diff --git a/docs/examples/userguide/extension_types/extendable_animal.pyx b/docs/examples/userguide/extension_types/extendable_animal.pyx
index 417760efd..2ec165421 100644
--- a/docs/examples/userguide/extension_types/extendable_animal.pyx
+++ b/docs/examples/userguide/extension_types/extendable_animal.pyx
@@ -3,7 +3,7 @@ cdef class Animal:
cdef int number_of_legs
- def __cinit__(self, int number_of_legs):
+ def __init__(self, int number_of_legs):
self.number_of_legs = number_of_legs
diff --git a/docs/examples/userguide/language_basics/struct_union_enum.pyx b/docs/examples/userguide/language_basics/enum.pyx
index af9b06d9a..1b5f5d614 100644
--- a/docs/examples/userguide/language_basics/struct_union_enum.pyx
+++ b/docs/examples/userguide/language_basics/enum.pyx
@@ -1,11 +1,3 @@
-cdef struct Grail:
- int age
- float volume
-
-cdef union Food:
- char *spam
- float *eggs
-
cdef enum CheeseType:
cheddar, edam,
camembert
@@ -14,3 +6,6 @@ cdef enum CheeseState:
hard = 1
soft = 2
runny = 3
+
+print(CheeseType.cheddar)
+print(CheeseState.hard)
diff --git a/docs/examples/userguide/language_basics/function_pointer.pyx b/docs/examples/userguide/language_basics/function_pointer.pyx
new file mode 100644
index 000000000..b345c62b4
--- /dev/null
+++ b/docs/examples/userguide/language_basics/function_pointer.pyx
@@ -0,0 +1,8 @@
+cdef int(*ptr_add)(int, int)
+
+cdef int add(int a, int b):
+ return a + b
+
+ptr_add = add
+
+print(ptr_add(1, 3))
diff --git a/docs/examples/userguide/language_basics/function_pointer_struct.pyx b/docs/examples/userguide/language_basics/function_pointer_struct.pyx
new file mode 100644
index 000000000..5ef618961
--- /dev/null
+++ b/docs/examples/userguide/language_basics/function_pointer_struct.pyx
@@ -0,0 +1,9 @@
+cdef struct Bar:
+ int sum(int a, int b)
+
+cdef int add(int a, int b):
+ return a + b
+
+cdef Bar bar = Bar(add)
+
+print(bar.sum(1, 2))
diff --git a/docs/examples/userguide/language_basics/struct.py b/docs/examples/userguide/language_basics/struct.py
new file mode 100644
index 000000000..32b6b252a
--- /dev/null
+++ b/docs/examples/userguide/language_basics/struct.py
@@ -0,0 +1,7 @@
+Grail = cython.struct(
+ age=cython.int,
+ volume=cython.float)
+
+def main():
+ grail: Grail = Grail(5, 3.0)
+ print(grail.age, grail.volume)
diff --git a/docs/examples/userguide/language_basics/struct.pyx b/docs/examples/userguide/language_basics/struct.pyx
new file mode 100644
index 000000000..3ef79172b
--- /dev/null
+++ b/docs/examples/userguide/language_basics/struct.pyx
@@ -0,0 +1,7 @@
+cdef struct Grail:
+ int age
+ float volume
+
+def main():
+ cdef Grail grail = Grail(5, 3.0)
+ print(grail.age, grail.volume)
diff --git a/docs/examples/userguide/language_basics/struct_union_enum.py b/docs/examples/userguide/language_basics/struct_union_enum.py
deleted file mode 100644
index b78c0aa02..000000000
--- a/docs/examples/userguide/language_basics/struct_union_enum.py
+++ /dev/null
@@ -1,7 +0,0 @@
-Grail = cython.struct(
- age=cython.int,
- volume=cython.float)
-
-Food = cython.union(
- spam=cython.p_char,
- eggs=cython.p_float)
diff --git a/docs/examples/userguide/language_basics/union.py b/docs/examples/userguide/language_basics/union.py
new file mode 100644
index 000000000..efcda358b
--- /dev/null
+++ b/docs/examples/userguide/language_basics/union.py
@@ -0,0 +1,9 @@
+Food = cython.union(
+ spam=cython.p_char,
+ eggs=cython.p_float)
+
+def main():
+ arr: cython.p_float = [1.0, 2.0]
+ spam: Food = Food(spam='b')
+ eggs: Food = Food(eggs=arr)
+ print(spam.spam, eggs.eggs[0])
diff --git a/docs/examples/userguide/language_basics/union.pyx b/docs/examples/userguide/language_basics/union.pyx
new file mode 100644
index 000000000..e05f63fcc
--- /dev/null
+++ b/docs/examples/userguide/language_basics/union.pyx
@@ -0,0 +1,9 @@
+cdef union Food:
+ char *spam
+ float *eggs
+
+def main():
+ cdef float *arr = [1.0, 2.0]
+ cdef Food spam = Food(spam='b')
+ cdef Food eggs = Food(eggs=arr)
+ print(spam.spam, eggs.eggs[0])
diff --git a/docs/examples/userguide/parallelism/breaking_loop.py b/docs/examples/userguide/parallelism/breaking_loop.py
new file mode 100644
index 000000000..00d0225b5
--- /dev/null
+++ b/docs/examples/userguide/parallelism/breaking_loop.py
@@ -0,0 +1,15 @@
+from cython.parallel import prange
+
+@cython.exceptval(-1)
+@cython.cfunc
+def func(n: cython.Py_ssize_t) -> cython.int:
+ i: cython.Py_ssize_t
+
+ for i in prange(n, nogil=True):
+ if i == 8:
+ with cython.gil:
+ raise Exception()
+ elif i == 4:
+ break
+ elif i == 2:
+ return i
diff --git a/docs/examples/userguide/parallelism/breaking_loop.pyx b/docs/examples/userguide/parallelism/breaking_loop.pyx
index 2cf562edf..e7445082d 100644
--- a/docs/examples/userguide/parallelism/breaking_loop.pyx
+++ b/docs/examples/userguide/parallelism/breaking_loop.pyx
@@ -1,5 +1,7 @@
from cython.parallel import prange
+
+
cdef int func(Py_ssize_t n) except -1:
cdef Py_ssize_t i
diff --git a/docs/examples/userguide/parallelism/cimport_openmp.py b/docs/examples/userguide/parallelism/cimport_openmp.py
new file mode 100644
index 000000000..9288a4381
--- /dev/null
+++ b/docs/examples/userguide/parallelism/cimport_openmp.py
@@ -0,0 +1,11 @@
+# tag: openmp
+
+from cython.parallel import parallel
+from cython.cimports.openmp import omp_set_dynamic, omp_get_num_threads
+
+num_threads = cython.declare(cython.int)
+
+omp_set_dynamic(1)
+with cython.nogil, parallel():
+ num_threads = omp_get_num_threads()
+ # ...
diff --git a/docs/examples/userguide/parallelism/cimport_openmp.pyx b/docs/examples/userguide/parallelism/cimport_openmp.pyx
index 797936fe7..54d5f18db 100644
--- a/docs/examples/userguide/parallelism/cimport_openmp.pyx
+++ b/docs/examples/userguide/parallelism/cimport_openmp.pyx
@@ -1,6 +1,4 @@
# tag: openmp
-# You can ignore the previous line.
-# It's for internal testing of the Cython documentation.
from cython.parallel cimport parallel
cimport openmp
diff --git a/docs/examples/userguide/parallelism/memoryview_sum.py b/docs/examples/userguide/parallelism/memoryview_sum.py
new file mode 100644
index 000000000..6cff5d587
--- /dev/null
+++ b/docs/examples/userguide/parallelism/memoryview_sum.py
@@ -0,0 +1,7 @@
+from cython.parallel import prange
+
+def func(x: cython.double[:], alpha: cython.double):
+ i: cython.Py_ssize_t
+
+ for i in prange(x.shape[0], nogil=True):
+ x[i] = alpha * x[i]
diff --git a/docs/examples/userguide/parallelism/memoryview_sum.pyx b/docs/examples/userguide/parallelism/memoryview_sum.pyx
new file mode 100644
index 000000000..bdc1c9feb
--- /dev/null
+++ b/docs/examples/userguide/parallelism/memoryview_sum.pyx
@@ -0,0 +1,7 @@
+from cython.parallel import prange
+
+def func(double[:] x, double alpha):
+ cdef Py_ssize_t i
+
+ for i in prange(x.shape[0], nogil=True):
+ x[i] = alpha * x[i]
diff --git a/docs/examples/userguide/parallelism/parallel.py b/docs/examples/userguide/parallelism/parallel.py
new file mode 100644
index 000000000..0fb62d10f
--- /dev/null
+++ b/docs/examples/userguide/parallelism/parallel.py
@@ -0,0 +1,30 @@
+from cython.parallel import parallel, prange
+from cython.cimports.libc.stdlib import abort, malloc, free
+
+@cython.nogil
+@cython.cfunc
+def func(buf: cython.p_int) -> cython.void:
+ pass
+ # ...
+
+idx = cython.declare(cython.Py_ssize_t)
+i = cython.declare(cython.Py_ssize_t)
+j = cython.declare(cython.Py_ssize_t)
+n = cython.declare(cython.Py_ssize_t, 100)
+local_buf = cython.declare(cython.p_int)
+size = cython.declare(cython.size_t, 10)
+
+with cython.nogil, parallel():
+ local_buf: cython.p_int = cython.cast(cython.p_int, malloc(cython.sizeof(cython.int) * size))
+ if local_buf is cython.NULL:
+ abort()
+
+ # populate our local buffer in a sequential loop
+ for i in range(size):
+ local_buf[i] = i * 2
+
+ # share the work using the thread-local buffer(s)
+ for j in prange(n, schedule='guided'):
+ func(local_buf)
+
+ free(local_buf)
diff --git a/docs/examples/userguide/parallelism/parallel.pyx b/docs/examples/userguide/parallelism/parallel.pyx
new file mode 100644
index 000000000..2a952d537
--- /dev/null
+++ b/docs/examples/userguide/parallelism/parallel.pyx
@@ -0,0 +1,30 @@
+from cython.parallel import parallel, prange
+from libc.stdlib cimport abort, malloc, free
+
+
+
+cdef void func(int *buf) nogil:
+ pass
+ # ...
+
+cdef Py_ssize_t idx, i, j, n = 100
+cdef int * local_buf
+cdef size_t size = 10
+
+
+
+
+with nogil, parallel():
+ local_buf = <int *> malloc(sizeof(int) * size)
+ if local_buf is NULL:
+ abort()
+
+ # populate our local buffer in a sequential loop
+ for i in range(size):
+ local_buf[i] = i * 2
+
+ # share the work using the thread-local buffer(s)
+ for j in prange(n, schedule='guided'):
+ func(local_buf)
+
+ free(local_buf)
diff --git a/docs/examples/userguide/parallelism/setup_py.py b/docs/examples/userguide/parallelism/setup_py.py
new file mode 100644
index 000000000..85a037dc5
--- /dev/null
+++ b/docs/examples/userguide/parallelism/setup_py.py
@@ -0,0 +1,16 @@
+from setuptools import Extension, setup
+from Cython.Build import cythonize
+
+ext_modules = [
+ Extension(
+ "hello",
+ ["hello.py"],
+ extra_compile_args=['-fopenmp'],
+ extra_link_args=['-fopenmp'],
+ )
+]
+
+setup(
+ name='hello-parallel-world',
+ ext_modules=cythonize(ext_modules),
+)
diff --git a/docs/examples/userguide/parallelism/setup.py b/docs/examples/userguide/parallelism/setup_pyx.py
index fe6d0a64c..fe6d0a64c 100644
--- a/docs/examples/userguide/parallelism/setup.py
+++ b/docs/examples/userguide/parallelism/setup_pyx.py
diff --git a/docs/examples/userguide/parallelism/simple_sum.py b/docs/examples/userguide/parallelism/simple_sum.py
new file mode 100644
index 000000000..f952a8556
--- /dev/null
+++ b/docs/examples/userguide/parallelism/simple_sum.py
@@ -0,0 +1,10 @@
+from cython.parallel import prange
+
+i = cython.declare(cython.int)
+n = cython.declare(cython.int, 30)
+sum = cython.declare(cython.int, 0)
+
+for i in prange(n, nogil=True):
+ sum += i
+
+print(sum)
diff --git a/docs/examples/userguide/sharing_declarations/shrubbing.pyx b/docs/examples/userguide/sharing_declarations/shrubbing.pyx
index 8598b5c98..91235e5ec 100644
--- a/docs/examples/userguide/sharing_declarations/shrubbing.pyx
+++ b/docs/examples/userguide/sharing_declarations/shrubbing.pyx
@@ -2,7 +2,7 @@
cdef class Shrubbery:
- def __cinit__(self, int w, int l):
+ def __init__(self, int w, int l):
self.width = w
self.length = l
diff --git a/docs/examples/userguide/wrapping_CPlusPlus/rect.pyx b/docs/examples/userguide/wrapping_CPlusPlus/rect.pyx
index e7c4423ef..d8eec16ef 100644
--- a/docs/examples/userguide/wrapping_CPlusPlus/rect.pyx
+++ b/docs/examples/userguide/wrapping_CPlusPlus/rect.pyx
@@ -8,7 +8,7 @@ from Rectangle cimport Rectangle
cdef class PyRectangle:
cdef Rectangle c_rect # Hold a C++ instance which we're wrapping
- def __cinit__(self, int x0, int y0, int x1, int y1):
+ def __init__(self, int x0, int y0, int x1, int y1):
self.c_rect = Rectangle(x0, y0, x1, y1)
def get_area(self):
diff --git a/docs/examples/userguide/wrapping_CPlusPlus/rect_ptr.pyx b/docs/examples/userguide/wrapping_CPlusPlus/rect_ptr.pyx
index 0c48689e7..ec4b34ab4 100644
--- a/docs/examples/userguide/wrapping_CPlusPlus/rect_ptr.pyx
+++ b/docs/examples/userguide/wrapping_CPlusPlus/rect_ptr.pyx
@@ -5,8 +5,14 @@ from Rectangle cimport Rectangle
cdef class PyRectangle:
cdef Rectangle*c_rect # hold a pointer to the C++ instance which we're wrapping
- def __cinit__(self, int x0, int y0, int x1, int y1):
- self.c_rect = new Rectangle(x0, y0, x1, y1)
+ def __cinit__(self):
+ self.c_rect = new Rectangle()
+
+ def __init__(self, int x0, int y0, int x1, int y1):
+ self.c_rect.x0 = x0
+ self.c_rect.y0 = y0
+ self.c_rect.x1 = x1
+ self.c_rect.y1 = y1
def __dealloc__(self):
del self.c_rect
diff --git a/docs/examples/userguide/wrapping_CPlusPlus/rect_with_attributes.pyx b/docs/examples/userguide/wrapping_CPlusPlus/rect_with_attributes.pyx
index 1bac30dec..441292ace 100644
--- a/docs/examples/userguide/wrapping_CPlusPlus/rect_with_attributes.pyx
+++ b/docs/examples/userguide/wrapping_CPlusPlus/rect_with_attributes.pyx
@@ -5,7 +5,7 @@ from Rectangle cimport Rectangle
cdef class PyRectangle:
cdef Rectangle c_rect
- def __cinit__(self, int x0, int y0, int x1, int y1):
+ def __init__(self, int x0, int y0, int x1, int y1):
self.c_rect = Rectangle(x0, y0, x1, y1)
def get_area(self):
diff --git a/docs/src/quickstart/install.rst b/docs/src/quickstart/install.rst
index 04a47afdc..979d0f178 100644
--- a/docs/src/quickstart/install.rst
+++ b/docs/src/quickstart/install.rst
@@ -29,7 +29,11 @@ according to the system used:
built with. This is usually a specific version of Microsoft Visual
C/C++ (MSVC) - see https://wiki.python.org/moin/WindowsCompilers.
MSVC is the only compiler that Cython is currently tested with on
- Windows. A possible alternative is the open source MinGW (a
+ Windows. If you're having difficulty making setuptools detect
+ MSVC then `PyMSVC <https://github.com/kdschlosser/python_msvc>`_
+ aims to solve this.
+
+ A possible alternative is the open source MinGW (a
Windows distribution of gcc). See the appendix for instructions for
setting up MinGW manually. Enthought Canopy and Python(x,y) bundle
MinGW, but some of the configuration steps in the appendix might
diff --git a/docs/src/tutorial/clibraries.rst b/docs/src/tutorial/clibraries.rst
index ddc02f443..3542dbe8e 100644
--- a/docs/src/tutorial/clibraries.rst
+++ b/docs/src/tutorial/clibraries.rst
@@ -125,9 +125,6 @@ Here is a first start for the Queue class:
.. literalinclude:: ../../examples/tutorial/clibraries/queue.py
:caption: queue.py
- .. note:: Currently, Cython contains a bug not allowing using
- annotations with types containing pointers (GitHub issue :issue:`4293`).
-
.. group-tab:: Cython
.. literalinclude:: ../../examples/tutorial/clibraries/queue.pyx
@@ -584,7 +581,6 @@ and check if the queue really is empty or not:
.. code-block:: python
@cython.cfunc
- @cython.exceptval(-1, check=True)
def peek(self) -> cython.int:
value: cython.int = cython.cast(cython.Py_ssize_t, cqueue.queue_peek_head(self._c_queue))
if value == 0:
@@ -598,7 +594,7 @@ and check if the queue really is empty or not:
.. code-block:: cython
- cdef int peek(self) except? -1:
+ cdef int peek(self):
cdef int value = <Py_ssize_t>cqueue.queue_peek_head(self._c_queue)
if value == 0:
# this may mean that the queue is empty, or
@@ -611,39 +607,27 @@ Note how we have effectively created a fast path through the method in
the hopefully common cases that the return value is not ``0``. Only
that specific case needs an additional check if the queue is empty.
-The ``except? -1`` or ``@cython.exceptval(-1, check=True)`` declaration
-in the method signature falls into the
-same category. If the function was a Python function returning a
+If the ``peek`` function was a Python function returning a
Python object value, CPython would simply return ``NULL`` internally
instead of a Python object to indicate an exception, which would
immediately be propagated by the surrounding code. The problem is
that the return type is ``int`` and any ``int`` value is a valid queue
item value, so there is no way to explicitly signal an error to the
-calling code. In fact, without such a declaration, there is no
-obvious way for Cython to know what to return on exceptions and for
-calling code to even know that this method *may* exit with an
-exception.
+calling code.
The only way calling code can deal with this situation is to call
``PyErr_Occurred()`` when returning from a function to check if an
exception was raised, and if so, propagate the exception. This
-obviously has a performance penalty. Cython therefore allows you to
-declare which value it should implicitly return in the case of an
+obviously has a performance penalty. Cython therefore uses a dedicated value
+that it implicitly returns in the case of an
exception, so that the surrounding code only needs to check for an
exception when receiving this exact value.
-We chose to use ``-1`` as the exception return value as we expect it
-to be an unlikely value to be put into the queue. The question mark
-in the ``except? -1`` declaration and ``check=True`` in ``@cython.exceptval``
-indicates that the return value is
-ambiguous (there *may* be a ``-1`` value in the queue, after all) and
-that an additional exception check using ``PyErr_Occurred()`` is
-needed in calling code. Without it, Cython code that calls this
-method and receives the exception return value would silently (and
-sometimes incorrectly) assume that an exception has been raised. In
-any case, all other return values will be passed through almost
+By default, the value ``-1`` is used as the exception return value.
+All other return values will be passed through almost
without a penalty, thus again creating a fast path for 'normal'
-values.
+values. See :ref:`error_return_values` for more details.
+
Now that the ``peek()`` method is implemented, the ``pop()`` method
also needs adaptation. Since it removes a value from the queue,
@@ -657,7 +641,6 @@ removal. Instead, we must test it on entry:
.. code-block:: python
@cython.cfunc
- @cython.exceptval(-1, check=True)
def pop(self) -> cython.int:
if cqueue.queue_is_empty(self._c_queue):
raise IndexError("Queue is empty")
@@ -667,7 +650,7 @@ removal. Instead, we must test it on entry:
.. code-block:: cython
- cdef int pop(self) except? -1:
+ cdef int pop(self):
if cqueue.queue_is_empty(self._c_queue):
raise IndexError("Queue is empty")
return <Py_ssize_t>cqueue.queue_pop_head(self._c_queue)
diff --git a/docs/src/tutorial/cython_tutorial.rst b/docs/src/tutorial/cython_tutorial.rst
index 647ec62b2..e3ab46005 100644
--- a/docs/src/tutorial/cython_tutorial.rst
+++ b/docs/src/tutorial/cython_tutorial.rst
@@ -390,13 +390,13 @@ Now we can ensure that those two programs output the same values::
It's possible to compare the speed now::
- python -m timeit -s 'from primes_python import primes' 'primes(1000)'
+ python -m timeit -s "from primes_python import primes" "primes(1000)"
10 loops, best of 3: 23 msec per loop
- python -m timeit -s 'from primes_python_compiled import primes' 'primes(1000)'
+ python -m timeit -s "from primes_python_compiled import primes" "primes(1000)"
100 loops, best of 3: 11.9 msec per loop
- python -m timeit -s 'from primes import primes' 'primes(1000)'
+ python -m timeit -s "from primes import primes" "primes(1000)"
1000 loops, best of 3: 1.65 msec per loop
The cythonize version of ``primes_python`` is 2 times faster than the Python one,
diff --git a/docs/src/tutorial/pure.rst b/docs/src/tutorial/pure.rst
index 417b7d1b2..91a381d1a 100644
--- a/docs/src/tutorial/pure.rst
+++ b/docs/src/tutorial/pure.rst
@@ -347,8 +347,7 @@ PEP-484 type annotations
Python `type hints <https://www.python.org/dev/peps/pep-0484>`_
can be used to declare argument types, as shown in the
-following example. To avoid conflicts with other kinds of annotation
-usages, this can be disabled with the directive ``annotation_typing=False``.
+following example:
.. literalinclude:: ../../examples/tutorial/pure/annotations.py
@@ -378,6 +377,18 @@ declare types of variables in a Python 3.6 compatible way as follows:
There is currently no way to express the visibility of object attributes.
+Disabling annotations
+^^^^^^^^^^^^^^^^^^^^^
+
+To avoid conflicts with other kinds of annotation
+usages, Cython's use of annotations to specify types can be disabled with the
+``annotation_typing`` :ref:`compiler directive<compiler-directives>`. From Cython 3
+you can use this as a decorator or a with statement, as shown in the following example:
+
+.. literalinclude:: ../../examples/tutorial/pure/disabled_annotations.py
+
+
+
``typing`` Module
^^^^^^^^^^^^^^^^^
diff --git a/docs/src/two-syntax-variants-used b/docs/src/two-syntax-variants-used
index af583a0a9..c5cd02cb1 100644
--- a/docs/src/two-syntax-variants-used
+++ b/docs/src/two-syntax-variants-used
@@ -16,3 +16,7 @@
.. code-block:: python
import cython
+
+ If you use the pure Python syntax we strongly recommend you use a recent
+ Cython 3 release, since significant improvements have been made here
+ compared to the 0.29.x releases.
diff --git a/docs/src/userguide/buffer.rst b/docs/src/userguide/buffer.rst
index 08661a184..3687cf2fd 100644
--- a/docs/src/userguide/buffer.rst
+++ b/docs/src/userguide/buffer.rst
@@ -3,6 +3,10 @@
Implementing the buffer protocol
================================
+.. include::
+ ../two-syntax-variants-used
+
+
Cython objects can expose memory buffers to Python code
by implementing the "buffer protocol".
This chapter shows how to implement the protocol
@@ -16,7 +20,15 @@ The following Cython/C++ code implements a matrix of floats,
where the number of columns is fixed at construction time
but rows can be added dynamically.
-.. literalinclude:: ../../examples/userguide/buffer/matrix.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/buffer/matrix.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/buffer/matrix.pyx
There are no methods to do anything productive with the matrices' contents.
We could implement custom ``__getitem__``, ``__setitem__``, etc. for this,
@@ -27,7 +39,15 @@ Implementing the buffer protocol requires adding two methods,
``__getbuffer__`` and ``__releasebuffer__``,
which Cython handles specially.
-.. literalinclude:: ../../examples/userguide/buffer/matrix_with_buffer.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/buffer/matrix_with_buffer.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/buffer/matrix_with_buffer.pyx
The method ``Matrix.__getbuffer__`` fills a descriptor structure,
called a ``Py_buffer``, that is defined by the Python C-API.
@@ -75,7 +95,15 @@ This is where ``__releasebuffer__`` comes in.
We can add a reference count to each matrix,
and lock it for mutation whenever a view exists.
-.. literalinclude:: ../../examples/userguide/buffer/view_count.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/buffer/view_count.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/buffer/view_count.pyx
Flags
-----
diff --git a/docs/src/userguide/early_binding_for_speed.rst b/docs/src/userguide/early_binding_for_speed.rst
index 9bb8cf724..4a442d973 100644
--- a/docs/src/userguide/early_binding_for_speed.rst
+++ b/docs/src/userguide/early_binding_for_speed.rst
@@ -6,6 +6,9 @@
Early Binding for Speed
**************************
+.. include::
+ ../two-syntax-variants-used
+
As a dynamic language, Python encourages a programming style of considering
classes and objects in terms of their methods and attributes, more than where
they fit into the class hierarchy.
@@ -22,7 +25,15 @@ use of 'early binding' programming techniques.
For example, consider the following (silly) code example:
-.. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle.pyx
In the :func:`rectArea` method, the call to :meth:`rect.area` and the
:meth:`.area` method contain a lot of Python overhead.
@@ -30,7 +41,15 @@ In the :func:`rectArea` method, the call to :meth:`rect.area` and the
However, in Cython, it is possible to eliminate a lot of this overhead in cases
where calls occur within Cython code. For example:
-.. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cdef.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cdef.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cdef.pyx
Here, in the Rectangle extension class, we have defined two different area
calculation methods, the efficient :meth:`_area` C method, and the
@@ -46,10 +65,18 @@ dual-access methods - methods that can be efficiently called at C level, but
can also be accessed from pure Python code at the cost of the Python access
overheads. Consider this code:
-.. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cpdef.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx
-Here, we just have a single area method, declared as :keyword:`cpdef` to make it
-efficiently callable as a C function, but still accessible from pure Python
+Here, we just have a single area method, declared as :keyword:`cpdef` or with ``@ccall`` decorator
+to make it efficiently callable as a C function, but still accessible from pure Python
(or late-binding Cython) code.
If within Cython code, we have a variable already 'early-bound' (ie, declared
diff --git a/docs/src/userguide/extension_types.rst b/docs/src/userguide/extension_types.rst
index b2690dc49..42d77c378 100644
--- a/docs/src/userguide/extension_types.rst
+++ b/docs/src/userguide/extension_types.rst
@@ -479,6 +479,64 @@ when it is deleted:
We don't have: ['camembert', 'cheddar']
We don't have: []
+
+C methods
+=========
+
+Extension types can have C methods as well as Python methods. Like C
+functions, C methods are declared using
+
+* :keyword:`cdef` instead of :keyword:`def` or ``@cfunc`` decorator for *C methods*, or
+* :keyword:`cpdef` instead of :keyword:`def` or ``@ccall`` decorator for *hybrid methods*.
+
+C methods are "virtual", and may be overridden in derived extension types.
+In addition, :keyword:`cpdef`/``@ccall`` methods can even be overridden by Python
+methods when called as C method. This adds a little to their calling overhead
+compared to a :keyword:`cdef`/``@cfunc`` method:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/pets.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/pets.pyx
+
+.. code-block:: text
+
+ # Output
+ p1:
+ This parrot is resting.
+ p2:
+ This parrot is resting.
+ Lovely plumage!
+
+The above example also illustrates that a C method can call an inherited C
+method using the usual Python technique, i.e.::
+
+ Parrot.describe(self)
+
+:keyword:`cdef`/``@cfunc`` methods can be declared static by using the ``@staticmethod`` decorator.
+This can be especially useful for constructing classes that take non-Python compatible types:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.pyx
+
+.. note::
+
+    Cython currently does not support decorating :keyword:`cdef`/``@cfunc`` methods with
+ the ``@classmethod`` decorator.
+
+
.. _subclassing:
Subclassing
@@ -519,7 +577,7 @@ extern extension type. If the base type is defined in another Cython module, it
must either be declared as an extern extension type or imported using the
:keyword:`cimport` statement or importing from the special ``cython.cimports`` package.
-Multiple inheritance is supported, however the second and subsequent base
+Multiple inheritance is supported, however the second and subsequent base
classes must be an ordinary Python class (not an extension type or a built-in
type).
@@ -530,7 +588,7 @@ must be compatible).
There is a way to prevent extension types from
being subtyped in Python. This is done via the ``final`` directive,
-usually set on an extension type using a decorator:
+usually set on an extension type or C method using a decorator:
.. tabs::
@@ -543,6 +601,13 @@ usually set on an extension type using a decorator:
@cython.final
@cython.cclass
class Parrot:
+ def describe(self): pass
+
+ @cython.cclass
+ class Lizard:
+
+ @cython.final
+ @cython.cfunc
def done(self): pass
.. group-tab:: Cython
@@ -553,72 +618,24 @@ usually set on an extension type using a decorator:
@cython.final
cdef class Parrot:
- def done(self): pass
-
-Trying to create a Python subclass from this type will raise a
-:class:`TypeError` at runtime. Cython will also prevent subtyping a
-final type inside of the same module, i.e. creating an extension type
-that uses a final type as its base type will fail at compile time.
-Note, however, that this restriction does not currently propagate to
-other extension modules, so even final extension types can still be
-subtyped at the C level by foreign code.
-
-
-C methods
-=========
-
-Extension types can have C methods as well as Python methods. Like C
-functions, C methods are declared using
-
-* :keyword:`cdef` instead of :keyword:`def` or ``@cfunc`` decorator for *C methods*, or
-* :keyword:`cpdef` instead of :keyword:`def` or ``@ccall`` decorator for *hybrid methods*.
-
-C methods are "virtual", and may be overridden in derived
-extension types. In addition, :keyword:`cpdef`/``@ccall`` methods can even be overridden by Python
-methods when called as C method. This adds a little to their calling overhead
-compared to a :keyword:`cdef`/``@cfunc`` method:
-
-.. tabs::
-
- .. group-tab:: Pure Python
-
- .. literalinclude:: ../../examples/userguide/extension_types/pets.py
-
- .. group-tab:: Cython
-
- .. literalinclude:: ../../examples/userguide/extension_types/pets.pyx
-
-.. code-block:: text
-
- # Output
- p1:
- This parrot is resting.
- p2:
- This parrot is resting.
- Lovely plumage!
-
-The above example also illustrates that a C method can call an inherited C
-method using the usual Python technique, i.e.::
+ def describe(self): pass
- Parrot.describe(self)
-:keyword:`cdef`/``@ccall`` methods can be declared static by using the ``@staticmethod`` decorator.
-This can be especially useful for constructing classes that take non-Python
-compatible types:
-.. tabs::
+ cdef class Lizard:
- .. group-tab:: Pure Python
- .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.py
+ @cython.final
+ cdef done(self): pass
- .. group-tab:: Cython
-
- .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.pyx
-
-.. note::
+Trying to create a Python subclass from a final type or overriding a final method will raise
+a :class:`TypeError` at runtime. Cython will also prevent subtyping a
+final type or overriding a final method inside of the same module, i.e. creating
+an extension type that uses a final type as its base type will fail at compile time.
+Note, however, that this restriction does not currently propagate to
+other extension modules, so Cython is unable to prevent final extension types
+from being subtyped at the C level by foreign code.
- Cython currently does not support decorating :keyword:`cdef`/``@ccall`` methods with ``@classmethod`` decorator.
.. _forward_declaring_extension_types:
diff --git a/docs/src/userguide/language_basics.rst b/docs/src/userguide/language_basics.rst
index 7d056bdfb..11561e1ee 100644
--- a/docs/src/userguide/language_basics.rst
+++ b/docs/src/userguide/language_basics.rst
@@ -48,7 +48,7 @@ the use of ‘early binding’ programming techniques.
C variable and type definitions
===============================
-C variables can be declared by
+C variables can be declared by
* using the Cython specific :keyword:`cdef` statement,
* using PEP-484/526 type annotations with C data types or
@@ -128,51 +128,6 @@ the declaration in most cases:
cdef float *g = [1, 2, 3, 4]
cdef float *h = &f
-In addition to the basic types, C :keyword:`struct`, :keyword:`union` and :keyword:`enum`
-are supported:
-
-.. tabs::
-
- .. group-tab:: Pure Python
-
- .. literalinclude:: ../../examples/userguide/language_basics/struct_union_enum.py
-
- .. note:: Currently, Pure Python mode does not support enums. (GitHub issue :issue:`4252`)
-
- .. group-tab:: Cython
-
- .. literalinclude:: ../../examples/userguide/language_basics/struct_union_enum.pyx
-
- See also :ref:`struct-union-enum-styles`
-
- .. note::
-
- Structs can be declared as ``cdef packed struct``, which has
- the same effect as the C directive ``#pragma pack(1)``.
-
- Declaring an enum as ``cpdef`` will create a :pep:`435`-style Python wrapper::
-
- cpdef enum CheeseState:
- hard = 1
- soft = 2
- runny = 3
-
- There is currently no special syntax for defining a constant, but you can use
- an anonymous :keyword:`enum` declaration for this purpose, for example,::
-
- cdef enum:
- tons_of_spam = 3
-
- .. note::
- the words ``struct``, ``union`` and ``enum`` are used only when
- defining a type, not when referring to it. For example, to declare a variable
- pointing to a ``Grail`` struct, you would write::
-
- cdef Grail *gp
-
- and not::
-
- cdef struct Grail *gp # WRONG
.. note::
@@ -197,46 +152,82 @@ are supported:
ctypedef int* IntPtr
+.. _structs:
+
+Structs, Unions, Enums
+----------------------
-You can create a C function by declaring it with :keyword:`cdef` or by decorating a Python function with ``@cfunc``:
+In addition to the basic types, C :keyword:`struct`, :keyword:`union` and :keyword:`enum`
+are supported:
.. tabs::
.. group-tab:: Pure Python
- .. code-block:: python
-
- @cython.cfunc
- def eggs(l: cython.ulong, f: cython.float) -> cython.int:
- ...
+ .. literalinclude:: ../../examples/userguide/language_basics/struct.py
.. group-tab:: Cython
- .. code-block:: cython
+ .. literalinclude:: ../../examples/userguide/language_basics/struct.pyx
- cdef int eggs(unsigned long l, float f):
- ...
+Structs can be declared as ``cdef packed struct``, which has
+the same effect as the C directive ``#pragma pack(1)``::
-You can read more about them in :ref:`python_functions_vs_c_functions`.
+ cdef packed struct StructArray:
+ int spam[4]
+ signed char eggs[5]
-Classes can be declared as :ref:`extension-types`. Those will
-have a behavior very close to python classes, but are faster because they use a ``struct``
-internally to store attributes.
-They are declared with the :keyword:`cdef` keyword or the ``@cclass`` class decorator.
+.. note::
+    This declaration removes the empty
+    space between members that C automatically adds to ensure that they're aligned in memory
+ (see `Wikipedia article <https://en.wikipedia.org/wiki/Data_structure_alignment>`_ for more details).
+ The main use is that numpy structured arrays store their data in packed form, so a ``cdef packed struct``
+ can be :ref:`used in a memoryview<using_memoryviews>` to match that.
-Here is a simple example:
+    Pure Python mode does not support packed structs.
+
+The following example shows a declaration of unions:
.. tabs::
.. group-tab:: Pure Python
- .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.py
+ .. literalinclude:: ../../examples/userguide/language_basics/union.py
.. group-tab:: Cython
- .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.pyx
+ .. literalinclude:: ../../examples/userguide/language_basics/union.pyx
-You can read more about them in :ref:`extension-types`.
+Enums are created by the ``cdef enum`` statement:
+
+.. literalinclude:: ../../examples/userguide/language_basics/enum.pyx
+
+
+.. note:: Currently, Pure Python mode does not support enums. (GitHub issue :issue:`4252`)
+
+Declaring an enum as ``cpdef`` will create a :pep:`435`-style Python wrapper::
+
+ cpdef enum CheeseState:
+ hard = 1
+ soft = 2
+ runny = 3
+
+There is currently no special syntax for defining a constant, but you can use
+an anonymous :keyword:`enum` declaration for this purpose, for example,::
+
+ cdef enum:
+ tons_of_spam = 3
+
+.. note::
+ In the Cython syntax, the words ``struct``, ``union`` and ``enum`` are used only when
+ defining a type, not when referring to it. For example, to declare a variable
+ pointing to a ``Grail`` struct, you would write::
+
+ cdef Grail *gp
+
+ and not::
+
+ cdef struct Grail *gp # WRONG
.. _typing_types:
@@ -326,12 +317,30 @@ and is typically what one wants).
If you want to use these numeric Python types simply omit the
type declaration and let them be objects.
+Extension Types
+---------------
+
It is also possible to declare :ref:`extension-types` (declared with ``cdef class`` or the ``@cclass`` decorator).
-This does allow subclasses. This typing is mostly used to access
-``cdef``/``@cfunc`` methods and attributes of the extension type.
+Those will have a behaviour very close to Python classes (e.g. creating subclasses),
+but access to their members is faster from Cython code. Typing a variable
+as an extension type is mostly used to access ``cdef``/``@cfunc`` methods and attributes of the extension type.
The C code uses a variable which is a pointer to a structure of the
specific type, something like ``struct MyExtensionTypeObject*``.
+Here is a simple example:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.pyx
+
+You can read more about them in :ref:`extension-types`.
+
Grouping multiple C declarations
--------------------------------
@@ -459,7 +468,7 @@ passed in directly using a normal C function call.
C Functions declared using :keyword:`cdef` or the ``@cfunc`` decorator with a
Python object return type, like Python functions, will return a :keyword:`None`
value when execution leaves the function body without an explicit return value. This is in
-contrast to C/C++, which leaves the return value undefined.
+contrast to C/C++, which leaves the return value undefined.
In the case of non-Python object return types, the equivalent of zero is returned, for example, 0 for ``int``, :keyword:`False` for ``bint`` and :keyword:`NULL` for pointer types.
A more complete comparison of the pros and cons of these different method
@@ -634,7 +643,15 @@ parameters and has two required keyword parameters.
Function Pointers
-----------------
-Functions declared in a ``struct`` are automatically converted to function pointers.
+.. note:: Pointers to functions are currently not supported by pure Python mode. (GitHub issue :issue:`4279`)
+
+The following example shows declaring a ``ptr_add`` function pointer and assigning the ``add`` function to it:
+
+.. literalinclude:: ../../examples/userguide/language_basics/function_pointer.pyx
+
+Functions declared in a ``struct`` are automatically converted to function pointers:
+
+.. literalinclude:: ../../examples/userguide/language_basics/function_pointer_struct.pyx
For using error return values with function pointers, see the note at the bottom
of :ref:`error_return_values`.
@@ -654,14 +671,12 @@ error return value.
While this is always the case for Python functions, functions
defined as C functions or ``cpdef``/``@ccall`` functions can return arbitrary C types,
-which do not have such a well-defined error return value. Thus, if an
-exception is detected in such a function, a warning message is printed,
-the exception is ignored, and the function returns immediately without
-propagating the exception to its caller.
+which do not have such a well-defined error return value.
+Extra care must be taken to ensure Python exceptions are correctly
+propagated from such functions.
-If you want such a C function to be able to propagate exceptions, you need
-to declare an exception return value for it as a contract with the caller.
-Here is an example
+A ``cdef`` function may be declared with an exception return value for it
+as a contract with the caller. Here is an example:
.. tabs::
@@ -684,7 +699,12 @@ Here is an example
With this declaration, whenever an exception occurs inside ``spam``, it will
immediately return with the value ``-1``. From the caller's side, whenever
a call to spam returns ``-1``, the caller will assume that an exception has
-occurred and can now process or propagate it.
+occurred and can now process or propagate it. Calling ``spam()`` is roughly translated to the following C code:
+
+.. code-block:: C
+
+ ret_val = spam();
+ if (ret_val == -1) goto error_handler;
When you declare an exception value for a function, you should never explicitly
or implicitly return that value. This includes empty :keyword:`return`
@@ -710,7 +730,7 @@ form of exception value declaration
def spam() -> cython.int:
...
- The keyword argument ``check=True`` indicates that the value ``-1`` _may_ signal an error.
+ The keyword argument ``check=True`` indicates that the value ``-1`` **may** signal an error.
.. group-tab:: Cython
@@ -719,11 +739,17 @@ form of exception value declaration
cdef int spam() except? -1:
...
- The ``?`` indicates that the value ``-1`` _may_ signal an error.
+ The ``?`` indicates that the value ``-1`` **may** signal an error.
In this case, Cython generates a call to :c:func:`PyErr_Occurred` if the exception value
is returned, to make sure it really received an exception and not just a normal
-result.
+result. Calling ``spam()`` is roughly translated to the following C code:
+
+
+.. code-block:: C
+
+ ret_val = spam();
+ if (ret_val == -1 && PyErr_Occurred()) goto error_handler;
There is also a third form of exception value declaration
@@ -735,18 +761,25 @@ There is also a third form of exception value declaration
@cython.cfunc
@cython.exceptval(check=True)
- def spam() -> cython.int:
+ def spam() -> cython.void:
...
.. group-tab:: Cython
.. code-block:: cython
- cdef int spam() except *:
+ cdef void spam() except *:
...
This form causes Cython to generate a call to :c:func:`PyErr_Occurred` after
-*every* call to spam, regardless of what value it returns. If you have a
+*every* call to spam, regardless of what value it returns. Calling ``spam()`` is roughly translated to the following C code:
+
+.. code-block:: C
+
+    spam();
+ if (PyErr_Occurred()) goto error_handler;
+
+If you have a
function returning ``void`` that needs to propagate errors, you will have to
use this form, since there isn't any error return value to test.
Otherwise, an explicit error return value allows the C compiler to generate
@@ -760,12 +793,47 @@ An external C++ function that may raise an exception can be declared with::
See :ref:`wrapping-cplusplus` for more details.
+Finally, if you are certain that your function should not raise an exception, (e.g., it
+does not use Python objects at all, or you plan to use it as a callback in C code that
+is unaware of Python exceptions), you can declare it as such using ``noexcept`` or by ``@cython.exceptval(check=False)``:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
+
+ @cython.cfunc
+ @cython.exceptval(check=False)
+ def spam() -> cython.int:
+ ...
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cdef int spam() noexcept:
+ ...
+
+If a ``noexcept`` function *does* finish with an exception then it will print a warning message but not allow the exception to propagate further.
+On the other hand, calling a ``noexcept`` function has zero overhead related to managing exceptions, unlike the previous declarations.
+
Some things to note:
+* ``cdef`` functions that are also ``extern`` are implicitly declared ``noexcept`` or ``@cython.exceptval(check=False)``.
+ In the uncommon case of external C/C++ functions that **can** raise Python exceptions,
+ e.g., external functions that use the Python C API, you should explicitly declare
+ them with an exception value.
+
+* ``cdef`` functions that are *not* ``extern`` are implicitly declared with a suitable
+ exception specification for the return type (e.g. ``except *`` or ``@cython.exceptval(check=True)`` for a ``void`` return
+ type, ``except? -1`` or ``@cython.exceptval(-1, check=True)`` for an ``int`` return type).
+
* Exception values can only be declared for functions returning a C integer,
enum, float or pointer type, and the value must be a constant expression.
Functions that return ``void``, or a struct/union by value, can only use
the ``except *`` or ``exceptval(check=True)`` form.
+
* The exception value specification is part of the signature of the function.
If you're passing a pointer to a function as a parameter or assigning it
to a variable, the declared type of the parameter or variable must have
@@ -776,6 +844,10 @@ Some things to note:
.. note:: Pointers to functions are currently not supported by pure Python mode. (GitHub issue :issue:`4279`)
+* If the return type of a ``cdef`` function with ``except *`` or ``@cython.exceptval(check=True)`` is a C integer,
+  enum, float or pointer type, Cython calls :c:func:`PyErr_Occurred` only when the
+  dedicated value is returned, instead of checking after every call of the function.
+
* You don't need to (and shouldn't) declare exception values for functions
which return Python objects. Remember that a function with no declared
return type implicitly returns a Python object. (Exceptions on such
@@ -1094,7 +1166,7 @@ direct equivalent in Python.
* There is an ``&`` operator in Cython, with the same semantics as in C.
In pure python mode, use the ``cython.address()`` function instead.
* The null C pointer is called ``NULL``, not ``0``. ``NULL`` is a reserved word in Cython
- and special object in pure python mode.
+ and ``cython.NULL`` is a special object in pure python mode.
* Type casts are written ``<type>value`` or ``cast(type, value)``, for example,
.. tabs::
diff --git a/docs/src/userguide/memoryviews.rst b/docs/src/userguide/memoryviews.rst
index 1a0a0b282..285cc67ea 100644
--- a/docs/src/userguide/memoryviews.rst
+++ b/docs/src/userguide/memoryviews.rst
@@ -42,6 +42,7 @@ This code should give the following output::
Memoryview sum of Cython array is 1351
Memoryview sum of C memoryview is 451
+.. _using_memoryviews:
Using memoryviews
=================
diff --git a/docs/src/userguide/migrating_to_cy30.rst b/docs/src/userguide/migrating_to_cy30.rst
index 1105ee15d..4576ce864 100644
--- a/docs/src/userguide/migrating_to_cy30.rst
+++ b/docs/src/userguide/migrating_to_cy30.rst
@@ -173,6 +173,43 @@ The old behaviour can be restored with the
:ref:`directive <compiler-directives>` ``c_api_binop_methods=True``.
More details are given in :ref:`arithmetic_methods`.
+Exception values and ``noexcept``
+=================================
+
+``cdef`` functions that are not ``extern`` now propagate Python
+exceptions by default, where previously they needed to explicitly be
+declared with an :ref:`exception value <error_return_values>` in order
+for them to do so. A new ``noexcept`` modifier can be used to declare
+``cdef`` functions that will not raise exceptions.
+
+In existing code, you should mainly look out for ``cdef`` functions
+that are declared without an exception value::
+
+ cdef int spam(int x):
+ pass
+
+If you left out the exception value by mistake, i.e., the function
+should propagate Python exceptions, then the new behaviour will take
+care of this for you, and correctly propagate any exceptions.
+This was a common mistake in Cython code and the main reason to change the behaviour.
+
+On the other hand, if you didn't declare an exception value because
+you want to avoid exceptions propagating out of this function, the new behaviour
+will result in slightly less efficient code being generated, now involving an exception check.
+To prevent that, you must declare the function explicitly as being
+``noexcept``::
+
+ cdef int spam(int x) noexcept:
+ pass
+
+The behaviour for ``cdef`` functions that are also ``extern`` is
+unchanged as ``extern`` functions are less likely to raise Python
+exceptions.
+
+The behaviour for any ``cdef`` function that is declared with an
+explicit exception value (e.g., ``cdef int spam(int x) except -1``) is
+also unchanged.
+
Annotation typing
=================
@@ -181,11 +218,20 @@ annotations and it is well worth reading
:ref:`the pure Python tutorial<pep484_type_annotations>` to understand
some of the improvements.
-A notable backwards-compatible change is that ``x: int`` is now typed
-such that ``x`` is an exact Python ``int`` (Cython 0.29 would accept
-any Python object for ``x``).
-
To make it easier to handle cases where your interpretation of type
annotations differs from Cython's, Cython 3 now supports setting the
``annotation_typing`` :ref:`directive <compiler-directives>` on a
per-class or per-function level.
+
+C++ postincrement/postdecrement operator
+========================================
+
+Cython 3 differentiates between pre/post-increment and pre/post-decrement
+operators (Cython 0.29 implemented both as the pre(in/de)crement operator).
+This only has an effect when using ``cython.operator.postdecrement`` / ``cython.operator.postincrement``.
+When running into an error it is required to add the corresponding operator::
+
+ cdef cppclass Example:
+ Example operator++(int)
+ Example operator--(int)
+
diff --git a/docs/src/userguide/parallelism.rst b/docs/src/userguide/parallelism.rst
index e9d473e66..7cdae95b3 100644
--- a/docs/src/userguide/parallelism.rst
+++ b/docs/src/userguide/parallelism.rst
@@ -8,6 +8,9 @@
Using Parallelism
**********************************
+.. include::
+ ../two-syntax-variants-used
+
Cython supports native parallelism through the :py:mod:`cython.parallel`
module. To use this kind of parallelism, the GIL must be released
(see :ref:`Releasing the GIL <nogil>`).
@@ -87,7 +90,7 @@ It currently supports OpenMP, but later on more backends might be supported.
runtime:
The schedule and chunk size are taken from the runtime scheduling
variable, which can be set through the ``openmp.omp_set_schedule()``
- function call, or the OMP_SCHEDULE environment variable. Note that
+ function call, or the ``OMP_SCHEDULE`` environment variable. Note that
this essentially disables any static compile time optimisations of
the scheduling code itself and may therefore show a slightly worse
performance than when the same scheduling policy is statically
@@ -116,17 +119,27 @@ It currently supports OpenMP, but later on more backends might be supported.
Example with a reduction:
-.. literalinclude:: ../../examples/userguide/parallelism/simple_sum.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/parallelism/simple_sum.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/parallelism/simple_sum.pyx
-Example with a :term:`typed memoryview<Typed memoryview>` (e.g. a NumPy array)::
+Example with a :term:`typed memoryview<Typed memoryview>` (e.g. a NumPy array)
- from cython.parallel import prange
+.. tabs::
- def func(double[:] x, double alpha):
- cdef Py_ssize_t i
+ .. group-tab:: Pure Python
- for i in prange(x.shape[0]):
- x[i] = alpha * x[i]
+ .. literalinclude:: ../../examples/userguide/parallelism/memoryview_sum.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/parallelism/memoryview_sum.pyx
.. function:: parallel(num_threads=None)
@@ -137,29 +150,17 @@ Example with a :term:`typed memoryview<Typed memoryview>` (e.g. a NumPy array)::
is also private to the prange. Variables that are private in the parallel
block are unavailable after the parallel block.
- Example with thread-local buffers::
-
- from cython.parallel import parallel, prange
- from libc.stdlib cimport abort, malloc, free
+ Example with thread-local buffers
- cdef Py_ssize_t idx, i, n = 100
- cdef int * local_buf
- cdef size_t size = 10
+ .. tabs::
- with nogil, parallel():
- local_buf = <int *> malloc(sizeof(int) * size)
- if local_buf is NULL:
- abort()
+ .. group-tab:: Pure Python
- # populate our local buffer in a sequential loop
- for i in xrange(size):
- local_buf[i] = i * 2
+ .. literalinclude:: ../../examples/userguide/parallelism/parallel.py
- # share the work using the thread-local buffer(s)
- for i in prange(n, schedule='guided'):
- func(local_buf)
+ .. group-tab:: Cython
- free(local_buf)
+ .. literalinclude:: ../../examples/userguide/parallelism/parallel.pyx
Later on sections might be supported in parallel blocks, to distribute
code sections of work among threads.
@@ -174,9 +175,17 @@ Compiling
=========
To actually use the OpenMP support, you need to tell the C or C++ compiler to
-enable OpenMP. For gcc this can be done as follows in a setup.py:
+enable OpenMP. For gcc this can be done as follows in a ``setup.py``:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
-.. literalinclude:: ../../examples/userguide/parallelism/setup.py
+ .. literalinclude:: ../../examples/userguide/parallelism/setup_py.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/parallelism/setup_pyx.py
For Microsoft Visual C++ compiler, use ``'/openmp'`` instead of ``'-fopenmp'``.
@@ -188,13 +197,21 @@ The parallel with and prange blocks support the statements break, continue and
return in nogil mode. Additionally, it is valid to use a ``with gil`` block
inside these blocks, and have exceptions propagate from them.
However, because the blocks use OpenMP, they can not just be left, so the
-exiting procedure is best-effort. For prange() this means that the loop
+exiting procedure is best-effort. For ``prange()`` this means that the loop
body is skipped after the first break, return or exception for any subsequent
iteration in any thread. It is undefined which value shall be returned if
multiple different values may be returned, as the iterations are in no
particular order:
-.. literalinclude:: ../../examples/userguide/parallelism/breaking_loop.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/parallelism/breaking_loop.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/parallelism/breaking_loop.pyx
In the example above it is undefined whether an exception shall be raised,
whether it will simply break or whether it will return 2.
@@ -203,7 +220,17 @@ Using OpenMP Functions
======================
OpenMP functions can be used by cimporting ``openmp``:
-.. literalinclude:: ../../examples/userguide/parallelism/cimport_openmp.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/parallelism/cimport_openmp.py
+ :lines: 3-
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/parallelism/cimport_openmp.pyx
+ :lines: 3-
.. rubric:: References
diff --git a/docs/src/userguide/source_files_and_compilation.rst b/docs/src/userguide/source_files_and_compilation.rst
index a833c61ed..d1c8f696c 100644
--- a/docs/src/userguide/source_files_and_compilation.rst
+++ b/docs/src/userguide/source_files_and_compilation.rst
@@ -12,17 +12,21 @@ file named :file:`primes.pyx`.
Cython code, unlike Python, must be compiled. This happens in two stages:
- * A ``.pyx`` file is compiled by Cython to a ``.c`` file.
+ * A ``.pyx`` (or ``.py``) file is compiled by Cython to a ``.c`` file.
* The ``.c`` file is compiled by a C compiler to a ``.so`` file (or a
``.pyd`` file on Windows)
-Once you have written your ``.pyx`` file, there are a couple of ways of turning it
-into an extension module.
+Once you have written your ``.pyx``/``.py`` file, there are a couple of ways
+to turn it into an extension module.
The following sub-sections describe several ways to build your
extension modules, and how to pass directives to the Cython compiler.
+There are also a number of tools that process ``.pyx`` files apart from Cython, e.g.
+
+- Linting: https://pypi.org/project/cython-lint/
+
.. _compiling_command_line:
diff --git a/pyximport/_pyximport2.py b/pyximport/_pyximport2.py
index b2077826a..00e88a8ac 100644
--- a/pyximport/_pyximport2.py
+++ b/pyximport/_pyximport2.py
@@ -185,12 +185,26 @@ def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_l
build_in_temp = sargs.pop('build_in_temp',build_in_temp)
from . import pyxbuild
- so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod,
- build_in_temp=build_in_temp,
- pyxbuild_dir=pyxbuild_dir,
- setup_args=sargs,
- inplace=inplace,
- reload_support=pyxargs.reload_support)
+ olddir = os.getcwd()
+ common = ''
+ if pyxbuild_dir:
+            # Windows concatenates the pyxbuild_dir to the pyxfilename when
+ # compiling, and then complains that the filename is too long
+ common = os.path.commonprefix([pyxbuild_dir, pyxfilename])
+ if len(common) > 30:
+ pyxfilename = os.path.relpath(pyxfilename)
+ pyxbuild_dir = os.path.relpath(pyxbuild_dir)
+ os.chdir(common)
+ try:
+ so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod,
+ build_in_temp=build_in_temp,
+ pyxbuild_dir=pyxbuild_dir,
+ setup_args=sargs,
+ inplace=inplace,
+ reload_support=pyxargs.reload_support)
+ finally:
+ os.chdir(olddir)
+ so_path = os.path.join(common, so_path)
assert os.path.exists(so_path), "Cannot find: %s" % so_path
junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;)
diff --git a/pyximport/_pyximport3.py b/pyximport/_pyximport3.py
index dccd1d09e..4fa811f8a 100644
--- a/pyximport/_pyximport3.py
+++ b/pyximport/_pyximport3.py
@@ -183,12 +183,26 @@ def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_l
build_in_temp = sargs.pop('build_in_temp',build_in_temp)
from . import pyxbuild
- so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod,
- build_in_temp=build_in_temp,
- pyxbuild_dir=pyxbuild_dir,
- setup_args=sargs,
- inplace=inplace,
- reload_support=pyxargs.reload_support)
+ olddir = os.getcwd()
+ common = ''
+ if pyxbuild_dir:
+            # Windows concatenates the pyxbuild_dir to the pyxfilename when
+ # compiling, and then complains that the filename is too long
+ common = os.path.commonprefix([pyxbuild_dir, pyxfilename])
+ if len(common) > 30:
+ pyxfilename = os.path.relpath(pyxfilename)
+ pyxbuild_dir = os.path.relpath(pyxbuild_dir)
+ os.chdir(common)
+ try:
+ so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod,
+ build_in_temp=build_in_temp,
+ pyxbuild_dir=pyxbuild_dir,
+ setup_args=sargs,
+ inplace=inplace,
+ reload_support=pyxargs.reload_support)
+ finally:
+ os.chdir(olddir)
+ so_path = os.path.join(common, so_path)
assert os.path.exists(so_path), "Cannot find: %s" % so_path
junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;)
diff --git a/runtests.py b/runtests.py
index a5c12e65e..7b0b74c30 100755
--- a/runtests.py
+++ b/runtests.py
@@ -294,57 +294,60 @@ def update_openmp_extension(ext):
return EXCLUDE_EXT
-def update_cpp11_extension(ext):
- """
- update cpp11 extensions that will run on versions of gcc >4.8
- """
- gcc_version = get_gcc_version(ext.language)
- already_has_std = any(ca for ca in ext.extra_compile_args if "-std" in ca)
- if gcc_version:
- compiler_version = gcc_version.group(1)
- if float(compiler_version) > 4.8 and not already_has_std:
- ext.extra_compile_args.append("-std=c++11")
- return ext
+def update_cpp_extension(cpp_std, min_gcc_version=None, min_clang_version=None, min_macos_version=None):
+ def _update_cpp_extension(ext):
+ """
+ Update cpp[cpp_std] extensions that will run on minimum versions of gcc / clang / macos.
+ """
+ # If the extension provides a -std=... option, assume that whatever C compiler we use
+ # will probably be ok with it.
+ already_has_std = any(
+ ca for ca in ext.extra_compile_args
+ if "-std" in ca and "-stdlib" not in ca
+ )
+ use_gcc = use_clang = already_has_std
- clang_version = get_clang_version(ext.language)
- if clang_version:
- if not already_has_std:
- ext.extra_compile_args.append("-std=c++11")
- if sys.platform == "darwin":
- ext.extra_compile_args.append("-stdlib=libc++")
- ext.extra_compile_args.append("-mmacosx-version-min=10.7")
- return ext
+ # check for a usable gcc version
+ gcc_version = get_gcc_version(ext.language)
+ if gcc_version:
+ if cpp_std >= 17 and sys.version_info[0] < 3:
+ # The Python 2.7 headers contain the 'register' modifier
+ # which gcc warns about in C++17 mode.
+ ext.extra_compile_args.append('-Wno-register')
+ if not already_has_std:
+ compiler_version = gcc_version.group(1)
+ if not min_gcc_version or float(compiler_version) >= float(min_gcc_version):
+ use_gcc = True
+ ext.extra_compile_args.append("-std=c++%s" % cpp_std)
+
+ if use_gcc:
+ return ext
- return EXCLUDE_EXT
+ # check for a usable clang version
+ clang_version = get_clang_version(ext.language)
+ if clang_version:
+ if cpp_std >= 17 and sys.version_info[0] < 3:
+ # The Python 2.7 headers contain the 'register' modifier
+ # which clang warns about in C++17 mode.
+ ext.extra_compile_args.append('-Wno-register')
+ if not already_has_std:
+ compiler_version = clang_version.group(1)
+ if not min_clang_version or float(compiler_version) >= float(min_clang_version):
+ use_clang = True
+ ext.extra_compile_args.append("-std=c++%s" % cpp_std)
+ if sys.platform == "darwin":
+ ext.extra_compile_args.append("-stdlib=libc++")
+ if min_macos_version is not None:
+ ext.extra_compile_args.append("-mmacosx-version-min=" + min_macos_version)
+
+ if use_clang:
+ return ext
-def update_cpp17_extension(ext):
- """
- update cpp17 extensions that will run on versions of gcc >=5.0
- """
- gcc_version = get_gcc_version(ext.language)
- if gcc_version:
- compiler_version = gcc_version.group(1)
- if sys.version_info[0] < 3:
- # The Python 2.7 headers contain the 'register' modifier
- # which gcc warns about in C++17 mode.
- ext.extra_compile_args.append('-Wno-register')
- if float(compiler_version) >= 5.0:
- ext.extra_compile_args.append("-std=c++17")
- return ext
+ # no usable C compiler found => exclude the extension
+ return EXCLUDE_EXT
- clang_version = get_clang_version(ext.language)
- if clang_version:
- ext.extra_compile_args.append("-std=c++17")
- if sys.version_info[0] < 3:
- # The Python 2.7 headers contain the 'register' modifier
- # which clang warns about in C++17 mode.
- ext.extra_compile_args.append('-Wno-register')
- if sys.platform == "darwin":
- ext.extra_compile_args.append("-stdlib=libc++")
- ext.extra_compile_args.append("-mmacosx-version-min=10.13")
- return ext
+ return _update_cpp_extension
- return EXCLUDE_EXT
def require_gcc(version):
def check(ext):
@@ -438,8 +441,9 @@ EXT_EXTRAS = {
'tag:numpy' : update_numpy_extension,
'tag:openmp': update_openmp_extension,
'tag:gdb': update_gdb_extension,
- 'tag:cpp11': update_cpp11_extension,
- 'tag:cpp17': update_cpp17_extension,
+ 'tag:cpp11': update_cpp_extension(11, min_gcc_version="4.9", min_macos_version="10.7"),
+ 'tag:cpp17': update_cpp_extension(17, min_gcc_version="5.0", min_macos_version="10.13"),
+ 'tag:cpp20': update_cpp_extension(20, min_gcc_version="11.0", min_clang_version="13.0", min_macos_version="10.13"),
'tag:trace' : update_linetrace_extension,
'tag:bytesformat': exclude_extension_in_pyver((3, 3), (3, 4)), # no %-bytes formatting
'tag:no-macos': exclude_extension_on_platform('darwin'),
@@ -478,6 +482,7 @@ VER_DEP_MODULES = {
(3,4): (operator.lt, lambda x: x in ['run.py34_signature',
'run.test_unicode', # taken from Py3.7, difficult to backport
'run.pep442_tp_finalize',
+ 'run.pep442_tp_finalize_cimport',
]),
(3,4,999): (operator.gt, lambda x: x in ['run.initial_file_path',
]),
@@ -492,6 +497,7 @@ VER_DEP_MODULES = {
]),
(3,7): (operator.lt, lambda x: x in ['run.pycontextvar',
'run.pep557_dataclasses', # dataclasses module
+ 'run.test_dataclasses',
]),
}
@@ -1427,6 +1433,8 @@ class CythonCompileTestCase(unittest.TestCase):
def _match_output(self, expected_output, actual_output, write):
try:
for expected, actual in zip(expected_output, actual_output):
+ if expected != actual and '\\' in actual and os.sep == '\\' and '/' in expected and '\\' not in expected:
+ expected = expected.replace('/', '\\')
self.assertEqual(expected, actual)
if len(actual_output) < len(expected_output):
expected = expected_output[len(actual_output)]
@@ -1927,6 +1935,8 @@ class EndToEndTest(unittest.TestCase):
os.chdir(self.old_dir)
def _try_decode(self, content):
+ if not isinstance(content, bytes):
+ return content
try:
return content.decode()
except UnicodeDecodeError:
@@ -1966,6 +1976,10 @@ class EndToEndTest(unittest.TestCase):
for c, o, e in zip(cmd, out, err):
sys.stderr.write("[%d] %s\n%s\n%s\n\n" % (
self.shard_num, c, self._try_decode(o), self._try_decode(e)))
+ sys.stderr.write("Final directory layout of '%s':\n%s\n\n" % (
+ self.name,
+ '\n'.join(os.path.join(dirpath, filename) for dirpath, dirs, files in os.walk(".") for filename in files),
+ ))
self.assertEqual(0, res, "non-zero exit status, last output was:\n%r\n-- stdout:%s\n-- stderr:%s\n" % (
' '.join(command), self._try_decode(out[-1]), self._try_decode(err[-1])))
self.success = True
@@ -2532,12 +2546,17 @@ def configure_cython(options):
CompilationOptions, \
default_options as pyrex_default_options
from Cython.Compiler.Options import _directive_defaults as directive_defaults
+
from Cython.Compiler import Errors
Errors.LEVEL = 0 # show all warnings
+
from Cython.Compiler import Options
Options.generate_cleanup_code = 3 # complete cleanup code
+
from Cython.Compiler import DebugFlags
DebugFlags.debug_temp_code_comments = 1
+ DebugFlags.debug_no_exception_intercept = 1 # provide better crash output in CI runs
+
pyrex_default_options['formal_grammar'] = options.use_formal_grammar
if options.profile:
directive_defaults['profile'] = True
@@ -2702,7 +2721,8 @@ def runtests(options, cmd_args, coverage=None):
('graal_bugs.txt', IS_GRAAL),
('limited_api_bugs.txt', options.limited_api),
('windows_bugs.txt', sys.platform == 'win32'),
- ('cygwin_bugs.txt', sys.platform == 'cygwin')
+ ('cygwin_bugs.txt', sys.platform == 'cygwin'),
+ ('windows_bugs_39.txt', sys.platform == 'win32' and sys.version_info[:2] == (3, 9))
]
exclude_selectors += [
diff --git a/setup.py b/setup.py
index 8d5089c18..26beca2f4 100755
--- a/setup.py
+++ b/setup.py
@@ -94,16 +94,16 @@ def compile_cython_modules(profile=False, coverage=False, compile_minimal=False,
"Cython.Plex.Machines",
"Cython.Plex.Transitions",
"Cython.Plex.DFA",
+ "Cython.Compiler.Code",
"Cython.Compiler.FusedNode",
+ "Cython.Compiler.Parsing",
"Cython.Tempita._tempita",
"Cython.StringIOTree",
"Cython.Utils",
])
if compile_more and not compile_minimal:
compiled_modules.extend([
- "Cython.Compiler.Code",
"Cython.Compiler.Lexicon",
- "Cython.Compiler.Parsing",
"Cython.Compiler.Pythran",
"Cython.Build.Dependencies",
"Cython.Compiler.ParseTreeTransforms",
@@ -183,37 +183,27 @@ def compile_cython_modules(profile=False, coverage=False, compile_minimal=False,
setup_args['ext_modules'] = extensions
-cython_profile = '--cython-profile' in sys.argv
-if cython_profile:
- sys.argv.remove('--cython-profile')
+def check_option(name):
+ cli_arg = "--" + name
+ if cli_arg in sys.argv:
+ sys.argv.remove(cli_arg)
+ return True
-cython_coverage = '--cython-coverage' in sys.argv
-if cython_coverage:
- sys.argv.remove('--cython-coverage')
+ env_var = name.replace("-", "_").upper()
+ if os.environ.get(env_var) == "true":
+ return True
-try:
- sys.argv.remove("--cython-compile-all")
- cython_compile_more = True
-except ValueError:
- cython_compile_more = False
+ return False
-try:
- sys.argv.remove("--cython-compile-minimal")
- cython_compile_minimal = True
-except ValueError:
- cython_compile_minimal = False
-try:
- sys.argv.remove("--cython-with-refnanny")
- cython_with_refnanny = True
-except ValueError:
- cython_with_refnanny = False
+cython_profile = check_option('cython-profile')
+cython_coverage = check_option('cython-coverage')
+cython_with_refnanny = check_option('cython-with-refnanny')
-try:
- sys.argv.remove("--no-cython-compile")
- compile_cython_itself = False
-except ValueError:
- compile_cython_itself = True
+compile_cython_itself = not check_option('no-cython-compile')
+if compile_cython_itself:
+ cython_compile_more = check_option('cython-compile-all')
+ cython_compile_minimal = check_option('cython-compile-minimal')
setup_args.update(setuptools_extra_args)
@@ -283,7 +273,7 @@ def run_build():
.. _Pyrex: https://www.cosc.canterbury.ac.nz/greg.ewing/python/Pyrex/
"""),
- license='Apache',
+ license='Apache-2.0',
classifiers=[
dev_status(version),
"Intended Audience :: Developers",
@@ -308,6 +298,13 @@ def run_build():
"Topic :: Software Development :: Compilers",
"Topic :: Software Development :: Libraries :: Python Modules"
],
+ project_urls={
+ "Documentation": "https://cython.readthedocs.io/",
+ "Donate": "https://cython.readthedocs.io/en/latest/src/donating.html",
+ "Source Code": "https://github.com/cython/cython",
+ "Bug Tracker": "https://github.com/cython/cython/issues",
+ "User Group": "https://groups.google.com/g/cython-users",
+ },
scripts=scripts,
packages=packages,
diff --git a/test-requirements-27.txt b/test-requirements-27.txt
index a3ad0439e..b518c2570 100644
--- a/test-requirements-27.txt
+++ b/test-requirements-27.txt
@@ -26,6 +26,7 @@ jupyter-console==5.2.0
jupyter-core==4.6.3
line-profiler==3.1.0
MarkupSafe==1.1.1
+maturin==0.7.6; os_name == "nt" # actually 0.9.4, but it's not available; pywinpty dependency
mistune==0.8.4
nbconvert==5.6.1
nbformat==4.4.0
@@ -44,6 +45,7 @@ Pygments==2.5.2
pyparsing==2.4.7
pyrsistent==0.15.7
python-dateutil==2.8.1
+pywinpty==0.5.7 # terminado dependency (pywinpty>=0.5)
pyzmq==16.0.4
qtconsole==4.7.7
QtPy==1.9.0
@@ -60,3 +62,4 @@ wcwidth==0.2.5
webencodings==0.5.1
widgetsnbextension==3.5.1
zipp==1.2.0
+mock==3.0.5
diff --git a/test-requirements-34.txt b/test-requirements-34.txt
index 8697eff4b..8a48d1ae6 100644
--- a/test-requirements-34.txt
+++ b/test-requirements-34.txt
@@ -1,3 +1,3 @@
-numpy < 1.19.0
+numpy<1.16.0
coverage
pycodestyle
diff --git a/test-requirements-cpython.txt b/test-requirements-cpython.txt
index 1cfae040b..28db037b2 100644
--- a/test-requirements-cpython.txt
+++ b/test-requirements-cpython.txt
@@ -1,3 +1,4 @@
jupyter
+pytest # needed by IPython/Jupyter integration tests
line_profiler
setuptools<60
diff --git a/test-requirements-pypy27.txt b/test-requirements-pypy27.txt
index 369b7225a..6d4f83bca 100644
--- a/test-requirements-pypy27.txt
+++ b/test-requirements-pypy27.txt
@@ -1 +1,3 @@
+-r test-requirements.txt
enum34==1.1.10
+mock==3.0.5
diff --git a/tests/bugs.txt b/tests/bugs.txt
index e853b4526..27458889c 100644
--- a/tests/bugs.txt
+++ b/tests/bugs.txt
@@ -6,7 +6,6 @@ class_attribute_init_values_T18
unsignedbehaviour_T184
missing_baseclass_in_predecl_T262
cfunc_call_tuple_args_T408
-genexpr_iterable_lookup_T600
generator_expressions_in_class
for_from_pyvar_loop_T601
temp_sideeffects_T654 # not really a bug, Cython warns about it
diff --git a/tests/build/cythonize_options.srctree b/tests/build/cythonize_options.srctree
index fcef9645b..0dc7f724f 100644
--- a/tests/build/cythonize_options.srctree
+++ b/tests/build/cythonize_options.srctree
@@ -49,5 +49,5 @@ def mod_int_c(int a, int b):
assert mod_int_c(-1, 10) < 0
# unraisable exceptions should produce a warning
-cdef int no_exc_propagate():
+cdef int no_exc_propagate() noexcept:
raise TypeError()
diff --git a/tests/build/depfile_package_cython.srctree b/tests/build/depfile_package_cython.srctree
new file mode 100644
index 000000000..ccb1dc230
--- /dev/null
+++ b/tests/build/depfile_package_cython.srctree
@@ -0,0 +1,61 @@
+"""
+PYTHON -c 'import os; os.makedirs("builddir/pkg/sub")'
+CYTHON -M pkg/test.pyx -o builddir/pkg/test.c
+CYTHON --depfile pkg/sub/test.pyx -o builddir/pkg/sub/test.c
+PYTHON check.py
+"""
+
+######## check.py ########
+
+import os.path
+
+def pkgpath(*args):
+ return os.path.join('pkg', *args)
+
+with open(os.path.join("builddir", "pkg", "test.c.dep"), "r") as f:
+ contents = f.read().replace("\\\n", " ").replace("\n", " ")
+
+assert sorted(contents.split()) == sorted([os.path.join('builddir', 'pkg', 'test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('test.pxd'), pkgpath('test.pyx')]), contents
+
+
+with open(os.path.join("builddir", "pkg", "sub", "test.c.dep"), "r") as f:
+ contents = f.read().replace("\\\n", " ").replace("\n", " ")
+
+contents = [os.path.relpath(entry, '.')
+ if os.path.isabs(entry) else entry for entry in contents.split()]
+assert sorted(contents) == sorted([os.path.join('builddir', 'pkg', 'sub', 'test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('sub', 'test.pyx'), pkgpath('test.pxd')]), contents # last is really one level up
+
+######## pkg/__init__.py ########
+
+
+######## pkg/test.pyx ########
+
+TEST = "pkg.test"
+
+include "sub/incl.pxi"
+
+cdef object get_str():
+ return TEST
+
+
+######## pkg/test.pxd ########
+
+cdef object get_str()
+
+
+######## pkg/sub/__init__.py ########
+
+
+######## pkg/sub/test.pyx ########
+# cython: language_level=3
+
+from ..test cimport get_str
+
+include 'incl.pxi'
+
+TEST = 'pkg.sub.test'
+
+
+######## pkg/sub/incl.pxi ########
+
+pass
diff --git a/tests/build/depfile_package.srctree b/tests/build/depfile_package_cythonize.srctree
index d96294b27..d68e82ece 100644
--- a/tests/build/depfile_package.srctree
+++ b/tests/build/depfile_package_cythonize.srctree
@@ -7,10 +7,13 @@ PYTHON package_test.py
import os.path
+def pkgpath(*args):
+ return os.path.join('pkg', *args)
+
with open(os.path.join("pkg", "test.c.dep"), "r") as f:
contents = f.read().replace("\\\n", " ").replace("\n", " ")
-assert sorted(contents.split()) == sorted(['test.c:', os.path.join('sub', 'incl.pxi'), 'test.pxd', 'test.pyx']), contents
+assert sorted(contents.split()) == sorted([pkgpath('test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('test.pxd'), pkgpath('test.pyx')]), contents
with open(os.path.join("pkg", "sub", "test.c.dep"), "r") as f:
@@ -18,7 +21,7 @@ with open(os.path.join("pkg", "sub", "test.c.dep"), "r") as f:
contents = [os.path.relpath(entry, '.')
if os.path.isabs(entry) else entry for entry in contents.split()]
-assert sorted(contents) == sorted(['test.c:', 'incl.pxi', 'test.pyx', os.path.join('pkg', 'test.pxd')]), contents # last is really one level up
+assert sorted(contents) == sorted([pkgpath('sub', 'test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('sub', 'test.pyx'), pkgpath('test.pxd')]), contents # last is really one level up
######## pkg/__init__.py ########
diff --git a/tests/compile/branch_hints.pyx b/tests/compile/branch_hints.pyx
index 575ee6cba..e6bd0b5c3 100644
--- a/tests/compile/branch_hints.pyx
+++ b/tests/compile/branch_hints.pyx
@@ -82,7 +82,7 @@ def if_elif_raise_else_raise(x):
"//IfClauseNode[@branch_hint = 'likely']",
"//IfClauseNode[not(@branch_hint)]",
)
-cpdef int nogil_if_raise(int x) nogil except -1:
+cpdef int nogil_if_raise(int x) except -1 nogil:
if x:
raise TypeError()
elif not x:
diff --git a/tests/compile/buildenv.pyx b/tests/compile/buildenv.pyx
index 01f29883a..0f06ccc71 100644
--- a/tests/compile/buildenv.pyx
+++ b/tests/compile/buildenv.pyx
@@ -36,6 +36,8 @@ cdef extern from *:
cdef int CYTHON_COMPILING_IN_CPYTHON
cdef int CYTHON_COMPILING_IN_LIMITED_API
cdef int CYTHON_COMPILING_IN_PYPY
+ cdef int CYTHON_COMPILING_IN_GRAAL
+ cdef int CYTHON_COMPILING_IN_NOGIL
cdef int CYTHON_USE_PYLONG_INTERNALS
cdef int CYTHON_USE_PYLIST_INTERNALS
cdef int CYTHON_USE_UNICODE_INTERNALS
@@ -79,6 +81,8 @@ PY_VERSION_HEX 0x{PY_VERSION_HEX:X}
CYTHON_COMPILING_IN_CPYTHON {CYTHON_COMPILING_IN_CPYTHON}
CYTHON_COMPILING_IN_LIMITED_API {CYTHON_COMPILING_IN_LIMITED_API}
CYTHON_COMPILING_IN_PYPY {CYTHON_COMPILING_IN_PYPY}
+CYTHON_COMPILING_IN_GRAAL {CYTHON_COMPILING_IN_GRAAL}
+CYTHON_COMPILING_IN_NOGIL {CYTHON_COMPILING_IN_NOGIL}
CYTHON_USE_PYLONG_INTERNALS {CYTHON_USE_PYLONG_INTERNALS}
CYTHON_USE_PYLIST_INTERNALS {CYTHON_USE_PYLIST_INTERNALS}
diff --git a/tests/compile/c_directives.pyx b/tests/compile/c_directives.pyx
index 0ede90ba8..ee19e652f 100644
--- a/tests/compile/c_directives.pyx
+++ b/tests/compile/c_directives.pyx
@@ -2,6 +2,8 @@
# cython: boundscheck = False
# cython: ignoreme = OK
# cython: warn.undeclared = False
+# cython: test_assert_c_code_has = Generated by Cython
+# cython: test_fail_if_c_code_has = Generated by Python
# This testcase is most useful if you inspect the generated C file
diff --git a/tests/compile/cpp_nogil.pyx b/tests/compile/cpp_nogil.pyx
index 1007054dc..658dc37cb 100644
--- a/tests/compile/cpp_nogil.pyx
+++ b/tests/compile/cpp_nogil.pyx
@@ -19,5 +19,5 @@ with nogil:
# We can override nogil methods as with gil methods.
cdef cppclass WithGilSubclass(NoGilTest1):
- void doSomething() with gil:
+ void doSomething() noexcept with gil:
print "have the gil"
diff --git a/tests/compile/declarations.srctree b/tests/compile/declarations.srctree
index babf2e4e3..bfbbcd4b3 100644
--- a/tests/compile/declarations.srctree
+++ b/tests/compile/declarations.srctree
@@ -40,7 +40,7 @@ cdef extern int a(int[][3], int[][3][5])
cdef void f():
cdef void *p=NULL
global ifnp, cpa
- ifnp = <int (*)()>p
+ ifnp = <int (*)() noexcept>p
cdef char *g():
pass
diff --git a/tests/compile/excvalcheck.h b/tests/compile/excvalcheck.h
index 4c92acd2b..ba7a760e1 100644
--- a/tests/compile/excvalcheck.h
+++ b/tests/compile/excvalcheck.h
@@ -1,12 +1,6 @@
-#ifdef __cplusplus
-extern "C" {
-#endif
extern DL_EXPORT(int) spam(void);
extern DL_EXPORT(void) grail(void);
extern DL_EXPORT(char *)tomato(void);
-#ifdef __cplusplus
-}
-#endif
int spam(void) {return 0;}
void grail(void) {return;}
diff --git a/tests/compile/fused_buffers.pyx b/tests/compile/fused_buffers.pyx
new file mode 100644
index 000000000..73b0315ed
--- /dev/null
+++ b/tests/compile/fused_buffers.pyx
@@ -0,0 +1,16 @@
+# mode: compile
+
+# cython: test_assert_c_code_has = __Pyx_ImportNumPyArrayTypeIfAvailable
+# cython: test_assert_c_code_has = ndarray
+
+# counterpart test to fused_no_numpy - buffer types are compared against Numpy
+# dtypes as a quick test. fused_no_numpy tests that the mechanism isn't
+# accidentally generated, while this just confirms that the same mechanism is
+# still in use
+
+ctypedef fused IntOrFloat:
+ int
+ float
+
+def f(IntOrFloat[:] x):
+ return x
diff --git a/tests/compile/fused_no_numpy.pyx b/tests/compile/fused_no_numpy.pyx
new file mode 100644
index 000000000..efb49c322
--- /dev/null
+++ b/tests/compile/fused_no_numpy.pyx
@@ -0,0 +1,13 @@
+# mode: compile
+
+# cython: test_fail_if_c_code_has = __Pyx_ImportNumPyArrayTypeIfAvailable
+
+ctypedef fused IntOrFloat:
+ int
+ float
+
+# This function does not use buffers so has no reason to import numpy to
+# look up dtypes. fused_buffers.pyx is the corresponding test for the case
+# where numpy is imported
+def f(IntOrFloat x):
+ return x
diff --git a/tests/compile/fused_redeclare_T3111.pyx b/tests/compile/fused_redeclare_T3111.pyx
index d91f1d132..bc3d54a99 100644
--- a/tests/compile/fused_redeclare_T3111.pyx
+++ b/tests/compile/fused_redeclare_T3111.pyx
@@ -27,10 +27,10 @@ _WARNINGS = """
36:10: 'cpdef_cname_method' redeclared
# from MemoryView.pyx
-975:29: Ambiguous exception value, same as default return value: 0
-975:29: Ambiguous exception value, same as default return value: 0
-1016:46: Ambiguous exception value, same as default return value: 0
-1016:46: Ambiguous exception value, same as default return value: 0
-1106:29: Ambiguous exception value, same as default return value: 0
-1106:29: Ambiguous exception value, same as default return value: 0
+977:29: Ambiguous exception value, same as default return value: 0
+977:29: Ambiguous exception value, same as default return value: 0
+1018:46: Ambiguous exception value, same as default return value: 0
+1018:46: Ambiguous exception value, same as default return value: 0
+1108:29: Ambiguous exception value, same as default return value: 0
+1108:29: Ambiguous exception value, same as default return value: 0
"""
diff --git a/tests/compile/module_name_arg.srctree b/tests/compile/module_name_arg.srctree
new file mode 100644
index 000000000..81e75b008
--- /dev/null
+++ b/tests/compile/module_name_arg.srctree
@@ -0,0 +1,52 @@
+# Test that we can set module name with --module-name arg to cython
+CYTHON a.pyx
+CYTHON --module-name w b.pyx
+CYTHON --module-name my_module.submod.x c.pyx
+PYTHON setup.py build_ext --inplace
+PYTHON checks.py
+
+######## checks.py ########
+
+from importlib import import_module
+
+try:
+ exc = ModuleNotFoundError
+except NameError:
+ exc = ImportError
+
+for module_name, should_import in (
+ ('a', True),
+ ('b', False),
+ ('w', True),
+ ('my_module.submod.x', True),
+ ('c', False),
+ ):
+ try:
+ import_module(module_name)
+ except exc:
+ if should_import:
+ assert False, "Cannot import module " + module_name
+ else:
+ if not should_import:
+ assert False, ("Can import module " + module_name +
+ " but import should not be possible")
+
+
+######## setup.py ########
+
+from distutils.core import setup
+from distutils.extension import Extension
+
+setup(
+ ext_modules = [
+ Extension("a", ["a.c"]),
+ Extension("w", ["b.c"]),
+ Extension("my_module.submod.x", ["c.c"]),
+ ],
+)
+
+######## a.pyx ########
+######## b.pyx ########
+######## c.pyx ########
+######## my_module/__init__.py ########
+######## my_module/submod/__init__.py ########
diff --git a/tests/compile/nogil.h b/tests/compile/nogil.h
index 42878109b..764a3fc8a 100644
--- a/tests/compile/nogil.h
+++ b/tests/compile/nogil.h
@@ -1,25 +1,13 @@
-#ifdef __cplusplus
-extern "C" {
-#endif
extern DL_EXPORT(void) e1(void);
extern DL_EXPORT(int*) e2(void);
-#ifdef __cplusplus
-}
-#endif
void e1(void) {return;}
int* e2(void) {return 0;}
-#ifdef __cplusplus
-extern "C" {
-#endif
extern DL_EXPORT(PyObject *) g(PyObject*);
extern DL_EXPORT(void) g2(PyObject*);
-#ifdef __cplusplus
-}
-#endif
PyObject *g(PyObject* o) {if (o) {}; return 0;}
void g2(PyObject* o) {if (o) {}; return;}
diff --git a/tests/compile/publicapi_pxd_mix.pxd b/tests/compile/publicapi_pxd_mix.pxd
index 09452f116..414274d45 100644
--- a/tests/compile/publicapi_pxd_mix.pxd
+++ b/tests/compile/publicapi_pxd_mix.pxd
@@ -61,7 +61,7 @@ cdef public api void bar3()
cdef inline void* spam (object o) except NULL: return NULL
cdef void* spam0(object o) except NULL
cdef public void* spam1(object o) except NULL
-cdef api void* spam2(object o) nogil except NULL
+cdef api void* spam2(object o) except NULL nogil
cdef public api void* spam3(object o) except NULL with gil
# --
diff --git a/tests/compile/publicapi_pxd_mix.pyx b/tests/compile/publicapi_pxd_mix.pyx
index 588f6b79c..dd748053f 100644
--- a/tests/compile/publicapi_pxd_mix.pyx
+++ b/tests/compile/publicapi_pxd_mix.pyx
@@ -15,7 +15,7 @@ cdef public api void bar3(): pass
cdef void* spam0(object o) except NULL: return NULL
cdef public void* spam1(object o) except NULL: return NULL
-cdef api void* spam2(object o) nogil except NULL: return NULL
+cdef api void* spam2(object o) except NULL nogil: return NULL
cdef public api void* spam3(object o) except NULL with gil: return NULL
cdef int i0 = 0 # XXX This should not be required!
diff --git a/tests/compile/pxd_mangling_names.srctree b/tests/compile/pxd_mangling_names.srctree
new file mode 100644
index 000000000..3797fc0f9
--- /dev/null
+++ b/tests/compile/pxd_mangling_names.srctree
@@ -0,0 +1,46 @@
+# mode: compile
+# ticket: 2940
+
+PYTHON setup.py build_ext --inplace
+PYTHON -c "import a; a.test()"
+
+######## setup.py ########
+
+from Cython.Build import cythonize
+from Cython.Distutils.extension import Extension
+from distutils.core import setup
+
+setup(
+ ext_modules=cythonize([Extension("a", ["a.py", "b.c"])]),
+)
+
+######## a.pxd ########
+
+cdef public int foo()
+
+cdef extern from "b.h":
+ cpdef int bar()
+
+######## a.py ########
+
+def foo():
+ return 42
+
+def test():
+ assert bar() == 42
+
+######## b.h ########
+
+#ifndef B_H
+#define B_H
+
+int bar();
+
+#endif
+
+######## b.c ########
+
+#include "a.h"
+
+int bar() { return foo(); }
+
diff --git a/tests/errors/cfuncptr.pyx b/tests/errors/cfuncptr.pyx
index e05efa519..f07ef2167 100644
--- a/tests/errors/cfuncptr.pyx
+++ b/tests/errors/cfuncptr.pyx
@@ -19,7 +19,7 @@ cdef extern from *:
cdef int exceptstar(int bad) except *
def fail_exceptstar(bad):
- cdef int (*fptr_a)(int) # noexcept
+ cdef int (*fptr_a)(int) noexcept
cdef int (*fptr_b)(int) except -1
cdef int (*fptr_c)(int) except ?-1
fptr_a = exceptstar
@@ -30,7 +30,7 @@ _ERRORS = """
13:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except -2'
14:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except -1'
15:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except? -1'
-25:13: Cannot assign type 'int (int) except *' to 'int (*)(int)'
+25:13: Cannot assign type 'int (int) except *' to 'int (*)(int) noexcept'
26:13: Cannot assign type 'int (int) except *' to 'int (*)(int) except -1'
27:13: Cannot assign type 'int (int) except *' to 'int (*)(int) except? -1'
"""
diff --git a/tests/errors/cpp_increment.pyx b/tests/errors/cpp_increment.pyx
new file mode 100644
index 000000000..45e978d95
--- /dev/null
+++ b/tests/errors/cpp_increment.pyx
@@ -0,0 +1,33 @@
+# mode: error
+
+cimport cython
+
+cdef extern from *:
+ cdef cppclass Foo:
+ Foo operator++()
+ Foo operator--()
+
+ cdef cppclass Bar:
+ Bar operator++(int)
+ Bar operator--(int)
+
+cdef void foo():
+ cdef Foo f
+ cdef Bar b
+ cython.operator.postincrement(f)
+ cython.operator.postincrement(b)
+ cython.operator.postdecrement(f)
+ cython.operator.postdecrement(b)
+
+ cython.operator.preincrement(f)
+ cython.operator.preincrement(b)
+ cython.operator.predecrement(f)
+ cython.operator.predecrement(b)
+
+
+_ERRORS = u"""
+17:19: No 'operator++(int)' declared for postfix '++' (operand type is 'Foo')
+19:19: No 'operator--(int)' declared for postfix '--' (operand type is 'Foo')
+23:19: No match for 'operator++' (operand type is 'Bar')
+25:19: No match for 'operator--' (operand type is 'Bar')
+"""
diff --git a/tests/errors/dataclass_e6.pyx b/tests/errors/dataclass_e6.pyx
new file mode 100644
index 000000000..64dc1ae05
--- /dev/null
+++ b/tests/errors/dataclass_e6.pyx
@@ -0,0 +1,23 @@
+# mode: error
+
+from cython.dataclasses cimport dataclass
+
+@dataclass
+cdef class BaseDataclass:
+ a: str = "value"
+
+@dataclass
+cdef class MainDataclass(BaseDataclass):
+ a: str = "new value"
+
+cdef class Intermediate(BaseDataclass):
+ pass
+
+@dataclass
+cdef class AnotherDataclass(Intermediate):
+ a: str = "ooops"
+
+_ERRORS = """
+11:4: Cannot redeclare inherited fields in Cython dataclasses
+18:4: Cannot redeclare inherited fields in Cython dataclasses
+"""
diff --git a/tests/errors/dataclass_w1.pyx b/tests/errors/dataclass_w1.pyx
new file mode 100644
index 000000000..c0d9790e2
--- /dev/null
+++ b/tests/errors/dataclass_w1.pyx
@@ -0,0 +1,13 @@
+# mode: compile
+# tag: warnings
+
+from dataclass_w1_othermod cimport SomeBase
+from cython.dataclasses cimport dataclass
+
+@dataclass
+cdef class DC(SomeBase):
+ a: str = ""
+
+_WARNINGS = """
+8:5: Cannot reliably handle Cython dataclasses with base types in external modules since it is not possible to tell what fields they have
+"""
diff --git a/tests/errors/dataclass_w1_othermod.pxd b/tests/errors/dataclass_w1_othermod.pxd
new file mode 100644
index 000000000..02dddf492
--- /dev/null
+++ b/tests/errors/dataclass_w1_othermod.pxd
@@ -0,0 +1,3 @@
+# Extern class for test "dataclass_w1"
+cdef class SomeBase:
+ pass
diff --git a/tests/errors/e_decorators.pyx b/tests/errors/e_decorators.pyx
deleted file mode 100644
index 33ef2355d..000000000
--- a/tests/errors/e_decorators.pyx
+++ /dev/null
@@ -1,12 +0,0 @@
-# mode: error
-
-class A:
- pass
-
-@A().a
-def f():
- pass
-
-_ERRORS = u"""
-6:4: Expected a newline after decorator
-"""
diff --git a/tests/errors/e_excvalfunctype.pyx b/tests/errors/e_excvalfunctype.pyx
index a1d978322..25cae47c6 100644
--- a/tests/errors/e_excvalfunctype.pyx
+++ b/tests/errors/e_excvalfunctype.pyx
@@ -1,7 +1,7 @@
# mode: error
ctypedef int (*spamfunc)(int, char *) except 42
-ctypedef int (*grailfunc)(int, char *)
+ctypedef int (*grailfunc)(int, char *) noexcept
cdef grailfunc grail
cdef spamfunc spam
diff --git a/tests/errors/e_invalid_special_cython_modules.py b/tests/errors/e_invalid_special_cython_modules.py
new file mode 100644
index 000000000..950df5c1c
--- /dev/null
+++ b/tests/errors/e_invalid_special_cython_modules.py
@@ -0,0 +1,42 @@
+# mode: error
+# tag: pure, import, cimport
+
+# nok
+
+import cython.imports.libc as libc_import
+import cython.cimports.labc as labc_cimport
+
+from cython.imports import libc
+from cython.cimport.libc import math
+from cython.imports.libc import math
+from cython.cimports.labc import math
+
+import cython.paralel
+import cython.parrallel
+
+import cython.dataclass
+
+# ok
+from cython.cimports.libc import math
+from cython.cimports.libc.math import ceil
+
+
+def libc_math_ceil(x):
+ """
+ >>> libc_math_ceil(1.5)
+ [2, 2]
+ """
+ return [int(n) for n in [ceil(x), math.ceil(x)]]
+
+
+_ERRORS = """
+6:7: 'cython.imports.libc' is not a valid cython.* module. Did you mean 'cython.cimports' ?
+7:7: 'labc.pxd' not found
+9:0: 'cython.imports' is not a valid cython.* module. Did you mean 'cython.cimports' ?
+10:0: 'cython.cimport.libc' is not a valid cython.* module. Did you mean 'cython.cimports' ?
+11:0: 'cython.imports.libc' is not a valid cython.* module. Did you mean 'cython.cimports' ?
+12:0: 'labc/math.pxd' not found
+14:7: 'cython.paralel' is not a valid cython.* module. Did you mean 'cython.parallel' ?
+15:7: 'cython.parrallel' is not a valid cython.* module. Did you mean 'cython.parallel' ?
+17:7: 'cython.dataclass' is not a valid cython.* module. Did you mean 'cython.dataclasses' ?
+"""
diff --git a/tests/errors/e_nogilfunctype.pyx b/tests/errors/e_nogilfunctype.pyx
index ccac37b7e..ac06af27e 100644
--- a/tests/errors/e_nogilfunctype.pyx
+++ b/tests/errors/e_nogilfunctype.pyx
@@ -10,7 +10,7 @@ fp = f
fp = <fp_t>f
_ERRORS = u"""
-9:5: Cannot assign type 'void (void)' to 'void (*)(void) nogil'
+9:5: Cannot assign type 'void (void) noexcept' to 'void (*)(void) noexcept nogil'
"""
_WARNINGS = """
diff --git a/tests/errors/e_pure_cimports.pyx b/tests/errors/e_pure_cimports.pyx
index 231a95959..ef81182ad 100644
--- a/tests/errors/e_pure_cimports.pyx
+++ b/tests/errors/e_pure_cimports.pyx
@@ -1,7 +1,7 @@
# mode: error
# tag: pure, import, cimport
-import cython.cimportsy # FIXME: not currently an error?
+import cython.cimportsy
import cython.cimports
import cython.cimports.libc
@@ -20,6 +20,7 @@ from cython.cimports cimport libc
_ERRORS = """
+4:7: 'cython.cimportsy' is not a valid cython.* module. Did you mean 'cython.cimports' ?
6:7: Cannot cimport the 'cython.cimports' package directly, only submodules.
7:7: Python cimports must use 'from cython.cimports... import ...' or 'import ... as ...', not just 'import ...'
8:7: Cannot cimport the 'cython.cimports' package directly, only submodules.
diff --git a/tests/errors/e_relative_cimport.pyx b/tests/errors/e_relative_cimport.pyx
index 36a134411..709cbd71d 100644
--- a/tests/errors/e_relative_cimport.pyx
+++ b/tests/errors/e_relative_cimport.pyx
@@ -9,7 +9,7 @@ from . cimport e_relative_cimport
_ERRORS="""
4:0: relative cimport beyond main package is not allowed
-5:0: relative cimport beyond main package is not allowed
+5:0: relative cimport from non-package directory is not allowed
6:0: relative cimport beyond main package is not allowed
-7:0: relative cimport beyond main package is not allowed
+7:0: relative cimport from non-package directory is not allowed
"""
diff --git a/tests/errors/nogil.pyx b/tests/errors/nogil.pyx
index aa3011d00..dfdebeebd 100644
--- a/tests/errors/nogil.pyx
+++ b/tests/errors/nogil.pyx
@@ -90,7 +90,7 @@ def bare_pyvar_name(object x):
with nogil:
x
-cdef int fstrings(int x, object obj) nogil except -1:
+cdef int fstrings(int x, object obj) except -1 nogil:
f"" # allowed
f"a" # allowed
f"a"f"b" # allowed
diff --git a/tests/errors/nogilfunctype.pyx b/tests/errors/nogilfunctype.pyx
index 91127bee4..c0ca2bb15 100644
--- a/tests/errors/nogilfunctype.pyx
+++ b/tests/errors/nogilfunctype.pyx
@@ -12,5 +12,5 @@ gp = g
fp = f
_ERRORS = u"""
-12:5: Cannot assign type 'void (void)' to 'void (*)(void) nogil'
+12:5: Cannot assign type 'void (void) noexcept' to 'void (*)(void) noexcept nogil'
"""
diff --git a/tests/errors/w_uninitialized.pyx b/tests/errors/w_uninitialized.pyx
index c2046ce19..066f9ed5b 100644
--- a/tests/errors/w_uninitialized.pyx
+++ b/tests/errors/w_uninitialized.pyx
@@ -127,10 +127,10 @@ _ERRORS = """
66:10: local variable 'foo' referenced before assignment
71:14: local variable 'exc' referenced before assignment
71:19: local variable 'msg' referenced before assignment
-78:4: local variable 'decorator' referenced before assignment
+78:5: local variable 'decorator' referenced before assignment
85:16: local variable 'default' referenced before assignment
91:14: local variable 'bar' referenced before assignment
-97:4: local variable 'decorator' referenced before assignment
+97:5: local variable 'decorator' referenced before assignment
104:24: local variable 'Meta' referenced before assignment
110:15: local variable 'args' referenced before assignment
110:23: local variable 'kwargs' referenced before assignment
diff --git a/tests/macos_cpp_bugs.txt b/tests/macos_cpp_bugs.txt
index e5be6475a..e4c4cc608 100644
--- a/tests/macos_cpp_bugs.txt
+++ b/tests/macos_cpp_bugs.txt
@@ -12,3 +12,4 @@ cpp_stl_algo_comparison_ops
cpp_stl_algo_permutation_ops
cpp_stl_algo_sorted_ranges_set_ops
cpp_stl_algo_sorted_ranges_other_ops
+cpp_stl_bit_cpp20
diff --git a/tests/memoryview/cythonarray.pyx b/tests/memoryview/cythonarray.pyx
index 6bfd7397e..15d61d086 100644
--- a/tests/memoryview/cythonarray.pyx
+++ b/tests/memoryview/cythonarray.pyx
@@ -130,7 +130,7 @@ cdef int *getp(int dim1=10, int dim2=10, dim3=1) except NULL:
return p
-cdef void callback_free_data(void *p):
+cdef void callback_free_data(void *p) noexcept:
print 'callback free data called'
free(p)
diff --git a/tests/memoryview/memoryview.pyx b/tests/memoryview/memoryview.pyx
index d2832a0b6..2c5de40b5 100644
--- a/tests/memoryview/memoryview.pyx
+++ b/tests/memoryview/memoryview.pyx
@@ -443,7 +443,9 @@ def type_infer(double[:, :] arg):
@cython.test_fail_if_path_exists("//CoerceToPyTypeNode")
def memview_iter(double[:, :] arg):
"""
- memview_iter(DoubleMockBuffer("C", range(6), (2,3)))
+ >>> memview_iter(DoubleMockBuffer("C", range(6), (2,3)))
+ acquired C
+ released C
True
"""
cdef double total = 0
diff --git a/tests/memoryview/memoryview_acq_count.srctree b/tests/memoryview/memoryview_acq_count.srctree
index e7e6dfc69..3bc2f1cc9 100644
--- a/tests/memoryview/memoryview_acq_count.srctree
+++ b/tests/memoryview/memoryview_acq_count.srctree
@@ -35,7 +35,7 @@ cdef Py_ssize_t i
for i in prange(1000000, nogil=True, num_threads=16):
use_slice(m[::2])
-cdef int use_slice(int[:] m) nogil except -1:
+cdef int use_slice(int[:] m) except -1 nogil:
cdef int[:] m2 = m[1:]
m = m2[:-1]
del m, m2
diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx
index 5f6134135..0de47d9b6 100644
--- a/tests/memoryview/memslice.pyx
+++ b/tests/memoryview/memslice.pyx
@@ -23,6 +23,12 @@ if sys.version_info[0] < 3:
else:
import builtins
+try:
+ from Cython.Tests.this_module_does_not_exist import *
+except ImportError:
+ # Fails, but the existence of "import *" interacted badly with some utility code
+ pass
+
def testcase(func):
@wraps(func)
@@ -1722,7 +1728,7 @@ def test_oob():
print a[:, 20]
-cdef int nogil_oob(int[:, :] a) nogil except 0:
+cdef int nogil_oob(int[:, :] a) except 0 nogil:
a[100, 9:]
return 1
@@ -1766,7 +1772,7 @@ def test_nogil_oob2():
a[100, 9:]
@cython.boundscheck(False)
-cdef int cdef_nogil(int[:, :] a) nogil except 0:
+cdef int cdef_nogil(int[:, :] a) except 0 nogil:
cdef int i, j
cdef int[:, :] b = a[::-1, 3:10:2]
for i in range(b.shape[0]):
@@ -2560,6 +2566,7 @@ def test_const_buffer(const int[:] a):
print(a[0])
print(c[-1])
+
@testcase
def test_loop(int[:] a, throw_exception):
"""
@@ -2582,6 +2589,7 @@ def test_loop(int[:] a, throw_exception):
raise ValueError()
print(sum)
+
@testcase
def test_loop_reassign(int[:] a):
"""
@@ -2594,13 +2602,99 @@ def test_loop_reassign(int[:] a):
3
4
5
- released A
15
+ released A
"""
cdef int sum = 0
for ai in a:
sum += ai
print(ai)
a = None # this should not mess up the loop though!
- # release happens here, when the loop temp is released
print(sum)
+ # release happens in the wrapper function
+
+
+@testcase
+def test_arg_in_closure(int [:] a):
+ """
+ >>> A = IntMockBuffer("A", range(6), shape=(6,))
+ >>> inner = test_arg_in_closure(A)
+ acquired A
+ >>> inner()
+ (0, 1)
+
+ The assignment below is just to avoid printing what was collected
+ >>> del inner; ignore_me = gc.collect()
+ released A
+ """
+ def inner():
+ return (a[0], a[1])
+ return inner
+
+
+cdef arg_in_closure_cdef(int [:] a):
+ def inner():
+ return (a[0], a[1])
+ return inner
+
+def test_arg_in_closure_cdef(a):
+ """
+ >>> A = IntMockBuffer("A", range(6), shape=(6,))
+ >>> inner = test_arg_in_closure_cdef(A)
+ acquired A
+ >>> inner()
+ (0, 1)
+
+ The assignment below is just to avoid printing what was collected
+ >>> del inner; ignore_me = gc.collect()
+ released A
+ """
+ return arg_in_closure_cdef(a)
+
+
+@testcase
+def test_local_in_closure(a):
+ """
+ >>> A = IntMockBuffer("A", range(6), shape=(6,))
+ >>> inner = test_local_in_closure(A)
+ acquired A
+ >>> inner()
+ (0, 1)
+
+ The assignment below is just to avoid printing what was collected
+ >>> del inner; ignore_me = gc.collect()
+ released A
+ """
+ cdef int[:] a_view = a
+ def inner():
+ return (a_view[0], a_view[1])
+ return inner
+
+@testcase
+def test_local_in_generator_expression(a, initialize, execute_now):
+ """
+ >>> A1 = IntMockBuffer("A1", range(6), shape=(6,))
+ >>> A2 = IntMockBuffer("A2", range(6), shape=(6,))
+ >>> test_local_in_generator_expression(A1, initialize=False, execute_now=False) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ ...
+ UnboundLocalError...
+
+ >>> test_local_in_generator_expression(A1, initialize=True, execute_now=True)
+ acquired A1
+ released A1
+ True
+
+ >>> genexp = test_local_in_generator_expression(A2, initialize=True, execute_now=False)
+ acquired A2
+ >>> sum(genexp)
+ released A2
+ 2
+ """
+ cdef int[:] a_view
+ if initialize:
+ a_view = a
+ if execute_now:
+ return any(ai > 3 for ai in a_view)
+ else:
+ return (ai > 3 for ai in a_view)
diff --git a/tests/memoryview/numpy_memoryview.pyx b/tests/memoryview/numpy_memoryview.pyx
index 350e94489..2af6bfea4 100644
--- a/tests/memoryview/numpy_memoryview.pyx
+++ b/tests/memoryview/numpy_memoryview.pyx
@@ -248,7 +248,7 @@ cdef extern from "bufaccess.h":
ctypedef unsigned int td_h_ushort # Defined as unsigned short
ctypedef td_h_short td_h_cy_short
-cdef void dealloc_callback(void *data):
+cdef void dealloc_callback(void *data) noexcept:
print "deallocating..."
def build_numarray(array array):
diff --git a/tests/pypy2_bugs.txt b/tests/pypy2_bugs.txt
index 200f0dcf3..1ac25918f 100644
--- a/tests/pypy2_bugs.txt
+++ b/tests/pypy2_bugs.txt
@@ -16,8 +16,9 @@ run.partial_circular_import
# https://foss.heptapod.net/pypy/pypy/issues/3185
run.language_level
run.pure_pxd
+compile.pxd_mangling_names
-# Silly error with doctest matching slightly different string outputs rather than
+# Silly error with doctest matching slightly different string outputs rather than
# an actual bug but one I can't easily resolve
run.with_gil
diff --git a/tests/run/annotate_html.pyx b/tests/run/annotate_html.pyx
index 3db7bf190..e98891b4f 100644
--- a/tests/run/annotate_html.pyx
+++ b/tests/run/annotate_html.pyx
@@ -1,3 +1,6 @@
+# cython: test_assert_c_code_has = Generated by Cython
+# cython: test_assert_c_code_has = goto __pyx_L0;\n
+
"""
>>> from codecs import open
>>> import os.path as os_path
diff --git a/tests/run/annotation_typing.pyx b/tests/run/annotation_typing.pyx
index ce74ef1dd..4b6b2da16 100644
--- a/tests/run/annotation_typing.pyx
+++ b/tests/run/annotation_typing.pyx
@@ -14,10 +14,10 @@ except ImportError:
def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'type': 'long int'} = 4) -> list:
"""
>>> old_dict_syntax([1])
- ('list object', 'int object', 'long', 'long')
+ ('list object', 'Python object', 'long', 'long')
[1, 2, 3, 4]
>>> old_dict_syntax([1], 3)
- ('list object', 'int object', 'long', 'long')
+ ('list object', 'Python object', 'long', 'long')
[1, 3, 3, 4]
>>> old_dict_syntax(123)
Traceback (most recent call last):
@@ -36,13 +36,13 @@ def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'ty
def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None, o: Optional[tuple] = ()) -> list:
"""
>>> pytypes_def([1])
- ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object')
+ ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object')
[1, 2, 3, 4.0, None, ()]
>>> pytypes_def([1], 3)
- ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object')
+ ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object')
[1, 3, 3, 4.0, None, ()]
>>> pytypes_def([1], 3, 2, 1, [], None)
- ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object')
+ ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object')
[1, 3, 2, 1.0, [], None]
>>> pytypes_def(123)
Traceback (most recent call last):
@@ -63,13 +63,13 @@ def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None
cpdef pytypes_cpdef(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None, o: Optional[tuple] = ()):
"""
>>> pytypes_cpdef([1])
- ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object')
+ ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object')
[1, 2, 3, 4.0, None, ()]
>>> pytypes_cpdef([1], 3)
- ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object')
+ ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object')
[1, 3, 3, 4.0, None, ()]
>>> pytypes_cpdef([1], 3, 2, 1, [], None)
- ('list object', 'int object', 'Python object', 'double', 'list object', 'tuple object')
+ ('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object')
[1, 3, 2, 1.0, [], None]
>>> pytypes_cpdef(123)
Traceback (most recent call last):
@@ -99,10 +99,10 @@ cdef c_pytypes_cdef(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list =
def pytypes_cdef(a, b=2, c=3, d=4):
"""
>>> pytypes_cdef([1])
- ('list object', 'int object', 'Python object', 'double', 'list object')
+ ('list object', 'Python object', 'Python object', 'double', 'list object')
[1, 2, 3, 4.0, None]
>>> pytypes_cdef([1], 3)
- ('list object', 'int object', 'Python object', 'double', 'list object')
+ ('list object', 'Python object', 'Python object', 'double', 'list object')
[1, 3, 3, 4.0, None]
>>> pytypes_cdef(123) # doctest: +ELLIPSIS
Traceback (most recent call last):
@@ -111,6 +111,15 @@ def pytypes_cdef(a, b=2, c=3, d=4):
return c_pytypes_cdef(a, b, c, d)
+def pyint(a: int):
+ """
+ >>> large_int = eval('0x'+'F'*64) # definitely bigger than C int64
+ >>> pyint(large_int) == large_int
+ True
+ """
+ return a
+
+
def ctypes_def(a: list, b: cython.int = 2, c: cython.long = 3, d: cython.float = 4) -> list:
"""
>>> ctypes_def([1])
@@ -372,14 +381,14 @@ _WARNINGS = """
63:70: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.
90:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?
90:70: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.
-274:44: Unknown type declaration in annotation, ignoring
-302:15: Annotation ignored since class-level attributes must be Python objects. Were you trying to set up an instance attribute?
+283:44: Unknown type declaration in annotation, ignoring
+311:15: Annotation ignored since class-level attributes must be Python objects. Were you trying to set up an instance attribute?
# DUPLICATE:
63:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?
# BUG:
63:6: 'pytypes_cpdef' redeclared
-146:0: 'struct_io' redeclared
-181:0: 'struct_convert' redeclared
-200:0: 'exception_default' redeclared
-231:0: 'exception_default_uint' redeclared
+155:0: 'struct_io' redeclared
+190:0: 'struct_convert' redeclared
+209:0: 'exception_default' redeclared
+240:0: 'exception_default_uint' redeclared
"""
diff --git a/tests/run/binop_reverse_methods_GH2056.pyx b/tests/run/binop_reverse_methods_GH2056.pyx
index 4938f0d15..43bfcde86 100644
--- a/tests/run/binop_reverse_methods_GH2056.pyx
+++ b/tests/run/binop_reverse_methods_GH2056.pyx
@@ -30,6 +30,12 @@ class Base(object):
'Base.__rpow__(Base(), 2, None)'
>>> pow(Base(), 2, 100)
'Base.__pow__(Base(), 2, 100)'
+ >>> Base() // 1
+ True
+ >>> set() // Base()
+ True
+
+ # version dependent tests for @ and / are external
"""
implemented: cython.bint
@@ -67,6 +73,44 @@ class Base(object):
def __repr__(self):
return "%s()" % (self.__class__.__name__)
+ # The following methods were missed from the initial implementation
+ # that typed 'self'. These tests are a quick test to confirm that
+ # but not the full binop behaviour
+ def __matmul__(self, other):
+ return cython.typeof(self) == 'Base'
+
+ def __rmatmul__(self, other):
+ return cython.typeof(self) == 'Base'
+
+ def __truediv__(self, other):
+ return cython.typeof(self) == 'Base'
+
+ def __rtruediv__(self, other):
+ return cython.typeof(self) == 'Base'
+
+ def __floordiv__(self, other):
+ return cython.typeof(self) == 'Base'
+
+ def __rfloordiv__(self, other):
+ return cython.typeof(self) == 'Base'
+
+
+if sys.version_info >= (3, 5):
+ __doc__ += """
+ >>> Base() @ 1
+ True
+ >>> set() @ Base()
+ True
+ """
+
+if sys.version_info >= (3, 0):
+ __doc__ += """
+ >>> Base() / 1
+ True
+ >>> set() / Base()
+ True
+ """
+
@cython.c_api_binop_methods(False)
@cython.cclass
diff --git a/tests/run/builtin_abs.pyx b/tests/run/builtin_abs.pyx
index 59f3a93c4..e0b31b7e1 100644
--- a/tests/run/builtin_abs.pyx
+++ b/tests/run/builtin_abs.pyx
@@ -63,7 +63,7 @@ def int_abs(int a):
@cython.overflowcheck(True)
@cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']",
"//ReturnStatNode//NameNode[@entry.cname = 'abs']")
-cdef int c_int_abs(int a) nogil except *:
+cdef int c_int_abs(int a) except * nogil:
return abs(a)
def test_c_int_abs(int a):
@@ -125,7 +125,7 @@ def long_abs(long a):
@cython.overflowcheck(True)
@cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']",
"//ReturnStatNode//NameNode[@entry.cname = 'labs']")
-cdef long c_long_abs(long a) nogil except *:
+cdef long c_long_abs(long a) except * nogil:
return abs(a)
def test_c_long_abs(long a):
@@ -189,7 +189,7 @@ def long_long_abs(long long a):
@cython.overflowcheck(True)
@cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']",
"//ReturnStatNode//NameNode[@entry.cname = '__Pyx_abs_longlong']")
-cdef long long c_long_long_abs(long long a) nogil except *:
+cdef long long c_long_long_abs(long long a) except * nogil:
return abs(a)
def test_c_long_long_abs(long long a):
diff --git a/tests/run/c_file_validation.srctree b/tests/run/c_file_validation.srctree
new file mode 100644
index 000000000..cceb014ac
--- /dev/null
+++ b/tests/run/c_file_validation.srctree
@@ -0,0 +1,72 @@
+"""
+PYTHON run_test.py
+"""
+
+######## run_test.py ########
+
+import os
+from collections import defaultdict
+from os.path import basename, splitext
+
+from Cython.Compiler.Options import CompilationOptions
+from Cython.Compiler.Main import compile as cython_compile
+from Cython.Compiler.Options import default_options
+
+
+def validate_file(filename):
+ module_name = basename(filename)
+ c_file = splitext(filename)[0] + '.c'
+
+ options = CompilationOptions(
+ default_options,
+ language_level="3",
+ evaluate_tree_assertions=True,
+ )
+ result = cython_compile(filename, options=options)
+ return result.num_errors
+
+
+counts = defaultdict(int)
+failed = False
+
+for filename in sorted(os.listdir(".")):
+ if "run_test" in filename:
+ continue
+
+ print("Testing '%s'" % filename)
+ num_errors = validate_file(filename)
+ print(num_errors, filename)
+ counts[num_errors] += 1
+
+ if '_ok' in filename:
+ if num_errors > 0:
+ failed = True
+ print("ERROR: Compilation failed: %s (%s errors)" % (filename, num_errors))
+ else:
+ if num_errors == 0:
+ failed = True
+ print("ERROR: Expected failure, but compilation succeeded: %s" % filename)
+
+assert counts == {0: 2, 1: 2}, counts
+assert not failed
+
+
+######## assert_ok.py ########
+
+# cython: test_assert_c_code_has = Generated by Cython
+# cython: test_assert_c_code_has = CYTHON_HEX_VERSION
+
+
+######## assert_missing.py ########
+
+# cython: test_assert_c_code_has = Generated by Python
+
+
+######## fail_if_ok.py ########
+
+# cython: test_fail_if_c_code_has = Generated by Python
+
+
+######## fail_if_found.py ########
+
+# cython: test_fail_if_c_code_has = Generated by Cython
diff --git a/tests/run/cdef_class_dataclass.pyx b/tests/run/cdef_class_dataclass.pyx
index 2f69e0f8f..7be88f695 100644
--- a/tests/run/cdef_class_dataclass.pyx
+++ b/tests/run/cdef_class_dataclass.pyx
@@ -224,6 +224,10 @@ cdef class TestFrozen:
"""
a: cython.double = 2.0
+def get_dataclass_initvar():
+ return py_dataclasses.InitVar
+
+
@dataclass(kw_only=True)
cdef class TestKwOnly:
"""
@@ -251,10 +255,11 @@ cdef class TestKwOnly:
a: cython.double = 2.0
b: cython.long
+
import sys
if sys.version_info >= (3, 7):
__doc__ = """
- >>> from dataclasses import Field, is_dataclass, fields
+ >>> from dataclasses import Field, is_dataclass, fields, InitVar
# It uses the types from the standard library where available
>>> all(isinstance(v, Field) for v in BasicDataclass.__dataclass_fields__.values())
@@ -275,4 +280,6 @@ if sys.version_info >= (3, 7):
['a', 'b', 'c', 'd']
>>> [ f.name for f in fields(InitClassVars)]
['a']
+ >>> get_dataclass_initvar() == InitVar
+ True
"""
diff --git a/tests/run/cfunc_convert.pyx b/tests/run/cfunc_convert.pyx
index 6db0765d4..89e09ea36 100644
--- a/tests/run/cfunc_convert.pyx
+++ b/tests/run/cfunc_convert.pyx
@@ -74,7 +74,7 @@ def test_global():
>>> global_csqrt.__doc__
'wrap(x: float) -> float'
>>> test_global()
- double (double) nogil
+ double (double) noexcept nogil
Python object
"""
print cython.typeof(sqrt)
@@ -266,3 +266,28 @@ def make_map():
"f2": cfunc_dup_f2,
}
return map
+
+
+cdef class HasCdefFunc:
+ cdef int x
+ def __init__(self, x):
+ self.x = x
+
+ cdef int func(self, int y):
+ return self.x + y
+
+def test_unbound_methods():
+ """
+ >>> f = test_unbound_methods()
+ >>> f(HasCdefFunc(1), 2)
+ 3
+ """
+ return HasCdefFunc.func
+
+def test_bound_methods():
+ """
+ >>> f = test_bound_methods()
+ >>> f(2)
+ 3
+ """
+ return HasCdefFunc(1).func
diff --git a/tests/run/complex_numbers_T305.pyx b/tests/run/complex_numbers_T305.pyx
index acbc0a5fa..8ca98ff50 100644
--- a/tests/run/complex_numbers_T305.pyx
+++ b/tests/run/complex_numbers_T305.pyx
@@ -80,6 +80,8 @@ def test_pow(double complex z, double complex w, tol=None):
True
>>> test_pow(-0.5, 1j, tol=1e-15)
True
+ >>> test_pow(-1, 0.5, tol=1e-15)
+ True
"""
if tol is None:
return z**w
@@ -264,3 +266,87 @@ cpdef double complex complex_retval():
1j
"""
return 1j
+
+def stress_test():
+ """
+ Run the main operations on 1000 pseudo-random numbers to
+ try to spot anything accidentally missed from the test cases
+ (doesn't cover inf and NaN as inputs though)
+ >>> stress_test()
+ """
+ cdef double complex x
+ cdef double complex y
+
+ from random import Random
+ from math import ldexp
+ r = Random()
+ r.seed("I'm a seed") # try to make the test somewhat reproducible
+
+ # copied from https://docs.python.org/3/library/random.html#recipes
+ # gets evenly distributed random numbers
+ def full_random():
+ mantissa = 0x10_0000_0000_0000 | r.getrandbits(52)
+ exponent = -53
+ x = 0
+ while not x:
+ x = r.getrandbits(32)
+ exponent += x.bit_length() - 32
+ return ldexp(mantissa, exponent)
+
+ for n in range(1, 1001):
+ if n % 50 == 0:
+ # strategically insert some 0 values
+ a = 0
+ else:
+ a = full_random()
+ if n % 51 == 0:
+ b = 0
+ else:
+ b = full_random()
+ if n % 52 == 0:
+ c = 0
+ else:
+ c = full_random()
+ if n % 53 == 0:
+ d = 0
+ else:
+ d = full_random()
+
+ x= a+1j*b
+ y = c+1j*d
+ py_dict = dict(x=x, y=y)
+
+ sum_ = x+y
+ sum_py = eval("x+y", py_dict)
+ delta_sum = abs(sum_/sum_py - 1)
+ assert delta_sum < 1e-15, f"{x} {y} {sum_} {sum_py} {delta_sum}"
+
+ minus = x-y
+ minus_py = eval("x-y", py_dict)
+ delta_minus = abs(minus/minus_py - 1)
+ assert delta_minus < 1e-15, f"{x} {y} {minus} {minus_py} {delta_minus}"
+
+ times = x*y
+ times_py = eval("x*y", py_dict)
+ delta_times = abs(times/times_py - 1)
+ assert delta_times < 1e-15, f"{x} {y} {times} {times_py} {delta_times}"
+
+ divide = x/y
+ divide_py = eval("x/y", py_dict)
+ delta_divide = abs(divide/divide_py - 1)
+ assert delta_divide < 1e-15, f"{x} {y} {divide} {divide_py} {delta_divide}"
+
+ divide2 = y/x
+ divide2_py = eval("y/x", py_dict)
+ delta_divide2 = abs(divide2/divide2_py - 1)
+ assert delta_divide2 < 1e-15, f"{x} {y} {divide2} {divide2_py} {delta_divide2}"
+
+ pow_ = x**y
+ pow_py = eval("x**y", py_dict)
+ delta_pow = abs(pow_/pow_py - 1)
+ assert delta_pow < 1e-15, f"{x} {y} {pow_} {pow_py} {delta_pow}"
+
+ pow2 = y**x
+ pow2_py = eval("y**x", py_dict)
+ delta_pow2 = abs(pow2/pow2_py - 1)
+ assert delta_pow2 < 1e-15, f"{x} {y} {pow2} {pow2_py} {delta_pow2}"
diff --git a/tests/run/coverage_cmd_src_pkg_layout.srctree b/tests/run/coverage_cmd_src_pkg_layout.srctree
new file mode 100644
index 000000000..e2c58691a
--- /dev/null
+++ b/tests/run/coverage_cmd_src_pkg_layout.srctree
@@ -0,0 +1,177 @@
+# mode: run
+# tag: coverage,trace
+
+"""
+PYTHON -m pip install .
+PYTHON setup.py build_ext --inplace
+PYTHON -m coverage run --source=pkg coverage_test.py
+PYTHON collect_coverage.py
+"""
+
+######## setup.py ########
+
+from setuptools import Extension, find_packages, setup
+from Cython.Build import cythonize
+
+MODULES = [
+ Extension("pkg.module1", ["src/pkg/module1.pyx"]),
+ ]
+
+setup(
+ name="pkg",
+ zip_safe=False,
+ packages=find_packages('src'),
+ package_data={'pkg': ['*.pxd', '*.pyx']},
+ package_dir={'': 'src'},
+ ext_modules= cythonize(MODULES)
+ )
+
+
+######## .coveragerc ########
+[run]
+plugins = Cython.Coverage
+
+######## src/pkg/__init__.py ########
+
+######## src/pkg/module1.pyx ########
+# cython: linetrace=True
+# distutils: define_macros=CYTHON_TRACE=1
+
+def func1(int a, int b):
+ cdef int x = 1 # 5
+ c = func2(a) + b # 6
+ return x + c # 7
+
+
+def func2(int a):
+ return a * 2 # 11
+
+######## coverage_test.py ########
+
+import os.path
+from pkg import module1
+
+
+assert not any(
+ module1.__file__.endswith(ext)
+ for ext in '.py .pyc .pyo .pyw .pyx .pxi'.split()
+), module1.__file__
+
+
+def run_coverage(module):
+ assert module.func1(1, 2) == (1 * 2) + 2 + 1
+ assert module.func2(2) == 2 * 2
+
+
+if __name__ == '__main__':
+ run_coverage(module1)
+
+
+######## collect_coverage.py ########
+
+import re
+import sys
+import os
+import os.path
+import subprocess
+from glob import iglob
+
+
+def run_coverage_command(*command):
+ env = dict(os.environ, LANG='', LC_ALL='C')
+ process = subprocess.Popen(
+ [sys.executable, '-m', 'coverage'] + list(command),
+ stdout=subprocess.PIPE, env=env)
+ stdout, _ = process.communicate()
+ return stdout
+
+
+def run_report():
+ stdout = run_coverage_command('report', '--show-missing')
+ stdout = stdout.decode('iso8859-1') # 'safe' decoding
+ lines = stdout.splitlines()
+ print(stdout)
+
+ module_path = 'module1.pyx'
+ assert any(module_path in line for line in lines), (
+ "'%s' not found in coverage report:\n\n%s" % (module_path, stdout))
+
+ files = {}
+ line_iter = iter(lines)
+ for line in line_iter:
+ if line.startswith('---'):
+ break
+ extend = [''] * 2
+ for line in line_iter:
+ if not line or line.startswith('---'):
+ continue
+ name, statements, missed, covered, _missing = (line.split(None, 4) + extend)[:5]
+ missing = []
+ for start, end in re.findall('([0-9]+)(?:-([0-9]+))?', _missing):
+ if end:
+ missing.extend(range(int(start), int(end)+1))
+ else:
+ missing.append(int(start))
+ files[os.path.basename(name)] = (statements, missed, covered, missing)
+ assert 5 not in files[module_path][-1], files[module_path]
+ assert 6 not in files[module_path][-1], files[module_path]
+ assert 7 not in files[module_path][-1], files[module_path]
+ assert 11 not in files[module_path][-1], files[module_path]
+
+
+def run_xml_report():
+ stdout = run_coverage_command('xml', '-o', '-')
+ print(stdout)
+
+ import xml.etree.ElementTree as etree
+ data = etree.fromstring(stdout)
+
+ files = {}
+ for module in data.iterfind('.//class'):
+ files[module.get('filename').replace('\\', '/')] = dict(
+ (int(line.get('number')), int(line.get('hits')))
+ for line in module.findall('lines/line')
+ )
+
+ module_path = 'src/pkg/module1.pyx'
+
+ assert files[module_path][5] > 0, files[module_path]
+ assert files[module_path][6] > 0, files[module_path]
+ assert files[module_path][7] > 0, files[module_path]
+ assert files[module_path][11] > 0, files[module_path]
+
+
+def run_html_report():
+ from collections import defaultdict
+
+ stdout = run_coverage_command('html', '-d', 'html')
+ # coverage 6.1+ changed the order of the attributes => need to parse them separately
+ _parse_id = re.compile(r'id=["\'][^0-9"\']*(?P<id>[0-9]+)[^0-9"\']*["\']').search
+ _parse_state = re.compile(r'class=["\'][^"\']*(?P<state>mis|run|exc)[^"\']*["\']').search
+
+ files = {}
+ for file_path in iglob('html/*.html'):
+ with open(file_path) as f:
+ page = f.read()
+ report = defaultdict(set)
+ for line in re.split(r'id=["\']source["\']', page)[-1].splitlines():
+ lineno = _parse_id(line)
+ state = _parse_state(line)
+ if not lineno or not state:
+ continue
+ report[state.group('state')].add(int(lineno.group('id')))
+ files[file_path] = report
+
+ file_report = [data for path, data in files.items() if 'module1' in path][0]
+ executed, missing = file_report["run"], file_report["mis"]
+ assert executed
+ assert 5 in executed, executed
+ assert 6 in executed, executed
+ assert 7 in executed, executed
+ assert 11 in executed, executed
+
+
+if __name__ == '__main__':
+ run_report()
+ run_xml_report()
+ run_html_report()
diff --git a/tests/run/cpdef_void_return.pyx b/tests/run/cpdef_void_return.pyx
index e15448505..7943c3466 100644
--- a/tests/run/cpdef_void_return.pyx
+++ b/tests/run/cpdef_void_return.pyx
@@ -1,4 +1,4 @@
-cpdef void unraisable():
+cpdef void unraisable() noexcept:
"""
>>> unraisable()
here
diff --git a/tests/run/cpp_classes.pyx b/tests/run/cpp_classes.pyx
index d2babdac3..1a1110b91 100644
--- a/tests/run/cpp_classes.pyx
+++ b/tests/run/cpp_classes.pyx
@@ -9,7 +9,7 @@ cdef extern from "shapes.h" namespace "shapes":
float area()
cdef cppclass Ellipse(Shape):
- Ellipse(int a, int b) nogil except +
+ Ellipse(int a, int b) except + nogil
cdef cppclass Circle(Ellipse):
int radius
diff --git a/tests/run/cpp_classes_def.pyx b/tests/run/cpp_classes_def.pyx
index e36fc4fbd..855de7051 100644
--- a/tests/run/cpp_classes_def.pyx
+++ b/tests/run/cpp_classes_def.pyx
@@ -21,7 +21,7 @@ cdef cppclass RegularPolygon(Shape):
__init__(int n, float radius):
this.n = n
this.radius = radius
- float area() const:
+ float area() noexcept const:
cdef double theta = pi / this.n
return this.radius * this.radius * sin(theta) * cos(theta) * this.n
void do_with() except *:
diff --git a/tests/run/cpp_exceptions_nogil.pyx b/tests/run/cpp_exceptions_nogil.pyx
index 1d21d40f9..5c6315323 100644
--- a/tests/run/cpp_exceptions_nogil.pyx
+++ b/tests/run/cpp_exceptions_nogil.pyx
@@ -9,7 +9,7 @@ cdef extern from "cpp_exceptions_nogil_helper.h" nogil:
cdef void bar "foo"(int i) except +ValueError
cdef void spam"foo"(int i) except +raise_TypeError
-cdef int foo_nogil(int i) nogil except *:
+cdef int foo_nogil(int i) except * nogil:
foo(i)
def test_foo_nogil():
diff --git a/tests/run/cpp_extern.srctree b/tests/run/cpp_extern.srctree
new file mode 100644
index 000000000..d2c11bb5f
--- /dev/null
+++ b/tests/run/cpp_extern.srctree
@@ -0,0 +1,151 @@
+# mode: run
+# tag: cpp
+# ticket: 1839
+
+"""
+PYTHON setup.py build_ext --inplace
+PYTHON -c "from foo import test; test()"
+PYTHON -c "from bar import test; test()"
+PYTHON -c "from baz import test; test()"
+"""
+
+######## setup.py ########
+
+from Cython.Build import cythonize
+from Cython.Distutils.extension import Extension
+from distutils.core import setup
+
+foo = Extension(
+ "foo",
+ ["foo.pyx", "foo1.cpp", "foo2.cpp"],
+)
+bar = Extension(
+ "bar",
+ ["bar.pyx", "bar1.c", "bar2.cpp"],
+)
+baz = Extension(
+ "baz",
+ ["baz.pyx", "baz1.c", "baz2.cpp"],
+ define_macros = [("__PYX_EXTERN_C", 'extern "C"')],
+)
+
+setup(
+ ext_modules=cythonize([foo, bar, baz]),
+)
+
+######## foo.pyx ########
+
+# distutils: language = c++
+
+from libcpp cimport vector
+
+cdef public vector.vector[int] get_vector():
+ return [1,2,3]
+
+cdef extern from "foo_header.h":
+ cdef size_t size_vector1()
+ cdef size_t size_vector2()
+
+def test():
+ assert size_vector1() == 3
+ assert size_vector2() == 3
+
+######## foo_header.h ########
+
+size_t size_vector1();
+size_t size_vector2();
+
+######## foo1.cpp ########
+
+#include <vector>
+#include "foo.h"
+
+size_t size_vector1() {
+ return get_vector().size();
+}
+
+######## foo2.cpp ########
+
+#include <vector>
+extern "C" {
+// #include within `extern "C"` is legal.
+// We want to make sure here that Cython C++ functions are flagged as `extern "C++"`.
+// Otherwise they would be interpreted with C-linkage if the header is included within an `extern "C"` block.
+#include "foo.h"
+}
+
+size_t size_vector2() {
+ return get_vector().size();
+}
+
+######## bar.pyx ########
+
+cdef public char get_char():
+ return 42
+
+cdef extern from "bar_header.h":
+ cdef int get_int1()
+ cdef int get_int2()
+
+def test():
+ assert get_int1() == 42
+ assert get_int2() == 42
+
+######## bar_header.h ########
+
+int get_int1();
+int get_int2();
+
+######## bar1.c ########
+
+#include "bar.h"
+
+int get_int1() { return (int)get_char(); }
+
+######## bar2.cpp ########
+
+extern "C" {
+#include "bar.h"
+}
+
+extern "C" int get_int2() { return (int)get_char(); }
+
+######## baz.pyx ########
+
+# distutils: language = c++
+
+cdef public char get_char():
+ return 42
+
+cdef extern from "baz_header.h":
+ cdef int get_int1()
+ cdef int get_int2()
+
+def test():
+ assert get_int1() == 42
+ assert get_int2() == 42
+
+######## baz_header.h ########
+
+#ifdef __cplusplus
+ #define BAZ_EXTERN_C extern "C"
+#else
+ #define BAZ_EXTERN_C
+#endif
+
+BAZ_EXTERN_C int get_int1();
+int get_int2();
+
+######## baz1.c ########
+
+#undef __PYX_EXTERN_C
+#define __PYX_EXTERN_C
+#include "baz.h"
+
+int get_int1() { return (int)get_char(); }
+
+######## baz2.cpp ########
+
+#include "baz.h"
+
+int get_int2() { return (int)get_char(); }
diff --git a/tests/run/cpp_function_lib.pxd b/tests/run/cpp_function_lib.pxd
index 2a5d72886..ba6694cb9 100644
--- a/tests/run/cpp_function_lib.pxd
+++ b/tests/run/cpp_function_lib.pxd
@@ -13,7 +13,7 @@ cdef extern from "cpp_function_lib.h":
double call "operator()"(double a, int b)
cdef cppclass FunctionKeeper:
- FunctionKeeper(function[double(double, int)] user_function)
- void set_function(function[double(double, int)] user_function)
- function[double(double, int)] get_function()
+ FunctionKeeper(function[double(double, int) noexcept] user_function)
+ void set_function(function[double(double, int) noexcept] user_function)
+ function[double(double, int) noexcept] get_function()
double call_function(double a, int b) except +
diff --git a/tests/run/cpp_iterators.pyx b/tests/run/cpp_iterators.pyx
index 850632581..81048d0b3 100644
--- a/tests/run/cpp_iterators.pyx
+++ b/tests/run/cpp_iterators.pyx
@@ -2,6 +2,10 @@
# tag: cpp, werror, no-cpp-locals
from libcpp.deque cimport deque
+from libcpp.list cimport list as stdlist
+from libcpp.map cimport map as stdmap
+from libcpp.set cimport set as stdset
+from libcpp.string cimport string
from libcpp.vector cimport vector
from cython.operator cimport dereference as deref
@@ -10,6 +14,11 @@ cdef extern from "cpp_iterators_simple.h":
DoublePointerIter(double* start, int len)
double* begin()
double* end()
+ cdef cppclass DoublePointerIterDefaultConstructible:
+ DoublePointerIterDefaultConstructible()
+ DoublePointerIterDefaultConstructible(double* start, int len)
+ double* begin()
+ double* end()
def test_vector(py_v):
"""
@@ -98,6 +107,35 @@ def test_custom():
finally:
del iter
+def test_custom_deref():
+ """
+ >>> test_custom_deref()
+ [1.0, 2.0, 3.0]
+ """
+ cdef double* values = [1, 2, 3]
+ cdef DoublePointerIter* iter
+ try:
+ iter = new DoublePointerIter(values, 3)
+ return [x for x in deref(iter)]
+ finally:
+ del iter
+
+def test_custom_genexp():
+ """
+ >>> test_custom_genexp()
+ [1.0, 2.0, 3.0]
+ """
+ def to_list(g): # function to hide the intent to avoid inlined-generator expression optimization
+ return list(g)
+ cdef double* values = [1, 2, 3]
+ cdef DoublePointerIterDefaultConstructible* iter
+ try:
+ iter = new DoublePointerIterDefaultConstructible(values, 3)
+ # TODO: Only needs to copy once - currently copies twice
+ return to_list(x for x in iter[0])
+ finally:
+ del iter
+
def test_iteration_over_heap_vector(L):
"""
>>> test_iteration_over_heap_vector([1,2])
@@ -201,3 +239,119 @@ def test_const_iterator_calculations(py_v):
first == clast,
last == cfirst
]
+
+cdef extern from "cpp_iterators_over_attribute_of_rvalue_support.h":
+ cdef cppclass HasIterableAttribute:
+ vector[int] vec
+ HasIterableAttribute()
+ HasIterableAttribute(vector[int])
+
+cdef HasIterableAttribute get_object_with_iterable_attribute():
+ return HasIterableAttribute()
+
+def test_iteration_over_attribute_of_call():
+ """
+ >>> test_iteration_over_attribute_of_call()
+ 1
+ 2
+ 3
+ 42
+ 43
+ 44
+ 1
+ 2
+ 3
+ """
+ for i in HasIterableAttribute().vec:
+ print(i)
+ cdef vector[int] vec
+ for i in range(42, 45):
+ vec.push_back(i)
+ for i in HasIterableAttribute(vec).vec:
+ print(i)
+ for i in get_object_with_iterable_attribute().vec:
+ print(i)
+
+def test_iteration_over_reversed_list(py_v):
+ """
+ >>> test_iteration_over_reversed_list([2, 4, 6])
+ 6
+ 4
+ 2
+ """
+ cdef stdlist[int] lint
+ for e in py_v:
+ lint.push_back(e)
+ for e in reversed(lint):
+ print(e)
+
+def test_iteration_over_reversed_map(py_v):
+ """
+ >>> test_iteration_over_reversed_map([(1, 10), (2, 20), (3, 30)])
+ 3 30
+ 2 20
+ 1 10
+ """
+ cdef stdmap[int, int] m
+ for k, v in py_v:
+ m[k] = v
+ for k, v in reversed(m):
+ print("%s %s" % (k, v))
+
+def test_iteration_over_reversed_set(py_v):
+ """
+ >>> test_iteration_over_reversed_set([1, 2, 3])
+ 3
+ 2
+ 1
+ """
+ cdef stdset[int] s
+ for e in py_v:
+ s.insert(e)
+ for e in reversed(s):
+ print(e)
+
+def test_iteration_over_reversed_string():
+ """
+ >>> test_iteration_over_reversed_string()
+ n
+ o
+ h
+ t
+ y
+ c
+ """
+ cdef string cppstr = "cython"
+ for c in reversed(cppstr):
+ print(chr(c))
+
+def test_iteration_over_reversed_vector(py_v):
+ """
+ >>> test_iteration_over_reversed_vector([1, 2, 3])
+ 3
+ 2
+ 1
+ """
+ cdef vector[int] vint
+ for e in py_v:
+ vint.push_back(e)
+ for e in reversed(vint):
+ print(e)
+
+def test_non_built_in_reversed_function(py_v):
+ """
+ >>> test_non_built_in_reversed_function([1, 3, 5])
+ Non-built-in reversed called.
+ 5
+ 3
+ 1
+ """
+ def reversed(arg):
+ print("Non-built-in reversed called.")
+ return arg[::-1]
+
+ cdef vector[int] vint
+ for e in py_v:
+ vint.push_back(e)
+ for e in reversed(vint):
+ print(e)
diff --git a/tests/run/cpp_iterators_over_attribute_of_rvalue_support.h b/tests/run/cpp_iterators_over_attribute_of_rvalue_support.h
new file mode 100644
index 000000000..b4a10b5be
--- /dev/null
+++ b/tests/run/cpp_iterators_over_attribute_of_rvalue_support.h
@@ -0,0 +1,11 @@
+#include <vector>
+
+class HasIterableAttribute {
+public:
+ std::vector<int> vec;
+ HasIterableAttribute() {
+ for (int i = 1; i<=3; i++)
+ vec.push_back(i);
+ }
+ HasIterableAttribute(std::vector<int> vec) : vec(vec) {}
+};
diff --git a/tests/run/cpp_iterators_simple.h b/tests/run/cpp_iterators_simple.h
index 3a4b50e3c..8373237d8 100644
--- a/tests/run/cpp_iterators_simple.h
+++ b/tests/run/cpp_iterators_simple.h
@@ -8,3 +8,14 @@ private:
int len_;
};
+class DoublePointerIterDefaultConstructible: public DoublePointerIter {
+ // an alternate version that is default-constructible
+public:
+ DoublePointerIterDefaultConstructible() :
+ DoublePointerIter(0, 0)
+ {}
+ DoublePointerIterDefaultConstructible(double* start, int len) :
+ DoublePointerIter(start, len)
+ {}
+
+};
diff --git a/tests/run/cpp_locals_directive.pyx b/tests/run/cpp_locals_directive.pyx
index 6c9c89ba5..359ae0b10 100644
--- a/tests/run/cpp_locals_directive.pyx
+++ b/tests/run/cpp_locals_directive.pyx
@@ -19,13 +19,9 @@ cdef extern from *:
C(C&& rhs) : x(rhs.x), print_destructor(rhs.print_destructor) {
rhs.print_destructor = false; // moved-from instances are deleted silently
}
- C& operator=(C&& rhs) {
- x=rhs.x;
- print_destructor=rhs.print_destructor;
- rhs.print_destructor = false; // moved-from instances are deleted silently
- return *this;
- }
- C(const C& rhs) = default;
+ // also test that we don't require the assignment operator
+ C& operator=(C&& rhs) = delete;
+ C(const C& rhs) = delete;
C& operator=(const C& rhs) = default;
~C() {
if (print_destructor) print_C_destructor();
diff --git a/tests/run/cpp_nested_classes.pyx b/tests/run/cpp_nested_classes.pyx
index b50f79936..8877c0440 100644
--- a/tests/run/cpp_nested_classes.pyx
+++ b/tests/run/cpp_nested_classes.pyx
@@ -25,6 +25,22 @@ cdef extern from "cpp_nested_classes_support.h":
cdef cppclass SpecializedTypedClass(TypedClass[double]):
pass
+cdef cppclass AA:
+ cppclass BB:
+ int square(int x):
+ return x * x
+ cppclass CC:
+ int cube(int x):
+ return x * x * x
+ BB* createB():
+ return new BB()
+ ctypedef int my_int
+ @staticmethod
+ my_int negate(my_int x):
+ return -x
+
+cdef cppclass DD(AA):
+ ctypedef int my_other_int
ctypedef A AliasA1
ctypedef AliasA1 AliasA2
@@ -44,6 +60,27 @@ def test_nested_classes():
assert b_ptr.square(4) == 16
del b_ptr
+def test_nested_defined_classes():
+ """
+ >>> test_nested_defined_classes()
+ """
+ cdef AA a
+ cdef AA.BB b
+ assert b.square(3) == 9
+ cdef AA.BB.CC c
+ assert c.cube(3) == 27
+
+ cdef AA.BB *b_ptr = a.createB()
+ assert b_ptr.square(4) == 16
+ del b_ptr
+
+def test_nested_inherited_classes():
+ """
+ >>> test_nested_inherited_classes()
+ """
+ cdef DD.BB b
+ assert b.square(3) == 9
+
def test_nested_typedef(py_x):
"""
>>> test_nested_typedef(5)
@@ -51,6 +88,13 @@ def test_nested_typedef(py_x):
cdef A.my_int x = py_x
assert A.negate(x) == -py_x
+def test_nested_defined_typedef(py_x):
+ """
+ >>> test_nested_defined_typedef(5)
+ """
+ cdef AA.my_int x = py_x
+ assert AA.negate(x) == -py_x
+
def test_typedef_for_nested(py_x):
"""
>>> test_typedef_for_nested(5)
diff --git a/tests/run/cpp_stl_associated_containers_contains_cpp20.pyx b/tests/run/cpp_stl_associated_containers_contains_cpp20.pyx
new file mode 100644
index 000000000..ebe8d8fa8
--- /dev/null
+++ b/tests/run/cpp_stl_associated_containers_contains_cpp20.pyx
@@ -0,0 +1,106 @@
+# mode: run
+# tag: cpp, cpp20
+
+# cython: language_level=3
+
+from libcpp.map cimport map, multimap
+from libcpp.set cimport set, multiset
+from libcpp.unordered_map cimport unordered_map, unordered_multimap
+from libcpp.unordered_set cimport unordered_set, unordered_multiset
+
+def test_map_contains(vals, int key_to_find):
+ """
+ >>> test_map_contains([(1,100),(2,200),(3,300)], 3)
+ True
+ >>> test_map_contains([(1,100),(2,200),(3,300)], 4)
+ False
+ """
+ cdef map[int,int] m = map[int, int]()
+ for v in vals:
+ m.insert(v)
+ return m.contains(key_to_find)
+
+def test_unordered_map_contains(vals, int key_to_find):
+ """
+ >>> test_unordered_map_contains([(1,100),(2,200),(3,300)], 3)
+ True
+ >>> test_unordered_map_contains([(1,100),(2,200),(3,300)], 4)
+ False
+ """
+ cdef unordered_map[int,int] um = unordered_map[int, int]()
+ for v in vals:
+ um.insert(v)
+ return um.contains(key_to_find)
+
+def test_multimap_contains(vals, int key_to_find):
+ """
+ >>> test_multimap_contains([(1,100),(2,200),(3,300)], 3)
+ True
+ >>> test_multimap_contains([(1,100),(2,200),(3,300)], 4)
+ False
+ """
+ cdef multimap[int,int] mm = multimap[int, int]()
+ for v in vals:
+ mm.insert(v)
+ return mm.contains(key_to_find)
+
+def test_unordered_multimap_contains(vals, int key_to_find):
+ """
+ >>> test_unordered_multimap_contains([(1,100),(2,200),(3,300)], 3)
+ True
+ >>> test_unordered_multimap_contains([(1,100),(2,200),(3,300)], 4)
+ False
+ """
+ cdef unordered_multimap[int,int] umm = unordered_multimap[int, int]()
+ for v in vals:
+ umm.insert(v)
+ return umm.contains(key_to_find)
+
+
+def test_set_contains(vals, int val_to_find):
+ """
+ >>> test_set_contains([1, 2, 3], 3)
+ True
+ >>> test_set_contains([1, 2, 3], 4)
+ False
+ """
+ cdef set[int] s = set[int]()
+ for v in vals:
+ s.insert(v)
+ return s.contains(val_to_find)
+
+def test_unordered_set_contains(vals, int val_to_find):
+ """
+ >>> test_unordered_set_contains([1, 2, 3], 3)
+ True
+ >>> test_unordered_set_contains([1, 2, 3], 4)
+ False
+ """
+ cdef unordered_set[int] us = unordered_set[int]()
+ for v in vals:
+ us.insert(v)
+ return us.contains(val_to_find)
+
+def test_multiset_contains(vals, int val_to_find):
+ """
+ >>> test_multiset_contains([1, 2, 3], 3)
+ True
+ >>> test_multiset_contains([1, 2, 3], 4)
+ False
+ """
+ cdef multiset[int] ms = multiset[int]()
+ for v in vals:
+ ms.insert(v)
+ return ms.contains(val_to_find)
+
+def test_unordered_multiset_contains(vals, int val_to_find):
+ """
+ >>> test_unordered_multiset_contains([1, 2, 3], 3)
+ True
+ >>> test_unordered_multiset_contains([1, 2, 3], 4)
+ False
+ """
+ cdef unordered_multiset[int] ums = unordered_multiset[int]()
+ for v in vals:
+ ums.insert(v)
+ return ums.contains(val_to_find)
diff --git a/tests/run/cpp_stl_bit_cpp20.pyx b/tests/run/cpp_stl_bit_cpp20.pyx
new file mode 100644
index 000000000..5aae8326a
--- /dev/null
+++ b/tests/run/cpp_stl_bit_cpp20.pyx
@@ -0,0 +1,131 @@
+# mode: run
+# tag: cpp, werror, cpp20
+
+from libcpp cimport bool
+from libc.stdint cimport uint8_t, int8_t
+from libcpp.bit cimport (bit_cast, has_single_bit, bit_ceil, bit_floor,
+ bit_width, rotr, rotl, countl_zero, countl_one, countr_zero,
+ countr_one, popcount)
+
+def test_bit_cast():
+ """
+ Test bit_cast reinterpreting a signed 8-bit integer as unsigned.
+ -127 in two's complement is 0b1000'0001, i.e. 129 when read as uint8_t.
+ >>> test_bit_cast()
+ 129
+ """
+ cdef int8_t x = -127
+ cdef result = bit_cast[uint8_t, int8_t](x)
+ return result
+
+def test_has_single_bit():
+ """
+ Test has_single_bit with an unsigned 8-bit wide integer type.
+ >>> test_has_single_bit()
+ True
+ """
+ cdef uint8_t x = 1
+ cdef bint res = has_single_bit[uint8_t](x)
+ return res
+
+def test_bit_ceil():
+ """
+ Test bit_ceil with an unsigned 8-bit wide integer type.
+ >>> test_bit_ceil()
+ 4
+ """
+ cdef uint8_t x = 3
+ cdef uint8_t res = bit_ceil[uint8_t](x)
+ return res
+
+def test_bit_floor():
+ """
+ Test bit_floor with an unsigned 8-bit wide integer type.
+ >>> test_bit_floor()
+ 4
+ """
+ cdef uint8_t x = 5
+ cdef uint8_t res = bit_floor[uint8_t](x)
+ return res
+
+def test_bit_width():
+ """
+ Test bit_width with an unsigned 8-bit wide integer type.
+ >>> test_bit_width()
+ 3
+ """
+ cdef uint8_t x = 5
+ cdef int res = bit_width[uint8_t](x)
+ return res
+
+def test_rotl():
+ """
+ Test rotl with an unsigned 8-bit wide integer type.
+ >>> test_rotl()
+ 209
+ """
+ cdef uint8_t x = 29
+ cdef int s = 4
+ cdef uint8_t res = rotl[uint8_t](x, s)
+ return res
+
+def test_rotr():
+ """
+ Test rotr with an unsigned 8-bit wide integer type.
+ >>> test_rotr()
+ 142
+ """
+ cdef uint8_t x = 29
+ cdef int s = 1
+ cdef uint8_t res = rotr[uint8_t](x, s)
+ return res
+
+def test_countl_zero():
+ """
+ Test countl_zero with an unsigned 8-bit wide integer type.
+ >>> test_countl_zero()
+ 3
+ """
+ cdef uint8_t x = 24
+ cdef int res = countl_zero[uint8_t](x)
+ return res
+
+def test_countr_zero():
+ """
+ Test countr_zero with an unsigned 8-bit wide integer type.
+ >>> test_countr_zero()
+ 3
+ """
+ cdef uint8_t x = 24
+ cdef int res = countr_zero[uint8_t](x)
+ return res
+
+def test_countl_one():
+ """
+ Test countl_one with an unsigned 8-bit wide integer type.
+ >>> test_countl_one()
+ 3
+ """
+ cdef uint8_t x = 231
+ cdef int res = countl_one[uint8_t](x)
+ return res
+
+def test_countr_one():
+ """
+ Test countr_one with an unsigned 8-bit wide integer type.
+ >>> test_countr_one()
+ 3
+ """
+ cdef uint8_t x = 231
+ cdef int res = countr_one[uint8_t](x)
+ return res
+
+def test_popcount():
+ """
+ Test popcount with an unsigned 8-bit wide integer type.
+ >>> test_popcount()
+ 8
+ """
+ cdef uint8_t x = 255
+ cdef int res = popcount[uint8_t](x)
+ return res
diff --git a/tests/run/cpp_stl_function.pyx b/tests/run/cpp_stl_function.pyx
index 723773481..14a92c586 100644
--- a/tests/run/cpp_stl_function.pyx
+++ b/tests/run/cpp_stl_function.pyx
@@ -49,25 +49,25 @@ cdef class FunctionKeeper:
"""
cdef cpp_function_lib.FunctionKeeper* function_keeper
- cdef function[double(double, int)]* _get_function_ptr_from_name(self, function_name):
- cdef function[double(double, int)] *f
+ cdef function[double(double, int) noexcept]* _get_function_ptr_from_name(self, function_name):
+ cdef function[double(double, int) noexcept] *f
if function_name == 'add_one':
- f = new function[double(double, int)](cpp_function_lib.add_one)
+ f = new function[double(double, int) noexcept](cpp_function_lib.add_one)
elif function_name == 'add_two':
- f = new function[double(double, int)](cpp_function_lib.add_two)
+ f = new function[double(double, int) noexcept](cpp_function_lib.add_two)
elif function_name == 'AddAnotherFunctor5':
- f = new function[double(double, int)]()
+ f = new function[double(double, int) noexcept]()
f[0] = cpp_function_lib.AddAnotherFunctor(5.0)
elif function_name == 'NULL':
- f = new function[double(double, int)](NULL)
+ f = new function[double(double, int) noexcept](NULL)
elif function_name == 'default':
- f = new function[double(double, int)]()
+ f = new function[double(double, int) noexcept]()
return f
def __cinit__(self, function_name):
- cdef function[double(double, int)] *f = self._get_function_ptr_from_name(function_name)
+ cdef function[double(double, int) noexcept] *f = self._get_function_ptr_from_name(function_name)
self.function_keeper = new cpp_function_lib.FunctionKeeper(f[0])
del f
@@ -81,6 +81,6 @@ cdef class FunctionKeeper:
return <bint> self.function_keeper.get_function()
def set_function(self, function_name):
- cdef function[double(double, int)] *f = self._get_function_ptr_from_name(function_name)
+ cdef function[double(double, int) noexcept] *f = self._get_function_ptr_from_name(function_name)
self.function_keeper.set_function(f[0])
del f
diff --git a/tests/run/cpp_stl_numeric_ops_cpp17.pyx b/tests/run/cpp_stl_numeric_ops_cpp17.pyx
index eba4d2beb..e89540d35 100644
--- a/tests/run/cpp_stl_numeric_ops_cpp17.pyx
+++ b/tests/run/cpp_stl_numeric_ops_cpp17.pyx
@@ -3,7 +3,7 @@
from libcpp.numeric cimport (reduce, transform_reduce, inclusive_scan,
exclusive_scan, transform_inclusive_scan,
- transform_exclusive_scan)
+ transform_exclusive_scan, gcd, lcm)
from libcpp.execution cimport seq
from libcpp.vector cimport vector
@@ -275,3 +275,19 @@ def test_transform_exclusive_scan_with_execpolicy(vector[int] v, int init):
cdef vector[int] out = vector[int](v.size())
transform_exclusive_scan(seq, v.begin(), v.end(), out.begin(), init, add_integers, multiply_with_2)
return out
+
+def test_gcd(int a, int b):
+ """
+ Test gcd
+ >>> test_gcd(12, 18)
+ 6
+ """
+ return gcd[int](a, b)
+
+def test_lcm(int a, int b):
+ """
+ Test lcm
+ >>> test_lcm(45, 75)
+ 225
+ """
+ return lcm[int](a, b) \ No newline at end of file
diff --git a/tests/run/cpp_stl_numeric_ops_cpp20.pyx b/tests/run/cpp_stl_numeric_ops_cpp20.pyx
new file mode 100644
index 000000000..e3a8c01df
--- /dev/null
+++ b/tests/run/cpp_stl_numeric_ops_cpp20.pyx
@@ -0,0 +1,23 @@
+# mode: run
+# tag: cpp, werror, cpp20
+
+from libcpp.numeric cimport midpoint
+
+def test_midpoint_integer(int a, int b):
+ """
+ Test midpoint for integer types
+ >>> test_midpoint_integer(2, 6)
+ 4
+ """
+ cdef int res = midpoint[int](a, b)
+ return res
+
+
+def test_midpoint_float(float a, float b):
+ """
+ Test midpoint for float
+ >>> test_midpoint_float(2, 6)
+ 4.0
+ """
+ cdef float res = midpoint[float](a, b)
+ return res
diff --git a/tests/run/cpp_stl_string_cpp20.pyx b/tests/run/cpp_stl_string_cpp20.pyx
new file mode 100644
index 000000000..f3a2b80d1
--- /dev/null
+++ b/tests/run/cpp_stl_string_cpp20.pyx
@@ -0,0 +1,61 @@
+# mode: run
+# tag: cpp, werror, cpp20
+
+from libcpp cimport bool
+from libcpp.string cimport string
+
+b_A = b'A'
+b_F = b'F'
+b_abc = b"ABC"
+b_def = b"DEF"
+
+def test_string_starts_with_char(bytes py_str):
+ """
+ Test std::string.starts_with() with char type argument
+ >>> test_string_starts_with_char(b'A')
+ True
+ >>> test_string_starts_with_char(b'F')
+ False
+ """
+ cdef char c = py_str[0]
+ cdef string s = b"ABCDEF"
+ return s.starts_with(c)
+
+
+def test_string_starts_with_cstr(bytes py_str):
+ """
+ Test std::string.starts_with() with c str type argument (char*)
+ >>> test_string_starts_with_cstr(b"ABC")
+ True
+ >>> test_string_starts_with_cstr(b"DEF")
+ False
+ """
+ cdef char* c = py_str
+ cdef string s = b"ABCDEF"
+ return s.starts_with(c)
+
+
+def test_string_ends_with_char(bytes py_str):
+ """
+ Test std::string.ends_with() with char type argument
+ >>> test_string_ends_with_char(b'F')
+ True
+ >>> test_string_ends_with_char(b'A')
+ False
+ """
+ cdef char c = py_str[0]
+ cdef string s = b"ABCDEF"
+ return s.ends_with(c)
+
+
+def test_string_ends_with_cstr(bytes py_str):
+ """
+ Test std::string.ends_with() with c str type argument (char*)
+ >>> test_string_ends_with_cstr(b"DEF")
+ True
+ >>> test_string_ends_with_cstr(b"ABC")
+ False
+ """
+ cdef char* c = py_str
+ cdef string s = b"ABCDEF"
+ return s.ends_with(c) \ No newline at end of file
diff --git a/tests/run/cython_no_files.srctree b/tests/run/cython_no_files.srctree
new file mode 100644
index 000000000..455258c03
--- /dev/null
+++ b/tests/run/cython_no_files.srctree
@@ -0,0 +1,34 @@
+PYTHON test_cythonize_no_files.py
+PYTHON test_cython_no_files.py
+
+######## a.py ###########
+a = 1
+
+######## b.py ###########
+b = 2
+
+######## c.pyx ###########
+c = 3
+
+######## d.pyx ###########
+d = 4
+
+######## test_cythonize_no_files.py ###########
+import subprocess
+import sys
+
+cmd = [sys.executable, '-c', 'from Cython.Build.Cythonize import main; main()', 'a.py', 'b.py', 'c.py', '*.pyx']
+proc = subprocess.Popen(cmd, stderr=subprocess.PIPE)
+_, err = proc.communicate()
+assert proc.returncode == 1, proc.returncode
+assert b"No such file or directory: 'c.py'" in err, err
+
+######## test_cython_no_files.py ###########
+import subprocess
+import sys
+
+cmd = [sys.executable, '-c', 'from Cython.Compiler.Main import main; main(command_line = 1)', 'a.py', 'b.py', 'c.py', '*.pyx']
+proc = subprocess.Popen(cmd, stderr=subprocess.PIPE)
+_, err = proc.communicate()
+assert proc.returncode == 1, proc.returncode
+assert b"No such file or directory: 'c.py'" in err, err
diff --git a/tests/run/decorators.pyx b/tests/run/decorators.pyx
index fc20235e2..64b0f0e20 100644
--- a/tests/run/decorators.pyx
+++ b/tests/run/decorators.pyx
@@ -17,6 +17,10 @@ __doc__ = u"""
3
>>> i.HERE
1
+ >>> i_called_directly(4)
+ 3
+ >>> i_called_directly.HERE
+ 1
"""
class wrap:
@@ -62,6 +66,35 @@ a = A()
def i(x):
return x - 1
+@A().decorate
+def i_called_directly(x):
+ # PEP 614 means this now works
+ return x - 1
+
+list_of_decorators = [decorate, decorate2]
+
+@list_of_decorators[0]
+def test_index_from_decorator_list0(a, b):
+ """
+ PEP 614 means this now works
+ >>> test_index_from_decorator_list0(1, 2)
+ 4
+ >>> test_index_from_decorator_list0.HERE
+ 1
+ """
+ return a+b+1
+
+@list_of_decorators[1](1,2)
+def test_index_from_decorator_list1(a, b):
+ """
+ PEP 614 means this now works
+ >>> test_index_from_decorator_list1(1, 2)
+ 4
+ >>> test_index_from_decorator_list1.HERE
+ 1
+ """
+ return a+b+1
+
def append_to_list_decorator(lst):
def do_append_to_list_dec(func):
def new_func():
diff --git a/tests/run/exceptionpropagation.pyx b/tests/run/exceptionpropagation.pyx
index 2c79bf26e..2466550d5 100644
--- a/tests/run/exceptionpropagation.pyx
+++ b/tests/run/exceptionpropagation.pyx
@@ -56,4 +56,26 @@ def test_except_promotion_compare(bint fire):
...
RuntimeError
"""
- except_promotion_compare(fire) \ No newline at end of file
+ except_promotion_compare(fire)
+
+
+cdef int cdef_function_that_raises():
+ raise RuntimeError
+
+cdef int cdef_noexcept_function_that_raises() noexcept:
+ raise RuntimeError
+
+def test_except_raise_by_default():
+ """
+ >>> test_except_raise_by_default()
+ Traceback (most recent call last):
+ ...
+ RuntimeError
+ """
+ cdef_function_that_raises()
+
+def test_noexcept():
+ """
+ >>> test_noexcept()
+ """
+ cdef_noexcept_function_that_raises()
diff --git a/tests/run/exceptions_nogil.pyx b/tests/run/exceptions_nogil.pyx
index 2bcedd9ed..31af84ae2 100644
--- a/tests/run/exceptions_nogil.pyx
+++ b/tests/run/exceptions_nogil.pyx
@@ -1,7 +1,7 @@
# mode: run
# tag: nogil, withgil, exceptions
-cdef void foo_nogil(int i) nogil except *:
+cdef void foo_nogil(int i) except * nogil:
if i != 0: raise ValueError("huhu !")
diff --git a/tests/run/fused_cpp.pyx b/tests/run/fused_cpp.pyx
index 9f3bb5104..95b326904 100644
--- a/tests/run/fused_cpp.pyx
+++ b/tests/run/fused_cpp.pyx
@@ -2,6 +2,7 @@
cimport cython
from libcpp.vector cimport vector
+from libcpp.map cimport map
from libcpp.typeinfo cimport type_info
from cython.operator cimport typeid
@@ -41,3 +42,49 @@ def typeid_call2(cython.integral x):
"""
cdef const type_info* a = &typeid(cython.integral)
return a[0] == tidint[0]
+
+cdef fused_ref(cython.integral& x):
+ return x*2
+
+def test_fused_ref(int x):
+ """
+ >>> test_fused_ref(5)
+ (10, 10)
+ """
+ return fused_ref(x), fused_ref[int](x)
+
+ctypedef fused nested_fused:
+ vector[cython.integral]
+
+cdef vec_of_fused(nested_fused v):
+ x = v[0]
+ return cython.typeof(x)
+
+def test_nested_fused():
+ """
+ >>> test_nested_fused()
+ int
+ long
+ """
+ cdef vector[int] vi = [0,1]
+ cdef vector[long] vl = [0,1]
+ print vec_of_fused(vi)
+ print vec_of_fused(vl)
+
+ctypedef fused nested_fused2:
+ map[cython.integral, cython.floating]
+
+cdef map_of_fused(nested_fused2 m):
+ for pair in m:
+ return cython.typeof(pair.first), cython.typeof(pair.second)
+
+def test_nested_fused2():
+ """
+ >>> test_nested_fused2()
+ ('int', 'float')
+ ('long', 'double')
+ """
+ cdef map[int, float] mif = { 0: 0.0 }
+ cdef map[long, double] mld = { 0: 0.0 }
+ print map_of_fused(mif)
+ print map_of_fused(mld)
diff --git a/tests/run/generators_py.py b/tests/run/generators_py.py
index 914252bf4..9ec6991cf 100644
--- a/tests/run/generators_py.py
+++ b/tests/run/generators_py.py
@@ -387,3 +387,20 @@ def test_yield_in_const_conditional_true():
"""
if True:
print((yield 1))
+
+
+def test_generator_scope():
+ """
+ Tests that the function is run at the correct time
+ (i.e. when the generator is created, not when it's run)
+ >>> list(test_generator_scope())
+ inner running
+ generator created
+ [0, 10]
+ """
+ def inner(val):
+ print("inner running")
+ return [0, val]
+ gen = (a for a in inner(10))
+ print("generator created")
+ return gen
diff --git a/tests/run/genexpr_arg_order.py b/tests/run/genexpr_arg_order.py
new file mode 100644
index 000000000..5b9e27238
--- /dev/null
+++ b/tests/run/genexpr_arg_order.py
@@ -0,0 +1,181 @@
+# mode: run
+# tag: genexpr, py3, py2
+
+from __future__ import print_function
+
+# Tests that function arguments to generator expressions are
+# evaluated in the correct order (even after optimization)
+# WARNING: there may be an amount of luck in this working correctly (since it
+# isn't strictly enforced). Therefore perhaps be prepared to disable these
+# tests if they stop working and aren't easily fixed
+
+import cython
+
+@cython.cfunc
+@cython.returns(cython.int)
+def zero():
+ print("In zero")
+ return 0
+
+@cython.cfunc
+@cython.returns(cython.int)
+def five():
+ print("In five")
+ return 5
+
+@cython.cfunc
+@cython.returns(cython.int)
+def one():
+ print("In one")
+ return 1
+
+# FIXME - I don't think this is easy to enforce unfortunately, but it is slightly wrong
+#@cython.test_assert_path_exists("//ForFromStatNode")
+#def genexp_range_argument_order():
+# """
+# >>> list(genexp_range_argument_order())
+# In zero
+# In five
+# [0, 1, 2, 3, 4]
+# """
+# return (a for a in range(zero(), five()))
+#
+#@cython.test_assert_path_exists("//ForFromStatNode")
+#@cython.test_assert_path_exists(
+# "//InlinedGeneratorExpressionNode",
+# "//ComprehensionAppendNode")
+#def list_range_argument_order():
+# """
+# >>> list_range_argument_order()
+# In zero
+# In five
+# [0, 1, 2, 3, 4]
+# """
+# return list(a for a in range(zero(), five()))
+
+@cython.test_assert_path_exists("//ForFromStatNode")
+def genexp_array_slice_order():
+ """
+ >>> list(genexp_array_slice_order())
+ In zero
+ In five
+ [0, 1, 2, 3, 4]
+ """
+ # TODO ideally find a way to add the evaluation of x to this test too
+ x = cython.declare(cython.int[20])
+ x = list(range(20))
+ return (a for a in x[zero():five()])
+
+@cython.test_assert_path_exists("//ForFromStatNode")
+@cython.test_assert_path_exists(
+ "//InlinedGeneratorExpressionNode",
+ "//ComprehensionAppendNode")
+def list_array_slice_order():
+ """
+ >>> list(list_array_slice_order())
+ In zero
+ In five
+ [0, 1, 2, 3, 4]
+ """
+ # TODO ideally find a way to add the evaluation of x to this test too
+ x = cython.declare(cython.int[20])
+ x = list(range(20))
+ return list(a for a in x[zero():five()])
+
+class IndexableClass:
+ def __getitem__(self, idx):
+ print("In indexer")
+ return [ idx.start, idx.stop, idx.step ]
+
+class NoisyAttributeLookup:
+ @property
+ def indexer(self):
+ print("Getting indexer")
+ return IndexableClass()
+
+ @property
+ def function(self):
+ print("Getting function")
+ def func(a, b, c):
+ print("In func")
+ return [a, b, c]
+ return func
+
+def genexp_index_order():
+ """
+ >>> list(genexp_index_order())
+ Getting indexer
+ In zero
+ In five
+ In one
+ In indexer
+ Made generator expression
+ [0, 5, 1]
+ """
+ obj = NoisyAttributeLookup()
+ ret = (a for a in obj.indexer[zero():five():one()])
+ print("Made generator expression")
+ return ret
+
+@cython.test_assert_path_exists("//InlinedGeneratorExpressionNode")
+def list_index_order():
+ """
+ >>> list_index_order()
+ Getting indexer
+ In zero
+ In five
+ In one
+ In indexer
+ [0, 5, 1]
+ """
+ obj = NoisyAttributeLookup()
+ return list(a for a in obj.indexer[zero():five():one()])
+
+
+def genexpr_fcall_order():
+ """
+ >>> list(genexpr_fcall_order())
+ Getting function
+ In zero
+ In five
+ In one
+ In func
+ Made generator expression
+ [0, 5, 1]
+ """
+ obj = NoisyAttributeLookup()
+ ret = (a for a in obj.function(zero(), five(), one()))
+ print("Made generator expression")
+ return ret
+
+@cython.test_assert_path_exists("//InlinedGeneratorExpressionNode")
+def list_fcall_order():
+ """
+ >>> list_fcall_order()
+ Getting function
+ In zero
+ In five
+ In one
+ In func
+ [0, 5, 1]
+ """
+ obj = NoisyAttributeLookup()
+ return list(a for a in obj.function(zero(), five(), one()))
+
+def call1():
+ print("In call1")
+ return ["a"]
+def call2():
+ print("In call2")
+ return ["b"]
+
+def multiple_genexps_to_call_order():
+ """
+ >>> multiple_genexps_to_call_order()
+ In call1
+ In call2
+ """
+ def takes_two_genexps(a, b):
+ pass
+
+ return takes_two_genexps((x for x in call1()), (x for x in call2()))
diff --git a/tests/run/genexpr_iterable_lookup_T600.pyx b/tests/run/genexpr_iterable_lookup_T600.pyx
index 945652717..c288993a6 100644
--- a/tests/run/genexpr_iterable_lookup_T600.pyx
+++ b/tests/run/genexpr_iterable_lookup_T600.pyx
@@ -35,6 +35,11 @@ def genexpr_iterable_in_closure():
result = list( x*2 for x in x if x != 'b' )
assert x == 'abc' # don't leak in Py3 code
assert f() == 'abc' # don't leak in Py3 code
+
+ # Py2 cleanup (pretty irrelevant to the actual test!)
+ import sys
+ if sys.version_info[0] == 2:
+ result = map(bytes, result)
return result
@@ -51,6 +56,7 @@ def genexpr_over_complex_arg(func, L):
def listcomp():
"""
>>> listcomp()
+ [0, 1, 5, 8]
"""
data = [('red', 5), ('blue', 1), ('yellow', 8), ('black', 0)]
data.sort(key=lambda r: r[1])
@@ -84,3 +90,15 @@ def genexpr_in_dictcomp_dictiter():
"""
d = {1:2, 3:4, 5:6}
return {k:d for k,d in d.iteritems() if d != 4}
+
+
+def genexpr_over_array_slice():
+ """
+ >>> list(genexpr_over_array_slice())
+ [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]
+ """
+ cdef double x[10]
+ for i in range(10):
+ x[i] = i
+ cdef int n = 5
+ return (n for n in x[:n+1])
diff --git a/tests/run/line_trace.pyx b/tests/run/line_trace.pyx
index 32579aff7..0a3dc13fa 100644
--- a/tests/run/line_trace.pyx
+++ b/tests/run/line_trace.pyx
@@ -155,7 +155,7 @@ def global_name(global_name):
return global_name + 321
-cdef int cy_add_nogil(int a, int b) nogil except -1:
+cdef int cy_add_nogil(int a, int b) except -1 nogil:
x = a + b # 1
return x # 2
diff --git a/tests/run/locals.pyx b/tests/run/locals.pyx
index f343fe1cb..9473ad01e 100644
--- a/tests/run/locals.pyx
+++ b/tests/run/locals.pyx
@@ -113,3 +113,13 @@ def buffers_in_locals(object[char, ndim=1] a):
cdef object[unsigned char, ndim=1] b = a
return locals()
+
+def set_comp_scope():
+ """
+    locals() should be evaluated in the enclosing function scope, not the comprehension scope
+ >>> list(set_comp_scope())
+ ['something']
+ """
+ something = 1
+ return { b for b in locals().keys() }
+
diff --git a/tests/run/nogil.pyx b/tests/run/nogil.pyx
index efaee4ff6..356021149 100644
--- a/tests/run/nogil.pyx
+++ b/tests/run/nogil.pyx
@@ -71,7 +71,7 @@ def test_get_gil_in_nogil():
cdef int with_gil_func() except -1 with gil:
raise Exception("error!")
-cdef int nogil_func() nogil except -1:
+cdef int nogil_func() except -1 nogil:
with_gil_func()
def test_nogil_exception_propagation():
@@ -85,7 +85,7 @@ def test_nogil_exception_propagation():
nogil_func()
-cdef int write_unraisable() nogil:
+cdef int write_unraisable() noexcept nogil:
with gil:
raise ValueError()
diff --git a/tests/run/nogil_conditional.pyx b/tests/run/nogil_conditional.pyx
index eba22d5b2..92eff0853 100644
--- a/tests/run/nogil_conditional.pyx
+++ b/tests/run/nogil_conditional.pyx
@@ -34,7 +34,7 @@ cdef int with_gil_func() except? -1 with gil:
raise Exception("error!")
-cdef int nogil_func() nogil except? -1:
+cdef int nogil_func() except? -1 nogil:
with_gil_func()
@@ -51,7 +51,7 @@ def test_nogil_exception_propagation():
nogil_func()
-cdef int write_unraisable() nogil:
+cdef int write_unraisable() noexcept nogil:
with gil:
raise ValueError()
diff --git a/tests/run/parallel.pyx b/tests/run/parallel.pyx
index c3739b10b..40d7ac10d 100644
--- a/tests/run/parallel.pyx
+++ b/tests/run/parallel.pyx
@@ -32,7 +32,7 @@ def test_parallel():
free(buf)
-cdef int get_num_threads() with gil:
+cdef int get_num_threads() noexcept with gil:
print "get_num_threads called"
return 3
diff --git a/tests/run/pep442_tp_finalize.pyx b/tests/run/pep442_tp_finalize.pyx
index 49bed3268..6532757f9 100644
--- a/tests/run/pep442_tp_finalize.pyx
+++ b/tests/run/pep442_tp_finalize.pyx
@@ -1,5 +1,9 @@
# mode: run
+from __future__ import print_function
+
+cimport cython
+
import gc
cdef class nontrivial_del:
@@ -49,6 +53,80 @@ def test_del_and_dealloc():
gc.collect()
print("finish")
+@cython.final
+cdef class FinalClass:
+ def __init__(self):
+ print("init")
+ def __del__(self):
+ print("del")
+
+def test_final_class():
+ """
+ >>> test_final_class()
+ start
+ init
+ del
+ finish
+ """
+ print("start")
+ d = FinalClass()
+ d = None
+ gc.collect()
+ print("finish")
+
+@cython.final
+cdef class FinalInherits(nontrivial_del):
+ def __init__(self):
+ super().__init__()
+ print("FinalInherits init")
+    # no __del__ of its own, but the inherited nontrivial_del.__del__ should still run
+ def __dealloc__(self):
+ pass # define __dealloc__ so as not to fall back on base __dealloc__
+
+def test_final_inherited():
+ """
+ >>> test_final_inherited()
+ start
+ init
+ FinalInherits init
+ del
+ finish
+ """
+ print("start")
+ d = FinalInherits()
+ d = None
+ gc.collect()
+ print("finish")
+
+cdef class DummyBase:
+ pass
+
+class RegularClass:
+ __slots__ = ()
+ def __del__(self):
+ print("del")
+
+@cython.final
+cdef class FinalMultipleInheritance(DummyBase, RegularClass):
+ def __init__(self):
+ super().__init__()
+ print("init")
+ def __dealloc__(self):
+ pass
+
+def test_final_multiple_inheritance():
+ """
+ >>> test_final_multiple_inheritance()
+ start
+ init
+ del
+ finish
+ """
+ print("start")
+ d = FinalMultipleInheritance()
+ d = None
+ gc.collect()
+ print("finish")
cdef class del_with_exception:
def __init__(self):
@@ -301,3 +379,4 @@ class derived_python_child(cdef_nontrivial_parent):
raise RuntimeError("End function")
func(derived_python_child)
+
diff --git a/tests/run/pep442_tp_finalize_cimport.srctree b/tests/run/pep442_tp_finalize_cimport.srctree
new file mode 100644
index 000000000..8a257177f
--- /dev/null
+++ b/tests/run/pep442_tp_finalize_cimport.srctree
@@ -0,0 +1,67 @@
+"""
+PYTHON setup.py build_ext -i
+PYTHON runtests.py
+"""
+
+####### runtests.py #######
+
+import gc
+from testclasses import *
+import baseclasses
+
+def test_has_del():
+ inst = HasIndirectDel()
+ inst = None
+ gc.collect()
+ assert baseclasses.HasDel_del_called_count
+
+def test_no_del():
+ inst = NoIndirectDel()
+ inst = None
+ gc.collect()
+ # The test here is that it doesn't crash
+
+test_has_del()
+test_no_del()
+
+######## setup.py ########
+
+from setuptools import setup
+from Cython.Build import cythonize
+
+setup(ext_modules = cythonize('*.pyx'))
+
+####### baseclasses.pxd ######
+
+cdef class HasDel:
+ pass
+
+cdef class DoesntHaveDel:
+ pass
+
+####### baseclasses.pyx ######
+
+HasDel_del_called_count = 0
+
+cdef class HasDel:
+ def __del__(self):
+ global HasDel_del_called_count
+ HasDel_del_called_count += 1
+
+cdef class DoesntHaveDel:
+ pass
+
+######## testclasses.pyx ######
+
+cimport cython
+from baseclasses cimport HasDel, DoesntHaveDel
+
+@cython.final
+cdef class HasIndirectDel(HasDel):
+ pass
+
+@cython.final
+cdef class NoIndirectDel(DoesntHaveDel):
+ # But Cython can't tell that we don't have __del__ until runtime,
+ # so has to generate code to call it (and not crash!)
+ pass
diff --git a/tests/run/pep526_variable_annotations.py b/tests/run/pep526_variable_annotations.py
index 56cb0201b..6f430c0af 100644
--- a/tests/run/pep526_variable_annotations.py
+++ b/tests/run/pep526_variable_annotations.py
@@ -203,6 +203,29 @@ def test_tuple(a: typing.Tuple[cython.int, cython.float], b: typing.Tuple[cython
print(cython.typeof(c) + (" object" if not cython.compiled else ""))
+def test_use_typing_attributes_as_non_annotations():
+ """
+ >>> test_use_typing_attributes_as_non_annotations()
+ typing.Tuple typing.Tuple[int]
+ typing.Optional True
+ typing.Optional True
+ """
+ x1 = typing.Tuple
+ x2 = typing.Tuple[int]
+ y1 = typing.Optional
+ y2 = typing.Optional[typing.Dict]
+ z1 = Optional
+ z2 = Optional[Dict]
+ # The result of printing "Optional[type]" is slightly version-dependent
+ # so accept both possible forms
+ allowed_optional_strings = [
+ "typing.Union[typing.Dict, NoneType]",
+ "typing.Optional[typing.Dict]"
+ ]
+ print(x1, x2)
+ print(y1, str(y2) in allowed_optional_strings)
+ print(z1, str(z2) in allowed_optional_strings)
+
if cython.compiled:
__doc__ = """
# passing non-dicts to variables declared as dict now fails
@@ -219,6 +242,5 @@ if cython.compiled:
TypeError: Expected dict, got D
"""
-
_WARNINGS = """
"""
diff --git a/tests/run/pure_cdef_class_dataclass.py b/tests/run/pure_cdef_class_dataclass.py
index 8a978d36f..e5c4bcd32 100644
--- a/tests/run/pure_cdef_class_dataclass.py
+++ b/tests/run/pure_cdef_class_dataclass.py
@@ -25,7 +25,54 @@ class MyDataclass:
True
>>> hash(inst1) != id(inst1)
True
+ >>> inst1.func_with_annotations(2.0)
+ 4.0
"""
a: int = 1
self: list = cython.dataclasses.field(default_factory=list, hash=False) # test that arguments of init don't conflict
+
+ def func_with_annotations(self, b: float):
+ c: float = b
+ return self.a * c
+
+
+class DummyObj:
+ def __repr__(self):
+ return "DummyObj()"
+
+
+@cython.dataclasses.dataclass
+@cython.cclass
+class NoInitFields:
+ """
+ >>> NoInitFields()
+ NoInitFields(has_default=DummyObj(), has_factory='From a lambda', neither=None)
+ >>> NoInitFields().has_default is NoInitFields().has_default
+ True
+
+ >>> NoInitFields(1) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ TypeError: NoInitFields.__init__() takes 1 positional argument but 2 were given
+
+ >>> NoInitFields(has_default=1) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ TypeError: ...has_default...
+ >>> NoInitFields(has_factory=1) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ TypeError: ...has_factory...
+ >>> NoInitFields(neither=1) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ TypeError: ...neither...
+ """
+ has_default : object = cython.dataclasses.field(default=DummyObj(), init=False)
+ has_factory : object = cython.dataclasses.field(default_factory=lambda: "From a lambda", init=False)
+    # Cython will default-initialize this field to None
+ neither : object = cython.dataclasses.field(init=False)
+
+ def __post_init__(self):
+ if not cython.compiled:
+ # Cython will default-initialize this to None, while Python won't
+ # and not initializing it will mess up repr
+ assert not hasattr(self, "neither")
+ self.neither = None
diff --git a/tests/run/pure_py.py b/tests/run/pure_py.py
index a8dc5b014..ae1f820d3 100644
--- a/tests/run/pure_py.py
+++ b/tests/run/pure_py.py
@@ -549,18 +549,18 @@ def empty_declare():
]
r2.is_integral = True
- assert( r2.is_integral == True )
+ assert r2.is_integral == True
r3.x = 12.3
- assert( r3.x == 12.3 )
+ assert r3.x == 12.3
#It generates a correct C code, but raises an exception when interpreted
if cython.compiled:
r4[0].is_integral = True
- assert( r4[0].is_integral == True )
+ assert r4[0].is_integral == True
r5[0] = 42
- assert ( r5[0] == 42 )
+ assert r5[0] == 42
return [i for i, x in enumerate(res) if not x]
diff --git a/tests/run/relative_cimport_compare.srctree b/tests/run/relative_cimport_compare.srctree
new file mode 100644
index 000000000..77b6fb22e
--- /dev/null
+++ b/tests/run/relative_cimport_compare.srctree
@@ -0,0 +1,327 @@
+# mode: run
+# tag: cimport, pep489
+
+PYTHON setup.py build_ext --inplace
+PYTHON -c "import test_import"
+PYTHON -c "import test_cimport"
+
+
+######## setup.py ########
+
+from distutils.core import setup
+from Cython.Build import cythonize
+from Cython.Distutils.extension import Extension
+
+setup(
+ ext_modules=cythonize('**/*.pyx'),
+)
+
+######## test_import.py ########
+import sys
+SUPPORTS_PEP_489 = sys.version_info > (3, 5)
+if SUPPORTS_PEP_489:
+ import cypkg.sub.submodule
+ import cypkg.sub.sub2.sub2module
+ import pypkg.module
+ import pypkg.sub.submodule
+ import pypkg.sub.sub2.sub2module
+
+######## test_cimport.py ########
+import sys
+SUPPORTS_PEP_489 = sys.version_info > (3, 5)
+if SUPPORTS_PEP_489:
+ import module
+
+
+######## module.pyx ########
+cimport cypkg
+
+cdef cypkg.a_type a1 = 3
+assert a1 == 3
+cdef cypkg.a.a_type a2 = 3
+assert a2 == 3
+cdef cypkg.b_type b1 = 4
+assert b1 == 4
+cdef cypkg.b.b_type b2 = 4
+assert b2 == 4
+
+
+cimport cypkg.sub
+cdef cypkg.sub.a_type a3 = 3
+assert a3 == 3
+cdef cypkg.sub.a.a_type a4 = 3
+assert a4 == 3
+cdef cypkg.sub.b_type b3 = 4
+assert b3 == 4
+cdef cypkg.sub.b.b_type b4 = 4
+assert b4 == 4
+
+
+cimport cypkg.sub.sub2
+cdef cypkg.sub.sub2.a_type a5 = 3
+assert a5 == 3
+cdef cypkg.sub.sub2.a.a_type a6 = 3
+assert a6 == 3
+cdef cypkg.sub.sub2.b_type b5 = 4
+assert b5 == 4
+cdef cypkg.sub.sub2.b.b_type b6 = 4
+assert b6 == 4
+
+import pypkg
+assert pypkg.a_value == 3
+assert pypkg.a.a_value == 3
+assert pypkg.b_value == 4
+assert pypkg.b.b_value == 4
+
+
+import pypkg.sub
+assert pypkg.sub.a_value == 3
+assert pypkg.sub.a.a_value == 3
+assert pypkg.sub.b_value == 4
+assert pypkg.sub.b.b_value == 4
+
+
+import cypkg.sub.sub2
+assert pypkg.sub.sub2.a_value == 3
+assert pypkg.sub.sub2.a.a_value == 3
+assert pypkg.sub.sub2.b_value == 4
+assert pypkg.sub.sub2.b.b_value == 4
+
+
+######## cypkg/__init__.pxd ########
+
+cimport cypkg.sub
+cimport cypkg.sub.sub2
+
+from cypkg.sub cimport a
+from cypkg.sub.a cimport a_type
+from cypkg.sub.sub2 cimport b
+from cypkg.sub.sub2.b cimport b_type
+
+from . cimport sub
+from .sub cimport a
+from .sub.a cimport a_type
+from .sub.sub2 cimport b
+from .sub.sub2.b cimport b_type
+
+######## cypkg/__init__.pyx ########
+
+
+######## cypkg/module.pyx ########
+
+cimport cypkg
+cimport cypkg.sub
+cimport cypkg.sub.sub2
+from cypkg.sub cimport a
+from cypkg.sub.a cimport a_type
+from cypkg.sub.sub2 cimport b
+from cypkg.sub.sub2.b cimport b_type
+
+from . cimport sub
+from .sub cimport a
+from .sub.a cimport a_type
+from .sub.sub2 cimport b
+from .sub.sub2.b cimport b_type
+
+
+######## cypkg/sub/__init__.pxd ########
+
+cimport cypkg
+from cypkg.sub cimport a
+from cypkg.sub.a cimport a_type
+from cypkg.sub.sub2 cimport b
+from cypkg.sub.sub2.b cimport b_type
+
+from . cimport a
+from .a cimport a_type
+
+from .. cimport sub
+from ..sub cimport a
+from ..sub.a cimport a_type
+from ..sub.sub2 cimport b
+from ..sub.sub2.b cimport b_type
+
+######## cypkg/sub/__init__.pyx ########
+
+######## cypkg/sub/a.pxd ########
+
+ctypedef int a_type
+
+######## cypkg/sub/submodule.pyx ########
+
+cimport cypkg
+cimport cypkg.sub
+from cypkg.sub cimport a
+from cypkg.sub.a cimport a_type
+from cypkg.sub.sub2 cimport b
+from cypkg.sub.sub2.b cimport b_type
+
+from . cimport a
+from .a cimport a_type
+
+from .. cimport sub
+from ..sub cimport a
+from ..sub.a cimport a_type
+from ..sub.sub2 cimport b
+from ..sub.sub2.b cimport b_type
+
+######## cypkg/sub/sub2/__init__.pxd ########
+
+cimport cypkg
+cimport cypkg.sub
+from cypkg.sub cimport a
+from cypkg.sub.a cimport a_type
+from cypkg.sub.sub2 cimport b
+from cypkg.sub.sub2.b cimport b_type
+
+from ..sub2 cimport b
+from ..sub2.b cimport b_type
+
+from ...sub cimport a
+from ...sub.a cimport a_type
+
+from ... cimport sub
+from ...sub.sub2 cimport b
+from ...sub.sub2.b cimport b_type
+
+######## cypkg/sub/sub2/__init__.pyx ########
+
+######## cypkg/sub/sub2/b.pxd ########
+
+ctypedef int b_type
+
+
+######## cypkg/sub/sub2/sub2module.pyx ########
+
+cimport cypkg
+cimport cypkg.sub
+from cypkg.sub cimport a
+from cypkg.sub.a cimport a_type
+from cypkg.sub.sub2 cimport b
+from cypkg.sub.sub2.b cimport b_type
+
+from .. cimport sub2
+from ..sub2 cimport b
+from ..sub2.b cimport b_type
+
+from ...sub cimport a
+from ...sub.a cimport a_type
+
+from ... cimport sub
+from ...sub.sub2 cimport b
+from ...sub.sub2.b cimport b_type
+
+######## pypkg/__init__.py ########
+
+import pypkg.sub
+import pypkg.sub.sub2
+
+from pypkg.sub import a
+from pypkg.sub.a import a_value
+from pypkg.sub.sub2 import b
+from pypkg.sub.sub2.b import b_value
+
+from . import sub
+from .sub import a
+from .sub.a import a_value
+from .sub.sub2 import b
+from .sub.sub2.b import b_value
+
+######## pypkg/module.py ########
+
+import pypkg
+import pypkg.sub
+import pypkg.sub.sub2
+from pypkg.sub import a
+from pypkg.sub.a import a_value
+from pypkg.sub.sub2 import b
+from pypkg.sub.sub2.b import b_value
+
+from . import sub
+from .sub import a
+from .sub.a import a_value
+from .sub.sub2 import b
+from .sub.sub2.b import b_value
+
+######## pypkg/sub/__init__.py ########
+
+import pypkg
+from pypkg.sub import a
+from pypkg.sub.a import a_value
+from pypkg.sub.sub2 import b
+from pypkg.sub.sub2.b import b_value
+
+from . import a
+from .a import a_value
+
+from .. import sub
+from ..sub import a
+from ..sub.a import a_value
+from ..sub.sub2 import b
+from ..sub.sub2.b import b_value
+
+######## pypkg/sub/a.py ########
+
+a_value = 3
+
+######## pypkg/sub/submodule.py ########
+
+import pypkg
+import pypkg.sub
+from pypkg.sub import a
+from pypkg.sub.a import a_value
+from pypkg.sub.sub2 import b
+from pypkg.sub.sub2.b import b_value
+
+from . import a
+from .a import a_value
+
+from .. import sub
+from ..sub import a
+from ..sub.a import a_value
+from ..sub.sub2 import b
+from ..sub.sub2.b import b_value
+
+######## pypkg/sub/sub2/__init__.py ########
+
+import pypkg
+import pypkg.sub
+from pypkg.sub import a
+from pypkg.sub.a import a_value
+from pypkg.sub.sub2 import b
+from pypkg.sub.sub2.b import b_value
+
+from ..sub2 import b
+from ..sub2.b import b_value
+
+from ...sub import a
+from ...sub.a import a_value
+
+from ... import sub
+from ...sub.sub2 import b
+from ...sub.sub2.b import b_value
+
+######## pypkg/sub/sub2/b.py ########
+
+b_value = 4
+
+
+######## pypkg/sub/sub2/sub2module.py ########
+
+import pypkg
+import pypkg.sub
+from pypkg.sub import a
+from pypkg.sub.a import a_value
+from pypkg.sub.sub2 import b
+from pypkg.sub.sub2.b import b_value
+
+from .. import sub2
+from ..sub2 import b
+from ..sub2.b import b_value
+
+from ...sub import a
+from ...sub.a import a_value
+
+from ... import sub
+from ...sub.sub2 import b
+from ...sub.sub2.b import b_value
diff --git a/tests/run/sequential_parallel.pyx b/tests/run/sequential_parallel.pyx
index 3d8e1efff..cd4bbd6bc 100644
--- a/tests/run/sequential_parallel.pyx
+++ b/tests/run/sequential_parallel.pyx
@@ -315,7 +315,7 @@ def test_nan_init():
c1 = 16
-cdef void nogil_print(char *s) with gil:
+cdef void nogil_print(char *s) noexcept with gil:
print s.decode('ascii')
def test_else_clause():
@@ -406,7 +406,7 @@ def test_nested_break_continue():
print i
-cdef int parallel_return() nogil:
+cdef int parallel_return() noexcept nogil:
cdef int i
for i in prange(10):
@@ -640,7 +640,7 @@ def test_parallel_with_gil_continue_unnested():
print sum
-cdef int inner_parallel_section() nogil:
+cdef int inner_parallel_section() noexcept nogil:
cdef int j, sum = 0
for j in prange(10):
sum += j
@@ -656,10 +656,10 @@ def outer_parallel_section():
sum += inner_parallel_section()
return sum
-cdef int nogil_cdef_except_clause() nogil except -1:
+cdef int nogil_cdef_except_clause() except -1 nogil:
return 1
-cdef void nogil_cdef_except_star() nogil except *:
+cdef void nogil_cdef_except_star() except * nogil:
pass
def test_nogil_cdef_except_clause():
@@ -683,7 +683,7 @@ def test_num_threads_compile():
for i in prange(10):
pass
-cdef int chunksize() nogil:
+cdef int chunksize() noexcept nogil:
return 3
def test_chunksize():
@@ -784,7 +784,7 @@ cdef extern from *:
"""
void address_of_temp(...) nogil
void address_of_temp2(...) nogil
- double get_value() nogil except -1.0 # will generate a temp for exception checking
+ double get_value() except -1.0 nogil # will generate a temp for exception checking
def test_inner_private():
"""
diff --git a/tests/run/special_methods_T561.pyx b/tests/run/special_methods_T561.pyx
index 5eb9dddfc..bd68291e7 100644
--- a/tests/run/special_methods_T561.pyx
+++ b/tests/run/special_methods_T561.pyx
@@ -956,3 +956,44 @@ cdef class ReverseMethodsExist:
return "radd"
def __rsub__(self, other):
return "rsub"
+
+
+cdef class ArgumentTypeConversions:
+ """
+ The user can set the signature of special method arguments so that
+ it doesn't match the C signature. This just tests that a few
+ variations work
+
+ >>> obj = ArgumentTypeConversions()
+ >>> obj[1]
+ 1
+ >>> obj["not a number!"]
+ Traceback (most recent call last):
+ ...
+ TypeError: an integer is required
+ >>> obj < obj
+ In comparison 0
+ True
+ >>> obj == obj
+ In comparison 2
+ False
+
+    The exact flag value may not be reproducible between Python versions,
+    so this only checks that the converted (double) value ends with ".0"
+ >>> memoryview(obj) # doctest:+ELLIPSIS
+ Traceback (most recent call last):
+ ...
+ RuntimeError: From __getbuffer__ with flags ....0
+ """
+ # force conversion of object to int
+ def __getitem__(self, int x):
+ return x
+
+ # force conversion of comparison (int) to object
+ def __richcmp__(self, other, object comparison):
+ print "In comparison", comparison
+ return not bool(comparison)
+
+ # force conversion of flags (int) to double
+ def __getbuffer__(self, Py_buffer *buffer, double flags):
+ raise RuntimeError("From __getbuffer__ with flags {}".format(flags))
diff --git a/tests/run/test_coroutines_pep492.pyx b/tests/run/test_coroutines_pep492.pyx
index 2841d97af..3060ab704 100644
--- a/tests/run/test_coroutines_pep492.pyx
+++ b/tests/run/test_coroutines_pep492.pyx
@@ -14,7 +14,7 @@ import copy
#import types
import pickle
import os.path
-#import inspect
+import inspect
import unittest
import warnings
import contextlib
@@ -754,7 +754,8 @@ class AsyncBadSyntaxTest(unittest.TestCase):
async def g(): pass
await z
await = 1
- #self.assertTrue(inspect.iscoroutinefunction(f))
+ if sys.version_info >= (3,10,6):
+ self.assertTrue(inspect.iscoroutinefunction(f))
class TokenizerRegrTest(unittest.TestCase):
@@ -777,7 +778,8 @@ class TokenizerRegrTest(unittest.TestCase):
exec(buf, ns, ns)
self.assertEqual(ns['i499'](), 499)
self.assertEqual(type(ns['foo']()).__name__, 'coroutine')
- #self.assertTrue(inspect.iscoroutinefunction(ns['foo']))
+ if sys.version_info >= (3,10,6):
+ self.assertTrue(inspect.iscoroutinefunction(ns['foo']))
class CoroutineTest(unittest.TestCase):
diff --git a/tests/run/test_dataclasses.pxi b/tests/run/test_dataclasses.pxi
new file mode 100644
index 000000000..998d837f2
--- /dev/null
+++ b/tests/run/test_dataclasses.pxi
@@ -0,0 +1,19 @@
+from cython.dataclasses cimport dataclass, field
+from cython cimport cclass
+from dataclasses import (
+ fields, FrozenInstanceError, InitVar, is_dataclass, asdict, astuple, replace
+)
+import unittest
+from unittest.mock import Mock
+import pickle
+import inspect
+from typing import ClassVar, Any, List, Union, Tuple, Dict, Generic, TypeVar, Optional
+from typing import get_type_hints
+from collections import deque, OrderedDict, namedtuple
+import sys
+
+def skip_on_versions_below(version):
+ def decorator(func):
+ if sys.version_info >= version:
+ return func
+ return decorator
diff --git a/tests/run/test_dataclasses.pyx b/tests/run/test_dataclasses.pyx
new file mode 100644
index 000000000..4daf62cf8
--- /dev/null
+++ b/tests/run/test_dataclasses.pyx
@@ -0,0 +1,1186 @@
+# AUTO-GENERATED BY Tools/make_dataclass_tests.py
+# DO NOT EDIT
+
+# cython: language_level=3
+include "test_dataclasses.pxi"
+
+@dataclass
+@cclass
+class C_TestCase_test_no_fields:
+ pass
+
+@dataclass
+@cclass
+class C_TestCase_test_no_fields_but_member_variable:
+ i = 0
+
+@dataclass
+@cclass
+class C_TestCase_test_one_field_no_default:
+ x: int
+
+@dataclass
+@cclass
+class C_TestCase_test_named_init_params:
+ x: int
+
+@dataclass
+@cclass
+class C_TestCase_test_field_named_object:
+ object: str
+
+@dataclass(frozen=True)
+@cclass
+class C_TestCase_test_field_named_object_frozen:
+ object: str
+
+@dataclass
+@cclass
+class C0_TestCase_test_0_field_compare:
+ pass
+
+@dataclass(order=False)
+@cclass
+class C1_TestCase_test_0_field_compare:
+ pass
+
+@dataclass(order=True)
+@cclass
+class C_TestCase_test_0_field_compare:
+ pass
+
+@dataclass
+@cclass
+class C0_TestCase_test_1_field_compare:
+ x: int
+
+@dataclass(order=False)
+@cclass
+class C1_TestCase_test_1_field_compare:
+ x: int
+
+@dataclass(order=True)
+@cclass
+class C_TestCase_test_1_field_compare:
+ x: int
+
+@dataclass
+@cclass
+class C_TestCase_test_field_no_default:
+ x: int = field()
+
+@dataclass
+@cclass
+class C_TestCase_test_not_in_compare:
+ x: int = 0
+ y: int = field(compare=False, default=4)
+
+class Mutable_TestCase_test_deliberately_mutable_defaults:
+
+ def __init__(self):
+ self.l = []
+
+@dataclass
+@cclass
+class C_TestCase_test_deliberately_mutable_defaults:
+ x: Mutable_TestCase_test_deliberately_mutable_defaults
+
+@dataclass()
+@cclass
+class C_TestCase_test_no_options:
+ x: int
+
+@dataclass
+@cclass
+class Point_TestCase_test_not_tuple:
+ x: int
+ y: int
+
+@dataclass
+@cclass
+class C_TestCase_test_not_tuple:
+ x: int
+ y: int
+
+@dataclass
+@cclass
+class Point3D_TestCase_test_not_other_dataclass:
+ x: int
+ y: int
+ z: int
+
+@dataclass
+@cclass
+class Date_TestCase_test_not_other_dataclass:
+ year: int
+ month: int
+ day: int
+
+@dataclass
+@cclass
+class Point3Dv1_TestCase_test_not_other_dataclass:
+ x: int = 0
+ y: int = 0
+ z: int = 0
+
+@dataclass
+@cclass
+class C_TestCase_test_class_var_no_default:
+ x: ClassVar[int]
+
+@dataclass
+@cclass
+class C_TestCase_test_init_var:
+ x: int = None
+ init_param: InitVar[int] = None
+
+ def __post_init__(self, init_param):
+ if self.x is None:
+ self.x = init_param * 2
+
+@dataclass
+@cclass
+class Foo_TestCase_test_default_factory_derived:
+ x: dict = field(default_factory=dict)
+
+@dataclass
+@cclass
+class Bar_TestCase_test_default_factory_derived(Foo_TestCase_test_default_factory_derived):
+ y: int = 1
+
+@dataclass
+@cclass
+class Baz_TestCase_test_default_factory_derived(Foo_TestCase_test_default_factory_derived):
+ pass
+
+@dataclass
+@cclass
+class A_TestCase_test_intermediate_non_dataclass:
+ x: int
+
+@cclass
+class B_TestCase_test_intermediate_non_dataclass(A_TestCase_test_intermediate_non_dataclass):
+ y: int
+
+@dataclass
+@cclass
+class C_TestCase_test_intermediate_non_dataclass(B_TestCase_test_intermediate_non_dataclass):
+ z: int
+
+class D_TestCase_test_intermediate_non_dataclass(C_TestCase_test_intermediate_non_dataclass):
+ t: int
+
+class NotDataClass_TestCase_test_is_dataclass:
+ pass
+
+@dataclass
+@cclass
+class C_TestCase_test_is_dataclass:
+ x: int
+
+@dataclass
+@cclass
+class D_TestCase_test_is_dataclass:
+ d: C_TestCase_test_is_dataclass
+ e: int
+
+class A_TestCase_test_is_dataclass_when_getattr_always_returns:
+
+ def __getattr__(self, key):
+ return 0
+
+class B_TestCase_test_is_dataclass_when_getattr_always_returns:
+ pass
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_fields_with_class_instance:
+ x: int
+ y: float
+
+class C_TestCase_test_helper_fields_exception:
+ pass
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_asdict:
+ x: int
+ y: int
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_asdict_raises_on_classes:
+ x: int
+ y: int
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_asdict_copy_values:
+ x: int
+ y: List[int] = field(default_factory=list)
+
+@dataclass
+@cclass
+class UserId_TestCase_test_helper_asdict_nested:
+ token: int
+ group: int
+
+@dataclass
+@cclass
+class User_TestCase_test_helper_asdict_nested:
+ name: str
+ id: UserId_TestCase_test_helper_asdict_nested
+
+@dataclass
+@cclass
+class User_TestCase_test_helper_asdict_builtin_containers:
+ name: str
+ id: int
+
+@dataclass
+@cclass
+class GroupList_TestCase_test_helper_asdict_builtin_containers:
+ id: int
+ users: List[User_TestCase_test_helper_asdict_builtin_containers]
+
+@dataclass
+@cclass
+class GroupTuple_TestCase_test_helper_asdict_builtin_containers:
+ id: int
+ users: Tuple[User_TestCase_test_helper_asdict_builtin_containers, ...]
+
+@dataclass
+@cclass
+class GroupDict_TestCase_test_helper_asdict_builtin_containers:
+ id: int
+ users: Dict[str, User_TestCase_test_helper_asdict_builtin_containers]
+
+@dataclass
+@cclass
+class Child_TestCase_test_helper_asdict_builtin_object_containers:
+ d: object
+
+@dataclass
+@cclass
+class Parent_TestCase_test_helper_asdict_builtin_object_containers:
+ child: Child_TestCase_test_helper_asdict_builtin_object_containers
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_asdict_factory:
+ x: int
+ y: int
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_asdict_namedtuple:
+ x: str
+ y: T
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_asdict_namedtuple_key:
+ f: dict
+
+class T_TestCase_test_helper_asdict_namedtuple_derived(namedtuple('Tbase', 'a')):
+
+ def my_a(self):
+ return self.a
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_asdict_namedtuple_derived:
+ f: T_TestCase_test_helper_asdict_namedtuple_derived
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_astuple:
+ x: int
+ y: int = 0
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_astuple_raises_on_classes:
+ x: int
+ y: int
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_astuple_copy_values:
+ x: int
+ y: List[int] = field(default_factory=list)
+
+@dataclass
+@cclass
+class UserId_TestCase_test_helper_astuple_nested:
+ token: int
+ group: int
+
+@dataclass
+@cclass
+class User_TestCase_test_helper_astuple_nested:
+ name: str
+ id: UserId_TestCase_test_helper_astuple_nested
+
+@dataclass
+@cclass
+class User_TestCase_test_helper_astuple_builtin_containers:
+ name: str
+ id: int
+
+@dataclass
+@cclass
+class GroupList_TestCase_test_helper_astuple_builtin_containers:
+ id: int
+ users: List[User_TestCase_test_helper_astuple_builtin_containers]
+
+@dataclass
+@cclass
+class GroupTuple_TestCase_test_helper_astuple_builtin_containers:
+ id: int
+ users: Tuple[User_TestCase_test_helper_astuple_builtin_containers, ...]
+
+@dataclass
+@cclass
+class GroupDict_TestCase_test_helper_astuple_builtin_containers:
+ id: int
+ users: Dict[str, User_TestCase_test_helper_astuple_builtin_containers]
+
+@dataclass
+@cclass
+class Child_TestCase_test_helper_astuple_builtin_object_containers:
+ d: object
+
+@dataclass
+@cclass
+class Parent_TestCase_test_helper_astuple_builtin_object_containers:
+ child: Child_TestCase_test_helper_astuple_builtin_object_containers
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_astuple_factory:
+ x: int
+ y: int
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_astuple_namedtuple:
+ x: str
+ y: T
+
+@dataclass
+@cclass
+class C_TestCase_test_alternate_classmethod_constructor:
+ x: int
+
+ @classmethod
+ def from_file(cls, filename):
+ value_in_file = 20
+ return cls(value_in_file)
+
+@dataclass
+@cclass
+class C_TestCase_test_field_metadata_default:
+ i: int
+
+@dataclass
+@cclass
+class P_TestCase_test_dataclasses_pickleable:
+ x: int
+ y: int = 0
+
+@dataclass
+@cclass
+class Q_TestCase_test_dataclasses_pickleable:
+ x: int
+ y: int = field(default=0, init=False)
+
+@dataclass
+@cclass
+class R_TestCase_test_dataclasses_pickleable:
+ x: int
+ y: List[int] = field(default_factory=list)
+
+@dataclass
+@cclass
+class C_TestInit_test_overwriting_init:
+ x: int
+
+ def __init__(self, x):
+ self.x = 2 * x
+
+@dataclass(init=True)
+@cclass
+class C_TestInit_test_overwriting_init_:
+ x: int
+
+ def __init__(self, x):
+ self.x = 2 * x
+
+@dataclass(init=False)
+@cclass
+class C_TestInit_test_overwriting_init__:
+ x: int
+
+ def __init__(self, x):
+ self.x = 2 * x
+
+@dataclass
+@cclass
+class C_TestRepr_test_overwriting_repr:
+ x: int
+
+ def __repr__(self):
+ return 'x'
+
+@dataclass(repr=True)
+@cclass
+class C_TestRepr_test_overwriting_repr_:
+ x: int
+
+ def __repr__(self):
+ return 'x'
+
+@dataclass(repr=False)
+@cclass
+class C_TestRepr_test_overwriting_repr__:
+ x: int
+
+ def __repr__(self):
+ return 'x'
+
+@dataclass(eq=False)
+@cclass
+class C_TestEq_test_no_eq:
+ x: int
+
+@dataclass(eq=False)
+@cclass
+class C_TestEq_test_no_eq_:
+ x: int
+
+ def __eq__(self, other):
+ return other == 10
+
+@dataclass
+@cclass
+class C_TestEq_test_overwriting_eq:
+ x: int
+
+ def __eq__(self, other):
+ return other == 3
+
+@dataclass(eq=True)
+@cclass
+class C_TestEq_test_overwriting_eq_:
+ x: int
+
+ def __eq__(self, other):
+ return other == 4
+
+@dataclass(eq=False)
+@cclass
+class C_TestEq_test_overwriting_eq__:
+ x: int
+
+ def __eq__(self, other):
+ return other == 5
+
+@dataclass(unsafe_hash=True)
+@cclass
+class C_TestHash_test_unsafe_hash:
+ x: int
+ y: str
+
+@dataclass(frozen=True)
+@cclass
+class C_TestHash_test_0_field_hash:
+ pass
+
+@dataclass(unsafe_hash=True)
+@cclass
+class C_TestHash_test_0_field_hash_:
+ pass
+
+@dataclass(frozen=True)
+@cclass
+class C_TestHash_test_1_field_hash:
+ x: int
+
+@dataclass(unsafe_hash=True)
+@cclass
+class C_TestHash_test_1_field_hash_:
+ x: int
+
# Fixtures for TestMakeDataclass: plain (non-dataclass) base classes and
# a mixed dataclass/plain pair used as bases in the make_dataclass tests.
class Base1_TestMakeDataclass_test_base:
    pass

class Base2_TestMakeDataclass_test_base:
    pass

@dataclass
@cclass
class Base1_TestMakeDataclass_test_base_dataclass:
    x: int

class Base2_TestMakeDataclass_test_base_dataclass:
    pass
+
# Fixtures for TestReplace: frozen classes for the basic replace() cases,
# plus classes exercising init=False fields, ClassVar, InitVar, and the
# recursive-__repr__ guard (objects whose fields refer back to themselves
# directly or through one or two intermediaries).
@dataclass(frozen=True)
@cclass
class C_TestReplace_test:
    x: int
    y: int

@dataclass(frozen=True)
@cclass
class C_TestReplace_test_invalid_field_name:
    x: int
    y: int

@dataclass(frozen=True)
@cclass
class C_TestReplace_test_invalid_object:
    x: int
    y: int

@dataclass
@cclass
class C_TestReplace_test_no_init:
    x: int
    # init=False: replace() must reject attempts to pass y explicitly.
    y: int = field(init=False, default=10)

@dataclass
@cclass
class C_TestReplace_test_classvar:
    x: int
    # ClassVar is not a field, so replace() must reject y=... as well.
    y: ClassVar[int] = 1000

@dataclass
@cclass
class C_TestReplace_test_initvar_is_specified:
    x: int
    y: InitVar[int]

    def __post_init__(self, y):
        # Folds the init-only y into x so its effect is observable.
        self.x *= y

@dataclass
@cclass
class C_TestReplace_test_recursive_repr:
    f: object

@dataclass
@cclass
class C_TestReplace_test_recursive_repr_two_attrs:
    f: object
    g: object

@dataclass
@cclass
class C_TestReplace_test_recursive_repr_indirection:
    f: object

@dataclass
@cclass
class D_TestReplace_test_recursive_repr_indirection:
    f: object

@dataclass
@cclass
class C_TestReplace_test_recursive_repr_indirection_two:
    f: object

@dataclass
@cclass
class D_TestReplace_test_recursive_repr_indirection_two:
    f: object

@dataclass
@cclass
class E_TestReplace_test_recursive_repr_indirection_two:
    f: object

@dataclass
@cclass
class C_TestReplace_test_recursive_repr_misc_attrs:
    f: object
    g: int
+
# Dedicated exception type used by tests that need to distinguish their
# own raises from incidental errors.
class CustomError(Exception):
    pass
+
class TestCase(unittest.TestCase):
    """Core dataclass behaviour tests ported from CPython's
    test_dataclasses.  The classes under test live at module level under
    names of the form ``Cls_TestClass_test_method`` and are rebound to
    short local names at the top of each test.
    """

    def test_no_fields(self):
        # A dataclass with no fields is constructible and reports no fields.
        C = C_TestCase_test_no_fields
        o = C()
        self.assertEqual(len(fields(C)), 0)

    def test_no_fields_but_member_variable(self):
        # A plain (unannotated) class attribute is not a dataclass field.
        C = C_TestCase_test_no_fields_but_member_variable
        o = C()
        self.assertEqual(len(fields(C)), 0)

    def test_one_field_no_default(self):
        C = C_TestCase_test_one_field_no_default
        o = C(42)
        self.assertEqual(o.x, 42)

    def test_named_init_params(self):
        # Generated __init__ accepts fields as keyword arguments.
        C = C_TestCase_test_named_init_params
        o = C(x=32)
        self.assertEqual(o.x, 32)

    def test_field_named_object(self):
        # A field named 'object' must not clash with the builtin.
        C = C_TestCase_test_field_named_object
        c = C('foo')
        self.assertEqual(c.object, 'foo')

    def test_field_named_object_frozen(self):
        C = C_TestCase_test_field_named_object_frozen
        c = C('foo')
        self.assertEqual(c.object, 'foo')

    def test_0_field_compare(self):
        # Without order=True, zero-field instances compare equal but the
        # ordering operators raise TypeError; with order=True they work.
        C0 = C0_TestCase_test_0_field_compare
        C1 = C1_TestCase_test_0_field_compare
        for cls in [C0, C1]:
            with self.subTest(cls=cls):
                self.assertEqual(cls(), cls())
                for (idx, fn) in enumerate([lambda a, b: a < b, lambda a, b: a <= b, lambda a, b: a > b, lambda a, b: a >= b]):
                    with self.subTest(idx=idx):
                        with self.assertRaises(TypeError):
                            fn(cls(), cls())
        C = C_TestCase_test_0_field_compare
        self.assertLessEqual(C(), C())
        self.assertGreaterEqual(C(), C())

    def test_1_field_compare(self):
        # Same as above but with one field driving the comparisons.
        C0 = C0_TestCase_test_1_field_compare
        C1 = C1_TestCase_test_1_field_compare
        for cls in [C0, C1]:
            with self.subTest(cls=cls):
                self.assertEqual(cls(1), cls(1))
                self.assertNotEqual(cls(0), cls(1))
                for (idx, fn) in enumerate([lambda a, b: a < b, lambda a, b: a <= b, lambda a, b: a > b, lambda a, b: a >= b]):
                    with self.subTest(idx=idx):
                        with self.assertRaises(TypeError):
                            fn(cls(0), cls(0))
        C = C_TestCase_test_1_field_compare
        self.assertLess(C(0), C(1))
        self.assertLessEqual(C(0), C(1))
        self.assertLessEqual(C(1), C(1))
        self.assertGreater(C(1), C(0))
        self.assertGreaterEqual(C(1), C(0))
        self.assertGreaterEqual(C(1), C(1))

    def test_field_no_default(self):
        # field() with no default makes the argument required.
        C = C_TestCase_test_field_no_default
        self.assertEqual(C(5).x, 5)
        with self.assertRaises(TypeError):
            C()

    def test_not_in_compare(self):
        # Fields declared with compare=False are ignored by __eq__.
        C = C_TestCase_test_not_in_compare
        self.assertEqual(C(), C(0, 20))
        self.assertEqual(C(1, 10), C(1, 20))
        self.assertNotEqual(C(3), C(4, 10))
        self.assertNotEqual(C(3, 10), C(4, 10))

    def test_deliberately_mutable_defaults(self):
        # A shared mutable default object really is shared across instances.
        Mutable = Mutable_TestCase_test_deliberately_mutable_defaults
        C = C_TestCase_test_deliberately_mutable_defaults
        lst = Mutable()
        o1 = C(lst)
        o2 = C(lst)
        self.assertEqual(o1, o2)
        o1.x.l.extend([1, 2])
        self.assertEqual(o1, o2)
        self.assertEqual(o1.x.l, [1, 2])
        self.assertIs(o1.x, o2.x)

    def test_no_options(self):
        # @dataclass with no arguments still generates __init__.
        C = C_TestCase_test_no_options
        self.assertEqual(C(42).x, 42)

    def test_not_tuple(self):
        # A dataclass never compares equal to a tuple of its values,
        # nor to a differently-typed dataclass with the same values.
        Point = Point_TestCase_test_not_tuple
        self.assertNotEqual(Point(1, 2), (1, 2))
        C = C_TestCase_test_not_tuple
        self.assertNotEqual(Point(1, 3), C(1, 3))

    def test_not_other_dataclass(self):
        # Equality is class-sensitive, and dataclasses are not iterable.
        Point3D = Point3D_TestCase_test_not_other_dataclass
        Date = Date_TestCase_test_not_other_dataclass
        self.assertNotEqual(Point3D(2017, 6, 3), Date(2017, 6, 3))
        self.assertNotEqual(Point3D(1, 2, 3), (1, 2, 3))
        with self.assertRaises(TypeError):
            (x, y, z) = Point3D(4, 5, 6)
        Point3Dv1 = Point3Dv1_TestCase_test_not_other_dataclass
        self.assertNotEqual(Point3D(0, 0, 0), Point3Dv1())

    def test_class_var_no_default(self):
        # A ClassVar with no default creates no class attribute.
        C = C_TestCase_test_class_var_no_default
        self.assertNotIn('x', C.__dict__)

    def test_init_var(self):
        # InitVar values are passed to __init__/__post_init__ only.
        C = C_TestCase_test_init_var
        c = C(init_param=10)
        self.assertEqual(c.x, 20)

    @skip_on_versions_below((3, 10))
    def test_init_var_preserve_type(self):
        # InitVar[...] keeps the wrapped type and has a stable repr
        # (the `int | str` form needs Python 3.10+, hence the skip).
        self.assertEqual(InitVar[int].type, int)
        self.assertEqual(repr(InitVar[int]), 'dataclasses.InitVar[int]')
        self.assertEqual(repr(InitVar[List[int]]), 'dataclasses.InitVar[typing.List[int]]')
        self.assertEqual(repr(InitVar[list[int]]), 'dataclasses.InitVar[list[int]]')
        self.assertEqual(repr(InitVar[int | str]), 'dataclasses.InitVar[int | str]')

    def test_default_factory_derived(self):
        # default_factory is inherited and composes with added fields.
        Foo = Foo_TestCase_test_default_factory_derived
        Bar = Bar_TestCase_test_default_factory_derived
        self.assertEqual(Foo().x, {})
        self.assertEqual(Bar().x, {})
        self.assertEqual(Bar().y, 1)
        Baz = Baz_TestCase_test_default_factory_derived
        self.assertEqual(Baz().x, {})

    def test_intermediate_non_dataclass(self):
        # A non-dataclass in the MRO contributes no fields; its plain
        # annotation y never becomes an attribute.
        A = A_TestCase_test_intermediate_non_dataclass
        B = B_TestCase_test_intermediate_non_dataclass
        C = C_TestCase_test_intermediate_non_dataclass
        c = C(1, 3)
        self.assertEqual((c.x, c.z), (1, 3))
        with self.assertRaises(AttributeError):
            c.y
        D = D_TestCase_test_intermediate_non_dataclass
        d = D(4, 5)
        self.assertEqual((d.x, d.z), (4, 5))

    def test_is_dataclass(self):
        # is_dataclass() accepts classes and instances, rejects the rest.
        NotDataClass = NotDataClass_TestCase_test_is_dataclass
        self.assertFalse(is_dataclass(0))
        self.assertFalse(is_dataclass(int))
        self.assertFalse(is_dataclass(NotDataClass))
        self.assertFalse(is_dataclass(NotDataClass()))
        C = C_TestCase_test_is_dataclass
        D = D_TestCase_test_is_dataclass
        c = C(10)
        d = D(c, 4)
        self.assertTrue(is_dataclass(C))
        self.assertTrue(is_dataclass(c))
        self.assertFalse(is_dataclass(c.x))
        self.assertTrue(is_dataclass(d.d))
        self.assertFalse(is_dataclass(d.e))

    def test_is_dataclass_when_getattr_always_returns(self):
        # Objects that merely *appear* to have __dataclass_fields__ (via a
        # permissive __getattr__ or an instance attribute) must be rejected
        # by is_dataclass() and the helper functions.
        A = A_TestCase_test_is_dataclass_when_getattr_always_returns
        self.assertFalse(is_dataclass(A))
        a = A()
        B = B_TestCase_test_is_dataclass_when_getattr_always_returns
        b = B()
        b.__dataclass_fields__ = []
        for obj in (a, b):
            with self.subTest(obj=obj):
                self.assertFalse(is_dataclass(obj))
                with self.assertRaises(TypeError):
                    asdict(obj)
                with self.assertRaises(TypeError):
                    astuple(obj)
                with self.assertRaises(TypeError):
                    replace(obj, x=0)

    def test_helper_fields_with_class_instance(self):
        # fields() returns the same tuple for a class and its instances.
        C = C_TestCase_test_helper_fields_with_class_instance
        self.assertEqual(fields(C), fields(C(0, 0.0)))

    def test_helper_fields_exception(self):
        # fields() raises TypeError for non-dataclasses.
        with self.assertRaises(TypeError):
            fields(0)
        C = C_TestCase_test_helper_fields_exception
        with self.assertRaises(TypeError):
            fields(C)
        with self.assertRaises(TypeError):
            fields(C())

    def test_helper_asdict(self):
        # asdict() snapshots current field values into a fresh dict.
        C = C_TestCase_test_helper_asdict
        c = C(1, 2)
        self.assertEqual(asdict(c), {'x': 1, 'y': 2})
        self.assertEqual(asdict(c), asdict(c))
        self.assertIsNot(asdict(c), asdict(c))
        c.x = 42
        self.assertEqual(asdict(c), {'x': 42, 'y': 2})
        self.assertIs(type(asdict(c)), dict)

    def test_helper_asdict_raises_on_classes(self):
        # asdict() only accepts instances, never classes.
        C = C_TestCase_test_helper_asdict_raises_on_classes
        with self.assertRaises(TypeError):
            asdict(C)
        with self.assertRaises(TypeError):
            asdict(int)

    def test_helper_asdict_copy_values(self):
        # asdict() deep-copies container field values.
        C = C_TestCase_test_helper_asdict_copy_values
        initial = []
        c = C(1, initial)
        d = asdict(c)
        self.assertEqual(d['y'], initial)
        self.assertIsNot(d['y'], initial)
        c = C(1)
        d = asdict(c)
        d['y'].append(1)
        self.assertEqual(c.y, [])

    def test_helper_asdict_nested(self):
        # Nested dataclass fields are converted recursively.
        UserId = UserId_TestCase_test_helper_asdict_nested
        User = User_TestCase_test_helper_asdict_nested
        u = User('Joe', UserId(123, 1))
        d = asdict(u)
        self.assertEqual(d, {'name': 'Joe', 'id': {'token': 123, 'group': 1}})
        self.assertIsNot(asdict(u), asdict(u))
        u.id.group = 2
        self.assertEqual(asdict(u), {'name': 'Joe', 'id': {'token': 123, 'group': 2}})

    def test_helper_asdict_builtin_containers(self):
        # Recursion descends into lists, tuples and dict values.
        User = User_TestCase_test_helper_asdict_builtin_containers
        GroupList = GroupList_TestCase_test_helper_asdict_builtin_containers
        GroupTuple = GroupTuple_TestCase_test_helper_asdict_builtin_containers
        GroupDict = GroupDict_TestCase_test_helper_asdict_builtin_containers
        a = User('Alice', 1)
        b = User('Bob', 2)
        gl = GroupList(0, [a, b])
        gt = GroupTuple(0, (a, b))
        gd = GroupDict(0, {'first': a, 'second': b})
        self.assertEqual(asdict(gl), {'id': 0, 'users': [{'name': 'Alice', 'id': 1}, {'name': 'Bob', 'id': 2}]})
        self.assertEqual(asdict(gt), {'id': 0, 'users': ({'name': 'Alice', 'id': 1}, {'name': 'Bob', 'id': 2})})
        self.assertEqual(asdict(gd), {'id': 0, 'users': {'first': {'name': 'Alice', 'id': 1}, 'second': {'name': 'Bob', 'id': 2}}})

    def test_helper_asdict_builtin_object_containers(self):
        Child = Child_TestCase_test_helper_asdict_builtin_object_containers
        Parent = Parent_TestCase_test_helper_asdict_builtin_object_containers
        self.assertEqual(asdict(Parent(Child([1]))), {'child': {'d': [1]}})
        self.assertEqual(asdict(Parent(Child({1: 2}))), {'child': {'d': {1: 2}}})

    def test_helper_asdict_factory(self):
        # dict_factory controls the mapping type used at every level.
        C = C_TestCase_test_helper_asdict_factory
        c = C(1, 2)
        d = asdict(c, dict_factory=OrderedDict)
        self.assertEqual(d, OrderedDict([('x', 1), ('y', 2)]))
        self.assertIsNot(d, asdict(c, dict_factory=OrderedDict))
        c.x = 42
        d = asdict(c, dict_factory=OrderedDict)
        self.assertEqual(d, OrderedDict([('x', 42), ('y', 2)]))
        self.assertIs(type(d), OrderedDict)

    def test_helper_asdict_namedtuple(self):
        # namedtuples inside fields are rebuilt as the same namedtuple
        # type, with dataclass elements converted inside them.
        T = namedtuple('T', 'a b c')
        C = C_TestCase_test_helper_asdict_namedtuple
        c = C('outer', T(1, C('inner', T(11, 12, 13)), 2))
        d = asdict(c)
        self.assertEqual(d, {'x': 'outer', 'y': T(1, {'x': 'inner', 'y': T(11, 12, 13)}, 2)})
        d = asdict(c, dict_factory=OrderedDict)
        self.assertEqual(d, {'x': 'outer', 'y': T(1, {'x': 'inner', 'y': T(11, 12, 13)}, 2)})
        self.assertIs(type(d), OrderedDict)
        self.assertIs(type(d['y'][1]), OrderedDict)

    def test_helper_asdict_namedtuple_key(self):
        # A namedtuple used as a dict key survives conversion intact.
        C = C_TestCase_test_helper_asdict_namedtuple_key
        T = namedtuple('T', 'a')
        c = C({T('an a'): 0})
        self.assertEqual(asdict(c), {'f': {T(a='an a'): 0}})

    def test_helper_asdict_namedtuple_derived(self):
        # Subclasses of namedtuples keep their derived type and methods.
        T = T_TestCase_test_helper_asdict_namedtuple_derived
        C = C_TestCase_test_helper_asdict_namedtuple_derived
        t = T(6)
        c = C(t)
        d = asdict(c)
        self.assertEqual(d, {'f': T(a=6)})
        self.assertIsNot(d['f'], t)
        self.assertEqual(d['f'].my_a(), 6)

    def test_helper_astuple(self):
        # astuple() snapshots current field values into a fresh tuple.
        C = C_TestCase_test_helper_astuple
        c = C(1)
        self.assertEqual(astuple(c), (1, 0))
        self.assertEqual(astuple(c), astuple(c))
        self.assertIsNot(astuple(c), astuple(c))
        c.y = 42
        self.assertEqual(astuple(c), (1, 42))
        self.assertIs(type(astuple(c)), tuple)

    def test_helper_astuple_raises_on_classes(self):
        # astuple() only accepts instances, never classes.
        C = C_TestCase_test_helper_astuple_raises_on_classes
        with self.assertRaises(TypeError):
            astuple(C)
        with self.assertRaises(TypeError):
            astuple(int)

    def test_helper_astuple_copy_values(self):
        # astuple() deep-copies container field values.
        C = C_TestCase_test_helper_astuple_copy_values
        initial = []
        c = C(1, initial)
        t = astuple(c)
        self.assertEqual(t[1], initial)
        self.assertIsNot(t[1], initial)
        c = C(1)
        t = astuple(c)
        t[1].append(1)
        self.assertEqual(c.y, [])

    def test_helper_astuple_nested(self):
        # Nested dataclass fields become nested tuples.
        UserId = UserId_TestCase_test_helper_astuple_nested
        User = User_TestCase_test_helper_astuple_nested
        u = User('Joe', UserId(123, 1))
        t = astuple(u)
        self.assertEqual(t, ('Joe', (123, 1)))
        self.assertIsNot(astuple(u), astuple(u))
        u.id.group = 2
        self.assertEqual(astuple(u), ('Joe', (123, 2)))

    def test_helper_astuple_builtin_containers(self):
        # Recursion descends into lists, tuples and dict values.
        User = User_TestCase_test_helper_astuple_builtin_containers
        GroupList = GroupList_TestCase_test_helper_astuple_builtin_containers
        GroupTuple = GroupTuple_TestCase_test_helper_astuple_builtin_containers
        GroupDict = GroupDict_TestCase_test_helper_astuple_builtin_containers
        a = User('Alice', 1)
        b = User('Bob', 2)
        gl = GroupList(0, [a, b])
        gt = GroupTuple(0, (a, b))
        gd = GroupDict(0, {'first': a, 'second': b})
        self.assertEqual(astuple(gl), (0, [('Alice', 1), ('Bob', 2)]))
        self.assertEqual(astuple(gt), (0, (('Alice', 1), ('Bob', 2))))
        self.assertEqual(astuple(gd), (0, {'first': ('Alice', 1), 'second': ('Bob', 2)}))

    def test_helper_astuple_builtin_object_containers(self):
        Child = Child_TestCase_test_helper_astuple_builtin_object_containers
        Parent = Parent_TestCase_test_helper_astuple_builtin_object_containers
        self.assertEqual(astuple(Parent(Child([1]))), (([1],),))
        self.assertEqual(astuple(Parent(Child({1: 2}))), (({1: 2},),))

    def test_helper_astuple_factory(self):
        # tuple_factory controls the sequence type of the outer result.
        C = C_TestCase_test_helper_astuple_factory
        NT = namedtuple('NT', 'x y')

        def nt(lst):
            return NT(*lst)
        c = C(1, 2)
        t = astuple(c, tuple_factory=nt)
        self.assertEqual(t, NT(1, 2))
        self.assertIsNot(t, astuple(c, tuple_factory=nt))
        c.x = 42
        t = astuple(c, tuple_factory=nt)
        self.assertEqual(t, NT(42, 2))
        self.assertIs(type(t), NT)

    def test_helper_astuple_namedtuple(self):
        # namedtuples inside fields are rebuilt as the same type.
        T = namedtuple('T', 'a b c')
        C = C_TestCase_test_helper_astuple_namedtuple
        c = C('outer', T(1, C('inner', T(11, 12, 13)), 2))
        t = astuple(c)
        self.assertEqual(t, ('outer', T(1, ('inner', (11, 12, 13)), 2)))
        t = astuple(c, tuple_factory=list)
        self.assertEqual(t, ['outer', T(1, ['inner', T(11, 12, 13)], 2)])

    def test_alternate_classmethod_constructor(self):
        # A classmethod alternate constructor coexists with __init__.
        C = C_TestCase_test_alternate_classmethod_constructor
        self.assertEqual(C.from_file('filename').x, 20)

    def test_field_metadata_default(self):
        # Field metadata defaults to an empty, read-only mapping.
        C = C_TestCase_test_field_metadata_default
        self.assertFalse(fields(C)[0].metadata)
        self.assertEqual(len(fields(C)[0].metadata), 0)
        with self.assertRaises(TypeError):
            fields(C)[0].metadata['test'] = 3

    def test_dataclasses_pickleable(self):
        # Pickle needs the classes importable by their short names, hence
        # the rebinding through module-level globals P, Q, R.
        global P, Q, R
        P = P_TestCase_test_dataclasses_pickleable
        Q = Q_TestCase_test_dataclasses_pickleable
        R = R_TestCase_test_dataclasses_pickleable
        q = Q(1)
        q.y = 2
        samples = [P(1), P(1, 2), Q(1), q, R(1), R(1, [2, 3, 4])]
        for sample in samples:
            # Every supported pickle protocol must round-trip field values.
            for proto in range(pickle.HIGHEST_PROTOCOL + 1):
                with self.subTest(sample=sample, proto=proto):
                    new_sample = pickle.loads(pickle.dumps(sample, proto))
                    self.assertEqual(sample.x, new_sample.x)
                    self.assertEqual(sample.y, new_sample.y)
                    self.assertIsNot(sample, new_sample)
                    new_sample.x = 42
                    another_new_sample = pickle.loads(pickle.dumps(new_sample, proto))
                    self.assertEqual(new_sample.x, another_new_sample.x)
                    self.assertEqual(sample.y, another_new_sample.y)
+
# Placeholder for the corresponding CPython test class; its tests are
# not ported here.
class TestFieldNoAnnotation(unittest.TestCase):
    pass
+
class TestInit(unittest.TestCase):
    """Tests for interaction between @dataclass and user __init__."""

    def test_overwriting_init(self):
        # A user-defined __init__ (which doubles x) must be kept for the
        # default, init=True, and init=False configurations alike.
        C = C_TestInit_test_overwriting_init
        self.assertEqual(C(3).x, 6)
        C = C_TestInit_test_overwriting_init_
        self.assertEqual(C(4).x, 8)
        C = C_TestInit_test_overwriting_init__
        self.assertEqual(C(5).x, 10)
+
class TestRepr(unittest.TestCase):
    """Tests for interaction between @dataclass and user __repr__."""

    def test_overwriting_repr(self):
        # The user __repr__ (constant 'x') must win in all three cases.
        C = C_TestRepr_test_overwriting_repr
        self.assertEqual(repr(C(0)), 'x')
        C = C_TestRepr_test_overwriting_repr_
        self.assertEqual(repr(C(0)), 'x')
        C = C_TestRepr_test_overwriting_repr__
        self.assertEqual(repr(C(0)), 'x')
+
class TestEq(unittest.TestCase):
    """Tests for generated vs. user-defined __eq__."""

    def test_no_eq(self):
        # eq=False: identity comparison only, unless the user defines
        # __eq__ themselves (which here compares against 10).
        C = C_TestEq_test_no_eq
        self.assertNotEqual(C(0), C(0))
        c = C(3)
        self.assertEqual(c, c)
        C = C_TestEq_test_no_eq_
        self.assertEqual(C(3), 10)

    def test_overwriting_eq(self):
        # Each fixture's __eq__ compares against a distinct sentinel
        # (3/4/5), proving the user implementation is in effect.
        C = C_TestEq_test_overwriting_eq
        self.assertEqual(C(1), 3)
        self.assertNotEqual(C(1), 1)
        C = C_TestEq_test_overwriting_eq_
        self.assertEqual(C(1), 4)
        self.assertNotEqual(C(1), 1)
        C = C_TestEq_test_overwriting_eq__
        self.assertEqual(C(1), 5)
        self.assertNotEqual(C(1), 1)
+
# Placeholder for the corresponding CPython test class; its tests are
# not ported here.
class TestOrdering(unittest.TestCase):
    pass
+
class TestHash(unittest.TestCase):
    """Tests that generated __hash__ equals hash of the field tuple."""

    def test_unsafe_hash(self):
        C = C_TestHash_test_unsafe_hash
        self.assertEqual(hash(C(1, 'foo')), hash((1, 'foo')))

    def test_0_field_hash(self):
        # Both frozen=True and unsafe_hash=True yield hash(()) for a
        # fieldless class.
        C = C_TestHash_test_0_field_hash
        self.assertEqual(hash(C()), hash(()))
        C = C_TestHash_test_0_field_hash_
        self.assertEqual(hash(C()), hash(()))

    def test_1_field_hash(self):
        C = C_TestHash_test_1_field_hash
        self.assertEqual(hash(C(4)), hash((4,)))
        self.assertEqual(hash(C(42)), hash((42,)))
        C = C_TestHash_test_1_field_hash_
        self.assertEqual(hash(C(4)), hash((4,)))
        self.assertEqual(hash(C(42)), hash((42,)))
+
# Placeholder for the corresponding CPython test class; its tests are
# not ported here (the Base*/make_dataclass fixtures above remain for it).
class TestMakeDataclass(unittest.TestCase):
    pass
+
class TestReplace(unittest.TestCase):
    """Tests for dataclasses.replace() and the recursive-repr guard."""

    def test(self):
        # replace() creates a new instance with only x overridden.
        C = C_TestReplace_test
        c = C(1, 2)
        c1 = replace(c, x=3)
        self.assertEqual(c1.x, 3)
        self.assertEqual(c1.y, 2)

    def test_invalid_field_name(self):
        # Unknown field names are rejected with TypeError.
        C = C_TestReplace_test_invalid_field_name
        c = C(1, 2)
        with self.assertRaises(TypeError):
            c1 = replace(c, z=3)

    def test_invalid_object(self):
        # replace() only accepts dataclass *instances*.
        C = C_TestReplace_test_invalid_object
        with self.assertRaises(TypeError):
            replace(C, x=3)
        with self.assertRaises(TypeError):
            replace(0, x=3)

    def test_no_init(self):
        # An init=False field is re-initialised from its default (10) by
        # replace(), and may not be passed explicitly.
        C = C_TestReplace_test_no_init
        c = C(1)
        c.y = 20
        c1 = replace(c, x=5)
        self.assertEqual((c1.x, c1.y), (5, 10))
        with self.assertRaises(ValueError):
            replace(c, x=2, y=30)
        with self.assertRaises(ValueError):
            replace(c, y=30)

    def test_classvar(self):
        # ClassVars are shared, not per-instance, and not replaceable.
        C = C_TestReplace_test_classvar
        c = C(1)
        d = C(2)
        self.assertIs(c.y, d.y)
        self.assertEqual(c.y, 1000)
        with self.assertRaises(TypeError):
            replace(c, y=30)
        replace(c, x=5)

    def test_initvar_is_specified(self):
        # An InitVar must be supplied to replace(); __post_init__ then
        # recomputes x as x * y (3 * 5 == 15).
        C = C_TestReplace_test_initvar_is_specified
        c = C(1, 10)
        self.assertEqual(c.x, 10)
        with self.assertRaises(ValueError):
            replace(c, x=3)
        c = replace(c, x=3, y=5)
        self.assertEqual(c.x, 15)

    def test_recursive_repr(self):
        # A self-referencing field renders as '...' instead of recursing.
        C = C_TestReplace_test_recursive_repr
        c = C(None)
        c.f = c
        self.assertEqual(repr(c), 'C_TestReplace_test_recursive_repr(f=...)')

    def test_recursive_repr_two_attrs(self):
        C = C_TestReplace_test_recursive_repr_two_attrs
        c = C(None, None)
        c.f = c
        c.g = c
        self.assertEqual(repr(c), 'C_TestReplace_test_recursive_repr_two_attrs(f=..., g=...)')

    def test_recursive_repr_indirection(self):
        # The cycle detector also catches two-object cycles (c -> d -> c).
        C = C_TestReplace_test_recursive_repr_indirection
        D = D_TestReplace_test_recursive_repr_indirection
        c = C(None)
        d = D(None)
        c.f = d
        d.f = c
        self.assertEqual(repr(c), 'C_TestReplace_test_recursive_repr_indirection(f=D_TestReplace_test_recursive_repr_indirection(f=...))')

    def test_recursive_repr_indirection_two(self):
        # ...and three-object cycles (c -> d -> e -> c).
        C = C_TestReplace_test_recursive_repr_indirection_two
        D = D_TestReplace_test_recursive_repr_indirection_two
        E = E_TestReplace_test_recursive_repr_indirection_two
        c = C(None)
        d = D(None)
        e = E(None)
        c.f = d
        d.f = e
        e.f = c
        self.assertEqual(repr(c), 'C_TestReplace_test_recursive_repr_indirection_two(f=D_TestReplace_test_recursive_repr_indirection_two(f=E_TestReplace_test_recursive_repr_indirection_two(f=...)))')

    def test_recursive_repr_misc_attrs(self):
        # Only the recursive field is elided; other fields still print.
        C = C_TestReplace_test_recursive_repr_misc_attrs
        c = C(None, 1)
        c.f = c
        self.assertEqual(repr(c), 'C_TestReplace_test_recursive_repr_misc_attrs(f=..., g=1)')
+
# Placeholder for the corresponding CPython test class; its tests are
# not ported here.
class TestAbstract(unittest.TestCase):
    pass
+
# Placeholder for the corresponding CPython test class; its tests are
# not ported here.
class TestKeywordArgs(unittest.TestCase):
    pass
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
diff --git a/tests/run/test_grammar.py b/tests/run/test_grammar.py
index c41b75f55..dfa11e087 100644
--- a/tests/run/test_grammar.py
+++ b/tests/run/test_grammar.py
@@ -64,8 +64,7 @@ if cython.compiled:
def use_old_parser():
- # FIXME: currently disabling new PEG parser tests.
- return True
+ return False
import unittest
@@ -798,8 +797,6 @@ class GrammarTests(unittest.TestCase):
self.assertEqual(f.__annotations__, {'return': 'list'})
# Test expressions as decorators (PEP 614):
- # FIXME: implement PEP 614
- """
@False or null
def f(x): pass
@d := null
@@ -812,7 +809,6 @@ class GrammarTests(unittest.TestCase):
def f(x): pass
@[null][0].__call__.__call__
def f(x): pass
- """
# test closures with a variety of opargs
closure = 1
@@ -1706,8 +1702,6 @@ class GrammarTests(unittest.TestCase):
class G: pass
# Test expressions as decorators (PEP 614):
- # FIXME: implement PEP 614
- """
@False or class_decorator
class H: pass
@d := class_decorator
@@ -1720,7 +1714,6 @@ class GrammarTests(unittest.TestCase):
class L: pass
@[class_decorator][0].__call__.__call__
class M: pass
- """
def test_dictcomps(self):
# dictorsetmaker: ( (test ':' test (comp_for |
@@ -1869,68 +1862,53 @@ class GrammarTests(unittest.TestCase):
with manager() as x, manager():
pass
- if not use_old_parser():
- test_cases = [
- """if 1:
- with (
- manager()
- ):
- pass
- """,
- """if 1:
- with (
- manager() as x
- ):
- pass
- """,
- """if 1:
- with (
- manager() as (x, y),
- manager() as z,
- ):
- pass
- """,
- """if 1:
- with (
- manager(),
- manager()
- ):
- pass
- """,
- """if 1:
- with (
- manager() as x,
- manager() as y
- ):
- pass
- """,
- """if 1:
- with (
- manager() as x,
- manager()
- ):
- pass
- """,
- """if 1:
- with (
- manager() as x,
- manager() as y,
- manager() as z,
- ):
- pass
- """,
- """if 1:
- with (
- manager() as x,
- manager() as y,
- manager(),
- ):
- pass
- """,
- ]
- for case in test_cases:
- with self.subTest(case=case):
- compile(case, "<string>", "exec")
+ with (
+ manager()
+ ):
+ pass
+
+ with (
+ manager() as x
+ ):
+ pass
+
+ with (
+ manager() as (x, y),
+ manager() as z,
+ ):
+ pass
+
+ with (
+ manager(),
+ manager()
+ ):
+ pass
+
+ with (
+ manager() as x,
+ manager() as y
+ ):
+ pass
+
+ with (
+ manager() as x,
+ manager()
+ ):
+ pass
+
+ with (
+ manager() as x,
+ manager() as y,
+ manager() as z,
+ ):
+ pass
+
+ with (
+ manager() as x,
+ manager() as y,
+ manager(),
+ ):
+ pass
def test_if_else_expr(self):
diff --git a/tests/run/trace_nogil.pyx b/tests/run/trace_nogil.pyx
index dee443e5b..175935ced 100644
--- a/tests/run/trace_nogil.pyx
+++ b/tests/run/trace_nogil.pyx
@@ -1,6 +1,6 @@
# cython: linetrace=True
-cdef void foo(int err) nogil except *:
+cdef void foo(int err) except * nogil:
with gil:
raise ValueError(err)
diff --git a/tests/run/type_inference.pyx b/tests/run/type_inference.pyx
index df77f6bd9..9a72022b2 100644
--- a/tests/run/type_inference.pyx
+++ b/tests/run/type_inference.pyx
@@ -242,7 +242,7 @@ def c_functions():
>>> c_functions()
"""
f = cfunc
- assert typeof(f) == 'int (*)(int)', typeof(f)
+ assert typeof(f) == 'int (*)(int) except? -1', typeof(f)
assert 2 == f(1)
def builtin_functions():
@@ -537,7 +537,7 @@ def safe_c_functions():
>>> safe_c_functions()
"""
f = cfunc
- assert typeof(f) == 'int (*)(int)', typeof(f)
+ assert typeof(f) == 'int (*)(int) except? -1', typeof(f)
assert 2 == f(1)
@infer_types(None)
diff --git a/tests/run/with_gil.pyx b/tests/run/with_gil.pyx
index 6fee3f192..2eed27eac 100644
--- a/tests/run/with_gil.pyx
+++ b/tests/run/with_gil.pyx
@@ -259,7 +259,7 @@ cpdef test_cpdef():
# Now test some cdef functions with different return types
-cdef void void_nogil_ignore_exception() nogil:
+cdef void void_nogil_ignore_exception() noexcept nogil:
with gil:
raise ExceptionWithMsg("This is swallowed")
@@ -267,7 +267,7 @@ cdef void void_nogil_ignore_exception() nogil:
with gil:
print "unreachable"
-cdef void void_nogil_nested_gil() nogil:
+cdef void void_nogil_nested_gil() noexcept nogil:
with gil:
with nogil:
with gil:
@@ -304,7 +304,7 @@ def test_nogil_void_funcs_with_nogil():
void_nogil_nested_gil()
-cdef PyObject *nogil_propagate_exception() nogil except NULL:
+cdef PyObject *nogil_propagate_exception() except NULL nogil:
with nogil:
with gil:
raise Exception("This exception propagates!")
diff --git a/tests/run/with_gil_automatic.pyx b/tests/run/with_gil_automatic.pyx
index 425dbbce7..954ed6d47 100644
--- a/tests/run/with_gil_automatic.pyx
+++ b/tests/run/with_gil_automatic.pyx
@@ -28,7 +28,7 @@ def test_print_in_nogil_section(x):
@cython.test_fail_if_path_exists(
"//GILStatNode//GILStatNode",
)
-cpdef int test_print_in_nogil_func(x) nogil except -1:
+cpdef int test_print_in_nogil_func(x) except -1 nogil:
"""
>>> _ = test_print_in_nogil_func(123)
--123--
@@ -61,7 +61,7 @@ def test_raise_in_nogil_section(x):
@cython.test_fail_if_path_exists(
"//GILStatNode//GILStatNode",
)
-cpdef int test_raise_in_nogil_func(x) nogil except -1:
+cpdef int test_raise_in_nogil_func(x) except -1 nogil:
"""
>>> test_raise_in_nogil_func(123)
Traceback (most recent call last):
@@ -128,7 +128,7 @@ def assert_in_nogil_section_string(int x):
"//AssertStatNode//GILStatNode",
"//AssertStatNode//GILStatNode//RaiseStatNode",
)
-cpdef int assert_in_nogil_func(int x) nogil except -1:
+cpdef int assert_in_nogil_func(int x) except -1 nogil:
"""
>>> _ = assert_in_nogil_func(123)
>>> assert_in_nogil_func(0)
diff --git a/tests/run/withnogil.pyx b/tests/run/withnogil.pyx
index 55b7896a7..a64779dfe 100644
--- a/tests/run/withnogil.pyx
+++ b/tests/run/withnogil.pyx
@@ -19,5 +19,5 @@ def g():
h()
return 1
-cdef int h() nogil except -1:
+cdef int h() except -1 nogil:
pass
diff --git a/tests/testsupport/cythonarrayutil.pxi b/tests/testsupport/cythonarrayutil.pxi
index 50d764acd..683dc4b71 100644
--- a/tests/testsupport/cythonarrayutil.pxi
+++ b/tests/testsupport/cythonarrayutil.pxi
@@ -2,7 +2,7 @@ from libc.stdlib cimport malloc, free
cimport cython
from cython.view cimport array
-cdef void callback(void *data):
+cdef void callback(void *data) noexcept:
print "callback called"
free(data)
diff --git a/tests/windows_bugs_39.txt b/tests/windows_bugs_39.txt
new file mode 100644
index 000000000..6b56b9d33
--- /dev/null
+++ b/tests/windows_bugs_39.txt
@@ -0,0 +1,3 @@
+# https://github.com/cython/cython/issues/3450
+TestInline
+scanner_trace