-rw-r--r--  .appveyor.yaml | 30
-rw-r--r--  .hgignore | 5
-rw-r--r--  CHANGES | 123
-rw-r--r--  LICENSE | 2
-rw-r--r--  README.rst | 811
-rw-r--r--  __init__.py | 29
-rw-r--r--  _doc/_static/pypi.svg | 2
-rw-r--r--  _doc/api.ryd | 47
-rw-r--r--  _doc/basicuse.ryd | 12
-rw-r--r--  _doc/conf.py | 26
-rw-r--r--  _doc/contributing.ryd | 4
-rw-r--r--  _doc/detail.ryd | 12
-rw-r--r--  _doc/dumpcls.ryd | 10
-rw-r--r--  _doc/example.ryd | 44
-rw-r--r--  _doc/index.ryd | 5
-rw-r--r--  _doc/install.ryd | 6
-rw-r--r--  _doc/overview.ryd | 6
-rw-r--r--  _doc/pyyaml.ryd | 9
-rw-r--r--  _test/data/construct-python-str-utf8-py2.code | 2
-rw-r--r--  _test/data/construct-python-unicode-utf8-py2.code | 2
-rw-r--r--  _test/data/construct-str-utf8-py2.code | 2
-rw-r--r--  _test/lib/canonical.py | 128
-rw-r--r--  _test/lib/test_appliance.py | 22
-rw-r--r--  _test/lib/test_canonical.py | 3
-rw-r--r--  _test/lib/test_constructor.py | 16
-rw-r--r--  _test/lib/test_emitter.py | 27
-rw-r--r--  _test/lib/test_errors.py | 15
-rw-r--r--  _test/lib/test_input_output.py | 296
-rw-r--r--  _test/lib/test_mark.py | 7
-rw-r--r--  _test/lib/test_reader.py | 13
-rw-r--r--  _test/lib/test_recursive.py | 15
-rw-r--r--  _test/lib/test_representer.py | 5
-rw-r--r--  _test/lib/test_resolver.py | 18
-rw-r--r--  _test/lib/test_structure.py | 69
-rw-r--r--  _test/lib/test_tokens.py | 47
-rw-r--r--  _test/lib/test_yaml_ext.py | 50
-rw-r--r--  _test/roundtrip.py | 220
-rw-r--r--  _test/test_a_dedent.py | 13
-rw-r--r--  _test/test_add_xxx.py | 123
-rw-r--r--  _test/test_anchor.py | 146
-rw-r--r--  _test/test_api_change.py | 62
-rw-r--r--  _test/test_class_register.py | 55
-rw-r--r--  _test/test_collections.py | 7
-rw-r--r--  _test/test_comment_manipulation.py | 73
-rw-r--r--  _test/test_comments.py | 175
-rw-r--r--  _test/test_contextmanager.py | 23
-rw-r--r--  _test/test_copy.py | 12
-rw-r--r--  _test/test_cyaml.py | 45
-rw-r--r--  _test/test_datetime.py | 43
-rw-r--r--  _test/test_deprecation.py | 10
-rw-r--r--  _test/test_documents.py | 22
-rw-r--r--  _test/test_fail.py | 40
-rw-r--r--  _test/test_float.py | 21
-rw-r--r--  _test/test_flowsequencekey.py | 2
-rw-r--r--  _test/test_indentation.py | 88
-rw-r--r--  _test/test_int.py | 6
-rw-r--r--  _test/test_issues.py | 279
-rw-r--r--  _test/test_json_numbers.py | 17
-rw-r--r--  _test/test_line_col.py | 20
-rw-r--r--  _test/test_literal.py | 58
-rw-r--r--  _test/test_none.py | 44
-rw-r--r--  _test/test_numpy.py | 40
-rw-r--r--  _test/test_program_config.py | 9
-rw-r--r--  _test/test_spec_examples.py | 76
-rw-r--r--  _test/test_string.py | 44
-rw-r--r--  _test/test_tag.py | 71
-rw-r--r--  _test/test_version.py | 25
-rw-r--r--  _test/test_yamlfile.py | 89
-rw-r--r--  _test/test_yamlobject.py | 37
-rw-r--r--  _test/test_z_check_debug_leftovers.py | 7
-rw-r--r--  _test/test_z_data.py | 108
-rw-r--r--  _test/test_z_olddata.py | 19
-rw-r--r--  anchor.py | 15
-rw-r--r--  comments.py | 777
-rw-r--r--  compat.py | 228
-rw-r--r--  composer.py | 77
-rw-r--r--  configobjwalker.py | 11
-rw-r--r--  constructor.py | 1046
-rw-r--r--  cyaml.py | 146
-rw-r--r--  dumper.py | 153
-rw-r--r--  emitter.py | 802
-rw-r--r--  error.py | 130
-rw-r--r--  events.py | 145
-rw-r--r--  loader.py | 44
-rw-r--r--  main.py | 784
-rw-r--r--  nodes.py | 99
-rw-r--r--  parser.py | 315
-rw-r--r--  reader.py | 140
-rw-r--r--  representer.py | 667
-rw-r--r--  resolver.py | 171
-rw-r--r--  scalarbool.py | 23
-rw-r--r--  scalarfloat.py | 76
-rw-r--r--  scalarint.py | 68
-rw-r--r--  scalarstring.py | 66
-rw-r--r--  scanner.py | 872
-rw-r--r--  serializer.py | 73
-rw-r--r--  setup.py | 275
-rw-r--r--  timestamp.py | 52
-rw-r--r--  tokens.py | 215
-rwxr-xr-x  tox.ini | 12
-rw-r--r--  util.py | 143
101 files changed, 5738 insertions, 5936 deletions
diff --git a/.appveyor.yaml b/.appveyor.yaml
deleted file mode 100644
index 4f53ba4..0000000
--- a/.appveyor.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
-# based on: https://packaging.python.org/guides/supporting-windows-using-appveyor/
-
-environment:
- matrix:
- - PYTHON: C:\Python27
- - PYTHON: C:\Python27-x64
- - PYTHON: C:\Python35
- - PYTHON: C:\Python35-x64
- - PYTHON: C:\Python36
- - PYTHON: C:\Python36-x64
- - PYTHON: C:\Python37
- - PYTHON: C:\Python37-x64
-
-install:
- - |
- %PYTHON%\python.exe -m pip install --upgrade -q pip wheel setuptools
- %PYTHON%\python.exe -m pip list --format columns
-
-# only used when compiling (for Python with C extensions this is done in the after_test)
-build: off
-
-test_script:
- - echo Skipped for now
-
-after_test:
- - |
- %PYTHON%\python.exe setup.py bdist_wheel
-
-artifacts:
- - path: dist\*
diff --git a/.hgignore b/.hgignore
index 75a723f..c305fec 100644
--- a/.hgignore
+++ b/.hgignore
@@ -12,3 +12,8 @@ venv
TODO.rst
try_*
_doc/*.pdf
+_doc/*.html
+_doc/*.rst
+*.py_alt
+
+ziglib
diff --git a/CHANGES b/CHANGES
index 4f783c7..f59a8b8 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,3 +1,126 @@
+[0, 17, 21]: 2022-02-12
+ - fix bug in calling `.compose()` method with `pathlib.Path` instance.
+
+[0, 17, 20]: 2022-01-03
+ - fix error in microseconds while rounding datetime fractions >= 9999995
+ (reported by `Luis Ferreira <https://sourceforge.net/u/ljmf00/>`__)
+
+[0, 17, 19]: 2021-12-26
+ - fix mypy problems (reported by `Arun <https://sourceforge.net/u/arunppsg/profile/>`__)
+
+[0, 17, 18]: 2021-12-24
+ - copy-paste error in folded scalar comment attachment (reported by `Stephan Geulette
+ <https://sourceforge.net/u/sgeulette/profile/>`__)
+ - fix issue 411, indent error for comment between key and empty seq value (reported by `Guillermo Julián
+ <https://sourceforge.net/u/gjulianm/profile/>`__)
+
+[0, 17, 17]: 2021-10-31
+ - extract timestamp matching/creation to util
+
+[0, 17, 16]: 2021-08-28
+ - also handle issue 397 when the comment is a newline
+
+[0, 17, 15]: 2021-08-28
+ - fix issue 397, insert comment before key when a comment between key and value exists
+ (reported by `Bastien gerard <https://sourceforge.net/u/bagerard/>`__)
+
+[0, 17, 14]: 2021-08-25
+ - fix issue 396, inserting key/val in merged-in dictionary (reported by `Bastien gerard
+ <https://sourceforge.net/u/bagerard/>`__)
+
+[0, 17, 13]: 2021-08-21
+ - minor fix in attr handling
+
+[0, 17, 12]: 2021-08-21
+ - fix issue with anchor on registered class not preserved and those classes using package
+ attrs with `@attr.s()` (both reported by `ssph <https://sourceforge.net/u/sph/>`__)
+
+[0, 17, 11]: 2021-08-19
+ - fix error baseclass for ``DuplicateKeyError`` (reported by `Łukasz Rogalski
+ <https://sourceforge.net/u/lrogalski/>`__)
+ - fix typo in reader error message, causing `KeyError` during reader error
+ (reported by `MTU <https://sourceforge.net/u/mtu/>`__)
+
+[0, 17, 10]: 2021-06-24
+ - fix issue 388, token with old comment structure != two elements
+ (reported by `Dimitrios Bariamis <https://sourceforge.net/u/dbdbc/>`__)
+
+[0, 17, 9]: 2021-06-10
+ - fix issue with updating CommentedMap (reported by sri on
+ `StackOverflow <https://stackoverflow.com/q/67911659/1307905>`__)
+
+[0, 17, 8]: 2021-06-09
+ - fix for issue 387 where templated anchors on tagged object did get set
+ resulting in potential id reuse. (reported by `Artem Ploujnikov
+ <https://sourceforge.net/u/flexthink/>`__)
+
+[0, 17, 7]: 2021-05-31
+ - issue 385 also affected other deprecated loaders (reported via email
+ by Oren Watson)
+
+[0, 17, 6]: 2021-05-31
+ - merged type annotations update provided by
+ `Jochen Sprickerhof <https://sourceforge.net/u/jspricke/>`__
+ - fix for issue 385: deprecated round_trip_loader function not working
+ (reported by `Mike Gouline <https://sourceforge.net/u/gouline/>`__)
+ - wasted a few hours getting rid of mypy warnings/errors
+
+[0, 17, 5]: 2021-05-30
+ - fix for issue 384 !!set with aliased entry resulting in broken YAML on rt
+ (reported by `William Kimball <https://sourceforge.net/u/william303/>`__)
+
+[0, 17, 4]: 2021-04-07
+ - prevent (empty) comments from throwing assertion error (issue 351
+ reported by `William Kimball <https://sourceforge.net/u/william303/>`__);
+ such comments (or empty lines) will be dropped
+
+[0, 17, 3]: 2021-04-07
+ - fix for issue 382 caused by an error in a format string (reported by
+ `William Kimball <https://sourceforge.net/u/william303/>`__)
+ - allow expansion of aliases by setting ``yaml.composer.return_alias = lambda s: copy.deepcopy(s)``
+ (as per `Stackoverflow answer <https://stackoverflow.com/a/66983530/1307905>`__)
+
+[0, 17, 2]: 2021-03-29
+ - change -py2.py3-none-any.whl to -py3-none-any.whl, and remove 0.17.1
+
+[0, 17, 1]: 2021-03-29
+ - added 'Programming Language :: Python :: 3 :: Only', and removed
+ 0.17.0 from PyPI (reported by `Alasdair Nicol <https://sourceforge.net/u/alasdairnicol/>`__)
+
+[0, 17, 0]: 2021-03-26
+ - this release no longer supports Python 2.7; most if not all Python 2
+ specific code is removed. The 0.17.x series is the last to support Python 3.5
+ (this also allowed for removal of the dependency on ``ruamel.std.pathlib``)
+ - remove Python2 specific code branches and adaptations (u-strings)
+ - prepare % code for f-strings using ``_F``
+ - allow PyOxidisation (`issue 324 <https://sourceforge.net/p/ruamel-yaml/tickets/324/>`__
+ resp. `issue 171 <https://github.com/indygreg/PyOxidizer/issues/171>`__)
+ - replaced Python 2 compatible enforcement of keyword arguments with '*'
+ - the old top level *functions* ``load``, ``safe_load``, ``round_trip_load``,
+ ``dump``, ``safe_dump``, ``round_trip_dump``, ``scan``, ``parse``,
+ ``compose``, ``emit``, ``serialize`` as well as their ``_all`` variants for
+ multi-document streams, now issue a ``PendingDeprecationWarning`` (e.g. when run
+ from pytest, but also when Python is started with ``-Wd``). Use the methods on
+ ``YAML()``, which have been extended.
+ - fix for issue 376: indentation changes could put literal/folded scalar to start
+ before the ``#`` column of a following comment, effectively making the comment
+ part of the scalar in the output. (reported by
+ `Bence Nagy <https://sourceforge.net/u/underyx/>`__)
+
+
+[0, 16, 13]: 2021-03-05
+ - fix for issue 359: could not update() CommentedMap with keyword arguments
+ (reported by `Steve Franchak <https://sourceforge.net/u/binaryadder/>`__)
+ - fix for issue 365: unable to dump mutated TimeStamp objects
+ (reported by `Anton Akmerov <https://sourceforge.net/u/akhmerov/>`__)
+ - fix for issue 371: unable to add comment without starting space
+ (reported by `Mark Grandi <https://sourceforge.net/u/mgrandi>`__)
+ - fix for issue 373: recursive call to walk_tree not preserving all params
+ (reported by `eulores <https://sourceforge.net/u/eulores/>`__)
+ - a None value in a flow-style sequence is now dumped as `null` instead
+ of `!!null ''` (reported by mcarans on
+ `StackOverflow <https://stackoverflow.com/a/66489600/1307905>`__)
+
[0, 16, 12]: 2020-09-04
- update links in doc
diff --git a/LICENSE b/LICENSE
index 5b863d3..678f5cc 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
The MIT License (MIT)
- Copyright (c) 2014-2020 Anthon van der Neut, Ruamel bvba
+ Copyright (c) 2014-2022 Anthon van der Neut, Ruamel bvba
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/README.rst b/README.rst
index f4be7f5..b5b2f80 100644
--- a/README.rst
+++ b/README.rst
@@ -4,23 +4,38 @@ ruamel.yaml
``ruamel.yaml`` is a YAML 1.2 loader/dumper package for Python.
-:version: 0.16.12
-:updated: 2020-09-04
+:version: 0.17.21
+:updated: 2022-02-12
:documentation: http://yaml.readthedocs.io
:repository: https://sourceforge.net/projects/ruamel-yaml/
:pypi: https://pypi.org/project/ruamel.yaml/
+*The 0.16.13 release was the last that was tested to be working on Python 2.7.
+The 0.17.21 release was the last tested to be working on Python 3.5 and 3.6 (the
+latter not actually tested, because tox/virtualenv no longer support those EOL
+versions).
+The 0.17 series is also the last to support the old PyYAML functions; replace
+them by creating a `YAML()` instance and using its `.load()` and `.dump()` methods.*
+
+*Please adjust your dependencies accordingly if necessary (`ruamel.yaml<0.18`).*
Starting with version 0.15.0 the way YAML files are loaded and dumped
-is changing. See the API doc for details. Currently existing
+has been changing; see the API doc for details. Currently existing
functionality will throw a warning before being changed/removed.
-**For production systems you should pin the version being used with
-``ruamel.yaml<=0.15``**. There might be bug fixes in the 0.14 series,
-but new functionality is likely only to be available via the new API.
+**For production systems already using a pre-0.16 version, you should
+pin the version being used with ``ruamel.yaml<=0.15``** if you cannot
+fully test upgrading to a newer version. For new usage,
+pin to the minor version tested (``ruamel.yaml<0.18``) or even to the
+exact version used.
+
+New functionality is usually only available via the new API, so
+make sure you use it and stop using the `ruamel.yaml.safe_load()`,
+`ruamel.yaml.round_trip_load()` and `ruamel.yaml.load()` functions
+(and their `....dump()` counterparts).
If your package uses ``ruamel.yaml`` and is not listed on PyPI, drop
me an email, preferably with some information on how you use the
-package (or a link to bitbucket/github) and I'll keep you informed
+package (or a link to the repository) and I'll keep you informed
when the status of the API is stable enough to make the transition.
* `Overview <http://yaml.readthedocs.org/en/latest/overview.html>`_
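As a minimal sketch of the migration asked for above (file names are only illustrative), loading and dumping go through a ``YAML()`` instance instead of the deprecated module-level functions:

.. code:: python

    from pathlib import Path
    from ruamel.yaml import YAML

    yaml = YAML()                         # round-trip by default; YAML(typ='safe') replaces safe_load/safe_dump
    data = yaml.load(Path('input.yaml'))  # instead of ruamel.yaml.round_trip_load(...)
    data['updated'] = True                # assumes the document is a mapping
    yaml.dump(data, Path('input.yaml'))   # instead of ruamel.yaml.round_trip_dump(...)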
@@ -55,8 +70,150 @@ ChangeLog
.. should insert NEXT: at the beginning of line for next key (with empty line)
NEXT:
+
+ - fix issue with indent != 2 and literal scalars with empty first line
+ (reported by wrdis on `StackOverflow <https://stackoverflow.com/q/75584262/1307905>`__)
+ - updated __repr__ of CommentedMap, now that Python's dict is ordered -> no more
+ ordereddict(list-of-tuples)
+ - merge MR 4, handling OctalInt in YAML 1.1
+ (provided by `Jacob Floyd <https://sourceforge.net/u/cognifloyd/profile/>`_)
+ - fix loading of `!!float 42` (reported by Eric on
+ `Stack overflow <https://stackoverflow.com/a/71555107/1307905>`_)
+ - line numbers are now set on `CommentedKeySeq` and `CommentedKeyMap` (which
+ are created if you have a sequence resp. mapping as the key in a mapping)
+ - plain scalars: put single words longer than width on a line of their own, instead
+ of after the previous line (issue 427, reported by `Antoine Cotten
+ <https://sourceforge.net/u/antoineco/profile/>`_). Caveat: this currently results in a
+ space ending the previous line.
+ - fix for folded scalar part of 421: comments after ">" on first line of folded
+ scalars are now preserved (as were those in the same position on literal scalars).
+ Issue reported by Jacob Floyd.
+ - added stacklevel to warnings
+ - typing changed from Py2 compatible comments to Py3, removed various Py2-isms
+
+0.17.21 (2022-02-12):
+ - fix bug in calling `.compose()` method with `pathlib.Path` instance.
+
+0.17.20 (2022-01-03):
+ - fix error in microseconds while rounding datetime fractions >= 9999995
+ (reported by `Luis Ferreira <https://sourceforge.net/u/ljmf00/>`__)
+
+0.17.19 (2021-12-26):
+ - fix mypy problems (reported by `Arun <https://sourceforge.net/u/arunppsg/profile/>`__)
+
+0.17.18 (2021-12-24):
+ - copy-paste error in folded scalar comment attachment (reported by `Stephan Geulette
+ <https://sourceforge.net/u/sgeulette/profile/>`__)
+ - fix issue 411, indent error for comment between key and empty seq value (reported by `Guillermo Julián
+ <https://sourceforge.net/u/gjulianm/profile/>`__)
+
+0.17.17 (2021-10-31):
+ - extract timestamp matching/creation to util
+
+0.17.16 (2021-08-28):
+ - fix issue 398: also handle issue 397 when the comment is a newline
+
+0.17.15 (2021-08-28):
+ - fix issue 397, insert comment before key when a comment between key and value exists
+ (reported by `Bastien gerard <https://sourceforge.net/u/bagerard/>`__)
+
+0.17.14 (2021-08-25):
+ - fix issue 396, inserting key/val in merged-in dictionary (reported by `Bastien gerard
+ <https://sourceforge.net/u/bagerard/>`__)
+
+0.17.13 (2021-08-21):
+ - minor fix in attr handling
+
+0.17.12 (2021-08-21):
+ - fix issue with anchor on registered class not preserved and those classes using package
+ attrs with `@attr.s()` (both reported by `ssph <https://sourceforge.net/u/sph/>`__)
+
+0.17.11 (2021-08-19):
+ - fix error baseclass for ``DuplicateKeyError`` (reported by `Łukasz Rogalski
+ <https://sourceforge.net/u/lrogalski/>`__)
+ - fix typo in reader error message, causing `KeyError` during reader error
+ (reported by `MTU <https://sourceforge.net/u/mtu/>`__)
+
+0.17.10 (2021-06-24):
+ - fix issue 388, token with old comment structure != two elements
+ (reported by `Dimitrios Bariamis <https://sourceforge.net/u/dbdbc/>`__)
+
+0.17.9 (2021-06-10):
+ - fix issue with updating CommentedMap (reported by sri on
+ `StackOverflow <https://stackoverflow.com/q/67911659/1307905>`__)
+
+0.17.8 (2021-06-09):
+ - fix for issue 387 where templated anchors on tagged object did get set
+ resulting in potential id reuse. (reported by `Artem Ploujnikov
+ <https://sourceforge.net/u/flexthink/>`__)
+
+0.17.7 (2021-05-31):
+ - issue 385 also affected other deprecated loaders (reported via email
+ by Oren Watson)
+
+0.17.6 (2021-05-31):
+ - merged type annotations update provided by
+ `Jochen Sprickerhof <https://sourceforge.net/u/jspricke/>`__
+ - fix for issue 385: deprecated round_trip_loader function not working
+ (reported by `Mike Gouline <https://sourceforge.net/u/gouline/>`__)
+ - wasted a few hours getting rid of mypy warnings/errors
+
+0.17.5 (2021-05-30):
+ - fix for issue 384 !!set with aliased entry resulting in broken YAML on rt
+ (reported by `William Kimball <https://sourceforge.net/u/william303/>`__)
+
+0.17.4 (2021-04-07):
+ - prevent (empty) comments from throwing assertion error (issue 351
+ reported by `William Kimball <https://sourceforge.net/u/william303/>`__);
+ such comments (or empty lines) will be dropped
+
+0.17.3 (2021-04-07):
+ - fix for issue 382 caused by an error in a format string (reported by
+ `William Kimball <https://sourceforge.net/u/william303/>`__)
+ - allow expansion of aliases by setting ``yaml.composer.return_alias = lambda s: copy.deepcopy(s)``
+ (as per `Stackoverflow answer <https://stackoverflow.com/a/66983530/1307905>`__)
+
+0.17.2 (2021-03-29):
+ - change -py2.py3-none-any.whl to -py3-none-any.whl, and remove 0.17.1
+
+0.17.1 (2021-03-29):
+ - added 'Programming Language :: Python :: 3 :: Only', and removed
+ 0.17.0 from PyPI (reported by `Alasdair Nicol <https://sourceforge.net/u/alasdairnicol/>`__)
+
+0.17.0 (2021-03-26):
+ - removed because of incomplete classifiers
+ - this release no longer supports Python 2.7; most if not all Python 2
+ specific code is removed. The 0.17.x series is the last to support Python 3.5
+ (this also allowed for removal of the dependency on ``ruamel.std.pathlib``)
+ - remove Python2 specific code branches and adaptations (u-strings)
+ - prepare % code for f-strings using ``_F``
+ - allow PyOxidisation (`issue 324 <https://sourceforge.net/p/ruamel-yaml/tickets/324/>`__
+ resp. `issue 171 <https://github.com/indygreg/PyOxidizer/issues/171>`__)
+ - replaced Python 2 compatible enforcement of keyword arguments with '*'
+ - the old top level *functions* ``load``, ``safe_load``, ``round_trip_load``,
+ ``dump``, ``safe_dump``, ``round_trip_dump``, ``scan``, ``parse``,
+ ``compose``, ``emit``, ``serialize`` as well as their ``_all`` variants for
+ multi-document streams, now issue a ``PendingDeprecationWarning`` (e.g. when run
+ from pytest, but also when Python is started with ``-Wd``). Use the methods on
+ ``YAML()``, which have been extended.
+ - fix for issue 376: indentation changes could put literal/folded scalar to start
+ before the ``#`` column of a following comment, effectively making the comment
+ part of the scalar in the output. (reported by
+ `Bence Nagy <https://sourceforge.net/u/underyx/>`__)
+
+
+0.16.13 (2021-03-05):
- fix for issue 359: could not update() CommentedMap with keyword arguments
(reported by `Steve Franchak <https://sourceforge.net/u/binaryadder/>`__)
+ - fix for issue 365: unable to dump mutated TimeStamp objects
+ (reported by `Anton Akmerov <https://sourceforge.net/u/akhmerov/>`__)
+ - fix for issue 371: unable to add comment without starting space
+ (reported by `Mark Grandi <https://sourceforge.net/u/mgrandi>`__)
+ - fix for issue 373: recursive call to walk_tree not preserving all params
+ (reported by `eulores <https://sourceforge.net/u/eulores/>`__)
+ - a None value in a flow-style sequence is now dumped as `null` instead
+ of `!!null ''` (reported by mcarans on
+ `StackOverflow <https://stackoverflow.com/a/66489600/1307905>`__)
0.16.12 (2020-09-04):
- update links in doc
@@ -118,646 +275,8 @@ NEXT:
- split of C source that generates .so file to ruamel.yaml.clib
- duplicate keys are now an error when working with the old API as well
-0.15.100 (2019-07-17):
- - fixing issue with dumping deep-copied data from commented YAML, by
- providing both the memo parameter to __deepcopy__, and by allowing
- startmarks to be compared on their content (reported by `Theofilos
- Petsios
- <https://bitbucket.org/%7Be550bc5d-403d-4fda-820b-bebbe71796d3%7D/>`__)
-
-0.15.99 (2019-07-12):
- - add `py.typed` to distribution, based on a PR submitted by
- `Michael Crusoe
- <https://bitbucket.org/%7Bc9fbde69-e746-48f5-900d-34992b7860c8%7D/>`__
- - merge PR 40 (also by Michael Crusoe) to more accurately specify
- repository in the README (also reported in a misunderstood issue
- some time ago)
-
-0.15.98 (2019-07-09):
- - regenerate ext/_ruamel_yaml.c with Cython version 0.29.12, needed
- for Python 3.8.0b2 (reported by `John Vandenberg
- <https://bitbucket.org/%7B6d4e8487-3c97-4dab-a060-088ec50c682c%7D/>`__)
-
-0.15.97 (2019-06-06):
- - regenerate ext/_ruamel_yaml.c with Cython version 0.29.10, needed for
- Python 3.8.0b1
- - regenerate ext/_ruamel_yaml.c with Cython version 0.29.9, needed for
- Python 3.8.0a4 (reported by `Anthony Sottile
- <https://bitbucket.org/%7B569cc8ea-0d9e-41cb-94a4-19ea517324df%7D/>`__)
-
-0.15.96 (2019-05-16):
- - fix failure to indent comments on round-trip anchored block style
- scalars in block sequence (reported by `William Kimball
- <https://bitbucket.org/%7Bba35ed20-4bb0-46f8-bb5d-c29871e86a22%7D/>`__)
-
-0.15.95 (2019-05-16):
- - fix failure to round-trip anchored scalars in block sequence
- (reported by `William Kimball
- <https://bitbucket.org/%7Bba35ed20-4bb0-46f8-bb5d-c29871e86a22%7D/>`__)
- - wheel files for Python 3.4 no longer provided (`Python 3.4 EOL 2019-03-18
- <https://www.python.org/dev/peps/pep-0429/>`__)
-
-0.15.94 (2019-04-23):
- - fix missing line-break after end-of-file comments not ending in
- line-break (reported by `Philip Thompson
- <https://bitbucket.org/%7Be42ba205-0876-4151-bcbe-ccaea5bd13ce%7D/>`__)
-
-0.15.93 (2019-04-21):
- - fix failure to parse empty implicit flow mapping key
- - in YAML 1.1 plain scalars ``y``, ``n``, ``Y``, and ``N`` are now
- correctly recognised as booleans and such strings dumped quoted
- (reported by `Marcel Bollmann
- <https://bitbucket.org/%7Bd8850921-9145-4ad0-ac30-64c3bd9b036d%7D/>`__)
-
-0.15.92 (2019-04-16):
- - fix failure to parse empty implicit block mapping key (reported by
- `Nolan W <https://bitbucket.org/i2labs/>`__)
-
-0.15.91 (2019-04-05):
- - allowing duplicate keys would not work for merge keys (reported by mamacdon on
- `StackOverflow <https://stackoverflow.com/questions/55540686/>`__)
-
-0.15.90 (2019-04-04):
- - fix issue with updating `CommentedMap` from list of tuples (reported by
- `Peter Henry <https://bitbucket.org/mosbasik/>`__)
-
-0.15.89 (2019-02-27):
- - fix for items with flow-mapping in block sequence output on single line
- (reported by `Zahari Dim <https://bitbucket.org/zahari_dim/>`__)
- - fix for safe dumping erroring in creation of ``RepresenterError`` when dumping namedtuple
- (reported and solution by `Jaakko Kantojärvi <https://bitbucket.org/raphendyr/>`__)
-
-0.15.88 (2019-02-12):
- - fix inclusion of python code from the subpackage data (containing extra tests,
- reported by `Florian Apolloner <https://bitbucket.org/apollo13/>`__)
-
-0.15.87 (2019-01-22):
- - fix problem with empty lists and the code to reinsert merge keys (reported via email
- by Zaloo)
-
-0.15.86 (2019-01-16):
- - reinsert merge key in its old position (reported by grumbler on
- `StackOverflow <https://stackoverflow.com/a/54206512/1307905>`__)
- - fix for issue with non-ASCII anchor names (reported and fix
- provided by Dandaleon Flux via email)
- - fix for issue when parsing flow mapping value starting with colon (in pure Python only)
- (reported by `FichteFoll <https://bitbucket.org/FichteFoll/>`__)
-
-0.15.85 (2019-01-08):
- - the types used by ``SafeConstructor`` for mappings and sequences can
- now be set by assigning to ``XXXConstructor.yaml_base_dict_type``
- (and ``..._list_type``), preventing the need to copy two methods
- with 50+ lines that had ``var = {}`` hardcoded. (Implemented to
- help solve a feature request by `Anthony Sottile
- <https://bitbucket.org/asottile/>`__ in an easier way)
-
-0.15.84 (2019-01-07):
- - fix for ``CommentedMap.copy()`` not returning ``CommentedMap``, let alone copying comments etc.
- (reported by `Anthony Sottile <https://bitbucket.org/asottile/>`__)
-
-0.15.83 (2019-01-02):
- - fix for bug in roundtripping aliases used as key (reported via email by Zaloo)
-
-0.15.82 (2018-12-28):
- - anchors and aliases on scalar int, float, string and bool are now preserved. Anchors
- do not need a referring alias for these (reported by
- `Alex Harvey <https://bitbucket.org/alexharv074/>`__)
- - anchors no longer lost on tagged objects when roundtripping (reported by `Zaloo
- <https://bitbucket.org/zaloo/>`__)
-
-0.15.81 (2018-12-06):
- - fix issue dumping methods of metaclass derived classes (reported and fix provided
- by `Douglas Raillard <https://bitbucket.org/DouglasRaillard/>`__)
-
-0.15.80 (2018-11-26):
- - fix issue emitting BEL character when round-tripping invalid folded input
- (reported by Isaac on `StackOverflow <https://stackoverflow.com/a/53471217/1307905>`__)
-
-0.15.79 (2018-11-21):
- - fix issue with anchors nested deeper than alias (reported by gaFF on
- `StackOverflow <https://stackoverflow.com/a/53397781/1307905>`__)
-
-0.15.78 (2018-11-15):
- - fix setup issue for 3.8 (reported by `Sidney Kuyateh
- <https://bitbucket.org/autinerd/>`__)
-
-0.15.77 (2018-11-09):
- - setting `yaml.sort_base_mapping_type_on_output = False`, will prevent
- explicit sorting by keys in the base representer of mappings. Roundtrip
- already did not do this. Usage only makes real sense for Python 3.6+
- (feature request by `Sebastian Gerber <https://bitbucket.org/spacemanspiff2007/>`__).
- - implement Python version check in YAML metadata in ``_test/test_z_data.py``
-
-0.15.76 (2018-11-01):
- - fix issue with empty mapping and sequence loaded as flow-style
- (mapping reported by `Min RK <https://bitbucket.org/minrk/>`__, sequence
- by `Maged Ahmed <https://bitbucket.org/maged2/>`__)
-
-0.15.75 (2018-10-27):
- - fix issue with single '?' scalar (reported by `Terrance
- <https://bitbucket.org/OllieTerrance/>`__)
- - fix issue with duplicate merge keys (prompted by `answering
- <https://stackoverflow.com/a/52852106/1307905>`__ a
- `StackOverflow question <https://stackoverflow.com/q/52851168/1307905>`__
- by `math <https://stackoverflow.com/users/1355634/math>`__)
-
-0.15.74 (2018-10-17):
- - fix dropping of comment on rt before sequence item that is sequence item
- (reported by `Thorsten Kampe <https://bitbucket.org/thorstenkampe/>`__)
-
-0.15.73 (2018-10-16):
- - fix irregular output on pre-comment in sequence within sequence (reported
- by `Thorsten Kampe <https://bitbucket.org/thorstenkampe/>`__)
- - allow non-compact (i.e. next line) dumping sequence/mapping within sequence.
-
-0.15.72 (2018-10-06):
- - fix regression on explicit 1.1 loading with the C based scanner/parser
- (reported by `Tomas Vavra <https://bitbucket.org/xtomik/>`__)
-
-0.15.71 (2018-09-26):
- - some of the tests now live in YAML files in the
- `yaml.data <https://bitbucket.org/ruamel/yaml.data>`__ repository.
- ``_test/test_z_data.py`` processes these.
- - fix regression where handcrafted CommentedMaps could not be initiated (reported by
- `Dan Helfman <https://bitbucket.org/dhelfman/>`__)
- - fix regression with non-root literal scalars that needed indent indicator
- (reported by `Clark Breyman <https://bitbucket.org/clarkbreyman/>`__)
- - tag:yaml.org,2002:python/object/apply now also uses __qualname__ on PY3
- (reported by `Douglas RAILLARD <https://bitbucket.org/DouglasRaillard/>`__)
- - issue with self-referring object creation
- (reported and fix by `Douglas RAILLARD <https://bitbucket.org/DouglasRaillard/>`__)
-
-0.15.70 (2018-09-21):
- - reverted CommentedMap and CommentedSeq to subclass ordereddict resp. list,
- reimplemented merge maps so that both ``dict(**commented_map_instance)`` and JSON
- dumping works. This also allows checking with ``isinstance()`` on ``dict`` resp. ``list``.
- (Proposed by `Stuart Berg <https://bitbucket.org/stuarteberg/>`__, with feedback
- from `blhsing <https://stackoverflow.com/users/6890912/blhsing>`__ on
- `StackOverflow <https://stackoverflow.com/q/52314186/1307905>`__)
-
-0.15.69 (2018-09-20):
- - fix issue with dump_all gobbling end-of-document comments on parsing
- (reported by `Pierre B. <https://bitbucket.org/octplane/>`__)
-
-0.15.68 (2018-09-20):
- - fix issue with parsable, but incorrect output with nested flow-style sequences
- (reported by `Dougal Seeley <https://bitbucket.org/dseeley/>`__)
- - fix issue with loading Python objects that have __setstate__ and recursion in parameters
- (reported by `Douglas RAILLARD <https://bitbucket.org/DouglasRaillard/>`__)
-
-0.15.67 (2018-09-19):
- - fix issue with extra space inserted with non-root literal strings
- (Issue reported and PR with fix provided by
- `Naomi Seyfer <https://bitbucket.org/sixolet/>`__.)
-
-0.15.66 (2018-09-07):
- - fix issue with fold indicating characters inserted in safe_load-ed folded strings
- (reported by `Maximilian Hils <https://bitbucket.org/mhils/>`__).
-
-0.15.65 (2018-09-07):
- - fix issue #232 revert to throw ParserError for unexpected ``]``
- and ``}`` instead of IndexError. (Issue reported and PR with fix
- provided by `Naomi Seyfer <https://bitbucket.org/sixolet/>`__.)
- - added ``key`` and ``reverse`` parameter (suggested by Jannik Klemm via email)
- - indent root level literal scalars that have directive or document end markers
- at the beginning of a line
-
-0.15.64 (2018-08-30):
- - support round-trip of tagged sequences: ``!Arg [a, {b: 1}]``
- - single entry mappings in flow sequences now written by default without braces,
- set ``yaml.brace_single_entry_mapping_in_flow_sequence=True`` to force
- getting ``[a, {b: 1}, {c: {d: 2}}]`` instead of the default ``[a, b: 1, c: {d: 2}]``
- - fix issue when roundtripping floats starting with a dot such as ``.5``
- (reported by `Harrison Gregg <https://bitbucket.org/HarrisonGregg/>`__)
-
-0.15.63 (2018-08-29):
- - small fix only necessary for Windows users that don't use wheels.
-
-0.15.62 (2018-08-29):
- - C based reader/scanner & emitter now allow setting of 1.2 as YAML version.
- **The loading/dumping is still YAML 1.1 code**, so use the common subset of
- YAML 1.2 and 1.1 (reported by `Ge Yang <https://bitbucket.org/yangge/>`__)
-
-0.15.61 (2018-08-23):
- - support for round-tripping folded style scalars (initially requested
- by `Johnathan Viduchinsky <https://bitbucket.org/johnathanvidu/>`__)
- - update of C code
- - speed up of scanning (~30% depending on the input)
-
-0.15.60 (2018-08-18):
- - again allow single entry map in flow sequence context (reported by
- `Lee Goolsbee <https://bitbucket.org/lgoolsbee/>`__)
- - cleanup for mypy
- - spurious print in library (reported by
- `Lele Gaifax <https://bitbucket.org/lele/>`__), now automatically checked
-
-0.15.59 (2018-08-17):
- - issue with C based loader and leading zeros (reported by
- `Tom Hamilton Stubber <https://bitbucket.org/TomHamiltonStubber/>`__)
-
-0.15.58 (2018-08-17):
- - simple mappings can now be used as keys when round-tripping::
-
- {a: 1, b: 2}: hello world
-
- although using the obvious operations (del, popitem) on the key will
- fail, you can mutilate it by going through its attributes. If you load the
- above YAML in `d`, then changing the value is cumbersome:
-
- d = {CommentedKeyMap([('a', 1), ('b', 2)]): "goodbye"}
-
- and changing the key even more so:
-
- d[CommentedKeyMap([('b', 1), ('a', 2)])] = d.pop(
- CommentedKeyMap([('a', 1), ('b', 2)]))
-
- (you can use a `dict` instead of a list of tuples (or ordereddict), but that might result
- in a different order, of the keys of the key, in the output)
- - check integers to dump with 1.2 patterns instead of 1.1 (reported by
- `Lele Gaifax <https://bitbucket.org/lele/>`__)
-
-
-0.15.57 (2018-08-15):
- - Fix that CommentedSeq could no longer be used in adding or do a sort
- (reported by `Christopher Wright <https://bitbucket.org/CJ-Wright4242/>`__)
-
-0.15.56 (2018-08-15):
- - fix issue with ``python -O`` optimizing away code (reported, and detailed cause
- pinpointed, by `Alex Grönholm <https://bitbucket.org/agronholm/>`__)
-
-0.15.55 (2018-08-14):
- - unmade ``CommentedSeq`` a subclass of ``list``. It is now
- indirectly a subclass of the standard
- ``collections.abc.MutableSequence`` (without .abc if you are
- still on Python2.7). If you do ``isinstance(yaml.load('[1, 2]'),
- list)``) anywhere in your code replace ``list`` with
- ``MutableSequence``. Directly, ``CommentedSeq`` is a subclass of
- the abstract baseclass ``ruamel.yaml.compat.MutableSliceableSequence``,
- with the result that *(extended) slicing is supported on
- ``CommentedSeq``*.
- (reported by `Stuart Berg <https://bitbucket.org/stuarteberg/>`__)
- - duplicate keys (or their values) with non-ascii now correctly
- report in Python2, instead of raising a Unicode error.
- (Reported by `Jonathan Pyle <https://bitbucket.org/jonathan_pyle/>`__)
-
-0.15.54 (2018-08-13):
- - fix issue where a comment could pop-up twice in the output (reported by
- `Mike Kazantsev <https://bitbucket.org/mk_fg/>`__ and by
- `Nate Peterson <https://bitbucket.org/ndpete21/>`__)
- - fix issue where JSON object (mapping) without spaces was not parsed
- properly (reported by `Marc Schmidt <https://bitbucket.org/marcj/>`__)
- - fix issue where comments after empty flow-style mappings were not emitted
- (reported by `Qinfench Chen <https://bitbucket.org/flyin5ish/>`__)
-
-0.15.53 (2018-08-12):
- - fix issue with flow style mapping with comments gobbled newline (reported
- by `Christopher Lambert <https://bitbucket.org/XN137/>`__)
- - fix issue where single '+' under YAML 1.2 was interpreted as
- integer, erroring out (reported by `Jethro Yu
- <https://bitbucket.org/jcppkkk/>`__)
-
-0.15.52 (2018-08-09):
- - added `.copy()` mapping representation for round-tripping
- (``CommentedMap``) to fix incomplete copies of merged mappings
- (reported by `Will Richards
- <https://bitbucket.org/will_richards/>`__)
- - Also unmade that class a subclass of ordereddict to solve incorrect behaviour
- for ``{**merged-mapping}`` and ``dict(**merged-mapping)`` (reported independently by
- `Tim Olsson <https://bitbucket.org/tgolsson/>`__ and
- `Filip Matzner <https://bitbucket.org/FloopCZ/>`__)
-
-0.15.51 (2018-08-08):
- - Fix method name dumps (were not dotted) and loads (reported by `Douglas Raillard
- <https://bitbucket.org/DouglasRaillard/>`__)
- - Fix spurious trailing white-space caused when the comment start
- column was no longer reached and there was no actual EOL comment
- (e.g. following empty line) and doing substitutions, or when
- quotes around scalars got dropped. (reported by `Thomas Guillet
- <https://bitbucket.org/guillett/>`__)
-
-0.15.50 (2018-08-05):
- - Allow ``YAML()`` as a context manager for output, thereby making it much easier
- to generate multi-documents in a stream.
- - Fix issue with incorrect type information for `load()` and `dump()` (reported
- by `Jimbo Jim <https://bitbucket.org/jimbo1qaz/>`__)
-
-0.15.49 (2018-08-05):
- - fix preservation of leading newlines in root level literal style scalar,
- and preserve comment after literal style indicator (``| # some comment``)
- Both needed for round-tripping multi-doc streams in
- `ryd <https://pypi.org/project/ryd/>`__.
-
-0.15.48 (2018-08-03):
- - housekeeping: ``oitnb`` for formatting, mypy 0.620 upgrade and conformity
-
-0.15.47 (2018-07-31):
- - fix broken 3.6 manylinux1, the result of an unclean ``build`` (reported by
- `Roman Sichnyi <https://bitbucket.org/rsichnyi-gl/>`__)
-
-
-0.15.46 (2018-07-29):
- - fixed DeprecationWarning for importing from ``collections`` on 3.7
- (issue 210, reported by `Reinoud Elhorst
- <https://bitbucket.org/reinhrst/>`__). It was `difficult to find
- why tox/pytest did not report
- <https://stackoverflow.com/q/51573204/1307905>`__ and as time
- consuming to actually `fix
- <https://stackoverflow.com/a/51573205/1307905>`__ the tests.
-
-0.15.45 (2018-07-26):
- - After adding failing test for ``YAML.load_all(Path())``, remove StopIteration
- (PR provided by `Zachary Buhman <https://bitbucket.org/buhman/>`__,
- also reported by `Steven Hiscocks <https://bitbucket.org/sdhiscocks/>`__.)
-
-0.15.44 (2018-07-14):
- - Correct loading plain scalars consisting of numerals only and
- starting with `0`, when not explicitly specifying YAML version
- 1.1. This also fixes the issue about dumping string `'019'` as
- plain scalars as reported by `Min RK
- <https://bitbucket.org/minrk/>`__, that prompted this change.
-
-0.15.43 (2018-07-12):
- - merge PR33: Python2.7 on Windows is narrow, but has no
- ``sysconfig.get_config_var('Py_UNICODE_SIZE')``. (merge provided by
- `Marcel Bargull <https://bitbucket.org/mbargull/>`__)
- - ``register_class()`` now returns class (proposed by
- `Mike Nerone <https://bitbucket.org/Manganeez/>`__)
-
-0.15.42 (2018-07-01):
- - fix regression showing only on narrow Python 2.7 (py27mu) builds
- (with help from
- `Marcel Bargull <https://bitbucket.org/mbargull/>`__ and
- `Colm O'Connor <https://bitbucket.org/colmoconnorgithub/>`__).
- - run pre-commit ``tox`` on Python 2.7 wide and narrow, as well as
- 3.4/3.5/3.6/3.7/pypy
-
-0.15.41 (2018-06-27):
- - add detection of C-compile failure (investigation prompted by
- `StackOverflow <https://stackoverflow.com/a/51057399/1307905>`__ by
- `Emmanuel Blot <https://stackoverflow.com/users/8233409/emmanuel-blot>`__),
- which was removed while no longer dependent on ``libyaml``, C-extensions
- compilation still needs a compiler though.
-
-0.15.40 (2018-06-18):
- - added links to landing places as suggested in issue 190 by
- `KostisA <https://bitbucket.org/ankostis/>`__
- - fixes issue #201: decoding unicode escaped tags on Python2, reported
- by `Dan Abolafia <https://bitbucket.org/danabo/>`__
-
-0.15.39 (2018-06-17):
- - merge PR27 improving package startup time (and loading when regexp not
- actually used), provided by
- `Marcel Bargull <https://bitbucket.org/mbargull/>`__
-
-0.15.38 (2018-06-13):
- - fix for losing precision when roundtripping floats by
- `Rolf Wojtech <https://bitbucket.org/asomov/>`__
- - fix for hardcoded dir separator not working for Windows by
- `Nuno André <https://bitbucket.org/nu_no/>`__
- - typo fix by `Andrey Somov <https://bitbucket.org/asomov/>`__
-
-0.15.37 (2018-03-21):
- - again trying to create installable files for 187
-
-0.15.36 (2018-02-07):
- - fix issue 187, incompatibility of C extension with 3.7 (reported by
- Daniel Blanchard)
-
-0.15.35 (2017-12-03):
- - allow ``None`` as stream when specifying ``transform`` parameters to
- ``YAML.dump()``.
- This is useful if the transforming function doesn't return a meaningful value
- (inspired by `StackOverflow <https://stackoverflow.com/q/47614862/1307905>`__ by
- `rsaw <https://stackoverflow.com/users/406281/rsaw>`__).
-
-0.15.34 (2017-09-17):
- - fix for issue 157: CDumper not dumping floats (reported by Jan Smitka)
-
-0.15.33 (2017-08-31):
- - support for "undefined" round-tripping tagged scalar objects (in addition to
- tagged mapping object). Inspired by a use case presented by Matthew Patton
- on `StackOverflow <https://stackoverflow.com/a/45967047/1307905>`__.
- - fix issue 148: replace cryptic error message when using !!timestamp with an
- incorrectly formatted or non-scalar. Reported by FichteFoll.
-
-0.15.32 (2017-08-21):
- - allow setting ``yaml.default_flow_style = None`` (default: ``False``) for
- ``typ='rt'``.
- - fix for issue 149: multiplications on ``ScalarFloat`` now return ``float``
- (reported by jan.brezina@tul.cz)
-
-0.15.31 (2017-08-15):
- - fix Comment dumping
-
-0.15.30 (2017-08-14):
- - fix for issue with "compact JSON" not parsing: ``{"in":{},"out":{}}``
- (reported on `StackOverflow <https://stackoverflow.com/q/45681626/1307905>`__ by
- `mjalkio <https://stackoverflow.com/users/5130525/mjalkio>`_)
-
-0.15.29 (2017-08-14):
- - fix issue #51: different indents for mappings and sequences (reported by
- Alex Harvey)
- - fix for flow sequence/mapping as element/value of block sequence with
- sequence-indent minus dash-offset not equal two.
-
-0.15.28 (2017-08-13):
- - fix issue #61: merge of merge cannot be __repr__-ed (reported by Tal Liron)
-
-0.15.27 (2017-08-13):
- - fix issue 62, YAML 1.2 allows ``?`` and ``:`` in plain scalars if non-ambiguous
- (reported by nowox)
- - fix lists within lists which would make comments disappear
-
-0.15.26 (2017-08-10):
- - fix for disappearing comment after empty flow sequence (reported by
- oit-tzhimmash)
-
-0.15.25 (2017-08-09):
- - fix for problem with dumping (unloaded) floats (reported by eyenseo)
-
-0.15.24 (2017-08-09):
- - added ScalarFloat which supports roundtripping of 23.1, 23.100,
- 42.00E+56, 0.0, -0.0 etc. while keeping the format. Underscores in mantissas
- are not preserved/supported (yet, is anybody using that?).
- - (finally) fixed longstanding issue 23 (reported by `Anthony Sottile
- <https://bitbucket.org/asottile/>`__), now handling comment between block
- mapping key and value correctly
- - warn on YAML 1.1 float input that is incorrect (triggered by invalid YAML
- provided by Cecil Curry)
- - allow setting of boolean representation (`false`, `true`) by using:
- ``yaml.boolean_representation = [u'False', u'True']``
-
-0.15.23 (2017-08-01):
- - fix for round_tripping integers on 2.7.X > sys.maxint (reported by ccatterina)
-
-0.15.22 (2017-07-28):
- - fix for round_tripping single excl. mark tags doubling (reported and fix by Jan Brezina)
-
-0.15.21 (2017-07-25):
- - fix for writing unicode in new API (reported on
- `StackOverflow <https://stackoverflow.com/a/45281922/1307905>`__)
-
-0.15.20 (2017-07-23):
- - wheels for windows including C extensions
-
-0.15.19 (2017-07-13):
- - added object constructor for rt, decorator ``yaml_object`` to replace YAMLObject.
- - fix for problem using load_all with Path() instance
- - fix for load_all in combination with zero indent block style literal
- (``pure=True`` only!)
-
-0.15.18 (2017-07-04):
- - missing ``pure`` attribute on ``YAML`` useful for implementing `!include` tag
- constructor for `including YAML files in a YAML file
- <https://stackoverflow.com/a/44913652/1307905>`__
- - some documentation improvements
- - trigger of doc build on new revision
-
-0.15.17 (2017-07-03):
- - support for Unicode supplementary Plane **output**
- (input was already supported, triggered by
- `this <https://stackoverflow.com/a/44875714/1307905>`__ Stack Overflow Q&A)
-
-0.15.16 (2017-07-01):
- - minor typing issues (reported and fix provided by
- `Manvendra Singh <https://bitbucket.org/manu-chroma/>`__)
- - small doc improvements
-
-0.15.15 (2017-06-27):
- - fix for issue 135, typ='safe' not dumping in Python 2.7
- (reported by `Andrzej Ostrowski <https://bitbucket.org/aostr123/>`__)
-
-0.15.14 (2017-06-25):
- - fix for issue 133, in setup.py: change ModuleNotFoundError to
- ImportError (reported and fix by
- `Asley Drake <https://github.com/aldraco>`__)
-
-0.15.13 (2017-06-24):
- - suppress duplicate key warning on mappings with merge keys (reported by
- Cameron Sweeney)
-
-0.15.12 (2017-06-24):
- - remove fatal dependency of setup.py on wheel package (reported by
- Cameron Sweeney)
-
-0.15.11 (2017-06-24):
- - fix for issue 130, regression in nested merge keys (reported by
- `David Fee <https://bitbucket.org/dfee/>`__)
-
-0.15.10 (2017-06-23):
- - top level PreservedScalarString not indented if not explicitly asked to
- - remove Makefile (not very useful anyway)
- - some mypy additions
-
-0.15.9 (2017-06-16):
- - fix for issue 127: tagged scalars were always quoted and separated
- by a newline when in a block sequence (reported and largely fixed by
- `Tommy Wang <https://bitbucket.org/twang817/>`__)
-
-0.15.8 (2017-06-15):
- - allow plug-in install via ``install ruamel.yaml[jinja2]``
-
-0.15.7 (2017-06-14):
- - add plug-in mechanism for load/dump pre resp. post-processing
-
-0.15.6 (2017-06-10):
- - a set() with duplicate elements now throws error in rt loading
- - support for toplevel column zero literal/folded scalar in explicit documents
-
-0.15.5 (2017-06-08):
- - repeat `load()` on a single `YAML()` instance would fail.
-
-0.15.4 (2017-06-08):
- - `transform` parameter on dump that expects a function taking a
- string and returning a string. This allows transformation of the output
- before it is written to stream. This forces creation of the complete output in memory!
- - some updates to the docs
-
-0.15.3 (2017-06-07):
- - No longer try to compile C extensions on Windows. Compilation can be forced by setting
- the environment variable `RUAMEL_FORCE_EXT_BUILD` to some value
- before starting the `pip install`.
-
-0.15.2 (2017-06-07):
- - update to conform to mypy 0.511: mypy --strict
-
-0.15.1 (2017-06-07):
- - `duplicate keys <http://yaml.readthedocs.io/en/latest/api.html#duplicate-keys>`__
- in mappings generate an error (in the old API this change generates a warning until 0.16)
- - dependency on ruamel.ordereddict for 2.7 now via extras_require
-
-0.15.0 (2017-06-04):
- - it is now allowed to pass in a ``pathlib.Path`` as "stream" parameter to all
- load/dump functions
- - passing in a non-supported object (e.g. a string) as "stream" will result in a
- much more meaningful YAMLStreamError.
- - assigning a normal string value to an existing CommentedMap key or CommentedSeq
- element will result in a value cast to the previous value's type if possible.
- - added ``YAML`` class for new API
-
-0.14.12 (2017-05-14):
- - fix for issue 119, deepcopy not returning subclasses (reported and PR by
- Constantine Evans <cevans@evanslabs.org>)
-
-0.14.11 (2017-05-01):
- - fix for issue 103 allowing implicit documents after document end marker line (``...``)
- in YAML 1.2
-
-0.14.10 (2017-04-26):
- - fix problem with emitting using cyaml
-
-0.14.9 (2017-04-22):
- - remove dependency on ``typing`` while still supporting ``mypy``
- (http://stackoverflow.com/a/43516781/1307905)
- - fix unclarity in doc that stated 2.6 is supported (reported by feetdust)
-
-0.14.8 (2017-04-19):
- - fix Text not available on 3.5.0 and 3.5.1, now proactively setting version guards
- on all files (reported by `João Paulo Magalhães <https://bitbucket.org/jpmag/>`__)
-
-0.14.7 (2017-04-18):
- - round trip of integers (decimal, octal, hex, binary) now preserve
- leading zero(s) padding and underscores. Underscores are presumed
- to be at regular distances (i.e. ``0o12_345_67`` dumps back as
- ``0o1_23_45_67`` as the space from the last digit to the
- underscore before that is the determining factor).
-
-0.14.6 (2017-04-14):
- - binary, octal and hex integers are now preserved by default. This
- was a known deficiency. Working on this was prompted by the issue report (112)
- from devnoname120, as well as the additional experience with `.replace()`
- on `scalarstring` classes.
- - fix issues 114: cannot install on Buildozer (reported by mixmastamyk).
- Setting env. var ``RUAMEL_NO_PIP_INSTALL_CHECK`` will suppress ``pip``-check.
-
-0.14.5 (2017-04-04):
- - fix issue 109: None not dumping correctly at top level (reported by Andrea Censi)
- - fix issue 110: .replace on Preserved/DoubleQuoted/SingleQuoted ScalarString
- would give back "normal" string (reported by sandres23)
-
-0.14.4 (2017-03-31):
- - fix readme
-
-0.14.3 (2017-03-31):
- - fix for 0o52 not being a string in YAML 1.1 (reported on
- `StackOverflow Q&A 43138503 <http://stackoverflow.com/a/43138503/1307905>`__ by
- `Frank D <http://stackoverflow.com/users/7796630/frank-d>`__)
-
-0.14.2 (2017-03-23):
- - fix for old default pip on Ubuntu 14.04 (reported by Sébastien Maccagnoni-Munch)
-
-0.14.1 (2017-03-22):
- - fix Text not available on 3.5.0 and 3.5.1 (reported by Charles Bouchard-Légaré)
-
-0.14.0 (2017-03-21):
- - updates for mypy --strict
- - preparation for moving away from inheritance in Loader and Dumper, calls from e.g.
- the Representer to the Serializer.serialize() are now done via the attribute
- .serializer.serialize(). Usage of .serialize() outside of Serializer will be
- deprecated soon
- - some extra tests on main.py functions
----
For older changes see the file
-`CHANGES <https://bitbucket.org/ruamel/yaml/src/default/CHANGES>`_
+`CHANGES <https://sourceforge.net/p/ruamel-yaml/code/ci/default/tree/CHANGES>`_
diff --git a/__init__.py b/__init__.py
index 5bd0d3d..58e39af 100644
--- a/__init__.py
+++ b/__init__.py
@@ -1,47 +1,44 @@
# coding: utf-8
-from __future__ import print_function, absolute_import, division, unicode_literals
-
if False: # MYPY
from typing import Dict, Any # NOQA
_package_data = dict(
full_package_name='ruamel.yaml',
- version_info=(0, 16, 12),
- __version__='0.16.12',
+ version_info=(0, 17, 21),
+ __version__='0.17.21',
+ version_timestamp='2022-02-12 09:49:22',
author='Anthon van der Neut',
author_email='a.van.der.neut@ruamel.eu',
description='ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order', # NOQA
entry_points=None,
since=2014,
extras_require={
- ':platform_python_implementation=="CPython" and python_version<="2.7"': ['ruamel.ordereddict'], # NOQA
- ':platform_python_implementation=="CPython" and python_version<"3.9"': ['ruamel.yaml.clib>=0.1.2'], # NOQA
+ ':platform_python_implementation=="CPython" and python_version<"3.11"': ['ruamel.yaml.clib>=0.2.6'], # NOQA
'jinja2': ['ruamel.yaml.jinja2>=0.2'],
'docs': ['ryd'],
},
classifiers=[
- 'Programming Language :: Python :: 2.7',
- 'Programming Language :: Python :: 3.5',
- 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: 3.9',
+ 'Programming Language :: Python :: 3.10',
+ 'Programming Language :: Python :: 3.11',
'Programming Language :: Python :: Implementation :: CPython',
- 'Programming Language :: Python :: Implementation :: PyPy',
- 'Programming Language :: Python :: Implementation :: Jython',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup',
'Typing :: Typed',
],
keywords='yaml 1.2 parser round-trip preserve quotes order config',
read_the_docs='yaml',
- supported=[(2, 7), (3, 5)], # minimum
+ supported=[(3, 7)], # minimum
tox=dict(
- env='*', # remove 'pn', no longer test narrow Python 2.7 for unicode patterns and PyPy
- deps='ruamel.std.pathlib',
- fl8excl='_test/lib',
+ env='*',
+ fl8excl='_test/lib,branch_default',
),
- universal=True,
+ # universal=True,
+ python_requires='>=3',
rtfd='yaml',
) # type: Dict[Any, Any]
diff --git a/_doc/_static/pypi.svg b/_doc/_static/pypi.svg
index 89b8585..315b8c9 100644
--- a/_doc/_static/pypi.svg
+++ b/_doc/_static/pypi.svg
@@ -1 +1 @@
-<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="86" height="20"><linearGradient id="b" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="a"><rect width="86" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#a)"><path fill="#555" d="M0 0h33v20H0z"/><path fill="#007ec6" d="M33 0h53v20H33z"/><path fill="url(#b)" d="M0 0h86v20H0z"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="110"> <text x="175" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="230">pypi</text><text x="175" y="140" transform="scale(.1)" textLength="230">pypi</text><text x="585" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="430">0.16.12</text><text x="585" y="140" transform="scale(.1)" textLength="430">0.16.12</text></g> </svg>
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="86" height="20"><linearGradient id="b" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="a"><rect width="86" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#a)"><path fill="#555" d="M0 0h33v20H0z"/><path fill="#007ec6" d="M33 0h53v20H33z"/><path fill="url(#b)" d="M0 0h86v20H0z"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="110"> <text x="175" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="230">pypi</text><text x="175" y="140" transform="scale(.1)" textLength="230">pypi</text><text x="585" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="430">0.17.21</text><text x="585" y="140" transform="scale(.1)" textLength="430">0.17.21</text></g> </svg>
diff --git a/_doc/api.ryd b/_doc/api.ryd
index 314bfbd..8bdf9a4 100644
--- a/_doc/api.ryd
+++ b/_doc/api.ryd
@@ -1,5 +1,5 @@
-version: 0.1
-output: rst
+version: 0.2
+text: rst
fix_inline_single_backquotes: true
pdf: true
--- !python-pre |
@@ -47,17 +47,19 @@ Starting with 0.15.0 ``load()`` and ``dump()`` are methods on a
resp. the data and stream argument. All other parameters are set on the instance
of ``YAML`` before calling ``load()`` or ``dump()``
-Before 0.15.0::
---- !python |
-from pathlib import Path
-from ruamel import yaml
+Before 0.15.0 you could do:
-data = yaml.safe_load("abc: 1")
-out = Path('/tmp/out.yaml')
-with out.open('w') as fp:
- yaml.safe_dump(data, fp, default_flow_style=False)
---- |
-after::
+.. code:: python
+
+ from pathlib import Path
+ from ruamel import yaml
+
+ data = yaml.safe_load("abc: 1")
+ out = Path('/tmp/out.yaml')
+ with out.open('w') as fp:
+ yaml.safe_dump(data, fp, default_flow_style=False)
+
+after:
--- !python |
from pathlib import Path
from ruamel.yaml import YAML
@@ -111,7 +113,7 @@ PyYAML never enforced this although the YAML 1.1 specification already
required this.
In the new API (starting 0.15.1) duplicate keys in mappings are no longer allowed by
-default. To allow duplicate keys in mappings::
+default. To allow duplicate keys in mappings:
--- !python |
yaml = ruamel.yaml.YAML()
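The hunk above cuts the duplicate-key example off right after the instance is created; a minimal sketch of how it continues, using the ``allow_duplicate_keys`` attribute (the two-line document is made up for illustration):

.. code:: python

    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    yaml.allow_duplicate_keys = True    # without this, loading the document below raises DuplicateKeyError
    data = yaml.load("a: 1\na: 2\n")    # made-up document with a duplicate key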
@@ -213,18 +215,17 @@ for reading resp. writing.
Loading and dumping using the ``SafeLoader``::
---- !python |
-if ruamel.yaml.version_info < (0, 15):
- data = yaml.safe_load(istream)
- yaml.safe_dump(data, ostream)
-else:
- yml = ruamel.yaml.YAML(typ='safe', pure=True) # 'safe' load and dump
- data = yml.load(istream)
- yml.dump(data, ostream)
---- |
+ if ruamel.yaml.version_info < (0, 15):
+ data = yaml.safe_load(istream)
+ yaml.safe_dump(data, ostream)
+ else:
+ yml = ruamel.yaml.YAML(typ='safe', pure=True) # 'safe' load and dump
+ data = yml.load(istream)
+ yml.dump(data, ostream)
+
Loading with the ``CSafeLoader``, dumping with
``RoundTripLoader``. You need two ``YAML`` instances, but each of them
-can be re-used::
+can be re-used:
--- !python |
if ruamel.yaml.version_info < (0, 15):
data = yaml.load(istream, Loader=yaml.CSafeLoader)
diff --git a/_doc/basicuse.ryd b/_doc/basicuse.ryd
index ca5139b..35e7c38 100644
--- a/_doc/basicuse.ryd
+++ b/_doc/basicuse.ryd
@@ -1,5 +1,5 @@
-version: 0.1
-output: rst
+version: 0.2
+text: rst
fix_inline_single_backquotes: true
pdf: true
--- !python-pre |
@@ -15,7 +15,7 @@ data = dict(a=1)
Basic Usage
***********
-You load a YAML document using::
+You load a YAML document using:
--- !python |
from ruamel.yaml import YAML
@@ -43,7 +43,7 @@ in this ``s`` can be a file pointer (i.e. an object that has the
your output, just stream to ``sys.stdout``.
If you need to transform a string representation of the output provide
-a function that takes a string as input and returns one::
+a function that takes a string as input and returns one:
--- !python |
def tr(s):
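For completeness, a minimal sketch of hooking such a function into ``dump()`` via its ``transform`` parameter (the body of ``tr`` and the data are only illustrative):

.. code:: python

    import sys
    from ruamel.yaml import YAML

    def tr(s):
        # illustrative transform: upper-case the serialized document before writing
        return s.upper()

    yaml = YAML()
    yaml.dump(dict(a=1, b=2), sys.stdout, transform=tr)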
@@ -56,7 +56,7 @@ More examples
=============
Using the C based SafeLoader (which at this time is inherited from
-libyaml/PyYAML and e.g. loads ``0o52`` as well as ``052`` as integer ``42``)::
+libyaml/PyYAML and e.g. loads ``0o52`` as well as ``052`` as integer ``42``):
--- !python |
from ruamel.yaml import YAML
@@ -65,7 +65,7 @@ libyaml/PyYAML and e.g. loads ``0o52`` as well as ``052`` load as integer ``42``
yaml.load("""a:\n b: 2\n c: 3\n""")
--- |
-Using the Python based SafeLoader (YAML 1.2 support, ``052`` loads as ``52``)::
+Using the Python based SafeLoader (YAML 1.2 support, ``052`` loads as ``52``):
--- !python |
from ruamel.yaml import YAML
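
As a rough sketch of the ``transform=`` parameter mentioned in the
``_doc/basicuse.ryd`` hunk above: the function receives the complete serialized
document as one string, and its return value is what actually gets written (the
``tr`` helper below is purely illustrative):

.. code:: python

    import sys
    from ruamel.yaml import YAML

    def tr(s):
        # prepend a comment line to the serialized document before it is written
        return '# generated file\n' + s

    yaml = YAML()
    yaml.dump(dict(a=1, b=2), sys.stdout, transform=tr)
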
diff --git a/_doc/conf.py b/_doc/conf.py
index 30a76f5..67fcc0e 100644
--- a/_doc/conf.py
+++ b/_doc/conf.py
@@ -28,7 +28,7 @@ import os # NOQA
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
-extensions = []
+extensions = [] # type: ignore
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -45,11 +45,11 @@ source_suffix = ['.rst']
master_doc = 'index'
# General information about the project.
-project = u'yaml'
-copyright = u'2017-2019, Anthon van der Neut, Ruamel bvba'
-author = u'Anthon van der Neut'
+project = 'yaml'
+copyright = '2017-2021, Anthon van der Neut, Ruamel bvba'
+author = 'Anthon van der Neut'
-# The version info for the project you're documenting, acts as replacement for
+# The version info for the project you are documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
@@ -74,15 +74,13 @@ class ryd2rst:
if True:
try:
- from ryd.__main__ import RYDCmd
- from ruamel.std.pathlib import Path
+ from ryd.__main__ import main
+ from pathlib import Path
oldargv = sys.argv
for fn in Path('.').glob('*.ryd'):
sys.argv = ['ryd', 'convert', '--no-pdf', str(fn)]
- rc = RYDCmd()
- rc.parse_args()
- print(sys.argv, '->', rc.run())
+ main(sys.argv)
sys.argv = oldargv
except Exception as e:
@@ -251,8 +249,8 @@ latex_documents = [
(
master_doc,
'yaml.tex',
- u'Python YAML package documentation',
- u'Anthon van der Neut',
+ 'Python YAML package documentation',
+ 'Anthon van der Neut',
'manual',
)
]
@@ -282,7 +280,7 @@ latex_documents = [
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
-man_pages = [(master_doc, 'yaml', u'yaml Documentation', [author], 1)]
+man_pages = [(master_doc, 'yaml', 'yaml Documentation', [author], 1)]
# If true, show URL addresses after external links.
# man_show_urls = False
@@ -297,7 +295,7 @@ texinfo_documents = [
(
master_doc,
'yaml',
- u'yaml Documentation',
+ 'yaml Documentation',
author,
'yaml',
'One line description of project.',
diff --git a/_doc/contributing.ryd b/_doc/contributing.ryd
index 96e60f6..8c76325 100644
--- a/_doc/contributing.ryd
+++ b/_doc/contributing.ryd
@@ -1,5 +1,5 @@
-version: 0.1
-output: rst
+version: 0.2
+text: rst
fix_inline_single_backquotes: true
pdf: true
--- |
diff --git a/_doc/detail.ryd b/_doc/detail.ryd
index 00d0b8c..b25c939 100644
--- a/_doc/detail.ryd
+++ b/_doc/detail.ryd
@@ -1,5 +1,5 @@
-version: 0.1
-output: rst
+version: 0.2
+text: rst
fix_inline_single_backquotes: true
--- |
*******
@@ -179,7 +179,7 @@ adding/replacing comments
Starting with version 0.8, you can add/replace comments on block style
collections (mappings/sequences resulting in Python dict/list). The basic
-for for this is::
+form for this is:
--- !python |
from __future__ import print_function
@@ -205,7 +205,7 @@ for for this is::
data['abc'].yaml_add_eol_comment('comment 4', 1) # takes column of comment 1
data['xyz'].yaml_add_eol_comment('comment 5', 'c') # takes column of comment 2
data['xyz'].yaml_add_eol_comment('comment 6', 'e') # takes column of comment 3
- data['xyz'].yaml_add_eol_comment('comment 7', 'd', column=20)
+ data['xyz'].yaml_add_eol_comment('comment 7\n\n# that\'s all folks', 'd', column=20)
yaml.dump(data, sys.stdout)
--- !stdout |
@@ -229,6 +229,10 @@ from the example, the column to choose for a comment is derived
from the previous, next or preceding comment column (picking the first one
found).
+Make sure that an added comment is well formed: when it contains newlines,
+each following line must be empty, consist of spaces only, or have a `#` as
+its first non-space character (a short sketch follows this file's diff).
+
Config file formats
+++++++++++++++++++
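
A minimal sketch of the comment constraint added in the ``_doc/detail.ryd`` hunk
above (the document and comment text are illustrative; the call mirrors the one
in the hunk):

.. code:: python

    import sys
    from ruamel.yaml import YAML

    yaml = YAML()  # round-trip mode by default, so comments can be attached
    data = yaml.load("xyz:\n  c: 3\n  d: 4\n")
    # the embedded newline is followed by an empty line and by a line whose
    # first non-space character is '#', as the added note requires
    data['xyz'].yaml_add_eol_comment("comment 7\n\n# that's all folks", 'd', column=20)
    yaml.dump(data, sys.stdout)
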
diff --git a/_doc/dumpcls.ryd b/_doc/dumpcls.ryd
index 929d5f5..8e9ac6e 100644
--- a/_doc/dumpcls.ryd
+++ b/_doc/dumpcls.ryd
@@ -1,5 +1,5 @@
-version: 0.1
-output: rst
+version: 0.2
+text: rst
fix_inline_single_backquotes: true
pdf: true
# code_directory: ../_example
@@ -16,7 +16,7 @@ If you have instances of some class(es) that you want to dump or load, it is
easy to allow the YAML instance to do that explicitly. You can either register the
class with the ``YAML`` instance or decorate the class.
-Registering is done with ``YAML.register_class()``::
+Registering is done with ``YAML.register_class()``:
--- !python |
@@ -40,7 +40,7 @@ The tag ``!User`` originates from the name of the class.
You can specify a different tag by adding the attribute ``yaml_tag``, and
explicitly specify dump and/or load *classmethods* which have to be called
-``to_yaml`` resp. ``from_yaml``::
+``to_yaml`` resp. ``from_yaml``:
--- !python |
import sys
@@ -73,7 +73,7 @@ which gives as output::
--- |
When using the decorator, which takes the ``YAML()`` instance as a parameter,
-the ``yaml = YAML()`` line needs to be moved up in the file::
+the ``yaml = YAML()`` line needs to be moved up in the file:
--- !python |
import sys
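
A minimal sketch of the ``YAML.register_class()`` call described in the
``_doc/dumpcls.ryd`` hunk above (the ``User`` class and its fields are only an
example):

.. code:: python

    import sys
    from ruamel.yaml import YAML

    class User:
        def __init__(self, name, age):
            self.name = name
            self.age = age

    yaml = YAML()
    yaml.register_class(User)   # instances now dump with (and load from) the tag !User
    yaml.dump([User('Anthon', 18)], sys.stdout)
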
diff --git a/_doc/example.ryd b/_doc/example.ryd
index 44a3f2c..c38ead0 100644
--- a/_doc/example.ryd
+++ b/_doc/example.ryd
@@ -1,5 +1,5 @@
-version: 0.1
-output: rst
+version: 0.2
+text: rst
fix_inline_single_backquotes: true
pdf: true
--- |
@@ -8,7 +8,7 @@ Examples
********
Basic round trip of parsing YAML to Python objects, modifying
-and generating YAML::
+and generating YAML:
--- !python |
import sys
from ruamel.yaml import YAML
@@ -30,37 +30,11 @@ and generating YAML::
--- !stdout |
Resulting in::
--- |
-with the old API::
---- !python |
- from __future__ import print_function
-
- import sys
- import ruamel.yaml
-
- inp = """\
- # example
- name:
- # details
- family: Smith # very common
- given: Alice # one of the siblings
- """
-
- code = ruamel.yaml.load(inp, ruamel.yaml.RoundTripLoader)
- code['name']['given'] = 'Bob'
-
- ruamel.yaml.dump(code, sys.stdout, Dumper=ruamel.yaml.RoundTripDumper)
-
- # the last statement can be done less efficient in time and memory with
- # leaving out the end='' would cause a double newline at the end
- # print(ruamel.yaml.dump(code, Dumper=ruamel.yaml.RoundTripDumper), end='')
---- !stdout |
-Resulting in ::
---- |
----
YAML handcrafted anchors and references as well as key merging
are preserved. The merged keys can transparently be accessed
-using ``[]`` and ``.get()``::
+using ``[]`` and ``.get()``:
--- !python |
from ruamel.yaml import YAML
@@ -94,7 +68,7 @@ using ``[]`` and ``.get()``::
--- |
The ``CommentedMap``, which is the ``dict`` like construct one gets when round-trip loading,
-supports insertion of a key into a particular position, while optionally adding a comment::
+supports insertion of a key into a particular position, while optionally adding a comment:
--- !python |
import sys
from ruamel.yaml import YAML
@@ -125,7 +99,7 @@ both mappings and sequences. For sequences the indent is counted to the
beginning of the scalar, with the dash taking the first position of the
indented "space".
-You can change this default indentation by e.g. using ``yaml.indent()``::
+You can change this default indentation by e.g. using ``yaml.indent()``:
--- !python |
@@ -157,7 +131,7 @@ that the dash of the "parent" sequence is on the same line as the
first element resp. first key/value pair of the child collection.
If you want either or both of these (sequence within sequence, mapping
-within sequence) to begin on the next line use ``yaml.compact()``::
+within sequence) to begin on the next line use ``yaml.compact()``:
--- !python |
@@ -183,7 +157,7 @@ giving::
------
The following program uses three dumps on the same data, resulting in a stream with
-three documents::
+three documents:
--- !python |
import sys
@@ -259,7 +233,7 @@ sys.stdout)`` and do away with 90% of the cases for returning the string, and
that all post-processing YAML, before writing to stream, can be handled by using
the ``transform=`` parameter of dump, being able to handle most of the rest. But
it is also much easier in the new API to provide that YAML output as a string if
-you really need to have it (or think you do)::
+you really need to have it (or think you do):
--- !python |
import sys
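
If, as the ``_doc/example.ryd`` hunk above puts it, you really need the output as
a string under the new API, a minimal sketch is to dump into an in-memory stream
yourself (``buf`` and the sample data are illustrative):

.. code:: python

    from ruamel.yaml import YAML
    from ruamel.yaml.compat import StringIO

    yaml = YAML()
    buf = StringIO()
    yaml.dump(dict(a=[1, 2]), buf)
    text = buf.getvalue()   # the serialized YAML document as a str
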
diff --git a/_doc/index.ryd b/_doc/index.ryd
index a539e65..9333020 100644
--- a/_doc/index.ryd
+++ b/_doc/index.ryd
@@ -1,6 +1,7 @@
-version: 0.1
-output: rst
+version: 0.2
+text: rst
fix_inline_single_backquotes: true
+pdf: false
--- !comment |
Sections, subsections, etc. in .ryd files
# with overline, for parts
diff --git a/_doc/install.ryd b/_doc/install.ryd
index 1da00b8..2d54849 100644
--- a/_doc/install.ryd
+++ b/_doc/install.ryd
@@ -1,5 +1,5 @@
-version: 0.1
-output: rst
+version: 0.2
+text: rst
fix_inline_single_backquotes: true
# pdf: true
--- |
@@ -50,5 +50,5 @@ For CentOS (7) based systems you should do::
sudo yum install python-devel
---- !incraw |
+--- !inc-raw |
links.rydinc
diff --git a/_doc/overview.ryd b/_doc/overview.ryd
index 38b9b52..43be596 100644
--- a/_doc/overview.ryd
+++ b/_doc/overview.ryd
@@ -1,5 +1,5 @@
-version: 0.1
-output: rst
+version: 0.2
+text: rst
fix_inline_single_backquotes: true
--- |
********
@@ -47,5 +47,5 @@ Many of the bugs filed against PyYAML, but that were never
acted upon, have been fixed in ``ruamel.yaml``
---- !incraw |
+--- !inc-raw |
links.rydinc
diff --git a/_doc/pyyaml.ryd b/_doc/pyyaml.ryd
index af3c049..56ee391 100644
--- a/_doc/pyyaml.ryd
+++ b/_doc/pyyaml.ryd
@@ -1,6 +1,7 @@
-version: 0.1
-output: rst
+version: 0.2
+text: rst
fix_inline_single_backquotes: true
+pdf: true
--- |
***********************
Differences with PyYAML
@@ -79,5 +80,5 @@ API
Starting with 0.15 the API for using ``ruamel.yaml`` has diverged allowing
easier addition of new features.
---- !incraw |
-links.rydinc
+--- !inc-raw
+- links.rydinc
diff --git a/_test/data/construct-python-str-utf8-py2.code b/_test/data/construct-python-str-utf8-py2.code
index 47b28ab..6ca7d8f 100644
--- a/_test/data/construct-python-str-utf8-py2.code
+++ b/_test/data/construct-python-str-utf8-py2.code
@@ -1 +1 @@
-u'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'.encode('utf-8')
+'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'.encode('utf-8')
diff --git a/_test/data/construct-python-unicode-utf8-py2.code b/_test/data/construct-python-unicode-utf8-py2.code
index 2793ac7..9f66032 100644
--- a/_test/data/construct-python-unicode-utf8-py2.code
+++ b/_test/data/construct-python-unicode-utf8-py2.code
@@ -1 +1 @@
-u'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'
+'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'
diff --git a/_test/data/construct-str-utf8-py2.code b/_test/data/construct-str-utf8-py2.code
index 2793ac7..9f66032 100644
--- a/_test/data/construct-str-utf8-py2.code
+++ b/_test/data/construct-str-utf8-py2.code
@@ -1 +1 @@
-u'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'
+'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'
diff --git a/_test/lib/canonical.py b/_test/lib/canonical.py
index af2c3cf..31c9728 100644
--- a/_test/lib/canonical.py
+++ b/_test/lib/canonical.py
@@ -3,7 +3,6 @@ import ruamel.yaml
from ruamel.yaml.composer import Composer
from ruamel.yaml.constructor import Constructor
from ruamel.yaml.resolver import Resolver
-from ruamel.yaml.compat import unichr, PY3
class CanonicalError(ruamel.yaml.YAMLError):
@@ -13,14 +12,11 @@ class CanonicalError(ruamel.yaml.YAMLError):
class CanonicalScanner:
def __init__(self, data):
try:
- if PY3:
- if isinstance(data, bytes):
- data = data.decode('utf-8')
- else:
- data = unicode(data, 'utf-8') # NOQA
+ if isinstance(data, bytes):
+ data = data.decode('utf-8')
except UnicodeDecodeError:
raise CanonicalError('utf-8 stream is expected')
- self.data = data + u'\0'
+ self.data = data + '\0'
self.index = 0
self.tokens = []
self.scanned = False
@@ -59,51 +55,51 @@ class CanonicalScanner:
while True:
self.find_token()
ch = self.data[self.index]
- if ch == u'\0':
+ if ch == '\0':
self.tokens.append(ruamel.yaml.StreamEndToken(None, None))
break
- elif ch == u'%':
+ elif ch == '%':
self.tokens.append(self.scan_directive())
- elif ch == u'-' and self.data[self.index : self.index + 3] == u'---':
+ elif ch == '-' and self.data[self.index : self.index + 3] == '---':
self.index += 3
self.tokens.append(ruamel.yaml.DocumentStartToken(None, None))
- elif ch == u'[':
+ elif ch == '[':
self.index += 1
self.tokens.append(ruamel.yaml.FlowSequenceStartToken(None, None))
- elif ch == u'{':
+ elif ch == '{':
self.index += 1
self.tokens.append(ruamel.yaml.FlowMappingStartToken(None, None))
- elif ch == u']':
+ elif ch == ']':
self.index += 1
self.tokens.append(ruamel.yaml.FlowSequenceEndToken(None, None))
- elif ch == u'}':
+ elif ch == '}':
self.index += 1
self.tokens.append(ruamel.yaml.FlowMappingEndToken(None, None))
- elif ch == u'?':
+ elif ch == '?':
self.index += 1
self.tokens.append(ruamel.yaml.KeyToken(None, None))
- elif ch == u':':
+ elif ch == ':':
self.index += 1
self.tokens.append(ruamel.yaml.ValueToken(None, None))
- elif ch == u',':
+ elif ch == ',':
self.index += 1
self.tokens.append(ruamel.yaml.FlowEntryToken(None, None))
- elif ch == u'*' or ch == u'&':
+ elif ch == '*' or ch == '&':
self.tokens.append(self.scan_alias())
- elif ch == u'!':
+ elif ch == '!':
self.tokens.append(self.scan_tag())
- elif ch == u'"':
+ elif ch == '"':
self.tokens.append(self.scan_scalar())
else:
raise CanonicalError('invalid token')
self.scanned = True
- DIRECTIVE = u'%YAML 1.1'
+ DIRECTIVE = '%YAML 1.1'
def scan_directive(self):
if (
self.data[self.index : self.index + len(self.DIRECTIVE)] == self.DIRECTIVE
- and self.data[self.index + len(self.DIRECTIVE)] in u' \n\0'
+ and self.data[self.index + len(self.DIRECTIVE)] in ' \n\0'
):
self.index += len(self.DIRECTIVE)
return ruamel.yaml.DirectiveToken('YAML', (1, 1), None, None)
@@ -111,13 +107,13 @@ class CanonicalScanner:
raise CanonicalError('invalid directive')
def scan_alias(self):
- if self.data[self.index] == u'*':
+ if self.data[self.index] == '*':
TokenClass = ruamel.yaml.AliasToken
else:
TokenClass = ruamel.yaml.AnchorToken
self.index += 1
start = self.index
- while self.data[self.index] not in u', \n\0':
+ while self.data[self.index] not in ', \n\0':
self.index += 1
value = self.data[start : self.index]
return TokenClass(value, None, None)
@@ -125,38 +121,38 @@ class CanonicalScanner:
def scan_tag(self):
self.index += 1
start = self.index
- while self.data[self.index] not in u' \n\0':
+ while self.data[self.index] not in ' \n\0':
self.index += 1
value = self.data[start : self.index]
if not value:
- value = u'!'
- elif value[0] == u'!':
+ value = '!'
+ elif value[0] == '!':
value = 'tag:yaml.org,2002:' + value[1:]
- elif value[0] == u'<' and value[-1] == u'>':
+ elif value[0] == '<' and value[-1] == '>':
value = value[1:-1]
else:
- value = u'!' + value
+ value = '!' + value
return ruamel.yaml.TagToken(value, None, None)
QUOTE_CODES = {'x': 2, 'u': 4, 'U': 8}
QUOTE_REPLACES = {
- u'\\': u'\\',
- u'"': u'"',
- u' ': u' ',
- u'a': u'\x07',
- u'b': u'\x08',
- u'e': u'\x1B',
- u'f': u'\x0C',
- u'n': u'\x0A',
- u'r': u'\x0D',
- u't': u'\x09',
- u'v': u'\x0B',
- u'N': u'\u0085',
- u'L': u'\u2028',
- u'P': u'\u2029',
- u'_': u'_',
- u'0': u'\x00',
+ '\\': '\\',
+ '"': '"',
+ ' ': ' ',
+ 'a': '\x07',
+ 'b': '\x08',
+ 'e': '\x1B',
+ 'f': '\x0C',
+ 'n': '\x0A',
+ 'r': '\x0D',
+ 't': '\x09',
+ 'v': '\x0B',
+ 'N': '\u0085',
+ 'L': '\u2028',
+ 'P': '\u2029',
+ '_': '_',
+ '0': '\x00',
}
def scan_scalar(self):
@@ -164,32 +160,32 @@ class CanonicalScanner:
chunks = []
start = self.index
ignore_spaces = False
- while self.data[self.index] != u'"':
- if self.data[self.index] == u'\\':
+ while self.data[self.index] != '"':
+ if self.data[self.index] == '\\':
ignore_spaces = False
chunks.append(self.data[start : self.index])
self.index += 1
ch = self.data[self.index]
self.index += 1
- if ch == u'\n':
+ if ch == '\n':
ignore_spaces = True
elif ch in self.QUOTE_CODES:
length = self.QUOTE_CODES[ch]
code = int(self.data[self.index : self.index + length], 16)
- chunks.append(unichr(code))
+ chunks.append(chr(code))
self.index += length
else:
if ch not in self.QUOTE_REPLACES:
raise CanonicalError('invalid escape code')
chunks.append(self.QUOTE_REPLACES[ch])
start = self.index
- elif self.data[self.index] == u'\n':
+ elif self.data[self.index] == '\n':
chunks.append(self.data[start : self.index])
- chunks.append(u' ')
+ chunks.append(' ')
self.index += 1
start = self.index
ignore_spaces = True
- elif ignore_spaces and self.data[self.index] == u' ':
+ elif ignore_spaces and self.data[self.index] == ' ':
self.index += 1
start = self.index
else:
@@ -202,12 +198,12 @@ class CanonicalScanner:
def find_token(self):
found = False
while not found:
- while self.data[self.index] in u' \t':
+ while self.data[self.index] in ' \t':
self.index += 1
- if self.data[self.index] == u'#':
- while self.data[self.index] != u'\n':
+ if self.data[self.index] == '#':
+ while self.data[self.index] != '\n':
self.index += 1
- if self.data[self.index] == u'\n':
+ if self.data[self.index] == '\n':
self.index += 1
else:
found = True
@@ -343,42 +339,50 @@ ruamel.yaml.CanonicalLoader = CanonicalLoader
def canonical_scan(stream):
- return ruamel.yaml.scan(stream, Loader=CanonicalLoader)
+ yaml = ruamel.yaml.YAML()
+ yaml.scanner = CanonicalScanner
+ return yaml.scan(stream)
ruamel.yaml.canonical_scan = canonical_scan
def canonical_parse(stream):
- return ruamel.yaml.parse(stream, Loader=CanonicalLoader)
+ yaml = ruamel.yaml.YAML()
+ return yaml.parse(stream, Loader=CanonicalLoader)
ruamel.yaml.canonical_parse = canonical_parse
def canonical_compose(stream):
- return ruamel.yaml.compose(stream, Loader=CanonicalLoader)
+ yaml = ruamel.yaml.YAML()
+ return yaml.compose(stream, Loader=CanonicalLoader)
ruamel.yaml.canonical_compose = canonical_compose
def canonical_compose_all(stream):
- return ruamel.yaml.compose_all(stream, Loader=CanonicalLoader)
+ yaml = ruamel.yaml.YAML()
+ return yaml.compose_all(stream, Loader=CanonicalLoader)
ruamel.yaml.canonical_compose_all = canonical_compose_all
def canonical_load(stream):
- return ruamel.yaml.load(stream, Loader=CanonicalLoader)
+ yaml = ruamel.yaml.YAML()
+ return yaml.load(stream, Loader=CanonicalLoader)
ruamel.yaml.canonical_load = canonical_load
def canonical_load_all(stream):
- return ruamel.yaml.load_all(stream, Loader=CanonicalLoader)
+ yaml = ruamel.yaml.YAML(typ='safe', pure=True)
+ yaml.Loader = CanonicalLoader
+ return yaml.load_all(stream)
ruamel.yaml.canonical_load_all = canonical_load_all
diff --git a/_test/lib/test_appliance.py b/_test/lib/test_appliance.py
index 137c271..d624ebe 100644
--- a/_test/lib/test_appliance.py
+++ b/_test/lib/test_appliance.py
@@ -1,13 +1,10 @@
-from __future__ import print_function
-
import sys
import os
import types
import traceback
import pprint
import argparse
-from ruamel.yaml.compat import PY3
# DATA = 'tests/data'
# determine the position of data dynamically relative to program
@@ -35,7 +32,8 @@ def find_test_filenames(directory):
for filename in os.listdir(directory):
if os.path.isfile(os.path.join(directory, filename)):
base, ext = os.path.splitext(filename)
- if base.endswith('-py2' if PY3 else '-py3'):
+ # ToDo: remove
+ if base.endswith('-py2'):
continue
filenames.setdefault(base, []).append(ext)
filenames = sorted(filenames.items())
@@ -105,13 +103,7 @@ def parse_arguments(args):
def execute(function, filenames, verbose):
- if PY3:
- name = function.__name__
- else:
- if hasattr(function, 'unittest_name'):
- name = function.unittest_name
- else:
- name = function.func_name
+ name = function.__name__
if verbose:
sys.stdout.write('=' * 75 + '\n')
sys.stdout.write('%s(%s)...\n' % (name, ', '.join(filenames)))
@@ -164,12 +156,8 @@ def display(results, verbose):
for filename in filenames:
sys.stdout.write('-' * 75 + '\n')
sys.stdout.write('%s:\n' % filename)
- if PY3:
- with open(filename, 'r', errors='replace') as fp:
- data = fp.read()
- else:
- with open(filename, 'rb') as fp:
- data = fp.read()
+ with open(filename, 'r', errors='replace') as fp:
+ data = fp.read()
sys.stdout.write(data)
if data and data[-1] != '\n':
sys.stdout.write('\n')
diff --git a/_test/lib/test_canonical.py b/_test/lib/test_canonical.py
index 48a1764..b5cd14d 100644
--- a/_test/lib/test_canonical.py
+++ b/_test/lib/test_canonical.py
@@ -1,5 +1,4 @@
-# from __future__ import absolute_import
-from __future__ import print_function
+
import ruamel.yaml
import canonical # NOQA
diff --git a/_test/lib/test_constructor.py b/_test/lib/test_constructor.py
index a66ff1a..b38bf2f 100644
--- a/_test/lib/test_constructor.py
+++ b/_test/lib/test_constructor.py
@@ -1,9 +1,6 @@
-from __future__ import absolute_import
-from __future__ import print_function
import ruamel.yaml
import pprint
-from ruamel.yaml.compat import PY2
import datetime
@@ -124,7 +121,7 @@ def _make_objects():
else:
return False
- class AnObject(object):
+ class AnObject:
def __new__(cls, foo=None, bar=None, baz=None):
self = object.__new__(cls)
self.foo = foo
@@ -211,7 +208,6 @@ def _make_objects():
def __setstate__(self, state):
self.baz = state
- # if PY3 or PY2:
InitArgs = NewArgs
InitArgsWithState = NewArgsWithState
@@ -291,8 +287,6 @@ def _serialize_value(data):
return '{%s}' % ', '.join(items)
elif isinstance(data, datetime.datetime):
return repr(data.utctimetuple())
- elif PY2 and isinstance(data, unicode): # NOQA
- return data.encode('utf-8')
elif isinstance(data, float) and data != data:
return '?'
else:
@@ -303,9 +297,11 @@ def test_constructor_types(data_filename, code_filename, verbose=False):
_make_objects()
native1 = None
native2 = None
+ yaml = ruamel.yaml.YAML(typ='safe', pure=True)
+ yaml.loader = MyLoader
try:
with open(data_filename, 'rb') as fp0:
- native1 = list(ruamel.yaml.load_all(fp0, Loader=MyLoader))
+ native1 = list(yaml.load_all(fp0))
if len(native1) == 1:
native1 = native1[0]
with open(code_filename, 'rb') as fp0:
@@ -337,7 +333,9 @@ def test_roundtrip_data(code_filename, roundtrip_filename, verbose=False):
_make_objects()
with open(code_filename, 'rb') as fp0:
value1 = fp0.read()
- native2 = list(ruamel.yaml.load_all(value1, Loader=MyLoader))
+    yaml = ruamel.yaml.YAML(typ='safe', pure=True)
+ yaml.Loader = MyLoader
+ native2 = list(yaml.load_all(value1))
if len(native2) == 1:
native2 = native2[0]
try:
diff --git a/_test/lib/test_emitter.py b/_test/lib/test_emitter.py
index fbdbb79..b1991e3 100644
--- a/_test/lib/test_emitter.py
+++ b/_test/lib/test_emitter.py
@@ -1,7 +1,8 @@
from __future__ import absolute_import
from __future__ import print_function
-import ruamel.yaml as yaml
+import ruamel.yaml
+from ruamel.yaml import YAML
def _compare_events(events1, events2):
@@ -20,8 +21,8 @@ def _compare_events(events1, events2):
def test_emitter_on_data(data_filename, canonical_filename, verbose=False):
with open(data_filename, 'rb') as fp0:
- events = list(yaml.parse(fp0))
- output = yaml.emit(events)
+ events = list(YAML().parse(fp0))
+ output = YAML().emit(events)
if verbose:
print('OUTPUT:')
print(output)
@@ -34,9 +35,9 @@ test_emitter_on_data.unittest = ['.data', '.canonical']
def test_emitter_on_canonical(canonical_filename, verbose=False):
with open(canonical_filename, 'rb') as fp0:
- events = list(yaml.parse(fp0))
+ events = list(YAML().parse(fp0))
for canonical in [False, True]:
- output = yaml.emit(events, canonical=canonical)
+ output = YAML().emit(events, canonical=canonical)
if verbose:
print('OUTPUT (canonical=%s):' % canonical)
print(output)
@@ -50,7 +51,7 @@ test_emitter_on_canonical.unittest = ['.canonical']
def test_emitter_styles(data_filename, canonical_filename, verbose=False):
for filename in [data_filename, canonical_filename]:
with open(filename, 'rb') as fp0:
- events = list(yaml.parse(fp0))
+ events = list(YAML().parse(fp0))
for flow_style in [False, True]:
for style in ['|', '>', '"', "'", ""]:
styled_events = []
@@ -68,23 +69,23 @@ def test_emitter_styles(data_filename, canonical_filename, verbose=False):
event.anchor, event.tag, event.implicit, flow_style=flow_style
)
styled_events.append(event)
- output = yaml.emit(styled_events)
+ output = YAML().emit(styled_events)
if verbose:
print(
'OUTPUT (filename=%r, flow_style=%r, style=%r)'
% (filename, flow_style, style)
)
print(output)
- new_events = list(yaml.parse(output))
+ new_events = list(YAML().parse(output))
_compare_events(events, new_events)
test_emitter_styles.unittest = ['.data', '.canonical']
-class EventsLoader(yaml.Loader):
+class EventsLoader(ruamel.yaml.Loader):
def construct_event(self, node):
- if isinstance(node, yaml.ScalarNode):
+ if isinstance(node, ruamel.yaml.ScalarNode):
mapping = {}
else:
mapping = self.construct_mapping(node)
@@ -116,12 +117,12 @@ EventsLoader.add_constructor(None, EventsLoader.construct_event)
def test_emitter_events(events_filename, verbose=False):
with open(events_filename, 'rb') as fp0:
- events = list(yaml.load(fp0, Loader=EventsLoader))
- output = yaml.emit(events)
+ events = list(YAML().load(fp0, Loader=EventsLoader))
+ output = YAML().emit(events)
if verbose:
print('OUTPUT:')
print(output)
- new_events = list(yaml.parse(output))
+ new_events = list(YAML().parse(output))
_compare_events(events, new_events)
diff --git a/_test/lib/test_errors.py b/_test/lib/test_errors.py
index b43540c..c0fd3df 100644
--- a/_test/lib/test_errors.py
+++ b/_test/lib/test_errors.py
@@ -1,14 +1,15 @@
-from __future__ import absolute_import
-from __future__ import print_function
-import ruamel.yaml as yaml
+import ruamel.yaml
+YAML = ruamel.yaml.YAML
+
import test_emitter
import warnings
-warnings.simplefilter('ignore', yaml.error.UnsafeLoaderWarning)
+warnings.simplefilter('ignore', ruamel.yaml.error.UnsafeLoaderWarning)
def test_loader_error(error_filename, verbose=False):
+ yaml = YAML(typ='safe', pure=True)
try:
with open(error_filename, 'rb') as fp0:
list(yaml.load_all(fp0))
@@ -23,6 +24,7 @@ test_loader_error.unittest = ['.loader-error']
def test_loader_error_string(error_filename, verbose=False):
+ yaml = YAML(typ='safe', pure=True)
try:
with open(error_filename, 'rb') as fp0:
list(yaml.load_all(fp0.read()))
@@ -37,6 +39,7 @@ test_loader_error_string.unittest = ['.loader-error']
def test_loader_error_single(error_filename, verbose=False):
+ yaml = YAML(typ='safe', pure=True)
try:
with open(error_filename, 'rb') as fp0:
yaml.load(fp0.read())
@@ -51,10 +54,11 @@ test_loader_error_single.unittest = ['.single-loader-error']
def test_emitter_error(error_filename, verbose=False):
+ yaml = YAML(typ='safe', pure=True)
with open(error_filename, 'rb') as fp0:
events = list(yaml.load(fp0, Loader=test_emitter.EventsLoader))
try:
- yaml.emit(events)
+ ruamel.yaml.emit(events)
except yaml.YAMLError as exc:
if verbose:
print('%s:' % exc.__class__.__name__, exc)
@@ -66,6 +70,7 @@ test_emitter_error.unittest = ['.emitter-error']
def test_dumper_error(error_filename, verbose=False):
+ yaml = YAML(typ='safe', pure=True)
with open(error_filename, 'rb') as fp0:
code = fp0.read()
try:
diff --git a/_test/lib/test_input_output.py b/_test/lib/test_input_output.py
index c36477f..37bda3d 100644
--- a/_test/lib/test_input_output.py
+++ b/_test/lib/test_input_output.py
@@ -1,79 +1,39 @@
-from __future__ import absolute_import
-from __future__ import print_function
-import ruamel.yaml as yaml
+from ruamel.yaml import YAML
import codecs
import tempfile
import os
import os.path
-from ruamel.yaml.compat import PY2, PY3, StringIO, BytesIO
+from ruamel.yaml.compat import StringIO, BytesIO
-if PY2:
-
- def _unicode_open(file, encoding, errors='strict'):
- info = codecs.lookup(encoding)
- if isinstance(info, tuple):
- reader = info[2]
- writer = info[3]
- else:
- reader = info.streamreader
- writer = info.streamwriter
- srw = codecs.StreamReaderWriter(file, reader, writer, errors)
- srw.encoding = encoding
- return srw
-
-
-if PY3:
-
- def test_unicode_input(unicode_filename, verbose=False):
- with open(unicode_filename, 'rb') as fp:
- data = fp.read().decode('utf-8')
- value = ' '.join(data.split())
- output = yaml.load(data)
- assert output == value, (output, value)
- output = yaml.load(StringIO(data))
+def test_unicode_input(unicode_filename, verbose=False):
+ yaml = YAML(typ='safe', pure=True)
+ with open(unicode_filename, 'rb') as fp:
+ data = fp.read().decode('utf-8')
+ value = ' '.join(data.split())
+ output = yaml.load(data)
+ assert output == value, (output, value)
+ output = yaml.load(StringIO(data))
+ assert output == value, (output, value)
+ for input in [
+ data.encode('utf-8'),
+ codecs.BOM_UTF8 + data.encode('utf-8'),
+ codecs.BOM_UTF16_BE + data.encode('utf-16-be'),
+ codecs.BOM_UTF16_LE + data.encode('utf-16-le'),
+ ]:
+ if verbose:
+ print('INPUT:', repr(input[:10]), '...')
+ output = yaml.load(input)
assert output == value, (output, value)
- for input in [
- data.encode('utf-8'),
- codecs.BOM_UTF8 + data.encode('utf-8'),
- codecs.BOM_UTF16_BE + data.encode('utf-16-be'),
- codecs.BOM_UTF16_LE + data.encode('utf-16-le'),
- ]:
- if verbose:
- print('INPUT:', repr(input[:10]), '...')
- output = yaml.load(input)
- assert output == value, (output, value)
- output = yaml.load(BytesIO(input))
- assert output == value, (output, value)
-
-
-else:
-
- def test_unicode_input(unicode_filename, verbose=False):
- with open(unicode_filename, 'rb') as fp:
- data = fp.read().decode('utf-8')
- value = ' '.join(data.split())
- output = yaml.load(_unicode_open(StringIO(data.encode('utf-8')), 'utf-8'))
+ output = yaml.load(BytesIO(input))
assert output == value, (output, value)
- for input in [
- data,
- data.encode('utf-8'),
- codecs.BOM_UTF8 + data.encode('utf-8'),
- codecs.BOM_UTF16_BE + data.encode('utf-16-be'),
- codecs.BOM_UTF16_LE + data.encode('utf-16-le'),
- ]:
- if verbose:
- print('INPUT:', repr(input[:10]), '...')
- output = yaml.load(input)
- assert output == value, (output, value)
- output = yaml.load(StringIO(input))
- assert output == value, (output, value)
test_unicode_input.unittest = ['.unicode']
def test_unicode_input_errors(unicode_filename, verbose=False):
+ yaml = YAML(typ='safe', pure=True)
with open(unicode_filename, 'rb') as fp:
data = fp.read().decode('utf-8')
for input in [
@@ -92,7 +52,7 @@ def test_unicode_input_errors(unicode_filename, verbose=False):
else:
raise AssertionError('expected an exception')
try:
- yaml.load(BytesIO(input) if PY3 else StringIO(input))
+ yaml.load(BytesIO(input))
except yaml.YAMLError as exc:
if verbose:
print(exc)
@@ -102,108 +62,63 @@ def test_unicode_input_errors(unicode_filename, verbose=False):
test_unicode_input_errors.unittest = ['.unicode']
-if PY3:
- def test_unicode_output(unicode_filename, verbose=False):
- with open(unicode_filename, 'rb') as fp:
- data = fp.read().decode('utf-8')
- value = ' '.join(data.split())
- for allow_unicode in [False, True]:
- data1 = yaml.dump(value, allow_unicode=allow_unicode)
- for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
- stream = StringIO()
+def test_unicode_output(unicode_filename, verbose=False):
+ yaml = YAML(typ='safe', pure=True)
+ with open(unicode_filename, 'rb') as fp:
+ data = fp.read().decode('utf-8')
+ value = ' '.join(data.split())
+ for allow_unicode in [False, True]:
+ data1 = yaml.dump(value, allow_unicode=allow_unicode)
+ for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
+ stream = StringIO()
+ yaml.dump(value, stream, encoding=encoding, allow_unicode=allow_unicode)
+ data2 = stream.getvalue()
+ data3 = yaml.dump(value, encoding=encoding, allow_unicode=allow_unicode)
+ if encoding is not None:
+ assert isinstance(data3, bytes)
+ data3 = data3.decode(encoding)
+ stream = BytesIO()
+ if encoding is None:
+ try:
+ yaml.dump(
+ value, stream, encoding=encoding, allow_unicode=allow_unicode
+ )
+ except TypeError as exc:
+ if verbose:
+ print(exc)
+ data4 = None
+ else:
+ raise AssertionError('expected an exception')
+ else:
yaml.dump(value, stream, encoding=encoding, allow_unicode=allow_unicode)
- data2 = stream.getvalue()
- data3 = yaml.dump(value, encoding=encoding, allow_unicode=allow_unicode)
- if encoding is not None:
- assert isinstance(data3, bytes)
- data3 = data3.decode(encoding)
- stream = BytesIO()
- if encoding is None:
+ data4 = stream.getvalue()
+ if verbose:
+ print('BYTES:', data4[:50])
+ data4 = data4.decode(encoding)
+ for copy in [data1, data2, data3, data4]:
+ if copy is None:
+ continue
+ assert isinstance(copy, str)
+ if allow_unicode:
try:
- yaml.dump(
- value, stream, encoding=encoding, allow_unicode=allow_unicode
- )
- except TypeError as exc:
+ copy[4:].encode('ascii')
+ except UnicodeEncodeError as exc:
if verbose:
print(exc)
- data4 = None
else:
raise AssertionError('expected an exception')
else:
- yaml.dump(value, stream, encoding=encoding, allow_unicode=allow_unicode)
- data4 = stream.getvalue()
- if verbose:
- print('BYTES:', data4[:50])
- data4 = data4.decode(encoding)
- for copy in [data1, data2, data3, data4]:
- if copy is None:
- continue
- assert isinstance(copy, str)
- if allow_unicode:
- try:
- copy[4:].encode('ascii')
- except UnicodeEncodeError as exc:
- if verbose:
- print(exc)
- else:
- raise AssertionError('expected an exception')
- else:
- copy[4:].encode('ascii')
- assert isinstance(data1, str), (type(data1), encoding)
- assert isinstance(data2, str), (type(data2), encoding)
-
-
-else:
-
- def test_unicode_output(unicode_filename, verbose=False):
- with open(unicode_filename, 'rb') as fp:
- data = fp.read().decode('utf-8')
- value = ' '.join(data.split())
- for allow_unicode in [False, True]:
- data1 = yaml.dump(value, allow_unicode=allow_unicode)
- for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
- stream = StringIO()
- yaml.dump(
- value,
- _unicode_open(stream, 'utf-8'),
- encoding=encoding,
- allow_unicode=allow_unicode,
- )
- data2 = stream.getvalue()
- data3 = yaml.dump(value, encoding=encoding, allow_unicode=allow_unicode)
- stream = StringIO()
- yaml.dump(value, stream, encoding=encoding, allow_unicode=allow_unicode)
- data4 = stream.getvalue()
- for copy in [data1, data2, data3, data4]:
- if allow_unicode:
- try:
- copy[4:].encode('ascii')
- except (UnicodeDecodeError, UnicodeEncodeError) as exc:
- if verbose:
- print(exc)
- else:
- raise AssertionError('expected an exception')
- else:
- copy[4:].encode('ascii')
- assert isinstance(data1, str), (type(data1), encoding)
- data1.decode('utf-8')
- assert isinstance(data2, str), (type(data2), encoding)
- data2.decode('utf-8')
- if encoding is None:
- assert isinstance(data3, unicode), (type(data3), encoding) # NOQA
- assert isinstance(data4, unicode), (type(data4), encoding) # NOQA
- else:
- assert isinstance(data3, str), (type(data3), encoding)
- data3.decode(encoding)
- assert isinstance(data4, str), (type(data4), encoding)
- data4.decode(encoding)
+ copy[4:].encode('ascii')
+ assert isinstance(data1, str), (type(data1), encoding)
+ assert isinstance(data2, str), (type(data2), encoding)
test_unicode_output.unittest = ['.unicode']
def test_file_output(unicode_filename, verbose=False):
+ yaml = YAML(typ='safe', pure=True)
with open(unicode_filename, 'rb') as fp:
data = fp.read().decode('utf-8')
handle, filename = tempfile.mkstemp()
@@ -212,32 +127,17 @@ def test_file_output(unicode_filename, verbose=False):
stream = StringIO()
yaml.dump(data, stream, allow_unicode=True)
data1 = stream.getvalue()
- if PY3:
- stream = BytesIO()
- yaml.dump(data, stream, encoding='utf-16-le', allow_unicode=True)
- data2 = stream.getvalue().decode('utf-16-le')[1:]
- with open(filename, 'w', encoding='utf-16-le') as stream:
- yaml.dump(data, stream, allow_unicode=True)
- with open(filename, 'r', encoding='utf-16-le') as fp0:
- data3 = fp0.read()
- with open(filename, 'wb') as stream:
- yaml.dump(data, stream, encoding='utf-8', allow_unicode=True)
- with open(filename, 'r', encoding='utf-8') as fp0:
- data4 = fp0.read()
- else:
- with open(filename, 'wb') as stream:
- yaml.dump(data, stream, allow_unicode=True)
- with open(filename, 'rb') as fp0:
- data2 = fp0.read()
- with open(filename, 'wb') as stream:
- yaml.dump(data, stream, encoding='utf-16-le', allow_unicode=True)
- with open(filename, 'rb') as fp0:
- data3 = fp0.read().decode('utf-16-le')[1:].encode('utf-8')
- stream = _unicode_open(open(filename, 'wb'), 'utf-8')
+ stream = BytesIO()
+ yaml.dump(data, stream, encoding='utf-16-le', allow_unicode=True)
+ data2 = stream.getvalue().decode('utf-16-le')[1:]
+ with open(filename, 'w', encoding='utf-16-le') as stream:
yaml.dump(data, stream, allow_unicode=True)
- stream.close()
- with open(filename, 'rb') as fp0:
- data4 = fp0.read()
+ with open(filename, 'r', encoding='utf-16-le') as fp0:
+ data3 = fp0.read()
+ with open(filename, 'wb') as stream:
+ yaml.dump(data, stream, encoding='utf-8', allow_unicode=True)
+ with open(filename, 'r', encoding='utf-8') as fp0:
+ data4 = fp0.read()
assert data1 == data2, (data1, data2)
assert data1 == data3, (data1, data3)
assert data1 == data4, (data1, data4)
@@ -250,40 +150,26 @@ test_file_output.unittest = ['.unicode']
def test_unicode_transfer(unicode_filename, verbose=False):
+ yaml = YAML(typ='safe', pure=True)
with open(unicode_filename, 'rb') as fp:
data = fp.read().decode('utf-8')
for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
input = data
- if PY3:
- if encoding is not None:
- input = ('\ufeff' + input).encode(encoding)
- output1 = yaml.emit(yaml.parse(input), allow_unicode=True)
- if encoding is None:
- stream = StringIO()
- else:
- stream = BytesIO()
- yaml.emit(yaml.parse(input), stream, allow_unicode=True)
- output2 = stream.getvalue()
- assert isinstance(output1, str), (type(output1), encoding)
- if encoding is None:
- assert isinstance(output2, str), (type(output1), encoding)
- else:
- assert isinstance(output2, bytes), (type(output1), encoding)
- output2.decode(encoding)
- else:
- if encoding is not None:
- input = (u'\ufeff' + input).encode(encoding)
- output1 = yaml.emit(yaml.parse(input), allow_unicode=True)
+ if encoding is not None:
+ input = ('\ufeff' + input).encode(encoding)
+ output1 = yaml.emit(yaml.parse(input), allow_unicode=True)
+ if encoding is None:
stream = StringIO()
- yaml.emit(yaml.parse(input), _unicode_open(stream, 'utf-8'), allow_unicode=True)
- output2 = stream.getvalue()
- if encoding is None:
- assert isinstance(output1, unicode), (type(output1), encoding) # NOQA
- else:
- assert isinstance(output1, str), (type(output1), encoding)
- output1.decode(encoding)
- assert isinstance(output2, str), (type(output2), encoding)
- output2.decode('utf-8')
+ else:
+ stream = BytesIO()
+ yaml.emit(yaml.parse(input), stream, allow_unicode=True)
+ output2 = stream.getvalue()
+ assert isinstance(output1, str), (type(output1), encoding)
+ if encoding is None:
+ assert isinstance(output2, str), (type(output1), encoding)
+ else:
+ assert isinstance(output2, bytes), (type(output1), encoding)
+ output2.decode(encoding)
test_unicode_transfer.unittest = ['.unicode']
diff --git a/_test/lib/test_mark.py b/_test/lib/test_mark.py
index 0ff2789..2644a79 100644
--- a/_test/lib/test_mark.py
+++ b/_test/lib/test_mark.py
@@ -1,12 +1,9 @@
-from __future__ import absolute_import
-from __future__ import print_function
import ruamel.yaml as yaml
-from ruamel.yaml.compat import text_type, PY3
def test_marks(marks_filename, verbose=False):
- with open(marks_filename, 'r' if PY3 else 'rb') as fp0:
+ with open(marks_filename, 'r') as fp0:
inputs = fp0.read().split('---\n')[1:]
for input in inputs:
index = 0
@@ -19,7 +16,7 @@ def test_marks(marks_filename, verbose=False):
else:
column += 1
index += 1
- mark = yaml.Mark(marks_filename, index, line, column, text_type(input), index)
+ mark = yaml.Mark(marks_filename, index, line, column, str(input), index)
snippet = mark.get_snippet(indent=2, max_length=79)
if verbose:
print(snippet)
diff --git a/_test/lib/test_reader.py b/_test/lib/test_reader.py
index 6604f24..16b9cd7 100644
--- a/_test/lib/test_reader.py
+++ b/_test/lib/test_reader.py
@@ -1,17 +1,14 @@
-from __future__ import absolute_import
-from __future__ import print_function
import codecs # NOQA
import io
-from ruamel.yaml.compat import PY2
import ruamel.yaml.reader
def _run_reader(data, verbose):
try:
stream = ruamel.yaml.py.reader.Reader(data)
- while stream.peek() != u'\0':
+ while stream.peek() != '\0':
stream.forward()
except ruamel.yaml.py.reader.ReaderError as exc:
if verbose:
@@ -27,12 +24,8 @@ def test_stream_error(error_filename, verbose=False):
_run_reader(fp0.read(), verbose)
for encoding in ['utf-8', 'utf-16-le', 'utf-16-be']:
try:
- if PY2:
- with open(error_filename, 'rb') as fp0:
- data = unicode(fp0.read(), encoding) # NOQA
- else:
- with open(error_filename, 'rb') as fp0:
- data = fp0.read().decode(encoding)
+ with open(error_filename, 'rb') as fp0:
+ data = fp0.read().decode(encoding)
break
except UnicodeDecodeError:
pass
diff --git a/_test/lib/test_recursive.py b/_test/lib/test_recursive.py
index c87f879..88858e4 100644
--- a/_test/lib/test_recursive.py
+++ b/_test/lib/test_recursive.py
@@ -1,7 +1,5 @@
-from __future__ import absolute_import
-from __future__ import print_function
-import ruamel.yaml as yaml
+import ruamel.yaml
class AnInstance:
@@ -25,6 +23,7 @@ class AnInstanceWithState(AnInstance):
def test_recursive(recursive_filename, verbose=False):
+ yaml = ruamel.yaml.YAML(typ='safe', pure=True)
context = globals().copy()
with open(recursive_filename, 'rb') as fp0:
exec(fp0.read(), context)
@@ -33,9 +32,13 @@ def test_recursive(recursive_filename, verbose=False):
value2 = None
output2 = None
try:
- output1 = yaml.dump(value1)
- value2 = yaml.load(output1)
- output2 = yaml.dump(value2)
+        buf = ruamel.yaml.compat.StringIO()
+        yaml.dump(value1, buf)
+        output1 = buf.getvalue()
+        value2 = yaml.load(output1)
+        buf = ruamel.yaml.compat.StringIO()
+        yaml.dump(value2, buf)
+        output2 = buf.getvalue()
assert output1 == output2, (output1, output2)
finally:
if verbose:
diff --git a/_test/lib/test_representer.py b/_test/lib/test_representer.py
index a83d2b2..5b2415d 100644
--- a/_test/lib/test_representer.py
+++ b/_test/lib/test_representer.py
@@ -1,12 +1,11 @@
-from __future__ import absolute_import
-from __future__ import print_function
-import ruamel.yaml as yaml
+from ruamel.yaml import YAML
import test_constructor
import pprint
def test_representer_types(code_filename, verbose=False):
+ yaml = YAML(typ='safe', pure=True)
test_constructor._make_objects()
for allow_unicode in [False, True]:
for encoding in ['utf-8', 'utf-16-be', 'utf-16-le']:
diff --git a/_test/lib/test_resolver.py b/_test/lib/test_resolver.py
index 0a04e7a..b2b0839 100644
--- a/_test/lib/test_resolver.py
+++ b/_test/lib/test_resolver.py
@@ -1,16 +1,14 @@
-from __future__ import absolute_import
-from __future__ import print_function
-import ruamel.yaml as yaml
+import ruamel.yaml
+yaml = ruamel.yaml.YAML()
import pprint
-from ruamel.yaml.compat import PY3
def test_implicit_resolver(data_filename, detect_filename, verbose=False):
correct_tag = None
node = None
try:
- with open(detect_filename, 'r' if PY3 else 'rb') as fp0:
+ with open(detect_filename, 'r') as fp0:
correct_tag = fp0.read().strip()
with open(data_filename, 'rb') as fp0:
node = yaml.compose(fp0)
@@ -38,14 +36,14 @@ def _make_path_loader_and_dumper():
class MyDumper(yaml.Dumper):
pass
- yaml.add_path_resolver(u'!root', [], Loader=MyLoader, Dumper=MyDumper)
- yaml.add_path_resolver(u'!root/scalar', [], str, Loader=MyLoader, Dumper=MyDumper)
+ yaml.add_path_resolver('!root', [], Loader=MyLoader, Dumper=MyDumper)
+ yaml.add_path_resolver('!root/scalar', [], str, Loader=MyLoader, Dumper=MyDumper)
yaml.add_path_resolver(
- u'!root/key11/key12/*', ['key11', 'key12'], Loader=MyLoader, Dumper=MyDumper
+ '!root/key11/key12/*', ['key11', 'key12'], Loader=MyLoader, Dumper=MyDumper
)
- yaml.add_path_resolver(u'!root/key21/1/*', ['key21', 1], Loader=MyLoader, Dumper=MyDumper)
+ yaml.add_path_resolver('!root/key21/1/*', ['key21', 1], Loader=MyLoader, Dumper=MyDumper)
yaml.add_path_resolver(
- u'!root/key31/*/*/key14/map',
+ '!root/key31/*/*/key14/map',
['key31', None, None, 'key14'],
dict,
Loader=MyLoader,
diff --git a/_test/lib/test_structure.py b/_test/lib/test_structure.py
index 2656bbb..8de24a3 100644
--- a/_test/lib/test_structure.py
+++ b/_test/lib/test_structure.py
@@ -1,36 +1,33 @@
-from __future__ import absolute_import
-from __future__ import print_function
-import ruamel.yaml as yaml
+import ruamel.yaml
import canonical # NOQA
import pprint
-from ruamel.yaml.compat import text_type, PY3
def _convert_structure(loader):
- if loader.check_event(yaml.ScalarEvent):
+ if loader.check_event(ruamel.yaml.ScalarEvent):
event = loader.get_event()
if event.tag or event.anchor or event.value:
return True
else:
return None
- elif loader.check_event(yaml.SequenceStartEvent):
+ elif loader.check_event(ruamel.yaml.SequenceStartEvent):
loader.get_event()
sequence = []
- while not loader.check_event(yaml.SequenceEndEvent):
+ while not loader.check_event(ruamel.yaml.SequenceEndEvent):
sequence.append(_convert_structure(loader))
loader.get_event()
return sequence
- elif loader.check_event(yaml.MappingStartEvent):
+ elif loader.check_event(ruamel.yaml.MappingStartEvent):
loader.get_event()
mapping = []
- while not loader.check_event(yaml.MappingEndEvent):
+ while not loader.check_event(ruamel.yaml.MappingEndEvent):
key = _convert_structure(loader)
value = _convert_structure(loader)
mapping.append((key, value))
loader.get_event()
return mapping
- elif loader.check_event(yaml.AliasEvent):
+ elif loader.check_event(ruamel.yaml.AliasEvent):
loader.get_event()
return '*'
else:
@@ -40,17 +37,17 @@ def _convert_structure(loader):
def test_structure(data_filename, structure_filename, verbose=False):
nodes1 = []
- with open(structure_filename, 'r' if PY3 else 'rb') as fp:
+ with open(structure_filename, 'r') as fp:
nodes2 = eval(fp.read())
try:
with open(data_filename, 'rb') as fp:
- loader = yaml.Loader(fp)
+ loader = ruamel.yaml.Loader(fp)
while loader.check_event():
if loader.check_event(
- yaml.StreamStartEvent,
- yaml.StreamEndEvent,
- yaml.DocumentStartEvent,
- yaml.DocumentEndEvent,
+ ruamel.yaml.StreamStartEvent,
+ ruamel.yaml.StreamEndEvent,
+ ruamel.yaml.DocumentStartEvent,
+ ruamel.yaml.DocumentEndEvent,
):
loader.get_event()
continue
@@ -73,12 +70,12 @@ def _compare_events(events1, events2, full=False):
assert len(events1) == len(events2), (len(events1), len(events2))
for event1, event2 in zip(events1, events2):
assert event1.__class__ == event2.__class__, (event1, event2)
- if isinstance(event1, yaml.AliasEvent) and full:
+ if isinstance(event1, ruamel.yaml.AliasEvent) and full:
assert event1.anchor == event2.anchor, (event1, event2)
- if isinstance(event1, (yaml.ScalarEvent, yaml.CollectionStartEvent)):
- if (event1.tag not in [None, u'!'] and event2.tag not in [None, u'!']) or full:
+ if isinstance(event1, (ruamel.yaml.ScalarEvent, ruamel.yaml.CollectionStartEvent)):
+ if (event1.tag not in [None, '!'] and event2.tag not in [None, '!']) or full:
assert event1.tag == event2.tag, (event1, event2)
- if isinstance(event1, yaml.ScalarEvent):
+ if isinstance(event1, ruamel.yaml.ScalarEvent):
assert event1.value == event2.value, (event1, event2)
@@ -87,9 +84,9 @@ def test_parser(data_filename, canonical_filename, verbose=False):
events2 = None
try:
with open(data_filename, 'rb') as fp0:
- events1 = list(yaml.parse(fp0))
+ events1 = list(ruamel.yaml.YAML().parse(fp0))
with open(canonical_filename, 'rb') as fp0:
- events2 = list(yaml.canonical_parse(fp0))
+ events2 = list(ruamel.yaml.YAML().canonical_parse(fp0))
_compare_events(events1, events2)
finally:
if verbose:
@@ -107,9 +104,9 @@ def test_parser_on_canonical(canonical_filename, verbose=False):
events2 = None
try:
with open(canonical_filename, 'rb') as fp0:
- events1 = list(yaml.parse(fp0))
+ events1 = list(ruamel.yaml.YAML().parse(fp0))
with open(canonical_filename, 'rb') as fp0:
- events2 = list(yaml.canonical_parse(fp0))
+ events2 = list(ruamel.yaml.YAML().canonical_parse(fp0))
_compare_events(events1, events2, full=True)
finally:
if verbose:
@@ -125,7 +122,7 @@ test_parser_on_canonical.unittest = ['.canonical']
def _compare_nodes(node1, node2):
assert node1.__class__ == node2.__class__, (node1, node2)
assert node1.tag == node2.tag, (node1, node2)
- if isinstance(node1, yaml.ScalarNode):
+ if isinstance(node1, ruamel.yaml.ScalarNode):
assert node1.value == node2.value, (node1, node2)
else:
assert len(node1.value) == len(node2.value), (node1, node2)
@@ -141,6 +138,7 @@ def test_composer(data_filename, canonical_filename, verbose=False):
nodes1 = None
nodes2 = None
try:
+ yaml = ruamel.yaml.YAML()
with open(data_filename, 'rb') as fp0:
nodes1 = list(yaml.compose_all(fp0))
with open(canonical_filename, 'rb') as fp0:
@@ -162,39 +160,39 @@ test_composer.unittest = ['.data', '.canonical']
def _make_loader():
global MyLoader
- class MyLoader(yaml.Loader):
+ class MyLoader(ruamel.yaml.Loader):
def construct_sequence(self, node):
- return tuple(yaml.Loader.construct_sequence(self, node))
+ return tuple(ruamel.yaml.Loader.construct_sequence(self, node))
def construct_mapping(self, node):
pairs = self.construct_pairs(node)
- pairs.sort(key=(lambda i: text_type(i)))
+ pairs.sort(key=(lambda i: str(i)))
return pairs
def construct_undefined(self, node):
return self.construct_scalar(node)
- MyLoader.add_constructor(u'tag:yaml.org,2002:map', MyLoader.construct_mapping)
+ MyLoader.add_constructor('tag:yaml.org,2002:map', MyLoader.construct_mapping)
MyLoader.add_constructor(None, MyLoader.construct_undefined)
def _make_canonical_loader():
global MyCanonicalLoader
- class MyCanonicalLoader(yaml.CanonicalLoader):
+ class MyCanonicalLoader(ruamel.yaml.CanonicalLoader):
def construct_sequence(self, node):
- return tuple(yaml.CanonicalLoader.construct_sequence(self, node))
+ return tuple(ruamel.yaml.CanonicalLoader.construct_sequence(self, node))
def construct_mapping(self, node):
pairs = self.construct_pairs(node)
- pairs.sort(key=(lambda i: text_type(i)))
+ pairs.sort(key=(lambda i: str(i)))
return pairs
def construct_undefined(self, node):
return self.construct_scalar(node)
MyCanonicalLoader.add_constructor(
- u'tag:yaml.org,2002:map', MyCanonicalLoader.construct_mapping
+ 'tag:yaml.org,2002:map', MyCanonicalLoader.construct_mapping
)
MyCanonicalLoader.add_constructor(None, MyCanonicalLoader.construct_undefined)
@@ -204,11 +202,12 @@ def test_constructor(data_filename, canonical_filename, verbose=False):
_make_canonical_loader()
native1 = None
native2 = None
+    yaml = ruamel.yaml.YAML(typ='safe')
try:
with open(data_filename, 'rb') as fp0:
- native1 = list(yaml.load_all(fp0, Loader=MyLoader))
+ native1 = list(yaml.load(fp0, Loader=MyLoader))
with open(canonical_filename, 'rb') as fp0:
- native2 = list(yaml.load_all(fp0, Loader=MyCanonicalLoader))
+ native2 = list(yaml.load(fp0, Loader=MyCanonicalLoader))
assert native1 == native2, (native1, native2)
finally:
if verbose:
diff --git a/_test/lib/test_tokens.py b/_test/lib/test_tokens.py
index cdb41ba..575e95c 100644
--- a/_test/lib/test_tokens.py
+++ b/_test/lib/test_tokens.py
@@ -1,9 +1,6 @@
-from __future__ import absolute_import
-from __future__ import print_function
-import ruamel.yaml as yaml
+import ruamel.yaml
import pprint
-from ruamel.yaml.compat import PY3
# Tokens mnemonic:
# directive: %
@@ -25,35 +22,36 @@ from ruamel.yaml.compat import PY3
# value: :
_replaces = {
- yaml.DirectiveToken: '%',
- yaml.DocumentStartToken: '---',
- yaml.DocumentEndToken: '...',
- yaml.AliasToken: '*',
- yaml.AnchorToken: '&',
- yaml.TagToken: '!',
- yaml.ScalarToken: '_',
- yaml.BlockSequenceStartToken: '[[',
- yaml.BlockMappingStartToken: '{{',
- yaml.BlockEndToken: ']}',
- yaml.FlowSequenceStartToken: '[',
- yaml.FlowSequenceEndToken: ']',
- yaml.FlowMappingStartToken: '{',
- yaml.FlowMappingEndToken: '}',
- yaml.BlockEntryToken: ',',
- yaml.FlowEntryToken: ',',
- yaml.KeyToken: '?',
- yaml.ValueToken: ':',
+ ruamel.yaml.DirectiveToken: '%',
+ ruamel.yaml.DocumentStartToken: '---',
+ ruamel.yaml.DocumentEndToken: '...',
+ ruamel.yaml.AliasToken: '*',
+ ruamel.yaml.AnchorToken: '&',
+ ruamel.yaml.TagToken: '!',
+ ruamel.yaml.ScalarToken: '_',
+ ruamel.yaml.BlockSequenceStartToken: '[[',
+ ruamel.yaml.BlockMappingStartToken: '{{',
+ ruamel.yaml.BlockEndToken: ']}',
+ ruamel.yaml.FlowSequenceStartToken: '[',
+ ruamel.yaml.FlowSequenceEndToken: ']',
+ ruamel.yaml.FlowMappingStartToken: '{',
+ ruamel.yaml.FlowMappingEndToken: '}',
+ ruamel.yaml.BlockEntryToken: ',',
+ ruamel.yaml.FlowEntryToken: ',',
+ ruamel.yaml.KeyToken: '?',
+ ruamel.yaml.ValueToken: ':',
}
def test_tokens(data_filename, tokens_filename, verbose=False):
tokens1 = []
- with open(tokens_filename, 'r' if PY3 else 'rb') as fp:
+ with open(tokens_filename, 'r') as fp:
tokens2 = fp.read().split()
try:
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
with open(data_filename, 'rb') as fp1:
for token in yaml.scan(fp1):
- if not isinstance(token, (yaml.StreamStartToken, yaml.StreamEndToken)):
+ if not isinstance(token, (ruamel.yaml.StreamStartToken, ruamel.yaml.StreamEndToken)):
tokens1.append(_replaces[token.__class__])
finally:
if verbose:
@@ -71,6 +69,7 @@ def test_scanner(data_filename, canonical_filename, verbose=False):
for filename in [data_filename, canonical_filename]:
tokens = []
try:
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=False)
with open(filename, 'rb') as fp:
for token in yaml.scan(fp):
tokens.append(token.__class__.__name__)
diff --git a/_test/lib/test_yaml_ext.py b/_test/lib/test_yaml_ext.py
index e36ddd0..a6fa287 100644
--- a/_test/lib/test_yaml_ext.py
+++ b/_test/lib/test_yaml_ext.py
@@ -1,13 +1,9 @@
# coding: utf-8
-from __future__ import absolute_import
-from __future__ import print_function
-
import _ruamel_yaml
import ruamel.yaml
import types
import pprint
-from ruamel.yaml.compat import PY3
ruamel.yaml.PyBaseLoader = ruamel.yaml.BaseLoader
ruamel.yaml.PySafeLoader = ruamel.yaml.SafeLoader
@@ -114,7 +110,7 @@ def new_safe_dump(data, stream=None, **kwds):
return old_dump(data, stream, ruamel.yaml.CSafeDumper, **kwds)
-old_safe_dump_all = ruamel.yaml.safe_dump_all
+# old_safe_dump_all = ruamel.yaml.safe_dump_all
def new_safe_dump_all(documents, stream=None, **kwds):
@@ -180,10 +176,12 @@ def test_c_version(verbose=False):
def _compare_scanners(py_data, c_data, verbose):
- py_tokens = list(ruamel.yaml.scan(py_data, Loader=ruamel.yaml.PyLoader))
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
+ py_tokens = list(yaml.scan(py_data, Loader=ruamel.yaml.PyLoader))
c_tokens = []
try:
- for token in ruamel.yaml.scan(c_data, Loader=ruamel.yaml.CLoader):
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=False)
+ for token in yaml.scan(c_data, Loader=ruamel.yaml.CLoader):
c_tokens.append(token)
assert len(py_tokens) == len(c_tokens), (len(py_tokens), len(c_tokens))
for py_token, c_token in zip(py_tokens, c_tokens):
@@ -238,10 +236,12 @@ test_c_scanner.skip = ['.skip-ext']
def _compare_parsers(py_data, c_data, verbose):
- py_events = list(ruamel.yaml.parse(py_data, Loader=ruamel.yaml.PyLoader))
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
+ py_events = list(yaml.parse(py_data, Loader=ruamel.yaml.PyLoader))
c_events = []
try:
- for event in ruamel.yaml.parse(c_data, Loader=ruamel.yaml.CLoader):
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=False)
+ for event in yaml.parse(c_data, Loader=ruamel.yaml.CLoader):
c_events.append(event)
assert len(py_events) == len(c_events), (len(py_events), len(c_events))
for py_event, c_event in zip(py_events, c_events):
@@ -286,12 +286,13 @@ test_c_parser.skip = ['.skip-ext']
def _compare_emitters(data, verbose):
- events = list(ruamel.yaml.parse(data, Loader=ruamel.yaml.PyLoader))
- c_data = ruamel.yaml.emit(events, Dumper=ruamel.yaml.CDumper)
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
+    events = list(yaml.parse(data, Loader=ruamel.yaml.PyLoader))
+ c_data = yaml.emit(events, Dumper=ruamel.yaml.CDumper)
if verbose:
print(c_data)
- py_events = list(ruamel.yaml.parse(c_data, Loader=ruamel.yaml.PyLoader))
- c_events = list(ruamel.yaml.parse(c_data, Loader=ruamel.yaml.CLoader))
+ py_events = list(yaml.parse(c_data, Loader=ruamel.yaml.PyLoader))
+ c_events = list(yaml.parse(c_data, Loader=ruamel.yaml.CLoader))
try:
assert len(events) == len(py_events), (len(events), len(py_events))
assert len(events) == len(c_events), (len(events), len(c_events))
@@ -311,9 +312,9 @@ def _compare_emitters(data, verbose):
c_value = getattr(c_event, attribute, None)
if (
attribute == 'tag'
- and value in [None, u'!']
- and py_value in [None, u'!']
- and c_value in [None, u'!']
+ and value in [None, '!']
+ and py_value in [None, '!']
+ and c_value in [None, '!']
):
continue
if attribute == 'explicit' and (py_value or c_value):
@@ -349,14 +350,7 @@ def wrap_ext_function(function):
finally:
_tear_down()
- if PY3:
- wrapper.__name__ = '%s_ext' % function.__name__
- else:
- try:
- wrapper.__name__ = '%s_ext' % function.__name__
- except TypeError:
- pass
- wrapper.unittest_name = '%s_ext' % function.__name__
+ wrapper.__name__ = '%s_ext' % function.__name__
wrapper.unittest = function.unittest
wrapper.skip = getattr(function, 'skip', []) + ['.skip-ext']
return wrapper
@@ -374,12 +368,8 @@ def wrap_ext(collections):
if isinstance(value, types.FunctionType) and hasattr(value, 'unittest'):
functions.append(wrap_ext_function(value))
for function in functions:
- if PY3:
- assert function.__name__ not in globals()
- globals()[function.__name__] = function
- else:
- assert function.unittest_name not in globals()
- globals()[function.unittest_name] = function
+ assert function.__name__ not in globals()
+ globals()[function.__name__] = function
import test_tokens # NOQA
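
With the PY3 branches gone, each comparison helper in this file now builds two YAML instances, pure=True for the pure-Python code and pure=False for the _ruamel_yaml extension, and compares their token/event streams. A hedged standalone sketch of that idea (it assumes the C extension is built; the input is illustrative):

# sketch: compare the pure-Python and C-extension token streams for one document
import ruamel.yaml

data = 'a: 1\nb: [2, 3]\n'
py_yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
c_yaml = ruamel.yaml.YAML(typ='unsafe', pure=False)

py_names = [t.__class__.__name__ for t in py_yaml.scan(data)]
c_names = [t.__class__.__name__ for t in c_yaml.scan(data)]
print(py_names == c_names)  # the two backends should normally agree
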
diff --git a/_test/roundtrip.py b/_test/roundtrip.py
index af8a555..fa8b08a 100644
--- a/_test/roundtrip.py
+++ b/_test/roundtrip.py
@@ -1,18 +1,19 @@
# coding: utf-8
-from __future__ import print_function
-
"""
helper routines for testing round trip of commented YAML data
"""
import sys
import textwrap
-from ruamel.std.pathlib import Path
+import io
+from pathlib import Path
+
+from typing import Any, Optional, Union
-enforce = object()
+unset = object()
-def dedent(data):
+def dedent(data: str) -> str:
try:
position_of_first_newline = data.index('\n')
for idx in range(position_of_first_newline):
@@ -25,57 +26,97 @@ def dedent(data):
return textwrap.dedent(data)
-def round_trip_load(inp, preserve_quotes=None, version=None):
+def round_trip_load(
+ inp: Any, preserve_quotes: Optional[bool] = None, version: Optional[Any] = None
+) -> Any:
import ruamel.yaml # NOQA
dinp = dedent(inp)
- return ruamel.yaml.load(
- dinp,
- Loader=ruamel.yaml.RoundTripLoader,
- preserve_quotes=preserve_quotes,
- version=version,
- )
+ yaml = ruamel.yaml.YAML()
+ yaml.preserve_quotes = preserve_quotes
+ yaml.version = version
+ return yaml.load(dinp)
-def round_trip_load_all(inp, preserve_quotes=None, version=None):
+def round_trip_load_all(
+ inp: Any, preserve_quotes: Optional[bool] = None, version: Optional[Any] = None
+) -> Any:
import ruamel.yaml # NOQA
dinp = dedent(inp)
- return ruamel.yaml.load_all(
- dinp,
- Loader=ruamel.yaml.RoundTripLoader,
- preserve_quotes=preserve_quotes,
- version=version,
- )
+ yaml = ruamel.yaml.YAML()
+ yaml.preserve_quotes = preserve_quotes
+ yaml.version = version
+ return yaml.load_all(dinp)
def round_trip_dump(
- data,
- stream=None,
- indent=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
-):
+ data: Any,
+ stream: Any = None, # *,
+ indent: Optional[int] = None,
+ block_seq_indent: Optional[int] = None,
+ default_flow_style: Any = unset,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[Any] = None,
+ allow_unicode: bool = True,
+) -> Union[str, None]:
import ruamel.yaml # NOQA
- return ruamel.yaml.round_trip_dump(
- data,
- stream=stream,
- indent=indent,
- block_seq_indent=block_seq_indent,
- top_level_colon_align=top_level_colon_align,
- prefix_colon=prefix_colon,
- explicit_start=explicit_start,
- explicit_end=explicit_end,
- version=version,
- )
+ yaml = ruamel.yaml.YAML()
+ yaml.indent(mapping=indent, sequence=indent, offset=block_seq_indent)
+ if default_flow_style is not unset:
+ yaml.default_flow_style = default_flow_style
+ yaml.top_level_colon_align = top_level_colon_align
+ yaml.prefix_colon = prefix_colon
+ yaml.explicit_start = explicit_start
+ yaml.explicit_end = explicit_end
+ yaml.version = version
+ yaml.allow_unicode = allow_unicode
+ if stream is not None:
+ yaml.dump(data, stream=stream)
+ return None
+ buf = io.StringIO()
+ yaml.dump(data, stream=buf)
+ return buf.getvalue()
+
+
+def round_trip_dump_all(
+ data: Any,
+ stream: Any = None, # *,
+ indent: Optional[int] = None,
+ block_seq_indent: Optional[int] = None,
+ default_flow_style: Any = unset,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[Any] = None,
+ allow_unicode: bool = True,
+) -> Union[str, None]:
+ import ruamel.yaml # NOQA
+
+ yaml = ruamel.yaml.YAML()
+ yaml.indent(mapping=indent, sequence=indent, offset=block_seq_indent)
+ if default_flow_style is not unset:
+ yaml.default_flow_style = default_flow_style
+ yaml.top_level_colon_align = top_level_colon_align
+ yaml.prefix_colon = prefix_colon
+ yaml.explicit_start = explicit_start
+ yaml.explicit_end = explicit_end
+ yaml.version = version
+ yaml.allow_unicode = allow_unicode
+ if stream is not None:
+        yaml.dump_all(data, stream=stream)
+ return None
+ buf = io.StringIO()
+ yaml.dump_all(data, stream=buf)
+ return buf.getvalue()
-def diff(inp, outp, file_name='stdin'):
+def diff(inp: str, outp: str, file_name: str = 'stdin') -> None:
import difflib
inl = inp.splitlines(True) # True for keepends
@@ -90,20 +131,21 @@ def diff(inp, outp, file_name='stdin'):
def round_trip(
- inp,
- outp=None,
- extra=None,
- intermediate=None,
- indent=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- preserve_quotes=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- dump_data=None,
-):
+ inp: str,
+ outp: Optional[str] = None,
+ extra: Optional[str] = None,
+ intermediate: Any = None,
+ indent: Optional[int] = None,
+ block_seq_indent: Optional[int] = None,
+ default_flow_style: Any = unset,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ preserve_quotes: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[Any] = None,
+ dump_data: Any = None,
+) -> Any:
"""
inp: input string to parse
outp: expected output (equals input if not specified)
@@ -132,6 +174,7 @@ def round_trip(
explicit_end=explicit_end,
version=version,
)
+ assert isinstance(res, str)
if res != doutp:
diff(doutp, res, 'input string')
print('\nroundtrip data:\n', res, sep="")
@@ -152,19 +195,19 @@ def round_trip(
def na_round_trip(
- inp,
- outp=None,
- extra=None,
- intermediate=None,
- indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- preserve_quotes=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- dump_data=None,
-):
+ inp: str,
+ outp: Optional[str] = None,
+ extra: Optional[str] = None,
+ intermediate: Any = None,
+ indent: Optional[int] = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ preserve_quotes: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[Any] = None,
+ dump_data: Any = None,
+) -> Any:
"""
inp: input string to parse
outp: expected output (equals input if not specified)
@@ -198,20 +241,20 @@ def na_round_trip(
return res
-def YAML(**kw):
+def YAML(**kw: Any) -> Any:
import ruamel.yaml # NOQA
class MyYAML(ruamel.yaml.YAML):
"""auto dedent string parameters on load"""
- def load(self, stream):
+ def load(self, stream: Any) -> Any:
if isinstance(stream, str):
if stream and stream[0] == '\n':
stream = stream[1:]
stream = textwrap.dedent(stream)
return ruamel.yaml.YAML.load(self, stream)
- def load_all(self, stream):
+ def load_all(self, stream: Any) -> Any:
if isinstance(stream, str):
if stream and stream[0] == '\n':
stream = stream[1:]
@@ -219,7 +262,7 @@ def YAML(**kw):
for d in ruamel.yaml.YAML.load_all(self, stream):
yield d
- def dump(self, data, **kw):
+ def dump(self, data: Any, **kw: Any) -> Any: # type: ignore
from ruamel.yaml.compat import StringIO, BytesIO # NOQA
assert ('stream' in kw) ^ ('compare' in kw)
@@ -235,14 +278,14 @@ def YAML(**kw):
res = st.getvalue()
print(res)
if unordered_lines:
- res = sorted(res.splitlines())
- expected = sorted(expected.splitlines())
+ res = sorted(res.splitlines()) # type: ignore
+ expected = sorted(expected.splitlines()) # type: ignore
assert res == expected
- def round_trip(self, stream, **kw):
+ def round_trip(self, stream: Any, **kw: Any) -> None:
from ruamel.yaml.compat import StringIO, BytesIO # NOQA
- assert isinstance(stream, (ruamel.yaml.compat.text_type, str))
+ assert isinstance(stream, str)
lkw = kw.copy()
if stream and stream[0] == '\n':
stream = stream[1:]
@@ -256,10 +299,10 @@ def YAML(**kw):
diff(outp, res, 'input string')
assert res == outp
- def round_trip_all(self, stream, **kw):
+ def round_trip_all(self, stream: Any, **kw: Any) -> None:
from ruamel.yaml.compat import StringIO, BytesIO # NOQA
- assert isinstance(stream, (ruamel.yaml.compat.text_type, str))
+ assert isinstance(stream, str)
lkw = kw.copy()
if stream and stream[0] == '\n':
stream = stream[1:]
@@ -276,7 +319,13 @@ def YAML(**kw):
return MyYAML(**kw)
-def save_and_run(program, base_dir=None, output=None, file_name=None, optimized=False):
+def save_and_run(
+ program: str,
+ base_dir: Optional[Any] = None,
+ output: Optional[Any] = None,
+ file_name: Optional[Any] = None,
+ optimized: bool = False,
+) -> int:
"""
     save and run a python program, thereby circumventing any restrictions on module level
imports
@@ -287,21 +336,22 @@ def save_and_run(program, base_dir=None, output=None, file_name=None, optimized=
base_dir = Path(str(base_dir))
if file_name is None:
file_name = 'safe_and_run_tmp.py'
- file_name = base_dir / file_name
+ file_name = base_dir / file_name # type: ignore
file_name.write_text(dedent(program))
try:
- cmd = [sys.executable]
+ cmd = [sys.executable, '-Wd']
if optimized:
cmd.append('-O')
cmd.append(str(file_name))
print('running:', *cmd)
- res = check_output(cmd, stderr=STDOUT, universal_newlines=True)
+ # 3.5 needs strings
+ res = check_output(cmd, stderr=STDOUT, universal_newlines=True, cwd=str(base_dir))
if output is not None:
if '__pypy__' in sys.builtin_module_names:
- res = res.splitlines(True)
- res = [line for line in res if 'no version info' not in line]
- res = ''.join(res)
+ res1 = res.splitlines(True)
+ res2 = [line for line in res1 if 'no version info' not in line]
+ res = ''.join(res2)
print('result: ', res, end='')
print('expected:', output, end='')
assert res == output
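
All the rewritten helpers above follow one pattern: configure a YAML() instance and, when no stream is given, dump into an io.StringIO buffer, since the new API only writes to streams. A minimal sketch of that pattern on its own:

# sketch: round-trip load, tweak options, dump back to a string via StringIO
import io
import ruamel.yaml

yaml = ruamel.yaml.YAML()            # round-trip loader/dumper by default
yaml.explicit_start = True
yaml.indent(mapping=4, sequence=4, offset=2)

data = yaml.load('a: 1\nb:\n- 2\n- 3\n')
buf = io.StringIO()
yaml.dump(data, buf)
print(buf.getvalue(), end='')
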
diff --git a/_test/test_a_dedent.py b/_test/test_a_dedent.py
index fc6157a..e13a54b 100644
--- a/_test/test_a_dedent.py
+++ b/_test/test_a_dedent.py
@@ -1,9 +1,10 @@
+# coding: utf-8
from roundtrip import dedent
class TestDedent:
- def test_start_newline(self):
+ def test_start_newline(self) -> None:
# fmt: off
x = dedent("""
123
@@ -12,7 +13,7 @@ class TestDedent:
# fmt: on
assert x == '123\n 456\n'
- def test_start_space_newline(self):
+ def test_start_space_newline(self) -> None:
# special construct to prevent stripping of following whitespace
# fmt: off
x = dedent(" " """
@@ -21,7 +22,7 @@ class TestDedent:
# fmt: on
assert x == '123\n'
- def test_start_no_newline(self):
+ def test_start_no_newline(self) -> None:
         # special construct to prevent stripping of following whitespace
x = dedent("""\
123
@@ -29,17 +30,17 @@ class TestDedent:
""")
assert x == '123\n 456\n'
- def test_preserve_no_newline_at_end(self):
+ def test_preserve_no_newline_at_end(self) -> None:
x = dedent("""
123""")
assert x == '123'
- def test_preserve_no_newline_at_all(self):
+ def test_preserve_no_newline_at_all(self) -> None:
x = dedent("""\
123""")
assert x == '123'
- def test_multiple_dedent(self):
+ def test_multiple_dedent(self) -> None:
x = dedent(
dedent("""
123
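
The assertions in this file pin down the local dedent() helper: an all-whitespace first line is dropped before textwrap.dedent removes the common indentation, and no trailing newline is invented. A small usage sketch, assuming _test/roundtrip.py is importable as it is during the test runs:

# sketch: behaviour of the dedent() helper from _test/roundtrip.py
from roundtrip import dedent

s = dedent("""
    123
      456
    """)
print(repr(s))   # '123\n  456\n' -- leading newline and common indent stripped
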
diff --git a/_test/test_add_xxx.py b/_test/test_add_xxx.py
index 085e352..5f12ece 100644
--- a/_test/test_add_xxx.py
+++ b/_test/test_add_xxx.py
@@ -1,91 +1,94 @@
# coding: utf-8
import re
-import pytest # NOQA
+import pytest # type: ignore # NOQA
-from roundtrip import dedent
+from roundtrip import dedent, round_trip_dump # NOQA
+from typing import Any
# from PyYAML docs
-class Dice(tuple):
- def __new__(cls, a, b):
+class Dice(tuple): # type: ignore
+ def __new__(cls, a: int, b: int) -> "Dice":
return tuple.__new__(cls, [a, b])
- def __repr__(self):
+ def __repr__(self) -> str:
return 'Dice(%s,%s)' % self
-def dice_constructor(loader, node):
+def dice_constructor(loader: Any, node: Any) -> Dice:
value = loader.construct_scalar(node)
a, b = map(int, value.split('d'))
return Dice(a, b)
-def dice_representer(dumper, data):
- return dumper.represent_scalar(u'!dice', u'{}d{}'.format(*data))
+def dice_representer(dumper: Any, data: Any) -> Any:
+ return dumper.represent_scalar('!dice', '{}d{}'.format(*data))
-def test_dice_constructor():
+def test_dice_constructor() -> None:
import ruamel.yaml # NOQA
- ruamel.yaml.add_constructor(u'!dice', dice_constructor)
- data = ruamel.yaml.load('initial hit points: !dice 8d4', Loader=ruamel.yaml.Loader)
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
+ ruamel.yaml.add_constructor('!dice', dice_constructor)
+ data = yaml.load('initial hit points: !dice 8d4')
assert str(data) == "{'initial hit points': Dice(8,4)}"
-def test_dice_constructor_with_loader():
+def test_dice_constructor_with_loader() -> None:
import ruamel.yaml # NOQA
- ruamel.yaml.add_constructor(u'!dice', dice_constructor, Loader=ruamel.yaml.Loader)
- data = ruamel.yaml.load('initial hit points: !dice 8d4', Loader=ruamel.yaml.Loader)
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
+ ruamel.yaml.add_constructor('!dice', dice_constructor, Loader=ruamel.yaml.Loader)
+ data = yaml.load('initial hit points: !dice 8d4')
assert str(data) == "{'initial hit points': Dice(8,4)}"
-def test_dice_representer():
+def test_dice_representer() -> None:
import ruamel.yaml # NOQA
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
+ yaml.default_flow_style = False
ruamel.yaml.add_representer(Dice, dice_representer)
# ruamel.yaml 0.15.8+ no longer forces quotes tagged scalars
- assert (
- ruamel.yaml.dump(dict(gold=Dice(10, 6)), default_flow_style=False)
- == 'gold: !dice 10d6\n'
- )
+ buf = ruamel.yaml.compat.StringIO()
+ yaml.dump(dict(gold=Dice(10, 6)), buf)
+ assert buf.getvalue() == 'gold: !dice 10d6\n'
-def test_dice_implicit_resolver():
+def test_dice_implicit_resolver() -> None:
import ruamel.yaml # NOQA
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
+ yaml.default_flow_style = False
pattern = re.compile(r'^\d+d\d+$')
- ruamel.yaml.add_implicit_resolver(u'!dice', pattern)
- assert (
- ruamel.yaml.dump(dict(treasure=Dice(10, 20)), default_flow_style=False)
- == 'treasure: 10d20\n'
- )
- assert ruamel.yaml.load('damage: 5d10', Loader=ruamel.yaml.Loader) == dict(
- damage=Dice(5, 10)
- )
+ ruamel.yaml.add_implicit_resolver('!dice', pattern)
+ buf = ruamel.yaml.compat.StringIO()
+ yaml.dump(dict(treasure=Dice(10, 20)), buf)
+ assert buf.getvalue() == 'treasure: 10d20\n'
+ assert yaml.load('damage: 5d10') == dict(damage=Dice(5, 10))
-class Obj1(dict):
- def __init__(self, suffix):
+class Obj1(dict): # type: ignore
+ def __init__(self, suffix: Any) -> None:
self._suffix = suffix
self._node = None
- def add_node(self, n):
+ def add_node(self, n: Any) -> None:
self._node = n
- def __repr__(self):
+ def __repr__(self) -> str:
return 'Obj1(%s->%s)' % (self._suffix, self.items())
- def dump(self):
+ def dump(self) -> str:
return repr(self._node)
-class YAMLObj1(object):
- yaml_tag = u'!obj:'
+class YAMLObj1:
+ yaml_tag = '!obj:'
@classmethod
- def from_yaml(cls, loader, suffix, node):
+ def from_yaml(cls, loader: Any, suffix: Any, node: Any) -> Any:
import ruamel.yaml # NOQA
obj1 = Obj1(suffix)
@@ -96,31 +99,37 @@ class YAMLObj1(object):
return obj1
@classmethod
- def to_yaml(cls, dumper, data):
+ def to_yaml(cls, dumper: Any, data: Any) -> Any:
return dumper.represent_scalar(cls.yaml_tag + data._suffix, data.dump())
-def test_yaml_obj():
+def test_yaml_obj() -> None:
import ruamel.yaml # NOQA
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
ruamel.yaml.add_representer(Obj1, YAMLObj1.to_yaml)
ruamel.yaml.add_multi_constructor(YAMLObj1.yaml_tag, YAMLObj1.from_yaml)
- x = ruamel.yaml.load('!obj:x.2\na: 1', Loader=ruamel.yaml.Loader)
+ x = yaml.load('!obj:x.2\na: 1')
print(x)
- assert ruamel.yaml.dump(x) == """!obj:x.2 "{'a': 1}"\n"""
+ buf = ruamel.yaml.compat.StringIO()
+ yaml.dump(x, buf)
+ assert buf.getvalue() == """!obj:x.2 "{'a': 1}"\n"""
-def test_yaml_obj_with_loader_and_dumper():
+def test_yaml_obj_with_loader_and_dumper() -> None:
import ruamel.yaml # NOQA
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
ruamel.yaml.add_representer(Obj1, YAMLObj1.to_yaml, Dumper=ruamel.yaml.Dumper)
ruamel.yaml.add_multi_constructor(
YAMLObj1.yaml_tag, YAMLObj1.from_yaml, Loader=ruamel.yaml.Loader
)
- x = ruamel.yaml.load('!obj:x.2\na: 1', Loader=ruamel.yaml.Loader)
+ x = yaml.load('!obj:x.2\na: 1')
# x = ruamel.yaml.load('!obj:x.2\na: 1')
print(x)
- assert ruamel.yaml.dump(x) == """!obj:x.2 "{'a': 1}"\n"""
+ buf = ruamel.yaml.compat.StringIO()
+ yaml.dump(x, buf)
+ assert buf.getvalue() == """!obj:x.2 "{'a': 1}"\n"""
# ToDo use nullege to search add_multi_representer and add_path_resolver
@@ -129,25 +138,25 @@ def test_yaml_obj_with_loader_and_dumper():
# Issue 127 reported by Tommy Wang
-def test_issue_127():
+def test_issue_127() -> None:
import ruamel.yaml # NOQA
class Ref(ruamel.yaml.YAMLObject):
- yaml_constructor = ruamel.yaml.RoundTripConstructor
- yaml_representer = ruamel.yaml.RoundTripRepresenter
- yaml_tag = u'!Ref'
+ yaml_constructor = ruamel.yaml.RoundTripConstructor # type: ignore
+ yaml_representer = ruamel.yaml.RoundTripRepresenter # type: ignore
+ yaml_tag = '!Ref'
- def __init__(self, logical_id):
+ def __init__(self, logical_id: Any) -> None:
self.logical_id = logical_id
@classmethod
- def from_yaml(cls, loader, node):
+ def from_yaml(cls, loader: Any, node: Any) -> Any:
return cls(loader.construct_scalar(node))
@classmethod
- def to_yaml(cls, dumper, data):
+ def to_yaml(cls, dumper: Any, data: Any) -> Any:
if isinstance(data.logical_id, ruamel.yaml.scalarstring.ScalarString):
- style = data.logical_id.style # ruamel.yaml>0.15.8
+ style = data.logical_id.style # type: ignore # ruamel.yaml>0.15.8
else:
style = None
return dumper.represent_scalar(cls.yaml_tag, data.logical_id, style=style)
@@ -163,7 +172,11 @@ def test_issue_127():
- Five Six
- 'Seven Eight'
""")
- data = ruamel.yaml.round_trip_load(document, preserve_quotes=True)
- assert ruamel.yaml.round_trip_dump(data, indent=4, block_seq_indent=2) == document.replace(
- '\n Two and', ' Two and'
- )
+ yaml = ruamel.yaml.YAML()
+ yaml.preserve_quotes = True
+ yaml.default_flow_style = None
+ yaml.indent(sequence=4, offset=2)
+ data = yaml.load(document)
+ buf = ruamel.yaml.compat.StringIO()
+ yaml.dump(data, buf)
+ assert buf.getvalue() == document.replace('\n Two and', ' Two and')
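
The tests above keep the legacy module-level add_constructor / add_representer registrations but do all loading and dumping through a YAML instance and a StringIO buffer. A condensed standalone sketch of that combination, based on the Dice example in this file:

# sketch: legacy tag registration used together with a new-style YAML instance,
# condensed from the Dice example in this file
import ruamel.yaml
from ruamel.yaml.compat import StringIO


class Dice(tuple):
    def __new__(cls, a, b):
        return tuple.__new__(cls, [a, b])

    def __repr__(self):
        return 'Dice(%s,%s)' % self


def dice_constructor(loader, node):
    a, b = map(int, loader.construct_scalar(node).split('d'))
    return Dice(a, b)


def dice_representer(dumper, data):
    return dumper.represent_scalar('!dice', '{}d{}'.format(*data))


ruamel.yaml.add_constructor('!dice', dice_constructor)
ruamel.yaml.add_representer(Dice, dice_representer)

yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
yaml.default_flow_style = False
print(yaml.load('initial hit points: !dice 8d4'))  # {'initial hit points': Dice(8,4)}

buf = StringIO()
yaml.dump(dict(gold=Dice(10, 6)), buf)
print(buf.getvalue(), end='')                      # gold: !dice 10d6
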
diff --git a/_test/test_anchor.py b/_test/test_anchor.py
index dec1261..da0f1ef 100644
--- a/_test/test_anchor.py
+++ b/_test/test_anchor.py
@@ -1,31 +1,29 @@
# coding: utf-8
-from __future__ import print_function
-
"""
testing of anchors and the aliases referring to them
"""
-import pytest
-from textwrap import dedent
+import pytest # type: ignore # NOQA
import platform
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump, YAML # NOQA
+from typing import Any
-def load(s):
+def load(s: str) -> Any:
return round_trip_load(dedent(s))
-def compare(d, s):
+def compare(d: Any, s: str) -> None:
assert round_trip_dump(d) == dedent(s)
class TestAnchorsAliases:
- def test_anchor_id_renumber(self):
+ def test_anchor_id_renumber(self) -> None:
from ruamel.yaml.serializer import Serializer
- assert Serializer.ANCHOR_TEMPLATE == 'id%03d'
+ assert Serializer.ANCHOR_TEMPLATE == 'id{:03d}'
data = load("""
a: &id002
b: 1
@@ -42,26 +40,26 @@ class TestAnchorsAliases:
""",
)
- def test_template_matcher(self):
+ def test_template_matcher(self) -> None:
"""test if id matches the anchor template"""
from ruamel.yaml.serializer import templated_id
- assert templated_id(u'id001')
- assert templated_id(u'id999')
- assert templated_id(u'id1000')
- assert templated_id(u'id0001')
- assert templated_id(u'id0000')
- assert not templated_id(u'id02')
- assert not templated_id(u'id000')
- assert not templated_id(u'x000')
+ assert templated_id('id001')
+ assert templated_id('id999')
+ assert templated_id('id1000')
+ assert templated_id('id0001')
+ assert templated_id('id0000')
+ assert not templated_id('id02')
+ assert not templated_id('id000')
+ assert not templated_id('x000')
- # def test_re_matcher(self):
+ # def test_re_matcher(self) -> None:
# import re
- # assert re.compile(u'id(?!000)\\d{3,}').match('id001')
- # assert not re.compile(u'id(?!000\\d*)\\d{3,}').match('id000')
- # assert re.compile(u'id(?!000$)\\d{3,}').match('id0001')
+ # assert re.compile('id(?!000)\\d{3,}').match('id001')
+ # assert not re.compile('id(?!000\\d*)\\d{3,}').match('id000')
+ # assert re.compile('id(?!000$)\\d{3,}').match('id0001')
- def test_anchor_assigned(self):
+ def test_anchor_assigned(self) -> None:
from ruamel.yaml.comments import CommentedMap
data = load("""
@@ -82,7 +80,7 @@ class TestAnchorsAliases:
assert e.yaml_anchor().value == 'etemplate'
assert e.yaml_anchor().always_dump is False
- def test_anchor_id_retained(self):
+ def test_anchor_id_retained(self) -> None:
data = load("""
a: &id002
b: 1
@@ -107,10 +105,10 @@ class TestAnchorsAliases:
""",
)
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_alias_before_anchor(self):
+ def test_alias_before_anchor(self) -> None:
from ruamel.yaml.composer import ComposerError
with pytest.raises(ComposerError):
@@ -122,7 +120,7 @@ class TestAnchorsAliases:
""")
data = data
- def test_anchor_on_sequence(self):
+ def test_anchor_on_sequence(self) -> None:
# as reported by Bjorn Stabell
# https://bitbucket.org/ruamel/yaml/issue/7/anchor-names-not-preserved
from ruamel.yaml.comments import CommentedSeq
@@ -167,7 +165,7 @@ class TestAnchorsAliases:
label: center/huge
""")
- def test_merge_00(self):
+ def test_merge_00(self) -> None:
data = load(self.merge_yaml)
d = data[4]
ok = True
@@ -183,7 +181,7 @@ class TestAnchorsAliases:
print('key', k, d.get(k), data[o].get(k))
assert ok
- def test_merge_accessible(self):
+ def test_merge_accessible(self) -> None:
from ruamel.yaml.comments import CommentedMap, merge_attrib
data = load("""
@@ -198,11 +196,11 @@ class TestAnchorsAliases:
assert isinstance(d, CommentedMap)
assert hasattr(d, merge_attrib)
- def test_merge_01(self):
+ def test_merge_01(self) -> None:
data = load(self.merge_yaml)
compare(data, self.merge_yaml)
- def test_merge_nested(self):
+ def test_merge_nested(self) -> None:
yaml = """
a:
<<: &content
@@ -214,7 +212,7 @@ class TestAnchorsAliases:
"""
data = round_trip(yaml) # NOQA
- def test_merge_nested_with_sequence(self):
+ def test_merge_nested_with_sequence(self) -> None:
yaml = """
a:
<<: &content
@@ -227,7 +225,7 @@ class TestAnchorsAliases:
"""
data = round_trip(yaml) # NOQA
- def test_add_anchor(self):
+ def test_add_anchor(self) -> None:
from ruamel.yaml.comments import CommentedMap
data = CommentedMap()
@@ -248,7 +246,7 @@ class TestAnchorsAliases:
)
# this is an error in PyYAML
- def test_reused_anchor(self):
+ def test_reused_anchor(self) -> None:
from ruamel.yaml.error import ReusedAnchorWarning
yaml = """
@@ -262,7 +260,7 @@ class TestAnchorsAliases:
with pytest.warns(ReusedAnchorWarning):
data = round_trip(yaml) # NOQA
- def test_issue_130(self):
+ def test_issue_130(self) -> None:
# issue 130 reported by Devid Fee
import ruamel.yaml
@@ -283,10 +281,11 @@ class TestAnchorsAliases:
components:
server: {<<: *server_service}
""")
- data = ruamel.yaml.safe_load(ys)
+ yaml = ruamel.yaml.YAML(typ='safe', pure=True)
+ data = yaml.load(ys)
assert data['services']['shell']['components']['server']['port'] == 8000
- def test_issue_130a(self):
+ def test_issue_130a(self) -> None:
# issue 130 reported by Devid Fee
import ruamel.yaml
@@ -308,7 +307,8 @@ class TestAnchorsAliases:
components:
server: {<<: *server_service}
""")
- data = ruamel.yaml.safe_load(ys)
+ yaml = ruamel.yaml.YAML(typ='safe', pure=True)
+ data = yaml.load(ys)
assert data['services']['shell']['components']['server']['port'] == 4000
@@ -331,10 +331,10 @@ class TestMergeKeysValues:
# in the following d always has "expanded" the merges
- def test_merge_for(self):
- from ruamel.yaml import safe_load
+ def test_merge_for(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
- d = safe_load(self.yaml_str)
+ d = YAML(typ='safe', pure=True).load(self.yaml_str)
data = round_trip_load(self.yaml_str)
count = 0
for x in data[2]:
@@ -342,10 +342,10 @@ class TestMergeKeysValues:
print(count, x)
assert count == len(d[2])
- def test_merge_keys(self):
- from ruamel.yaml import safe_load
+ def test_merge_keys(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
- d = safe_load(self.yaml_str)
+ d = YAML(typ='safe', pure=True).load(self.yaml_str)
data = round_trip_load(self.yaml_str)
count = 0
for x in data[2].keys():
@@ -353,10 +353,10 @@ class TestMergeKeysValues:
print(count, x)
assert count == len(d[2])
- def test_merge_values(self):
- from ruamel.yaml import safe_load
+ def test_merge_values(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
- d = safe_load(self.yaml_str)
+ d = YAML(typ='safe', pure=True).load(self.yaml_str)
data = round_trip_load(self.yaml_str)
count = 0
for x in data[2].values():
@@ -364,10 +364,10 @@ class TestMergeKeysValues:
print(count, x)
assert count == len(d[2])
- def test_merge_items(self):
- from ruamel.yaml import safe_load
+ def test_merge_items(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
- d = safe_load(self.yaml_str)
+ d = YAML(typ='safe', pure=True).load(self.yaml_str)
data = round_trip_load(self.yaml_str)
count = 0
for x in data[2].items():
@@ -375,11 +375,10 @@ class TestMergeKeysValues:
print(count, x)
assert count == len(d[2])
- def test_len_items_delete(self):
- from ruamel.yaml import safe_load
- from ruamel.yaml.compat import PY3
+ def test_len_items_delete(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
- d = safe_load(self.yaml_str)
+ d = YAML(typ='safe', pure=True).load(self.yaml_str)
data = round_trip_load(self.yaml_str)
x = data[2].items()
print('d2 items', d[2].items(), len(d[2].items()), x, len(x))
@@ -387,20 +386,17 @@ class TestMergeKeysValues:
print('ref', ref)
assert len(x) == ref
del data[2]['m']
- if PY3:
- ref -= 1
+ ref -= 1
assert len(x) == ref
del data[2]['d']
- if PY3:
- ref -= 1
+ ref -= 1
assert len(x) == ref
del data[2]['a']
- if PY3:
- ref -= 1
+ ref -= 1
assert len(x) == ref
- def test_issue_196_cast_of_dict(self, capsys):
- from ruamel.yaml import YAML
+ def test_issue_196_cast_of_dict(self, capsys: Any) -> None:
+ from ruamel.yaml import YAML # type: ignore
yaml = YAML()
mapping = yaml.load("""\
@@ -437,15 +433,15 @@ class TestMergeKeysValues:
assert 'a' in dict(mapping)
assert 'a' in dict(mapping.items())
- def test_values_of_merged(self):
- from ruamel.yaml import YAML
+ def test_values_of_merged(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
yaml = YAML()
data = yaml.load(dedent(self.yaml_str))
assert list(data[2].values()) == [1, 6, 'x2', 'x3', 'y4']
- def test_issue_213_copy_of_merge(self):
- from ruamel.yaml import YAML
+ def test_issue_213_copy_of_merge(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
yaml = YAML()
d = yaml.load("""\
@@ -465,9 +461,9 @@ class TestMergeKeysValues:
class TestDuplicateKeyThroughAnchor:
- def test_duplicate_key_00(self):
+ def test_duplicate_key_00(self) -> None:
from ruamel.yaml import version_info
- from ruamel.yaml import safe_load, round_trip_load
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.constructor import DuplicateKeyFutureWarning, DuplicateKeyError
s = dedent("""\
@@ -481,16 +477,16 @@ class TestDuplicateKeyThroughAnchor:
pass
elif version_info < (0, 16, 0):
with pytest.warns(DuplicateKeyFutureWarning):
- safe_load(s)
+ YAML(typ='safe', pure=True).load(s)
with pytest.warns(DuplicateKeyFutureWarning):
- round_trip_load(s)
+ YAML(typ='rt').load(s)
else:
with pytest.raises(DuplicateKeyError):
- safe_load(s)
+ YAML(typ='safe', pure=True).load(s)
with pytest.raises(DuplicateKeyError):
- round_trip_load(s)
+ YAML(typ='rt').load(s)
- def test_duplicate_key_01(self):
+ def test_duplicate_key_01(self) -> None:
# so issue https://stackoverflow.com/a/52852106/1307905
from ruamel.yaml import version_info
from ruamel.yaml.constructor import DuplicateKeyError
@@ -515,7 +511,7 @@ class TestDuplicateKeyThroughAnchor:
class TestFullCharSetAnchors:
- def test_master_of_orion(self):
+ def test_master_of_orion(self) -> None:
# https://bitbucket.org/ruamel/yaml/issues/72/not-allowed-in-anchor-names
# submitted by Shalon Wood
yaml_str = """
@@ -526,7 +522,7 @@ class TestFullCharSetAnchors:
"""
data = load(yaml_str) # NOQA
- def test_roundtrip_00(self):
+ def test_roundtrip_00(self) -> None:
yaml_str = """
- &dotted.words.here
a: 1
@@ -535,7 +531,7 @@ class TestFullCharSetAnchors:
"""
data = round_trip(yaml_str) # NOQA
- def test_roundtrip_01(self):
+ def test_roundtrip_01(self) -> None:
yaml_str = """
- &dotted.words.here[a, b]
- *dotted.words.here
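
Several of the rewritten anchor tests load merge keys through YAML(typ='safe', pure=True) and compare with the round-trip loader, which keeps the alias and resolves merged keys on access. A small hedged sketch of that difference, on an illustrative document:

# sketch: merge keys under the safe loader vs. the round-trip loader
import ruamel.yaml

doc = """\
defaults: &defaults
  port: 8000
  timeout: 30
server:
  <<: *defaults
  port: 4000
"""

safe_data = ruamel.yaml.YAML(typ='safe', pure=True).load(doc)
print(safe_data['server']['timeout'])   # 30 -- merged in from the anchor
print(safe_data['server']['port'])      # 4000 -- the explicit key wins over the merge

rt_data = ruamel.yaml.YAML().load(doc)
print(rt_data['server']['timeout'])     # 30 as well; the merge is resolved on access
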
diff --git a/_test/test_api_change.py b/_test/test_api_change.py
index b47a551..8961273 100644
--- a/_test/test_api_change.py
+++ b/_test/test_api_change.py
@@ -1,19 +1,19 @@
# coding: utf-8
-from __future__ import print_function
-
"""
testing of anchors and the aliases referring to them
"""
import sys
import textwrap
-import pytest
-from ruamel.std.pathlib import Path
+import pytest # type: ignore
+from pathlib import Path
+
+from typing import Any
class TestNewAPI:
- def test_duplicate_keys_00(self):
+ def test_duplicate_keys_00(self) -> None:
from ruamel.yaml import YAML
from ruamel.yaml.constructor import DuplicateKeyError
@@ -21,7 +21,7 @@ class TestNewAPI:
with pytest.raises(DuplicateKeyError):
yaml.load('{a: 1, a: 2}')
- def test_duplicate_keys_01(self):
+ def test_duplicate_keys_01(self) -> None:
from ruamel.yaml import YAML
from ruamel.yaml.constructor import DuplicateKeyError
@@ -29,7 +29,7 @@ class TestNewAPI:
with pytest.raises(DuplicateKeyError):
yaml.load('{a: 1, a: 2}')
- def test_duplicate_keys_02(self):
+ def test_duplicate_keys_02(self) -> None:
from ruamel.yaml import YAML
from ruamel.yaml.constructor import DuplicateKeyError
@@ -37,7 +37,7 @@ class TestNewAPI:
with pytest.raises(DuplicateKeyError):
yaml.load('{a: 1, a: 2}')
- def test_issue_135(self):
+ def test_issue_135(self) -> None:
# reported by Andrzej Ostrowski
from ruamel.yaml import YAML
@@ -46,7 +46,7 @@ class TestNewAPI:
# originally on 2.7: with pytest.raises(TypeError):
yaml.dump(data, sys.stdout)
- def test_issue_135_temporary_workaround(self):
+ def test_issue_135_temporary_workaround(self) -> None:
# never raised error
from ruamel.yaml import YAML
@@ -56,7 +56,7 @@ class TestNewAPI:
class TestWrite:
- def test_dump_path(self, tmpdir):
+ def test_dump_path(self, tmpdir: Any) -> None:
from ruamel.yaml import YAML
fn = Path(str(tmpdir)) / 'test.yaml'
@@ -67,7 +67,7 @@ class TestWrite:
yaml.dump(data, fn)
assert fn.read_text() == 'a: 1\nb: 2\n'
- def test_dump_file(self, tmpdir):
+ def test_dump_file(self, tmpdir: Any) -> None:
from ruamel.yaml import YAML
fn = Path(str(tmpdir)) / 'test.yaml'
@@ -79,7 +79,7 @@ class TestWrite:
yaml.dump(data, fp)
assert fn.read_text() == 'a: 1\nb: 2\n'
- def test_dump_missing_stream(self):
+ def test_dump_missing_stream(self) -> None:
from ruamel.yaml import YAML
yaml = YAML()
@@ -89,7 +89,7 @@ class TestWrite:
with pytest.raises(TypeError):
yaml.dump(data)
- def test_dump_too_many_args(self, tmpdir):
+ def test_dump_too_many_args(self, tmpdir: Any) -> None:
from ruamel.yaml import YAML
fn = Path(str(tmpdir)) / 'test.yaml'
@@ -98,12 +98,12 @@ class TestWrite:
data['a'] = 1
data['b'] = 2
with pytest.raises(TypeError):
- yaml.dump(data, fn, True)
+ yaml.dump(data, fn, True) # type: ignore
- def test_transform(self, tmpdir):
+ def test_transform(self, tmpdir: Any) -> None:
from ruamel.yaml import YAML
- def tr(s):
+ def tr(s: str) -> str:
return s.replace(' ', ' ')
fn = Path(str(tmpdir)) / 'test.yaml'
@@ -114,7 +114,7 @@ class TestWrite:
yaml.dump(data, fn, transform=tr)
assert fn.read_text() == 'a: 1\nb: 2\n'
- def test_print(self, capsys):
+ def test_print(self, capsys: Any) -> None:
from ruamel.yaml import YAML
yaml = YAML()
@@ -127,7 +127,7 @@ class TestWrite:
class TestRead:
- def test_multi_load(self):
+ def test_multi_load(self) -> None:
# make sure reader, scanner, parser get reset
from ruamel.yaml import YAML
@@ -135,7 +135,7 @@ class TestRead:
yaml.load('a: 1')
yaml.load('a: 1') # did not work in 0.15.4
- def test_parse(self):
+ def test_parse(self) -> None:
# ensure `parse` method is functional and can parse "unsafe" yaml
from ruamel.yaml import YAML
from ruamel.yaml.constructor import ConstructorError
@@ -152,13 +152,13 @@ class TestRead:
class TestLoadAll:
- def test_multi_document_load(self, tmpdir):
+ def test_multi_document_load(self, tmpdir: Any) -> None:
"""this went wrong on 3.7 because of StopIteration, PR 37 and Issue 211"""
from ruamel.yaml import YAML
fn = Path(str(tmpdir)) / 'test.yaml'
fn.write_text(
- textwrap.dedent(u"""\
+ textwrap.dedent("""\
---
- a
---
@@ -171,7 +171,7 @@ class TestLoadAll:
class TestDuplSet:
- def test_dupl_set_00(self):
+ def test_dupl_set_00(self) -> None:
         # round-trip loader should raise an exception
from ruamel.yaml import YAML
from ruamel.yaml.constructor import DuplicateKeyError
@@ -192,28 +192,28 @@ class TestDuplSet:
class TestDumpLoadUnicode:
# test triggered by SamH on stackoverflow (https://stackoverflow.com/q/45281596/1307905)
# and answer by randomir (https://stackoverflow.com/a/45281922/1307905)
- def test_write_unicode(self, tmpdir):
+ def test_write_unicode(self, tmpdir: Any) -> None:
from ruamel.yaml import YAML
yaml = YAML()
- text_dict = {'text': u'HELLO_WORLD©'}
+ text_dict = {'text': 'HELLO_WORLD©'}
file_name = str(tmpdir) + '/tstFile.yaml'
yaml.dump(text_dict, open(file_name, 'w'))
- assert open(file_name, 'rb').read().decode('utf-8') == u'text: HELLO_WORLD©\n'
+ assert open(file_name, 'rb').read().decode('utf-8') == 'text: HELLO_WORLD©\n'
- def test_read_unicode(self, tmpdir):
+ def test_read_unicode(self, tmpdir: Any) -> None:
from ruamel.yaml import YAML
yaml = YAML()
file_name = str(tmpdir) + '/tstFile.yaml'
with open(file_name, 'wb') as fp:
- fp.write(u'text: HELLO_WORLD©\n'.encode('utf-8'))
+ fp.write('text: HELLO_WORLD©\n'.encode('utf-8'))
text_dict = yaml.load(open(file_name, 'r'))
- assert text_dict['text'] == u'HELLO_WORLD©'
+ assert text_dict['text'] == 'HELLO_WORLD©'
class TestFlowStyle:
- def test_flow_style(self, capsys):
+ def test_flow_style(self, capsys: Any) -> None:
# https://stackoverflow.com/questions/45791712/
from ruamel.yaml import YAML
@@ -228,8 +228,8 @@ class TestFlowStyle:
class TestOldAPI:
- @pytest.mark.skipif(sys.version_info >= (3, 0), reason='ok on Py3')
- def test_duplicate_keys_02(self):
+ @pytest.mark.skipif(sys.version_info >= (3, 0), reason='ok on Py3') # type: ignore
+ def test_duplicate_keys_02(self) -> None:
# Issue 165 unicode keys in error/warning
from ruamel.yaml import safe_load
from ruamel.yaml.constructor import DuplicateKeyError
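
The write tests above dump straight to a pathlib.Path and post-process the serialized text through transform=. A compact sketch combining the two; the transform body is illustrative, not copied from the tests:

# sketch: dump to a pathlib.Path and post-process the rendered text via transform=
import tempfile
from pathlib import Path

from ruamel.yaml import YAML


def tr(s: str) -> str:
    return s.replace('  ', ' ')   # collapse double spaces in the rendered document


yaml = YAML()
with tempfile.TemporaryDirectory() as d:
    fn = Path(d) / 'test.yaml'
    yaml.dump({'a': 1, 'b': 2}, fn, transform=tr)
    print(fn.read_text(), end='')   # a: 1
                                    # b: 2
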
diff --git a/_test/test_class_register.py b/_test/test_class_register.py
index 126d93f..fdd0275 100644
--- a/_test/test_class_register.py
+++ b/_test/test_class_register.py
@@ -4,33 +4,36 @@
testing of YAML.register_class and @yaml_object
"""
+from typing import Any
+from ruamel.yaml.comments import TaggedScalar, CommentedMap # NOQA
+
from roundtrip import YAML
-class User0(object):
- def __init__(self, name, age):
+class User0:
+ def __init__(self, name: str, age: int) -> None:
self.name = name
self.age = age
-class User1(object):
- yaml_tag = u'!user'
+class User1:
+ yaml_tag = '!user'
- def __init__(self, name, age):
+ def __init__(self, name: str, age: int) -> None:
self.name = name
self.age = age
@classmethod
- def to_yaml(cls, representer, node):
- return representer.represent_scalar(cls.yaml_tag, u'{.name}-{.age}'.format(node, node))
+ def to_yaml(cls, representer: Any, node: Any) -> Any:
+ return representer.represent_scalar(cls.yaml_tag, '{.name}-{.age}'.format(node, node))
@classmethod
- def from_yaml(cls, constructor, node):
+ def from_yaml(cls, constructor: Any, node: Any) -> Any:
return cls(*node.value.split('-'))
-class TestRegisterClass(object):
- def test_register_0_rt(self):
+class TestRegisterClass:
+ def test_register_0_rt(self) -> None:
yaml = YAML()
yaml.register_class(User0)
ys = """
@@ -41,7 +44,7 @@ class TestRegisterClass(object):
d = yaml.load(ys)
yaml.dump(d, compare=ys, unordered_lines=True)
- def test_register_0_safe(self):
+ def test_register_0_safe(self) -> None:
# default_flow_style = None
yaml = YAML(typ='safe')
yaml.register_class(User0)
@@ -51,7 +54,7 @@ class TestRegisterClass(object):
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_register_0_unsafe(self):
+ def test_register_0_unsafe(self) -> None:
# default_flow_style = None
yaml = YAML(typ='unsafe')
yaml.register_class(User0)
@@ -61,7 +64,7 @@ class TestRegisterClass(object):
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_register_1_rt(self):
+ def test_register_1_rt(self) -> None:
yaml = YAML()
yaml.register_class(User1)
ys = """
@@ -70,7 +73,7 @@ class TestRegisterClass(object):
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_register_1_safe(self):
+ def test_register_1_safe(self) -> None:
yaml = YAML(typ='safe')
yaml.register_class(User1)
ys = """
@@ -79,7 +82,7 @@ class TestRegisterClass(object):
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_register_1_unsafe(self):
+ def test_register_1_unsafe(self) -> None:
yaml = YAML(typ='unsafe')
yaml.register_class(User1)
ys = """
@@ -89,15 +92,15 @@ class TestRegisterClass(object):
yaml.dump(d, compare=ys)
-class TestDecorator(object):
- def test_decorator_implicit(self):
+class TestDecorator:
+ def test_decorator_implicit(self) -> None:
from ruamel.yaml import yaml_object
yml = YAML()
@yaml_object(yml)
- class User2(object):
- def __init__(self, name, age):
+ class User2:
+ def __init__(self, name: str, age: int) -> None:
self.name = name
self.age = age
@@ -109,27 +112,27 @@ class TestDecorator(object):
d = yml.load(ys)
yml.dump(d, compare=ys, unordered_lines=True)
- def test_decorator_explicit(self):
+ def test_decorator_explicit(self) -> None:
from ruamel.yaml import yaml_object
yml = YAML()
@yaml_object(yml)
- class User3(object):
- yaml_tag = u'!USER'
+ class User3:
+ yaml_tag = '!USER'
- def __init__(self, name, age):
+ def __init__(self, name: str, age: int) -> None:
self.name = name
self.age = age
@classmethod
- def to_yaml(cls, representer, node):
+ def to_yaml(cls, representer: Any, node: Any) -> Any:
return representer.represent_scalar(
- cls.yaml_tag, u'{.name}-{.age}'.format(node, node)
+ cls.yaml_tag, '{.name}-{.age}'.format(node, node)
)
@classmethod
- def from_yaml(cls, constructor, node):
+ def from_yaml(cls, constructor: Any, node: Any) -> Any:
return cls(*node.value.split('-'))
ys = """
diff --git a/_test/test_collections.py b/_test/test_collections.py
index e6033bb..d6e88ef 100644
--- a/_test/test_collections.py
+++ b/_test/test_collections.py
@@ -7,15 +7,14 @@ This is now so integrated in Python that it can be mapped to !!omap
"""
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NOQA
class TestOrderedDict:
- def test_ordereddict(self):
+ def test_ordereddict(self) -> None:
from collections import OrderedDict
- import ruamel.yaml # NOQA
- assert ruamel.yaml.dump(OrderedDict()) == '!!omap []\n'
+ assert round_trip_dump(OrderedDict()) == '!!omap []\n'
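
The one remaining assertion shows that the round-trip dumper represents collections.OrderedDict as !!omap. For reference, a sketch with a non-empty mapping; the expected output is my reading of that behaviour, not copied from the test:

# sketch: the round-trip dumper represents collections.OrderedDict as !!omap
import sys
from collections import OrderedDict

from ruamel.yaml import YAML

yaml = YAML()
yaml.dump(OrderedDict([('a', 1), ('b', 2)]), sys.stdout)
# expected along the lines of:
# !!omap
# - a: 1
# - b: 2
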
diff --git a/_test/test_comment_manipulation.py b/_test/test_comment_manipulation.py
index 7f09a38..979b386 100644
--- a/_test/test_comment_manipulation.py
+++ b/_test/test_comment_manipulation.py
@@ -1,30 +1,29 @@
# coding: utf-8
-from __future__ import print_function
-
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NOQA
+from typing import Any
-def load(s):
+def load(s: str) -> Any:
return round_trip_load(dedent(s))
-def compare(data, s, **kw):
+def compare(data: Any, s: str, **kw: Any) -> None:
assert round_trip_dump(data, **kw) == dedent(s)
-def compare_eol(data, s):
+def compare_eol(data: Any, s: str) -> None:
assert 'EOL' in s
ds = dedent(s).replace('EOL', '').replace('\n', '|\n')
- assert round_trip_dump(data).replace('\n', '|\n') == ds
+ assert round_trip_dump(data).replace('\n', '|\n') == ds # type: ignore
class TestCommentsManipulation:
# list
- def test_seq_set_comment_on_existing_explicit_column(self):
+ def test_seq_set_comment_on_existing_explicit_column(self) -> None:
data = load("""
- a # comment 1
- b
@@ -38,7 +37,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_seq_overwrite_comment_on_existing_explicit_column(self):
+ def test_seq_overwrite_comment_on_existing_explicit_column(self) -> None:
data = load("""
- a # comment 1
- b
@@ -52,7 +51,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_seq_first_comment_explicit_column(self):
+ def test_seq_first_comment_explicit_column(self) -> None:
data = load("""
- a
- b
@@ -66,7 +65,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_seq_set_comment_on_existing_column_prev(self):
+ def test_seq_set_comment_on_existing_column_prev(self) -> None:
data = load("""
- a # comment 1
- b
@@ -82,14 +81,14 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_seq_set_comment_on_existing_column_next(self):
+ def test_seq_set_comment_on_existing_column_next(self) -> None:
data = load("""
- a # comment 1
- b
- c
- d # comment 3
""")
- print(data._yaml_comment)
+ print(data.ca)
# print(type(data._yaml_comment._items[0][0].start_mark))
# ruamel.yaml.error.Mark
# print(type(data._yaml_comment._items[0][0].start_mark))
@@ -102,7 +101,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_seq_set_comment_on_existing_column_further_away(self):
+ def test_seq_set_comment_on_existing_column_further_away(self) -> None:
"""
no comment line before or after, take the latest before
the new position
@@ -115,7 +114,7 @@ class TestCommentsManipulation:
- e
- f # comment 3
""")
- print(data._yaml_comment)
+ print(data.ca)
# print(type(data._yaml_comment._items[0][0].start_mark))
# ruamel.yaml.error.Mark
# print(type(data._yaml_comment._items[0][0].start_mark))
@@ -130,7 +129,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_seq_set_comment_on_existing_explicit_column_with_hash(self):
+ def test_seq_set_comment_on_existing_explicit_column_with_hash(self) -> None:
data = load("""
- a # comment 1
- b
@@ -146,7 +145,7 @@ class TestCommentsManipulation:
# dict
- def test_dict_set_comment_on_existing_explicit_column(self):
+ def test_dict_set_comment_on_existing_explicit_column(self) -> None:
data = load("""
a: 1 # comment 1
b: 2
@@ -164,7 +163,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_dict_overwrite_comment_on_existing_explicit_column(self):
+ def test_dict_overwrite_comment_on_existing_explicit_column(self) -> None:
data = load("""
a: 1 # comment 1
b: 2
@@ -182,7 +181,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_map_set_comment_on_existing_column_prev(self):
+ def test_map_set_comment_on_existing_column_prev(self) -> None:
data = load("""
a: 1 # comment 1
b: 2
@@ -200,7 +199,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_map_set_comment_on_existing_column_next(self):
+ def test_map_set_comment_on_existing_column_next(self) -> None:
data = load("""
a: 1 # comment 1
b: 2
@@ -218,7 +217,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_map_set_comment_on_existing_column_further_away(self):
+ def test_map_set_comment_on_existing_column_further_away(self) -> None:
"""
no comment line before or after, take the latest before
the new position
@@ -241,7 +240,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_before_top_map_rt(self):
+ def test_before_top_map_rt(self) -> None:
data = load("""
a: 1
b: 2
@@ -255,7 +254,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'))
- def test_before_top_map_replace(self):
+ def test_before_top_map_replace(self) -> None:
data = load("""
# abc
# def
@@ -271,7 +270,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'))
- def test_before_top_map_from_scratch(self):
+ def test_before_top_map_from_scratch(self) -> None:
from ruamel.yaml.comments import CommentedMap
data = CommentedMap()
@@ -288,7 +287,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'))
- def test_before_top_seq_rt(self):
+ def test_before_top_seq_rt(self) -> None:
data = load("""
- a
- b
@@ -303,7 +302,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_before_top_seq_rt_replace(self):
+ def test_before_top_seq_rt_replace(self) -> None:
s = """
# this
# that
@@ -321,7 +320,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'))
- def test_before_top_seq_from_scratch(self):
+ def test_before_top_seq_from_scratch(self) -> None:
from ruamel.yaml.comments import CommentedSeq
data = CommentedSeq()
@@ -338,7 +337,7 @@ class TestCommentsManipulation:
compare(data, exp.format(comment='#'))
# nested variants
- def test_before_nested_map_rt(self):
+ def test_before_nested_map_rt(self) -> None:
data = load("""
a: 1
b:
@@ -356,7 +355,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'))
- def test_before_nested_map_rt_indent(self):
+ def test_before_nested_map_rt_indent(self) -> None:
data = load("""
a: 1
b:
@@ -375,7 +374,7 @@ class TestCommentsManipulation:
compare(data, exp.format(comment='#'))
print(data['b'].ca)
- def test_before_nested_map_from_scratch(self):
+ def test_before_nested_map_from_scratch(self) -> None:
from ruamel.yaml.comments import CommentedMap
data = CommentedMap()
@@ -395,7 +394,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'))
- def test_before_nested_seq_from_scratch(self):
+ def test_before_nested_seq_from_scratch(self) -> None:
from ruamel.yaml.comments import CommentedMap, CommentedSeq
data = CommentedMap()
@@ -415,7 +414,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'))
- def test_before_nested_seq_from_scratch_block_seq_indent(self):
+ def test_before_nested_seq_from_scratch_block_seq_indent(self) -> None:
from ruamel.yaml.comments import CommentedMap, CommentedSeq
data = CommentedMap()
@@ -435,7 +434,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'), indent=4, block_seq_indent=2)
- def test_map_set_comment_before_and_after_non_first_key_00(self):
+ def test_map_set_comment_before_and_after_non_first_key_00(self) -> None:
# http://stackoverflow.com/a/40705671/1307905
data = load("""
xyz:
@@ -464,7 +463,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def Xtest_map_set_comment_before_and_after_non_first_key_01(self):
+ def Xtest_map_set_comment_before_and_after_non_first_key_01(self) -> None:
data = load("""
xyz:
a: 1 # comment 1
@@ -496,7 +495,7 @@ class TestCommentsManipulation:
# EOL is no longer necessary
# fixed together with issue # 216
- def test_map_set_comment_before_and_after_non_first_key_01(self):
+ def test_map_set_comment_before_and_after_non_first_key_01(self) -> None:
data = load("""
xyz:
a: 1 # comment 1
@@ -525,7 +524,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def Xtest_map_set_comment_before_and_after_non_first_key_02(self):
+ def Xtest_map_set_comment_before_and_after_non_first_key_02(self) -> None:
data = load("""
xyz:
a: 1 # comment 1
@@ -557,7 +556,7 @@ class TestCommentsManipulation:
"""
compare_eol(data, exp)
- def test_map_set_comment_before_and_after_non_first_key_02(self):
+ def test_map_set_comment_before_and_after_non_first_key_02(self) -> None:
data = load("""
xyz:
a: 1 # comment 1
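
The comment-manipulation calls themselves sit in unchanged context and are not visible in these hunks. A heavily hedged sketch of the kind of manipulation these tests cover, using the CommentedBase helpers as I recall them (yaml_set_start_comment / yaml_add_eol_comment):

# hedged sketch: programmatic comment manipulation on round-trip data;
# helper names and signatures are quoted from memory, not from this diff
import sys

from ruamel.yaml import YAML

yaml = YAML()
data = yaml.load('a: 1\nb: 2\n')
data.yaml_set_start_comment('generated file, do not edit')
data.yaml_add_eol_comment('keep in sync with b', 'a')
yaml.dump(data, sys.stdout)
# roughly:
# # generated file, do not edit
# a: 1  # keep in sync with b
# b: 2
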
diff --git a/_test/test_comments.py b/_test/test_comments.py
index 31ab2a6..6c3d8c3 100644
--- a/_test/test_comments.py
+++ b/_test/test_comments.py
@@ -10,14 +10,14 @@ roundtrip changes
"""
-import pytest
+import pytest # type: ignore # NOQA
import sys
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump
class TestComments:
- def test_no_end_of_file_eol(self):
+ def test_no_end_of_file_eol(self) -> None:
"""not excluding comments caused some problems if at the end of
the file without a newline. First error, then included \0 """
x = """\
@@ -26,7 +26,7 @@ class TestComments:
with pytest.raises(AssertionError):
round_trip(x, extra='a\n')
- def test_no_comments(self):
+ def test_no_comments(self) -> None:
round_trip("""
- europe: 10
- usa:
@@ -34,7 +34,7 @@ class TestComments:
- california: 9
""")
- def test_round_trip_ordering(self):
+ def test_round_trip_ordering(self) -> None:
round_trip("""
a: 1
b: 2
@@ -46,7 +46,7 @@ class TestComments:
f: 6
""")
- def test_complex(self):
+ def test_complex(self) -> None:
round_trip("""
- europe: 10 # top
- usa:
@@ -54,7 +54,7 @@ class TestComments:
- california: 9 # o
""")
- def test_dropped(self):
+ def test_dropped(self) -> None:
s = """\
# comment
scalar
@@ -62,7 +62,7 @@ class TestComments:
"""
round_trip(s, 'scalar\n...\n')
- def test_main_mapping_begin_end(self):
+ def test_main_mapping_begin_end(self) -> None:
round_trip("""
# C start a
# C start b
@@ -73,7 +73,7 @@ class TestComments:
# C end b
""")
- def test_reindent(self):
+ def test_reindent(self) -> None:
x = """\
a:
b: # comment 1
@@ -87,7 +87,7 @@ class TestComments:
c: 1 # comment 2
""")
- def test_main_mapping_begin_end_items_post(self):
+ def test_main_mapping_begin_end_items_post(self) -> None:
round_trip("""
# C start a
# C start b
@@ -98,7 +98,7 @@ class TestComments:
# C end b
""")
- def test_main_sequence_begin_end(self):
+ def test_main_sequence_begin_end(self) -> None:
round_trip("""
# C start a
# C start b
@@ -109,7 +109,7 @@ class TestComments:
# C end b
""")
- def test_main_sequence_begin_end_items_post(self):
+ def test_main_sequence_begin_end_items_post(self) -> None:
round_trip("""
# C start a
# C start b
@@ -120,7 +120,7 @@ class TestComments:
# C end b
""")
- def test_main_mapping_begin_end_complex(self):
+ def test_main_mapping_begin_end_complex(self) -> None:
round_trip("""
# C start a
# C start b
@@ -133,7 +133,7 @@ class TestComments:
# C end b
""")
- def test_09(self): # 2.9 from the examples in the spec
+ def test_09(self) -> None: # 2.9 from the examples in the spec
s = """\
hr: # 1998 hr ranking
- Mark McGwire
@@ -145,7 +145,7 @@ class TestComments:
"""
round_trip(s, indent=4, block_seq_indent=2)
- def test_09a(self):
+ def test_09a(self) -> None:
round_trip("""
hr: # 1998 hr ranking
- Mark McGwire
@@ -156,7 +156,7 @@ class TestComments:
- Ken Griffey
""")
- def test_simple_map_middle_comment(self):
+ def test_simple_map_middle_comment(self) -> None:
round_trip("""
abc: 1
# C 3a
@@ -164,7 +164,7 @@ class TestComments:
ghi: 2
""")
- def test_map_in_map_0(self):
+ def test_map_in_map_0(self) -> None:
round_trip("""
map1: # comment 1
# comment 2
@@ -172,7 +172,7 @@ class TestComments:
key1: val1
""")
- def test_map_in_map_1(self):
+ def test_map_in_map_1(self) -> None:
# comment is moved from value to key
round_trip("""
map1:
@@ -181,7 +181,7 @@ class TestComments:
key1: val1
""")
- def test_application_arguments(self):
+ def test_application_arguments(self) -> None:
         # application configuration
round_trip("""
args:
@@ -194,7 +194,7 @@ class TestComments:
wait: 10
""")
- def test_substitute(self):
+ def test_substitute(self) -> None:
x = """
args:
username: anthon # name
@@ -211,7 +211,7 @@ class TestComments:
x = x.replace(': secret ', ': deleted password')
assert round_trip_dump(data) == dedent(x)
- def test_set_comment(self):
+ def test_set_comment(self) -> None:
round_trip("""
!!set
# the beginning
@@ -222,7 +222,7 @@ class TestComments:
# this is the end
""")
- def test_omap_comment_roundtrip(self):
+ def test_omap_comment_roundtrip(self) -> None:
round_trip("""
!!omap
- a: 1
@@ -231,7 +231,7 @@ class TestComments:
- d: 4
""")
- def test_omap_comment_roundtrip_pre_comment(self):
+ def test_omap_comment_roundtrip_pre_comment(self) -> None:
round_trip("""
!!omap
- a: 1
@@ -241,7 +241,7 @@ class TestComments:
- d: 4
""")
- def test_non_ascii(self):
+ def test_non_ascii(self) -> None:
round_trip("""
verbosity: 1 # 0 is minimal output, -1 none
base_url: http://gopher.net
@@ -263,7 +263,7 @@ class TestComments:
Italy: Rome
""")
- def test_dump_utf8(self):
+ def test_dump_utf8(self) -> None:
import ruamel.yaml # NOQA
x = dedent("""\
@@ -272,30 +272,28 @@ class TestComments:
- y # more comment
""")
data = round_trip_load(x)
- dumper = ruamel.yaml.RoundTripDumper
for utf in [True, False]:
- y = ruamel.yaml.dump(
- data, default_flow_style=False, Dumper=dumper, allow_unicode=utf
+ y = round_trip_dump(
+ data, default_flow_style=False, allow_unicode=utf
)
assert y == x
- def test_dump_unicode_utf8(self):
+ def test_dump_unicode_utf8(self) -> None:
import ruamel.yaml # NOQA
- x = dedent(u"""\
+ x = dedent("""\
ab:
- x # comment
- y # more comment
""")
data = round_trip_load(x)
- dumper = ruamel.yaml.RoundTripDumper
for utf in [True, False]:
- y = ruamel.yaml.dump(
- data, default_flow_style=False, Dumper=dumper, allow_unicode=utf
+ y = round_trip_dump(
+ data, default_flow_style=False, allow_unicode=utf
)
assert y == x
- def test_mlget_00(self):
+ def test_mlget_00(self) -> None:
x = """\
a:
- b:
@@ -307,8 +305,8 @@ class TestComments:
"""
d = round_trip_load(x)
assert d.mlget(['a', 1, 'd', 'f'], list_ok=True) == 196
- with pytest.raises(AssertionError):
- d.mlget(['a', 1, 'd', 'f']) == 196
+ # with pytest.raises(AssertionError):
+ # d.mlget(['a', 1, 'd', 'f']) == 196
class TestInsertPopList:
@@ -316,7 +314,7 @@ class TestInsertPopList:
need to move the values to subsequent keys on insert"""
@property
- def ins(self):
+ def ins(self) -> str:
return """\
ab:
- a # a
@@ -329,7 +327,7 @@ class TestInsertPopList:
- 2
"""
- def test_insert_0(self):
+ def test_insert_0(self) -> None:
d = round_trip_load(self.ins)
d['ab'].insert(0, 'xyz')
y = round_trip_dump(d, indent=2)
@@ -346,7 +344,7 @@ class TestInsertPopList:
- 2
""")
- def test_insert_1(self):
+ def test_insert_1(self) -> None:
d = round_trip_load(self.ins)
d['ab'].insert(4, 'xyz')
y = round_trip_dump(d, indent=2)
@@ -363,7 +361,7 @@ class TestInsertPopList:
- 2
""")
- def test_insert_2(self):
+ def test_insert_2(self) -> None:
d = round_trip_load(self.ins)
d['ab'].insert(1, 'xyz')
y = round_trip_dump(d, indent=2)
@@ -380,7 +378,7 @@ class TestInsertPopList:
- 2
""")
- def test_pop_0(self):
+ def test_pop_0(self) -> None:
d = round_trip_load(self.ins)
d['ab'].pop(0)
y = round_trip_dump(d, indent=2)
@@ -396,7 +394,7 @@ class TestInsertPopList:
- 2
""")
- def test_pop_1(self):
+ def test_pop_1(self) -> None:
d = round_trip_load(self.ins)
d['ab'].pop(1)
y = round_trip_dump(d, indent=2)
@@ -412,7 +410,7 @@ class TestInsertPopList:
- 2
""")
- def test_pop_2(self):
+ def test_pop_2(self) -> None:
d = round_trip_load(self.ins)
d['ab'].pop(2)
y = round_trip_dump(d, indent=2)
@@ -428,7 +426,7 @@ class TestInsertPopList:
- 2
""")
- def test_pop_3(self):
+ def test_pop_3(self) -> None:
d = round_trip_load(self.ins)
d['ab'].pop(3)
y = round_trip_dump(d, indent=2)
@@ -448,14 +446,14 @@ class TestInsertPopList:
# http://stackoverflow.com/a/36970608/1307905
class TestInsertInMapping:
@property
- def ins(self):
+ def ins(self) -> str:
return """\
first_name: Art
occupation: Architect # This is an occupation comment
about: Art Vandelay is a fictional character that George invents...
"""
- def test_insert_at_pos_1(self):
+ def test_insert_at_pos_1(self) -> None:
d = round_trip_load(self.ins)
d.insert(1, 'last name', 'Vandelay', comment='new key')
y = round_trip_dump(d)
@@ -467,7 +465,7 @@ class TestInsertInMapping:
about: Art Vandelay is a fictional character that George invents...
""")
- def test_insert_at_pos_0(self):
+ def test_insert_at_pos_0(self) -> None:
d = round_trip_load(self.ins)
d.insert(0, 'last name', 'Vandelay', comment='new key')
y = round_trip_dump(d)
@@ -479,7 +477,7 @@ class TestInsertInMapping:
about: Art Vandelay is a fictional character that George invents...
""")
- def test_insert_at_pos_3(self):
+ def test_insert_at_pos_3(self) -> None:
# much more simple if done with appending.
d = round_trip_load(self.ins)
d.insert(3, 'last name', 'Vandelay', comment='new key')
@@ -494,7 +492,7 @@ class TestInsertInMapping:
class TestCommentedMapMerge:
- def test_in_operator(self):
+ def test_in_operator(self) -> None:
data = round_trip_load("""
x: &base
a: 1
@@ -510,7 +508,7 @@ class TestCommentedMapMerge:
assert data['y']['a'] == 1
assert 'a' in data['y']
- def test_issue_60(self):
+ def test_issue_60(self) -> None:
data = round_trip_load("""
x: &base
a: 1
@@ -519,9 +517,9 @@ class TestCommentedMapMerge:
""")
assert data['x']['a'] == 1
assert data['y']['a'] == 1
- assert str(data['y']) == """ordereddict([('a', 1)])"""
+ assert str(data['y']) == """{'a': 1}"""
- def test_issue_60_1(self):
+ def test_issue_60_1(self) -> None:
data = round_trip_load("""
x: &base
a: 1
@@ -531,12 +529,12 @@ class TestCommentedMapMerge:
""")
assert data['x']['a'] == 1
assert data['y']['a'] == 1
- assert str(data['y']) == """ordereddict([('b', 2), ('a', 1)])"""
+ assert str(data['y']) == """{'b': 2, 'a': 1}"""
class TestEmptyLines:
# prompted by issue 46 from Alex Harvey
- def test_issue_46(self):
+ def test_issue_46(self) -> None:
yaml_str = dedent("""\
---
# Please add key/value pairs in alphabetical order
@@ -551,7 +549,7 @@ class TestEmptyLines:
y = round_trip_dump(d, explicit_start=True)
assert yaml_str == y
- def test_multispace_map(self):
+ def test_multispace_map(self) -> None:
round_trip("""
a: 1x
@@ -566,8 +564,8 @@ class TestEmptyLines:
""")
- @pytest.mark.xfail(strict=True)
- def test_multispace_map_initial(self):
+ @pytest.mark.xfail(strict=True) # type: ignore
+ def test_multispace_map_initial(self) -> None:
round_trip("""
a: 1x
@@ -583,7 +581,7 @@ class TestEmptyLines:
""")
- def test_embedded_map(self):
+ def test_embedded_map(self) -> None:
round_trip("""
- a: 1y
b: 2y
@@ -591,7 +589,7 @@ class TestEmptyLines:
c: 3y
""")
- def test_toplevel_seq(self):
+ def test_toplevel_seq(self) -> None:
round_trip("""\
- 1
@@ -600,7 +598,7 @@ class TestEmptyLines:
- 3
""")
- def test_embedded_seq(self):
+ def test_embedded_seq(self) -> None:
round_trip("""
a:
b:
@@ -612,7 +610,7 @@ class TestEmptyLines:
- 3
""")
- def test_line_with_only_spaces(self):
+ def test_line_with_only_spaces(self) -> None:
# issue 54
yaml_str = "---\n\na: 'x'\n \nb: y\n"
d = round_trip_load(yaml_str, preserve_quotes=True)
@@ -623,7 +621,7 @@ class TestEmptyLines:
print(line + '$')
assert stripped == y
- def test_some_eol_spaces(self):
+ def test_some_eol_spaces(self) -> None:
# spaces after tokens and on empty lines
yaml_str = '--- \n \na: "x" \n \nb: y \n'
d = round_trip_load(yaml_str, preserve_quotes=True)
@@ -634,7 +632,7 @@ class TestEmptyLines:
print(line + '$')
assert stripped == y
- def test_issue_54_not_ok(self):
+ def test_issue_54_not_ok(self) -> None:
yaml_str = dedent("""\
toplevel:
@@ -644,10 +642,11 @@ class TestEmptyLines:
d = round_trip_load(yaml_str)
print(d.ca)
y = round_trip_dump(d, indent=4)
+ assert isinstance(y, str)
print(y.replace('\n', '$\n'))
assert yaml_str == y
- def test_issue_54_ok(self):
+ def test_issue_54_ok(self) -> None:
yaml_str = dedent("""\
toplevel:
# some comment
@@ -657,7 +656,7 @@ class TestEmptyLines:
y = round_trip_dump(d, indent=4)
assert yaml_str == y
- def test_issue_93(self):
+ def test_issue_93(self) -> None:
round_trip("""\
a:
b:
@@ -666,7 +665,7 @@ class TestEmptyLines:
- c2: catfish # a2
""")
- def test_issue_93_00(self):
+ def test_issue_93_00(self) -> None:
round_trip("""\
a:
- - c1: cat # a1
@@ -674,14 +673,14 @@ class TestEmptyLines:
- c2: catfish # a2
""")
- def test_issue_93_01(self):
+ def test_issue_93_01(self) -> None:
round_trip("""\
- - c1: cat # a1
# my comment on catfish
- c2: catfish # a2
""")
- def test_issue_93_02(self):
+ def test_issue_93_02(self) -> None:
# never failed as there is no indent
round_trip("""\
- c1: cat
@@ -689,7 +688,7 @@ class TestEmptyLines:
- c2: catfish
""")
- def test_issue_96(self):
+ def test_issue_96(self) -> None:
# inserted extra line on trailing spaces
round_trip("""\
a:
@@ -703,8 +702,8 @@ class TestEmptyLines:
class TestUnicodeComments:
- @pytest.mark.skipif(sys.version_info < (2, 7), reason='wide unicode')
- def test_issue_55(self): # reported by Haraguroicha Hsu
+ @pytest.mark.skipif(sys.version_info < (2, 7), reason='wide unicode') # type: ignore
+ def test_issue_55(self) -> None: # reported by Haraguroicha Hsu
round_trip("""\
name: TEST
description: test using
@@ -724,7 +723,7 @@ class TestUnicodeComments:
class TestEmptyValueBeforeComments:
- def test_issue_25a(self):
+ def test_issue_25a(self) -> None:
round_trip("""\
- a: b
c: d
@@ -732,7 +731,7 @@ class TestEmptyValueBeforeComments:
- e: f
""")
- def test_issue_25a1(self):
+ def test_issue_25a1(self) -> None:
round_trip("""\
- a: b
c: d
@@ -740,13 +739,13 @@ class TestEmptyValueBeforeComments:
e: f
""")
- def test_issue_25b(self):
+ def test_issue_25b(self) -> None:
round_trip("""\
var1: #empty
var2: something #notempty
""")
- def test_issue_25c(self):
+ def test_issue_25c(self) -> None:
round_trip("""\
params:
a: 1 # comment a
@@ -754,7 +753,7 @@ class TestEmptyValueBeforeComments:
c: 3 # comment c
""")
- def test_issue_25c1(self):
+ def test_issue_25c1(self) -> None:
round_trip("""\
params:
a: 1 # comment a
@@ -763,14 +762,14 @@ class TestEmptyValueBeforeComments:
c: 3 # comment c
""")
- def test_issue_25_00(self):
+ def test_issue_25_00(self) -> None:
round_trip("""\
params:
a: 1 # comment a
b: # comment b
""")
- def test_issue_25_01(self):
+ def test_issue_25_01(self) -> None:
round_trip("""\
a: # comment 1
# comment 2
@@ -778,14 +777,14 @@ class TestEmptyValueBeforeComments:
c: 1 # comment 4
""")
- def test_issue_25_02(self):
+ def test_issue_25_02(self) -> None:
round_trip("""\
a: # comment 1
# comment 2
- b: 2 # comment 3
""")
- def test_issue_25_03(self):
+ def test_issue_25_03(self) -> None:
s = """\
a: # comment 1
# comment 2
@@ -793,14 +792,14 @@ class TestEmptyValueBeforeComments:
"""
round_trip(s, indent=4, block_seq_indent=2)
- def test_issue_25_04(self):
+ def test_issue_25_04(self) -> None:
round_trip("""\
a: # comment 1
# comment 2
b: 1 # comment 3
""")
- def test_flow_seq_within_seq(self):
+ def test_flow_seq_within_seq(self) -> None:
round_trip("""\
# comment 1
- a
@@ -815,6 +814,14 @@ class TestEmptyValueBeforeComments:
- []
""")
+ def test_comment_after_block_scalar_indicator(self) -> None:
+ round_trip("""\
+ a: | # abc
+ test 1
+ test 2
+ # all done
+ """)
+
test_block_scalar_commented_line_template = """\
y: p
@@ -828,7 +835,7 @@ a: |
class TestBlockScalarWithComments:
# issue 99 reported by Colm O'Connor
- def test_scalar_with_comments(self):
+ def test_scalar_with_comments(self) -> None:
import ruamel.yaml # NOQA
for x in [
@@ -843,6 +850,6 @@ class TestBlockScalarWithComments:
]:
commented_line = test_block_scalar_commented_line_template.format(x)
- data = ruamel.yaml.round_trip_load(commented_line)
+ data = round_trip_load(commented_line)
- assert ruamel.yaml.round_trip_dump(data) == commented_line
+ assert round_trip_dump(data) == commented_line
diff --git a/_test/test_contextmanager.py b/_test/test_contextmanager.py
index b3bb3be..e6256d3 100644
--- a/_test/test_contextmanager.py
+++ b/_test/test_contextmanager.py
@@ -1,14 +1,13 @@
# coding: utf-8
-from __future__ import print_function
-
"""
testing loading and dumping via the context manager interface of YAML
"""
import sys
-import pytest
+import pytest # type: ignore
+from typing import Any
single_doc = """\
- a: 1
@@ -33,33 +32,33 @@ multi_doc = """\
multi_doc_data = [['abc', 'xyz'], single_data]
-def get_yaml():
+def get_yaml() -> Any:
from ruamel.yaml import YAML
return YAML()
class TestOldStyle:
- def test_single_load(self):
+ def test_single_load(self) -> None:
d = get_yaml().load(single_doc)
print(d)
print(type(d[0]))
assert d == single_data
- def test_single_load_no_arg(self):
+ def test_single_load_no_arg(self) -> None:
with pytest.raises(TypeError):
assert get_yaml().load() == single_data
- def test_multi_load(self):
+ def test_multi_load(self) -> None:
data = list(get_yaml().load_all(multi_doc))
assert data == multi_doc_data
- def test_single_dump(self, capsys):
+ def test_single_dump(self, capsys: Any) -> None:
get_yaml().dump(single_data, sys.stdout)
out, err = capsys.readouterr()
assert out == single_doc
- def test_multi_dump(self, capsys):
+ def test_multi_dump(self, capsys: Any) -> None:
yaml = get_yaml()
yaml.explicit_start = True
yaml.dump_all(multi_doc_data, sys.stdout)
@@ -68,7 +67,7 @@ class TestOldStyle:
class TestContextManager:
- def test_single_dump(self, capsys):
+ def test_single_dump(self, capsys: Any) -> None:
from ruamel.yaml import YAML
with YAML(output=sys.stdout) as yaml:
@@ -77,7 +76,7 @@ class TestContextManager:
print(err)
assert out == single_doc
- def test_multi_dump(self, capsys):
+ def test_multi_dump(self, capsys: Any) -> None:
from ruamel.yaml import YAML
with YAML(output=sys.stdout) as yaml:
@@ -105,7 +104,7 @@ class TestContextManager:
# for idx, data in enumerate(yaml.load()):
# assert data == multi_doc_data[0]
- def test_roundtrip(self, capsys):
+ def test_roundtrip(self, capsys: Any) -> None:
from ruamel.yaml import YAML
with YAML(output=sys.stdout) as yaml:
diff --git a/_test/test_copy.py b/_test/test_copy.py
index 4931d2a..cf402a4 100644
--- a/_test/test_copy.py
+++ b/_test/test_copy.py
@@ -6,13 +6,13 @@ Testing copy and deepcopy, instigated by Issue 84 (Peter Amstutz)
import copy
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import dedent, round_trip_load, round_trip_dump
class TestDeepCopy:
- def test_preserve_flow_style_simple(self):
+ def test_preserve_flow_style_simple(self) -> None:
x = dedent("""\
{foo: bar, baz: quux}
""")
@@ -24,7 +24,7 @@ class TestDeepCopy:
assert y == x
assert data.fa.flow_style() == data_copy.fa.flow_style()
- def test_deepcopy_flow_style_nested_dict(self):
+ def test_deepcopy_flow_style_nested_dict(self) -> None:
x = dedent("""\
a: {foo: bar, baz: quux}
""")
@@ -46,7 +46,7 @@ class TestDeepCopy:
baz: quux
""")
- def test_deepcopy_flow_style_nested_list(self):
+ def test_deepcopy_flow_style_nested_list(self) -> None:
x = dedent("""\
a: [1, 2, 3]
""")
@@ -71,7 +71,7 @@ class TestDeepCopy:
class TestCopy:
- def test_copy_flow_style_nested_dict(self):
+ def test_copy_flow_style_nested_dict(self) -> None:
x = dedent("""\
a: {foo: bar, baz: quux}
""")
@@ -93,7 +93,7 @@ class TestCopy:
baz: quux
""")
- def test_copy_flow_style_nested_list(self):
+ def test_copy_flow_style_nested_list(self) -> None:
x = dedent("""\
a: [1, 2, 3]
""")
diff --git a/_test/test_cyaml.py b/_test/test_cyaml.py
index 6b618f3..056093b 100644
--- a/_test/test_cyaml.py
+++ b/_test/test_cyaml.py
@@ -2,52 +2,53 @@
import sys
import platform
-import pytest
+import pytest # type: ignore # NOQA
from textwrap import dedent
+NO_CLIB_VER = (3, 10)
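+# from this Python version on, the CLoader/CDumper tests below return early (effectively skipped)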
-@pytest.mark.skipif(
+
+@pytest.mark.skipif( # type: ignore
platform.python_implementation() in ['Jython', 'PyPy'],
reason='Jython throws RepresenterError'
)
-def test_load_cyaml():
+def test_load_cyaml() -> None:
print("???????????????????????", platform.python_implementation())
import ruamel.yaml
- if sys.version_info >= (3, 8):
+ if sys.version_info >= NO_CLIB_VER:
return
+ yaml = ruamel.yaml.YAML(typ='safe', pure=False)
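+ # pure=False: use the libyaml based implementation when it is available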
assert ruamel.yaml.__with_libyaml__
- from ruamel.yaml.cyaml import CLoader
- ruamel.yaml.load('abc: 1', Loader=CLoader)
+ yaml.load('abc: 1')
-@pytest.mark.skipif(sys.version_info >= (3, 8)
+@pytest.mark.skipif(sys.version_info >= NO_CLIB_VER # type: ignore
or platform.python_implementation() in ['Jython', 'PyPy'],
reason='no _PyGC_FINALIZED')
-def test_dump_cyaml():
+def test_dump_cyaml() -> None:
import ruamel.yaml
- if sys.version_info >= (3, 8):
+ if sys.version_info >= NO_CLIB_VER:
return
data = {'a': 1, 'b': 2}
- res = ruamel.yaml.dump(
- data,
- Dumper=ruamel.yaml.cyaml.CSafeDumper,
- default_flow_style=False,
- allow_unicode=True,
- )
- assert res == 'a: 1\nb: 2\n'
+ yaml = ruamel.yaml.YAML(typ='safe', pure=False)
+ yaml.default_flow_style = False
+ yaml.allow_unicode = True
+ buf = ruamel.yaml.compat.StringIO()
+ yaml.dump(data, buf)
+ assert buf.getvalue() == 'a: 1\nb: 2\n'
-@pytest.mark.skipif(
+@pytest.mark.skipif( # type: ignore
platform.python_implementation() in ['Jython', 'PyPy'], reason='not available'
)
-def test_load_cyaml_1_2():
+def test_load_cyaml_1_2() -> None:
# issue 155
import ruamel.yaml
- if sys.version_info >= (3, 8):
+ if sys.version_info >= NO_CLIB_VER:
return
assert ruamel.yaml.__with_libyaml__
inp = dedent("""\
@@ -59,15 +60,15 @@ def test_load_cyaml_1_2():
yaml.load(inp)
-@pytest.mark.skipif(
+@pytest.mark.skipif( # type: ignore
platform.python_implementation() in ['Jython', 'PyPy'], reason='not available'
)
-def test_dump_cyaml_1_2():
+def test_dump_cyaml_1_2() -> None:
# issue 155
import ruamel.yaml
from ruamel.yaml.compat import StringIO
- if sys.version_info >= (3, 8):
+ if sys.version_info >= NO_CLIB_VER:
return
assert ruamel.yaml.__with_libyaml__
yaml = ruamel.yaml.YAML(typ='safe')
diff --git a/_test/test_datetime.py b/_test/test_datetime.py
index 5874c0d..bc86e74 100644
--- a/_test/test_datetime.py
+++ b/_test/test_datetime.py
@@ -20,13 +20,13 @@ Please note that a fraction can only be included if not equal to 0
"""
import copy
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NOQA
class TestDateTime:
- def test_date_only(self):
+ def test_date_only(self) -> None:
inp = """
- 2011-10-02
"""
@@ -35,7 +35,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_zero_fraction(self):
+ def test_zero_fraction(self) -> None:
inp = """
- 2011-10-02 16:45:00.0
"""
@@ -44,7 +44,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_long_fraction(self):
+ def test_long_fraction(self) -> None:
inp = """
- 2011-10-02 16:45:00.1234 # expand with zeros
- 2011-10-02 16:45:00.123456
@@ -61,7 +61,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_canonical(self):
+ def test_canonical(self) -> None:
inp = """
- 2011-10-02T16:45:00.1Z
"""
@@ -70,7 +70,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_spaced_timezone(self):
+ def test_spaced_timezone(self) -> None:
inp = """
- 2011-10-02T11:45:00 -5
"""
@@ -79,7 +79,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_normal_timezone(self):
+ def test_normal_timezone(self) -> None:
round_trip("""
- 2011-10-02T11:45:00-5
- 2011-10-02 11:45:00-5
@@ -87,7 +87,7 @@ class TestDateTime:
- 2011-10-02 11:45:00-05:00
""")
- def test_no_timezone(self):
+ def test_no_timezone(self) -> None:
inp = """
- 2011-10-02 6:45:00
"""
@@ -96,7 +96,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_explicit_T(self):
+ def test_explicit_T(self) -> None:
inp = """
- 2011-10-02T16:45:00
"""
@@ -105,7 +105,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_explicit_t(self): # to upper
+ def test_explicit_t(self) -> None: # to upper
inp = """
- 2011-10-02t16:45:00
"""
@@ -114,7 +114,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_no_T_multi_space(self):
+ def test_no_T_multi_space(self) -> None:
inp = """
- 2011-10-02 16:45:00
"""
@@ -123,25 +123,38 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_iso(self):
+ def test_iso(self) -> None:
round_trip("""
- 2011-10-02T15:45:00+01:00
""")
- def test_zero_tz(self):
+ def test_zero_tz(self) -> None:
round_trip("""
- 2011-10-02T15:45:00+0
""")
- def test_issue_45(self):
+ def test_issue_45(self) -> None:
round_trip("""
dt: 2016-08-19T22:45:47Z
""")
- def test_deepcopy_datestring(self):
+ def test_deepcopy_datestring(self) -> None:
# reported by Quuxplusone, http://stackoverflow.com/a/41577841/1307905
x = dedent("""\
foo: 2016-10-12T12:34:56
""")
data = copy.deepcopy(round_trip_load(x))
assert round_trip_dump(data) == x
+
+ def test_fraction_overflow(self) -> None:
+ # reported (indirectly) by Luís Ferreira
+ # https://sourceforge.net/p/ruamel-yaml/tickets/414/
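+ # fractions of a second are limited to microseconds; a seventh digit rounds, possibly into the next second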
+ inp = dedent("""\
+ - 2022-01-02T12:34:59.9999994
+ - 2022-01-02T12:34:59.9999995
+ """)
+ exp = dedent("""\
+ - 2022-01-02T12:34:59.999999
+ - 2022-01-02T12:35:00
+ """)
+ round_trip(inp, exp)
diff --git a/_test/test_deprecation.py b/_test/test_deprecation.py
index 14acd71..390c26c 100644
--- a/_test/test_deprecation.py
+++ b/_test/test_deprecation.py
@@ -1,13 +1,11 @@
# coding: utf-8
-from __future__ import print_function
-
import sys
-import pytest # NOQA
+import pytest # type:ignore # NOQA
-@pytest.mark.skipif(sys.version_info < (3, 7) or sys.version_info >= (3, 9),
+@pytest.mark.skipif(sys.version_info < (3, 7) or sys.version_info >= (3, 9), # type: ignore
reason='collections not available?')
-def test_collections_deprecation():
+def test_collections_deprecation() -> None:
with pytest.warns(DeprecationWarning):
- from collections import Hashable # NOQA
+ from collections import Hashable # type: ignore # NOQA
diff --git a/_test/test_documents.py b/_test/test_documents.py
index 05ba7dd..7c6e2e6 100644
--- a/_test/test_documents.py
+++ b/_test/test_documents.py
@@ -1,12 +1,12 @@
# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
-from roundtrip import round_trip, round_trip_load_all
+from roundtrip import round_trip, round_trip_load_all, round_trip_dump_all
class TestDocument:
- def test_single_doc_begin_end(self):
+ def test_single_doc_begin_end(self) -> None:
inp = """\
---
- a
@@ -15,9 +15,7 @@ class TestDocument:
"""
round_trip(inp, explicit_start=True, explicit_end=True)
- def test_multi_doc_begin_end(self):
- from ruamel import yaml
-
+ def test_multi_doc_begin_end(self) -> None:
inp = """\
---
- a
@@ -28,12 +26,10 @@ class TestDocument:
"""
docs = list(round_trip_load_all(inp))
assert docs == [['a'], ['b']]
- out = yaml.dump_all(
- docs, Dumper=yaml.RoundTripDumper, explicit_start=True, explicit_end=True
- )
+ out = round_trip_dump_all(docs, explicit_start=True, explicit_end=True)
assert out == '---\n- a\n...\n---\n- b\n...\n'
- def test_multi_doc_no_start(self):
+ def test_multi_doc_no_start(self) -> None:
inp = """\
- a
...
@@ -44,7 +40,7 @@ class TestDocument:
docs = list(round_trip_load_all(inp))
assert docs == [['a'], ['b']]
- def test_multi_doc_no_end(self):
+ def test_multi_doc_no_end(self) -> None:
inp = """\
- a
---
@@ -53,7 +49,7 @@ class TestDocument:
docs = list(round_trip_load_all(inp))
assert docs == [['a'], ['b']]
- def test_multi_doc_ends_only(self):
+ def test_multi_doc_ends_only(self) -> None:
# this is ok in 1.2
inp = """\
- a
@@ -64,7 +60,7 @@ class TestDocument:
docs = list(round_trip_load_all(inp, version=(1, 2)))
assert docs == [['a'], ['b']]
- def test_multi_doc_ends_only_1_1(self):
+ def test_multi_doc_ends_only_1_1(self) -> None:
from ruamel import yaml
# this is not ok in 1.1
diff --git a/_test/test_fail.py b/_test/test_fail.py
index 2f90112..7fbbd07 100644
--- a/_test/test_fail.py
+++ b/_test/test_fail.py
@@ -6,14 +6,14 @@
# once fixed in ruamel.yaml, move the marked test to the appropriate test (without the mark)
# and remove the xyz_no_fail
-import pytest
+import pytest # type: ignore
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump
class TestCommentFailures:
- @pytest.mark.xfail(strict=True)
- def test_set_comment_before_tag(self):
+ @pytest.mark.xfail(strict=True) # type: ignore
+ def test_set_comment_before_tag(self) -> None:
# no comments before tags
round_trip("""
# the beginning
@@ -26,7 +26,7 @@ class TestCommentFailures:
# this is the end
""")
- def test_set_comment_before_tag_no_fail(self):
+ def test_set_comment_before_tag_no_fail(self) -> None:
# no comments before tags
inp = """
# the beginning
@@ -48,15 +48,15 @@ class TestCommentFailures:
# this is the end
""")
- @pytest.mark.xfail(strict=True)
- def test_comment_dash_line(self):
+ @pytest.mark.xfail(strict=True) # type: ignore
+ def test_comment_dash_line(self) -> None:
round_trip("""
- # abc
a: 1
b: 2
""")
- def test_comment_dash_line_fail(self):
+ def test_comment_dash_line_fail(self) -> None:
x = """
- # abc
a: 1
@@ -72,8 +72,8 @@ class TestCommentFailures:
class TestIndentFailures:
- @pytest.mark.xfail(strict=True)
- def test_indent_not_retained(self):
+ @pytest.mark.xfail(strict=True) # type: ignore
+ def test_indent_not_retained(self) -> None:
round_trip("""
verbosity: 1 # 0 is minimal output, -1 none
base_url: http://gopher.net
@@ -97,7 +97,7 @@ class TestIndentFailures:
- too cold
""")
- def test_indent_not_retained_no_fail(self):
+ def test_indent_not_retained_no_fail(self) -> None:
inp = """
verbosity: 1 # 0 is minimal output, -1 none
base_url: http://gopher.net
@@ -143,7 +143,7 @@ class TestIndentFailures:
- too cold
""")
- def Xtest_indent_top_level_no_fail(self):
+ def Xtest_indent_top_level_no_fail(self) -> None:
inp = """
- a:
- b
@@ -152,8 +152,8 @@ class TestIndentFailures:
class TestTagFailures:
- @pytest.mark.xfail(strict=True)
- def test_standard_short_tag(self):
+ @pytest.mark.xfail(strict=True) # type: ignore
+ def test_standard_short_tag(self) -> None:
round_trip("""\
!!map
name: Anthon
@@ -161,7 +161,7 @@ class TestTagFailures:
language: python
""")
- def test_standard_short_tag_no_fail(self):
+ def test_standard_short_tag_no_fail(self) -> None:
inp = """
!!map
name: Anthon
@@ -177,13 +177,13 @@ class TestTagFailures:
class TestFlowValues:
- def test_flow_value_with_colon(self):
+ def test_flow_value_with_colon(self) -> None:
inp = """\
{a: bcd:efg}
"""
round_trip(inp)
- def test_flow_value_with_colon_quoted(self):
+ def test_flow_value_with_colon_quoted(self) -> None:
inp = """\
{a: 'bcd:efg'}
"""
@@ -191,13 +191,13 @@ class TestFlowValues:
class TestMappingKey:
- def test_simple_mapping_key(self):
+ def test_simple_mapping_key(self) -> None:
inp = """\
{a: 1, b: 2}: hello world
"""
round_trip(inp, preserve_quotes=True, dump_data=False)
- def test_set_simple_mapping_key(self):
+ def test_set_simple_mapping_key(self) -> None:
from ruamel.yaml.comments import CommentedKeyMap
d = {CommentedKeyMap([('a', 1), ('b', 2)]): 'hello world'}
@@ -206,7 +206,7 @@ class TestMappingKey:
""")
assert round_trip_dump(d) == exp
- def test_change_key_simple_mapping_key(self):
+ def test_change_key_simple_mapping_key(self) -> None:
from ruamel.yaml.comments import CommentedKeyMap
inp = """\
@@ -219,7 +219,7 @@ class TestMappingKey:
""")
assert round_trip_dump(d) == exp
- def test_change_value_simple_mapping_key(self):
+ def test_change_value_simple_mapping_key(self) -> None:
from ruamel.yaml.comments import CommentedKeyMap
inp = """\
diff --git a/_test/test_float.py b/_test/test_float.py
index c996efd..582ccf0 100644
--- a/_test/test_float.py
+++ b/_test/test_float.py
@@ -1,8 +1,6 @@
# coding: utf-8
-from __future__ import print_function, absolute_import, division, unicode_literals
-
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NOQA
@@ -10,7 +8,7 @@ from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NO
class TestFloat:
- def test_round_trip_non_exp(self):
+ def test_round_trip_non_exp(self) -> None:
data = round_trip("""\
- 1.0
- 1.00
@@ -24,6 +22,8 @@ class TestFloat:
- .5
- +.5
- -.5
+ - !!float '42'
+ - !!float '-42'
""")
print(data)
assert 0.999 < data[0] < 1.001
@@ -38,8 +38,10 @@ class TestFloat:
assert .49 < data[9] < .51
assert .49 < data[10] < .51
assert -.51 < data[11] < -.49
+ assert 41.99 < data[12] < 42.01
+ assert 41.99 < -data[13] < 42.01
- def test_round_trip_zeros_0(self):
+ def test_round_trip_zeros_0(self) -> None:
data = round_trip("""\
- 0.
- +0.
@@ -55,12 +57,13 @@ class TestFloat:
for d in data:
assert -0.00001 < d < 0.00001
- def Xtest_round_trip_non_exp_trailing_dot(self):
+ def test_round_trip_exp_trailing_dot(self) -> None:
data = round_trip("""\
+ - 3.e4
""")
print(data)
- def test_yaml_1_1_no_dot(self):
+ def test_yaml_1_1_no_dot(self) -> None:
from ruamel.yaml.error import MantissaNoDotYAML1_1Warning
with pytest.warns(MantissaNoDotYAML1_1Warning):
@@ -71,8 +74,8 @@ class TestFloat:
""")
-class TestCalculations(object):
- def test_mul_00(self):
+class TestCalculations:
+ def test_mul_00(self) -> None:
# issue 149 reported by jan.brezina@tul.cz
d = round_trip_load("""\
- 0.1
diff --git a/_test/test_flowsequencekey.py b/_test/test_flowsequencekey.py
index 96bee67..be70699 100644
--- a/_test/test_flowsequencekey.py
+++ b/_test/test_flowsequencekey.py
@@ -11,7 +11,7 @@ from roundtrip import round_trip # , dedent, round_trip_load, round_trip_dump
class TestFlowStyleSequenceKey:
- def test_so_39595807(self):
+ def test_so_39595807(self) -> None:
inp = """\
%YAML 1.2
---
diff --git a/_test/test_indentation.py b/_test/test_indentation.py
index 6321409..1f16cb2 100644
--- a/_test/test_indentation.py
+++ b/_test/test_indentation.py
@@ -1,32 +1,24 @@
# coding: utf-8
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
-
-import pytest # NOQA
+from typing import Any
+import pytest # type: ignore # NOQA
from roundtrip import round_trip, round_trip_load, round_trip_dump, dedent, YAML
-def rt(s):
- import ruamel.yaml
-
- res = ruamel.yaml.dump(
- ruamel.yaml.load(s, Loader=ruamel.yaml.RoundTripLoader),
- Dumper=ruamel.yaml.RoundTripDumper,
- )
+def rt(s: str) -> str:
+ res = round_trip_dump(round_trip_load(s))
+ assert res is not None
return res.strip() + '\n'
class TestIndent:
- def test_roundtrip_inline_list(self):
+ def test_roundtrip_inline_list(self) -> None:
s = 'a: [a, b, c]\n'
output = rt(s)
assert s == output
- def test_roundtrip_mapping_of_inline_lists(self):
+ def test_roundtrip_mapping_of_inline_lists(self) -> None:
s = dedent("""\
a: [a, b, c]
j: [k, l, m]
@@ -34,7 +26,7 @@ class TestIndent:
output = rt(s)
assert s == output
- def test_roundtrip_mapping_of_inline_lists_comments(self):
+ def test_roundtrip_mapping_of_inline_lists_comments(self) -> None:
s = dedent("""\
# comment A
a: [a, b, c]
@@ -44,7 +36,7 @@ class TestIndent:
output = rt(s)
assert s == output
- def test_roundtrip_mapping_of_inline_sequence_eol_comments(self):
+ def test_roundtrip_mapping_of_inline_sequence_eol_comments(self) -> None:
s = dedent("""\
# comment A
a: [a, b, c] # comment B
@@ -54,9 +46,7 @@ class TestIndent:
assert s == output
# first test by explicitly setting flow style
- def test_added_inline_list(self):
- import ruamel.yaml
-
+ def test_added_inline_list(self) -> None:
s1 = dedent("""
a:
- b
@@ -64,27 +54,25 @@ class TestIndent:
- d
""")
s = 'a: [b, c, d]\n'
- data = ruamel.yaml.load(s1, Loader=ruamel.yaml.RoundTripLoader)
+ data = round_trip_load(s1)
val = data['a']
val.fa.set_flow_style()
# print(type(val), '_yaml_format' in dir(val))
- output = ruamel.yaml.dump(data, Dumper=ruamel.yaml.RoundTripDumper)
+ output = round_trip_dump(data)
assert s == output
# ############ flow mappings
- def test_roundtrip_flow_mapping(self):
- import ruamel.yaml
-
+ def test_roundtrip_flow_mapping(self) -> None:
s = dedent("""\
- {a: 1, b: hallo}
- {j: fka, k: 42}
""")
- data = ruamel.yaml.load(s, Loader=ruamel.yaml.RoundTripLoader)
- output = ruamel.yaml.dump(data, Dumper=ruamel.yaml.RoundTripDumper)
+ data = round_trip_load(s)
+ output = round_trip_dump(data)
assert s == output
- def test_roundtrip_sequence_of_inline_mappings_eol_comments(self):
+ def test_roundtrip_sequence_of_inline_mappings_eol_comments(self) -> None:
s = dedent("""\
# comment A
- {a: 1, b: hallo} # comment B
@@ -93,14 +81,14 @@ class TestIndent:
output = rt(s)
assert s == output
- def test_indent_top_level(self):
+ def test_indent_top_level(self) -> None:
inp = """
- a:
- b
"""
round_trip(inp, indent=4)
- def test_set_indent_5_block_list_indent_1(self):
+ def test_set_indent_5_block_list_indent_1(self) -> None:
inp = """
a:
- b: c
@@ -110,7 +98,7 @@ class TestIndent:
"""
round_trip(inp, indent=5, block_seq_indent=1)
- def test_set_indent_4_block_list_indent_2(self):
+ def test_set_indent_4_block_list_indent_2(self) -> None:
inp = """
a:
- b: c
@@ -120,7 +108,7 @@ class TestIndent:
"""
round_trip(inp, indent=4, block_seq_indent=2)
- def test_set_indent_3_block_list_indent_0(self):
+ def test_set_indent_3_block_list_indent_0(self) -> None:
inp = """
a:
- b: c
@@ -130,7 +118,7 @@ class TestIndent:
"""
round_trip(inp, indent=3, block_seq_indent=0)
- def Xtest_set_indent_3_block_list_indent_2(self):
+ def Xtest_set_indent_3_block_list_indent_2(self) -> None:
inp = """
a:
-
@@ -144,7 +132,7 @@ class TestIndent:
"""
round_trip(inp, indent=3, block_seq_indent=2)
- def test_set_indent_3_block_list_indent_2(self):
+ def test_set_indent_3_block_list_indent_2(self) -> None:
inp = """
a:
- b: c
@@ -154,7 +142,7 @@ class TestIndent:
"""
round_trip(inp, indent=3, block_seq_indent=2)
- def Xtest_set_indent_2_block_list_indent_2(self):
+ def Xtest_set_indent_2_block_list_indent_2(self) -> None:
inp = """
a:
-
@@ -169,7 +157,7 @@ class TestIndent:
round_trip(inp, indent=2, block_seq_indent=2)
# this is how it should be: block_seq_indent stretches the indent
- def test_set_indent_2_block_list_indent_2(self):
+ def test_set_indent_2_block_list_indent_2(self) -> None:
inp = """
a:
- b: c
@@ -180,7 +168,7 @@ class TestIndent:
round_trip(inp, indent=2, block_seq_indent=2)
# have to set indent!
- def test_roundtrip_four_space_indents(self):
+ def test_roundtrip_four_space_indents(self) -> None:
# fmt: off
s = (
'a:\n'
@@ -190,7 +178,7 @@ class TestIndent:
# fmt: on
round_trip(s, indent=4)
- def test_roundtrip_four_space_indents_no_fail(self):
+ def test_roundtrip_four_space_indents_no_fail(self) -> None:
inp = """
a:
- foo
@@ -205,7 +193,7 @@ class TestIndent:
class TestYpkgIndent:
- def test_00(self):
+ def test_00(self) -> None:
inp = """
name : nano
version : 2.3.2
@@ -227,7 +215,7 @@ class TestYpkgIndent:
)
-def guess(s):
+def guess(s: str) -> Any:
from ruamel.yaml.util import load_yaml_guess_indent
x, y, z = load_yaml_guess_indent(dedent(s))
@@ -235,21 +223,21 @@ def guess(s):
class TestGuessIndent:
- def test_guess_20(self):
+ def test_guess_20(self) -> None:
inp = """\
a:
- 1
"""
assert guess(inp) == (2, 0)
- def test_guess_42(self):
+ def test_guess_42(self) -> None:
inp = """\
a:
- 1
"""
assert guess(inp) == (4, 2)
- def test_guess_42a(self):
+ def test_guess_42a(self) -> None:
# block seq indent prevails over nested key indent level
inp = """\
b:
@@ -258,7 +246,7 @@ class TestGuessIndent:
"""
assert guess(inp) == (4, 2)
- def test_guess_3None(self):
+ def test_guess_3None(self) -> None:
inp = """\
b:
a: 1
@@ -269,7 +257,7 @@ class TestGuessIndent:
class TestSeparateMapSeqIndents:
# using an uncommon indent of 6 with an offset ('push in') of 3, as an offset of 2 automatically
# gets you an indent of 4 even if it is not set
- def test_00(self):
+ def test_00(self) -> None:
# old style
yaml = YAML()
yaml.indent = 6
@@ -281,7 +269,7 @@ class TestSeparateMapSeqIndents:
"""
yaml.round_trip(inp)
- def test_01(self):
+ def test_01(self) -> None:
yaml = YAML()
yaml.indent(sequence=6)
yaml.indent(offset=3)
@@ -292,7 +280,7 @@ class TestSeparateMapSeqIndents:
"""
yaml.round_trip(inp)
- def test_02(self):
+ def test_02(self) -> None:
yaml = YAML()
yaml.indent(mapping=5, sequence=6, offset=3)
inp = """
@@ -303,7 +291,7 @@ class TestSeparateMapSeqIndents:
"""
yaml.round_trip(inp)
- def test_03(self):
+ def test_03(self) -> None:
inp = """
a:
b:
@@ -313,7 +301,7 @@ class TestSeparateMapSeqIndents:
"""
round_trip(inp, indent=4)
- def test_04(self):
+ def test_04(self) -> None:
yaml = YAML()
yaml.indent(mapping=5, sequence=6)
inp = """
@@ -325,7 +313,7 @@ class TestSeparateMapSeqIndents:
"""
yaml.round_trip(inp)
- def test_issue_51(self):
+ def test_issue_51(self) -> None:
yaml = YAML()
# yaml.map_indent = 2 # the default
yaml.indent(sequence=4, offset=2)
diff --git a/_test/test_int.py b/_test/test_int.py
index 4e7de6b..92fb92a 100644
--- a/_test/test_int.py
+++ b/_test/test_int.py
@@ -1,8 +1,6 @@
# coding: utf-8
-from __future__ import print_function, absolute_import, division, unicode_literals
-
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import dedent, round_trip_load, round_trip_dump
@@ -10,7 +8,7 @@ from roundtrip import dedent, round_trip_load, round_trip_dump
class TestBinHexOct:
- def test_calculate(self):
+ def test_calculate(self) -> None:
# make sure type, leading zero(s) and underscore are preserved
s = dedent("""\
- 42
diff --git a/_test/test_issues.py b/_test/test_issues.py
index 673ba45..d722b74 100644
--- a/_test/test_issues.py
+++ b/_test/test_issues.py
@@ -1,9 +1,8 @@
# coding: utf-8
-from __future__ import absolute_import, print_function, unicode_literals
+from typing import Any
-
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import (
@@ -18,9 +17,7 @@ from roundtrip import (
class TestIssues:
- def test_issue_61(self):
- import ruamel.yaml
-
+ def test_issue_61(self) -> None:
s = dedent("""
def1: &ANCHOR1
key1: value1
@@ -30,62 +27,58 @@ class TestIssues:
comb:
<<: *ANCHOR
""")
- data = ruamel.yaml.round_trip_load(s)
+ data = round_trip_load(s)
assert str(data['comb']) == str(data['def'])
- assert str(data['comb']) == "ordereddict([('key', 'value'), ('key1', 'value1')])"
-
- def test_issue_82(self, tmpdir):
- program_src = r'''
- from __future__ import print_function
-
- from ruamel import yaml
-
- import re
-
-
- class SINumber(yaml.YAMLObject):
- PREFIXES = {'k': 1e3, 'M': 1e6, 'G': 1e9}
- yaml_loader = yaml.Loader
- yaml_dumper = yaml.Dumper
- yaml_tag = u'!si'
- yaml_implicit_pattern = re.compile(
- r'^(?P<value>[0-9]+(?:\.[0-9]+)?)(?P<prefix>[kMG])$')
-
- @classmethod
- def from_yaml(cls, loader, node):
- return cls(node.value)
-
- @classmethod
- def to_yaml(cls, dumper, data):
- return dumper.represent_scalar(cls.yaml_tag, str(data))
-
- def __init__(self, *args):
- m = self.yaml_implicit_pattern.match(args[0])
- self.value = float(m.groupdict()['value'])
- self.prefix = m.groupdict()['prefix']
-
- def __str__(self):
- return str(self.value)+self.prefix
-
- def __int__(self):
- return int(self.value*self.PREFIXES[self.prefix])
-
- # This fails:
- yaml.add_implicit_resolver(SINumber.yaml_tag, SINumber.yaml_implicit_pattern)
-
- ret = yaml.load("""
- [1,2,3, !si 10k, 100G]
- """, Loader=yaml.Loader)
- for idx, l in enumerate([1, 2, 3, 10000, 100000000000]):
- assert int(ret[idx]) == l
- '''
- assert save_and_run(dedent(program_src), tmpdir) == 0
-
- def test_issue_82rt(self, tmpdir):
+ assert str(data['comb']) == "{'key': 'value', 'key1': 'value1'}"
+
+# def test_issue_82(self, tmpdir):
+# program_src = r'''
+# from ruamel import yaml
+# import re
+#
+# class SINumber(yaml.YAMLObject):
+# PREFIXES = {'k': 1e3, 'M': 1e6, 'G': 1e9}
+# yaml_loader = yaml.Loader
+# yaml_dumper = yaml.Dumper
+# yaml_tag = '!si'
+# yaml_implicit_pattern = re.compile(
+# r'^(?P<value>[0-9]+(?:\.[0-9]+)?)(?P<prefix>[kMG])$')
+#
+# @classmethod
+# def from_yaml(cls, loader, node):
+# return cls(node.value)
+#
+# @classmethod
+# def to_yaml(cls, dumper, data):
+# return dumper.represent_scalar(cls.yaml_tag, str(data))
+#
+# def __init__(self, *args):
+# m = self.yaml_implicit_pattern.match(args[0])
+# self.value = float(m.groupdict()['value'])
+# self.prefix = m.groupdict()['prefix']
+#
+# def __str__(self) -> str:
+# return str(self.value)+self.prefix
+#
+# def __int__(self) -> int:
+# return int(self.value*self.PREFIXES[self.prefix])
+#
+# # This fails:
+# yaml.add_implicit_resolver(SINumber.yaml_tag, SINumber.yaml_implicit_pattern)
+#
+# ret = yaml.load("""
+# [1,2,3, !si 10k, 100G]
+# """, Loader=yaml.Loader)
+# for idx, l in enumerate([1, 2, 3, 10000, 100000000000]):
+# assert int(ret[idx]) == l
+# '''
+# assert save_and_run(dedent(program_src), tmpdir) == 0
+
+ def test_issue_82rt(self, tmpdir: Any) -> None:
yaml_str = '[1, 2, 3, !si 10k, 100G]\n'
x = round_trip(yaml_str, preserve_quotes=True) # NOQA
- def test_issue_102(self):
+ def test_issue_102(self) -> None:
yaml_str = dedent("""
var1: #empty
var2: something #notempty
@@ -95,8 +88,8 @@ class TestIssues:
""")
x = round_trip(yaml_str, preserve_quotes=True) # NOQA
- def test_issue_150(self):
- from ruamel.yaml import YAML
+ def test_issue_150(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
inp = """\
base: &base_key
@@ -112,7 +105,7 @@ class TestIssues:
child = data['child']
assert 'second' in dict(**child)
- def test_issue_160(self):
+ def test_issue_160(self) -> None:
from ruamel.yaml.compat import StringIO
s = dedent("""\
root:
@@ -136,7 +129,7 @@ class TestIssues:
""")
assert buf.getvalue() == exp
- def test_issue_161(self):
+ def test_issue_161(self) -> None:
yaml_str = dedent("""\
mapping-A:
key-A:{}
@@ -146,7 +139,7 @@ class TestIssues:
s = yaml_str.format(comment)
res = round_trip(s) # NOQA
- def test_issue_161a(self):
+ def test_issue_161a(self) -> None:
yaml_str = dedent("""\
mapping-A:
key-A:{}
@@ -156,7 +149,7 @@ class TestIssues:
s = yaml_str.format(comment)
res = round_trip(s) # NOQA
- def test_issue_163(self):
+ def test_issue_163(self) -> None:
s = dedent("""\
some-list:
# List comment
@@ -172,19 +165,19 @@ class TestIssues:
json_str2 = '{"abc":[{"a":"1", "uses":0}]}'
- def test_issue_172(self):
+ def test_issue_172(self) -> None:
x = round_trip_load(TestIssues.json_str2) # NOQA
x = round_trip_load(TestIssues.json_str) # NOQA
- def test_issue_176(self):
+ def test_issue_176(self) -> None:
# basic request by Stuart Berg
- from ruamel.yaml import YAML
+ from ruamel.yaml import YAML # type: ignore
yaml = YAML()
seq = yaml.load('[1,2,3]')
seq[:] = [1, 2, 3, 4]
- def test_issue_176_preserve_comments_on_extended_slice_assignment(self):
+ def test_issue_176_preserve_comments_on_extended_slice_assignment(self) -> None:
yaml_str = dedent("""\
- a
- b # comment
@@ -199,9 +192,7 @@ class TestIssues:
res = round_trip_dump(seq)
assert res == yaml_str.replace(' b ', ' B ').replace(' d\n', ' D\n')
- def test_issue_176_test_slicing(self):
- from ruamel.yaml.compat import PY2
-
+ def test_issue_176_test_slicing(self) -> None:
mss = round_trip_load('[0, 1, 2, 3, 4]')
assert len(mss) == 5
assert mss[2:2] == []
@@ -231,18 +222,10 @@ class TestIssues:
m[1::2] = [42, 43]
assert m == [0, 42, 2, 43, 4]
m = mss[:]
- if PY2:
- with pytest.raises(ValueError, match='attempt to assign'):
- m[1::2] = [42, 43, 44]
- else:
- with pytest.raises(TypeError, match='too many'):
- m[1::2] = [42, 43, 44]
- if PY2:
- with pytest.raises(ValueError, match='attempt to assign'):
- m[1::2] = [42]
- else:
- with pytest.raises(TypeError, match='not enough'):
- m[1::2] = [42]
+ with pytest.raises(TypeError, match='too many'):
+ m[1::2] = [42, 43, 44]
+ with pytest.raises(TypeError, match='not enough'):
+ m[1::2] = [42]
m = mss[:]
m += [5]
m[1::2] = [42, 43, 44]
@@ -259,7 +242,7 @@ class TestIssues:
del m[:]
assert m == []
- def test_issue_184(self):
+ def test_issue_184(self) -> None:
yaml_str = dedent("""\
test::test:
# test
@@ -271,24 +254,24 @@ class TestIssues:
d.yaml_add_eol_comment('test1', 'bar')
assert round_trip_dump(d) == yaml_str + 'bar: foo # test1\n'
- def test_issue_219(self):
+ def test_issue_219(self) -> None:
yaml_str = dedent("""\
[StackName: AWS::StackName]
""")
d = round_trip_load(yaml_str) # NOQA
- def test_issue_219a(self):
+ def test_issue_219a(self) -> None:
yaml_str = dedent("""\
[StackName:
AWS::StackName]
""")
d = round_trip_load(yaml_str) # NOQA
- def test_issue_220(self, tmpdir):
+ def test_issue_220(self, tmpdir: Any) -> None:
program_src = r'''
from ruamel.yaml import YAML
- yaml_str = u"""\
+ yaml_str = """\
---
foo: ["bar"]
"""
@@ -299,14 +282,14 @@ class TestIssues:
'''
assert save_and_run(dedent(program_src), tmpdir, optimized=True) == 0
- def test_issue_221_add(self):
+ def test_issue_221_add(self) -> None:
from ruamel.yaml.comments import CommentedSeq
a = CommentedSeq([1, 2, 3])
a + [4, 5]
- def test_issue_221_sort(self):
- from ruamel.yaml import YAML
+ def test_issue_221_sort(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.compat import StringIO
yaml = YAML()
@@ -330,8 +313,8 @@ class TestIssues:
""")
assert buf.getvalue() == exp
- def test_issue_221_sort_reverse(self):
- from ruamel.yaml import YAML
+ def test_issue_221_sort_reverse(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.compat import StringIO
yaml = YAML()
@@ -355,8 +338,8 @@ class TestIssues:
""")
assert buf.getvalue() == exp
- def test_issue_221_sort_key(self):
- from ruamel.yaml import YAML
+ def test_issue_221_sort_key(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.compat import StringIO
yaml = YAML()
@@ -380,8 +363,8 @@ class TestIssues:
""")
assert buf.getvalue() == exp
- def test_issue_221_sort_key_reverse(self):
- from ruamel.yaml import YAML
+ def test_issue_221_sort_key_reverse(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.compat import StringIO
yaml = YAML()
@@ -405,47 +388,49 @@ class TestIssues:
""")
assert buf.getvalue() == exp
- def test_issue_222(self):
+ def test_issue_222(self) -> None:
import ruamel.yaml
from ruamel.yaml.compat import StringIO
+ yaml = ruamel.yaml.YAML(typ='safe')
buf = StringIO()
- ruamel.yaml.safe_dump(['012923'], buf)
+ yaml.dump(['012923'], buf)
assert buf.getvalue() == "['012923']\n"
- def test_issue_223(self):
+ def test_issue_223(self) -> None:
import ruamel.yaml
yaml = ruamel.yaml.YAML(typ='safe')
yaml.load('phone: 0123456789')
- def test_issue_232(self):
+ def test_issue_232(self) -> None:
import ruamel.yaml
- from ruamel import yaml
+
+ yaml = YAML(typ='safe', pure=True)
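+ # pure=True forces the pure Python parser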
with pytest.raises(ruamel.yaml.parser.ParserError):
- yaml.safe_load(']')
+ yaml.load(']')
with pytest.raises(ruamel.yaml.parser.ParserError):
- yaml.safe_load('{]')
+ yaml.load('{]')
- def test_issue_233(self):
- from ruamel.yaml import YAML
+ def test_issue_233(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
import json
yaml = YAML()
data = yaml.load('{}')
json_str = json.dumps(data) # NOQA
- def test_issue_233a(self):
- from ruamel.yaml import YAML
+ def test_issue_233a(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
import json
yaml = YAML()
data = yaml.load('[]')
json_str = json.dumps(data) # NOQA
- def test_issue_234(self):
- from ruamel.yaml import YAML
+ def test_issue_234(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
inp = dedent("""\
- key: key1
@@ -461,7 +446,7 @@ class TestIssues:
print(repr(fold))
assert '\a' not in fold
- def test_issue_236(self):
+ def test_issue_236(self) -> None:
inp = """
conf:
xx: {a: "b", c: []}
@@ -469,7 +454,7 @@ class TestIssues:
"""
d = round_trip(inp, preserve_quotes=True) # NOQA
- def test_issue_238(self, tmpdir):
+ def test_issue_238(self, tmpdir: Any) -> None:
program_src = r"""
import ruamel.yaml
from ruamel.yaml.compat import StringIO
@@ -501,7 +486,7 @@ class TestIssues:
"""
assert save_and_run(dedent(program_src), tmpdir) == 0
- def test_issue_239(self):
+ def test_issue_239(self) -> None:
inp = """
first_name: Art
occupation: Architect
@@ -521,14 +506,14 @@ class TestIssues:
"""
d = YAML().round_trip_all(inp) # NOQA
- def test_issue_242(self):
+ def test_issue_242(self) -> None:
from ruamel.yaml.comments import CommentedMap
d0 = CommentedMap([('a', 'b')])
assert d0['a'] == 'b'
- def test_issue_245(self):
- from ruamel.yaml import YAML
+ def test_issue_245(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
inp = """
d: yes
"""
@@ -545,7 +530,7 @@ class TestIssues:
print(typ, yaml.parser, yaml.resolver)
assert d['d'] is True
- def test_issue_249(self):
+ def test_issue_249(self) -> None:
yaml = YAML()
inp = dedent("""\
# comment
@@ -562,7 +547,7 @@ class TestIssues:
""")
yaml.round_trip(inp, outp=exp) # NOQA
- def test_issue_250(self):
+ def test_issue_250(self) -> None:
inp = """
# 1.
- - 1
@@ -574,8 +559,8 @@ class TestIssues:
d = round_trip(inp) # NOQA
# @pytest.mark.xfail(strict=True, reason='bla bla', raises=AssertionError)
- def test_issue_279(self):
- from ruamel.yaml import YAML
+ def test_issue_279(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.compat import StringIO
yaml = YAML()
@@ -593,8 +578,8 @@ class TestIssues:
print(buf.getvalue())
assert buf.getvalue() == inp
- def test_issue_280(self):
- from ruamel.yaml import YAML
+ def test_issue_280(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.representer import RepresenterError
from collections import namedtuple
from sys import stdout
@@ -604,7 +589,7 @@ class TestIssues:
with pytest.raises(RepresenterError, match='cannot represent'):
yaml.dump({'t': t}, stdout)
- def test_issue_282(self):
+ def test_issue_282(self) -> None:
# update from list of tuples caused AttributeError
import ruamel.yaml
yaml_data = ruamel.yaml.comments.CommentedMap([('a', 'apple'), ('b', 'banana')])
@@ -613,7 +598,7 @@ class TestIssues:
assert 'c' in yaml_data.keys()
assert 'c' in yaml_data._ok
- def test_issue_284(self):
+ def test_issue_284(self) -> None:
import ruamel.yaml
inp = dedent("""\
plain key: in-line value
@@ -631,8 +616,8 @@ class TestIssues:
with pytest.raises(ruamel.yaml.parser.ParserError, match='expected <block end>'):
d = yaml.load(inp)
- def test_issue_285(self):
- from ruamel.yaml import YAML
+ def test_issue_285(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
yaml = YAML()
inp = dedent("""\
@@ -649,8 +634,8 @@ class TestIssues:
assert not a[1]
assert not a[3]
- def test_issue_286(self):
- from ruamel.yaml import YAML
+ def test_issue_286(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.compat import StringIO
yaml = YAML()
@@ -665,10 +650,10 @@ class TestIssues:
yaml.dump(a, buf)
assert buf.getvalue().endswith('xxx\nnew_key: new_value\n')
- def test_issue_288(self):
+ def test_issue_288(self) -> None:
import sys
from ruamel.yaml.compat import StringIO
- from ruamel.yaml import YAML
+ from ruamel.yaml import YAML # type: ignore
yamldoc = dedent("""\
---
@@ -696,10 +681,10 @@ class TestIssues:
yaml.dump(data, buf)
assert buf.getvalue() == yamldoc
- def test_issue_288a(self):
+ def test_issue_288a(self) -> None:
import sys
from ruamel.yaml.compat import StringIO
- from ruamel.yaml import YAML
+ from ruamel.yaml import YAML # type: ignore
yamldoc = dedent("""\
---
@@ -727,10 +712,10 @@ class TestIssues:
yaml.dump(data, buf)
assert buf.getvalue() == yamldoc
- def test_issue_290(self):
+ def test_issue_290(self) -> None:
import sys
from ruamel.yaml.compat import StringIO
- from ruamel.yaml import YAML
+ from ruamel.yaml import YAML # type: ignore
yamldoc = dedent("""\
---
@@ -763,10 +748,10 @@ class TestIssues:
yaml.dump(data, buf)
assert buf.getvalue() == yamldoc
- def test_issue_290a(self):
+ def test_issue_290a(self) -> None:
import sys
from ruamel.yaml.compat import StringIO
- from ruamel.yaml import YAML
+ from ruamel.yaml import YAML # type: ignore
yamldoc = dedent("""\
---
@@ -800,7 +785,7 @@ class TestIssues:
assert buf.getvalue() == yamldoc
# @pytest.mark.xfail(strict=True, reason='should fail pre 0.15.100', raises=AssertionError)
- def test_issue_295(self):
+ def test_issue_295(self) -> None:
# deepcopy also makes a copy of the start and end mark, and these did not
# have any comparison beyond their ID, which of course changed, breaking
# some old merge_comment code
@@ -825,8 +810,8 @@ class TestIssues:
dc = copy.deepcopy(data)
assert round_trip_dump(dc) == inp
- def test_issue_300(self):
- from ruamel.yaml import YAML
+ def test_issue_300(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
inp = dedent("""
%YAML 1.2
@@ -836,7 +821,7 @@ class TestIssues:
""")
YAML().load(inp)
- def test_issue_300a(self):
+ def test_issue_300a(self) -> None:
import ruamel.yaml
inp = dedent("""
@@ -850,7 +835,7 @@ class TestIssues:
match='while scanning a directive'):
yaml.load(inp)
- def test_issue_304(self):
+ def test_issue_304(self) -> None:
inp = """
%YAML 1.2
%TAG ! tag:example.com,2019:
@@ -860,7 +845,7 @@ class TestIssues:
"""
d = na_round_trip(inp) # NOQA
- def test_issue_305(self):
+ def test_issue_305(self) -> None:
inp = """
%YAML 1.2
---
@@ -869,7 +854,7 @@ class TestIssues:
"""
d = na_round_trip(inp) # NOQA
- def test_issue_307(self):
+ def test_issue_307(self) -> None:
inp = """
%YAML 1.2
%TAG ! tag:example.com,2019/path#
@@ -879,8 +864,14 @@ class TestIssues:
"""
d = na_round_trip(inp) # NOQA
+ def test_issue_449(self) -> None:
+ inp = """\
+ emoji_index: !!python/name:materialx.emoji.twemoji
+ """
+ d = na_round_trip(inp) # NOQA
+
# @pytest.mark.xfail(strict=True, reason='bla bla', raises=AssertionError)
-# def test_issue_ xxx(self):
+# def test_issue_ xxx(self) -> None:
# inp = """
# """
# d = round_trip(inp) # NOQA
diff --git a/_test/test_json_numbers.py b/_test/test_json_numbers.py
index 56b7b6f..08f39d0 100644
--- a/_test/test_json_numbers.py
+++ b/_test/test_json_numbers.py
@@ -1,23 +1,24 @@
# coding: utf-8
-from __future__ import print_function
-
-import pytest # NOQA
+import pytest # type: ignore # NOQA
import json
+from typing import Any
+
-def load(s, typ=float):
+def load(s: str, typ: Any = float) -> float:
import ruamel.yaml
+ yaml = ruamel.yaml.YAML()
x = '{"low": %s }' % (s)
print('input: [%s]' % (s), repr(x))
# just to check it is loadable json
res = json.loads(x)
assert isinstance(res['low'], typ)
- ret_val = ruamel.yaml.load(x, ruamel.yaml.RoundTripLoader)
+ ret_val = yaml.load(x)
print(ret_val)
- return ret_val['low']
+ return ret_val['low'] # type: ignore
class TestJSONNumbers:
@@ -27,7 +28,7 @@ class TestJSONNumbers:
# -? [1-9] ( \. [0-9]* [1-9] )? ( e [-+] [1-9] [0-9]* )?
#
# which is not a superset of the JSON numbers
- def test_json_number_float(self):
+ def test_json_number_float(self) -> None:
for x in (
y.split('#')[0].strip()
for y in """
@@ -44,7 +45,7 @@ class TestJSONNumbers:
res = load(x)
assert isinstance(res, float)
- def test_json_number_int(self):
+ def test_json_number_int(self) -> None:
for x in (
y.split('#')[0].strip()
for y in """
diff --git a/_test/test_line_col.py b/_test/test_line_col.py
index febe9c2..5ba125e 100644
--- a/_test/test_line_col.py
+++ b/_test/test_line_col.py
@@ -1,16 +1,18 @@
# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NOQA
+from typing import Any
-def load(s):
+
+def load(s: str) -> Any:
return round_trip_load(dedent(s))
class TestLineCol:
- def test_item_00(self):
+ def test_item_00(self) -> None:
data = load("""
- a
- e
@@ -20,7 +22,7 @@ class TestLineCol:
assert data[2].lc.line == 2
assert data[2].lc.col == 2
- def test_item_01(self):
+ def test_item_01(self) -> None:
data = load("""
- a
- e
@@ -30,7 +32,7 @@ class TestLineCol:
assert data[2].lc.line == 2
assert data[2].lc.col == 2
- def test_item_02(self):
+ def test_item_02(self) -> None:
data = load("""
- a
- e
@@ -40,7 +42,7 @@ class TestLineCol:
assert data[2].lc.line == 2
assert data[2].lc.col == 2
- def test_item_03(self):
+ def test_item_03(self) -> None:
data = load("""
- a
- e
@@ -52,7 +54,7 @@ class TestLineCol:
assert data[2].lc.line == 2
assert data[2].lc.col == 2
- def test_item_04(self):
+ def test_item_04(self) -> None:
data = load("""
# testing line and column based on SO
# http://stackoverflow.com/questions/13319067/
@@ -66,7 +68,7 @@ class TestLineCol:
assert data[1].lc.line == 4
assert data[1].lc.col == 2
- def test_pos_mapping(self):
+ def test_pos_mapping(self) -> None:
data = load("""
a: 1
b: 2
@@ -78,7 +80,7 @@ class TestLineCol:
assert data.lc.key('klm') == (4, 0)
assert data.lc.value('klm') == (4, 5)
- def test_pos_sequence(self):
+ def test_pos_sequence(self) -> None:
data = load("""
- a
- b
diff --git a/_test/test_literal.py b/_test/test_literal.py
index 20eb8e9..0cf34bc 100644
--- a/_test/test_literal.py
+++ b/_test/test_literal.py
@@ -1,7 +1,6 @@
# coding: utf-8
-from __future__ import print_function
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import YAML # does an automatic dedent on load
@@ -28,7 +27,7 @@ YAML 1.2 is again clear about root literal level scalar after directive in examp
class TestNoIndent:
- def test_root_literal_scalar_indent_example_9_5(self):
+ def test_root_literal_scalar_indent_example_9_5(self) -> None:
yaml = YAML()
s = '%!PS-Adobe-2.0'
inp = """
@@ -39,7 +38,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_literal_scalar_no_indent(self):
+ def test_root_literal_scalar_no_indent(self) -> None:
yaml = YAML()
s = 'testing123'
inp = """
@@ -50,7 +49,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_literal_scalar_no_indent_1_1(self):
+ def test_root_literal_scalar_no_indent_1_1(self) -> None:
yaml = YAML()
s = 'testing123'
inp = """
@@ -62,21 +61,22 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_literal_scalar_no_indent_1_1_old_style(self):
+ def test_root_literal_scalar_no_indent_1_1_old_style(self) -> None:
from textwrap import dedent
- from ruamel.yaml import safe_load
+ from ruamel.yaml import YAML # type: ignore
+ yaml = YAML(typ='safe', pure=True)
s = 'testing123'
inp = """
%YAML 1.1
--- |
{}
"""
- d = safe_load(dedent(inp.format(s)))
+ d = yaml.load(dedent(inp.format(s)))
print(d)
assert d == s + '\n'
- def test_root_literal_scalar_no_indent_1_1_no_raise(self):
+ def test_root_literal_scalar_no_indent_1_1_no_raise(self) -> None:
# from ruamel.yaml.parser import ParserError
yaml = YAML()
@@ -91,7 +91,7 @@ class TestNoIndent:
"""
yaml.load(inp.format(s))
- def test_root_literal_scalar_indent_offset_one(self):
+ def test_root_literal_scalar_indent_offset_one(self) -> None:
yaml = YAML()
s = 'testing123'
inp = """
@@ -102,7 +102,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_literal_scalar_indent_offset_four(self):
+ def test_root_literal_scalar_indent_offset_four(self) -> None:
yaml = YAML()
s = 'testing123'
inp = """
@@ -113,7 +113,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_literal_scalar_indent_offset_two_leading_space(self):
+ def test_root_literal_scalar_indent_offset_two_leading_space(self) -> None:
yaml = YAML()
s = ' testing123'
inp = """
@@ -125,7 +125,7 @@ class TestNoIndent:
print(d)
assert d == (s + '\n') * 2
- def test_root_literal_scalar_no_indent_special(self):
+ def test_root_literal_scalar_no_indent_special(self) -> None:
yaml = YAML()
s = '%!PS-Adobe-2.0'
inp = """
@@ -136,7 +136,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_folding_scalar_indent(self):
+ def test_root_folding_scalar_indent(self) -> None:
yaml = YAML()
s = '%!PS-Adobe-2.0'
inp = """
@@ -147,7 +147,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_folding_scalar_no_indent(self):
+ def test_root_folding_scalar_no_indent(self) -> None:
yaml = YAML()
s = 'testing123'
inp = """
@@ -158,7 +158,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_folding_scalar_no_indent_special(self):
+ def test_root_folding_scalar_no_indent_special(self) -> None:
yaml = YAML()
s = '%!PS-Adobe-2.0'
inp = """
@@ -169,7 +169,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_literal_multi_doc(self):
+ def test_root_literal_multi_doc(self) -> None:
yaml = YAML(typ='safe', pure=True)
s1 = 'abc'
s2 = 'klm'
@@ -183,7 +183,7 @@ class TestNoIndent:
print('d1:', d1)
assert ['abc', 'klm\n'][idx] == d1
- def test_root_literal_doc_indent_directives_end(self):
+ def test_root_literal_doc_indent_directives_end(self) -> None:
yaml = YAML()
yaml.explicit_start = True
inp = """
@@ -194,7 +194,7 @@ class TestNoIndent:
"""
yaml.round_trip(inp)
- def test_root_literal_doc_indent_document_end(self):
+ def test_root_literal_doc_indent_document_end(self) -> None:
yaml = YAML()
yaml.explicit_start = True
inp = """
@@ -205,7 +205,7 @@ class TestNoIndent:
"""
yaml.round_trip(inp)
- def test_root_literal_doc_indent_marker(self):
+ def test_root_literal_doc_indent_marker(self) -> None:
yaml = YAML()
yaml.explicit_start = True
inp = """
@@ -217,7 +217,7 @@ class TestNoIndent:
print(type(d), repr(d))
yaml.round_trip(inp)
- def test_nested_literal_doc_indent_marker(self):
+ def test_nested_literal_doc_indent_marker(self) -> None:
yaml = YAML()
yaml.explicit_start = True
inp = """
@@ -232,7 +232,7 @@ class TestNoIndent:
class Test_RoundTripLiteral:
- def test_rt_root_literal_scalar_no_indent(self):
+ def test_rt_root_literal_scalar_no_indent(self) -> None:
yaml = YAML()
yaml.explicit_start = True
s = 'testing123'
@@ -244,7 +244,7 @@ class Test_RoundTripLiteral:
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_rt_root_literal_scalar_indent(self):
+ def test_rt_root_literal_scalar_indent(self) -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent = 4
@@ -257,7 +257,7 @@ class Test_RoundTripLiteral:
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_rt_root_plain_scalar_no_indent(self):
+ def test_rt_root_plain_scalar_no_indent(self) -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent = 0
@@ -270,7 +270,7 @@ class Test_RoundTripLiteral:
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_rt_root_plain_scalar_expl_indent(self):
+ def test_rt_root_plain_scalar_expl_indent(self) -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent = 4
@@ -283,7 +283,7 @@ class Test_RoundTripLiteral:
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_rt_root_sq_scalar_expl_indent(self):
+ def test_rt_root_sq_scalar_expl_indent(self) -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent = 4
@@ -296,7 +296,7 @@ class Test_RoundTripLiteral:
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_rt_root_dq_scalar_expl_indent(self):
+ def test_rt_root_dq_scalar_expl_indent(self) -> None:
# if yaml.indent is the default (None)
# then write after the directive indicator
yaml = YAML()
@@ -311,7 +311,7 @@ class Test_RoundTripLiteral:
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_rt_root_literal_scalar_no_indent_no_eol(self):
+ def test_rt_root_literal_scalar_no_indent_no_eol(self) -> None:
yaml = YAML()
yaml.explicit_start = True
s = 'testing123'
@@ -323,7 +323,7 @@ class Test_RoundTripLiteral:
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_rt_non_root_literal_scalar(self):
+ def test_rt_non_root_literal_scalar(self) -> None:
yaml = YAML()
s = 'testing123'
ys = """
diff --git a/_test/test_none.py b/_test/test_none.py
index e313edc..e11de17 100644
--- a/_test/test_none.py
+++ b/_test/test_none.py
@@ -1,51 +1,41 @@
# coding: utf-8
-
-import pytest # NOQA
+import pytest # type: ignore # NOQA
+from roundtrip import round_trip_load, round_trip_dump
class TestNone:
- def test_dump00(self):
- import ruamel.yaml # NOQA
-
+ def test_dump00(self) -> None:
data = None
- s = ruamel.yaml.round_trip_dump(data)
+ s = round_trip_dump(data)
assert s == 'null\n...\n'
- d = ruamel.yaml.round_trip_load(s)
+ d = round_trip_load(s)
assert d == data
- def test_dump01(self):
- import ruamel.yaml # NOQA
-
+ def test_dump01(self) -> None:
data = None
- s = ruamel.yaml.round_trip_dump(data, explicit_end=True)
+ s = round_trip_dump(data, explicit_end=True)
assert s == 'null\n...\n'
- d = ruamel.yaml.round_trip_load(s)
+ d = round_trip_load(s)
assert d == data
- def test_dump02(self):
- import ruamel.yaml # NOQA
-
+ def test_dump02(self) -> None:
data = None
- s = ruamel.yaml.round_trip_dump(data, explicit_end=False)
+ s = round_trip_dump(data, explicit_end=False)
assert s == 'null\n...\n'
- d = ruamel.yaml.round_trip_load(s)
+ d = round_trip_load(s)
assert d == data
- def test_dump03(self):
- import ruamel.yaml # NOQA
-
+ def test_dump03(self) -> None:
data = None
- s = ruamel.yaml.round_trip_dump(data, explicit_start=True)
+ s = round_trip_dump(data, explicit_start=True)
assert s == '---\n...\n'
- d = ruamel.yaml.round_trip_load(s)
+ d = round_trip_load(s)
assert d == data
- def test_dump04(self):
- import ruamel.yaml # NOQA
-
+ def test_dump04(self) -> None:
data = None
- s = ruamel.yaml.round_trip_dump(data, explicit_start=True, explicit_end=False)
+ s = round_trip_dump(data, explicit_start=True, explicit_end=False)
assert s == '---\n...\n'
- d = ruamel.yaml.round_trip_load(s)
+ d = round_trip_load(s)
assert d == data
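
The round_trip_dump()/round_trip_load() helpers imported above (from _test/roundtrip.py) wrap the newer YAML() instance API that replaces the deprecated module-level ruamel.yaml functions. A minimal sketch of the equivalent direct calls, assuming a current ruamel.yaml:

    # standalone sketch, assumes a current ruamel.yaml install
    import io
    import ruamel.yaml

    yaml = ruamel.yaml.YAML()            # round-trip loader/dumper by default
    buf = io.StringIO()
    yaml.dump(None, buf)                 # per the assertions above: 'null\n...\n'
    assert yaml.load(buf.getvalue()) is None
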
diff --git a/_test/test_numpy.py b/_test/test_numpy.py
index 2747fc4..24eb768 100644
--- a/_test/test_numpy.py
+++ b/_test/test_numpy.py
@@ -1,24 +1,24 @@
# coding: utf-8
-from __future__ import print_function, absolute_import, division, unicode_literals
+# try:
+#     import numpy
+# except: # NOQA
+#     numpy = None
-try:
- import numpy
-except: # NOQA
- numpy = None
-
-def Xtest_numpy():
- import ruamel.yaml
-
- if numpy is None:
- return
- data = numpy.arange(10)
- print('data', type(data), data)
-
- yaml_str = ruamel.yaml.dump(data)
- datb = ruamel.yaml.load(yaml_str)
- print('datb', type(datb), datb)
-
- print('\nYAML', yaml_str)
- assert data == datb
+# def Xtest_numpy() -> None:
+# import ruamel.yaml
+#
+# if numpy is None:
+# return
+# data = numpy.arange(10)
+# print('data', type(data), data)
+#
+# buf = io.BytesIO()
+# ruamel.yaml.dump(data) # needs updating to use buffer
+# yaml_str = buf.getvalue().decode('utf-8')
+# datb = ruamel.yaml.load(yaml_str)
+# print('datb', type(datb), datb)
+#
+# print('\nYAML', yaml_str)
+# assert data == datb
diff --git a/_test/test_program_config.py b/_test/test_program_config.py
index dcd8351..6c5cad8 100644
--- a/_test/test_program_config.py
+++ b/_test/test_program_config.py
@@ -1,12 +1,13 @@
+# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
# import ruamel.yaml
from roundtrip import round_trip
class TestProgramConfig:
- def test_application_arguments(self):
+ def test_application_arguments(self) -> None:
        # application configuration
round_trip("""
args:
@@ -19,7 +20,7 @@ class TestProgramConfig:
wait: 10
""")
- def test_single(self):
+ def test_single(self) -> None:
# application configuration
round_trip("""
# default arguments for the program
@@ -38,7 +39,7 @@ class TestProgramConfig:
# no more argument info to pass
""")
- def test_multi(self):
+ def test_multi(self) -> None:
# application configuration
round_trip("""
# default arguments for the program
diff --git a/_test/test_spec_examples.py b/_test/test_spec_examples.py
index bae505d..7faa4bf 100644
--- a/_test/test_spec_examples.py
+++ b/_test/test_spec_examples.py
@@ -1,9 +1,10 @@
+# coding: utf-8
from roundtrip import YAML
-import pytest # NOQA
+import pytest # type: ignore # NOQA
-def test_example_2_1():
+def test_example_2_1() -> None:
yaml = YAML()
yaml.round_trip("""
- Mark McGwire
@@ -12,8 +13,8 @@ def test_example_2_1():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_2():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_2() -> None:
yaml = YAML()
yaml.mapping_value_align = True
yaml.round_trip("""
@@ -23,7 +24,7 @@ def test_example_2_2():
""")
-def test_example_2_3():
+def test_example_2_3() -> None:
yaml = YAML()
yaml.indent(sequence=4, offset=2)
yaml.round_trip("""
@@ -38,8 +39,8 @@ def test_example_2_3():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_4():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_4() -> None:
yaml = YAML()
yaml.mapping_value_align = True
yaml.round_trip("""
@@ -54,8 +55,8 @@ def test_example_2_4():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_5():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_5() -> None:
yaml = YAML()
yaml.flow_sequence_element_align = True
yaml.round_trip("""
@@ -65,8 +66,8 @@ def test_example_2_5():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_6():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_6() -> None:
yaml = YAML()
# yaml.flow_mapping_final_comma = False
yaml.flow_mapping_one_element_per_line = True
@@ -79,8 +80,8 @@ def test_example_2_6():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_7():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_7() -> None:
yaml = YAML()
yaml.round_trip_all("""
# Ranking of 1998 home runs
@@ -96,7 +97,7 @@ def test_example_2_7():
""")
-def test_example_2_8():
+def test_example_2_8() -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.explicit_end = True
@@ -114,7 +115,7 @@ def test_example_2_8():
""")
-def test_example_2_9():
+def test_example_2_9() -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent(sequence=4, offset=2)
@@ -130,8 +131,8 @@ def test_example_2_9():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_10():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_10() -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent(sequence=4, offset=2)
@@ -147,8 +148,8 @@ def test_example_2_10():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_11():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_11() -> None:
yaml = YAML()
yaml.round_trip("""
? - Detroit Tigers
@@ -163,8 +164,8 @@ def test_example_2_11():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_12():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_12() -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.round_trip("""
@@ -179,8 +180,8 @@ def test_example_2_12():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_13():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_13() -> None:
yaml = YAML()
yaml.round_trip(r"""
# ASCII Art
@@ -190,8 +191,8 @@ def test_example_2_13():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_14():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_14() -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent(root_scalar=2) # needs to be added
@@ -203,8 +204,8 @@ def test_example_2_14():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_15():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_15() -> None:
yaml = YAML()
yaml.round_trip("""
>
@@ -218,7 +219,7 @@ def test_example_2_15():
""")
-def test_example_2_16():
+def test_example_2_16() -> None:
yaml = YAML()
yaml.round_trip("""
name: Mark McGwire
@@ -231,10 +232,10 @@ def test_example_2_16():
""")
-@pytest.mark.xfail(
+@pytest.mark.xfail( # type: ignore
strict=True, reason='cannot YAML dump escape sequences (\n) as hex and normal'
)
-def test_example_2_17():
+def test_example_2_17() -> None:
yaml = YAML()
yaml.allow_unicode = False
yaml.preserve_quotes = True
@@ -249,8 +250,9 @@ def test_example_2_17():
""")
-@pytest.mark.xfail(strict=True, reason='non-literal/folding multiline scalars not supported')
-def test_example_2_18():
+@pytest.mark.xfail(strict=True, # type: ignore # NOQA
+ reason='non-literal/folding multiline scalars not supported')
+def test_example_2_18() -> None:
yaml = YAML()
yaml.round_trip("""
plain:
@@ -262,8 +264,8 @@ def test_example_2_18():
""")
-@pytest.mark.xfail(strict=True, reason='leading + on decimal dropped')
-def test_example_2_19():
+@pytest.mark.xfail(strict=True, reason='leading + on decimal dropped') # type: ignore
+def test_example_2_19() -> None:
yaml = YAML()
yaml.round_trip("""
canonical: 12345
@@ -273,8 +275,8 @@ def test_example_2_19():
""")
-@pytest.mark.xfail(strict=True, reason='case of NaN not preserved')
-def test_example_2_20():
+@pytest.mark.xfail(strict=True, reason='case of NaN not preserved') # type: ignore
+def test_example_2_20() -> None:
yaml = YAML()
yaml.round_trip("""
canonical: 1.23015e+3
@@ -285,7 +287,7 @@ def test_example_2_20():
""")
-def Xtest_example_2_X():
+def Xtest_example_2_X() -> None:
yaml = YAML()
yaml.round_trip("""
""")
diff --git a/_test/test_string.py b/_test/test_string.py
index d1f7982..75890d2 100644
--- a/_test/test_string.py
+++ b/_test/test_string.py
@@ -1,7 +1,5 @@
# coding: utf-8
-from __future__ import print_function
-
"""
various test cases for string scalars in YAML files
'|' for preserved newlines
@@ -15,7 +13,7 @@ and the chomping modifiers:
"""
-import pytest
+import pytest # type: ignore
import platform
# from ruamel.yaml.compat import ordereddict
@@ -23,20 +21,20 @@ from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NO
class TestLiteralScalarString:
- def test_basic_string(self):
+ def test_basic_string(self) -> None:
round_trip("""
a: abcdefg
""")
- def test_quoted_integer_string(self):
+ def test_quoted_integer_string(self) -> None:
round_trip("""
a: '12345'
""")
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_preserve_string(self):
+ def test_preserve_string(self) -> None:
inp = """
a: |
abc
@@ -44,10 +42,10 @@ class TestLiteralScalarString:
"""
round_trip(inp, intermediate=dict(a='abc\ndef\n'))
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_preserve_string_strip(self):
+ def test_preserve_string_strip(self) -> None:
s = """
a: |-
abc
@@ -56,10 +54,10 @@ class TestLiteralScalarString:
"""
round_trip(s, intermediate=dict(a='abc\ndef'))
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_preserve_string_keep(self):
+ def test_preserve_string_keep(self) -> None:
# with pytest.raises(AssertionError) as excinfo:
inp = """
a: |+
@@ -71,10 +69,10 @@ class TestLiteralScalarString:
"""
round_trip(inp, intermediate=dict(a='ghi\njkl\n\n\n', b='x'))
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_preserve_string_keep_at_end(self):
+ def test_preserve_string_keep_at_end(self) -> None:
# at EOF you have to specify the ... to get proper "closure"
# of the multiline scalar
inp = """
@@ -86,7 +84,7 @@ class TestLiteralScalarString:
"""
round_trip(inp, intermediate=dict(a='ghi\njkl\n\n'))
- def test_fold_string(self):
+ def test_fold_string(self) -> None:
inp = """
a: >
abc
@@ -95,7 +93,7 @@ class TestLiteralScalarString:
"""
round_trip(inp)
- def test_fold_string_strip(self):
+ def test_fold_string_strip(self) -> None:
inp = """
a: >-
abc
@@ -104,7 +102,7 @@ class TestLiteralScalarString:
"""
round_trip(inp)
- def test_fold_string_keep(self):
+ def test_fold_string_keep(self) -> None:
with pytest.raises(AssertionError) as excinfo: # NOQA
inp = """
a: >+
@@ -116,19 +114,19 @@ class TestLiteralScalarString:
class TestQuotedScalarString:
- def test_single_quoted_string(self):
+ def test_single_quoted_string(self) -> None:
inp = """
a: 'abc'
"""
round_trip(inp, preserve_quotes=True)
- def test_double_quoted_string(self):
+ def test_double_quoted_string(self) -> None:
inp = """
a: "abc"
"""
round_trip(inp, preserve_quotes=True)
- def test_non_preserved_double_quoted_string(self):
+ def test_non_preserved_double_quoted_string(self) -> None:
inp = """
a: "abc"
"""
@@ -141,7 +139,7 @@ class TestQuotedScalarString:
class TestReplace:
"""inspired by issue 110 from sandres23"""
- def test_replace_preserved_scalar_string(self):
+ def test_replace_preserved_scalar_string(self) -> None:
import ruamel
s = dedent("""\
@@ -161,7 +159,7 @@ class TestReplace:
foo
""")
- def test_replace_double_quoted_scalar_string(self):
+ def test_replace_double_quoted_scalar_string(self) -> None:
import ruamel
s = dedent("""\
@@ -174,7 +172,7 @@ class TestReplace:
class TestWalkTree:
- def test_basic(self):
+ def test_basic(self) -> None:
from ruamel.yaml.comments import CommentedMap
from ruamel.yaml.scalarstring import walk_tree
@@ -190,7 +188,7 @@ class TestWalkTree:
"""
assert round_trip_dump(data) == dedent(exp)
- def test_map(self):
+ def test_map(self) -> None:
from ruamel.yaml.compat import ordereddict
from ruamel.yaml.comments import CommentedMap
from ruamel.yaml.scalarstring import walk_tree, preserve_literal
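
TestWalkTree above relies on walk_tree() converting plain strings that contain newlines into literal block scalars in place before dumping. A minimal sketch, assuming a current ruamel.yaml:

    # standalone sketch, assumes a current ruamel.yaml install
    import sys
    import ruamel.yaml
    from ruamel.yaml.scalarstring import walk_tree

    data = {'a': 'first line\nsecond line\n', 'b': 'one line'}
    walk_tree(data)                       # 'a' becomes a LiteralScalarString
    ruamel.yaml.YAML().dump(data, sys.stdout)   # 'a' is emitted in '|' style
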
diff --git a/_test/test_tag.py b/_test/test_tag.py
index 31a192e..40e3f69 100644
--- a/_test/test_tag.py
+++ b/_test/test_tag.py
@@ -1,30 +1,31 @@
# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
+from typing import Any
from roundtrip import round_trip, round_trip_load, YAML
-def register_xxx(**kw):
+def register_xxx(**kw: Any) -> None:
from ruamel import yaml
class XXX(yaml.comments.CommentedMap):
@staticmethod
- def yaml_dump(dumper, data):
- return dumper.represent_mapping(u'!xxx', data)
+ def yaml_dump(dumper: Any, data: Any) -> Any:
+ return dumper.represent_mapping('!xxx', data)
@classmethod
- def yaml_load(cls, constructor, node):
+ def yaml_load(cls, constructor: Any, node: Any) -> Any:
data = cls()
yield data
constructor.construct_mapping(node, data)
- yaml.add_constructor(u'!xxx', XXX.yaml_load, constructor=yaml.RoundTripConstructor)
+ yaml.add_constructor('!xxx', XXX.yaml_load, constructor=yaml.RoundTripConstructor)
yaml.add_representer(XXX, XXX.yaml_dump, representer=yaml.RoundTripRepresenter)
class TestIndentFailures:
- def test_tag(self):
+ def test_tag(self) -> None:
round_trip("""\
!!python/object:__main__.Developer
name: Anthon
@@ -32,7 +33,7 @@ class TestIndentFailures:
language: python
""")
- def test_full_tag(self):
+ def test_full_tag(self) -> None:
round_trip("""\
!!tag:yaml.org,2002:python/object:__main__.Developer
name: Anthon
@@ -40,7 +41,7 @@ class TestIndentFailures:
language: python
""")
- def test_standard_tag(self):
+ def test_standard_tag(self) -> None:
round_trip("""\
!!tag:yaml.org,2002:python/object:map
name: Anthon
@@ -48,7 +49,7 @@ class TestIndentFailures:
language: python
""")
- def test_Y1(self):
+ def test_Y1(self) -> None:
round_trip("""\
!yyy
name: Anthon
@@ -56,7 +57,7 @@ class TestIndentFailures:
language: python
""")
- def test_Y2(self):
+ def test_Y2(self) -> None:
round_trip("""\
!!yyy
name: Anthon
@@ -64,9 +65,18 @@ class TestIndentFailures:
language: python
""")
+    def test_spec_6_26_tag_shorthands(self) -> None:
+ round_trip("""\
+ %TAG !e! tag:example.com,2000:app/
+ ---
+ - !local foo
+ - !!str bar
+ - !e!tag%21 baz
+ """)
+
class TestRoundTripCustom:
- def test_X1(self):
+ def test_X1(self) -> None:
register_xxx()
round_trip("""\
!xxx
@@ -75,8 +85,8 @@ class TestRoundTripCustom:
language: python
""")
- @pytest.mark.xfail(strict=True)
- def test_X_pre_tag_comment(self):
+ @pytest.mark.xfail(strict=True) # type: ignore
+ def test_X_pre_tag_comment(self) -> None:
register_xxx()
round_trip("""\
-
@@ -87,8 +97,8 @@ class TestRoundTripCustom:
language: python
""")
- @pytest.mark.xfail(strict=True)
- def test_X_post_tag_comment(self):
+ @pytest.mark.xfail(strict=True) # type: ignore
+ def test_X_post_tag_comment(self) -> None:
register_xxx()
round_trip("""\
- !xxx
@@ -98,7 +108,7 @@ class TestRoundTripCustom:
language: python
""")
- def test_scalar_00(self):
+ def test_scalar_00(self) -> None:
# https://stackoverflow.com/a/45967047/1307905
round_trip("""\
Outputs:
@@ -110,24 +120,35 @@ class TestRoundTripCustom:
class TestIssue201:
- def test_encoded_unicode_tag(self):
+ def test_encoded_unicode_tag(self) -> None:
round_trip_load("""
s: !!python/%75nicode 'abc'
""")
class TestImplicitTaggedNodes:
- def test_scalar(self):
- round_trip("""\
- - !Scalar abcdefg
+ def test_scalar(self) -> None:
+ data = round_trip("""\
+ - !SString abcdefg
+ - !SFloat 1.0
+ - !SInt 1961
+ - !SBool true
+ - !SLit |
+ glitter in the dark near the Tanhäuser gate
""")
-
- def test_mapping(self):
+        # tagged scalars have string (or string-derived) types as value
+ assert data[0].count('d') == 1
+ assert data[1].count('1') == 1
+ assert data[2].count('1') == 2
+ assert data[3].count('u') == 1
+ assert data[4].count('a') == 4
+
+ def test_mapping(self) -> None:
round_trip("""\
- !Mapping {a: 1, b: 2}
""")
- def test_sequence(self):
+ def test_sequence(self) -> None:
yaml = YAML()
yaml.brace_single_entry_mapping_in_flow_sequence = True
yaml.mapping_value_align = True
@@ -135,7 +156,7 @@ class TestImplicitTaggedNodes:
- !Sequence [a, {b: 1}, {c: {d: 3}}]
""")
- def test_sequence2(self):
+ def test_sequence2(self) -> None:
yaml = YAML()
yaml.mapping_value_align = True
yaml.round_trip("""
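
register_xxx() above wires a constructor and representer for the !xxx tag by hand; for simple classes YAML.register_class() gives the same round trip in one call. A minimal sketch with a hypothetical Developer class (the class and its attributes are illustrative), assuming a current ruamel.yaml:

    # standalone sketch, assumes a current ruamel.yaml install
    import sys
    import ruamel.yaml

    yaml = ruamel.yaml.YAML()

    @yaml.register_class
    class Developer:                      # illustrative class, not from the test suite
        yaml_tag = '!Developer'

        def __init__(self, name: str, language: str) -> None:
            self.name = name
            self.language = language

    # dumps as '- !Developer' followed by the attribute mapping
    yaml.dump([Developer('Anthon', 'python')], sys.stdout)
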
diff --git a/_test/test_version.py b/_test/test_version.py
index 742ec36..b60b1dd 100644
--- a/_test/test_version.py
+++ b/_test/test_version.py
@@ -1,18 +1,21 @@
# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
+from typing import Any, Optional
from roundtrip import dedent, round_trip, round_trip_load
-def load(s, version=None):
+def load(s: str, version: Optional[Any] = None) -> Any:
import ruamel.yaml # NOQA
- return ruamel.yaml.round_trip_load(dedent(s), version)
+ yaml = ruamel.yaml.YAML()
+ yaml.version = version
+ return yaml.load(dedent(s))
class TestVersions:
- def test_explicit_1_2(self):
+ def test_explicit_1_2(self) -> None:
r = load("""\
%YAML 1.2
---
@@ -36,7 +39,7 @@ class TestVersions:
assert r[7] == 'no'
assert r[8] is True
- def test_explicit_1_1(self):
+ def test_explicit_1_1(self) -> None:
r = load("""\
%YAML 1.1
---
@@ -60,7 +63,7 @@ class TestVersions:
assert r[7] is False
assert r[8] is True
- def test_implicit_1_2(self):
+ def test_implicit_1_2(self) -> None:
r = load("""\
- 12:34:56
- 12:34:56.78
@@ -84,7 +87,7 @@ class TestVersions:
assert r[8] == 'no'
assert r[9] is True
- def test_load_version_1_1(self):
+ def test_load_version_1_1(self) -> None:
inp = """\
- 12:34:56
- 12:34:56.78
@@ -112,7 +115,7 @@ class TestVersions:
class TestIssue62:
# bitbucket issue 62, issue_62
- def test_00(self):
+ def test_00(self) -> None:
import ruamel.yaml # NOQA
s = dedent("""\
@@ -129,7 +132,7 @@ class TestIssue62:
round_trip(s.format('%YAML 1.1\n---\n'), preserve_quotes=True)
round_trip(s.format(""), preserve_quotes=True)
- def test_00_single_comment(self):
+ def test_00_single_comment(self) -> None:
import ruamel.yaml # NOQA
s = dedent("""\
@@ -146,7 +149,7 @@ class TestIssue62:
round_trip(s.format(""), preserve_quotes=True)
# round_trip(s.format('%YAML 1.2\n---\n'), preserve_quotes=True, version=(1, 2))
- def test_01(self):
+ def test_01(self) -> None:
import ruamel.yaml # NOQA
s = dedent("""\
@@ -158,6 +161,6 @@ class TestIssue62:
# note the flow seq on the --- line!
round_trip(s.format('%YAML 1.2\n--- '), preserve_quotes=True, version='1.2')
- def test_so_45681626(self):
+ def test_so_45681626(self) -> None:
# was not properly parsing
round_trip_load('{"in":{},"out":{}}')
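
The rewritten load() helper above pins the YAML directive version on the instance instead of passing it to a module-level function. A minimal sketch, assuming a current ruamel.yaml:

    # standalone sketch, assumes a current ruamel.yaml install
    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    yaml.version = (1, 1)                 # a '1.1' string also works; None -> 1.2 rules
    data = yaml.load('- yes\n- no\n')
    # under 1.1 resolution 'yes'/'no' load as booleans;
    # with the default 1.2 rules they stay plain strings
    print(data)                           # [True, False]
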
diff --git a/_test/test_yamlfile.py b/_test/test_yamlfile.py
index a02fcab..6f7aca7 100644
--- a/_test/test_yamlfile.py
+++ b/_test/test_yamlfile.py
@@ -1,38 +1,38 @@
-
-from __future__ import print_function
+# coding: utf-8
"""
various test cases for YAML files
"""
import sys
-import pytest # NOQA
+import io
+import pytest # type: ignore # NOQA
import platform
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NOQA
class TestYAML:
- def test_backslash(self):
+ def test_backslash(self) -> None:
round_trip("""
handlers:
static_files: applications/\\1/static/\\2
""")
- def test_omap_out(self):
+ def test_omap_out(self) -> None:
# ordereddict mapped to !!omap
from ruamel.yaml.compat import ordereddict
import ruamel.yaml # NOQA
x = ordereddict([('a', 1), ('b', 2)])
- res = ruamel.yaml.dump(x, default_flow_style=False)
+ res = round_trip_dump(x, default_flow_style=False)
assert res == dedent("""
!!omap
- a: 1
- b: 2
""")
- def test_omap_roundtrip(self):
+ def test_omap_roundtrip(self) -> None:
round_trip("""
!!omap
- a: 1
@@ -41,38 +41,39 @@ class TestYAML:
- d: 4
""")
- @pytest.mark.skipif(sys.version_info < (2, 7), reason='collections not available')
- def test_dump_collections_ordereddict(self):
- from collections import OrderedDict
- import ruamel.yaml # NOQA
-
- # OrderedDict mapped to !!omap
- x = OrderedDict([('a', 1), ('b', 2)])
- res = ruamel.yaml.dump(x, Dumper=ruamel.yaml.RoundTripDumper, default_flow_style=False)
- assert res == dedent("""
- !!omap
- - a: 1
- - b: 2
- """)
-
- @pytest.mark.skipif(
+ # @pytest.mark.skipif(sys.version_info < (2, 7),
+ # reason='collections not available')
+ # def test_dump_collections_ordereddict(self) -> None:
+ # from collections import OrderedDict
+ # import ruamel.yaml # NOQA
+
+ # # OrderedDict mapped to !!omap
+ # x = OrderedDict([('a', 1), ('b', 2)])
+ # res = round_trip_dump(x, default_flow_style=False)
+ # assert res == dedent("""
+ # !!omap
+ # - a: 1
+ # - b: 2
+ # """)
+
+ @pytest.mark.skipif( # type: ignore
sys.version_info >= (3, 0) or platform.python_implementation() != 'CPython',
reason='ruamel.yaml not available',
)
- def test_dump_ruamel_ordereddict(self):
+ def test_dump_ruamel_ordereddict(self) -> None:
from ruamel.ordereddict import ordereddict
import ruamel.yaml # NOQA
# OrderedDict mapped to !!omap
x = ordereddict([('a', 1), ('b', 2)])
- res = ruamel.yaml.dump(x, Dumper=ruamel.yaml.RoundTripDumper, default_flow_style=False)
+ res = round_trip_dump(x, default_flow_style=False)
assert res == dedent("""
!!omap
- a: 1
- b: 2
""")
- def test_CommentedSet(self):
+ def test_CommentedSet(self) -> None:
from ruamel.yaml.constructor import CommentedSet
s = CommentedSet(['a', 'b', 'c'])
@@ -84,13 +85,17 @@ class TestYAML:
s.remove('e')
assert s == CommentedSet(['a', 'c', 'd', 'f'])
- def test_set_out(self):
+ def test_set_out(self) -> None:
# preferable would be the shorter format without the ': null'
import ruamel.yaml # NOQA
x = set(['a', 'b', 'c'])
- res = ruamel.yaml.dump(x, default_flow_style=False)
- assert res == dedent("""
+ # cannot use round_trip_dump, it doesn't show null in block style
+ buf = io.StringIO()
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
+ yaml.default_flow_style = False
+ yaml.dump(x, buf)
+ assert buf.getvalue() == dedent("""
!!set
a: null
b: null
@@ -98,7 +103,7 @@ class TestYAML:
""")
# ordering is not preserved in a set
- def test_set_compact(self):
+ def test_set_compact(self) -> None:
# this format is read and also should be written by default
round_trip("""
!!set
@@ -107,7 +112,7 @@ class TestYAML:
? c
""")
- def test_blank_line_after_comment(self):
+ def test_blank_line_after_comment(self) -> None:
round_trip("""
# Comment with spaces after it.
@@ -115,7 +120,7 @@ class TestYAML:
a: 1
""")
- def test_blank_line_between_seq_items(self):
+ def test_blank_line_between_seq_items(self) -> None:
round_trip("""
# Seq with empty lines in between items.
b:
@@ -125,10 +130,10 @@ class TestYAML:
- baz
""")
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_blank_line_after_literal_chip(self):
+ def test_blank_line_after_literal_chip(self) -> None:
s = """
c:
- |
@@ -149,10 +154,10 @@ class TestYAML:
assert d['c'][0].split('it.')[1] == '\n'
assert d['c'][1].split('line.')[1] == '\n'
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_blank_line_after_literal_keep(self):
+ def test_blank_line_after_literal_keep(self) -> None:
""" have to insert an eof marker in YAML to test this"""
s = """
c:
@@ -175,10 +180,10 @@ class TestYAML:
assert d['c'][0].split('it.')[1] == '\n\n'
assert d['c'][1].split('line.')[1] == '\n\n\n'
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_blank_line_after_literal_strip(self):
+ def test_blank_line_after_literal_strip(self) -> None:
s = """
c:
- |-
@@ -199,18 +204,22 @@ class TestYAML:
assert d['c'][0].split('it.')[1] == ""
assert d['c'][1].split('line.')[1] == ""
- def test_load_all_perserve_quotes(self):
+ def test_load_all_perserve_quotes(self) -> None:
import ruamel.yaml # NOQA
+ yaml = ruamel.yaml.YAML()
+ yaml.preserve_quotes = True
s = dedent("""\
a: 'hello'
---
b: "goodbye"
""")
data = []
- for x in ruamel.yaml.round_trip_load_all(s, preserve_quotes=True):
+ for x in yaml.load_all(s):
data.append(x)
- out = ruamel.yaml.dump_all(data, Dumper=ruamel.yaml.RoundTripDumper)
+ buf = ruamel.yaml.compat.StringIO()
+ yaml.dump_all(data, buf)
+ out = buf.getvalue()
print(type(data[0]['a']), data[0]['a'])
# out = ruamel.yaml.round_trip_dump_all(data)
print(out)
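
test_load_all_perserve_quotes above now drives multi-document round trips through a single YAML() instance instead of the round_trip_load_all()/dump_all() module functions. A minimal sketch, assuming a current ruamel.yaml:

    # standalone sketch, assumes a current ruamel.yaml install
    import ruamel.yaml
    from ruamel.yaml.compat import StringIO

    yaml = ruamel.yaml.YAML()
    yaml.preserve_quotes = True
    docs = list(yaml.load_all("a: 'hello'\n---\nb: \"goodbye\"\n"))
    buf = StringIO()
    yaml.dump_all(docs, buf)
    print(buf.getvalue())                 # quoting style of both scalars is kept
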
diff --git a/_test/test_yamlobject.py b/_test/test_yamlobject.py
index 7c4f7ec..3f488d3 100644
--- a/_test/test_yamlobject.py
+++ b/_test/test_yamlobject.py
@@ -1,20 +1,19 @@
# coding: utf-8
-from __future__ import print_function
-
import sys
-import pytest # NOQA
+from typing import Any
+import pytest # type: ignore # NOQA
from roundtrip import save_and_run # NOQA
-def test_monster(tmpdir):
- program_src = u'''\
+def test_monster(tmpdir: Any) -> None:
+ program_src = '''\
import ruamel.yaml
from textwrap import dedent
class Monster(ruamel.yaml.YAMLObject):
- yaml_tag = u'!Monster'
+ yaml_tag = '!Monster'
def __init__(self, name, hp, ac, attacks):
self.name = name
@@ -26,29 +25,35 @@ def test_monster(tmpdir):
return "%s(name=%r, hp=%r, ac=%r, attacks=%r)" % (
self.__class__.__name__, self.name, self.hp, self.ac, self.attacks)
- data = ruamel.yaml.load(dedent("""\\
+ yaml = ruamel.yaml.YAML(typ='safe', pure='True')
+ yaml = ruamel.yaml.YAML()
+ data = yaml.load(dedent("""\\
--- !Monster
name: Cave spider
hp: [2,6] # 2d6
ac: 16
attacks: [BITE, HURT]
- """), Loader=ruamel.yaml.Loader)
+ """))
# normal dump, keys will be sorted
- assert ruamel.yaml.dump(data) == dedent("""\\
+ from io import BytesIO
+ buf = BytesIO()
+ yaml.dump(data, buf)
+ print(buf.getvalue().decode('utf-8'))
+ assert buf.getvalue().decode('utf8') == dedent("""\\
!Monster
+ name: Cave spider
+ hp: [2, 6] # 2d6
ac: 16
attacks: [BITE, HURT]
- hp: [2, 6]
- name: Cave spider
""")
'''
assert save_and_run(program_src, tmpdir) == 0
-@pytest.mark.skipif(sys.version_info < (3, 0), reason='no __qualname__')
-def test_qualified_name00(tmpdir):
+@pytest.mark.skipif(sys.version_info < (3, 0), reason='no __qualname__') # type: ignore
+def test_qualified_name00(tmpdir: Any) -> None:
"""issue 214"""
- program_src = u"""\
+ program_src = """\
from ruamel.yaml import YAML
from ruamel.yaml.compat import StringIO
@@ -69,8 +74,8 @@ def test_qualified_name00(tmpdir):
assert save_and_run(program_src, tmpdir) == 0
-@pytest.mark.skipif(sys.version_info < (3, 0), reason='no __qualname__')
-def test_qualified_name01(tmpdir):
+@pytest.mark.skipif(sys.version_info < (3, 0), reason='no __qualname__') # type: ignore
+def test_qualified_name01(tmpdir: Any) -> None:
"""issue 214"""
from ruamel.yaml import YAML
import ruamel.yaml.comments
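
The Monster program above dumps through a BytesIO and decodes the result; the YAML() dumper writes encoded bytes to a binary stream and plain text to a text stream. A minimal sketch, assuming a current ruamel.yaml:

    # standalone sketch, assumes a current ruamel.yaml install
    import io
    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    bbuf, sbuf = io.BytesIO(), io.StringIO()
    yaml.dump({'name': 'Cave spider', 'hp': [2, 6]}, bbuf)      # utf-8 encoded bytes
    yaml.dump({'name': 'Cave spider', 'hp': [2, 6]}, sbuf)      # text
    print(bbuf.getvalue().decode('utf-8') == sbuf.getvalue())   # expected: True
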
diff --git a/_test/test_z_check_debug_leftovers.py b/_test/test_z_check_debug_leftovers.py
index f5be5df..7096a73 100644
--- a/_test/test_z_check_debug_leftovers.py
+++ b/_test/test_z_check_debug_leftovers.py
@@ -1,7 +1,8 @@
# coding: utf-8
import sys
-import pytest # NOQA
+from typing import Any
+import pytest # type: ignore # NOQA
from roundtrip import round_trip_load, round_trip_dump, dedent
@@ -9,7 +10,7 @@ from roundtrip import round_trip_load, round_trip_dump, dedent
class TestLeftOverDebug:
# idea here is to capture round_trip_output via pytest stdout capture
    # if there are any leftover debug statements they should show up
- def test_00(self, capsys):
+ def test_00(self, capsys: Any) -> None:
s = dedent("""
a: 1
b: []
@@ -21,7 +22,7 @@ class TestLeftOverDebug:
out, err = capsys.readouterr()
assert out == s
- def test_01(self, capsys):
+ def test_01(self, capsys: Any) -> None:
s = dedent("""
- 1
- []
diff --git a/_test/test_z_data.py b/_test/test_z_data.py
index a4eec0d..8a8ba21 100644
--- a/_test/test_z_data.py
+++ b/_test/test_z_data.py
@@ -1,21 +1,19 @@
# coding: utf-8
-from __future__ import print_function, unicode_literals
-
import sys
-import pytest # NOQA
+import os
+import pytest # type: ignore # NOQA
import warnings # NOQA
-
-from ruamel.std.pathlib import Path
+from typing import Any, Optional, List, Tuple
+from pathlib import Path
base_path = Path('data') # that is ruamel.yaml.data
-PY2 = sys.version_info[0] == 2
-class YAMLData(object):
+class YAMLData:
yaml_tag = '!YAML'
- def __init__(self, s):
+ def __init__(self, s: Any) -> None:
self._s = s
# Conversion tables for input. E.g. "<TAB>" is replaced by "\t"
@@ -29,9 +27,9 @@ class YAMLData(object):
# fmt: on
@property
- def value(self):
+ def value(self) -> Any:
if hasattr(self, '_p'):
- return self._p
+ return self._p # type: ignore
assert ' \n' not in self._s
assert '\t\n' not in self._s
self._p = self._s
@@ -40,7 +38,7 @@ class YAMLData(object):
self._p = self._p.replace(k, v)
return self._p
- def test_rewrite(self, s):
+ def test_rewrite(self, s: str) -> str:
assert ' \n' not in s
assert '\t\n' not in s
for k, v in YAMLData.special.items():
@@ -49,7 +47,7 @@ class YAMLData(object):
return s
@classmethod
- def from_yaml(cls, constructor, node):
+ def from_yaml(cls, constructor: Any, node: Any) -> 'YAMLData':
from ruamel.yaml.nodes import MappingNode
if isinstance(node, MappingNode):
@@ -69,22 +67,30 @@ class Assert(YAMLData):
yaml_tag = '!Assert'
@property
- def value(self):
- from ruamel.yaml.compat import Mapping
+ def value(self) -> Any:
+ from collections.abc import Mapping
if hasattr(self, '_pa'):
- return self._pa
+ return self._pa # type: ignore
if isinstance(self._s, Mapping):
- self._s['lines'] = self.test_rewrite(self._s['lines'])
+ self._s['lines'] = self.test_rewrite(self._s['lines']) # type: ignore
self._pa = self._s
return self._pa
-def pytest_generate_tests(metafunc):
+def pytest_generate_tests(metafunc: Any) -> None:
test_yaml = []
paths = sorted(base_path.glob('**/*.yaml'))
idlist = []
for path in paths:
+ # while developing tests put them in data/debug and run:
+ # auto -c "pytest _test/test_z_data.py" data/debug/*.yaml *.py _test/*.py
+ if os.environ.get('RUAMELAUTOTEST') == '1':
+ if path.parent.stem != 'debug':
+ continue
+ elif path.parent.stem == 'debug':
+ # don't test debug entries for production
+ continue
stem = path.stem
if stem.startswith('.#'): # skip emacs temporary file
continue
@@ -93,8 +99,8 @@ def pytest_generate_tests(metafunc):
metafunc.parametrize(['yaml'], test_yaml, ids=idlist, scope='class')
-class TestYAMLData(object):
- def yaml(self, yaml_version=None):
+class TestYAMLData:
+ def yaml(self, yaml_version: Optional[Any] = None) -> Any:
from ruamel.yaml import YAML
y = YAML()
@@ -103,7 +109,7 @@ class TestYAMLData(object):
y.version = yaml_version
return y
- def docs(self, path):
+ def docs(self, path: Path) -> List[Any]:
from ruamel.yaml import YAML
tyaml = YAML(typ='safe', pure=True)
@@ -113,12 +119,14 @@ class TestYAMLData(object):
tyaml.register_class(Assert)
return list(tyaml.load_all(path))
- def yaml_load(self, value, yaml_version=None):
+ def yaml_load(self, value: Any, yaml_version: Optional[Any] = None) -> Tuple[Any, Any]:
yaml = self.yaml(yaml_version=yaml_version)
data = yaml.load(value)
return yaml, data
- def round_trip(self, input, output=None, yaml_version=None):
+ def round_trip(
+ self, input: Any, output: Optional[Any] = None, yaml_version: Optional[Any] = None
+ ) -> None:
from ruamel.yaml.compat import StringIO
yaml, data = self.yaml_load(input.value, yaml_version=yaml_version)
@@ -126,14 +134,13 @@ class TestYAMLData(object):
yaml.dump(data, buf)
expected = input.value if output is None else output.value
value = buf.getvalue()
- if PY2:
- value = value.decode('utf-8')
- print('value', value)
- # print('expected', expected)
+ print('>>>> rt output\n', value.replace(' ', '\u2423'), sep='') # 2423 open box
assert value == expected
- def load_assert(self, input, confirm, yaml_version=None):
- from ruamel.yaml.compat import Mapping
+ def load_assert(
+ self, input: Any, confirm: Any, yaml_version: Optional[Any] = None
+ ) -> None:
+ from collections.abc import Mapping
d = self.yaml_load(input.value, yaml_version=yaml_version)[1] # NOQA
print('confirm.value', confirm.value, type(confirm.value))
@@ -151,15 +158,36 @@ class TestYAMLData(object):
print(line)
exec(line)
- def run_python(self, python, data, tmpdir):
+ def run_python(
+ self, python: Any, data: Any, tmpdir: Any, input: Optional[Any] = None
+ ) -> None:
from roundtrip import save_and_run
+ if input is not None:
+ (tmpdir / 'input.yaml').write_text(input.value, encoding='utf-8')
assert save_and_run(python.value, base_dir=tmpdir, output=data.value) == 0
- # this is executed by pytest the methods with names not starting with test_
- # are helpers
- def test_yaml_data(self, yaml, tmpdir):
- from ruamel.yaml.compat import Mapping
+ def insert_comments(self, data: Any, actions: Any) -> None:
+        """this is to automatically insert comments based on:
+ path (a.1.b),
+ position (before, after, between), and
+ offset (absolute/relative)
+ """
+ raise NotImplementedError
+ expected = []
+ for line in data.value.splitlines(True):
+            idx = line.find('?')
+ if idx < 0:
+ expected.append(line)
+ continue
+            assert line.lstrip()[0] == '#'  # it has to be a comment line
+ print(data)
+ assert ''.join(expected) == data.value
+
+    # this is executed by pytest; the methods with names not starting with
+    # test_ are helper methods
+ def test_yaml_data(self, yaml: Any, tmpdir: Any) -> None:
+ from collections.abc import Mapping
idx = 0
typ = None
@@ -199,20 +227,24 @@ class TestYAMLData(object):
typ = 'rt'
print('type:', typ)
if data is not None:
- print('data:', data.value, end='')
- print('output:', output.value if output is not None else output)
+ print('>>>> data:\n', data.value.replace(' ', '\u2423'), sep='', end='')
+ print('>>>> output:\n', output.value if output is not None else output, sep='')
if typ == 'rt':
self.round_trip(data, output, yaml_version=yaml_version)
elif typ == 'python_run':
- self.run_python(python, output if output is not None else data, tmpdir)
+ inp = None if output is None or data is None else data
+ self.run_python(python, output if output is not None else data, tmpdir, input=inp)
elif typ == 'load_assert':
self.load_assert(data, confirm, yaml_version=yaml_version)
+ elif typ == 'comment':
+ actions: List[Any] = []
+ self.insert_comments(data, actions)
else:
- print('\nrun type unknown:', typ)
+            print(f'\n>>>>>> run type unknown: "{typ}" <<<<<<\n')
raise AssertionError()
-def check_python_version(match, current=None):
+def check_python_version(match: Any, current: Optional[Any] = None) -> bool:
"""
version indication, return True if version matches.
match should be something like 3.6+, or [2.7, 3.3] etc. Floats
diff --git a/_test/test_z_olddata.py b/_test/test_z_olddata.py
index 91d89db..ffe1572 100644
--- a/_test/test_z_olddata.py
+++ b/_test/test_z_olddata.py
@@ -1,24 +1,24 @@
# coding: utf-8
-from __future__ import print_function
-
import sys
import os
-import pytest # NOQA
+import pytest # type: ignore # NOQA
sys.path.insert(0, os.path.dirname(__file__) + '/lib')
import warnings # NOQA
+from typing import List, Any # NOQA
-args = []
+args: List[Any] = []
-def test_data():
- import test_appliance # NOQA
+def test_data() -> None:
+ import test_appliance # type: ignore # NOQA
+ warnings.simplefilter('ignore', PendingDeprecationWarning)
collections = []
- import test_yaml
+ import test_yaml # type: ignore
collections.append(test_yaml)
test_appliance.run(collections, args)
@@ -28,14 +28,15 @@ def test_data():
# reason="no libyaml")
-def test_data_ext():
+def test_data_ext() -> None:
collections = []
import ruamel.yaml # NOQA
import test_appliance # NOQA
warnings.simplefilter('ignore', ruamel.yaml.error.UnsafeLoaderWarning)
+ warnings.simplefilter('ignore', PendingDeprecationWarning)
if ruamel.yaml.__with_libyaml__:
- import test_yaml_ext
+ import test_yaml_ext # type: ignore
collections.append(test_yaml_ext)
test_appliance.run(collections, args)
diff --git a/anchor.py b/anchor.py
index d702126..1eb1480 100644
--- a/anchor.py
+++ b/anchor.py
@@ -1,19 +1,18 @@
-if False: # MYPY
- from typing import Any, Dict, Optional, List, Union, Optional, Iterator # NOQA
+# coding: utf-8
+
+from typing import Any, Dict, Optional, List, Union, Iterator  # NOQA
anchor_attrib = '_yaml_anchor'
-class Anchor(object):
+class Anchor:
__slots__ = 'value', 'always_dump'
attrib = anchor_attrib
- def __init__(self):
- # type: () -> None
+ def __init__(self) -> None:
self.value = None
self.always_dump = False
- def __repr__(self):
- # type: () -> Any
+ def __repr__(self) -> Any:
ad = ', (always dump)' if self.always_dump else ""
- return 'Anchor({!r}{})'.format(self.value, ad)
+ return f'Anchor({self.value!r}{ad})'
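
The Anchor object modernized above is what yaml_anchor()/yaml_set_anchor() on round-tripped containers hand back, so anchors survive a load/dump cycle. A minimal sketch, assuming a current ruamel.yaml:

    # standalone sketch, assumes a current ruamel.yaml install
    import sys
    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    data = yaml.load('a: &shared {x: 1}\nb: *shared\n')
    print(data['a'].yaml_anchor())           # e.g. Anchor('shared')
    data['a'].yaml_set_anchor('common', always_dump=True)
    yaml.dump(data, sys.stdout)              # re-emits the anchor as &common
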
diff --git a/comments.py b/comments.py
index a091d37..da2b191 100644
--- a/comments.py
+++ b/comments.py
@@ -1,7 +1,5 @@
# coding: utf-8
-from __future__ import absolute_import, print_function
-
"""
stuff to deal with comments and formatting on dict/list/ordereddict/set
these are not really related, formatting could be factored out as
@@ -12,25 +10,71 @@ import sys
import copy
-from ruamel.yaml.compat import ordereddict # type: ignore
-from ruamel.yaml.compat import PY2, string_types, MutableSliceableSequence
+from ruamel.yaml.compat import ordereddict
+from ruamel.yaml.compat import MutableSliceableSequence, nprintf # NOQA
from ruamel.yaml.scalarstring import ScalarString
from ruamel.yaml.anchor import Anchor
-if PY2:
- from collections import MutableSet, Sized, Set, Mapping
-else:
- from collections.abc import MutableSet, Sized, Set, Mapping
+from collections.abc import MutableSet, Sized, Set, Mapping
-if False: # MYPY
- from typing import Any, Dict, Optional, List, Union, Optional, Iterator # NOQA
+from typing import Any, Dict, Optional, List, Union, Iterator  # NOQA
# fmt: off
__all__ = ['CommentedSeq', 'CommentedKeySeq',
'CommentedMap', 'CommentedOrderedMap',
- 'CommentedSet', 'comment_attrib', 'merge_attrib']
+ 'CommentedSet', 'comment_attrib', 'merge_attrib',
+ 'C_POST', 'C_PRE', 'C_SPLIT_ON_FIRST_BLANK', 'C_BLANK_LINE_PRESERVE_SPACE',
+ ]
# fmt: on
+# splitting of comments by the scanner
+# an EOLC (End-Of-Line Comment) is preceded by some token
+# an FLC (Full Line Comment) is a comment not preceded by a token, i.e. # is
+# the first non-blank character on the line
+# a BL is a blank line, i.e. empty or containing only spaces/tabs
+# bits 0 and 1 are combined; you can choose only one of the three values below
+C_POST = 0b00
+C_PRE = 0b01
+C_SPLIT_ON_FIRST_BLANK = 0b10 # as C_POST, but if blank line then C_PRE all lines before
+# first blank goes to POST even if no following real FLC
+# (first blank -> first of post)
+# 0b11 -> reserved for future use
+C_BLANK_LINE_PRESERVE_SPACE = 0b100
+# C_EOL_PRESERVE_SPACE2 = 0b1000
+
+
+class IDX:
+ # temporary auto increment, so rearranging is easier
+ def __init__(self) -> None:
+ self._idx = 0
+
+ def __call__(self) -> Any:
+ x = self._idx
+ self._idx += 1
+ return x
+
+ def __str__(self) -> Any:
+ return str(self._idx)
+
+
+cidx = IDX()
+
+# more or less in order of subjective expected likelihood
+# the _POST and _PRE ones are lists themselves
+C_VALUE_EOL = C_ELEM_EOL = cidx()
+C_KEY_EOL = cidx()
+C_KEY_PRE = C_ELEM_PRE = cidx()  # note: elem pre maps to key pre, not value pre
+C_VALUE_POST = C_ELEM_POST = cidx()  # note: elem post maps to value post, not key post
+C_VALUE_PRE = cidx()
+C_KEY_POST = cidx()
+C_TAG_EOL = cidx()
+C_TAG_POST = cidx()
+C_TAG_PRE = cidx()
+C_ANCHOR_EOL = cidx()
+C_ANCHOR_POST = cidx()
+C_ANCHOR_PRE = cidx()
+
+
comment_attrib = '_yaml_comment'
format_attrib = '_yaml_format'
line_col_attrib = '_yaml_line_col'
@@ -38,80 +82,136 @@ merge_attrib = '_yaml_merge'
tag_attrib = '_yaml_tag'
-class Comment(object):
- # sys.getsize tested the Comment objects, __slots__ makes them bigger
+class Comment:
+    # measuring with sys.getsizeof showed that __slots__ makes Comment objects bigger
# and adding self.end did not matter
- __slots__ = 'comment', '_items', '_end', '_start'
+ __slots__ = 'comment', '_items', '_post', '_pre'
attrib = comment_attrib
- def __init__(self):
- # type: () -> None
+ def __init__(self, old: bool = True) -> None:
+ self._pre = None if old else [] # type: ignore
self.comment = None # [post, [pre]]
# map key (mapping/omap/dict) or index (sequence/list) to a list of
# dict: post_key, pre_key, post_value, pre_value
# list: pre item, post item
- self._items = {} # type: Dict[Any, Any]
+ self._items: Dict[Any, Any] = {}
# self._start = [] # should not put these on first item
- self._end = [] # type: List[Any] # end of document comments
+ self._post: List[Any] = [] # end of document comments
- def __str__(self):
- # type: () -> str
- if bool(self._end):
- end = ',\n end=' + str(self._end)
+ def __str__(self) -> str:
+ if bool(self._post):
+ end = ',\n end=' + str(self._post)
else:
end = ""
- return 'Comment(comment={0},\n items={1}{2})'.format(self.comment, self._items, end)
+ return f'Comment(comment={self.comment},\n items={self._items}{end})'
+
+ def _old__repr__(self) -> str:
+ if bool(self._post):
+ end = ',\n end=' + str(self._post)
+ else:
+ end = ""
+ try:
+ ln = max([len(str(k)) for k in self._items]) + 1
+ except ValueError:
+ ln = '' # type: ignore
+ it = ' '.join([f'{str(k) + ":":{ln}} {v}\n' for k, v in self._items.items()])
+ if it:
+ it = '\n ' + it + ' '
+ return f'Comment(\n start={self.comment},\n items={{{it}}}{end})'
+
+ def __repr__(self) -> str:
+ if self._pre is None:
+ return self._old__repr__()
+ if bool(self._post):
+ end = ',\n end=' + repr(self._post)
+ else:
+ end = ""
+ try:
+ ln = max([len(str(k)) for k in self._items]) + 1
+ except ValueError:
+ ln = '' # type: ignore
+ it = ' '.join([f'{str(k) + ":":{ln}} {v}\n' for k, v in self._items.items()])
+ if it:
+ it = '\n ' + it + ' '
+ return f'Comment(\n pre={self.pre},\n items={{{it}}}{end})'
@property
- def items(self):
- # type: () -> Any
+ def items(self) -> Any:
return self._items
@property
- def end(self):
- # type: () -> Any
- return self._end
+ def end(self) -> Any:
+ return self._post
@end.setter
- def end(self, value):
- # type: (Any) -> None
- self._end = value
+ def end(self, value: Any) -> None:
+ self._post = value
@property
- def start(self):
- # type: () -> Any
- return self._start
+ def pre(self) -> Any:
+ return self._pre
+
+ @pre.setter
+ def pre(self, value: Any) -> None:
+ self._pre = value
- @start.setter
- def start(self, value):
- # type: (Any) -> None
- self._start = value
+ def get(self, item: Any, pos: Any) -> Any:
+ x = self._items.get(item)
+ if x is None or len(x) < pos:
+ return None
+ return x[pos] # can be None
+
+ def set(self, item: Any, pos: Any, value: Any) -> Any:
+ x = self._items.get(item)
+ if x is None:
+ self._items[item] = x = [None] * (pos + 1)
+ else:
+ while len(x) <= pos:
+ x.append(None)
+ assert x[pos] is None
+ x[pos] = value
+
+ def __contains__(self, x: Any) -> Any:
+ # test if a substring is in any of the attached comments
+ if self.comment:
+ if self.comment[0] and x in self.comment[0].value:
+ return True
+ if self.comment[1]:
+ for c in self.comment[1]:
+ if x in c.value:
+ return True
+ for value in self.items.values():
+ if not value:
+ continue
+ for c in value:
+ if c and x in c.value:
+ return True
+ if self.end:
+ for c in self.end:
+ if x in c.value:
+ return True
+ return False
# to distinguish key from None
-def NoComment():
- # type: () -> None
+def NoComment() -> None:
pass
-class Format(object):
+class Format:
__slots__ = ('_flow_style',)
attrib = format_attrib
- def __init__(self):
- # type: () -> None
- self._flow_style = None # type: Any
+ def __init__(self) -> None:
+ self._flow_style: Any = None
- def set_flow_style(self):
- # type: () -> None
+ def set_flow_style(self) -> None:
self._flow_style = True
- def set_block_style(self):
- # type: () -> None
+ def set_block_style(self) -> None:
self._flow_style = False
- def flow_style(self, default=None):
- # type: (Optional[Any]) -> Any
+ def flow_style(self, default: Optional[Any] = None) -> Any:
"""if default (the flow_style) is None, the flow style tacked on to
the object explicitly will be taken. If that is None as well the
default flow style rules the format down the line, or the type
@@ -121,65 +221,63 @@ class Format(object):
return self._flow_style
-class LineCol(object):
+class LineCol:
+ """
+ line and column information wrt document, values start at zero (0)
+ """
+
attrib = line_col_attrib
- def __init__(self):
- # type: () -> None
+ def __init__(self) -> None:
self.line = None
self.col = None
- self.data = None # type: Optional[Dict[Any, Any]]
+ self.data: Optional[Dict[Any, Any]] = None
- def add_kv_line_col(self, key, data):
- # type: (Any, Any) -> None
+ def add_kv_line_col(self, key: Any, data: Any) -> None:
if self.data is None:
self.data = {}
self.data[key] = data
- def key(self, k):
- # type: (Any) -> Any
+ def key(self, k: Any) -> Any:
return self._kv(k, 0, 1)
- def value(self, k):
- # type: (Any) -> Any
+ def value(self, k: Any) -> Any:
return self._kv(k, 2, 3)
- def _kv(self, k, x0, x1):
- # type: (Any, Any, Any) -> Any
+ def _kv(self, k: Any, x0: Any, x1: Any) -> Any:
if self.data is None:
return None
data = self.data[k]
return data[x0], data[x1]
- def item(self, idx):
- # type: (Any) -> Any
+ def item(self, idx: Any) -> Any:
if self.data is None:
return None
return self.data[idx][0], self.data[idx][1]
- def add_idx_line_col(self, key, data):
- # type: (Any, Any) -> None
+ def add_idx_line_col(self, key: Any, data: Any) -> None:
if self.data is None:
self.data = {}
self.data[key] = data
+ def __repr__(self) -> str:
+ return f'LineCol({self.line}, {self.col})'
-class Tag(object):
+
+class Tag:
"""store tag information for roundtripping"""
__slots__ = ('value',)
attrib = tag_attrib
- def __init__(self):
- # type: () -> None
+ def __init__(self) -> None:
self.value = None
- def __repr__(self):
- # type: () -> Any
- return '{0.__class__.__name__}({0.value!r})'.format(self)
+ def __repr__(self) -> Any:
+ return f'{self.__class__.__name__}({self.value!r})'
-class CommentedBase(object):
+class CommentedBase:
@property
def ca(self):
# type: () -> Any
@@ -187,16 +285,14 @@ class CommentedBase(object):
setattr(self, Comment.attrib, Comment())
return getattr(self, Comment.attrib)
- def yaml_end_comment_extend(self, comment, clear=False):
- # type: (Any, bool) -> None
+ def yaml_end_comment_extend(self, comment: Any, clear: bool = False) -> None:
if comment is None:
return
if clear or self.ca.end is None:
self.ca.end = []
self.ca.end.extend(comment)
- def yaml_key_comment_extend(self, key, comment, clear=False):
- # type: (Any, Any, bool) -> None
+ def yaml_key_comment_extend(self, key: Any, comment: Any, clear: bool = False) -> None:
r = self.ca._items.setdefault(key, [None, None, None, None])
if clear or r[1] is None:
if comment[1] is not None:
@@ -206,8 +302,7 @@ class CommentedBase(object):
r[1].extend(comment[0])
r[0] = comment[0]
- def yaml_value_comment_extend(self, key, comment, clear=False):
- # type: (Any, Any, bool) -> None
+ def yaml_value_comment_extend(self, key: Any, comment: Any, clear: bool = False) -> None:
r = self.ca._items.setdefault(key, [None, None, None, None])
if clear or r[3] is None:
if comment[1] is not None:
@@ -217,35 +312,40 @@ class CommentedBase(object):
r[3].extend(comment[0])
r[2] = comment[0]
- def yaml_set_start_comment(self, comment, indent=0):
- # type: (Any, Any) -> None
+ def yaml_set_start_comment(self, comment: Any, indent: Any = 0) -> None:
"""overwrites any preceding comment lines on an object
        expects comment to be without `#` and possibly have multiple lines
"""
from .error import CommentMark
from .tokens import CommentToken
- pre_comments = self._yaml_get_pre_comment()
+ pre_comments = self._yaml_clear_pre_comment() # type: ignore
if comment[-1] == '\n':
comment = comment[:-1] # strip final newline if there
start_mark = CommentMark(indent)
for com in comment.split('\n'):
- pre_comments.append(CommentToken('# ' + com + '\n', start_mark, None))
+ c = com.strip()
+ if len(c) > 0 and c[0] != '#':
+ com = '# ' + com
+ pre_comments.append(CommentToken(com + '\n', start_mark))
def yaml_set_comment_before_after_key(
- self, key, before=None, indent=0, after=None, after_indent=None
- ):
- # type: (Any, Any, Any, Any, Any) -> None
+ self,
+ key: Any,
+ before: Any = None,
+ indent: Any = 0,
+ after: Any = None,
+ after_indent: Any = None,
+ ) -> None:
"""
        expects comment (before/after) to be without `#` and possibly have multiple lines
"""
from ruamel.yaml.error import CommentMark
from ruamel.yaml.tokens import CommentToken
- def comment_token(s, mark):
- # type: (Any, Any) -> Any
+ def comment_token(s: Any, mark: Any) -> Any:
# handle empty lines as having no comment
- return CommentToken(('# ' if s else "") + s + '\n', mark, None)
+ return CommentToken(('# ' if s else "") + s + '\n', mark)
if after_indent is None:
after_indent = indent + 2
@@ -255,11 +355,14 @@ class CommentedBase(object):
after = after[:-1] # strip final newline if there
start_mark = CommentMark(indent)
c = self.ca.items.setdefault(key, [None, [], None, None])
- if before == '\n':
- c[1].append(comment_token("", start_mark))
- elif before:
- for com in before.split('\n'):
- c[1].append(comment_token(com, start_mark))
+ if before is not None:
+ if c[1] is None:
+ c[1] = []
+ if before == '\n':
+ c[1].append(comment_token("", start_mark)) # type: ignore
+ else:
+ for com in before.split('\n'):
+ c[1].append(comment_token(com, start_mark)) # type: ignore
if after:
start_mark = CommentMark(after_indent)
if c[3] is None:
@@ -268,8 +371,7 @@ class CommentedBase(object):
c[3].append(comment_token(com, start_mark)) # type: ignore
@property
- def fa(self):
- # type: () -> Any
+ def fa(self) -> Any:
"""format attribute
set_flow_style()/set_block_style()"""
@@ -277,8 +379,9 @@ class CommentedBase(object):
setattr(self, Format.attrib, Format())
return getattr(self, Format.attrib)
- def yaml_add_eol_comment(self, comment, key=NoComment, column=None):
- # type: (Any, Optional[Any], Optional[Any]) -> None
+ def yaml_add_eol_comment(
+ self, comment: Any, key: Optional[Any] = NoComment, column: Optional[Any] = None
+ ) -> None:
"""
there is a problem as eol comments should start with ' #'
(but at the beginning of the line the space doesn't have to be before
@@ -299,60 +402,50 @@ class CommentedBase(object):
comment = ' ' + comment
column = 0
start_mark = CommentMark(column)
- ct = [CommentToken(comment, start_mark, None), None]
+ ct = [CommentToken(comment, start_mark), None]
self._yaml_add_eol_comment(ct, key=key)
@property
- def lc(self):
- # type: () -> Any
+ def lc(self) -> Any:
if not hasattr(self, LineCol.attrib):
setattr(self, LineCol.attrib, LineCol())
return getattr(self, LineCol.attrib)
- def _yaml_set_line_col(self, line, col):
- # type: (Any, Any) -> None
+ def _yaml_set_line_col(self, line: Any, col: Any) -> None:
self.lc.line = line
self.lc.col = col
- def _yaml_set_kv_line_col(self, key, data):
- # type: (Any, Any) -> None
+ def _yaml_set_kv_line_col(self, key: Any, data: Any) -> None:
self.lc.add_kv_line_col(key, data)
- def _yaml_set_idx_line_col(self, key, data):
- # type: (Any, Any) -> None
+ def _yaml_set_idx_line_col(self, key: Any, data: Any) -> None:
self.lc.add_idx_line_col(key, data)
@property
- def anchor(self):
- # type: () -> Any
+ def anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
setattr(self, Anchor.attrib, Anchor())
return getattr(self, Anchor.attrib)
- def yaml_anchor(self):
- # type: () -> Any
+ def yaml_anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
return None
return self.anchor
- def yaml_set_anchor(self, value, always_dump=False):
- # type: (Any, bool) -> None
+ def yaml_set_anchor(self, value: Any, always_dump: bool = False) -> None:
self.anchor.value = value
self.anchor.always_dump = always_dump
@property
- def tag(self):
- # type: () -> Any
+ def tag(self) -> Any:
if not hasattr(self, Tag.attrib):
setattr(self, Tag.attrib, Tag())
return getattr(self, Tag.attrib)
- def yaml_set_tag(self, value):
- # type: (Any) -> None
+ def yaml_set_tag(self, value: Any) -> None:
self.tag.value = value
- def copy_attributes(self, t, memo=None):
- # type: (Any, Any) -> None
+ def copy_attributes(self, t: Any, memo: Any = None) -> None:
# fmt: off
for a in [Comment.attrib, Format.attrib, LineCol.attrib, Anchor.attrib,
Tag.attrib, merge_attrib]:
@@ -363,44 +456,37 @@ class CommentedBase(object):
setattr(t, a, getattr(self, a))
# fmt: on
- def _yaml_add_eol_comment(self, comment, key):
- # type: (Any, Any) -> None
+ def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
raise NotImplementedError
- def _yaml_get_pre_comment(self):
- # type: () -> Any
+ def _yaml_get_pre_comment(self) -> Any:
raise NotImplementedError
- def _yaml_get_column(self, key):
- # type: (Any) -> Any
+ def _yaml_get_column(self, key: Any) -> Any:
raise NotImplementedError
class CommentedSeq(MutableSliceableSequence, list, CommentedBase): # type: ignore
__slots__ = (Comment.attrib, '_lst')
- def __init__(self, *args, **kw):
- # type: (Any, Any) -> None
+ def __init__(self, *args: Any, **kw: Any) -> None:
list.__init__(self, *args, **kw)
- def __getsingleitem__(self, idx):
- # type: (Any) -> Any
+ def __getsingleitem__(self, idx: Any) -> Any:
return list.__getitem__(self, idx)
- def __setsingleitem__(self, idx, value):
- # type: (Any, Any) -> None
+ def __setsingleitem__(self, idx: Any, value: Any) -> None:
# try to preserve the scalarstring type if setting an existing key to a new value
if idx < len(self):
if (
- isinstance(value, string_types)
+ isinstance(value, str)
and not isinstance(value, ScalarString)
and isinstance(self[idx], ScalarString)
):
value = type(self[idx])(value)
list.__setitem__(self, idx, value)
- def __delsingleitem__(self, idx=None):
- # type: (Any) -> Any
+ def __delsingleitem__(self, idx: Any = None) -> Any:
list.__delitem__(self, idx)
self.ca.items.pop(idx, None) # might not be there -> default value
for list_index in sorted(self.ca.items):
@@ -408,12 +494,10 @@ class CommentedSeq(MutableSliceableSequence, list, CommentedBase): # type: igno
continue
self.ca.items[list_index - 1] = self.ca.items.pop(list_index)
- def __len__(self):
- # type: () -> int
+ def __len__(self) -> int:
return list.__len__(self)
- def insert(self, idx, val):
- # type: (Any, Any) -> None
+ def insert(self, idx: Any, val: Any) -> None:
"""the comments after the insertion have to move forward"""
list.insert(self, idx, val)
for list_index in sorted(self.ca.items, reverse=True):
@@ -421,31 +505,25 @@ class CommentedSeq(MutableSliceableSequence, list, CommentedBase): # type: igno
break
self.ca.items[list_index + 1] = self.ca.items.pop(list_index)
- def extend(self, val):
- # type: (Any) -> None
+ def extend(self, val: Any) -> None:
list.extend(self, val)
- def __eq__(self, other):
- # type: (Any) -> bool
+ def __eq__(self, other: Any) -> bool:
return list.__eq__(self, other)
- def _yaml_add_comment(self, comment, key=NoComment):
- # type: (Any, Optional[Any]) -> None
+ def _yaml_add_comment(self, comment: Any, key: Optional[Any] = NoComment) -> None:
if key is not NoComment:
self.yaml_key_comment_extend(key, comment)
else:
self.ca.comment = comment
- def _yaml_add_eol_comment(self, comment, key):
- # type: (Any, Any) -> None
+ def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
self._yaml_add_comment(comment, key=key)
- def _yaml_get_columnX(self, key):
- # type: (Any) -> Any
+ def _yaml_get_columnX(self, key: Any) -> Any:
return self.ca.items[key][0].start_mark.column
- def _yaml_get_column(self, key):
- # type: (Any) -> Any
+ def _yaml_get_column(self, key: Any) -> Any:
column = None
sel_idx = None
pre, post = key - 1, key + 1
@@ -465,17 +543,23 @@ class CommentedSeq(MutableSliceableSequence, list, CommentedBase): # type: igno
column = self._yaml_get_columnX(sel_idx)
return column
- def _yaml_get_pre_comment(self):
- # type: () -> Any
- pre_comments = [] # type: List[Any]
+ def _yaml_get_pre_comment(self) -> Any:
+ pre_comments: List[Any] = []
+ if self.ca.comment is None:
+ self.ca.comment = [None, pre_comments]
+ else:
+ pre_comments = self.ca.comment[1]
+ return pre_comments
+
+ def _yaml_clear_pre_comment(self) -> Any:
+ pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
self.ca.comment[1] = pre_comments
return pre_comments
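
These helpers keep self.ca.comment in the shape [eol-or-post comment, [pre-comment tokens]]. A hedged illustration of what that looks like for a sequence with a leading comment (the token repr varies between releases):

    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    data = yaml.load("# leading\n- a\n- b\n")
    print(data.ca.comment)        # typically [None, [CommentToken('# leading\n', ...)]]
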
- def __deepcopy__(self, memo):
- # type: (Any) -> Any
+ def __deepcopy__(self, memo: Any) -> Any:
res = self.__class__()
memo[id(self)] = res
for k in self:
@@ -483,12 +567,10 @@ class CommentedSeq(MutableSliceableSequence, list, CommentedBase): # type: igno
self.copy_attributes(res, memo=memo)
return res
- def __add__(self, other):
- # type: (Any) -> Any
+ def __add__(self, other: Any) -> Any:
return list.__add__(self, other)
- def sort(self, key=None, reverse=False): # type: ignore
- # type: (Any, bool) -> None
+ def sort(self, key: Any = None, reverse: bool = False) -> None:
if key is None:
tmp_lst = sorted(zip(self, range(len(self))), reverse=reverse)
list.__init__(self, [x[0] for x in tmp_lst])
@@ -504,31 +586,26 @@ class CommentedSeq(MutableSliceableSequence, list, CommentedBase): # type: igno
if old_index in itm:
self.ca.items[idx] = itm[old_index]
- def __repr__(self):
- # type: () -> Any
+ def __repr__(self) -> Any:
return list.__repr__(self)
class CommentedKeySeq(tuple, CommentedBase): # type: ignore
"""This primarily exists to be able to roundtrip keys that are sequences"""
- def _yaml_add_comment(self, comment, key=NoComment):
- # type: (Any, Optional[Any]) -> None
+ def _yaml_add_comment(self, comment: Any, key: Optional[Any] = NoComment) -> None:
if key is not NoComment:
self.yaml_key_comment_extend(key, comment)
else:
self.ca.comment = comment
- def _yaml_add_eol_comment(self, comment, key):
- # type: (Any, Any) -> None
+ def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
self._yaml_add_comment(comment, key=key)
- def _yaml_get_columnX(self, key):
- # type: (Any) -> Any
+ def _yaml_get_columnX(self, key: Any) -> Any:
return self.ca.items[key][0].start_mark.column
- def _yaml_get_column(self, key):
- # type: (Any) -> Any
+ def _yaml_get_column(self, key: Any) -> Any:
column = None
sel_idx = None
pre, post = key - 1, key + 1
@@ -548,9 +625,16 @@ class CommentedKeySeq(tuple, CommentedBase): # type: ignore
column = self._yaml_get_columnX(sel_idx)
return column
- def _yaml_get_pre_comment(self):
- # type: () -> Any
- pre_comments = [] # type: List[Any]
+ def _yaml_get_pre_comment(self) -> Any:
+ pre_comments: List[Any] = []
+ if self.ca.comment is None:
+ self.ca.comment = [None, pre_comments]
+ else:
+ pre_comments = self.ca.comment[1]
+ return pre_comments
+
+ def _yaml_clear_pre_comment(self) -> Any:
+ pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
@@ -561,12 +645,10 @@ class CommentedKeySeq(tuple, CommentedBase): # type: ignore
class CommentedMapView(Sized):
__slots__ = ('_mapping',)
- def __init__(self, mapping):
- # type: (Any) -> None
+ def __init__(self, mapping: Any) -> None:
self._mapping = mapping
- def __len__(self):
- # type: () -> int
+ def __len__(self) -> int:
count = len(self._mapping)
return count
@@ -575,16 +657,14 @@ class CommentedMapKeysView(CommentedMapView, Set): # type: ignore
__slots__ = ()
@classmethod
- def _from_iterable(self, it):
- # type: (Any) -> Any
+ def _from_iterable(self, it: Any) -> Any:
return set(it)
- def __contains__(self, key):
- # type: (Any) -> Any
+ def __contains__(self, key: Any) -> Any:
return key in self._mapping
- def __iter__(self):
- # type: () -> Any # yield from self._mapping # not in py27, pypy
+ def __iter__(self) -> Any:
+ # yield from self._mapping # not in py27, pypy
# for x in self._mapping._keys():
for x in self._mapping:
yield x
@@ -594,12 +674,10 @@ class CommentedMapItemsView(CommentedMapView, Set): # type: ignore
__slots__ = ()
@classmethod
- def _from_iterable(self, it):
- # type: (Any) -> Any
+ def _from_iterable(self, it: Any) -> Any:
return set(it)
- def __contains__(self, item):
- # type: (Any) -> Any
+ def __contains__(self, item: Any) -> Any:
key, value = item
try:
v = self._mapping[key]
@@ -608,8 +686,7 @@ class CommentedMapItemsView(CommentedMapView, Set): # type: ignore
else:
return v == value
- def __iter__(self):
- # type: () -> Any
+ def __iter__(self) -> Any:
for key in self._mapping._keys():
yield (key, self._mapping[key])
@@ -617,30 +694,28 @@ class CommentedMapItemsView(CommentedMapView, Set): # type: ignore
class CommentedMapValuesView(CommentedMapView):
__slots__ = ()
- def __contains__(self, value):
- # type: (Any) -> Any
+ def __contains__(self, value: Any) -> Any:
for key in self._mapping:
if value == self._mapping[key]:
return True
return False
- def __iter__(self):
- # type: () -> Any
+ def __iter__(self) -> Any:
for key in self._mapping._keys():
yield self._mapping[key]
-class CommentedMap(ordereddict, CommentedBase): # type: ignore
+class CommentedMap(ordereddict, CommentedBase):
__slots__ = (Comment.attrib, '_ok', '_ref')
- def __init__(self, *args, **kw):
- # type: (Any, Any) -> None
- self._ok = set() # type: MutableSet[Any] # own keys
- self._ref = [] # type: List[CommentedMap]
+ def __init__(self, *args: Any, **kw: Any) -> None:
+ self._ok: MutableSet[Any] = set() # own keys
+ self._ref: List[CommentedMap] = []
ordereddict.__init__(self, *args, **kw)
- def _yaml_add_comment(self, comment, key=NoComment, value=NoComment):
- # type: (Any, Optional[Any], Optional[Any]) -> None
+ def _yaml_add_comment(
+ self, comment: Any, key: Optional[Any] = NoComment, value: Optional[Any] = NoComment
+ ) -> None:
"""values is set to key to indicate a value attachment of comment"""
if key is not NoComment:
self.yaml_key_comment_extend(key, comment)
@@ -650,17 +725,14 @@ class CommentedMap(ordereddict, CommentedBase): # type: ignore
else:
self.ca.comment = comment
- def _yaml_add_eol_comment(self, comment, key):
- # type: (Any, Any) -> None
+ def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
"""add on the value line, with value specified by the key"""
self._yaml_add_comment(comment, value=key)
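
The public wrapper yaml_add_eol_comment() ends up in this method with value=key. A small usage sketch; the dumped alignment is approximate:

    import io
    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    data = yaml.load("a: 1\nb: 2\n")
    data.yaml_add_eol_comment('tuned later', key='b', column=8)
    buf = io.StringIO()
    yaml.dump(data, buf)          # b: 2 now carries '# tuned later' around column 8
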
- def _yaml_get_columnX(self, key):
- # type: (Any) -> Any
+ def _yaml_get_columnX(self, key: Any) -> Any:
return self.ca.items[key][2].start_mark.column
- def _yaml_get_column(self, key):
- # type: (Any) -> Any
+ def _yaml_get_column(self, key: Any) -> Any:
column = None
sel_idx = None
pre, post, last = None, None, None
@@ -687,51 +759,60 @@ class CommentedMap(ordereddict, CommentedBase): # type: ignore
column = self._yaml_get_columnX(sel_idx)
return column
- def _yaml_get_pre_comment(self):
- # type: () -> Any
- pre_comments = [] # type: List[Any]
+ def _yaml_get_pre_comment(self) -> Any:
+ pre_comments: List[Any] = []
+ if self.ca.comment is None:
+ self.ca.comment = [None, pre_comments]
+ else:
+ pre_comments = self.ca.comment[1]
+ return pre_comments
+
+ def _yaml_clear_pre_comment(self) -> Any:
+ pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
self.ca.comment[1] = pre_comments
return pre_comments
- def update(self, *vals, **kw):
- # type: (Any, Any) -> None
+ def update(self, *vals: Any, **kw: Any) -> None:
try:
ordereddict.update(self, *vals, **kw)
except TypeError:
# probably a dict that is used
for x in vals[0]:
self[x] = vals[0][x]
- try:
- self._ok.update(vals.keys()) # type: ignore
- except AttributeError:
- # assume one argument that is a list/tuple of two element lists/tuples
- for x in vals[0]:
- self._ok.add(x[0])
+ if vals:
+ try:
+ self._ok.update(vals[0].keys()) # type: ignore
+ except AttributeError:
+ # assume one argument that is a list/tuple of two element lists/tuples
+ for x in vals[0]:
+ self._ok.add(x[0])
if kw:
self._ok.add(*kw.keys())
- def insert(self, pos, key, value, comment=None):
- # type: (Any, Any, Any, Optional[Any]) -> None
+ def insert(self, pos: Any, key: Any, value: Any, comment: Optional[Any] = None) -> None:
"""insert key value into given position
attach comment if provided
"""
+ keys = list(self.keys()) + [key]
ordereddict.insert(self, pos, key, value)
- self._ok.add(key)
+ for keytmp in keys:
+ self._ok.add(keytmp)
+ for referer in self._ref:
+ for keytmp in keys:
+ referer.update_key_value(keytmp)
if comment is not None:
self.yaml_add_eol_comment(comment, key=key)
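
Usage sketch for insert() with the optional comment argument; the dumped ordering and comment placement are indicative:

    import io
    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    data = yaml.load("first: 1\nlast: 3\n")
    data.insert(1, 'middle', 2, comment='added afterwards')
    buf = io.StringIO()
    yaml.dump(data, buf)
    print(buf.getvalue())         # first: 1 / middle: 2  # added afterwards / last: 3
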
- def mlget(self, key, default=None, list_ok=False):
- # type: (Any, Any, Any) -> Any
+ def mlget(self, key: Any, default: Any = None, list_ok: Any = False) -> Any:
"""multi-level get that expects dicts within dicts"""
if not isinstance(key, list):
return self.get(key, default)
# assume that the key is a list of recursively accessible dicts
- def get_one_level(key_list, level, d):
- # type: (Any, Any, Any) -> Any
+ def get_one_level(key_list: Any, level: Any, d: Any) -> Any:
if not list_ok:
assert isinstance(d, dict)
if level >= len(key_list):
@@ -749,8 +830,7 @@ class CommentedMap(ordereddict, CommentedBase): # type: ignore
raise
return default
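
mlget() walks nested mappings (and, with list_ok, sequences) using a list of keys; a brief sketch:

    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    data = yaml.load("a:\n  b:\n  - x\n  - y\n")
    print(data.mlget(['a', 'b', 1], list_ok=True))    # -> y
    print(data.mlget(['a', 'missing'], default=42))   # -> 42 (the KeyError is swallowed)
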
- def __getitem__(self, key):
- # type: (Any) -> Any
+ def __getitem__(self, key: Any) -> Any:
try:
return ordereddict.__getitem__(self, key)
except KeyError:
@@ -759,12 +839,11 @@ class CommentedMap(ordereddict, CommentedBase): # type: ignore
return merged[1][key]
raise
- def __setitem__(self, key, value):
- # type: (Any, Any) -> None
+ def __setitem__(self, key: Any, value: Any) -> None:
# try to preserve the scalarstring type if setting an existing key to a new value
if key in self:
if (
- isinstance(value, string_types)
+ isinstance(value, str)
and not isinstance(value, ScalarString)
and isinstance(self[key], ScalarString)
):
@@ -772,35 +851,36 @@ class CommentedMap(ordereddict, CommentedBase): # type: ignore
ordereddict.__setitem__(self, key, value)
self._ok.add(key)
- def _unmerged_contains(self, key):
- # type: (Any) -> Any
+ def _unmerged_contains(self, key: Any) -> Any:
if key in self._ok:
return True
return None
- def __contains__(self, key):
- # type: (Any) -> bool
+ def __contains__(self, key: Any) -> bool:
return bool(ordereddict.__contains__(self, key))
- def get(self, key, default=None):
- # type: (Any, Any) -> Any
+ def get(self, key: Any, default: Any = None) -> Any:
try:
return self.__getitem__(key)
except: # NOQA
return default
- def __repr__(self):
- # type: () -> Any
- return ordereddict.__repr__(self).replace('CommentedMap', 'ordereddict')
+ def __repr__(self) -> Any:
+ res = "{"
+ sep = ''
+ for k, v in self.items():
+ res += f'{sep}{k!r}: {v!r}'
+ if not sep:
+ sep = ', '
+ res += '}'
+ return res
- def non_merged_items(self):
- # type: () -> Any
+ def non_merged_items(self) -> Any:
for x in ordereddict.__iter__(self):
if x in self._ok:
yield x, ordereddict.__getitem__(self, x)
- def __delitem__(self, key):
- # type: (Any) -> None
+ def __delitem__(self, key: Any) -> None:
# for merged in getattr(self, merge_attrib, []):
# if key in merged[1]:
# value = merged[1][key]
@@ -809,7 +889,7 @@ class CommentedMap(ordereddict, CommentedBase): # type: ignore
# # not found in merged in stuff
# ordereddict.__delitem__(self, key)
# for referer in self._ref:
- # referer.update_key_value(key)
+ # referer.update_key_value(key)
# return
#
# ordereddict.__setitem__(self, key, value) # merge might have different value
@@ -819,126 +899,60 @@ class CommentedMap(ordereddict, CommentedBase): # type: ignore
for referer in self._ref:
referer.update_key_value(key)
- def __iter__(self):
- # type: () -> Any
+ def __iter__(self) -> Any:
for x in ordereddict.__iter__(self):
yield x
- def _keys(self):
- # type: () -> Any
+ def _keys(self) -> Any:
for x in ordereddict.__iter__(self):
yield x
- def __len__(self):
- # type: () -> int
+ def __len__(self) -> int:
return int(ordereddict.__len__(self))
- def __eq__(self, other):
- # type: (Any) -> bool
+ def __eq__(self, other: Any) -> bool:
return bool(dict(self) == other)
- if PY2:
-
- def keys(self):
- # type: () -> Any
- return list(self._keys())
-
- def iterkeys(self):
- # type: () -> Any
- return self._keys()
-
- def viewkeys(self):
- # type: () -> Any
- return CommentedMapKeysView(self)
-
- else:
-
- def keys(self):
- # type: () -> Any
- return CommentedMapKeysView(self)
+ def keys(self) -> Any:
+ return CommentedMapKeysView(self)
- if PY2:
+ def values(self) -> Any:
+ return CommentedMapValuesView(self)
- def _values(self):
- # type: () -> Any
- for x in ordereddict.__iter__(self):
- yield ordereddict.__getitem__(self, x)
-
- def values(self):
- # type: () -> Any
- return list(self._values())
-
- def itervalues(self):
- # type: () -> Any
- return self._values()
-
- def viewvalues(self):
- # type: () -> Any
- return CommentedMapValuesView(self)
-
- else:
-
- def values(self):
- # type: () -> Any
- return CommentedMapValuesView(self)
-
- def _items(self):
- # type: () -> Any
+ def _items(self) -> Any:
for x in ordereddict.__iter__(self):
yield x, ordereddict.__getitem__(self, x)
- if PY2:
-
- def items(self):
- # type: () -> Any
- return list(self._items())
-
- def iteritems(self):
- # type: () -> Any
- return self._items()
-
- def viewitems(self):
- # type: () -> Any
- return CommentedMapItemsView(self)
-
- else:
-
- def items(self):
- # type: () -> Any
- return CommentedMapItemsView(self)
+ def items(self) -> Any:
+ return CommentedMapItemsView(self)
@property
- def merge(self):
- # type: () -> Any
+ def merge(self) -> Any:
if not hasattr(self, merge_attrib):
setattr(self, merge_attrib, [])
return getattr(self, merge_attrib)
- def copy(self):
- # type: () -> Any
+ def copy(self) -> Any:
x = type(self)() # update doesn't work
for k, v in self._items():
x[k] = v
self.copy_attributes(x)
return x
- def add_referent(self, cm):
- # type: (Any) -> None
+ def add_referent(self, cm: Any) -> None:
if cm not in self._ref:
self._ref.append(cm)
- def add_yaml_merge(self, value):
- # type: (Any) -> None
+ def add_yaml_merge(self, value: Any) -> None:
for v in value:
v[1].add_referent(self)
- for k, v in v[1].items():
- if ordereddict.__contains__(self, k):
+ for k1, v1 in v[1].items():
+ if ordereddict.__contains__(self, k1):
continue
- ordereddict.__setitem__(self, k, v)
+ ordereddict.__setitem__(self, k1, v1)
self.merge.extend(value)
- def update_key_value(self, key):
- # type: (Any) -> None
+ def update_key_value(self, key: Any) -> None:
if key in self._ok:
return
for v in self.merge:
@@ -947,8 +961,7 @@ class CommentedMap(ordereddict, CommentedBase): # type: ignore
return
ordereddict.__delitem__(self, key)
- def __deepcopy__(self, memo):
- # type: (Any) -> Any
+ def __deepcopy__(self, memo: Any) -> Any:
res = self.__class__()
memo[id(self)] = res
for k in self:
@@ -959,75 +972,60 @@ class CommentedMap(ordereddict, CommentedBase): # type: ignore
# based on brownie mappings
@classmethod # type: ignore
-def raise_immutable(cls, *args, **kwargs):
- # type: (Any, *Any, **Any) -> None
- raise TypeError('{} objects are immutable'.format(cls.__name__))
+def raise_immutable(cls: Any, *args: Any, **kwargs: Any) -> None:
+ raise TypeError(f'{cls.__name__} objects are immutable')
class CommentedKeyMap(CommentedBase, Mapping): # type: ignore
__slots__ = Comment.attrib, '_od'
"""This primarily exists to be able to roundtrip keys that are mappings"""
- def __init__(self, *args, **kw):
- # type: (Any, Any) -> None
+ def __init__(self, *args: Any, **kw: Any) -> None:
if hasattr(self, '_od'):
raise_immutable(self)
try:
self._od = ordereddict(*args, **kw)
except TypeError:
- if PY2:
- self._od = ordereddict(args[0].items())
- else:
- raise
+ raise
__delitem__ = __setitem__ = clear = pop = popitem = setdefault = update = raise_immutable
# need to implement __getitem__, __iter__ and __len__
- def __getitem__(self, index):
- # type: (Any) -> Any
+ def __getitem__(self, index: Any) -> Any:
return self._od[index]
- def __iter__(self):
- # type: () -> Iterator[Any]
+ def __iter__(self) -> Iterator[Any]:
for x in self._od.__iter__():
yield x
- def __len__(self):
- # type: () -> int
+ def __len__(self) -> int:
return len(self._od)
- def __hash__(self):
- # type: () -> Any
+ def __hash__(self) -> Any:
return hash(tuple(self.items()))
- def __repr__(self):
- # type: () -> Any
+ def __repr__(self) -> Any:
if not hasattr(self, merge_attrib):
return self._od.__repr__()
return 'ordereddict(' + repr(list(self._od.items())) + ')'
@classmethod
- def fromkeys(keys, v=None):
- # type: (Any, Any) -> Any
+ def fromkeys(keys: Any, v: Any = None) -> Any:
return CommentedKeyMap(dict.fromkeys(keys, v))
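
CommentedKeyMap is what a mapping used as a key round-trips to, and it is read-only by construction. Illustrative sketch; the exception text comes from raise_immutable above:

    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    data = yaml.load("{a: 1, b: 2}: value\n")
    key = next(iter(data))
    print(type(key).__name__)     # CommentedKeyMap
    try:
        key['c'] = 3              # __setitem__ is raise_immutable
    except TypeError as exc:
        print(exc)                # CommentedKeyMap objects are immutable
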
- def _yaml_add_comment(self, comment, key=NoComment):
- # type: (Any, Optional[Any]) -> None
+ def _yaml_add_comment(self, comment: Any, key: Optional[Any] = NoComment) -> None:
if key is not NoComment:
self.yaml_key_comment_extend(key, comment)
else:
self.ca.comment = comment
- def _yaml_add_eol_comment(self, comment, key):
- # type: (Any, Any) -> None
+ def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
self._yaml_add_comment(comment, key=key)
- def _yaml_get_columnX(self, key):
- # type: (Any) -> Any
+ def _yaml_get_columnX(self, key: Any) -> Any:
return self.ca.items[key][0].start_mark.column
- def _yaml_get_column(self, key):
- # type: (Any) -> Any
+ def _yaml_get_column(self, key: Any) -> Any:
column = None
sel_idx = None
pre, post = key - 1, key + 1
@@ -1047,9 +1045,8 @@ class CommentedKeyMap(CommentedBase, Mapping): # type: ignore
column = self._yaml_get_columnX(sel_idx)
return column
- def _yaml_get_pre_comment(self):
- # type: () -> Any
- pre_comments = [] # type: List[Any]
+ def _yaml_get_pre_comment(self) -> Any:
+ pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
@@ -1064,15 +1061,15 @@ class CommentedOrderedMap(CommentedMap):
class CommentedSet(MutableSet, CommentedBase): # type: ignore # NOQA
__slots__ = Comment.attrib, 'odict'
- def __init__(self, values=None):
- # type: (Any) -> None
+ def __init__(self, values: Any = None) -> None:
self.odict = ordereddict()
MutableSet.__init__(self)
if values is not None:
- self |= values # type: ignore
+ self |= values
- def _yaml_add_comment(self, comment, key=NoComment, value=NoComment):
- # type: (Any, Optional[Any], Optional[Any]) -> None
+ def _yaml_add_comment(
+ self, comment: Any, key: Optional[Any] = NoComment, value: Optional[Any] = NoComment
+ ) -> None:
"""values is set to key to indicate a value attachment of comment"""
if key is not NoComment:
self.yaml_key_comment_extend(key, comment)
@@ -1082,69 +1079,65 @@ class CommentedSet(MutableSet, CommentedBase): # type: ignore # NOQA
else:
self.ca.comment = comment
- def _yaml_add_eol_comment(self, comment, key):
- # type: (Any, Any) -> None
+ def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
"""add on the value line, with value specified by the key"""
self._yaml_add_comment(comment, value=key)
- def add(self, value):
- # type: (Any) -> None
+ def add(self, value: Any) -> None:
"""Add an element."""
self.odict[value] = None
- def discard(self, value):
- # type: (Any) -> None
+ def discard(self, value: Any) -> None:
"""Remove an element. Do not raise an exception if absent."""
del self.odict[value]
- def __contains__(self, x):
- # type: (Any) -> Any
+ def __contains__(self, x: Any) -> Any:
return x in self.odict
- def __iter__(self):
- # type: () -> Any
+ def __iter__(self) -> Any:
for x in self.odict:
yield x
- def __len__(self):
- # type: () -> int
+ def __len__(self) -> int:
return len(self.odict)
- def __repr__(self):
- # type: () -> str
- return 'set({0!r})'.format(self.odict.keys())
+ def __repr__(self) -> str:
+ return f'set({self.odict.keys()!r})'
class TaggedScalar(CommentedBase):
# the value and style attributes are set during roundtrip construction
- def __init__(self, value=None, style=None, tag=None):
- # type: (Any, Any, Any) -> None
+ def __init__(self, value: Any = None, style: Any = None, tag: Any = None) -> None:
self.value = value
self.style = style
if tag is not None:
self.yaml_set_tag(tag)
- def __str__(self):
- # type: () -> Any
+ def __str__(self) -> Any:
return self.value
+ def count(self, s: str, start: Optional[int] = None, end: Optional[int] = None) -> Any:
+ return self.value.count(s, start, end)
+
+ def __getitem__(self, pos: int) -> Any:
+ return self.value[pos]
+
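
TaggedScalar is what an unrecognised tag on a scalar round-trips to; the new count()/__getitem__ forwarders make it behave a bit more like the wrapped string. Sketch, assuming round-trip loading (attribute layout may differ between releases):

    import io
    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    data = yaml.load("key: !vault |\n  secret-blob\n")
    ts = data['key']
    print(type(ts).__name__, str(ts).strip())   # TaggedScalar secret-blob
    buf = io.StringIO()
    yaml.dump(data, buf)                         # the !vault tag and block style survive
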
-def dump_comments(d, name="", sep='.', out=sys.stdout):
- # type: (Any, str, str, Any) -> None
+def dump_comments(d: Any, name: str = "", sep: str = '.', out: Any = sys.stdout) -> None:
"""
recursively dump comments, all but the toplevel preceded by the path
in dotted form x.0.a
"""
if isinstance(d, dict) and hasattr(d, 'ca'):
if name:
- sys.stdout.write('{}\n'.format(name))
- out.write('{}\n'.format(d.ca)) # type: ignore
+ out.write(f'{name} {type(d)}\n')
+ out.write(f'{d.ca!r}\n') # type: ignore
for k in d:
- dump_comments(d[k], name=(name + sep + k) if name else k, sep=sep, out=out)
+ dump_comments(d[k], name=(name + sep + str(k)) if name else k, sep=sep, out=out)
elif isinstance(d, list) and hasattr(d, 'ca'):
if name:
- sys.stdout.write('{}\n'.format(name))
- out.write('{}\n'.format(d.ca)) # type: ignore
+ out.write(f'{name} {type(d)}\n')
+ out.write(f'{d.ca!r}\n') # type: ignore
for idx, k in enumerate(d):
dump_comments(
k, name=(name + sep + str(idx)) if name else str(idx), sep=sep, out=out
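
Usage sketch for dump_comments(); the printed Comment reprs are version dependent:

    import sys
    import ruamel.yaml
    from ruamel.yaml.comments import dump_comments

    yaml = ruamel.yaml.YAML()
    data = yaml.load("a: 1  # eol\nb:\n- x  # item\n")
    dump_comments(data, out=sys.stdout)   # prints the top-level Comment, then the one for path 'b'
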
diff --git a/compat.py b/compat.py
index 839166f..c32d105 100644
--- a/compat.py
+++ b/compat.py
@@ -1,141 +1,69 @@
# coding: utf-8
-from __future__ import print_function
-
# partially from package six by Benjamin Peterson
import sys
import os
-import types
+import io
import traceback
from abc import abstractmethod
+import collections.abc
# fmt: off
-if False: # MYPY
- from typing import Any, Dict, Optional, List, Union, BinaryIO, IO, Text, Tuple # NOQA
- from typing import Optional # NOQA
+from typing import Any, Dict, Optional, List, Union, BinaryIO, IO, Text, Tuple # NOQA
+from typing import Optional # NOQA
+try:
+ from typing import SupportsIndex as SupportsIndex # in order to reexport for mypy
+except ImportError:
+ SupportsIndex = int # type: ignore
# fmt: on
+
_DEFAULT_YAML_VERSION = (1, 2)
try:
- from ruamel.ordereddict import ordereddict
-except: # NOQA
- try:
- from collections import OrderedDict
- except ImportError:
- from ordereddict import OrderedDict # type: ignore
+ from collections import OrderedDict
+except ImportError:
+ from ordereddict import OrderedDict # type: ignore
+
# to get the right name import ... as ordereddict doesn't do that
- class ordereddict(OrderedDict): # type: ignore
- if not hasattr(OrderedDict, 'insert'):
- def insert(self, pos, key, value):
- # type: (int, Any, Any) -> None
- if pos >= len(self):
+class ordereddict(OrderedDict): # type: ignore
+ if not hasattr(OrderedDict, 'insert'):
+
+ def insert(self, pos: int, key: Any, value: Any) -> None:
+ if pos >= len(self):
+ self[key] = value
+ return
+ od = ordereddict()
+ od.update(self)
+ for k in od:
+ del self[k]
+ for index, old_key in enumerate(od):
+ if pos == index:
self[key] = value
- return
- od = ordereddict()
- od.update(self)
- for k in od:
- del self[k]
- for index, old_key in enumerate(od):
- if pos == index:
- self[key] = value
- self[old_key] = od[old_key]
+ self[old_key] = od[old_key]
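
Behaviour of the insert() shim on the OrderedDict-based ordereddict shown above: it rebuilds the mapping so the new key lands at the requested position.

    from ruamel.yaml.compat import ordereddict

    od = ordereddict([('a', 1), ('c', 3)])
    od.insert(1, 'b', 2)          # 'b' ends up between 'a' and 'c'
    print(list(od.items()))       # [('a', 1), ('b', 2), ('c', 3)]
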
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
+StringIO = io.StringIO
+BytesIO = io.BytesIO
-if PY3:
-
- def utf8(s):
- # type: (str) -> str
- return s
-
- def to_str(s):
- # type: (str) -> str
- return s
-
- def to_unicode(s):
- # type: (str) -> str
- return s
-
-
-else:
- if False:
- unicode = str
-
- def utf8(s):
- # type: (unicode) -> str
- return s.encode('utf-8')
-
- def to_str(s):
- # type: (str) -> str
- return str(s)
+# StreamType = Union[BinaryIO, IO[str], IO[unicode], StringIO]
+# StreamType = Union[BinaryIO, IO[str], StringIO] # type: ignore
+StreamType = Any
- def to_unicode(s):
- # type: (str) -> unicode
- return unicode(s) # NOQA
+StreamTextType = StreamType # Union[Text, StreamType]
+VersionType = Union[List[int], str, Tuple[int, int]]
+builtins_module = 'builtins'
-if PY3:
- string_types = str
- integer_types = int
- class_types = type
- text_type = str
- binary_type = bytes
- MAXSIZE = sys.maxsize
- unichr = chr
- import io
-
- StringIO = io.StringIO
- BytesIO = io.BytesIO
- # have unlimited precision
- no_limit_int = int
- from collections.abc import Hashable, MutableSequence, MutableMapping, Mapping # NOQA
-
-else:
- string_types = basestring # NOQA
- integer_types = (int, long) # NOQA
- class_types = (type, types.ClassType)
- text_type = unicode # NOQA
- binary_type = str
-
- # to allow importing
- unichr = unichr
- from StringIO import StringIO as _StringIO
-
- StringIO = _StringIO
- import cStringIO
-
- BytesIO = cStringIO.StringIO
- # have unlimited precision
- no_limit_int = long # NOQA not available on Python 3
- from collections import Hashable, MutableSequence, MutableMapping, Mapping # NOQA
-
-if False: # MYPY
- # StreamType = Union[BinaryIO, IO[str], IO[unicode], StringIO]
- # StreamType = Union[BinaryIO, IO[str], StringIO] # type: ignore
- StreamType = Any
-
- StreamTextType = StreamType # Union[Text, StreamType]
- VersionType = Union[List[int], str, Tuple[int, int]]
-
-if PY3:
- builtins_module = 'builtins'
-else:
- builtins_module = '__builtin__'
-
-UNICODE_SIZE = 4 if sys.maxunicode > 65535 else 2
-
-
-def with_metaclass(meta, *bases):
- # type: (Any, Any) -> Any
+def with_metaclass(meta: Any, *bases: Any) -> Any:
"""Create a base class with a metaclass."""
return meta('NewBase', bases, {})
@@ -145,7 +73,7 @@ DBG_EVENT = 2
DBG_NODE = 4
-_debug = None # type: Optional[int]
+_debug: Optional[int] = None
if 'RUAMELDEBUG' in os.environ:
_debugx = os.environ.get('RUAMELDEBUG')
if _debugx is None:
@@ -156,26 +84,22 @@ if 'RUAMELDEBUG' in os.environ:
if bool(_debug):
- class ObjectCounter(object):
- def __init__(self):
- # type: () -> None
- self.map = {} # type: Dict[Any, Any]
+ class ObjectCounter:
+ def __init__(self) -> None:
+ self.map: Dict[Any, Any] = {}
- def __call__(self, k):
- # type: (Any) -> None
+ def __call__(self, k: Any) -> None:
self.map[k] = self.map.get(k, 0) + 1
- def dump(self):
- # type: () -> None
+ def dump(self) -> None:
for k in sorted(self.map):
- sys.stdout.write('{} -> {}'.format(k, self.map[k]))
+ sys.stdout.write(f'{k} -> {self.map[k]}')
object_counter = ObjectCounter()
# used from yaml util when testing
-def dbg(val=None):
- # type: (Any) -> Any
+def dbg(val: Any = None) -> Any:
global _debug
if _debug is None:
# set to true or false
@@ -189,15 +113,13 @@ def dbg(val=None):
return _debug & val
-class Nprint(object):
- def __init__(self, file_name=None):
- # type: (Any) -> None
- self._max_print = None # type: Any
- self._count = None # type: Any
+class Nprint:
+ def __init__(self, file_name: Any = None) -> None:
+ self._max_print: Any = None
+ self._count: Any = None
self._file_name = file_name
- def __call__(self, *args, **kw):
- # type: (Any, Any) -> None
+ def __call__(self, *args: Any, **kw: Any) -> None:
if not bool(_debug):
return
out = sys.stdout if self._file_name is None else open(self._file_name, 'a')
@@ -218,11 +140,14 @@ class Nprint(object):
if self._file_name:
out.close()
- def set_max_print(self, i):
- # type: (int) -> None
+ def set_max_print(self, i: int) -> None:
self._max_print = i
self._count = None
+ def fp(self, mode: str = 'a') -> Any:
+ out = sys.stdout if self._file_name is None else open(self._file_name, mode)
+ return out
+
nprint = Nprint()
nprintf = Nprint('/var/tmp/ruamel.yaml.log')
@@ -230,28 +155,25 @@ nprintf = Nprint('/var/tmp/ruamel.yaml.log')
# char checkers following production rules
-def check_namespace_char(ch):
- # type: (Any) -> bool
- if u'\x21' <= ch <= u'\x7E': # ! to ~
+def check_namespace_char(ch: Any) -> bool:
+ if '\x21' <= ch <= '\x7E': # ! to ~
return True
- if u'\xA0' <= ch <= u'\uD7FF':
+ if '\xA0' <= ch <= '\uD7FF':
return True
- if (u'\uE000' <= ch <= u'\uFFFD') and ch != u'\uFEFF': # excl. byte order mark
+ if ('\uE000' <= ch <= '\uFFFD') and ch != '\uFEFF': # excl. byte order mark
return True
- if u'\U00010000' <= ch <= u'\U0010FFFF':
+ if '\U00010000' <= ch <= '\U0010FFFF':
return True
return False
-def check_anchorname_char(ch):
- # type: (Any) -> bool
- if ch in u',[]{}':
+def check_anchorname_char(ch: Any) -> bool:
+ if ch in ',[]{}':
return False
return check_namespace_char(ch)
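
Quick illustration of the two character-class helpers; the results follow the production rules referenced above:

    from ruamel.yaml.compat import check_namespace_char, check_anchorname_char

    print(check_namespace_char('A'), check_namespace_char('\ufeff'))   # True False (BOM excluded)
    print(check_anchorname_char('a'), check_anchorname_char(','))      # True False (flow indicators excluded)
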
-def version_tnf(t1, t2=None):
- # type: (Any, Any) -> Any
+def version_tnf(t1: Any, t2: Any = None) -> Any:
"""
return True if ruamel.yaml version_info < t1; None if t2 is given and version_info < t2; otherwise False
"""
@@ -264,17 +186,15 @@ def version_tnf(t1, t2=None):
return False
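
Illustrative call; the result depends on the installed ruamel.yaml version:

    from ruamel.yaml.compat import version_tnf

    # True if the installed version < (0, 15, 1), None if it is < (0, 16), otherwise False
    print(version_tnf((0, 15, 1), (0, 16)))
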
-class MutableSliceableSequence(MutableSequence): # type: ignore
+class MutableSliceableSequence(collections.abc.MutableSequence): # type: ignore
__slots__ = ()
- def __getitem__(self, index):
- # type: (Any) -> Any
+ def __getitem__(self, index: Any) -> Any:
if not isinstance(index, slice):
return self.__getsingleitem__(index)
return type(self)([self[i] for i in range(*index.indices(len(self)))]) # type: ignore
- def __setitem__(self, index, value):
- # type: (Any, Any) -> None
+ def __setitem__(self, index: Any, value: Any) -> None:
if not isinstance(index, slice):
return self.__setsingleitem__(index, value)
assert iter(value)
@@ -289,19 +209,16 @@ class MutableSliceableSequence(MutableSequence): # type: ignore
# need to test before changing, in case TypeError is caught
if nr_assigned_items < len(value):
raise TypeError(
- 'too many elements in value {} < {}'.format(nr_assigned_items, len(value))
+ f'too many elements in value {nr_assigned_items} < {len(value)}'
)
elif nr_assigned_items > len(value):
raise TypeError(
- 'not enough elements in value {} > {}'.format(
- nr_assigned_items, len(value)
- )
+ f'not enough elements in value {nr_assigned_items} > {len(value)}'
)
for idx, i in enumerate(range(*range_parms)):
self[i] = value[idx]
- def __delitem__(self, index):
- # type: (Any) -> None
+ def __delitem__(self, index: Any) -> None:
if not isinstance(index, slice):
return self.__delsingleitem__(index)
# nprint(index.start, index.stop, index.step, index.indices(len(self)))
@@ -309,16 +226,13 @@ class MutableSliceableSequence(MutableSequence): # type: ignore
del self[i]
@abstractmethod
- def __getsingleitem__(self, index):
- # type: (Any) -> Any
+ def __getsingleitem__(self, index: Any) -> Any:
raise IndexError
@abstractmethod
- def __setsingleitem__(self, index, value):
- # type: (Any, Any) -> None
+ def __setsingleitem__(self, index: Any, value: Any) -> None:
raise IndexError
@abstractmethod
- def __delsingleitem__(self, index):
- # type: (Any) -> None
+ def __delsingleitem__(self, index: Any) -> None:
raise IndexError
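
A minimal illustrative subclass: implementing the three single-item hooks (plus __len__ and insert) is enough to get full slice semantics from the base class.

    from ruamel.yaml.compat import MutableSliceableSequence

    class MyList(MutableSliceableSequence):
        def __init__(self, data=None):
            self._data = list(data or [])
        def __len__(self):
            return len(self._data)
        def insert(self, idx, value):
            self._data.insert(idx, value)
        def __getsingleitem__(self, idx):
            return self._data[idx]
        def __setsingleitem__(self, idx, value):
            self._data[idx] = value
        def __delsingleitem__(self, idx):
            del self._data[idx]

    m = MyList(range(6))
    print(list(m[1:5:2]))    # [1, 3] -- slicing handled by MutableSliceableSequence
    del m[::2]               # extended-slice delete, item by item
    print(list(m))           # [1, 3, 5]
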
diff --git a/composer.py b/composer.py
index d8d3d11..c943c1b 100644
--- a/composer.py
+++ b/composer.py
@@ -1,11 +1,9 @@
# coding: utf-8
-from __future__ import absolute_import, print_function
-
import warnings
from ruamel.yaml.error import MarkedYAMLError, ReusedAnchorWarning
-from ruamel.yaml.compat import utf8, nprint, nprintf # NOQA
+from ruamel.yaml.compat import nprint, nprintf # NOQA
from ruamel.yaml.events import (
StreamStartEvent,
@@ -19,8 +17,7 @@ from ruamel.yaml.events import (
)
from ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode
-if False: # MYPY
- from typing import Any, Dict, Optional, List # NOQA
+from typing import Any, Dict, Optional, List # NOQA
__all__ = ['Composer', 'ComposerError']
@@ -29,31 +26,27 @@ class ComposerError(MarkedYAMLError):
pass
-class Composer(object):
- def __init__(self, loader=None):
- # type: (Any) -> None
+class Composer:
+ def __init__(self, loader: Any = None) -> None:
self.loader = loader
if self.loader is not None and getattr(self.loader, '_composer', None) is None:
self.loader._composer = self
- self.anchors = {} # type: Dict[Any, Any]
+ self.anchors: Dict[Any, Any] = {}
@property
- def parser(self):
- # type: () -> Any
+ def parser(self) -> Any:
if hasattr(self.loader, 'typ'):
self.loader.parser
return self.loader._parser
@property
- def resolver(self):
- # type: () -> Any
+ def resolver(self) -> Any:
# assert self.loader._resolver is not None
if hasattr(self.loader, 'typ'):
self.loader.resolver
return self.loader._resolver
- def check_node(self):
- # type: () -> Any
+ def check_node(self) -> Any:
# Drop the STREAM-START event.
if self.parser.check_event(StreamStartEvent):
self.parser.get_event()
@@ -61,19 +54,17 @@ class Composer(object):
# If there are more documents available?
return not self.parser.check_event(StreamEndEvent)
- def get_node(self):
- # type: () -> Any
+ def get_node(self) -> Any:
# Get the root node of the next document.
if not self.parser.check_event(StreamEndEvent):
return self.compose_document()
- def get_single_node(self):
- # type: () -> Any
+ def get_single_node(self) -> Any:
# Drop the STREAM-START event.
self.parser.get_event()
# Compose a document if the stream is not empty.
- document = None # type: Any
+ document: Any = None
if not self.parser.check_event(StreamEndEvent):
document = self.compose_document()
@@ -92,8 +83,7 @@ class Composer(object):
return document
- def compose_document(self):
- # type: (Any) -> Any
+ def compose_document(self: Any) -> Any:
# Drop the DOCUMENT-START event.
self.parser.get_event()
@@ -106,29 +96,28 @@ class Composer(object):
self.anchors = {}
return node
- def compose_node(self, parent, index):
- # type: (Any, Any) -> Any
+ def return_alias(self, a: Any) -> Any:
+ return a
+
+ def compose_node(self, parent: Any, index: Any) -> Any:
if self.parser.check_event(AliasEvent):
event = self.parser.get_event()
alias = event.anchor
if alias not in self.anchors:
raise ComposerError(
- None, None, 'found undefined alias %r' % utf8(alias), event.start_mark
+ None, None, f'found undefined alias {alias!r}', event.start_mark,
)
- return self.anchors[alias]
+ return self.return_alias(self.anchors[alias])
event = self.parser.peek_event()
anchor = event.anchor
if anchor is not None: # have an anchor
if anchor in self.anchors:
- # raise ComposerError(
- # "found duplicate anchor %r; first occurrence"
- # % utf8(anchor), self.anchors[anchor].start_mark,
- # "second occurrence", event.start_mark)
ws = (
- '\nfound duplicate anchor {!r}\nfirst occurrence {}\nsecond occurrence '
- '{}'.format((anchor), self.anchors[anchor].start_mark, event.start_mark)
+ f'\nfound duplicate anchor {anchor!r}\n'
+ f'first occurrence {self.anchors[anchor].start_mark}\n'
+ f'second occurrence {event.start_mark}'
)
- warnings.warn(ws, ReusedAnchorWarning)
+ warnings.warn(ws, ReusedAnchorWarning, stacklevel=2)
self.resolver.descend_resolver(parent, index)
if self.parser.check_event(ScalarEvent):
node = self.compose_scalar_node(anchor)
@@ -139,11 +128,10 @@ class Composer(object):
self.resolver.ascend_resolver()
return node
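
Sketch of the duplicate-anchor behaviour: a reused anchor is rebound to its latest definition and a ReusedAnchorWarning is emitted rather than an error (output is indicative):

    import warnings
    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    doc = "a: &x 1\nb: &x 2\nc: *x\n"
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        data = yaml.load(doc)
    print(data['c'], [type(w.message).__name__ for w in caught])
    # expected roughly: 2 ['ReusedAnchorWarning']
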
- def compose_scalar_node(self, anchor):
- # type: (Any) -> Any
+ def compose_scalar_node(self, anchor: Any) -> Any:
event = self.parser.get_event()
tag = event.tag
- if tag is None or tag == u'!':
+ if tag is None or tag == '!':
tag = self.resolver.resolve(ScalarNode, event.value, event.implicit)
node = ScalarNode(
tag,
@@ -158,11 +146,10 @@ class Composer(object):
self.anchors[anchor] = node
return node
- def compose_sequence_node(self, anchor):
- # type: (Any) -> Any
+ def compose_sequence_node(self, anchor: Any) -> Any:
start_event = self.parser.get_event()
tag = start_event.tag
- if tag is None or tag == u'!':
+ if tag is None or tag == '!':
tag = self.resolver.resolve(SequenceNode, None, start_event.implicit)
node = SequenceNode(
tag,
@@ -182,20 +169,19 @@ class Composer(object):
end_event = self.parser.get_event()
if node.flow_style is True and end_event.comment is not None:
if node.comment is not None:
+ x = node.flow_style
nprint(
- 'Warning: unexpected end_event commment in sequence '
- 'node {}'.format(node.flow_style)
+ f'Warning: unexpected end_event comment in sequence node {x}'
)
node.comment = end_event.comment
node.end_mark = end_event.end_mark
self.check_end_doc_comment(end_event, node)
return node
- def compose_mapping_node(self, anchor):
- # type: (Any) -> Any
+ def compose_mapping_node(self, anchor: Any) -> Any:
start_event = self.parser.get_event()
tag = start_event.tag
- if tag is None or tag == u'!':
+ if tag is None or tag == '!':
tag = self.resolver.resolve(MappingNode, None, start_event.implicit)
node = MappingNode(
tag,
@@ -225,8 +211,7 @@ class Composer(object):
self.check_end_doc_comment(end_event, node)
return node
- def check_end_doc_comment(self, end_event, node):
- # type: (Any, Any) -> None
+ def check_end_doc_comment(self, end_event: Any, node: Any) -> None:
if end_event.comment and end_event.comment[1]:
# pre comments on an end_event, no following to move to
if node.comment is None:
diff --git a/configobjwalker.py b/configobjwalker.py
index cbc6148..a6faa88 100644
--- a/configobjwalker.py
+++ b/configobjwalker.py
@@ -4,11 +4,12 @@ import warnings
from ruamel.yaml.util import configobj_walker as new_configobj_walker
-if False: # MYPY
- from typing import Any # NOQA
+from typing import Any
-def configobj_walker(cfg):
- # type: (Any) -> Any
- warnings.warn('configobj_walker has moved to ruamel.yaml.util, please update your code')
+def configobj_walker(cfg: Any) -> Any:
+ warnings.warn(
+ 'configobj_walker has moved to ruamel.yaml.util, please update your code',
+ stacklevel=2
+ )
return new_configobj_walker(cfg)
diff --git a/constructor.py b/constructor.py
index ddd2915..dc7e5ed 100644
--- a/constructor.py
+++ b/constructor.py
@@ -1,29 +1,29 @@
# coding: utf-8
-from __future__ import print_function, absolute_import, division
-
import datetime
import base64
import binascii
-import re
import sys
import types
import warnings
+from collections.abc import Hashable, MutableSequence, MutableMapping
# fmt: off
from ruamel.yaml.error import (MarkedYAMLError, MarkedYAMLFutureWarning,
MantissaNoDotYAML1_1Warning)
from ruamel.yaml.nodes import * # NOQA
from ruamel.yaml.nodes import (SequenceNode, MappingNode, ScalarNode)
-from ruamel.yaml.compat import (utf8, builtins_module, to_str, PY2, PY3, # NOQA
- text_type, nprint, nprintf, version_tnf)
-from ruamel.yaml.compat import ordereddict, Hashable, MutableSequence # type: ignore
-from ruamel.yaml.compat import MutableMapping # type: ignore
+from ruamel.yaml.compat import (builtins_module, # NOQA
+ nprint, nprintf, version_tnf)
+from ruamel.yaml.compat import ordereddict
from ruamel.yaml.comments import * # NOQA
from ruamel.yaml.comments import (CommentedMap, CommentedOrderedMap, CommentedSet,
CommentedKeySeq, CommentedSeq, TaggedScalar,
- CommentedKeyMap)
+ CommentedKeyMap,
+ C_KEY_PRE, C_KEY_EOL, C_KEY_POST,
+ C_VALUE_PRE, C_VALUE_EOL, C_VALUE_POST,
+ )
from ruamel.yaml.scalarstring import (SingleQuotedScalarString, DoubleQuotedScalarString,
LiteralScalarString, FoldedScalarString,
PlainScalarString, ScalarString,)
@@ -31,10 +31,9 @@ from ruamel.yaml.scalarint import ScalarInt, BinaryInt, OctalInt, HexInt, HexCap
from ruamel.yaml.scalarfloat import ScalarFloat
from ruamel.yaml.scalarbool import ScalarBoolean
from ruamel.yaml.timestamp import TimeStamp
-from ruamel.yaml.util import RegExp
+from ruamel.yaml.util import timestamp_regexp, create_timestamp
-if False: # MYPY
- from typing import Any, Dict, List, Set, Generator, Union, Optional # NOQA
+from typing import Any, Dict, List, Set, Iterator, Union, Optional # NOQA
__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
@@ -50,71 +49,71 @@ class DuplicateKeyFutureWarning(MarkedYAMLFutureWarning):
pass
-class DuplicateKeyError(MarkedYAMLFutureWarning):
+class DuplicateKeyError(MarkedYAMLError):
pass
-class BaseConstructor(object):
+class BaseConstructor:
yaml_constructors = {} # type: Dict[Any, Any]
yaml_multi_constructors = {} # type: Dict[Any, Any]
- def __init__(self, preserve_quotes=None, loader=None):
- # type: (Optional[bool], Any) -> None
+ def __init__(self, preserve_quotes: Optional[bool] = None, loader: Any = None) -> None:
self.loader = loader
if self.loader is not None and getattr(self.loader, '_constructor', None) is None:
self.loader._constructor = self
self.loader = loader
self.yaml_base_dict_type = dict
self.yaml_base_list_type = list
- self.constructed_objects = {} # type: Dict[Any, Any]
- self.recursive_objects = {} # type: Dict[Any, Any]
- self.state_generators = [] # type: List[Any]
+ self.constructed_objects: Dict[Any, Any] = {}
+ self.recursive_objects: Dict[Any, Any] = {}
+ self.state_generators: List[Any] = []
self.deep_construct = False
self._preserve_quotes = preserve_quotes
self.allow_duplicate_keys = version_tnf((0, 15, 1), (0, 16))
@property
- def composer(self):
- # type: () -> Any
+ def composer(self) -> Any:
if hasattr(self.loader, 'typ'):
return self.loader.composer
try:
return self.loader._composer
except AttributeError:
- sys.stdout.write('slt {}\n'.format(type(self)))
- sys.stdout.write('slc {}\n'.format(self.loader._composer))
- sys.stdout.write('{}\n'.format(dir(self)))
+ sys.stdout.write(f'slt {type(self)}\n')
+ sys.stdout.write(f'slc {self.loader._composer}\n')
+ sys.stdout.write(f'{dir(self)}\n')
raise
@property
- def resolver(self):
- # type: () -> Any
+ def resolver(self) -> Any:
if hasattr(self.loader, 'typ'):
return self.loader.resolver
return self.loader._resolver
- def check_data(self):
- # type: () -> Any
+ @property
+ def scanner(self) -> Any:
+ # needed to get to the expanded comments
+ if hasattr(self.loader, 'typ'):
+ return self.loader.scanner
+ return self.loader._scanner
+
+ def check_data(self) -> Any:
# If there are more documents available?
return self.composer.check_node()
- def get_data(self):
- # type: () -> Any
+ def get_data(self) -> Any:
# Construct and return the next document.
if self.composer.check_node():
return self.construct_document(self.composer.get_node())
- def get_single_data(self):
- # type: () -> Any
+ def get_single_data(self) -> Any:
# Ensure that the stream contains a single document and construct it.
node = self.composer.get_single_node()
if node is not None:
return self.construct_document(node)
return None
- def construct_document(self, node):
- # type: (Any) -> Any
+ def construct_document(self, node: Any) -> Any:
data = self.construct_object(node)
while bool(self.state_generators):
state_generators = self.state_generators
@@ -127,8 +126,7 @@ class BaseConstructor(object):
self.deep_construct = False
return data
- def construct_object(self, node, deep=False):
- # type: (Any, bool) -> Any
+ def construct_object(self, node: Any, deep: bool = False) -> Any:
"""deep is True when creating an object/mapping recursively,
in that case want the underlying elements available during construction
"""
@@ -151,9 +149,8 @@ class BaseConstructor(object):
self.deep_construct = old_deep
return data
- def construct_non_recursive_object(self, node, tag=None):
- # type: (Any, Optional[str]) -> Any
- constructor = None # type: Any
+ def construct_non_recursive_object(self, node: Any, tag: Optional[str] = None) -> Any:
+ constructor: Any = None
tag_suffix = None
if tag is None:
tag = node.tag
@@ -191,33 +188,33 @@ class BaseConstructor(object):
self.state_generators.append(generator)
return data
- def construct_scalar(self, node):
- # type: (Any) -> Any
+ def construct_scalar(self, node: Any) -> Any:
if not isinstance(node, ScalarNode):
raise ConstructorError(
- None, None, 'expected a scalar node, but found %s' % node.id, node.start_mark
+ None, None, f'expected a scalar node, but found {node.id!s}', node.start_mark,
)
return node.value
- def construct_sequence(self, node, deep=False):
- # type: (Any, bool) -> Any
+ def construct_sequence(self, node: Any, deep: bool = False) -> Any:
"""deep is True when creating an object/mapping recursively,
in that case want the underlying elements available during construction
"""
if not isinstance(node, SequenceNode):
raise ConstructorError(
- None, None, 'expected a sequence node, but found %s' % node.id, node.start_mark
+ None,
+ None,
+ f'expected a sequence node, but found {node.id!s}',
+ node.start_mark,
)
return [self.construct_object(child, deep=deep) for child in node.value]
- def construct_mapping(self, node, deep=False):
- # type: (Any, bool) -> Any
+ def construct_mapping(self, node: Any, deep: bool = False) -> Any:
"""deep is True when creating an object/mapping recursively,
in that case want the underlying elements available during construction
"""
if not isinstance(node, MappingNode):
raise ConstructorError(
- None, None, 'expected a mapping node, but found %s' % node.id, node.start_mark
+ None, None, f'expected a mapping node, but found {node.id!s}', node.start_mark,
)
total_mapping = self.yaml_base_dict_type()
if getattr(node, 'merge', None) is not None:
@@ -225,7 +222,7 @@ class BaseConstructor(object):
else:
todo = [(node.value, True)]
for values, check in todo:
- mapping = self.yaml_base_dict_type() # type: Dict[Any, Any]
+ mapping: Dict[Any, Any] = self.yaml_base_dict_type()
for key_node, value_node in values:
# keys can be list -> deep
key = self.construct_object(key_node, deep=True)
@@ -233,24 +230,13 @@ class BaseConstructor(object):
if not isinstance(key, Hashable):
if isinstance(key, list):
key = tuple(key)
- if PY2:
- try:
- hash(key)
- except TypeError as exc:
- raise ConstructorError(
- 'while constructing a mapping',
- node.start_mark,
- 'found unacceptable key (%s)' % exc,
- key_node.start_mark,
- )
- else:
- if not isinstance(key, Hashable):
- raise ConstructorError(
- 'while constructing a mapping',
- node.start_mark,
- 'found unhashable key',
- key_node.start_mark,
- )
+ if not isinstance(key, Hashable):
+ raise ConstructorError(
+ 'while constructing a mapping',
+ node.start_mark,
+ 'found unhashable key',
+ key_node.start_mark,
+ )
value = self.construct_object(value_node, deep=deep)
if check:
@@ -261,24 +247,18 @@ class BaseConstructor(object):
total_mapping.update(mapping)
return total_mapping
- def check_mapping_key(self, node, key_node, mapping, key, value):
- # type: (Any, Any, Any, Any, Any) -> bool
+ def check_mapping_key(
+ self, node: Any, key_node: Any, mapping: Any, key: Any, value: Any
+ ) -> bool:
"""return True if key is unique"""
if key in mapping:
if not self.allow_duplicate_keys:
mk = mapping.get(key)
- if PY2:
- if isinstance(key, unicode):
- key = key.encode('utf-8')
- if isinstance(value, unicode):
- value = value.encode('utf-8')
- if isinstance(mk, unicode):
- mk = mk.encode('utf-8')
args = [
'while constructing a mapping',
node.start_mark,
- 'found duplicate key "{}" with value "{}" '
- '(original value: "{}")'.format(key, value, mk),
+ f'found duplicate key "{key}" with value "{value}" '
+ f'(original value: "{mk}")',
key_node.start_mark,
"""
To suppress this check see:
@@ -290,23 +270,19 @@ class BaseConstructor(object):
""",
]
if self.allow_duplicate_keys is None:
- warnings.warn(DuplicateKeyFutureWarning(*args))
+ warnings.warn(DuplicateKeyFutureWarning(*args), stacklevel=1)
else:
raise DuplicateKeyError(*args)
return False
return True
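
With DuplicateKeyError now derived from MarkedYAMLError (see above), duplicate keys are rejected by default; a short sketch, with the message truncated to its first line:

    import ruamel.yaml
    from ruamel.yaml.constructor import DuplicateKeyError

    yaml = ruamel.yaml.YAML()
    try:
        yaml.load("a: 1\na: 2\n")
    except DuplicateKeyError as exc:
        print('duplicate key rejected:', str(exc).splitlines()[0])
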
- def check_set_key(self, node, key_node, setting, key):
- # type: (Any, Any, Any, Any, Any) -> None
+ def check_set_key(self: Any, node: Any, key_node: Any, setting: Any, key: Any) -> None:
if key in setting:
if not self.allow_duplicate_keys:
- if PY2:
- if isinstance(key, unicode):
- key = key.encode('utf-8')
args = [
'while constructing a set',
node.start_mark,
- 'found duplicate key "{}"'.format(key),
+ f'found duplicate key "{key}"',
key_node.start_mark,
"""
To suppress this check see:
@@ -318,15 +294,14 @@ class BaseConstructor(object):
""",
]
if self.allow_duplicate_keys is None:
- warnings.warn(DuplicateKeyFutureWarning(*args))
+ warnings.warn(DuplicateKeyFutureWarning(*args), stacklevel=1)
else:
raise DuplicateKeyError(*args)
- def construct_pairs(self, node, deep=False):
- # type: (Any, bool) -> Any
+ def construct_pairs(self, node: Any, deep: bool = False) -> Any:
if not isinstance(node, MappingNode):
raise ConstructorError(
- None, None, 'expected a mapping node, but found %s' % node.id, node.start_mark
+ None, None, f'expected a mapping node, but found {node.id!s}', node.start_mark,
)
pairs = []
for key_node, value_node in node.value:
@@ -336,41 +311,37 @@ class BaseConstructor(object):
return pairs
@classmethod
- def add_constructor(cls, tag, constructor):
- # type: (Any, Any) -> None
+ def add_constructor(cls, tag: Any, constructor: Any) -> None:
if 'yaml_constructors' not in cls.__dict__:
cls.yaml_constructors = cls.yaml_constructors.copy()
cls.yaml_constructors[tag] = constructor
@classmethod
- def add_multi_constructor(cls, tag_prefix, multi_constructor):
- # type: (Any, Any) -> None
+ def add_multi_constructor(cls, tag_prefix: Any, multi_constructor: Any) -> None:
if 'yaml_multi_constructors' not in cls.__dict__:
cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
cls.yaml_multi_constructors[tag_prefix] = multi_constructor
class SafeConstructor(BaseConstructor):
- def construct_scalar(self, node):
- # type: (Any) -> Any
+ def construct_scalar(self, node: Any) -> Any:
if isinstance(node, MappingNode):
for key_node, value_node in node.value:
- if key_node.tag == u'tag:yaml.org,2002:value':
+ if key_node.tag == 'tag:yaml.org,2002:value':
return self.construct_scalar(value_node)
return BaseConstructor.construct_scalar(self, node)
- def flatten_mapping(self, node):
- # type: (Any) -> Any
+ def flatten_mapping(self, node: Any) -> Any:
"""
This implements the merge key feature http://yaml.org/type/merge.html
by inserting keys from the merge dict/list of dicts if not yet
available in this node
"""
- merge = [] # type: List[Any]
+ merge: List[Any] = []
index = 0
while index < len(node.value):
key_node, value_node = node.value[index]
- if key_node.tag == u'tag:yaml.org,2002:merge':
+ if key_node.tag == 'tag:yaml.org,2002:merge':
if merge: # double << key
if self.allow_duplicate_keys:
del node.value[index]
@@ -379,7 +350,7 @@ class SafeConstructor(BaseConstructor):
args = [
'while constructing a mapping',
node.start_mark,
- 'found duplicate key "{}"'.format(key_node.value),
+ f'found duplicate key "{key_node.value}"',
key_node.start_mark,
"""
To suppress this check see:
@@ -391,7 +362,7 @@ class SafeConstructor(BaseConstructor):
""",
]
if self.allow_duplicate_keys is None:
- warnings.warn(DuplicateKeyFutureWarning(*args))
+ warnings.warn(DuplicateKeyFutureWarning(*args), stacklevel=1)
else:
raise DuplicateKeyError(*args)
del node.value[index]
@@ -405,7 +376,7 @@ class SafeConstructor(BaseConstructor):
raise ConstructorError(
'while constructing a mapping',
node.start_mark,
- 'expected a mapping for merging, but found %s' % subnode.id,
+ f'expected a mapping for merging, but found {subnode.id!s}',
subnode.start_mark,
)
self.flatten_mapping(subnode)
@@ -418,11 +389,11 @@ class SafeConstructor(BaseConstructor):
'while constructing a mapping',
node.start_mark,
'expected a mapping or list of mappings for merging, '
- 'but found %s' % value_node.id,
+ f'but found {value_node.id!s}',
value_node.start_mark,
)
- elif key_node.tag == u'tag:yaml.org,2002:value':
- key_node.tag = u'tag:yaml.org,2002:str'
+ elif key_node.tag == 'tag:yaml.org,2002:value':
+ key_node.tag = 'tag:yaml.org,2002:str'
index += 1
else:
index += 1
@@ -430,8 +401,7 @@ class SafeConstructor(BaseConstructor):
node.merge = merge # separate merge keys to be able to update without duplicate
node.value = merge + node.value
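
The merge-key handling implemented by flatten_mapping(), seen from the loading API; a straightforward sketch using the safe loader:

    import ruamel.yaml

    yaml = ruamel.yaml.YAML(typ='safe')
    doc = """\
    defaults: &defaults
      timeout: 30
      retries: 3
    service:
      <<: *defaults
      retries: 5
    """
    data = yaml.load(doc)
    print(data['service'])    # {'timeout': 30, 'retries': 5} -- an explicit key wins over a merged one
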
- def construct_mapping(self, node, deep=False):
- # type: (Any, bool) -> Any
+ def construct_mapping(self, node: Any, deep: bool = False) -> Any:
"""deep is True when creating an object/mapping recursively,
in that case want the underlying elements available during construction
"""
@@ -439,31 +409,28 @@ class SafeConstructor(BaseConstructor):
self.flatten_mapping(node)
return BaseConstructor.construct_mapping(self, node, deep=deep)
- def construct_yaml_null(self, node):
- # type: (Any) -> Any
+ def construct_yaml_null(self, node: Any) -> Any:
self.construct_scalar(node)
return None
# YAML 1.2 spec doesn't mention yes/no etc any more, 1.1 does
bool_values = {
- u'yes': True,
- u'no': False,
- u'y': True,
- u'n': False,
- u'true': True,
- u'false': False,
- u'on': True,
- u'off': False,
+ 'yes': True,
+ 'no': False,
+ 'y': True,
+ 'n': False,
+ 'true': True,
+ 'false': False,
+ 'on': True,
+ 'off': False,
}
- def construct_yaml_bool(self, node):
- # type: (Any) -> bool
+ def construct_yaml_bool(self, node: Any) -> bool:
value = self.construct_scalar(node)
return self.bool_values[value.lower()]
- def construct_yaml_int(self, node):
- # type: (Any) -> int
- value_s = to_str(self.construct_scalar(node))
+ def construct_yaml_int(self, node: Any) -> int:
+ value_s = self.construct_scalar(node)
value_s = value_s.replace('_', "")
sign = +1
if value_s[0] == '-':
@@ -497,9 +464,8 @@ class SafeConstructor(BaseConstructor):
inf_value *= inf_value
nan_value = -inf_value / inf_value # Trying to make a quiet NaN (like C99).
- def construct_yaml_float(self, node):
- # type: (Any) -> float
- value_so = to_str(self.construct_scalar(node))
+ def construct_yaml_float(self, node: Any) -> float:
+ value_so = self.construct_scalar(node)
value_s = value_so.replace('_', "").lower()
sign = +1
if value_s[0] == '-':
@@ -524,60 +490,29 @@ class SafeConstructor(BaseConstructor):
# value_s is lower case independent of input
mantissa, exponent = value_s.split('e')
if '.' not in mantissa:
- warnings.warn(MantissaNoDotYAML1_1Warning(node, value_so))
+ warnings.warn(MantissaNoDotYAML1_1Warning(node, value_so), stacklevel=1)
return sign * float(value_s)
- if PY3:
-
- def construct_yaml_binary(self, node):
- # type: (Any) -> Any
- try:
- value = self.construct_scalar(node).encode('ascii')
- except UnicodeEncodeError as exc:
- raise ConstructorError(
- None,
- None,
- 'failed to convert base64 data into ascii: %s' % exc,
- node.start_mark,
- )
- try:
- if hasattr(base64, 'decodebytes'):
- return base64.decodebytes(value)
- else:
- return base64.decodestring(value)
- except binascii.Error as exc:
- raise ConstructorError(
- None, None, 'failed to decode base64 data: %s' % exc, node.start_mark
- )
-
- else:
+ def construct_yaml_binary(self, node: Any) -> Any:
+ try:
+ value = self.construct_scalar(node).encode('ascii')
+ except UnicodeEncodeError as exc:
+ raise ConstructorError(
+ None,
+ None,
+ f'failed to convert base64 data into ascii: {exc!s}',
+ node.start_mark,
+ )
+ try:
+ return base64.decodebytes(value)
+ except binascii.Error as exc:
+ raise ConstructorError(
+ None, None, f'failed to decode base64 data: {exc!s}', node.start_mark,
+ )
- def construct_yaml_binary(self, node):
- # type: (Any) -> Any
- value = self.construct_scalar(node)
- try:
- return to_str(value).decode('base64')
- except (binascii.Error, UnicodeEncodeError) as exc:
- raise ConstructorError(
- None, None, 'failed to decode base64 data: %s' % exc, node.start_mark
- )
+ timestamp_regexp = timestamp_regexp # moved to util 0.17.17
- timestamp_regexp = RegExp(
- u"""^(?P<year>[0-9][0-9][0-9][0-9])
- -(?P<month>[0-9][0-9]?)
- -(?P<day>[0-9][0-9]?)
- (?:((?P<t>[Tt])|[ \\t]+) # explictly not retaining extra spaces
- (?P<hour>[0-9][0-9]?)
- :(?P<minute>[0-9][0-9])
- :(?P<second>[0-9][0-9])
- (?:\\.(?P<fraction>[0-9]*))?
- (?:[ \\t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
- (?::(?P<tz_minute>[0-9][0-9]))?))?)?$""",
- re.X,
- )
-
- def construct_yaml_timestamp(self, node, values=None):
- # type: (Any, Any) -> Any
+ def construct_yaml_timestamp(self, node: Any, values: Any = None) -> Any:
if values is None:
try:
match = self.timestamp_regexp.match(node.value)
@@ -587,49 +522,13 @@ class SafeConstructor(BaseConstructor):
raise ConstructorError(
None,
None,
- 'failed to construct timestamp from "{}"'.format(node.value),
+ f'failed to construct timestamp from "{node.value}"',
node.start_mark,
)
values = match.groupdict()
- year = int(values['year'])
- month = int(values['month'])
- day = int(values['day'])
- if not values['hour']:
- return datetime.date(year, month, day)
- hour = int(values['hour'])
- minute = int(values['minute'])
- second = int(values['second'])
- fraction = 0
- if values['fraction']:
- fraction_s = values['fraction'][:6]
- while len(fraction_s) < 6:
- fraction_s += '0'
- fraction = int(fraction_s)
- if len(values['fraction']) > 6 and int(values['fraction'][6]) > 4:
- fraction += 1
- delta = None
- if values['tz_sign']:
- tz_hour = int(values['tz_hour'])
- minutes = values['tz_minute']
- tz_minute = int(minutes) if minutes else 0
- delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
- if values['tz_sign'] == '-':
- delta = -delta
- # should do something else instead (or hook this up to the preceding if statement
- # in reverse
- # if delta is None:
- # return datetime.datetime(year, month, day, hour, minute, second, fraction)
- # return datetime.datetime(year, month, day, hour, minute, second, fraction,
- # datetime.timezone.utc)
- # the above is not good enough though, should provide tzinfo. In Python3 that is easily
- # doable drop that kind of support for Python2 as it has not native tzinfo
- data = datetime.datetime(year, month, day, hour, minute, second, fraction)
- if delta:
- data -= delta
- return data
+ return create_timestamp(**values)
- def construct_yaml_omap(self, node):
- # type: (Any) -> Any
+ def construct_yaml_omap(self, node: Any) -> Any:
# Note: we do now check for duplicate keys
omap = ordereddict()
yield omap
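
construct_yaml_timestamp now only matches the scalar against timestamp_regexp (moved to util in 0.17.17) and hands the captured groups to create_timestamp. A hedged sketch of the observable behaviour; whether you get a date or a datetime depends on whether a time part is present, and any tz offset is folded into the value by create_timestamp:

    from ruamel.yaml import YAML

    yaml = YAML(typ='safe')
    doc = 'd: !!timestamp 2001-12-14\nt: !!timestamp 2001-12-14 21:59:43.10 -5\n'
    data = yaml.load(doc)
    print(type(data['d']))   # expected: <class 'datetime.date'>
    print(type(data['t']))   # expected: <class 'datetime.datetime'>
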
@@ -637,7 +536,7 @@ class SafeConstructor(BaseConstructor):
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
- 'expected a sequence, but found %s' % node.id,
+ f'expected a sequence, but found {node.id!s}',
node.start_mark,
)
for subnode in node.value:
@@ -645,14 +544,14 @@ class SafeConstructor(BaseConstructor):
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
- 'expected a mapping of length 1, but found %s' % subnode.id,
+ f'expected a mapping of length 1, but found {subnode.id!s}',
subnode.start_mark,
)
if len(subnode.value) != 1:
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
- 'expected a single mapping item, but found %d items' % len(subnode.value),
+ f'expected a single mapping item, but found {len(subnode.value):d} items',
subnode.start_mark,
)
key_node, value_node = subnode.value[0]
@@ -661,16 +560,15 @@ class SafeConstructor(BaseConstructor):
value = self.construct_object(value_node)
omap[key] = value
- def construct_yaml_pairs(self, node):
- # type: (Any) -> Any
+ def construct_yaml_pairs(self, node: Any) -> Any:
# Note: the same code as `construct_yaml_omap`.
- pairs = [] # type: List[Any]
+ pairs: List[Any] = []
yield pairs
if not isinstance(node, SequenceNode):
raise ConstructorError(
'while constructing pairs',
node.start_mark,
- 'expected a sequence, but found %s' % node.id,
+ f'expected a sequence, but found {node.id!s}',
node.start_mark,
)
for subnode in node.value:
@@ -678,14 +576,14 @@ class SafeConstructor(BaseConstructor):
raise ConstructorError(
'while constructing pairs',
node.start_mark,
- 'expected a mapping of length 1, but found %s' % subnode.id,
+ f'expected a mapping of length 1, but found {subnode.id!s}',
subnode.start_mark,
)
if len(subnode.value) != 1:
raise ConstructorError(
'while constructing pairs',
node.start_mark,
- 'expected a single mapping item, but found %d items' % len(subnode.value),
+ f'expected a single mapping item, but found {len(subnode.value):d} items',
subnode.start_mark,
)
key_node, value_node = subnode.value[0]
@@ -693,38 +591,28 @@ class SafeConstructor(BaseConstructor):
value = self.construct_object(value_node)
pairs.append((key, value))
- def construct_yaml_set(self, node):
- # type: (Any) -> Any
- data = set() # type: Set[Any]
+ def construct_yaml_set(self, node: Any) -> Any:
+ data: Set[Any] = set()
yield data
value = self.construct_mapping(node)
data.update(value)
- def construct_yaml_str(self, node):
- # type: (Any) -> Any
+ def construct_yaml_str(self, node: Any) -> Any:
value = self.construct_scalar(node)
- if PY3:
- return value
- try:
- return value.encode('ascii')
- except UnicodeEncodeError:
- return value
+ return value
- def construct_yaml_seq(self, node):
- # type: (Any) -> Any
- data = self.yaml_base_list_type() # type: List[Any]
+ def construct_yaml_seq(self, node: Any) -> Any:
+ data: List[Any] = self.yaml_base_list_type()
yield data
data.extend(self.construct_sequence(node))
- def construct_yaml_map(self, node):
- # type: (Any) -> Any
- data = self.yaml_base_dict_type() # type: Dict[Any, Any]
+ def construct_yaml_map(self, node: Any) -> Any:
+ data: Dict[Any, Any] = self.yaml_base_dict_type()
yield data
value = self.construct_mapping(node)
data.update(value)
- def construct_yaml_object(self, node, cls):
- # type: (Any, Any) -> Any
+ def construct_yaml_object(self, node: Any, cls: Any) -> Any:
data = cls.__new__(cls)
yield data
if hasattr(data, '__setstate__'):
@@ -734,105 +622,85 @@ class SafeConstructor(BaseConstructor):
state = self.construct_mapping(node)
data.__dict__.update(state)
- def construct_undefined(self, node):
- # type: (Any) -> None
+ def construct_undefined(self, node: Any) -> None:
raise ConstructorError(
None,
None,
- 'could not determine a constructor for the tag %r' % utf8(node.tag),
+ f'could not determine a constructor for the tag {node.tag!r}',
node.start_mark,
)
-SafeConstructor.add_constructor(u'tag:yaml.org,2002:null', SafeConstructor.construct_yaml_null)
+SafeConstructor.add_constructor('tag:yaml.org,2002:null', SafeConstructor.construct_yaml_null)
-SafeConstructor.add_constructor(u'tag:yaml.org,2002:bool', SafeConstructor.construct_yaml_bool)
+SafeConstructor.add_constructor('tag:yaml.org,2002:bool', SafeConstructor.construct_yaml_bool)
-SafeConstructor.add_constructor(u'tag:yaml.org,2002:int', SafeConstructor.construct_yaml_int)
+SafeConstructor.add_constructor('tag:yaml.org,2002:int', SafeConstructor.construct_yaml_int)
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:float', SafeConstructor.construct_yaml_float
+ 'tag:yaml.org,2002:float', SafeConstructor.construct_yaml_float
)
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:binary', SafeConstructor.construct_yaml_binary
+ 'tag:yaml.org,2002:binary', SafeConstructor.construct_yaml_binary
)
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:timestamp', SafeConstructor.construct_yaml_timestamp
+ 'tag:yaml.org,2002:timestamp', SafeConstructor.construct_yaml_timestamp
)
-SafeConstructor.add_constructor(u'tag:yaml.org,2002:omap', SafeConstructor.construct_yaml_omap)
+SafeConstructor.add_constructor('tag:yaml.org,2002:omap', SafeConstructor.construct_yaml_omap)
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:pairs', SafeConstructor.construct_yaml_pairs
+ 'tag:yaml.org,2002:pairs', SafeConstructor.construct_yaml_pairs
)
-SafeConstructor.add_constructor(u'tag:yaml.org,2002:set', SafeConstructor.construct_yaml_set)
+SafeConstructor.add_constructor('tag:yaml.org,2002:set', SafeConstructor.construct_yaml_set)
-SafeConstructor.add_constructor(u'tag:yaml.org,2002:str', SafeConstructor.construct_yaml_str)
+SafeConstructor.add_constructor('tag:yaml.org,2002:str', SafeConstructor.construct_yaml_str)
-SafeConstructor.add_constructor(u'tag:yaml.org,2002:seq', SafeConstructor.construct_yaml_seq)
+SafeConstructor.add_constructor('tag:yaml.org,2002:seq', SafeConstructor.construct_yaml_seq)
-SafeConstructor.add_constructor(u'tag:yaml.org,2002:map', SafeConstructor.construct_yaml_map)
+SafeConstructor.add_constructor('tag:yaml.org,2002:map', SafeConstructor.construct_yaml_map)
SafeConstructor.add_constructor(None, SafeConstructor.construct_undefined)
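
The block above re-registers the core tag:yaml.org,2002:* constructors with plain strings instead of u'' literals. The same add_constructor classmethod can hook up application-specific tags; a minimal sketch with a made-up !path tag (the tag name and helper are illustrative, not part of the library):

    from pathlib import Path

    from ruamel.yaml import YAML
    from ruamel.yaml.constructor import SafeConstructor

    def construct_path(constructor, node):
        # registered functions are called as fn(constructor_instance, node)
        return Path(constructor.construct_scalar(node))

    SafeConstructor.add_constructor('!path', construct_path)

    yaml = YAML(typ='safe')
    print(yaml.load('log: !path /var/log/app.log\n'))
    # expected something like: {'log': PosixPath('/var/log/app.log')}
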
-if PY2:
-
- class classobj:
- pass
-
class Constructor(SafeConstructor):
- def construct_python_str(self, node):
- # type: (Any) -> Any
- return utf8(self.construct_scalar(node))
-
- def construct_python_unicode(self, node):
- # type: (Any) -> Any
+ def construct_python_str(self, node: Any) -> Any:
return self.construct_scalar(node)
- if PY3:
+ def construct_python_unicode(self, node: Any) -> Any:
+ return self.construct_scalar(node)
- def construct_python_bytes(self, node):
- # type: (Any) -> Any
- try:
- value = self.construct_scalar(node).encode('ascii')
- except UnicodeEncodeError as exc:
- raise ConstructorError(
- None,
- None,
- 'failed to convert base64 data into ascii: %s' % exc,
- node.start_mark,
- )
- try:
- if hasattr(base64, 'decodebytes'):
- return base64.decodebytes(value)
- else:
- return base64.decodestring(value)
- except binascii.Error as exc:
- raise ConstructorError(
- None, None, 'failed to decode base64 data: %s' % exc, node.start_mark
- )
+ def construct_python_bytes(self, node: Any) -> Any:
+ try:
+ value = self.construct_scalar(node).encode('ascii')
+ except UnicodeEncodeError as exc:
+ raise ConstructorError(
+ None,
+ None,
+ f'failed to convert base64 data into ascii: {exc!s}',
+ node.start_mark,
+ )
+ try:
+ return base64.decodebytes(value)
+ except binascii.Error as exc:
+ raise ConstructorError(
+ None, None, f'failed to decode base64 data: {exc!s}', node.start_mark,
+ )
- def construct_python_long(self, node):
- # type: (Any) -> int
+ def construct_python_long(self, node: Any) -> int:
val = self.construct_yaml_int(node)
- if PY3:
- return val
- return int(val)
+ return val
- def construct_python_complex(self, node):
- # type: (Any) -> Any
+ def construct_python_complex(self, node: Any) -> Any:
return complex(self.construct_scalar(node))
- def construct_python_tuple(self, node):
- # type: (Any) -> Any
+ def construct_python_tuple(self, node: Any) -> Any:
return tuple(self.construct_sequence(node))
- def find_python_module(self, name, mark):
- # type: (Any, Any) -> Any
+ def find_python_module(self, name: Any, mark: Any) -> Any:
if not name:
raise ConstructorError(
'while constructing a Python module',
@@ -846,13 +714,12 @@ class Constructor(SafeConstructor):
raise ConstructorError(
'while constructing a Python module',
mark,
- 'cannot find module %r (%s)' % (utf8(name), exc),
+ f'cannot find module {name!r} ({exc!s})',
mark,
)
return sys.modules[name]
- def find_python_name(self, name, mark):
- # type: (Any, Any) -> Any
+ def find_python_name(self, name: Any, mark: Any) -> Any:
if not name:
raise ConstructorError(
'while constructing a Python object',
@@ -860,10 +727,10 @@ class Constructor(SafeConstructor):
'expected non-empty name appended to the tag',
mark,
)
- if u'.' in name:
+ if '.' in name:
lname = name.split('.')
lmodule_name = lname
- lobject_name = [] # type: List[Any]
+ lobject_name: List[Any] = []
while len(lmodule_name) > 1:
lobject_name.insert(0, lmodule_name.pop())
module_name = '.'.join(lmodule_name)
@@ -882,7 +749,7 @@ class Constructor(SafeConstructor):
raise ConstructorError(
'while constructing a Python object',
mark,
- 'cannot find module %r (%s)' % (utf8(module_name), exc),
+ f'cannot find module {module_name!r} ({exc!s})',
mark,
)
module = sys.modules[module_name]
@@ -894,64 +761,52 @@ class Constructor(SafeConstructor):
raise ConstructorError(
'while constructing a Python object',
mark,
- 'cannot find %r in the module %r' % (utf8(object_name), module.__name__),
+ f'cannot find {object_name!r} in the module {module.__name__!r}',
mark,
)
obj = getattr(obj, lobject_name.pop(0))
return obj
- def construct_python_name(self, suffix, node):
- # type: (Any, Any) -> Any
+ def construct_python_name(self, suffix: Any, node: Any) -> Any:
value = self.construct_scalar(node)
if value:
raise ConstructorError(
'while constructing a Python name',
node.start_mark,
- 'expected the empty value, but found %r' % utf8(value),
+ f'expected the empty value, but found {value!r}',
node.start_mark,
)
return self.find_python_name(suffix, node.start_mark)
- def construct_python_module(self, suffix, node):
- # type: (Any, Any) -> Any
+ def construct_python_module(self, suffix: Any, node: Any) -> Any:
value = self.construct_scalar(node)
if value:
raise ConstructorError(
'while constructing a Python module',
node.start_mark,
- 'expected the empty value, but found %r' % utf8(value),
+ f'expected the empty value, but found {value!r}',
node.start_mark,
)
return self.find_python_module(suffix, node.start_mark)
- def make_python_instance(self, suffix, node, args=None, kwds=None, newobj=False):
- # type: (Any, Any, Any, Any, bool) -> Any
+ def make_python_instance(
+ self, suffix: Any, node: Any, args: Any = None, kwds: Any = None, newobj: bool = False
+ ) -> Any:
if not args:
args = []
if not kwds:
kwds = {}
cls = self.find_python_name(suffix, node.start_mark)
- if PY3:
- if newobj and isinstance(cls, type):
- return cls.__new__(cls, *args, **kwds)
- else:
- return cls(*args, **kwds)
+ if newobj and isinstance(cls, type):
+ return cls.__new__(cls, *args, **kwds)
else:
- if newobj and isinstance(cls, type(classobj)) and not args and not kwds:
- instance = classobj()
- instance.__class__ = cls
- return instance
- elif newobj and isinstance(cls, type):
- return cls.__new__(cls, *args, **kwds)
- else:
- return cls(*args, **kwds)
+ return cls(*args, **kwds)
- def set_python_instance_state(self, instance, state):
- # type: (Any, Any) -> None
+ def set_python_instance_state(self, instance: Any, state: Any) -> None:
if hasattr(instance, '__setstate__'):
instance.__setstate__(state)
else:
- slotstate = {} # type: Dict[Any, Any]
+ slotstate: Dict[Any, Any] = {}
if isinstance(state, tuple) and len(state) == 2:
state, slotstate = state
if hasattr(instance, '__dict__'):
@@ -961,8 +816,7 @@ class Constructor(SafeConstructor):
for key, value in slotstate.items():
setattr(instance, key, value)
- def construct_python_object(self, suffix, node):
- # type: (Any, Any) -> Any
+ def construct_python_object(self, suffix: Any, node: Any) -> Any:
# Format:
# !!python/object:module.name { ... state ... }
instance = self.make_python_instance(suffix, node, newobj=True)
@@ -972,8 +826,9 @@ class Constructor(SafeConstructor):
state = self.construct_mapping(node, deep=deep)
self.set_python_instance_state(instance, state)
- def construct_python_object_apply(self, suffix, node, newobj=False):
- # type: (Any, Any, bool) -> Any
+ def construct_python_object_apply(
+ self, suffix: Any, node: Any, newobj: bool = False
+ ) -> Any:
# Format:
# !!python/object/apply # (or !!python/object/new)
# args: [ ... arguments ... ]
@@ -987,10 +842,10 @@ class Constructor(SafeConstructor):
# is how an object is created, check make_python_instance for details.
if isinstance(node, SequenceNode):
args = self.construct_sequence(node, deep=True)
- kwds = {} # type: Dict[Any, Any]
- state = {} # type: Dict[Any, Any]
- listitems = [] # type: List[Any]
- dictitems = {} # type: Dict[Any, Any]
+ kwds: Dict[Any, Any] = {}
+ state: Dict[Any, Any] = {}
+ listitems: List[Any] = []
+ dictitems: Dict[Any, Any] = {}
else:
value = self.construct_mapping(node, deep=True)
args = value.get('args', [])
@@ -1008,66 +863,60 @@ class Constructor(SafeConstructor):
instance[key] = dictitems[key]
return instance
- def construct_python_object_new(self, suffix, node):
- # type: (Any, Any) -> Any
+ def construct_python_object_new(self, suffix: Any, node: Any) -> Any:
return self.construct_python_object_apply(suffix, node, newobj=True)
-Constructor.add_constructor(u'tag:yaml.org,2002:python/none', Constructor.construct_yaml_null)
+Constructor.add_constructor('tag:yaml.org,2002:python/none', Constructor.construct_yaml_null)
-Constructor.add_constructor(u'tag:yaml.org,2002:python/bool', Constructor.construct_yaml_bool)
+Constructor.add_constructor('tag:yaml.org,2002:python/bool', Constructor.construct_yaml_bool)
-Constructor.add_constructor(u'tag:yaml.org,2002:python/str', Constructor.construct_python_str)
+Constructor.add_constructor('tag:yaml.org,2002:python/str', Constructor.construct_python_str)
Constructor.add_constructor(
- u'tag:yaml.org,2002:python/unicode', Constructor.construct_python_unicode
+ 'tag:yaml.org,2002:python/unicode', Constructor.construct_python_unicode
)
-if PY3:
- Constructor.add_constructor(
- u'tag:yaml.org,2002:python/bytes', Constructor.construct_python_bytes
- )
-
-Constructor.add_constructor(u'tag:yaml.org,2002:python/int', Constructor.construct_yaml_int)
-
Constructor.add_constructor(
- u'tag:yaml.org,2002:python/long', Constructor.construct_python_long
+ 'tag:yaml.org,2002:python/bytes', Constructor.construct_python_bytes
)
-Constructor.add_constructor(
- u'tag:yaml.org,2002:python/float', Constructor.construct_yaml_float
-)
+Constructor.add_constructor('tag:yaml.org,2002:python/int', Constructor.construct_yaml_int)
+
+Constructor.add_constructor('tag:yaml.org,2002:python/long', Constructor.construct_python_long)
+
+Constructor.add_constructor('tag:yaml.org,2002:python/float', Constructor.construct_yaml_float)
Constructor.add_constructor(
- u'tag:yaml.org,2002:python/complex', Constructor.construct_python_complex
+ 'tag:yaml.org,2002:python/complex', Constructor.construct_python_complex
)
-Constructor.add_constructor(u'tag:yaml.org,2002:python/list', Constructor.construct_yaml_seq)
+Constructor.add_constructor('tag:yaml.org,2002:python/list', Constructor.construct_yaml_seq)
Constructor.add_constructor(
- u'tag:yaml.org,2002:python/tuple', Constructor.construct_python_tuple
+ 'tag:yaml.org,2002:python/tuple', Constructor.construct_python_tuple
)
-Constructor.add_constructor(u'tag:yaml.org,2002:python/dict', Constructor.construct_yaml_map)
+Constructor.add_constructor('tag:yaml.org,2002:python/dict', Constructor.construct_yaml_map)
Constructor.add_multi_constructor(
- u'tag:yaml.org,2002:python/name:', Constructor.construct_python_name
+ 'tag:yaml.org,2002:python/name:', Constructor.construct_python_name
)
Constructor.add_multi_constructor(
- u'tag:yaml.org,2002:python/module:', Constructor.construct_python_module
+ 'tag:yaml.org,2002:python/module:', Constructor.construct_python_module
)
Constructor.add_multi_constructor(
- u'tag:yaml.org,2002:python/object:', Constructor.construct_python_object
+ 'tag:yaml.org,2002:python/object:', Constructor.construct_python_object
)
Constructor.add_multi_constructor(
- u'tag:yaml.org,2002:python/object/apply:', Constructor.construct_python_object_apply
+ 'tag:yaml.org,2002:python/object/apply:', Constructor.construct_python_object_apply
)
Constructor.add_multi_constructor(
- u'tag:yaml.org,2002:python/object/new:', Constructor.construct_python_object_new
+ 'tag:yaml.org,2002:python/object/new:', Constructor.construct_python_object_new
)
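
add_multi_constructor, used above for the tag:yaml.org,2002:python/* prefixes, passes the remainder of the tag to the handler as a suffix argument. A hedged sketch with an invented !env: prefix; the prefix, handler and environment variable are assumptions for illustration only:

    import os

    from ruamel.yaml import YAML
    from ruamel.yaml.constructor import SafeConstructor

    def construct_env(constructor, tag_suffix, node):
        # for '!env:HOME' the tag_suffix is 'HOME'; fall back to the scalar value
        return os.environ.get(tag_suffix, constructor.construct_scalar(node))

    SafeConstructor.add_multi_constructor('!env:', construct_env)

    yaml = YAML(typ='safe')
    print(yaml.load('home: !env:HOME unknown\n'))
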
@@ -1076,20 +925,43 @@ class RoundTripConstructor(SafeConstructor):
as well as on the items
"""
- def construct_scalar(self, node):
- # type: (Any) -> Any
+ def comment(self, idx: Any) -> Any:
+ assert self.loader.comment_handling is not None
+ x = self.scanner.comments[idx]
+ x.set_assigned()
+ return x
+
+ def comments(self, list_of_comments: Any, idx: Optional[Any] = None) -> Any:
+ # hand in the comment and optional pre, eol, post segment
+ if list_of_comments is None:
+ return []
+ if idx is not None:
+ if list_of_comments[idx] is None:
+ return []
+ list_of_comments = list_of_comments[idx]
+ for x in list_of_comments:
+ yield self.comment(x)
+
+ def construct_scalar(self, node: Any) -> Any:
if not isinstance(node, ScalarNode):
raise ConstructorError(
- None, None, 'expected a scalar node, but found %s' % node.id, node.start_mark
+ None, None, f'expected a scalar node, but found {node.id!s}', node.start_mark,
)
- if node.style == '|' and isinstance(node.value, text_type):
+ if node.style == '|' and isinstance(node.value, str):
lss = LiteralScalarString(node.value, anchor=node.anchor)
- if node.comment and node.comment[1]:
- lss.comment = node.comment[1][0] # type: ignore
+ if self.loader and self.loader.comment_handling is None:
+ if node.comment and node.comment[1]:
+ lss.comment = node.comment[1][0] # type: ignore
+ else:
+ # NEWCMNT
+ if node.comment is not None and node.comment[1]:
+ # nprintf('>>>>nc1', node.comment)
+ # EOL comment after |
+ lss.comment = self.comment(node.comment[1][0]) # type: ignore
return lss
- if node.style == '>' and isinstance(node.value, text_type):
- fold_positions = [] # type: List[int]
+ if node.style == '>' and isinstance(node.value, str):
+ fold_positions: List[int] = []
idx = -1
while True:
idx = node.value.find('\a', idx + 1)
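
The new comment()/comments() helpers above only come into play when the loader's experimental comment_handling is set; with the default (None) the round-trip constructor keeps attaching comments to the constructed objects as before. A short sketch of that default behaviour, assuming ruamel.yaml is installed:

    import sys

    from ruamel.yaml import YAML

    yaml = YAML()   # typ='rt' is the default and uses RoundTripConstructor
    data = yaml.load('a: 1  # answer\nb: 2\n')
    data['b'] = 3
    yaml.dump(data, sys.stdout)
    # the '# answer' comment should survive the edit:
    # a: 1  # answer
    # b: 3
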
@@ -1097,12 +969,19 @@ class RoundTripConstructor(SafeConstructor):
break
fold_positions.append(idx - len(fold_positions))
fss = FoldedScalarString(node.value.replace('\a', ''), anchor=node.anchor)
- if node.comment and node.comment[1]:
- fss.comment = node.comment[1][0] # type: ignore
+ if self.loader and self.loader.comment_handling is None:
+ if node.comment and node.comment[1]:
+ fss.comment = node.comment[1][0] # type: ignore
+ else:
+ # NEWCMNT
+ if node.comment is not None and node.comment[1]:
+ # nprintf('>>>>nc2', node.comment)
+ # EOL comment after >
+ fss.comment = self.comment(node.comment[1][0]) # type: ignore
if fold_positions:
fss.fold_pos = fold_positions # type: ignore
return fss
- elif bool(self._preserve_quotes) and isinstance(node.value, text_type):
+ elif bool(self._preserve_quotes) and isinstance(node.value, str):
if node.style == "'":
return SingleQuotedScalarString(node.value, anchor=node.anchor)
if node.style == '"':
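
The _preserve_quotes branch above wraps quoted scalars in SingleQuotedScalarString / DoubleQuotedScalarString so the quoting style can be reproduced on output. A sketch using the documented preserve_quotes attribute on the YAML instance:

    import sys

    from ruamel.yaml import YAML

    yaml = YAML()
    yaml.preserve_quotes = True
    data = yaml.load("a: 'single'\nb: \"double\"\nc: plain\n")
    yaml.dump(data, sys.stdout)
    # expected: a stays single-quoted, b stays double-quoted, c stays plain
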
@@ -1111,13 +990,12 @@ class RoundTripConstructor(SafeConstructor):
return PlainScalarString(node.value, anchor=node.anchor)
return node.value
- def construct_yaml_int(self, node):
- # type: (Any) -> Any
- width = None # type: Any
- value_su = to_str(self.construct_scalar(node))
+ def construct_yaml_int(self, node: Any) -> Any:
+ width: Any = None
+ value_su = self.construct_scalar(node)
try:
sx = value_su.rstrip('_')
- underscore = [len(sx) - sx.rindex('_') - 1, False, False] # type: Any
+ underscore: Any = [len(sx) - sx.rindex('_') - 1, False, False]
except ValueError:
underscore = None
except IndexError:
@@ -1146,7 +1024,7 @@ class RoundTripConstructor(SafeConstructor):
# default to lower-case if no a-fA-F in string
if self.resolver.processing_version > (1, 1) and value_s[2] == '0':
width = len(value_s[2:])
- hex_fun = HexInt # type: Any
+ hex_fun: Any = HexInt
for ch in value_s[2:]:
if ch in 'ABCDEF': # first non-digit is capital
hex_fun = HexCapsInt
@@ -1175,7 +1053,12 @@ class RoundTripConstructor(SafeConstructor):
anchor=node.anchor,
)
elif self.resolver.processing_version != (1, 2) and value_s[0] == '0':
- return sign * int(value_s, 8)
+ return OctalInt(
+ sign * int(value_s, 8),
+ width=width,
+ underscore=underscore,
+ anchor=node.anchor,
+ )
elif self.resolver.processing_version != (1, 2) and ':' in value_s:
digits = [int(part) for part in value_s.split(':')]
digits.reverse()
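
The change above makes YAML 1.1 style octals (leading 0) come back as OctalInt, so their width and underscores survive a dump, the same way the hex and binary variants already did. A hedged round-trip sketch with hex and 1.2 octal literals:

    import sys

    from ruamel.yaml import YAML

    yaml = YAML()   # round-trip mode
    data = yaml.load('flags: 0x1A\nmode: 0o755\n')
    print(data['flags'] + 1)   # behaves as a plain int: 27
    yaml.dump(data, sys.stdout)
    # expected to keep the original notation:
    # flags: 0x1A
    # mode: 0o755
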
@@ -1202,10 +1085,8 @@ class RoundTripConstructor(SafeConstructor):
else:
return sign * int(value_s)
- def construct_yaml_float(self, node):
- # type: (Any) -> Any
- def leading_zeros(v):
- # type: (Any) -> int
+ def construct_yaml_float(self, node: Any) -> Any:
+ def leading_zeros(v: Any) -> int:
lead0 = 0
idx = 0
while idx < len(v) and v[idx] in '0.':
@@ -1215,8 +1096,8 @@ class RoundTripConstructor(SafeConstructor):
return lead0
# underscore = None
- m_sign = False # type: Any
- value_so = to_str(self.construct_scalar(node))
+ m_sign: Any = False
+ value_so = self.construct_scalar(node)
value_s = value_so.replace('_', "").lower()
sign = +1
if value_s[0] == '-':
@@ -1247,7 +1128,7 @@ class RoundTripConstructor(SafeConstructor):
if self.resolver.processing_version != (1, 2):
# value_s is lower case independent of input
if '.' not in mantissa:
- warnings.warn(MantissaNoDotYAML1_1Warning(node, value_so))
+ warnings.warn(MantissaNoDotYAML1_1Warning(node, value_so), stacklevel=1)
lead0 = leading_zeros(mantissa)
width = len(mantissa)
prec = mantissa.find('.')
@@ -1268,7 +1149,8 @@ class RoundTripConstructor(SafeConstructor):
anchor=node.anchor,
)
width = len(value_so)
- prec = value_so.index('.') # you can use index, this would not be float without dot
+ # you can't use index, !!float 42 would be a float without a dot
+ prec = value_so.find('.')
lead0 = leading_zeros(value_so)
return ScalarFloat(
sign * float(value_s),
@@ -1279,32 +1161,33 @@ class RoundTripConstructor(SafeConstructor):
anchor=node.anchor,
)
- def construct_yaml_str(self, node):
- # type: (Any) -> Any
+ def construct_yaml_str(self, node: Any) -> Any:
value = self.construct_scalar(node)
if isinstance(value, ScalarString):
return value
- if PY3:
- return value
- try:
- return value.encode('ascii')
- except AttributeError:
- # in case you replace the node dynamically e.g. with a dict
- return value
- except UnicodeEncodeError:
- return value
+ return value
- def construct_rt_sequence(self, node, seqtyp, deep=False):
- # type: (Any, Any, bool) -> Any
+ def construct_rt_sequence(self, node: Any, seqtyp: Any, deep: bool = False) -> Any:
if not isinstance(node, SequenceNode):
raise ConstructorError(
- None, None, 'expected a sequence node, but found %s' % node.id, node.start_mark
+ None,
+ None,
+ f'expected a sequence node, but found {node.id!s}',
+ node.start_mark,
)
ret_val = []
- if node.comment:
- seqtyp._yaml_add_comment(node.comment[:2])
- if len(node.comment) > 2:
- seqtyp.yaml_end_comment_extend(node.comment[2], clear=True)
+ if self.loader and self.loader.comment_handling is None:
+ if node.comment:
+ seqtyp._yaml_add_comment(node.comment[:2])
+ if len(node.comment) > 2:
+ # this happens e.g. if you have a sequence element that is a flow-style
+ # mapping and that has no EOL comment but a following commentline or
+ # empty line
+ seqtyp.yaml_end_comment_extend(node.comment[2], clear=True)
+ else:
+ # NEWCMNT
+ if node.comment:
+ nprintf('nc3', node.comment)
if node.anchor:
from ruamel.yaml.serializer import templated_id
@@ -1320,16 +1203,14 @@ class RoundTripConstructor(SafeConstructor):
)
return ret_val
- def flatten_mapping(self, node):
- # type: (Any) -> Any
+ def flatten_mapping(self, node: Any) -> Any:
"""
This implements the merge key feature http://yaml.org/type/merge.html
by inserting keys from the merge dict/list of dicts if not yet
available in this node
"""
- def constructed(value_node):
- # type: (Any) -> Any
+ def constructed(value_node: Any) -> Any:
# If the contents of a merge are defined within the
# merge marker, then they won't have been constructed
# yet. But if they were already constructed, we need to use
@@ -1341,11 +1222,11 @@ class RoundTripConstructor(SafeConstructor):
return value
# merge = []
- merge_map_list = [] # type: List[Any]
+ merge_map_list: List[Any] = []
index = 0
while index < len(node.value):
key_node, value_node = node.value[index]
- if key_node.tag == u'tag:yaml.org,2002:merge':
+ if key_node.tag == 'tag:yaml.org,2002:merge':
if merge_map_list: # double << key
if self.allow_duplicate_keys:
del node.value[index]
@@ -1354,7 +1235,7 @@ class RoundTripConstructor(SafeConstructor):
args = [
'while constructing a mapping',
node.start_mark,
- 'found duplicate key "{}"'.format(key_node.value),
+ f'found duplicate key "{key_node.value}"',
key_node.start_mark,
"""
To suppress this check see:
@@ -1366,7 +1247,7 @@ class RoundTripConstructor(SafeConstructor):
""",
]
if self.allow_duplicate_keys is None:
- warnings.warn(DuplicateKeyFutureWarning(*args))
+ warnings.warn(DuplicateKeyFutureWarning(*args), stacklevel=1)
else:
raise DuplicateKeyError(*args)
del node.value[index]
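
A repeated '<<' merge key, like any duplicate key, raises DuplicateKeyError unless allow_duplicate_keys is set on the YAML instance; the value None only produces the FutureWarning shown above, for backwards compatibility. Sketch:

    from ruamel.yaml import YAML
    from ruamel.yaml.constructor import DuplicateKeyError

    yaml = YAML(typ='safe')
    try:
        yaml.load('a: 1\na: 2\n')
    except DuplicateKeyError:
        print('duplicate key rejected')

    yaml.allow_duplicate_keys = True
    print(yaml.load('a: 1\na: 2\n'))   # no DuplicateKeyError; one of the values wins
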
@@ -1381,7 +1262,7 @@ class RoundTripConstructor(SafeConstructor):
raise ConstructorError(
'while constructing a mapping',
node.start_mark,
- 'expected a mapping for merging, but found %s' % subnode.id,
+ f'expected a mapping for merging, but found {subnode.id!s}',
subnode.start_mark,
)
merge_map_list.append((index, constructed(subnode)))
@@ -1395,11 +1276,11 @@ class RoundTripConstructor(SafeConstructor):
'while constructing a mapping',
node.start_mark,
'expected a mapping or list of mappings for merging, '
- 'but found %s' % value_node.id,
+ f'but found {value_node.id!s}',
value_node.start_mark,
)
- elif key_node.tag == u'tag:yaml.org,2002:value':
- key_node.tag = u'tag:yaml.org,2002:str'
+ elif key_node.tag == 'tag:yaml.org,2002:value':
+ key_node.tag = 'tag:yaml.org,2002:str'
index += 1
else:
index += 1
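
flatten_mapping implements the << merge-key feature by splicing the referenced mapping(s) into the node before construction, with keys already present in the node taking precedence. Sketch, with illustrative document content:

    from ruamel.yaml import YAML

    doc = (
        'defaults: &defaults\n'
        '  retries: 3\n'
        '  timeout: 10\n'
        'service:\n'
        '  <<: *defaults\n'
        '  timeout: 30\n'
    )

    data = YAML(typ='safe').load(doc)
    print(data['service'])   # expected: {'retries': 3, 'timeout': 30}
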
@@ -1407,22 +1288,29 @@ class RoundTripConstructor(SafeConstructor):
# if merge:
# node.value = merge + node.value
- def _sentinel(self):
- # type: () -> None
+ def _sentinel(self) -> None:
pass
- def construct_mapping(self, node, maptyp, deep=False): # type: ignore
- # type: (Any, Any, bool) -> Any
+ def construct_mapping(self, node: Any, maptyp: Any, deep: bool = False) -> Any: # type: ignore # NOQA
if not isinstance(node, MappingNode):
raise ConstructorError(
- None, None, 'expected a mapping node, but found %s' % node.id, node.start_mark
+ None, None, f'expected a mapping node, but found {node.id!s}', node.start_mark,
)
merge_map = self.flatten_mapping(node)
# mapping = {}
- if node.comment:
- maptyp._yaml_add_comment(node.comment[:2])
- if len(node.comment) > 2:
- maptyp.yaml_end_comment_extend(node.comment[2], clear=True)
+ if self.loader and self.loader.comment_handling is None:
+ if node.comment:
+ maptyp._yaml_add_comment(node.comment[:2])
+ if len(node.comment) > 2:
+ maptyp.yaml_end_comment_extend(node.comment[2], clear=True)
+ else:
+ # NEWCMNT
+ if node.comment:
+ # nprintf('nc4', node.comment, node.start_mark)
+ if maptyp.ca.pre is None:
+ maptyp.ca.pre = []
+ for cmnt in self.comments(node.comment, 0):
+ maptyp.ca.pre.append(cmnt)
if node.anchor:
from ruamel.yaml.serializer import templated_id
@@ -1440,6 +1328,7 @@ class RoundTripConstructor(SafeConstructor):
key_s.fa.set_flow_style()
elif key_node.flow_style is False:
key_s.fa.set_block_style()
+ key_s._yaml_set_line_col(key.lc.line, key.lc.col) # type: ignore
key = key_s
elif isinstance(key, MutableMapping):
key_m = CommentedKeyMap(key)
@@ -1447,39 +1336,48 @@ class RoundTripConstructor(SafeConstructor):
key_m.fa.set_flow_style()
elif key_node.flow_style is False:
key_m.fa.set_block_style()
+ key_m._yaml_set_line_col(key.lc.line, key.lc.col) # type: ignore
key = key_m
- if PY2:
- try:
- hash(key)
- except TypeError as exc:
- raise ConstructorError(
- 'while constructing a mapping',
- node.start_mark,
- 'found unacceptable key (%s)' % exc,
- key_node.start_mark,
- )
- else:
- if not isinstance(key, Hashable):
- raise ConstructorError(
- 'while constructing a mapping',
- node.start_mark,
- 'found unhashable key',
- key_node.start_mark,
- )
+ if not isinstance(key, Hashable):
+ raise ConstructorError(
+ 'while constructing a mapping',
+ node.start_mark,
+ 'found unhashable key',
+ key_node.start_mark,
+ )
value = self.construct_object(value_node, deep=deep)
if self.check_mapping_key(node, key_node, maptyp, key, value):
- if key_node.comment and len(key_node.comment) > 4 and key_node.comment[4]:
- if last_value is None:
- key_node.comment[0] = key_node.comment.pop(4)
- maptyp._yaml_add_comment(key_node.comment, value=last_key)
- else:
- key_node.comment[2] = key_node.comment.pop(4)
+ if self.loader and self.loader.comment_handling is None:
+ if key_node.comment and len(key_node.comment) > 4 and key_node.comment[4]:
+ if last_value is None:
+ key_node.comment[0] = key_node.comment.pop(4)
+ maptyp._yaml_add_comment(key_node.comment, value=last_key)
+ else:
+ key_node.comment[2] = key_node.comment.pop(4)
+ maptyp._yaml_add_comment(key_node.comment, key=key)
+ key_node.comment = None
+ if key_node.comment:
maptyp._yaml_add_comment(key_node.comment, key=key)
- key_node.comment = None
- if key_node.comment:
- maptyp._yaml_add_comment(key_node.comment, key=key)
- if value_node.comment:
- maptyp._yaml_add_comment(value_node.comment, value=key)
+ if value_node.comment:
+ maptyp._yaml_add_comment(value_node.comment, value=key)
+ else:
+ # NEWCMNT
+ if key_node.comment:
+ nprintf('nc5a', key, key_node.comment)
+ if key_node.comment[0]:
+ maptyp.ca.set(key, C_KEY_PRE, key_node.comment[0])
+ if key_node.comment[1]:
+ maptyp.ca.set(key, C_KEY_EOL, key_node.comment[1])
+ if key_node.comment[2]:
+ maptyp.ca.set(key, C_KEY_POST, key_node.comment[2])
+ if value_node.comment:
+ nprintf('nc5b', key, value_node.comment)
+ if value_node.comment[0]:
+ maptyp.ca.set(key, C_VALUE_PRE, value_node.comment[0])
+ if value_node.comment[1]:
+ maptyp.ca.set(key, C_VALUE_EOL, value_node.comment[1])
+ if value_node.comment[2]:
+ maptyp.ca.set(key, C_VALUE_POST, value_node.comment[2])
maptyp._yaml_set_kv_line_col(
key,
[
@@ -1496,16 +1394,20 @@ class RoundTripConstructor(SafeConstructor):
if merge_map:
maptyp.add_yaml_merge(merge_map)
- def construct_setting(self, node, typ, deep=False):
- # type: (Any, Any, bool) -> Any
+ def construct_setting(self, node: Any, typ: Any, deep: bool = False) -> Any:
if not isinstance(node, MappingNode):
raise ConstructorError(
- None, None, 'expected a mapping node, but found %s' % node.id, node.start_mark
+ None, None, f'expected a mapping node, but found {node.id!s}', node.start_mark,
)
- if node.comment:
- typ._yaml_add_comment(node.comment[:2])
- if len(node.comment) > 2:
- typ.yaml_end_comment_extend(node.comment[2], clear=True)
+ if self.loader and self.loader.comment_handling is None:
+ if node.comment:
+ typ._yaml_add_comment(node.comment[:2])
+ if len(node.comment) > 2:
+ typ.yaml_end_comment_extend(node.comment[2], clear=True)
+ else:
+ # NEWCMNT
+ if node.comment:
+ nprintf('nc6', node.comment)
if node.anchor:
from ruamel.yaml.serializer import templated_id
@@ -1518,53 +1420,46 @@ class RoundTripConstructor(SafeConstructor):
if not isinstance(key, Hashable):
if isinstance(key, list):
key = tuple(key)
- if PY2:
- try:
- hash(key)
- except TypeError as exc:
- raise ConstructorError(
- 'while constructing a mapping',
- node.start_mark,
- 'found unacceptable key (%s)' % exc,
- key_node.start_mark,
- )
- else:
- if not isinstance(key, Hashable):
- raise ConstructorError(
- 'while constructing a mapping',
- node.start_mark,
- 'found unhashable key',
- key_node.start_mark,
- )
+ if not isinstance(key, Hashable):
+ raise ConstructorError(
+ 'while constructing a mapping',
+ node.start_mark,
+ 'found unhashable key',
+ key_node.start_mark,
+ )
# construct but should be null
value = self.construct_object(value_node, deep=deep) # NOQA
self.check_set_key(node, key_node, typ, key)
- if key_node.comment:
- typ._yaml_add_comment(key_node.comment, key=key)
- if value_node.comment:
- typ._yaml_add_comment(value_node.comment, value=key)
+ if self.loader and self.loader.comment_handling is None:
+ if key_node.comment:
+ typ._yaml_add_comment(key_node.comment, key=key)
+ if value_node.comment:
+ typ._yaml_add_comment(value_node.comment, value=key)
+ else:
+ # NEWCMNT
+ if key_node.comment:
+ nprintf('nc7a', key_node.comment)
+ if value_node.comment:
+ nprintf('nc7b', value_node.comment)
typ.add(key)
- def construct_yaml_seq(self, node):
- # type: (Any) -> Any
+ def construct_yaml_seq(self, node: Any) -> Iterator[CommentedSeq]:
data = CommentedSeq()
data._yaml_set_line_col(node.start_mark.line, node.start_mark.column)
- if node.comment:
- data._yaml_add_comment(node.comment)
+ # if node.comment:
+ # data._yaml_add_comment(node.comment)
yield data
data.extend(self.construct_rt_sequence(node, data))
self.set_collection_style(data, node)
- def construct_yaml_map(self, node):
- # type: (Any) -> Any
+ def construct_yaml_map(self, node: Any) -> Iterator[CommentedMap]:
data = CommentedMap()
data._yaml_set_line_col(node.start_mark.line, node.start_mark.column)
yield data
self.construct_mapping(node, data, deep=True)
self.set_collection_style(data, node)
- def set_collection_style(self, data, node):
- # type: (Any, Any) -> None
+ def set_collection_style(self, data: Any, node: Any) -> None:
if len(data) == 0:
return
if node.flow_style is True:
@@ -1572,8 +1467,7 @@ class RoundTripConstructor(SafeConstructor):
elif node.flow_style is False:
data.fa.set_block_style()
- def construct_yaml_object(self, node, cls):
- # type: (Any, Any) -> Any
+ def construct_yaml_object(self, node: Any, cls: Any) -> Any:
data = cls.__new__(cls)
yield data
if hasattr(data, '__setstate__'):
@@ -1581,10 +1475,23 @@ class RoundTripConstructor(SafeConstructor):
data.__setstate__(state)
else:
state = SafeConstructor.construct_mapping(self, node)
- data.__dict__.update(state)
+ if hasattr(data, '__attrs_attrs__'): # issue 394
+ data.__init__(**state)
+ else:
+ data.__dict__.update(state)
+ if node.anchor:
+ from ruamel.yaml.serializer import templated_id
+ from ruamel.yaml.anchor import Anchor
- def construct_yaml_omap(self, node):
- # type: (Any) -> Any
+ if not templated_id(node.anchor):
+ if not hasattr(data, Anchor.attrib):
+ a = Anchor()
+ setattr(data, Anchor.attrib, a)
+ else:
+ a = getattr(data, Anchor.attrib)
+ a.value = node.anchor
+
+ def construct_yaml_omap(self, node: Any) -> Iterator[CommentedOrderedMap]:
# Note: we do now check for duplicate keys
omap = CommentedOrderedMap()
omap._yaml_set_line_col(node.start_mark.line, node.start_mark.column)
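
construct_yaml_object now also calls __init__ for attrs-decorated classes (issue 394) and keeps the node's anchor on the instance. The usual way to reach this code is register_class on a round-trip YAML instance; a minimal sketch with an ordinary class (no attrs), where the !Point tag is simply derived from the class name:

    import sys

    from ruamel.yaml import YAML

    yaml = YAML()   # round-trip

    class Point:
        def __init__(self, x=0, y=0):
            self.x = x
            self.y = y

    yaml.register_class(Point)

    p = yaml.load('!Point {x: 1, y: 2}\n')
    print(p.x, p.y)           # 1 2
    yaml.dump(p, sys.stdout)  # dumps a !Point tagged mapping with its attributes
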
@@ -1593,15 +1500,20 @@ class RoundTripConstructor(SafeConstructor):
elif node.flow_style is False:
omap.fa.set_block_style()
yield omap
- if node.comment:
- omap._yaml_add_comment(node.comment[:2])
- if len(node.comment) > 2:
- omap.yaml_end_comment_extend(node.comment[2], clear=True)
+ if self.loader and self.loader.comment_handling is None:
+ if node.comment:
+ omap._yaml_add_comment(node.comment[:2])
+ if len(node.comment) > 2:
+ omap.yaml_end_comment_extend(node.comment[2], clear=True)
+ else:
+ # NEWCMNT
+ if node.comment:
+ nprintf('nc8', node.comment)
if not isinstance(node, SequenceNode):
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
- 'expected a sequence, but found %s' % node.id,
+ f'expected a sequence, but found {node.id!s}',
node.start_mark,
)
for subnode in node.value:
@@ -1609,37 +1521,46 @@ class RoundTripConstructor(SafeConstructor):
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
- 'expected a mapping of length 1, but found %s' % subnode.id,
+ f'expected a mapping of length 1, but found {subnode.id!s}',
subnode.start_mark,
)
if len(subnode.value) != 1:
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
- 'expected a single mapping item, but found %d items' % len(subnode.value),
+ f'expected a single mapping item, but found {len(subnode.value):d} items',
subnode.start_mark,
)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
assert key not in omap
value = self.construct_object(value_node)
- if key_node.comment:
- omap._yaml_add_comment(key_node.comment, key=key)
- if subnode.comment:
- omap._yaml_add_comment(subnode.comment, key=key)
- if value_node.comment:
- omap._yaml_add_comment(value_node.comment, value=key)
+ if self.loader and self.loader.comment_handling is None:
+ if key_node.comment:
+ omap._yaml_add_comment(key_node.comment, key=key)
+ if subnode.comment:
+ omap._yaml_add_comment(subnode.comment, key=key)
+ if value_node.comment:
+ omap._yaml_add_comment(value_node.comment, value=key)
+ else:
+ # NEWCMNT
+ if key_node.comment:
+ nprintf('nc9a', key_node.comment)
+ if subnode.comment:
+ nprintf('nc9b', subnode.comment)
+ if value_node.comment:
+ nprintf('nc9c', value_node.comment)
omap[key] = value
- def construct_yaml_set(self, node):
- # type: (Any) -> Any
+ def construct_yaml_set(self, node: Any) -> Iterator[CommentedSet]:
data = CommentedSet()
data._yaml_set_line_col(node.start_mark.line, node.start_mark.column)
yield data
self.construct_setting(node, data)
- def construct_undefined(self, node):
- # type: (Any) -> Any
+ def construct_unknown(
+ self, node: Any
+ ) -> Iterator[Union[CommentedMap, TaggedScalar, CommentedSeq]]:
try:
if isinstance(node, MappingNode):
data = CommentedMap()
@@ -1651,7 +1572,10 @@ class RoundTripConstructor(SafeConstructor):
data.yaml_set_tag(node.tag)
yield data
if node.anchor:
- data.yaml_set_anchor(node.anchor)
+ from ruamel.yaml.serializer import templated_id
+
+ if not templated_id(node.anchor):
+ data.yaml_set_anchor(node.anchor)
self.construct_mapping(node, data)
return
elif isinstance(node, ScalarNode):
@@ -1661,7 +1585,10 @@ class RoundTripConstructor(SafeConstructor):
data2.yaml_set_tag(node.tag)
yield data2
if node.anchor:
- data2.yaml_set_anchor(node.anchor, always_dump=True)
+ from ruamel.yaml.serializer import templated_id
+
+ if not templated_id(node.anchor):
+ data2.yaml_set_anchor(node.anchor, always_dump=True)
return
elif isinstance(node, SequenceNode):
data3 = CommentedSeq()
@@ -1673,7 +1600,10 @@ class RoundTripConstructor(SafeConstructor):
data3.yaml_set_tag(node.tag)
yield data3
if node.anchor:
- data3.yaml_set_anchor(node.anchor)
+ from ruamel.yaml.serializer import templated_id
+
+ if not templated_id(node.anchor):
+ data3.yaml_set_anchor(node.anchor)
data3.extend(self.construct_sequence(node))
return
except: # NOQA
@@ -1681,12 +1611,13 @@ class RoundTripConstructor(SafeConstructor):
raise ConstructorError(
None,
None,
- 'could not determine a constructor for the tag %r' % utf8(node.tag),
+ f'could not determine a constructor for the tag {node.tag!r}',
node.start_mark,
)
- def construct_yaml_timestamp(self, node, values=None):
- # type: (Any, Any) -> Any
+ def construct_yaml_timestamp(
+ self, node: Any, values: Any = None
+ ) -> Union[datetime.date, datetime.datetime, TimeStamp]:
try:
match = self.timestamp_regexp.match(node.value)
except TypeError:
@@ -1695,31 +1626,20 @@ class RoundTripConstructor(SafeConstructor):
raise ConstructorError(
None,
None,
- 'failed to construct timestamp from "{}"'.format(node.value),
+ f'failed to construct timestamp from "{node.value}"',
node.start_mark,
)
values = match.groupdict()
if not values['hour']:
- return SafeConstructor.construct_yaml_timestamp(self, node, values)
+ return create_timestamp(**values)
+ # return SafeConstructor.construct_yaml_timestamp(self, node, values)
for part in ['t', 'tz_sign', 'tz_hour', 'tz_minute']:
if values[part]:
break
else:
- return SafeConstructor.construct_yaml_timestamp(self, node, values)
- year = int(values['year'])
- month = int(values['month'])
- day = int(values['day'])
- hour = int(values['hour'])
- minute = int(values['minute'])
- second = int(values['second'])
- fraction = 0
- if values['fraction']:
- fraction_s = values['fraction'][:6]
- while len(fraction_s) < 6:
- fraction_s += '0'
- fraction = int(fraction_s)
- if len(values['fraction']) > 6 and int(values['fraction'][6]) > 4:
- fraction += 1
+ return create_timestamp(**values)
+ # return SafeConstructor.construct_yaml_timestamp(self, node, values)
+ dd = create_timestamp(**values) # this has delta applied
delta = None
if values['tz_sign']:
tz_hour = int(values['tz_hour'])
@@ -1728,26 +1648,30 @@ class RoundTripConstructor(SafeConstructor):
delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
if values['tz_sign'] == '-':
delta = -delta
+ # TODO: should check for None and solve issue 366 by passing tzinfo=delta
+ # (note: isinstance(datetime.datetime.now(), datetime.date) is True)
+ if isinstance(dd, datetime.datetime):
+ data = TimeStamp(
+ dd.year, dd.month, dd.day, dd.hour, dd.minute, dd.second, dd.microsecond
+ )
+ else:
+ # ToDo: make this into a DateStamp?
+ data = TimeStamp(dd.year, dd.month, dd.day, 0, 0, 0, 0)
+ return data
if delta:
- dt = datetime.datetime(year, month, day, hour, minute)
- dt -= delta
- data = TimeStamp(dt.year, dt.month, dt.day, dt.hour, dt.minute, second, fraction)
data._yaml['delta'] = delta
tz = values['tz_sign'] + values['tz_hour']
if values['tz_minute']:
tz += ':' + values['tz_minute']
data._yaml['tz'] = tz
else:
- data = TimeStamp(year, month, day, hour, minute, second, fraction)
if values['tz']: # no delta
data._yaml['tz'] = values['tz']
-
if values['t']:
data._yaml['t'] = True
return data
- def construct_yaml_bool(self, node):
- # type: (Any) -> Any
+ def construct_yaml_sbool(self, node: Any) -> Union[bool, ScalarBoolean]:
b = SafeConstructor.construct_yaml_bool(self, node)
if node.anchor:
return ScalarBoolean(b, anchor=node.anchor)
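
In round-trip mode the constructor above wraps datetimes in TimeStamp and stashes the original tz/T markers in _yaml so the textual form can be reproduced on output. A hedged sketch; the exact output formatting may differ between releases:

    import sys

    from ruamel.yaml import YAML

    yaml = YAML()   # round-trip
    data = yaml.load('built: 2011-11-02 10:00:00-5\n')
    print(type(data['built']).__name__)   # TimeStamp, a datetime.datetime subclass
    yaml.dump(data, sys.stdout)
    # expected to reproduce the -5 offset rather than a normalised UTC value
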
@@ -1755,51 +1679,51 @@ class RoundTripConstructor(SafeConstructor):
RoundTripConstructor.add_constructor(
- u'tag:yaml.org,2002:null', RoundTripConstructor.construct_yaml_null
+ 'tag:yaml.org,2002:null', RoundTripConstructor.construct_yaml_null
)
RoundTripConstructor.add_constructor(
- u'tag:yaml.org,2002:bool', RoundTripConstructor.construct_yaml_bool
+ 'tag:yaml.org,2002:bool', RoundTripConstructor.construct_yaml_sbool
)
RoundTripConstructor.add_constructor(
- u'tag:yaml.org,2002:int', RoundTripConstructor.construct_yaml_int
+ 'tag:yaml.org,2002:int', RoundTripConstructor.construct_yaml_int
)
RoundTripConstructor.add_constructor(
- u'tag:yaml.org,2002:float', RoundTripConstructor.construct_yaml_float
+ 'tag:yaml.org,2002:float', RoundTripConstructor.construct_yaml_float
)
RoundTripConstructor.add_constructor(
- u'tag:yaml.org,2002:binary', RoundTripConstructor.construct_yaml_binary
+ 'tag:yaml.org,2002:binary', RoundTripConstructor.construct_yaml_binary
)
RoundTripConstructor.add_constructor(
- u'tag:yaml.org,2002:timestamp', RoundTripConstructor.construct_yaml_timestamp
+ 'tag:yaml.org,2002:timestamp', RoundTripConstructor.construct_yaml_timestamp
)
RoundTripConstructor.add_constructor(
- u'tag:yaml.org,2002:omap', RoundTripConstructor.construct_yaml_omap
+ 'tag:yaml.org,2002:omap', RoundTripConstructor.construct_yaml_omap
)
RoundTripConstructor.add_constructor(
- u'tag:yaml.org,2002:pairs', RoundTripConstructor.construct_yaml_pairs
+ 'tag:yaml.org,2002:pairs', RoundTripConstructor.construct_yaml_pairs
)
RoundTripConstructor.add_constructor(
- u'tag:yaml.org,2002:set', RoundTripConstructor.construct_yaml_set
+ 'tag:yaml.org,2002:set', RoundTripConstructor.construct_yaml_set
)
RoundTripConstructor.add_constructor(
- u'tag:yaml.org,2002:str', RoundTripConstructor.construct_yaml_str
+ 'tag:yaml.org,2002:str', RoundTripConstructor.construct_yaml_str
)
RoundTripConstructor.add_constructor(
- u'tag:yaml.org,2002:seq', RoundTripConstructor.construct_yaml_seq
+ 'tag:yaml.org,2002:seq', RoundTripConstructor.construct_yaml_seq
)
RoundTripConstructor.add_constructor(
- u'tag:yaml.org,2002:map', RoundTripConstructor.construct_yaml_map
+ 'tag:yaml.org,2002:map', RoundTripConstructor.construct_yaml_map
)
-RoundTripConstructor.add_constructor(None, RoundTripConstructor.construct_undefined)
+RoundTripConstructor.add_constructor(None, RoundTripConstructor.construct_unknown)
diff --git a/cyaml.py b/cyaml.py
index 7a808a5..09d6480 100644
--- a/cyaml.py
+++ b/cyaml.py
@@ -1,16 +1,14 @@
# coding: utf-8
-from __future__ import absolute_import
-
from _ruamel_yaml import CParser, CEmitter # type: ignore
from ruamel.yaml.constructor import Constructor, BaseConstructor, SafeConstructor
from ruamel.yaml.representer import Representer, SafeRepresenter, BaseRepresenter
from ruamel.yaml.resolver import Resolver, BaseResolver
-if False: # MYPY
- from typing import Any, Union, Optional # NOQA
- from ruamel.yaml.compat import StreamTextType, StreamType, VersionType # NOQA
+
+from typing import Any, Union, Optional # NOQA
+from ruamel.yaml.compat import StreamTextType, StreamType, VersionType # NOQA
__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader', 'CBaseDumper', 'CSafeDumper', 'CDumper']
@@ -20,8 +18,12 @@ __all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader', 'CBaseDumper', 'CSafeDumper'
class CBaseLoader(CParser, BaseConstructor, BaseResolver): # type: ignore
- def __init__(self, stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
+ def __init__(
+ self,
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
CParser.__init__(self, stream)
self._parser = self._composer = self
BaseConstructor.__init__(self, loader=self)
@@ -32,8 +34,12 @@ class CBaseLoader(CParser, BaseConstructor, BaseResolver): # type: ignore
class CSafeLoader(CParser, SafeConstructor, Resolver): # type: ignore
- def __init__(self, stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
+ def __init__(
+ self,
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
CParser.__init__(self, stream)
self._parser = self._composer = self
SafeConstructor.__init__(self, loader=self)
@@ -44,8 +50,12 @@ class CSafeLoader(CParser, SafeConstructor, Resolver): # type: ignore
class CLoader(CParser, Constructor, Resolver): # type: ignore
- def __init__(self, stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
+ def __init__(
+ self,
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
CParser.__init__(self, stream)
self._parser = self._composer = self
Constructor.__init__(self, loader=self)
@@ -57,25 +67,25 @@ class CLoader(CParser, Constructor, Resolver): # type: ignore
class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): # type: ignore
def __init__(
- self,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ self: StreamType,
+ stream: Any,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
CEmitter.__init__(
self,
stream,
@@ -102,25 +112,25 @@ class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): # type: ignore
class CSafeDumper(CEmitter, SafeRepresenter, Resolver): # type: ignore
def __init__(
- self,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ self: StreamType,
+ stream: Any,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
self._emitter = self._serializer = self._representer = self
CEmitter.__init__(
self,
@@ -145,25 +155,25 @@ class CSafeDumper(CEmitter, SafeRepresenter, Resolver): # type: ignore
class CDumper(CEmitter, Representer, Resolver): # type: ignore
def __init__(
- self,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ self: StreamType,
+ stream: Any,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
CEmitter.__init__(
self,
stream,
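
cyaml.py now carries the same annotated signatures as the pure-Python loaders and dumpers; these classes are only usable when the _ruamel_yaml C extension is installed. Selection is normally done through the YAML() front end rather than by instantiating them directly; a sketch of the documented pure flag:

    from ruamel.yaml import YAML

    # typ='safe' prefers the C based CSafeLoader/CSafeDumper when the
    # _ruamel_yaml extension is available ...
    yaml_fast = YAML(typ='safe')

    # ... while pure=True forces the pure-Python implementation
    yaml_pure = YAML(typ='safe', pure=True)

    print(yaml_fast.load('a: 1\n') == yaml_pure.load('a: 1\n'))   # True
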
diff --git a/dumper.py b/dumper.py
index 5d99b4f..e6457a6 100644
--- a/dumper.py
+++ b/dumper.py
@@ -1,7 +1,5 @@
# coding: utf-8
-from __future__ import absolute_import
-
from ruamel.yaml.emitter import Emitter
from ruamel.yaml.serializer import Serializer
from ruamel.yaml.representer import (
@@ -12,34 +10,33 @@ from ruamel.yaml.representer import (
)
from ruamel.yaml.resolver import Resolver, BaseResolver, VersionedResolver
-if False: # MYPY
- from typing import Any, Dict, List, Union, Optional # NOQA
- from ruamel.yaml.compat import StreamType, VersionType # NOQA
+from typing import Any, Dict, List, Union, Optional # NOQA
+from ruamel.yaml.compat import StreamType, VersionType # NOQA
__all__ = ['BaseDumper', 'SafeDumper', 'Dumper', 'RoundTripDumper']
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
def __init__(
- self,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (Any, StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ self: Any,
+ stream: StreamType,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
Emitter.__init__(
self,
stream,
@@ -72,24 +69,24 @@ class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
def __init__(
self,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ stream: StreamType,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
Emitter.__init__(
self,
stream,
@@ -122,24 +119,24 @@ class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
class Dumper(Emitter, Serializer, Representer, Resolver):
def __init__(
self,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ stream: StreamType,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
Emitter.__init__(
self,
stream,
@@ -172,24 +169,24 @@ class Dumper(Emitter, Serializer, Representer, Resolver):
class RoundTripDumper(Emitter, Serializer, RoundTripRepresenter, VersionedResolver):
def __init__(
self,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (StreamType, Any, Optional[bool], Optional[int], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ stream: StreamType,
+ default_style: Any = None,
+ default_flow_style: Optional[bool] = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
Emitter.__init__(
self,
stream,
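
The dumper changes above are purely mechanical: the Python 2 era "# type:" comments are replaced by inline PEP 484 annotations carrying the same information. A minimal sketch of the two styles on a toy function (illustration only, not code from this repository):

    from typing import Any, Optional

    # old style: the whole signature lives in a trailing type comment
    def dump_old(stream, canonical=None, width=None):
        # type: (Any, Optional[bool], Optional[int]) -> None
        pass

    # new style, as introduced by this commit: inline annotations
    def dump_new(stream: Any, canonical: Optional[bool] = None,
                 width: Optional[int] = None) -> None:
        pass
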
diff --git a/emitter.py b/emitter.py
index 99e8abe..3d168db 100644
--- a/emitter.py
+++ b/emitter.py
@@ -1,8 +1,5 @@
# coding: utf-8
-from __future__ import absolute_import
-from __future__ import print_function
-
# Emitter expects events obeying the following grammar:
# stream ::= STREAM-START document* STREAM-END
# document ::= DOCUMENT-START node DOCUMENT-END
@@ -15,13 +12,13 @@ from ruamel.yaml.error import YAMLError, YAMLStreamError
from ruamel.yaml.events import * # NOQA
# fmt: off
-from ruamel.yaml.compat import utf8, text_type, PY2, nprint, dbg, DBG_EVENT, \
- check_anchorname_char
+from ruamel.yaml.compat import nprint, dbg, DBG_EVENT, \
+ check_anchorname_char, nprintf # NOQA
# fmt: on
-if False: # MYPY
- from typing import Any, Dict, List, Union, Text, Tuple, Optional # NOQA
- from ruamel.yaml.compat import StreamType # NOQA
+
+from typing import Any, Dict, List, Union, Text, Tuple, Optional # NOQA
+from ruamel.yaml.compat import StreamType # NOQA
__all__ = ['Emitter', 'EmitterError']
@@ -30,19 +27,18 @@ class EmitterError(YAMLError):
pass
-class ScalarAnalysis(object):
+class ScalarAnalysis:
def __init__(
self,
- scalar,
- empty,
- multiline,
- allow_flow_plain,
- allow_block_plain,
- allow_single_quoted,
- allow_double_quoted,
- allow_block,
- ):
- # type: (Any, Any, Any, bool, bool, bool, bool, bool) -> None
+ scalar: Any,
+ empty: Any,
+ multiline: Any,
+ allow_flow_plain: bool,
+ allow_block_plain: bool,
+ allow_single_quoted: bool,
+ allow_double_quoted: bool,
+ allow_block: bool,
+ ) -> None:
self.scalar = scalar
self.empty = empty
self.multiline = multiline
@@ -53,22 +49,18 @@ class ScalarAnalysis(object):
self.allow_block = allow_block
-class Indents(object):
+class Indents:
# replacement for the list based stack of None/int
- def __init__(self):
- # type: () -> None
- self.values = [] # type: List[Tuple[int, bool]]
+ def __init__(self) -> None:
+ self.values: List[Tuple[Any, bool]] = []
- def append(self, val, seq):
- # type: (Any, Any) -> None
+ def append(self, val: Any, seq: Any) -> None:
self.values.append((val, seq))
- def pop(self):
- # type: () -> Any
+ def pop(self) -> Any:
return self.values.pop()[0]
- def last_seq(self):
- # type: () -> bool
+ def last_seq(self) -> bool:
# return the seq(uence) value for the element added before the last one
# in increase_indent()
try:
@@ -76,25 +68,31 @@ class Indents(object):
except IndexError:
return False
- def seq_flow_align(self, seq_indent, column):
- # type: (int, int) -> int
+ def seq_flow_align(
+ self, seq_indent: int, column: int, pre_comment: Optional[bool] = False
+ ) -> int:
# extra spaces because of dash
+ # nprint('seq_flow_align', self.values, pre_comment)
if len(self.values) < 2 or not self.values[-1][1]:
- return 0
- # -1 for the dash
+ if len(self.values) == 0 or not pre_comment:
+ return 0
base = self.values[-1][0] if self.values[-1][0] is not None else 0
- return base + seq_indent - column - 1
+ if pre_comment:
+ return base + seq_indent # type: ignore
+ # return (len(self.values)) * seq_indent
+ # -1 for the dash
+ return base + seq_indent - column - 1 # type: ignore
- def __len__(self):
- # type: () -> int
+ def __len__(self) -> int:
return len(self.values)
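
Indents is a small stack of (indent, is_sequence) pairs; the element type is widened to Tuple[Any, bool] because the top-level entry stores None, and seq_flow_align() gains a pre_comment flag used further down for flow collections that follow a comment. A standalone sketch of that bookkeeping (hypothetical class written here for illustration, not the one in emitter.py):

    from typing import Any, List, Optional, Tuple

    class IndentStack:
        """Stack of (indent, entered_a_sequence) pairs, mirroring the emitter's Indents."""

        def __init__(self) -> None:
            self.values: List[Tuple[Any, bool]] = []   # top level pushes (None, False)

        def append(self, val: Optional[int], seq: bool) -> None:
            self.values.append((val, seq))

        def pop(self) -> Optional[int]:
            # only the indent travels back; the seq flag is bookkeeping
            return self.values.pop()[0]

        def last_seq(self) -> bool:
            # seq flag of the entry pushed before the last one
            try:
                return self.values[-2][1]
            except IndexError:
                return False
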
-class Emitter(object):
+class Emitter:
# fmt: off
DEFAULT_TAG_PREFIXES = {
- u'!': u'!',
- u'tag:yaml.org,2002:': u'!!',
+ '!': '!',
+ 'tag:yaml.org,2002:': '!!',
+ '!!': '!!',
}
# fmt: on
@@ -102,44 +100,44 @@ class Emitter(object):
def __init__(
self,
- stream,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- brace_single_entry_mapping_in_flow_sequence=None,
- dumper=None,
- ):
- # type: (StreamType, Any, Optional[int], Optional[int], Optional[bool], Any, Optional[int], Optional[bool], Any, Optional[bool], Any) -> None # NOQA
+ stream: StreamType,
+ canonical: Any = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ block_seq_indent: Optional[int] = None,
+ top_level_colon_align: Optional[int] = None,
+ prefix_colon: Any = None,
+ brace_single_entry_mapping_in_flow_sequence: Optional[bool] = None,
+ dumper: Any = None,
+ ) -> None:
+ # NOQA
self.dumper = dumper
if self.dumper is not None and getattr(self.dumper, '_emitter', None) is None:
self.dumper._emitter = self
self.stream = stream
# Encoding can be overridden by STREAM-START.
- self.encoding = None # type: Optional[Text]
+ self.encoding: Optional[Text] = None
self.allow_space_break = None
# Emitter is a state machine with a stack of states to handle nested
# structures.
- self.states = [] # type: List[Any]
- self.state = self.expect_stream_start # type: Any
+ self.states: List[Any] = []
+ self.state: Any = self.expect_stream_start
# Current event and the event queue.
- self.events = [] # type: List[Any]
- self.event = None # type: Any
+ self.events: List[Any] = []
+ self.event: Any = None
# The current indentation level and the stack of previous indents.
self.indents = Indents()
- self.indent = None # type: Optional[int]
+ self.indent: Optional[int] = None
# flow_context is an expanding/shrinking list consisting of '{' and '['
# for each unclosed flow context. If empty list that means block context
- self.flow_context = [] # type: List[Text]
+ self.flow_context: List[Text] = []
# Contexts.
self.root_context = False
@@ -159,13 +157,13 @@ class Emitter(object):
self.compact_seq_seq = True # dash after dash
self.compact_seq_map = True # key after dash
# self.compact_ms = False # dash after key, only when explicit key with ?
- self.no_newline = None # type: Optional[bool] # set if directly after `- `
+ self.no_newline: Optional[bool] = None # set if directly after `- `
# Whether the document requires an explicit document end indicator
self.open_ended = False
# colon handling
- self.colon = u':'
+ self.colon = ':'
self.prefixed_colon = self.colon if prefix_colon is None else prefix_colon + self.colon
# single entry mappings in flow sequence
self.brace_single_entry_mapping_in_flow_sequence = (
@@ -189,34 +187,34 @@ class Emitter(object):
self.best_width = 80
if width and width > self.best_sequence_indent * 2:
self.best_width = width
- self.best_line_break = u'\n' # type: Any
- if line_break in [u'\r', u'\n', u'\r\n']:
+ self.best_line_break: Any = '\n'
+ if line_break in ['\r', '\n', '\r\n']:
self.best_line_break = line_break
# Tag prefixes.
- self.tag_prefixes = None # type: Any
+ self.tag_prefixes: Any = None
# Prepared anchor and tag.
- self.prepared_anchor = None # type: Any
- self.prepared_tag = None # type: Any
+ self.prepared_anchor: Any = None
+ self.prepared_tag: Any = None
# Scalar analysis and style.
- self.analysis = None # type: Any
- self.style = None # type: Any
+ self.analysis: Any = None
+ self.style: Any = None
self.scalar_after_indicator = True # write a scalar on the same line as `---`
+ self.alt_null = 'null'
+
@property
- def stream(self):
- # type: () -> Any
+ def stream(self) -> Any:
try:
return self._stream
except AttributeError:
raise YAMLStreamError('output stream needs to be specified')
@stream.setter
- def stream(self, val):
- # type: (Any) -> None
+ def stream(self, val: Any) -> None:
if val is None:
return
if not hasattr(val, 'write'):
@@ -224,8 +222,7 @@ class Emitter(object):
self._stream = val
@property
- def serializer(self):
- # type: () -> Any
+ def serializer(self) -> Any:
try:
if hasattr(self.dumper, 'typ'):
return self.dumper.serializer
@@ -234,18 +231,15 @@ class Emitter(object):
return self # cyaml
@property
- def flow_level(self):
- # type: () -> int
+ def flow_level(self) -> int:
return len(self.flow_context)
- def dispose(self):
- # type: () -> None
+ def dispose(self) -> None:
# Reset the state attributes (to clear self-references)
self.states = []
self.state = None
- def emit(self, event):
- # type: (Any) -> None
+ def emit(self, event: Any) -> None:
if dbg(DBG_EVENT):
nprint(event)
self.events.append(event)
@@ -256,8 +250,7 @@ class Emitter(object):
# In some cases, we wait for a few next events before emitting.
- def need_more_events(self):
- # type: () -> bool
+ def need_more_events(self) -> bool:
if not self.events:
return True
event = self.events[0]
@@ -270,8 +263,7 @@ class Emitter(object):
else:
return False
- def need_events(self, count):
- # type: (int) -> bool
+ def need_events(self, count: int) -> bool:
level = 0
for event in self.events[1:]:
if isinstance(event, (DocumentStartEvent, CollectionStartEvent)):
@@ -284,8 +276,9 @@ class Emitter(object):
return False
return len(self.events) < count + 1
- def increase_indent(self, flow=False, sequence=None, indentless=False):
- # type: (bool, Optional[bool], bool) -> None
+ def increase_indent(
+ self, flow: bool = False, sequence: Optional[bool] = None, indentless: bool = False
+ ) -> None:
self.indents.append(self.indent, sequence)
if self.indent is None: # top level
if flow:
@@ -311,35 +304,27 @@ class Emitter(object):
# Stream handlers.
- def expect_stream_start(self):
- # type: () -> None
+ def expect_stream_start(self) -> None:
if isinstance(self.event, StreamStartEvent):
- if PY2:
- if self.event.encoding and not getattr(self.stream, 'encoding', None):
- self.encoding = self.event.encoding
- else:
- if self.event.encoding and not hasattr(self.stream, 'encoding'):
- self.encoding = self.event.encoding
+ if self.event.encoding and not hasattr(self.stream, 'encoding'):
+ self.encoding = self.event.encoding
self.write_stream_start()
self.state = self.expect_first_document_start
else:
- raise EmitterError('expected StreamStartEvent, but got %s' % (self.event,))
+ raise EmitterError(f'expected StreamStartEvent, but got {self.event!s}')
- def expect_nothing(self):
- # type: () -> None
- raise EmitterError('expected nothing, but got %s' % (self.event,))
+ def expect_nothing(self) -> None:
+ raise EmitterError(f'expected nothing, but got {self.event!s}')
# Document handlers.
- def expect_first_document_start(self):
- # type: () -> Any
+ def expect_first_document_start(self) -> Any:
return self.expect_document_start(first=True)
- def expect_document_start(self, first=False):
- # type: (bool) -> None
+ def expect_document_start(self, first: bool = False) -> None:
if isinstance(self.event, DocumentStartEvent):
if (self.event.version or self.event.tags) and self.open_ended:
- self.write_indicator(u'...', True)
+ self.write_indicator('...', True)
self.write_indent()
if self.event.version:
version_text = self.prepare_version(self.event.version)
@@ -363,49 +348,53 @@ class Emitter(object):
)
if not implicit:
self.write_indent()
- self.write_indicator(u'---', True)
+ self.write_indicator('---', True)
if self.canonical:
self.write_indent()
self.state = self.expect_document_root
elif isinstance(self.event, StreamEndEvent):
if self.open_ended:
- self.write_indicator(u'...', True)
+ self.write_indicator('...', True)
self.write_indent()
self.write_stream_end()
self.state = self.expect_nothing
else:
- raise EmitterError('expected DocumentStartEvent, but got %s' % (self.event,))
+ raise EmitterError(f'expected DocumentStartEvent, but got {self.event!s}')
- def expect_document_end(self):
- # type: () -> None
+ def expect_document_end(self) -> None:
if isinstance(self.event, DocumentEndEvent):
self.write_indent()
if self.event.explicit:
- self.write_indicator(u'...', True)
+ self.write_indicator('...', True)
self.write_indent()
self.flush_stream()
self.state = self.expect_document_start
else:
- raise EmitterError('expected DocumentEndEvent, but got %s' % (self.event,))
+ raise EmitterError(f'expected DocumentEndEvent, but got {self.event!s}')
- def expect_document_root(self):
- # type: () -> None
+ def expect_document_root(self) -> None:
self.states.append(self.expect_document_end)
self.expect_node(root=True)
# Node handlers.
- def expect_node(self, root=False, sequence=False, mapping=False, simple_key=False):
- # type: (bool, bool, bool, bool) -> None
+ def expect_node(
+ self,
+ root: bool = False,
+ sequence: bool = False,
+ mapping: bool = False,
+ simple_key: bool = False,
+ ) -> None:
self.root_context = root
self.sequence_context = sequence # not used in PyYAML
+ force_flow_indent = False
self.mapping_context = mapping
self.simple_key_context = simple_key
if isinstance(self.event, AliasEvent):
self.expect_alias()
elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)):
if (
- self.process_anchor(u'&')
+ self.process_anchor('&')
and isinstance(self.event, ScalarEvent)
and self.sequence_context
):
@@ -424,20 +413,27 @@ class Emitter(object):
# nprint('@', self.indention, self.no_newline, self.column)
i2, n2 = self.indention, self.no_newline # NOQA
if self.event.comment:
- if self.event.flow_style is False and self.event.comment:
+ if self.event.flow_style is False:
if self.write_post_comment(self.event):
self.indention = False
self.no_newline = True
+ if self.event.flow_style:
+ column = self.column
if self.write_pre_comment(self.event):
+ if self.event.flow_style:
+ # force_flow_indent = True
+ force_flow_indent = not self.indents.values[-1][1]
self.indention = i2
self.no_newline = not self.indention
+ if self.event.flow_style:
+ self.column = column
if (
self.flow_level
or self.canonical
or self.event.flow_style
or self.check_empty_sequence()
):
- self.expect_flow_sequence()
+ self.expect_flow_sequence(force_flow_indent)
else:
self.expect_block_sequence()
elif isinstance(self.event, MappingStartEvent):
@@ -445,27 +441,29 @@ class Emitter(object):
self.write_post_comment(self.event)
if self.event.comment and self.event.comment[1]:
self.write_pre_comment(self.event)
+ if self.event.flow_style:
+ force_flow_indent = not self.indents.values[-1][1]
if (
self.flow_level
or self.canonical
or self.event.flow_style
or self.check_empty_mapping()
):
- self.expect_flow_mapping(single=self.event.nr_items == 1)
+ self.expect_flow_mapping(
+ single=self.event.nr_items == 1, force_flow_indent=force_flow_indent
+ )
else:
self.expect_block_mapping()
else:
- raise EmitterError('expected NodeEvent, but got %s' % (self.event,))
+ raise EmitterError(f'expected NodeEvent, but got {self.event!s}')
- def expect_alias(self):
- # type: () -> None
+ def expect_alias(self) -> None:
if self.event.anchor is None:
raise EmitterError('anchor is not specified for alias')
- self.process_anchor(u'*')
+ self.process_anchor('*')
self.state = self.states.pop()
- def expect_scalar(self):
- # type: () -> None
+ def expect_scalar(self) -> None:
self.increase_indent(flow=True)
self.process_scalar()
self.indent = self.indents.pop()
@@ -473,21 +471,24 @@ class Emitter(object):
# Flow sequence handlers.
- def expect_flow_sequence(self):
- # type: () -> None
- ind = self.indents.seq_flow_align(self.best_sequence_indent, self.column)
- self.write_indicator(u' ' * ind + u'[', True, whitespace=True)
- self.increase_indent(flow=True, sequence=True)
+ def expect_flow_sequence(self, force_flow_indent: Optional[bool] = False) -> None:
+ if force_flow_indent:
+ self.increase_indent(flow=True, sequence=True)
+ ind = self.indents.seq_flow_align(
+ self.best_sequence_indent, self.column, force_flow_indent
+ )
+ self.write_indicator(' ' * ind + '[', True, whitespace=True)
+ if not force_flow_indent:
+ self.increase_indent(flow=True, sequence=True)
self.flow_context.append('[')
self.state = self.expect_first_flow_sequence_item
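
The force_flow_indent plumbing added here handles a comment that precedes a flow style collection: in that case the indent is increased before the opening bracket is written, so the bracket lands at the sequence indent instead of at whatever column the comment left behind. A minimal round-trip sketch of the kind of input this targets (standard ruamel.yaml API; the exact output layout may differ between releases):

    import sys
    from ruamel.yaml import YAML

    yaml = YAML()   # round-trip mode preserves comments and flow style
    doc = 'a:\n  # comment ahead of a flow sequence\n  [1, 2, 3]\n'
    data = yaml.load(doc)
    yaml.dump(data, sys.stdout)
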
- def expect_first_flow_sequence_item(self):
- # type: () -> None
+ def expect_first_flow_sequence_item(self) -> None:
if isinstance(self.event, SequenceEndEvent):
self.indent = self.indents.pop()
popped = self.flow_context.pop()
assert popped == '['
- self.write_indicator(u']', False)
+ self.write_indicator(']', False)
if self.event.comment and self.event.comment[0]:
# eol comment on empty flow sequence
self.write_post_comment(self.event)
@@ -500,16 +501,15 @@ class Emitter(object):
self.states.append(self.expect_flow_sequence_item)
self.expect_node(sequence=True)
- def expect_flow_sequence_item(self):
- # type: () -> None
+ def expect_flow_sequence_item(self) -> None:
if isinstance(self.event, SequenceEndEvent):
self.indent = self.indents.pop()
popped = self.flow_context.pop()
assert popped == '['
if self.canonical:
- self.write_indicator(u',', False)
+ self.write_indicator(',', False)
self.write_indent()
- self.write_indicator(u']', False)
+ self.write_indicator(']', False)
if self.event.comment and self.event.comment[0]:
# eol comment on flow sequence
self.write_post_comment(self.event)
@@ -517,7 +517,7 @@ class Emitter(object):
self.no_newline = False
self.state = self.states.pop()
else:
- self.write_indicator(u',', False)
+ self.write_indicator(',', False)
if self.canonical or self.column > self.best_width:
self.write_indent()
self.states.append(self.expect_flow_sequence_item)
@@ -525,10 +525,15 @@ class Emitter(object):
# Flow mapping handlers.
- def expect_flow_mapping(self, single=False):
- # type: (Optional[bool]) -> None
- ind = self.indents.seq_flow_align(self.best_sequence_indent, self.column)
- map_init = u'{'
+ def expect_flow_mapping(
+ self, single: Optional[bool] = False, force_flow_indent: Optional[bool] = False
+ ) -> None:
+ if force_flow_indent:
+ self.increase_indent(flow=True, sequence=False)
+ ind = self.indents.seq_flow_align(
+ self.best_sequence_indent, self.column, force_flow_indent
+ )
+ map_init = '{'
if (
single
and self.flow_level
@@ -537,19 +542,19 @@ class Emitter(object):
and not self.brace_single_entry_mapping_in_flow_sequence
):
# single map item with flow context, no curly braces necessary
- map_init = u''
- self.write_indicator(u' ' * ind + map_init, True, whitespace=True)
+ map_init = ''
+ self.write_indicator(' ' * ind + map_init, True, whitespace=True)
self.flow_context.append(map_init)
- self.increase_indent(flow=True, sequence=False)
+ if not force_flow_indent:
+ self.increase_indent(flow=True, sequence=False)
self.state = self.expect_first_flow_mapping_key
- def expect_first_flow_mapping_key(self):
- # type: () -> None
+ def expect_first_flow_mapping_key(self) -> None:
if isinstance(self.event, MappingEndEvent):
self.indent = self.indents.pop()
popped = self.flow_context.pop()
assert popped == '{' # empty flow mapping
- self.write_indicator(u'}', False)
+ self.write_indicator('}', False)
if self.event.comment and self.event.comment[0]:
# eol comment on empty mapping
self.write_post_comment(self.event)
@@ -563,23 +568,22 @@ class Emitter(object):
self.states.append(self.expect_flow_mapping_simple_value)
self.expect_node(mapping=True, simple_key=True)
else:
- self.write_indicator(u'?', True)
+ self.write_indicator('?', True)
self.states.append(self.expect_flow_mapping_value)
self.expect_node(mapping=True)
- def expect_flow_mapping_key(self):
- # type: () -> None
+ def expect_flow_mapping_key(self) -> None:
if isinstance(self.event, MappingEndEvent):
# if self.event.comment and self.event.comment[1]:
# self.write_pre_comment(self.event)
self.indent = self.indents.pop()
popped = self.flow_context.pop()
- assert popped in [u'{', u'']
+ assert popped in ['{', '']
if self.canonical:
- self.write_indicator(u',', False)
+ self.write_indicator(',', False)
self.write_indent()
- if popped != u'':
- self.write_indicator(u'}', False)
+ if popped != '':
+ self.write_indicator('}', False)
if self.event.comment and self.event.comment[0]:
# eol comment on flow mapping, never reached on empty mappings
self.write_post_comment(self.event)
@@ -587,25 +591,23 @@ class Emitter(object):
self.no_newline = False
self.state = self.states.pop()
else:
- self.write_indicator(u',', False)
+ self.write_indicator(',', False)
if self.canonical or self.column > self.best_width:
self.write_indent()
if not self.canonical and self.check_simple_key():
self.states.append(self.expect_flow_mapping_simple_value)
self.expect_node(mapping=True, simple_key=True)
else:
- self.write_indicator(u'?', True)
+ self.write_indicator('?', True)
self.states.append(self.expect_flow_mapping_value)
self.expect_node(mapping=True)
- def expect_flow_mapping_simple_value(self):
- # type: () -> None
+ def expect_flow_mapping_simple_value(self) -> None:
self.write_indicator(self.prefixed_colon, False)
self.states.append(self.expect_flow_mapping_key)
self.expect_node(mapping=True)
- def expect_flow_mapping_value(self):
- # type: () -> None
+ def expect_flow_mapping_value(self) -> None:
if self.canonical or self.column > self.best_width:
self.write_indent()
self.write_indicator(self.prefixed_colon, True)
@@ -614,8 +616,7 @@ class Emitter(object):
# Block sequence handlers.
- def expect_block_sequence(self):
- # type: () -> None
+ def expect_block_sequence(self) -> None:
if self.mapping_context:
indentless = not self.indention
else:
@@ -625,12 +626,10 @@ class Emitter(object):
self.increase_indent(flow=False, sequence=True, indentless=indentless)
self.state = self.expect_first_block_sequence_item
- def expect_first_block_sequence_item(self):
- # type: () -> Any
+ def expect_first_block_sequence_item(self) -> Any:
return self.expect_block_sequence_item(first=True)
- def expect_block_sequence_item(self, first=False):
- # type: (bool) -> None
+ def expect_block_sequence_item(self, first: bool = False) -> None:
if not first and isinstance(self.event, SequenceEndEvent):
if self.event.comment and self.event.comment[1]:
# final comments on a block list e.g. empty line
@@ -644,7 +643,7 @@ class Emitter(object):
nonl = self.no_newline if self.column == 0 else False
self.write_indent()
ind = self.sequence_dash_offset # if len(self.indents) > 1 else 0
- self.write_indicator(u' ' * ind + u'-', True, indention=True)
+ self.write_indicator(' ' * ind + '-', True, indention=True)
if nonl or self.sequence_dash_offset + 2 > self.best_sequence_indent:
self.no_newline = True
self.states.append(self.expect_block_sequence_item)
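
The dash written here is preceded by sequence_dash_offset spaces, which is what YAML.indent(..., offset=...) controls. A short sketch using the public API (expected layout shown as a comment, not taken from this diff):

    import sys
    from ruamel.yaml import YAML

    yaml = YAML()
    yaml.indent(mapping=2, sequence=4, offset=2)   # dash at column 2, items at column 4
    yaml.dump({'items': ['a', 'b']}, sys.stdout)
    # items:
    #   - a
    #   - b
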
@@ -652,19 +651,16 @@ class Emitter(object):
# Block mapping handlers.
- def expect_block_mapping(self):
- # type: () -> None
+ def expect_block_mapping(self) -> None:
if not self.mapping_context and not (self.compact_seq_map or self.column == 0):
self.write_line_break()
self.increase_indent(flow=False, sequence=False)
self.state = self.expect_first_block_mapping_key
- def expect_first_block_mapping_key(self):
- # type: () -> None
+ def expect_first_block_mapping_key(self) -> None:
return self.expect_block_mapping_key(first=True)
- def expect_block_mapping_key(self, first=False):
- # type: (Any) -> None
+ def expect_block_mapping_key(self, first: Any = False) -> None:
if not first and isinstance(self.event, MappingEndEvent):
if self.event.comment and self.event.comment[1]:
# final comments from a doc
@@ -682,33 +678,32 @@ class Emitter(object):
): # sequence keys
try:
if self.event.style == '?':
- self.write_indicator(u'?', True, indention=True)
+ self.write_indicator('?', True, indention=True)
except AttributeError: # aliases have no style
pass
self.states.append(self.expect_block_mapping_simple_value)
self.expect_node(mapping=True, simple_key=True)
- if isinstance(self.event, AliasEvent):
- self.stream.write(u' ')
+ # test on style for alias in !!set
+ if isinstance(self.event, AliasEvent) and not self.event.style == '?':
+ self.stream.write(' ')
else:
- self.write_indicator(u'?', True, indention=True)
+ self.write_indicator('?', True, indention=True)
self.states.append(self.expect_block_mapping_value)
self.expect_node(mapping=True)
- def expect_block_mapping_simple_value(self):
- # type: () -> None
+ def expect_block_mapping_simple_value(self) -> None:
if getattr(self.event, 'style', None) != '?':
- # prefix = u''
+ # prefix = ''
if self.indent == 0 and self.top_level_colon_align is not None:
# write non-prefixed colon
- c = u' ' * (self.top_level_colon_align - self.column) + self.colon
+ c = ' ' * (self.top_level_colon_align - self.column) + self.colon
else:
c = self.prefixed_colon
self.write_indicator(c, False)
self.states.append(self.expect_block_mapping_key)
self.expect_node(mapping=True)
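
top_level_colon_align, used above, pads the colon of top-level keys out to a fixed column. A hedged sketch, assuming the attribute is exposed on the YAML instance as in current ruamel.yaml (the column counts in the comment are approximate):

    import sys
    from ruamel.yaml import YAML

    yaml = YAML()
    yaml.top_level_colon_align = 12   # write the ':' of top-level keys at column 12
    yaml.dump({'name': 'demo', 'description': 'aligned colons'}, sys.stdout)
    # name        : demo
    # description : aligned colons
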
- def expect_block_mapping_value(self):
- # type: () -> None
+ def expect_block_mapping_value(self) -> None:
self.write_indent()
self.write_indicator(self.prefixed_colon, True, indention=True)
self.states.append(self.expect_block_mapping_key)
@@ -716,24 +711,21 @@ class Emitter(object):
# Checkers.
- def check_empty_sequence(self):
- # type: () -> bool
+ def check_empty_sequence(self) -> bool:
return (
isinstance(self.event, SequenceStartEvent)
and bool(self.events)
and isinstance(self.events[0], SequenceEndEvent)
)
- def check_empty_mapping(self):
- # type: () -> bool
+ def check_empty_mapping(self) -> bool:
return (
isinstance(self.event, MappingStartEvent)
and bool(self.events)
and isinstance(self.events[0], MappingEndEvent)
)
- def check_empty_document(self):
- # type: () -> bool
+ def check_empty_document(self) -> bool:
if not isinstance(self.event, DocumentStartEvent) or not self.events:
return False
event = self.events[0]
@@ -745,8 +737,7 @@ class Emitter(object):
and event.value == ""
)
- def check_simple_key(self):
- # type: () -> bool
+ def check_simple_key(self) -> bool:
length = 0
if isinstance(self.event, NodeEvent) and self.event.anchor is not None:
if self.prepared_anchor is None:
@@ -779,8 +770,7 @@ class Emitter(object):
# Anchor, Tag, and Scalar processors.
- def process_anchor(self, indicator):
- # type: (Any) -> bool
+ def process_anchor(self, indicator: Any) -> bool:
if self.event.anchor is None:
self.prepared_anchor = None
return False
@@ -793,12 +783,20 @@ class Emitter(object):
self.prepared_anchor = None
return True
- def process_tag(self):
- # type: () -> None
+ def process_tag(self) -> None:
tag = self.event.tag
if isinstance(self.event, ScalarEvent):
if self.style is None:
self.style = self.choose_scalar_style()
+ if (
+ self.event.value == ''
+ and self.style == "'"
+ and tag == 'tag:yaml.org,2002:null'
+ and self.alt_null is not None
+ ):
+ self.event.value = self.alt_null
+ self.analysis = None
+ self.style = self.choose_scalar_style()
if (not self.canonical or tag is None) and (
(self.style == "" and self.event.implicit[0])
or (self.style != "" and self.event.implicit[1])
@@ -806,7 +804,7 @@ class Emitter(object):
self.prepared_tag = None
return
if self.event.implicit[0] and tag is None:
- tag = u'!'
+ tag = '!'
self.prepared_tag = None
else:
if (not self.canonical or tag is None) and self.event.implicit:
@@ -826,8 +824,7 @@ class Emitter(object):
self.no_newline = True
self.prepared_tag = None
- def choose_scalar_style(self):
- # type: () -> Any
+ def choose_scalar_style(self) -> Any:
if self.analysis is None:
self.analysis = self.analyze_scalar(self.event.value)
if self.event.style == '"' or self.canonical:
@@ -861,8 +858,7 @@ class Emitter(object):
return "'"
return '"'
- def process_scalar(self):
- # type: () -> None
+ def process_scalar(self) -> None:
if self.analysis is None:
self.analysis = self.analyze_scalar(self.event.value)
if self.style is None:
@@ -879,9 +875,32 @@ class Emitter(object):
elif self.style == "'":
self.write_single_quoted(self.analysis.scalar, split)
elif self.style == '>':
- self.write_folded(self.analysis.scalar)
+ try:
+ cmx = self.event.comment[1][0]
+ except (IndexError, TypeError):
+ cmx = ""
+ self.write_folded(self.analysis.scalar, cmx)
+ if (
+ self.event.comment
+ and self.event.comment[0]
+ and self.event.comment[0].column >= self.indent
+ ):
+ # comment following a folded scalar must dedent (issue 376)
+ self.event.comment[0].column = self.indent - 1 # type: ignore
elif self.style == '|':
- self.write_literal(self.analysis.scalar, self.event.comment)
+ # self.write_literal(self.analysis.scalar, self.event.comment)
+ try:
+ cmx = self.event.comment[1][0]
+ except (IndexError, TypeError):
+ cmx = ""
+ self.write_literal(self.analysis.scalar, cmx)
+ if (
+ self.event.comment
+ and self.event.comment[0]
+ and self.event.comment[0].column >= self.indent
+ ):
+ # comment following a literal scalar must dedent (issue 376)
+ self.event.comment[0].column = self.indent - 1 # type: ignore
else:
self.write_plain(self.analysis.scalar, split)
self.analysis = None
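
The new '>' and '|' branches pass any comment attached to the block indicator down to write_folded()/write_literal() and, per the inline note about issue 376, pull a following full-line comment back to indent - 1 so it is not re-emitted inside the block scalar. A minimal round-trip sketch of the situation (behaviour hedged; exact comment placement depends on the release):

    import sys
    from ruamel.yaml import YAML

    yaml = YAML()
    doc = (
        'key: |\n'
        '  line one\n'
        '  line two\n'
        '# full-line comment after the literal scalar\n'
        'other: 1\n'
    )
    data = yaml.load(doc)
    yaml.dump(data, sys.stdout)   # the trailing comment should stay outside the block
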
@@ -891,114 +910,102 @@ class Emitter(object):
# Analyzers.
- def prepare_version(self, version):
- # type: (Any) -> Any
+ def prepare_version(self, version: Any) -> Any:
major, minor = version
if major != 1:
- raise EmitterError('unsupported YAML version: %d.%d' % (major, minor))
- return u'%d.%d' % (major, minor)
+ raise EmitterError(f'unsupported YAML version: {major:d}.{minor:d}')
+ return f'{major:d}.{minor:d}'
- def prepare_tag_handle(self, handle):
- # type: (Any) -> Any
+ def prepare_tag_handle(self, handle: Any) -> Any:
if not handle:
raise EmitterError('tag handle must not be empty')
- if handle[0] != u'!' or handle[-1] != u'!':
- raise EmitterError("tag handle must start and end with '!': %r" % (utf8(handle)))
+ if handle[0] != '!' or handle[-1] != '!':
+ raise EmitterError(f"tag handle must start and end with '!': {handle!r}")
for ch in handle[1:-1]:
- if not (
- u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' or ch in u'-_'
- ):
- raise EmitterError(
- 'invalid character %r in the tag handle: %r' % (utf8(ch), utf8(handle))
- )
+ if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' or ch in '-_'):
+ raise EmitterError(f'invalid character {ch!r} in the tag handle: {handle!r}')
return handle
- def prepare_tag_prefix(self, prefix):
- # type: (Any) -> Any
+ def prepare_tag_prefix(self, prefix: Any) -> Any:
if not prefix:
raise EmitterError('tag prefix must not be empty')
- chunks = [] # type: List[Any]
+ chunks: List[Any] = []
start = end = 0
- if prefix[0] == u'!':
+ if prefix[0] == '!':
end = 1
- ch_set = u"-;/?:@&=+$,_.~*'()[]"
+ ch_set = "-;/?:@&=+$,_.~*'()[]"
if self.dumper:
version = getattr(self.dumper, 'version', (1, 2))
if version is None or version >= (1, 2):
- ch_set += u'#'
+ ch_set += '#'
while end < len(prefix):
ch = prefix[end]
- if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' or ch in ch_set:
+ if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' or ch in ch_set:
end += 1
else:
if start < end:
chunks.append(prefix[start:end])
start = end = end + 1
- data = utf8(ch)
+ data = ch
for ch in data:
- chunks.append(u'%%%02X' % ord(ch))
+ chunks.append(f'%{ord(ch):02X}')
if start < end:
chunks.append(prefix[start:end])
return "".join(chunks)
- def prepare_tag(self, tag):
- # type: (Any) -> Any
+ def prepare_tag(self, tag: Any) -> Any:
if not tag:
raise EmitterError('tag must not be empty')
- if tag == u'!':
+ if tag == '!' or tag == '!!':
return tag
handle = None
suffix = tag
prefixes = sorted(self.tag_prefixes.keys())
for prefix in prefixes:
- if tag.startswith(prefix) and (prefix == u'!' or len(prefix) < len(tag)):
+ if tag.startswith(prefix) and (prefix == '!' or len(prefix) < len(tag)):
handle = self.tag_prefixes[prefix]
suffix = tag[len(prefix) :]
- chunks = [] # type: List[Any]
+ chunks: List[Any] = []
start = end = 0
- ch_set = u"-;/?:@&=+$,_.~*'()[]"
+ ch_set = "-;/?:@&=+$,_.~*'()[]"
if self.dumper:
version = getattr(self.dumper, 'version', (1, 2))
if version is None or version >= (1, 2):
- ch_set += u'#'
+ ch_set += '#'
while end < len(suffix):
ch = suffix[end]
if (
- u'0' <= ch <= u'9'
- or u'A' <= ch <= u'Z'
- or u'a' <= ch <= u'z'
+ '0' <= ch <= '9'
+ or 'A' <= ch <= 'Z'
+ or 'a' <= ch <= 'z'
or ch in ch_set
- or (ch == u'!' and handle != u'!')
+ or (ch == '!' and handle != '!')
):
end += 1
else:
if start < end:
chunks.append(suffix[start:end])
start = end = end + 1
- data = utf8(ch)
+ data = ch
for ch in data:
- chunks.append(u'%%%02X' % ord(ch))
+ chunks.append(f'%{ord(ch):02X}')
if start < end:
chunks.append(suffix[start:end])
suffix_text = "".join(chunks)
if handle:
- return u'%s%s' % (handle, suffix_text)
+ return f'{handle!s}{suffix_text!s}'
else:
- return u'!<%s>' % suffix_text
+ return f'!<{suffix_text!s}>'
- def prepare_anchor(self, anchor):
- # type: (Any) -> Any
+ def prepare_anchor(self, anchor: Any) -> Any:
if not anchor:
raise EmitterError('anchor must not be empty')
for ch in anchor:
if not check_anchorname_char(ch):
- raise EmitterError(
- 'invalid character %r in the anchor: %r' % (utf8(ch), utf8(anchor))
- )
+ raise EmitterError(f'invalid character {ch!r} in the anchor: {anchor!r}')
return anchor
- def analyze_scalar(self, scalar):
- # type: (Any) -> Any
+ def analyze_scalar(self, scalar: Any) -> Any:
# Empty scalar is a special case.
if not scalar:
return ScalarAnalysis(
@@ -1027,7 +1034,7 @@ class Emitter(object):
space_break = False
# Check document indicators.
- if scalar.startswith(u'---') or scalar.startswith(u'...'):
+ if scalar.startswith('---') or scalar.startswith('...'):
block_indicators = True
flow_indicators = True
@@ -1035,7 +1042,7 @@ class Emitter(object):
preceeded_by_whitespace = True
# Last character or followed by a whitespace.
- followed_by_whitespace = len(scalar) == 1 or scalar[1] in u'\0 \t\r\n\x85\u2028\u2029'
+ followed_by_whitespace = len(scalar) == 1 or scalar[1] in '\0 \t\r\n\x85\u2028\u2029'
# The previous character is a space.
previous_space = False
@@ -1050,43 +1057,43 @@ class Emitter(object):
# Check for indicators.
if index == 0:
# Leading indicators are special characters.
- if ch in u'#,[]{}&*!|>\'"%@`':
+ if ch in '#,[]{}&*!|>\'"%@`':
flow_indicators = True
block_indicators = True
- if ch in u'?:': # ToDo
+ if ch in '?:': # ToDo
if self.serializer.use_version == (1, 1):
flow_indicators = True
elif len(scalar) == 1: # single character
flow_indicators = True
if followed_by_whitespace:
block_indicators = True
- if ch == u'-' and followed_by_whitespace:
+ if ch == '-' and followed_by_whitespace:
flow_indicators = True
block_indicators = True
else:
# Some indicators cannot appear within a scalar as well.
- if ch in u',[]{}': # http://yaml.org/spec/1.2/spec.html#id2788859
+ if ch in ',[]{}': # http://yaml.org/spec/1.2/spec.html#id2788859
flow_indicators = True
- if ch == u'?' and self.serializer.use_version == (1, 1):
+ if ch == '?' and self.serializer.use_version == (1, 1):
flow_indicators = True
- if ch == u':':
+ if ch == ':':
if followed_by_whitespace:
flow_indicators = True
block_indicators = True
- if ch == u'#' and preceeded_by_whitespace:
+ if ch == '#' and preceeded_by_whitespace:
flow_indicators = True
block_indicators = True
# Check for line breaks, special, and unicode characters.
- if ch in u'\n\x85\u2028\u2029':
+ if ch in '\n\x85\u2028\u2029':
line_breaks = True
- if not (ch == u'\n' or u'\x20' <= ch <= u'\x7E'):
+ if not (ch == '\n' or '\x20' <= ch <= '\x7E'):
if (
- ch == u'\x85'
- or u'\xA0' <= ch <= u'\uD7FF'
- or u'\uE000' <= ch <= u'\uFFFD'
- or (self.unicode_supplementary and (u'\U00010000' <= ch <= u'\U0010FFFF'))
- ) and ch != u'\uFEFF':
+ ch == '\x85'
+ or '\xA0' <= ch <= '\uD7FF'
+ or '\uE000' <= ch <= '\uFFFD'
+ or (self.unicode_supplementary and ('\U00010000' <= ch <= '\U0010FFFF'))
+ ) and ch != '\uFEFF':
# unicode_characters = True
if not self.allow_unicode:
special_characters = True
@@ -1094,7 +1101,7 @@ class Emitter(object):
special_characters = True
# Detect important whitespace combinations.
- if ch == u' ':
+ if ch == ' ':
if index == 0:
leading_space = True
if index == len(scalar) - 1:
@@ -1103,7 +1110,7 @@ class Emitter(object):
break_space = True
previous_space = True
previous_break = False
- elif ch in u'\n\x85\u2028\u2029':
+ elif ch in '\n\x85\u2028\u2029':
if index == 0:
leading_break = True
if index == len(scalar) - 1:
@@ -1118,9 +1125,9 @@ class Emitter(object):
# Prepare for the next character.
index += 1
- preceeded_by_whitespace = ch in u'\0 \t\r\n\x85\u2028\u2029'
+ preceeded_by_whitespace = ch in '\0 \t\r\n\x85\u2028\u2029'
followed_by_whitespace = (
- index + 1 >= len(scalar) or scalar[index + 1] in u'\0 \t\r\n\x85\u2028\u2029'
+ index + 1 >= len(scalar) or scalar[index + 1] in '\0 \t\r\n\x85\u2028\u2029'
)
# Let's decide what styles are allowed.
@@ -1178,27 +1185,29 @@ class Emitter(object):
# Writers.
- def flush_stream(self):
- # type: () -> None
+ def flush_stream(self) -> None:
if hasattr(self.stream, 'flush'):
self.stream.flush()
- def write_stream_start(self):
- # type: () -> None
+ def write_stream_start(self) -> None:
# Write BOM if needed.
if self.encoding and self.encoding.startswith('utf-16'):
- self.stream.write(u'\uFEFF'.encode(self.encoding))
+ self.stream.write('\uFEFF'.encode(self.encoding))
- def write_stream_end(self):
- # type: () -> None
+ def write_stream_end(self) -> None:
self.flush_stream()
- def write_indicator(self, indicator, need_whitespace, whitespace=False, indention=False):
- # type: (Any, Any, bool, bool) -> None
+ def write_indicator(
+ self,
+ indicator: Any,
+ need_whitespace: Any,
+ whitespace: bool = False,
+ indention: bool = False,
+ ) -> None:
if self.whitespace or not need_whitespace:
data = indicator
else:
- data = u' ' + indicator
+ data = ' ' + indicator
self.whitespace = whitespace
self.indention = self.indention and indention
self.column += len(data)
@@ -1207,8 +1216,7 @@ class Emitter(object):
data = data.encode(self.encoding)
self.stream.write(data)
- def write_indent(self):
- # type: () -> None
+ def write_indent(self) -> None:
indent = self.indent or 0
if (
not self.indention
@@ -1221,14 +1229,13 @@ class Emitter(object):
self.write_line_break()
if self.column < indent:
self.whitespace = True
- data = u' ' * (indent - self.column)
+ data = ' ' * (indent - self.column)
self.column = indent
if self.encoding:
- data = data.encode(self.encoding)
+ data = data.encode(self.encoding) # type: ignore
self.stream.write(data)
- def write_line_break(self, data=None):
- # type: (Any) -> None
+ def write_line_break(self, data: Any = None) -> None:
if data is None:
data = self.best_line_break
self.whitespace = True
@@ -1239,17 +1246,15 @@ class Emitter(object):
data = data.encode(self.encoding)
self.stream.write(data)
- def write_version_directive(self, version_text):
- # type: (Any) -> None
- data = u'%%YAML %s' % version_text
+ def write_version_directive(self, version_text: Any) -> None:
+ data: Any = f'%YAML {version_text!s}'
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
self.write_line_break()
- def write_tag_directive(self, handle_text, prefix_text):
- # type: (Any, Any) -> None
- data = u'%%TAG %s %s' % (handle_text, prefix_text)
+ def write_tag_directive(self, handle_text: Any, prefix_text: Any) -> None:
+ data: Any = f'%TAG {handle_text!s} {prefix_text!s}'
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
@@ -1257,14 +1262,13 @@ class Emitter(object):
# Scalar streams.
- def write_single_quoted(self, text, split=True):
- # type: (Any, Any) -> None
+ def write_single_quoted(self, text: Any, split: Any = True) -> None:
if self.root_context:
if self.requested_indent is not None:
self.write_line_break()
if self.requested_indent != 0:
self.write_indent()
- self.write_indicator(u"'", True)
+ self.write_indicator("'", True)
spaces = False
breaks = False
start = end = 0
@@ -1273,7 +1277,7 @@ class Emitter(object):
if end < len(text):
ch = text[end]
if spaces:
- if ch is None or ch != u' ':
+ if ch is None or ch != ' ':
if (
start + 1 == end
and self.column > self.best_width
@@ -1290,18 +1294,18 @@ class Emitter(object):
self.stream.write(data)
start = end
elif breaks:
- if ch is None or ch not in u'\n\x85\u2028\u2029':
- if text[start] == u'\n':
+ if ch is None or ch not in '\n\x85\u2028\u2029':
+ if text[start] == '\n':
self.write_line_break()
for br in text[start:end]:
- if br == u'\n':
+ if br == '\n':
self.write_line_break()
else:
self.write_line_break(br)
self.write_indent()
start = end
else:
- if ch is None or ch in u' \n\x85\u2028\u2029' or ch == u"'":
+ if ch is None or ch in ' \n\x85\u2028\u2029' or ch == "'":
if start < end:
data = text[start:end]
self.column += len(data)
@@ -1309,45 +1313,44 @@ class Emitter(object):
data = data.encode(self.encoding)
self.stream.write(data)
start = end
- if ch == u"'":
- data = u"''"
+ if ch == "'":
+ data = "''"
self.column += 2
if bool(self.encoding):
data = data.encode(self.encoding)
self.stream.write(data)
start = end + 1
if ch is not None:
- spaces = ch == u' '
- breaks = ch in u'\n\x85\u2028\u2029'
+ spaces = ch == ' '
+ breaks = ch in '\n\x85\u2028\u2029'
end += 1
- self.write_indicator(u"'", False)
+ self.write_indicator("'", False)
ESCAPE_REPLACEMENTS = {
- u'\0': u'0',
- u'\x07': u'a',
- u'\x08': u'b',
- u'\x09': u't',
- u'\x0A': u'n',
- u'\x0B': u'v',
- u'\x0C': u'f',
- u'\x0D': u'r',
- u'\x1B': u'e',
- u'"': u'"',
- u'\\': u'\\',
- u'\x85': u'N',
- u'\xA0': u'_',
- u'\u2028': u'L',
- u'\u2029': u'P',
+ '\0': '0',
+ '\x07': 'a',
+ '\x08': 'b',
+ '\x09': 't',
+ '\x0A': 'n',
+ '\x0B': 'v',
+ '\x0C': 'f',
+ '\x0D': 'r',
+ '\x1B': 'e',
+ '"': '"',
+ '\\': '\\',
+ '\x85': 'N',
+ '\xA0': '_',
+ '\u2028': 'L',
+ '\u2029': 'P',
}
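
ESCAPE_REPLACEMENTS maps characters that cannot appear literally in a double-quoted scalar to their short escape letters; write_double_quoted() below falls back to \xNN, \uNNNN or \UNNNNNNNN for anything not in the table. A small sketch of the visible effect (expected output shown roughly):

    import sys
    from ruamel.yaml import YAML

    yaml = YAML()
    yaml.dump({'bell': 'ring\x07'}, sys.stdout)
    # the control character forces double quotes and the short escape:
    # bell: "ring\a"
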
- def write_double_quoted(self, text, split=True):
- # type: (Any, Any) -> None
+ def write_double_quoted(self, text: Any, split: Any = True) -> None:
if self.root_context:
if self.requested_indent is not None:
self.write_line_break()
if self.requested_indent != 0:
self.write_indent()
- self.write_indicator(u'"', True)
+ self.write_indicator('"', True)
start = end = 0
while end <= len(text):
ch = None
@@ -1355,12 +1358,12 @@ class Emitter(object):
ch = text[end]
if (
ch is None
- or ch in u'"\\\x85\u2028\u2029\uFEFF'
+ or ch in '"\\\x85\u2028\u2029\uFEFF'
or not (
- u'\x20' <= ch <= u'\x7E'
+ '\x20' <= ch <= '\x7E'
or (
self.allow_unicode
- and (u'\xA0' <= ch <= u'\uD7FF' or u'\uE000' <= ch <= u'\uFFFD')
+ and ('\xA0' <= ch <= '\uD7FF' or '\uE000' <= ch <= '\uFFFD')
)
)
):
@@ -1373,13 +1376,13 @@ class Emitter(object):
start = end
if ch is not None:
if ch in self.ESCAPE_REPLACEMENTS:
- data = u'\\' + self.ESCAPE_REPLACEMENTS[ch]
- elif ch <= u'\xFF':
- data = u'\\x%02X' % ord(ch)
- elif ch <= u'\uFFFF':
- data = u'\\u%04X' % ord(ch)
+ data = '\\' + self.ESCAPE_REPLACEMENTS[ch]
+ elif ch <= '\xFF':
+ data = f'\\x{ord(ch):02X}'
+ elif ch <= '\uFFFF':
+ data = f'\\u{ord(ch):04X}'
else:
- data = u'\\U%08X' % ord(ch)
+ data = f'\\U{ord(ch):08X}'
self.column += len(data)
if bool(self.encoding):
data = data.encode(self.encoding)
@@ -1387,11 +1390,11 @@ class Emitter(object):
start = end + 1
if (
0 < end < len(text) - 1
- and (ch == u' ' or start >= end)
+ and (ch == ' ' or start >= end)
and self.column + (end - start) > self.best_width
and split
):
- data = text[start:end] + u'\\'
+ data = text[start:end] + '\\'
if start < end:
start = end
self.column += len(data)
@@ -1401,24 +1404,23 @@ class Emitter(object):
self.write_indent()
self.whitespace = False
self.indention = False
- if text[start] == u' ':
- data = u'\\'
+ if text[start] == ' ':
+ data = '\\'
self.column += len(data)
if bool(self.encoding):
data = data.encode(self.encoding)
self.stream.write(data)
end += 1
- self.write_indicator(u'"', False)
+ self.write_indicator('"', False)
- def determine_block_hints(self, text):
- # type: (Any) -> Any
+ def determine_block_hints(self, text: Any) -> Any:
indent = 0
- indicator = u''
- hints = u''
+ indicator = ''
+ hints = ''
if text:
- if text[0] in u' \n\x85\u2028\u2029':
- indent = self.best_sequence_indent
- hints += text_type(indent)
+ if text[0] in ' \n\x85\u2028\u2029':
+ indent = 2
+ hints += str(indent)
elif self.root_context:
for end in ['\n---', '\n...']:
pos = 0
@@ -1435,19 +1437,20 @@ class Emitter(object):
if pos > -1:
break
if pos > 0:
- indent = self.best_sequence_indent
- if text[-1] not in u'\n\x85\u2028\u2029':
- indicator = u'-'
- elif len(text) == 1 or text[-2] in u'\n\x85\u2028\u2029':
- indicator = u'+'
+ indent = 2
+ if text[-1] not in '\n\x85\u2028\u2029':
+ indicator = '-'
+ elif len(text) == 1 or text[-2] in '\n\x85\u2028\u2029':
+ indicator = '+'
hints += indicator
return hints, indent, indicator
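
determine_block_hints() picks the explicit indentation indicator (now hard-coded to 2 instead of best_sequence_indent) and the chomping indicator for block scalars. A hedged sketch of when those hints show up, using LiteralScalarString to force the '|' style:

    import sys
    from ruamel.yaml import YAML
    from ruamel.yaml.scalarstring import LiteralScalarString

    yaml = YAML()
    yaml.dump({
        'spaced': LiteralScalarString('  starts with spaces\n'),   # expected as 'spaced: |2'
        'chomped': LiteralScalarString('no trailing newline'),     # expected as 'chomped: |-'
    }, sys.stdout)
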
- def write_folded(self, text):
- # type: (Any) -> None
+ def write_folded(self, text: Any, comment: Any) -> None:
hints, _indent, _indicator = self.determine_block_hints(text)
- self.write_indicator(u'>' + hints, True)
- if _indicator == u'+':
+ if not isinstance(comment, str):
+ comment = ''
+ self.write_indicator('>' + hints + comment, True)
+ if _indicator == '+':
self.open_ended = True
self.write_line_break()
leading_space = True
@@ -1459,17 +1462,17 @@ class Emitter(object):
if end < len(text):
ch = text[end]
if breaks:
- if ch is None or ch not in u'\n\x85\u2028\u2029\a':
+ if ch is None or ch not in '\n\x85\u2028\u2029\a':
if (
not leading_space
and ch is not None
- and ch != u' '
- and text[start] == u'\n'
+ and ch != ' '
+ and text[start] == '\n'
):
self.write_line_break()
- leading_space = ch == u' '
+ leading_space = ch == ' '
for br in text[start:end]:
- if br == u'\n':
+ if br == '\n':
self.write_line_break()
else:
self.write_line_break(br)
@@ -1477,7 +1480,7 @@ class Emitter(object):
self.write_indent()
start = end
elif spaces:
- if ch != u' ':
+ if ch != ' ':
if start + 1 == end and self.column > self.best_width:
self.write_indent()
else:
@@ -1488,13 +1491,13 @@ class Emitter(object):
self.stream.write(data)
start = end
else:
- if ch is None or ch in u' \n\x85\u2028\u2029\a':
+ if ch is None or ch in ' \n\x85\u2028\u2029\a':
data = text[start:end]
self.column += len(data)
if bool(self.encoding):
data = data.encode(self.encoding)
self.stream.write(data)
- if ch == u'\a':
+ if ch == '\a':
if end < (len(text) - 1) and not text[end + 2].isspace():
self.write_line_break()
self.write_indent()
@@ -1505,21 +1508,28 @@ class Emitter(object):
self.write_line_break()
start = end
if ch is not None:
- breaks = ch in u'\n\x85\u2028\u2029'
- spaces = ch == u' '
+ breaks = ch in '\n\x85\u2028\u2029'
+ spaces = ch == ' '
end += 1
- def write_literal(self, text, comment=None):
- # type: (Any, Any) -> None
+ def write_literal(self, text: Any, comment: Any = None) -> None:
hints, _indent, _indicator = self.determine_block_hints(text)
- self.write_indicator(u'|' + hints, True)
- try:
- comment = comment[1][0]
- if comment:
- self.stream.write(comment)
- except (TypeError, IndexError):
- pass
- if _indicator == u'+':
+ # if comment is not None:
+ # try:
+ # hints += comment[1][0]
+ # except (TypeError, IndexError) as e:
+ # pass
+ if not isinstance(comment, str):
+ comment = ''
+ self.write_indicator('|' + hints + comment, True)
+ # try:
+ # nprintf('selfev', comment)
+ # cmx = comment[1][0]
+ # if cmx:
+ # self.stream.write(cmx)
+ # except (TypeError, IndexError) as e:
+ # pass
+ if _indicator == '+':
self.open_ended = True
self.write_line_break()
breaks = True
@@ -1529,21 +1539,21 @@ class Emitter(object):
if end < len(text):
ch = text[end]
if breaks:
- if ch is None or ch not in u'\n\x85\u2028\u2029':
+ if ch is None or ch not in '\n\x85\u2028\u2029':
for br in text[start:end]:
- if br == u'\n':
+ if br == '\n':
self.write_line_break()
else:
self.write_line_break(br)
if ch is not None:
if self.root_context:
idnx = self.indent if self.indent is not None else 0
- self.stream.write(u' ' * (_indent + idnx))
+ self.stream.write(' ' * (_indent + idnx))
else:
self.write_indent()
start = end
else:
- if ch is None or ch in u'\n\x85\u2028\u2029':
+ if ch is None or ch in '\n\x85\u2028\u2029':
data = text[start:end]
if bool(self.encoding):
data = data.encode(self.encoding)
@@ -1552,11 +1562,10 @@ class Emitter(object):
self.write_line_break()
start = end
if ch is not None:
- breaks = ch in u'\n\x85\u2028\u2029'
+ breaks = ch in '\n\x85\u2028\u2029'
end += 1
- def write_plain(self, text, split=True):
- # type: (Any, Any) -> None
+ def write_plain(self, text: Any, split: Any = True) -> None:
if self.root_context:
if self.requested_indent is not None:
self.write_line_break()
@@ -1567,10 +1576,10 @@ class Emitter(object):
if not text:
return
if not self.whitespace:
- data = u' '
+ data = ' '
self.column += len(data)
if self.encoding:
- data = data.encode(self.encoding)
+ data = data.encode(self.encoding) # type: ignore
self.stream.write(data)
self.whitespace = False
self.indention = False
@@ -1582,7 +1591,7 @@ class Emitter(object):
if end < len(text):
ch = text[end]
if spaces:
- if ch != u' ':
+ if ch != ' ':
if start + 1 == end and self.column > self.best_width and split:
self.write_indent()
self.whitespace = False
@@ -1591,15 +1600,15 @@ class Emitter(object):
data = text[start:end]
self.column += len(data)
if self.encoding:
- data = data.encode(self.encoding)
+ data = data.encode(self.encoding) # type: ignore
self.stream.write(data)
start = end
elif breaks:
- if ch not in u'\n\x85\u2028\u2029': # type: ignore
- if text[start] == u'\n':
+ if ch not in '\n\x85\u2028\u2029': # type: ignore
+ if text[start] == '\n':
self.write_line_break()
for br in text[start:end]:
- if br == u'\n':
+ if br == '\n':
self.write_line_break()
else:
self.write_line_break(br)
@@ -1608,11 +1617,15 @@ class Emitter(object):
self.indention = False
start = end
else:
- if ch is None or ch in u' \n\x85\u2028\u2029':
+ if ch is None or ch in ' \n\x85\u2028\u2029':
data = text[start:end]
+ if len(data) > self.best_width and \
+ self.column > self.indent: # type: ignore
+ # words longer than line length get a line of their own
+ self.write_indent()
self.column += len(data)
if self.encoding:
- data = data.encode(self.encoding)
+ data = data.encode(self.encoding) # type: ignore
try:
self.stream.write(data)
except: # NOQA
@@ -1620,14 +1633,13 @@ class Emitter(object):
raise
start = end
if ch is not None:
- spaces = ch == u' '
- breaks = ch in u'\n\x85\u2028\u2029'
+ spaces = ch == ' '
+ breaks = ch in '\n\x85\u2028\u2029'
end += 1
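
The extra check added to write_plain() moves a single token that is longer than the configured width onto its own line instead of letting it trail the key. A short sketch (width chosen here for illustration; output shown roughly):

    import sys
    from ruamel.yaml import YAML

    yaml = YAML()
    yaml.width = 24
    yaml.dump({'url': 'https://example.invalid/a/rather/long/plain/scalar'}, sys.stdout)
    # url:
    #   https://example.invalid/a/rather/long/plain/scalar
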
- def write_comment(self, comment, pre=False):
- # type: (Any, bool) -> None
+ def write_comment(self, comment: Any, pre: bool = False) -> None:
value = comment.value
- # nprintf('{:02d} {:02d} {!r}'.format(self.column, comment.start_mark.column, value))
+ # nprintf(f'{self.column:02d} {comment.start_mark.column:02d} {value!r}')
if not pre and value[-1] == '\n':
value = value[:-1]
try:
@@ -1660,8 +1672,7 @@ class Emitter(object):
if not pre:
self.write_line_break()
- def write_pre_comment(self, event):
- # type: (Any) -> bool
+ def write_pre_comment(self, event: Any) -> bool:
comments = event.comment[1]
if comments is None:
return False
@@ -1676,12 +1687,11 @@ class Emitter(object):
if isinstance(event, start_events):
comment.pre_done = True
except TypeError:
- sys.stdout.write('eventtt {} {}'.format(type(event), event))
+ sys.stdout.write(f'eventtt {type(event)} {event}')
raise
return True
- def write_post_comment(self, event):
- # type: (Any) -> bool
+ def write_post_comment(self, event: Any) -> bool:
if self.event.comment[0] is None:
return False
comment = event.comment[0]
diff --git a/error.py b/error.py
index d5f1553..ccdbf28 100644
--- a/error.py
+++ b/error.py
@@ -1,14 +1,9 @@
# coding: utf-8
-from __future__ import absolute_import
-
import warnings
import textwrap
-from ruamel.yaml.compat import utf8
-
-if False: # MYPY
- from typing import Any, Dict, Optional, List, Text # NOQA
+from typing import Any, Dict, Optional, List, Text # NOQA
__all__ = [
@@ -24,31 +19,27 @@ __all__ = [
]
-class StreamMark(object):
+class StreamMark:
__slots__ = 'name', 'index', 'line', 'column'
- def __init__(self, name, index, line, column):
- # type: (Any, int, int, int) -> None
+ def __init__(self, name: Any, index: int, line: int, column: int) -> None:
self.name = name
self.index = index
self.line = line
self.column = column
- def __str__(self):
- # type: () -> Any
- where = ' in "%s", line %d, column %d' % (self.name, self.line + 1, self.column + 1)
+ def __str__(self) -> Any:
+ where = f' in "{self.name!s}", line {self.line + 1:d}, column {self.column + 1:d}'
return where
- def __eq__(self, other):
- # type: (Any) -> bool
+ def __eq__(self, other: Any) -> bool:
if self.line != other.line or self.column != other.column:
return False
if self.name != other.name or self.index != other.index:
return False
return True
- def __ne__(self, other):
- # type: (Any) -> bool
+ def __ne__(self, other: Any) -> bool:
return not self.__eq__(other)
@@ -59,19 +50,19 @@ class FileMark(StreamMark):
class StringMark(StreamMark):
__slots__ = 'name', 'index', 'line', 'column', 'buffer', 'pointer'
- def __init__(self, name, index, line, column, buffer, pointer):
- # type: (Any, int, int, int, Any, Any) -> None
+ def __init__(
+ self, name: Any, index: int, line: int, column: int, buffer: Any, pointer: Any
+ ) -> None:
StreamMark.__init__(self, name, index, line, column)
self.buffer = buffer
self.pointer = pointer
- def get_snippet(self, indent=4, max_length=75):
- # type: (int, int) -> Any
+ def get_snippet(self, indent: int = 4, max_length: int = 75) -> Any:
if self.buffer is None: # always False
return None
head = ""
start = self.pointer
- while start > 0 and self.buffer[start - 1] not in u'\0\r\n\x85\u2028\u2029':
+ while start > 0 and self.buffer[start - 1] not in '\0\r\n\x85\u2028\u2029':
start -= 1
if self.pointer - start > max_length / 2 - 1:
head = ' ... '
@@ -79,15 +70,15 @@ class StringMark(StreamMark):
break
tail = ""
end = self.pointer
- while end < len(self.buffer) and self.buffer[end] not in u'\0\r\n\x85\u2028\u2029':
+ while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029':
end += 1
if end - self.pointer > max_length / 2 - 1:
tail = ' ... '
end -= 5
break
- snippet = utf8(self.buffer[start:end])
+ snippet = self.buffer[start:end]
caret = '^'
- caret = '^ (line: {})'.format(self.line + 1)
+ caret = f'^ (line: {self.line + 1})'
return (
' ' * indent
+ head
@@ -98,20 +89,25 @@ class StringMark(StreamMark):
+ caret
)
- def __str__(self):
- # type: () -> Any
+ def __str__(self) -> Any:
snippet = self.get_snippet()
- where = ' in "%s", line %d, column %d' % (self.name, self.line + 1, self.column + 1)
+ where = f' in "{self.name!s}", line {self.line + 1:d}, column {self.column + 1:d}'
if snippet is not None:
where += ':\n' + snippet
return where
+ def __repr__(self) -> Any:
+ snippet = self.get_snippet()
+ where = f' in "{self.name!s}", line {self.line + 1:d}, column {self.column + 1:d}'
+ if snippet is not None:
+ where += ':\n' + snippet
+ return where
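
StringMark.__str__() and the newly added __repr__() render the 'in ..., line ..., column ...' location plus a caret snippet; the YAML error and warning classes below embed these marks in their messages. A quick sketch of how that surfaces to callers (the exact wording is up to the parser):

    from ruamel.yaml import YAML
    from ruamel.yaml.error import YAMLError

    yaml = YAML()
    try:
        yaml.load('a: [1, 2\nb: 3\n')   # unterminated flow sequence
    except YAMLError as exc:
        print(exc)   # includes marks with line/column and a '^' caret snippet
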
-class CommentMark(object):
+
+class CommentMark:
__slots__ = ('column',)
- def __init__(self, column):
- # type: (Any) -> None
+ def __init__(self, column: Any) -> None:
self.column = column
@@ -122,14 +118,13 @@ class YAMLError(Exception):
class MarkedYAMLError(YAMLError):
def __init__(
self,
- context=None,
- context_mark=None,
- problem=None,
- problem_mark=None,
- note=None,
- warn=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any) -> None
+ context: Any = None,
+ context_mark: Any = None,
+ problem: Any = None,
+ problem_mark: Any = None,
+ note: Any = None,
+ warn: Any = None,
+ ) -> None:
self.context = context
self.context_mark = context_mark
self.problem = problem
@@ -137,9 +132,8 @@ class MarkedYAMLError(YAMLError):
self.note = note
# warn is ignored
- def __str__(self):
- # type: () -> Any
- lines = [] # type: List[str]
+ def __str__(self) -> Any:
+ lines: List[str] = []
if self.context is not None:
lines.append(self.context)
if self.context_mark is not None and (
@@ -171,14 +165,13 @@ class YAMLWarning(Warning):
class MarkedYAMLWarning(YAMLWarning):
def __init__(
self,
- context=None,
- context_mark=None,
- problem=None,
- problem_mark=None,
- note=None,
- warn=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any) -> None
+ context: Any = None,
+ context_mark: Any = None,
+ problem: Any = None,
+ problem_mark: Any = None,
+ note: Any = None,
+ warn: Any = None,
+ ) -> None:
self.context = context
self.context_mark = context_mark
self.problem = problem
@@ -186,9 +179,8 @@ class MarkedYAMLWarning(YAMLWarning):
self.note = note
self.warn = warn
- def __str__(self):
- # type: () -> Any
- lines = [] # type: List[str]
+ def __str__(self) -> Any:
+ lines: List[str] = []
if self.context is not None:
lines.append(self.context)
if self.context_mark is not None and (
@@ -233,30 +225,26 @@ warnings.simplefilter('once', UnsafeLoaderWarning)
class MantissaNoDotYAML1_1Warning(YAMLWarning):
- def __init__(self, node, flt_str):
- # type: (Any, Any) -> None
+ def __init__(self, node: Any, flt_str: Any) -> None:
self.node = node
self.flt = flt_str
- def __str__(self):
- # type: () -> Any
+ def __str__(self) -> Any:
line = self.node.start_mark.line
col = self.node.start_mark.column
- return """
+ return f"""
In YAML 1.1 floating point values should have a dot ('.') in their mantissa.
See the Floating-Point Language-Independent Type for YAML™ Version 1.1 specification
( http://yaml.org/type/float.html ). This dot is not required for JSON nor for YAML 1.2
-Correct your float: "{}" on line: {}, column: {}
+Correct your float: "{self.flt}" on line: {line}, column: {col}
or alternatively include the following in your code:
import warnings
warnings.simplefilter('ignore', ruamel.yaml.error.MantissaNoDotYAML1_1Warning)
-""".format(
- self.flt, line, col
- )
+"""
warnings.simplefilter('once', MantissaNoDotYAML1_1Warning)
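
The rewritten warning text quotes its own escape hatch. As a sketch (not part of this diff; the example document, and the assumption that a dotless exponent such as 30e3 triggers the warning under 1.1 resolution, are added for illustration):

    import warnings
    import ruamel.yaml

    # filter suggested by the warning message itself
    warnings.simplefilter('ignore', ruamel.yaml.error.MantissaNoDotYAML1_1Warning)

    yaml = ruamel.yaml.YAML()
    yaml.version = (1, 1)              # YAML 1.1 resolution is what triggers the check
    data = yaml.load('speed: 30e3\n')  # assumed to warn without the filter above
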
@@ -269,14 +257,13 @@ class YAMLFutureWarning(Warning):
class MarkedYAMLFutureWarning(YAMLFutureWarning):
def __init__(
self,
- context=None,
- context_mark=None,
- problem=None,
- problem_mark=None,
- note=None,
- warn=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any) -> None
+ context: Any = None,
+ context_mark: Any = None,
+ problem: Any = None,
+ problem_mark: Any = None,
+ note: Any = None,
+ warn: Any = None,
+ ) -> None:
self.context = context
self.context_mark = context_mark
self.problem = problem
@@ -284,9 +271,8 @@ class MarkedYAMLFutureWarning(YAMLFutureWarning):
self.note = note
self.warn = warn
- def __str__(self):
- # type: () -> Any
- lines = [] # type: List[str]
+ def __str__(self) -> Any:
+ lines: List[str] = []
if self.context is not None:
lines.append(self.context)
diff --git a/events.py b/events.py
index 58b2121..03f3d9e 100644
--- a/events.py
+++ b/events.py
@@ -2,20 +2,21 @@
# Abstract classes.
-if False: # MYPY
- from typing import Any, Dict, Optional, List # NOQA
+from typing import Any, Dict, Optional, List # NOQA
+SHOW_LINES = False
-def CommentCheck():
- # type: () -> None
+
+def CommentCheck() -> None:
pass
-class Event(object):
+class Event:
__slots__ = 'start_mark', 'end_mark', 'comment'
- def __init__(self, start_mark=None, end_mark=None, comment=CommentCheck):
- # type: (Any, Any, Any) -> None
+ def __init__(
+ self, start_mark: Any = None, end_mark: Any = None, comment: Any = CommentCheck
+ ) -> None:
self.start_mark = start_mark
self.end_mark = end_mark
# assert comment is not CommentCheck
@@ -23,24 +24,44 @@ class Event(object):
comment = None
self.comment = comment
- def __repr__(self):
- # type: () -> Any
- attributes = [
- key
- for key in ['anchor', 'tag', 'implicit', 'value', 'flow_style', 'style']
- if hasattr(self, key)
- ]
- arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) for key in attributes])
- if self.comment not in [None, CommentCheck]:
- arguments += ', comment={!r}'.format(self.comment)
- return '%s(%s)' % (self.__class__.__name__, arguments)
+ def __repr__(self) -> Any:
+ if True:
+ arguments = []
+ if hasattr(self, 'value'):
+ # if you use repr(getattr(self, 'value')) then flake8 complains about
+ # abuse of getattr with a constant. When you change to self.value
+ # then mypy throws an error
+ arguments.append(repr(self.value)) # type: ignore
+ for key in ['anchor', 'tag', 'implicit', 'flow_style', 'style']:
+ v = getattr(self, key, None)
+ if v is not None:
+ arguments.append(f'{key!s}={v!r}')
+ if self.comment not in [None, CommentCheck]:
+ arguments.append(f'comment={self.comment!r}')
+ if SHOW_LINES:
+ arguments.append(
+ f'({self.start_mark.line}:{self.start_mark.column}/'
+ f'{self.end_mark.line}:{self.end_mark.column})'
+ )
+ arguments = ', '.join(arguments) # type: ignore
+ else:
+ attributes = [
+ key
+ for key in ['anchor', 'tag', 'implicit', 'value', 'flow_style', 'style']
+ if hasattr(self, key)
+ ]
+ arguments = ', '.join([f'{key!s}={getattr(self, key)!r}' for key in attributes])
+ if self.comment not in [None, CommentCheck]:
+ arguments += f', comment={self.comment!r}'
+ return f'{self.__class__.__name__!s}({arguments!s})'
class NodeEvent(Event):
__slots__ = ('anchor',)
- def __init__(self, anchor, start_mark=None, end_mark=None, comment=None):
- # type: (Any, Any, Any, Any) -> None
+ def __init__(
+ self, anchor: Any, start_mark: Any = None, end_mark: Any = None, comment: Any = None
+ ) -> None:
Event.__init__(self, start_mark, end_mark, comment)
self.anchor = anchor
@@ -50,16 +71,15 @@ class CollectionStartEvent(NodeEvent):
def __init__(
self,
- anchor,
- tag,
- implicit,
- start_mark=None,
- end_mark=None,
- flow_style=None,
- comment=None,
- nr_items=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any, Any, Optional[int]) -> None
+ anchor: Any,
+ tag: Any,
+ implicit: Any,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ flow_style: Any = None,
+ comment: Any = None,
+ nr_items: Optional[int] = None,
+ ) -> None:
NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
self.tag = tag
self.implicit = implicit
@@ -77,8 +97,13 @@ class CollectionEndEvent(Event):
class StreamStartEvent(Event):
__slots__ = ('encoding',)
- def __init__(self, start_mark=None, end_mark=None, encoding=None, comment=None):
- # type: (Any, Any, Any, Any) -> None
+ def __init__(
+ self,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ encoding: Any = None,
+ comment: Any = None,
+ ) -> None:
Event.__init__(self, start_mark, end_mark, comment)
self.encoding = encoding
@@ -92,14 +117,13 @@ class DocumentStartEvent(Event):
def __init__(
self,
- start_mark=None,
- end_mark=None,
- explicit=None,
- version=None,
- tags=None,
- comment=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any) -> None
+ start_mark: Any = None,
+ end_mark: Any = None,
+ explicit: Any = None,
+ version: Any = None,
+ tags: Any = None,
+ comment: Any = None,
+ ) -> None:
Event.__init__(self, start_mark, end_mark, comment)
self.explicit = explicit
self.version = version
@@ -109,14 +133,30 @@ class DocumentStartEvent(Event):
class DocumentEndEvent(Event):
__slots__ = ('explicit',)
- def __init__(self, start_mark=None, end_mark=None, explicit=None, comment=None):
- # type: (Any, Any, Any, Any) -> None
+ def __init__(
+ self,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ explicit: Any = None,
+ comment: Any = None,
+ ) -> None:
Event.__init__(self, start_mark, end_mark, comment)
self.explicit = explicit
class AliasEvent(NodeEvent):
- __slots__ = ()
+ __slots__ = 'style'
+
+ def __init__(
+ self,
+ anchor: Any,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ style: Any = None,
+ comment: Any = None,
+ ) -> None:
+ NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
+ self.style = style
class ScalarEvent(NodeEvent):
@@ -124,16 +164,15 @@ class ScalarEvent(NodeEvent):
def __init__(
self,
- anchor,
- tag,
- implicit,
- value,
- start_mark=None,
- end_mark=None,
- style=None,
- comment=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any, Any, Any) -> None
+ anchor: Any,
+ tag: Any,
+ implicit: Any,
+ value: Any,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ style: Any = None,
+ comment: Any = None,
+ ) -> None:
NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
self.tag = tag
self.implicit = implicit
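
The new module-level SHOW_LINES flag makes Event.__repr__ append the start/end mark positions. A minimal sketch, assuming the YAML.parse() instance method added further down in main.py:

    import ruamel.yaml
    import ruamel.yaml.events

    ruamel.yaml.events.SHOW_LINES = True   # append '(line:col/line:col)' to every repr

    yaml = ruamel.yaml.YAML(typ='safe', pure=True)
    for event in yaml.parse('a: 1\nb: [2, 3]\n'):
        print(event)                       # reprs now end with the start/end positions
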
diff --git a/loader.py b/loader.py
index 979ec62..d6c708b 100644
--- a/loader.py
+++ b/loader.py
@@ -1,8 +1,5 @@
# coding: utf-8
-from __future__ import absolute_import
-
-
from ruamel.yaml.reader import Reader
from ruamel.yaml.scanner import Scanner, RoundTripScanner
from ruamel.yaml.parser import Parser, RoundTripParser
@@ -15,16 +12,20 @@ from ruamel.yaml.constructor import (
)
from ruamel.yaml.resolver import VersionedResolver
-if False: # MYPY
- from typing import Any, Dict, List, Union, Optional # NOQA
- from ruamel.yaml.compat import StreamTextType, VersionType # NOQA
+from typing import Any, Dict, List, Union, Optional # NOQA
+from ruamel.yaml.compat import StreamTextType, VersionType # NOQA
__all__ = ['BaseLoader', 'SafeLoader', 'Loader', 'RoundTripLoader']
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, VersionedResolver):
- def __init__(self, stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
+ def __init__(
+ self,
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
+ self.comment_handling = None
Reader.__init__(self, stream, loader=self)
Scanner.__init__(self, loader=self)
Parser.__init__(self, loader=self)
@@ -34,8 +35,13 @@ class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, VersionedRe
class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, VersionedResolver):
- def __init__(self, stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
+ def __init__(
+ self,
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
+ self.comment_handling = None
Reader.__init__(self, stream, loader=self)
Scanner.__init__(self, loader=self)
Parser.__init__(self, loader=self)
@@ -45,8 +51,13 @@ class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, VersionedRe
class Loader(Reader, Scanner, Parser, Composer, Constructor, VersionedResolver):
- def __init__(self, stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
+ def __init__(
+ self,
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
+ self.comment_handling = None
Reader.__init__(self, stream, loader=self)
Scanner.__init__(self, loader=self)
Parser.__init__(self, loader=self)
@@ -63,9 +74,14 @@ class RoundTripLoader(
RoundTripConstructor,
VersionedResolver,
):
- def __init__(self, stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
+ def __init__(
+ self,
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
# self.reader = Reader.__init__(self, stream)
+ self.comment_handling = None # issue 385
Reader.__init__(self, stream, loader=self)
RoundTripScanner.__init__(self, loader=self)
RoundTripParser.__init__(self, loader=self)
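
These loaders are rarely instantiated directly any more; their version and preserve_quotes parameters are normally set as attributes on a YAML instance, which forwards them when building its constructor. A hedged usage sketch:

    from ruamel.yaml import YAML

    yaml = YAML()                  # round-trip loader/dumper by default
    yaml.version = (1, 2)          # forwarded as the loader's `version`
    yaml.preserve_quotes = True    # forwarded as `preserve_quotes`
    data = yaml.load('a: "double quoted"\nb: 1\n')
    # with preserve_quotes, the quoting style of 'a' survives a later yaml.dump()
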
diff --git a/main.py b/main.py
index a452a39..636ad6c 100644
--- a/main.py
+++ b/main.py
@@ -1,7 +1,5 @@
# coding: utf-8
-from __future__ import absolute_import, unicode_literals, print_function
-
import sys
import os
import warnings
@@ -18,7 +16,7 @@ from ruamel.yaml.nodes import * # NOQA
from ruamel.yaml.loader import BaseLoader, SafeLoader, Loader, RoundTripLoader # NOQA
from ruamel.yaml.dumper import BaseDumper, SafeDumper, Dumper, RoundTripDumper # NOQA
-from ruamel.yaml.compat import StringIO, BytesIO, with_metaclass, PY3, nprint
+from ruamel.yaml.compat import StringIO, BytesIO, with_metaclass, nprint, nprintf # NOQA
from ruamel.yaml.resolver import VersionedResolver, Resolver # NOQA
from ruamel.yaml.representer import (
BaseRepresenter,
@@ -32,16 +30,13 @@ from ruamel.yaml.constructor import (
Constructor,
RoundTripConstructor,
)
-from ruamel.yaml.loader import Loader as UnsafeLoader
-
-if False: # MYPY
- from typing import List, Set, Dict, Union, Any, Callable, Optional, Text # NOQA
- from ruamel.yaml.compat import StreamType, StreamTextType, VersionType # NOQA
+from ruamel.yaml.loader import Loader as UnsafeLoader # NOQA
+from ruamel.yaml.comments import CommentedMap, CommentedSeq, C_PRE
- if PY3:
- from pathlib import Path
- else:
- Path = Any
+from typing import List, Set, Dict, Union, Any, Callable, Optional, Text, Type # NOQA
+from types import TracebackType
+from ruamel.yaml.compat import StreamType, StreamTextType, VersionType # NOQA
+from pathlib import Path # NOQA
try:
from _ruamel_yaml import CParser, CEmitter # type: ignore
@@ -50,20 +45,21 @@ except: # NOQA
# import io
-enforce = object()
-
# YAML is an acronym, i.e. spoken: rhymes with "camel". And thus a
# subset of abbreviations, which should be all caps according to PEP8
-class YAML(object):
+class YAML:
def __init__(
- self, _kw=enforce, typ=None, pure=False, output=None, plug_ins=None # input=None,
- ):
- # type: (Any, Optional[Text], Any, Any, Any) -> None
+ self: Any,
+ *,
+ typ: Optional[Text] = None,
+ pure: Any = False,
+ output: Any = None,
+ plug_ins: Any = None,
+ ) -> None: # input=None,
"""
- _kw: not used, forces keyword arguments in 2.7 (in 3 you can do (*, safe_load=..)
typ: 'rt'/None -> RoundTripLoader/RoundTripDumper, (default)
'safe' -> SafeLoader/SafeDumper,
'unsafe' -> normal/unsafe Loader/Dumper
@@ -72,31 +68,27 @@ class YAML(object):
input/output: needed to work as context manager
plug_ins: a list of plug-in files
"""
- if _kw is not enforce:
- raise TypeError(
- '{}.__init__() takes no positional argument but at least '
- 'one was given ({!r})'.format(self.__class__.__name__, _kw)
- )
self.typ = ['rt'] if typ is None else (typ if isinstance(typ, list) else [typ])
self.pure = pure
# self._input = input
self._output = output
- self._context_manager = None # type: Any
+ self._context_manager: Any = None
- self.plug_ins = [] # type: List[Any]
+ self.plug_ins: List[Any] = []
for pu in ([] if plug_ins is None else plug_ins) + self.official_plug_ins():
file_name = pu.replace(os.sep, '.')
self.plug_ins.append(import_module(file_name))
- self.Resolver = ruamel.yaml.resolver.VersionedResolver # type: Any
+ self.Resolver: Any = ruamel.yaml.resolver.VersionedResolver
self.allow_unicode = True
- self.Reader = None # type: Any
- self.Representer = None # type: Any
- self.Constructor = None # type: Any
- self.Scanner = None # type: Any
- self.Serializer = None # type: Any
- self.default_flow_style = None # type: Any
+ self.Reader: Any = None
+ self.Representer: Any = None
+ self.Constructor: Any = None
+ self.Scanner: Any = None
+ self.Serializer: Any = None
+ self.default_flow_style: Any = None
+ self.comment_handling = None
typ_found = 1
setup_rt = False
if 'rt' in self.typ:
@@ -123,6 +115,18 @@ class YAML(object):
self.Parser = ruamel.yaml.parser.Parser if pure or CParser is None else CParser
self.Composer = ruamel.yaml.composer.Composer
self.Constructor = ruamel.yaml.constructor.Constructor
+ elif 'rtsc' in self.typ:
+ self.default_flow_style = False
+ # no optimized rt-dumper yet
+ self.Emitter = ruamel.yaml.emitter.Emitter
+ self.Serializer = ruamel.yaml.serializer.Serializer
+ self.Representer = ruamel.yaml.representer.RoundTripRepresenter
+ self.Scanner = ruamel.yaml.scanner.RoundTripScannerSC
+ # no optimized rt-parser yet
+ self.Parser = ruamel.yaml.parser.RoundTripParserSC
+ self.Composer = ruamel.yaml.composer.Composer
+ self.Constructor = ruamel.yaml.constructor.RoundTripConstructor
+ self.comment_handling = C_PRE
else:
setup_rt = True
typ_found = 0
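
The typ strings checked here decide which component classes get wired into the instance; 'rtsc' is new and routes comments through the scanner (comment_handling = C_PRE), with the comments above noting that no optimized dumper/parser exists yet. A sketch of the selection (behaviour beyond construction is an assumption):

    from ruamel.yaml import YAML

    yaml_rt = YAML()                # typ=None -> 'rt', the round-trip default
    yaml_safe = YAML(typ='safe')    # SafeConstructor/SafeRepresenter
    yaml_fast = YAML(typ='unsafe')  # plain Constructor, C extension when available
    yaml_sc = YAML(typ='rtsc')      # new: scanner-based comment handling (C_PRE)
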
@@ -141,29 +145,29 @@ class YAML(object):
self.stream = None
self.canonical = None
self.old_indent = None
- self.width = None
+ self.width: Union[int, None] = None
self.line_break = None
- self.map_indent = None
- self.sequence_indent = None
- self.sequence_dash_offset = 0
+ self.map_indent: Union[int, None] = None
+ self.sequence_indent: Union[int, None] = None
+ self.sequence_dash_offset: int = 0
self.compact_seq_seq = None
self.compact_seq_map = None
self.sort_base_mapping_type_on_output = None # default: sort
self.top_level_colon_align = None
self.prefix_colon = None
- self.version = None
- self.preserve_quotes = None
+ self.version: Optional[Any] = None
+ self.preserve_quotes: Optional[bool] = None
self.allow_duplicate_keys = False # duplicate keys in map, set
self.encoding = 'utf-8'
- self.explicit_start = None
- self.explicit_end = None
+ self.explicit_start: Union[bool, None] = None
+ self.explicit_end: Union[bool, None] = None
self.tags = None
self.default_style = None
self.top_level_block_style_scalar_no_indent_error_1_1 = False
# directives end indicator with single scalar document
- self.scalar_after_indicator = None
+ self.scalar_after_indicator: Optional[bool] = None
# [a, b: 1, c: {d: 2}] vs. [a, {b: 1}, {c: {d: 2}}]
self.brace_single_entry_mapping_in_flow_sequence = False
for module in self.plug_ins:
@@ -173,12 +177,11 @@ class YAML(object):
break
if typ_found == 0:
raise NotImplementedError(
- 'typ "{}"not recognised (need to install plug-in?)'.format(self.typ)
+ f'typ "{self.typ}" not recognised (need to install plug-in?)'
)
@property
- def reader(self):
- # type: () -> Any
+ def reader(self) -> Any:
try:
return self._reader # type: ignore
except AttributeError:
@@ -186,8 +189,7 @@ class YAML(object):
return self._reader
@property
- def scanner(self):
- # type: () -> Any
+ def scanner(self) -> Any:
try:
return self._scanner # type: ignore
except AttributeError:
@@ -195,8 +197,7 @@ class YAML(object):
return self._scanner
@property
- def parser(self):
- # type: () -> Any
+ def parser(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
if self.Parser is not CParser:
@@ -217,16 +218,14 @@ class YAML(object):
return getattr(self, attr)
@property
- def composer(self):
- # type: () -> Any
+ def composer(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
setattr(self, attr, self.Composer(loader=self))
return getattr(self, attr)
@property
- def constructor(self):
- # type: () -> Any
+ def constructor(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
cnst = self.Constructor(preserve_quotes=self.preserve_quotes, loader=self)
@@ -235,16 +234,14 @@ class YAML(object):
return getattr(self, attr)
@property
- def resolver(self):
- # type: () -> Any
+ def resolver(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
setattr(self, attr, self.Resolver(version=self.version, loader=self))
return getattr(self, attr)
@property
- def emitter(self):
- # type: () -> Any
+ def emitter(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
if self.Emitter is not CEmitter:
@@ -279,8 +276,7 @@ class YAML(object):
return getattr(self, attr)
@property
- def serializer(self):
- # type: () -> Any
+ def serializer(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
setattr(
@@ -298,8 +294,7 @@ class YAML(object):
return getattr(self, attr)
@property
- def representer(self):
- # type: () -> Any
+ def representer(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
repres = self.Representer(
@@ -312,6 +307,95 @@ class YAML(object):
setattr(self, attr, repres)
return getattr(self, attr)
+ def scan(self, stream: StreamTextType) -> Any:
+ """
+ Scan a YAML stream and produce scanning tokens.
+ """
+ if not hasattr(stream, 'read') and hasattr(stream, 'open'):
+ # pathlib.Path() instance
+ with stream.open('rb') as fp:
+ return self.scan(fp)
+ _, parser = self.get_constructor_parser(stream)
+ try:
+ while self.scanner.check_token():
+ yield self.scanner.get_token()
+ finally:
+ parser.dispose()
+ try:
+ self._reader.reset_reader()
+ except AttributeError:
+ pass
+ try:
+ self._scanner.reset_scanner()
+ except AttributeError:
+ pass
+
+ def parse(self, stream: StreamTextType) -> Any:
+ """
+ Parse a YAML stream and produce parsing events.
+ """
+ if not hasattr(stream, 'read') and hasattr(stream, 'open'):
+ # pathlib.Path() instance
+ with stream.open('rb') as fp:
+ return self.parse(fp)
+ _, parser = self.get_constructor_parser(stream)
+ try:
+ while parser.check_event():
+ yield parser.get_event()
+ finally:
+ parser.dispose()
+ try:
+ self._reader.reset_reader()
+ except AttributeError:
+ pass
+ try:
+ self._scanner.reset_scanner()
+ except AttributeError:
+ pass
+
+ def compose(self, stream: Union[Path, StreamTextType]) -> Any:
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding representation tree.
+ """
+ if not hasattr(stream, 'read') and hasattr(stream, 'open'):
+ # pathlib.Path() instance
+ with stream.open('rb') as fp:
+ return self.compose(fp)
+ constructor, parser = self.get_constructor_parser(stream)
+ try:
+ return constructor.composer.get_single_node()
+ finally:
+ parser.dispose()
+ try:
+ self._reader.reset_reader()
+ except AttributeError:
+ pass
+ try:
+ self._scanner.reset_scanner()
+ except AttributeError:
+ pass
+
+ def compose_all(self, stream: Union[Path, StreamTextType]) -> Any:
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding representation trees.
+ """
+ constructor, parser = self.get_constructor_parser(stream)
+ try:
+ while constructor.composer.check_node():
+ yield constructor.composer.get_node()
+ finally:
+ parser.dispose()
+ try:
+ self._reader.reset_reader()
+ except AttributeError:
+ pass
+ try:
+ self._scanner.reset_scanner()
+ except AttributeError:
+ pass
+
# separate output resolver?
# def load(self, stream=None):
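
scan(), parse(), compose() and compose_all() are new instance-level counterparts of the old module functions; each reuses the instance's reader/scanner/parser and resets them once the generator is exhausted. A short sketch of the three granularities:

    from ruamel.yaml import YAML

    yaml = YAML(typ='safe', pure=True)
    doc = 'a: 1\nb: [2, 3]\n'

    for token in yaml.scan(doc):    # lowest level: scanner tokens
        print(token)
    for event in yaml.parse(doc):   # parsing events
        print(event)
    node = yaml.compose(doc)        # representation tree of the first document
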
@@ -325,8 +409,7 @@ class YAML(object):
# raise TypeError("Need a stream argument when not loading from context manager")
# return self.load_one(stream)
- def load(self, stream):
- # type: (Union[Path, StreamTextType]) -> Any
+ def load(self, stream: Union[Path, StreamTextType]) -> Any:
"""
at this point you either have the non-pure Parser (which has its own reader and
scanner) or you have the pure Parser.
@@ -352,17 +435,11 @@ class YAML(object):
except AttributeError:
pass
- def load_all(self, stream, _kw=enforce): # , skip=None):
- # type: (Union[Path, StreamTextType], Any) -> Any
- if _kw is not enforce:
- raise TypeError(
- '{}.__init__() takes no positional argument but at least '
- 'one was given ({!r})'.format(self.__class__.__name__, _kw)
- )
+ def load_all(self, stream: Union[Path, StreamTextType]) -> Any: # *, skip=None):
if not hasattr(stream, 'read') and hasattr(stream, 'open'):
# pathlib.Path() instance
with stream.open('r') as fp:
- for d in self.load_all(fp, _kw=enforce):
+ for d in self.load_all(fp):
yield d
return
# if skip is None:
@@ -384,8 +461,7 @@ class YAML(object):
except AttributeError:
pass
- def get_constructor_parser(self, stream):
- # type: (StreamTextType) -> Any
+ def get_constructor_parser(self, stream: StreamTextType) -> Any:
"""
the old cyaml needs special setup, and therefore the stream
"""
@@ -416,8 +492,13 @@ class YAML(object):
# rslvr = ruamel.yaml.resolver.Resolver
class XLoader(self.Parser, self.Constructor, rslvr): # type: ignore
- def __init__(selfx, stream, version=self.version, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None # NOQA
+ def __init__(
+ selfx,
+ stream: StreamTextType,
+ version: Optional[VersionType] = self.version,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
+ # NOQA
CParser.__init__(selfx, stream)
selfx._parser = selfx._composer = selfx
self.Constructor.__init__(selfx, loader=selfx)
@@ -429,36 +510,67 @@ class YAML(object):
return loader, loader
return self.constructor, self.parser
- def dump(self, data, stream=None, _kw=enforce, transform=None):
- # type: (Any, Union[Path, StreamType], Any, Any) -> Any
+ def emit(self, events: Any, stream: Any) -> None:
+ """
+ Emit YAML parsing events into a stream.
+ If stream is None, return the produced string instead.
+ """
+ _, _, emitter = self.get_serializer_representer_emitter(stream, None)
+ try:
+ for event in events:
+ emitter.emit(event)
+ finally:
+ try:
+ emitter.dispose()
+ except AttributeError:
+ raise
+
+ def serialize(self, node: Any, stream: Optional[StreamType]) -> Any:
+ """
+ Serialize a representation tree into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ self.serialize_all([node], stream)
+
+ def serialize_all(self, nodes: Any, stream: Optional[StreamType]) -> Any:
+ """
+ Serialize a sequence of representation trees into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ serializer, _, emitter = self.get_serializer_representer_emitter(stream, None)
+ try:
+ serializer.open()
+ for node in nodes:
+ serializer.serialize(node)
+ serializer.close()
+ finally:
+ try:
+ emitter.dispose()
+ except AttributeError:
+ raise
+
+ def dump(
+ self: Any, data: Union[Path, StreamType], stream: Any = None, *, transform: Any = None
+ ) -> Any:
if self._context_manager:
if not self._output:
raise TypeError('Missing output stream while dumping from context manager')
- if _kw is not enforce:
- raise TypeError(
- '{}.dump() takes one positional argument but at least '
- 'two were given ({!r})'.format(self.__class__.__name__, _kw)
- )
if transform is not None:
+ x = self.__class__.__name__
raise TypeError(
- '{}.dump() in the context manager cannot have transform keyword '
- ''.format(self.__class__.__name__)
+ f'{x}.dump() in the context manager cannot have transform keyword'
)
self._context_manager.dump(data)
else: # old style
if stream is None:
raise TypeError('Need a stream argument when not dumping from context manager')
- return self.dump_all([data], stream, _kw, transform=transform)
+ return self.dump_all([data], stream, transform=transform)
- def dump_all(self, documents, stream, _kw=enforce, transform=None):
- # type: (Any, Union[Path, StreamType], Any, Any) -> Any
+ def dump_all(
+ self, documents: Any, stream: Union[Path, StreamType], *, transform: Any = None
+ ) -> Any:
if self._context_manager:
raise NotImplementedError
- if _kw is not enforce:
- raise TypeError(
- '{}.dump(_all) takes two positional argument but at least '
- 'three were given ({!r})'.format(self.__class__.__name__, _kw)
- )
self._output = stream
self._context_manager = YAMLContextManager(self, transform=transform)
for data in documents:
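
emit() and serialize()/serialize_all() are the output-side twins of parse() and compose(): events or nodes can be fed straight back into a YAML instance. A minimal round-trip sketch (not from this diff; fresh instances per step are used only out of caution):

    import sys
    from ruamel.yaml import YAML

    doc = 'a: 1\nb: [2, 3]\n'

    # event-level round trip
    events = list(YAML(typ='safe', pure=True).parse(doc))
    YAML(typ='safe', pure=True).emit(events, sys.stdout)

    # node-level round trip
    node = YAML(typ='safe', pure=True).compose(doc)
    YAML(typ='safe', pure=True).serialize(node, sys.stdout)
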
@@ -467,23 +579,17 @@ class YAML(object):
self._output = None
self._context_manager = None
- def Xdump_all(self, documents, stream, _kw=enforce, transform=None):
- # type: (Any, Union[Path, StreamType], Any, Any) -> Any
+ def Xdump_all(self, documents: Any, stream: Any, *, transform: Any = None) -> Any:
"""
Serialize a sequence of Python objects into a YAML stream.
"""
if not hasattr(stream, 'write') and hasattr(stream, 'open'):
# pathlib.Path() instance
with stream.open('w') as fp:
- return self.dump_all(documents, fp, _kw, transform=transform)
- if _kw is not enforce:
- raise TypeError(
- '{}.dump(_all) takes two positional argument but at least '
- 'three were given ({!r})'.format(self.__class__.__name__, _kw)
- )
+ return self.dump_all(documents, fp, transform=transform)
# The stream should have the methods `write` and possibly `flush`.
if self.top_level_colon_align is True:
- tlca = max([len(str(x)) for x in documents[0]]) # type: Any
+ tlca: Any = max([len(str(x)) for x in documents[0]])
else:
tlca = self.top_level_colon_align
if transform is not None:
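
With the _kw sentinel gone, transform is a genuine keyword-only argument of dump()/dump_all(). It receives the fully rendered document as a string and its return value is what gets written (see the fstream.write(transform(val)) call below). A hedged sketch:

    import sys
    from ruamel.yaml import YAML

    def strip_trailing_spaces(s: str) -> str:
        # the callable gets the whole rendered document at once
        return '\n'.join(line.rstrip() for line in s.splitlines()) + '\n'

    yaml = YAML()
    yaml.dump({'a': 1, 'b': [2, 3]}, sys.stdout, transform=strip_trailing_spaces)
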
@@ -522,8 +628,7 @@ class YAML(object):
fstream.write(transform(val))
return None
- def get_serializer_representer_emitter(self, stream, tlca):
- # type: (StreamType, Any) -> Any
+ def get_serializer_representer_emitter(self, stream: StreamType, tlca: Any) -> Any:
# we have only .Serializer to deal with (vs .Reader & .Scanner), much simpler
if self.Emitter is not CEmitter:
if self.Serializer is None:
@@ -551,25 +656,25 @@ class YAML(object):
class XDumper(CEmitter, self.Representer, rslvr): # type: ignore
def __init__(
- selfx,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ selfx: StreamType,
+ stream: Any,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
CEmitter.__init__(
selfx,
stream,
@@ -609,36 +714,37 @@ class YAML(object):
return dumper, dumper, dumper
# basic types
- def map(self, **kw):
- # type: (Any) -> Any
+ def map(self, **kw: Any) -> Any:
if 'rt' in self.typ:
- from ruamel.yaml.comments import CommentedMap
-
return CommentedMap(**kw)
else:
return dict(**kw)
- def seq(self, *args):
- # type: (Any) -> Any
+ def seq(self, *args: Any) -> Any:
if 'rt' in self.typ:
- from ruamel.yaml.comments import CommentedSeq
-
return CommentedSeq(*args)
else:
return list(*args)
# helpers
- def official_plug_ins(self):
- # type: () -> Any
- bd = os.path.dirname(__file__)
+ def official_plug_ins(self) -> Any:
+ """search for list of subdirs that are plug-ins, if __file__ is not available, e.g.
+ single file installers that are not properly emulating a file-system (issue 324)
+ no plug-ins will be found. If any are packaged, you know which file that are
+ and you can explicitly provide it during instantiation:
+ yaml = ruamel.yaml.YAML(plug_ins=['ruamel/yaml/jinja2/__plug_in__'])
+ """
+ try:
+ bd = os.path.dirname(__file__)
+ except NameError:
+ return []
gpbd = os.path.dirname(os.path.dirname(bd))
res = [x.replace(gpbd, "")[1:-3] for x in glob.glob(bd + '/*/__plug_in__.py')]
return res
- def register_class(self, cls):
- # type:(Any) -> Any
+ def register_class(self, cls: Any) -> Any:
"""
- register a class for dumping loading
+ register a class for dumping/loading
- if it has attribute yaml_tag use that to register, else use class name
- if it has methods to_yaml/from_yaml use those to dump/load else dump attributes
as mapping
@@ -648,8 +754,7 @@ class YAML(object):
self.representer.add_representer(cls, cls.to_yaml)
except AttributeError:
- def t_y(representer, data):
- # type: (Any, Any) -> Any
+ def t_y(representer: Any, data: Any) -> Any:
return representer.represent_yaml_object(
tag, data, cls, flow_style=representer.default_flow_style
)
@@ -659,42 +764,24 @@ class YAML(object):
self.constructor.add_constructor(tag, cls.from_yaml)
except AttributeError:
- def f_y(constructor, node):
- # type: (Any, Any) -> Any
+ def f_y(constructor: Any, node: Any) -> Any:
return constructor.construct_yaml_object(node, cls)
self.constructor.add_constructor(tag, f_y)
return cls
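
Since register_class() returns the class, it doubles as a decorator; without a yaml_tag attribute the tag falls back to '!' plus the class name. A usage sketch:

    import sys
    from ruamel.yaml import YAML

    yaml = YAML()

    @yaml.register_class
    class Monster:
        yaml_tag = '!Monster'   # optional; '!Monster' would also be derived from the name

        def __init__(self, name, hp):
            self.name = name
            self.hp = hp

    # dumps a '!Monster'-tagged mapping of the instance attributes
    yaml.dump([Monster('Cave troll', 36)], sys.stdout)
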
- def parse(self, stream):
- # type: (StreamTextType) -> Any
- """
- Parse a YAML stream and produce parsing events.
- """
- _, parser = self.get_constructor_parser(stream)
- try:
- while parser.check_event():
- yield parser.get_event()
- finally:
- parser.dispose()
- try:
- self._reader.reset_reader()
- except AttributeError:
- pass
- try:
- self._scanner.reset_scanner()
- except AttributeError:
- pass
-
# ### context manager
- def __enter__(self):
- # type: () -> Any
+ def __enter__(self) -> Any:
self._context_manager = YAMLContextManager(self)
return self
- def __exit__(self, typ, value, traceback):
- # type: (Any, Any, Any) -> None
+ def __exit__(
+ self,
+ typ: Optional[Type[BaseException]],
+ value: Optional[BaseException],
+ traceback: Optional[TracebackType],
+ ) -> None:
if typ:
nprint('typ', typ)
self._context_manager.teardown_output()
@@ -702,8 +789,7 @@ class YAML(object):
self._context_manager = None
# ### backwards compatibility
- def _indent(self, mapping=None, sequence=None, offset=None):
- # type: (Any, Any, Any) -> None
+ def _indent(self, mapping: Any = None, sequence: Any = None, offset: Any = None) -> None:
if mapping is not None:
self.map_indent = mapping
if sequence is not None:
@@ -712,34 +798,29 @@ class YAML(object):
self.sequence_dash_offset = offset
@property
- def indent(self):
- # type: () -> Any
+ def indent(self) -> Any:
return self._indent
@indent.setter
- def indent(self, val):
- # type: (Any) -> None
+ def indent(self, val: Any) -> None:
self.old_indent = val
@property
- def block_seq_indent(self):
- # type: () -> Any
+ def block_seq_indent(self) -> Any:
return self.sequence_dash_offset
@block_seq_indent.setter
- def block_seq_indent(self, val):
- # type: (Any) -> None
+ def block_seq_indent(self, val: Any) -> None:
self.sequence_dash_offset = val
- def compact(self, seq_seq=None, seq_map=None):
- # type: (Any, Any) -> None
+ def compact(self, seq_seq: Any = None, seq_map: Any = None) -> None:
self.compact_seq_seq = seq_seq
self.compact_seq_map = seq_map
-class YAMLContextManager(object):
- def __init__(self, yaml, transform=None):
- # type: (Any, Any) -> None # used to be: (Any, Optional[Callable]) -> None
+class YAMLContextManager:
+ def __init__(self, yaml: Any, transform: Any = None) -> None:
+ # used to be: (Any, Optional[Callable]) -> None
self._yaml = yaml
self._output_inited = False
self._output_path = None
@@ -770,8 +851,7 @@ class YAMLContextManager(object):
else:
self._output = BytesIO()
- def teardown_output(self):
- # type: () -> None
+ def teardown_output(self) -> None:
if self._output_inited:
self._yaml.serializer.close()
else:
@@ -799,18 +879,16 @@ class YAMLContextManager(object):
if self._output_path is not None:
self._output.close()
- def init_output(self, first_data):
- # type: (Any) -> None
+ def init_output(self, first_data: Any) -> None:
if self._yaml.top_level_colon_align is True:
- tlca = max([len(str(x)) for x in first_data]) # type: Any
+ tlca: Any = max([len(str(x)) for x in first_data])
else:
tlca = self._yaml.top_level_colon_align
self._yaml.get_serializer_representer_emitter(self._output, tlca)
self._yaml.serializer.open()
self._output_inited = True
- def dump(self, data):
- # type: (Any) -> None
+ def dump(self, data: Any) -> None:
if not self._output_inited:
self.init_output(data)
try:
@@ -844,8 +922,7 @@ class YAMLContextManager(object):
# pass
-def yaml_object(yml):
- # type: (Any) -> Any
+def yaml_object(yml: Any) -> Any:
""" decorator for classes that needs to dump/load objects
The tag for such objects is taken from the class attribute yaml_tag (or the
class name in lowercase in case unavailable)
@@ -853,15 +930,13 @@ def yaml_object(yml):
loading, default routines (dumping a mapping of the attributes) used otherwise.
"""
- def yo_deco(cls):
- # type: (Any) -> Any
+ def yo_deco(cls: Any) -> Any:
tag = getattr(cls, 'yaml_tag', '!' + cls.__name__)
try:
yml.representer.add_representer(cls, cls.to_yaml)
except AttributeError:
- def t_y(representer, data):
- # type: (Any, Any) -> Any
+ def t_y(representer: Any, data: Any) -> Any:
return representer.represent_yaml_object(
tag, data, cls, flow_style=representer.default_flow_style
)
@@ -871,8 +946,7 @@ def yaml_object(yml):
yml.constructor.add_constructor(tag, cls.from_yaml)
except AttributeError:
- def f_y(constructor, node):
- # type: (Any, Any) -> Any
+ def f_y(constructor: Any, node: Any) -> Any:
return constructor.construct_yaml_object(node, cls)
yml.constructor.add_constructor(tag, f_y)
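
yaml_object(yml) is the decorator-factory form of the same registration, bound to a specific YAML instance. A sketch:

    import sys
    from ruamel.yaml import YAML, yaml_object

    yaml = YAML()

    @yaml_object(yaml)
    class Color:
        yaml_tag = '!Color'

        def __init__(self, r, g, b):
            self.r, self.g, self.b = r, g, b

    # dumps 'background' as a '!Color'-tagged mapping of r/g/b
    yaml.dump({'background': Color(255, 255, 255)}, sys.stdout)
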
@@ -882,13 +956,31 @@ def yaml_object(yml):
########################################################################################
+def warn_deprecation(fun: Any, method: Any, arg: str = '') -> None:
+ warnings.warn(
+ f'\n{fun} will be removed, use\n\n yaml=YAML({arg})\n yaml.{method}(...)\n\ninstead', # NOQA
+ PendingDeprecationWarning, # this will show when testing with pytest/tox
+ stacklevel=3,
+ )
+
+
+def error_deprecation(fun: Any, method: Any, arg: str = '') -> None:
+ warnings.warn(
+ f'\n{fun} has been removed, use\n\n yaml=YAML({arg})\n yaml.{method}(...)\n\ninstead', # NOQA
+ DeprecationWarning,
+ stacklevel=3,
+ )
+ sys.exit(1)
+
+
+########################################################################################
-def scan(stream, Loader=Loader):
- # type: (StreamTextType, Any) -> Any
+def scan(stream: StreamTextType, Loader: Any = Loader) -> Any:
"""
Scan a YAML stream and produce scanning tokens.
"""
+ warn_deprecation('scan', 'scan', arg="typ='unsafe', pure=True")
loader = Loader(stream)
try:
while loader.scanner.check_token():
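
Every remaining module-level function now goes through warn_deprecation(), which points at the YAML(...) replacement named in its arg string. A before/after sketch for the most common call (the file name is a placeholder):

    import ruamel.yaml

    # old style: emits a PendingDeprecationWarning from warn_deprecation()
    # data = ruamel.yaml.safe_load(open('config.yaml'))

    # replacement suggested by the warning text
    from ruamel.yaml import YAML
    yaml = YAML(typ='safe', pure=True)   # or YAML(typ='safe') for the C-backed loader
    with open('config.yaml') as fp:
        data = yaml.load(fp)
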
@@ -897,11 +989,11 @@ def scan(stream, Loader=Loader):
loader._parser.dispose()
-def parse(stream, Loader=Loader):
- # type: (StreamTextType, Any) -> Any
+def parse(stream: StreamTextType, Loader: Any = Loader) -> Any:
"""
Parse a YAML stream and produce parsing events.
"""
+ warn_deprecation('parse', 'parse', arg="typ='unsafe', pure=True")
loader = Loader(stream)
try:
while loader._parser.check_event():
@@ -910,12 +1002,12 @@ def parse(stream, Loader=Loader):
loader._parser.dispose()
-def compose(stream, Loader=Loader):
- # type: (StreamTextType, Any) -> Any
+def compose(stream: StreamTextType, Loader: Any = Loader) -> Any:
"""
Parse the first YAML document in a stream
and produce the corresponding representation tree.
"""
+ warn_deprecation('compose', 'compose', arg="typ='unsafe', pure=True")
loader = Loader(stream)
try:
return loader.get_single_node()
@@ -923,12 +1015,12 @@ def compose(stream, Loader=Loader):
loader.dispose()
-def compose_all(stream, Loader=Loader):
- # type: (StreamTextType, Any) -> Any
+def compose_all(stream: StreamTextType, Loader: Any = Loader) -> Any:
"""
Parse all YAML documents in a stream
and produce corresponding representation trees.
"""
+    warn_deprecation('compose_all', 'compose_all', arg="typ='unsafe', pure=True")
loader = Loader(stream)
try:
while loader.check_node():
@@ -937,16 +1029,18 @@ def compose_all(stream, Loader=Loader):
loader._parser.dispose()
-def load(stream, Loader=None, version=None, preserve_quotes=None):
- # type: (StreamTextType, Any, Optional[VersionType], Any) -> Any
+def load(
+ stream: Any, Loader: Any = None, version: Any = None, preserve_quotes: Any = None
+) -> Any:
"""
Parse the first YAML document in a stream
and produce the corresponding Python object.
"""
+ warn_deprecation('load', 'load', arg="typ='unsafe', pure=True")
if Loader is None:
warnings.warn(UnsafeLoaderWarning.text, UnsafeLoaderWarning, stacklevel=2)
Loader = UnsafeLoader
- loader = Loader(stream, version, preserve_quotes=preserve_quotes)
+ loader = Loader(stream, version, preserve_quotes=preserve_quotes) # type: Any
try:
return loader._constructor.get_single_data()
finally:
@@ -961,16 +1055,19 @@ def load(stream, Loader=None, version=None, preserve_quotes=None):
pass
-def load_all(stream, Loader=None, version=None, preserve_quotes=None):
- # type: (Optional[StreamTextType], Any, Optional[VersionType], Optional[bool]) -> Any # NOQA
+def load_all(
+ stream: Any, Loader: Any = None, version: Any = None, preserve_quotes: Any = None
+) -> Any:
+ # NOQA
"""
Parse all YAML documents in a stream
and produce corresponding Python objects.
"""
+ warn_deprecation('load_all', 'load_all', arg="typ='unsafe', pure=True")
if Loader is None:
warnings.warn(UnsafeLoaderWarning.text, UnsafeLoaderWarning, stacklevel=2)
Loader = UnsafeLoader
- loader = Loader(stream, version, preserve_quotes=preserve_quotes)
+ loader = Loader(stream, version, preserve_quotes=preserve_quotes) # type: Any
try:
while loader._constructor.check_data():
yield loader._constructor.get_data()
@@ -986,61 +1083,70 @@ def load_all(stream, Loader=None, version=None, preserve_quotes=None):
pass
-def safe_load(stream, version=None):
- # type: (StreamTextType, Optional[VersionType]) -> Any
+def safe_load(stream: StreamTextType, version: Optional[VersionType] = None) -> Any:
"""
Parse the first YAML document in a stream
and produce the corresponding Python object.
Resolve only basic YAML tags.
"""
+ warn_deprecation('safe_load', 'load', arg="typ='safe', pure=True")
return load(stream, SafeLoader, version)
-def safe_load_all(stream, version=None):
- # type: (StreamTextType, Optional[VersionType]) -> Any
+def safe_load_all(stream: StreamTextType, version: Optional[VersionType] = None) -> Any:
"""
Parse all YAML documents in a stream
and produce corresponding Python objects.
Resolve only basic YAML tags.
"""
+ warn_deprecation('safe_load_all', 'load_all', arg="typ='safe', pure=True")
return load_all(stream, SafeLoader, version)
-def round_trip_load(stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> Any
+def round_trip_load(
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+) -> Any:
"""
Parse the first YAML document in a stream
and produce the corresponding Python object.
Resolve only basic YAML tags.
"""
+    warn_deprecation('round_trip_load', 'load')
return load(stream, RoundTripLoader, version, preserve_quotes=preserve_quotes)
-def round_trip_load_all(stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> Any
+def round_trip_load_all(
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+) -> Any:
"""
Parse all YAML documents in a stream
and produce corresponding Python objects.
Resolve only basic YAML tags.
"""
+ warn_deprecation('round_trip_load_all', 'load_all')
return load_all(stream, RoundTripLoader, version, preserve_quotes=preserve_quotes)
def emit(
- events,
- stream=None,
- Dumper=Dumper,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
-):
- # type: (Any, Optional[StreamType], Any, Optional[bool], Union[int, None], Optional[int], Optional[bool], Any) -> Any # NOQA
+ events: Any,
+ stream: Optional[StreamType] = None,
+ Dumper: Any = Dumper,
+ canonical: Optional[bool] = None,
+ indent: Union[int, None] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+) -> Any:
+ # NOQA
"""
Emit YAML parsing events into a stream.
If stream is None, return the produced string instead.
"""
+ warn_deprecation('emit', 'emit', arg="typ='safe', pure=True")
getvalue = None
if stream is None:
stream = StringIO()
@@ -1066,29 +1172,30 @@ def emit(
return getvalue()
-enc = None if PY3 else 'utf-8'
+enc = None
def serialize_all(
- nodes,
- stream=None,
- Dumper=Dumper,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=enc,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
-):
- # type: (Any, Optional[StreamType], Any, Any, Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Optional[VersionType], Any) -> Any # NOQA
+ nodes: Any,
+ stream: Optional[StreamType] = None,
+ Dumper: Any = Dumper,
+ canonical: Any = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = enc,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[VersionType] = None,
+ tags: Any = None,
+) -> Any:
+ # NOQA
"""
Serialize a sequence of representation trees into a YAML stream.
If stream is None, return the produced string instead.
"""
+ warn_deprecation('serialize_all', 'serialize_all', arg="typ='safe', pure=True")
getvalue = None
if stream is None:
if encoding is None:
@@ -1124,40 +1231,43 @@ def serialize_all(
return getvalue()
-def serialize(node, stream=None, Dumper=Dumper, **kwds):
- # type: (Any, Optional[StreamType], Any, Any) -> Any
+def serialize(
+ node: Any, stream: Optional[StreamType] = None, Dumper: Any = Dumper, **kwds: Any
+) -> Any:
"""
Serialize a representation tree into a YAML stream.
If stream is None, return the produced string instead.
"""
+ warn_deprecation('serialize', 'serialize', arg="typ='safe', pure=True")
return serialize_all([node], stream, Dumper=Dumper, **kwds)
def dump_all(
- documents,
- stream=None,
- Dumper=Dumper,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=enc,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
-):
- # type: (Any, Optional[StreamType], Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> Optional[str] # NOQA
+ documents: Any,
+ stream: Optional[StreamType] = None,
+ Dumper: Any = Dumper,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = enc,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+) -> Any:
+ # NOQA
"""
Serialize a sequence of Python objects into a YAML stream.
If stream is None, return the produced string instead.
"""
+ warn_deprecation('dump_all', 'dump_all', arg="typ='unsafe', pure=True")
getvalue = None
if top_level_colon_align is True:
top_level_colon_align = max([len(str(x)) for x in documents[0]])
@@ -1206,24 +1316,24 @@ def dump_all(
def dump(
- data,
- stream=None,
- Dumper=Dumper,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=enc,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
-):
- # type: (Any, Optional[StreamType], Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Optional[VersionType], Any, Any) -> Optional[str] # NOQA
+ data: Any,
+ stream: Optional[StreamType] = None,
+ Dumper: Any = Dumper,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = enc,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[VersionType] = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+) -> Any:
+ # NOQA
"""
Serialize a Python object into a YAML stream.
If stream is None, return the produced string instead.
@@ -1231,6 +1341,7 @@ def dump(
default_style ∈ None, '', '"', "'", '|', '>'
"""
+ warn_deprecation('dump', 'dump', arg="typ='unsafe', pure=True")
return dump_all(
[data],
stream,
@@ -1251,48 +1362,38 @@ def dump(
)
-def safe_dump_all(documents, stream=None, **kwds):
- # type: (Any, Optional[StreamType], Any) -> Optional[str]
- """
- Serialize a sequence of Python objects into a YAML stream.
- Produce only basic YAML tags.
- If stream is None, return the produced string instead.
- """
- return dump_all(documents, stream, Dumper=SafeDumper, **kwds)
-
-
-def safe_dump(data, stream=None, **kwds):
- # type: (Any, Optional[StreamType], Any) -> Optional[str]
+def safe_dump(data: Any, stream: Optional[StreamType] = None, **kwds: Any) -> Any:
"""
Serialize a Python object into a YAML stream.
Produce only basic YAML tags.
If stream is None, return the produced string instead.
"""
+ warn_deprecation('safe_dump', 'dump', arg="typ='safe', pure=True")
return dump_all([data], stream, Dumper=SafeDumper, **kwds)
def round_trip_dump(
- data,
- stream=None,
- Dumper=RoundTripDumper,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=enc,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
-):
- # type: (Any, Optional[StreamType], Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Optional[VersionType], Any, Any, Any, Any) -> Optional[str] # NOQA
+ data: Any,
+ stream: Optional[StreamType] = None,
+ Dumper: Any = RoundTripDumper,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = enc,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[VersionType] = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+) -> Any:
allow_unicode = True if allow_unicode is None else allow_unicode
+ warn_deprecation('round_trip_dump', 'dump')
return dump_all(
[data],
stream,
@@ -1320,9 +1421,13 @@ def round_trip_dump(
def add_implicit_resolver(
- tag, regexp, first=None, Loader=None, Dumper=None, resolver=Resolver
-):
- # type: (Any, Any, Any, Any, Any, Any) -> None
+ tag: Any,
+ regexp: Any,
+ first: Any = None,
+ Loader: Any = None,
+ Dumper: Any = None,
+ resolver: Any = Resolver,
+) -> None:
"""
Add an implicit scalar detector.
If an implicit scalar value matches the given regexp,
@@ -1353,8 +1458,14 @@ def add_implicit_resolver(
# this code currently not tested
-def add_path_resolver(tag, path, kind=None, Loader=None, Dumper=None, resolver=Resolver):
- # type: (Any, Any, Any, Any, Any, Any) -> None
+def add_path_resolver(
+ tag: Any,
+ path: Any,
+ kind: Any = None,
+ Loader: Any = None,
+ Dumper: Any = None,
+ resolver: Any = Resolver,
+) -> None:
"""
Add a path based resolver for the given tag.
A path is a list of keys that forms a path
@@ -1384,8 +1495,9 @@ def add_path_resolver(tag, path, kind=None, Loader=None, Dumper=None, resolver=R
raise NotImplementedError
-def add_constructor(tag, object_constructor, Loader=None, constructor=Constructor):
- # type: (Any, Any, Any, Any) -> None
+def add_constructor(
+ tag: Any, object_constructor: Any, Loader: Any = None, constructor: Any = Constructor
+) -> None:
"""
Add an object constructor for the given tag.
    object_constructor is a function that accepts a Loader instance
@@ -1409,8 +1521,9 @@ def add_constructor(tag, object_constructor, Loader=None, constructor=Constructo
raise NotImplementedError
-def add_multi_constructor(tag_prefix, multi_constructor, Loader=None, constructor=Constructor):
- # type: (Any, Any, Any, Any) -> None
+def add_multi_constructor(
+ tag_prefix: Any, multi_constructor: Any, Loader: Any = None, constructor: Any = Constructor
+) -> None:
"""
Add a multi-constructor for the given tag prefix.
Multi-constructor is called for a node if its tag starts with tag_prefix.
@@ -1435,8 +1548,9 @@ def add_multi_constructor(tag_prefix, multi_constructor, Loader=None, constructo
raise NotImplementedError
-def add_representer(data_type, object_representer, Dumper=None, representer=Representer):
- # type: (Any, Any, Any, Any) -> None
+def add_representer(
+ data_type: Any, object_representer: Any, Dumper: Any = None, representer: Any = Representer
+) -> None:
"""
Add a representer for the given type.
object_representer is a function accepting a Dumper instance
@@ -1462,8 +1576,9 @@ def add_representer(data_type, object_representer, Dumper=None, representer=Repr
# this code currently not tested
-def add_multi_representer(data_type, multi_representer, Dumper=None, representer=Representer):
- # type: (Any, Any, Any, Any) -> None
+def add_multi_representer(
+ data_type: Any, multi_representer: Any, Dumper: Any = None, representer: Any = Representer
+) -> None:
"""
Add a representer for the given type.
multi_representer is a function accepting a Dumper instance
@@ -1493,9 +1608,8 @@ class YAMLObjectMetaclass(type):
The metaclass for YAMLObject.
"""
- def __init__(cls, name, bases, kwds):
- # type: (Any, Any, Any) -> None
- super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
+ def __init__(cls, name: Any, bases: Any, kwds: Any) -> None:
+ super().__init__(name, bases, kwds)
if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
cls.yaml_constructor.add_constructor(cls.yaml_tag, cls.from_yaml) # type: ignore
cls.yaml_representer.add_representer(cls, cls.to_yaml) # type: ignore
@@ -1512,20 +1626,18 @@ class YAMLObject(with_metaclass(YAMLObjectMetaclass)): # type: ignore
yaml_constructor = Constructor
yaml_representer = Representer
- yaml_tag = None # type: Any
- yaml_flow_style = None # type: Any
+ yaml_tag: Any = None
+ yaml_flow_style: Any = None
@classmethod
- def from_yaml(cls, constructor, node):
- # type: (Any, Any) -> Any
+ def from_yaml(cls, constructor: Any, node: Any) -> Any:
"""
Convert a representation node to a Python object.
"""
return constructor.construct_yaml_object(node, cls)
@classmethod
- def to_yaml(cls, representer, data):
- # type: (Any, Any) -> Any
+ def to_yaml(cls, representer: Any, data: Any) -> Any:
"""
Convert a Python object to a representation node.
"""
diff --git a/nodes.py b/nodes.py
index da86e9c..b2f4e13 100644
--- a/nodes.py
+++ b/nodes.py
@@ -1,19 +1,22 @@
# coding: utf-8
-from __future__ import print_function
-
import sys
-from .compat import string_types
-if False: # MYPY
- from typing import Dict, Any, Text # NOQA
+from typing import Dict, Any, Text # NOQA
-class Node(object):
+class Node:
__slots__ = 'tag', 'value', 'start_mark', 'end_mark', 'comment', 'anchor'
- def __init__(self, tag, value, start_mark, end_mark, comment=None, anchor=None):
- # type: (Any, Any, Any, Any, Any, Any) -> None
+ def __init__(
+ self,
+ tag: Any,
+ value: Any,
+ start_mark: Any,
+ end_mark: Any,
+ comment: Any = None,
+ anchor: Any = None,
+ ) -> None:
self.tag = tag
self.value = value
self.start_mark = start_mark
@@ -21,8 +24,7 @@ class Node(object):
self.comment = comment
self.anchor = anchor
- def __repr__(self):
- # type: () -> str
+ def __repr__(self) -> Any:
value = self.value
# if isinstance(value, list):
# if len(value) == 0:
@@ -30,31 +32,26 @@ class Node(object):
# elif len(value) == 1:
# value = '<1 item>'
# else:
- # value = '<%d items>' % len(value)
+ # value = f'<{len(value)} items>'
# else:
# if len(value) > 75:
- # value = repr(value[:70]+u' ... ')
+ # value = repr(value[:70]+' ... ')
# else:
# value = repr(value)
value = repr(value)
- return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value)
-
- def dump(self, indent=0):
- # type: (int) -> None
- if isinstance(self.value, string_types):
- sys.stdout.write(
- '{}{}(tag={!r}, value={!r})\n'.format(
- ' ' * indent, self.__class__.__name__, self.tag, self.value
- )
- )
+ return f'{self.__class__.__name__!s}(tag={self.tag!r}, value={value!s})'
+
+ def dump(self, indent: int = 0) -> None:
+ xx = self.__class__.__name__
+ xi = ' ' * indent
+ if isinstance(self.value, str):
+ sys.stdout.write(f'{xi}{xx}(tag={self.tag!r}, value={self.value!r})\n')
if self.comment:
- sys.stdout.write(' {}comment: {})\n'.format(' ' * indent, self.comment))
+ sys.stdout.write(f' {xi}comment: {self.comment})\n')
return
- sys.stdout.write(
- '{}{}(tag={!r})\n'.format(' ' * indent, self.__class__.__name__, self.tag)
- )
+ sys.stdout.write(f'{xi}{xx}(tag={self.tag!r})\n')
if self.comment:
- sys.stdout.write(' {}comment: {})\n'.format(' ' * indent, self.comment))
+ sys.stdout.write(f' {xi}comment: {self.comment})\n')
for v in self.value:
if isinstance(v, tuple):
for v1 in v:
@@ -62,7 +59,7 @@ class Node(object):
elif isinstance(v, Node):
v.dump(indent + 1)
else:
- sys.stdout.write('Node value type? {}\n'.format(type(v)))
+ sys.stdout.write(f'Node value type? {type(v)}\n')
class ScalarNode(Node):
@@ -79,9 +76,15 @@ class ScalarNode(Node):
id = 'scalar'
def __init__(
- self, tag, value, start_mark=None, end_mark=None, style=None, comment=None, anchor=None
- ):
- # type: (Any, Any, Any, Any, Any, Any, Any) -> None
+ self,
+ tag: Any,
+ value: Any,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ style: Any = None,
+ comment: Any = None,
+ anchor: Any = None,
+ ) -> None:
Node.__init__(self, tag, value, start_mark, end_mark, comment=comment, anchor=anchor)
self.style = style
@@ -91,15 +94,14 @@ class CollectionNode(Node):
def __init__(
self,
- tag,
- value,
- start_mark=None,
- end_mark=None,
- flow_style=None,
- comment=None,
- anchor=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any, Any) -> None
+ tag: Any,
+ value: Any,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ flow_style: Any = None,
+ comment: Any = None,
+ anchor: Any = None,
+ ) -> None:
Node.__init__(self, tag, value, start_mark, end_mark, comment=comment)
self.flow_style = flow_style
self.anchor = anchor
@@ -116,15 +118,14 @@ class MappingNode(CollectionNode):
def __init__(
self,
- tag,
- value,
- start_mark=None,
- end_mark=None,
- flow_style=None,
- comment=None,
- anchor=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any, Any) -> None
+ tag: Any,
+ value: Any,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ flow_style: Any = None,
+ comment: Any = None,
+ anchor: Any = None,
+ ) -> None:
CollectionNode.__init__(
self, tag, value, start_mark, end_mark, flow_style, comment, anchor
)
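A minimal usage sketch of the node API as annotated above (illustration only, not part of the patch; it assumes nothing beyond the ScalarNode/MappingNode signatures and Node.dump() shown in these hunks):

# Build a tiny node tree and dump it, exercising the converted signatures.
from ruamel.yaml.nodes import MappingNode, ScalarNode

key = ScalarNode('tag:yaml.org,2002:str', 'answer')
val = ScalarNode('tag:yaml.org,2002:int', '42')
root = MappingNode('tag:yaml.org,2002:map', [(key, val)])
root.dump()
# MappingNode(tag='tag:yaml.org,2002:map')
#  ScalarNode(tag='tag:yaml.org,2002:str', value='answer')
#  ScalarNode(tag='tag:yaml.org,2002:int', value='42')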
diff --git a/parser.py b/parser.py
index 10deaa8..c8b5fcf 100644
--- a/parser.py
+++ b/parser.py
@@ -1,7 +1,5 @@
# coding: utf-8
-from __future__ import absolute_import
-
# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
@@ -46,7 +44,7 @@ from __future__ import absolute_import
#
# FIRST sets:
#
 # stream: { STREAM-START }
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START
@@ -80,60 +78,60 @@ from ruamel.yaml.error import MarkedYAMLError
from ruamel.yaml.tokens import * # NOQA
from ruamel.yaml.events import * # NOQA
from ruamel.yaml.scanner import Scanner, RoundTripScanner, ScannerError # NOQA
-from ruamel.yaml.compat import utf8, nprint, nprintf # NOQA
+from ruamel.yaml.scanner import BlankLineComment
+from ruamel.yaml.comments import C_PRE, C_POST, C_SPLIT_ON_FIRST_BLANK
+from ruamel.yaml.compat import nprint, nprintf # NOQA
-if False: # MYPY
- from typing import Any, Dict, Optional, List # NOQA
+from typing import Any, Dict, Optional, List  # NOQA
__all__ = ['Parser', 'RoundTripParser', 'ParserError']
+def xprintf(*args: Any, **kw: Any) -> Any:
+ return nprintf(*args, **kw)
+ pass
+
+
class ParserError(MarkedYAMLError):
pass
-class Parser(object):
+class Parser:
     # Since writing a recursive descent parser is a straightforward task, we
# do not give many comments here.
- DEFAULT_TAGS = {u'!': u'!', u'!!': u'tag:yaml.org,2002:'}
+ DEFAULT_TAGS = {'!': '!', '!!': 'tag:yaml.org,2002:'}
- def __init__(self, loader):
- # type: (Any) -> None
+ def __init__(self, loader: Any) -> None:
self.loader = loader
if self.loader is not None and getattr(self.loader, '_parser', None) is None:
self.loader._parser = self
self.reset_parser()
- def reset_parser(self):
- # type: () -> None
+ def reset_parser(self) -> None:
# Reset the state attributes (to clear self-references)
- self.current_event = None
- self.tag_handles = {} # type: Dict[Any, Any]
- self.states = [] # type: List[Any]
- self.marks = [] # type: List[Any]
- self.state = self.parse_stream_start # type: Any
+ self.current_event = self.last_event = None
+ self.tag_handles: Dict[Any, Any] = {}
+ self.states: List[Any] = []
+ self.marks: List[Any] = []
+ self.state: Any = self.parse_stream_start
- def dispose(self):
- # type: () -> None
+ def dispose(self) -> None:
self.reset_parser()
@property
- def scanner(self):
- # type: () -> Any
+ def scanner(self) -> Any:
if hasattr(self.loader, 'typ'):
return self.loader.scanner
return self.loader._scanner
@property
- def resolver(self):
- # type: () -> Any
+ def resolver(self) -> Any:
if hasattr(self.loader, 'typ'):
return self.loader.resolver
return self.loader._resolver
- def check_event(self, *choices):
- # type: (Any) -> bool
+ def check_event(self, *choices: Any) -> bool:
# Check the type of the next event.
if self.current_event is None:
if self.state:
@@ -146,21 +144,22 @@ class Parser(object):
return True
return False
- def peek_event(self):
- # type: () -> Any
+ def peek_event(self) -> Any:
# Get the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
return self.current_event
- def get_event(self):
- # type: () -> Any
+ def get_event(self) -> Any:
# Get the next event and proceed further.
if self.current_event is None:
if self.state:
self.current_event = self.state()
- value = self.current_event
+ # assert self.current_event is not None
+ # if self.current_event.end_mark.line != self.peek_event().start_mark.line:
+ xprintf('get_event', repr(self.current_event), self.peek_event().start_mark.line)
+ self.last_event = value = self.current_event
self.current_event = None
return value
@@ -169,11 +168,10 @@ class Parser(object):
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
- def parse_stream_start(self):
- # type: () -> Any
+ def parse_stream_start(self) -> Any:
# Parse the stream start.
token = self.scanner.get_token()
- token.move_comment(self.scanner.peek_token())
+ self.move_token_comment(token)
event = StreamStartEvent(token.start_mark, token.end_mark, encoding=token.encoding)
# Prepare the next state.
@@ -181,8 +179,7 @@ class Parser(object):
return event
- def parse_implicit_document_start(self):
- # type: () -> Any
+ def parse_implicit_document_start(self) -> Any:
# Parse an implicit document.
if not self.scanner.check_token(DirectiveToken, DocumentStartToken, StreamEndToken):
self.tag_handles = self.DEFAULT_TAGS
@@ -199,31 +196,35 @@ class Parser(object):
else:
return self.parse_document_start()
- def parse_document_start(self):
- # type: () -> Any
+ def parse_document_start(self) -> Any:
# Parse any extra document end indicators.
while self.scanner.check_token(DocumentEndToken):
self.scanner.get_token()
# Parse an explicit document.
if not self.scanner.check_token(StreamEndToken):
- token = self.scanner.peek_token()
- start_mark = token.start_mark
version, tags = self.process_directives()
if not self.scanner.check_token(DocumentStartToken):
raise ParserError(
None,
None,
- "expected '<document start>', but found %r" % self.scanner.peek_token().id,
+ "expected '<document start>', "
+                    f'but found {self.scanner.peek_token().id!r}',
self.scanner.peek_token().start_mark,
)
token = self.scanner.get_token()
+ start_mark = token.start_mark
end_mark = token.end_mark
# if self.loader is not None and \
# end_mark.line != self.scanner.peek_token().start_mark.line:
# self.loader.scalar_after_indicator = False
- event = DocumentStartEvent(
- start_mark, end_mark, explicit=True, version=version, tags=tags
- ) # type: Any
+ event: Any = DocumentStartEvent(
+ start_mark,
+ end_mark,
+ explicit=True,
+ version=version,
+ tags=tags,
+ comment=token.comment,
+ )
self.states.append(self.parse_document_end)
self.state = self.parse_document_content
else:
@@ -235,8 +236,7 @@ class Parser(object):
self.state = None
return event
- def parse_document_end(self):
- # type: () -> Any
+ def parse_document_end(self) -> Any:
# Parse the document end.
token = self.scanner.peek_token()
start_mark = end_mark = token.start_mark
@@ -255,8 +255,7 @@ class Parser(object):
return event
- def parse_document_content(self):
- # type: () -> Any
+ def parse_document_content(self) -> Any:
if self.scanner.check_token(
DirectiveToken, DocumentStartToken, DocumentEndToken, StreamEndToken
):
@@ -266,13 +265,12 @@ class Parser(object):
else:
return self.parse_block_node()
- def process_directives(self):
- # type: () -> Any
+ def process_directives(self) -> Any:
yaml_version = None
self.tag_handles = {}
while self.scanner.check_token(DirectiveToken):
token = self.scanner.get_token()
- if token.name == u'YAML':
+ if token.name == 'YAML':
if yaml_version is not None:
raise ParserError(
None, None, 'found duplicate YAML directive', token.start_mark
@@ -282,19 +280,19 @@ class Parser(object):
raise ParserError(
None,
None,
- 'found incompatible YAML document (version 1.* is ' 'required)',
+ 'found incompatible YAML document (version 1.* is required)',
token.start_mark,
)
yaml_version = token.value
- elif token.name == u'TAG':
+ elif token.name == 'TAG':
handle, prefix = token.value
if handle in self.tag_handles:
raise ParserError(
- None, None, 'duplicate tag handle %r' % utf8(handle), token.start_mark
+ None, None, f'duplicate tag handle {handle!r}', token.start_mark,
)
self.tag_handles[handle] = prefix
if bool(self.tag_handles):
- value = yaml_version, self.tag_handles.copy() # type: Any
+ value: Any = (yaml_version, self.tag_handles.copy())
else:
value = yaml_version, None
if self.loader is not None and hasattr(self.loader, 'tags'):
@@ -324,27 +322,22 @@ class Parser(object):
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
- def parse_block_node(self):
- # type: () -> Any
+ def parse_block_node(self) -> Any:
return self.parse_node(block=True)
- def parse_flow_node(self):
- # type: () -> Any
+ def parse_flow_node(self) -> Any:
return self.parse_node()
- def parse_block_node_or_indentless_sequence(self):
- # type: () -> Any
+ def parse_block_node_or_indentless_sequence(self) -> Any:
return self.parse_node(block=True, indentless_sequence=True)
- def transform_tag(self, handle, suffix):
- # type: (Any, Any) -> Any
+ def transform_tag(self, handle: Any, suffix: Any) -> Any:
return self.tag_handles[handle] + suffix
- def parse_node(self, block=False, indentless_sequence=False):
- # type: (bool, bool) -> Any
+ def parse_node(self, block: bool = False, indentless_sequence: bool = False) -> Any:
if self.scanner.check_token(AliasToken):
token = self.scanner.get_token()
- event = AliasEvent(token.value, token.start_mark, token.end_mark) # type: Any
+ event: Any = AliasEvent(token.value, token.start_mark, token.end_mark)
self.state = self.states.pop()
return event
@@ -353,6 +346,7 @@ class Parser(object):
start_mark = end_mark = tag_mark = None
if self.scanner.check_token(AnchorToken):
token = self.scanner.get_token()
+ self.move_token_comment(token)
start_mark = token.start_mark
end_mark = token.end_mark
anchor = token.value
@@ -378,13 +372,13 @@ class Parser(object):
raise ParserError(
'while parsing a node',
start_mark,
- 'found undefined tag handle %r' % utf8(handle),
+ f'found undefined tag handle {handle!r}',
tag_mark,
)
tag = self.transform_tag(handle, suffix)
else:
tag = suffix
- # if tag == u'!':
+ # if tag == '!':
# raise ParserError("while parsing a node", start_mark,
# "found non-specific tag '!'", tag_mark,
# "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag'
@@ -392,13 +386,17 @@ class Parser(object):
if start_mark is None:
start_mark = end_mark = self.scanner.peek_token().start_mark
event = None
- implicit = tag is None or tag == u'!'
+ implicit = tag is None or tag == '!'
if indentless_sequence and self.scanner.check_token(BlockEntryToken):
comment = None
pt = self.scanner.peek_token()
- if pt.comment and pt.comment[0]:
- comment = [pt.comment[0], []]
- pt.comment[0] = None
+ if self.loader and self.loader.comment_handling is None:
+ if pt.comment and pt.comment[0]:
+ comment = [pt.comment[0], []]
+ pt.comment[0] = None
+ elif self.loader:
+ if pt.comment:
+ comment = pt.comment
end_mark = self.scanner.peek_token().end_mark
event = SequenceStartEvent(
anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
@@ -410,7 +408,7 @@ class Parser(object):
token = self.scanner.get_token()
# self.scanner.peek_token_same_line_comment(token)
end_mark = token.end_mark
- if (token.plain and tag is None) or tag == u'!':
+ if (token.plain and tag is None) or tag == '!':
implicit = (True, False)
elif tag is None:
implicit = (False, True)
@@ -462,7 +460,7 @@ class Parser(object):
comment = pt.comment
# nprint('pt0', type(pt))
if comment is None or comment[1] is None:
- comment = pt.split_comment()
+ comment = pt.split_old_comment()
# nprint('pt1', comment)
event = SequenceStartEvent(
anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
@@ -487,9 +485,9 @@ class Parser(object):
node = 'flow'
token = self.scanner.peek_token()
raise ParserError(
- 'while parsing a %s node' % node,
+ f'while parsing a {node!s} node',
start_mark,
- 'expected the node content, but found %r' % token.id,
+ f'expected the node content, but found {token.id!r}',
token.start_mark,
)
return event
@@ -497,19 +495,17 @@ class Parser(object):
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
# BLOCK-END
- def parse_block_sequence_first_entry(self):
- # type: () -> Any
+ def parse_block_sequence_first_entry(self) -> Any:
token = self.scanner.get_token()
# move any comment from start token
- # token.move_comment(self.scanner.peek_token())
+ # self.move_token_comment(token)
self.marks.append(token.start_mark)
return self.parse_block_sequence_entry()
- def parse_block_sequence_entry(self):
- # type: () -> Any
+ def parse_block_sequence_entry(self) -> Any:
if self.scanner.check_token(BlockEntryToken):
token = self.scanner.get_token()
- token.move_comment(self.scanner.peek_token())
+ self.move_token_comment(token)
if not self.scanner.check_token(BlockEntryToken, BlockEndToken):
self.states.append(self.parse_block_sequence_entry)
return self.parse_block_node()
@@ -521,7 +517,7 @@ class Parser(object):
raise ParserError(
'while parsing a block collection',
self.marks[-1],
- 'expected <block end>, but found %r' % token.id,
+ f'expected <block end>, but found {token.id!r}',
token.start_mark,
)
token = self.scanner.get_token() # BlockEndToken
@@ -537,11 +533,10 @@ class Parser(object):
# - entry
# - nested
- def parse_indentless_sequence_entry(self):
- # type: () -> Any
+ def parse_indentless_sequence_entry(self) -> Any:
if self.scanner.check_token(BlockEntryToken):
token = self.scanner.get_token()
- token.move_comment(self.scanner.peek_token())
+ self.move_token_comment(token)
if not self.scanner.check_token(
BlockEntryToken, KeyToken, ValueToken, BlockEndToken
):
@@ -551,7 +546,14 @@ class Parser(object):
self.state = self.parse_indentless_sequence_entry
return self.process_empty_scalar(token.end_mark)
token = self.scanner.peek_token()
- event = SequenceEndEvent(token.start_mark, token.start_mark, comment=token.comment)
+ c = None
+ if self.loader and self.loader.comment_handling is None:
+ c = token.comment
+ start_mark = token.start_mark
+ else:
+ start_mark = self.last_event.end_mark # type: ignore
+ c = self.distribute_comment(token.comment, start_mark.line) # type: ignore
+ event = SequenceEndEvent(start_mark, start_mark, comment=c)
self.state = self.states.pop()
return event
@@ -560,17 +562,15 @@ class Parser(object):
# (VALUE block_node_or_indentless_sequence?)?)*
# BLOCK-END
- def parse_block_mapping_first_key(self):
- # type: () -> Any
+ def parse_block_mapping_first_key(self) -> Any:
token = self.scanner.get_token()
self.marks.append(token.start_mark)
return self.parse_block_mapping_key()
- def parse_block_mapping_key(self):
- # type: () -> Any
+ def parse_block_mapping_key(self) -> Any:
if self.scanner.check_token(KeyToken):
token = self.scanner.get_token()
- token.move_comment(self.scanner.peek_token())
+ self.move_token_comment(token)
if not self.scanner.check_token(KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_block_mapping_value)
return self.parse_block_node_or_indentless_sequence()
@@ -585,26 +585,25 @@ class Parser(object):
raise ParserError(
'while parsing a block mapping',
self.marks[-1],
- 'expected <block end>, but found %r' % token.id,
+ f'expected <block end>, but found {token.id!r}',
token.start_mark,
)
token = self.scanner.get_token()
- token.move_comment(self.scanner.peek_token())
+ self.move_token_comment(token)
event = MappingEndEvent(token.start_mark, token.end_mark, comment=token.comment)
self.state = self.states.pop()
self.marks.pop()
return event
- def parse_block_mapping_value(self):
- # type: () -> Any
+ def parse_block_mapping_value(self) -> Any:
if self.scanner.check_token(ValueToken):
token = self.scanner.get_token()
# value token might have post comment move it to e.g. block
if self.scanner.check_token(ValueToken):
- token.move_comment(self.scanner.peek_token())
+ self.move_token_comment(token)
else:
if not self.scanner.check_token(KeyToken):
- token.move_comment(self.scanner.peek_token(), empty=True)
+ self.move_token_comment(token, empty=True)
# else: empty value for this key cannot move token.comment
if not self.scanner.check_token(KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_block_mapping_key)
@@ -635,14 +634,12 @@ class Parser(object):
# For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
# generate an inline mapping (set syntax).
- def parse_flow_sequence_first_entry(self):
- # type: () -> Any
+ def parse_flow_sequence_first_entry(self) -> Any:
token = self.scanner.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_sequence_entry(first=True)
- def parse_flow_sequence_entry(self, first=False):
- # type: (bool) -> Any
+ def parse_flow_sequence_entry(self, first: bool = False) -> Any:
if not self.scanner.check_token(FlowSequenceEndToken):
if not first:
if self.scanner.check_token(FlowEntryToken):
@@ -652,15 +649,15 @@ class Parser(object):
raise ParserError(
'while parsing a flow sequence',
self.marks[-1],
- "expected ',' or ']', but got %r" % token.id,
+ f"expected ',' or ']', but got {token.id!r}",
token.start_mark,
)
if self.scanner.check_token(KeyToken):
token = self.scanner.peek_token()
- event = MappingStartEvent(
+ event: Any = MappingStartEvent(
None, None, True, token.start_mark, token.end_mark, flow_style=True
- ) # type: Any
+ )
self.state = self.parse_flow_sequence_entry_mapping_key
return event
elif not self.scanner.check_token(FlowSequenceEndToken):
@@ -672,8 +669,7 @@ class Parser(object):
self.marks.pop()
return event
- def parse_flow_sequence_entry_mapping_key(self):
- # type: () -> Any
+ def parse_flow_sequence_entry_mapping_key(self) -> Any:
token = self.scanner.get_token()
if not self.scanner.check_token(ValueToken, FlowEntryToken, FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry_mapping_value)
@@ -682,8 +678,7 @@ class Parser(object):
self.state = self.parse_flow_sequence_entry_mapping_value
return self.process_empty_scalar(token.end_mark)
- def parse_flow_sequence_entry_mapping_value(self):
- # type: () -> Any
+ def parse_flow_sequence_entry_mapping_value(self) -> Any:
if self.scanner.check_token(ValueToken):
token = self.scanner.get_token()
if not self.scanner.check_token(FlowEntryToken, FlowSequenceEndToken):
@@ -697,8 +692,7 @@ class Parser(object):
token = self.scanner.peek_token()
return self.process_empty_scalar(token.start_mark)
- def parse_flow_sequence_entry_mapping_end(self):
- # type: () -> Any
+ def parse_flow_sequence_entry_mapping_end(self) -> Any:
self.state = self.parse_flow_sequence_entry
token = self.scanner.peek_token()
return MappingEndEvent(token.start_mark, token.start_mark)
@@ -709,14 +703,12 @@ class Parser(object):
# FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
- def parse_flow_mapping_first_key(self):
- # type: () -> Any
+ def parse_flow_mapping_first_key(self) -> Any:
token = self.scanner.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_mapping_key(first=True)
- def parse_flow_mapping_key(self, first=False):
- # type: (Any) -> Any
+ def parse_flow_mapping_key(self, first: Any = False) -> Any:
if not self.scanner.check_token(FlowMappingEndToken):
if not first:
if self.scanner.check_token(FlowEntryToken):
@@ -726,7 +718,7 @@ class Parser(object):
raise ParserError(
'while parsing a flow mapping',
self.marks[-1],
- "expected ',' or '}', but got %r" % token.id,
+ f"expected ',' or '}}', but got {token.id!r}",
token.start_mark,
)
if self.scanner.check_token(KeyToken):
@@ -753,8 +745,7 @@ class Parser(object):
self.marks.pop()
return event
- def parse_flow_mapping_value(self):
- # type: () -> Any
+ def parse_flow_mapping_value(self) -> Any:
if self.scanner.check_token(ValueToken):
token = self.scanner.get_token()
if not self.scanner.check_token(FlowEntryToken, FlowMappingEndToken):
@@ -768,35 +759,89 @@ class Parser(object):
token = self.scanner.peek_token()
return self.process_empty_scalar(token.start_mark)
- def parse_flow_mapping_empty_value(self):
- # type: () -> Any
+ def parse_flow_mapping_empty_value(self) -> Any:
self.state = self.parse_flow_mapping_key
return self.process_empty_scalar(self.scanner.peek_token().start_mark)
- def process_empty_scalar(self, mark, comment=None):
- # type: (Any, Any) -> Any
+ def process_empty_scalar(self, mark: Any, comment: Any = None) -> Any:
return ScalarEvent(None, None, (True, False), "", mark, mark, comment=comment)
+ def move_token_comment(
+ self, token: Any, nt: Optional[Any] = None, empty: Optional[bool] = False
+ ) -> Any:
+ pass
+
class RoundTripParser(Parser):
"""roundtrip is a safe loader, that wants to see the unmangled tag"""
- def transform_tag(self, handle, suffix):
- # type: (Any, Any) -> Any
+ def transform_tag(self, handle: Any, suffix: Any) -> Any:
# return self.tag_handles[handle]+suffix
if handle == '!!' and suffix in (
- u'null',
- u'bool',
- u'int',
- u'float',
- u'binary',
- u'timestamp',
- u'omap',
- u'pairs',
- u'set',
- u'str',
- u'seq',
- u'map',
+ 'null',
+ 'bool',
+ 'int',
+ 'float',
+ 'binary',
+ 'timestamp',
+ 'omap',
+ 'pairs',
+ 'set',
+ 'str',
+ 'seq',
+ 'map',
):
return Parser.transform_tag(self, handle, suffix)
return handle + suffix
+
+ def move_token_comment(
+ self, token: Any, nt: Optional[Any] = None, empty: Optional[bool] = False
+ ) -> Any:
+ token.move_old_comment(self.scanner.peek_token() if nt is None else nt, empty=empty)
+
+
+class RoundTripParserSC(RoundTripParser):
+ """roundtrip is a safe loader, that wants to see the unmangled tag"""
+
+ # some of the differences are based on the superclass testing
+ # if self.loader.comment_handling is not None
+
+ def move_token_comment(
+ self: Any, token: Any, nt: Any = None, empty: Optional[bool] = False
+ ) -> None:
+ token.move_new_comment(self.scanner.peek_token() if nt is None else nt, empty=empty)
+
+ def distribute_comment(self, comment: Any, line: Any) -> Any:
+ # ToDo, look at indentation of the comment to determine attachment
+ if comment is None:
+ return None
+ if not comment[0]:
+ return None
+ if comment[0][0] != line + 1:
+ nprintf('>>>dcxxx', comment, line)
+ assert comment[0][0] == line + 1
+ # if comment[0] - line > 1:
+ # return
+ typ = self.loader.comment_handling & 0b11
+ # nprintf('>>>dca', comment, line, typ)
+ if typ == C_POST:
+ return None
+ if typ == C_PRE:
+ c = [None, None, comment[0]]
+ comment[0] = None
+ return c
+ # nprintf('>>>dcb', comment[0])
+ for _idx, cmntidx in enumerate(comment[0]):
+ # nprintf('>>>dcb', cmntidx)
+ if isinstance(self.scanner.comments[cmntidx], BlankLineComment):
+ break
+ else:
+ return None # no space found
+ if _idx == 0:
+ return None # first line was blank
+ # nprintf('>>>dcc', idx)
+ if typ == C_SPLIT_ON_FIRST_BLANK:
+ c = [None, None, comment[0][:_idx]]
+ comment[0] = comment[0][_idx:]
+ return c
+ raise NotImplementedError # reserved
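The parser classes above are normally driven through the top-level YAML object rather than instantiated directly. A hedged sketch, assuming YAML.parse() accepts a string and yields the events these state methods produce:

# Illustration only (not part of the patch): inspect the round-trip event stream.
import sys
from ruamel.yaml import YAML

yaml = YAML()                      # typ='rt' by default -> RoundTripParser
for event in yaml.parse('a: 1  # comment\nb: 2\n'):
    sys.stdout.write(f'{event!r}\n')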
diff --git a/reader.py b/reader.py
index b056a04..dec6e9f 100644
--- a/reader.py
+++ b/reader.py
@@ -1,7 +1,5 @@
# coding: utf-8
-from __future__ import absolute_import
-
# This module contains abstractions for the input stream. You don't have to
 # look further; there is no pretty code.
#
@@ -24,99 +22,89 @@ from __future__ import absolute_import
import codecs
from ruamel.yaml.error import YAMLError, FileMark, StringMark, YAMLStreamError
-from ruamel.yaml.compat import text_type, binary_type, PY3, UNICODE_SIZE
from ruamel.yaml.util import RegExp
-if False: # MYPY
- from typing import Any, Dict, Optional, List, Union, Text, Tuple, Optional # NOQA
-# from ruamel.yaml.compat import StreamTextType # NOQA
+from typing import Any, Dict, Optional, List, Union, Text, Tuple  # NOQA
+# from ruamel.yaml.compat import StreamTextType # NOQA
__all__ = ['Reader', 'ReaderError']
class ReaderError(YAMLError):
- def __init__(self, name, position, character, encoding, reason):
- # type: (Any, Any, Any, Any, Any) -> None
+ def __init__(
+ self, name: Any, position: Any, character: Any, encoding: Any, reason: Any
+ ) -> None:
self.name = name
self.character = character
self.position = position
self.encoding = encoding
self.reason = reason
- def __str__(self):
- # type: () -> str
- if isinstance(self.character, binary_type):
- return "'%s' codec can't decode byte #x%02x: %s\n" ' in "%s", position %d' % (
- self.encoding,
- ord(self.character),
- self.reason,
- self.name,
- self.position,
+ def __str__(self) -> Any:
+ if isinstance(self.character, bytes):
+ return (
+ f"'{self.encoding!s}' codec can't decode byte #x{ord(self.character):02x}: "
+ f'{self.reason!s}\n'
+ f' in "{self.name!s}", position {self.position:d}'
)
else:
- return 'unacceptable character #x%04x: %s\n' ' in "%s", position %d' % (
- self.character,
- self.reason,
- self.name,
- self.position,
+ return (
+ f'unacceptable character #x{self.character:04x}: {self.reason!s}\n'
+ f' in "{self.name!s}", position {self.position:d}'
)
-class Reader(object):
+class Reader:
# Reader:
# - determines the data encoding and converts it to a unicode string,
# - checks if characters are in allowed range,
# - adds '\0' to the end.
# Reader accepts
- # - a `str` object (PY2) / a `bytes` object (PY3),
- # - a `unicode` object (PY2) / a `str` object (PY3),
+ # - a `bytes` object,
+ # - a `str` object,
# - a file-like object with its `read` method returning `str`,
# - a file-like object with its `read` method returning `unicode`.
# Yeah, it's ugly and slow.
- def __init__(self, stream, loader=None):
- # type: (Any, Any) -> None
+ def __init__(self, stream: Any, loader: Any = None) -> None:
self.loader = loader
if self.loader is not None and getattr(self.loader, '_reader', None) is None:
self.loader._reader = self
self.reset_reader()
- self.stream = stream # type: Any # as .read is called
+ self.stream: Any = stream # as .read is called
- def reset_reader(self):
- # type: () -> None
- self.name = None # type: Any
+ def reset_reader(self) -> None:
+ self.name: Any = None
self.stream_pointer = 0
self.eof = True
self.buffer = ""
self.pointer = 0
- self.raw_buffer = None # type: Any
+ self.raw_buffer: Any = None
self.raw_decode = None
- self.encoding = None # type: Optional[Text]
+ self.encoding: Optional[Text] = None
self.index = 0
self.line = 0
self.column = 0
@property
- def stream(self):
- # type: () -> Any
+ def stream(self) -> Any:
try:
return self._stream
except AttributeError:
             raise YAMLStreamError('input stream needs to be specified')
@stream.setter
- def stream(self, val):
- # type: (Any) -> None
+ def stream(self, val: Any) -> None:
if val is None:
return
self._stream = None
- if isinstance(val, text_type):
+ if isinstance(val, str):
self.name = '<unicode string>'
self.check_printable(val)
- self.buffer = val + u'\0' # type: ignore
- elif isinstance(val, binary_type):
+ self.buffer = val + '\0'
+ elif isinstance(val, bytes):
self.name = '<byte string>'
self.raw_buffer = val
self.determine_encoding()
@@ -129,54 +117,49 @@ class Reader(object):
self.raw_buffer = None
self.determine_encoding()
- def peek(self, index=0):
- # type: (int) -> Text
+ def peek(self, index: int = 0) -> Text:
try:
return self.buffer[self.pointer + index]
except IndexError:
self.update(index + 1)
return self.buffer[self.pointer + index]
- def prefix(self, length=1):
- # type: (int) -> Any
+ def prefix(self, length: int = 1) -> Any:
if self.pointer + length >= len(self.buffer):
self.update(length)
return self.buffer[self.pointer : self.pointer + length]
- def forward_1_1(self, length=1):
- # type: (int) -> None
+ def forward_1_1(self, length: int = 1) -> None:
if self.pointer + length + 1 >= len(self.buffer):
self.update(length + 1)
while length != 0:
ch = self.buffer[self.pointer]
self.pointer += 1
self.index += 1
- if ch in u'\n\x85\u2028\u2029' or (
- ch == u'\r' and self.buffer[self.pointer] != u'\n'
+ if ch in '\n\x85\u2028\u2029' or (
+ ch == '\r' and self.buffer[self.pointer] != '\n'
):
self.line += 1
self.column = 0
- elif ch != u'\uFEFF':
+ elif ch != '\uFEFF':
self.column += 1
length -= 1
- def forward(self, length=1):
- # type: (int) -> None
+ def forward(self, length: int = 1) -> None:
if self.pointer + length + 1 >= len(self.buffer):
self.update(length + 1)
while length != 0:
ch = self.buffer[self.pointer]
self.pointer += 1
self.index += 1
- if ch == u'\n' or (ch == u'\r' and self.buffer[self.pointer] != u'\n'):
+ if ch == '\n' or (ch == '\r' and self.buffer[self.pointer] != '\n'):
self.line += 1
self.column = 0
- elif ch != u'\uFEFF':
+ elif ch != '\uFEFF':
self.column += 1
length -= 1
- def get_mark(self):
- # type: () -> Any
+ def get_mark(self) -> Any:
if self.stream is None:
return StringMark(
self.name, self.index, self.line, self.column, self.buffer, self.pointer
@@ -184,11 +167,10 @@ class Reader(object):
else:
return FileMark(self.name, self.index, self.line, self.column)
- def determine_encoding(self):
- # type: () -> None
+ def determine_encoding(self) -> None:
while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
self.update_raw()
- if isinstance(self.raw_buffer, binary_type):
+ if isinstance(self.raw_buffer, bytes):
if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
self.raw_decode = codecs.utf_16_le_decode # type: ignore
self.encoding = 'utf-16-le'
@@ -200,25 +182,15 @@ class Reader(object):
self.encoding = 'utf-8'
self.update(1)
- if UNICODE_SIZE == 2:
- NON_PRINTABLE = RegExp(
- u'[^\x09\x0A\x0D\x20-\x7E\x85' u'\xA0-\uD7FF' u'\uE000-\uFFFD' u']'
- )
- else:
- NON_PRINTABLE = RegExp(
- u'[^\x09\x0A\x0D\x20-\x7E\x85'
- u'\xA0-\uD7FF'
- u'\uE000-\uFFFD'
- u'\U00010000-\U0010FFFF'
- u']'
- )
+ NON_PRINTABLE = RegExp(
+ '[^\x09\x0A\x0D\x20-\x7E\x85' '\xA0-\uD7FF' '\uE000-\uFFFD' '\U00010000-\U0010FFFF' ']'
+ )
_printable_ascii = ('\x09\x0A\x0D' + "".join(map(chr, range(0x20, 0x7F)))).encode('ascii')
@classmethod
- def _get_non_printable_ascii(cls, data): # type: ignore
- # type: (Text, bytes) -> Optional[Tuple[int, Text]]
- ascii_bytes = data.encode('ascii')
+ def _get_non_printable_ascii(cls: Text, data: bytes) -> Optional[Tuple[int, Text]]: # type: ignore # NOQA
+ ascii_bytes = data.encode('ascii') # type: ignore
non_printables = ascii_bytes.translate(None, cls._printable_ascii) # type: ignore
if not non_printables:
return None
@@ -226,23 +198,20 @@ class Reader(object):
return ascii_bytes.index(non_printable), non_printable.decode('ascii')
@classmethod
- def _get_non_printable_regex(cls, data):
- # type: (Text) -> Optional[Tuple[int, Text]]
+ def _get_non_printable_regex(cls, data: Text) -> Optional[Tuple[int, Text]]:
match = cls.NON_PRINTABLE.search(data)
if not bool(match):
return None
return match.start(), match.group()
@classmethod
- def _get_non_printable(cls, data):
- # type: (Text) -> Optional[Tuple[int, Text]]
+ def _get_non_printable(cls, data: Text) -> Optional[Tuple[int, Text]]:
try:
return cls._get_non_printable_ascii(data) # type: ignore
except UnicodeEncodeError:
return cls._get_non_printable_regex(data)
- def check_printable(self, data):
- # type: (Any) -> None
+ def check_printable(self, data: Any) -> None:
non_printable_match = self._get_non_printable(data)
if non_printable_match is not None:
start, character = non_printable_match
@@ -255,8 +224,7 @@ class Reader(object):
'special characters are not allowed',
)
- def update(self, length):
- # type: (int) -> None
+ def update(self, length: int) -> None:
if self.raw_buffer is None:
return
self.buffer = self.buffer[self.pointer :]
@@ -268,10 +236,7 @@ class Reader(object):
try:
data, converted = self.raw_decode(self.raw_buffer, 'strict', self.eof)
except UnicodeDecodeError as exc:
- if PY3:
- character = self.raw_buffer[exc.start]
- else:
- character = exc.object[exc.start]
+ character = self.raw_buffer[exc.start]
if self.stream is not None:
position = self.stream_pointer - len(self.raw_buffer) + exc.start
elif self.stream is not None:
@@ -290,10 +255,9 @@ class Reader(object):
self.raw_buffer = None
break
- def update_raw(self, size=None):
- # type: (Optional[int]) -> None
+ def update_raw(self, size: Optional[int] = None) -> None:
if size is None:
- size = 4096 if PY3 else 1024
+ size = 4096
data = self.stream.read(size)
if self.raw_buffer is None:
self.raw_buffer = data
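A small sketch of the Reader's behaviour after the bytes/str simplification above (illustration only; assumes a stock ruamel.yaml install):

# Bytes are decoded via BOM detection, defaulting to UTF-8; str input is
# checked against NON_PRINTABLE before being buffered.
from ruamel.yaml.reader import Reader, ReaderError

r = Reader(b'key: value\n')        # bytes, no BOM -> decoded as UTF-8
print(r.encoding, r.prefix(3))     # -> utf-8 key

try:
    Reader('bad: "\x00"')          # NUL trips the printability check
except ReaderError as exc:
    print(exc)                     # unacceptable character #x0000: ...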
diff --git a/representer.py b/representer.py
index 1b5185a..8a03234 100644
--- a/representer.py
+++ b/representer.py
@@ -1,12 +1,8 @@
# coding: utf-8
-from __future__ import print_function, absolute_import, division
-
-
from ruamel.yaml.error import * # NOQA
from ruamel.yaml.nodes import * # NOQA
-from ruamel.yaml.compat import text_type, binary_type, to_unicode, PY2, PY3
-from ruamel.yaml.compat import ordereddict # type: ignore
+from ruamel.yaml.compat import ordereddict
from ruamel.yaml.compat import nprint, nprintf # NOQA
from ruamel.yaml.scalarstring import (
LiteralScalarString,
@@ -30,19 +26,16 @@ from ruamel.yaml.scalarint import ScalarInt, BinaryInt, OctalInt, HexInt, HexCap
from ruamel.yaml.scalarfloat import ScalarFloat
from ruamel.yaml.scalarbool import ScalarBoolean
from ruamel.yaml.timestamp import TimeStamp
+from ruamel.yaml.anchor import Anchor
import datetime
import sys
import types
-if PY3:
- import copyreg
- import base64
-else:
- import copy_reg as copyreg # type: ignore
+import copyreg
+import base64
-if False: # MYPY
- from typing import Dict, List, Any, Union, Text, Optional # NOQA
+from typing import Dict, List, Any, Union, Text, Optional # NOQA
# fmt: off
__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer',
@@ -54,36 +47,29 @@ class RepresenterError(YAMLError):
pass
-if PY2:
-
- def get_classobj_bases(cls):
- # type: (Any) -> Any
- bases = [cls]
- for base in cls.__bases__:
- bases.extend(get_classobj_bases(base))
- return bases
-
+class BaseRepresenter:
-class BaseRepresenter(object):
+ yaml_representers: Dict[Any, Any] = {}
+ yaml_multi_representers: Dict[Any, Any] = {}
- yaml_representers = {} # type: Dict[Any, Any]
- yaml_multi_representers = {} # type: Dict[Any, Any]
-
- def __init__(self, default_style=None, default_flow_style=None, dumper=None):
- # type: (Any, Any, Any, Any) -> None
+ def __init__(
+ self: Any,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ dumper: Any = None,
+ ) -> None:
self.dumper = dumper
if self.dumper is not None:
self.dumper._representer = self
self.default_style = default_style
self.default_flow_style = default_flow_style
- self.represented_objects = {} # type: Dict[Any, Any]
- self.object_keeper = [] # type: List[Any]
- self.alias_key = None # type: Optional[int]
+ self.represented_objects: Dict[Any, Any] = {}
+ self.object_keeper: List[Any] = []
+ self.alias_key: Optional[int] = None
self.sort_base_mapping_type_on_output = True
@property
- def serializer(self):
- # type: () -> Any
+ def serializer(self) -> Any:
try:
if hasattr(self.dumper, 'typ'):
return self.dumper.serializer
@@ -91,16 +77,14 @@ class BaseRepresenter(object):
except AttributeError:
return self # cyaml
- def represent(self, data):
- # type: (Any) -> None
+ def represent(self, data: Any) -> None:
node = self.represent_data(data)
self.serializer.serialize(node)
self.represented_objects = {}
self.object_keeper = []
self.alias_key = None
- def represent_data(self, data):
- # type: (Any) -> Any
+ def represent_data(self, data: Any) -> Any:
if self.ignore_aliases(data):
self.alias_key = None
else:
@@ -110,15 +94,11 @@ class BaseRepresenter(object):
node = self.represented_objects[self.alias_key]
# if node is None:
# raise RepresenterError(
- # "recursive objects are not allowed: %r" % data)
+ # f"recursive objects are not allowed: {data!r}")
return node
# self.represented_objects[alias_key] = None
self.object_keeper.append(data)
data_types = type(data).__mro__
- if PY2:
- # if type(data) is types.InstanceType:
- if isinstance(data, types.InstanceType):
- data_types = get_classobj_bases(data.__class__) + list(data_types)
if data_types[0] in self.yaml_representers:
node = self.yaml_representers[data_types[0]](self, data)
else:
@@ -132,13 +112,12 @@ class BaseRepresenter(object):
elif None in self.yaml_representers:
node = self.yaml_representers[None](self, data)
else:
- node = ScalarNode(None, text_type(data))
+ node = ScalarNode(None, str(data))
# if alias_key is not None:
# self.represented_objects[alias_key] = node
return node
- def represent_key(self, data):
- # type: (Any) -> Any
+ def represent_key(self, data: Any) -> Any:
"""
David Fraser: Extract a method to represent keys in mappings, so that
a subclass can choose not to quote them (for example)
@@ -148,21 +127,20 @@ class BaseRepresenter(object):
return self.represent_data(data)
@classmethod
- def add_representer(cls, data_type, representer):
- # type: (Any, Any) -> None
+ def add_representer(cls, data_type: Any, representer: Any) -> None:
if 'yaml_representers' not in cls.__dict__:
cls.yaml_representers = cls.yaml_representers.copy()
cls.yaml_representers[data_type] = representer
@classmethod
- def add_multi_representer(cls, data_type, representer):
- # type: (Any, Any) -> None
+ def add_multi_representer(cls, data_type: Any, representer: Any) -> None:
if 'yaml_multi_representers' not in cls.__dict__:
cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
cls.yaml_multi_representers[data_type] = representer
- def represent_scalar(self, tag, value, style=None, anchor=None):
- # type: (Any, Any, Any, Any) -> Any
+ def represent_scalar(
+ self, tag: Any, value: Any, style: Any = None, anchor: Any = None
+ ) -> ScalarNode:
if style is None:
style = self.default_style
comment = None
@@ -175,9 +153,10 @@ class BaseRepresenter(object):
self.represented_objects[self.alias_key] = node
return node
- def represent_sequence(self, tag, sequence, flow_style=None):
- # type: (Any, Any, Any) -> Any
- value = [] # type: List[Any]
+ def represent_sequence(
+ self, tag: Any, sequence: Any, flow_style: Any = None
+ ) -> SequenceNode:
+ value: List[Any] = []
node = SequenceNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
@@ -194,9 +173,8 @@ class BaseRepresenter(object):
node.flow_style = best_style
return node
- def represent_omap(self, tag, omap, flow_style=None):
- # type: (Any, Any, Any) -> Any
- value = [] # type: List[Any]
+ def represent_omap(self, tag: Any, omap: Any, flow_style: Any = None) -> SequenceNode:
+ value: List[Any] = []
node = SequenceNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
@@ -215,9 +193,8 @@ class BaseRepresenter(object):
node.flow_style = best_style
return node
- def represent_mapping(self, tag, mapping, flow_style=None):
- # type: (Any, Any, Any) -> Any
- value = [] # type: List[Any]
+ def represent_mapping(self, tag: Any, mapping: Any, flow_style: Any = None) -> MappingNode:
+ value: List[Any] = []
node = MappingNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
@@ -244,101 +221,63 @@ class BaseRepresenter(object):
node.flow_style = best_style
return node
- def ignore_aliases(self, data):
- # type: (Any) -> bool
+ def ignore_aliases(self, data: Any) -> bool:
return False
class SafeRepresenter(BaseRepresenter):
- def ignore_aliases(self, data):
- # type: (Any) -> bool
+ def ignore_aliases(self, data: Any) -> bool:
# https://docs.python.org/3/reference/expressions.html#parenthesized-forms :
# "i.e. two occurrences of the empty tuple may or may not yield the same object"
# so "data is ()" should not be used
if data is None or (isinstance(data, tuple) and data == ()):
return True
- if isinstance(data, (binary_type, text_type, bool, int, float)):
+ if isinstance(data, (bytes, str, bool, int, float)):
return True
return False
- def represent_none(self, data):
- # type: (Any) -> Any
- return self.represent_scalar(u'tag:yaml.org,2002:null', u'null')
-
- if PY3:
-
- def represent_str(self, data):
- # type: (Any) -> Any
- return self.represent_scalar(u'tag:yaml.org,2002:str', data)
+ def represent_none(self, data: Any) -> ScalarNode:
+ return self.represent_scalar('tag:yaml.org,2002:null', 'null')
- def represent_binary(self, data):
- # type: (Any) -> Any
- if hasattr(base64, 'encodebytes'):
- data = base64.encodebytes(data).decode('ascii')
- else:
- data = base64.encodestring(data).decode('ascii')
- return self.represent_scalar(u'tag:yaml.org,2002:binary', data, style='|')
+ def represent_str(self, data: Any) -> Any:
+ return self.represent_scalar('tag:yaml.org,2002:str', data)
- else:
+ def represent_binary(self, data: Any) -> ScalarNode:
+ if hasattr(base64, 'encodebytes'):
+ data = base64.encodebytes(data).decode('ascii')
+ else:
+ # check py2 only?
+ data = base64.encodestring(data).decode('ascii') # type: ignore
+ return self.represent_scalar('tag:yaml.org,2002:binary', data, style='|')
- def represent_str(self, data):
- # type: (Any) -> Any
- tag = None
- style = None
- try:
- data = unicode(data, 'ascii')
- tag = u'tag:yaml.org,2002:str'
- except UnicodeDecodeError:
- try:
- data = unicode(data, 'utf-8')
- tag = u'tag:yaml.org,2002:str'
- except UnicodeDecodeError:
- data = data.encode('base64')
- tag = u'tag:yaml.org,2002:binary'
- style = '|'
- return self.represent_scalar(tag, data, style=style)
-
- def represent_unicode(self, data):
- # type: (Any) -> Any
- return self.represent_scalar(u'tag:yaml.org,2002:str', data)
-
- def represent_bool(self, data, anchor=None):
- # type: (Any, Optional[Any]) -> Any
+ def represent_bool(self, data: Any, anchor: Optional[Any] = None) -> ScalarNode:
try:
value = self.dumper.boolean_representation[bool(data)]
except AttributeError:
if data:
- value = u'true'
+ value = 'true'
else:
- value = u'false'
- return self.represent_scalar(u'tag:yaml.org,2002:bool', value, anchor=anchor)
-
- def represent_int(self, data):
- # type: (Any) -> Any
- return self.represent_scalar(u'tag:yaml.org,2002:int', text_type(data))
-
- if PY2:
+ value = 'false'
+ return self.represent_scalar('tag:yaml.org,2002:bool', value, anchor=anchor)
- def represent_long(self, data):
- # type: (Any) -> Any
- return self.represent_scalar(u'tag:yaml.org,2002:int', text_type(data))
+ def represent_int(self, data: Any) -> ScalarNode:
+ return self.represent_scalar('tag:yaml.org,2002:int', str(data))
inf_value = 1e300
while repr(inf_value) != repr(inf_value * inf_value):
inf_value *= inf_value
- def represent_float(self, data):
- # type: (Any) -> Any
+ def represent_float(self, data: Any) -> ScalarNode:
if data != data or (data == 0.0 and data == 1.0):
- value = u'.nan'
+ value = '.nan'
elif data == self.inf_value:
- value = u'.inf'
+ value = '.inf'
elif data == -self.inf_value:
- value = u'-.inf'
+ value = '-.inf'
else:
- value = to_unicode(repr(data)).lower()
+ value = repr(data).lower()
if getattr(self.serializer, 'use_version', None) == (1, 1):
- if u'.' not in value and u'e' in value:
+ if '.' not in value and 'e' in value:
# Note that in some cases `repr(data)` represents a float number
# without the decimal parts. For instance:
# >>> repr(1e17)
@@ -346,11 +285,10 @@ class SafeRepresenter(BaseRepresenter):
# Unfortunately, this is not a valid float representation according
# to the definition of the `!!float` tag in YAML 1.1. We fix
# this by adding '.0' before the 'e' symbol.
- value = value.replace(u'e', u'.0e', 1)
- return self.represent_scalar(u'tag:yaml.org,2002:float', value)
+ value = value.replace('e', '.0e', 1)
+ return self.represent_scalar('tag:yaml.org,2002:float', value)
- def represent_list(self, data):
- # type: (Any) -> Any
+ def represent_list(self, data: Any) -> SequenceNode:
# pairs = (len(data) > 0 and isinstance(data, list))
# if pairs:
# for item in data:
@@ -358,68 +296,57 @@ class SafeRepresenter(BaseRepresenter):
# pairs = False
# break
# if not pairs:
- return self.represent_sequence(u'tag:yaml.org,2002:seq', data)
+ return self.represent_sequence('tag:yaml.org,2002:seq', data)
# value = []
# for item_key, item_value in data:
- # value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
+ # value.append(self.represent_mapping('tag:yaml.org,2002:map',
# [(item_key, item_value)]))
- # return SequenceNode(u'tag:yaml.org,2002:pairs', value)
+ # return SequenceNode('tag:yaml.org,2002:pairs', value)
- def represent_dict(self, data):
- # type: (Any) -> Any
- return self.represent_mapping(u'tag:yaml.org,2002:map', data)
+ def represent_dict(self, data: Any) -> MappingNode:
+ return self.represent_mapping('tag:yaml.org,2002:map', data)
- def represent_ordereddict(self, data):
- # type: (Any) -> Any
- return self.represent_omap(u'tag:yaml.org,2002:omap', data)
+ def represent_ordereddict(self, data: Any) -> SequenceNode:
+ return self.represent_omap('tag:yaml.org,2002:omap', data)
- def represent_set(self, data):
- # type: (Any) -> Any
- value = {} # type: Dict[Any, None]
+ def represent_set(self, data: Any) -> MappingNode:
+ value: Dict[Any, None] = {}
for key in data:
value[key] = None
- return self.represent_mapping(u'tag:yaml.org,2002:set', value)
+ return self.represent_mapping('tag:yaml.org,2002:set', value)
- def represent_date(self, data):
- # type: (Any) -> Any
- value = to_unicode(data.isoformat())
- return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)
+ def represent_date(self, data: Any) -> ScalarNode:
+ value = data.isoformat()
+ return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
- def represent_datetime(self, data):
- # type: (Any) -> Any
- value = to_unicode(data.isoformat(' '))
- return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)
+ def represent_datetime(self, data: Any) -> ScalarNode:
+ value = data.isoformat(' ')
+ return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
- def represent_yaml_object(self, tag, data, cls, flow_style=None):
- # type: (Any, Any, Any, Any) -> Any
+ def represent_yaml_object(
+ self, tag: Any, data: Any, cls: Any, flow_style: Any = None
+ ) -> MappingNode:
if hasattr(data, '__getstate__'):
state = data.__getstate__()
else:
state = data.__dict__.copy()
return self.represent_mapping(tag, state, flow_style=flow_style)
- def represent_undefined(self, data):
- # type: (Any) -> None
- raise RepresenterError('cannot represent an object: %s' % (data,))
+ def represent_undefined(self, data: Any) -> None:
+ raise RepresenterError(f'cannot represent an object: {data!s}')
SafeRepresenter.add_representer(type(None), SafeRepresenter.represent_none)
SafeRepresenter.add_representer(str, SafeRepresenter.represent_str)
-if PY2:
- SafeRepresenter.add_representer(unicode, SafeRepresenter.represent_unicode)
-else:
- SafeRepresenter.add_representer(bytes, SafeRepresenter.represent_binary)
+SafeRepresenter.add_representer(bytes, SafeRepresenter.represent_binary)
SafeRepresenter.add_representer(bool, SafeRepresenter.represent_bool)
SafeRepresenter.add_representer(int, SafeRepresenter.represent_int)
-if PY2:
- SafeRepresenter.add_representer(long, SafeRepresenter.represent_long)
-
SafeRepresenter.add_representer(float, SafeRepresenter.represent_float)
SafeRepresenter.add_representer(list, SafeRepresenter.represent_list)
@@ -447,120 +374,32 @@ SafeRepresenter.add_representer(None, SafeRepresenter.represent_undefined)
class Representer(SafeRepresenter):
- if PY2:
-
- def represent_str(self, data):
- # type: (Any) -> Any
- tag = None
- style = None
- try:
- data = unicode(data, 'ascii')
- tag = u'tag:yaml.org,2002:str'
- except UnicodeDecodeError:
- try:
- data = unicode(data, 'utf-8')
- tag = u'tag:yaml.org,2002:python/str'
- except UnicodeDecodeError:
- data = data.encode('base64')
- tag = u'tag:yaml.org,2002:binary'
- style = '|'
- return self.represent_scalar(tag, data, style=style)
-
- def represent_unicode(self, data):
- # type: (Any) -> Any
- tag = None
- try:
- data.encode('ascii')
- tag = u'tag:yaml.org,2002:python/unicode'
- except UnicodeEncodeError:
- tag = u'tag:yaml.org,2002:str'
- return self.represent_scalar(tag, data)
-
- def represent_long(self, data):
- # type: (Any) -> Any
- tag = u'tag:yaml.org,2002:int'
- if int(data) is not data:
- tag = u'tag:yaml.org,2002:python/long'
- return self.represent_scalar(tag, to_unicode(data))
-
- def represent_complex(self, data):
- # type: (Any) -> Any
+ def represent_complex(self, data: Any) -> Any:
if data.imag == 0.0:
- data = u'%r' % data.real
+ data = repr(data.real)
elif data.real == 0.0:
- data = u'%rj' % data.imag
+ data = f'{data.imag!r}j'
elif data.imag > 0:
- data = u'%r+%rj' % (data.real, data.imag)
+ data = f'{data.real!r}+{data.imag!r}j'
else:
- data = u'%r%rj' % (data.real, data.imag)
- return self.represent_scalar(u'tag:yaml.org,2002:python/complex', data)
+ data = f'{data.real!r}{data.imag!r}j'
+ return self.represent_scalar('tag:yaml.org,2002:python/complex', data)
- def represent_tuple(self, data):
- # type: (Any) -> Any
- return self.represent_sequence(u'tag:yaml.org,2002:python/tuple', data)
+ def represent_tuple(self, data: Any) -> SequenceNode:
+ return self.represent_sequence('tag:yaml.org,2002:python/tuple', data)
- def represent_name(self, data):
- # type: (Any) -> Any
+ def represent_name(self, data: Any) -> ScalarNode:
try:
- name = u'%s.%s' % (data.__module__, data.__qualname__)
+ name = f'{data.__module__!s}.{data.__qualname__!s}'
except AttributeError:
- # probably PY2
- name = u'%s.%s' % (data.__module__, data.__name__)
- return self.represent_scalar(u'tag:yaml.org,2002:python/name:' + name, "")
-
- def represent_module(self, data):
- # type: (Any) -> Any
- return self.represent_scalar(u'tag:yaml.org,2002:python/module:' + data.__name__, "")
-
- if PY2:
-
- def represent_instance(self, data):
- # type: (Any) -> Any
- # For instances of classic classes, we use __getinitargs__ and
- # __getstate__ to serialize the data.
-
- # If data.__getinitargs__ exists, the object must be reconstructed
- # by calling cls(**args), where args is a tuple returned by
- # __getinitargs__. Otherwise, the cls.__init__ method should never
- # be called and the class instance is created by instantiating a
- # trivial class and assigning to the instance's __class__ variable.
-
- # If data.__getstate__ exists, it returns the state of the object.
- # Otherwise, the state of the object is data.__dict__.
-
- # We produce either a !!python/object or !!python/object/new node.
- # If data.__getinitargs__ does not exist and state is a dictionary,
- # we produce a !!python/object node . Otherwise we produce a
- # !!python/object/new node.
-
- cls = data.__class__
- class_name = u'%s.%s' % (cls.__module__, cls.__name__)
- args = None
- state = None
- if hasattr(data, '__getinitargs__'):
- args = list(data.__getinitargs__())
- if hasattr(data, '__getstate__'):
- state = data.__getstate__()
- else:
- state = data.__dict__
- if args is None and isinstance(state, dict):
- return self.represent_mapping(
- u'tag:yaml.org,2002:python/object:' + class_name, state
- )
- if isinstance(state, dict) and not state:
- return self.represent_sequence(
- u'tag:yaml.org,2002:python/object/new:' + class_name, args
- )
- value = {}
- if bool(args):
- value['args'] = args
- value['state'] = state # type: ignore
- return self.represent_mapping(
- u'tag:yaml.org,2002:python/object/new:' + class_name, value
- )
+ # ToDo: check if this can be reached in Py3
+ name = f'{data.__module__!s}.{data.__name__!s}'
+ return self.represent_scalar('tag:yaml.org,2002:python/name:' + name, "")
+
+ def represent_module(self, data: Any) -> ScalarNode:
+ return self.represent_scalar('tag:yaml.org,2002:python/module:' + data.__name__, "")
- def represent_object(self, data):
- # type: (Any) -> Any
+ def represent_object(self, data: Any) -> Union[SequenceNode, MappingNode]:
# We use __reduce__ API to save the data. data.__reduce__ returns
# a tuple of length 2-5:
# (function, args, state, listitems, dictitems)
@@ -579,13 +418,13 @@ class Representer(SafeRepresenter):
cls = type(data)
if cls in copyreg.dispatch_table:
- reduce = copyreg.dispatch_table[cls](data)
+ reduce: Any = copyreg.dispatch_table[cls](data)
elif hasattr(data, '__reduce_ex__'):
reduce = data.__reduce_ex__(2)
elif hasattr(data, '__reduce__'):
reduce = data.__reduce__()
else:
- raise RepresenterError('cannot represent object: %r' % (data,))
+ raise RepresenterError(f'cannot represent object: {data!r}')
reduce = (list(reduce) + [None] * 5)[:5]
function, args, state, listitems, dictitems = reduce
args = list(args)
@@ -598,19 +437,19 @@ class Representer(SafeRepresenter):
if function.__name__ == '__newobj__':
function = args[0]
args = args[1:]
- tag = u'tag:yaml.org,2002:python/object/new:'
+ tag = 'tag:yaml.org,2002:python/object/new:'
newobj = True
else:
- tag = u'tag:yaml.org,2002:python/object/apply:'
+ tag = 'tag:yaml.org,2002:python/object/apply:'
newobj = False
try:
- function_name = u'%s.%s' % (function.__module__, function.__qualname__)
+ function_name = f'{function.__module__!s}.{function.__qualname__!s}'
except AttributeError:
- # probably PY2
- function_name = u'%s.%s' % (function.__module__, function.__name__)
+ # ToDo: check if this can be reached in Py3
+ function_name = f'{function.__module__!s}.{function.__name__!s}'
if not args and not listitems and not dictitems and isinstance(state, dict) and newobj:
return self.represent_mapping(
- u'tag:yaml.org,2002:python/object:' + function_name, state
+ 'tag:yaml.org,2002:python/object:' + function_name, state
)
if not listitems and not dictitems and isinstance(state, dict) and not state:
return self.represent_sequence(tag + function_name, args)
@@ -626,31 +465,18 @@ class Representer(SafeRepresenter):
return self.represent_mapping(tag + function_name, value)
-if PY2:
- Representer.add_representer(str, Representer.represent_str)
-
- Representer.add_representer(unicode, Representer.represent_unicode)
-
- Representer.add_representer(long, Representer.represent_long)
-
Representer.add_representer(complex, Representer.represent_complex)
Representer.add_representer(tuple, Representer.represent_tuple)
Representer.add_representer(type, Representer.represent_name)
-if PY2:
- Representer.add_representer(types.ClassType, Representer.represent_name)
-
Representer.add_representer(types.FunctionType, Representer.represent_name)
Representer.add_representer(types.BuiltinFunctionType, Representer.represent_name)
Representer.add_representer(types.ModuleType, Representer.represent_module)
-if PY2:
- Representer.add_multi_representer(types.InstanceType, Representer.represent_instance)
-
Representer.add_multi_representer(object, Representer.represent_object)
Representer.add_multi_representer(type, Representer.represent_name)
@@ -660,8 +486,9 @@ class RoundTripRepresenter(SafeRepresenter):
# need to add type here and write out the .comment
# in serializer and emitter
- def __init__(self, default_style=None, default_flow_style=None, dumper=None):
- # type: (Any, Any, Any) -> None
+ def __init__(
+ self, default_style: Any = None, default_flow_style: Any = None, dumper: Any = None
+ ) -> None:
if not hasattr(dumper, 'typ') and default_flow_style is None:
default_flow_style = False
SafeRepresenter.__init__(
@@ -671,8 +498,7 @@ class RoundTripRepresenter(SafeRepresenter):
dumper=dumper,
)
- def ignore_aliases(self, data):
- # type: (Any) -> bool
+ def ignore_aliases(self, data: Any) -> bool:
try:
if data.anchor is not None and data.anchor.value is not None:
return False
@@ -680,27 +506,22 @@ class RoundTripRepresenter(SafeRepresenter):
pass
return SafeRepresenter.ignore_aliases(self, data)
- def represent_none(self, data):
- # type: (Any) -> Any
+ def represent_none(self, data: Any) -> ScalarNode:
if len(self.represented_objects) == 0 and not self.serializer.use_explicit_start:
# this will be open ended (although it is not yet)
- return self.represent_scalar(u'tag:yaml.org,2002:null', u'null')
- return self.represent_scalar(u'tag:yaml.org,2002:null', "")
+ return self.represent_scalar('tag:yaml.org,2002:null', 'null')
+ return self.represent_scalar('tag:yaml.org,2002:null', "")
- def represent_literal_scalarstring(self, data):
- # type: (Any) -> Any
+ def represent_literal_scalarstring(self, data: Any) -> ScalarNode:
tag = None
style = '|'
anchor = data.yaml_anchor(any=True)
- if PY2 and not isinstance(data, unicode):
- data = unicode(data, 'ascii')
- tag = u'tag:yaml.org,2002:str'
+ tag = 'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data, style=style, anchor=anchor)
represent_preserved_scalarstring = represent_literal_scalarstring
- def represent_folded_scalarstring(self, data):
- # type: (Any) -> Any
+ def represent_folded_scalarstring(self, data: Any) -> ScalarNode:
tag = None
style = '>'
anchor = data.yaml_anchor(any=True)
@@ -711,45 +532,35 @@ class RoundTripRepresenter(SafeRepresenter):
and (fold_pos < len(data) and not data[fold_pos + 1].isspace())
):
data = data[:fold_pos] + '\a' + data[fold_pos:]
- if PY2 and not isinstance(data, unicode):
- data = unicode(data, 'ascii')
- tag = u'tag:yaml.org,2002:str'
+ tag = 'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data, style=style, anchor=anchor)
- def represent_single_quoted_scalarstring(self, data):
- # type: (Any) -> Any
+ def represent_single_quoted_scalarstring(self, data: Any) -> ScalarNode:
tag = None
style = "'"
anchor = data.yaml_anchor(any=True)
- if PY2 and not isinstance(data, unicode):
- data = unicode(data, 'ascii')
- tag = u'tag:yaml.org,2002:str'
+ tag = 'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data, style=style, anchor=anchor)
- def represent_double_quoted_scalarstring(self, data):
- # type: (Any) -> Any
+ def represent_double_quoted_scalarstring(self, data: Any) -> ScalarNode:
tag = None
style = '"'
anchor = data.yaml_anchor(any=True)
- if PY2 and not isinstance(data, unicode):
- data = unicode(data, 'ascii')
- tag = u'tag:yaml.org,2002:str'
+ tag = 'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data, style=style, anchor=anchor)
- def represent_plain_scalarstring(self, data):
- # type: (Any) -> Any
+ def represent_plain_scalarstring(self, data: Any) -> ScalarNode:
tag = None
style = ''
anchor = data.yaml_anchor(any=True)
- if PY2 and not isinstance(data, unicode):
- data = unicode(data, 'ascii')
- tag = u'tag:yaml.org,2002:str'
+ tag = 'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data, style=style, anchor=anchor)
- def insert_underscore(self, prefix, s, underscore, anchor=None):
- # type: (Any, Any, Any, Any) -> Any
+ def insert_underscore(
+ self, prefix: Any, s: Any, underscore: Any, anchor: Any = None
+ ) -> ScalarNode:
if underscore is None:
- return self.represent_scalar(u'tag:yaml.org,2002:int', prefix + s, anchor=anchor)
+ return self.represent_scalar('tag:yaml.org,2002:int', prefix + s, anchor=anchor)
if underscore[0]:
sl = list(s)
pos = len(s) - underscore[0]
@@ -761,93 +572,89 @@ class RoundTripRepresenter(SafeRepresenter):
s = '_' + s
if underscore[2]:
s += '_'
- return self.represent_scalar(u'tag:yaml.org,2002:int', prefix + s, anchor=anchor)
+ return self.represent_scalar('tag:yaml.org,2002:int', prefix + s, anchor=anchor)
- def represent_scalar_int(self, data):
- # type: (Any) -> Any
+ def represent_scalar_int(self, data: Any) -> ScalarNode:
if data._width is not None:
- s = '{:0{}d}'.format(data, data._width)
+ s = f'{data:0{data._width}d}'
else:
s = format(data, 'd')
anchor = data.yaml_anchor(any=True)
return self.insert_underscore("", s, data._underscore, anchor=anchor)
- def represent_binary_int(self, data):
- # type: (Any) -> Any
+ def represent_binary_int(self, data: Any) -> ScalarNode:
if data._width is not None:
# cannot use '{:#0{}b}', that strips the zeros
- s = '{:0{}b}'.format(data, data._width)
+ s = f'{data:0{data._width}b}'
else:
s = format(data, 'b')
anchor = data.yaml_anchor(any=True)
return self.insert_underscore('0b', s, data._underscore, anchor=anchor)
- def represent_octal_int(self, data):
- # type: (Any) -> Any
+ def represent_octal_int(self, data: Any) -> ScalarNode:
if data._width is not None:
# cannot use '{:#0{}o}', that strips the zeros
- s = '{:0{}o}'.format(data, data._width)
+ s = f'{data:0{data._width}o}'
else:
s = format(data, 'o')
anchor = data.yaml_anchor(any=True)
- return self.insert_underscore('0o', s, data._underscore, anchor=anchor)
+ prefix = '0o'
+ if getattr(self.serializer, 'use_version', None) == (1, 1):
+ prefix = '0'
+ return self.insert_underscore(prefix, s, data._underscore, anchor=anchor)
- def represent_hex_int(self, data):
- # type: (Any) -> Any
+ def represent_hex_int(self, data: Any) -> ScalarNode:
if data._width is not None:
# cannot use '{:#0{}x}', that strips the zeros
- s = '{:0{}x}'.format(data, data._width)
+ s = f'{data:0{data._width}x}'
else:
s = format(data, 'x')
anchor = data.yaml_anchor(any=True)
return self.insert_underscore('0x', s, data._underscore, anchor=anchor)
- def represent_hex_caps_int(self, data):
- # type: (Any) -> Any
+ def represent_hex_caps_int(self, data: Any) -> ScalarNode:
if data._width is not None:
# cannot use '{:#0{}X}', that strips the zeros
- s = '{:0{}X}'.format(data, data._width)
+ s = f'{data:0{data._width}X}'
else:
s = format(data, 'X')
anchor = data.yaml_anchor(any=True)
return self.insert_underscore('0x', s, data._underscore, anchor=anchor)
- def represent_scalar_float(self, data):
- # type: (Any) -> Any
+ def represent_scalar_float(self, data: Any) -> ScalarNode:
""" this is way more complicated """
value = None
anchor = data.yaml_anchor(any=True)
if data != data or (data == 0.0 and data == 1.0):
- value = u'.nan'
+ value = '.nan'
elif data == self.inf_value:
- value = u'.inf'
+ value = '.inf'
elif data == -self.inf_value:
- value = u'-.inf'
+ value = '-.inf'
if value:
- return self.represent_scalar(u'tag:yaml.org,2002:float', value, anchor=anchor)
+ return self.represent_scalar('tag:yaml.org,2002:float', value, anchor=anchor)
if data._exp is None and data._prec > 0 and data._prec == data._width - 1:
# no exponent, but trailing dot
- value = u'{}{:d}.'.format(data._m_sign if data._m_sign else "", abs(int(data)))
+ value = f'{data._m_sign if data._m_sign else ""}{abs(int(data)):d}.'
elif data._exp is None:
# no exponent, "normal" dot
prec = data._prec
ms = data._m_sign if data._m_sign else ""
- # -1 for the dot
- value = u'{}{:0{}.{}f}'.format(
- ms, abs(data), data._width - len(ms), data._width - prec - 1
- )
- if prec == 0 or (prec == 1 and ms != ""):
- value = value.replace(u'0.', u'.')
+ if prec < 0:
+ value = f'{ms}{abs(int(data)):0{data._width - len(ms)}d}'
+ else:
+ # -1 for the dot
+ value = f'{ms}{abs(data):0{data._width - len(ms)}.{data._width - prec - 1}f}'
+ if prec == 0 or (prec == 1 and ms != ""):
+ value = value.replace('0.', '.')
while len(value) < data._width:
- value += u'0'
+ value += '0'
else:
# exponent
- m, es = u'{:{}.{}e}'.format(
- # data, data._width, data._width - data._prec + (1 if data._m_sign else 0)
- data,
- data._width,
- data._width + (1 if data._m_sign else 0),
- ).split('e')
+ (
+ m,
+ es,
+ ) = f'{data:{data._width}.{data._width + (1 if data._m_sign else 0)}e}'.split('e')
w = data._width if data._prec > 0 else (data._width + 1)
if data < 0:
w += 1
@@ -855,45 +662,42 @@ class RoundTripRepresenter(SafeRepresenter):
e = int(es)
m1, m2 = m.split('.') # always second?
while len(m1) + len(m2) < data._width - (1 if data._prec >= 0 else 0):
- m2 += u'0'
+ m2 += '0'
if data._m_sign and data > 0:
m1 = '+' + m1
- esgn = u'+' if data._e_sign else ""
+ esgn = '+' if data._e_sign else ""
if data._prec < 0: # mantissa without dot
- if m2 != u'0':
+ if m2 != '0':
e -= len(m2)
else:
m2 = ""
while (len(m1) + len(m2) - (1 if data._m_sign else 0)) < data._width:
- m2 += u'0'
+ m2 += '0'
e -= 1
- value = m1 + m2 + data._exp + u'{:{}0{}d}'.format(e, esgn, data._e_width)
+ value = m1 + m2 + data._exp + f'{e:{esgn}0{data._e_width}d}'
elif data._prec == 0: # mantissa with trailing dot
e -= len(m2)
- value = (
- m1 + m2 + u'.' + data._exp + u'{:{}0{}d}'.format(e, esgn, data._e_width)
- )
+ value = m1 + m2 + '.' + data._exp + f'{e:{esgn}0{data._e_width}d}'
else:
if data._m_lead0 > 0:
- m2 = u'0' * (data._m_lead0 - 1) + m1 + m2
- m1 = u'0'
+ m2 = '0' * (data._m_lead0 - 1) + m1 + m2
+ m1 = '0'
m2 = m2[: -data._m_lead0] # these should be zeros
e += data._m_lead0
while len(m1) < data._prec:
m1 += m2[0]
m2 = m2[1:]
e -= 1
- value = (
- m1 + u'.' + m2 + data._exp + u'{:{}0{}d}'.format(e, esgn, data._e_width)
- )
+ value = m1 + '.' + m2 + data._exp + f'{e:{esgn}0{data._e_width}d}'
if value is None:
- value = to_unicode(repr(data)).lower()
- return self.represent_scalar(u'tag:yaml.org,2002:float', value, anchor=anchor)
+ value = repr(data).lower()
+ return self.represent_scalar('tag:yaml.org,2002:float', value, anchor=anchor)
- def represent_sequence(self, tag, sequence, flow_style=None):
- # type: (Any, Any, Any) -> Any
- value = [] # type: List[Any]
+ def represent_sequence(
+ self, tag: Any, sequence: Any, flow_style: Any = None
+ ) -> SequenceNode:
+ value: List[Any] = []
# if the flow_style is None, the flow style tacked on to the object
# explicitly will be taken. If that is None as well the default flow
# style rules
@@ -922,7 +726,11 @@ class RoundTripRepresenter(SafeRepresenter):
for ct in v[1]:
ct.reset()
item_comments = comment.items
- node.comment = comment.comment
+ if node.comment is None:
+ node.comment = comment.comment
+ else:
+ # as we are potentially going to extend this, make a new list
+ node.comment = comment.comment[:]
try:
node.comment.append(comment.end)
except AttributeError:
@@ -942,8 +750,7 @@ class RoundTripRepresenter(SafeRepresenter):
node.flow_style = best_style
return node
- def merge_comments(self, node, comments):
- # type: (Any, Any) -> Any
+ def merge_comments(self, node: Any, comments: Any) -> Any:
if comments is None:
assert hasattr(node, 'comment')
return node
@@ -958,19 +765,17 @@ class RoundTripRepresenter(SafeRepresenter):
node.comment = comments
return node
- def represent_key(self, data):
- # type: (Any) -> Any
+ def represent_key(self, data: Any) -> Any:
if isinstance(data, CommentedKeySeq):
self.alias_key = None
- return self.represent_sequence(u'tag:yaml.org,2002:seq', data, flow_style=True)
+ return self.represent_sequence('tag:yaml.org,2002:seq', data, flow_style=True)
if isinstance(data, CommentedKeyMap):
self.alias_key = None
- return self.represent_mapping(u'tag:yaml.org,2002:map', data, flow_style=True)
+ return self.represent_mapping('tag:yaml.org,2002:map', data, flow_style=True)
return SafeRepresenter.represent_key(self, data)
- def represent_mapping(self, tag, mapping, flow_style=None):
- # type: (Any, Any, Any) -> Any
- value = [] # type: List[Any]
+ def represent_mapping(self, tag: Any, mapping: Any, flow_style: Any = None) -> MappingNode:
+ value: List[Any] = []
try:
flow_style = mapping.fa.flow_style(flow_style)
except AttributeError:
@@ -986,18 +791,26 @@ class RoundTripRepresenter(SafeRepresenter):
# no sorting! !!
try:
comment = getattr(mapping, comment_attrib)
- node.comment = comment.comment
+ if node.comment is None:
+ node.comment = comment.comment
+ else:
+ # as we are potentially going to extend this, make a new list
+ node.comment = comment.comment[:]
if node.comment and node.comment[1]:
for ct in node.comment[1]:
ct.reset()
item_comments = comment.items
- for v in item_comments.values():
- if v and v[1]:
- for ct in v[1]:
- ct.reset()
- try:
- node.comment.append(comment.end)
- except AttributeError:
+ if self.dumper.comment_handling is None:
+ for v in item_comments.values():
+ if v and v[1]:
+ for ct in v[1]:
+ ct.reset()
+ try:
+ node.comment.append(comment.end)
+ except AttributeError:
+ pass
+ else:
+ # NEWCMNT
pass
except AttributeError:
item_comments = {}
@@ -1017,7 +830,9 @@ class RoundTripRepresenter(SafeRepresenter):
node_value = self.represent_data(item_value)
item_comment = item_comments.get(item_key)
if item_comment:
- assert getattr(node_key, 'comment', None) is None
+ # assert getattr(node_key, 'comment', None) is None
+ # issue 351 did throw this because the comment from the list item was
+ # moved to the dict
node_key.comment = item_comment[:2]
nvc = getattr(node_value, 'comment', None)
if nvc is not None: # end comment already there
@@ -1043,12 +858,11 @@ class RoundTripRepresenter(SafeRepresenter):
else:
arg = self.represent_data(merge_list)
arg.flow_style = True
- value.insert(merge_pos, (ScalarNode(u'tag:yaml.org,2002:merge', '<<'), arg))
+ value.insert(merge_pos, (ScalarNode('tag:yaml.org,2002:merge', '<<'), arg))
return node
- def represent_omap(self, tag, omap, flow_style=None):
- # type: (Any, Any, Any) -> Any
- value = [] # type: List[Any]
+ def represent_omap(self, tag: Any, omap: Any, flow_style: Any = None) -> SequenceNode:
+ value: List[Any] = []
try:
flow_style = omap.fa.flow_style(flow_style)
except AttributeError:
@@ -1063,7 +877,11 @@ class RoundTripRepresenter(SafeRepresenter):
best_style = True
try:
comment = getattr(omap, comment_attrib)
- node.comment = comment.comment
+ if node.comment is None:
+ node.comment = comment.comment
+ else:
+ # as we are potentially going to extend this, make a new list
+ node.comment = comment.comment[:]
if node.comment and node.comment[1]:
for ct in node.comment[1]:
ct.reset()
@@ -1106,12 +924,11 @@ class RoundTripRepresenter(SafeRepresenter):
node.flow_style = best_style
return node
- def represent_set(self, setting):
- # type: (Any) -> Any
+ def represent_set(self, setting: Any) -> MappingNode:
flow_style = False
- tag = u'tag:yaml.org,2002:set'
+ tag = 'tag:yaml.org,2002:set'
# return self.represent_mapping(tag, value)
- value = [] # type: List[Any]
+ value: List[Any] = []
flow_style = setting.fa.flow_style(flow_style)
try:
anchor = setting.yaml_anchor()
@@ -1124,7 +941,11 @@ class RoundTripRepresenter(SafeRepresenter):
# no sorting! !!
try:
comment = getattr(setting, comment_attrib)
- node.comment = comment.comment
+ if node.comment is None:
+ node.comment = comment.comment
+ else:
+ # as we are potentially going to extend this, make a new list
+ node.comment = comment.comment[:]
if node.comment and node.comment[1]:
for ct in node.comment[1]:
ct.reset()
@@ -1155,8 +976,7 @@ class RoundTripRepresenter(SafeRepresenter):
best_style = best_style
return node
- def represent_dict(self, data):
- # type: (Any) -> Any
+ def represent_dict(self, data: Any) -> MappingNode:
"""write out tag if saved on loading"""
try:
t = data.tag.value
@@ -1168,11 +988,10 @@ class RoundTripRepresenter(SafeRepresenter):
else:
tag = t
else:
- tag = u'tag:yaml.org,2002:map'
+ tag = 'tag:yaml.org,2002:map'
return self.represent_mapping(tag, data)
- def represent_list(self, data):
- # type: (Any) -> Any
+ def represent_list(self, data: Any) -> SequenceNode:
try:
t = data.tag.value
except AttributeError:
@@ -1183,11 +1002,10 @@ class RoundTripRepresenter(SafeRepresenter):
else:
tag = t
else:
- tag = u'tag:yaml.org,2002:seq'
+ tag = 'tag:yaml.org,2002:seq'
return self.represent_sequence(tag, data)
- def represent_datetime(self, data):
- # type: (Any) -> Any
+ def represent_datetime(self, data: Any) -> ScalarNode:
inter = 'T' if data._yaml['t'] else ' '
_yaml = data._yaml
if _yaml['delta']:
@@ -1197,10 +1015,9 @@ class RoundTripRepresenter(SafeRepresenter):
value = data.isoformat(inter)
if _yaml['tz']:
value += _yaml['tz']
- return self.represent_scalar(u'tag:yaml.org,2002:timestamp', to_unicode(value))
+ return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
- def represent_tagged_scalar(self, data):
- # type: (Any) -> Any
+ def represent_tagged_scalar(self, data: Any) -> ScalarNode:
try:
tag = data.tag.value
except AttributeError:
@@ -1211,14 +1028,26 @@ class RoundTripRepresenter(SafeRepresenter):
anchor = None
return self.represent_scalar(tag, data.value, style=data.style, anchor=anchor)
- def represent_scalar_bool(self, data):
- # type: (Any) -> Any
+ def represent_scalar_bool(self, data: Any) -> ScalarNode:
try:
anchor = data.yaml_anchor()
except AttributeError:
anchor = None
return SafeRepresenter.represent_bool(self, data, anchor=anchor)
+ def represent_yaml_object(
+ self, tag: Any, data: Any, cls: Any, flow_style: Optional[Any] = None
+ ) -> MappingNode:
+ if hasattr(data, '__getstate__'):
+ state = data.__getstate__()
+ else:
+ state = data.__dict__.copy()
+ anchor = state.pop(Anchor.attrib, None)
+ res = self.represent_mapping(tag, state, flow_style=flow_style)
+ if anchor is not None:
+ res.anchor = anchor
+ return res
+
RoundTripRepresenter.add_representer(type(None), RoundTripRepresenter.represent_none)
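
The integer and float representers above now build their output with f-strings that take the field width from the stored _width attribute, and represent_octal_int picks its prefix from the YAML version the serializer is writing ('0o' for 1.2, plain '0' for 1.1). A standalone sketch of the nested format-spec trick with plain ints and hand-picked widths (no ruamel.yaml types involved):

# the width comes from a variable, mirroring f'{data:0{data._width}x}' above
value, width = 255, 4
print(f'{value:0{width}x}')    # 00ff   (zero-padded, lower-case hex)
print(f'{value:0{width}X}')    # 00FF
print(f'{value:0{width}o}')    # 0377
print(f'{value:08b}')          # 11111111


# the octal prefix depends on the YAML version being emitted
def octal_text(value: int, width: int, yaml_version=(1, 2)) -> str:
    prefix = '0' if yaml_version == (1, 1) else '0o'
    return prefix + f'{value:0{width}o}'


print(octal_text(255, 4))           # 0o0377
print(octal_text(255, 4, (1, 1)))   # 00377
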
diff --git a/resolver.py b/resolver.py
index 6379943..e7ed6d9 100644
--- a/resolver.py
+++ b/resolver.py
@@ -1,14 +1,11 @@
# coding: utf-8
-from __future__ import absolute_import
-
import re
-if False: # MYPY
- from typing import Any, Dict, List, Union, Text, Optional # NOQA
- from ruamel.yaml.compat import VersionType # NOQA
+from typing import Any, Dict, List, Union, Text, Optional # NOQA
+from ruamel.yaml.compat import VersionType # NOQA
-from ruamel.yaml.compat import string_types, _DEFAULT_YAML_VERSION # NOQA
+from ruamel.yaml.compat import _DEFAULT_YAML_VERSION # NOQA
from ruamel.yaml.error import * # NOQA
from ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode # NOQA
from ruamel.yaml.util import RegExp # NOQA
@@ -24,77 +21,77 @@ __all__ = ['BaseResolver', 'Resolver', 'VersionedResolver']
# - a list of first characters to match
implicit_resolvers = [
([(1, 2)],
- u'tag:yaml.org,2002:bool',
- RegExp(u'''^(?:true|True|TRUE|false|False|FALSE)$''', re.X),
- list(u'tTfF')),
+ 'tag:yaml.org,2002:bool',
+ RegExp('''^(?:true|True|TRUE|false|False|FALSE)$''', re.X),
+ list('tTfF')),
([(1, 1)],
- u'tag:yaml.org,2002:bool',
- RegExp(u'''^(?:y|Y|yes|Yes|YES|n|N|no|No|NO
+ 'tag:yaml.org,2002:bool',
+ RegExp('''^(?:y|Y|yes|Yes|YES|n|N|no|No|NO
|true|True|TRUE|false|False|FALSE
|on|On|ON|off|Off|OFF)$''', re.X),
- list(u'yYnNtTfFoO')),
+ list('yYnNtTfFoO')),
([(1, 2)],
- u'tag:yaml.org,2002:float',
- RegExp(u'''^(?:
+ 'tag:yaml.org,2002:float',
+ RegExp('''^(?:
[-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
|[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
|[-+]?\\.[0-9_]+(?:[eE][-+][0-9]+)?
|[-+]?\\.(?:inf|Inf|INF)
|\\.(?:nan|NaN|NAN))$''', re.X),
- list(u'-+0123456789.')),
+ list('-+0123456789.')),
([(1, 1)],
- u'tag:yaml.org,2002:float',
- RegExp(u'''^(?:
+ 'tag:yaml.org,2002:float',
+ RegExp('''^(?:
[-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
|[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
|\\.[0-9_]+(?:[eE][-+][0-9]+)?
|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]* # sexagesimal float
|[-+]?\\.(?:inf|Inf|INF)
|\\.(?:nan|NaN|NAN))$''', re.X),
- list(u'-+0123456789.')),
+ list('-+0123456789.')),
([(1, 2)],
- u'tag:yaml.org,2002:int',
- RegExp(u'''^(?:[-+]?0b[0-1_]+
+ 'tag:yaml.org,2002:int',
+ RegExp('''^(?:[-+]?0b[0-1_]+
|[-+]?0o?[0-7_]+
|[-+]?[0-9_]+
|[-+]?0x[0-9a-fA-F_]+)$''', re.X),
- list(u'-+0123456789')),
+ list('-+0123456789')),
([(1, 1)],
- u'tag:yaml.org,2002:int',
- RegExp(u'''^(?:[-+]?0b[0-1_]+
+ 'tag:yaml.org,2002:int',
+ RegExp('''^(?:[-+]?0b[0-1_]+
|[-+]?0?[0-7_]+
|[-+]?(?:0|[1-9][0-9_]*)
|[-+]?0x[0-9a-fA-F_]+
|[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X), # sexagesimal int
- list(u'-+0123456789')),
+ list('-+0123456789')),
([(1, 2), (1, 1)],
- u'tag:yaml.org,2002:merge',
- RegExp(u'^(?:<<)$'),
- [u'<']),
+ 'tag:yaml.org,2002:merge',
+ RegExp('^(?:<<)$'),
+ ['<']),
([(1, 2), (1, 1)],
- u'tag:yaml.org,2002:null',
- RegExp(u'''^(?: ~
+ 'tag:yaml.org,2002:null',
+ RegExp('''^(?: ~
|null|Null|NULL
| )$''', re.X),
- [u'~', u'n', u'N', u'']),
+ ['~', 'n', 'N', '']),
([(1, 2), (1, 1)],
- u'tag:yaml.org,2002:timestamp',
- RegExp(u'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
+ 'tag:yaml.org,2002:timestamp',
+ RegExp('''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
|[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
(?:[Tt]|[ \\t]+)[0-9][0-9]?
:[0-9][0-9] :[0-9][0-9] (?:\\.[0-9]*)?
(?:[ \\t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
- list(u'0123456789')),
+ list('0123456789')),
([(1, 2), (1, 1)],
- u'tag:yaml.org,2002:value',
- RegExp(u'^(?:=)$'),
- [u'=']),
+ 'tag:yaml.org,2002:value',
+ RegExp('^(?:=)$'),
+ ['=']),
# The following resolver is only for documentation purposes. It cannot work
# because plain scalars cannot start with '!', '&', or '*'.
([(1, 2), (1, 1)],
- u'tag:yaml.org,2002:yaml',
- RegExp(u'^(?:!|&|\\*)$'),
- list(u'!&*')),
+ 'tag:yaml.org,2002:yaml',
+ RegExp('^(?:!|&|\\*)$'),
+ list('!&*')),
]
# fmt: on
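
Each entry in implicit_resolvers pairs the YAML versions it applies to with a tag, a regular expression and the characters a match can start with; the 1.1-only forms (y/yes/on style booleans, sexagesimal numbers, leading-zero octals) are therefore kept out of 1.2 documents. A quick comparison of the two bool patterns, copied from the table (plain re.compile stands in for the RegExp helper):

import re

# YAML 1.2 booleans (first entry above)
BOOL_1_2 = re.compile(r'^(?:true|True|TRUE|false|False|FALSE)$', re.X)
# YAML 1.1 booleans (second entry): also y/n, yes/no, on/off
BOOL_1_1 = re.compile(r'''^(?:y|Y|yes|Yes|YES|n|N|no|No|NO
                       |true|True|TRUE|false|False|FALSE
                       |on|On|ON|off|Off|OFF)$''', re.X)

for scalar in ('true', 'yes', 'on', 'n'):
    print(scalar, bool(BOOL_1_2.match(scalar)), bool(BOOL_1_1.match(scalar)))
# true True True
# yes False True    (plain string under 1.2, boolean under 1.1)
# on False True
# n False True
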
@@ -103,27 +100,25 @@ class ResolverError(YAMLError):
pass
-class BaseResolver(object):
+class BaseResolver:
- DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str'
- DEFAULT_SEQUENCE_TAG = u'tag:yaml.org,2002:seq'
- DEFAULT_MAPPING_TAG = u'tag:yaml.org,2002:map'
+ DEFAULT_SCALAR_TAG = 'tag:yaml.org,2002:str'
+ DEFAULT_SEQUENCE_TAG = 'tag:yaml.org,2002:seq'
+ DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map'
- yaml_implicit_resolvers = {} # type: Dict[Any, Any]
- yaml_path_resolvers = {} # type: Dict[Any, Any]
+ yaml_implicit_resolvers: Dict[Any, Any] = {}
+ yaml_path_resolvers: Dict[Any, Any] = {}
- def __init__(self, loadumper=None):
- # type: (Any, Any) -> None
+ def __init__(self: Any, loadumper: Any = None) -> None:
self.loadumper = loadumper
if self.loadumper is not None and getattr(self.loadumper, '_resolver', None) is None:
self.loadumper._resolver = self.loadumper
- self._loader_version = None # type: Any
- self.resolver_exact_paths = [] # type: List[Any]
- self.resolver_prefix_paths = [] # type: List[Any]
+ self._loader_version: Any = None
+ self.resolver_exact_paths: List[Any] = []
+ self.resolver_prefix_paths: List[Any] = []
@property
- def parser(self):
- # type: () -> Any
+ def parser(self) -> Any:
if self.loadumper is not None:
if hasattr(self.loadumper, 'typ'):
return self.loadumper.parser
@@ -131,8 +126,7 @@ class BaseResolver(object):
return None
@classmethod
- def add_implicit_resolver_base(cls, tag, regexp, first):
- # type: (Any, Any, Any) -> None
+ def add_implicit_resolver_base(cls, tag: Any, regexp: Any, first: Any) -> None:
if 'yaml_implicit_resolvers' not in cls.__dict__:
# deepcopy doesn't work here
cls.yaml_implicit_resolvers = dict(
@@ -144,8 +138,7 @@ class BaseResolver(object):
cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
@classmethod
- def add_implicit_resolver(cls, tag, regexp, first):
- # type: (Any, Any, Any) -> None
+ def add_implicit_resolver(cls, tag: Any, regexp: Any, first: Any) -> None:
if 'yaml_implicit_resolvers' not in cls.__dict__:
# deepcopy doesn't work here
cls.yaml_implicit_resolvers = dict(
@@ -161,8 +154,7 @@ class BaseResolver(object):
# def add_implicit_resolver(cls, tag, regexp, first):
@classmethod
- def add_path_resolver(cls, tag, path, kind=None):
- # type: (Any, Any, Any) -> None
+ def add_path_resolver(cls, tag: Any, path: Any, kind: Any = None) -> None:
# Note: `add_path_resolver` is experimental. The API could be changed.
# `new_path` is a pattern that is matched against the path from the
# root to the node that is being considered. `node_path` elements are
@@ -177,7 +169,7 @@ class BaseResolver(object):
# against a sequence value with the index equal to `index_check`.
if 'yaml_path_resolvers' not in cls.__dict__:
cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
- new_path = [] # type: List[Any]
+ new_path: List[Any] = []
for element in path:
if isinstance(element, (list, tuple)):
if len(element) == 2:
@@ -186,7 +178,7 @@ class BaseResolver(object):
node_check = element[0]
index_check = True
else:
- raise ResolverError('Invalid path element: %s' % (element,))
+ raise ResolverError(f'Invalid path element: {element!s}')
else:
node_check = None
index_check = element
@@ -198,12 +190,12 @@ class BaseResolver(object):
node_check = MappingNode
elif (
node_check not in [ScalarNode, SequenceNode, MappingNode]
- and not isinstance(node_check, string_types)
+ and not isinstance(node_check, str)
and node_check is not None
):
- raise ResolverError('Invalid node checker: %s' % (node_check,))
- if not isinstance(index_check, (string_types, int)) and index_check is not None:
- raise ResolverError('Invalid index checker: %s' % (index_check,))
+ raise ResolverError(f'Invalid node checker: {node_check!s}')
+ if not isinstance(index_check, (str, int)) and index_check is not None:
+ raise ResolverError(f'Invalid index checker: {index_check!s}')
new_path.append((node_check, index_check))
if kind is str:
kind = ScalarNode
@@ -212,11 +204,10 @@ class BaseResolver(object):
elif kind is dict:
kind = MappingNode
elif kind not in [ScalarNode, SequenceNode, MappingNode] and kind is not None:
- raise ResolverError('Invalid node kind: %s' % (kind,))
+ raise ResolverError(f'Invalid node kind: {kind!s}')
cls.yaml_path_resolvers[tuple(new_path), kind] = tag
- def descend_resolver(self, current_node, current_index):
- # type: (Any, Any) -> None
+ def descend_resolver(self, current_node: Any, current_index: Any) -> None:
if not self.yaml_path_resolvers:
return
exact_paths = {}
@@ -238,17 +229,17 @@ class BaseResolver(object):
self.resolver_exact_paths.append(exact_paths)
self.resolver_prefix_paths.append(prefix_paths)
- def ascend_resolver(self):
- # type: () -> None
+ def ascend_resolver(self) -> None:
if not self.yaml_path_resolvers:
return
self.resolver_exact_paths.pop()
self.resolver_prefix_paths.pop()
- def check_resolver_prefix(self, depth, path, kind, current_node, current_index):
- # type: (int, Text, Any, Any, Any) -> bool
+ def check_resolver_prefix(
+ self, depth: int, path: Any, kind: Any, current_node: Any, current_index: Any
+ ) -> bool:
node_check, index_check = path[depth - 1]
- if isinstance(node_check, string_types):
+ if isinstance(node_check, str):
if current_node.tag != node_check:
return False
elif node_check is not None:
@@ -258,7 +249,7 @@ class BaseResolver(object):
return False
if (index_check is False or index_check is None) and current_index is None:
return False
- if isinstance(index_check, string_types):
+ if isinstance(index_check, str):
if not (
isinstance(current_index, ScalarNode) and index_check == current_index.value
):
@@ -268,8 +259,7 @@ class BaseResolver(object):
return False
return True
- def resolve(self, kind, value, implicit):
- # type: (Any, Any, Any) -> Any
+ def resolve(self, kind: Any, value: Any, implicit: Any) -> Any:
if kind is ScalarNode and implicit[0]:
if value == "":
resolvers = self.yaml_implicit_resolvers.get("", [])
@@ -294,8 +284,7 @@ class BaseResolver(object):
return self.DEFAULT_MAPPING_TAG
@property
- def processing_version(self):
- # type: () -> Any
+ def processing_version(self) -> Any:
return None
@@ -316,46 +305,47 @@ class VersionedResolver(BaseResolver):
and Yes/No/On/Off booleans.
"""
- def __init__(self, version=None, loader=None, loadumper=None):
- # type: (Optional[VersionType], Any, Any) -> None
+ def __init__(
+ self, version: Optional[VersionType] = None, loader: Any = None, loadumper: Any = None
+ ) -> None:
if loader is None and loadumper is not None:
loader = loadumper
BaseResolver.__init__(self, loader)
self._loader_version = self.get_loader_version(version)
- self._version_implicit_resolver = {} # type: Dict[Any, Any]
+ self._version_implicit_resolver: Dict[Any, Any] = {}
- def add_version_implicit_resolver(self, version, tag, regexp, first):
- # type: (VersionType, Any, Any, Any) -> None
+ def add_version_implicit_resolver(
+ self, version: VersionType, tag: Any, regexp: Any, first: Any
+ ) -> None:
if first is None:
first = [None]
impl_resolver = self._version_implicit_resolver.setdefault(version, {})
for ch in first:
impl_resolver.setdefault(ch, []).append((tag, regexp))
- def get_loader_version(self, version):
- # type: (Optional[VersionType]) -> Any
+ def get_loader_version(self, version: Optional[VersionType]) -> Any:
if version is None or isinstance(version, tuple):
return version
if isinstance(version, list):
return tuple(version)
# assume string
- return tuple(map(int, version.split(u'.')))
+ return tuple(map(int, version.split('.')))
@property
- def versioned_resolver(self):
- # type: () -> Any
+ def versioned_resolver(self) -> Any:
"""
select the resolver based on the version we are parsing
"""
version = self.processing_version
+ if isinstance(version, str):
+ version = tuple(map(int, version.split('.')))
if version not in self._version_implicit_resolver:
for x in implicit_resolvers:
if version in x[0]:
self.add_version_implicit_resolver(version, x[1], x[2], x[3])
return self._version_implicit_resolver[version]
- def resolve(self, kind, value, implicit):
- # type: (Any, Any, Any) -> Any
+ def resolve(self, kind: Any, value: Any, implicit: Any) -> Any:
if kind is ScalarNode and implicit[0]:
if value == "":
resolvers = self.versioned_resolver.get("", [])
@@ -380,8 +370,7 @@ class VersionedResolver(BaseResolver):
return self.DEFAULT_MAPPING_TAG
@property
- def processing_version(self):
- # type: () -> Any
+ def processing_version(self) -> Any:
try:
version = self.loadumper._scanner.yaml_version
except AttributeError:
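
get_loader_version accepts None, a tuple, a list or a dotted string, and versioned_resolver now normalizes a string processing version to a tuple before indexing the per-version resolver table, so version='1.1' and version=(1, 1) select the same implicit resolvers. The normalization on its own, as a sketch independent of the resolver classes:

from typing import Any, Optional, Tuple


def normalize_version(version: Optional[Any]) -> Optional[Tuple[int, ...]]:
    """Mirror get_loader_version(): None and tuples pass through,
    lists become tuples, strings like '1.1' are split on dots."""
    if version is None or isinstance(version, tuple):
        return version
    if isinstance(version, list):
        return tuple(version)
    return tuple(map(int, version.split('.')))


assert normalize_version(None) is None
assert normalize_version((1, 2)) == (1, 2)
assert normalize_version([1, 1]) == (1, 1)
assert normalize_version('1.1') == (1, 1)
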
diff --git a/scalarbool.py b/scalarbool.py
index fc8f8c2..083d3cb 100644
--- a/scalarbool.py
+++ b/scalarbool.py
@@ -1,7 +1,5 @@
# coding: utf-8
-from __future__ import print_function, absolute_import, division, unicode_literals
-
"""
You cannot subclass bool, and this is necessary for round-tripping anchored
bool values (and also if you want to preserve the original way of writing)
@@ -13,39 +11,32 @@ You can use these in an if statement, but not when testing equivalence
from ruamel.yaml.anchor import Anchor
-if False: # MYPY
- from typing import Text, Any, Dict, List # NOQA
+from typing import Text, Any, Dict, List # NOQA
__all__ = ['ScalarBoolean']
-# no need for no_limit_int -> int
-
class ScalarBoolean(int):
- def __new__(cls, *args, **kw):
- # type: (Any, Any, Any) -> Any
- anchor = kw.pop('anchor', None) # type: ignore
- b = int.__new__(cls, *args, **kw) # type: ignore
+ def __new__(cls: Any, *args: Any, **kw: Any) -> Any:
+ anchor = kw.pop('anchor', None)
+ b = int.__new__(cls, *args, **kw)
if anchor is not None:
b.yaml_set_anchor(anchor, always_dump=True)
return b
@property
- def anchor(self):
- # type: () -> Any
+ def anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
setattr(self, Anchor.attrib, Anchor())
return getattr(self, Anchor.attrib)
- def yaml_anchor(self, any=False):
- # type: (bool) -> Any
+ def yaml_anchor(self, any: bool = False) -> Any:
if not hasattr(self, Anchor.attrib):
return None
if any or self.anchor.always_dump:
return self.anchor
return None
- def yaml_set_anchor(self, value, always_dump=False):
- # type: (Any, bool) -> None
+ def yaml_set_anchor(self, value: Any, always_dump: bool = False) -> None:
self.anchor.value = value
self.anchor.always_dump = always_dump
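
ScalarBoolean exists because bool cannot be subclassed: it derives from int so a round-tripped boolean can carry an anchor, and, as the module docstring warns, it behaves as expected in an if statement but is not the bool singleton. A minimal construction sketch (how it is rendered back as true/false is the representer's job, not shown here):

from ruamel.yaml.scalarbool import ScalarBoolean

b = ScalarBoolean(True, anchor='flag')
print(bool(b), b == 1, isinstance(b, bool))   # True True False
print(b.yaml_anchor().value)                  # flag
print(b.anchor.always_dump)                   # True, set by __new__ when an anchor is given
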
diff --git a/scalarfloat.py b/scalarfloat.py
index 0404df3..d3fe12e 100644
--- a/scalarfloat.py
+++ b/scalarfloat.py
@@ -1,30 +1,25 @@
# coding: utf-8
-from __future__ import print_function, absolute_import, division, unicode_literals
-
import sys
-from .compat import no_limit_int # NOQA
from ruamel.yaml.anchor import Anchor
-if False: # MYPY
- from typing import Text, Any, Dict, List # NOQA
+from typing import Text, Any, Dict, List # NOQA
__all__ = ['ScalarFloat', 'ExponentialFloat', 'ExponentialCapsFloat']
class ScalarFloat(float):
- def __new__(cls, *args, **kw):
- # type: (Any, Any, Any) -> Any
- width = kw.pop('width', None) # type: ignore
- prec = kw.pop('prec', None) # type: ignore
- m_sign = kw.pop('m_sign', None) # type: ignore
- m_lead0 = kw.pop('m_lead0', 0) # type: ignore
- exp = kw.pop('exp', None) # type: ignore
- e_width = kw.pop('e_width', None) # type: ignore
- e_sign = kw.pop('e_sign', None) # type: ignore
- underscore = kw.pop('underscore', None) # type: ignore
- anchor = kw.pop('anchor', None) # type: ignore
- v = float.__new__(cls, *args, **kw) # type: ignore
+ def __new__(cls: Any, *args: Any, **kw: Any) -> Any:
+ width = kw.pop('width', None)
+ prec = kw.pop('prec', None)
+ m_sign = kw.pop('m_sign', None)
+ m_lead0 = kw.pop('m_lead0', 0)
+ exp = kw.pop('exp', None)
+ e_width = kw.pop('e_width', None)
+ e_sign = kw.pop('e_sign', None)
+ underscore = kw.pop('underscore', None)
+ anchor = kw.pop('anchor', None)
+ v = float.__new__(cls, *args, **kw)
v._width = width
v._prec = prec
v._m_sign = m_sign
@@ -37,24 +32,21 @@ class ScalarFloat(float):
v.yaml_set_anchor(anchor, always_dump=True)
return v
- def __iadd__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __iadd__(self, a: Any) -> Any: # type: ignore
return float(self) + a
x = type(self)(self + a)
x._width = self._width
x._underscore = self._underscore[:] if self._underscore is not None else None # NOQA
return x
- def __ifloordiv__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __ifloordiv__(self, a: Any) -> Any: # type: ignore
return float(self) // a
x = type(self)(self // a)
x._width = self._width
x._underscore = self._underscore[:] if self._underscore is not None else None # NOQA
return x
- def __imul__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __imul__(self, a: Any) -> Any: # type: ignore
return float(self) * a
x = type(self)(self * a)
x._width = self._width
@@ -62,16 +54,14 @@ class ScalarFloat(float):
x._prec = self._prec # check for others
return x
- def __ipow__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __ipow__(self, a: Any) -> Any: # type: ignore
return float(self) ** a
x = type(self)(self ** a)
x._width = self._width
x._underscore = self._underscore[:] if self._underscore is not None else None # NOQA
return x
- def __isub__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __isub__(self, a: Any) -> Any: # type: ignore
return float(self) - a
x = type(self)(self - a)
x._width = self._width
@@ -79,49 +69,35 @@ class ScalarFloat(float):
return x
@property
- def anchor(self):
- # type: () -> Any
+ def anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
setattr(self, Anchor.attrib, Anchor())
return getattr(self, Anchor.attrib)
- def yaml_anchor(self, any=False):
- # type: (bool) -> Any
+ def yaml_anchor(self, any: bool = False) -> Any:
if not hasattr(self, Anchor.attrib):
return None
if any or self.anchor.always_dump:
return self.anchor
return None
- def yaml_set_anchor(self, value, always_dump=False):
- # type: (Any, bool) -> None
+ def yaml_set_anchor(self, value: Any, always_dump: bool = False) -> None:
self.anchor.value = value
self.anchor.always_dump = always_dump
- def dump(self, out=sys.stdout):
- # type: (Any) -> Any
+ def dump(self, out: Any = sys.stdout) -> None:
out.write(
- 'ScalarFloat({}| w:{}, p:{}, s:{}, lz:{}, _:{}|{}, w:{}, s:{})\n'.format(
- self,
- self._width, # type: ignore
- self._prec, # type: ignore
- self._m_sign, # type: ignore
- self._m_lead0, # type: ignore
- self._underscore, # type: ignore
- self._exp, # type: ignore
- self._e_width, # type: ignore
- self._e_sign, # type: ignore
- )
+ f'ScalarFloat({self}| w:{self._width}, p:{self._prec}, ' # type: ignore
+ f's:{self._m_sign}, lz:{self._m_lead0}, _:{self._underscore}|{self._exp}'
+ f', w:{self._e_width}, s:{self._e_sign})\n'
)
class ExponentialFloat(ScalarFloat):
- def __new__(cls, value, width=None, underscore=None):
- # type: (Any, Any, Any) -> Any
+ def __new__(cls, value: Any, width: Any = None, underscore: Any = None) -> Any:
return ScalarFloat.__new__(cls, value, width=width, underscore=underscore)
class ExponentialCapsFloat(ScalarFloat):
- def __new__(cls, value, width=None, underscore=None):
- # type: (Any, Any, Any) -> Any
+ def __new__(cls, value: Any, width: Any = None, underscore: Any = None) -> Any:
return ScalarFloat.__new__(cls, value, width=width, underscore=underscore)
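
ScalarFloat is a float subclass whose __new__ strips the formatting keywords (width, prec, mantissa sign, leading zeros, exponent details, underscores, anchor) and stashes them as private attributes for the representer to rebuild the original layout; dump() is a small debug helper that prints them. A construction sketch with hand-picked metadata (on load the scanner fills these fields in, so the values here are only illustrative):

import io
from ruamel.yaml.scalarfloat import ScalarFloat

f = ScalarFloat(3.14, width=5, prec=1)   # sample metadata, normally set by the scanner
print(float(f), f._width, f._prec)       # 3.14 5 1

buf = io.StringIO()
f.dump(buf)                              # the debug helper rewritten above
print(buf.getvalue(), end='')
# ScalarFloat(3.14| w:5, p:1, s:None, lz:0, _:None|None, w:None, s:None)
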
diff --git a/scalarint.py b/scalarint.py
index 581a8df..3a2603d 100644
--- a/scalarint.py
+++ b/scalarint.py
@@ -1,31 +1,25 @@
# coding: utf-8
-from __future__ import print_function, absolute_import, division, unicode_literals
-
-from .compat import no_limit_int # NOQA
from ruamel.yaml.anchor import Anchor
-if False: # MYPY
- from typing import Text, Any, Dict, List # NOQA
+from typing import Text, Any, Dict, List # NOQA
__all__ = ['ScalarInt', 'BinaryInt', 'OctalInt', 'HexInt', 'HexCapsInt', 'DecimalInt']
-class ScalarInt(no_limit_int):
- def __new__(cls, *args, **kw):
- # type: (Any, Any, Any) -> Any
- width = kw.pop('width', None) # type: ignore
- underscore = kw.pop('underscore', None) # type: ignore
- anchor = kw.pop('anchor', None) # type: ignore
- v = no_limit_int.__new__(cls, *args, **kw) # type: ignore
+class ScalarInt(int):
+ def __new__(cls: Any, *args: Any, **kw: Any) -> Any:
+ width = kw.pop('width', None)
+ underscore = kw.pop('underscore', None)
+ anchor = kw.pop('anchor', None)
+ v = int.__new__(cls, *args, **kw)
v._width = width
v._underscore = underscore
if anchor is not None:
v.yaml_set_anchor(anchor, always_dump=True)
return v
- def __iadd__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __iadd__(self, a: Any) -> Any: # type: ignore
x = type(self)(self + a)
x._width = self._width # type: ignore
x._underscore = ( # type: ignore
@@ -33,8 +27,7 @@ class ScalarInt(no_limit_int):
) # NOQA
return x
- def __ifloordiv__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __ifloordiv__(self, a: Any) -> Any: # type: ignore
x = type(self)(self // a)
x._width = self._width # type: ignore
x._underscore = ( # type: ignore
@@ -42,8 +35,7 @@ class ScalarInt(no_limit_int):
) # NOQA
return x
- def __imul__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __imul__(self, a: Any) -> Any: # type: ignore
x = type(self)(self * a)
x._width = self._width # type: ignore
x._underscore = ( # type: ignore
@@ -51,8 +43,7 @@ class ScalarInt(no_limit_int):
) # NOQA
return x
- def __ipow__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __ipow__(self, a: Any) -> Any: # type: ignore
x = type(self)(self ** a)
x._width = self._width # type: ignore
x._underscore = ( # type: ignore
@@ -60,8 +51,7 @@ class ScalarInt(no_limit_int):
) # NOQA
return x
- def __isub__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __isub__(self, a: Any) -> Any: # type: ignore
x = type(self)(self - a)
x._width = self._width # type: ignore
x._underscore = ( # type: ignore
@@ -70,35 +60,34 @@ class ScalarInt(no_limit_int):
return x
@property
- def anchor(self):
- # type: () -> Any
+ def anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
setattr(self, Anchor.attrib, Anchor())
return getattr(self, Anchor.attrib)
- def yaml_anchor(self, any=False):
- # type: (bool) -> Any
+ def yaml_anchor(self, any: bool = False) -> Any:
if not hasattr(self, Anchor.attrib):
return None
if any or self.anchor.always_dump:
return self.anchor
return None
- def yaml_set_anchor(self, value, always_dump=False):
- # type: (Any, bool) -> None
+ def yaml_set_anchor(self, value: Any, always_dump: bool = False) -> None:
self.anchor.value = value
self.anchor.always_dump = always_dump
class BinaryInt(ScalarInt):
- def __new__(cls, value, width=None, underscore=None, anchor=None):
- # type: (Any, Any, Any, Any) -> Any
+ def __new__(
+ cls, value: Any, width: Any = None, underscore: Any = None, anchor: Any = None
+ ) -> Any:
return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
class OctalInt(ScalarInt):
- def __new__(cls, value, width=None, underscore=None, anchor=None):
- # type: (Any, Any, Any, Any) -> Any
+ def __new__(
+ cls, value: Any, width: Any = None, underscore: Any = None, anchor: Any = None
+ ) -> Any:
return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
@@ -109,22 +98,25 @@ class OctalInt(ScalarInt):
class HexInt(ScalarInt):
"""uses lower case (a-f)"""
- def __new__(cls, value, width=None, underscore=None, anchor=None):
- # type: (Any, Any, Any, Any) -> Any
+ def __new__(
+ cls, value: Any, width: Any = None, underscore: Any = None, anchor: Any = None
+ ) -> Any:
return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
class HexCapsInt(ScalarInt):
"""uses upper case (A-F)"""
- def __new__(cls, value, width=None, underscore=None, anchor=None):
- # type: (Any, Any, Any, Any) -> Any
+ def __new__(
+ cls, value: Any, width: Any = None, underscore: Any = None, anchor: Any = None
+ ) -> Any:
return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
class DecimalInt(ScalarInt):
"""needed if anchor"""
- def __new__(cls, value, width=None, underscore=None, anchor=None):
- # type: (Any, Any, Any, Any) -> Any
+ def __new__(
+ cls, value: Any, width: Any = None, underscore: Any = None, anchor: Any = None
+ ) -> Any:
return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
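
With the Python 2 long type gone, ScalarInt derives from int directly; the augmented-assignment overrides return a fresh instance of the same subclass and copy the stored width and underscore information, which is what lets a literal like 0x00ff keep its padding through arithmetic. A small sketch with a hand-built HexInt (the scanner normally creates these on load):

from ruamel.yaml.scalarint import HexInt

x = HexInt(255, width=4)                    # as if loaded from 0x00ff
x += 1                                      # __iadd__ returns a new HexInt
print(int(x), type(x).__name__, x._width)   # 256 HexInt 4

# represent_hex_int then re-emits it zero-padded to the stored width
print('0x' + f'{x:0{x._width}x}')           # 0x0100
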
diff --git a/scalarstring.py b/scalarstring.py
index 8427323..30f4fde 100644
--- a/scalarstring.py
+++ b/scalarstring.py
@@ -1,12 +1,9 @@
# coding: utf-8
-from __future__ import print_function, absolute_import, division, unicode_literals
-
-from ruamel.yaml.compat import text_type
from ruamel.yaml.anchor import Anchor
-if False: # MYPY
- from typing import Text, Any, Dict, List # NOQA
+from typing import Text, Any, Dict, List # NOQA
+from ruamel.yaml.compat import SupportsIndex
__all__ = [
'ScalarString',
@@ -21,38 +18,33 @@ __all__ = [
]
-class ScalarString(text_type):
+class ScalarString(str):
__slots__ = Anchor.attrib
- def __new__(cls, *args, **kw):
- # type: (Any, Any) -> Any
- anchor = kw.pop('anchor', None) # type: ignore
- ret_val = text_type.__new__(cls, *args, **kw) # type: ignore
+ def __new__(cls, *args: Any, **kw: Any) -> Any:
+ anchor = kw.pop('anchor', None)
+ ret_val = str.__new__(cls, *args, **kw)
if anchor is not None:
ret_val.yaml_set_anchor(anchor, always_dump=True)
return ret_val
- def replace(self, old, new, maxreplace=-1):
- # type: (Any, Any, int) -> Any
- return type(self)((text_type.replace(self, old, new, maxreplace)))
+ def replace(self, old: Any, new: Any, maxreplace: SupportsIndex = -1) -> Any:
+ return type(self)((str.replace(self, old, new, maxreplace)))
@property
- def anchor(self):
- # type: () -> Any
+ def anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
setattr(self, Anchor.attrib, Anchor())
return getattr(self, Anchor.attrib)
- def yaml_anchor(self, any=False):
- # type: (bool) -> Any
+ def yaml_anchor(self, any: bool = False) -> Any:
if not hasattr(self, Anchor.attrib):
return None
if any or self.anchor.always_dump:
return self.anchor
return None
- def yaml_set_anchor(self, value, always_dump=False):
- # type: (Any, bool) -> None
+ def yaml_set_anchor(self, value: Any, always_dump: bool = False) -> None:
self.anchor.value = value
self.anchor.always_dump = always_dump
@@ -62,8 +54,7 @@ class LiteralScalarString(ScalarString):
style = '|'
- def __new__(cls, value, anchor=None):
- # type: (Text, Any) -> Any
+ def __new__(cls, value: Text, anchor: Any = None) -> Any:
return ScalarString.__new__(cls, value, anchor=anchor)
@@ -75,8 +66,7 @@ class FoldedScalarString(ScalarString):
style = '>'
- def __new__(cls, value, anchor=None):
- # type: (Text, Any) -> Any
+ def __new__(cls, value: Text, anchor: Any = None) -> Any:
return ScalarString.__new__(cls, value, anchor=anchor)
@@ -85,8 +75,7 @@ class SingleQuotedScalarString(ScalarString):
style = "'"
- def __new__(cls, value, anchor=None):
- # type: (Text, Any) -> Any
+ def __new__(cls, value: Text, anchor: Any = None) -> Any:
return ScalarString.__new__(cls, value, anchor=anchor)
@@ -95,8 +84,7 @@ class DoubleQuotedScalarString(ScalarString):
style = '"'
- def __new__(cls, value, anchor=None):
- # type: (Text, Any) -> Any
+ def __new__(cls, value: Text, anchor: Any = None) -> Any:
return ScalarString.__new__(cls, value, anchor=anchor)
@@ -105,18 +93,15 @@ class PlainScalarString(ScalarString):
style = ''
- def __new__(cls, value, anchor=None):
- # type: (Text, Any) -> Any
+ def __new__(cls, value: Text, anchor: Any = None) -> Any:
return ScalarString.__new__(cls, value, anchor=anchor)
-def preserve_literal(s):
- # type: (Text) -> Text
+def preserve_literal(s: Text) -> Text:
return LiteralScalarString(s.replace('\r\n', '\n').replace('\r', '\n'))
-def walk_tree(base, map=None):
- # type: (Any, Any) -> None
+def walk_tree(base: Any, map: Any = None) -> None:
"""
the routine here walks over a simple yaml tree (recursing in
dict values and list items) and converts strings that
@@ -129,28 +114,27 @@ def walk_tree(base, map=None):
map[':'] = SingleQuotedScalarString
walk_tree(data, map=map)
"""
- from ruamel.yaml.compat import string_types
- from ruamel.yaml.compat import MutableMapping, MutableSequence # type: ignore
+ from collections.abc import MutableMapping, MutableSequence
if map is None:
map = {'\n': preserve_literal}
if isinstance(base, MutableMapping):
for k in base:
- v = base[k] # type: Text
- if isinstance(v, string_types):
+ v: Text = base[k]
+ if isinstance(v, str):
for ch in map:
if ch in v:
base[k] = map[ch](v)
break
else:
- walk_tree(v)
+ walk_tree(v, map=map)
elif isinstance(base, MutableSequence):
for idx, elem in enumerate(base):
- if isinstance(elem, string_types):
+ if isinstance(elem, str):
for ch in map:
- if ch in elem: # type: ignore
+ if ch in elem:
base[idx] = map[ch](elem)
break
else:
- walk_tree(elem)
+ walk_tree(elem, map=map)
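
walk_tree converts plain strings inside a nested structure to styled scalar-string subclasses based on a character-to-class map, and after this change it passes that map along when it recurses instead of silently falling back to the default. Usage in the spirit of the docstring; the expected round-trip output is sketched in the trailing comments:

import sys
from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import (
    walk_tree, preserve_literal, SingleQuotedScalarString,
)

data = {
    'text': 'first line\nsecond line',        # '\n' -> literal block scalar
    'nested': {'path': 'a:b:c'},              # ':'  -> single quoted (custom map)
}
walk_tree(data, map={'\n': preserve_literal, ':': SingleQuotedScalarString})
# with the map now handed down the recursion, nested['path'] is converted too
print(type(data['text']).__name__)            # LiteralScalarString
print(type(data['nested']['path']).__name__)  # SingleQuotedScalarString

YAML().dump(data, sys.stdout)
# text: |-
#   first line
#   second line
# nested:
#   path: 'a:b:c'
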
diff --git a/scanner.py b/scanner.py
index df85ae0..2bc199f 100644
--- a/scanner.py
+++ b/scanner.py
@@ -1,7 +1,5 @@
# coding: utf-8
-from __future__ import print_function, absolute_import, division, unicode_literals
-
# Scanner produces tokens of the following types:
# STREAM-START
# STREAM-END
@@ -30,13 +28,13 @@ from __future__ import print_function, absolute_import, division, unicode_litera
# Read comments in the Scanner code for more details.
#
-from ruamel.yaml.error import MarkedYAMLError
+import inspect
+from ruamel.yaml.error import MarkedYAMLError, CommentMark # NOQA
from ruamel.yaml.tokens import * # NOQA
-from ruamel.yaml.compat import utf8, unichr, PY3, check_anchorname_char, nprint # NOQA
+from ruamel.yaml.compat import check_anchorname_char, nprint, nprintf # NOQA
-if False: # MYPY
- from typing import Any, Dict, Optional, List, Union, Text # NOQA
- from ruamel.yaml.compat import VersionType # NOQA
+from typing import Any, Dict, Optional, List, Union, Text # NOQA
+from ruamel.yaml.compat import VersionType # NOQA
__all__ = ['Scanner', 'RoundTripScanner', 'ScannerError']
@@ -46,15 +44,21 @@ _THE_END_SPACE_TAB = ' \n\0\t\r\x85\u2028\u2029'
_SPACE_TAB = ' \t'
+def xprintf(*args: Any, **kw: Any) -> Any:
+ return nprintf(*args, **kw)
+ pass
+
+
class ScannerError(MarkedYAMLError):
pass
-class SimpleKey(object):
+class SimpleKey:
# See below simple keys treatment.
- def __init__(self, token_number, required, index, line, column, mark):
- # type: (Any, Any, int, int, int, Any) -> None
+ def __init__(
+ self, token_number: Any, required: Any, index: int, line: int, column: int, mark: Any
+ ) -> None:
self.token_number = token_number
self.required = required
self.index = index
@@ -63,9 +67,8 @@ class SimpleKey(object):
self.mark = mark
-class Scanner(object):
- def __init__(self, loader=None):
- # type: (Any) -> None
+class Scanner:
+ def __init__(self, loader: Any = None) -> None:
"""Initialize the scanner."""
# It is assumed that Scanner and Reader will have a common descendant.
# Reader does the dirty work of checking for BOM and converting the
@@ -81,24 +84,22 @@ class Scanner(object):
self.loader._scanner = self
self.reset_scanner()
self.first_time = False
- self.yaml_version = None # type: Any
+ self.yaml_version: Any = None
@property
- def flow_level(self):
- # type: () -> int
+ def flow_level(self) -> int:
return len(self.flow_context)
- def reset_scanner(self):
- # type: () -> None
+ def reset_scanner(self) -> None:
# Had we reached the end of the stream?
self.done = False
# flow_context is an expanding/shrinking list consisting of '{' and '['
# for each unclosed flow context. If empty list that means block context
- self.flow_context = [] # type: List[Text]
+ self.flow_context: List[Text] = []
# List of processed tokens that are not yet emitted.
- self.tokens = [] # type: List[Any]
+ self.tokens: List[Any] = []
# Add the STREAM-START token.
self.fetch_stream_start()
@@ -110,7 +111,7 @@ class Scanner(object):
self.indent = -1
# Past indentation levels.
- self.indents = [] # type: List[int]
+ self.indents: List[int] = []
# Variables related to simple keys treatment.
@@ -140,11 +141,10 @@ class Scanner(object):
# (token_number, required, index, line, column, mark)
# A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow),
# '[', or '{' tokens.
- self.possible_simple_keys = {} # type: Dict[Any, Any]
+ self.possible_simple_keys: Dict[Any, Any] = {}
@property
- def reader(self):
- # type: () -> Any
+ def reader(self) -> Any:
try:
return self._scanner_reader # type: ignore
except AttributeError:
@@ -155,20 +155,18 @@ class Scanner(object):
return self._scanner_reader
@property
- def scanner_processing_version(self): # prefix until un-composited
- # type: () -> Any
+ def scanner_processing_version(self) -> Any: # prefix until un-composited
if hasattr(self.loader, 'typ'):
return self.loader.resolver.processing_version
return self.loader.processing_version
# Public methods.
- def check_token(self, *choices):
- # type: (Any) -> bool
+ def check_token(self, *choices: Any) -> bool:
# Check if the next token is one of the given types.
while self.need_more_tokens():
self.fetch_more_tokens()
- if bool(self.tokens):
+ if len(self.tokens) > 0:
if not choices:
return True
for choice in choices:
@@ -176,30 +174,27 @@ class Scanner(object):
return True
return False
- def peek_token(self):
- # type: () -> Any
+ def peek_token(self) -> Any:
# Return the next token, but do not delete it from the queue.
while self.need_more_tokens():
self.fetch_more_tokens()
- if bool(self.tokens):
+ if len(self.tokens) > 0:
return self.tokens[0]
- def get_token(self):
- # type: () -> Any
+ def get_token(self) -> Any:
# Return the next token.
while self.need_more_tokens():
self.fetch_more_tokens()
- if bool(self.tokens):
+ if len(self.tokens) > 0:
self.tokens_taken += 1
return self.tokens.pop(0)
# Private methods.
- def need_more_tokens(self):
- # type: () -> bool
+ def need_more_tokens(self) -> bool:
if self.done:
return False
- if not self.tokens:
+ if len(self.tokens) == 0:
return True
# The current token may be a potential simple key, so we
# need to look further.
@@ -208,12 +203,10 @@ class Scanner(object):
return True
return False
- def fetch_comment(self, comment):
- # type: (Any) -> None
+ def fetch_comment(self, comment: Any) -> None:
raise NotImplementedError
- def fetch_more_tokens(self):
- # type: () -> Any
+ def fetch_more_tokens(self) -> Any:
# Eat whitespaces and comments until we reach the next token.
comment = self.scan_to_next_token()
if comment is not None: # never happens for base scanner
@@ -245,7 +238,7 @@ class Scanner(object):
return self.fetch_document_end()
# TODO: support for BOM within a stream.
- # if ch == u'\uFEFF':
+ # if ch == '\uFEFF':
# return self.fetch_bom() <-- issue BOMToken
# Note: the order of the following checks is NOT significant.
@@ -318,14 +311,13 @@ class Scanner(object):
raise ScannerError(
'while scanning for the next token',
None,
- 'found character %r that cannot start any token' % utf8(ch),
+ f'found character {ch!r} that cannot start any token',
self.reader.get_mark(),
)
# Simple keys treatment.
- def next_possible_simple_key(self):
- # type: () -> Any
+ def next_possible_simple_key(self) -> Any:
# Return the number of the nearest possible simple key. Actually we
# don't need to loop through the whole dictionary. We may replace it
# with the following code:
@@ -340,8 +332,7 @@ class Scanner(object):
min_token_number = key.token_number
return min_token_number
- def stale_possible_simple_keys(self):
- # type: () -> None
+ def stale_possible_simple_keys(self) -> None:
# Remove entries that are no longer possible simple keys. According to
# the YAML specification, simple keys
# - should be limited to a single line,
@@ -360,8 +351,7 @@ class Scanner(object):
)
del self.possible_simple_keys[level]
- def save_possible_simple_key(self):
- # type: () -> None
+ def save_possible_simple_key(self) -> None:
# The next token may start a simple key. We check if it's possible
# and save its position. This function is called for
# ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'.
@@ -384,8 +374,7 @@ class Scanner(object):
)
self.possible_simple_keys[self.flow_level] = key
- def remove_possible_simple_key(self):
- # type: () -> None
+ def remove_possible_simple_key(self) -> None:
# Remove the saved possible key position at the current flow level.
if self.flow_level in self.possible_simple_keys:
key = self.possible_simple_keys[self.flow_level]
@@ -402,8 +391,7 @@ class Scanner(object):
# Indentation functions.
- def unwind_indent(self, column):
- # type: (Any) -> None
+ def unwind_indent(self, column: Any) -> None:
# In flow context, tokens should respect indentation.
# Actually the condition should be `self.indent >= column` according to
# the spec. But this condition will prohibit intuitively correct
@@ -427,8 +415,7 @@ class Scanner(object):
self.indent = self.indents.pop()
self.tokens.append(BlockEndToken(mark, mark))
- def add_indent(self, column):
- # type: (int) -> bool
+ def add_indent(self, column: int) -> bool:
# Check if we need to increase indentation.
if self.indent < column:
self.indents.append(self.indent)
@@ -438,8 +425,7 @@ class Scanner(object):
# Fetchers.
- def fetch_stream_start(self):
- # type: () -> None
+ def fetch_stream_start(self) -> None:
# We always add STREAM-START as the first token and STREAM-END as the
# last token.
# Read the token.
@@ -447,8 +433,7 @@ class Scanner(object):
# Add STREAM-START.
self.tokens.append(StreamStartToken(mark, mark, encoding=self.reader.encoding))
- def fetch_stream_end(self):
- # type: () -> None
+ def fetch_stream_end(self) -> None:
# Set the current indentation to -1.
self.unwind_indent(-1)
# Reset simple keys.
@@ -462,8 +447,7 @@ class Scanner(object):
# The stream is finished.
self.done = True
- def fetch_directive(self):
- # type: () -> None
+ def fetch_directive(self) -> None:
# Set the current indentation to -1.
self.unwind_indent(-1)
@@ -474,16 +458,13 @@ class Scanner(object):
# Scan and add DIRECTIVE.
self.tokens.append(self.scan_directive())
- def fetch_document_start(self):
- # type: () -> None
+ def fetch_document_start(self) -> None:
self.fetch_document_indicator(DocumentStartToken)
- def fetch_document_end(self):
- # type: () -> None
+ def fetch_document_end(self) -> None:
self.fetch_document_indicator(DocumentEndToken)
- def fetch_document_indicator(self, TokenClass):
- # type: (Any) -> None
+ def fetch_document_indicator(self, TokenClass: Any) -> None:
# Set the current indentation to -1.
self.unwind_indent(-1)
@@ -498,16 +479,13 @@ class Scanner(object):
end_mark = self.reader.get_mark()
self.tokens.append(TokenClass(start_mark, end_mark))
- def fetch_flow_sequence_start(self):
- # type: () -> None
+ def fetch_flow_sequence_start(self) -> None:
self.fetch_flow_collection_start(FlowSequenceStartToken, to_push='[')
- def fetch_flow_mapping_start(self):
- # type: () -> None
+ def fetch_flow_mapping_start(self) -> None:
self.fetch_flow_collection_start(FlowMappingStartToken, to_push='{')
- def fetch_flow_collection_start(self, TokenClass, to_push):
- # type: (Any, Text) -> None
+ def fetch_flow_collection_start(self, TokenClass: Any, to_push: Text) -> None:
# '[' and '{' may start a simple key.
self.save_possible_simple_key()
# Increase the flow level.
@@ -520,16 +498,13 @@ class Scanner(object):
end_mark = self.reader.get_mark()
self.tokens.append(TokenClass(start_mark, end_mark))
- def fetch_flow_sequence_end(self):
- # type: () -> None
+ def fetch_flow_sequence_end(self) -> None:
self.fetch_flow_collection_end(FlowSequenceEndToken)
- def fetch_flow_mapping_end(self):
- # type: () -> None
+ def fetch_flow_mapping_end(self) -> None:
self.fetch_flow_collection_end(FlowMappingEndToken)
- def fetch_flow_collection_end(self, TokenClass):
- # type: (Any) -> None
+ def fetch_flow_collection_end(self, TokenClass: Any) -> None:
# Reset possible simple key on the current level.
self.remove_possible_simple_key()
# Decrease the flow level.
@@ -547,8 +522,7 @@ class Scanner(object):
end_mark = self.reader.get_mark()
self.tokens.append(TokenClass(start_mark, end_mark))
- def fetch_flow_entry(self):
- # type: () -> None
+ def fetch_flow_entry(self) -> None:
# Simple keys are allowed after ','.
self.allow_simple_key = True
# Reset possible simple key on the current level.
@@ -559,8 +533,7 @@ class Scanner(object):
end_mark = self.reader.get_mark()
self.tokens.append(FlowEntryToken(start_mark, end_mark))
- def fetch_block_entry(self):
- # type: () -> None
+ def fetch_block_entry(self) -> None:
# Block context needs additional checks.
if not self.flow_level:
# Are we allowed to start a new entry?
@@ -587,8 +560,7 @@ class Scanner(object):
end_mark = self.reader.get_mark()
self.tokens.append(BlockEntryToken(start_mark, end_mark))
- def fetch_key(self):
- # type: () -> None
+ def fetch_key(self) -> None:
# Block context needs additional checks.
if not self.flow_level:
@@ -615,8 +587,7 @@ class Scanner(object):
end_mark = self.reader.get_mark()
self.tokens.append(KeyToken(start_mark, end_mark))
- def fetch_value(self):
- # type: () -> None
+ def fetch_value(self) -> None:
# Do we determine a simple key?
if self.flow_level in self.possible_simple_keys:
# Add KEY.
@@ -676,8 +647,7 @@ class Scanner(object):
end_mark = self.reader.get_mark()
self.tokens.append(ValueToken(start_mark, end_mark))
- def fetch_alias(self):
- # type: () -> None
+ def fetch_alias(self) -> None:
# ALIAS could be a simple key.
self.save_possible_simple_key()
# No simple keys after ALIAS.
@@ -685,8 +655,7 @@ class Scanner(object):
# Scan and add ALIAS.
self.tokens.append(self.scan_anchor(AliasToken))
- def fetch_anchor(self):
- # type: () -> None
+ def fetch_anchor(self) -> None:
# ANCHOR could start a simple key.
self.save_possible_simple_key()
# No simple keys after ANCHOR.
@@ -694,8 +663,7 @@ class Scanner(object):
# Scan and add ANCHOR.
self.tokens.append(self.scan_anchor(AnchorToken))
- def fetch_tag(self):
- # type: () -> None
+ def fetch_tag(self) -> None:
# TAG could start a simple key.
self.save_possible_simple_key()
# No simple keys after TAG.
@@ -703,16 +671,13 @@ class Scanner(object):
# Scan and add TAG.
self.tokens.append(self.scan_tag())
- def fetch_literal(self):
- # type: () -> None
+ def fetch_literal(self) -> None:
self.fetch_block_scalar(style='|')
- def fetch_folded(self):
- # type: () -> None
+ def fetch_folded(self) -> None:
self.fetch_block_scalar(style='>')
- def fetch_block_scalar(self, style):
- # type: (Any) -> None
+ def fetch_block_scalar(self, style: Any) -> None:
# A simple key may follow a block scalar.
self.allow_simple_key = True
# Reset possible simple key on the current level.
@@ -720,16 +685,13 @@ class Scanner(object):
# Scan and add SCALAR.
self.tokens.append(self.scan_block_scalar(style))
- def fetch_single(self):
- # type: () -> None
+ def fetch_single(self) -> None:
self.fetch_flow_scalar(style="'")
- def fetch_double(self):
- # type: () -> None
+ def fetch_double(self) -> None:
self.fetch_flow_scalar(style='"')
- def fetch_flow_scalar(self, style):
- # type: (Any) -> None
+ def fetch_flow_scalar(self, style: Any) -> None:
# A flow scalar could be a simple key.
self.save_possible_simple_key()
# No simple keys after flow scalars.
@@ -737,8 +699,7 @@ class Scanner(object):
# Scan and add SCALAR.
self.tokens.append(self.scan_flow_scalar(style))
- def fetch_plain(self):
- # type: () -> None
+ def fetch_plain(self) -> None:
# A plain scalar could be a simple key.
self.save_possible_simple_key()
# No simple keys after plain scalars. But note that `scan_plain` will
@@ -750,45 +711,39 @@ class Scanner(object):
# Checkers.
- def check_directive(self):
- # type: () -> Any
+ def check_directive(self) -> Any:
# DIRECTIVE: ^ '%' ...
# The '%' indicator is already checked.
if self.reader.column == 0:
return True
return None
- def check_document_start(self):
- # type: () -> Any
+ def check_document_start(self) -> Any:
# DOCUMENT-START: ^ '---' (' '|'\n')
if self.reader.column == 0:
if self.reader.prefix(3) == '---' and self.reader.peek(3) in _THE_END_SPACE_TAB:
return True
return None
- def check_document_end(self):
- # type: () -> Any
+ def check_document_end(self) -> Any:
# DOCUMENT-END: ^ '...' (' '|'\n')
if self.reader.column == 0:
if self.reader.prefix(3) == '...' and self.reader.peek(3) in _THE_END_SPACE_TAB:
return True
return None
- def check_block_entry(self):
- # type: () -> Any
+ def check_block_entry(self) -> Any:
# BLOCK-ENTRY: '-' (' '|'\n')
return self.reader.peek(1) in _THE_END_SPACE_TAB
- def check_key(self):
- # type: () -> Any
+ def check_key(self) -> Any:
# KEY(flow context): '?'
if bool(self.flow_level):
return True
# KEY(block context): '?' (' '|'\n')
return self.reader.peek(1) in _THE_END_SPACE_TAB
- def check_value(self):
- # type: () -> Any
+ def check_value(self) -> Any:
# VALUE(flow context): ':'
if self.scanner_processing_version == (1, 1):
if bool(self.flow_level):
@@ -806,8 +761,7 @@ class Scanner(object):
# VALUE(block context): ':' (' '|'\n')
return self.reader.peek(1) in _THE_END_SPACE_TAB
- def check_plain(self):
- # type: () -> Any
+ def check_plain(self) -> Any:
# A plain scalar may start with any non-space character except:
# '-', '?', ':', ',', '[', ']', '{', '}',
# '#', '&', '*', '!', '|', '>', '\'', '\"',
@@ -843,8 +797,7 @@ class Scanner(object):
# Scanners.
- def scan_to_next_token(self):
- # type: () -> Any
+ def scan_to_next_token(self) -> Any:
# We ignore spaces, line breaks and comments.
# If we find a line break in the block context, we set the flag
# `allow_simple_key` on.
@@ -882,8 +835,7 @@ class Scanner(object):
found = True
return None
- def scan_directive(self):
- # type: () -> Any
+ def scan_directive(self) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
@@ -904,8 +856,7 @@ class Scanner(object):
self.scan_directive_ignored_line(start_mark)
return DirectiveToken(name, value, start_mark, end_mark)
- def scan_directive_name(self, start_mark):
- # type: (Any) -> Any
+ def scan_directive_name(self, start_mark: Any) -> Any:
# See the specification for details.
length = 0
srp = self.reader.peek
@@ -917,7 +868,7 @@ class Scanner(object):
raise ScannerError(
'while scanning a directive',
start_mark,
- 'expected alphabetic or numeric character, but found %r' % utf8(ch),
+ f'expected alphabetic or numeric character, but found {ch!r}',
self.reader.get_mark(),
)
value = self.reader.prefix(length)
@@ -927,13 +878,12 @@ class Scanner(object):
raise ScannerError(
'while scanning a directive',
start_mark,
- 'expected alphabetic or numeric character, but found %r' % utf8(ch),
+ f'expected alphabetic or numeric character, but found {ch!r}',
self.reader.get_mark(),
)
return value
- def scan_yaml_directive_value(self, start_mark):
- # type: (Any) -> Any
+ def scan_yaml_directive_value(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
@@ -944,7 +894,7 @@ class Scanner(object):
raise ScannerError(
'while scanning a directive',
start_mark,
- "expected a digit or '.', but found %r" % utf8(srp()),
+ f"expected a digit or '.', but found {srp()!r}",
self.reader.get_mark(),
)
srf()
@@ -953,14 +903,13 @@ class Scanner(object):
raise ScannerError(
'while scanning a directive',
start_mark,
- "expected a digit or ' ', but found %r" % utf8(srp()),
+ f"expected a digit or '.', but found {srp()!r}",
self.reader.get_mark(),
)
self.yaml_version = (major, minor)
return self.yaml_version
- def scan_yaml_directive_number(self, start_mark):
- # type: (Any) -> Any
+ def scan_yaml_directive_number(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
@@ -969,7 +918,7 @@ class Scanner(object):
raise ScannerError(
'while scanning a directive',
start_mark,
- 'expected a digit, but found %r' % utf8(ch),
+ f'expected a digit, but found {ch!r}',
self.reader.get_mark(),
)
length = 0
@@ -979,8 +928,7 @@ class Scanner(object):
srf(length)
return value
- def scan_tag_directive_value(self, start_mark):
- # type: (Any) -> Any
+ def scan_tag_directive_value(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
@@ -992,8 +940,7 @@ class Scanner(object):
prefix = self.scan_tag_directive_prefix(start_mark)
return (handle, prefix)
- def scan_tag_directive_handle(self, start_mark):
- # type: (Any) -> Any
+ def scan_tag_directive_handle(self, start_mark: Any) -> Any:
# See the specification for details.
value = self.scan_tag_handle('directive', start_mark)
ch = self.reader.peek()
@@ -1001,13 +948,12 @@ class Scanner(object):
raise ScannerError(
'while scanning a directive',
start_mark,
- "expected ' ', but found %r" % utf8(ch),
+ f"expected ' ', but found {ch!r}",
self.reader.get_mark(),
)
return value
- def scan_tag_directive_prefix(self, start_mark):
- # type: (Any) -> Any
+ def scan_tag_directive_prefix(self, start_mark: Any) -> Any:
# See the specification for details.
value = self.scan_tag_uri('directive', start_mark)
ch = self.reader.peek()
@@ -1015,13 +961,12 @@ class Scanner(object):
raise ScannerError(
'while scanning a directive',
start_mark,
- "expected ' ', but found %r" % utf8(ch),
+ f"expected ' ', but found {ch!r}",
self.reader.get_mark(),
)
return value
- def scan_directive_ignored_line(self, start_mark):
- # type: (Any) -> None
+ def scan_directive_ignored_line(self, start_mark: Any) -> None:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
@@ -1035,13 +980,12 @@ class Scanner(object):
raise ScannerError(
'while scanning a directive',
start_mark,
- 'expected a comment or a line break, but found %r' % utf8(ch),
+ f'expected a comment or a line break, but found {ch!r}',
self.reader.get_mark(),
)
self.scan_line_break()
- def scan_anchor(self, TokenClass):
- # type: (Any) -> Any
+ def scan_anchor(self, TokenClass: Any) -> Any:
# The specification does not restrict characters for anchors and
# aliases. This may lead to problems, for instance, the document:
# [ *alias, value ]
@@ -1060,16 +1004,16 @@ class Scanner(object):
self.reader.forward()
length = 0
ch = srp(length)
- # while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
- # or ch in u'-_':
+ # while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ # or ch in '-_':
while check_anchorname_char(ch):
length += 1
ch = srp(length)
if not length:
raise ScannerError(
- 'while scanning an %s' % (name,),
+ f'while scanning an {name!s}',
start_mark,
- 'expected alphabetic or numeric character, but found %r' % utf8(ch),
+ f'expected alphabetic or numeric character, but found {ch!r}',
self.reader.get_mark(),
)
value = self.reader.prefix(length)
@@ -1079,20 +1023,26 @@ class Scanner(object):
# assert ch1 == ch
if ch not in '\0 \t\r\n\x85\u2028\u2029?:,[]{}%@`':
raise ScannerError(
- 'while scanning an %s' % (name,),
+ f'while scanning an {name!s}',
start_mark,
- 'expected alphabetic or numeric character, but found %r' % utf8(ch),
+ f'expected alphabetic or numeric character, but found {ch!r}',
self.reader.get_mark(),
)
end_mark = self.reader.get_mark()
return TokenClass(value, start_mark, end_mark)
- def scan_tag(self):
- # type: () -> Any
+ def scan_tag(self) -> Any:
# See the specification for details.
srp = self.reader.peek
start_mark = self.reader.get_mark()
ch = srp(1)
+ short_handle = '!'
+ if ch == '!':
+ short_handle = '!!'
+ self.reader.forward()
+ srp = self.reader.peek
+ ch = srp(1)
+
if ch == '<':
handle = None
self.reader.forward(2)
@@ -1101,13 +1051,13 @@ class Scanner(object):
raise ScannerError(
'while parsing a tag',
start_mark,
- "expected '>', but found %r" % utf8(srp()),
+ f"expected '>' but found {srp()!r}",
self.reader.get_mark(),
)
self.reader.forward()
elif ch in _THE_END_SPACE_TAB:
handle = None
- suffix = '!'
+ suffix = short_handle
self.reader.forward()
else:
length = 1
@@ -1118,11 +1068,11 @@ class Scanner(object):
break
length += 1
ch = srp(length)
- handle = '!'
+ handle = short_handle
if use_handle:
handle = self.scan_tag_handle('tag', start_mark)
else:
- handle = '!'
+ handle = short_handle
self.reader.forward()
suffix = self.scan_tag_uri('tag', start_mark)
ch = srp()
@@ -1130,15 +1080,14 @@ class Scanner(object):
raise ScannerError(
'while scanning a tag',
start_mark,
- "expected ' ', but found %r" % utf8(ch),
+ f"expected ' ', but found {ch!r}",
self.reader.get_mark(),
)
value = (handle, suffix)
end_mark = self.reader.get_mark()
return TagToken(value, start_mark, end_mark)
- def scan_block_scalar(self, style, rt=False):
- # type: (Any, Optional[bool]) -> Any
+ def scan_block_scalar(self, style: Any, rt: Optional[bool] = False) -> Any:
# See the specification for details.
srp = self.reader.peek
if style == '>':
@@ -1146,7 +1095,7 @@ class Scanner(object):
else:
folded = False
- chunks = [] # type: List[Any]
+ chunks: List[Any] = []
start_mark = self.reader.get_mark()
# Scan the header.
@@ -1209,10 +1158,10 @@ class Scanner(object):
# This is Clark Evans's interpretation (also in the spec
# examples):
#
- # if folded and line_break == u'\n':
+ # if folded and line_break == '\n':
# if not breaks:
# if srp() not in ' \t':
- # chunks.append(u' ')
+ # chunks.append(' ')
# else:
# chunks.append(line_break)
# else:
@@ -1222,7 +1171,7 @@ class Scanner(object):
# Process trailing line breaks. The 'chomping' setting determines
# whether they are included in the value.
- trailing = [] # type: List[Any]
+ trailing: List[Any] = []
if chomping in [None, True]:
chunks.append(line_break)
if chomping is True:
@@ -1232,25 +1181,36 @@ class Scanner(object):
# We are done.
token = ScalarToken("".join(chunks), False, start_mark, end_mark, style)
- if block_scalar_comment is not None:
- token.add_pre_comments([block_scalar_comment])
+ if self.loader is not None:
+ comment_handler = getattr(self.loader, 'comment_handling', False)
+ if comment_handler is None:
+ if block_scalar_comment is not None:
+ token.add_pre_comments([block_scalar_comment])
if len(trailing) > 0:
- # nprint('trailing 1', trailing) # XXXXX
# Eat whitespaces and comments until we reach the next token.
+ if self.loader is not None:
+ comment_handler = getattr(self.loader, 'comment_handling', None)
+ if comment_handler is not None:
+ line = end_mark.line - len(trailing)
+ for x in trailing:
+ assert x[-1] == '\n'
+ self.comments.add_blank_line(x, 0, line) # type: ignore
+ line += 1
comment = self.scan_to_next_token()
while comment:
trailing.append(' ' * comment[1].column + comment[0])
comment = self.scan_to_next_token()
-
- # Keep track of the trailing whitespace and following comments
- # as a comment token, if isn't all included in the actual value.
- comment_end_mark = self.reader.get_mark()
- comment = CommentToken("".join(trailing), end_mark, comment_end_mark)
- token.add_post_comment(comment)
+ if self.loader is not None:
+ comment_handler = getattr(self.loader, 'comment_handling', False)
+ if comment_handler is None:
+ # Keep track of the trailing whitespace and following comments
+ # as a comment token, if it isn't all included in the actual value.
+ comment_end_mark = self.reader.get_mark()
+ comment = CommentToken("".join(trailing), end_mark, comment_end_mark)
+ token.add_post_comment(comment)
return token
- def scan_block_scalar_indicators(self, start_mark):
- # type: (Any) -> Any
+ def scan_block_scalar_indicators(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
chomping = None
@@ -1295,13 +1255,12 @@ class Scanner(object):
raise ScannerError(
'while scanning a block scalar',
start_mark,
- 'expected chomping or indentation indicators, but found %r' % utf8(ch),
+ f'expected chomping or indentation indicators, but found {ch!r}',
self.reader.get_mark(),
)
return chomping, increment
- def scan_block_scalar_ignored_line(self, start_mark):
- # type: (Any) -> Any
+ def scan_block_scalar_ignored_line(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
@@ -1320,14 +1279,13 @@ class Scanner(object):
raise ScannerError(
'while scanning a block scalar',
start_mark,
- 'expected a comment or a line break, but found %r' % utf8(ch),
+ f'expected a comment or a line break, but found {ch!r}',
self.reader.get_mark(),
)
self.scan_line_break()
return comment
- def scan_block_scalar_indentation(self):
- # type: () -> Any
+ def scan_block_scalar_indentation(self) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
@@ -1344,8 +1302,7 @@ class Scanner(object):
max_indent = self.reader.column
return chunks, max_indent, end_mark
- def scan_block_scalar_breaks(self, indent):
- # type: (int) -> Any
+ def scan_block_scalar_breaks(self, indent: int) -> Any:
# See the specification for details.
chunks = []
srp = self.reader.peek
@@ -1360,8 +1317,7 @@ class Scanner(object):
srf()
return chunks, end_mark
- def scan_flow_scalar(self, style):
- # type: (Any) -> Any
+ def scan_flow_scalar(self, style: Any) -> Any:
# See the specification for details.
# Note that we loose indentation rules for quoted scalars. Quoted
# scalars don't need to adhere indentation because " and ' clearly
@@ -1373,7 +1329,7 @@ class Scanner(object):
else:
double = False
srp = self.reader.peek
- chunks = [] # type: List[Any]
+ chunks: List[Any] = []
start_mark = self.reader.get_mark()
quote = srp()
self.reader.forward()
@@ -1408,10 +1364,9 @@ class Scanner(object):
ESCAPE_CODES = {'x': 2, 'u': 4, 'U': 8}
- def scan_flow_scalar_non_spaces(self, double, start_mark):
- # type: (Any, Any) -> Any
+ def scan_flow_scalar_non_spaces(self, double: Any, start_mark: Any) -> Any:
# See the specification for details.
- chunks = [] # type: List[Any]
+ chunks: List[Any] = []
srp = self.reader.peek
srf = self.reader.forward
while True:
@@ -1442,12 +1397,12 @@ class Scanner(object):
raise ScannerError(
'while scanning a double-quoted scalar',
start_mark,
- 'expected escape sequence of %d hexdecimal '
- 'numbers, but found %r' % (length, utf8(srp(k))),
+ f'expected escape sequence of {length:d} '
+ f'hexdecimal numbers, but found {srp(k)!r}',
self.reader.get_mark(),
)
code = int(self.reader.prefix(length), 16)
- chunks.append(unichr(code))
+ chunks.append(chr(code))
srf(length)
elif ch in '\n\r\x85\u2028\u2029':
self.scan_line_break()
@@ -1456,14 +1411,13 @@ class Scanner(object):
raise ScannerError(
'while scanning a double-quoted scalar',
start_mark,
- 'found unknown escape character %r' % utf8(ch),
+ f'found unknown escape character {ch!r}',
self.reader.get_mark(),
)
else:
return chunks
- def scan_flow_scalar_spaces(self, double, start_mark):
- # type: (Any, Any) -> Any
+ def scan_flow_scalar_spaces(self, double: Any, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
chunks = []
@@ -1492,10 +1446,9 @@ class Scanner(object):
chunks.append(whitespaces)
return chunks
- def scan_flow_scalar_breaks(self, double, start_mark):
- # type: (Any, Any) -> Any
+ def scan_flow_scalar_breaks(self, double: Any, start_mark: Any) -> Any:
# See the specification for details.
- chunks = [] # type: List[Any]
+ chunks: List[Any] = []
srp = self.reader.peek
srf = self.reader.forward
while True:
@@ -1516,8 +1469,7 @@ class Scanner(object):
else:
return chunks
- def scan_plain(self):
- # type: () -> Any
+ def scan_plain(self) -> Any:
# See the specification for details.
# We add an additional restriction for the flow context:
# plain scalars in the flow context cannot contain ',', ': ' and '?'.
@@ -1525,7 +1477,7 @@ class Scanner(object):
# Indentation rules are loosed for the flow context.
srp = self.reader.peek
srf = self.reader.forward
- chunks = [] # type: List[Any]
+ chunks: List[Any] = []
start_mark = self.reader.get_mark()
end_mark = start_mark
indent = self.indent + 1
@@ -1533,7 +1485,7 @@ class Scanner(object):
# document separators at the beginning of the line.
# if indent == 0:
# indent = 1
- spaces = [] # type: List[Any]
+ spaces: List[Any] = []
while True:
length = 0
if srp() == '#':
@@ -1587,14 +1539,25 @@ class Scanner(object):
break
token = ScalarToken("".join(chunks), True, start_mark, end_mark)
- if spaces and spaces[0] == '\n':
- # Create a comment token to preserve the trailing line breaks.
- comment = CommentToken("".join(spaces) + '\n', start_mark, end_mark)
- token.add_post_comment(comment)
+ # getattr provides True so C type loader, which cannot handle comment,
+ # will not make CommentToken
+ if self.loader is not None:
+ comment_handler = getattr(self.loader, 'comment_handling', False)
+ if comment_handler is None:
+ if spaces and spaces[0] == '\n':
+ # Create a comment token to preserve the trailing line breaks.
+ comment = CommentToken("".join(spaces) + '\n', start_mark, end_mark)
+ token.add_post_comment(comment)
+ elif comment_handler is not False:
+ line = start_mark.line + 1
+ for ch in spaces:
+ if ch == '\n':
+ self.comments.add_blank_line('\n', 0, line) # type: ignore
+ line += 1
+
return token
- def scan_plain_spaces(self, indent, start_mark):
- # type: (Any, Any) -> Any
+ def scan_plain_spaces(self, indent: Any, start_mark: Any) -> Any:
# See the specification for details.
# The specification is really confusing about tabs in plain scalars.
# We just forbid them completely. Do not use tabs in YAML!
@@ -1631,8 +1594,7 @@ class Scanner(object):
chunks.append(whitespaces)
return chunks
- def scan_tag_handle(self, name, start_mark):
- # type: (Any, Any) -> Any
+ def scan_tag_handle(self, name: Any, start_mark: Any) -> Any:
# See the specification for details.
# For some strange reasons, the specification does not allow '_' in
# tag handles. I have allowed it anyway.
@@ -1640,9 +1602,9 @@ class Scanner(object):
ch = srp()
if ch != '!':
raise ScannerError(
- 'while scanning a %s' % (name,),
+ f'while scanning a {name!s}',
start_mark,
- "expected '!', but found %r" % utf8(ch),
+ f"expected '!', but found {ch!r}",
self.reader.get_mark(),
)
length = 1
@@ -1654,9 +1616,9 @@ class Scanner(object):
if ch != '!':
self.reader.forward(length)
raise ScannerError(
- 'while scanning a %s' % (name,),
+ f'while scanning a {name!s}',
start_mark,
- "expected '!', but found %r" % utf8(ch),
+ f"expected '!' but found {ch!r}",
self.reader.get_mark(),
)
length += 1
@@ -1664,8 +1626,7 @@ class Scanner(object):
self.reader.forward(length)
return value
- def scan_tag_uri(self, name, start_mark):
- # type: (Any, Any) -> Any
+ def scan_tag_uri(self, name: Any, start_mark: Any) -> Any:
# See the specification for details.
# Note: we do not check if URI is well-formed.
srp = self.reader.peek
@@ -1693,47 +1654,39 @@ class Scanner(object):
length = 0
if not chunks:
raise ScannerError(
- 'while parsing a %s' % (name,),
+ f'while parsing a {name!s}',
start_mark,
- 'expected URI, but found %r' % utf8(ch),
+ f'expected URI, but found {ch!r}',
self.reader.get_mark(),
)
return "".join(chunks)
- def scan_uri_escapes(self, name, start_mark):
- # type: (Any, Any) -> Any
+ def scan_uri_escapes(self, name: Any, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
- code_bytes = [] # type: List[Any]
+ code_bytes: List[Any] = []
mark = self.reader.get_mark()
while srp() == '%':
srf()
for k in range(2):
if srp(k) not in '0123456789ABCDEFabcdef':
raise ScannerError(
- 'while scanning a %s' % (name,),
+ f'while scanning a {name!s}',
start_mark,
- 'expected URI escape sequence of 2 hexdecimal numbers,'
- ' but found %r' % utf8(srp(k)),
+ f'expected URI escape sequence of 2 hexdecimal numbers, '
+ f'but found {srp(k)!r}',
self.reader.get_mark(),
)
- if PY3:
- code_bytes.append(int(self.reader.prefix(2), 16))
- else:
- code_bytes.append(chr(int(self.reader.prefix(2), 16)))
+ code_bytes.append(int(self.reader.prefix(2), 16))
srf(2)
try:
- if PY3:
- value = bytes(code_bytes).decode('utf-8')
- else:
- value = unicode(b"".join(code_bytes), 'utf-8')
+ value = bytes(code_bytes).decode('utf-8')
except UnicodeDecodeError as exc:
- raise ScannerError('while scanning a %s' % (name,), start_mark, str(exc), mark)
+ raise ScannerError(f'while scanning a {name!s}', start_mark, str(exc), mark)
return value
- def scan_line_break(self):
- # type: () -> Any
+ def scan_line_break(self) -> Any:
# Transforms:
# '\r\n' : '\n'
# '\r' : '\n'
@@ -1756,13 +1709,12 @@ class Scanner(object):
class RoundTripScanner(Scanner):
- def check_token(self, *choices):
- # type: (Any) -> bool
+ def check_token(self, *choices: Any) -> bool:
# Check if the next token is one of the given types.
while self.need_more_tokens():
self.fetch_more_tokens()
self._gather_comments()
- if bool(self.tokens):
+ if len(self.tokens) > 0:
if not choices:
return True
for choice in choices:
@@ -1770,20 +1722,18 @@ class RoundTripScanner(Scanner):
return True
return False
- def peek_token(self):
- # type: () -> Any
+ def peek_token(self) -> Any:
# Return the next token, but do not delete if from the queue.
while self.need_more_tokens():
self.fetch_more_tokens()
self._gather_comments()
- if bool(self.tokens):
+ if len(self.tokens) > 0:
return self.tokens[0]
return None
- def _gather_comments(self):
- # type: () -> Any
- """combine multiple comment lines"""
- comments = [] # type: List[Any]
+ def _gather_comments(self) -> Any:
+ """combine multiple comment lines and assign to next non-comment-token"""
+ comments: List[Any] = []
if not self.tokens:
return comments
if isinstance(self.tokens[0], CommentToken):
@@ -1805,13 +1755,12 @@ class RoundTripScanner(Scanner):
if not self.done and len(self.tokens) < 2:
self.fetch_more_tokens()
- def get_token(self):
- # type: () -> Any
+ def get_token(self) -> Any:
# Return the next token.
while self.need_more_tokens():
self.fetch_more_tokens()
self._gather_comments()
- if bool(self.tokens):
+ if len(self.tokens) > 0:
# nprint('tk', self.tokens)
# only add post comment to single line tokens:
# scalar, value token. FlowXEndToken, otherwise
@@ -1859,8 +1808,7 @@ class RoundTripScanner(Scanner):
return self.tokens.pop(0)
return None
- def fetch_comment(self, comment):
- # type: (Any) -> None
+ def fetch_comment(self, comment: Any) -> None:
value, start_mark, end_mark = comment
while value and value[-1] == ' ':
# empty line within indented key context
@@ -1870,8 +1818,7 @@ class RoundTripScanner(Scanner):
# scanner
- def scan_to_next_token(self):
- # type: () -> Any
+ def scan_to_next_token(self) -> Any:
# We ignore spaces, line breaks and comments.
# If we find a line break in the block context, we set the flag
# `allow_simple_key` on.
@@ -1914,7 +1861,7 @@ class RoundTripScanner(Scanner):
break
comment += ch
srf()
- # gather any blank lines following the comment too
+ # gather any blank lines following the comment
ch = self.scan_line_break()
while len(ch) > 0:
comment += ch
@@ -1923,7 +1870,7 @@ class RoundTripScanner(Scanner):
if not self.flow_level:
self.allow_simple_key = True
return comment, start_mark, end_mark
- if bool(self.scan_line_break()):
+ if self.scan_line_break() != '':
start_mark = self.reader.get_mark()
if not self.flow_level:
self.allow_simple_key = True
@@ -1943,8 +1890,7 @@ class RoundTripScanner(Scanner):
found = True
return None
- def scan_line_break(self, empty_line=False):
- # type: (bool) -> Text
+ def scan_line_break(self, empty_line: bool = False) -> Text:
# Transforms:
# '\r\n' : '\n'
# '\r' : '\n'
@@ -1953,7 +1899,7 @@ class RoundTripScanner(Scanner):
# '\u2028' : '\u2028'
# '\u2029 : '\u2029'
# default : ''
- ch = self.reader.peek() # type: Text
+ ch: Text = self.reader.peek()
if ch in '\r\n\x85':
if self.reader.prefix(2) == '\r\n':
self.reader.forward(2)
@@ -1968,13 +1914,401 @@ class RoundTripScanner(Scanner):
return ch
return ""
- def scan_block_scalar(self, style, rt=True):
- # type: (Any, Optional[bool]) -> Any
+ def scan_block_scalar(self, style: Any, rt: Optional[bool] = True) -> Any:
return Scanner.scan_block_scalar(self, style, rt=rt)
-# try:
-# import psyco
-# psyco.bind(Scanner)
-# except ImportError:
-# pass
+# comment handling 2021, differentiation not needed
+
+VALUECMNT = 0
+KEYCMNT = 0 # 1
+# TAGCMNT = 2
+# ANCHORCMNT = 3
+
+
+class CommentBase:
+ __slots__ = ('value', 'line', 'column', 'used', 'function', 'fline', 'ufun', 'uline')
+
+ def __init__(self, value: Any, line: Any, column: Any) -> None:
+ self.value = value
+ self.line = line
+ self.column = column
+ self.used = ' '
+ info = inspect.getframeinfo(inspect.stack()[3][0])
+ self.function = info.function
+ self.fline = info.lineno
+ self.ufun = None
+ self.uline = None
+
+ def set_used(self, v: Any = '+') -> None:
+ self.used = v
+ info = inspect.getframeinfo(inspect.stack()[1][0])
+ self.ufun = info.function # type: ignore
+ self.uline = info.lineno # type: ignore
+
+ def set_assigned(self) -> None:
+ self.used = '|'
+
+ def __str__(self) -> str:
+ return f'{self.value}'
+
+ def __repr__(self) -> str:
+ return f'{self.value!r}'
+
+ def info(self) -> str:
+ xv = self.value + '"'
+ name = self.name # type: ignore
+ return (
+ f'{name}{self.used} {self.line:2}:{self.column:<2} "{xv:40s} '
+ f'{self.function}:{self.fline} {self.ufun}:{self.uline}'
+ )
+
+
+class EOLComment(CommentBase):
+ name = 'EOLC'
+
+ def __init__(self, value: Any, line: Any, column: Any) -> None:
+ super().__init__(value, line, column)
+
+
+class FullLineComment(CommentBase):
+ name = 'FULL'
+
+ def __init__(self, value: Any, line: Any, column: Any) -> None:
+ super().__init__(value, line, column)
+
+
+class BlankLineComment(CommentBase):
+ name = 'BLNK'
+
+ def __init__(self, value: Any, line: Any, column: Any) -> None:
+ super().__init__(value, line, column)
+
+
+class ScannedComments:
+ def __init__(self: Any) -> None:
+ self.comments = {} # type: ignore
+ self.unused = [] # type: ignore
+
+ def add_eol_comment(self, comment: Any, column: Any, line: Any) -> Any:
+ # info = inspect.getframeinfo(inspect.stack()[1][0])
+ if comment.count('\n') == 1:
+ assert comment[-1] == '\n'
+ else:
+ assert '\n' not in comment
+ self.comments[line] = retval = EOLComment(comment[:-1], line, column)
+ self.unused.append(line)
+ return retval
+
+ def add_blank_line(self, comment: Any, column: Any, line: Any) -> Any:
+ # info = inspect.getframeinfo(inspect.stack()[1][0])
+ assert comment.count('\n') == 1 and comment[-1] == '\n'
+ assert line not in self.comments
+ self.comments[line] = retval = BlankLineComment(comment[:-1], line, column)
+ self.unused.append(line)
+ return retval
+
+ def add_full_line_comment(self, comment: Any, column: Any, line: Any) -> Any:
+ # info = inspect.getframeinfo(inspect.stack()[1][0])
+ assert comment.count('\n') == 1 and comment[-1] == '\n'
+ # if comment.startswith('# C12'):
+ # raise
+ # this raises in line 2127 for 330
+ self.comments[line] = retval = FullLineComment(comment[:-1], line, column)
+ self.unused.append(line)
+ return retval
+
+ def __getitem__(self, idx: Any) -> Any:
+ return self.comments[idx]
+
+ def __str__(self) -> Any:
+ return (
+ 'ParsedComments:\n '
+ + '\n '.join((f'{lineno:2} {x.info()}' for lineno, x in self.comments.items()))
+ + '\n'
+ )
+
+ def last(self) -> str:
+ lineno, x = list(self.comments.items())[-1]
+ return f'{lineno:2} {x.info()}\n'
+
+ def any_unprocessed(self) -> bool:
+ # ToDo: might want to differentiate based on lineno
+ return len(self.unused) > 0
+ # for lno, comment in reversed(self.comments.items()):
+ # if comment.used == ' ':
+ # return True
+ # return False
+
+ def unprocessed(self, use: Any = False) -> Any:
+ while len(self.unused) > 0:
+ first = self.unused.pop(0) if use else self.unused[0]
+ info = inspect.getframeinfo(inspect.stack()[1][0])
+ xprintf('using', first, self.comments[first].value, info.function, info.lineno)
+ yield first, self.comments[first]
+ if use:
+ self.comments[first].set_used()
+
+ def assign_pre(self, token: Any) -> Any:
+ token_line = token.start_mark.line
+ info = inspect.getframeinfo(inspect.stack()[1][0])
+ xprintf('assign_pre', token_line, self.unused, info.function, info.lineno)
+ gobbled = False
+ while self.unused and self.unused[0] < token_line:
+ gobbled = True
+ first = self.unused.pop(0)
+ xprintf('assign_pre < ', first)
+ self.comments[first].set_used()
+ token.add_comment_pre(first)
+ return gobbled
+
+ def assign_eol(self, tokens: Any) -> Any:
+ try:
+ comment_line = self.unused[0]
+ except IndexError:
+ return
+ if not isinstance(self.comments[comment_line], EOLComment):
+ return
+ idx = 1
+ while tokens[-idx].start_mark.line > comment_line or isinstance(
+ tokens[-idx], ValueToken
+ ):
+ idx += 1
+ xprintf('idx1', idx)
+ if (
+ len(tokens) > idx
+ and isinstance(tokens[-idx], ScalarToken)
+ and isinstance(tokens[-(idx + 1)], ScalarToken)
+ ):
+ return
+ try:
+ if isinstance(tokens[-idx], ScalarToken) and isinstance(
+ tokens[-(idx + 1)], KeyToken
+ ):
+ try:
+ eol_idx = self.unused.pop(0)
+ self.comments[eol_idx].set_used()
+ xprintf('>>>>>a', idx, eol_idx, KEYCMNT)
+ tokens[-idx].add_comment_eol(eol_idx, KEYCMNT)
+ except IndexError:
+ raise NotImplementedError
+ return
+ except IndexError:
+ xprintf('IndexError1')
+ pass
+ try:
+ if isinstance(tokens[-idx], ScalarToken) and isinstance(
+ tokens[-(idx + 1)], (ValueToken, BlockEntryToken)
+ ):
+ try:
+ eol_idx = self.unused.pop(0)
+ self.comments[eol_idx].set_used()
+ tokens[-idx].add_comment_eol(eol_idx, VALUECMNT)
+ except IndexError:
+ raise NotImplementedError
+ return
+ except IndexError:
+ xprintf('IndexError2')
+ pass
+ for t in tokens:
+ xprintf('tt-', t)
+ xprintf('not implemented EOL', type(tokens[-idx]))
+ import sys
+
+ sys.exit(0)
+
+ def assign_post(self, token: Any) -> Any:
+ token_line = token.start_mark.line
+ info = inspect.getframeinfo(inspect.stack()[1][0])
+ xprintf('assign_post', token_line, self.unused, info.function, info.lineno)
+ gobbled = False
+ while self.unused and self.unused[0] < token_line:
+ gobbled = True
+ first = self.unused.pop(0)
+ xprintf('assign_post < ', first)
+ self.comments[first].set_used()
+ token.add_comment_post(first)
+ return gobbled
+
+ def str_unprocessed(self) -> Any:
+ return ''.join(
+ (f' {ind:2} {x.info()}\n' for ind, x in self.comments.items() if x.used == ' ')
+ )
+
+
+class RoundTripScannerSC(Scanner): # RoundTripScanner Split Comments
+ def __init__(self, *arg: Any, **kw: Any) -> None:
+ super().__init__(*arg, **kw)
+ assert self.loader is not None
+ # self.comments is initialised in .need_more_tokens and persists on
+ # self.loader.parsed_comments
+ self.comments = None
+
+ def get_token(self) -> Any:
+ # Return the next token.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if len(self.tokens) > 0:
+ if isinstance(self.tokens[0], BlockEndToken):
+ self.comments.assign_post(self.tokens[0]) # type: ignore
+ else:
+ self.comments.assign_pre(self.tokens[0]) # type: ignore
+ self.tokens_taken += 1
+ return self.tokens.pop(0)
+
+ def need_more_tokens(self) -> bool:
+ if self.comments is None:
+ self.loader.parsed_comments = self.comments = ScannedComments() # type: ignore
+ if self.done:
+ return False
+ if len(self.tokens) == 0:
+ return True
+ # The current token may be a potential simple key, so we
+ # need to look further.
+ self.stale_possible_simple_keys()
+ if self.next_possible_simple_key() == self.tokens_taken:
+ return True
+ if len(self.tokens) < 2:
+ return True
+ if self.tokens[0].start_mark.line == self.tokens[-1].start_mark.line:
+ return True
+ if True:
+ xprintf('-x--', len(self.tokens))
+ for t in self.tokens:
+ xprintf(t)
+ # xprintf(self.comments.last())
+ xprintf(self.comments.str_unprocessed()) # type: ignore
+ self.comments.assign_pre(self.tokens[0]) # type: ignore
+ self.comments.assign_eol(self.tokens) # type: ignore
+ return False
+
+ def scan_to_next_token(self) -> None:
+ srp = self.reader.peek
+ srf = self.reader.forward
+ if self.reader.index == 0 and srp() == '\uFEFF':
+ srf()
+ start_mark = self.reader.get_mark()
+ # xprintf('current_mark', start_mark.line, start_mark.column)
+ found = False
+ while not found:
+ while srp() == ' ':
+ srf()
+ ch = srp()
+ if ch == '#':
+ comment_start_mark = self.reader.get_mark()
+ comment = ch
+ srf() # skip the '#'
+ while ch not in _THE_END:
+ ch = srp()
+ if ch == '\0': # don't gobble the end-of-stream character
+ # but add an explicit newline as "YAML processors should terminate
+ # the stream with an explicit line break
+ # https://yaml.org/spec/1.2/spec.html#id2780069
+ comment += '\n'
+ break
+ comment += ch
+ srf()
+ # we have a comment
+ if start_mark.column == 0:
+ self.comments.add_full_line_comment( # type: ignore
+ comment, comment_start_mark.column, comment_start_mark.line
+ )
+ else:
+ self.comments.add_eol_comment( # type: ignore
+ comment, comment_start_mark.column, comment_start_mark.line
+ )
+ comment = ""
+ # gather any blank lines or full line comments following the comment as well
+ self.scan_empty_or_full_line_comments()
+ if not self.flow_level:
+ self.allow_simple_key = True
+ return
+ if bool(self.scan_line_break()):
+ # start_mark = self.reader.get_mark()
+ if not self.flow_level:
+ self.allow_simple_key = True
+ self.scan_empty_or_full_line_comments()
+ return None
+ ch = srp()
+ if ch == '\n': # empty toplevel lines
+ start_mark = self.reader.get_mark()
+ comment = ""
+ while ch:
+ ch = self.scan_line_break(empty_line=True)
+ comment += ch
+ if srp() == '#':
+ # empty line followed by indented real comment
+ comment = comment.rsplit('\n', 1)[0] + '\n'
+ _ = self.reader.get_mark() # gobble end_mark
+ return None
+ else:
+ found = True
+ return None
+
+ def scan_empty_or_full_line_comments(self) -> None:
+ blmark = self.reader.get_mark()
+ assert blmark.column == 0
+ blanks = ""
+ comment = None
+ mark = None
+ ch = self.reader.peek()
+ while True:
+ # nprint('ch', repr(ch), self.reader.get_mark().column)
+ if ch in '\r\n\x85\u2028\u2029':
+ if self.reader.prefix(2) == '\r\n':
+ self.reader.forward(2)
+ else:
+ self.reader.forward()
+ if comment is not None:
+ comment += '\n'
+ self.comments.add_full_line_comment(comment, mark.column, mark.line)
+ comment = None
+ else:
+ blanks += '\n'
+ self.comments.add_blank_line(blanks, blmark.column, blmark.line) # type: ignore # NOQA
+ blanks = ""
+ blmark = self.reader.get_mark()
+ ch = self.reader.peek()
+ continue
+ if comment is None:
+ if ch in ' \t':
+ blanks += ch
+ elif ch == '#':
+ mark = self.reader.get_mark()
+ comment = '#'
+ else:
+ # xprintf('breaking on', repr(ch))
+ break
+ else:
+ comment += ch
+ self.reader.forward()
+ ch = self.reader.peek()
+
+ def scan_block_scalar_ignored_line(self, start_mark: Any) -> Any:
+ # See the specification for details.
+ srp = self.reader.peek
+ srf = self.reader.forward
+ prefix = ''
+ comment = None
+ while srp() == ' ':
+ prefix += srp()
+ srf()
+ if srp() == '#':
+ comment = ''
+ mark = self.reader.get_mark()
+ while srp() not in _THE_END:
+ comment += srp()
+ srf()
+ comment += '\n' # type: ignore
+ ch = srp()
+ if ch not in _THE_END:
+ raise ScannerError(
+ 'while scanning a block scalar',
+ start_mark,
+ f'expected a comment or a line break, but found {ch!r}',
+ self.reader.get_mark(),
+ )
+ if comment is not None:
+ self.comments.add_eol_comment(comment, mark.column, mark.line) # type: ignore
+ self.scan_line_break()
+ return None
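The scanner changes above serve ruamel.yaml's comment-preserving round-trip mode: comments scanned here end up attached to tokens (or, on the experimental split-comment path, collected in ScannedComments) so they survive a load/dump cycle. A minimal sketch of that behaviour, using only the public YAML API and not code from this diff:

    import sys
    from ruamel.yaml import YAML

    yaml = YAML()                      # default typ='rt' round-trips comments
    data = yaml.load("a: 1  # keep me\nb: 2\n")
    data['b'] = 3
    yaml.dump(data, sys.stdout)        # the '# keep me' comment is emitted again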
diff --git a/serializer.py b/serializer.py
index a37885c..0034240 100644
--- a/serializer.py
+++ b/serializer.py
@@ -1,9 +1,7 @@
# coding: utf-8
-from __future__ import absolute_import
-
from ruamel.yaml.error import YAMLError
-from ruamel.yaml.compat import nprint, DBG_NODE, dbg, string_types, nprintf # NOQA
+from ruamel.yaml.compat import nprint, DBG_NODE, dbg, nprintf # NOQA
from ruamel.yaml.util import RegExp
from ruamel.yaml.events import (
@@ -20,9 +18,8 @@ from ruamel.yaml.events import (
)
from ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode
-if False: # MYPY
- from typing import Any, Dict, Union, Text, Optional # NOQA
- from ruamel.yaml.compat import VersionType # NOQA
+from typing import Any, Dict, Union, Text, Optional # NOQA
+from ruamel.yaml.compat import VersionType # NOQA
__all__ = ['Serializer', 'SerializerError']
@@ -31,55 +28,52 @@ class SerializerError(YAMLError):
pass
-class Serializer(object):
+class Serializer:
# 'id' and 3+ numbers, but not 000
- ANCHOR_TEMPLATE = u'id%03d'
- ANCHOR_RE = RegExp(u'id(?!000$)\\d{3,}')
+ ANCHOR_TEMPLATE = 'id{:03d}'
+ ANCHOR_RE = RegExp('id(?!000$)\\d{3,}')
def __init__(
self,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- dumper=None,
- ):
- # type: (Any, Optional[bool], Optional[bool], Optional[VersionType], Any, Any) -> None # NOQA
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[VersionType] = None,
+ tags: Any = None,
+ dumper: Any = None,
+ ) -> None:
+ # NOQA
self.dumper = dumper
if self.dumper is not None:
self.dumper._serializer = self
self.use_encoding = encoding
self.use_explicit_start = explicit_start
self.use_explicit_end = explicit_end
- if isinstance(version, string_types):
+ if isinstance(version, str):
self.use_version = tuple(map(int, version.split('.')))
else:
self.use_version = version # type: ignore
self.use_tags = tags
- self.serialized_nodes = {} # type: Dict[Any, Any]
- self.anchors = {} # type: Dict[Any, Any]
+ self.serialized_nodes: Dict[Any, Any] = {}
+ self.anchors: Dict[Any, Any] = {}
self.last_anchor_id = 0
- self.closed = None # type: Optional[bool]
+ self.closed: Optional[bool] = None
self._templated_id = None
@property
- def emitter(self):
- # type: () -> Any
+ def emitter(self) -> Any:
if hasattr(self.dumper, 'typ'):
return self.dumper.emitter
return self.dumper._emitter
@property
- def resolver(self):
- # type: () -> Any
+ def resolver(self) -> Any:
if hasattr(self.dumper, 'typ'):
self.dumper.resolver
return self.dumper._resolver
- def open(self):
- # type: () -> None
+ def open(self) -> None:
if self.closed is None:
self.emitter.emit(StreamStartEvent(encoding=self.use_encoding))
self.closed = False
@@ -88,8 +82,7 @@ class Serializer(object):
else:
raise SerializerError('serializer is already opened')
- def close(self):
- # type: () -> None
+ def close(self) -> None:
if self.closed is None:
raise SerializerError('serializer is not opened')
elif not self.closed:
@@ -99,8 +92,7 @@ class Serializer(object):
# def __del__(self):
# self.close()
- def serialize(self, node):
- # type: (Any) -> None
+ def serialize(self, node: Any) -> None:
if dbg(DBG_NODE):
nprint('Serializing nodes')
node.dump()
@@ -120,8 +112,7 @@ class Serializer(object):
self.anchors = {}
self.last_anchor_id = 0
- def anchor_node(self, node):
- # type: (Any) -> None
+ def anchor_node(self, node: Any) -> None:
if node in self.anchors:
if self.anchors[node] is None:
self.anchors[node] = self.generate_anchor(node)
@@ -141,22 +132,23 @@ class Serializer(object):
self.anchor_node(key)
self.anchor_node(value)
- def generate_anchor(self, node):
- # type: (Any) -> Any
+ def generate_anchor(self, node: Any) -> Any:
try:
anchor = node.anchor.value
except: # NOQA
anchor = None
if anchor is None:
self.last_anchor_id += 1
- return self.ANCHOR_TEMPLATE % self.last_anchor_id
+ return self.ANCHOR_TEMPLATE.format(self.last_anchor_id)
return anchor
- def serialize_node(self, node, parent, index):
- # type: (Any, Any, Any) -> None
+ def serialize_node(self, node: Any, parent: Any, index: Any) -> None:
alias = self.anchors[node]
if node in self.serialized_nodes:
- self.emitter.emit(AliasEvent(alias))
+ node_style = getattr(node, 'style', None)
+ if node_style != '?':
+ node_style = None
+ self.emitter.emit(AliasEvent(alias, style=node_style))
else:
self.serialized_nodes[node] = True
self.resolver.descend_resolver(parent, index)
@@ -235,6 +227,5 @@ class Serializer(object):
self.resolver.ascend_resolver()
-def templated_id(s):
- # type: (Text) -> Any
+def templated_id(s: Text) -> Any:
return Serializer.ANCHOR_RE.match(s)
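The anchor-template switch from %-formatting to str.format keeps the generated ids identical, so ANCHOR_RE still matches them. An illustrative check, not taken from the diff:

    assert 'id%03d' % 7 == 'id{:03d}'.format(7) == 'id007'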
diff --git a/setup.py b/setup.py
index 40aba3a..50f4d1e 100644
--- a/setup.py
+++ b/setup.py
@@ -1,15 +1,13 @@
# # header
# coding: utf-8
-# dd: 20200903
-
-from __future__ import print_function, absolute_import, division, unicode_literals
+# dd: 20230418
# # __init__.py parser
import sys
import os
import datetime
-import traceback
+from textwrap import dedent
sys.path = [path for path in sys.path if path not in [os.getcwd(), ""]]
import platform # NOQA
@@ -20,13 +18,13 @@ from setuptools import setup, Extension, Distribution # NOQA
from setuptools.command import install_lib # NOQA
from setuptools.command.sdist import sdist as _sdist # NOQA
-try:
- from setuptools.namespaces import Installer as NameSpaceInstaller # NOQA
-except ImportError:
- msg = ('You should use the latest setuptools. The namespaces.py file that this setup.py'
- ' uses was added in setuptools 28.7.0 (Oct 2016)')
- print(msg)
- sys.exit()
+# try:
+# from setuptools.namespaces import Installer as NameSpaceInstaller # NOQA
+# except ImportError:
+# msg = ('You should use the latest setuptools. The namespaces.py file that this setup.py'
+# ' uses was added in setuptools 28.7.0 (Oct 2016)')
+# print(msg)
+# sys.exit()
if __name__ != '__main__':
raise NotImplementedError('should never include setup.py')
@@ -50,10 +48,6 @@ if sys.version_info < (3, 4):
pass
-if sys.version_info >= (3, 8):
- from ast import Str, Num, Bytes, NameConstant # NOQA
-
-
if sys.version_info < (3,):
open_kw = dict()
else:
@@ -81,6 +75,9 @@ else:
print('{:%Y-%d-%mT%H:%M:%S}'.format(datetime.datetime.now()), file=fp, end=' ')
print(*args, **kw1)
+if sys.version_info >= (3, 8):
+ from ast import Str, Num, Bytes, NameConstant # NOQA
+
def literal_eval(node_or_string):
"""
@@ -279,8 +276,7 @@ class MyInstallLib(install_lib.install_lib):
class MySdist(_sdist):
def initialize_options(self):
_sdist.initialize_options(self)
- # see pep 527, new uploads should be tar.gz or .zip
- # fmt = getattr(self, 'tarfmt', None)
+ # failed experiment, see pep 527, new uploads should be tar.gz or .zip
# because of unicode_literals
# self.formats = fmt if fmt else [b'bztar'] if sys.version_info < (3, ) else ['bztar']
dist_base = os.environ.get('PYDISTBASE')
@@ -318,22 +314,22 @@ class NameSpacePackager(object):
self._split = None
self.depth = self.full_package_name.count('.')
self.nested = self._pkg_data.get('nested', False)
- if self.nested:
- NameSpaceInstaller.install_namespaces = lambda x: None
+ # if self.nested:
+ # NameSpaceInstaller.install_namespaces = lambda x: None
self.command = None
self.python_version()
self._pkg = [None, None] # required and pre-installable packages
- if (
- sys.argv[0] == 'setup.py'
- and sys.argv[1] == 'install'
- and '--single-version-externally-managed' not in sys.argv
- ):
- if os.environ.get('READTHEDOCS', None) == 'True':
- os.system('pip install .')
- sys.exit(0)
- if not os.environ.get('RUAMEL_NO_PIP_INSTALL_CHECK', False):
- print('error: you have to install with "pip install ."')
- sys.exit(1)
+ if sys.argv[0] == 'setup.py' and sys.argv[1] == 'install':
+ debug('calling setup.py', sys.argv)
+ if '-h' in sys.argv:
+ pass
+ elif '--single-version-externally-managed' not in sys.argv:
+ if os.environ.get('READTHEDOCS', None) == 'True':
+ os.system('pip install .')
+ sys.exit(0)
+ if not os.environ.get('RUAMEL_NO_PIP_INSTALL_CHECK', False):
+ print('error: you have to install with "pip install ."')
+ sys.exit(1)
# If you only support an extension module on Linux, Windows thinks it
# is pure. That way you would get pure python .whl files that take
# precedence for downloading on Linux over source with compilable C code
@@ -388,9 +384,6 @@ class NameSpacePackager(object):
return self._split
@property
- def namespace_packages(self):
- return self.split[: self.depth]
-
def namespace_directories(self, depth=None):
"""return list of directories where the namespace should be created /
can be found
@@ -411,23 +404,11 @@ class NameSpacePackager(object):
}
if 'extra_packages' in self._pkg_data:
return d
- if len(self.split) > 1: # only if package namespace
- d[self.split[0]] = self.namespace_directories(1)[0]
+ # if len(self.split) > 1: # only if package namespace
+ # d[self.split[0]] = self.namespace_directories(1)[0]
+ # print('d', d, os.getcwd())
return d
- def create_dirs(self):
- """create the directories necessary for namespace packaging"""
- directories = self.namespace_directories(self.depth)
- if not directories:
- return
- if not os.path.exists(directories[0]):
- for d in directories:
- os.mkdir(d)
- with open(os.path.join(d, '__init__.py'), 'w') as fp:
- fp.write(
- 'import pkg_resources\n' 'pkg_resources.declare_namespace(__name__)\n'
- )
-
def python_version(self):
supported = self._pkg_data.get('supported')
if supported is None:
@@ -528,7 +509,7 @@ class NameSpacePackager(object):
if package_name is None:
package_name = self.full_package_name
if not script_name:
- script_name = package_name.split('.')[-1]
+ script_name = package_name.rsplit('.', 1)[-1]
return {
'console_scripts': [
'{0} = {1}'.format(script_name, pckg_entry_point(package_name))
@@ -719,7 +700,8 @@ class NameSpacePackager(object):
@property
def packages(self):
- s = self.split
+ # s = self.split
+ s = [self._pkg_data['full_package_name']]
# fixed this in package_data, the keys there must be non-unicode for py27
# if sys.version_info < (3, 0):
# s = [x.encode('utf-8') for x in self.split]
@@ -755,7 +737,7 @@ class NameSpacePackager(object):
except ValueError:
pass
self._ext_modules = []
- no_test_compile = False
+ no_test_compile = True
if '--restructuredtext' in sys.argv:
no_test_compile = True
elif 'sdist' in sys.argv:
@@ -769,75 +751,7 @@ class NameSpacePackager(object):
)
self._ext_modules.append(ext)
return self._ext_modules
-
- print('sys.argv', sys.argv)
- import tempfile
- import shutil
- from textwrap import dedent
-
- import distutils.sysconfig
- import distutils.ccompiler
- from distutils.errors import CompileError, LinkError
-
- for target in self._pkg_data.get('ext_modules', []): # list of dicts
- ext = Extension(
- self.pn(target['name']),
- sources=[self.pn(x) for x in target['src']],
- libraries=[self.pn(x) for x in target.get('lib')],
- )
- # debug('test1 in target', 'test' in target, target)
- if 'test' not in target: # no test, just hope it works
- self._ext_modules.append(ext)
- continue
- if sys.version_info[:2] == (3, 4) and platform.system() == 'Windows':
- # this is giving problems on appveyor, so skip
- if 'FORCE_C_BUILD_TEST' not in os.environ:
- self._ext_modules.append(ext)
- continue
- # write a temporary .c file to compile
- c_code = dedent(target['test'])
- try:
- tmp_dir = tempfile.mkdtemp(prefix='tmp_ruamel_')
- bin_file_name = 'test' + self.pn(target['name'])
- print('test compiling', bin_file_name)
- file_name = os.path.join(tmp_dir, bin_file_name + '.c')
- with open(file_name, 'w') as fp: # write source
- fp.write(c_code)
- # and try to compile it
- compiler = distutils.ccompiler.new_compiler()
- assert isinstance(compiler, distutils.ccompiler.CCompiler)
- # do any platform specific initialisations
- distutils.sysconfig.customize_compiler(compiler)
- # make sure you can reach header files because compile does change dir
- compiler.add_include_dir(os.getcwd())
- if sys.version_info < (3,):
- tmp_dir = tmp_dir.encode('utf-8')
- # used to be a different directory, not necessary
- compile_out_dir = tmp_dir
- try:
- compiler.link_executable(
- compiler.compile([file_name], output_dir=compile_out_dir),
- bin_file_name,
- output_dir=tmp_dir,
- libraries=ext.libraries,
- )
- except CompileError:
- debug('compile error:', file_name)
- print('compile error:', file_name)
- continue
- except LinkError:
- debug('link error', file_name)
- print('link error', file_name)
- continue
- self._ext_modules.append(ext)
- except Exception as e: # NOQA
- debug('Exception:', e)
- print('Exception:', e)
- if sys.version_info[:2] == (3, 4) and platform.system() == 'Windows':
- traceback.print_exc()
- finally:
- shutil.rmtree(tmp_dir)
- return self._ext_modules
+ # this used to use distutils
@property
def test_suite(self):
@@ -853,10 +767,6 @@ class NameSpacePackager(object):
if os.path.exists(file_name): # add it if not in there?
return False
with open(file_name, 'w') as fp:
- if os.path.exists('LICENSE'):
- fp.write('[metadata]\nlicense-file = LICENSE\n')
- else:
- print('\n\n>>>>>> LICENSE file not found <<<<<\n\n')
if self._pkg_data.get('universal'):
fp.write('[bdist_wheel]\nuniversal = 1\n')
try:
@@ -868,25 +778,72 @@ class NameSpacePackager(object):
return True
-# # call setup
+class TmpFiles:
+ def __init__(self, pkg_data, py_project=True, keep=False):
+ self._rm_after = []
+ self._pkg_data = pkg_data
+ self._py_project = py_project
+ self._bdist_wheel = 'bdist_wheel' in sys.argv
+ self._keep = keep
+
+ def __enter__(self):
+ self.bdist_wheel()
+ return
+ self.py_project()
+
+ def bdist_wheel(self):
+ """pyproject doesn't allow for universal, so use setup.cfg if necessary
+ """
+ file_name = 'setup.cfg'
+ if not self._bdist_wheel or os.path.exists(file_name):
+ return
+ if self._pkg_data.get('universal'):
+ self._rm_after.append(file_name)
+ with open(file_name, 'w') as fp:
+ fp.write('[bdist_wheel]\nuniversal = 1\n')
+
+ def py_project(self):
+ """
+ to prevent pip from complaining, or is it too late to create it from setup.py
+ """
+ file_name = 'pyproject.toml'
+ if not self._py_project or os.path.exists(file_name):
+ return
+ self._rm_after.append(file_name)
+ with open(file_name, 'w') as fp:
+ fp.write(dedent("""\
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ # test
+ build-backend = "setuptools.build_meta"
+ """))
+
+ def __exit__(self, typ, value, traceback):
+ if self._keep:
+ return
+ for p in self._rm_after:
+ if not os.path.exists(p):
+ print('file {} already removed'.format(p))
+ else:
+ os.unlink(p)
+
+
+# call setup
def main():
dump_kw = '--dump-kw'
if dump_kw in sys.argv:
import wheel
- import distutils
import setuptools
+ import pip
print('python: ', sys.version)
+ print('pip: ', pip.__version__)
print('setuptools:', setuptools.__version__)
- print('distutils: ', distutils.__version__)
print('wheel: ', wheel.__version__)
nsp = NameSpacePackager(pkg_data)
nsp.check()
- nsp.create_dirs()
+ # nsp.create_dirs()
MySdist.nsp = nsp
- if pkg_data.get('tarfmt'):
- MySdist.tarfmt = pkg_data.get('tarfmt')
-
cmdclass = dict(install_lib=MyInstallLib, sdist=MySdist)
if _bdist_wheel_available:
MyBdistWheel.nsp = nsp
@@ -894,7 +851,6 @@ def main():
kw = dict(
name=nsp.full_package_name,
- namespace_packages=nsp.namespace_packages,
version=version_str,
packages=nsp.packages,
python_requires=nsp.python_requires,
@@ -913,12 +869,13 @@ def main():
package_data=nsp.package_data,
ext_modules=nsp.ext_modules,
test_suite=nsp.test_suite,
+ zip_safe=False,
)
if '--version' not in sys.argv and ('--verbose' in sys.argv or dump_kw in sys.argv):
for k in sorted(kw):
v = kw[k]
- print(' "{0}": "{1}",'.format(k, v))
+ print(' "{0}": {1},'.format(k, repr(v)))
# if '--record' in sys.argv:
# return
if dump_kw in sys.argv:
@@ -930,31 +887,33 @@ def main():
except Exception:
pass
- if nsp.wheel(kw, setup):
- return
- for x in ['-c', 'egg_info', '--egg-base', 'pip-egg-info']:
- if x not in sys.argv:
- break
- else:
- # we're doing a tox setup install any starred package by searching up the source tree
- # until you match your/package/name for your.package.name
- for p in nsp.install_pre:
- import subprocess
-
- # search other source
- setup_path = os.path.join(*p.split('.') + ['setup.py'])
- try_dir = os.path.dirname(sys.executable)
- while len(try_dir) > 1:
- full_path_setup_py = os.path.join(try_dir, setup_path)
- if os.path.exists(full_path_setup_py):
- pip = sys.executable.replace('python', 'pip')
- cmd = [pip, 'install', os.path.dirname(full_path_setup_py)]
- # with open('/var/tmp/notice', 'a') as fp:
- # print('installing', cmd, file=fp)
- subprocess.check_output(cmd)
- break
- try_dir = os.path.dirname(try_dir)
- setup(**kw)
+ # if nsp.wheel(kw, setup):
+ # return
+ with TmpFiles(pkg_data, keep=True):
+ for x in ['-c', 'egg_info', '--egg-base', 'pip-egg-info']:
+ if x not in sys.argv:
+ break
+ else:
+ # we're doing a tox setup install any starred package by searching up the
+ # source tree until you match your/package/name for your.package.name
+ for p in nsp.install_pre:
+ import subprocess
+
+ # search other source
+ setup_path = os.path.join(*p.split('.') + ['setup.py'])
+ try_dir = os.path.dirname(sys.executable)
+ while len(try_dir) > 1:
+ full_path_setup_py = os.path.join(try_dir, setup_path)
+ if os.path.exists(full_path_setup_py):
+ pip = sys.executable.replace('python', 'pip')
+ cmd = [pip, 'install', os.path.dirname(full_path_setup_py)]
+ # with open('/var/tmp/notice', 'a') as fp:
+ # print('installing', cmd, file=fp)
+ subprocess.check_output(cmd)
+ break
+ try_dir = os.path.dirname(try_dir)
+ setup(**kw)
+ print('done')
main()
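Among the smaller setup.py changes, the entry-point script name now uses rsplit('.', 1) instead of split('.'); for a dotted package name the last component is the same either way, rsplit simply avoids splitting the whole string. Illustrative check with a sample dotted name:

    assert 'ruamel.yaml.cmd'.rsplit('.', 1)[-1] == 'ruamel.yaml.cmd'.split('.')[-1] == 'cmd'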
diff --git a/timestamp.py b/timestamp.py
index 374e4c0..4ab695f 100644
--- a/timestamp.py
+++ b/timestamp.py
@@ -1,28 +1,56 @@
# coding: utf-8
-from __future__ import print_function, absolute_import, division, unicode_literals
-
import datetime
import copy
# ToDo: at least on PY3 you could probably attach the tzinfo correctly to the object
# a more complete datetime might be used by safe loading as well
-if False: # MYPY
- from typing import Any, Dict, Optional, List # NOQA
+from typing import Any, Dict, Optional, List # NOQA
class TimeStamp(datetime.datetime):
- def __init__(self, *args, **kw):
- # type: (Any, Any) -> None
- self._yaml = dict(t=False, tz=None, delta=0) # type: Dict[Any, Any]
+ def __init__(self, *args: Any, **kw: Any) -> None:
+ self._yaml: Dict[Any, Any] = dict(t=False, tz=None, delta=0)
- def __new__(cls, *args, **kw): # datetime is immutable
- # type: (Any, Any) -> Any
- return datetime.datetime.__new__(cls, *args, **kw) # type: ignore
+ def __new__(cls, *args: Any, **kw: Any) -> Any: # datetime is immutable
+ return datetime.datetime.__new__(cls, *args, **kw)
- def __deepcopy__(self, memo):
- # type: (Any) -> Any
+ def __deepcopy__(self, memo: Any) -> Any:
ts = TimeStamp(self.year, self.month, self.day, self.hour, self.minute, self.second)
ts._yaml = copy.deepcopy(self._yaml)
return ts
+
+ def replace(
+ self,
+ year: Any = None,
+ month: Any = None,
+ day: Any = None,
+ hour: Any = None,
+ minute: Any = None,
+ second: Any = None,
+ microsecond: Any = None,
+ tzinfo: Any = True,
+ fold: Any = None,
+ ) -> Any:
+ if year is None:
+ year = self.year
+ if month is None:
+ month = self.month
+ if day is None:
+ day = self.day
+ if hour is None:
+ hour = self.hour
+ if minute is None:
+ minute = self.minute
+ if second is None:
+ second = self.second
+ if microsecond is None:
+ microsecond = self.microsecond
+ if tzinfo is True:
+ tzinfo = self.tzinfo
+ if fold is None:
+ fold = self.fold
+ ts = type(self)(year, month, day, hour, minute, second, microsecond, tzinfo, fold=fold)
+ ts._yaml = copy.deepcopy(self._yaml)
+ return ts
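TimeStamp.replace() is new: like __deepcopy__, it carries the private _yaml bookkeeping over to the new object instead of leaving it at the defaults that __init__ would set. A minimal sketch, with an illustrative attribute value only:

    from ruamel.yaml.timestamp import TimeStamp

    ts = TimeStamp(2023, 4, 18, 12, 0, 0)
    ts._yaml['delta'] = 3600            # illustrative value
    ts2 = ts.replace(minute=30)
    assert isinstance(ts2, TimeStamp)
    assert ts2.minute == 30 and ts2._yaml['delta'] == 3600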
diff --git a/tokens.py b/tokens.py
index 5f5a663..0cf37f2 100644
--- a/tokens.py
+++ b/tokens.py
@@ -1,65 +1,118 @@
-# # header
# coding: utf-8
-from __future__ import unicode_literals
+from ruamel.yaml.compat import nprintf # NOQA
-if False: # MYPY
- from typing import Text, Any, Dict, Optional, List # NOQA
- from .error import StreamMark # NOQA
+from typing import Text, Any, Dict, Optional, List # NOQA
+from .error import StreamMark # NOQA
-SHOWLINES = True
+SHOW_LINES = True
-class Token(object):
+class Token:
__slots__ = 'start_mark', 'end_mark', '_comment'
- def __init__(self, start_mark, end_mark):
- # type: (StreamMark, StreamMark) -> None
+ def __init__(self, start_mark: StreamMark, end_mark: StreamMark) -> None:
self.start_mark = start_mark
self.end_mark = end_mark
- def __repr__(self):
- # type: () -> Any
+ def __repr__(self) -> Any:
# attributes = [key for key in self.__slots__ if not key.endswith('_mark') and
# hasattr('self', key)]
attributes = [key for key in self.__slots__ if not key.endswith('_mark')]
attributes.sort()
- arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) for key in attributes])
- if SHOWLINES:
+ # arguments = ', '.join(
+ # [f'{key!s}={getattr(self, key)!r})' for key in attributes]
+ # )
+ arguments = [f'{key!s}={getattr(self, key)!r}' for key in attributes]
+ if SHOW_LINES:
try:
- arguments += ', line: ' + str(self.start_mark.line)
+ arguments.append('line: ' + str(self.start_mark.line))
except: # NOQA
pass
try:
- arguments += ', comment: ' + str(self._comment)
+ arguments.append('comment: ' + str(self._comment))
except: # NOQA
pass
- return '{}({})'.format(self.__class__.__name__, arguments)
+ return f'{self.__class__.__name__}({", ".join(arguments)})'
- def add_post_comment(self, comment):
- # type: (Any) -> None
+ @property
+ def column(self) -> int:
+ return self.start_mark.column
+
+ @column.setter
+ def column(self, pos: Any) -> None:
+ self.start_mark.column = pos
+
+    # old style (<= 0.17) is a TWO element list, the first being the EOL
+    # comment concatenated with following FLC/BLNK, the second a list of FLC/BLNK
+    # preceding the token
+    # new style (>= 0.17) is a THREE element list, the first being a list of
+    # preceding FLC/BLNK, the second the EOL comment and the third the following FLC/BLNK
+    # note that the new style has a different order, and does not consist of CommentToken(s)
+ # but of CommentInfo instances
+ # any non-assigned values in new style are None, but first and last can be empty list
+ # new style routines add one comment at a time
+
+ # going to be deprecated in favour of add_comment_eol/post
+ def add_post_comment(self, comment: Any) -> None:
if not hasattr(self, '_comment'):
self._comment = [None, None]
+ else:
+ assert len(self._comment) in [2, 5] # make sure it is version 0
+ # if isinstance(comment, CommentToken):
+ # if comment.value.startswith('# C09'):
+ # raise
self._comment[0] = comment
- def add_pre_comments(self, comments):
- # type: (Any) -> None
+ # going to be deprecated in favour of add_comment_pre
+ def add_pre_comments(self, comments: Any) -> None:
if not hasattr(self, '_comment'):
self._comment = [None, None]
+ else:
+ assert len(self._comment) == 2 # make sure it is version 0
assert self._comment[1] is None
self._comment[1] = comments
+ return
- def get_comment(self):
- # type: () -> Any
- return getattr(self, '_comment', None)
+ # new style
+ def add_comment_pre(self, comment: Any) -> None:
+ if not hasattr(self, '_comment'):
+ self._comment = [[], None, None] # type: ignore
+ else:
+ assert len(self._comment) == 3
+ if self._comment[0] is None:
+ self._comment[0] = [] # type: ignore
+ self._comment[0].append(comment) # type: ignore
+
+ def add_comment_eol(self, comment: Any, comment_type: Any) -> None:
+ if not hasattr(self, '_comment'):
+ self._comment = [None, None, None]
+ else:
+ assert len(self._comment) == 3
+ assert self._comment[1] is None
+ if self.comment[1] is None:
+ self._comment[1] = [] # type: ignore
+ self._comment[1].extend([None] * (comment_type + 1 - len(self.comment[1]))) # type: ignore # NOQA
+ # nprintf('commy', self.comment, comment_type)
+ self._comment[1][comment_type] = comment # type: ignore
+
+ def add_comment_post(self, comment: Any) -> None:
+ if not hasattr(self, '_comment'):
+ self._comment = [None, None, []] # type: ignore
+ else:
+ assert len(self._comment) == 3
+ if self._comment[2] is None:
+ self._comment[2] = [] # type: ignore
+ self._comment[2].append(comment) # type: ignore
+
+ # def get_comment(self) -> Any:
+ # return getattr(self, '_comment', None)
@property
- def comment(self):
- # type: () -> Any
+ def comment(self) -> Any:
return getattr(self, '_comment', None)
- def move_comment(self, target, empty=False):
- # type: (Any, bool) -> Any
+ def move_old_comment(self, target: Any, empty: bool = False) -> Any:
"""move a comment from this token to target (normally next token)
used to combine e.g. comments before a BlockEntryToken to the
ScalarToken that follows it
@@ -81,15 +134,14 @@ class Token(object):
# nprint('mco2:', self, target, target.comment, empty)
return self
if c[0] and tc[0] or c[1] and tc[1]:
- raise NotImplementedError('overlap in comment %r %r' % (c, tc))
+ raise NotImplementedError(f'overlap in comment {c!r} {tc!r}')
if c[0]:
tc[0] = c[0]
if c[1]:
tc[1] = c[1]
return self
- def split_comment(self):
- # type: () -> Any
+ def split_old_comment(self) -> Any:
""" split the post part of a comment, and return it
as comment to be added. Delete second part if [None, None]
abc: # this goes to sequence
@@ -104,6 +156,37 @@ class Token(object):
delattr(self, '_comment')
return ret_val
+ def move_new_comment(self, target: Any, empty: bool = False) -> Any:
+ """move a comment from this token to target (normally next token)
+ used to combine e.g. comments before a BlockEntryToken to the
+ ScalarToken that follows it
+        empty is a special case for empty values -> comment after key
+ """
+ c = self.comment
+ if c is None:
+ return
+ # don't push beyond last element
+ if isinstance(target, (StreamEndToken, DocumentStartToken)):
+ return
+ delattr(self, '_comment')
+ tc = target.comment
+ if not tc: # target comment, just insert
+ # special for empty value in key: value issue 25
+ if empty:
+ c = [c[0], c[1], c[2]]
+ target._comment = c
+ # nprint('mco2:', self, target, target.comment, empty)
+ return self
+        # if self and target both have pre, eol or post comments, something seems wrong
+ for idx in range(3):
+ if c[idx] is not None and tc[idx] is not None:
+ raise NotImplementedError(f'overlap in comment {c!r} {tc!r}')
+ # move the comment parts
+ for idx in range(3):
+ if c[idx]:
+ tc[idx] = c[idx]
+ return self
+
# class BOMToken(Token):
# id = '<byte order mark>'
@@ -113,8 +196,7 @@ class DirectiveToken(Token):
__slots__ = 'name', 'value'
id = '<directive>'
- def __init__(self, name, value, start_mark, end_mark):
- # type: (Any, Any, Any, Any) -> None
+ def __init__(self, name: Any, value: Any, start_mark: Any, end_mark: Any) -> None:
Token.__init__(self, start_mark, end_mark)
self.name = name
self.value = value
@@ -134,8 +216,9 @@ class StreamStartToken(Token):
__slots__ = ('encoding',)
id = '<stream start>'
- def __init__(self, start_mark=None, end_mark=None, encoding=None):
- # type: (Any, Any, Any) -> None
+ def __init__(
+ self, start_mark: Any = None, end_mark: Any = None, encoding: Any = None
+ ) -> None:
Token.__init__(self, start_mark, end_mark)
self.encoding = encoding
@@ -184,9 +267,8 @@ class KeyToken(Token):
__slots__ = ()
id = '?'
- # def x__repr__(self):
- # return 'KeyToken({})'.format(
- # self.start_mark.buffer[self.start_mark.index:].split(None, 1)[0])
+# def x__repr__(self):
+# return f'KeyToken({self.start_mark.buffer[self.start_mark.index:].split(None, 1)[0]})'
class ValueToken(Token):
@@ -208,8 +290,7 @@ class AliasToken(Token):
__slots__ = ('value',)
id = '<alias>'
- def __init__(self, value, start_mark, end_mark):
- # type: (Any, Any, Any) -> None
+ def __init__(self, value: Any, start_mark: Any, end_mark: Any) -> None:
Token.__init__(self, start_mark, end_mark)
self.value = value
@@ -218,8 +299,7 @@ class AnchorToken(Token):
__slots__ = ('value',)
id = '<anchor>'
- def __init__(self, value, start_mark, end_mark):
- # type: (Any, Any, Any) -> None
+ def __init__(self, value: Any, start_mark: Any, end_mark: Any) -> None:
Token.__init__(self, start_mark, end_mark)
self.value = value
@@ -228,8 +308,7 @@ class TagToken(Token):
__slots__ = ('value',)
id = '<tag>'
- def __init__(self, value, start_mark, end_mark):
- # type: (Any, Any, Any) -> None
+ def __init__(self, value: Any, start_mark: Any, end_mark: Any) -> None:
Token.__init__(self, start_mark, end_mark)
self.value = value
@@ -238,8 +317,9 @@ class ScalarToken(Token):
__slots__ = 'value', 'plain', 'style'
id = '<scalar>'
- def __init__(self, value, plain, start_mark, end_mark, style=None):
- # type: (Any, Any, Any, Any, Any) -> None
+ def __init__(
+ self, value: Any, plain: Any, start_mark: Any, end_mark: Any, style: Any = None
+ ) -> None:
Token.__init__(self, start_mark, end_mark)
self.value = value
self.plain = plain
@@ -247,32 +327,46 @@ class ScalarToken(Token):
class CommentToken(Token):
- __slots__ = 'value', 'pre_done'
+ __slots__ = '_value', '_column', 'pre_done'
id = '<comment>'
- def __init__(self, value, start_mark, end_mark):
- # type: (Any, Any, Any) -> None
- Token.__init__(self, start_mark, end_mark)
- self.value = value
+ def __init__(
+ self, value: Any, start_mark: Any = None, end_mark: Any = None, column: Any = None
+ ) -> None:
+ if start_mark is None:
+ assert column is not None
+ self._column = column
+ Token.__init__(self, start_mark, None) # type: ignore
+ self._value = value
+
+ @property
+ def value(self) -> str:
+ if isinstance(self._value, str):
+ return self._value
+ return "".join(self._value)
- def reset(self):
- # type: () -> None
+ @value.setter
+ def value(self, val: Any) -> None:
+ self._value = val
+
+ def reset(self) -> None:
if hasattr(self, 'pre_done'):
delattr(self, 'pre_done')
- def __repr__(self):
- # type: () -> Any
- v = '{!r}'.format(self.value)
- if SHOWLINES:
+ def __repr__(self) -> Any:
+ v = f'{self.value!r}'
+ if SHOW_LINES:
try:
v += ', line: ' + str(self.start_mark.line)
+ except: # NOQA
+ pass
+ try:
v += ', col: ' + str(self.start_mark.column)
except: # NOQA
pass
- return 'CommentToken({})'.format(v)
+ return f'CommentToken({v})'
- def __eq__(self, other):
- # type: (Any) -> bool
+ def __eq__(self, other: Any) -> bool:
if self.start_mark != other.start_mark:
return False
if self.end_mark != other.end_mark:
@@ -281,6 +375,5 @@ class CommentToken(Token):
return False
return True
- def __ne__(self, other):
- # type: (Any) -> bool
+ def __ne__(self, other: Any) -> bool:
return not self.__eq__(other)
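
The three-slot comment layout described near the top of this file ([pre-comments, eol, post-comments] instead of the old two-element list) can be observed directly on a bare Token through the new add_comment_* methods. A hedged sketch against a ruamel.yaml version containing this change; the marks are simply left as None and comment_type 0 is only used as the slot index here:

from ruamel.yaml.tokens import Token

t = Token(None, None)                   # start/end marks do not matter for this
t.add_comment_pre('# above the node')
t.add_comment_eol('# end of line', 0)
t.add_comment_post('# below the node')
print(t.comment)  # [['# above the node'], ['# end of line'], ['# below the node']]
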
diff --git a/tox.ini b/tox.ini
index 0e5fb67..d581f3d 100755
--- a/tox.ini
+++ b/tox.ini
@@ -1,27 +1,27 @@
[tox]
# toxworkdir = /data1/DATA/tox/ruamel.yaml
-envlist = cs,py38,py27,py37,py36,py35,py39
+envlist = cs,py310,py39,py38,py37,py36,py35
[testenv]
+install_command = pip install --disable-pip-version-check {opts} {packages}
commands =
/bin/bash -c 'pytest _test/test_*.py'
deps =
pytest
- ruamel.std.pathlib
[testenv:cs]
-basepython = python3.6
+basepython = python3.8
deps =
flake8
- flake8-bugbear;python_version>="3.5"
+ flake8-bugbear;python_version>="3.8"
commands =
flake8 []{posargs}
[testenv:pep8]
-basepython = python3.6
+basepython = python3.8
deps =
flake8
- flake8-bugbear;python_version>="3.5"
+ flake8-bugbear;python_version>="3.8"
commands =
flake8 []{posargs}
diff --git a/util.py b/util.py
index 1788254..39d71b4 100644
--- a/util.py
+++ b/util.py
@@ -4,19 +4,16 @@
some helper functions that might be generally useful
"""
-from __future__ import absolute_import, print_function
-
+import datetime
from functools import partial
import re
-from .compat import text_type, binary_type
-if False: # MYPY
- from typing import Any, Dict, Optional, List, Text # NOQA
- from .compat import StreamTextType # NOQA
+from typing import Any, Dict, Optional, List, Text, Callable, Union # NOQA
+from .compat import StreamTextType # NOQA
-class LazyEval(object):
+class LazyEval:
"""
Lightweight wrapper around lazily evaluated func(*args, **kwargs).
@@ -27,38 +24,110 @@ class LazyEval(object):
return value (or, prior to evaluation, func and arguments), in its closure.
"""
- def __init__(self, func, *args, **kwargs):
- # type: (Any, Any, Any) -> None
- def lazy_self():
- # type: () -> Any
+ def __init__(self, func: Callable[..., Any], *args: Any, **kwargs: Any) -> None:
+ def lazy_self() -> Any:
return_value = func(*args, **kwargs)
object.__setattr__(self, 'lazy_self', lambda: return_value)
return return_value
object.__setattr__(self, 'lazy_self', lazy_self)
- def __getattribute__(self, name):
- # type: (Any) -> Any
+ def __getattribute__(self, name: str) -> Any:
lazy_self = object.__getattribute__(self, 'lazy_self')
if name == 'lazy_self':
return lazy_self
return getattr(lazy_self(), name)
- def __setattr__(self, name, value):
- # type: (Any, Any) -> None
+ def __setattr__(self, name: str, value: Any) -> None:
setattr(self.lazy_self(), name, value)
RegExp = partial(LazyEval, re.compile)
+timestamp_regexp = RegExp(
+ """^(?P<year>[0-9][0-9][0-9][0-9])
+ -(?P<month>[0-9][0-9]?)
+ -(?P<day>[0-9][0-9]?)
+    (?:((?P<t>[Tt])|[ \\t]+) # explicitly not retaining extra spaces
+ (?P<hour>[0-9][0-9]?)
+ :(?P<minute>[0-9][0-9])
+ :(?P<second>[0-9][0-9])
+ (?:\\.(?P<fraction>[0-9]*))?
+ (?:[ \\t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
+ (?::(?P<tz_minute>[0-9][0-9]))?))?)?$""",
+ re.X,
+)
+
+
+def create_timestamp(
+ year: Any,
+ month: Any,
+ day: Any,
+ t: Any,
+ hour: Any,
+ minute: Any,
+ second: Any,
+ fraction: Any,
+ tz: Any,
+ tz_sign: Any,
+ tz_hour: Any,
+ tz_minute: Any,
+) -> Union[datetime.datetime, datetime.date]:
+ # create a timestamp from match against timestamp_regexp
+ MAX_FRAC = 999999
+ year = int(year)
+ month = int(month)
+ day = int(day)
+ if not hour:
+ return datetime.date(year, month, day)
+ hour = int(hour)
+ minute = int(minute)
+ second = int(second)
+ frac = 0
+ if fraction:
+ frac_s = fraction[:6]
+ while len(frac_s) < 6:
+ frac_s += '0'
+ frac = int(frac_s)
+ if len(fraction) > 6 and int(fraction[6]) > 4:
+ frac += 1
+ if frac > MAX_FRAC:
+ fraction = 0
+ else:
+ fraction = frac
+ else:
+ fraction = 0
+ delta = None
+ if tz_sign:
+ tz_hour = int(tz_hour)
+ tz_minute = int(tz_minute) if tz_minute else 0
+ delta = datetime.timedelta(
+ hours=tz_hour, minutes=tz_minute, seconds=1 if frac > MAX_FRAC else 0
+ )
+ if tz_sign == '-':
+ delta = -delta
+ elif frac > MAX_FRAC:
+ delta = -datetime.timedelta(seconds=1)
+ # should do something else instead (or hook this up to the preceding if statement
+    # in reverse)
+ # if delta is None:
+ # return datetime.datetime(year, month, day, hour, minute, second, fraction)
+ # return datetime.datetime(year, month, day, hour, minute, second, fraction,
+ # datetime.timezone.utc)
+ # the above is not good enough though, should provide tzinfo. In Python3 that is easily
+    # doable; drop that kind of support for Python2 as it has no native tzinfo
+ data = datetime.datetime(year, month, day, hour, minute, second, fraction)
+ if delta:
+ data -= delta
+ return data
+
# originally as comment
# https://github.com/pre-commit/pre-commit/pull/211#issuecomment-186466605
# if you use this in your code, I suggest adding a test in your test suite
# that checks this routine's output against a known piece of your YAML
# before upgrades to this code break your round-tripped YAML
-def load_yaml_guess_indent(stream, **kw):
- # type: (StreamTextType, Any) -> Any
+def load_yaml_guess_indent(stream: StreamTextType, **kw: Any) -> Any:
"""guess the indent and block sequence indent of yaml stream/string
returns round_trip_loaded stream, indent level, block sequence indent
@@ -66,19 +135,18 @@ def load_yaml_guess_indent(stream, **kw):
- if there are no block sequences, indent is taken from nested mappings, block sequence
indent is unset (None) in that case
"""
- from .main import round_trip_load
+ from .main import YAML
- # load a YAML document, guess the indentation, if you use TABs you're on your own
- def leading_spaces(line):
- # type: (Any) -> int
+ # load a YAML document, guess the indentation, if you use TABs you are on your own
+ def leading_spaces(line: Any) -> int:
idx = 0
while idx < len(line) and line[idx] == ' ':
idx += 1
return idx
- if isinstance(stream, text_type):
- yaml_str = stream # type: Any
- elif isinstance(stream, binary_type):
+ if isinstance(stream, str):
+ yaml_str: Any = stream
+ elif isinstance(stream, bytes):
# most likely, but the Reader checks BOM for this
yaml_str = stream.decode('utf-8')
else:
@@ -117,11 +185,11 @@ def load_yaml_guess_indent(stream, **kw):
prev_line_key_only = None
if indent is None and map_indent is not None:
indent = map_indent
- return round_trip_load(yaml_str, **kw), indent, block_seq_indent
+ yaml = YAML()
+ return yaml.load(yaml_str, **kw), indent, block_seq_indent
-def configobj_walker(cfg):
- # type: (Any) -> Any
+def configobj_walker(cfg: Any) -> Any:
"""
walks over a ConfigObj (INI file with comments) generating
    corresponding YAML output (including comments)
@@ -140,33 +208,32 @@ def configobj_walker(cfg):
yield c
-def _walk_section(s, level=0):
- # type: (Any, int) -> Any
+def _walk_section(s: Any, level: int = 0) -> Any:
from configobj import Section
assert isinstance(s, Section)
- indent = u' ' * level
+ indent = ' ' * level
for name in s.scalars:
for c in s.comments[name]:
yield indent + c.strip()
x = s[name]
- if u'\n' in x:
- i = indent + u' '
- x = u'|\n' + i + x.strip().replace(u'\n', u'\n' + i)
+ if '\n' in x:
+ i = indent + ' '
+ x = '|\n' + i + x.strip().replace('\n', '\n' + i)
elif ':' in x:
- x = u"'" + x.replace(u"'", u"''") + u"'"
- line = u'{0}{1}: {2}'.format(indent, name, x)
+ x = "'" + x.replace("'", "''") + "'"
+ line = f'{indent}{name}: {x}'
c = s.inline_comments[name]
if c:
- line += u' ' + c
+ line += ' ' + c
yield line
for name in s.sections:
for c in s.comments[name]:
yield indent + c.strip()
- line = u'{0}{1}:'.format(indent, name)
+ line = f'{indent}{name}:'
c = s.inline_comments[name]
if c:
- line += u' ' + c
+ line += ' ' + c
yield line
for val in _walk_section(s[name], level=level + 1):
yield val
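
Finally, the create_timestamp() moved into util.py pairs with timestamp_regexp: the regexp's named groups line up one-to-one with the function's parameters, so a match's groupdict() can be splatted straight into the call. A short sketch using the code above (the input string is only an example; the expected value follows from the arithmetic in create_timestamp, which folds the -5 hour offset back in to produce a naive UTC datetime):

import datetime
from ruamel.yaml.util import create_timestamp, timestamp_regexp

match = timestamp_regexp.match('2001-12-14 21:59:43.10 -5')
assert match is not None
ts = create_timestamp(**match.groupdict())
# the '-5' offset becomes a negative timedelta, and 'data -= delta' adds it back
assert ts == datetime.datetime(2001, 12, 15, 2, 59, 43, 100000)
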