author     Anthon van der Neut <anthon@mnt.org>  2023-05-01 19:13:50 +0200
committer  Anthon van der Neut <anthon@mnt.org>  2023-05-01 19:13:50 +0200
commit     8b731994b1543d7886af85f926d9eea5a22d0732 (patch)
tree       3553d4cbc80b541484d7a3f39e00cdcfd8f9d030
parent     45111ba0b67e8619265d89f3202635e62c13cde6 (diff)
download   ruamel.yaml-8b731994b1543d7886af85f926d9eea5a22d0732.tar.gz
retrofitted 0.18 changes
-rw-r--r--  .hgignore | 1
-rw-r--r--  README.rst | 26
-rw-r--r--  __init__.py | 9
-rw-r--r--  _doc/conf.py | 8
-rw-r--r--  _test/lib/canonical.py | 12
-rw-r--r--  _test/lib/test_constructor.py | 2
-rw-r--r--  _test/lib/test_emitter.py | 27
-rw-r--r--  _test/lib/test_resolver.py | 3
-rw-r--r--  _test/lib/test_structure.py | 13
-rw-r--r--  _test/lib/test_yaml_ext.py | 17
-rw-r--r--  _test/roundtrip.py | 154
-rw-r--r--  _test/test_a_dedent.py | 12
-rw-r--r--  _test/test_add_xxx.py | 55
-rw-r--r--  _test/test_anchor.py | 88
-rw-r--r--  _test/test_api_change.py | 48
-rw-r--r--  _test/test_class_register.py | 47
-rw-r--r--  _test/test_collections.py | 4
-rw-r--r--  _test/test_comment_manipulation.py | 71
-rw-r--r--  _test/test_comments.py | 145
-rw-r--r--  _test/test_contextmanager.py | 21
-rw-r--r--  _test/test_copy.py | 12
-rw-r--r--  _test/test_cyaml.py | 18
-rw-r--r--  _test/test_datetime.py | 32
-rw-r--r--  _test/test_deprecation.py | 8
-rw-r--r--  _test/test_documents.py | 14
-rw-r--r--  _test/test_fail.py | 40
-rw-r--r--  _test/test_float.py | 19
-rw-r--r--  _test/test_flowsequencekey.py | 2
-rw-r--r--  _test/test_indentation.py | 65
-rw-r--r--  _test/test_int.py | 4
-rw-r--r--  _test/test_issues.py | 230
-rw-r--r--  _test/test_json_numbers.py | 12
-rw-r--r--  _test/test_line_col.py | 20
-rw-r--r--  _test/test_literal.py | 54
-rw-r--r--  _test/test_none.py | 12
-rw-r--r--  _test/test_numpy.py | 38
-rw-r--r--  _test/test_program_config.py | 8
-rw-r--r--  _test/test_spec_examples.py | 75
-rw-r--r--  _test/test_string.py | 42
-rw-r--r--  _test/test_tag.py | 58
-rw-r--r--  _test/test_version.py | 21
-rw-r--r--  _test/test_yamlfile.py | 65
-rw-r--r--  _test/test_yamlobject.py | 29
-rw-r--r--  _test/test_z_check_debug_leftovers.py | 7
-rw-r--r--  _test/test_z_data.py | 60
-rw-r--r--  _test/test_z_olddata.py | 16
-rw-r--r--  anchor.py | 12
-rw-r--r--  comments.py | 527
-rw-r--r--  compat.py | 106
-rw-r--r--  composer.py | 68
-rw-r--r--  configobjwalker.py | 11
-rw-r--r--  constructor.py | 463
-rw-r--r--  cyaml.py | 144
-rw-r--r--  dumper.py | 151
-rw-r--r--  emitter.py | 426
-rw-r--r--  error.py | 131
-rw-r--r--  events.py | 126
-rw-r--r--  loader.py | 37
-rw-r--r--  main.py | 561
-rw-r--r--  nodes.py | 93
-rw-r--r--  parser.py | 187
-rw-r--r--  reader.py | 93
-rw-r--r--  representer.py | 296
-rw-r--r--  resolver.py | 89
-rw-r--r--  scalarbool.py | 15
-rw-r--r--  scalarfloat.py | 53
-rw-r--r--  scalarint.py | 55
-rw-r--r--  scalarstring.py | 42
-rw-r--r--  scanner.py | 493
-rw-r--r--  serializer.py | 58
-rw-r--r--  setup.py | 246
-rw-r--r--  timestamp.py | 35
-rw-r--r--  tokens.py | 117
-rw-r--r--  util.py | 53
74 files changed, 2858 insertions, 3554 deletions
diff --git a/.hgignore b/.hgignore
index 6661784..c305fec 100644
--- a/.hgignore
+++ b/.hgignore
@@ -12,6 +12,7 @@ venv
TODO.rst
try_*
_doc/*.pdf
+_doc/*.html
_doc/*.rst
*.py_alt
diff --git a/README.rst b/README.rst
index 204b67d..36d6abc 100644
--- a/README.rst
+++ b/README.rst
@@ -11,9 +11,11 @@ ruamel.yaml
:pypi: https://pypi.org/project/ruamel.yaml/
*The 0.16.13 release was the last that was tested to be working on Python 2.7.
-The 0.17.21 is the last one tested to be working on Python 3.5,
-that is also the last release supporting old PyYAML functions, you'll have to create a
-`YAML()` instance and use its `.load()` and `.dump()` methods.*
+The 0.17.21 release was the last one tested to be working on Python 3.5 and 3.6
+(the latter not actually tested, because tox/virtualenv no longer support
+these EOL versions).
+The 0.17 series is also the last to support the old PyYAML functions; replace them
+by creating a `YAML()` instance and using its `.load()` and `.dump()` methods.*
*Please adjust your dependencies accordingly if necessary. (`ruamel.yaml<0.18`)*
@@ -67,6 +69,24 @@ ChangeLog
.. should insert NEXT: at the beginning of line for next key (with empty line)
+NEXT:
+ - fix issue with indent != 2 and literal scalars with empty first line
+ (reported by wrdis on `StackOverflow <https://stackoverflow.com/q/75584262/1307905>`__)
+ - updated __repr__ of CommentedMap, now that dict is ordered -> no more ordereddict
+ - fix loading of `!!float 42` (reported by Eric on
+ `Stack overflow <https://stackoverflow.com/a/71555107/1307905>`_)
+ - line numbers are now set on `CommentedKeySeq` and `CommentedKeyMap` (which
+ are created if you have a sequence resp. mapping as the key in a mapping)
+ - plain scalars: put single words longer than width on a line of their own, instead
+ of after the previous line (issue 427, reported by `Antoine Cotten
+ <https://sourceforge.net/u/antoineco/profile/>`_). Caveat: this currently results in a
+ space ending the previous line.
+ - fix for folded scalar part of 421: comments after ">" on first line of folded
+ scalars are now preserved (as were those in the same position on literal scalars).
+ Issue reported by Jacob Floyd.
+ - added stacklevel to warnings
+ - typing changed from Py2 compatible comments to Py3, removed various Py2-isms
+
0.17.21 (2022-02-12):
- fix bug in calling `.compose()` method with `pathlib.Path` instance.
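
The README text above asks users of the removed PyYAML-style module functions to
switch to a `YAML()` instance. A minimal sketch of that migration (the input
document and variable names are made up for illustration):

    import io
    import ruamel.yaml

    yaml = ruamel.yaml.YAML()                # round-trip by default; YAML(typ='safe') is the safe_load analogue
    data = yaml.load('a: 1\nb: [2, 3]\n')    # replaces ruamel.yaml.load(...)
    data['c'] = 4

    buf = io.StringIO()
    yaml.dump(data, buf)                     # replaces ruamel.yaml.dump(...); a stream argument is required
    print(buf.getvalue())
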
diff --git a/__init__.py b/__init__.py
index 2a2572c..58e39af 100644
--- a/__init__.py
+++ b/__init__.py
@@ -20,12 +20,11 @@ _package_data = dict(
},
classifiers=[
'Programming Language :: Python :: 3 :: Only',
- 'Programming Language :: Python :: 3.5',
- 'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
+ 'Programming Language :: Python :: 3.11',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup',
@@ -33,10 +32,10 @@ _package_data = dict(
],
keywords='yaml 1.2 parser round-trip preserve quotes order config',
read_the_docs='yaml',
- supported=[(3, 5)], # minimum
+ supported=[(3, 7)], # minimum
tox=dict(
- env='*f', # f for 3.5
- fl8excl='_test/lib',
+ env='*',
+ fl8excl='_test/lib,branch_default',
),
# universal=True,
python_requires='>=3',
diff --git a/_doc/conf.py b/_doc/conf.py
index 0efcc64..67fcc0e 100644
--- a/_doc/conf.py
+++ b/_doc/conf.py
@@ -28,7 +28,7 @@ import os # NOQA
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
-extensions = []
+extensions = [] # type: ignore
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -74,15 +74,13 @@ class ryd2rst:
if True:
try:
- from ryd.__main__ import RYDCmd
+ from ryd.__main__ import main
from pathlib import Path
oldargv = sys.argv
for fn in Path('.').glob('*.ryd'):
sys.argv = ['ryd', 'convert', '--no-pdf', str(fn)]
- rc = RYDCmd()
- rc.parse_args()
- print(sys.argv, '->', rc.run())
+ main(sys.argv)
sys.argv = oldargv
except Exception as e:
diff --git a/_test/lib/canonical.py b/_test/lib/canonical.py
index 8a00772..31c9728 100644
--- a/_test/lib/canonical.py
+++ b/_test/lib/canonical.py
@@ -348,28 +348,32 @@ ruamel.yaml.canonical_scan = canonical_scan
def canonical_parse(stream):
- return ruamel.yaml.parse(stream, Loader=CanonicalLoader)
+ yaml = ruamel.yaml.YAML()
+ return yaml.parse(stream, Loader=CanonicalLoader)
ruamel.yaml.canonical_parse = canonical_parse
def canonical_compose(stream):
- return ruamel.yaml.compose(stream, Loader=CanonicalLoader)
+ yaml = ruamel.yaml.YAML()
+ return yaml.compose(stream, Loader=CanonicalLoader)
ruamel.yaml.canonical_compose = canonical_compose
def canonical_compose_all(stream):
- return ruamel.yaml.compose_all(stream, Loader=CanonicalLoader)
+ yaml = ruamel.yaml.YAML()
+ return yaml.compose_all(stream, Loader=CanonicalLoader)
ruamel.yaml.canonical_compose_all = canonical_compose_all
def canonical_load(stream):
- return ruamel.yaml.load(stream, Loader=CanonicalLoader)
+ yaml = ruamel.yaml.YAML()
+ return yaml.load(stream, Loader=CanonicalLoader)
ruamel.yaml.canonical_load = canonical_load
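
The hunk above wraps each former module-level call (`parse`, `compose`, `load`)
in a freshly created `YAML()` instance. A sketch of the same pattern with a
single shared instance; the helper names and the `typ`/`pure` settings are
illustrative, not part of this commit:

    import ruamel.yaml

    _yaml = ruamel.yaml.YAML(typ='safe', pure=True)   # one instance can be reused across calls

    def my_load(stream):
        # stands in for the removed module-level ruamel.yaml.load(stream, Loader=...)
        return _yaml.load(stream)

    def my_compose(stream):
        # returns the root node of the representation graph, like the old compose()
        return _yaml.compose(stream)
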
diff --git a/_test/lib/test_constructor.py b/_test/lib/test_constructor.py
index 738aaec..b38bf2f 100644
--- a/_test/lib/test_constructor.py
+++ b/_test/lib/test_constructor.py
@@ -121,7 +121,7 @@ def _make_objects():
else:
return False
- class AnObject(object):
+ class AnObject:
def __new__(cls, foo=None, bar=None, baz=None):
self = object.__new__(cls)
self.foo = foo
diff --git a/_test/lib/test_emitter.py b/_test/lib/test_emitter.py
index fbdbb79..b1991e3 100644
--- a/_test/lib/test_emitter.py
+++ b/_test/lib/test_emitter.py
@@ -1,7 +1,8 @@
from __future__ import absolute_import
from __future__ import print_function
-import ruamel.yaml as yaml
+import ruamel.yaml
+from ruamel.yaml import YAML
def _compare_events(events1, events2):
@@ -20,8 +21,8 @@ def _compare_events(events1, events2):
def test_emitter_on_data(data_filename, canonical_filename, verbose=False):
with open(data_filename, 'rb') as fp0:
- events = list(yaml.parse(fp0))
- output = yaml.emit(events)
+ events = list(YAML().parse(fp0))
+ output = YAML().emit(events)
if verbose:
print('OUTPUT:')
print(output)
@@ -34,9 +35,9 @@ test_emitter_on_data.unittest = ['.data', '.canonical']
def test_emitter_on_canonical(canonical_filename, verbose=False):
with open(canonical_filename, 'rb') as fp0:
- events = list(yaml.parse(fp0))
+ events = list(YAML().parse(fp0))
for canonical in [False, True]:
- output = yaml.emit(events, canonical=canonical)
+ output = YAML().emit(events, canonical=canonical)
if verbose:
print('OUTPUT (canonical=%s):' % canonical)
print(output)
@@ -50,7 +51,7 @@ test_emitter_on_canonical.unittest = ['.canonical']
def test_emitter_styles(data_filename, canonical_filename, verbose=False):
for filename in [data_filename, canonical_filename]:
with open(filename, 'rb') as fp0:
- events = list(yaml.parse(fp0))
+ events = list(YAML().parse(fp0))
for flow_style in [False, True]:
for style in ['|', '>', '"', "'", ""]:
styled_events = []
@@ -68,23 +69,23 @@ def test_emitter_styles(data_filename, canonical_filename, verbose=False):
event.anchor, event.tag, event.implicit, flow_style=flow_style
)
styled_events.append(event)
- output = yaml.emit(styled_events)
+ output = YAML().emit(styled_events)
if verbose:
print(
'OUTPUT (filename=%r, flow_style=%r, style=%r)'
% (filename, flow_style, style)
)
print(output)
- new_events = list(yaml.parse(output))
+ new_events = list(YAML().parse(output))
_compare_events(events, new_events)
test_emitter_styles.unittest = ['.data', '.canonical']
-class EventsLoader(yaml.Loader):
+class EventsLoader(ruamel.yaml.Loader):
def construct_event(self, node):
- if isinstance(node, yaml.ScalarNode):
+ if isinstance(node, ruamel.yaml.ScalarNode):
mapping = {}
else:
mapping = self.construct_mapping(node)
@@ -116,12 +117,12 @@ EventsLoader.add_constructor(None, EventsLoader.construct_event)
def test_emitter_events(events_filename, verbose=False):
with open(events_filename, 'rb') as fp0:
- events = list(yaml.load(fp0, Loader=EventsLoader))
- output = yaml.emit(events)
+ events = list(YAML().load(fp0, Loader=EventsLoader))
+ output = YAML().emit(events)
if verbose:
print('OUTPUT:')
print(output)
- new_events = list(yaml.parse(output))
+ new_events = list(YAML().parse(output))
_compare_events(events, new_events)
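
These emitter tests now obtain the event stream from a `YAML()` instance instead
of the module-level `parse()`. A hedged sketch of inspecting events that way
(the document string is made up):

    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    for event in yaml.parse('- a\n- {b: 1}\n'):
        print(type(event).__name__)
    # typically prints StreamStartEvent, DocumentStartEvent, SequenceStartEvent,
    # ScalarEvent, MappingStartEvent, ..., StreamEndEvent
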
diff --git a/_test/lib/test_resolver.py b/_test/lib/test_resolver.py
index 24373a7..b2b0839 100644
--- a/_test/lib/test_resolver.py
+++ b/_test/lib/test_resolver.py
@@ -1,5 +1,6 @@
-import ruamel.yaml as yaml
+import ruamel.yaml
+yaml = ruamel.yaml.YAML()
import pprint
diff --git a/_test/lib/test_structure.py b/_test/lib/test_structure.py
index 470d267..8de24a3 100644
--- a/_test/lib/test_structure.py
+++ b/_test/lib/test_structure.py
@@ -84,9 +84,9 @@ def test_parser(data_filename, canonical_filename, verbose=False):
events2 = None
try:
with open(data_filename, 'rb') as fp0:
- events1 = list(ruamel.yaml.parse(fp0))
+ events1 = list(ruamel.yaml.YAML().parse(fp0))
with open(canonical_filename, 'rb') as fp0:
- events2 = list(ruamel.yaml.canonical_parse(fp0))
+ events2 = list(ruamel.yaml.YAML().canonical_parse(fp0))
_compare_events(events1, events2)
finally:
if verbose:
@@ -104,9 +104,9 @@ def test_parser_on_canonical(canonical_filename, verbose=False):
events2 = None
try:
with open(canonical_filename, 'rb') as fp0:
- events1 = list(ruamel.yaml.parse(fp0))
+ events1 = list(ruamel.yaml.YAML().parse(fp0))
with open(canonical_filename, 'rb') as fp0:
- events2 = list(ruamel.yaml.canonical_parse(fp0))
+ events2 = list(ruamel.yaml.YAML().canonical_parse(fp0))
_compare_events(events1, events2, full=True)
finally:
if verbose:
@@ -138,10 +138,11 @@ def test_composer(data_filename, canonical_filename, verbose=False):
nodes1 = None
nodes2 = None
try:
+ yaml = ruamel.yaml.YAML()
with open(data_filename, 'rb') as fp0:
- nodes1 = list(ruamel.yaml.compose_all(fp0))
+ nodes1 = list(yaml.compose_all(fp0))
with open(canonical_filename, 'rb') as fp0:
- nodes2 = list(ruamel.yaml.canonical_compose_all(fp0))
+ nodes2 = list(yaml.canonical_compose_all(fp0))
assert len(nodes1) == len(nodes2), (len(nodes1), len(nodes2))
for node1, node2 in zip(nodes1, nodes2):
_compare_nodes(node1, node2)
diff --git a/_test/lib/test_yaml_ext.py b/_test/lib/test_yaml_ext.py
index 8cba7e5..a6fa287 100644
--- a/_test/lib/test_yaml_ext.py
+++ b/_test/lib/test_yaml_ext.py
@@ -110,7 +110,7 @@ def new_safe_dump(data, stream=None, **kwds):
return old_dump(data, stream, ruamel.yaml.CSafeDumper, **kwds)
-old_safe_dump_all = ruamel.yaml.safe_dump_all
+# old_safe_dump_all = ruamel.yaml.safe_dump_all
def new_safe_dump_all(documents, stream=None, **kwds):
@@ -236,10 +236,12 @@ test_c_scanner.skip = ['.skip-ext']
def _compare_parsers(py_data, c_data, verbose):
- py_events = list(ruamel.yaml.parse(py_data, Loader=ruamel.yaml.PyLoader))
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
+ py_events = list(yaml.parse(py_data, Loader=ruamel.yaml.PyLoader))
c_events = []
try:
- for event in ruamel.yaml.parse(c_data, Loader=ruamel.yaml.CLoader):
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=False)
+ for event in yaml.parse(c_data, Loader=ruamel.yaml.CLoader):
c_events.append(event)
assert len(py_events) == len(c_events), (len(py_events), len(c_events))
for py_event, c_event in zip(py_events, c_events):
@@ -284,12 +286,13 @@ test_c_parser.skip = ['.skip-ext']
def _compare_emitters(data, verbose):
- events = list(ruamel.yaml.parse(data, Loader=ruamel.yaml.PyLoader))
- c_data = ruamel.yaml.emit(events, Dumper=ruamel.yaml.CDumper)
+ yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
+    events = list(yaml.parse(data, Loader=ruamel.yaml.PyLoader))
+ c_data = yaml.emit(events, Dumper=ruamel.yaml.CDumper)
if verbose:
print(c_data)
- py_events = list(ruamel.yaml.parse(c_data, Loader=ruamel.yaml.PyLoader))
- c_events = list(ruamel.yaml.parse(c_data, Loader=ruamel.yaml.CLoader))
+ py_events = list(yaml.parse(c_data, Loader=ruamel.yaml.PyLoader))
+ c_events = list(yaml.parse(c_data, Loader=ruamel.yaml.CLoader))
try:
assert len(events) == len(py_events), (len(events), len(py_events))
assert len(events) == len(c_events), (len(events), len(c_events))
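
The `_compare_parsers` change above selects the implementation explicitly via
the `pure` argument. A small sketch of that switch (the `typ` value and the
document are illustrative); the C-based loader is only used when the
ruamel.yaml.clib extension is installed:

    import ruamel.yaml

    py_yaml = ruamel.yaml.YAML(typ='safe', pure=True)   # always the pure-Python scanner/parser
    c_yaml = ruamel.yaml.YAML(typ='safe')               # picks the C-based loader when available

    doc = 'a: 1\nb: 2\n'
    assert py_yaml.load(doc) == c_yaml.load(doc)        # both should produce the same plain dict
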
diff --git a/_test/roundtrip.py b/_test/roundtrip.py
index 8b87380..fa8b08a 100644
--- a/_test/roundtrip.py
+++ b/_test/roundtrip.py
@@ -8,10 +8,12 @@ import textwrap
import io
from pathlib import Path
+from typing import Any, Optional, Union
+
unset = object()
-def dedent(data):
+def dedent(data: str) -> str:
try:
position_of_first_newline = data.index('\n')
for idx in range(position_of_first_newline):
@@ -24,7 +26,9 @@ def dedent(data):
return textwrap.dedent(data)
-def round_trip_load(inp, preserve_quotes=None, version=None):
+def round_trip_load(
+ inp: Any, preserve_quotes: Optional[bool] = None, version: Optional[Any] = None
+) -> Any:
import ruamel.yaml # NOQA
dinp = dedent(inp)
@@ -34,7 +38,9 @@ def round_trip_load(inp, preserve_quotes=None, version=None):
return yaml.load(dinp)
-def round_trip_load_all(inp, preserve_quotes=None, version=None):
+def round_trip_load_all(
+ inp: Any, preserve_quotes: Optional[bool] = None, version: Optional[Any] = None
+) -> Any:
import ruamel.yaml # NOQA
dinp = dedent(inp)
@@ -45,18 +51,18 @@ def round_trip_load_all(inp, preserve_quotes=None, version=None):
def round_trip_dump(
- data,
- stream=None, # *,
- indent=None,
- block_seq_indent=None,
- default_flow_style=unset,
- top_level_colon_align=None,
- prefix_colon=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- allow_unicode=True,
-):
+ data: Any,
+ stream: Any = None, # *,
+ indent: Optional[int] = None,
+ block_seq_indent: Optional[int] = None,
+ default_flow_style: Any = unset,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[Any] = None,
+ allow_unicode: bool = True,
+) -> Union[str, None]:
import ruamel.yaml # NOQA
yaml = ruamel.yaml.YAML()
@@ -71,25 +77,25 @@ def round_trip_dump(
yaml.allow_unicode = allow_unicode
if stream is not None:
yaml.dump(data, stream=stream)
- return
+ return None
buf = io.StringIO()
yaml.dump(data, stream=buf)
return buf.getvalue()
def round_trip_dump_all(
- data,
- stream=None, # *,
- indent=None,
- block_seq_indent=None,
- default_flow_style=unset,
- top_level_colon_align=None,
- prefix_colon=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- allow_unicode=None,
-):
+ data: Any,
+ stream: Any = None, # *,
+ indent: Optional[int] = None,
+ block_seq_indent: Optional[int] = None,
+ default_flow_style: Any = unset,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[Any] = None,
+ allow_unicode: bool = True,
+) -> Union[str, None]:
import ruamel.yaml # NOQA
yaml = ruamel.yaml.YAML()
@@ -104,13 +110,13 @@ def round_trip_dump_all(
yaml.allow_unicode = allow_unicode
if stream is not None:
yaml.dump(data, stream=stream)
- return
+ return None
buf = io.StringIO()
yaml.dump_all(data, stream=buf)
return buf.getvalue()
-def diff(inp, outp, file_name='stdin'):
+def diff(inp: str, outp: str, file_name: str = 'stdin') -> None:
import difflib
inl = inp.splitlines(True) # True for keepends
@@ -125,20 +131,21 @@ def diff(inp, outp, file_name='stdin'):
def round_trip(
- inp,
- outp=None,
- extra=None,
- intermediate=None,
- indent=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- preserve_quotes=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- dump_data=None,
-):
+ inp: str,
+ outp: Optional[str] = None,
+ extra: Optional[str] = None,
+ intermediate: Any = None,
+ indent: Optional[int] = None,
+ block_seq_indent: Optional[int] = None,
+ default_flow_style: Any = unset,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ preserve_quotes: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[Any] = None,
+ dump_data: Any = None,
+) -> Any:
"""
inp: input string to parse
outp: expected output (equals input if not specified)
@@ -167,6 +174,7 @@ def round_trip(
explicit_end=explicit_end,
version=version,
)
+ assert isinstance(res, str)
if res != doutp:
diff(doutp, res, 'input string')
print('\nroundtrip data:\n', res, sep="")
@@ -187,19 +195,19 @@ def round_trip(
def na_round_trip(
- inp,
- outp=None,
- extra=None,
- intermediate=None,
- indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- preserve_quotes=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- dump_data=None,
-):
+ inp: str,
+ outp: Optional[str] = None,
+ extra: Optional[str] = None,
+ intermediate: Any = None,
+ indent: Optional[int] = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ preserve_quotes: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[Any] = None,
+ dump_data: Any = None,
+) -> Any:
"""
inp: input string to parse
outp: expected output (equals input if not specified)
@@ -233,20 +241,20 @@ def na_round_trip(
return res
-def YAML(**kw):
+def YAML(**kw: Any) -> Any:
import ruamel.yaml # NOQA
class MyYAML(ruamel.yaml.YAML):
"""auto dedent string parameters on load"""
- def load(self, stream):
+ def load(self, stream: Any) -> Any:
if isinstance(stream, str):
if stream and stream[0] == '\n':
stream = stream[1:]
stream = textwrap.dedent(stream)
return ruamel.yaml.YAML.load(self, stream)
- def load_all(self, stream):
+ def load_all(self, stream: Any) -> Any:
if isinstance(stream, str):
if stream and stream[0] == '\n':
stream = stream[1:]
@@ -254,7 +262,7 @@ def YAML(**kw):
for d in ruamel.yaml.YAML.load_all(self, stream):
yield d
- def dump(self, data, **kw):
+ def dump(self, data: Any, **kw: Any) -> Any: # type: ignore
from ruamel.yaml.compat import StringIO, BytesIO # NOQA
assert ('stream' in kw) ^ ('compare' in kw)
@@ -270,11 +278,11 @@ def YAML(**kw):
res = st.getvalue()
print(res)
if unordered_lines:
- res = sorted(res.splitlines())
- expected = sorted(expected.splitlines())
+ res = sorted(res.splitlines()) # type: ignore
+ expected = sorted(expected.splitlines()) # type: ignore
assert res == expected
- def round_trip(self, stream, **kw):
+ def round_trip(self, stream: Any, **kw: Any) -> None:
from ruamel.yaml.compat import StringIO, BytesIO # NOQA
assert isinstance(stream, str)
@@ -291,7 +299,7 @@ def YAML(**kw):
diff(outp, res, 'input string')
assert res == outp
- def round_trip_all(self, stream, **kw):
+ def round_trip_all(self, stream: Any, **kw: Any) -> None:
from ruamel.yaml.compat import StringIO, BytesIO # NOQA
assert isinstance(stream, str)
@@ -311,7 +319,13 @@ def YAML(**kw):
return MyYAML(**kw)
-def save_and_run(program, base_dir=None, output=None, file_name=None, optimized=False):
+def save_and_run(
+ program: str,
+ base_dir: Optional[Any] = None,
+ output: Optional[Any] = None,
+ file_name: Optional[Any] = None,
+ optimized: bool = False,
+) -> int:
"""
save and run a python program, thereby circumventing any restrictions on module level
imports
@@ -322,7 +336,7 @@ def save_and_run(program, base_dir=None, output=None, file_name=None, optimized=
base_dir = Path(str(base_dir))
if file_name is None:
file_name = 'safe_and_run_tmp.py'
- file_name = base_dir / file_name
+ file_name = base_dir / file_name # type: ignore
file_name.write_text(dedent(program))
try:
@@ -335,9 +349,9 @@ def save_and_run(program, base_dir=None, output=None, file_name=None, optimized=
res = check_output(cmd, stderr=STDOUT, universal_newlines=True, cwd=str(base_dir))
if output is not None:
if '__pypy__' in sys.builtin_module_names:
- res = res.splitlines(True)
- res = [line for line in res if 'no version info' not in line]
- res = ''.join(res)
+ res1 = res.splitlines(True)
+ res2 = [line for line in res1 if 'no version info' not in line]
+ res = ''.join(res2)
print('result: ', res, end='')
print('expected:', output, end='')
assert res == output
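
The typed helpers in `_test/roundtrip.py` all reduce to the same load-then-dump
cycle. A minimal sketch of what `round_trip_load`/`round_trip_dump` wrap (the
YAML text is illustrative):

    import io
    import ruamel.yaml

    src = 'a: 1  # keep this comment\nb:\n- 2\n- 3\n'
    yaml = ruamel.yaml.YAML()        # the round-trip type preserves comments, quotes and key order
    data = yaml.load(src)

    buf = io.StringIO()
    yaml.dump(data, buf)
    print(buf.getvalue())            # for input like this the text comes back unchanged,
                                     # which is essentially what the round_trip() helper asserts
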
diff --git a/_test/test_a_dedent.py b/_test/test_a_dedent.py
index 447bdde..e13a54b 100644
--- a/_test/test_a_dedent.py
+++ b/_test/test_a_dedent.py
@@ -4,7 +4,7 @@ from roundtrip import dedent
class TestDedent:
- def test_start_newline(self):
+ def test_start_newline(self) -> None:
# fmt: off
x = dedent("""
123
@@ -13,7 +13,7 @@ class TestDedent:
# fmt: on
assert x == '123\n 456\n'
- def test_start_space_newline(self):
+ def test_start_space_newline(self) -> None:
# special construct to prevent stripping of following whitespace
# fmt: off
x = dedent(" " """
@@ -22,7 +22,7 @@ class TestDedent:
# fmt: on
assert x == '123\n'
- def test_start_no_newline(self):
+ def test_start_no_newline(self) -> None:
# special construct to prevent stripping of following whitespace
x = dedent("""\
123
@@ -30,17 +30,17 @@ class TestDedent:
""")
assert x == '123\n 456\n'
- def test_preserve_no_newline_at_end(self):
+ def test_preserve_no_newline_at_end(self) -> None:
x = dedent("""
123""")
assert x == '123'
- def test_preserve_no_newline_at_all(self):
+ def test_preserve_no_newline_at_all(self) -> None:
x = dedent("""\
123""")
assert x == '123'
- def test_multiple_dedent(self):
+ def test_multiple_dedent(self) -> None:
x = dedent(
dedent("""
123
diff --git a/_test/test_add_xxx.py b/_test/test_add_xxx.py
index 8beac65..5f12ece 100644
--- a/_test/test_add_xxx.py
+++ b/_test/test_add_xxx.py
@@ -1,31 +1,32 @@
# coding: utf-8
import re
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import dedent, round_trip_dump # NOQA
+from typing import Any
# from PyYAML docs
-class Dice(tuple):
- def __new__(cls, a, b):
+class Dice(tuple): # type: ignore
+ def __new__(cls, a: int, b: int) -> "Dice":
return tuple.__new__(cls, [a, b])
- def __repr__(self):
+ def __repr__(self) -> str:
return 'Dice(%s,%s)' % self
-def dice_constructor(loader, node):
+def dice_constructor(loader: Any, node: Any) -> Dice:
value = loader.construct_scalar(node)
a, b = map(int, value.split('d'))
return Dice(a, b)
-def dice_representer(dumper, data):
+def dice_representer(dumper: Any, data: Any) -> Any:
return dumper.represent_scalar('!dice', '{}d{}'.format(*data))
-def test_dice_constructor():
+def test_dice_constructor() -> None:
import ruamel.yaml # NOQA
yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
@@ -34,7 +35,7 @@ def test_dice_constructor():
assert str(data) == "{'initial hit points': Dice(8,4)}"
-def test_dice_constructor_with_loader():
+def test_dice_constructor_with_loader() -> None:
import ruamel.yaml # NOQA
yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
@@ -43,7 +44,7 @@ def test_dice_constructor_with_loader():
assert str(data) == "{'initial hit points': Dice(8,4)}"
-def test_dice_representer():
+def test_dice_representer() -> None:
import ruamel.yaml # NOQA
yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
@@ -55,7 +56,7 @@ def test_dice_representer():
assert buf.getvalue() == 'gold: !dice 10d6\n'
-def test_dice_implicit_resolver():
+def test_dice_implicit_resolver() -> None:
import ruamel.yaml # NOQA
yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
@@ -68,26 +69,26 @@ def test_dice_implicit_resolver():
assert yaml.load('damage: 5d10') == dict(damage=Dice(5, 10))
-class Obj1(dict):
- def __init__(self, suffix):
+class Obj1(dict): # type: ignore
+ def __init__(self, suffix: Any) -> None:
self._suffix = suffix
self._node = None
- def add_node(self, n):
+ def add_node(self, n: Any) -> None:
self._node = n
- def __repr__(self):
+ def __repr__(self) -> str:
return 'Obj1(%s->%s)' % (self._suffix, self.items())
- def dump(self):
+ def dump(self) -> str:
return repr(self._node)
-class YAMLObj1(object):
+class YAMLObj1:
yaml_tag = '!obj:'
@classmethod
- def from_yaml(cls, loader, suffix, node):
+ def from_yaml(cls, loader: Any, suffix: Any, node: Any) -> Any:
import ruamel.yaml # NOQA
obj1 = Obj1(suffix)
@@ -98,11 +99,11 @@ class YAMLObj1(object):
return obj1
@classmethod
- def to_yaml(cls, dumper, data):
+ def to_yaml(cls, dumper: Any, data: Any) -> Any:
return dumper.represent_scalar(cls.yaml_tag + data._suffix, data.dump())
-def test_yaml_obj():
+def test_yaml_obj() -> None:
import ruamel.yaml # NOQA
yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
@@ -115,7 +116,7 @@ def test_yaml_obj():
assert buf.getvalue() == """!obj:x.2 "{'a': 1}"\n"""
-def test_yaml_obj_with_loader_and_dumper():
+def test_yaml_obj_with_loader_and_dumper() -> None:
import ruamel.yaml # NOQA
yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
@@ -137,25 +138,25 @@ def test_yaml_obj_with_loader_and_dumper():
# Issue 127 reported by Tommy Wang
-def test_issue_127():
+def test_issue_127() -> None:
import ruamel.yaml # NOQA
class Ref(ruamel.yaml.YAMLObject):
- yaml_constructor = ruamel.yaml.RoundTripConstructor
- yaml_representer = ruamel.yaml.RoundTripRepresenter
+ yaml_constructor = ruamel.yaml.RoundTripConstructor # type: ignore
+ yaml_representer = ruamel.yaml.RoundTripRepresenter # type: ignore
yaml_tag = '!Ref'
- def __init__(self, logical_id):
+ def __init__(self, logical_id: Any) -> None:
self.logical_id = logical_id
@classmethod
- def from_yaml(cls, loader, node):
+ def from_yaml(cls, loader: Any, node: Any) -> Any:
return cls(loader.construct_scalar(node))
@classmethod
- def to_yaml(cls, dumper, data):
+ def to_yaml(cls, dumper: Any, data: Any) -> Any:
if isinstance(data.logical_id, ruamel.yaml.scalarstring.ScalarString):
- style = data.logical_id.style # ruamel.yaml>0.15.8
+ style = data.logical_id.style # type: ignore # ruamel.yaml>0.15.8
else:
style = None
return dumper.represent_scalar(cls.yaml_tag, data.logical_id, style=style)
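
The tests above register a constructor and a representer for the `!dice` tag on
a `YAML()` instance. A condensed sketch of that registration, following the
PyYAML-derived Dice example used in the test file:

    import io
    import ruamel.yaml

    class Dice(tuple):
        def __new__(cls, a, b):
            return tuple.__new__(cls, [a, b])

        def __repr__(self):
            return 'Dice(%s,%s)' % self

    def dice_representer(dumper, data):
        return dumper.represent_scalar('!dice', '{}d{}'.format(*data))

    def dice_constructor(loader, node):
        a, b = map(int, loader.construct_scalar(node).split('d'))
        return Dice(a, b)

    yaml = ruamel.yaml.YAML(typ='unsafe', pure=True)
    yaml.representer.add_representer(Dice, dice_representer)
    yaml.constructor.add_constructor('!dice', dice_constructor)

    buf = io.StringIO()
    yaml.dump({'gold': Dice(10, 6)}, buf)
    print(buf.getvalue())                      # gold: !dice 10d6
    print(yaml.load('damage: !dice 5d10'))     # {'damage': Dice(5,10)}
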
diff --git a/_test/test_anchor.py b/_test/test_anchor.py
index 3c83886..da0f1ef 100644
--- a/_test/test_anchor.py
+++ b/_test/test_anchor.py
@@ -4,26 +4,26 @@
testing of anchors and the aliases referring to them
"""
-import pytest
-from textwrap import dedent
+import pytest # type: ignore # NOQA
import platform
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump, YAML # NOQA
+from typing import Any
-def load(s):
+def load(s: str) -> Any:
return round_trip_load(dedent(s))
-def compare(d, s):
+def compare(d: Any, s: str) -> None:
assert round_trip_dump(d) == dedent(s)
class TestAnchorsAliases:
- def test_anchor_id_renumber(self):
+ def test_anchor_id_renumber(self) -> None:
from ruamel.yaml.serializer import Serializer
- assert Serializer.ANCHOR_TEMPLATE == 'id%03d'
+ assert Serializer.ANCHOR_TEMPLATE == 'id{:03d}'
data = load("""
a: &id002
b: 1
@@ -40,7 +40,7 @@ class TestAnchorsAliases:
""",
)
- def test_template_matcher(self):
+ def test_template_matcher(self) -> None:
"""test if id matches the anchor template"""
from ruamel.yaml.serializer import templated_id
@@ -53,13 +53,13 @@ class TestAnchorsAliases:
assert not templated_id('id000')
assert not templated_id('x000')
- # def test_re_matcher(self):
+ # def test_re_matcher(self) -> None:
# import re
# assert re.compile('id(?!000)\\d{3,}').match('id001')
# assert not re.compile('id(?!000\\d*)\\d{3,}').match('id000')
# assert re.compile('id(?!000$)\\d{3,}').match('id0001')
- def test_anchor_assigned(self):
+ def test_anchor_assigned(self) -> None:
from ruamel.yaml.comments import CommentedMap
data = load("""
@@ -80,7 +80,7 @@ class TestAnchorsAliases:
assert e.yaml_anchor().value == 'etemplate'
assert e.yaml_anchor().always_dump is False
- def test_anchor_id_retained(self):
+ def test_anchor_id_retained(self) -> None:
data = load("""
a: &id002
b: 1
@@ -105,10 +105,10 @@ class TestAnchorsAliases:
""",
)
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_alias_before_anchor(self):
+ def test_alias_before_anchor(self) -> None:
from ruamel.yaml.composer import ComposerError
with pytest.raises(ComposerError):
@@ -120,7 +120,7 @@ class TestAnchorsAliases:
""")
data = data
- def test_anchor_on_sequence(self):
+ def test_anchor_on_sequence(self) -> None:
# as reported by Bjorn Stabell
# https://bitbucket.org/ruamel/yaml/issue/7/anchor-names-not-preserved
from ruamel.yaml.comments import CommentedSeq
@@ -165,7 +165,7 @@ class TestAnchorsAliases:
label: center/huge
""")
- def test_merge_00(self):
+ def test_merge_00(self) -> None:
data = load(self.merge_yaml)
d = data[4]
ok = True
@@ -181,7 +181,7 @@ class TestAnchorsAliases:
print('key', k, d.get(k), data[o].get(k))
assert ok
- def test_merge_accessible(self):
+ def test_merge_accessible(self) -> None:
from ruamel.yaml.comments import CommentedMap, merge_attrib
data = load("""
@@ -196,11 +196,11 @@ class TestAnchorsAliases:
assert isinstance(d, CommentedMap)
assert hasattr(d, merge_attrib)
- def test_merge_01(self):
+ def test_merge_01(self) -> None:
data = load(self.merge_yaml)
compare(data, self.merge_yaml)
- def test_merge_nested(self):
+ def test_merge_nested(self) -> None:
yaml = """
a:
<<: &content
@@ -212,7 +212,7 @@ class TestAnchorsAliases:
"""
data = round_trip(yaml) # NOQA
- def test_merge_nested_with_sequence(self):
+ def test_merge_nested_with_sequence(self) -> None:
yaml = """
a:
<<: &content
@@ -225,7 +225,7 @@ class TestAnchorsAliases:
"""
data = round_trip(yaml) # NOQA
- def test_add_anchor(self):
+ def test_add_anchor(self) -> None:
from ruamel.yaml.comments import CommentedMap
data = CommentedMap()
@@ -246,7 +246,7 @@ class TestAnchorsAliases:
)
# this is an error in PyYAML
- def test_reused_anchor(self):
+ def test_reused_anchor(self) -> None:
from ruamel.yaml.error import ReusedAnchorWarning
yaml = """
@@ -260,7 +260,7 @@ class TestAnchorsAliases:
with pytest.warns(ReusedAnchorWarning):
data = round_trip(yaml) # NOQA
- def test_issue_130(self):
+ def test_issue_130(self) -> None:
# issue 130 reported by Devid Fee
import ruamel.yaml
@@ -285,7 +285,7 @@ class TestAnchorsAliases:
data = yaml.load(ys)
assert data['services']['shell']['components']['server']['port'] == 8000
- def test_issue_130a(self):
+ def test_issue_130a(self) -> None:
# issue 130 reported by Devid Fee
import ruamel.yaml
@@ -331,8 +331,8 @@ class TestMergeKeysValues:
# in the following d always has "expanded" the merges
- def test_merge_for(self):
- from ruamel.yaml import YAML
+ def test_merge_for(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
d = YAML(typ='safe', pure=True).load(self.yaml_str)
data = round_trip_load(self.yaml_str)
@@ -342,8 +342,8 @@ class TestMergeKeysValues:
print(count, x)
assert count == len(d[2])
- def test_merge_keys(self):
- from ruamel.yaml import YAML
+ def test_merge_keys(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
d = YAML(typ='safe', pure=True).load(self.yaml_str)
data = round_trip_load(self.yaml_str)
@@ -353,8 +353,8 @@ class TestMergeKeysValues:
print(count, x)
assert count == len(d[2])
- def test_merge_values(self):
- from ruamel.yaml import YAML
+ def test_merge_values(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
d = YAML(typ='safe', pure=True).load(self.yaml_str)
data = round_trip_load(self.yaml_str)
@@ -364,8 +364,8 @@ class TestMergeKeysValues:
print(count, x)
assert count == len(d[2])
- def test_merge_items(self):
- from ruamel.yaml import YAML
+ def test_merge_items(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
d = YAML(typ='safe', pure=True).load(self.yaml_str)
data = round_trip_load(self.yaml_str)
@@ -375,8 +375,8 @@ class TestMergeKeysValues:
print(count, x)
assert count == len(d[2])
- def test_len_items_delete(self):
- from ruamel.yaml import YAML
+ def test_len_items_delete(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
d = YAML(typ='safe', pure=True).load(self.yaml_str)
data = round_trip_load(self.yaml_str)
@@ -395,8 +395,8 @@ class TestMergeKeysValues:
ref -= 1
assert len(x) == ref
- def test_issue_196_cast_of_dict(self, capsys):
- from ruamel.yaml import YAML
+ def test_issue_196_cast_of_dict(self, capsys: Any) -> None:
+ from ruamel.yaml import YAML # type: ignore
yaml = YAML()
mapping = yaml.load("""\
@@ -433,15 +433,15 @@ class TestMergeKeysValues:
assert 'a' in dict(mapping)
assert 'a' in dict(mapping.items())
- def test_values_of_merged(self):
- from ruamel.yaml import YAML
+ def test_values_of_merged(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
yaml = YAML()
data = yaml.load(dedent(self.yaml_str))
assert list(data[2].values()) == [1, 6, 'x2', 'x3', 'y4']
- def test_issue_213_copy_of_merge(self):
- from ruamel.yaml import YAML
+ def test_issue_213_copy_of_merge(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
yaml = YAML()
d = yaml.load("""\
@@ -461,9 +461,9 @@ class TestMergeKeysValues:
class TestDuplicateKeyThroughAnchor:
- def test_duplicate_key_00(self):
+ def test_duplicate_key_00(self) -> None:
from ruamel.yaml import version_info
- from ruamel.yaml import YAML
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.constructor import DuplicateKeyFutureWarning, DuplicateKeyError
s = dedent("""\
@@ -486,7 +486,7 @@ class TestDuplicateKeyThroughAnchor:
with pytest.raises(DuplicateKeyError):
YAML(typ='rt').load(s)
- def test_duplicate_key_01(self):
+ def test_duplicate_key_01(self) -> None:
# so issue https://stackoverflow.com/a/52852106/1307905
from ruamel.yaml import version_info
from ruamel.yaml.constructor import DuplicateKeyError
@@ -511,7 +511,7 @@ class TestDuplicateKeyThroughAnchor:
class TestFullCharSetAnchors:
- def test_master_of_orion(self):
+ def test_master_of_orion(self) -> None:
# https://bitbucket.org/ruamel/yaml/issues/72/not-allowed-in-anchor-names
# submitted by Shalon Wood
yaml_str = """
@@ -522,7 +522,7 @@ class TestFullCharSetAnchors:
"""
data = load(yaml_str) # NOQA
- def test_roundtrip_00(self):
+ def test_roundtrip_00(self) -> None:
yaml_str = """
- &dotted.words.here
a: 1
@@ -531,7 +531,7 @@ class TestFullCharSetAnchors:
"""
data = round_trip(yaml_str) # NOQA
- def test_roundtrip_01(self):
+ def test_roundtrip_01(self) -> None:
yaml_str = """
- &dotted.words.here[a, b]
- *dotted.words.here
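
A sketch of the anchor and merge-key behaviour exercised above: merged keys
resolve transparently on lookup and the anchor name is kept on the loaded node
(the document is made up):

    import io
    import ruamel.yaml

    src = ('defaults: &defaults\n'
           '  retries: 3\n'
           '  timeout: 10\n'
           'service:\n'
           '  <<: *defaults\n'
           '  timeout: 30\n')

    yaml = ruamel.yaml.YAML()
    data = yaml.load(src)
    print(data['service']['retries'])            # 3, resolved through the merge key
    print(data['defaults'].yaml_anchor().value)  # 'defaults', the anchor is retained

    buf = io.StringIO()
    yaml.dump(data, buf)                         # the &defaults anchor and <<: *defaults merge survive
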
diff --git a/_test/test_api_change.py b/_test/test_api_change.py
index 22250b8..8961273 100644
--- a/_test/test_api_change.py
+++ b/_test/test_api_change.py
@@ -6,12 +6,14 @@ testing of anchors and the aliases referring to them
import sys
import textwrap
-import pytest
+import pytest # type: ignore
from pathlib import Path
+from typing import Any
+
class TestNewAPI:
- def test_duplicate_keys_00(self):
+ def test_duplicate_keys_00(self) -> None:
from ruamel.yaml import YAML
from ruamel.yaml.constructor import DuplicateKeyError
@@ -19,7 +21,7 @@ class TestNewAPI:
with pytest.raises(DuplicateKeyError):
yaml.load('{a: 1, a: 2}')
- def test_duplicate_keys_01(self):
+ def test_duplicate_keys_01(self) -> None:
from ruamel.yaml import YAML
from ruamel.yaml.constructor import DuplicateKeyError
@@ -27,7 +29,7 @@ class TestNewAPI:
with pytest.raises(DuplicateKeyError):
yaml.load('{a: 1, a: 2}')
- def test_duplicate_keys_02(self):
+ def test_duplicate_keys_02(self) -> None:
from ruamel.yaml import YAML
from ruamel.yaml.constructor import DuplicateKeyError
@@ -35,7 +37,7 @@ class TestNewAPI:
with pytest.raises(DuplicateKeyError):
yaml.load('{a: 1, a: 2}')
- def test_issue_135(self):
+ def test_issue_135(self) -> None:
# reported by Andrzej Ostrowski
from ruamel.yaml import YAML
@@ -44,7 +46,7 @@ class TestNewAPI:
# originally on 2.7: with pytest.raises(TypeError):
yaml.dump(data, sys.stdout)
- def test_issue_135_temporary_workaround(self):
+ def test_issue_135_temporary_workaround(self) -> None:
# never raised error
from ruamel.yaml import YAML
@@ -54,7 +56,7 @@ class TestNewAPI:
class TestWrite:
- def test_dump_path(self, tmpdir):
+ def test_dump_path(self, tmpdir: Any) -> None:
from ruamel.yaml import YAML
fn = Path(str(tmpdir)) / 'test.yaml'
@@ -65,7 +67,7 @@ class TestWrite:
yaml.dump(data, fn)
assert fn.read_text() == 'a: 1\nb: 2\n'
- def test_dump_file(self, tmpdir):
+ def test_dump_file(self, tmpdir: Any) -> None:
from ruamel.yaml import YAML
fn = Path(str(tmpdir)) / 'test.yaml'
@@ -77,7 +79,7 @@ class TestWrite:
yaml.dump(data, fp)
assert fn.read_text() == 'a: 1\nb: 2\n'
- def test_dump_missing_stream(self):
+ def test_dump_missing_stream(self) -> None:
from ruamel.yaml import YAML
yaml = YAML()
@@ -87,7 +89,7 @@ class TestWrite:
with pytest.raises(TypeError):
yaml.dump(data)
- def test_dump_too_many_args(self, tmpdir):
+ def test_dump_too_many_args(self, tmpdir: Any) -> None:
from ruamel.yaml import YAML
fn = Path(str(tmpdir)) / 'test.yaml'
@@ -96,12 +98,12 @@ class TestWrite:
data['a'] = 1
data['b'] = 2
with pytest.raises(TypeError):
- yaml.dump(data, fn, True)
+ yaml.dump(data, fn, True) # type: ignore
- def test_transform(self, tmpdir):
+ def test_transform(self, tmpdir: Any) -> None:
from ruamel.yaml import YAML
- def tr(s):
+ def tr(s: str) -> str:
return s.replace(' ', ' ')
fn = Path(str(tmpdir)) / 'test.yaml'
@@ -112,7 +114,7 @@ class TestWrite:
yaml.dump(data, fn, transform=tr)
assert fn.read_text() == 'a: 1\nb: 2\n'
- def test_print(self, capsys):
+ def test_print(self, capsys: Any) -> None:
from ruamel.yaml import YAML
yaml = YAML()
@@ -125,7 +127,7 @@ class TestWrite:
class TestRead:
- def test_multi_load(self):
+ def test_multi_load(self) -> None:
# make sure reader, scanner, parser get reset
from ruamel.yaml import YAML
@@ -133,7 +135,7 @@ class TestRead:
yaml.load('a: 1')
yaml.load('a: 1') # did not work in 0.15.4
- def test_parse(self):
+ def test_parse(self) -> None:
# ensure `parse` method is functional and can parse "unsafe" yaml
from ruamel.yaml import YAML
from ruamel.yaml.constructor import ConstructorError
@@ -150,7 +152,7 @@ class TestRead:
class TestLoadAll:
- def test_multi_document_load(self, tmpdir):
+ def test_multi_document_load(self, tmpdir: Any) -> None:
"""this went wrong on 3.7 because of StopIteration, PR 37 and Issue 211"""
from ruamel.yaml import YAML
@@ -169,7 +171,7 @@ class TestLoadAll:
class TestDuplSet:
- def test_dupl_set_00(self):
+ def test_dupl_set_00(self) -> None:
# round-trip-loader should except
from ruamel.yaml import YAML
from ruamel.yaml.constructor import DuplicateKeyError
@@ -190,7 +192,7 @@ class TestDuplSet:
class TestDumpLoadUnicode:
# test triggered by SamH on stackoverflow (https://stackoverflow.com/q/45281596/1307905)
# and answer by randomir (https://stackoverflow.com/a/45281922/1307905)
- def test_write_unicode(self, tmpdir):
+ def test_write_unicode(self, tmpdir: Any) -> None:
from ruamel.yaml import YAML
yaml = YAML()
@@ -199,7 +201,7 @@ class TestDumpLoadUnicode:
yaml.dump(text_dict, open(file_name, 'w'))
assert open(file_name, 'rb').read().decode('utf-8') == 'text: HELLO_WORLD©\n'
- def test_read_unicode(self, tmpdir):
+ def test_read_unicode(self, tmpdir: Any) -> None:
from ruamel.yaml import YAML
yaml = YAML()
@@ -211,7 +213,7 @@ class TestDumpLoadUnicode:
class TestFlowStyle:
- def test_flow_style(self, capsys):
+ def test_flow_style(self, capsys: Any) -> None:
# https://stackoverflow.com/questions/45791712/
from ruamel.yaml import YAML
@@ -226,8 +228,8 @@ class TestFlowStyle:
class TestOldAPI:
- @pytest.mark.skipif(sys.version_info >= (3, 0), reason='ok on Py3')
- def test_duplicate_keys_02(self):
+ @pytest.mark.skipif(sys.version_info >= (3, 0), reason='ok on Py3') # type: ignore
+ def test_duplicate_keys_02(self) -> None:
# Issue 165 unicode keys in error/warning
from ruamel.yaml import safe_load
from ruamel.yaml.constructor import DuplicateKeyError
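
A compact sketch of the new-API behaviours these tests cover: dumping directly
to a `pathlib.Path`, post-processing the output with `transform=`, and the
default rejection of duplicate keys (the temporary directory is illustrative):

    import pathlib
    import tempfile
    import ruamel.yaml
    from ruamel.yaml.constructor import DuplicateKeyError

    yaml = ruamel.yaml.YAML()

    fn = pathlib.Path(tempfile.mkdtemp()) / 'test.yaml'
    yaml.dump({'a': 1, 'b': 2}, fn)                  # a Path is accepted as the stream
    print(fn.read_text())                            # a: 1 / b: 2

    yaml.dump({'a': 1}, fn, transform=str.upper)     # transform is applied to the rendered text

    try:
        yaml.load('{a: 1, a: 2}')                    # duplicate keys raise by default
    except DuplicateKeyError:
        print('duplicate key rejected')
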
diff --git a/_test/test_class_register.py b/_test/test_class_register.py
index d996269..fdd0275 100644
--- a/_test/test_class_register.py
+++ b/_test/test_class_register.py
@@ -4,33 +4,36 @@
testing of YAML.register_class and @yaml_object
"""
+from typing import Any
+from ruamel.yaml.comments import TaggedScalar, CommentedMap # NOQA
+
from roundtrip import YAML
-class User0(object):
- def __init__(self, name, age):
+class User0:
+ def __init__(self, name: str, age: int) -> None:
self.name = name
self.age = age
-class User1(object):
+class User1:
yaml_tag = '!user'
- def __init__(self, name, age):
+ def __init__(self, name: str, age: int) -> None:
self.name = name
self.age = age
@classmethod
- def to_yaml(cls, representer, node):
+ def to_yaml(cls, representer: Any, node: Any) -> Any:
return representer.represent_scalar(cls.yaml_tag, '{.name}-{.age}'.format(node, node))
@classmethod
- def from_yaml(cls, constructor, node):
+ def from_yaml(cls, constructor: Any, node: Any) -> Any:
return cls(*node.value.split('-'))
-class TestRegisterClass(object):
- def test_register_0_rt(self):
+class TestRegisterClass:
+ def test_register_0_rt(self) -> None:
yaml = YAML()
yaml.register_class(User0)
ys = """
@@ -41,7 +44,7 @@ class TestRegisterClass(object):
d = yaml.load(ys)
yaml.dump(d, compare=ys, unordered_lines=True)
- def test_register_0_safe(self):
+ def test_register_0_safe(self) -> None:
# default_flow_style = None
yaml = YAML(typ='safe')
yaml.register_class(User0)
@@ -51,7 +54,7 @@ class TestRegisterClass(object):
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_register_0_unsafe(self):
+ def test_register_0_unsafe(self) -> None:
# default_flow_style = None
yaml = YAML(typ='unsafe')
yaml.register_class(User0)
@@ -61,7 +64,7 @@ class TestRegisterClass(object):
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_register_1_rt(self):
+ def test_register_1_rt(self) -> None:
yaml = YAML()
yaml.register_class(User1)
ys = """
@@ -70,7 +73,7 @@ class TestRegisterClass(object):
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_register_1_safe(self):
+ def test_register_1_safe(self) -> None:
yaml = YAML(typ='safe')
yaml.register_class(User1)
ys = """
@@ -79,7 +82,7 @@ class TestRegisterClass(object):
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_register_1_unsafe(self):
+ def test_register_1_unsafe(self) -> None:
yaml = YAML(typ='unsafe')
yaml.register_class(User1)
ys = """
@@ -89,15 +92,15 @@ class TestRegisterClass(object):
yaml.dump(d, compare=ys)
-class TestDecorator(object):
- def test_decorator_implicit(self):
+class TestDecorator:
+ def test_decorator_implicit(self) -> None:
from ruamel.yaml import yaml_object
yml = YAML()
@yaml_object(yml)
- class User2(object):
- def __init__(self, name, age):
+ class User2:
+ def __init__(self, name: str, age: int) -> None:
self.name = name
self.age = age
@@ -109,27 +112,27 @@ class TestDecorator(object):
d = yml.load(ys)
yml.dump(d, compare=ys, unordered_lines=True)
- def test_decorator_explicit(self):
+ def test_decorator_explicit(self) -> None:
from ruamel.yaml import yaml_object
yml = YAML()
@yaml_object(yml)
- class User3(object):
+ class User3:
yaml_tag = '!USER'
- def __init__(self, name, age):
+ def __init__(self, name: str, age: int) -> None:
self.name = name
self.age = age
@classmethod
- def to_yaml(cls, representer, node):
+ def to_yaml(cls, representer: Any, node: Any) -> Any:
return representer.represent_scalar(
cls.yaml_tag, '{.name}-{.age}'.format(node, node)
)
@classmethod
- def from_yaml(cls, constructor, node):
+ def from_yaml(cls, constructor: Any, node: Any) -> Any:
return cls(*node.value.split('-'))
ys = """
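
The registration tested above can be written either as `yaml.register_class(User)`
or with the `@yaml_object` decorator; a condensed sketch using the explicit
`to_yaml`/`from_yaml` hooks from the test:

    import io
    import ruamel.yaml

    yaml = ruamel.yaml.YAML()

    @ruamel.yaml.yaml_object(yaml)             # equivalent to calling yaml.register_class(User)
    class User:
        yaml_tag = '!user'

        def __init__(self, name, age):
            self.name = name
            self.age = age

        @classmethod
        def to_yaml(cls, representer, node):
            return representer.represent_scalar(cls.yaml_tag, f'{node.name}-{node.age}')

        @classmethod
        def from_yaml(cls, constructor, node):
            name, age = node.value.split('-')
            return cls(name, int(age))

    buf = io.StringIO()
    yaml.dump({'user': User('anthon', 18)}, buf)
    print(buf.getvalue())                      # user: !user anthon-18
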
diff --git a/_test/test_collections.py b/_test/test_collections.py
index 40af9db..d6e88ef 100644
--- a/_test/test_collections.py
+++ b/_test/test_collections.py
@@ -7,14 +7,14 @@ This is now so integrated in Python that it can be mapped to !!omap
"""
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NOQA
class TestOrderedDict:
- def test_ordereddict(self):
+ def test_ordereddict(self) -> None:
from collections import OrderedDict
assert round_trip_dump(OrderedDict()) == '!!omap []\n'
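
For reference, the `!!omap` output that `test_ordereddict` asserts comes straight
from dumping a `collections.OrderedDict` with the round-trip dumper; roughly:

    import io
    from collections import OrderedDict
    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    buf = io.StringIO()
    yaml.dump(OrderedDict([('a', 1), ('b', 2)]), buf)
    print(buf.getvalue())
    # !!omap
    # - a: 1
    # - b: 2
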
diff --git a/_test/test_comment_manipulation.py b/_test/test_comment_manipulation.py
index 6d706f2..979b386 100644
--- a/_test/test_comment_manipulation.py
+++ b/_test/test_comment_manipulation.py
@@ -1,28 +1,29 @@
# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NOQA
+from typing import Any
-def load(s):
+def load(s: str) -> Any:
return round_trip_load(dedent(s))
-def compare(data, s, **kw):
+def compare(data: Any, s: str, **kw: Any) -> None:
assert round_trip_dump(data, **kw) == dedent(s)
-def compare_eol(data, s):
+def compare_eol(data: Any, s: str) -> None:
assert 'EOL' in s
ds = dedent(s).replace('EOL', '').replace('\n', '|\n')
- assert round_trip_dump(data).replace('\n', '|\n') == ds
+ assert round_trip_dump(data).replace('\n', '|\n') == ds # type: ignore
class TestCommentsManipulation:
# list
- def test_seq_set_comment_on_existing_explicit_column(self):
+ def test_seq_set_comment_on_existing_explicit_column(self) -> None:
data = load("""
- a # comment 1
- b
@@ -36,7 +37,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_seq_overwrite_comment_on_existing_explicit_column(self):
+ def test_seq_overwrite_comment_on_existing_explicit_column(self) -> None:
data = load("""
- a # comment 1
- b
@@ -50,7 +51,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_seq_first_comment_explicit_column(self):
+ def test_seq_first_comment_explicit_column(self) -> None:
data = load("""
- a
- b
@@ -64,7 +65,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_seq_set_comment_on_existing_column_prev(self):
+ def test_seq_set_comment_on_existing_column_prev(self) -> None:
data = load("""
- a # comment 1
- b
@@ -80,14 +81,14 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_seq_set_comment_on_existing_column_next(self):
+ def test_seq_set_comment_on_existing_column_next(self) -> None:
data = load("""
- a # comment 1
- b
- c
- d # comment 3
""")
- print(data._yaml_comment)
+ print(data.ca)
# print(type(data._yaml_comment._items[0][0].start_mark))
# ruamel.yaml.error.Mark
# print(type(data._yaml_comment._items[0][0].start_mark))
@@ -100,7 +101,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_seq_set_comment_on_existing_column_further_away(self):
+ def test_seq_set_comment_on_existing_column_further_away(self) -> None:
"""
no comment line before or after, take the latest before
the new position
@@ -113,7 +114,7 @@ class TestCommentsManipulation:
- e
- f # comment 3
""")
- print(data._yaml_comment)
+ print(data.ca)
# print(type(data._yaml_comment._items[0][0].start_mark))
# ruamel.yaml.error.Mark
# print(type(data._yaml_comment._items[0][0].start_mark))
@@ -128,7 +129,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_seq_set_comment_on_existing_explicit_column_with_hash(self):
+ def test_seq_set_comment_on_existing_explicit_column_with_hash(self) -> None:
data = load("""
- a # comment 1
- b
@@ -144,7 +145,7 @@ class TestCommentsManipulation:
# dict
- def test_dict_set_comment_on_existing_explicit_column(self):
+ def test_dict_set_comment_on_existing_explicit_column(self) -> None:
data = load("""
a: 1 # comment 1
b: 2
@@ -162,7 +163,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_dict_overwrite_comment_on_existing_explicit_column(self):
+ def test_dict_overwrite_comment_on_existing_explicit_column(self) -> None:
data = load("""
a: 1 # comment 1
b: 2
@@ -180,7 +181,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_map_set_comment_on_existing_column_prev(self):
+ def test_map_set_comment_on_existing_column_prev(self) -> None:
data = load("""
a: 1 # comment 1
b: 2
@@ -198,7 +199,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_map_set_comment_on_existing_column_next(self):
+ def test_map_set_comment_on_existing_column_next(self) -> None:
data = load("""
a: 1 # comment 1
b: 2
@@ -216,7 +217,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_map_set_comment_on_existing_column_further_away(self):
+ def test_map_set_comment_on_existing_column_further_away(self) -> None:
"""
no comment line before or after, take the latest before
the new position
@@ -239,7 +240,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_before_top_map_rt(self):
+ def test_before_top_map_rt(self) -> None:
data = load("""
a: 1
b: 2
@@ -253,7 +254,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'))
- def test_before_top_map_replace(self):
+ def test_before_top_map_replace(self) -> None:
data = load("""
# abc
# def
@@ -269,7 +270,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'))
- def test_before_top_map_from_scratch(self):
+ def test_before_top_map_from_scratch(self) -> None:
from ruamel.yaml.comments import CommentedMap
data = CommentedMap()
@@ -286,7 +287,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'))
- def test_before_top_seq_rt(self):
+ def test_before_top_seq_rt(self) -> None:
data = load("""
- a
- b
@@ -301,7 +302,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def test_before_top_seq_rt_replace(self):
+ def test_before_top_seq_rt_replace(self) -> None:
s = """
# this
# that
@@ -319,7 +320,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'))
- def test_before_top_seq_from_scratch(self):
+ def test_before_top_seq_from_scratch(self) -> None:
from ruamel.yaml.comments import CommentedSeq
data = CommentedSeq()
@@ -336,7 +337,7 @@ class TestCommentsManipulation:
compare(data, exp.format(comment='#'))
# nested variants
- def test_before_nested_map_rt(self):
+ def test_before_nested_map_rt(self) -> None:
data = load("""
a: 1
b:
@@ -354,7 +355,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'))
- def test_before_nested_map_rt_indent(self):
+ def test_before_nested_map_rt_indent(self) -> None:
data = load("""
a: 1
b:
@@ -373,7 +374,7 @@ class TestCommentsManipulation:
compare(data, exp.format(comment='#'))
print(data['b'].ca)
- def test_before_nested_map_from_scratch(self):
+ def test_before_nested_map_from_scratch(self) -> None:
from ruamel.yaml.comments import CommentedMap
data = CommentedMap()
@@ -393,7 +394,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'))
- def test_before_nested_seq_from_scratch(self):
+ def test_before_nested_seq_from_scratch(self) -> None:
from ruamel.yaml.comments import CommentedMap, CommentedSeq
data = CommentedMap()
@@ -413,7 +414,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'))
- def test_before_nested_seq_from_scratch_block_seq_indent(self):
+ def test_before_nested_seq_from_scratch_block_seq_indent(self) -> None:
from ruamel.yaml.comments import CommentedMap, CommentedSeq
data = CommentedMap()
@@ -433,7 +434,7 @@ class TestCommentsManipulation:
"""
compare(data, exp.format(comment='#'), indent=4, block_seq_indent=2)
- def test_map_set_comment_before_and_after_non_first_key_00(self):
+ def test_map_set_comment_before_and_after_non_first_key_00(self) -> None:
# http://stackoverflow.com/a/40705671/1307905
data = load("""
xyz:
@@ -462,7 +463,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def Xtest_map_set_comment_before_and_after_non_first_key_01(self):
+ def Xtest_map_set_comment_before_and_after_non_first_key_01(self) -> None:
data = load("""
xyz:
a: 1 # comment 1
@@ -494,7 +495,7 @@ class TestCommentsManipulation:
# EOL is no longer necessary
# fixed together with issue # 216
- def test_map_set_comment_before_and_after_non_first_key_01(self):
+ def test_map_set_comment_before_and_after_non_first_key_01(self) -> None:
data = load("""
xyz:
a: 1 # comment 1
@@ -523,7 +524,7 @@ class TestCommentsManipulation:
"""
compare(data, exp)
- def Xtest_map_set_comment_before_and_after_non_first_key_02(self):
+ def Xtest_map_set_comment_before_and_after_non_first_key_02(self) -> None:
data = load("""
xyz:
a: 1 # comment 1
@@ -555,7 +556,7 @@ class TestCommentsManipulation:
"""
compare_eol(data, exp)
- def test_map_set_comment_before_and_after_non_first_key_02(self):
+ def test_map_set_comment_before_and_after_non_first_key_02(self) -> None:
data = load("""
xyz:
a: 1 # comment 1
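
The manipulation calls driven by these tests live on the loaded
`CommentedMap`/`CommentedSeq` objects; a hedged sketch of two common ones,
`yaml_set_start_comment` and `yaml_add_eol_comment` (the document and the column
are illustrative, and the rendered output shown is approximate):

    import sys
    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    data = yaml.load('a: 1\nb: 2\n')

    data.yaml_set_start_comment('generated file, do not edit')
    data.yaml_add_eol_comment('comment 1', key='a', column=10)

    yaml.dump(data, sys.stdout)
    # # generated file, do not edit
    # a: 1      # comment 1
    # b: 2
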
diff --git a/_test/test_comments.py b/_test/test_comments.py
index dbf035d..64b8cd2 100644
--- a/_test/test_comments.py
+++ b/_test/test_comments.py
@@ -10,14 +10,14 @@ roundtrip changes
"""
-import pytest
+import pytest # type: ignore # NOQA
import sys
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump
class TestComments:
- def test_no_end_of_file_eol(self):
+ def test_no_end_of_file_eol(self) -> None:
"""not excluding comments caused some problems if at the end of
the file without a newline. First error, then included \0 """
x = """\
@@ -26,7 +26,7 @@ class TestComments:
with pytest.raises(AssertionError):
round_trip(x, extra='a\n')
- def test_no_comments(self):
+ def test_no_comments(self) -> None:
round_trip("""
- europe: 10
- usa:
@@ -34,7 +34,7 @@ class TestComments:
- california: 9
""")
- def test_round_trip_ordering(self):
+ def test_round_trip_ordering(self) -> None:
round_trip("""
a: 1
b: 2
@@ -46,7 +46,7 @@ class TestComments:
f: 6
""")
- def test_complex(self):
+ def test_complex(self) -> None:
round_trip("""
- europe: 10 # top
- usa:
@@ -54,7 +54,7 @@ class TestComments:
- california: 9 # o
""")
- def test_dropped(self):
+ def test_dropped(self) -> None:
s = """\
# comment
scalar
@@ -62,7 +62,7 @@ class TestComments:
"""
round_trip(s, 'scalar\n...\n')
- def test_main_mapping_begin_end(self):
+ def test_main_mapping_begin_end(self) -> None:
round_trip("""
# C start a
# C start b
@@ -73,7 +73,7 @@ class TestComments:
# C end b
""")
- def test_reindent(self):
+ def test_reindent(self) -> None:
x = """\
a:
b: # comment 1
@@ -87,7 +87,7 @@ class TestComments:
c: 1 # comment 2
""")
- def test_main_mapping_begin_end_items_post(self):
+ def test_main_mapping_begin_end_items_post(self) -> None:
round_trip("""
# C start a
# C start b
@@ -98,7 +98,7 @@ class TestComments:
# C end b
""")
- def test_main_sequence_begin_end(self):
+ def test_main_sequence_begin_end(self) -> None:
round_trip("""
# C start a
# C start b
@@ -109,7 +109,7 @@ class TestComments:
# C end b
""")
- def test_main_sequence_begin_end_items_post(self):
+ def test_main_sequence_begin_end_items_post(self) -> None:
round_trip("""
# C start a
# C start b
@@ -120,7 +120,7 @@ class TestComments:
# C end b
""")
- def test_main_mapping_begin_end_complex(self):
+ def test_main_mapping_begin_end_complex(self) -> None:
round_trip("""
# C start a
# C start b
@@ -133,7 +133,7 @@ class TestComments:
# C end b
""")
- def test_09(self): # 2.9 from the examples in the spec
+ def test_09(self) -> None: # 2.9 from the examples in the spec
s = """\
hr: # 1998 hr ranking
- Mark McGwire
@@ -145,7 +145,7 @@ class TestComments:
"""
round_trip(s, indent=4, block_seq_indent=2)
- def test_09a(self):
+ def test_09a(self) -> None:
round_trip("""
hr: # 1998 hr ranking
- Mark McGwire
@@ -156,7 +156,7 @@ class TestComments:
- Ken Griffey
""")
- def test_simple_map_middle_comment(self):
+ def test_simple_map_middle_comment(self) -> None:
round_trip("""
abc: 1
# C 3a
@@ -164,7 +164,7 @@ class TestComments:
ghi: 2
""")
- def test_map_in_map_0(self):
+ def test_map_in_map_0(self) -> None:
round_trip("""
map1: # comment 1
# comment 2
@@ -172,7 +172,7 @@ class TestComments:
key1: val1
""")
- def test_map_in_map_1(self):
+ def test_map_in_map_1(self) -> None:
# comment is moved from value to key
round_trip("""
map1:
@@ -181,7 +181,7 @@ class TestComments:
key1: val1
""")
- def test_application_arguments(self):
+ def test_application_arguments(self) -> None:
        # application configuration
round_trip("""
args:
@@ -194,7 +194,7 @@ class TestComments:
wait: 10
""")
- def test_substitute(self):
+ def test_substitute(self) -> None:
x = """
args:
username: anthon # name
@@ -211,7 +211,7 @@ class TestComments:
x = x.replace(': secret ', ': deleted password')
assert round_trip_dump(data) == dedent(x)
- def test_set_comment(self):
+ def test_set_comment(self) -> None:
round_trip("""
!!set
# the beginning
@@ -222,7 +222,7 @@ class TestComments:
# this is the end
""")
- def test_omap_comment_roundtrip(self):
+ def test_omap_comment_roundtrip(self) -> None:
round_trip("""
!!omap
- a: 1
@@ -231,7 +231,7 @@ class TestComments:
- d: 4
""")
- def test_omap_comment_roundtrip_pre_comment(self):
+ def test_omap_comment_roundtrip_pre_comment(self) -> None:
round_trip("""
!!omap
- a: 1
@@ -241,7 +241,7 @@ class TestComments:
- d: 4
""")
- def test_non_ascii(self):
+ def test_non_ascii(self) -> None:
round_trip("""
verbosity: 1 # 0 is minimal output, -1 none
base_url: http://gopher.net
@@ -263,7 +263,7 @@ class TestComments:
Italy: Rome
""")
- def test_dump_utf8(self):
+ def test_dump_utf8(self) -> None:
import ruamel.yaml # NOQA
x = dedent("""\
@@ -278,7 +278,7 @@ class TestComments:
)
assert y == x
- def test_dump_unicode_utf8(self):
+ def test_dump_unicode_utf8(self) -> None:
import ruamel.yaml # NOQA
x = dedent("""\
@@ -293,7 +293,7 @@ class TestComments:
)
assert y == x
- def test_mlget_00(self):
+ def test_mlget_00(self) -> None:
x = """\
a:
- b:
@@ -314,7 +314,7 @@ class TestInsertPopList:
need to move the values to subsequent keys on insert"""
@property
- def ins(self):
+ def ins(self) -> str:
return """\
ab:
- a # a
@@ -327,7 +327,7 @@ class TestInsertPopList:
- 2
"""
- def test_insert_0(self):
+ def test_insert_0(self) -> None:
d = round_trip_load(self.ins)
d['ab'].insert(0, 'xyz')
y = round_trip_dump(d, indent=2)
@@ -344,7 +344,7 @@ class TestInsertPopList:
- 2
""")
- def test_insert_1(self):
+ def test_insert_1(self) -> None:
d = round_trip_load(self.ins)
d['ab'].insert(4, 'xyz')
y = round_trip_dump(d, indent=2)
@@ -361,7 +361,7 @@ class TestInsertPopList:
- 2
""")
- def test_insert_2(self):
+ def test_insert_2(self) -> None:
d = round_trip_load(self.ins)
d['ab'].insert(1, 'xyz')
y = round_trip_dump(d, indent=2)
@@ -378,7 +378,7 @@ class TestInsertPopList:
- 2
""")
- def test_pop_0(self):
+ def test_pop_0(self) -> None:
d = round_trip_load(self.ins)
d['ab'].pop(0)
y = round_trip_dump(d, indent=2)
@@ -394,7 +394,7 @@ class TestInsertPopList:
- 2
""")
- def test_pop_1(self):
+ def test_pop_1(self) -> None:
d = round_trip_load(self.ins)
d['ab'].pop(1)
y = round_trip_dump(d, indent=2)
@@ -410,7 +410,7 @@ class TestInsertPopList:
- 2
""")
- def test_pop_2(self):
+ def test_pop_2(self) -> None:
d = round_trip_load(self.ins)
d['ab'].pop(2)
y = round_trip_dump(d, indent=2)
@@ -426,7 +426,7 @@ class TestInsertPopList:
- 2
""")
- def test_pop_3(self):
+ def test_pop_3(self) -> None:
d = round_trip_load(self.ins)
d['ab'].pop(3)
y = round_trip_dump(d, indent=2)
@@ -446,14 +446,14 @@ class TestInsertPopList:
# http://stackoverflow.com/a/36970608/1307905
class TestInsertInMapping:
@property
- def ins(self):
+ def ins(self) -> str:
return """\
first_name: Art
occupation: Architect # This is an occupation comment
about: Art Vandelay is a fictional character that George invents...
"""
- def test_insert_at_pos_1(self):
+ def test_insert_at_pos_1(self) -> None:
d = round_trip_load(self.ins)
d.insert(1, 'last name', 'Vandelay', comment='new key')
y = round_trip_dump(d)
@@ -465,7 +465,7 @@ class TestInsertInMapping:
about: Art Vandelay is a fictional character that George invents...
""")
- def test_insert_at_pos_0(self):
+ def test_insert_at_pos_0(self) -> None:
d = round_trip_load(self.ins)
d.insert(0, 'last name', 'Vandelay', comment='new key')
y = round_trip_dump(d)
@@ -477,7 +477,7 @@ class TestInsertInMapping:
about: Art Vandelay is a fictional character that George invents...
""")
- def test_insert_at_pos_3(self):
+ def test_insert_at_pos_3(self) -> None:
        # much simpler if done with appending.
d = round_trip_load(self.ins)
d.insert(3, 'last name', 'Vandelay', comment='new key')
@@ -492,7 +492,7 @@ class TestInsertInMapping:
class TestCommentedMapMerge:
- def test_in_operator(self):
+ def test_in_operator(self) -> None:
data = round_trip_load("""
x: &base
a: 1
@@ -508,7 +508,7 @@ class TestCommentedMapMerge:
assert data['y']['a'] == 1
assert 'a' in data['y']
- def test_issue_60(self):
+ def test_issue_60(self) -> None:
data = round_trip_load("""
x: &base
a: 1
@@ -519,7 +519,7 @@ class TestCommentedMapMerge:
assert data['y']['a'] == 1
assert str(data['y']) == """ordereddict([('a', 1)])"""
- def test_issue_60_1(self):
+ def test_issue_60_1(self) -> None:
data = round_trip_load("""
x: &base
a: 1
@@ -534,7 +534,7 @@ class TestCommentedMapMerge:
class TestEmptyLines:
# prompted by issue 46 from Alex Harvey
- def test_issue_46(self):
+ def test_issue_46(self) -> None:
yaml_str = dedent("""\
---
# Please add key/value pairs in alphabetical order
@@ -549,7 +549,7 @@ class TestEmptyLines:
y = round_trip_dump(d, explicit_start=True)
assert yaml_str == y
- def test_multispace_map(self):
+ def test_multispace_map(self) -> None:
round_trip("""
a: 1x
@@ -564,8 +564,8 @@ class TestEmptyLines:
""")
- @pytest.mark.xfail(strict=True)
- def test_multispace_map_initial(self):
+ @pytest.mark.xfail(strict=True) # type: ignore
+ def test_multispace_map_initial(self) -> None:
round_trip("""
a: 1x
@@ -581,7 +581,7 @@ class TestEmptyLines:
""")
- def test_embedded_map(self):
+ def test_embedded_map(self) -> None:
round_trip("""
- a: 1y
b: 2y
@@ -589,7 +589,7 @@ class TestEmptyLines:
c: 3y
""")
- def test_toplevel_seq(self):
+ def test_toplevel_seq(self) -> None:
round_trip("""\
- 1
@@ -598,7 +598,7 @@ class TestEmptyLines:
- 3
""")
- def test_embedded_seq(self):
+ def test_embedded_seq(self) -> None:
round_trip("""
a:
b:
@@ -610,7 +610,7 @@ class TestEmptyLines:
- 3
""")
- def test_line_with_only_spaces(self):
+ def test_line_with_only_spaces(self) -> None:
# issue 54
yaml_str = "---\n\na: 'x'\n \nb: y\n"
d = round_trip_load(yaml_str, preserve_quotes=True)
@@ -621,7 +621,7 @@ class TestEmptyLines:
print(line + '$')
assert stripped == y
- def test_some_eol_spaces(self):
+ def test_some_eol_spaces(self) -> None:
# spaces after tokens and on empty lines
yaml_str = '--- \n \na: "x" \n \nb: y \n'
d = round_trip_load(yaml_str, preserve_quotes=True)
@@ -632,7 +632,7 @@ class TestEmptyLines:
print(line + '$')
assert stripped == y
- def test_issue_54_not_ok(self):
+ def test_issue_54_not_ok(self) -> None:
yaml_str = dedent("""\
toplevel:
@@ -642,10 +642,11 @@ class TestEmptyLines:
d = round_trip_load(yaml_str)
print(d.ca)
y = round_trip_dump(d, indent=4)
+ assert isinstance(y, str)
print(y.replace('\n', '$\n'))
assert yaml_str == y
- def test_issue_54_ok(self):
+ def test_issue_54_ok(self) -> None:
yaml_str = dedent("""\
toplevel:
# some comment
@@ -655,7 +656,7 @@ class TestEmptyLines:
y = round_trip_dump(d, indent=4)
assert yaml_str == y
- def test_issue_93(self):
+ def test_issue_93(self) -> None:
round_trip("""\
a:
b:
@@ -664,7 +665,7 @@ class TestEmptyLines:
- c2: catfish # a2
""")
- def test_issue_93_00(self):
+ def test_issue_93_00(self) -> None:
round_trip("""\
a:
- - c1: cat # a1
@@ -672,14 +673,14 @@ class TestEmptyLines:
- c2: catfish # a2
""")
- def test_issue_93_01(self):
+ def test_issue_93_01(self) -> None:
round_trip("""\
- - c1: cat # a1
# my comment on catfish
- c2: catfish # a2
""")
- def test_issue_93_02(self):
+ def test_issue_93_02(self) -> None:
# never failed as there is no indent
round_trip("""\
- c1: cat
@@ -687,7 +688,7 @@ class TestEmptyLines:
- c2: catfish
""")
- def test_issue_96(self):
+ def test_issue_96(self) -> None:
# inserted extra line on trailing spaces
round_trip("""\
a:
@@ -701,8 +702,8 @@ class TestEmptyLines:
class TestUnicodeComments:
- @pytest.mark.skipif(sys.version_info < (2, 7), reason='wide unicode')
- def test_issue_55(self): # reported by Haraguroicha Hsu
+ @pytest.mark.skipif(sys.version_info < (2, 7), reason='wide unicode') # type: ignore
+ def test_issue_55(self) -> None: # reported by Haraguroicha Hsu
round_trip("""\
name: TEST
description: test using
@@ -722,7 +723,7 @@ class TestUnicodeComments:
class TestEmptyValueBeforeComments:
- def test_issue_25a(self):
+ def test_issue_25a(self) -> None:
round_trip("""\
- a: b
c: d
@@ -730,7 +731,7 @@ class TestEmptyValueBeforeComments:
- e: f
""")
- def test_issue_25a1(self):
+ def test_issue_25a1(self) -> None:
round_trip("""\
- a: b
c: d
@@ -738,13 +739,13 @@ class TestEmptyValueBeforeComments:
e: f
""")
- def test_issue_25b(self):
+ def test_issue_25b(self) -> None:
round_trip("""\
var1: #empty
var2: something #notempty
""")
- def test_issue_25c(self):
+ def test_issue_25c(self) -> None:
round_trip("""\
params:
a: 1 # comment a
@@ -752,7 +753,7 @@ class TestEmptyValueBeforeComments:
c: 3 # comment c
""")
- def test_issue_25c1(self):
+ def test_issue_25c1(self) -> None:
round_trip("""\
params:
a: 1 # comment a
@@ -761,14 +762,14 @@ class TestEmptyValueBeforeComments:
c: 3 # comment c
""")
- def test_issue_25_00(self):
+ def test_issue_25_00(self) -> None:
round_trip("""\
params:
a: 1 # comment a
b: # comment b
""")
- def test_issue_25_01(self):
+ def test_issue_25_01(self) -> None:
round_trip("""\
a: # comment 1
# comment 2
@@ -776,14 +777,14 @@ class TestEmptyValueBeforeComments:
c: 1 # comment 4
""")
- def test_issue_25_02(self):
+ def test_issue_25_02(self) -> None:
round_trip("""\
a: # comment 1
# comment 2
- b: 2 # comment 3
""")
- def test_issue_25_03(self):
+ def test_issue_25_03(self) -> None:
s = """\
a: # comment 1
# comment 2
@@ -791,14 +792,14 @@ class TestEmptyValueBeforeComments:
"""
round_trip(s, indent=4, block_seq_indent=2)
- def test_issue_25_04(self):
+ def test_issue_25_04(self) -> None:
round_trip("""\
a: # comment 1
# comment 2
b: 1 # comment 3
""")
- def test_flow_seq_within_seq(self):
+ def test_flow_seq_within_seq(self) -> None:
round_trip("""\
# comment 1
- a
@@ -813,7 +814,7 @@ class TestEmptyValueBeforeComments:
- []
""")
- def test_comment_after_block_scalar_indicator(self):
+ def test_comment_after_block_scalar_indicator(self) -> None:
round_trip("""\
a: | # abc
test 1
@@ -834,7 +835,7 @@ a: |
class TestBlockScalarWithComments:
# issue 99 reported by Colm O'Connor
- def test_scalar_with_comments(self):
+ def test_scalar_with_comments(self) -> None:
import ruamel.yaml # NOQA
for x in [
diff --git a/_test/test_contextmanager.py b/_test/test_contextmanager.py
index bdc8b78..e6256d3 100644
--- a/_test/test_contextmanager.py
+++ b/_test/test_contextmanager.py
@@ -5,8 +5,9 @@ testing of anchors and the aliases referring to them
"""
import sys
-import pytest
+import pytest # type: ignore
+from typing import Any
single_doc = """\
- a: 1
@@ -31,33 +32,33 @@ multi_doc = """\
multi_doc_data = [['abc', 'xyz'], single_data]
-def get_yaml():
+def get_yaml() -> Any:
from ruamel.yaml import YAML
return YAML()
class TestOldStyle:
- def test_single_load(self):
+ def test_single_load(self) -> None:
d = get_yaml().load(single_doc)
print(d)
print(type(d[0]))
assert d == single_data
- def test_single_load_no_arg(self):
+ def test_single_load_no_arg(self) -> None:
with pytest.raises(TypeError):
assert get_yaml().load() == single_data
- def test_multi_load(self):
+ def test_multi_load(self) -> None:
data = list(get_yaml().load_all(multi_doc))
assert data == multi_doc_data
- def test_single_dump(self, capsys):
+ def test_single_dump(self, capsys: Any) -> None:
get_yaml().dump(single_data, sys.stdout)
out, err = capsys.readouterr()
assert out == single_doc
- def test_multi_dump(self, capsys):
+ def test_multi_dump(self, capsys: Any) -> None:
yaml = get_yaml()
yaml.explicit_start = True
yaml.dump_all(multi_doc_data, sys.stdout)
@@ -66,7 +67,7 @@ class TestOldStyle:
class TestContextManager:
- def test_single_dump(self, capsys):
+ def test_single_dump(self, capsys: Any) -> None:
from ruamel.yaml import YAML
with YAML(output=sys.stdout) as yaml:
@@ -75,7 +76,7 @@ class TestContextManager:
print(err)
assert out == single_doc
- def test_multi_dump(self, capsys):
+ def test_multi_dump(self, capsys: Any) -> None:
from ruamel.yaml import YAML
with YAML(output=sys.stdout) as yaml:
@@ -103,7 +104,7 @@ class TestContextManager:
# for idx, data in enumerate(yaml.load()):
# assert data == multi_doc_data[0]
- def test_roundtrip(self, capsys):
+ def test_roundtrip(self, capsys: Any) -> None:
from ruamel.yaml import YAML
with YAML(output=sys.stdout) as yaml:
diff --git a/_test/test_copy.py b/_test/test_copy.py
index 4931d2a..cf402a4 100644
--- a/_test/test_copy.py
+++ b/_test/test_copy.py
@@ -6,13 +6,13 @@ Testing copy and deepcopy, instigated by Issue 84 (Peter Amstutz)
import copy
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import dedent, round_trip_load, round_trip_dump
class TestDeepCopy:
- def test_preserve_flow_style_simple(self):
+ def test_preserve_flow_style_simple(self) -> None:
x = dedent("""\
{foo: bar, baz: quux}
""")
@@ -24,7 +24,7 @@ class TestDeepCopy:
assert y == x
assert data.fa.flow_style() == data_copy.fa.flow_style()
- def test_deepcopy_flow_style_nested_dict(self):
+ def test_deepcopy_flow_style_nested_dict(self) -> None:
x = dedent("""\
a: {foo: bar, baz: quux}
""")
@@ -46,7 +46,7 @@ class TestDeepCopy:
baz: quux
""")
- def test_deepcopy_flow_style_nested_list(self):
+ def test_deepcopy_flow_style_nested_list(self) -> None:
x = dedent("""\
a: [1, 2, 3]
""")
@@ -71,7 +71,7 @@ class TestDeepCopy:
class TestCopy:
- def test_copy_flow_style_nested_dict(self):
+ def test_copy_flow_style_nested_dict(self) -> None:
x = dedent("""\
a: {foo: bar, baz: quux}
""")
@@ -93,7 +93,7 @@ class TestCopy:
baz: quux
""")
- def test_copy_flow_style_nested_list(self):
+ def test_copy_flow_style_nested_list(self) -> None:
x = dedent("""\
a: [1, 2, 3]
""")
diff --git a/_test/test_cyaml.py b/_test/test_cyaml.py
index 593d171..056093b 100644
--- a/_test/test_cyaml.py
+++ b/_test/test_cyaml.py
@@ -2,17 +2,17 @@
import sys
import platform
-import pytest
+import pytest # type: ignore # NOQA
from textwrap import dedent
NO_CLIB_VER = (3, 10)
-@pytest.mark.skipif(
+@pytest.mark.skipif( # type: ignore
platform.python_implementation() in ['Jython', 'PyPy'],
reason='Jython throws RepresenterError'
)
-def test_load_cyaml():
+def test_load_cyaml() -> None:
print("???????????????????????", platform.python_implementation())
import ruamel.yaml
@@ -24,10 +24,10 @@ def test_load_cyaml():
yaml.load('abc: 1')
-@pytest.mark.skipif(sys.version_info >= NO_CLIB_VER
+@pytest.mark.skipif(sys.version_info >= NO_CLIB_VER # type: ignore
or platform.python_implementation() in ['Jython', 'PyPy'],
reason='no _PyGC_FINALIZED')
-def test_dump_cyaml():
+def test_dump_cyaml() -> None:
import ruamel.yaml
if sys.version_info >= NO_CLIB_VER:
@@ -41,10 +41,10 @@ def test_dump_cyaml():
assert buf.getvalue() == 'a: 1\nb: 2\n'
-@pytest.mark.skipif(
+@pytest.mark.skipif( # type: ignore
    platform.python_implementation() in ['Jython', 'PyPy'], reason='not available'
)
-def test_load_cyaml_1_2():
+def test_load_cyaml_1_2() -> None:
# issue 155
import ruamel.yaml
@@ -60,10 +60,10 @@ def test_load_cyaml_1_2():
yaml.load(inp)
-@pytest.mark.skipif(
+@pytest.mark.skipif( # type: ignore
platform.python_implementation() in ['Jython', 'PyPy'], reason='not available'
)
-def test_dump_cyaml_1_2():
+def test_dump_cyaml_1_2() -> None:
# issue 155
import ruamel.yaml
from ruamel.yaml.compat import StringIO
diff --git a/_test/test_datetime.py b/_test/test_datetime.py
index 7321816..bc86e74 100644
--- a/_test/test_datetime.py
+++ b/_test/test_datetime.py
@@ -20,13 +20,13 @@ Please note that a fraction can only be included if not equal to 0
"""
import copy
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NOQA
class TestDateTime:
- def test_date_only(self):
+ def test_date_only(self) -> None:
inp = """
- 2011-10-02
"""
@@ -35,7 +35,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_zero_fraction(self):
+ def test_zero_fraction(self) -> None:
inp = """
- 2011-10-02 16:45:00.0
"""
@@ -44,7 +44,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_long_fraction(self):
+ def test_long_fraction(self) -> None:
inp = """
- 2011-10-02 16:45:00.1234 # expand with zeros
- 2011-10-02 16:45:00.123456
@@ -61,7 +61,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_canonical(self):
+ def test_canonical(self) -> None:
inp = """
- 2011-10-02T16:45:00.1Z
"""
@@ -70,7 +70,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_spaced_timezone(self):
+ def test_spaced_timezone(self) -> None:
inp = """
- 2011-10-02T11:45:00 -5
"""
@@ -79,7 +79,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_normal_timezone(self):
+ def test_normal_timezone(self) -> None:
round_trip("""
- 2011-10-02T11:45:00-5
- 2011-10-02 11:45:00-5
@@ -87,7 +87,7 @@ class TestDateTime:
- 2011-10-02 11:45:00-05:00
""")
- def test_no_timezone(self):
+ def test_no_timezone(self) -> None:
inp = """
- 2011-10-02 6:45:00
"""
@@ -96,7 +96,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_explicit_T(self):
+ def test_explicit_T(self) -> None:
inp = """
- 2011-10-02T16:45:00
"""
@@ -105,7 +105,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_explicit_t(self): # to upper
+ def test_explicit_t(self) -> None: # to upper
inp = """
- 2011-10-02t16:45:00
"""
@@ -114,7 +114,7 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_no_T_multi_space(self):
+ def test_no_T_multi_space(self) -> None:
inp = """
- 2011-10-02 16:45:00
"""
@@ -123,22 +123,22 @@ class TestDateTime:
"""
round_trip(inp, exp)
- def test_iso(self):
+ def test_iso(self) -> None:
round_trip("""
- 2011-10-02T15:45:00+01:00
""")
- def test_zero_tz(self):
+ def test_zero_tz(self) -> None:
round_trip("""
- 2011-10-02T15:45:00+0
""")
- def test_issue_45(self):
+ def test_issue_45(self) -> None:
round_trip("""
dt: 2016-08-19T22:45:47Z
""")
- def test_deepcopy_datestring(self):
+ def test_deepcopy_datestring(self) -> None:
# reported by Quuxplusone, http://stackoverflow.com/a/41577841/1307905
x = dedent("""\
foo: 2016-10-12T12:34:56
@@ -146,7 +146,7 @@ class TestDateTime:
data = copy.deepcopy(round_trip_load(x))
assert round_trip_dump(data) == x
- def test_fraction_overflow(self):
+ def test_fraction_overflow(self) -> None:
# reported (indirectly) by Luís Ferreira
# https://sourceforge.net/p/ruamel-yaml/tickets/414/
inp = dedent("""\
diff --git a/_test/test_deprecation.py b/_test/test_deprecation.py
index b267e66..390c26c 100644
--- a/_test/test_deprecation.py
+++ b/_test/test_deprecation.py
@@ -1,11 +1,11 @@
# coding: utf-8
import sys
-import pytest # NOQA
+import pytest # type:ignore # NOQA
-@pytest.mark.skipif(sys.version_info < (3, 7) or sys.version_info >= (3, 9),
+@pytest.mark.skipif(sys.version_info < (3, 7) or sys.version_info >= (3, 9), # type: ignore
reason='collections not available?')
-def test_collections_deprecation():
+def test_collections_deprecation() -> None:
with pytest.warns(DeprecationWarning):
- from collections import Hashable # NOQA
+ from collections import Hashable # type: ignore # NOQA
diff --git a/_test/test_documents.py b/_test/test_documents.py
index 5119f06..7c6e2e6 100644
--- a/_test/test_documents.py
+++ b/_test/test_documents.py
@@ -1,12 +1,12 @@
# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import round_trip, round_trip_load_all, round_trip_dump_all
class TestDocument:
- def test_single_doc_begin_end(self):
+ def test_single_doc_begin_end(self) -> None:
inp = """\
---
- a
@@ -15,7 +15,7 @@ class TestDocument:
"""
round_trip(inp, explicit_start=True, explicit_end=True)
- def test_multi_doc_begin_end(self):
+ def test_multi_doc_begin_end(self) -> None:
inp = """\
---
- a
@@ -29,7 +29,7 @@ class TestDocument:
out = round_trip_dump_all(docs, explicit_start=True, explicit_end=True)
assert out == '---\n- a\n...\n---\n- b\n...\n'
- def test_multi_doc_no_start(self):
+ def test_multi_doc_no_start(self) -> None:
inp = """\
- a
...
@@ -40,7 +40,7 @@ class TestDocument:
docs = list(round_trip_load_all(inp))
assert docs == [['a'], ['b']]
- def test_multi_doc_no_end(self):
+ def test_multi_doc_no_end(self) -> None:
inp = """\
- a
---
@@ -49,7 +49,7 @@ class TestDocument:
docs = list(round_trip_load_all(inp))
assert docs == [['a'], ['b']]
- def test_multi_doc_ends_only(self):
+ def test_multi_doc_ends_only(self) -> None:
# this is ok in 1.2
inp = """\
- a
@@ -60,7 +60,7 @@ class TestDocument:
docs = list(round_trip_load_all(inp, version=(1, 2)))
assert docs == [['a'], ['b']]
- def test_multi_doc_ends_only_1_1(self):
+ def test_multi_doc_ends_only_1_1(self) -> None:
from ruamel import yaml
# this is not ok in 1.1
diff --git a/_test/test_fail.py b/_test/test_fail.py
index 2f90112..7fbbd07 100644
--- a/_test/test_fail.py
+++ b/_test/test_fail.py
@@ -6,14 +6,14 @@
# on fix of ruamel.yaml, move the marked test to the appropriate test (without mark)
 # and remove the xyz_no_fail
-import pytest
+import pytest # type: ignore
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump
class TestCommentFailures:
- @pytest.mark.xfail(strict=True)
- def test_set_comment_before_tag(self):
+ @pytest.mark.xfail(strict=True) # type: ignore
+ def test_set_comment_before_tag(self) -> None:
# no comments before tags
round_trip("""
# the beginning
@@ -26,7 +26,7 @@ class TestCommentFailures:
# this is the end
""")
- def test_set_comment_before_tag_no_fail(self):
+ def test_set_comment_before_tag_no_fail(self) -> None:
# no comments before tags
inp = """
# the beginning
@@ -48,15 +48,15 @@ class TestCommentFailures:
# this is the end
""")
- @pytest.mark.xfail(strict=True)
- def test_comment_dash_line(self):
+ @pytest.mark.xfail(strict=True) # type: ignore
+ def test_comment_dash_line(self) -> None:
round_trip("""
- # abc
a: 1
b: 2
""")
- def test_comment_dash_line_fail(self):
+ def test_comment_dash_line_fail(self) -> None:
x = """
- # abc
a: 1
@@ -72,8 +72,8 @@ class TestCommentFailures:
class TestIndentFailures:
- @pytest.mark.xfail(strict=True)
- def test_indent_not_retained(self):
+ @pytest.mark.xfail(strict=True) # type: ignore
+ def test_indent_not_retained(self) -> None:
round_trip("""
verbosity: 1 # 0 is minimal output, -1 none
base_url: http://gopher.net
@@ -97,7 +97,7 @@ class TestIndentFailures:
- too cold
""")
- def test_indent_not_retained_no_fail(self):
+ def test_indent_not_retained_no_fail(self) -> None:
inp = """
verbosity: 1 # 0 is minimal output, -1 none
base_url: http://gopher.net
@@ -143,7 +143,7 @@ class TestIndentFailures:
- too cold
""")
- def Xtest_indent_top_level_no_fail(self):
+ def Xtest_indent_top_level_no_fail(self) -> None:
inp = """
- a:
- b
@@ -152,8 +152,8 @@ class TestIndentFailures:
class TestTagFailures:
- @pytest.mark.xfail(strict=True)
- def test_standard_short_tag(self):
+ @pytest.mark.xfail(strict=True) # type: ignore
+ def test_standard_short_tag(self) -> None:
round_trip("""\
!!map
name: Anthon
@@ -161,7 +161,7 @@ class TestTagFailures:
language: python
""")
- def test_standard_short_tag_no_fail(self):
+ def test_standard_short_tag_no_fail(self) -> None:
inp = """
!!map
name: Anthon
@@ -177,13 +177,13 @@ class TestTagFailures:
class TestFlowValues:
- def test_flow_value_with_colon(self):
+ def test_flow_value_with_colon(self) -> None:
inp = """\
{a: bcd:efg}
"""
round_trip(inp)
- def test_flow_value_with_colon_quoted(self):
+ def test_flow_value_with_colon_quoted(self) -> None:
inp = """\
{a: 'bcd:efg'}
"""
@@ -191,13 +191,13 @@ class TestFlowValues:
class TestMappingKey:
- def test_simple_mapping_key(self):
+ def test_simple_mapping_key(self) -> None:
inp = """\
{a: 1, b: 2}: hello world
"""
round_trip(inp, preserve_quotes=True, dump_data=False)
- def test_set_simple_mapping_key(self):
+ def test_set_simple_mapping_key(self) -> None:
from ruamel.yaml.comments import CommentedKeyMap
d = {CommentedKeyMap([('a', 1), ('b', 2)]): 'hello world'}
@@ -206,7 +206,7 @@ class TestMappingKey:
""")
assert round_trip_dump(d) == exp
- def test_change_key_simple_mapping_key(self):
+ def test_change_key_simple_mapping_key(self) -> None:
from ruamel.yaml.comments import CommentedKeyMap
inp = """\
@@ -219,7 +219,7 @@ class TestMappingKey:
""")
assert round_trip_dump(d) == exp
- def test_change_value_simple_mapping_key(self):
+ def test_change_value_simple_mapping_key(self) -> None:
from ruamel.yaml.comments import CommentedKeyMap
inp = """\
diff --git a/_test/test_float.py b/_test/test_float.py
index 8257208..582ccf0 100644
--- a/_test/test_float.py
+++ b/_test/test_float.py
@@ -1,6 +1,6 @@
# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NOQA
@@ -8,7 +8,7 @@ from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NO
class TestFloat:
- def test_round_trip_non_exp(self):
+ def test_round_trip_non_exp(self) -> None:
data = round_trip("""\
- 1.0
- 1.00
@@ -22,6 +22,8 @@ class TestFloat:
- .5
- +.5
- -.5
+ - !!float '42'
+ - !!float '-42'
""")
print(data)
assert 0.999 < data[0] < 1.001
@@ -36,8 +38,10 @@ class TestFloat:
assert .49 < data[9] < .51
assert .49 < data[10] < .51
assert -.51 < data[11] < -.49
+ assert 41.99 < data[12] < 42.01
+ assert 41.99 < -data[13] < 42.01
- def test_round_trip_zeros_0(self):
+ def test_round_trip_zeros_0(self) -> None:
data = round_trip("""\
- 0.
- +0.
@@ -53,12 +57,13 @@ class TestFloat:
for d in data:
assert -0.00001 < d < 0.00001
- def Xtest_round_trip_non_exp_trailing_dot(self):
+ def test_round_trip_exp_trailing_dot(self) -> None:
data = round_trip("""\
+ - 3.e4
""")
print(data)
- def test_yaml_1_1_no_dot(self):
+ def test_yaml_1_1_no_dot(self) -> None:
from ruamel.yaml.error import MantissaNoDotYAML1_1Warning
with pytest.warns(MantissaNoDotYAML1_1Warning):
@@ -69,8 +74,8 @@ class TestFloat:
""")
-class TestCalculations(object):
- def test_mul_00(self):
+class TestCalculations:
+ def test_mul_00(self) -> None:
# issue 149 reported by jan.brezina@tul.cz
d = round_trip_load("""\
- 0.1
diff --git a/_test/test_flowsequencekey.py b/_test/test_flowsequencekey.py
index 96bee67..be70699 100644
--- a/_test/test_flowsequencekey.py
+++ b/_test/test_flowsequencekey.py
@@ -11,7 +11,7 @@ from roundtrip import round_trip # , dedent, round_trip_load, round_trip_dump
class TestFlowStyleSequenceKey:
- def test_so_39595807(self):
+ def test_so_39595807(self) -> None:
inp = """\
%YAML 1.2
---
diff --git a/_test/test_indentation.py b/_test/test_indentation.py
index 6e0fce2..1f16cb2 100644
--- a/_test/test_indentation.py
+++ b/_test/test_indentation.py
@@ -1,23 +1,24 @@
# coding: utf-8
-import pytest # NOQA
+from typing import Any
+import pytest # type: ignore # NOQA
from roundtrip import round_trip, round_trip_load, round_trip_dump, dedent, YAML
-def rt(s):
-
+def rt(s: str) -> str:
res = round_trip_dump(round_trip_load(s))
+ assert res is not None
return res.strip() + '\n'
class TestIndent:
- def test_roundtrip_inline_list(self):
+ def test_roundtrip_inline_list(self) -> None:
s = 'a: [a, b, c]\n'
output = rt(s)
assert s == output
- def test_roundtrip_mapping_of_inline_lists(self):
+ def test_roundtrip_mapping_of_inline_lists(self) -> None:
s = dedent("""\
a: [a, b, c]
j: [k, l, m]
@@ -25,7 +26,7 @@ class TestIndent:
output = rt(s)
assert s == output
- def test_roundtrip_mapping_of_inline_lists_comments(self):
+ def test_roundtrip_mapping_of_inline_lists_comments(self) -> None:
s = dedent("""\
# comment A
a: [a, b, c]
@@ -35,7 +36,7 @@ class TestIndent:
output = rt(s)
assert s == output
- def test_roundtrip_mapping_of_inline_sequence_eol_comments(self):
+ def test_roundtrip_mapping_of_inline_sequence_eol_comments(self) -> None:
s = dedent("""\
# comment A
a: [a, b, c] # comment B
@@ -45,7 +46,7 @@ class TestIndent:
assert s == output
# first test by explicitly setting flow style
- def test_added_inline_list(self):
+ def test_added_inline_list(self) -> None:
s1 = dedent("""
a:
- b
@@ -62,7 +63,7 @@ class TestIndent:
# ############ flow mappings
- def test_roundtrip_flow_mapping(self):
+ def test_roundtrip_flow_mapping(self) -> None:
s = dedent("""\
- {a: 1, b: hallo}
- {j: fka, k: 42}
@@ -71,7 +72,7 @@ class TestIndent:
output = round_trip_dump(data)
assert s == output
- def test_roundtrip_sequence_of_inline_mappings_eol_comments(self):
+ def test_roundtrip_sequence_of_inline_mappings_eol_comments(self) -> None:
s = dedent("""\
# comment A
- {a: 1, b: hallo} # comment B
@@ -80,14 +81,14 @@ class TestIndent:
output = rt(s)
assert s == output
- def test_indent_top_level(self):
+ def test_indent_top_level(self) -> None:
inp = """
- a:
- b
"""
round_trip(inp, indent=4)
- def test_set_indent_5_block_list_indent_1(self):
+ def test_set_indent_5_block_list_indent_1(self) -> None:
inp = """
a:
- b: c
@@ -97,7 +98,7 @@ class TestIndent:
"""
round_trip(inp, indent=5, block_seq_indent=1)
- def test_set_indent_4_block_list_indent_2(self):
+ def test_set_indent_4_block_list_indent_2(self) -> None:
inp = """
a:
- b: c
@@ -107,7 +108,7 @@ class TestIndent:
"""
round_trip(inp, indent=4, block_seq_indent=2)
- def test_set_indent_3_block_list_indent_0(self):
+ def test_set_indent_3_block_list_indent_0(self) -> None:
inp = """
a:
- b: c
@@ -117,7 +118,7 @@ class TestIndent:
"""
round_trip(inp, indent=3, block_seq_indent=0)
- def Xtest_set_indent_3_block_list_indent_2(self):
+ def Xtest_set_indent_3_block_list_indent_2(self) -> None:
inp = """
a:
-
@@ -131,7 +132,7 @@ class TestIndent:
"""
round_trip(inp, indent=3, block_seq_indent=2)
- def test_set_indent_3_block_list_indent_2(self):
+ def test_set_indent_3_block_list_indent_2(self) -> None:
inp = """
a:
- b: c
@@ -141,7 +142,7 @@ class TestIndent:
"""
round_trip(inp, indent=3, block_seq_indent=2)
- def Xtest_set_indent_2_block_list_indent_2(self):
+ def Xtest_set_indent_2_block_list_indent_2(self) -> None:
inp = """
a:
-
@@ -156,7 +157,7 @@ class TestIndent:
round_trip(inp, indent=2, block_seq_indent=2)
# this is how it should be: block_seq_indent stretches the indent
- def test_set_indent_2_block_list_indent_2(self):
+ def test_set_indent_2_block_list_indent_2(self) -> None:
inp = """
a:
- b: c
@@ -167,7 +168,7 @@ class TestIndent:
round_trip(inp, indent=2, block_seq_indent=2)
# have to set indent!
- def test_roundtrip_four_space_indents(self):
+ def test_roundtrip_four_space_indents(self) -> None:
# fmt: off
s = (
'a:\n'
@@ -177,7 +178,7 @@ class TestIndent:
# fmt: on
round_trip(s, indent=4)
- def test_roundtrip_four_space_indents_no_fail(self):
+ def test_roundtrip_four_space_indents_no_fail(self) -> None:
inp = """
a:
- foo
@@ -192,7 +193,7 @@ class TestIndent:
class TestYpkgIndent:
- def test_00(self):
+ def test_00(self) -> None:
inp = """
name : nano
version : 2.3.2
@@ -214,7 +215,7 @@ class TestYpkgIndent:
)
-def guess(s):
+def guess(s: str) -> Any:
from ruamel.yaml.util import load_yaml_guess_indent
x, y, z = load_yaml_guess_indent(dedent(s))
@@ -222,21 +223,21 @@ def guess(s):
class TestGuessIndent:
- def test_guess_20(self):
+ def test_guess_20(self) -> None:
inp = """\
a:
- 1
"""
assert guess(inp) == (2, 0)
- def test_guess_42(self):
+ def test_guess_42(self) -> None:
inp = """\
a:
- 1
"""
assert guess(inp) == (4, 2)
- def test_guess_42a(self):
+ def test_guess_42a(self) -> None:
# block seq indent prevails over nested key indent level
inp = """\
b:
@@ -245,7 +246,7 @@ class TestGuessIndent:
"""
assert guess(inp) == (4, 2)
- def test_guess_3None(self):
+ def test_guess_3None(self) -> None:
inp = """\
b:
a: 1
@@ -256,7 +257,7 @@ class TestGuessIndent:
class TestSeparateMapSeqIndents:
    # using uncommon 6 indent with 3 push in, as 2 push in automatically
# gets you 4 indent even if not set
- def test_00(self):
+ def test_00(self) -> None:
# old style
yaml = YAML()
yaml.indent = 6
@@ -268,7 +269,7 @@ class TestSeparateMapSeqIndents:
"""
yaml.round_trip(inp)
- def test_01(self):
+ def test_01(self) -> None:
yaml = YAML()
yaml.indent(sequence=6)
yaml.indent(offset=3)
@@ -279,7 +280,7 @@ class TestSeparateMapSeqIndents:
"""
yaml.round_trip(inp)
- def test_02(self):
+ def test_02(self) -> None:
yaml = YAML()
yaml.indent(mapping=5, sequence=6, offset=3)
inp = """
@@ -290,7 +291,7 @@ class TestSeparateMapSeqIndents:
"""
yaml.round_trip(inp)
- def test_03(self):
+ def test_03(self) -> None:
inp = """
a:
b:
@@ -300,7 +301,7 @@ class TestSeparateMapSeqIndents:
"""
round_trip(inp, indent=4)
- def test_04(self):
+ def test_04(self) -> None:
yaml = YAML()
yaml.indent(mapping=5, sequence=6)
inp = """
@@ -312,7 +313,7 @@ class TestSeparateMapSeqIndents:
"""
yaml.round_trip(inp)
- def test_issue_51(self):
+ def test_issue_51(self) -> None:
yaml = YAML()
# yaml.map_indent = 2 # the default
yaml.indent(sequence=4, offset=2)
diff --git a/_test/test_int.py b/_test/test_int.py
index aa300df..92fb92a 100644
--- a/_test/test_int.py
+++ b/_test/test_int.py
@@ -1,6 +1,6 @@
# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import dedent, round_trip_load, round_trip_dump
@@ -8,7 +8,7 @@ from roundtrip import dedent, round_trip_load, round_trip_dump
class TestBinHexOct:
- def test_calculate(self):
+ def test_calculate(self) -> None:
# make sure type, leading zero(s) and underscore are preserved
s = dedent("""\
- 42
diff --git a/_test/test_issues.py b/_test/test_issues.py
index 736dccb..7106453 100644
--- a/_test/test_issues.py
+++ b/_test/test_issues.py
@@ -1,6 +1,8 @@
# coding: utf-8
-import pytest # NOQA
+from typing import Any
+
+import pytest # type: ignore # NOQA
from roundtrip import (
@@ -15,7 +17,7 @@ from roundtrip import (
class TestIssues:
- def test_issue_61(self):
+ def test_issue_61(self) -> None:
s = dedent("""
def1: &ANCHOR1
key1: value1
@@ -29,54 +31,54 @@ class TestIssues:
assert str(data['comb']) == str(data['def'])
assert str(data['comb']) == "ordereddict([('key', 'value'), ('key1', 'value1')])"
- def test_issue_82(self, tmpdir):
- program_src = r'''
- from ruamel import yaml
- import re
-
- class SINumber(yaml.YAMLObject):
- PREFIXES = {'k': 1e3, 'M': 1e6, 'G': 1e9}
- yaml_loader = yaml.Loader
- yaml_dumper = yaml.Dumper
- yaml_tag = '!si'
- yaml_implicit_pattern = re.compile(
- r'^(?P<value>[0-9]+(?:\.[0-9]+)?)(?P<prefix>[kMG])$')
-
- @classmethod
- def from_yaml(cls, loader, node):
- return cls(node.value)
-
- @classmethod
- def to_yaml(cls, dumper, data):
- return dumper.represent_scalar(cls.yaml_tag, str(data))
-
- def __init__(self, *args):
- m = self.yaml_implicit_pattern.match(args[0])
- self.value = float(m.groupdict()['value'])
- self.prefix = m.groupdict()['prefix']
-
- def __str__(self):
- return str(self.value)+self.prefix
-
- def __int__(self):
- return int(self.value*self.PREFIXES[self.prefix])
-
- # This fails:
- yaml.add_implicit_resolver(SINumber.yaml_tag, SINumber.yaml_implicit_pattern)
-
- ret = yaml.load("""
- [1,2,3, !si 10k, 100G]
- """, Loader=yaml.Loader)
- for idx, l in enumerate([1, 2, 3, 10000, 100000000000]):
- assert int(ret[idx]) == l
- '''
- assert save_and_run(dedent(program_src), tmpdir) == 0
-
- def test_issue_82rt(self, tmpdir):
+# def test_issue_82(self, tmpdir):
+# program_src = r'''
+# from ruamel import yaml
+# import re
+#
+# class SINumber(yaml.YAMLObject):
+# PREFIXES = {'k': 1e3, 'M': 1e6, 'G': 1e9}
+# yaml_loader = yaml.Loader
+# yaml_dumper = yaml.Dumper
+# yaml_tag = '!si'
+# yaml_implicit_pattern = re.compile(
+# r'^(?P<value>[0-9]+(?:\.[0-9]+)?)(?P<prefix>[kMG])$')
+#
+# @classmethod
+# def from_yaml(cls, loader, node):
+# return cls(node.value)
+#
+# @classmethod
+# def to_yaml(cls, dumper, data):
+# return dumper.represent_scalar(cls.yaml_tag, str(data))
+#
+# def __init__(self, *args):
+# m = self.yaml_implicit_pattern.match(args[0])
+# self.value = float(m.groupdict()['value'])
+# self.prefix = m.groupdict()['prefix']
+#
+# def __str__(self) -> None:
+# return str(self.value)+self.prefix
+#
+# def __int__(self) -> None:
+# return int(self.value*self.PREFIXES[self.prefix])
+#
+# # This fails:
+# yaml.add_implicit_resolver(SINumber.yaml_tag, SINumber.yaml_implicit_pattern)
+#
+# ret = yaml.load("""
+# [1,2,3, !si 10k, 100G]
+# """, Loader=yaml.Loader)
+# for idx, l in enumerate([1, 2, 3, 10000, 100000000000]):
+# assert int(ret[idx]) == l
+# '''
+# assert save_and_run(dedent(program_src), tmpdir) == 0
+
+ def test_issue_82rt(self, tmpdir: Any) -> None:
yaml_str = '[1, 2, 3, !si 10k, 100G]\n'
x = round_trip(yaml_str, preserve_quotes=True) # NOQA
- def test_issue_102(self):
+ def test_issue_102(self) -> None:
yaml_str = dedent("""
var1: #empty
var2: something #notempty
@@ -86,8 +88,8 @@ class TestIssues:
""")
x = round_trip(yaml_str, preserve_quotes=True) # NOQA
- def test_issue_150(self):
- from ruamel.yaml import YAML
+ def test_issue_150(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
inp = """\
base: &base_key
@@ -103,7 +105,7 @@ class TestIssues:
child = data['child']
assert 'second' in dict(**child)
- def test_issue_160(self):
+ def test_issue_160(self) -> None:
from ruamel.yaml.compat import StringIO
s = dedent("""\
root:
@@ -127,7 +129,7 @@ class TestIssues:
""")
assert buf.getvalue() == exp
- def test_issue_161(self):
+ def test_issue_161(self) -> None:
yaml_str = dedent("""\
mapping-A:
key-A:{}
@@ -137,7 +139,7 @@ class TestIssues:
s = yaml_str.format(comment)
res = round_trip(s) # NOQA
- def test_issue_161a(self):
+ def test_issue_161a(self) -> None:
yaml_str = dedent("""\
mapping-A:
key-A:{}
@@ -147,7 +149,7 @@ class TestIssues:
s = yaml_str.format(comment)
res = round_trip(s) # NOQA
- def test_issue_163(self):
+ def test_issue_163(self) -> None:
s = dedent("""\
some-list:
# List comment
@@ -163,19 +165,19 @@ class TestIssues:
json_str2 = '{"abc":[{"a":"1", "uses":0}]}'
- def test_issue_172(self):
+ def test_issue_172(self) -> None:
x = round_trip_load(TestIssues.json_str2) # NOQA
x = round_trip_load(TestIssues.json_str) # NOQA
- def test_issue_176(self):
+ def test_issue_176(self) -> None:
# basic request by Stuart Berg
- from ruamel.yaml import YAML
+ from ruamel.yaml import YAML # type: ignore
yaml = YAML()
seq = yaml.load('[1,2,3]')
seq[:] = [1, 2, 3, 4]
- def test_issue_176_preserve_comments_on_extended_slice_assignment(self):
+ def test_issue_176_preserve_comments_on_extended_slice_assignment(self) -> None:
yaml_str = dedent("""\
- a
- b # comment
@@ -190,7 +192,7 @@ class TestIssues:
res = round_trip_dump(seq)
assert res == yaml_str.replace(' b ', ' B ').replace(' d\n', ' D\n')
- def test_issue_176_test_slicing(self):
+ def test_issue_176_test_slicing(self) -> None:
mss = round_trip_load('[0, 1, 2, 3, 4]')
assert len(mss) == 5
assert mss[2:2] == []
@@ -240,7 +242,7 @@ class TestIssues:
del m[:]
assert m == []
- def test_issue_184(self):
+ def test_issue_184(self) -> None:
yaml_str = dedent("""\
test::test:
# test
@@ -252,20 +254,20 @@ class TestIssues:
d.yaml_add_eol_comment('test1', 'bar')
assert round_trip_dump(d) == yaml_str + 'bar: foo # test1\n'
- def test_issue_219(self):
+ def test_issue_219(self) -> None:
yaml_str = dedent("""\
[StackName: AWS::StackName]
""")
d = round_trip_load(yaml_str) # NOQA
- def test_issue_219a(self):
+ def test_issue_219a(self) -> None:
yaml_str = dedent("""\
[StackName:
AWS::StackName]
""")
d = round_trip_load(yaml_str) # NOQA
- def test_issue_220(self, tmpdir):
+ def test_issue_220(self, tmpdir: Any) -> None:
program_src = r'''
from ruamel.yaml import YAML
@@ -280,14 +282,14 @@ class TestIssues:
'''
assert save_and_run(dedent(program_src), tmpdir, optimized=True) == 0
- def test_issue_221_add(self):
+ def test_issue_221_add(self) -> None:
from ruamel.yaml.comments import CommentedSeq
a = CommentedSeq([1, 2, 3])
a + [4, 5]
- def test_issue_221_sort(self):
- from ruamel.yaml import YAML
+ def test_issue_221_sort(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.compat import StringIO
yaml = YAML()
@@ -311,8 +313,8 @@ class TestIssues:
""")
assert buf.getvalue() == exp
- def test_issue_221_sort_reverse(self):
- from ruamel.yaml import YAML
+ def test_issue_221_sort_reverse(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.compat import StringIO
yaml = YAML()
@@ -336,8 +338,8 @@ class TestIssues:
""")
assert buf.getvalue() == exp
- def test_issue_221_sort_key(self):
- from ruamel.yaml import YAML
+ def test_issue_221_sort_key(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.compat import StringIO
yaml = YAML()
@@ -361,8 +363,8 @@ class TestIssues:
""")
assert buf.getvalue() == exp
- def test_issue_221_sort_key_reverse(self):
- from ruamel.yaml import YAML
+ def test_issue_221_sort_key_reverse(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.compat import StringIO
yaml = YAML()
@@ -386,7 +388,7 @@ class TestIssues:
""")
assert buf.getvalue() == exp
- def test_issue_222(self):
+ def test_issue_222(self) -> None:
import ruamel.yaml
from ruamel.yaml.compat import StringIO
@@ -395,13 +397,13 @@ class TestIssues:
yaml.dump(['012923'], buf)
assert buf.getvalue() == "['012923']\n"
- def test_issue_223(self):
+ def test_issue_223(self) -> None:
import ruamel.yaml
yaml = ruamel.yaml.YAML(typ='safe')
yaml.load('phone: 0123456789')
- def test_issue_232(self):
+ def test_issue_232(self) -> None:
import ruamel.yaml
yaml = YAML(typ='safe', pure=True)
@@ -411,24 +413,24 @@ class TestIssues:
with pytest.raises(ruamel.yaml.parser.ParserError):
yaml.load('{]')
- def test_issue_233(self):
- from ruamel.yaml import YAML
+ def test_issue_233(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
import json
yaml = YAML()
data = yaml.load('{}')
json_str = json.dumps(data) # NOQA
- def test_issue_233a(self):
- from ruamel.yaml import YAML
+ def test_issue_233a(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
import json
yaml = YAML()
data = yaml.load('[]')
json_str = json.dumps(data) # NOQA
- def test_issue_234(self):
- from ruamel.yaml import YAML
+ def test_issue_234(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
inp = dedent("""\
- key: key1
@@ -444,7 +446,7 @@ class TestIssues:
print(repr(fold))
assert '\a' not in fold
- def test_issue_236(self):
+ def test_issue_236(self) -> None:
inp = """
conf:
xx: {a: "b", c: []}
@@ -452,7 +454,7 @@ class TestIssues:
"""
d = round_trip(inp, preserve_quotes=True) # NOQA
- def test_issue_238(self, tmpdir):
+ def test_issue_238(self, tmpdir: Any) -> None:
program_src = r"""
import ruamel.yaml
from ruamel.yaml.compat import StringIO
@@ -484,7 +486,7 @@ class TestIssues:
"""
assert save_and_run(dedent(program_src), tmpdir) == 0
- def test_issue_239(self):
+ def test_issue_239(self) -> None:
inp = """
first_name: Art
occupation: Architect
@@ -504,14 +506,14 @@ class TestIssues:
"""
d = YAML().round_trip_all(inp) # NOQA
- def test_issue_242(self):
+ def test_issue_242(self) -> None:
from ruamel.yaml.comments import CommentedMap
d0 = CommentedMap([('a', 'b')])
assert d0['a'] == 'b'
- def test_issue_245(self):
- from ruamel.yaml import YAML
+ def test_issue_245(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
inp = """
d: yes
"""
@@ -528,7 +530,7 @@ class TestIssues:
print(typ, yaml.parser, yaml.resolver)
assert d['d'] is True
- def test_issue_249(self):
+ def test_issue_249(self) -> None:
yaml = YAML()
inp = dedent("""\
# comment
@@ -545,7 +547,7 @@ class TestIssues:
""")
yaml.round_trip(inp, outp=exp) # NOQA
- def test_issue_250(self):
+ def test_issue_250(self) -> None:
inp = """
# 1.
- - 1
@@ -557,8 +559,8 @@ class TestIssues:
d = round_trip(inp) # NOQA
# @pytest.mark.xfail(strict=True, reason='bla bla', raises=AssertionError)
- def test_issue_279(self):
- from ruamel.yaml import YAML
+ def test_issue_279(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.compat import StringIO
yaml = YAML()
@@ -576,8 +578,8 @@ class TestIssues:
print(buf.getvalue())
assert buf.getvalue() == inp
- def test_issue_280(self):
- from ruamel.yaml import YAML
+ def test_issue_280(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.representer import RepresenterError
from collections import namedtuple
from sys import stdout
@@ -587,7 +589,7 @@ class TestIssues:
with pytest.raises(RepresenterError, match='cannot represent'):
yaml.dump({'t': t}, stdout)
- def test_issue_282(self):
+ def test_issue_282(self) -> None:
# update from list of tuples caused AttributeError
import ruamel.yaml
yaml_data = ruamel.yaml.comments.CommentedMap([('a', 'apple'), ('b', 'banana')])
@@ -596,7 +598,7 @@ class TestIssues:
assert 'c' in yaml_data.keys()
assert 'c' in yaml_data._ok
- def test_issue_284(self):
+ def test_issue_284(self) -> None:
import ruamel.yaml
inp = dedent("""\
plain key: in-line value
@@ -614,8 +616,8 @@ class TestIssues:
with pytest.raises(ruamel.yaml.parser.ParserError, match='expected <block end>'):
d = yaml.load(inp)
- def test_issue_285(self):
- from ruamel.yaml import YAML
+ def test_issue_285(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
yaml = YAML()
inp = dedent("""\
@@ -632,8 +634,8 @@ class TestIssues:
assert not a[1]
assert not a[3]
- def test_issue_286(self):
- from ruamel.yaml import YAML
+ def test_issue_286(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
from ruamel.yaml.compat import StringIO
yaml = YAML()
@@ -648,10 +650,10 @@ class TestIssues:
yaml.dump(a, buf)
assert buf.getvalue().endswith('xxx\nnew_key: new_value\n')
- def test_issue_288(self):
+ def test_issue_288(self) -> None:
import sys
from ruamel.yaml.compat import StringIO
- from ruamel.yaml import YAML
+ from ruamel.yaml import YAML # type: ignore
yamldoc = dedent("""\
---
@@ -679,10 +681,10 @@ class TestIssues:
yaml.dump(data, buf)
assert buf.getvalue() == yamldoc
- def test_issue_288a(self):
+ def test_issue_288a(self) -> None:
import sys
from ruamel.yaml.compat import StringIO
- from ruamel.yaml import YAML
+ from ruamel.yaml import YAML # type: ignore
yamldoc = dedent("""\
---
@@ -710,10 +712,10 @@ class TestIssues:
yaml.dump(data, buf)
assert buf.getvalue() == yamldoc
- def test_issue_290(self):
+ def test_issue_290(self) -> None:
import sys
from ruamel.yaml.compat import StringIO
- from ruamel.yaml import YAML
+ from ruamel.yaml import YAML # type: ignore
yamldoc = dedent("""\
---
@@ -746,10 +748,10 @@ class TestIssues:
yaml.dump(data, buf)
assert buf.getvalue() == yamldoc
- def test_issue_290a(self):
+ def test_issue_290a(self) -> None:
import sys
from ruamel.yaml.compat import StringIO
- from ruamel.yaml import YAML
+ from ruamel.yaml import YAML # type: ignore
yamldoc = dedent("""\
---
@@ -783,7 +785,7 @@ class TestIssues:
assert buf.getvalue() == yamldoc
# @pytest.mark.xfail(strict=True, reason='should fail pre 0.15.100', raises=AssertionError)
- def test_issue_295(self):
+ def test_issue_295(self) -> None:
# deepcopy also makes a copy of the start and end mark, and these did not
# have any comparison beyond their ID, which of course changed, breaking
# some old merge_comment code
@@ -808,8 +810,8 @@ class TestIssues:
dc = copy.deepcopy(data)
assert round_trip_dump(dc) == inp
- def test_issue_300(self):
- from ruamel.yaml import YAML
+ def test_issue_300(self) -> None:
+ from ruamel.yaml import YAML # type: ignore
inp = dedent("""
%YAML 1.2
@@ -819,7 +821,7 @@ class TestIssues:
""")
YAML().load(inp)
- def test_issue_300a(self):
+ def test_issue_300a(self) -> None:
import ruamel.yaml
inp = dedent("""
@@ -833,7 +835,7 @@ class TestIssues:
match='while scanning a directive'):
yaml.load(inp)
- def test_issue_304(self):
+ def test_issue_304(self) -> None:
inp = """
%YAML 1.2
%TAG ! tag:example.com,2019:
@@ -843,7 +845,7 @@ class TestIssues:
"""
d = na_round_trip(inp) # NOQA
- def test_issue_305(self):
+ def test_issue_305(self) -> None:
inp = """
%YAML 1.2
---
@@ -852,7 +854,7 @@ class TestIssues:
"""
d = na_round_trip(inp) # NOQA
- def test_issue_307(self):
+ def test_issue_307(self) -> None:
inp = """
%YAML 1.2
%TAG ! tag:example.com,2019/path#
@@ -863,7 +865,7 @@ class TestIssues:
d = na_round_trip(inp) # NOQA
# @pytest.mark.xfail(strict=True, reason='bla bla', raises=AssertionError)
-# def test_issue_ xxx(self):
+# def test_issue_ xxx(self) -> None:
# inp = """
# """
# d = round_trip(inp) # NOQA
diff --git a/_test/test_json_numbers.py b/_test/test_json_numbers.py
index d89453c..08f39d0 100644
--- a/_test/test_json_numbers.py
+++ b/_test/test_json_numbers.py
@@ -1,11 +1,13 @@
# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
import json
+from typing import Any
-def load(s, typ=float):
+
+def load(s: str, typ: Any = float) -> float:
import ruamel.yaml
yaml = ruamel.yaml.YAML()
@@ -16,7 +18,7 @@ def load(s, typ=float):
assert isinstance(res['low'], typ)
ret_val = yaml.load(x)
print(ret_val)
- return ret_val['low']
+ return ret_val['low'] # type: ignore
class TestJSONNumbers:
@@ -26,7 +28,7 @@ class TestJSONNumbers:
# -? [1-9] ( \. [0-9]* [1-9] )? ( e [-+] [1-9] [0-9]* )?
#
# which is not a superset of the JSON numbers
- def test_json_number_float(self):
+ def test_json_number_float(self) -> None:
for x in (
y.split('#')[0].strip()
for y in """
@@ -43,7 +45,7 @@ class TestJSONNumbers:
res = load(x)
assert isinstance(res, float)
- def test_json_number_int(self):
+ def test_json_number_int(self) -> None:
for x in (
y.split('#')[0].strip()
for y in """
diff --git a/_test/test_line_col.py b/_test/test_line_col.py
index febe9c2..5ba125e 100644
--- a/_test/test_line_col.py
+++ b/_test/test_line_col.py
@@ -1,16 +1,18 @@
# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NOQA
+from typing import Any
-def load(s):
+
+def load(s: str) -> Any:
return round_trip_load(dedent(s))
class TestLineCol:
- def test_item_00(self):
+ def test_item_00(self) -> None:
data = load("""
- a
- e
@@ -20,7 +22,7 @@ class TestLineCol:
assert data[2].lc.line == 2
assert data[2].lc.col == 2
- def test_item_01(self):
+ def test_item_01(self) -> None:
data = load("""
- a
- e
@@ -30,7 +32,7 @@ class TestLineCol:
assert data[2].lc.line == 2
assert data[2].lc.col == 2
- def test_item_02(self):
+ def test_item_02(self) -> None:
data = load("""
- a
- e
@@ -40,7 +42,7 @@ class TestLineCol:
assert data[2].lc.line == 2
assert data[2].lc.col == 2
- def test_item_03(self):
+ def test_item_03(self) -> None:
data = load("""
- a
- e
@@ -52,7 +54,7 @@ class TestLineCol:
assert data[2].lc.line == 2
assert data[2].lc.col == 2
- def test_item_04(self):
+ def test_item_04(self) -> None:
data = load("""
# testing line and column based on SO
# http://stackoverflow.com/questions/13319067/
@@ -66,7 +68,7 @@ class TestLineCol:
assert data[1].lc.line == 4
assert data[1].lc.col == 2
- def test_pos_mapping(self):
+ def test_pos_mapping(self) -> None:
data = load("""
a: 1
b: 2
@@ -78,7 +80,7 @@ class TestLineCol:
assert data.lc.key('klm') == (4, 0)
assert data.lc.value('klm') == (4, 5)
- def test_pos_sequence(self):
+ def test_pos_sequence(self) -> None:
data = load("""
- a
- b
diff --git a/_test/test_literal.py b/_test/test_literal.py
index 7192207..0cf34bc 100644
--- a/_test/test_literal.py
+++ b/_test/test_literal.py
@@ -1,6 +1,6 @@
# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import YAML # does an automatic dedent on load
@@ -27,7 +27,7 @@ YAML 1.2 is again clear about root literal level scalar after directive in examp
class TestNoIndent:
- def test_root_literal_scalar_indent_example_9_5(self):
+ def test_root_literal_scalar_indent_example_9_5(self) -> None:
yaml = YAML()
s = '%!PS-Adobe-2.0'
inp = """
@@ -38,7 +38,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_literal_scalar_no_indent(self):
+ def test_root_literal_scalar_no_indent(self) -> None:
yaml = YAML()
s = 'testing123'
inp = """
@@ -49,7 +49,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_literal_scalar_no_indent_1_1(self):
+ def test_root_literal_scalar_no_indent_1_1(self) -> None:
yaml = YAML()
s = 'testing123'
inp = """
@@ -61,9 +61,9 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_literal_scalar_no_indent_1_1_old_style(self):
+ def test_root_literal_scalar_no_indent_1_1_old_style(self) -> None:
from textwrap import dedent
- from ruamel.yaml import YAML
+ from ruamel.yaml import YAML # type: ignore
yaml = YAML(typ='safe', pure=True)
s = 'testing123'
@@ -76,7 +76,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_literal_scalar_no_indent_1_1_no_raise(self):
+ def test_root_literal_scalar_no_indent_1_1_no_raise(self) -> None:
# from ruamel.yaml.parser import ParserError
yaml = YAML()
@@ -91,7 +91,7 @@ class TestNoIndent:
"""
yaml.load(inp.format(s))
- def test_root_literal_scalar_indent_offset_one(self):
+ def test_root_literal_scalar_indent_offset_one(self) -> None:
yaml = YAML()
s = 'testing123'
inp = """
@@ -102,7 +102,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_literal_scalar_indent_offset_four(self):
+ def test_root_literal_scalar_indent_offset_four(self) -> None:
yaml = YAML()
s = 'testing123'
inp = """
@@ -113,7 +113,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_literal_scalar_indent_offset_two_leading_space(self):
+ def test_root_literal_scalar_indent_offset_two_leading_space(self) -> None:
yaml = YAML()
s = ' testing123'
inp = """
@@ -125,7 +125,7 @@ class TestNoIndent:
print(d)
assert d == (s + '\n') * 2
- def test_root_literal_scalar_no_indent_special(self):
+ def test_root_literal_scalar_no_indent_special(self) -> None:
yaml = YAML()
s = '%!PS-Adobe-2.0'
inp = """
@@ -136,7 +136,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_folding_scalar_indent(self):
+ def test_root_folding_scalar_indent(self) -> None:
yaml = YAML()
s = '%!PS-Adobe-2.0'
inp = """
@@ -147,7 +147,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_folding_scalar_no_indent(self):
+ def test_root_folding_scalar_no_indent(self) -> None:
yaml = YAML()
s = 'testing123'
inp = """
@@ -158,7 +158,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_folding_scalar_no_indent_special(self):
+ def test_root_folding_scalar_no_indent_special(self) -> None:
yaml = YAML()
s = '%!PS-Adobe-2.0'
inp = """
@@ -169,7 +169,7 @@ class TestNoIndent:
print(d)
assert d == s + '\n'
- def test_root_literal_multi_doc(self):
+ def test_root_literal_multi_doc(self) -> None:
yaml = YAML(typ='safe', pure=True)
s1 = 'abc'
s2 = 'klm'
@@ -183,7 +183,7 @@ class TestNoIndent:
print('d1:', d1)
assert ['abc', 'klm\n'][idx] == d1
- def test_root_literal_doc_indent_directives_end(self):
+ def test_root_literal_doc_indent_directives_end(self) -> None:
yaml = YAML()
yaml.explicit_start = True
inp = """
@@ -194,7 +194,7 @@ class TestNoIndent:
"""
yaml.round_trip(inp)
- def test_root_literal_doc_indent_document_end(self):
+ def test_root_literal_doc_indent_document_end(self) -> None:
yaml = YAML()
yaml.explicit_start = True
inp = """
@@ -205,7 +205,7 @@ class TestNoIndent:
"""
yaml.round_trip(inp)
- def test_root_literal_doc_indent_marker(self):
+ def test_root_literal_doc_indent_marker(self) -> None:
yaml = YAML()
yaml.explicit_start = True
inp = """
@@ -217,7 +217,7 @@ class TestNoIndent:
print(type(d), repr(d))
yaml.round_trip(inp)
- def test_nested_literal_doc_indent_marker(self):
+ def test_nested_literal_doc_indent_marker(self) -> None:
yaml = YAML()
yaml.explicit_start = True
inp = """
@@ -232,7 +232,7 @@ class TestNoIndent:
class Test_RoundTripLiteral:
- def test_rt_root_literal_scalar_no_indent(self):
+ def test_rt_root_literal_scalar_no_indent(self) -> None:
yaml = YAML()
yaml.explicit_start = True
s = 'testing123'
@@ -244,7 +244,7 @@ class Test_RoundTripLiteral:
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_rt_root_literal_scalar_indent(self):
+ def test_rt_root_literal_scalar_indent(self) -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent = 4
@@ -257,7 +257,7 @@ class Test_RoundTripLiteral:
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_rt_root_plain_scalar_no_indent(self):
+ def test_rt_root_plain_scalar_no_indent(self) -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent = 0
@@ -270,7 +270,7 @@ class Test_RoundTripLiteral:
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_rt_root_plain_scalar_expl_indent(self):
+ def test_rt_root_plain_scalar_expl_indent(self) -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent = 4
@@ -283,7 +283,7 @@ class Test_RoundTripLiteral:
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_rt_root_sq_scalar_expl_indent(self):
+ def test_rt_root_sq_scalar_expl_indent(self) -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent = 4
@@ -296,7 +296,7 @@ class Test_RoundTripLiteral:
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_rt_root_dq_scalar_expl_indent(self):
+ def test_rt_root_dq_scalar_expl_indent(self) -> None:
# if yaml.indent is the default (None)
# then write after the directive indicator
yaml = YAML()
@@ -311,7 +311,7 @@ class Test_RoundTripLiteral:
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_rt_root_literal_scalar_no_indent_no_eol(self):
+ def test_rt_root_literal_scalar_no_indent_no_eol(self) -> None:
yaml = YAML()
yaml.explicit_start = True
s = 'testing123'
@@ -323,7 +323,7 @@ class Test_RoundTripLiteral:
d = yaml.load(ys)
yaml.dump(d, compare=ys)
- def test_rt_non_root_literal_scalar(self):
+ def test_rt_non_root_literal_scalar(self) -> None:
yaml = YAML()
s = 'testing123'
ys = """
diff --git a/_test/test_none.py b/_test/test_none.py
index 42aef4c..e11de17 100644
--- a/_test/test_none.py
+++ b/_test/test_none.py
@@ -1,39 +1,39 @@
# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
from roundtrip import round_trip_load, round_trip_dump
class TestNone:
- def test_dump00(self):
+ def test_dump00(self) -> None:
data = None
s = round_trip_dump(data)
assert s == 'null\n...\n'
d = round_trip_load(s)
assert d == data
- def test_dump01(self):
+ def test_dump01(self) -> None:
data = None
s = round_trip_dump(data, explicit_end=True)
assert s == 'null\n...\n'
d = round_trip_load(s)
assert d == data
- def test_dump02(self):
+ def test_dump02(self) -> None:
data = None
s = round_trip_dump(data, explicit_end=False)
assert s == 'null\n...\n'
d = round_trip_load(s)
assert d == data
- def test_dump03(self):
+ def test_dump03(self) -> None:
data = None
s = round_trip_dump(data, explicit_start=True)
assert s == '---\n...\n'
d = round_trip_load(s)
assert d == data
- def test_dump04(self):
+ def test_dump04(self) -> None:
data = None
s = round_trip_dump(data, explicit_start=True, explicit_end=False)
assert s == '---\n...\n'
diff --git a/_test/test_numpy.py b/_test/test_numpy.py
index 573f0bd..24eb768 100644
--- a/_test/test_numpy.py
+++ b/_test/test_numpy.py
@@ -1,22 +1,24 @@
# coding: utf-8
-try:
- import numpy
-except: # NOQA
- numpy = None
+# try:
+# import numpy
+# except: # NOQA
+# numpy = None
-def Xtest_numpy():
- import ruamel.yaml
-
- if numpy is None:
- return
- data = numpy.arange(10)
- print('data', type(data), data)
-
- yaml_str = ruamel.yaml.dump(data)
- datb = ruamel.yaml.load(yaml_str)
- print('datb', type(datb), datb)
-
- print('\nYAML', yaml_str)
- assert data == datb
+# def Xtest_numpy() -> None:
+# import ruamel.yaml
+#
+# if numpy is None:
+# return
+# data = numpy.arange(10)
+# print('data', type(data), data)
+#
+# buf = io.BytesIO()
+# ruamel.yaml.dump(data) # needs updating to use buffer
+# yaml_str = buf.getvalue().decode('utf-8')
+# datb = ruamel.yaml.load(yaml_str)
+# print('datb', type(datb), datb)
+#
+# print('\nYAML', yaml_str)
+# assert data == datb
diff --git a/_test/test_program_config.py b/_test/test_program_config.py
index 821ca15..6c5cad8 100644
--- a/_test/test_program_config.py
+++ b/_test/test_program_config.py
@@ -1,13 +1,13 @@
# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
# import ruamel.yaml
from roundtrip import round_trip
class TestProgramConfig:
- def test_application_arguments(self):
+ def test_application_arguments(self) -> None:
# application configuration
round_trip("""
args:
@@ -20,7 +20,7 @@ class TestProgramConfig:
wait: 10
""")
- def test_single(self):
+ def test_single(self) -> None:
# application configuration
round_trip("""
# default arguments for the program
@@ -39,7 +39,7 @@ class TestProgramConfig:
# no more argument info to pass
""")
- def test_multi(self):
+ def test_multi(self) -> None:
# application configuration
round_trip("""
# default arguments for the program
diff --git a/_test/test_spec_examples.py b/_test/test_spec_examples.py
index cead787..7faa4bf 100644
--- a/_test/test_spec_examples.py
+++ b/_test/test_spec_examples.py
@@ -1,10 +1,10 @@
# coding: utf-8
from roundtrip import YAML
-import pytest # NOQA
+import pytest # type: ignore # NOQA
-def test_example_2_1():
+def test_example_2_1() -> None:
yaml = YAML()
yaml.round_trip("""
- Mark McGwire
@@ -13,8 +13,8 @@ def test_example_2_1():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_2():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_2() -> None:
yaml = YAML()
yaml.mapping_value_align = True
yaml.round_trip("""
@@ -24,7 +24,7 @@ def test_example_2_2():
""")
-def test_example_2_3():
+def test_example_2_3() -> None:
yaml = YAML()
yaml.indent(sequence=4, offset=2)
yaml.round_trip("""
@@ -39,8 +39,8 @@ def test_example_2_3():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_4():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_4() -> None:
yaml = YAML()
yaml.mapping_value_align = True
yaml.round_trip("""
@@ -55,8 +55,8 @@ def test_example_2_4():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_5():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_5() -> None:
yaml = YAML()
yaml.flow_sequence_element_align = True
yaml.round_trip("""
@@ -66,8 +66,8 @@ def test_example_2_5():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_6():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_6() -> None:
yaml = YAML()
# yaml.flow_mapping_final_comma = False
yaml.flow_mapping_one_element_per_line = True
@@ -80,8 +80,8 @@ def test_example_2_6():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_7():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_7() -> None:
yaml = YAML()
yaml.round_trip_all("""
# Ranking of 1998 home runs
@@ -97,7 +97,7 @@ def test_example_2_7():
""")
-def test_example_2_8():
+def test_example_2_8() -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.explicit_end = True
@@ -115,7 +115,7 @@ def test_example_2_8():
""")
-def test_example_2_9():
+def test_example_2_9() -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent(sequence=4, offset=2)
@@ -131,8 +131,8 @@ def test_example_2_9():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_10():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_10() -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent(sequence=4, offset=2)
@@ -148,8 +148,8 @@ def test_example_2_10():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_11():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_11() -> None:
yaml = YAML()
yaml.round_trip("""
? - Detroit Tigers
@@ -164,8 +164,8 @@ def test_example_2_11():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_12():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_12() -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.round_trip("""
@@ -180,8 +180,8 @@ def test_example_2_12():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_13():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_13() -> None:
yaml = YAML()
yaml.round_trip(r"""
# ASCII Art
@@ -191,8 +191,8 @@ def test_example_2_13():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_14():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_14() -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent(root_scalar=2) # needs to be added
@@ -204,8 +204,8 @@ def test_example_2_14():
""")
-@pytest.mark.xfail(strict=True)
-def test_example_2_15():
+@pytest.mark.xfail(strict=True) # type: ignore
+def test_example_2_15() -> None:
yaml = YAML()
yaml.round_trip("""
>
@@ -219,7 +219,7 @@ def test_example_2_15():
""")
-def test_example_2_16():
+def test_example_2_16() -> None:
yaml = YAML()
yaml.round_trip("""
name: Mark McGwire
@@ -232,10 +232,10 @@ def test_example_2_16():
""")
-@pytest.mark.xfail(
+@pytest.mark.xfail( # type: ignore
strict=True, reason='cannot YAML dump escape sequences (\n) as hex and normal'
)
-def test_example_2_17():
+def test_example_2_17() -> None:
yaml = YAML()
yaml.allow_unicode = False
yaml.preserve_quotes = True
@@ -250,8 +250,9 @@ def test_example_2_17():
""")
-@pytest.mark.xfail(strict=True, reason='non-literal/folding multiline scalars not supported')
-def test_example_2_18():
+@pytest.mark.xfail(strict=True, # type: ignore # NOQA
+ reason='non-literal/folding multiline scalars not supported')
+def test_example_2_18() -> None:
yaml = YAML()
yaml.round_trip("""
plain:
@@ -263,8 +264,8 @@ def test_example_2_18():
""")
-@pytest.mark.xfail(strict=True, reason='leading + on decimal dropped')
-def test_example_2_19():
+@pytest.mark.xfail(strict=True, reason='leading + on decimal dropped') # type: ignore
+def test_example_2_19() -> None:
yaml = YAML()
yaml.round_trip("""
canonical: 12345
@@ -274,8 +275,8 @@ def test_example_2_19():
""")
-@pytest.mark.xfail(strict=True, reason='case of NaN not preserved')
-def test_example_2_20():
+@pytest.mark.xfail(strict=True, reason='case of NaN not preserved') # type: ignore
+def test_example_2_20() -> None:
yaml = YAML()
yaml.round_trip("""
canonical: 1.23015e+3
@@ -286,7 +287,7 @@ def test_example_2_20():
""")
-def Xtest_example_2_X():
+def Xtest_example_2_X() -> None:
yaml = YAML()
yaml.round_trip("""
""")
diff --git a/_test/test_string.py b/_test/test_string.py
index 7c10fd4..75890d2 100644
--- a/_test/test_string.py
+++ b/_test/test_string.py
@@ -13,7 +13,7 @@ and the chomping modifiers:
"""
-import pytest
+import pytest # type: ignore
import platform
# from ruamel.yaml.compat import ordereddict
@@ -21,20 +21,20 @@ from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NO
class TestLiteralScalarString:
- def test_basic_string(self):
+ def test_basic_string(self) -> None:
round_trip("""
a: abcdefg
""")
- def test_quoted_integer_string(self):
+ def test_quoted_integer_string(self) -> None:
round_trip("""
a: '12345'
""")
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_preserve_string(self):
+ def test_preserve_string(self) -> None:
inp = """
a: |
abc
@@ -42,10 +42,10 @@ class TestLiteralScalarString:
"""
round_trip(inp, intermediate=dict(a='abc\ndef\n'))
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_preserve_string_strip(self):
+ def test_preserve_string_strip(self) -> None:
s = """
a: |-
abc
@@ -54,10 +54,10 @@ class TestLiteralScalarString:
"""
round_trip(s, intermediate=dict(a='abc\ndef'))
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_preserve_string_keep(self):
+ def test_preserve_string_keep(self) -> None:
# with pytest.raises(AssertionError) as excinfo:
inp = """
a: |+
@@ -69,10 +69,10 @@ class TestLiteralScalarString:
"""
round_trip(inp, intermediate=dict(a='ghi\njkl\n\n\n', b='x'))
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_preserve_string_keep_at_end(self):
+ def test_preserve_string_keep_at_end(self) -> None:
# at EOF you have to specify the ... to get proper "closure"
# of the multiline scalar
inp = """
@@ -84,7 +84,7 @@ class TestLiteralScalarString:
"""
round_trip(inp, intermediate=dict(a='ghi\njkl\n\n'))
- def test_fold_string(self):
+ def test_fold_string(self) -> None:
inp = """
a: >
abc
@@ -93,7 +93,7 @@ class TestLiteralScalarString:
"""
round_trip(inp)
- def test_fold_string_strip(self):
+ def test_fold_string_strip(self) -> None:
inp = """
a: >-
abc
@@ -102,7 +102,7 @@ class TestLiteralScalarString:
"""
round_trip(inp)
- def test_fold_string_keep(self):
+ def test_fold_string_keep(self) -> None:
with pytest.raises(AssertionError) as excinfo: # NOQA
inp = """
a: >+
@@ -114,19 +114,19 @@ class TestLiteralScalarString:
class TestQuotedScalarString:
- def test_single_quoted_string(self):
+ def test_single_quoted_string(self) -> None:
inp = """
a: 'abc'
"""
round_trip(inp, preserve_quotes=True)
- def test_double_quoted_string(self):
+ def test_double_quoted_string(self) -> None:
inp = """
a: "abc"
"""
round_trip(inp, preserve_quotes=True)
- def test_non_preserved_double_quoted_string(self):
+ def test_non_preserved_double_quoted_string(self) -> None:
inp = """
a: "abc"
"""
@@ -139,7 +139,7 @@ class TestQuotedScalarString:
class TestReplace:
"""inspired by issue 110 from sandres23"""
- def test_replace_preserved_scalar_string(self):
+ def test_replace_preserved_scalar_string(self) -> None:
import ruamel
s = dedent("""\
@@ -159,7 +159,7 @@ class TestReplace:
foo
""")
- def test_replace_double_quoted_scalar_string(self):
+ def test_replace_double_quoted_scalar_string(self) -> None:
import ruamel
s = dedent("""\
@@ -172,7 +172,7 @@ class TestReplace:
class TestWalkTree:
- def test_basic(self):
+ def test_basic(self) -> None:
from ruamel.yaml.comments import CommentedMap
from ruamel.yaml.scalarstring import walk_tree
@@ -188,7 +188,7 @@ class TestWalkTree:
"""
assert round_trip_dump(data) == dedent(exp)
- def test_map(self):
+ def test_map(self) -> None:
from ruamel.yaml.compat import ordereddict
from ruamel.yaml.comments import CommentedMap
from ruamel.yaml.scalarstring import walk_tree, preserve_literal
diff --git a/_test/test_tag.py b/_test/test_tag.py
index 3fd1e05..bb20621 100644
--- a/_test/test_tag.py
+++ b/_test/test_tag.py
@@ -1,20 +1,21 @@
# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
+from typing import Any
from roundtrip import round_trip, round_trip_load, YAML
-def register_xxx(**kw):
+def register_xxx(**kw: Any) -> None:
from ruamel import yaml
class XXX(yaml.comments.CommentedMap):
@staticmethod
- def yaml_dump(dumper, data):
+ def yaml_dump(dumper: Any, data: Any) -> Any:
return dumper.represent_mapping('!xxx', data)
@classmethod
- def yaml_load(cls, constructor, node):
+ def yaml_load(cls, constructor: Any, node: Any) -> Any:
data = cls()
yield data
constructor.construct_mapping(node, data)
@@ -24,7 +25,7 @@ def register_xxx(**kw):
class TestIndentFailures:
- def test_tag(self):
+ def test_tag(self) -> None:
round_trip("""\
!!python/object:__main__.Developer
name: Anthon
@@ -32,7 +33,7 @@ class TestIndentFailures:
language: python
""")
- def test_full_tag(self):
+ def test_full_tag(self) -> None:
round_trip("""\
!!tag:yaml.org,2002:python/object:__main__.Developer
name: Anthon
@@ -40,7 +41,7 @@ class TestIndentFailures:
language: python
""")
- def test_standard_tag(self):
+ def test_standard_tag(self) -> None:
round_trip("""\
!!tag:yaml.org,2002:python/object:map
name: Anthon
@@ -48,7 +49,7 @@ class TestIndentFailures:
language: python
""")
- def test_Y1(self):
+ def test_Y1(self) -> None:
round_trip("""\
!yyy
name: Anthon
@@ -56,7 +57,7 @@ class TestIndentFailures:
language: python
""")
- def test_Y2(self):
+ def test_Y2(self) -> None:
round_trip("""\
!!yyy
name: Anthon
@@ -66,7 +67,7 @@ class TestIndentFailures:
class TestRoundTripCustom:
- def test_X1(self):
+ def test_X1(self) -> None:
register_xxx()
round_trip("""\
!xxx
@@ -75,8 +76,8 @@ class TestRoundTripCustom:
language: python
""")
- @pytest.mark.xfail(strict=True)
- def test_X_pre_tag_comment(self):
+ @pytest.mark.xfail(strict=True) # type: ignore
+ def test_X_pre_tag_comment(self) -> None:
register_xxx()
round_trip("""\
-
@@ -87,8 +88,8 @@ class TestRoundTripCustom:
language: python
""")
- @pytest.mark.xfail(strict=True)
- def test_X_post_tag_comment(self):
+ @pytest.mark.xfail(strict=True) # type: ignore
+ def test_X_post_tag_comment(self) -> None:
register_xxx()
round_trip("""\
- !xxx
@@ -98,7 +99,7 @@ class TestRoundTripCustom:
language: python
""")
- def test_scalar_00(self):
+ def test_scalar_00(self) -> None:
# https://stackoverflow.com/a/45967047/1307905
round_trip("""\
Outputs:
@@ -110,24 +111,35 @@ class TestRoundTripCustom:
class TestIssue201:
- def test_encoded_unicode_tag(self):
+ def test_encoded_unicode_tag(self) -> None:
round_trip_load("""
s: !!python/%75nicode 'abc'
""")
class TestImplicitTaggedNodes:
- def test_scalar(self):
- round_trip("""\
- - !Scalar abcdefg
+ def test_scalar(self) -> None:
+ data = round_trip("""\
+ - !SString abcdefg
+ - !SFloat 1.0
+ - !SInt 1961
+ - !SBool true
+ - !SLit |
+ glitter in the dark near the Tanhäuser gate
""")
-
- def test_mapping(self):
+ # tagged scalars have string or string-like types as value
+ assert data[0].count('d') == 1
+ assert data[1].count('1') == 1
+ assert data[2].count('1') == 2
+ assert data[3].count('u') == 1
+ assert data[4].count('a') == 4
+
+ def test_mapping(self) -> None:
round_trip("""\
- !Mapping {a: 1, b: 2}
""")
- def test_sequence(self):
+ def test_sequence(self) -> None:
yaml = YAML()
yaml.brace_single_entry_mapping_in_flow_sequence = True
yaml.mapping_value_align = True
@@ -135,7 +147,7 @@ class TestImplicitTaggedNodes:
- !Sequence [a, {b: 1}, {c: {d: 3}}]
""")
- def test_sequence2(self):
+ def test_sequence2(self) -> None:
yaml = YAML()
yaml.mapping_value_align = True
yaml.round_trip("""
diff --git a/_test/test_version.py b/_test/test_version.py
index e110eed..b60b1dd 100644
--- a/_test/test_version.py
+++ b/_test/test_version.py
@@ -1,11 +1,12 @@
# coding: utf-8
-import pytest # NOQA
+import pytest # type: ignore # NOQA
+from typing import Any, Optional
from roundtrip import dedent, round_trip, round_trip_load
-def load(s, version=None):
+def load(s: str, version: Optional[Any] = None) -> Any:
import ruamel.yaml # NOQA
yaml = ruamel.yaml.YAML()
@@ -14,7 +15,7 @@ def load(s, version=None):
class TestVersions:
- def test_explicit_1_2(self):
+ def test_explicit_1_2(self) -> None:
r = load("""\
%YAML 1.2
---
@@ -38,7 +39,7 @@ class TestVersions:
assert r[7] == 'no'
assert r[8] is True
- def test_explicit_1_1(self):
+ def test_explicit_1_1(self) -> None:
r = load("""\
%YAML 1.1
---
@@ -62,7 +63,7 @@ class TestVersions:
assert r[7] is False
assert r[8] is True
- def test_implicit_1_2(self):
+ def test_implicit_1_2(self) -> None:
r = load("""\
- 12:34:56
- 12:34:56.78
@@ -86,7 +87,7 @@ class TestVersions:
assert r[8] == 'no'
assert r[9] is True
- def test_load_version_1_1(self):
+ def test_load_version_1_1(self) -> None:
inp = """\
- 12:34:56
- 12:34:56.78
@@ -114,7 +115,7 @@ class TestVersions:
class TestIssue62:
# bitbucket issue 62, issue_62
- def test_00(self):
+ def test_00(self) -> None:
import ruamel.yaml # NOQA
s = dedent("""\
@@ -131,7 +132,7 @@ class TestIssue62:
round_trip(s.format('%YAML 1.1\n---\n'), preserve_quotes=True)
round_trip(s.format(""), preserve_quotes=True)
- def test_00_single_comment(self):
+ def test_00_single_comment(self) -> None:
import ruamel.yaml # NOQA
s = dedent("""\
@@ -148,7 +149,7 @@ class TestIssue62:
round_trip(s.format(""), preserve_quotes=True)
# round_trip(s.format('%YAML 1.2\n---\n'), preserve_quotes=True, version=(1, 2))
- def test_01(self):
+ def test_01(self) -> None:
import ruamel.yaml # NOQA
s = dedent("""\
@@ -160,6 +161,6 @@ class TestIssue62:
# note the flow seq on the --- line!
round_trip(s.format('%YAML 1.2\n--- '), preserve_quotes=True, version='1.2')
- def test_so_45681626(self):
+ def test_so_45681626(self) -> None:
# was not properly parsing
round_trip_load('{"in":{},"out":{}}')
diff --git a/_test/test_yamlfile.py b/_test/test_yamlfile.py
index f1de872..6f7aca7 100644
--- a/_test/test_yamlfile.py
+++ b/_test/test_yamlfile.py
@@ -6,20 +6,20 @@ various test cases for YAML files
import sys
import io
-import pytest # NOQA
+import pytest # type: ignore # NOQA
import platform
from roundtrip import round_trip, dedent, round_trip_load, round_trip_dump # NOQA
class TestYAML:
- def test_backslash(self):
+ def test_backslash(self) -> None:
round_trip("""
handlers:
static_files: applications/\\1/static/\\2
""")
- def test_omap_out(self):
+ def test_omap_out(self) -> None:
# ordereddict mapped to !!omap
from ruamel.yaml.compat import ordereddict
import ruamel.yaml # NOQA
@@ -32,7 +32,7 @@ class TestYAML:
- b: 2
""")
- def test_omap_roundtrip(self):
+ def test_omap_roundtrip(self) -> None:
round_trip("""
!!omap
- a: 1
@@ -41,25 +41,26 @@ class TestYAML:
- d: 4
""")
- @pytest.mark.skipif(sys.version_info < (2, 7), reason='collections not available')
- def test_dump_collections_ordereddict(self):
- from collections import OrderedDict
- import ruamel.yaml # NOQA
-
- # OrderedDict mapped to !!omap
- x = OrderedDict([('a', 1), ('b', 2)])
- res = round_trip_dump(x, default_flow_style=False)
- assert res == dedent("""
- !!omap
- - a: 1
- - b: 2
- """)
-
- @pytest.mark.skipif(
+ # @pytest.mark.skipif(sys.version_info < (2, 7),
+ # reason='collections not available')
+ # def test_dump_collections_ordereddict(self) -> None:
+ # from collections import OrderedDict
+ # import ruamel.yaml # NOQA
+
+ # # OrderedDict mapped to !!omap
+ # x = OrderedDict([('a', 1), ('b', 2)])
+ # res = round_trip_dump(x, default_flow_style=False)
+ # assert res == dedent("""
+ # !!omap
+ # - a: 1
+ # - b: 2
+ # """)
+
+ @pytest.mark.skipif( # type: ignore
sys.version_info >= (3, 0) or platform.python_implementation() != 'CPython',
reason='ruamel.yaml not available',
)
- def test_dump_ruamel_ordereddict(self):
+ def test_dump_ruamel_ordereddict(self) -> None:
from ruamel.ordereddict import ordereddict
import ruamel.yaml # NOQA
@@ -72,7 +73,7 @@ class TestYAML:
- b: 2
""")
- def test_CommentedSet(self):
+ def test_CommentedSet(self) -> None:
from ruamel.yaml.constructor import CommentedSet
s = CommentedSet(['a', 'b', 'c'])
@@ -84,7 +85,7 @@ class TestYAML:
s.remove('e')
assert s == CommentedSet(['a', 'c', 'd', 'f'])
- def test_set_out(self):
+ def test_set_out(self) -> None:
# preferable would be the shorter format without the ': null'
import ruamel.yaml # NOQA
@@ -102,7 +103,7 @@ class TestYAML:
""")
# ordering is not preserved in a set
- def test_set_compact(self):
+ def test_set_compact(self) -> None:
# this format is read and also should be written by default
round_trip("""
!!set
@@ -111,7 +112,7 @@ class TestYAML:
? c
""")
- def test_blank_line_after_comment(self):
+ def test_blank_line_after_comment(self) -> None:
round_trip("""
# Comment with spaces after it.
@@ -119,7 +120,7 @@ class TestYAML:
a: 1
""")
- def test_blank_line_between_seq_items(self):
+ def test_blank_line_between_seq_items(self) -> None:
round_trip("""
# Seq with empty lines in between items.
b:
@@ -129,10 +130,10 @@ class TestYAML:
- baz
""")
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_blank_line_after_literal_chip(self):
+ def test_blank_line_after_literal_chip(self) -> None:
s = """
c:
- |
@@ -153,10 +154,10 @@ class TestYAML:
assert d['c'][0].split('it.')[1] == '\n'
assert d['c'][1].split('line.')[1] == '\n'
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_blank_line_after_literal_keep(self):
+ def test_blank_line_after_literal_keep(self) -> None:
""" have to insert an eof marker in YAML to test this"""
s = """
c:
@@ -179,10 +180,10 @@ class TestYAML:
assert d['c'][0].split('it.')[1] == '\n\n'
assert d['c'][1].split('line.')[1] == '\n\n\n'
- @pytest.mark.skipif(
+ @pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython', reason='Jython throws RepresenterError'
)
- def test_blank_line_after_literal_strip(self):
+ def test_blank_line_after_literal_strip(self) -> None:
s = """
c:
- |-
@@ -203,7 +204,7 @@ class TestYAML:
assert d['c'][0].split('it.')[1] == ""
assert d['c'][1].split('line.')[1] == ""
- def test_load_all_perserve_quotes(self):
+ def test_load_all_perserve_quotes(self) -> None:
import ruamel.yaml # NOQA
yaml = ruamel.yaml.YAML()
diff --git a/_test/test_yamlobject.py b/_test/test_yamlobject.py
index 80fb213..3f488d3 100644
--- a/_test/test_yamlobject.py
+++ b/_test/test_yamlobject.py
@@ -1,12 +1,13 @@
# coding: utf-8
import sys
-import pytest # NOQA
+from typing import Any
+import pytest # type: ignore # NOQA
from roundtrip import save_and_run # NOQA
-def test_monster(tmpdir):
+def test_monster(tmpdir: Any) -> None:
program_src = '''\
import ruamel.yaml
from textwrap import dedent
@@ -24,27 +25,33 @@ def test_monster(tmpdir):
return "%s(name=%r, hp=%r, ac=%r, attacks=%r)" % (
self.__class__.__name__, self.name, self.hp, self.ac, self.attacks)
- data = ruamel.yaml.load(dedent("""\\
+ yaml = ruamel.yaml.YAML(typ='safe', pure='True')
+ yaml = ruamel.yaml.YAML()
+ data = yaml.load(dedent("""\\
--- !Monster
name: Cave spider
hp: [2,6] # 2d6
ac: 16
attacks: [BITE, HURT]
- """), Loader=ruamel.yaml.Loader)
+ """))
# round-trip dump, keys are kept in their original order
- assert ruamel.yaml.dump(data) == dedent("""\\
+ from io import BytesIO
+ buf = BytesIO()
+ yaml.dump(data, buf)
+ print(buf.getvalue().decode('utf-8'))
+ assert buf.getvalue().decode('utf8') == dedent("""\\
!Monster
+ name: Cave spider
+ hp: [2, 6] # 2d6
ac: 16
attacks: [BITE, HURT]
- hp: [2, 6]
- name: Cave spider
""")
'''
assert save_and_run(program_src, tmpdir) == 0
-@pytest.mark.skipif(sys.version_info < (3, 0), reason='no __qualname__')
-def test_qualified_name00(tmpdir):
+@pytest.mark.skipif(sys.version_info < (3, 0), reason='no __qualname__') # type: ignore
+def test_qualified_name00(tmpdir: Any) -> None:
"""issue 214"""
program_src = """\
from ruamel.yaml import YAML
@@ -67,8 +74,8 @@ def test_qualified_name00(tmpdir):
assert save_and_run(program_src, tmpdir) == 0
-@pytest.mark.skipif(sys.version_info < (3, 0), reason='no __qualname__')
-def test_qualified_name01(tmpdir):
+@pytest.mark.skipif(sys.version_info < (3, 0), reason='no __qualname__') # type: ignore
+def test_qualified_name01(tmpdir: Any) -> None:
"""issue 214"""
from ruamel.yaml import YAML
import ruamel.yaml.comments
diff --git a/_test/test_z_check_debug_leftovers.py b/_test/test_z_check_debug_leftovers.py
index f5be5df..7096a73 100644
--- a/_test/test_z_check_debug_leftovers.py
+++ b/_test/test_z_check_debug_leftovers.py
@@ -1,7 +1,8 @@
# coding: utf-8
import sys
-import pytest # NOQA
+from typing import Any
+import pytest # type: ignore # NOQA
from roundtrip import round_trip_load, round_trip_dump, dedent
@@ -9,7 +10,7 @@ from roundtrip import round_trip_load, round_trip_dump, dedent
class TestLeftOverDebug:
# idea here is to capture round_trip_output via pytest stdout capture
# if there are any leftover debug statements they should show up
- def test_00(self, capsys):
+ def test_00(self, capsys: Any) -> None:
s = dedent("""
a: 1
b: []
@@ -21,7 +22,7 @@ class TestLeftOverDebug:
out, err = capsys.readouterr()
assert out == s
- def test_01(self, capsys):
+ def test_01(self, capsys: Any) -> None:
s = dedent("""
- 1
- []
diff --git a/_test/test_z_data.py b/_test/test_z_data.py
index 965eb3d..8a8ba21 100644
--- a/_test/test_z_data.py
+++ b/_test/test_z_data.py
@@ -2,19 +2,18 @@
import sys
import os
-import pytest # NOQA
+import pytest # type: ignore # NOQA
import warnings # NOQA
+from typing import Any, Optional, List, Tuple
from pathlib import Path
-from ruamel.yaml.compat import _F
-
base_path = Path('data') # that is ruamel.yaml.data
-class YAMLData(object):
+class YAMLData:
yaml_tag = '!YAML'
- def __init__(self, s):
+ def __init__(self, s: Any) -> None:
self._s = s
# Conversion tables for input. E.g. "<TAB>" is replaced by "\t"
@@ -28,9 +27,9 @@ class YAMLData(object):
# fmt: on
@property
- def value(self):
+ def value(self) -> Any:
if hasattr(self, '_p'):
- return self._p
+ return self._p # type: ignore
assert ' \n' not in self._s
assert '\t\n' not in self._s
self._p = self._s
@@ -39,7 +38,7 @@ class YAMLData(object):
self._p = self._p.replace(k, v)
return self._p
- def test_rewrite(self, s):
+ def test_rewrite(self, s: str) -> str:
assert ' \n' not in s
assert '\t\n' not in s
for k, v in YAMLData.special.items():
@@ -48,7 +47,7 @@ class YAMLData(object):
return s
@classmethod
- def from_yaml(cls, constructor, node):
+ def from_yaml(cls, constructor: Any, node: Any) -> 'YAMLData':
from ruamel.yaml.nodes import MappingNode
if isinstance(node, MappingNode):
@@ -68,18 +67,18 @@ class Assert(YAMLData):
yaml_tag = '!Assert'
@property
- def value(self):
+ def value(self) -> Any:
from collections.abc import Mapping
if hasattr(self, '_pa'):
- return self._pa
+ return self._pa # type: ignore
if isinstance(self._s, Mapping):
- self._s['lines'] = self.test_rewrite(self._s['lines'])
+ self._s['lines'] = self.test_rewrite(self._s['lines']) # type: ignore
self._pa = self._s
return self._pa
-def pytest_generate_tests(metafunc):
+def pytest_generate_tests(metafunc: Any) -> None:
test_yaml = []
paths = sorted(base_path.glob('**/*.yaml'))
idlist = []
@@ -100,8 +99,8 @@ def pytest_generate_tests(metafunc):
metafunc.parametrize(['yaml'], test_yaml, ids=idlist, scope='class')
-class TestYAMLData(object):
- def yaml(self, yaml_version=None):
+class TestYAMLData:
+ def yaml(self, yaml_version: Optional[Any] = None) -> Any:
from ruamel.yaml import YAML
y = YAML()
@@ -110,7 +109,7 @@ class TestYAMLData(object):
y.version = yaml_version
return y
- def docs(self, path):
+ def docs(self, path: Path) -> List[Any]:
from ruamel.yaml import YAML
tyaml = YAML(typ='safe', pure=True)
@@ -120,12 +119,14 @@ class TestYAMLData(object):
tyaml.register_class(Assert)
return list(tyaml.load_all(path))
- def yaml_load(self, value, yaml_version=None):
+ def yaml_load(self, value: Any, yaml_version: Optional[Any] = None) -> Tuple[Any, Any]:
yaml = self.yaml(yaml_version=yaml_version)
data = yaml.load(value)
return yaml, data
- def round_trip(self, input, output=None, yaml_version=None):
+ def round_trip(
+ self, input: Any, output: Optional[Any] = None, yaml_version: Optional[Any] = None
+ ) -> None:
from ruamel.yaml.compat import StringIO
yaml, data = self.yaml_load(input.value, yaml_version=yaml_version)
@@ -133,9 +134,12 @@ class TestYAMLData(object):
yaml.dump(data, buf)
expected = input.value if output is None else output.value
value = buf.getvalue()
+ print('>>>> rt output\n', value.replace(' ', '\u2423'), sep='') # 2423 open box
assert value == expected
- def load_assert(self, input, confirm, yaml_version=None):
+ def load_assert(
+ self, input: Any, confirm: Any, yaml_version: Optional[Any] = None
+ ) -> None:
from collections.abc import Mapping
d = self.yaml_load(input.value, yaml_version=yaml_version)[1] # NOQA
@@ -154,14 +158,16 @@ class TestYAMLData(object):
print(line)
exec(line)
- def run_python(self, python, data, tmpdir, input=None):
+ def run_python(
+ self, python: Any, data: Any, tmpdir: Any, input: Optional[Any] = None
+ ) -> None:
from roundtrip import save_and_run
if input is not None:
(tmpdir / 'input.yaml').write_text(input.value, encoding='utf-8')
assert save_and_run(python.value, base_dir=tmpdir, output=data.value) == 0
- def insert_comments(self, data, actions):
+ def insert_comments(self, data: Any, actions: Any) -> None:
"""this is to automatically insert based on:
path (a.1.b),
position (before, after, between), and
@@ -180,7 +186,7 @@ class TestYAMLData(object):
# this is executed by pytest; the methods with names not starting with
# test_ are helper methods
- def test_yaml_data(self, yaml, tmpdir):
+ def test_yaml_data(self, yaml: Any, tmpdir: Any) -> None:
from collections.abc import Mapping
idx = 0
@@ -221,8 +227,8 @@ class TestYAMLData(object):
typ = 'rt'
print('type:', typ)
if data is not None:
- print('data:', data.value, end='')
- print('output:', output.value if output is not None else output)
+ print('>>>> data:\n', data.value.replace(' ', '\u2423'), sep='', end='')
+ print('>>>> output:\n', output.value if output is not None else output, sep='')
if typ == 'rt':
self.round_trip(data, output, yaml_version=yaml_version)
elif typ == 'python_run':
@@ -231,14 +237,14 @@ class TestYAMLData(object):
elif typ == 'load_assert':
self.load_assert(data, confirm, yaml_version=yaml_version)
elif typ == 'comment':
- actions = []
+ actions: List[Any] = []
self.insert_comments(data, actions)
else:
- _F('\n>>>>>> run type unknown: "{typ}" <<<<<<\n')
+ f'\n>>>>>> run type unknown: "{typ}" <<<<<<\n'
raise AssertionError()
-def check_python_version(match, current=None):
+def check_python_version(match: Any, current: Optional[Any] = None) -> bool:
"""
version indication, return True if version matches.
match should be something like 3.6+, or [2.7, 3.3] etc. Floats
diff --git a/_test/test_z_olddata.py b/_test/test_z_olddata.py
index 89b7053..ffe1572 100644
--- a/_test/test_z_olddata.py
+++ b/_test/test_z_olddata.py
@@ -2,31 +2,33 @@
import sys
import os
-import pytest # NOQA
+import pytest # type: ignore # NOQA
sys.path.insert(0, os.path.dirname(__file__) + '/lib')
import warnings # NOQA
+from typing import List, Any # NOQA
-args = []
+args: List[Any] = []
-def test_data():
- import test_appliance # NOQA
+def test_data() -> None:
+ import test_appliance # type: ignore # NOQA
warnings.simplefilter('ignore', PendingDeprecationWarning)
collections = []
- import test_yaml
+ import test_yaml # type: ignore
collections.append(test_yaml)
test_appliance.run(collections, args)
+
# @pytest.mark.skipif(not ruamel.yaml.__with_libyaml__,
# reason="no libyaml")
-def test_data_ext():
+def test_data_ext() -> None:
collections = []
import ruamel.yaml # NOQA
import test_appliance # NOQA
@@ -34,7 +36,7 @@ def test_data_ext():
warnings.simplefilter('ignore', ruamel.yaml.error.UnsafeLoaderWarning)
warnings.simplefilter('ignore', PendingDeprecationWarning)
if ruamel.yaml.__with_libyaml__:
- import test_yaml_ext
+ import test_yaml_ext # type: ignore
collections.append(test_yaml_ext)
test_appliance.run(collections, args)
diff --git a/anchor.py b/anchor.py
index 1deea78..1eb1480 100644
--- a/anchor.py
+++ b/anchor.py
@@ -1,6 +1,6 @@
# coding: utf-8
-if False: # MYPY
- from typing import Any, Dict, Optional, List, Union, Optional, Iterator # NOQA
+
+from typing import Any, Dict, Optional, List, Union, Optional, Iterator # NOQA
anchor_attrib = '_yaml_anchor'
@@ -9,12 +9,10 @@ class Anchor:
__slots__ = 'value', 'always_dump'
attrib = anchor_attrib
- def __init__(self):
- # type: () -> None
+ def __init__(self) -> None:
self.value = None
self.always_dump = False
- def __repr__(self):
- # type: () -> Any
+ def __repr__(self) -> Any:
ad = ', (always dump)' if self.always_dump else ""
- return 'Anchor({!r}{})'.format(self.value, ad)
+ return f'Anchor({self.value!r}{ad})'
diff --git a/comments.py b/comments.py
index 892c868..c6a9703 100644
--- a/comments.py
+++ b/comments.py
@@ -11,14 +11,13 @@ import copy
from ruamel.yaml.compat import ordereddict
-from ruamel.yaml.compat import MutableSliceableSequence, _F, nprintf # NOQA
+from ruamel.yaml.compat import MutableSliceableSequence, nprintf # NOQA
from ruamel.yaml.scalarstring import ScalarString
from ruamel.yaml.anchor import Anchor
from collections.abc import MutableSet, Sized, Set, Mapping
-if False: # MYPY
- from typing import Any, Dict, Optional, List, Union, Optional, Iterator # NOQA
+from typing import Any, Dict, Optional, List, Union, Optional, Iterator # NOQA
# fmt: off
__all__ = ['CommentedSeq', 'CommentedKeySeq',
@@ -46,18 +45,15 @@ C_BLANK_LINE_PRESERVE_SPACE = 0b100
class IDX:
# temporary auto increment, so rearranging is easier
- def __init__(self):
- # type: () -> None
+ def __init__(self) -> None:
self._idx = 0
- def __call__(self):
- # type: () -> Any
+ def __call__(self) -> Any:
x = self._idx
self._idx += 1
return x
- def __str__(self):
- # type: () -> Any
+ def __str__(self) -> Any:
return str(self._idx)
@@ -92,27 +88,24 @@ class Comment:
__slots__ = 'comment', '_items', '_post', '_pre'
attrib = comment_attrib
- def __init__(self, old=True):
- # type: (bool) -> None
+ def __init__(self, old: bool = True) -> None:
self._pre = None if old else [] # type: ignore
self.comment = None # [post, [pre]]
# map key (mapping/omap/dict) or index (sequence/list) to a list of
# dict: post_key, pre_key, post_value, pre_value
# list: pre item, post item
- self._items = {} # type: Dict[Any, Any]
+ self._items: Dict[Any, Any] = {}
# self._start = [] # should not put these on first item
- self._post = [] # type: List[Any] # end of document comments
+ self._post: List[Any] = [] # end of document comments
- def __str__(self):
- # type: () -> str
+ def __str__(self) -> str:
if bool(self._post):
end = ',\n end=' + str(self._post)
else:
end = ""
- return 'Comment(comment={0},\n items={1}{2})'.format(self.comment, self._items, end)
+ return f'Comment(comment={self.comment},\n items={self._items}{end})'
- def _old__repr__(self):
- # type: () -> str
+ def _old__repr__(self) -> str:
if bool(self._post):
end = ',\n end=' + str(self._post)
else:
@@ -121,15 +114,12 @@ class Comment:
ln = max([len(str(k)) for k in self._items]) + 1
except ValueError:
ln = '' # type: ignore
- it = ' '.join(
- ['{:{}} {}\n'.format(str(k) + ':', ln, v) for k, v in self._items.items()]
- )
+ it = ' '.join([f'{str(k) + ":":{ln}} {v}\n' for k, v in self._items.items()])
if it:
it = '\n ' + it + ' '
- return 'Comment(\n start={},\n items={{{}}}{})'.format(self.comment, it, end)
+ return f'Comment(\n start={self.comment},\n items={{{it}}}{end})'
- def __repr__(self):
- # type: () -> str
+ def __repr__(self) -> str:
if self._pre is None:
return self._old__repr__()
if bool(self._post):
@@ -140,47 +130,38 @@ class Comment:
ln = max([len(str(k)) for k in self._items]) + 1
except ValueError:
ln = '' # type: ignore
- it = ' '.join(
- ['{:{}} {}\n'.format(str(k) + ':', ln, v) for k, v in self._items.items()]
- )
+ it = ' '.join([f'{str(k) + ":":{ln}} {v}\n' for k, v in self._items.items()])
if it:
it = '\n ' + it + ' '
- return 'Comment(\n pre={},\n items={{{}}}{})'.format(self.pre, it, end)
+ return f'Comment(\n pre={self.pre},\n items={{{it}}}{end})'
@property
- def items(self):
- # type: () -> Any
+ def items(self) -> Any:
return self._items
@property
- def end(self):
- # type: () -> Any
+ def end(self) -> Any:
return self._post
@end.setter
- def end(self, value):
- # type: (Any) -> None
+ def end(self, value: Any) -> None:
self._post = value
@property
- def pre(self):
- # type: () -> Any
+ def pre(self) -> Any:
return self._pre
@pre.setter
- def pre(self, value):
- # type: (Any) -> None
+ def pre(self, value: Any) -> None:
self._pre = value
- def get(self, item, pos):
- # type: (Any, Any) -> Any
+ def get(self, item: Any, pos: Any) -> Any:
x = self._items.get(item)
if x is None or len(x) < pos:
return None
return x[pos] # can be None
- def set(self, item, pos, value):
- # type: (Any, Any, Any) -> Any
+ def set(self, item: Any, pos: Any, value: Any) -> Any:
x = self._items.get(item)
if x is None:
self._items[item] = x = [None] * (pos + 1)
@@ -190,8 +171,7 @@ class Comment:
assert x[pos] is None
x[pos] = value
- def __contains__(self, x):
- # type: (Any) -> Any
+ def __contains__(self, x: Any) -> Any:
# test if a substring is in any of the attached comments
if self.comment:
if self.comment[0] and x in self.comment[0].value:
@@ -214,8 +194,7 @@ class Comment:
# to distinguish key from None
-def NoComment():
- # type: () -> None
+def NoComment() -> None:
pass
@@ -223,20 +202,16 @@ class Format:
__slots__ = ('_flow_style',)
attrib = format_attrib
- def __init__(self):
- # type: () -> None
- self._flow_style = None # type: Any
+ def __init__(self) -> None:
+ self._flow_style: Any = None
- def set_flow_style(self):
- # type: () -> None
+ def set_flow_style(self) -> None:
self._flow_style = True
- def set_block_style(self):
- # type: () -> None
+ def set_block_style(self) -> None:
self._flow_style = False
- def flow_style(self, default=None):
- # type: (Optional[Any]) -> Any
+ def flow_style(self, default: Optional[Any] = None) -> Any:
"""if default (the flow_style) is None, the flow style tacked on to
the object explicitly will be taken. If that is None as well the
default flow style rules the format down the line, or the type
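For reference, the flow style tacked onto an object via `fa` is what `set_flow_style()`/`set_block_style()` toggle. A minimal usage sketch with invented data (not part of the diff):

import sys
from ruamel.yaml import YAML

yaml = YAML()
data = yaml.load('a:\n  b: 1\n  c: 2\n')   # invented example document
data['a'].fa.set_flow_style()              # nested mapping now dumps as a: {b: 1, c: 2}
yaml.dump(data, sys.stdout)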
@@ -253,48 +228,40 @@ class LineCol:
attrib = line_col_attrib
- def __init__(self):
- # type: () -> None
+ def __init__(self) -> None:
self.line = None
self.col = None
- self.data = None # type: Optional[Dict[Any, Any]]
+ self.data: Optional[Dict[Any, Any]] = None
- def add_kv_line_col(self, key, data):
- # type: (Any, Any) -> None
+ def add_kv_line_col(self, key: Any, data: Any) -> None:
if self.data is None:
self.data = {}
self.data[key] = data
- def key(self, k):
- # type: (Any) -> Any
+ def key(self, k: Any) -> Any:
return self._kv(k, 0, 1)
- def value(self, k):
- # type: (Any) -> Any
+ def value(self, k: Any) -> Any:
return self._kv(k, 2, 3)
- def _kv(self, k, x0, x1):
- # type: (Any, Any, Any) -> Any
+ def _kv(self, k: Any, x0: Any, x1: Any) -> Any:
if self.data is None:
return None
data = self.data[k]
return data[x0], data[x1]
- def item(self, idx):
- # type: (Any) -> Any
+ def item(self, idx: Any) -> Any:
if self.data is None:
return None
return self.data[idx][0], self.data[idx][1]
- def add_idx_line_col(self, key, data):
- # type: (Any, Any) -> None
+ def add_idx_line_col(self, key: Any, data: Any) -> None:
if self.data is None:
self.data = {}
self.data[key] = data
- def __repr__(self):
- # type: () -> str
- return _F('LineCol({line}, {col})', line=self.line, col=self.col) # type: ignore
+ def __repr__(self) -> str:
+ return f'LineCol({self.line}, {self.col})'
class Tag:
@@ -303,13 +270,11 @@ class Tag:
__slots__ = ('value',)
attrib = tag_attrib
- def __init__(self):
- # type: () -> None
+ def __init__(self) -> None:
self.value = None
- def __repr__(self):
- # type: () -> Any
- return '{0.__class__.__name__}({0.value!r})'.format(self)
+ def __repr__(self) -> Any:
+ return f'{self.__class__.__name__}({self.value!r})'
class CommentedBase:
@@ -320,16 +285,14 @@ class CommentedBase:
setattr(self, Comment.attrib, Comment())
return getattr(self, Comment.attrib)
- def yaml_end_comment_extend(self, comment, clear=False):
- # type: (Any, bool) -> None
+ def yaml_end_comment_extend(self, comment: Any, clear: bool = False) -> None:
if comment is None:
return
if clear or self.ca.end is None:
self.ca.end = []
self.ca.end.extend(comment)
- def yaml_key_comment_extend(self, key, comment, clear=False):
- # type: (Any, Any, bool) -> None
+ def yaml_key_comment_extend(self, key: Any, comment: Any, clear: bool = False) -> None:
r = self.ca._items.setdefault(key, [None, None, None, None])
if clear or r[1] is None:
if comment[1] is not None:
@@ -339,8 +302,7 @@ class CommentedBase:
r[1].extend(comment[0])
r[0] = comment[0]
- def yaml_value_comment_extend(self, key, comment, clear=False):
- # type: (Any, Any, bool) -> None
+ def yaml_value_comment_extend(self, key: Any, comment: Any, clear: bool = False) -> None:
r = self.ca._items.setdefault(key, [None, None, None, None])
if clear or r[3] is None:
if comment[1] is not None:
@@ -350,8 +312,7 @@ class CommentedBase:
r[3].extend(comment[0])
r[2] = comment[0]
- def yaml_set_start_comment(self, comment, indent=0):
- # type: (Any, Any) -> None
+ def yaml_set_start_comment(self, comment: Any, indent: Any = 0) -> None:
"""overwrites any preceding comment lines on an object
expects comment to be without `#` and possibly have multiple lines
"""
@@ -369,17 +330,20 @@ class CommentedBase:
pre_comments.append(CommentToken(com + '\n', start_mark))
def yaml_set_comment_before_after_key(
- self, key, before=None, indent=0, after=None, after_indent=None
- ):
- # type: (Any, Any, Any, Any, Any) -> None
+ self,
+ key: Any,
+ before: Any = None,
+ indent: Any = 0,
+ after: Any = None,
+ after_indent: Any = None,
+ ) -> None:
"""
expects comment (before/after) to be without `#` and possibly have multiple lines
"""
from ruamel.yaml.error import CommentMark
from ruamel.yaml.tokens import CommentToken
- def comment_token(s, mark):
- # type: (Any, Any) -> Any
+ def comment_token(s: Any, mark: Any) -> Any:
# handle empty lines as having no comment
return CommentToken(('# ' if s else "") + s + '\n', mark)
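`yaml_set_comment_before_after_key`, retyped above, attaches the `before` lines above the key and the `after` lines below it, each at an optional indent. A hedged usage sketch (data invented, not part of the diff):

import sys
from ruamel.yaml import YAML

yaml = YAML()
data = yaml.load('a: 1\nb: 2\nc: 3\n')   # invented example document
# comment text again without '#'; after_indent defaults to indent + 2
data.yaml_set_comment_before_after_key('b', before='section two', after='end of section two')
yaml.dump(data, sys.stdout)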
@@ -407,8 +371,7 @@ class CommentedBase:
c[3].append(comment_token(com, start_mark)) # type: ignore
@property
- def fa(self):
- # type: () -> Any
+ def fa(self) -> Any:
"""format attribute
set_flow_style()/set_block_style()"""
@@ -416,8 +379,9 @@ class CommentedBase:
setattr(self, Format.attrib, Format())
return getattr(self, Format.attrib)
- def yaml_add_eol_comment(self, comment, key=NoComment, column=None):
- # type: (Any, Optional[Any], Optional[Any]) -> None
+ def yaml_add_eol_comment(
+ self, comment: Any, key: Optional[Any] = NoComment, column: Optional[Any] = None
+ ) -> None:
"""
there is a problem as eol comments should start with ' #'
(but at the beginning of the line the space doesn't have to be before
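The docstring above concerns the ' #' prefix of end-of-line comments; the method inserts it when the caller leaves it out. A small sketch (invented data, arbitrary column; not part of the diff):

import sys
from ruamel.yaml import YAML

yaml = YAML()
data = yaml.load('a: 1\nb: 2\n')                                # invented example document
data.yaml_add_eol_comment('set by admin', key='a', column=10)   # column is optional
yaml.dump(data, sys.stdout)                                     # roughly: a: 1      # set by admin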
@@ -442,56 +406,46 @@ class CommentedBase:
self._yaml_add_eol_comment(ct, key=key)
@property
- def lc(self):
- # type: () -> Any
+ def lc(self) -> Any:
if not hasattr(self, LineCol.attrib):
setattr(self, LineCol.attrib, LineCol())
return getattr(self, LineCol.attrib)
- def _yaml_set_line_col(self, line, col):
- # type: (Any, Any) -> None
+ def _yaml_set_line_col(self, line: Any, col: Any) -> None:
self.lc.line = line
self.lc.col = col
- def _yaml_set_kv_line_col(self, key, data):
- # type: (Any, Any) -> None
+ def _yaml_set_kv_line_col(self, key: Any, data: Any) -> None:
self.lc.add_kv_line_col(key, data)
- def _yaml_set_idx_line_col(self, key, data):
- # type: (Any, Any) -> None
+ def _yaml_set_idx_line_col(self, key: Any, data: Any) -> None:
self.lc.add_idx_line_col(key, data)
@property
- def anchor(self):
- # type: () -> Any
+ def anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
setattr(self, Anchor.attrib, Anchor())
return getattr(self, Anchor.attrib)
- def yaml_anchor(self):
- # type: () -> Any
+ def yaml_anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
return None
return self.anchor
- def yaml_set_anchor(self, value, always_dump=False):
- # type: (Any, bool) -> None
+ def yaml_set_anchor(self, value: Any, always_dump: bool = False) -> None:
self.anchor.value = value
self.anchor.always_dump = always_dump
@property
- def tag(self):
- # type: () -> Any
+ def tag(self) -> Any:
if not hasattr(self, Tag.attrib):
setattr(self, Tag.attrib, Tag())
return getattr(self, Tag.attrib)
- def yaml_set_tag(self, value):
- # type: (Any) -> None
+ def yaml_set_tag(self, value: Any) -> None:
self.tag.value = value
- def copy_attributes(self, t, memo=None):
- # type: (Any, Any) -> None
+ def copy_attributes(self, t: Any, memo: Any = None) -> None:
# fmt: off
for a in [Comment.attrib, Format.attrib, LineCol.attrib, Anchor.attrib,
Tag.attrib, merge_attrib]:
@@ -502,32 +456,26 @@ class CommentedBase:
setattr(t, a, getattr(self, a))
# fmt: on
- def _yaml_add_eol_comment(self, comment, key):
- # type: (Any, Any) -> None
+ def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
raise NotImplementedError
- def _yaml_get_pre_comment(self):
- # type: () -> Any
+ def _yaml_get_pre_comment(self) -> Any:
raise NotImplementedError
- def _yaml_get_column(self, key):
- # type: (Any) -> Any
+ def _yaml_get_column(self, key: Any) -> Any:
raise NotImplementedError
class CommentedSeq(MutableSliceableSequence, list, CommentedBase): # type: ignore
__slots__ = (Comment.attrib, '_lst')
- def __init__(self, *args, **kw):
- # type: (Any, Any) -> None
+ def __init__(self, *args: Any, **kw: Any) -> None:
list.__init__(self, *args, **kw)
- def __getsingleitem__(self, idx):
- # type: (Any) -> Any
+ def __getsingleitem__(self, idx: Any) -> Any:
return list.__getitem__(self, idx)
- def __setsingleitem__(self, idx, value):
- # type: (Any, Any) -> None
+ def __setsingleitem__(self, idx: Any, value: Any) -> None:
# try to preserve the scalarstring type if setting an existing key to a new value
if idx < len(self):
if (
@@ -538,8 +486,7 @@ class CommentedSeq(MutableSliceableSequence, list, CommentedBase): # type: igno
value = type(self[idx])(value)
list.__setitem__(self, idx, value)
- def __delsingleitem__(self, idx=None):
- # type: (Any) -> Any
+ def __delsingleitem__(self, idx: Any = None) -> Any:
list.__delitem__(self, idx)
self.ca.items.pop(idx, None) # might not be there -> default value
for list_index in sorted(self.ca.items):
@@ -547,12 +494,10 @@ class CommentedSeq(MutableSliceableSequence, list, CommentedBase): # type: igno
continue
self.ca.items[list_index - 1] = self.ca.items.pop(list_index)
- def __len__(self):
- # type: () -> int
+ def __len__(self) -> int:
return list.__len__(self)
- def insert(self, idx, val):
- # type: (Any, Any) -> None
+ def insert(self, idx: Any, val: Any) -> None:
"""the comments after the insertion have to move forward"""
list.insert(self, idx, val)
for list_index in sorted(self.ca.items, reverse=True):
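The comment-index shifting in `CommentedSeq.insert` above is what keeps end-of-line comments attached to their original items. Sketch with invented data (not part of the diff):

import sys
from ruamel.yaml import YAML

yaml = YAML()
data = yaml.load('- a  # first\n- c  # third\n')   # invented example document
data.insert(1, 'b')
yaml.dump(data, sys.stdout)   # the '# third' comment stays on 'c', not on the new 'b'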
@@ -560,31 +505,25 @@ class CommentedSeq(MutableSliceableSequence, list, CommentedBase): # type: igno
break
self.ca.items[list_index + 1] = self.ca.items.pop(list_index)
- def extend(self, val):
- # type: (Any) -> None
+ def extend(self, val: Any) -> None:
list.extend(self, val)
- def __eq__(self, other):
- # type: (Any) -> bool
+ def __eq__(self, other: Any) -> bool:
return list.__eq__(self, other)
- def _yaml_add_comment(self, comment, key=NoComment):
- # type: (Any, Optional[Any]) -> None
+ def _yaml_add_comment(self, comment: Any, key: Optional[Any] = NoComment) -> None:
if key is not NoComment:
self.yaml_key_comment_extend(key, comment)
else:
self.ca.comment = comment
- def _yaml_add_eol_comment(self, comment, key):
- # type: (Any, Any) -> None
+ def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
self._yaml_add_comment(comment, key=key)
- def _yaml_get_columnX(self, key):
- # type: (Any) -> Any
+ def _yaml_get_columnX(self, key: Any) -> Any:
return self.ca.items[key][0].start_mark.column
- def _yaml_get_column(self, key):
- # type: (Any) -> Any
+ def _yaml_get_column(self, key: Any) -> Any:
column = None
sel_idx = None
pre, post = key - 1, key + 1
@@ -604,26 +543,23 @@ class CommentedSeq(MutableSliceableSequence, list, CommentedBase): # type: igno
column = self._yaml_get_columnX(sel_idx)
return column
- def _yaml_get_pre_comment(self):
- # type: () -> Any
- pre_comments = [] # type: List[Any]
+ def _yaml_get_pre_comment(self) -> Any:
+ pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
pre_comments = self.ca.comment[1]
return pre_comments
- def _yaml_clear_pre_comment(self):
- # type: () -> Any
- pre_comments = [] # type: List[Any]
+ def _yaml_clear_pre_comment(self) -> Any:
+ pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
self.ca.comment[1] = pre_comments
return pre_comments
- def __deepcopy__(self, memo):
- # type: (Any) -> Any
+ def __deepcopy__(self, memo: Any) -> Any:
res = self.__class__()
memo[id(self)] = res
for k in self:
@@ -631,12 +567,10 @@ class CommentedSeq(MutableSliceableSequence, list, CommentedBase): # type: igno
self.copy_attributes(res, memo=memo)
return res
- def __add__(self, other):
- # type: (Any) -> Any
+ def __add__(self, other: Any) -> Any:
return list.__add__(self, other)
- def sort(self, key=None, reverse=False):
- # type: (Any, bool) -> None
+ def sort(self, key: Any = None, reverse: bool = False) -> None:
if key is None:
tmp_lst = sorted(zip(self, range(len(self))), reverse=reverse)
list.__init__(self, [x[0] for x in tmp_lst])
@@ -652,31 +586,26 @@ class CommentedSeq(MutableSliceableSequence, list, CommentedBase): # type: igno
if old_index in itm:
self.ca.items[idx] = itm[old_index]
- def __repr__(self):
- # type: () -> Any
+ def __repr__(self) -> Any:
return list.__repr__(self)
class CommentedKeySeq(tuple, CommentedBase): # type: ignore
"""This primarily exists to be able to roundtrip keys that are sequences"""
- def _yaml_add_comment(self, comment, key=NoComment):
- # type: (Any, Optional[Any]) -> None
+ def _yaml_add_comment(self, comment: Any, key: Optional[Any] = NoComment) -> None:
if key is not NoComment:
self.yaml_key_comment_extend(key, comment)
else:
self.ca.comment = comment
- def _yaml_add_eol_comment(self, comment, key):
- # type: (Any, Any) -> None
+ def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
self._yaml_add_comment(comment, key=key)
- def _yaml_get_columnX(self, key):
- # type: (Any) -> Any
+ def _yaml_get_columnX(self, key: Any) -> Any:
return self.ca.items[key][0].start_mark.column
- def _yaml_get_column(self, key):
- # type: (Any) -> Any
+ def _yaml_get_column(self, key: Any) -> Any:
column = None
sel_idx = None
pre, post = key - 1, key + 1
@@ -696,18 +625,16 @@ class CommentedKeySeq(tuple, CommentedBase): # type: ignore
column = self._yaml_get_columnX(sel_idx)
return column
- def _yaml_get_pre_comment(self):
- # type: () -> Any
- pre_comments = [] # type: List[Any]
+ def _yaml_get_pre_comment(self) -> Any:
+ pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
pre_comments = self.ca.comment[1]
return pre_comments
- def _yaml_clear_pre_comment(self):
- # type: () -> Any
- pre_comments = [] # type: List[Any]
+ def _yaml_clear_pre_comment(self) -> Any:
+ pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
@@ -718,12 +645,10 @@ class CommentedKeySeq(tuple, CommentedBase): # type: ignore
class CommentedMapView(Sized):
__slots__ = ('_mapping',)
- def __init__(self, mapping):
- # type: (Any) -> None
+ def __init__(self, mapping: Any) -> None:
self._mapping = mapping
- def __len__(self):
- # type: () -> int
+ def __len__(self) -> int:
count = len(self._mapping)
return count
@@ -732,16 +657,14 @@ class CommentedMapKeysView(CommentedMapView, Set): # type: ignore
__slots__ = ()
@classmethod
- def _from_iterable(self, it):
- # type: (Any) -> Any
+ def _from_iterable(self, it: Any) -> Any:
return set(it)
- def __contains__(self, key):
- # type: (Any) -> Any
+ def __contains__(self, key: Any) -> Any:
return key in self._mapping
- def __iter__(self):
- # type: () -> Any # yield from self._mapping # not in py27, pypy
+ def __iter__(self) -> Any:
+ # yield from self._mapping # not in py27, pypy
# for x in self._mapping._keys():
for x in self._mapping:
yield x
@@ -751,12 +674,10 @@ class CommentedMapItemsView(CommentedMapView, Set): # type: ignore
__slots__ = ()
@classmethod
- def _from_iterable(self, it):
- # type: (Any) -> Any
+ def _from_iterable(self, it: Any) -> Any:
return set(it)
- def __contains__(self, item):
- # type: (Any) -> Any
+ def __contains__(self, item: Any) -> Any:
key, value = item
try:
v = self._mapping[key]
@@ -765,8 +686,7 @@ class CommentedMapItemsView(CommentedMapView, Set): # type: ignore
else:
return v == value
- def __iter__(self):
- # type: () -> Any
+ def __iter__(self) -> Any:
for key in self._mapping._keys():
yield (key, self._mapping[key])
@@ -774,15 +694,13 @@ class CommentedMapItemsView(CommentedMapView, Set): # type: ignore
class CommentedMapValuesView(CommentedMapView):
__slots__ = ()
- def __contains__(self, value):
- # type: (Any) -> Any
+ def __contains__(self, value: Any) -> Any:
for key in self._mapping:
if value == self._mapping[key]:
return True
return False
- def __iter__(self):
- # type: () -> Any
+ def __iter__(self) -> Any:
for key in self._mapping._keys():
yield self._mapping[key]
@@ -790,14 +708,14 @@ class CommentedMapValuesView(CommentedMapView):
class CommentedMap(ordereddict, CommentedBase):
__slots__ = (Comment.attrib, '_ok', '_ref')
- def __init__(self, *args, **kw):
- # type: (Any, Any) -> None
- self._ok = set() # type: MutableSet[Any] # own keys
- self._ref = [] # type: List[CommentedMap]
+ def __init__(self, *args: Any, **kw: Any) -> None:
+ self._ok: MutableSet[Any] = set() # own keys
+ self._ref: List[CommentedMap] = []
ordereddict.__init__(self, *args, **kw)
- def _yaml_add_comment(self, comment, key=NoComment, value=NoComment):
- # type: (Any, Optional[Any], Optional[Any]) -> None
+ def _yaml_add_comment(
+ self, comment: Any, key: Optional[Any] = NoComment, value: Optional[Any] = NoComment
+ ) -> None:
"""values is set to key to indicate a value attachment of comment"""
if key is not NoComment:
self.yaml_key_comment_extend(key, comment)
@@ -807,17 +725,14 @@ class CommentedMap(ordereddict, CommentedBase):
else:
self.ca.comment = comment
- def _yaml_add_eol_comment(self, comment, key):
- # type: (Any, Any) -> None
+ def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
"""add on the value line, with value specified by the key"""
self._yaml_add_comment(comment, value=key)
- def _yaml_get_columnX(self, key):
- # type: (Any) -> Any
+ def _yaml_get_columnX(self, key: Any) -> Any:
return self.ca.items[key][2].start_mark.column
- def _yaml_get_column(self, key):
- # type: (Any) -> Any
+ def _yaml_get_column(self, key: Any) -> Any:
column = None
sel_idx = None
pre, post, last = None, None, None
@@ -844,26 +759,23 @@ class CommentedMap(ordereddict, CommentedBase):
column = self._yaml_get_columnX(sel_idx)
return column
- def _yaml_get_pre_comment(self):
- # type: () -> Any
- pre_comments = [] # type: List[Any]
+ def _yaml_get_pre_comment(self) -> Any:
+ pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
pre_comments = self.ca.comment[1]
return pre_comments
- def _yaml_clear_pre_comment(self):
- # type: () -> Any
- pre_comments = [] # type: List[Any]
+ def _yaml_clear_pre_comment(self) -> Any:
+ pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
self.ca.comment[1] = pre_comments
return pre_comments
- def update(self, *vals, **kw):
- # type: (Any, Any) -> None
+ def update(self, *vals: Any, **kw: Any) -> None:
try:
ordereddict.update(self, *vals, **kw)
except TypeError:
@@ -880,8 +792,7 @@ class CommentedMap(ordereddict, CommentedBase):
if kw:
self._ok.add(*kw.keys())
- def insert(self, pos, key, value, comment=None):
- # type: (Any, Any, Any, Optional[Any]) -> None
+ def insert(self, pos: Any, key: Any, value: Any, comment: Optional[Any] = None) -> None:
"""insert key value into given position
attach comment if provided
"""
@@ -895,15 +806,13 @@ class CommentedMap(ordereddict, CommentedBase):
if comment is not None:
self.yaml_add_eol_comment(comment, key=key)
- def mlget(self, key, default=None, list_ok=False):
- # type: (Any, Any, Any) -> Any
+ def mlget(self, key: Any, default: Any = None, list_ok: Any = False) -> Any:
"""multi-level get that expects dicts within dicts"""
if not isinstance(key, list):
return self.get(key, default)
# assume that the key is a list of recursively accessible dicts
- def get_one_level(key_list, level, d):
- # type: (Any, Any, Any) -> Any
+ def get_one_level(key_list: Any, level: Any, d: Any) -> Any:
if not list_ok:
assert isinstance(d, dict)
if level >= len(key_list):
@@ -921,8 +830,7 @@ class CommentedMap(ordereddict, CommentedBase):
raise
return default
- def __getitem__(self, key):
- # type: (Any) -> Any
+ def __getitem__(self, key: Any) -> Any:
try:
return ordereddict.__getitem__(self, key)
except KeyError:
@@ -931,8 +839,7 @@ class CommentedMap(ordereddict, CommentedBase):
return merged[1][key]
raise
- def __setitem__(self, key, value):
- # type: (Any, Any) -> None
+ def __setitem__(self, key: Any, value: Any) -> None:
# try to preserve the scalarstring type if setting an existing key to a new value
if key in self:
if (
@@ -944,35 +851,36 @@ class CommentedMap(ordereddict, CommentedBase):
ordereddict.__setitem__(self, key, value)
self._ok.add(key)
- def _unmerged_contains(self, key):
- # type: (Any) -> Any
+ def _unmerged_contains(self, key: Any) -> Any:
if key in self._ok:
return True
return None
- def __contains__(self, key):
- # type: (Any) -> bool
+ def __contains__(self, key: Any) -> bool:
return bool(ordereddict.__contains__(self, key))
- def get(self, key, default=None):
- # type: (Any, Any) -> Any
+ def get(self, key: Any, default: Any = None) -> Any:
try:
return self.__getitem__(key)
except: # NOQA
return default
- def __repr__(self):
- # type: () -> Any
- return ordereddict.__repr__(self).replace('CommentedMap', 'ordereddict')
+ def __repr__(self) -> Any:
+ res = "ordereddict(["
+ sep = ''
+ for k, v in self.items():
+ res += f'{sep}({k!r}, {v!r})'
+ if not sep:
+ sep = ', '
+ res += '])'
+ return res
- def non_merged_items(self):
- # type: () -> Any
+ def non_merged_items(self) -> Any:
for x in ordereddict.__iter__(self):
if x in self._ok:
yield x, ordereddict.__getitem__(self, x)
- def __delitem__(self, key):
- # type: (Any) -> None
+ def __delitem__(self, key: Any) -> None:
# for merged in getattr(self, merge_attrib, []):
# if key in merged[1]:
# value = merged[1][key]
@@ -991,73 +899,60 @@ class CommentedMap(ordereddict, CommentedBase):
for referer in self._ref:
referer.update_key_value(key)
- def __iter__(self):
- # type: () -> Any
+ def __iter__(self) -> Any:
for x in ordereddict.__iter__(self):
yield x
- def _keys(self):
- # type: () -> Any
+ def _keys(self) -> Any:
for x in ordereddict.__iter__(self):
yield x
- def __len__(self):
- # type: () -> int
+ def __len__(self) -> int:
return int(ordereddict.__len__(self))
- def __eq__(self, other):
- # type: (Any) -> bool
+ def __eq__(self, other: Any) -> bool:
return bool(dict(self) == other)
- def keys(self):
- # type: () -> Any
+ def keys(self) -> Any:
return CommentedMapKeysView(self)
- def values(self):
- # type: () -> Any
+ def values(self) -> Any:
return CommentedMapValuesView(self)
- def _items(self):
- # type: () -> Any
+ def _items(self) -> Any:
for x in ordereddict.__iter__(self):
yield x, ordereddict.__getitem__(self, x)
- def items(self):
- # type: () -> Any
+ def items(self) -> Any:
return CommentedMapItemsView(self)
@property
- def merge(self):
- # type: () -> Any
+ def merge(self) -> Any:
if not hasattr(self, merge_attrib):
setattr(self, merge_attrib, [])
return getattr(self, merge_attrib)
- def copy(self):
- # type: () -> Any
+ def copy(self) -> Any:
x = type(self)() # update doesn't work
for k, v in self._items():
x[k] = v
self.copy_attributes(x)
return x
- def add_referent(self, cm):
- # type: (Any) -> None
+ def add_referent(self, cm: Any) -> None:
if cm not in self._ref:
self._ref.append(cm)
- def add_yaml_merge(self, value):
- # type: (Any) -> None
+ def add_yaml_merge(self, value: Any) -> None:
for v in value:
v[1].add_referent(self)
- for k, v in v[1].items():
- if ordereddict.__contains__(self, k):
+ for k1, v1 in v[1].items():
+ if ordereddict.__contains__(self, k1):
continue
- ordereddict.__setitem__(self, k, v)
+ ordereddict.__setitem__(self, k1, v1)
self.merge.extend(value)
- def update_key_value(self, key):
- # type: (Any) -> None
+ def update_key_value(self, key: Any) -> None:
if key in self._ok:
return
for v in self.merge:
@@ -1066,8 +961,7 @@ class CommentedMap(ordereddict, CommentedBase):
return
ordereddict.__delitem__(self, key)
- def __deepcopy__(self, memo):
- # type: (Any) -> Any
+ def __deepcopy__(self, memo: Any) -> Any:
res = self.__class__()
memo[id(self)] = res
for k in self:
@@ -1078,17 +972,15 @@ class CommentedMap(ordereddict, CommentedBase):
# based on brownie mappings
@classmethod # type: ignore
-def raise_immutable(cls, *args, **kwargs):
- # type: (Any, *Any, **Any) -> None
- raise TypeError('{} objects are immutable'.format(cls.__name__))
+def raise_immutable(cls: Any, *args: Any, **kwargs: Any) -> None:
+ raise TypeError(f'{cls.__name__} objects are immutable')
class CommentedKeyMap(CommentedBase, Mapping): # type: ignore
__slots__ = Comment.attrib, '_od'
"""This primarily exists to be able to roundtrip keys that are mappings"""
- def __init__(self, *args, **kw):
- # type: (Any, Any) -> None
+ def __init__(self, *args: Any, **kw: Any) -> None:
if hasattr(self, '_od'):
raise_immutable(self)
try:
@@ -1099,51 +991,41 @@ class CommentedKeyMap(CommentedBase, Mapping): # type: ignore
__delitem__ = __setitem__ = clear = pop = popitem = setdefault = update = raise_immutable
# need to implement __getitem__, __iter__ and __len__
- def __getitem__(self, index):
- # type: (Any) -> Any
+ def __getitem__(self, index: Any) -> Any:
return self._od[index]
- def __iter__(self):
- # type: () -> Iterator[Any]
+ def __iter__(self) -> Iterator[Any]:
for x in self._od.__iter__():
yield x
- def __len__(self):
- # type: () -> int
+ def __len__(self) -> int:
return len(self._od)
- def __hash__(self):
- # type: () -> Any
+ def __hash__(self) -> Any:
return hash(tuple(self.items()))
- def __repr__(self):
- # type: () -> Any
+ def __repr__(self) -> Any:
if not hasattr(self, merge_attrib):
return self._od.__repr__()
return 'ordereddict(' + repr(list(self._od.items())) + ')'
@classmethod
- def fromkeys(keys, v=None):
- # type: (Any, Any) -> Any
+ def fromkeys(cls, keys: Any, v: Any = None) -> Any:
return CommentedKeyMap(dict.fromkeys(keys, v))
- def _yaml_add_comment(self, comment, key=NoComment):
- # type: (Any, Optional[Any]) -> None
+ def _yaml_add_comment(self, comment: Any, key: Optional[Any] = NoComment) -> None:
if key is not NoComment:
self.yaml_key_comment_extend(key, comment)
else:
self.ca.comment = comment
- def _yaml_add_eol_comment(self, comment, key):
- # type: (Any, Any) -> None
+ def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
self._yaml_add_comment(comment, key=key)
- def _yaml_get_columnX(self, key):
- # type: (Any) -> Any
+ def _yaml_get_columnX(self, key: Any) -> Any:
return self.ca.items[key][0].start_mark.column
- def _yaml_get_column(self, key):
- # type: (Any) -> Any
+ def _yaml_get_column(self, key: Any) -> Any:
column = None
sel_idx = None
pre, post = key - 1, key + 1
@@ -1163,9 +1045,8 @@ class CommentedKeyMap(CommentedBase, Mapping): # type: ignore
column = self._yaml_get_columnX(sel_idx)
return column
- def _yaml_get_pre_comment(self):
- # type: () -> Any
- pre_comments = [] # type: List[Any]
+ def _yaml_get_pre_comment(self) -> Any:
+ pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
@@ -1180,15 +1061,15 @@ class CommentedOrderedMap(CommentedMap):
class CommentedSet(MutableSet, CommentedBase): # type: ignore # NOQA
__slots__ = Comment.attrib, 'odict'
- def __init__(self, values=None):
- # type: (Any) -> None
+ def __init__(self, values: Any = None) -> None:
self.odict = ordereddict()
MutableSet.__init__(self)
if values is not None:
- self |= values # type: ignore
+ self |= values
- def _yaml_add_comment(self, comment, key=NoComment, value=NoComment):
- # type: (Any, Optional[Any], Optional[Any]) -> None
+ def _yaml_add_comment(
+ self, comment: Any, key: Optional[Any] = NoComment, value: Optional[Any] = NoComment
+ ) -> None:
"""values is set to key to indicate a value attachment of comment"""
if key is not NoComment:
self.yaml_key_comment_extend(key, comment)
@@ -1198,69 +1079,65 @@ class CommentedSet(MutableSet, CommentedBase): # type: ignore # NOQA
else:
self.ca.comment = comment
- def _yaml_add_eol_comment(self, comment, key):
- # type: (Any, Any) -> None
+ def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
"""add on the value line, with value specified by the key"""
self._yaml_add_comment(comment, value=key)
- def add(self, value):
- # type: (Any) -> None
+ def add(self, value: Any) -> None:
"""Add an element."""
self.odict[value] = None
- def discard(self, value):
- # type: (Any) -> None
+ def discard(self, value: Any) -> None:
"""Remove an element. Do not raise an exception if absent."""
del self.odict[value]
- def __contains__(self, x):
- # type: (Any) -> Any
+ def __contains__(self, x: Any) -> Any:
return x in self.odict
- def __iter__(self):
- # type: () -> Any
+ def __iter__(self) -> Any:
for x in self.odict:
yield x
- def __len__(self):
- # type: () -> int
+ def __len__(self) -> int:
return len(self.odict)
- def __repr__(self):
- # type: () -> str
- return 'set({0!r})'.format(self.odict.keys())
+ def __repr__(self) -> str:
+ return f'set({self.odict.keys()!r})'
class TaggedScalar(CommentedBase):
# the value and style attributes are set during roundtrip construction
- def __init__(self, value=None, style=None, tag=None):
- # type: (Any, Any, Any) -> None
+ def __init__(self, value: Any = None, style: Any = None, tag: Any = None) -> None:
self.value = value
self.style = style
if tag is not None:
self.yaml_set_tag(tag)
- def __str__(self):
- # type: () -> Any
+ def __str__(self) -> Any:
return self.value
+ def count(self, s: str, start: Optional[int] = None, end: Optional[int] = None) -> Any:
+ return self.value.count(s, start, end)
+
+ def __getitem__(self, pos: int) -> Any:
+ return self.value[pos]
+
-def dump_comments(d, name="", sep='.', out=sys.stdout):
- # type: (Any, str, str, Any) -> None
+def dump_comments(d: Any, name: str = "", sep: str = '.', out: Any = sys.stdout) -> None:
"""
recursively dump comments, all but the toplevel preceded by the path
in dotted form x.0.a
"""
if isinstance(d, dict) and hasattr(d, 'ca'):
if name:
- out.write('{} {}\n'.format(name, type(d)))
- out.write('{!r}\n'.format(d.ca)) # type: ignore
+ out.write(f'{name} {type(d)}\n')
+ out.write(f'{d.ca!r}\n') # type: ignore
for k in d:
dump_comments(d[k], name=(name + sep + str(k)) if name else k, sep=sep, out=out)
elif isinstance(d, list) and hasattr(d, 'ca'):
if name:
- out.write('{} {}\n'.format(name, type(d)))
- out.write('{!r}\n'.format(d.ca)) # type: ignore
+ out.write(f'{name} {type(d)}\n')
+ out.write(f'{d.ca!r}\n') # type: ignore
for idx, k in enumerate(d):
dump_comments(
k, name=(name + sep + str(idx)) if name else str(idx), sep=sep, out=out
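
For reference, a minimal usage sketch of the dump_comments helper retrofitted above. This is not part of the commit; the sample document and the choice of output stream are made up, and it assumes the round-trip YAML() API:

import sys
from ruamel.yaml import YAML
from ruamel.yaml.comments import dump_comments

yaml_doc = """\
a: 1    # eol comment on a
b:
  # comment preceding c
  c: 3
"""

yaml = YAML()                          # round-trip is the default typ
data = yaml.load(yaml_doc)             # CommentedMap, carrying its .ca comment attribute
dump_comments(data, out=sys.stdout)    # prints each container's ca; nested ones get a dotted path like 'b'
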
diff --git a/compat.py b/compat.py
index 87d5e35..c32d105 100644
--- a/compat.py
+++ b/compat.py
@@ -11,11 +11,15 @@ import collections.abc
# fmt: off
-if False: # MYPY
- from typing import Any, Dict, Optional, List, Union, BinaryIO, IO, Text, Tuple # NOQA
- from typing import Optional # NOQA
+from typing import Any, Dict, Optional, List, Union, BinaryIO, IO, Text, Tuple # NOQA
+from typing import Optional # NOQA
+try:
+ from typing import SupportsIndex as SupportsIndex # in order to reexport for mypy
+except ImportError:
+ SupportsIndex = int # type: ignore
# fmt: on
+
_DEFAULT_YAML_VERSION = (1, 2)
try:
@@ -29,8 +33,7 @@ except ImportError:
class ordereddict(OrderedDict): # type: ignore
if not hasattr(OrderedDict, 'insert'):
- def insert(self, pos, key, value):
- # type: (int, Any, Any) -> None
+ def insert(self, pos: int, key: Any, value: Any) -> None:
if pos >= len(self):
self[key] = value
return
@@ -47,34 +50,20 @@ class ordereddict(OrderedDict): # type: ignore
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
-
-# replace with f-strings when 3.5 support is dropped
-# ft = '42'
-# assert _F('abc {ft!r}', ft=ft) == 'abc %r' % ft
-# 'abc %r' % ft -> _F('abc {ft!r}' -> f'abc {ft!r}'
-def _F(s, *superfluous, **kw):
- # type: (Any, Any, Any) -> Any
- if superfluous:
- raise TypeError
- return s.format(**kw)
-
-
StringIO = io.StringIO
BytesIO = io.BytesIO
-if False: # MYPY
- # StreamType = Union[BinaryIO, IO[str], IO[unicode], StringIO]
- # StreamType = Union[BinaryIO, IO[str], StringIO] # type: ignore
- StreamType = Any
+# StreamType = Union[BinaryIO, IO[str], IO[unicode], StringIO]
+# StreamType = Union[BinaryIO, IO[str], StringIO] # type: ignore
+StreamType = Any
- StreamTextType = StreamType # Union[Text, StreamType]
- VersionType = Union[List[int], str, Tuple[int, int]]
+StreamTextType = StreamType # Union[Text, StreamType]
+VersionType = Union[List[int], str, Tuple[int, int]]
builtins_module = 'builtins'
-def with_metaclass(meta, *bases):
- # type: (Any, Any) -> Any
+def with_metaclass(meta: Any, *bases: Any) -> Any:
"""Create a base class with a metaclass."""
return meta('NewBase', bases, {})
@@ -84,7 +73,7 @@ DBG_EVENT = 2
DBG_NODE = 4
-_debug = None # type: Optional[int]
+_debug: Optional[int] = None
if 'RUAMELDEBUG' in os.environ:
_debugx = os.environ.get('RUAMELDEBUG')
if _debugx is None:
@@ -96,25 +85,21 @@ if 'RUAMELDEBUG' in os.environ:
if bool(_debug):
class ObjectCounter:
- def __init__(self):
- # type: () -> None
- self.map = {} # type: Dict[Any, Any]
+ def __init__(self) -> None:
+ self.map: Dict[Any, Any] = {}
- def __call__(self, k):
- # type: (Any) -> None
+ def __call__(self, k: Any) -> None:
self.map[k] = self.map.get(k, 0) + 1
- def dump(self):
- # type: () -> None
+ def dump(self) -> None:
for k in sorted(self.map):
- sys.stdout.write('{} -> {}'.format(k, self.map[k]))
+ sys.stdout.write(f'{k} -> {self.map[k]}')
object_counter = ObjectCounter()
# used from yaml util when testing
-def dbg(val=None):
- # type: (Any) -> Any
+def dbg(val: Any = None) -> Any:
global _debug
if _debug is None:
# set to true or false
@@ -129,14 +114,12 @@ def dbg(val=None):
class Nprint:
- def __init__(self, file_name=None):
- # type: (Any) -> None
- self._max_print = None # type: Any
- self._count = None # type: Any
+ def __init__(self, file_name: Any = None) -> None:
+ self._max_print: Any = None
+ self._count: Any = None
self._file_name = file_name
- def __call__(self, *args, **kw):
- # type: (Any, Any) -> None
+ def __call__(self, *args: Any, **kw: Any) -> None:
if not bool(_debug):
return
out = sys.stdout if self._file_name is None else open(self._file_name, 'a')
@@ -157,13 +140,11 @@ class Nprint:
if self._file_name:
out.close()
- def set_max_print(self, i):
- # type: (int) -> None
+ def set_max_print(self, i: int) -> None:
self._max_print = i
self._count = None
- def fp(self, mode='a'):
- # type: (str) -> Any
+ def fp(self, mode: str = 'a') -> Any:
out = sys.stdout if self._file_name is None else open(self._file_name, mode)
return out
@@ -174,8 +155,7 @@ nprintf = Nprint('/var/tmp/ruamel.yaml.log')
# char checkers following production rules
-def check_namespace_char(ch):
- # type: (Any) -> bool
+def check_namespace_char(ch: Any) -> bool:
if '\x21' <= ch <= '\x7E': # ! to ~
return True
if '\xA0' <= ch <= '\uD7FF':
@@ -187,15 +167,13 @@ def check_namespace_char(ch):
return False
-def check_anchorname_char(ch):
- # type: (Any) -> bool
+def check_anchorname_char(ch: Any) -> bool:
if ch in ',[]{}':
return False
return check_namespace_char(ch)
-def version_tnf(t1, t2=None):
- # type: (Any, Any) -> Any
+def version_tnf(t1: Any, t2: Any = None) -> Any:
"""
return True if ruamel.yaml version_info < t1, None if t2 is specified and bigger else False
"""
@@ -211,14 +189,12 @@ def version_tnf(t1, t2=None):
class MutableSliceableSequence(collections.abc.MutableSequence): # type: ignore
__slots__ = ()
- def __getitem__(self, index):
- # type: (Any) -> Any
+ def __getitem__(self, index: Any) -> Any:
if not isinstance(index, slice):
return self.__getsingleitem__(index)
return type(self)([self[i] for i in range(*index.indices(len(self)))]) # type: ignore
- def __setitem__(self, index, value):
- # type: (Any, Any) -> None
+ def __setitem__(self, index: Any, value: Any) -> None:
if not isinstance(index, slice):
return self.__setsingleitem__(index, value)
assert iter(value)
@@ -233,19 +209,16 @@ class MutableSliceableSequence(collections.abc.MutableSequence): # type: ignore
# need to test before changing, in case TypeError is caught
if nr_assigned_items < len(value):
raise TypeError(
- 'too many elements in value {} < {}'.format(nr_assigned_items, len(value))
+ f'too many elements in value {nr_assigned_items} < {len(value)}'
)
elif nr_assigned_items > len(value):
raise TypeError(
- 'not enough elements in value {} > {}'.format(
- nr_assigned_items, len(value)
- )
+ f'not enough elements in value {nr_assigned_items} > {len(value)}'
)
for idx, i in enumerate(range(*range_parms)):
self[i] = value[idx]
- def __delitem__(self, index):
- # type: (Any) -> None
+ def __delitem__(self, index: Any) -> None:
if not isinstance(index, slice):
return self.__delsingleitem__(index)
# nprint(index.start, index.stop, index.step, index.indices(len(self)))
@@ -253,16 +226,13 @@ class MutableSliceableSequence(collections.abc.MutableSequence): # type: ignore
del self[i]
@abstractmethod
- def __getsingleitem__(self, index):
- # type: (Any) -> Any
+ def __getsingleitem__(self, index: Any) -> Any:
raise IndexError
@abstractmethod
- def __setsingleitem__(self, index, value):
- # type: (Any, Any) -> None
+ def __setsingleitem__(self, index: Any, value: Any) -> None:
raise IndexError
@abstractmethod
- def __delsingleitem__(self, index):
- # type: (Any) -> None
+ def __delsingleitem__(self, index: Any) -> None:
raise IndexError
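
The _F helper removed above only deferred str.format with keyword arguments until f-strings could be used everywhere. A small sketch of the equivalence the rewritten call sites rely on (the message text mirrors the ones in constructor.py):

exc = ValueError('boom')

# what _F('failed to decode base64 data: {exc!s}', exc=exc) evaluated to
old_style = 'failed to decode base64 data: {exc!s}'.format(exc=exc)

# the direct f-string now used at the call sites
new_style = f'failed to decode base64 data: {exc!s}'

assert old_style == new_style == 'failed to decode base64 data: boom'
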
diff --git a/composer.py b/composer.py
index bad132a..c943c1b 100644
--- a/composer.py
+++ b/composer.py
@@ -3,7 +3,7 @@
import warnings
from ruamel.yaml.error import MarkedYAMLError, ReusedAnchorWarning
-from ruamel.yaml.compat import _F, nprint, nprintf # NOQA
+from ruamel.yaml.compat import nprint, nprintf # NOQA
from ruamel.yaml.events import (
StreamStartEvent,
@@ -17,8 +17,7 @@ from ruamel.yaml.events import (
)
from ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode
-if False: # MYPY
- from typing import Any, Dict, Optional, List # NOQA
+from typing import Any, Dict, Optional, List # NOQA
__all__ = ['Composer', 'ComposerError']
@@ -28,30 +27,26 @@ class ComposerError(MarkedYAMLError):
class Composer:
- def __init__(self, loader=None):
- # type: (Any) -> None
+ def __init__(self, loader: Any = None) -> None:
self.loader = loader
if self.loader is not None and getattr(self.loader, '_composer', None) is None:
self.loader._composer = self
- self.anchors = {} # type: Dict[Any, Any]
+ self.anchors: Dict[Any, Any] = {}
@property
- def parser(self):
- # type: () -> Any
+ def parser(self) -> Any:
if hasattr(self.loader, 'typ'):
self.loader.parser
return self.loader._parser
@property
- def resolver(self):
- # type: () -> Any
+ def resolver(self) -> Any:
# assert self.loader._resolver is not None
if hasattr(self.loader, 'typ'):
self.loader.resolver
return self.loader._resolver
- def check_node(self):
- # type: () -> Any
+ def check_node(self) -> Any:
# Drop the STREAM-START event.
if self.parser.check_event(StreamStartEvent):
self.parser.get_event()
@@ -59,19 +54,17 @@ class Composer:
# If there are more documents available?
return not self.parser.check_event(StreamEndEvent)
- def get_node(self):
- # type: () -> Any
+ def get_node(self) -> Any:
# Get the root node of the next document.
if not self.parser.check_event(StreamEndEvent):
return self.compose_document()
- def get_single_node(self):
- # type: () -> Any
+ def get_single_node(self) -> Any:
# Drop the STREAM-START event.
self.parser.get_event()
# Compose a document if the stream is not empty.
- document = None # type: Any
+ document: Any = None
if not self.parser.check_event(StreamEndEvent):
document = self.compose_document()
@@ -90,8 +83,7 @@ class Composer:
return document
- def compose_document(self):
- # type: (Any) -> Any
+ def compose_document(self: Any) -> Any:
# Drop the DOCUMENT-START event.
self.parser.get_event()
@@ -104,36 +96,28 @@ class Composer:
self.anchors = {}
return node
- def return_alias(self, a):
- # type: (Any) -> Any
+ def return_alias(self, a: Any) -> Any:
return a
- def compose_node(self, parent, index):
- # type: (Any, Any) -> Any
+ def compose_node(self, parent: Any, index: Any) -> Any:
if self.parser.check_event(AliasEvent):
event = self.parser.get_event()
alias = event.anchor
if alias not in self.anchors:
raise ComposerError(
- None,
- None,
- _F('found undefined alias {alias!r}', alias=alias),
- event.start_mark,
+ None, None, f'found undefined alias {alias!r}', event.start_mark,
)
return self.return_alias(self.anchors[alias])
event = self.parser.peek_event()
anchor = event.anchor
if anchor is not None: # have an anchor
if anchor in self.anchors:
- # raise ComposerError(
- # "found duplicate anchor %r; first occurrence"
- # % (anchor), self.anchors[anchor].start_mark,
- # "second occurrence", event.start_mark)
ws = (
- '\nfound duplicate anchor {!r}\nfirst occurrence {}\nsecond occurrence '
- '{}'.format((anchor), self.anchors[anchor].start_mark, event.start_mark)
+ f'\nfound duplicate anchor {anchor!r}\n'
+ f'first occurrence {self.anchors[anchor].start_mark}\n'
+ f'second occurrence {event.start_mark}'
)
- warnings.warn(ws, ReusedAnchorWarning)
+ warnings.warn(ws, ReusedAnchorWarning, stacklevel=2)
self.resolver.descend_resolver(parent, index)
if self.parser.check_event(ScalarEvent):
node = self.compose_scalar_node(anchor)
@@ -144,8 +128,7 @@ class Composer:
self.resolver.ascend_resolver()
return node
- def compose_scalar_node(self, anchor):
- # type: (Any) -> Any
+ def compose_scalar_node(self, anchor: Any) -> Any:
event = self.parser.get_event()
tag = event.tag
if tag is None or tag == '!':
@@ -163,8 +146,7 @@ class Composer:
self.anchors[anchor] = node
return node
- def compose_sequence_node(self, anchor):
- # type: (Any) -> Any
+ def compose_sequence_node(self, anchor: Any) -> Any:
start_event = self.parser.get_event()
tag = start_event.tag
if tag is None or tag == '!':
@@ -187,17 +169,16 @@ class Composer:
end_event = self.parser.get_event()
if node.flow_style is True and end_event.comment is not None:
if node.comment is not None:
+ x = node.flow_style
nprint(
- 'Warning: unexpected end_event commment in sequence '
- 'node {}'.format(node.flow_style)
+ f'Warning: unexpected end_event comment in sequence node {x}'
)
node.comment = end_event.comment
node.end_mark = end_event.end_mark
self.check_end_doc_comment(end_event, node)
return node
- def compose_mapping_node(self, anchor):
- # type: (Any) -> Any
+ def compose_mapping_node(self, anchor: Any) -> Any:
start_event = self.parser.get_event()
tag = start_event.tag
if tag is None or tag == '!':
@@ -230,8 +211,7 @@ class Composer:
self.check_end_doc_comment(end_event, node)
return node
- def check_end_doc_comment(self, end_event, node):
- # type: (Any, Any) -> None
+ def check_end_doc_comment(self, end_event: Any, node: Any) -> None:
if end_event.comment and end_event.comment[1]:
# pre comments on an end_event, no following to move to
if node.comment is None:
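
The duplicate-anchor branch above now passes an explicit stacklevel to warnings.warn. A sketch of the behaviour it covers; the document and assertions are illustrative, not taken from the test suite:

import warnings
from ruamel.yaml import YAML
from ruamel.yaml.error import ReusedAnchorWarning

yaml_doc = """\
first: &a 1
second: &a 2   # anchor name reused, triggers ReusedAnchorWarning
third: *a      # an alias resolves to the most recent definition
"""

yaml = YAML()
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    data = yaml.load(yaml_doc)

assert data['third'] == 2
assert any(issubclass(w.category, ReusedAnchorWarning) for w in caught)
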
diff --git a/configobjwalker.py b/configobjwalker.py
index cbc6148..a6faa88 100644
--- a/configobjwalker.py
+++ b/configobjwalker.py
@@ -4,11 +4,12 @@ import warnings
from ruamel.yaml.util import configobj_walker as new_configobj_walker
-if False: # MYPY
- from typing import Any # NOQA
+from typing import Any
-def configobj_walker(cfg):
- # type: (Any) -> Any
- warnings.warn('configobj_walker has moved to ruamel.yaml.util, please update your code')
+def configobj_walker(cfg: Any) -> Any:
+ warnings.warn(
+ 'configobj_walker has moved to ruamel.yaml.util, please update your code',
+ stacklevel=2
+ )
return new_configobj_walker(cfg)
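
The added stacklevel=2 makes the warning report the caller's file and line rather than the shim's own. A generic sketch of the pattern; the function names here are hypothetical, not part of ruamel.yaml:

import warnings

def new_walker(cfg):
    return [line.upper() for line in cfg]

def old_walker(cfg):
    # stacklevel=2 points the reported location at the *caller* of old_walker,
    # which is what users of a moved API need to see
    warnings.warn('old_walker has moved, please update your code', stacklevel=2)
    return new_walker(cfg)

print(old_walker(['a: 1']))   # the warning is attributed to this line, not to old_walker
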
diff --git a/constructor.py b/constructor.py
index a67ca55..dc7e5ed 100644
--- a/constructor.py
+++ b/constructor.py
@@ -13,7 +13,7 @@ from ruamel.yaml.error import (MarkedYAMLError, MarkedYAMLFutureWarning,
MantissaNoDotYAML1_1Warning)
from ruamel.yaml.nodes import * # NOQA
from ruamel.yaml.nodes import (SequenceNode, MappingNode, ScalarNode)
-from ruamel.yaml.compat import (_F, builtins_module, # NOQA
+from ruamel.yaml.compat import (builtins_module, # NOQA
nprint, nprintf, version_tnf)
from ruamel.yaml.compat import ordereddict
@@ -33,8 +33,7 @@ from ruamel.yaml.scalarbool import ScalarBoolean
from ruamel.yaml.timestamp import TimeStamp
from ruamel.yaml.util import timestamp_regexp, create_timestamp
-if False: # MYPY
- from typing import Any, Dict, List, Set, Generator, Union, Optional # NOQA
+from typing import Any, Dict, List, Set, Iterator, Union, Optional # NOQA
__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
@@ -59,70 +58,62 @@ class BaseConstructor:
yaml_constructors = {} # type: Dict[Any, Any]
yaml_multi_constructors = {} # type: Dict[Any, Any]
- def __init__(self, preserve_quotes=None, loader=None):
- # type: (Optional[bool], Any) -> None
+ def __init__(self, preserve_quotes: Optional[bool] = None, loader: Any = None) -> None:
self.loader = loader
if self.loader is not None and getattr(self.loader, '_constructor', None) is None:
self.loader._constructor = self
self.loader = loader
self.yaml_base_dict_type = dict
self.yaml_base_list_type = list
- self.constructed_objects = {} # type: Dict[Any, Any]
- self.recursive_objects = {} # type: Dict[Any, Any]
- self.state_generators = [] # type: List[Any]
+ self.constructed_objects: Dict[Any, Any] = {}
+ self.recursive_objects: Dict[Any, Any] = {}
+ self.state_generators: List[Any] = []
self.deep_construct = False
self._preserve_quotes = preserve_quotes
self.allow_duplicate_keys = version_tnf((0, 15, 1), (0, 16))
@property
- def composer(self):
- # type: () -> Any
+ def composer(self) -> Any:
if hasattr(self.loader, 'typ'):
return self.loader.composer
try:
return self.loader._composer
except AttributeError:
- sys.stdout.write('slt {}\n'.format(type(self)))
- sys.stdout.write('slc {}\n'.format(self.loader._composer))
- sys.stdout.write('{}\n'.format(dir(self)))
+ sys.stdout.write(f'slt {type(self)}\n')
+ sys.stdout.write(f'slc {self.loader._composer}\n')
+ sys.stdout.write(f'{dir(self)}\n')
raise
@property
- def resolver(self):
- # type: () -> Any
+ def resolver(self) -> Any:
if hasattr(self.loader, 'typ'):
return self.loader.resolver
return self.loader._resolver
@property
- def scanner(self):
- # type: () -> Any
+ def scanner(self) -> Any:
# needed to get to the expanded comments
if hasattr(self.loader, 'typ'):
return self.loader.scanner
return self.loader._scanner
- def check_data(self):
- # type: () -> Any
+ def check_data(self) -> Any:
# If there are more documents available?
return self.composer.check_node()
- def get_data(self):
- # type: () -> Any
+ def get_data(self) -> Any:
# Construct and return the next document.
if self.composer.check_node():
return self.construct_document(self.composer.get_node())
- def get_single_data(self):
- # type: () -> Any
+ def get_single_data(self) -> Any:
# Ensure that the stream contains a single document and construct it.
node = self.composer.get_single_node()
if node is not None:
return self.construct_document(node)
return None
- def construct_document(self, node):
- # type: (Any) -> Any
+ def construct_document(self, node: Any) -> Any:
data = self.construct_object(node)
while bool(self.state_generators):
state_generators = self.state_generators
@@ -135,8 +126,7 @@ class BaseConstructor:
self.deep_construct = False
return data
- def construct_object(self, node, deep=False):
- # type: (Any, bool) -> Any
+ def construct_object(self, node: Any, deep: bool = False) -> Any:
"""deep is True when creating an object/mapping recursively,
in that case want the underlying elements available during construction
"""
@@ -159,9 +149,8 @@ class BaseConstructor:
self.deep_construct = old_deep
return data
- def construct_non_recursive_object(self, node, tag=None):
- # type: (Any, Optional[str]) -> Any
- constructor = None # type: Any
+ def construct_non_recursive_object(self, node: Any, tag: Optional[str] = None) -> Any:
+ constructor: Any = None
tag_suffix = None
if tag is None:
tag = node.tag
@@ -199,19 +188,14 @@ class BaseConstructor:
self.state_generators.append(generator)
return data
- def construct_scalar(self, node):
- # type: (Any) -> Any
+ def construct_scalar(self, node: Any) -> Any:
if not isinstance(node, ScalarNode):
raise ConstructorError(
- None,
- None,
- _F('expected a scalar node, but found {node_id!s}', node_id=node.id),
- node.start_mark,
+ None, None, f'expected a scalar node, but found {node.id!s}', node.start_mark,
)
return node.value
- def construct_sequence(self, node, deep=False):
- # type: (Any, bool) -> Any
+ def construct_sequence(self, node: Any, deep: bool = False) -> Any:
"""deep is True when creating an object/mapping recursively,
in that case want the underlying elements available during construction
"""
@@ -219,22 +203,18 @@ class BaseConstructor:
raise ConstructorError(
None,
None,
- _F('expected a sequence node, but found {node_id!s}', node_id=node.id),
+ f'expected a sequence node, but found {node.id!s}',
node.start_mark,
)
return [self.construct_object(child, deep=deep) for child in node.value]
- def construct_mapping(self, node, deep=False):
- # type: (Any, bool) -> Any
+ def construct_mapping(self, node: Any, deep: bool = False) -> Any:
"""deep is True when creating an object/mapping recursively,
in that case want the underlying elements available during construction
"""
if not isinstance(node, MappingNode):
raise ConstructorError(
- None,
- None,
- _F('expected a mapping node, but found {node_id!s}', node_id=node.id),
- node.start_mark,
+ None, None, f'expected a mapping node, but found {node.id!s}', node.start_mark,
)
total_mapping = self.yaml_base_dict_type()
if getattr(node, 'merge', None) is not None:
@@ -242,7 +222,7 @@ class BaseConstructor:
else:
todo = [(node.value, True)]
for values, check in todo:
- mapping = self.yaml_base_dict_type() # type: Dict[Any, Any]
+ mapping: Dict[Any, Any] = self.yaml_base_dict_type()
for key_node, value_node in values:
# keys can be list -> deep
key = self.construct_object(key_node, deep=True)
@@ -267,8 +247,9 @@ class BaseConstructor:
total_mapping.update(mapping)
return total_mapping
- def check_mapping_key(self, node, key_node, mapping, key, value):
- # type: (Any, Any, Any, Any, Any) -> bool
+ def check_mapping_key(
+ self, node: Any, key_node: Any, mapping: Any, key: Any, value: Any
+ ) -> bool:
"""return True if key is unique"""
if key in mapping:
if not self.allow_duplicate_keys:
@@ -276,8 +257,8 @@ class BaseConstructor:
args = [
'while constructing a mapping',
node.start_mark,
- 'found duplicate key "{}" with value "{}" '
- '(original value: "{}")'.format(key, value, mk),
+ f'found duplicate key "{key}" with value "{value}" '
+ f'(original value: "{mk}")',
key_node.start_mark,
"""
To suppress this check see:
@@ -289,20 +270,19 @@ class BaseConstructor:
""",
]
if self.allow_duplicate_keys is None:
- warnings.warn(DuplicateKeyFutureWarning(*args))
+ warnings.warn(DuplicateKeyFutureWarning(*args), stacklevel=1)
else:
raise DuplicateKeyError(*args)
return False
return True
- def check_set_key(self, node, key_node, setting, key):
- # type: (Any, Any, Any, Any, Any) -> None
+ def check_set_key(self: Any, node: Any, key_node: Any, setting: Any, key: Any) -> None:
if key in setting:
if not self.allow_duplicate_keys:
args = [
'while constructing a set',
node.start_mark,
- 'found duplicate key "{}"'.format(key),
+ f'found duplicate key "{key}"',
key_node.start_mark,
"""
To suppress this check see:
@@ -314,18 +294,14 @@ class BaseConstructor:
""",
]
if self.allow_duplicate_keys is None:
- warnings.warn(DuplicateKeyFutureWarning(*args))
+ warnings.warn(DuplicateKeyFutureWarning(*args), stacklevel=1)
else:
raise DuplicateKeyError(*args)
- def construct_pairs(self, node, deep=False):
- # type: (Any, bool) -> Any
+ def construct_pairs(self, node: Any, deep: bool = False) -> Any:
if not isinstance(node, MappingNode):
raise ConstructorError(
- None,
- None,
- _F('expected a mapping node, but found {node_id!s}', node_id=node.id),
- node.start_mark,
+ None, None, f'expected a mapping node, but found {node.id!s}', node.start_mark,
)
pairs = []
for key_node, value_node in node.value:
@@ -335,37 +311,33 @@ class BaseConstructor:
return pairs
@classmethod
- def add_constructor(cls, tag, constructor):
- # type: (Any, Any) -> None
+ def add_constructor(cls, tag: Any, constructor: Any) -> None:
if 'yaml_constructors' not in cls.__dict__:
cls.yaml_constructors = cls.yaml_constructors.copy()
cls.yaml_constructors[tag] = constructor
@classmethod
- def add_multi_constructor(cls, tag_prefix, multi_constructor):
- # type: (Any, Any) -> None
+ def add_multi_constructor(cls, tag_prefix: Any, multi_constructor: Any) -> None:
if 'yaml_multi_constructors' not in cls.__dict__:
cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
cls.yaml_multi_constructors[tag_prefix] = multi_constructor
class SafeConstructor(BaseConstructor):
- def construct_scalar(self, node):
- # type: (Any) -> Any
+ def construct_scalar(self, node: Any) -> Any:
if isinstance(node, MappingNode):
for key_node, value_node in node.value:
if key_node.tag == 'tag:yaml.org,2002:value':
return self.construct_scalar(value_node)
return BaseConstructor.construct_scalar(self, node)
- def flatten_mapping(self, node):
- # type: (Any) -> Any
+ def flatten_mapping(self, node: Any) -> Any:
"""
This implements the merge key feature http://yaml.org/type/merge.html
by inserting keys from the merge dict/list of dicts if not yet
available in this node
"""
- merge = [] # type: List[Any]
+ merge: List[Any] = []
index = 0
while index < len(node.value):
key_node, value_node = node.value[index]
@@ -378,7 +350,7 @@ class SafeConstructor(BaseConstructor):
args = [
'while constructing a mapping',
node.start_mark,
- 'found duplicate key "{}"'.format(key_node.value),
+ f'found duplicate key "{key_node.value}"',
key_node.start_mark,
"""
To suppress this check see:
@@ -390,7 +362,7 @@ class SafeConstructor(BaseConstructor):
""",
]
if self.allow_duplicate_keys is None:
- warnings.warn(DuplicateKeyFutureWarning(*args))
+ warnings.warn(DuplicateKeyFutureWarning(*args), stacklevel=1)
else:
raise DuplicateKeyError(*args)
del node.value[index]
@@ -404,10 +376,7 @@ class SafeConstructor(BaseConstructor):
raise ConstructorError(
'while constructing a mapping',
node.start_mark,
- _F(
- 'expected a mapping for merging, but found {subnode_id!s}',
- subnode_id=subnode.id,
- ),
+ f'expected a mapping for merging, but found {subnode.id!s}',
subnode.start_mark,
)
self.flatten_mapping(subnode)
@@ -419,11 +388,8 @@ class SafeConstructor(BaseConstructor):
raise ConstructorError(
'while constructing a mapping',
node.start_mark,
- _F(
- 'expected a mapping or list of mappings for merging, '
- 'but found {value_node_id!s}',
- value_node_id=value_node.id,
- ),
+ 'expected a mapping or list of mappings for merging, '
+ f'but found {value_node.id!s}',
value_node.start_mark,
)
elif key_node.tag == 'tag:yaml.org,2002:value':
@@ -435,8 +401,7 @@ class SafeConstructor(BaseConstructor):
node.merge = merge # separate merge keys to be able to update without duplicate
node.value = merge + node.value
- def construct_mapping(self, node, deep=False):
- # type: (Any, bool) -> Any
+ def construct_mapping(self, node: Any, deep: bool = False) -> Any:
"""deep is True when creating an object/mapping recursively,
in that case want the underlying elements available during construction
"""
@@ -444,8 +409,7 @@ class SafeConstructor(BaseConstructor):
self.flatten_mapping(node)
return BaseConstructor.construct_mapping(self, node, deep=deep)
- def construct_yaml_null(self, node):
- # type: (Any) -> Any
+ def construct_yaml_null(self, node: Any) -> Any:
self.construct_scalar(node)
return None
@@ -461,13 +425,11 @@ class SafeConstructor(BaseConstructor):
'off': False,
}
- def construct_yaml_bool(self, node):
- # type: (Any) -> bool
+ def construct_yaml_bool(self, node: Any) -> bool:
value = self.construct_scalar(node)
return self.bool_values[value.lower()]
- def construct_yaml_int(self, node):
- # type: (Any) -> int
+ def construct_yaml_int(self, node: Any) -> int:
value_s = self.construct_scalar(node)
value_s = value_s.replace('_', "")
sign = +1
@@ -502,8 +464,7 @@ class SafeConstructor(BaseConstructor):
inf_value *= inf_value
nan_value = -inf_value / inf_value # Trying to make a quiet NaN (like C99).
- def construct_yaml_float(self, node):
- # type: (Any) -> float
+ def construct_yaml_float(self, node: Any) -> float:
value_so = self.construct_scalar(node)
value_s = value_so.replace('_', "").lower()
sign = +1
@@ -529,34 +490,29 @@ class SafeConstructor(BaseConstructor):
# value_s is lower case independent of input
mantissa, exponent = value_s.split('e')
if '.' not in mantissa:
- warnings.warn(MantissaNoDotYAML1_1Warning(node, value_so))
+ warnings.warn(MantissaNoDotYAML1_1Warning(node, value_so), stacklevel=1)
return sign * float(value_s)
- def construct_yaml_binary(self, node):
- # type: (Any) -> Any
+ def construct_yaml_binary(self, node: Any) -> Any:
try:
value = self.construct_scalar(node).encode('ascii')
except UnicodeEncodeError as exc:
raise ConstructorError(
None,
None,
- _F('failed to convert base64 data into ascii: {exc!s}', exc=exc),
+ f'failed to convert base64 data into ascii: {exc!s}',
node.start_mark,
)
try:
return base64.decodebytes(value)
except binascii.Error as exc:
raise ConstructorError(
- None,
- None,
- _F('failed to decode base64 data: {exc!s}', exc=exc),
- node.start_mark,
+ None, None, f'failed to decode base64 data: {exc!s}', node.start_mark,
)
timestamp_regexp = timestamp_regexp # moved to util 0.17.17
- def construct_yaml_timestamp(self, node, values=None):
- # type: (Any, Any) -> Any
+ def construct_yaml_timestamp(self, node: Any, values: Any = None) -> Any:
if values is None:
try:
match = self.timestamp_regexp.match(node.value)
@@ -566,14 +522,13 @@ class SafeConstructor(BaseConstructor):
raise ConstructorError(
None,
None,
- 'failed to construct timestamp from "{}"'.format(node.value),
+ f'failed to construct timestamp from "{node.value}"',
node.start_mark,
)
values = match.groupdict()
return create_timestamp(**values)
- def construct_yaml_omap(self, node):
- # type: (Any) -> Any
+ def construct_yaml_omap(self, node: Any) -> Any:
# Note: we do now check for duplicate keys
omap = ordereddict()
yield omap
@@ -581,7 +536,7 @@ class SafeConstructor(BaseConstructor):
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
- _F('expected a sequence, but found {node_id!s}', node_id=node.id),
+ f'expected a sequence, but found {node.id!s}',
node.start_mark,
)
for subnode in node.value:
@@ -589,20 +544,14 @@ class SafeConstructor(BaseConstructor):
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
- _F(
- 'expected a mapping of length 1, but found {subnode_id!s}',
- subnode_id=subnode.id,
- ),
+ f'expected a mapping of length 1, but found {subnode.id!s}',
subnode.start_mark,
)
if len(subnode.value) != 1:
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
- _F(
- 'expected a single mapping item, but found {len_subnode_val:d} items',
- len_subnode_val=len(subnode.value),
- ),
+ f'expected a single mapping item, but found {len(subnode.value):d} items',
subnode.start_mark,
)
key_node, value_node = subnode.value[0]
@@ -611,16 +560,15 @@ class SafeConstructor(BaseConstructor):
value = self.construct_object(value_node)
omap[key] = value
- def construct_yaml_pairs(self, node):
- # type: (Any) -> Any
+ def construct_yaml_pairs(self, node: Any) -> Any:
# Note: the same code as `construct_yaml_omap`.
- pairs = [] # type: List[Any]
+ pairs: List[Any] = []
yield pairs
if not isinstance(node, SequenceNode):
raise ConstructorError(
'while constructing pairs',
node.start_mark,
- _F('expected a sequence, but found {node_id!s}', node_id=node.id),
+ f'expected a sequence, but found {node.id!s}',
node.start_mark,
)
for subnode in node.value:
@@ -628,20 +576,14 @@ class SafeConstructor(BaseConstructor):
raise ConstructorError(
'while constructing pairs',
node.start_mark,
- _F(
- 'expected a mapping of length 1, but found {subnode_id!s}',
- subnode_id=subnode.id,
- ),
+ f'expected a mapping of length 1, but found {subnode.id!s}',
subnode.start_mark,
)
if len(subnode.value) != 1:
raise ConstructorError(
'while constructing pairs',
node.start_mark,
- _F(
- 'expected a single mapping item, but found {len_subnode_val:d} items',
- len_subnode_val=len(subnode.value),
- ),
+ f'expected a single mapping item, but found {len(subnode.value):d} items',
subnode.start_mark,
)
key_node, value_node = subnode.value[0]
@@ -649,33 +591,28 @@ class SafeConstructor(BaseConstructor):
value = self.construct_object(value_node)
pairs.append((key, value))
- def construct_yaml_set(self, node):
- # type: (Any) -> Any
- data = set() # type: Set[Any]
+ def construct_yaml_set(self, node: Any) -> Any:
+ data: Set[Any] = set()
yield data
value = self.construct_mapping(node)
data.update(value)
- def construct_yaml_str(self, node):
- # type: (Any) -> Any
+ def construct_yaml_str(self, node: Any) -> Any:
value = self.construct_scalar(node)
return value
- def construct_yaml_seq(self, node):
- # type: (Any) -> Any
- data = self.yaml_base_list_type() # type: List[Any]
+ def construct_yaml_seq(self, node: Any) -> Any:
+ data: List[Any] = self.yaml_base_list_type()
yield data
data.extend(self.construct_sequence(node))
- def construct_yaml_map(self, node):
- # type: (Any) -> Any
- data = self.yaml_base_dict_type() # type: Dict[Any, Any]
+ def construct_yaml_map(self, node: Any) -> Any:
+ data: Dict[Any, Any] = self.yaml_base_dict_type()
yield data
value = self.construct_mapping(node)
data.update(value)
- def construct_yaml_object(self, node, cls):
- # type: (Any, Any) -> Any
+ def construct_yaml_object(self, node: Any, cls: Any) -> Any:
data = cls.__new__(cls)
yield data
if hasattr(data, '__setstate__'):
@@ -685,14 +622,11 @@ class SafeConstructor(BaseConstructor):
state = self.construct_mapping(node)
data.__dict__.update(state)
- def construct_undefined(self, node):
- # type: (Any) -> None
+ def construct_undefined(self, node: Any) -> None:
raise ConstructorError(
None,
None,
- _F(
- 'could not determine a constructor for the tag {node_tag!r}', node_tag=node.tag
- ),
+ f'could not determine a constructor for the tag {node.tag!r}',
node.start_mark,
)
@@ -733,50 +667,40 @@ SafeConstructor.add_constructor(None, SafeConstructor.construct_undefined)
class Constructor(SafeConstructor):
- def construct_python_str(self, node):
- # type: (Any) -> Any
+ def construct_python_str(self, node: Any) -> Any:
return self.construct_scalar(node)
- def construct_python_unicode(self, node):
- # type: (Any) -> Any
+ def construct_python_unicode(self, node: Any) -> Any:
return self.construct_scalar(node)
- def construct_python_bytes(self, node):
- # type: (Any) -> Any
+ def construct_python_bytes(self, node: Any) -> Any:
try:
value = self.construct_scalar(node).encode('ascii')
except UnicodeEncodeError as exc:
raise ConstructorError(
None,
None,
- _F('failed to convert base64 data into ascii: {exc!s}', exc=exc),
+ f'failed to convert base64 data into ascii: {exc!s}',
node.start_mark,
)
try:
return base64.decodebytes(value)
except binascii.Error as exc:
raise ConstructorError(
- None,
- None,
- _F('failed to decode base64 data: {exc!s}', exc=exc),
- node.start_mark,
+ None, None, f'failed to decode base64 data: {exc!s}', node.start_mark,
)
- def construct_python_long(self, node):
- # type: (Any) -> int
+ def construct_python_long(self, node: Any) -> int:
val = self.construct_yaml_int(node)
return val
- def construct_python_complex(self, node):
- # type: (Any) -> Any
+ def construct_python_complex(self, node: Any) -> Any:
return complex(self.construct_scalar(node))
- def construct_python_tuple(self, node):
- # type: (Any) -> Any
+ def construct_python_tuple(self, node: Any) -> Any:
return tuple(self.construct_sequence(node))
- def find_python_module(self, name, mark):
- # type: (Any, Any) -> Any
+ def find_python_module(self, name: Any, mark: Any) -> Any:
if not name:
raise ConstructorError(
'while constructing a Python module',
@@ -790,13 +714,12 @@ class Constructor(SafeConstructor):
raise ConstructorError(
'while constructing a Python module',
mark,
- _F('cannot find module {name!r} ({exc!s})', name=name, exc=exc),
+ f'cannot find module {name!r} ({exc!s})',
mark,
)
return sys.modules[name]
- def find_python_name(self, name, mark):
- # type: (Any, Any) -> Any
+ def find_python_name(self, name: Any, mark: Any) -> Any:
if not name:
raise ConstructorError(
'while constructing a Python object',
@@ -807,7 +730,7 @@ class Constructor(SafeConstructor):
if '.' in name:
lname = name.split('.')
lmodule_name = lname
- lobject_name = [] # type: List[Any]
+ lobject_name: List[Any] = []
while len(lmodule_name) > 1:
lobject_name.insert(0, lmodule_name.pop())
module_name = '.'.join(lmodule_name)
@@ -826,11 +749,7 @@ class Constructor(SafeConstructor):
raise ConstructorError(
'while constructing a Python object',
mark,
- _F(
- 'cannot find module {module_name!r} ({exc!s})',
- module_name=module_name,
- exc=exc,
- ),
+ f'cannot find module {module_name!r} ({exc!s})',
mark,
)
module = sys.modules[module_name]
@@ -842,42 +761,37 @@ class Constructor(SafeConstructor):
raise ConstructorError(
'while constructing a Python object',
mark,
- _F(
- 'cannot find {object_name!r} in the module {module_name!r}',
- object_name=object_name,
- module_name=module.__name__,
- ),
+ f'cannot find {object_name!r} in the module {module.__name__!r}',
mark,
)
obj = getattr(obj, lobject_name.pop(0))
return obj
- def construct_python_name(self, suffix, node):
- # type: (Any, Any) -> Any
+ def construct_python_name(self, suffix: Any, node: Any) -> Any:
value = self.construct_scalar(node)
if value:
raise ConstructorError(
'while constructing a Python name',
node.start_mark,
- _F('expected the empty value, but found {value!r}', value=value),
+ f'expected the empty value, but found {value!r}',
node.start_mark,
)
return self.find_python_name(suffix, node.start_mark)
- def construct_python_module(self, suffix, node):
- # type: (Any, Any) -> Any
+ def construct_python_module(self, suffix: Any, node: Any) -> Any:
value = self.construct_scalar(node)
if value:
raise ConstructorError(
'while constructing a Python module',
node.start_mark,
- _F('expected the empty value, but found {value!r}', value=value),
+ f'expected the empty value, but found {value!r}',
node.start_mark,
)
return self.find_python_module(suffix, node.start_mark)
- def make_python_instance(self, suffix, node, args=None, kwds=None, newobj=False):
- # type: (Any, Any, Any, Any, bool) -> Any
+ def make_python_instance(
+ self, suffix: Any, node: Any, args: Any = None, kwds: Any = None, newobj: bool = False
+ ) -> Any:
if not args:
args = []
if not kwds:
@@ -888,12 +802,11 @@ class Constructor(SafeConstructor):
else:
return cls(*args, **kwds)
- def set_python_instance_state(self, instance, state):
- # type: (Any, Any) -> None
+ def set_python_instance_state(self, instance: Any, state: Any) -> None:
if hasattr(instance, '__setstate__'):
instance.__setstate__(state)
else:
- slotstate = {} # type: Dict[Any, Any]
+ slotstate: Dict[Any, Any] = {}
if isinstance(state, tuple) and len(state) == 2:
state, slotstate = state
if hasattr(instance, '__dict__'):
@@ -903,8 +816,7 @@ class Constructor(SafeConstructor):
for key, value in slotstate.items():
setattr(instance, key, value)
- def construct_python_object(self, suffix, node):
- # type: (Any, Any) -> Any
+ def construct_python_object(self, suffix: Any, node: Any) -> Any:
# Format:
# !!python/object:module.name { ... state ... }
instance = self.make_python_instance(suffix, node, newobj=True)
@@ -914,8 +826,9 @@ class Constructor(SafeConstructor):
state = self.construct_mapping(node, deep=deep)
self.set_python_instance_state(instance, state)
- def construct_python_object_apply(self, suffix, node, newobj=False):
- # type: (Any, Any, bool) -> Any
+ def construct_python_object_apply(
+ self, suffix: Any, node: Any, newobj: bool = False
+ ) -> Any:
# Format:
# !!python/object/apply # (or !!python/object/new)
# args: [ ... arguments ... ]
@@ -929,10 +842,10 @@ class Constructor(SafeConstructor):
# is how an object is created, check make_python_instance for details.
if isinstance(node, SequenceNode):
args = self.construct_sequence(node, deep=True)
- kwds = {} # type: Dict[Any, Any]
- state = {} # type: Dict[Any, Any]
- listitems = [] # type: List[Any]
- dictitems = {} # type: Dict[Any, Any]
+ kwds: Dict[Any, Any] = {}
+ state: Dict[Any, Any] = {}
+ listitems: List[Any] = []
+ dictitems: Dict[Any, Any] = {}
else:
value = self.construct_mapping(node, deep=True)
args = value.get('args', [])
@@ -950,8 +863,7 @@ class Constructor(SafeConstructor):
instance[key] = dictitems[key]
return instance
- def construct_python_object_new(self, suffix, node):
- # type: (Any, Any) -> Any
+ def construct_python_object_new(self, suffix: Any, node: Any) -> Any:
return self.construct_python_object_apply(suffix, node, newobj=True)
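
For context, the python/object/apply path handled above resolves an importable callable and invokes it with the constructed arguments. A sketch, assuming the non-safe typ='unsafe' loader and trusted input only:

from ruamel.yaml import YAML

yaml_doc = """\
!!python/object/apply:collections.OrderedDict
- [[a, 1], [b, 2]]
"""

yaml = YAML(typ='unsafe')      # uses Constructor, which resolves python/... tags
data = yaml.load(yaml_doc)     # calls OrderedDict([['a', 1], ['b', 2]])
assert list(data.items()) == [('a', 1), ('b', 2)]
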
@@ -1013,15 +925,13 @@ class RoundTripConstructor(SafeConstructor):
as well as on the items
"""
- def comment(self, idx):
- # type: (Any) -> Any
+ def comment(self, idx: Any) -> Any:
assert self.loader.comment_handling is not None
x = self.scanner.comments[idx]
x.set_assigned()
return x
- def comments(self, list_of_comments, idx=None):
- # type: (Any, Optional[Any]) -> Any
+ def comments(self, list_of_comments: Any, idx: Optional[Any] = None) -> Any:
# hand in the comment and optional pre, eol, post segment
if list_of_comments is None:
return []
@@ -1032,14 +942,10 @@ class RoundTripConstructor(SafeConstructor):
for x in list_of_comments:
yield self.comment(x)
- def construct_scalar(self, node):
- # type: (Any) -> Any
+ def construct_scalar(self, node: Any) -> Any:
if not isinstance(node, ScalarNode):
raise ConstructorError(
- None,
- None,
- _F('expected a scalar node, but found {node_id!s}', node_id=node.id),
- node.start_mark,
+ None, None, f'expected a scalar node, but found {node.id!s}', node.start_mark,
)
if node.style == '|' and isinstance(node.value, str):
@@ -1055,7 +961,7 @@ class RoundTripConstructor(SafeConstructor):
lss.comment = self.comment(node.comment[1][0]) # type: ignore
return lss
if node.style == '>' and isinstance(node.value, str):
- fold_positions = [] # type: List[int]
+ fold_positions: List[int] = []
idx = -1
while True:
idx = node.value.find('\a', idx + 1)
@@ -1084,13 +990,12 @@ class RoundTripConstructor(SafeConstructor):
return PlainScalarString(node.value, anchor=node.anchor)
return node.value
- def construct_yaml_int(self, node):
- # type: (Any) -> Any
- width = None # type: Any
+ def construct_yaml_int(self, node: Any) -> Any:
+ width: Any = None
value_su = self.construct_scalar(node)
try:
sx = value_su.rstrip('_')
- underscore = [len(sx) - sx.rindex('_') - 1, False, False] # type: Any
+ underscore: Any = [len(sx) - sx.rindex('_') - 1, False, False]
except ValueError:
underscore = None
except IndexError:
@@ -1119,7 +1024,7 @@ class RoundTripConstructor(SafeConstructor):
# default to lower-case if no a-fA-F in string
if self.resolver.processing_version > (1, 1) and value_s[2] == '0':
width = len(value_s[2:])
- hex_fun = HexInt # type: Any
+ hex_fun: Any = HexInt
for ch in value_s[2:]:
if ch in 'ABCDEF': # first non-digit is capital
hex_fun = HexCapsInt
@@ -1180,10 +1085,8 @@ class RoundTripConstructor(SafeConstructor):
else:
return sign * int(value_s)
- def construct_yaml_float(self, node):
- # type: (Any) -> Any
- def leading_zeros(v):
- # type: (Any) -> int
+ def construct_yaml_float(self, node: Any) -> Any:
+ def leading_zeros(v: Any) -> int:
lead0 = 0
idx = 0
while idx < len(v) and v[idx] in '0.':
@@ -1193,7 +1096,7 @@ class RoundTripConstructor(SafeConstructor):
return lead0
# underscore = None
- m_sign = False # type: Any
+ m_sign: Any = False
value_so = self.construct_scalar(node)
value_s = value_so.replace('_', "").lower()
sign = +1
@@ -1225,7 +1128,7 @@ class RoundTripConstructor(SafeConstructor):
if self.resolver.processing_version != (1, 2):
# value_s is lower case independent of input
if '.' not in mantissa:
- warnings.warn(MantissaNoDotYAML1_1Warning(node, value_so))
+ warnings.warn(MantissaNoDotYAML1_1Warning(node, value_so), stacklevel=1)
lead0 = leading_zeros(mantissa)
width = len(mantissa)
prec = mantissa.find('.')
@@ -1246,7 +1149,8 @@ class RoundTripConstructor(SafeConstructor):
anchor=node.anchor,
)
width = len(value_so)
- prec = value_so.index('.') # you can use index, this would not be float without dot
+ # you can't use index, !!float 42 would be a float without a dot
+ prec = value_so.find('.')
lead0 = leading_zeros(value_so)
return ScalarFloat(
sign * float(value_s),
@@ -1257,20 +1161,18 @@ class RoundTripConstructor(SafeConstructor):
anchor=node.anchor,
)
- def construct_yaml_str(self, node):
- # type: (Any) -> Any
+ def construct_yaml_str(self, node: Any) -> Any:
value = self.construct_scalar(node)
if isinstance(value, ScalarString):
return value
return value
- def construct_rt_sequence(self, node, seqtyp, deep=False):
- # type: (Any, Any, bool) -> Any
+ def construct_rt_sequence(self, node: Any, seqtyp: Any, deep: bool = False) -> Any:
if not isinstance(node, SequenceNode):
raise ConstructorError(
None,
None,
- _F('expected a sequence node, but found {node_id!s}', node_id=node.id),
+ f'expected a sequence node, but found {node.id!s}',
node.start_mark,
)
ret_val = []
@@ -1301,16 +1203,14 @@ class RoundTripConstructor(SafeConstructor):
)
return ret_val
- def flatten_mapping(self, node):
- # type: (Any) -> Any
+ def flatten_mapping(self, node: Any) -> Any:
"""
This implements the merge key feature http://yaml.org/type/merge.html
by inserting keys from the merge dict/list of dicts if not yet
available in this node
"""
- def constructed(value_node):
- # type: (Any) -> Any
+ def constructed(value_node: Any) -> Any:
# If the contents of a merge are defined within the
# merge marker, then they won't have been constructed
# yet. But if they were already constructed, we need to use
@@ -1322,7 +1222,7 @@ class RoundTripConstructor(SafeConstructor):
return value
# merge = []
- merge_map_list = [] # type: List[Any]
+ merge_map_list: List[Any] = []
index = 0
while index < len(node.value):
key_node, value_node = node.value[index]
@@ -1335,7 +1235,7 @@ class RoundTripConstructor(SafeConstructor):
args = [
'while constructing a mapping',
node.start_mark,
- 'found duplicate key "{}"'.format(key_node.value),
+ f'found duplicate key "{key_node.value}"',
key_node.start_mark,
"""
To suppress this check see:
@@ -1347,7 +1247,7 @@ class RoundTripConstructor(SafeConstructor):
""",
]
if self.allow_duplicate_keys is None:
- warnings.warn(DuplicateKeyFutureWarning(*args))
+ warnings.warn(DuplicateKeyFutureWarning(*args), stacklevel=1)
else:
raise DuplicateKeyError(*args)
del node.value[index]
@@ -1362,10 +1262,7 @@ class RoundTripConstructor(SafeConstructor):
raise ConstructorError(
'while constructing a mapping',
node.start_mark,
- _F(
- 'expected a mapping for merging, but found {subnode_id!s}',
- subnode_id=subnode.id,
- ),
+ f'expected a mapping for merging, but found {subnode.id!s}',
subnode.start_mark,
)
merge_map_list.append((index, constructed(subnode)))
@@ -1378,11 +1275,8 @@ class RoundTripConstructor(SafeConstructor):
raise ConstructorError(
'while constructing a mapping',
node.start_mark,
- _F(
- 'expected a mapping or list of mappings for merging, '
- 'but found {value_node_id!s}',
- value_node_id=value_node.id,
- ),
+ 'expected a mapping or list of mappings for merging, '
+ f'but found {value_node.id!s}',
value_node.start_mark,
)
elif key_node.tag == 'tag:yaml.org,2002:value':
@@ -1394,18 +1288,13 @@ class RoundTripConstructor(SafeConstructor):
# if merge:
# node.value = merge + node.value
- def _sentinel(self):
- # type: () -> None
+ def _sentinel(self) -> None:
pass
- def construct_mapping(self, node, maptyp, deep=False): # type: ignore
- # type: (Any, Any, bool) -> Any
+ def construct_mapping(self, node: Any, maptyp: Any, deep: bool = False) -> Any: # type: ignore # NOQA
if not isinstance(node, MappingNode):
raise ConstructorError(
- None,
- None,
- _F('expected a mapping node, but found {node_id!s}', node_id=node.id),
- node.start_mark,
+ None, None, f'expected a mapping node, but found {node.id!s}', node.start_mark,
)
merge_map = self.flatten_mapping(node)
# mapping = {}
@@ -1439,6 +1328,7 @@ class RoundTripConstructor(SafeConstructor):
key_s.fa.set_flow_style()
elif key_node.flow_style is False:
key_s.fa.set_block_style()
+ key_s._yaml_set_line_col(key.lc.line, key.lc.col) # type: ignore
key = key_s
elif isinstance(key, MutableMapping):
key_m = CommentedKeyMap(key)
@@ -1446,6 +1336,7 @@ class RoundTripConstructor(SafeConstructor):
key_m.fa.set_flow_style()
elif key_node.flow_style is False:
key_m.fa.set_block_style()
+ key_m._yaml_set_line_col(key.lc.line, key.lc.col) # type: ignore
key = key_m
if not isinstance(key, Hashable):
raise ConstructorError(
@@ -1503,14 +1394,10 @@ class RoundTripConstructor(SafeConstructor):
if merge_map:
maptyp.add_yaml_merge(merge_map)
- def construct_setting(self, node, typ, deep=False):
- # type: (Any, Any, bool) -> Any
+ def construct_setting(self, node: Any, typ: Any, deep: bool = False) -> Any:
if not isinstance(node, MappingNode):
raise ConstructorError(
- None,
- None,
- _F('expected a mapping node, but found {node_id!s}', node_id=node.id),
- node.start_mark,
+ None, None, f'expected a mapping node, but found {node.id!s}', node.start_mark,
)
if self.loader and self.loader.comment_handling is None:
if node.comment:
@@ -1556,8 +1443,7 @@ class RoundTripConstructor(SafeConstructor):
nprintf('nc7b', value_node.comment)
typ.add(key)
- def construct_yaml_seq(self, node):
- # type: (Any) -> Any
+ def construct_yaml_seq(self, node: Any) -> Iterator[CommentedSeq]:
data = CommentedSeq()
data._yaml_set_line_col(node.start_mark.line, node.start_mark.column)
# if node.comment:
@@ -1566,16 +1452,14 @@ class RoundTripConstructor(SafeConstructor):
data.extend(self.construct_rt_sequence(node, data))
self.set_collection_style(data, node)
- def construct_yaml_map(self, node):
- # type: (Any) -> Any
+ def construct_yaml_map(self, node: Any) -> Iterator[CommentedMap]:
data = CommentedMap()
data._yaml_set_line_col(node.start_mark.line, node.start_mark.column)
yield data
self.construct_mapping(node, data, deep=True)
self.set_collection_style(data, node)
- def set_collection_style(self, data, node):
- # type: (Any, Any) -> None
+ def set_collection_style(self, data: Any, node: Any) -> None:
if len(data) == 0:
return
if node.flow_style is True:
@@ -1583,8 +1467,7 @@ class RoundTripConstructor(SafeConstructor):
elif node.flow_style is False:
data.fa.set_block_style()
- def construct_yaml_object(self, node, cls):
- # type: (Any, Any) -> Any
+ def construct_yaml_object(self, node: Any, cls: Any) -> Any:
data = cls.__new__(cls)
yield data
if hasattr(data, '__setstate__'):
@@ -1608,8 +1491,7 @@ class RoundTripConstructor(SafeConstructor):
a = getattr(data, Anchor.attrib)
a.value = node.anchor
- def construct_yaml_omap(self, node):
- # type: (Any) -> Any
+ def construct_yaml_omap(self, node: Any) -> Iterator[CommentedOrderedMap]:
# Note: we do now check for duplicate keys
omap = CommentedOrderedMap()
omap._yaml_set_line_col(node.start_mark.line, node.start_mark.column)
@@ -1631,7 +1513,7 @@ class RoundTripConstructor(SafeConstructor):
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
- _F('expected a sequence, but found {node_id!s}', node_id=node.id),
+ f'expected a sequence, but found {node.id!s}',
node.start_mark,
)
for subnode in node.value:
@@ -1639,20 +1521,14 @@ class RoundTripConstructor(SafeConstructor):
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
- _F(
- 'expected a mapping of length 1, but found {subnode_id!s}',
- subnode_id=subnode.id,
- ),
+ f'expected a mapping of length 1, but found {subnode.id!s}',
subnode.start_mark,
)
if len(subnode.value) != 1:
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
- _F(
- 'expected a single mapping item, but found {len_subnode_val:d} items',
- len_subnode_val=len(subnode.value),
- ),
+ f'expected a single mapping item, but found {len(subnode.value):d} items',
subnode.start_mark,
)
key_node, value_node = subnode.value[0]
@@ -1676,15 +1552,15 @@ class RoundTripConstructor(SafeConstructor):
nprintf('nc9c', value_node.comment)
omap[key] = value
- def construct_yaml_set(self, node):
- # type: (Any) -> Any
+ def construct_yaml_set(self, node: Any) -> Iterator[CommentedSet]:
data = CommentedSet()
data._yaml_set_line_col(node.start_mark.line, node.start_mark.column)
yield data
self.construct_setting(node, data)
- def construct_undefined(self, node):
- # type: (Any) -> Any
+ def construct_unknown(
+ self, node: Any
+ ) -> Iterator[Union[CommentedMap, TaggedScalar, CommentedSeq]]:
try:
if isinstance(node, MappingNode):
data = CommentedMap()
@@ -1735,14 +1611,13 @@ class RoundTripConstructor(SafeConstructor):
raise ConstructorError(
None,
None,
- _F(
- 'could not determine a constructor for the tag {node_tag!r}', node_tag=node.tag
- ),
+ f'could not determine a constructor for the tag {node.tag!r}',
node.start_mark,
)
- def construct_yaml_timestamp(self, node, values=None):
- # type: (Any, Any) -> Any
+ def construct_yaml_timestamp(
+ self, node: Any, values: Any = None
+ ) -> Union[datetime.date, datetime.datetime, TimeStamp]:
try:
match = self.timestamp_regexp.match(node.value)
except TypeError:
@@ -1751,7 +1626,7 @@ class RoundTripConstructor(SafeConstructor):
raise ConstructorError(
None,
None,
- 'failed to construct timestamp from "{}"'.format(node.value),
+ f'failed to construct timestamp from "{node.value}"',
node.start_mark,
)
values = match.groupdict()
@@ -1774,9 +1649,15 @@ class RoundTripConstructor(SafeConstructor):
if values['tz_sign'] == '-':
delta = -delta
             # should check for None and solve issue 366; should be tzinfo=delta
- data = TimeStamp(
- dd.year, dd.month, dd.day, dd.hour, dd.minute, dd.second, dd.microsecond
- )
+            # isinstance(datetime.datetime.now(), datetime.date) is true
+ if isinstance(dd, datetime.datetime):
+ data = TimeStamp(
+ dd.year, dd.month, dd.day, dd.hour, dd.minute, dd.second, dd.microsecond
+ )
+ else:
+ # ToDo: make this into a DateStamp?
+ data = TimeStamp(dd.year, dd.month, dd.day, 0, 0, 0, 0)
+ return data
if delta:
data._yaml['delta'] = delta
tz = values['tz_sign'] + values['tz_hour']
@@ -1786,13 +1667,11 @@ class RoundTripConstructor(SafeConstructor):
else:
if values['tz']: # no delta
data._yaml['tz'] = values['tz']
-
if values['t']:
data._yaml['t'] = True
return data
- def construct_yaml_bool(self, node):
- # type: (Any) -> Any
+ def construct_yaml_sbool(self, node: Any) -> Union[bool, ScalarBoolean]:
b = SafeConstructor.construct_yaml_bool(self, node)
if node.anchor:
return ScalarBoolean(b, anchor=node.anchor)
@@ -1804,7 +1683,7 @@ RoundTripConstructor.add_constructor(
)
RoundTripConstructor.add_constructor(
- 'tag:yaml.org,2002:bool', RoundTripConstructor.construct_yaml_bool
+ 'tag:yaml.org,2002:bool', RoundTripConstructor.construct_yaml_sbool
)
RoundTripConstructor.add_constructor(
@@ -1847,4 +1726,4 @@ RoundTripConstructor.add_constructor(
'tag:yaml.org,2002:map', RoundTripConstructor.construct_yaml_map
)
-RoundTripConstructor.add_constructor(None, RoundTripConstructor.construct_undefined)
+RoundTripConstructor.add_constructor(None, RoundTripConstructor.construct_unknown)
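
A minimal sketch (not part of the patch) of the case the value_so.find('.') change
above addresses: an explicitly tagged float without a dot, loaded and dumped through
the package's round-trip YAML() API.

    import io
    import ruamel.yaml

    yaml = ruamel.yaml.YAML()            # round-trip loader/dumper
    data = yaml.load('x: !!float 42\n')  # explicitly tagged float, no dot in the scalar
    buf = io.StringIO()
    yaml.dump(data, buf)
    # the old value_so.index('.') raised ValueError on this input;
    # with find('.') the value loads and should survive the round trip
    print(buf.getvalue())
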
diff --git a/cyaml.py b/cyaml.py
index 0ab2828..09d6480 100644
--- a/cyaml.py
+++ b/cyaml.py
@@ -6,9 +6,9 @@ from ruamel.yaml.constructor import Constructor, BaseConstructor, SafeConstructo
from ruamel.yaml.representer import Representer, SafeRepresenter, BaseRepresenter
from ruamel.yaml.resolver import Resolver, BaseResolver
-if False: # MYPY
- from typing import Any, Union, Optional # NOQA
- from ruamel.yaml.compat import StreamTextType, StreamType, VersionType # NOQA
+
+from typing import Any, Union, Optional # NOQA
+from ruamel.yaml.compat import StreamTextType, StreamType, VersionType # NOQA
__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader', 'CBaseDumper', 'CSafeDumper', 'CDumper']
@@ -18,8 +18,12 @@ __all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader', 'CBaseDumper', 'CSafeDumper'
class CBaseLoader(CParser, BaseConstructor, BaseResolver): # type: ignore
- def __init__(self, stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
+ def __init__(
+ self,
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
CParser.__init__(self, stream)
self._parser = self._composer = self
BaseConstructor.__init__(self, loader=self)
@@ -30,8 +34,12 @@ class CBaseLoader(CParser, BaseConstructor, BaseResolver): # type: ignore
class CSafeLoader(CParser, SafeConstructor, Resolver): # type: ignore
- def __init__(self, stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
+ def __init__(
+ self,
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
CParser.__init__(self, stream)
self._parser = self._composer = self
SafeConstructor.__init__(self, loader=self)
@@ -42,8 +50,12 @@ class CSafeLoader(CParser, SafeConstructor, Resolver): # type: ignore
class CLoader(CParser, Constructor, Resolver): # type: ignore
- def __init__(self, stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
+ def __init__(
+ self,
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
CParser.__init__(self, stream)
self._parser = self._composer = self
Constructor.__init__(self, loader=self)
@@ -55,25 +67,25 @@ class CLoader(CParser, Constructor, Resolver): # type: ignore
class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): # type: ignore
def __init__(
- self,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ self: StreamType,
+ stream: Any,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
CEmitter.__init__(
self,
stream,
@@ -100,25 +112,25 @@ class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): # type: ignore
class CSafeDumper(CEmitter, SafeRepresenter, Resolver): # type: ignore
def __init__(
- self,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ self: StreamType,
+ stream: Any,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
self._emitter = self._serializer = self._representer = self
CEmitter.__init__(
self,
@@ -143,25 +155,25 @@ class CSafeDumper(CEmitter, SafeRepresenter, Resolver): # type: ignore
class CDumper(CEmitter, Representer, Resolver): # type: ignore
def __init__(
- self,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ self: StreamType,
+ stream: Any,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
CEmitter.__init__(
self,
stream,
diff --git a/dumper.py b/dumper.py
index 7e9bf01..e6457a6 100644
--- a/dumper.py
+++ b/dumper.py
@@ -10,34 +10,33 @@ from ruamel.yaml.representer import (
)
from ruamel.yaml.resolver import Resolver, BaseResolver, VersionedResolver
-if False: # MYPY
- from typing import Any, Dict, List, Union, Optional # NOQA
- from ruamel.yaml.compat import StreamType, VersionType # NOQA
+from typing import Any, Dict, List, Union, Optional # NOQA
+from ruamel.yaml.compat import StreamType, VersionType # NOQA
__all__ = ['BaseDumper', 'SafeDumper', 'Dumper', 'RoundTripDumper']
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
def __init__(
- self,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (Any, StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ self: Any,
+ stream: StreamType,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
Emitter.__init__(
self,
stream,
@@ -70,24 +69,24 @@ class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
def __init__(
self,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ stream: StreamType,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
Emitter.__init__(
self,
stream,
@@ -120,24 +119,24 @@ class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
class Dumper(Emitter, Serializer, Representer, Resolver):
def __init__(
self,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ stream: StreamType,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
Emitter.__init__(
self,
stream,
@@ -170,24 +169,24 @@ class Dumper(Emitter, Serializer, Representer, Resolver):
class RoundTripDumper(Emitter, Serializer, RoundTripRepresenter, VersionedResolver):
def __init__(
self,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (StreamType, Any, Optional[bool], Optional[int], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ stream: StreamType,
+ default_style: Any = None,
+ default_flow_style: Optional[bool] = None,
+ canonical: Optional[int] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
Emitter.__init__(
self,
stream,
diff --git a/emitter.py b/emitter.py
index f9611ee..72ea661 100644
--- a/emitter.py
+++ b/emitter.py
@@ -12,13 +12,13 @@ from ruamel.yaml.error import YAMLError, YAMLStreamError
from ruamel.yaml.events import * # NOQA
# fmt: off
-from ruamel.yaml.compat import _F, nprint, dbg, DBG_EVENT, \
+from ruamel.yaml.compat import nprint, dbg, DBG_EVENT, \
check_anchorname_char, nprintf # NOQA
# fmt: on
-if False: # MYPY
- from typing import Any, Dict, List, Union, Text, Tuple, Optional # NOQA
- from ruamel.yaml.compat import StreamType # NOQA
+
+from typing import Any, Dict, List, Union, Text, Tuple, Optional # NOQA
+from ruamel.yaml.compat import StreamType # NOQA
__all__ = ['Emitter', 'EmitterError']
@@ -30,16 +30,15 @@ class EmitterError(YAMLError):
class ScalarAnalysis:
def __init__(
self,
- scalar,
- empty,
- multiline,
- allow_flow_plain,
- allow_block_plain,
- allow_single_quoted,
- allow_double_quoted,
- allow_block,
- ):
- # type: (Any, Any, Any, bool, bool, bool, bool, bool) -> None
+ scalar: Any,
+ empty: Any,
+ multiline: Any,
+ allow_flow_plain: bool,
+ allow_block_plain: bool,
+ allow_single_quoted: bool,
+ allow_double_quoted: bool,
+ allow_block: bool,
+ ) -> None:
self.scalar = scalar
self.empty = empty
self.multiline = multiline
@@ -52,20 +51,16 @@ class ScalarAnalysis:
class Indents:
# replacement for the list based stack of None/int
- def __init__(self):
- # type: () -> None
- self.values = [] # type: List[Tuple[Any, bool]]
+ def __init__(self) -> None:
+ self.values: List[Tuple[Any, bool]] = []
- def append(self, val, seq):
- # type: (Any, Any) -> None
+ def append(self, val: Any, seq: Any) -> None:
self.values.append((val, seq))
- def pop(self):
- # type: () -> Any
+ def pop(self) -> Any:
return self.values.pop()[0]
- def last_seq(self):
- # type: () -> bool
+ def last_seq(self) -> bool:
# return the seq(uence) value for the element added before the last one
# in increase_indent()
try:
@@ -73,8 +68,9 @@ class Indents:
except IndexError:
return False
- def seq_flow_align(self, seq_indent, column, pre_comment=False):
- # type: (int, int, Optional[bool]) -> int
+ def seq_flow_align(
+ self, seq_indent: int, column: int, pre_comment: Optional[bool] = False
+ ) -> int:
# extra spaces because of dash
# nprint('seq_flow_align', self.values, pre_comment)
if len(self.values) < 2 or not self.values[-1][1]:
@@ -87,8 +83,7 @@ class Indents:
# -1 for the dash
return base + seq_indent - column - 1 # type: ignore
- def __len__(self):
- # type: () -> int
+ def __len__(self) -> int:
return len(self.values)
@@ -104,44 +99,44 @@ class Emitter:
def __init__(
self,
- stream,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- brace_single_entry_mapping_in_flow_sequence=None,
- dumper=None,
- ):
- # type: (StreamType, Any, Optional[int], Optional[int], Optional[bool], Any, Optional[int], Optional[bool], Any, Optional[bool], Any) -> None # NOQA
+ stream: StreamType,
+ canonical: Any = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ block_seq_indent: Optional[int] = None,
+ top_level_colon_align: Optional[bool] = None,
+ prefix_colon: Any = None,
+ brace_single_entry_mapping_in_flow_sequence: Optional[bool] = None,
+ dumper: Any = None,
+ ) -> None:
+ # NOQA
self.dumper = dumper
if self.dumper is not None and getattr(self.dumper, '_emitter', None) is None:
self.dumper._emitter = self
self.stream = stream
         # Encoding can be overridden by STREAM-START.
- self.encoding = None # type: Optional[Text]
+ self.encoding: Optional[Text] = None
self.allow_space_break = None
# Emitter is a state machine with a stack of states to handle nested
# structures.
- self.states = [] # type: List[Any]
- self.state = self.expect_stream_start # type: Any
+ self.states: List[Any] = []
+ self.state: Any = self.expect_stream_start
# Current event and the event queue.
- self.events = [] # type: List[Any]
- self.event = None # type: Any
+ self.events: List[Any] = []
+ self.event: Any = None
# The current indentation level and the stack of previous indents.
self.indents = Indents()
- self.indent = None # type: Optional[int]
+ self.indent: Optional[int] = None
# flow_context is an expanding/shrinking list consisting of '{' and '['
         # for each unclosed flow context. An empty list means block context
- self.flow_context = [] # type: List[Text]
+ self.flow_context: List[Text] = []
# Contexts.
self.root_context = False
@@ -161,7 +156,7 @@ class Emitter:
self.compact_seq_seq = True # dash after dash
self.compact_seq_map = True # key after dash
         # self.compact_ms = False   # dash after key, only when explicit key with ?
- self.no_newline = None # type: Optional[bool] # set if directly after `- `
+ self.no_newline: Optional[bool] = None # set if directly after `- `
# Whether the document requires an explicit document end indicator
self.open_ended = False
@@ -191,36 +186,34 @@ class Emitter:
self.best_width = 80
if width and width > self.best_sequence_indent * 2:
self.best_width = width
- self.best_line_break = '\n' # type: Any
+ self.best_line_break: Any = '\n'
if line_break in ['\r', '\n', '\r\n']:
self.best_line_break = line_break
# Tag prefixes.
- self.tag_prefixes = None # type: Any
+ self.tag_prefixes: Any = None
# Prepared anchor and tag.
- self.prepared_anchor = None # type: Any
- self.prepared_tag = None # type: Any
+ self.prepared_anchor: Any = None
+ self.prepared_tag: Any = None
# Scalar analysis and style.
- self.analysis = None # type: Any
- self.style = None # type: Any
+ self.analysis: Any = None
+ self.style: Any = None
self.scalar_after_indicator = True # write a scalar on the same line as `---`
self.alt_null = 'null'
@property
- def stream(self):
- # type: () -> Any
+ def stream(self) -> Any:
try:
return self._stream
except AttributeError:
             raise YAMLStreamError('output stream needs to be specified')
@stream.setter
- def stream(self, val):
- # type: (Any) -> None
+ def stream(self, val: Any) -> None:
if val is None:
return
if not hasattr(val, 'write'):
@@ -228,8 +221,7 @@ class Emitter:
self._stream = val
@property
- def serializer(self):
- # type: () -> Any
+ def serializer(self) -> Any:
try:
if hasattr(self.dumper, 'typ'):
return self.dumper.serializer
@@ -238,18 +230,15 @@ class Emitter:
return self # cyaml
@property
- def flow_level(self):
- # type: () -> int
+ def flow_level(self) -> int:
return len(self.flow_context)
- def dispose(self):
- # type: () -> None
+ def dispose(self) -> None:
# Reset the state attributes (to clear self-references)
self.states = []
self.state = None
- def emit(self, event):
- # type: (Any) -> None
+ def emit(self, event: Any) -> None:
if dbg(DBG_EVENT):
nprint(event)
self.events.append(event)
@@ -260,8 +249,7 @@ class Emitter:
# In some cases, we wait for a few next events before emitting.
- def need_more_events(self):
- # type: () -> bool
+ def need_more_events(self) -> bool:
if not self.events:
return True
event = self.events[0]
@@ -274,8 +262,7 @@ class Emitter:
else:
return False
- def need_events(self, count):
- # type: (int) -> bool
+ def need_events(self, count: int) -> bool:
level = 0
for event in self.events[1:]:
if isinstance(event, (DocumentStartEvent, CollectionStartEvent)):
@@ -288,8 +275,9 @@ class Emitter:
return False
return len(self.events) < count + 1
- def increase_indent(self, flow=False, sequence=None, indentless=False):
- # type: (bool, Optional[bool], bool) -> None
+ def increase_indent(
+ self, flow: bool = False, sequence: Optional[bool] = None, indentless: bool = False
+ ) -> None:
self.indents.append(self.indent, sequence)
if self.indent is None: # top level
if flow:
@@ -315,32 +303,24 @@ class Emitter:
# Stream handlers.
- def expect_stream_start(self):
- # type: () -> None
+ def expect_stream_start(self) -> None:
if isinstance(self.event, StreamStartEvent):
if self.event.encoding and not hasattr(self.stream, 'encoding'):
self.encoding = self.event.encoding
self.write_stream_start()
self.state = self.expect_first_document_start
else:
- raise EmitterError(
- _F('expected StreamStartEvent, but got {self_event!s}', self_event=self.event)
- )
+ raise EmitterError(f'expected StreamStartEvent, but got {self.event!s}')
- def expect_nothing(self):
- # type: () -> None
- raise EmitterError(
- _F('expected nothing, but got {self_event!s}', self_event=self.event)
- )
+ def expect_nothing(self) -> None:
+ raise EmitterError(f'expected nothing, but got {self.event!s}')
# Document handlers.
- def expect_first_document_start(self):
- # type: () -> Any
+ def expect_first_document_start(self) -> Any:
return self.expect_document_start(first=True)
- def expect_document_start(self, first=False):
- # type: (bool) -> None
+ def expect_document_start(self, first: bool = False) -> None:
if isinstance(self.event, DocumentStartEvent):
if (self.event.version or self.event.tags) and self.open_ended:
self.write_indicator('...', True)
@@ -378,15 +358,9 @@ class Emitter:
self.write_stream_end()
self.state = self.expect_nothing
else:
- raise EmitterError(
- _F(
- 'expected DocumentStartEvent, but got {self_event!s}',
- self_event=self.event,
- )
- )
+ raise EmitterError(f'expected DocumentStartEvent, but got {self.event!s}')
- def expect_document_end(self):
- # type: () -> None
+ def expect_document_end(self) -> None:
if isinstance(self.event, DocumentEndEvent):
self.write_indent()
if self.event.explicit:
@@ -395,19 +369,21 @@ class Emitter:
self.flush_stream()
self.state = self.expect_document_start
else:
- raise EmitterError(
- _F('expected DocumentEndEvent, but got {self_event!s}', self_event=self.event)
- )
+ raise EmitterError(f'expected DocumentEndEvent, but got {self.event!s}')
- def expect_document_root(self):
- # type: () -> None
+ def expect_document_root(self) -> None:
self.states.append(self.expect_document_end)
self.expect_node(root=True)
# Node handlers.
- def expect_node(self, root=False, sequence=False, mapping=False, simple_key=False):
- # type: (bool, bool, bool, bool) -> None
+ def expect_node(
+ self,
+ root: bool = False,
+ sequence: bool = False,
+ mapping: bool = False,
+ simple_key: bool = False,
+ ) -> None:
self.root_context = root
self.sequence_context = sequence # not used in PyYAML
force_flow_indent = False
@@ -472,24 +448,21 @@ class Emitter:
or self.event.flow_style
or self.check_empty_mapping()
):
- self.expect_flow_mapping(single=self.event.nr_items == 1,
- force_flow_indent=force_flow_indent)
+ self.expect_flow_mapping(
+ single=self.event.nr_items == 1, force_flow_indent=force_flow_indent
+ )
else:
self.expect_block_mapping()
else:
- raise EmitterError(
- _F('expected NodeEvent, but got {self_event!s}', self_event=self.event)
- )
+            raise EmitterError(f'expected NodeEvent, but got {self.event!s}')
- def expect_alias(self):
- # type: () -> None
+ def expect_alias(self) -> None:
if self.event.anchor is None:
raise EmitterError('anchor is not specified for alias')
self.process_anchor('*')
self.state = self.states.pop()
- def expect_scalar(self):
- # type: () -> None
+ def expect_scalar(self) -> None:
self.increase_indent(flow=True)
self.process_scalar()
self.indent = self.indents.pop()
@@ -497,20 +470,19 @@ class Emitter:
# Flow sequence handlers.
- def expect_flow_sequence(self, force_flow_indent=False):
- # type: (Optional[bool]) -> None
+ def expect_flow_sequence(self, force_flow_indent: Optional[bool] = False) -> None:
if force_flow_indent:
self.increase_indent(flow=True, sequence=True)
- ind = self.indents.seq_flow_align(self.best_sequence_indent, self.column,
- force_flow_indent)
+ ind = self.indents.seq_flow_align(
+ self.best_sequence_indent, self.column, force_flow_indent
+ )
self.write_indicator(' ' * ind + '[', True, whitespace=True)
if not force_flow_indent:
self.increase_indent(flow=True, sequence=True)
self.flow_context.append('[')
self.state = self.expect_first_flow_sequence_item
- def expect_first_flow_sequence_item(self):
- # type: () -> None
+ def expect_first_flow_sequence_item(self) -> None:
if isinstance(self.event, SequenceEndEvent):
self.indent = self.indents.pop()
popped = self.flow_context.pop()
@@ -528,8 +500,7 @@ class Emitter:
self.states.append(self.expect_flow_sequence_item)
self.expect_node(sequence=True)
- def expect_flow_sequence_item(self):
- # type: () -> None
+ def expect_flow_sequence_item(self) -> None:
if isinstance(self.event, SequenceEndEvent):
self.indent = self.indents.pop()
popped = self.flow_context.pop()
@@ -553,12 +524,14 @@ class Emitter:
# Flow mapping handlers.
- def expect_flow_mapping(self, single=False, force_flow_indent=False):
- # type: (Optional[bool], Optional[bool]) -> None
+ def expect_flow_mapping(
+ self, single: Optional[bool] = False, force_flow_indent: Optional[bool] = False
+ ) -> None:
if force_flow_indent:
self.increase_indent(flow=True, sequence=False)
- ind = self.indents.seq_flow_align(self.best_sequence_indent, self.column,
- force_flow_indent)
+ ind = self.indents.seq_flow_align(
+ self.best_sequence_indent, self.column, force_flow_indent
+ )
map_init = '{'
if (
single
@@ -575,8 +548,7 @@ class Emitter:
self.increase_indent(flow=True, sequence=False)
self.state = self.expect_first_flow_mapping_key
- def expect_first_flow_mapping_key(self):
- # type: () -> None
+ def expect_first_flow_mapping_key(self) -> None:
if isinstance(self.event, MappingEndEvent):
self.indent = self.indents.pop()
popped = self.flow_context.pop()
@@ -599,8 +571,7 @@ class Emitter:
self.states.append(self.expect_flow_mapping_value)
self.expect_node(mapping=True)
- def expect_flow_mapping_key(self):
- # type: () -> None
+ def expect_flow_mapping_key(self) -> None:
if isinstance(self.event, MappingEndEvent):
# if self.event.comment and self.event.comment[1]:
# self.write_pre_comment(self.event)
@@ -630,14 +601,12 @@ class Emitter:
self.states.append(self.expect_flow_mapping_value)
self.expect_node(mapping=True)
- def expect_flow_mapping_simple_value(self):
- # type: () -> None
+ def expect_flow_mapping_simple_value(self) -> None:
self.write_indicator(self.prefixed_colon, False)
self.states.append(self.expect_flow_mapping_key)
self.expect_node(mapping=True)
- def expect_flow_mapping_value(self):
- # type: () -> None
+ def expect_flow_mapping_value(self) -> None:
if self.canonical or self.column > self.best_width:
self.write_indent()
self.write_indicator(self.prefixed_colon, True)
@@ -646,8 +615,7 @@ class Emitter:
# Block sequence handlers.
- def expect_block_sequence(self):
- # type: () -> None
+ def expect_block_sequence(self) -> None:
if self.mapping_context:
indentless = not self.indention
else:
@@ -657,12 +625,10 @@ class Emitter:
self.increase_indent(flow=False, sequence=True, indentless=indentless)
self.state = self.expect_first_block_sequence_item
- def expect_first_block_sequence_item(self):
- # type: () -> Any
+ def expect_first_block_sequence_item(self) -> Any:
return self.expect_block_sequence_item(first=True)
- def expect_block_sequence_item(self, first=False):
- # type: (bool) -> None
+ def expect_block_sequence_item(self, first: bool = False) -> None:
if not first and isinstance(self.event, SequenceEndEvent):
if self.event.comment and self.event.comment[1]:
# final comments on a block list e.g. empty line
@@ -684,19 +650,16 @@ class Emitter:
# Block mapping handlers.
- def expect_block_mapping(self):
- # type: () -> None
+ def expect_block_mapping(self) -> None:
if not self.mapping_context and not (self.compact_seq_map or self.column == 0):
self.write_line_break()
self.increase_indent(flow=False, sequence=False)
self.state = self.expect_first_block_mapping_key
- def expect_first_block_mapping_key(self):
- # type: () -> None
+ def expect_first_block_mapping_key(self) -> None:
return self.expect_block_mapping_key(first=True)
- def expect_block_mapping_key(self, first=False):
- # type: (Any) -> None
+ def expect_block_mapping_key(self, first: Any = False) -> None:
if not first and isinstance(self.event, MappingEndEvent):
if self.event.comment and self.event.comment[1]:
# final comments from a doc
@@ -727,8 +690,7 @@ class Emitter:
self.states.append(self.expect_block_mapping_value)
self.expect_node(mapping=True)
- def expect_block_mapping_simple_value(self):
- # type: () -> None
+ def expect_block_mapping_simple_value(self) -> None:
if getattr(self.event, 'style', None) != '?':
# prefix = ''
if self.indent == 0 and self.top_level_colon_align is not None:
@@ -740,8 +702,7 @@ class Emitter:
self.states.append(self.expect_block_mapping_key)
self.expect_node(mapping=True)
- def expect_block_mapping_value(self):
- # type: () -> None
+ def expect_block_mapping_value(self) -> None:
self.write_indent()
self.write_indicator(self.prefixed_colon, True, indention=True)
self.states.append(self.expect_block_mapping_key)
@@ -749,24 +710,21 @@ class Emitter:
# Checkers.
- def check_empty_sequence(self):
- # type: () -> bool
+ def check_empty_sequence(self) -> bool:
return (
isinstance(self.event, SequenceStartEvent)
and bool(self.events)
and isinstance(self.events[0], SequenceEndEvent)
)
- def check_empty_mapping(self):
- # type: () -> bool
+ def check_empty_mapping(self) -> bool:
return (
isinstance(self.event, MappingStartEvent)
and bool(self.events)
and isinstance(self.events[0], MappingEndEvent)
)
- def check_empty_document(self):
- # type: () -> bool
+ def check_empty_document(self) -> bool:
if not isinstance(self.event, DocumentStartEvent) or not self.events:
return False
event = self.events[0]
@@ -778,8 +736,7 @@ class Emitter:
and event.value == ""
)
- def check_simple_key(self):
- # type: () -> bool
+ def check_simple_key(self) -> bool:
length = 0
if isinstance(self.event, NodeEvent) and self.event.anchor is not None:
if self.prepared_anchor is None:
@@ -812,8 +769,7 @@ class Emitter:
# Anchor, Tag, and Scalar processors.
- def process_anchor(self, indicator):
- # type: (Any) -> bool
+ def process_anchor(self, indicator: Any) -> bool:
if self.event.anchor is None:
self.prepared_anchor = None
return False
@@ -826,8 +782,7 @@ class Emitter:
self.prepared_anchor = None
return True
- def process_tag(self):
- # type: () -> None
+ def process_tag(self) -> None:
tag = self.event.tag
if isinstance(self.event, ScalarEvent):
if self.style is None:
@@ -868,8 +823,7 @@ class Emitter:
self.no_newline = True
self.prepared_tag = None
- def choose_scalar_style(self):
- # type: () -> Any
+ def choose_scalar_style(self) -> Any:
if self.analysis is None:
self.analysis = self.analyze_scalar(self.event.value)
if self.event.style == '"' or self.canonical:
@@ -903,8 +857,7 @@ class Emitter:
return "'"
return '"'
- def process_scalar(self):
- # type: () -> None
+ def process_scalar(self) -> None:
if self.analysis is None:
self.analysis = self.analyze_scalar(self.event.value)
if self.style is None:
@@ -921,7 +874,11 @@ class Emitter:
elif self.style == "'":
self.write_single_quoted(self.analysis.scalar, split)
elif self.style == '>':
- self.write_folded(self.analysis.scalar)
+ try:
+ cmx = self.event.comment[1][0]
+ except (IndexError, TypeError):
+ cmx = ""
+ self.write_folded(self.analysis.scalar, cmx)
if (
self.event.comment
and self.event.comment[0]
@@ -952,39 +909,26 @@ class Emitter:
# Analyzers.
- def prepare_version(self, version):
- # type: (Any) -> Any
+ def prepare_version(self, version: Any) -> Any:
major, minor = version
if major != 1:
- raise EmitterError(
- _F('unsupported YAML version: {major:d}.{minor:d}', major=major, minor=minor)
- )
- return _F('{major:d}.{minor:d}', major=major, minor=minor)
+ raise EmitterError(f'unsupported YAML version: {major:d}.{minor:d}')
+ return f'{major:d}.{minor:d}'
- def prepare_tag_handle(self, handle):
- # type: (Any) -> Any
+ def prepare_tag_handle(self, handle: Any) -> Any:
if not handle:
raise EmitterError('tag handle must not be empty')
if handle[0] != '!' or handle[-1] != '!':
- raise EmitterError(
- _F("tag handle must start and end with '!': {handle!r}", handle=handle)
- )
+ raise EmitterError(f"tag handle must start and end with '!': {handle!r}")
for ch in handle[1:-1]:
if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' or ch in '-_'):
- raise EmitterError(
- _F(
- 'invalid character {ch!r} in the tag handle: {handle!r}',
- ch=ch,
- handle=handle,
- )
- )
+ raise EmitterError(f'invalid character {ch!r} in the tag handle: {handle!r}')
return handle
- def prepare_tag_prefix(self, prefix):
- # type: (Any) -> Any
+ def prepare_tag_prefix(self, prefix: Any) -> Any:
if not prefix:
raise EmitterError('tag prefix must not be empty')
- chunks = [] # type: List[Any]
+ chunks: List[Any] = []
start = end = 0
if prefix[0] == '!':
end = 1
@@ -1003,13 +947,12 @@ class Emitter:
start = end = end + 1
data = ch
for ch in data:
- chunks.append(_F('%{ord_ch:02X}', ord_ch=ord(ch)))
+ chunks.append(f'%{ord(ch):02X}')
if start < end:
chunks.append(prefix[start:end])
return "".join(chunks)
- def prepare_tag(self, tag):
- # type: (Any) -> Any
+ def prepare_tag(self, tag: Any) -> Any:
if not tag:
raise EmitterError('tag must not be empty')
if tag == '!':
@@ -1021,7 +964,7 @@ class Emitter:
if tag.startswith(prefix) and (prefix == '!' or len(prefix) < len(tag)):
handle = self.tag_prefixes[prefix]
suffix = tag[len(prefix) :]
- chunks = [] # type: List[Any]
+ chunks: List[Any] = []
start = end = 0
ch_set = "-;/?:@&=+$,_.~*'()[]"
if self.dumper:
@@ -1044,32 +987,24 @@ class Emitter:
start = end = end + 1
data = ch
for ch in data:
- chunks.append(_F('%{ord_ch:02X}', ord_ch=ord(ch)))
+ chunks.append(f'%{ord(ch):02X}')
if start < end:
chunks.append(suffix[start:end])
suffix_text = "".join(chunks)
if handle:
- return _F('{handle!s}{suffix_text!s}', handle=handle, suffix_text=suffix_text)
+ return f'{handle!s}{suffix_text!s}'
else:
- return _F('!<{suffix_text!s}>', suffix_text=suffix_text)
+ return f'!<{suffix_text!s}>'
- def prepare_anchor(self, anchor):
- # type: (Any) -> Any
+ def prepare_anchor(self, anchor: Any) -> Any:
if not anchor:
raise EmitterError('anchor must not be empty')
for ch in anchor:
if not check_anchorname_char(ch):
- raise EmitterError(
- _F(
- 'invalid character {ch!r} in the anchor: {anchor!r}',
- ch=ch,
- anchor=anchor,
- )
- )
+ raise EmitterError(f'invalid character {ch!r} in the anchor: {anchor!r}')
return anchor
- def analyze_scalar(self, scalar):
- # type: (Any) -> Any
+ def analyze_scalar(self, scalar: Any) -> Any:
# Empty scalar is a special case.
if not scalar:
return ScalarAnalysis(
@@ -1249,23 +1184,25 @@ class Emitter:
# Writers.
- def flush_stream(self):
- # type: () -> None
+ def flush_stream(self) -> None:
if hasattr(self.stream, 'flush'):
self.stream.flush()
- def write_stream_start(self):
- # type: () -> None
+ def write_stream_start(self) -> None:
# Write BOM if needed.
if self.encoding and self.encoding.startswith('utf-16'):
self.stream.write('\uFEFF'.encode(self.encoding))
- def write_stream_end(self):
- # type: () -> None
+ def write_stream_end(self) -> None:
self.flush_stream()
- def write_indicator(self, indicator, need_whitespace, whitespace=False, indention=False):
- # type: (Any, Any, bool, bool) -> None
+ def write_indicator(
+ self,
+ indicator: Any,
+ need_whitespace: Any,
+ whitespace: bool = False,
+ indention: bool = False,
+ ) -> None:
if self.whitespace or not need_whitespace:
data = indicator
else:
@@ -1278,8 +1215,7 @@ class Emitter:
data = data.encode(self.encoding)
self.stream.write(data)
- def write_indent(self):
- # type: () -> None
+ def write_indent(self) -> None:
indent = self.indent or 0
if (
not self.indention
@@ -1298,8 +1234,7 @@ class Emitter:
data = data.encode(self.encoding) # type: ignore
self.stream.write(data)
- def write_line_break(self, data=None):
- # type: (Any) -> None
+ def write_line_break(self, data: Any = None) -> None:
if data is None:
data = self.best_line_break
self.whitespace = True
@@ -1310,21 +1245,15 @@ class Emitter:
data = data.encode(self.encoding)
self.stream.write(data)
- def write_version_directive(self, version_text):
- # type: (Any) -> None
- data = _F('%YAML {version_text!s}', version_text=version_text)
+ def write_version_directive(self, version_text: Any) -> None:
+ data: Any = f'%YAML {version_text!s}'
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
self.write_line_break()
- def write_tag_directive(self, handle_text, prefix_text):
- # type: (Any, Any) -> None
- data = _F(
- '%TAG {handle_text!s} {prefix_text!s}',
- handle_text=handle_text,
- prefix_text=prefix_text,
- )
+ def write_tag_directive(self, handle_text: Any, prefix_text: Any) -> None:
+ data: Any = f'%TAG {handle_text!s} {prefix_text!s}'
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
@@ -1332,8 +1261,7 @@ class Emitter:
# Scalar streams.
- def write_single_quoted(self, text, split=True):
- # type: (Any, Any) -> None
+ def write_single_quoted(self, text: Any, split: Any = True) -> None:
if self.root_context:
if self.requested_indent is not None:
self.write_line_break()
@@ -1415,8 +1343,7 @@ class Emitter:
'\u2029': 'P',
}
- def write_double_quoted(self, text, split=True):
- # type: (Any, Any) -> None
+ def write_double_quoted(self, text: Any, split: Any = True) -> None:
if self.root_context:
if self.requested_indent is not None:
self.write_line_break()
@@ -1450,11 +1377,11 @@ class Emitter:
if ch in self.ESCAPE_REPLACEMENTS:
data = '\\' + self.ESCAPE_REPLACEMENTS[ch]
elif ch <= '\xFF':
- data = _F('\\x{ord_ch:02X}', ord_ch=ord(ch))
+ data = f'\\x{ord(ch):02X}'
elif ch <= '\uFFFF':
- data = _F('\\u{ord_ch:04X}', ord_ch=ord(ch))
+ data = f'\\u{ord(ch):04X}'
else:
- data = _F('\\U{ord_ch:08X}', ord_ch=ord(ch))
+ data = f'\\U{ord(ch):08X}'
self.column += len(data)
if bool(self.encoding):
data = data.encode(self.encoding)
@@ -1485,14 +1412,13 @@ class Emitter:
end += 1
self.write_indicator('"', False)
- def determine_block_hints(self, text):
- # type: (Any) -> Any
+ def determine_block_hints(self, text: Any) -> Any:
indent = 0
indicator = ''
hints = ''
if text:
if text[0] in ' \n\x85\u2028\u2029':
- indent = self.best_sequence_indent
+ indent = 2
hints += str(indent)
elif self.root_context:
for end in ['\n---', '\n...']:
@@ -1510,7 +1436,7 @@ class Emitter:
if pos > -1:
break
if pos > 0:
- indent = self.best_sequence_indent
+ indent = 2
if text[-1] not in '\n\x85\u2028\u2029':
indicator = '-'
elif len(text) == 1 or text[-2] in '\n\x85\u2028\u2029':
@@ -1518,10 +1444,11 @@ class Emitter:
hints += indicator
return hints, indent, indicator
- def write_folded(self, text):
- # type: (Any) -> None
+ def write_folded(self, text: Any, comment: Any) -> None:
hints, _indent, _indicator = self.determine_block_hints(text)
- self.write_indicator('>' + hints, True)
+ if not isinstance(comment, str):
+ comment = ''
+ self.write_indicator('>' + hints + comment, True)
if _indicator == '+':
self.open_ended = True
self.write_line_break()
@@ -1584,8 +1511,7 @@ class Emitter:
spaces = ch == ' '
end += 1
- def write_literal(self, text, comment=None):
- # type: (Any, Any) -> None
+ def write_literal(self, text: Any, comment: Any = None) -> None:
hints, _indent, _indicator = self.determine_block_hints(text)
# if comment is not None:
# try:
@@ -1638,8 +1564,7 @@ class Emitter:
breaks = ch in '\n\x85\u2028\u2029'
end += 1
- def write_plain(self, text, split=True):
- # type: (Any, Any) -> None
+ def write_plain(self, text: Any, split: Any = True) -> None:
if self.root_context:
if self.requested_indent is not None:
self.write_line_break()
@@ -1693,6 +1618,10 @@ class Emitter:
else:
if ch is None or ch in ' \n\x85\u2028\u2029':
data = text[start:end]
+ if len(data) > self.best_width and \
+ self.column > self.indent: # type: ignore
+ # words longer than line length get a line of their own
+ self.write_indent()
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding) # type: ignore
@@ -1707,10 +1636,9 @@ class Emitter:
breaks = ch in '\n\x85\u2028\u2029'
end += 1
- def write_comment(self, comment, pre=False):
- # type: (Any, bool) -> None
+ def write_comment(self, comment: Any, pre: bool = False) -> None:
value = comment.value
- # nprintf('{:02d} {:02d} {!r}'.format(self.column, comment.start_mark.column, value))
+ # nprintf(f'{self.column:02d} {comment.start_mark.column:02d} {value!r}')
if not pre and value[-1] == '\n':
value = value[:-1]
try:
@@ -1743,8 +1671,7 @@ class Emitter:
if not pre:
self.write_line_break()
- def write_pre_comment(self, event):
- # type: (Any) -> bool
+ def write_pre_comment(self, event: Any) -> bool:
comments = event.comment[1]
if comments is None:
return False
@@ -1759,12 +1686,11 @@ class Emitter:
if isinstance(event, start_events):
comment.pre_done = True
except TypeError:
- sys.stdout.write('eventtt {} {}'.format(type(event), event))
+ sys.stdout.write(f'eventtt {type(event)} {event}')
raise
return True
- def write_post_comment(self, event):
- # type: (Any) -> bool
+ def write_post_comment(self, event: Any) -> bool:
if self.event.comment[0] is None:
return False
comment = event.comment[0]
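
A similar sketch (not part of the patch) for the plain-scalar change in write_plain
above, assuming the package's YAML() API and its width attribute; the claim about the
over-long word follows the comment added in the diff.

    import sys
    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    yaml.width = 24                      # deliberately narrow output lines
    data = yaml.load('msg: a few words then averyveryverylongsinglewordtoken at the end\n')
    # a word longer than the configured width is expected to start on a fresh,
    # indented continuation line instead of extending the current one
    yaml.dump(data, sys.stdout)
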
diff --git a/error.py b/error.py
index 30b114a..ccdbf28 100644
--- a/error.py
+++ b/error.py
@@ -3,10 +3,7 @@
import warnings
import textwrap
-from ruamel.yaml.compat import _F
-
-if False: # MYPY
- from typing import Any, Dict, Optional, List, Text # NOQA
+from typing import Any, Dict, Optional, List, Text # NOQA
__all__ = [
@@ -25,33 +22,24 @@ __all__ = [
class StreamMark:
__slots__ = 'name', 'index', 'line', 'column'
- def __init__(self, name, index, line, column):
- # type: (Any, int, int, int) -> None
+ def __init__(self, name: Any, index: int, line: int, column: int) -> None:
self.name = name
self.index = index
self.line = line
self.column = column
- def __str__(self):
- # type: () -> Any
- where = _F(
- ' in "{sname!s}", line {sline1:d}, column {scolumn1:d}',
- sname=self.name,
- sline1=self.line + 1,
- scolumn1=self.column + 1,
- )
+ def __str__(self) -> Any:
+ where = f' in "{self.name!s}", line {self.line + 1:d}, column {self.column + 1:d}'
return where
- def __eq__(self, other):
- # type: (Any) -> bool
+ def __eq__(self, other: Any) -> bool:
if self.line != other.line or self.column != other.column:
return False
if self.name != other.name or self.index != other.index:
return False
return True
- def __ne__(self, other):
- # type: (Any) -> bool
+ def __ne__(self, other: Any) -> bool:
return not self.__eq__(other)
@@ -62,14 +50,14 @@ class FileMark(StreamMark):
class StringMark(StreamMark):
__slots__ = 'name', 'index', 'line', 'column', 'buffer', 'pointer'
- def __init__(self, name, index, line, column, buffer, pointer):
- # type: (Any, int, int, int, Any, Any) -> None
+ def __init__(
+ self, name: Any, index: int, line: int, column: int, buffer: Any, pointer: Any
+ ) -> None:
StreamMark.__init__(self, name, index, line, column)
self.buffer = buffer
self.pointer = pointer
- def get_snippet(self, indent=4, max_length=75):
- # type: (int, int) -> Any
+ def get_snippet(self, indent: int = 4, max_length: int = 75) -> Any:
if self.buffer is None: # always False
return None
head = ""
@@ -90,7 +78,7 @@ class StringMark(StreamMark):
break
snippet = self.buffer[start:end]
caret = '^'
- caret = '^ (line: {})'.format(self.line + 1)
+ caret = f'^ (line: {self.line + 1})'
return (
' ' * indent
+ head
@@ -101,28 +89,16 @@ class StringMark(StreamMark):
+ caret
)
- def __str__(self):
- # type: () -> Any
+ def __str__(self) -> Any:
snippet = self.get_snippet()
- where = _F(
- ' in "{sname!s}", line {sline1:d}, column {scolumn1:d}',
- sname=self.name,
- sline1=self.line + 1,
- scolumn1=self.column + 1,
- )
+ where = f' in "{self.name!s}", line {self.line + 1:d}, column {self.column + 1:d}'
if snippet is not None:
where += ':\n' + snippet
return where
- def __repr__(self):
- # type: () -> Any
+ def __repr__(self) -> Any:
snippet = self.get_snippet()
- where = _F(
- ' in "{sname!s}", line {sline1:d}, column {scolumn1:d}',
- sname=self.name,
- sline1=self.line + 1,
- scolumn1=self.column + 1,
- )
+ where = f' in "{self.name!s}", line {self.line + 1:d}, column {self.column + 1:d}'
if snippet is not None:
where += ':\n' + snippet
return where
@@ -131,8 +107,7 @@ class StringMark(StreamMark):
class CommentMark:
__slots__ = ('column',)
- def __init__(self, column):
- # type: (Any) -> None
+ def __init__(self, column: Any) -> None:
self.column = column
@@ -143,14 +118,13 @@ class YAMLError(Exception):
class MarkedYAMLError(YAMLError):
def __init__(
self,
- context=None,
- context_mark=None,
- problem=None,
- problem_mark=None,
- note=None,
- warn=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any) -> None
+ context: Any = None,
+ context_mark: Any = None,
+ problem: Any = None,
+ problem_mark: Any = None,
+ note: Any = None,
+ warn: Any = None,
+ ) -> None:
self.context = context
self.context_mark = context_mark
self.problem = problem
@@ -158,9 +132,8 @@ class MarkedYAMLError(YAMLError):
self.note = note
# warn is ignored
- def __str__(self):
- # type: () -> Any
- lines = [] # type: List[str]
+ def __str__(self) -> Any:
+ lines: List[str] = []
if self.context is not None:
lines.append(self.context)
if self.context_mark is not None and (
@@ -192,14 +165,13 @@ class YAMLWarning(Warning):
class MarkedYAMLWarning(YAMLWarning):
def __init__(
self,
- context=None,
- context_mark=None,
- problem=None,
- problem_mark=None,
- note=None,
- warn=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any) -> None
+ context: Any = None,
+ context_mark: Any = None,
+ problem: Any = None,
+ problem_mark: Any = None,
+ note: Any = None,
+ warn: Any = None,
+ ) -> None:
self.context = context
self.context_mark = context_mark
self.problem = problem
@@ -207,9 +179,8 @@ class MarkedYAMLWarning(YAMLWarning):
self.note = note
self.warn = warn
- def __str__(self):
- # type: () -> Any
- lines = [] # type: List[str]
+ def __str__(self) -> Any:
+ lines: List[str] = []
if self.context is not None:
lines.append(self.context)
if self.context_mark is not None and (
@@ -254,30 +225,26 @@ warnings.simplefilter('once', UnsafeLoaderWarning)
class MantissaNoDotYAML1_1Warning(YAMLWarning):
- def __init__(self, node, flt_str):
- # type: (Any, Any) -> None
+ def __init__(self, node: Any, flt_str: Any) -> None:
self.node = node
self.flt = flt_str
- def __str__(self):
- # type: () -> Any
+ def __str__(self) -> Any:
line = self.node.start_mark.line
col = self.node.start_mark.column
- return """
+ return f"""
In YAML 1.1 floating point values should have a dot ('.') in their mantissa.
 See the Floating-Point Language-Independent Type for YAML™ Version 1.1 specification
( http://yaml.org/type/float.html ). This dot is not required for JSON nor for YAML 1.2
-Correct your float: "{}" on line: {}, column: {}
+Correct your float: "{self.flt}" on line: {line}, column: {col}
or alternatively include the following in your code:
import warnings
warnings.simplefilter('ignore', ruamel.yaml.error.MantissaNoDotYAML1_1Warning)
-""".format(
- self.flt, line, col
- )
+"""
warnings.simplefilter('once', MantissaNoDotYAML1_1Warning)
@@ -290,14 +257,13 @@ class YAMLFutureWarning(Warning):
class MarkedYAMLFutureWarning(YAMLFutureWarning):
def __init__(
self,
- context=None,
- context_mark=None,
- problem=None,
- problem_mark=None,
- note=None,
- warn=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any) -> None
+ context: Any = None,
+ context_mark: Any = None,
+ problem: Any = None,
+ problem_mark: Any = None,
+ note: Any = None,
+ warn: Any = None,
+ ) -> None:
self.context = context
self.context_mark = context_mark
self.problem = problem
@@ -305,9 +271,8 @@ class MarkedYAMLFutureWarning(YAMLFutureWarning):
self.note = note
self.warn = warn
- def __str__(self):
- # type: () -> Any
- lines = [] # type: List[str]
+ def __str__(self) -> Any:
+ lines: List[str] = []
if self.context is not None:
lines.append(self.context)
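
The warning text above already names the escape hatch. As a minimal sketch (assuming ruamel.yaml is installed; 'input.yaml' is a hypothetical file), suppressing MantissaNoDotYAML1_1Warning at load time looks like this:

import warnings
import ruamel.yaml

# Silence the YAML 1.1 mantissa warning, exactly as the message above suggests.
warnings.simplefilter('ignore', ruamel.yaml.error.MantissaNoDotYAML1_1Warning)

yaml = ruamel.yaml.YAML()
with open('input.yaml') as fp:      # hypothetical input document
    data = yaml.load(fp)
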
diff --git a/events.py b/events.py
index 2a895ff..03f3d9e 100644
--- a/events.py
+++ b/events.py
@@ -1,25 +1,22 @@
# coding: utf-8
-from ruamel.yaml.compat import _F
-
# Abstract classes.
-if False: # MYPY
- from typing import Any, Dict, Optional, List # NOQA
+from typing import Any, Dict, Optional, List # NOQA
SHOW_LINES = False
-def CommentCheck():
- # type: () -> None
+def CommentCheck() -> None:
pass
class Event:
__slots__ = 'start_mark', 'end_mark', 'comment'
- def __init__(self, start_mark=None, end_mark=None, comment=CommentCheck):
- # type: (Any, Any, Any) -> None
+ def __init__(
+ self, start_mark: Any = None, end_mark: Any = None, comment: Any = CommentCheck
+ ) -> None:
self.start_mark = start_mark
self.end_mark = end_mark
# assert comment is not CommentCheck
@@ -27,8 +24,7 @@ class Event:
comment = None
self.comment = comment
- def __repr__(self):
- # type: () -> Any
+ def __repr__(self) -> Any:
if True:
arguments = []
if hasattr(self, 'value'):
@@ -39,17 +35,13 @@ class Event:
for key in ['anchor', 'tag', 'implicit', 'flow_style', 'style']:
v = getattr(self, key, None)
if v is not None:
- arguments.append(_F('{key!s}={v!r}', key=key, v=v))
+ arguments.append(f'{key!s}={v!r}')
if self.comment not in [None, CommentCheck]:
- arguments.append('comment={!r}'.format(self.comment))
+ arguments.append(f'comment={self.comment!r}')
if SHOW_LINES:
arguments.append(
- '({}:{}/{}:{})'.format(
- self.start_mark.line,
- self.start_mark.column,
- self.end_mark.line,
- self.end_mark.column,
- )
+ f'({self.start_mark.line}:{self.start_mark.column}/'
+ f'{self.end_mark.line}:{self.end_mark.column})'
)
arguments = ', '.join(arguments) # type: ignore
else:
@@ -58,23 +50,18 @@ class Event:
for key in ['anchor', 'tag', 'implicit', 'value', 'flow_style', 'style']
if hasattr(self, key)
]
- arguments = ', '.join(
- [_F('{k!s}={attr!r}', k=key, attr=getattr(self, key)) for key in attributes]
- )
+ arguments = ', '.join([f'{key!s}={getattr(self, key)!r}' for key in attributes])
if self.comment not in [None, CommentCheck]:
- arguments += ', comment={!r}'.format(self.comment)
- return _F(
- '{self_class_name!s}({arguments!s})',
- self_class_name=self.__class__.__name__,
- arguments=arguments,
- )
+ arguments += f', comment={self.comment!r}'
+ return f'{self.__class__.__name__!s}({arguments!s})'
class NodeEvent(Event):
__slots__ = ('anchor',)
- def __init__(self, anchor, start_mark=None, end_mark=None, comment=None):
- # type: (Any, Any, Any, Any) -> None
+ def __init__(
+ self, anchor: Any, start_mark: Any = None, end_mark: Any = None, comment: Any = None
+ ) -> None:
Event.__init__(self, start_mark, end_mark, comment)
self.anchor = anchor
@@ -84,16 +71,15 @@ class CollectionStartEvent(NodeEvent):
def __init__(
self,
- anchor,
- tag,
- implicit,
- start_mark=None,
- end_mark=None,
- flow_style=None,
- comment=None,
- nr_items=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any, Any, Optional[int]) -> None
+ anchor: Any,
+ tag: Any,
+ implicit: Any,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ flow_style: Any = None,
+ comment: Any = None,
+ nr_items: Optional[int] = None,
+ ) -> None:
NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
self.tag = tag
self.implicit = implicit
@@ -111,8 +97,13 @@ class CollectionEndEvent(Event):
class StreamStartEvent(Event):
__slots__ = ('encoding',)
- def __init__(self, start_mark=None, end_mark=None, encoding=None, comment=None):
- # type: (Any, Any, Any, Any) -> None
+ def __init__(
+ self,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ encoding: Any = None,
+ comment: Any = None,
+ ) -> None:
Event.__init__(self, start_mark, end_mark, comment)
self.encoding = encoding
@@ -126,14 +117,13 @@ class DocumentStartEvent(Event):
def __init__(
self,
- start_mark=None,
- end_mark=None,
- explicit=None,
- version=None,
- tags=None,
- comment=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any) -> None
+ start_mark: Any = None,
+ end_mark: Any = None,
+ explicit: Any = None,
+ version: Any = None,
+ tags: Any = None,
+ comment: Any = None,
+ ) -> None:
Event.__init__(self, start_mark, end_mark, comment)
self.explicit = explicit
self.version = version
@@ -143,8 +133,13 @@ class DocumentStartEvent(Event):
class DocumentEndEvent(Event):
__slots__ = ('explicit',)
- def __init__(self, start_mark=None, end_mark=None, explicit=None, comment=None):
- # type: (Any, Any, Any, Any) -> None
+ def __init__(
+ self,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ explicit: Any = None,
+ comment: Any = None,
+ ) -> None:
Event.__init__(self, start_mark, end_mark, comment)
self.explicit = explicit
@@ -152,8 +147,14 @@ class DocumentEndEvent(Event):
class AliasEvent(NodeEvent):
__slots__ = 'style'
- def __init__(self, anchor, start_mark=None, end_mark=None, style=None, comment=None):
- # type: (Any, Any, Any, Any, Any) -> None
+ def __init__(
+ self,
+ anchor: Any,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ style: Any = None,
+ comment: Any = None,
+ ) -> None:
NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
self.style = style
@@ -163,16 +164,15 @@ class ScalarEvent(NodeEvent):
def __init__(
self,
- anchor,
- tag,
- implicit,
- value,
- start_mark=None,
- end_mark=None,
- style=None,
- comment=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any, Any, Any) -> None
+ anchor: Any,
+ tag: Any,
+ implicit: Any,
+ value: Any,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ style: Any = None,
+ comment: Any = None,
+ ) -> None:
NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
self.tag = tag
self.implicit = implicit
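
For orientation, a small sketch of how these events surface through the high-level API in main.py (the pure reader accepts a plain string, so no file is needed); YAML.parse() yields instances of the Event subclasses annotated above:

import ruamel.yaml

yaml = ruamel.yaml.YAML()
for event in yaml.parse('a: 1\nb: [2, 3]\n'):
    # StreamStartEvent, DocumentStartEvent, MappingStartEvent, ScalarEvent, ...
    print(repr(event))   # uses Event.__repr__ as defined above
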
diff --git a/loader.py b/loader.py
index 7234ee1..d6c708b 100644
--- a/loader.py
+++ b/loader.py
@@ -12,16 +12,19 @@ from ruamel.yaml.constructor import (
)
from ruamel.yaml.resolver import VersionedResolver
-if False: # MYPY
- from typing import Any, Dict, List, Union, Optional # NOQA
- from ruamel.yaml.compat import StreamTextType, VersionType # NOQA
+from typing import Any, Dict, List, Union, Optional # NOQA
+from ruamel.yaml.compat import StreamTextType, VersionType # NOQA
__all__ = ['BaseLoader', 'SafeLoader', 'Loader', 'RoundTripLoader']
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, VersionedResolver):
- def __init__(self, stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
+ def __init__(
+ self,
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
self.comment_handling = None
Reader.__init__(self, stream, loader=self)
Scanner.__init__(self, loader=self)
@@ -32,8 +35,12 @@ class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, VersionedRe
class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, VersionedResolver):
- def __init__(self, stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
+ def __init__(
+ self,
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
self.comment_handling = None
Reader.__init__(self, stream, loader=self)
Scanner.__init__(self, loader=self)
@@ -44,8 +51,12 @@ class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, VersionedRe
class Loader(Reader, Scanner, Parser, Composer, Constructor, VersionedResolver):
- def __init__(self, stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
+ def __init__(
+ self,
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
self.comment_handling = None
Reader.__init__(self, stream, loader=self)
Scanner.__init__(self, loader=self)
@@ -63,8 +74,12 @@ class RoundTripLoader(
RoundTripConstructor,
VersionedResolver,
):
- def __init__(self, stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
+ def __init__(
+ self,
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
# self.reader = Reader.__init__(self, stream)
self.comment_handling = None # issue 385
Reader.__init__(self, stream, loader=self)
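
These loader stacks are normally chosen indirectly via the typ argument of YAML() rather than instantiated by hand; a minimal sketch of the difference between the round-trip and safe combinations:

import ruamel.yaml

yaml_rt = ruamel.yaml.YAML()              # typ='rt' (default): RoundTripConstructor
yaml_safe = ruamel.yaml.YAML(typ='safe')  # SafeConstructor

doc = 'a: 1  # comment\n'
print(type(yaml_rt.load(doc)))    # CommentedMap, comments preserved
print(type(yaml_safe.load(doc)))  # plain dict
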
diff --git a/main.py b/main.py
index 20bd8d3..636ad6c 100644
--- a/main.py
+++ b/main.py
@@ -30,13 +30,13 @@ from ruamel.yaml.constructor import (
Constructor,
RoundTripConstructor,
)
-from ruamel.yaml.loader import Loader as UnsafeLoader
+from ruamel.yaml.loader import Loader as UnsafeLoader # NOQA
from ruamel.yaml.comments import CommentedMap, CommentedSeq, C_PRE
-if False: # MYPY
- from typing import List, Set, Dict, Union, Any, Callable, Optional, Text # NOQA
- from ruamel.yaml.compat import StreamType, StreamTextType, VersionType # NOQA
- from pathlib import Path
+from typing import List, Set, Dict, Union, Any, Callable, Optional, Text, Type # NOQA
+from types import TracebackType
+from ruamel.yaml.compat import StreamType, StreamTextType, VersionType # NOQA
+from pathlib import Path # NOQA
try:
from _ruamel_yaml import CParser, CEmitter # type: ignore
@@ -51,8 +51,14 @@ except: # NOQA
class YAML:
- def __init__(self, *, typ=None, pure=False, output=None, plug_ins=None): # input=None,
- # type: (Any, Optional[Text], Any, Any, Any) -> None
+ def __init__(
+ self: Any,
+ *,
+ typ: Optional[Text] = None,
+ pure: Any = False,
+ output: Any = None,
+ plug_ins: Any = None,
+ ) -> None: # input=None,
"""
typ: 'rt'/None -> RoundTripLoader/RoundTripDumper, (default)
'safe' -> SafeLoader/SafeDumper,
@@ -68,20 +74,20 @@ class YAML:
# self._input = input
self._output = output
- self._context_manager = None # type: Any
+ self._context_manager: Any = None
- self.plug_ins = [] # type: List[Any]
+ self.plug_ins: List[Any] = []
for pu in ([] if plug_ins is None else plug_ins) + self.official_plug_ins():
file_name = pu.replace(os.sep, '.')
self.plug_ins.append(import_module(file_name))
- self.Resolver = ruamel.yaml.resolver.VersionedResolver # type: Any
+ self.Resolver: Any = ruamel.yaml.resolver.VersionedResolver
self.allow_unicode = True
- self.Reader = None # type: Any
- self.Representer = None # type: Any
- self.Constructor = None # type: Any
- self.Scanner = None # type: Any
- self.Serializer = None # type: Any
- self.default_flow_style = None # type: Any
+ self.Reader: Any = None
+ self.Representer: Any = None
+ self.Constructor: Any = None
+ self.Scanner: Any = None
+ self.Serializer: Any = None
+ self.default_flow_style: Any = None
self.comment_handling = None
typ_found = 1
setup_rt = False
@@ -139,29 +145,29 @@ class YAML:
self.stream = None
self.canonical = None
self.old_indent = None
- self.width = None
+ self.width: Union[int, None] = None
self.line_break = None
- self.map_indent = None
- self.sequence_indent = None
- self.sequence_dash_offset = 0
+ self.map_indent: Union[int, None] = None
+ self.sequence_indent: Union[int, None] = None
+ self.sequence_dash_offset: int = 0
self.compact_seq_seq = None
self.compact_seq_map = None
self.sort_base_mapping_type_on_output = None # default: sort
self.top_level_colon_align = None
self.prefix_colon = None
- self.version = None
- self.preserve_quotes = None
+ self.version: Optional[Any] = None
+ self.preserve_quotes: Optional[bool] = None
self.allow_duplicate_keys = False # duplicate keys in map, set
self.encoding = 'utf-8'
- self.explicit_start = None
- self.explicit_end = None
+ self.explicit_start: Union[bool, None] = None
+ self.explicit_end: Union[bool, None] = None
self.tags = None
self.default_style = None
self.top_level_block_style_scalar_no_indent_error_1_1 = False
# directives end indicator with single scalar document
- self.scalar_after_indicator = None
+ self.scalar_after_indicator: Optional[bool] = None
# [a, b: 1, c: {d: 2}] vs. [a, {b: 1}, {c: {d: 2}}]
self.brace_single_entry_mapping_in_flow_sequence = False
for module in self.plug_ins:
@@ -171,12 +177,11 @@ class YAML:
break
if typ_found == 0:
raise NotImplementedError(
- 'typ "{}"not recognised (need to install plug-in?)'.format(self.typ)
+ f'typ "{self.typ}" not recognised (need to install plug-in?)'
)
@property
- def reader(self):
- # type: () -> Any
+ def reader(self) -> Any:
try:
return self._reader # type: ignore
except AttributeError:
@@ -184,8 +189,7 @@ class YAML:
return self._reader
@property
- def scanner(self):
- # type: () -> Any
+ def scanner(self) -> Any:
try:
return self._scanner # type: ignore
except AttributeError:
@@ -193,8 +197,7 @@ class YAML:
return self._scanner
@property
- def parser(self):
- # type: () -> Any
+ def parser(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
if self.Parser is not CParser:
@@ -215,16 +218,14 @@ class YAML:
return getattr(self, attr)
@property
- def composer(self):
- # type: () -> Any
+ def composer(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
setattr(self, attr, self.Composer(loader=self))
return getattr(self, attr)
@property
- def constructor(self):
- # type: () -> Any
+ def constructor(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
cnst = self.Constructor(preserve_quotes=self.preserve_quotes, loader=self)
@@ -233,16 +234,14 @@ class YAML:
return getattr(self, attr)
@property
- def resolver(self):
- # type: () -> Any
+ def resolver(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
setattr(self, attr, self.Resolver(version=self.version, loader=self))
return getattr(self, attr)
@property
- def emitter(self):
- # type: () -> Any
+ def emitter(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
if self.Emitter is not CEmitter:
@@ -277,8 +276,7 @@ class YAML:
return getattr(self, attr)
@property
- def serializer(self):
- # type: () -> Any
+ def serializer(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
setattr(
@@ -296,8 +294,7 @@ class YAML:
return getattr(self, attr)
@property
- def representer(self):
- # type: () -> Any
+ def representer(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
repres = self.Representer(
@@ -310,8 +307,7 @@ class YAML:
setattr(self, attr, repres)
return getattr(self, attr)
- def scan(self, stream):
- # type: (StreamTextType) -> Any
+ def scan(self, stream: StreamTextType) -> Any:
"""
Scan a YAML stream and produce scanning tokens.
"""
@@ -334,8 +330,7 @@ class YAML:
except AttributeError:
pass
- def parse(self, stream):
- # type: (StreamTextType) -> Any
+ def parse(self, stream: StreamTextType) -> Any:
"""
Parse a YAML stream and produce parsing events.
"""
@@ -358,8 +353,7 @@ class YAML:
except AttributeError:
pass
- def compose(self, stream):
- # type: (Union[Path, StreamTextType]) -> Any
+ def compose(self, stream: Union[Path, StreamTextType]) -> Any:
"""
Parse the first YAML document in a stream
and produce the corresponding representation tree.
@@ -382,8 +376,7 @@ class YAML:
except AttributeError:
pass
- def compose_all(self, stream):
- # type: (Union[Path, StreamTextType]) -> Any
+ def compose_all(self, stream: Union[Path, StreamTextType]) -> Any:
"""
Parse all YAML documents in a stream
and produce corresponding representation trees.
@@ -416,8 +409,7 @@ class YAML:
# raise TypeError("Need a stream argument when not loading from context manager")
# return self.load_one(stream)
- def load(self, stream):
- # type: (Union[Path, StreamTextType]) -> Any
+ def load(self, stream: Union[Path, StreamTextType]) -> Any:
"""
at this point you either have the non-pure Parser (which has its own reader and
scanner) or you have the pure Parser.
@@ -443,8 +435,7 @@ class YAML:
except AttributeError:
pass
- def load_all(self, stream): # *, skip=None):
- # type: (Union[Path, StreamTextType]) -> Any
+ def load_all(self, stream: Union[Path, StreamTextType]) -> Any: # *, skip=None):
if not hasattr(stream, 'read') and hasattr(stream, 'open'):
# pathlib.Path() instance
with stream.open('r') as fp:
@@ -470,8 +461,7 @@ class YAML:
except AttributeError:
pass
- def get_constructor_parser(self, stream):
- # type: (StreamTextType) -> Any
+ def get_constructor_parser(self, stream: StreamTextType) -> Any:
"""
the old cyaml needs special setup, and therefore the stream
"""
@@ -502,8 +492,13 @@ class YAML:
# rslvr = ruamel.yaml.resolver.Resolver
class XLoader(self.Parser, self.Constructor, rslvr): # type: ignore
- def __init__(selfx, stream, version=self.version, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None # NOQA
+ def __init__(
+ selfx,
+ stream: StreamTextType,
+ version: Optional[VersionType] = self.version,
+ preserve_quotes: Optional[bool] = None,
+ ) -> None:
+ # NOQA
CParser.__init__(selfx, stream)
selfx._parser = selfx._composer = selfx
self.Constructor.__init__(selfx, loader=selfx)
@@ -515,8 +510,7 @@ class YAML:
return loader, loader
return self.constructor, self.parser
- def emit(self, events, stream):
- # type: (Any, Any) -> None
+ def emit(self, events: Any, stream: Any) -> None:
"""
Emit YAML parsing events into a stream.
If stream is None, return the produced string instead.
@@ -531,16 +525,14 @@ class YAML:
except AttributeError:
raise
- def serialize(self, node, stream):
- # type: (Any, Optional[StreamType]) -> Any
+ def serialize(self, node: Any, stream: Optional[StreamType]) -> Any:
"""
Serialize a representation tree into a YAML stream.
If stream is None, return the produced string instead.
"""
self.serialize_all([node], stream)
- def serialize_all(self, nodes, stream):
- # type: (Any, Optional[StreamType]) -> Any
+ def serialize_all(self, nodes: Any, stream: Optional[StreamType]) -> Any:
"""
Serialize a sequence of representation trees into a YAML stream.
If stream is None, return the produced string instead.
@@ -557,15 +549,16 @@ class YAML:
except AttributeError:
raise
- def dump(self, data, stream=None, *, transform=None):
- # type: (Any, Union[Path, StreamType], Any, Any) -> Any
+ def dump(
+ self: Any, data: Union[Path, StreamType], stream: Any = None, *, transform: Any = None
+ ) -> Any:
if self._context_manager:
if not self._output:
raise TypeError('Missing output stream while dumping from context manager')
if transform is not None:
+ x = self.__class__.__name__
raise TypeError(
- '{}.dump() in the context manager cannot have transform keyword '
- ''.format(self.__class__.__name__)
+ f'{x}.dump() in the context manager cannot have transform keyword'
)
self._context_manager.dump(data)
else: # old style
@@ -573,8 +566,9 @@ class YAML:
raise TypeError('Need a stream argument when not dumping from context manager')
return self.dump_all([data], stream, transform=transform)
- def dump_all(self, documents, stream, *, transform=None):
- # type: (Any, Union[Path, StreamType], Any) -> Any
+ def dump_all(
+ self, documents: Any, stream: Union[Path, StreamType], *, transform: Any = None
+ ) -> Any:
if self._context_manager:
raise NotImplementedError
self._output = stream
@@ -585,8 +579,7 @@ class YAML:
self._output = None
self._context_manager = None
- def Xdump_all(self, documents, stream, *, transform=None):
- # type: (Any, Any, Any) -> Any
+ def Xdump_all(self, documents: Any, stream: Any, *, transform: Any = None) -> Any:
"""
Serialize a sequence of Python objects into a YAML stream.
"""
@@ -596,7 +589,7 @@ class YAML:
return self.dump_all(documents, fp, transform=transform)
# The stream should have the methods `write` and possibly `flush`.
if self.top_level_colon_align is True:
- tlca = max([len(str(x)) for x in documents[0]]) # type: Any
+ tlca: Any = max([len(str(x)) for x in documents[0]])
else:
tlca = self.top_level_colon_align
if transform is not None:
@@ -635,8 +628,7 @@ class YAML:
fstream.write(transform(val))
return None
- def get_serializer_representer_emitter(self, stream, tlca):
- # type: (StreamType, Any) -> Any
+ def get_serializer_representer_emitter(self, stream: StreamType, tlca: Any) -> Any:
# we have only .Serializer to deal with (vs .Reader & .Scanner), much simpler
if self.Emitter is not CEmitter:
if self.Serializer is None:
@@ -664,25 +656,25 @@ class YAML:
class XDumper(CEmitter, self.Representer, rslvr): # type: ignore
def __init__(
- selfx,
- stream,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
- ):
- # type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
+ selfx: StreamType,
+ stream: Any,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+ ) -> None:
+ # NOQA
CEmitter.__init__(
selfx,
stream,
@@ -722,23 +714,20 @@ class YAML:
return dumper, dumper, dumper
# basic types
- def map(self, **kw):
- # type: (Any) -> Any
+ def map(self, **kw: Any) -> Any:
if 'rt' in self.typ:
return CommentedMap(**kw)
else:
return dict(**kw)
- def seq(self, *args):
- # type: (Any) -> Any
+ def seq(self, *args: Any) -> Any:
if 'rt' in self.typ:
return CommentedSeq(*args)
else:
return list(*args)
# helpers
- def official_plug_ins(self):
- # type: () -> Any
+ def official_plug_ins(self) -> Any:
"""search for list of subdirs that are plug-ins, if __file__ is not available, e.g.
single file installers that are not properly emulating a file-system (issue 324)
no plug-ins will be found. If any are packaged, you know which file that are
@@ -753,10 +742,9 @@ class YAML:
res = [x.replace(gpbd, "")[1:-3] for x in glob.glob(bd + '/*/__plug_in__.py')]
return res
- def register_class(self, cls):
- # type:(Any) -> Any
+ def register_class(self, cls: Any) -> Any:
"""
- register a class for dumping loading
+ register a class for dumping/loading
- if it has attribute yaml_tag use that to register, else use class name
- if it has methods to_yaml/from_yaml use those to dump/load else dump attributes
as mapping
@@ -766,8 +754,7 @@ class YAML:
self.representer.add_representer(cls, cls.to_yaml)
except AttributeError:
- def t_y(representer, data):
- # type: (Any, Any) -> Any
+ def t_y(representer: Any, data: Any) -> Any:
return representer.represent_yaml_object(
tag, data, cls, flow_style=representer.default_flow_style
)
@@ -777,8 +764,7 @@ class YAML:
self.constructor.add_constructor(tag, cls.from_yaml)
except AttributeError:
- def f_y(constructor, node):
- # type: (Any, Any) -> Any
+ def f_y(constructor: Any, node: Any) -> Any:
return constructor.construct_yaml_object(node, cls)
self.constructor.add_constructor(tag, f_y)
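
A short usage sketch for register_class (Monster is a hypothetical class, not part of this diff): with a yaml_tag attribute that tag is used, and without to_yaml/from_yaml the instance attributes are dumped as a mapping:

import sys
import ruamel.yaml

class Monster:                    # hypothetical example class
    yaml_tag = '!Monster'
    def __init__(self, name, hp):
        self.name = name
        self.hp = hp

yaml = ruamel.yaml.YAML()
yaml.register_class(Monster)
yaml.dump(Monster('Cave troll', 42), sys.stdout)
# output, roughly:
# !Monster
# name: Cave troll
# hp: 42
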
@@ -786,13 +772,16 @@ class YAML:
# ### context manager
- def __enter__(self):
- # type: () -> Any
+ def __enter__(self) -> Any:
self._context_manager = YAMLContextManager(self)
return self
- def __exit__(self, typ, value, traceback):
- # type: (Any, Any, Any) -> None
+ def __exit__(
+ self,
+ typ: Optional[Type[BaseException]],
+ value: Optional[BaseException],
+ traceback: Optional[TracebackType],
+ ) -> None:
if typ:
nprint('typ', typ)
self._context_manager.teardown_output()
@@ -800,8 +789,7 @@ class YAML:
self._context_manager = None
# ### backwards compatibility
- def _indent(self, mapping=None, sequence=None, offset=None):
- # type: (Any, Any, Any) -> None
+ def _indent(self, mapping: Any = None, sequence: Any = None, offset: Any = None) -> None:
if mapping is not None:
self.map_indent = mapping
if sequence is not None:
@@ -810,34 +798,29 @@ class YAML:
self.sequence_dash_offset = offset
@property
- def indent(self):
- # type: () -> Any
+ def indent(self) -> Any:
return self._indent
@indent.setter
- def indent(self, val):
- # type: (Any) -> None
+ def indent(self, val: Any) -> None:
self.old_indent = val
@property
- def block_seq_indent(self):
- # type: () -> Any
+ def block_seq_indent(self) -> Any:
return self.sequence_dash_offset
@block_seq_indent.setter
- def block_seq_indent(self, val):
- # type: (Any) -> None
+ def block_seq_indent(self, val: Any) -> None:
self.sequence_dash_offset = val
- def compact(self, seq_seq=None, seq_map=None):
- # type: (Any, Any) -> None
+ def compact(self, seq_seq: Any = None, seq_map: Any = None) -> None:
self.compact_seq_seq = seq_seq
self.compact_seq_map = seq_map
class YAMLContextManager:
- def __init__(self, yaml, transform=None):
- # type: (Any, Any) -> None # used to be: (Any, Optional[Callable]) -> None
+ def __init__(self, yaml: Any, transform: Any = None) -> None:
+ # used to be: (Any, Optional[Callable]) -> None
self._yaml = yaml
self._output_inited = False
self._output_path = None
@@ -868,8 +851,7 @@ class YAMLContextManager:
else:
self._output = BytesIO()
- def teardown_output(self):
- # type: () -> None
+ def teardown_output(self) -> None:
if self._output_inited:
self._yaml.serializer.close()
else:
@@ -897,18 +879,16 @@ class YAMLContextManager:
if self._output_path is not None:
self._output.close()
- def init_output(self, first_data):
- # type: (Any) -> None
+ def init_output(self, first_data: Any) -> None:
if self._yaml.top_level_colon_align is True:
- tlca = max([len(str(x)) for x in first_data]) # type: Any
+ tlca: Any = max([len(str(x)) for x in first_data])
else:
tlca = self._yaml.top_level_colon_align
self._yaml.get_serializer_representer_emitter(self._output, tlca)
self._yaml.serializer.open()
self._output_inited = True
- def dump(self, data):
- # type: (Any) -> None
+ def dump(self, data: Any) -> None:
if not self._output_inited:
self.init_output(data)
try:
@@ -942,8 +922,7 @@ class YAMLContextManager:
# pass
-def yaml_object(yml):
- # type: (Any) -> Any
+def yaml_object(yml: Any) -> Any:
""" decorator for classes that needs to dump/load objects
The tag for such objects is taken from the class attribute yaml_tag (or the
class name in lowercase in case unavailable)
@@ -951,15 +930,13 @@ def yaml_object(yml):
loading, default routines (dumping a mapping of the attributes) used otherwise.
"""
- def yo_deco(cls):
- # type: (Any) -> Any
+ def yo_deco(cls: Any) -> Any:
tag = getattr(cls, 'yaml_tag', '!' + cls.__name__)
try:
yml.representer.add_representer(cls, cls.to_yaml)
except AttributeError:
- def t_y(representer, data):
- # type: (Any, Any) -> Any
+ def t_y(representer: Any, data: Any) -> Any:
return representer.represent_yaml_object(
tag, data, cls, flow_style=representer.default_flow_style
)
@@ -969,8 +946,7 @@ def yaml_object(yml):
yml.constructor.add_constructor(tag, cls.from_yaml)
except AttributeError:
- def f_y(constructor, node):
- # type: (Any, Any) -> Any
+ def f_y(constructor: Any, node: Any) -> Any:
return constructor.construct_yaml_object(node, cls)
yml.constructor.add_constructor(tag, f_y)
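
yaml_object() is the decorator form of the same registration; a sketch under the same assumptions (Point is hypothetical):

import ruamel.yaml

yml = ruamel.yaml.YAML()

@ruamel.yaml.yaml_object(yml)
class Point:                      # hypothetical example class
    yaml_tag = '!Point'
    def __init__(self, x, y):
        self.x = x
        self.y = y

# yml.dump(Point(1, 2), ...) now emits the mapping with the !Point tag.
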
@@ -980,27 +956,27 @@ def yaml_object(yml):
########################################################################################
-def warn_deprecation(fun, method, arg=''):
- # type: (Any, Any, str) -> None
- from ruamel.yaml.compat import _F
-
+def warn_deprecation(fun: Any, method: Any, arg: str = '') -> None:
warnings.warn(
- _F(
- '\n{fun} will be removed, use\n\n yaml=YAML({arg})\n yaml.{method}(...)\n\ninstead', # NOQA
- fun=fun,
- method=method,
- arg=arg,
- ),
+ f'\n{fun} will be removed, use\n\n yaml=YAML({arg})\n yaml.{method}(...)\n\ninstead', # NOQA
PendingDeprecationWarning, # this will show when testing with pytest/tox
stacklevel=3,
)
+def error_deprecation(fun: Any, method: Any, arg: str = '') -> None:
+ warnings.warn(
+ f'\n{fun} has been removed, use\n\n yaml=YAML({arg})\n yaml.{method}(...)\n\ninstead', # NOQA
+ DeprecationWarning,
+ stacklevel=3,
+ )
+ sys.exit(1)
+
+
########################################################################################
-def scan(stream, Loader=Loader):
- # type: (StreamTextType, Any) -> Any
+def scan(stream: StreamTextType, Loader: Any = Loader) -> Any:
"""
Scan a YAML stream and produce scanning tokens.
"""
@@ -1013,8 +989,7 @@ def scan(stream, Loader=Loader):
loader._parser.dispose()
-def parse(stream, Loader=Loader):
- # type: (StreamTextType, Any) -> Any
+def parse(stream: StreamTextType, Loader: Any = Loader) -> Any:
"""
Parse a YAML stream and produce parsing events.
"""
@@ -1027,8 +1002,7 @@ def parse(stream, Loader=Loader):
loader._parser.dispose()
-def compose(stream, Loader=Loader):
- # type: (StreamTextType, Any) -> Any
+def compose(stream: StreamTextType, Loader: Any = Loader) -> Any:
"""
Parse the first YAML document in a stream
and produce the corresponding representation tree.
@@ -1041,8 +1015,7 @@ def compose(stream, Loader=Loader):
loader.dispose()
-def compose_all(stream, Loader=Loader):
- # type: (StreamTextType, Any) -> Any
+def compose_all(stream: StreamTextType, Loader: Any = Loader) -> Any:
"""
Parse all YAML documents in a stream
and produce corresponding representation trees.
@@ -1056,8 +1029,9 @@ def compose_all(stream, Loader=Loader):
loader._parser.dispose()
-def load(stream, Loader=None, version=None, preserve_quotes=None):
- # type: (Any, Any, Any, Any) -> Any
+def load(
+ stream: Any, Loader: Any = None, version: Any = None, preserve_quotes: Any = None
+) -> Any:
"""
Parse the first YAML document in a stream
and produce the corresponding Python object.
@@ -1081,8 +1055,10 @@ def load(stream, Loader=None, version=None, preserve_quotes=None):
pass
-def load_all(stream, Loader=None, version=None, preserve_quotes=None):
- # type: (Any, Any, Any, Any) -> Any # NOQA
+def load_all(
+ stream: Any, Loader: Any = None, version: Any = None, preserve_quotes: Any = None
+) -> Any:
+ # NOQA
"""
Parse all YAML documents in a stream
and produce corresponding Python objects.
@@ -1107,8 +1083,7 @@ def load_all(stream, Loader=None, version=None, preserve_quotes=None):
pass
-def safe_load(stream, version=None):
- # type: (StreamTextType, Optional[VersionType]) -> Any
+def safe_load(stream: StreamTextType, version: Optional[VersionType] = None) -> Any:
"""
Parse the first YAML document in a stream
and produce the corresponding Python object.
@@ -1118,8 +1093,7 @@ def safe_load(stream, version=None):
return load(stream, SafeLoader, version)
-def safe_load_all(stream, version=None):
- # type: (StreamTextType, Optional[VersionType]) -> Any
+def safe_load_all(stream: StreamTextType, version: Optional[VersionType] = None) -> Any:
"""
Parse all YAML documents in a stream
and produce corresponding Python objects.
@@ -1129,8 +1103,11 @@ def safe_load_all(stream, version=None):
return load_all(stream, SafeLoader, version)
-def round_trip_load(stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> Any
+def round_trip_load(
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+) -> Any:
"""
Parse the first YAML document in a stream
and produce the corresponding Python object.
@@ -1140,8 +1117,11 @@ def round_trip_load(stream, version=None, preserve_quotes=None):
return load(stream, RoundTripLoader, version, preserve_quotes=preserve_quotes)
-def round_trip_load_all(stream, version=None, preserve_quotes=None):
- # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> Any
+def round_trip_load_all(
+ stream: StreamTextType,
+ version: Optional[VersionType] = None,
+ preserve_quotes: Optional[bool] = None,
+) -> Any:
"""
Parse all YAML documents in a stream
and produce corresponding Python objects.
@@ -1152,16 +1132,16 @@ def round_trip_load_all(stream, version=None, preserve_quotes=None):
def emit(
- events,
- stream=None,
- Dumper=Dumper,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
-):
- # type: (Any, Optional[StreamType], Any, Optional[bool], Union[int, None], Optional[int], Optional[bool], Any) -> Any # NOQA
+ events: Any,
+ stream: Optional[StreamType] = None,
+ Dumper: Any = Dumper,
+ canonical: Optional[bool] = None,
+ indent: Union[int, None] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+) -> Any:
+ # NOQA
"""
Emit YAML parsing events into a stream.
If stream is None, return the produced string instead.
@@ -1196,21 +1176,21 @@ enc = None
def serialize_all(
- nodes,
- stream=None,
- Dumper=Dumper,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=enc,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
-):
- # type: (Any, Optional[StreamType], Any, Any, Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Optional[VersionType], Any) -> Any # NOQA
+ nodes: Any,
+ stream: Optional[StreamType] = None,
+ Dumper: Any = Dumper,
+ canonical: Any = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = enc,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[VersionType] = None,
+ tags: Any = None,
+) -> Any:
+ # NOQA
"""
Serialize a sequence of representation trees into a YAML stream.
If stream is None, return the produced string instead.
@@ -1251,8 +1231,9 @@ def serialize_all(
return getvalue()
-def serialize(node, stream=None, Dumper=Dumper, **kwds):
- # type: (Any, Optional[StreamType], Any, Any) -> Any
+def serialize(
+ node: Any, stream: Optional[StreamType] = None, Dumper: Any = Dumper, **kwds: Any
+) -> Any:
"""
Serialize a representation tree into a YAML stream.
If stream is None, return the produced string instead.
@@ -1262,26 +1243,26 @@ def serialize(node, stream=None, Dumper=Dumper, **kwds):
def dump_all(
- documents,
- stream=None,
- Dumper=Dumper,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=enc,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
-):
- # type: (Any, Optional[StreamType], Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> Any # NOQA
+ documents: Any,
+ stream: Optional[StreamType] = None,
+ Dumper: Any = Dumper,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = enc,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Any = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+) -> Any:
+ # NOQA
"""
Serialize a sequence of Python objects into a YAML stream.
If stream is None, return the produced string instead.
@@ -1335,24 +1316,24 @@ def dump_all(
def dump(
- data,
- stream=None,
- Dumper=Dumper,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=enc,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
-):
- # type: (Any, Optional[StreamType], Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Optional[VersionType], Any, Any) -> Optional[Any] # NOQA
+ data: Any,
+ stream: Optional[StreamType] = None,
+ Dumper: Any = Dumper,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = enc,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[VersionType] = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+) -> Any:
+ # NOQA
"""
Serialize a Python object into a YAML stream.
If stream is None, return the produced string instead.
@@ -1381,19 +1362,7 @@ def dump(
)
-def safe_dump_all(documents, stream=None, **kwds):
- # type: (Any, Optional[StreamType], Any) -> Optional[Any]
- """
- Serialize a sequence of Python objects into a YAML stream.
- Produce only basic YAML tags.
- If stream is None, return the produced string instead.
- """
- warn_deprecation('safe_dump_all', 'dump_all', arg="typ='safe', pure=True")
- return dump_all(documents, stream, Dumper=SafeDumper, **kwds)
-
-
-def safe_dump(data, stream=None, **kwds):
- # type: (Any, Optional[StreamType], Any) -> Optional[Any]
+def safe_dump(data: Any, stream: Optional[StreamType] = None, **kwds: Any) -> Any:
"""
Serialize a Python object into a YAML stream.
Produce only basic YAML tags.
@@ -1404,26 +1373,25 @@ def safe_dump(data, stream=None, **kwds):
def round_trip_dump(
- data,
- stream=None,
- Dumper=RoundTripDumper,
- default_style=None,
- default_flow_style=None,
- canonical=None,
- indent=None,
- width=None,
- allow_unicode=None,
- line_break=None,
- encoding=enc,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- block_seq_indent=None,
- top_level_colon_align=None,
- prefix_colon=None,
-):
- # type: (Any, Optional[StreamType], Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Optional[VersionType], Any, Any, Any, Any) -> Optional[Any] # NOQA
+ data: Any,
+ stream: Optional[StreamType] = None,
+ Dumper: Any = RoundTripDumper,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ canonical: Optional[bool] = None,
+ indent: Optional[int] = None,
+ width: Optional[int] = None,
+ allow_unicode: Optional[bool] = None,
+ line_break: Any = None,
+ encoding: Any = enc,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[VersionType] = None,
+ tags: Any = None,
+ block_seq_indent: Any = None,
+ top_level_colon_align: Any = None,
+ prefix_colon: Any = None,
+) -> Any:
allow_unicode = True if allow_unicode is None else allow_unicode
warn_deprecation('round_trip_dump', 'dump')
return dump_all(
@@ -1453,9 +1421,13 @@ def round_trip_dump(
def add_implicit_resolver(
- tag, regexp, first=None, Loader=None, Dumper=None, resolver=Resolver
-):
- # type: (Any, Any, Any, Any, Any, Any) -> None
+ tag: Any,
+ regexp: Any,
+ first: Any = None,
+ Loader: Any = None,
+ Dumper: Any = None,
+ resolver: Any = Resolver,
+) -> None:
"""
Add an implicit scalar detector.
If an implicit scalar value matches the given regexp,
@@ -1486,8 +1458,14 @@ def add_implicit_resolver(
# this code currently not tested
-def add_path_resolver(tag, path, kind=None, Loader=None, Dumper=None, resolver=Resolver):
- # type: (Any, Any, Any, Any, Any, Any) -> None
+def add_path_resolver(
+ tag: Any,
+ path: Any,
+ kind: Any = None,
+ Loader: Any = None,
+ Dumper: Any = None,
+ resolver: Any = Resolver,
+) -> None:
"""
Add a path based resolver for the given tag.
A path is a list of keys that forms a path
@@ -1517,8 +1495,9 @@ def add_path_resolver(tag, path, kind=None, Loader=None, Dumper=None, resolver=R
raise NotImplementedError
-def add_constructor(tag, object_constructor, Loader=None, constructor=Constructor):
- # type: (Any, Any, Any, Any) -> None
+def add_constructor(
+ tag: Any, object_constructor: Any, Loader: Any = None, constructor: Any = Constructor
+) -> None:
"""
Add an object constructor for the given tag.
    object_constructor is a function that accepts a Loader instance
@@ -1542,8 +1521,9 @@ def add_constructor(tag, object_constructor, Loader=None, constructor=Constructo
raise NotImplementedError
-def add_multi_constructor(tag_prefix, multi_constructor, Loader=None, constructor=Constructor):
- # type: (Any, Any, Any, Any) -> None
+def add_multi_constructor(
+ tag_prefix: Any, multi_constructor: Any, Loader: Any = None, constructor: Any = Constructor
+) -> None:
"""
Add a multi-constructor for the given tag prefix.
Multi-constructor is called for a node if its tag starts with tag_prefix.
@@ -1568,8 +1548,9 @@ def add_multi_constructor(tag_prefix, multi_constructor, Loader=None, constructo
raise NotImplementedError
-def add_representer(data_type, object_representer, Dumper=None, representer=Representer):
- # type: (Any, Any, Any, Any) -> None
+def add_representer(
+ data_type: Any, object_representer: Any, Dumper: Any = None, representer: Any = Representer
+) -> None:
"""
Add a representer for the given type.
object_representer is a function accepting a Dumper instance
@@ -1595,8 +1576,9 @@ def add_representer(data_type, object_representer, Dumper=None, representer=Repr
# this code currently not tested
-def add_multi_representer(data_type, multi_representer, Dumper=None, representer=Representer):
- # type: (Any, Any, Any, Any) -> None
+def add_multi_representer(
+ data_type: Any, multi_representer: Any, Dumper: Any = None, representer: Any = Representer
+) -> None:
"""
Add a representer for the given type.
multi_representer is a function accepting a Dumper instance
@@ -1626,8 +1608,7 @@ class YAMLObjectMetaclass(type):
The metaclass for YAMLObject.
"""
- def __init__(cls, name, bases, kwds):
- # type: (Any, Any, Any) -> None
+ def __init__(cls, name: Any, bases: Any, kwds: Any) -> None:
super().__init__(name, bases, kwds)
if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
cls.yaml_constructor.add_constructor(cls.yaml_tag, cls.from_yaml) # type: ignore
@@ -1645,20 +1626,18 @@ class YAMLObject(with_metaclass(YAMLObjectMetaclass)): # type: ignore
yaml_constructor = Constructor
yaml_representer = Representer
- yaml_tag = None # type: Any
- yaml_flow_style = None # type: Any
+ yaml_tag: Any = None
+ yaml_flow_style: Any = None
@classmethod
- def from_yaml(cls, constructor, node):
- # type: (Any, Any) -> Any
+ def from_yaml(cls, constructor: Any, node: Any) -> Any:
"""
Convert a representation node to a Python object.
"""
return constructor.construct_yaml_object(node, cls)
@classmethod
- def to_yaml(cls, representer, data):
- # type: (Any, Any) -> Any
+ def to_yaml(cls, representer: Any, data: Any) -> Any:
"""
Convert a Python object to a representation node.
"""
diff --git a/nodes.py b/nodes.py
index c76bb4e..b2f4e13 100644
--- a/nodes.py
+++ b/nodes.py
@@ -2,17 +2,21 @@
import sys
-from ruamel.yaml.compat import _F
-
-if False: # MYPY
- from typing import Dict, Any, Text # NOQA
+from typing import Dict, Any, Text # NOQA
class Node:
__slots__ = 'tag', 'value', 'start_mark', 'end_mark', 'comment', 'anchor'
- def __init__(self, tag, value, start_mark, end_mark, comment=None, anchor=None):
- # type: (Any, Any, Any, Any, Any, Any) -> None
+ def __init__(
+ self,
+ tag: Any,
+ value: Any,
+ start_mark: Any,
+ end_mark: Any,
+ comment: Any = None,
+ anchor: Any = None,
+ ) -> None:
self.tag = tag
self.value = value
self.start_mark = start_mark
@@ -20,8 +24,7 @@ class Node:
self.comment = comment
self.anchor = anchor
- def __repr__(self):
- # type: () -> Any
+ def __repr__(self) -> Any:
value = self.value
# if isinstance(value, list):
# if len(value) == 0:
@@ -36,29 +39,19 @@ class Node:
# else:
# value = repr(value)
value = repr(value)
- return _F(
- '{class_name!s}(tag={self_tag!r}, value={value!s})',
- class_name=self.__class__.__name__,
- self_tag=self.tag,
- value=value,
- )
+ return f'{self.__class__.__name__!s}(tag={self.tag!r}, value={value!s})'
- def dump(self, indent=0):
- # type: (int) -> None
+ def dump(self, indent: int = 0) -> None:
+ xx = self.__class__.__name__
+ xi = ' ' * indent
if isinstance(self.value, str):
- sys.stdout.write(
- '{}{}(tag={!r}, value={!r})\n'.format(
- ' ' * indent, self.__class__.__name__, self.tag, self.value
- )
- )
+ sys.stdout.write(f'{xi}{xx}(tag={self.tag!r}, value={self.value!r})\n')
if self.comment:
- sys.stdout.write(' {}comment: {})\n'.format(' ' * indent, self.comment))
+ sys.stdout.write(f' {xi}comment: {self.comment})\n')
return
- sys.stdout.write(
- '{}{}(tag={!r})\n'.format(' ' * indent, self.__class__.__name__, self.tag)
- )
+ sys.stdout.write(f'{xi}{xx}(tag={self.tag!r})\n')
if self.comment:
- sys.stdout.write(' {}comment: {})\n'.format(' ' * indent, self.comment))
+ sys.stdout.write(f' {xi}comment: {self.comment})\n')
for v in self.value:
if isinstance(v, tuple):
for v1 in v:
@@ -66,7 +59,7 @@ class Node:
elif isinstance(v, Node):
v.dump(indent + 1)
else:
- sys.stdout.write('Node value type? {}\n'.format(type(v)))
+ sys.stdout.write(f'Node value type? {type(v)}\n')
class ScalarNode(Node):
@@ -83,9 +76,15 @@ class ScalarNode(Node):
id = 'scalar'
def __init__(
- self, tag, value, start_mark=None, end_mark=None, style=None, comment=None, anchor=None
- ):
- # type: (Any, Any, Any, Any, Any, Any, Any) -> None
+ self,
+ tag: Any,
+ value: Any,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ style: Any = None,
+ comment: Any = None,
+ anchor: Any = None,
+ ) -> None:
Node.__init__(self, tag, value, start_mark, end_mark, comment=comment, anchor=anchor)
self.style = style
@@ -95,15 +94,14 @@ class CollectionNode(Node):
def __init__(
self,
- tag,
- value,
- start_mark=None,
- end_mark=None,
- flow_style=None,
- comment=None,
- anchor=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any, Any) -> None
+ tag: Any,
+ value: Any,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ flow_style: Any = None,
+ comment: Any = None,
+ anchor: Any = None,
+ ) -> None:
Node.__init__(self, tag, value, start_mark, end_mark, comment=comment)
self.flow_style = flow_style
self.anchor = anchor
@@ -120,15 +118,14 @@ class MappingNode(CollectionNode):
def __init__(
self,
- tag,
- value,
- start_mark=None,
- end_mark=None,
- flow_style=None,
- comment=None,
- anchor=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any, Any) -> None
+ tag: Any,
+ value: Any,
+ start_mark: Any = None,
+ end_mark: Any = None,
+ flow_style: Any = None,
+ comment: Any = None,
+ anchor: Any = None,
+ ) -> None:
CollectionNode.__init__(
self, tag, value, start_mark, end_mark, flow_style, comment, anchor
)
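
Node.dump() above is a debugging aid; a quick sketch of reaching it through YAML.compose() from main.py (plain string input, assuming the pure parser):

import ruamel.yaml

yaml = ruamel.yaml.YAML()
node = yaml.compose('a: [1, 2]\n')   # representation tree: a MappingNode
node.dump()                          # prints the nested ScalarNode/SequenceNode structure
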
diff --git a/parser.py b/parser.py
index a2fab43..c8b5fcf 100644
--- a/parser.py
+++ b/parser.py
@@ -80,16 +80,14 @@ from ruamel.yaml.events import * # NOQA
from ruamel.yaml.scanner import Scanner, RoundTripScanner, ScannerError # NOQA
from ruamel.yaml.scanner import BlankLineComment
from ruamel.yaml.comments import C_PRE, C_POST, C_SPLIT_ON_FIRST_BLANK
-from ruamel.yaml.compat import _F, nprint, nprintf # NOQA
+from ruamel.yaml.compat import nprint, nprintf # NOQA
-if False: # MYPY
- from typing import Any, Dict, Optional, List, Optional # NOQA
+from typing import Any, Dict, Optional, List  # NOQA
__all__ = ['Parser', 'RoundTripParser', 'ParserError']
-def xprintf(*args, **kw):
- # type: (Any, Any) -> Any
+def xprintf(*args: Any, **kw: Any) -> Any:
return nprintf(*args, **kw)
pass
@@ -104,42 +102,36 @@ class Parser:
DEFAULT_TAGS = {'!': '!', '!!': 'tag:yaml.org,2002:'}
- def __init__(self, loader):
- # type: (Any) -> None
+ def __init__(self, loader: Any) -> None:
self.loader = loader
if self.loader is not None and getattr(self.loader, '_parser', None) is None:
self.loader._parser = self
self.reset_parser()
- def reset_parser(self):
- # type: () -> None
+ def reset_parser(self) -> None:
# Reset the state attributes (to clear self-references)
self.current_event = self.last_event = None
- self.tag_handles = {} # type: Dict[Any, Any]
- self.states = [] # type: List[Any]
- self.marks = [] # type: List[Any]
- self.state = self.parse_stream_start # type: Any
+ self.tag_handles: Dict[Any, Any] = {}
+ self.states: List[Any] = []
+ self.marks: List[Any] = []
+ self.state: Any = self.parse_stream_start
- def dispose(self):
- # type: () -> None
+ def dispose(self) -> None:
self.reset_parser()
@property
- def scanner(self):
- # type: () -> Any
+ def scanner(self) -> Any:
if hasattr(self.loader, 'typ'):
return self.loader.scanner
return self.loader._scanner
@property
- def resolver(self):
- # type: () -> Any
+ def resolver(self) -> Any:
if hasattr(self.loader, 'typ'):
return self.loader.resolver
return self.loader._resolver
- def check_event(self, *choices):
- # type: (Any) -> bool
+ def check_event(self, *choices: Any) -> bool:
# Check the type of the next event.
if self.current_event is None:
if self.state:
@@ -152,16 +144,14 @@ class Parser:
return True
return False
- def peek_event(self):
- # type: () -> Any
+ def peek_event(self) -> Any:
# Get the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
return self.current_event
- def get_event(self):
- # type: () -> Any
+ def get_event(self) -> Any:
# Get the next event and proceed further.
if self.current_event is None:
if self.state:
@@ -178,8 +168,7 @@ class Parser:
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
- def parse_stream_start(self):
- # type: () -> Any
+ def parse_stream_start(self) -> Any:
# Parse the stream start.
token = self.scanner.get_token()
self.move_token_comment(token)
@@ -190,8 +179,7 @@ class Parser:
return event
- def parse_implicit_document_start(self):
- # type: () -> Any
+ def parse_implicit_document_start(self) -> Any:
# Parse an implicit document.
if not self.scanner.check_token(DirectiveToken, DocumentStartToken, StreamEndToken):
self.tag_handles = self.DEFAULT_TAGS
@@ -208,8 +196,7 @@ class Parser:
else:
return self.parse_document_start()
- def parse_document_start(self):
- # type: () -> Any
+ def parse_document_start(self) -> Any:
# Parse any extra document end indicators.
while self.scanner.check_token(DocumentEndToken):
self.scanner.get_token()
@@ -220,10 +207,8 @@ class Parser:
raise ParserError(
None,
None,
- _F(
- "expected '<document start>', but found {pt!r}",
- pt=self.scanner.peek_token().id,
- ),
+ "expected '<document start>', "
+                f'but found {self.scanner.peek_token().id!r}',
self.scanner.peek_token().start_mark,
)
token = self.scanner.get_token()
@@ -232,10 +217,14 @@ class Parser:
# if self.loader is not None and \
# end_mark.line != self.scanner.peek_token().start_mark.line:
# self.loader.scalar_after_indicator = False
- event = DocumentStartEvent(
- start_mark, end_mark, explicit=True, version=version, tags=tags,
- comment=token.comment
- ) # type: Any
+ event: Any = DocumentStartEvent(
+ start_mark,
+ end_mark,
+ explicit=True,
+ version=version,
+ tags=tags,
+ comment=token.comment,
+ )
self.states.append(self.parse_document_end)
self.state = self.parse_document_content
else:
@@ -247,8 +236,7 @@ class Parser:
self.state = None
return event
- def parse_document_end(self):
- # type: () -> Any
+ def parse_document_end(self) -> Any:
# Parse the document end.
token = self.scanner.peek_token()
start_mark = end_mark = token.start_mark
@@ -267,8 +255,7 @@ class Parser:
return event
- def parse_document_content(self):
- # type: () -> Any
+ def parse_document_content(self) -> Any:
if self.scanner.check_token(
DirectiveToken, DocumentStartToken, DocumentEndToken, StreamEndToken
):
@@ -278,8 +265,7 @@ class Parser:
else:
return self.parse_block_node()
- def process_directives(self):
- # type: () -> Any
+ def process_directives(self) -> Any:
yaml_version = None
self.tag_handles = {}
while self.scanner.check_token(DirectiveToken):
@@ -302,14 +288,11 @@ class Parser:
handle, prefix = token.value
if handle in self.tag_handles:
raise ParserError(
- None,
- None,
- _F('duplicate tag handle {handle!r}', handle=handle),
- token.start_mark,
+ None, None, f'duplicate tag handle {handle!r}', token.start_mark,
)
self.tag_handles[handle] = prefix
if bool(self.tag_handles):
- value = yaml_version, self.tag_handles.copy() # type: Any
+ value: Any = (yaml_version, self.tag_handles.copy())
else:
value = yaml_version, None
if self.loader is not None and hasattr(self.loader, 'tags'):
@@ -339,27 +322,22 @@ class Parser:
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
- def parse_block_node(self):
- # type: () -> Any
+ def parse_block_node(self) -> Any:
return self.parse_node(block=True)
- def parse_flow_node(self):
- # type: () -> Any
+ def parse_flow_node(self) -> Any:
return self.parse_node()
- def parse_block_node_or_indentless_sequence(self):
- # type: () -> Any
+ def parse_block_node_or_indentless_sequence(self) -> Any:
return self.parse_node(block=True, indentless_sequence=True)
- def transform_tag(self, handle, suffix):
- # type: (Any, Any) -> Any
+ def transform_tag(self, handle: Any, suffix: Any) -> Any:
return self.tag_handles[handle] + suffix
- def parse_node(self, block=False, indentless_sequence=False):
- # type: (bool, bool) -> Any
+ def parse_node(self, block: bool = False, indentless_sequence: bool = False) -> Any:
if self.scanner.check_token(AliasToken):
token = self.scanner.get_token()
- event = AliasEvent(token.value, token.start_mark, token.end_mark) # type: Any
+ event: Any = AliasEvent(token.value, token.start_mark, token.end_mark)
self.state = self.states.pop()
return event
@@ -394,7 +372,7 @@ class Parser:
raise ParserError(
'while parsing a node',
start_mark,
- _F('found undefined tag handle {handle!r}', handle=handle),
+ f'found undefined tag handle {handle!r}',
tag_mark,
)
tag = self.transform_tag(handle, suffix)
@@ -507,9 +485,9 @@ class Parser:
node = 'flow'
token = self.scanner.peek_token()
raise ParserError(
- _F('while parsing a {node!s} node', node=node),
+ f'while parsing a {node!s} node',
start_mark,
- _F('expected the node content, but found {token_id!r}', token_id=token.id),
+ f'expected the node content, but found {token.id!r}',
token.start_mark,
)
return event
@@ -517,16 +495,14 @@ class Parser:
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
# BLOCK-END
- def parse_block_sequence_first_entry(self):
- # type: () -> Any
+ def parse_block_sequence_first_entry(self) -> Any:
token = self.scanner.get_token()
# move any comment from start token
# self.move_token_comment(token)
self.marks.append(token.start_mark)
return self.parse_block_sequence_entry()
- def parse_block_sequence_entry(self):
- # type: () -> Any
+ def parse_block_sequence_entry(self) -> Any:
if self.scanner.check_token(BlockEntryToken):
token = self.scanner.get_token()
self.move_token_comment(token)
@@ -541,7 +517,7 @@ class Parser:
raise ParserError(
'while parsing a block collection',
self.marks[-1],
- _F('expected <block end>, but found {token_id!r}', token_id=token.id),
+ f'expected <block end>, but found {token.id!r}',
token.start_mark,
)
token = self.scanner.get_token() # BlockEndToken
@@ -557,8 +533,7 @@ class Parser:
# - entry
# - nested
- def parse_indentless_sequence_entry(self):
- # type: () -> Any
+ def parse_indentless_sequence_entry(self) -> Any:
if self.scanner.check_token(BlockEntryToken):
token = self.scanner.get_token()
self.move_token_comment(token)
@@ -587,14 +562,12 @@ class Parser:
# (VALUE block_node_or_indentless_sequence?)?)*
# BLOCK-END
- def parse_block_mapping_first_key(self):
- # type: () -> Any
+ def parse_block_mapping_first_key(self) -> Any:
token = self.scanner.get_token()
self.marks.append(token.start_mark)
return self.parse_block_mapping_key()
- def parse_block_mapping_key(self):
- # type: () -> Any
+ def parse_block_mapping_key(self) -> Any:
if self.scanner.check_token(KeyToken):
token = self.scanner.get_token()
self.move_token_comment(token)
@@ -612,7 +585,7 @@ class Parser:
raise ParserError(
'while parsing a block mapping',
self.marks[-1],
- _F('expected <block end>, but found {token_id!r}', token_id=token.id),
+ f'expected <block end>, but found {token.id!r}',
token.start_mark,
)
token = self.scanner.get_token()
@@ -622,8 +595,7 @@ class Parser:
self.marks.pop()
return event
- def parse_block_mapping_value(self):
- # type: () -> Any
+ def parse_block_mapping_value(self) -> Any:
if self.scanner.check_token(ValueToken):
token = self.scanner.get_token()
# value token might have post comment move it to e.g. block
@@ -662,14 +634,12 @@ class Parser:
# For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
# generate an inline mapping (set syntax).
- def parse_flow_sequence_first_entry(self):
- # type: () -> Any
+ def parse_flow_sequence_first_entry(self) -> Any:
token = self.scanner.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_sequence_entry(first=True)
- def parse_flow_sequence_entry(self, first=False):
- # type: (bool) -> Any
+ def parse_flow_sequence_entry(self, first: bool = False) -> Any:
if not self.scanner.check_token(FlowSequenceEndToken):
if not first:
if self.scanner.check_token(FlowEntryToken):
@@ -679,15 +649,15 @@ class Parser:
raise ParserError(
'while parsing a flow sequence',
self.marks[-1],
- _F("expected ',' or ']', but got {token_id!r}", token_id=token.id),
+ f"expected ',' or ']', but got {token.id!r}",
token.start_mark,
)
if self.scanner.check_token(KeyToken):
token = self.scanner.peek_token()
- event = MappingStartEvent(
+ event: Any = MappingStartEvent(
None, None, True, token.start_mark, token.end_mark, flow_style=True
- ) # type: Any
+ )
self.state = self.parse_flow_sequence_entry_mapping_key
return event
elif not self.scanner.check_token(FlowSequenceEndToken):
@@ -699,8 +669,7 @@ class Parser:
self.marks.pop()
return event
- def parse_flow_sequence_entry_mapping_key(self):
- # type: () -> Any
+ def parse_flow_sequence_entry_mapping_key(self) -> Any:
token = self.scanner.get_token()
if not self.scanner.check_token(ValueToken, FlowEntryToken, FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry_mapping_value)
@@ -709,8 +678,7 @@ class Parser:
self.state = self.parse_flow_sequence_entry_mapping_value
return self.process_empty_scalar(token.end_mark)
- def parse_flow_sequence_entry_mapping_value(self):
- # type: () -> Any
+ def parse_flow_sequence_entry_mapping_value(self) -> Any:
if self.scanner.check_token(ValueToken):
token = self.scanner.get_token()
if not self.scanner.check_token(FlowEntryToken, FlowSequenceEndToken):
@@ -724,8 +692,7 @@ class Parser:
token = self.scanner.peek_token()
return self.process_empty_scalar(token.start_mark)
- def parse_flow_sequence_entry_mapping_end(self):
- # type: () -> Any
+ def parse_flow_sequence_entry_mapping_end(self) -> Any:
self.state = self.parse_flow_sequence_entry
token = self.scanner.peek_token()
return MappingEndEvent(token.start_mark, token.start_mark)
@@ -736,14 +703,12 @@ class Parser:
# FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
- def parse_flow_mapping_first_key(self):
- # type: () -> Any
+ def parse_flow_mapping_first_key(self) -> Any:
token = self.scanner.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_mapping_key(first=True)
- def parse_flow_mapping_key(self, first=False):
- # type: (Any) -> Any
+ def parse_flow_mapping_key(self, first: Any = False) -> Any:
if not self.scanner.check_token(FlowMappingEndToken):
if not first:
if self.scanner.check_token(FlowEntryToken):
@@ -753,7 +718,7 @@ class Parser:
raise ParserError(
'while parsing a flow mapping',
self.marks[-1],
- _F("expected ',' or '}}', but got {token_id!r}", token_id=token.id),
+ f"expected ',' or '}}', but got {token.id!r}",
token.start_mark,
)
if self.scanner.check_token(KeyToken):
@@ -780,8 +745,7 @@ class Parser:
self.marks.pop()
return event
- def parse_flow_mapping_value(self):
- # type: () -> Any
+ def parse_flow_mapping_value(self) -> Any:
if self.scanner.check_token(ValueToken):
token = self.scanner.get_token()
if not self.scanner.check_token(FlowEntryToken, FlowMappingEndToken):
@@ -795,25 +759,23 @@ class Parser:
token = self.scanner.peek_token()
return self.process_empty_scalar(token.start_mark)
- def parse_flow_mapping_empty_value(self):
- # type: () -> Any
+ def parse_flow_mapping_empty_value(self) -> Any:
self.state = self.parse_flow_mapping_key
return self.process_empty_scalar(self.scanner.peek_token().start_mark)
- def process_empty_scalar(self, mark, comment=None):
- # type: (Any, Any) -> Any
+ def process_empty_scalar(self, mark: Any, comment: Any = None) -> Any:
return ScalarEvent(None, None, (True, False), "", mark, mark, comment=comment)
- def move_token_comment(self, token, nt=None, empty=False):
- # type: (Any, Optional[Any], Optional[bool]) -> Any
+ def move_token_comment(
+ self, token: Any, nt: Optional[Any] = None, empty: Optional[bool] = False
+ ) -> Any:
pass
class RoundTripParser(Parser):
"""roundtrip is a safe loader, that wants to see the unmangled tag"""
- def transform_tag(self, handle, suffix):
- # type: (Any, Any) -> Any
+ def transform_tag(self, handle: Any, suffix: Any) -> Any:
# return self.tag_handles[handle]+suffix
if handle == '!!' and suffix in (
'null',
@@ -832,8 +794,9 @@ class RoundTripParser(Parser):
return Parser.transform_tag(self, handle, suffix)
return handle + suffix
- def move_token_comment(self, token, nt=None, empty=False):
- # type: (Any, Optional[Any], Optional[bool]) -> Any
+ def move_token_comment(
+ self, token: Any, nt: Optional[Any] = None, empty: Optional[bool] = False
+ ) -> Any:
token.move_old_comment(self.scanner.peek_token() if nt is None else nt, empty=empty)
@@ -843,12 +806,12 @@ class RoundTripParserSC(RoundTripParser):
# some of the differences are based on the superclass testing
# if self.loader.comment_handling is not None
- def move_token_comment(self, token, nt=None, empty=False):
- # type: (Any, Any, Any, Optional[bool]) -> None
+ def move_token_comment(
+ self: Any, token: Any, nt: Any = None, empty: Optional[bool] = False
+ ) -> None:
token.move_new_comment(self.scanner.peek_token() if nt is None else nt, empty=empty)
- def distribute_comment(self, comment, line):
- # type: (Any, Any) -> Any
+ def distribute_comment(self, comment: Any, line: Any) -> Any:
# ToDo, look at indentation of the comment to determine attachment
if comment is None:
return None
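
Not part of the patch: the parser.py hunks above replace `_F(template, **kw)` calls with f-strings and move type comments into inline annotations. A minimal standalone sketch of the f-string equivalence, using a hypothetical stand-in for the removed compat helper (illustrative values, not the actual `ruamel.yaml.compat._F` code):

    from typing import Any

    def _format_helper(template: str, **kw: Any) -> str:
        # hypothetical stand-in for a str.format-based helper such as _F
        return template.format(**kw)

    handle = '!e!'
    old_style = _format_helper('found undefined tag handle {handle!r}', handle=handle)
    new_style = f'found undefined tag handle {handle!r}'
    assert old_style == new_style == "found undefined tag handle '!e!'"
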
diff --git a/reader.py b/reader.py
index 4aac40a..dec6e9f 100644
--- a/reader.py
+++ b/reader.py
@@ -22,46 +22,35 @@
import codecs
from ruamel.yaml.error import YAMLError, FileMark, StringMark, YAMLStreamError
-from ruamel.yaml.compat import _F # NOQA
from ruamel.yaml.util import RegExp
-if False: # MYPY
- from typing import Any, Dict, Optional, List, Union, Text, Tuple, Optional # NOQA
-# from ruamel.yaml.compat import StreamTextType # NOQA
+from typing import Any, Dict, Optional, List, Union, Text, Tuple  # NOQA
+# from ruamel.yaml.compat import StreamTextType # NOQA
__all__ = ['Reader', 'ReaderError']
class ReaderError(YAMLError):
- def __init__(self, name, position, character, encoding, reason):
- # type: (Any, Any, Any, Any, Any) -> None
+ def __init__(
+ self, name: Any, position: Any, character: Any, encoding: Any, reason: Any
+ ) -> None:
self.name = name
self.character = character
self.position = position
self.encoding = encoding
self.reason = reason
- def __str__(self):
- # type: () -> Any
+ def __str__(self) -> Any:
if isinstance(self.character, bytes):
- return _F(
- "'{self_encoding!s}' codec can't decode byte #x{ord_self_character:02x}: "
- '{self_reason!s}\n'
- ' in "{self_name!s}", position {self_position:d}',
- self_encoding=self.encoding,
- ord_self_character=ord(self.character),
- self_reason=self.reason,
- self_name=self.name,
- self_position=self.position,
+ return (
+ f"'{self.encoding!s}' codec can't decode byte #x{ord(self.character):02x}: "
+ f'{self.reason!s}\n'
+ f' in "{self.name!s}", position {self.position:d}'
)
else:
- return _F(
- 'unacceptable character #x{self_character:04x}: {self_reason!s}\n'
- ' in "{self_name!s}", position {self_position:d}',
- self_character=self.character,
- self_reason=self.reason,
- self_name=self.name,
- self_position=self.position,
+ return (
+ f'unacceptable character #x{self.character:04x}: {self.reason!s}\n'
+ f' in "{self.name!s}", position {self.position:d}'
)
@@ -79,39 +68,35 @@ class Reader:
# Yeah, it's ugly and slow.
- def __init__(self, stream, loader=None):
- # type: (Any, Any) -> None
+ def __init__(self, stream: Any, loader: Any = None) -> None:
self.loader = loader
if self.loader is not None and getattr(self.loader, '_reader', None) is None:
self.loader._reader = self
self.reset_reader()
- self.stream = stream # type: Any # as .read is called
+ self.stream: Any = stream # as .read is called
- def reset_reader(self):
- # type: () -> None
- self.name = None # type: Any
+ def reset_reader(self) -> None:
+ self.name: Any = None
self.stream_pointer = 0
self.eof = True
self.buffer = ""
self.pointer = 0
- self.raw_buffer = None # type: Any
+ self.raw_buffer: Any = None
self.raw_decode = None
- self.encoding = None # type: Optional[Text]
+ self.encoding: Optional[Text] = None
self.index = 0
self.line = 0
self.column = 0
@property
- def stream(self):
- # type: () -> Any
+ def stream(self) -> Any:
try:
return self._stream
except AttributeError:
            raise YAMLStreamError('input stream needs to be specified')
@stream.setter
- def stream(self, val):
- # type: (Any) -> None
+ def stream(self, val: Any) -> None:
if val is None:
return
self._stream = None
@@ -132,22 +117,19 @@ class Reader:
self.raw_buffer = None
self.determine_encoding()
- def peek(self, index=0):
- # type: (int) -> Text
+ def peek(self, index: int = 0) -> Text:
try:
return self.buffer[self.pointer + index]
except IndexError:
self.update(index + 1)
return self.buffer[self.pointer + index]
- def prefix(self, length=1):
- # type: (int) -> Any
+ def prefix(self, length: int = 1) -> Any:
if self.pointer + length >= len(self.buffer):
self.update(length)
return self.buffer[self.pointer : self.pointer + length]
- def forward_1_1(self, length=1):
- # type: (int) -> None
+ def forward_1_1(self, length: int = 1) -> None:
if self.pointer + length + 1 >= len(self.buffer):
self.update(length + 1)
while length != 0:
@@ -163,8 +145,7 @@ class Reader:
self.column += 1
length -= 1
- def forward(self, length=1):
- # type: (int) -> None
+ def forward(self, length: int = 1) -> None:
if self.pointer + length + 1 >= len(self.buffer):
self.update(length + 1)
while length != 0:
@@ -178,8 +159,7 @@ class Reader:
self.column += 1
length -= 1
- def get_mark(self):
- # type: () -> Any
+ def get_mark(self) -> Any:
if self.stream is None:
return StringMark(
self.name, self.index, self.line, self.column, self.buffer, self.pointer
@@ -187,8 +167,7 @@ class Reader:
else:
return FileMark(self.name, self.index, self.line, self.column)
- def determine_encoding(self):
- # type: () -> None
+ def determine_encoding(self) -> None:
while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
self.update_raw()
if isinstance(self.raw_buffer, bytes):
@@ -210,8 +189,7 @@ class Reader:
_printable_ascii = ('\x09\x0A\x0D' + "".join(map(chr, range(0x20, 0x7F)))).encode('ascii')
@classmethod
- def _get_non_printable_ascii(cls, data): # type: ignore
- # type: (Text, bytes) -> Optional[Tuple[int, Text]]
+ def _get_non_printable_ascii(cls: Text, data: bytes) -> Optional[Tuple[int, Text]]: # type: ignore # NOQA
ascii_bytes = data.encode('ascii') # type: ignore
non_printables = ascii_bytes.translate(None, cls._printable_ascii) # type: ignore
if not non_printables:
@@ -220,23 +198,20 @@ class Reader:
return ascii_bytes.index(non_printable), non_printable.decode('ascii')
@classmethod
- def _get_non_printable_regex(cls, data):
- # type: (Text) -> Optional[Tuple[int, Text]]
+ def _get_non_printable_regex(cls, data: Text) -> Optional[Tuple[int, Text]]:
match = cls.NON_PRINTABLE.search(data)
if not bool(match):
return None
return match.start(), match.group()
@classmethod
- def _get_non_printable(cls, data):
- # type: (Text) -> Optional[Tuple[int, Text]]
+ def _get_non_printable(cls, data: Text) -> Optional[Tuple[int, Text]]:
try:
return cls._get_non_printable_ascii(data) # type: ignore
except UnicodeEncodeError:
return cls._get_non_printable_regex(data)
- def check_printable(self, data):
- # type: (Any) -> None
+ def check_printable(self, data: Any) -> None:
non_printable_match = self._get_non_printable(data)
if non_printable_match is not None:
start, character = non_printable_match
@@ -249,8 +224,7 @@ class Reader:
'special characters are not allowed',
)
- def update(self, length):
- # type: (int) -> None
+ def update(self, length: int) -> None:
if self.raw_buffer is None:
return
self.buffer = self.buffer[self.pointer :]
@@ -281,8 +255,7 @@ class Reader:
self.raw_buffer = None
break
- def update_raw(self, size=None):
- # type: (Optional[int]) -> None
+ def update_raw(self, size: Optional[int] = None) -> None:
if size is None:
size = 4096
data = self.stream.read(size)
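
Aside (not in the patch): the ReaderError.__str__ rewrite above relies on format specs such as `:02x` and `:d` behaving identically inside f-strings and str.format. A quick standalone check with made-up values:

    character, position = b'\xc3', 7
    via_format = '#x{c:02x}, position {p:d}'.format(c=ord(character), p=position)
    via_fstring = f'#x{ord(character):02x}, position {position:d}'
    assert via_format == via_fstring == '#xc3, position 7'
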
diff --git a/representer.py b/representer.py
index c57d9aa..8a03234 100644
--- a/representer.py
+++ b/representer.py
@@ -3,7 +3,7 @@
from ruamel.yaml.error import * # NOQA
from ruamel.yaml.nodes import * # NOQA
from ruamel.yaml.compat import ordereddict
-from ruamel.yaml.compat import _F, nprint, nprintf # NOQA
+from ruamel.yaml.compat import nprint, nprintf # NOQA
from ruamel.yaml.scalarstring import (
LiteralScalarString,
FoldedScalarString,
@@ -35,8 +35,7 @@ import types
import copyreg
import base64
-if False: # MYPY
- from typing import Dict, List, Any, Union, Text, Optional # NOQA
+from typing import Dict, List, Any, Union, Text, Optional # NOQA
# fmt: off
__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer',
@@ -50,24 +49,27 @@ class RepresenterError(YAMLError):
class BaseRepresenter:
- yaml_representers = {} # type: Dict[Any, Any]
- yaml_multi_representers = {} # type: Dict[Any, Any]
+ yaml_representers: Dict[Any, Any] = {}
+ yaml_multi_representers: Dict[Any, Any] = {}
- def __init__(self, default_style=None, default_flow_style=None, dumper=None):
- # type: (Any, Any, Any, Any) -> None
+ def __init__(
+ self: Any,
+ default_style: Any = None,
+ default_flow_style: Any = None,
+ dumper: Any = None,
+ ) -> None:
self.dumper = dumper
if self.dumper is not None:
self.dumper._representer = self
self.default_style = default_style
self.default_flow_style = default_flow_style
- self.represented_objects = {} # type: Dict[Any, Any]
- self.object_keeper = [] # type: List[Any]
- self.alias_key = None # type: Optional[int]
+ self.represented_objects: Dict[Any, Any] = {}
+ self.object_keeper: List[Any] = []
+ self.alias_key: Optional[int] = None
self.sort_base_mapping_type_on_output = True
@property
- def serializer(self):
- # type: () -> Any
+ def serializer(self) -> Any:
try:
if hasattr(self.dumper, 'typ'):
return self.dumper.serializer
@@ -75,16 +77,14 @@ class BaseRepresenter:
except AttributeError:
return self # cyaml
- def represent(self, data):
- # type: (Any) -> None
+ def represent(self, data: Any) -> None:
node = self.represent_data(data)
self.serializer.serialize(node)
self.represented_objects = {}
self.object_keeper = []
self.alias_key = None
- def represent_data(self, data):
- # type: (Any) -> Any
+ def represent_data(self, data: Any) -> Any:
if self.ignore_aliases(data):
self.alias_key = None
else:
@@ -117,8 +117,7 @@ class BaseRepresenter:
# self.represented_objects[alias_key] = node
return node
- def represent_key(self, data):
- # type: (Any) -> Any
+ def represent_key(self, data: Any) -> Any:
"""
David Fraser: Extract a method to represent keys in mappings, so that
a subclass can choose not to quote them (for example)
@@ -128,21 +127,20 @@ class BaseRepresenter:
return self.represent_data(data)
@classmethod
- def add_representer(cls, data_type, representer):
- # type: (Any, Any) -> None
+ def add_representer(cls, data_type: Any, representer: Any) -> None:
if 'yaml_representers' not in cls.__dict__:
cls.yaml_representers = cls.yaml_representers.copy()
cls.yaml_representers[data_type] = representer
@classmethod
- def add_multi_representer(cls, data_type, representer):
- # type: (Any, Any) -> None
+ def add_multi_representer(cls, data_type: Any, representer: Any) -> None:
if 'yaml_multi_representers' not in cls.__dict__:
cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
cls.yaml_multi_representers[data_type] = representer
- def represent_scalar(self, tag, value, style=None, anchor=None):
- # type: (Any, Any, Any, Any) -> Any
+ def represent_scalar(
+ self, tag: Any, value: Any, style: Any = None, anchor: Any = None
+ ) -> ScalarNode:
if style is None:
style = self.default_style
comment = None
@@ -155,9 +153,10 @@ class BaseRepresenter:
self.represented_objects[self.alias_key] = node
return node
- def represent_sequence(self, tag, sequence, flow_style=None):
- # type: (Any, Any, Any) -> Any
- value = [] # type: List[Any]
+ def represent_sequence(
+ self, tag: Any, sequence: Any, flow_style: Any = None
+ ) -> SequenceNode:
+ value: List[Any] = []
node = SequenceNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
@@ -174,9 +173,8 @@ class BaseRepresenter:
node.flow_style = best_style
return node
- def represent_omap(self, tag, omap, flow_style=None):
- # type: (Any, Any, Any) -> Any
- value = [] # type: List[Any]
+ def represent_omap(self, tag: Any, omap: Any, flow_style: Any = None) -> SequenceNode:
+ value: List[Any] = []
node = SequenceNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
@@ -195,9 +193,8 @@ class BaseRepresenter:
node.flow_style = best_style
return node
- def represent_mapping(self, tag, mapping, flow_style=None):
- # type: (Any, Any, Any) -> Any
- value = [] # type: List[Any]
+ def represent_mapping(self, tag: Any, mapping: Any, flow_style: Any = None) -> MappingNode:
+ value: List[Any] = []
node = MappingNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
@@ -224,14 +221,12 @@ class BaseRepresenter:
node.flow_style = best_style
return node
- def ignore_aliases(self, data):
- # type: (Any) -> bool
+ def ignore_aliases(self, data: Any) -> bool:
return False
class SafeRepresenter(BaseRepresenter):
- def ignore_aliases(self, data):
- # type: (Any) -> bool
+ def ignore_aliases(self, data: Any) -> bool:
# https://docs.python.org/3/reference/expressions.html#parenthesized-forms :
# "i.e. two occurrences of the empty tuple may or may not yield the same object"
# so "data is ()" should not be used
@@ -241,16 +236,13 @@ class SafeRepresenter(BaseRepresenter):
return True
return False
- def represent_none(self, data):
- # type: (Any) -> Any
+ def represent_none(self, data: Any) -> ScalarNode:
return self.represent_scalar('tag:yaml.org,2002:null', 'null')
- def represent_str(self, data):
- # type: (Any) -> Any
+ def represent_str(self, data: Any) -> Any:
return self.represent_scalar('tag:yaml.org,2002:str', data)
- def represent_binary(self, data):
- # type: (Any) -> Any
+ def represent_binary(self, data: Any) -> ScalarNode:
if hasattr(base64, 'encodebytes'):
data = base64.encodebytes(data).decode('ascii')
else:
@@ -258,8 +250,7 @@ class SafeRepresenter(BaseRepresenter):
data = base64.encodestring(data).decode('ascii') # type: ignore
return self.represent_scalar('tag:yaml.org,2002:binary', data, style='|')
- def represent_bool(self, data, anchor=None):
- # type: (Any, Optional[Any]) -> Any
+ def represent_bool(self, data: Any, anchor: Optional[Any] = None) -> ScalarNode:
try:
value = self.dumper.boolean_representation[bool(data)]
except AttributeError:
@@ -269,16 +260,14 @@ class SafeRepresenter(BaseRepresenter):
value = 'false'
return self.represent_scalar('tag:yaml.org,2002:bool', value, anchor=anchor)
- def represent_int(self, data):
- # type: (Any) -> Any
+ def represent_int(self, data: Any) -> ScalarNode:
return self.represent_scalar('tag:yaml.org,2002:int', str(data))
inf_value = 1e300
while repr(inf_value) != repr(inf_value * inf_value):
inf_value *= inf_value
- def represent_float(self, data):
- # type: (Any) -> Any
+ def represent_float(self, data: Any) -> ScalarNode:
if data != data or (data == 0.0 and data == 1.0):
value = '.nan'
elif data == self.inf_value:
@@ -299,8 +288,7 @@ class SafeRepresenter(BaseRepresenter):
value = value.replace('e', '.0e', 1)
return self.represent_scalar('tag:yaml.org,2002:float', value)
- def represent_list(self, data):
- # type: (Any) -> Any
+ def represent_list(self, data: Any) -> SequenceNode:
# pairs = (len(data) > 0 and isinstance(data, list))
# if pairs:
# for item in data:
@@ -316,42 +304,37 @@ class SafeRepresenter(BaseRepresenter):
# [(item_key, item_value)]))
# return SequenceNode('tag:yaml.org,2002:pairs', value)
- def represent_dict(self, data):
- # type: (Any) -> Any
+ def represent_dict(self, data: Any) -> MappingNode:
return self.represent_mapping('tag:yaml.org,2002:map', data)
- def represent_ordereddict(self, data):
- # type: (Any) -> Any
+ def represent_ordereddict(self, data: Any) -> SequenceNode:
return self.represent_omap('tag:yaml.org,2002:omap', data)
- def represent_set(self, data):
- # type: (Any) -> Any
- value = {} # type: Dict[Any, None]
+ def represent_set(self, data: Any) -> MappingNode:
+ value: Dict[Any, None] = {}
for key in data:
value[key] = None
return self.represent_mapping('tag:yaml.org,2002:set', value)
- def represent_date(self, data):
- # type: (Any) -> Any
+ def represent_date(self, data: Any) -> ScalarNode:
value = data.isoformat()
return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
- def represent_datetime(self, data):
- # type: (Any) -> Any
+ def represent_datetime(self, data: Any) -> ScalarNode:
value = data.isoformat(' ')
return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
- def represent_yaml_object(self, tag, data, cls, flow_style=None):
- # type: (Any, Any, Any, Any) -> Any
+ def represent_yaml_object(
+ self, tag: Any, data: Any, cls: Any, flow_style: Any = None
+ ) -> MappingNode:
if hasattr(data, '__getstate__'):
state = data.__getstate__()
else:
state = data.__dict__.copy()
return self.represent_mapping(tag, state, flow_style=flow_style)
- def represent_undefined(self, data):
- # type: (Any) -> None
- raise RepresenterError(_F('cannot represent an object: {data!s}', data=data))
+ def represent_undefined(self, data: Any) -> None:
+ raise RepresenterError(f'cannot represent an object: {data!s}')
SafeRepresenter.add_representer(type(None), SafeRepresenter.represent_none)
@@ -391,39 +374,32 @@ SafeRepresenter.add_representer(None, SafeRepresenter.represent_undefined)
class Representer(SafeRepresenter):
- def represent_complex(self, data):
- # type: (Any) -> Any
+ def represent_complex(self, data: Any) -> Any:
if data.imag == 0.0:
data = repr(data.real)
elif data.real == 0.0:
- data = _F('{data_imag!r}j', data_imag=data.imag)
+ data = f'{data.imag!r}j'
elif data.imag > 0:
- data = _F('{data_real!r}+{data_imag!r}j', data_real=data.real, data_imag=data.imag)
+ data = f'{data.real!r}+{data.imag!r}j'
else:
- data = _F('{data_real!r}{data_imag!r}j', data_real=data.real, data_imag=data.imag)
+ data = f'{data.real!r}{data.imag!r}j'
return self.represent_scalar('tag:yaml.org,2002:python/complex', data)
- def represent_tuple(self, data):
- # type: (Any) -> Any
+ def represent_tuple(self, data: Any) -> SequenceNode:
return self.represent_sequence('tag:yaml.org,2002:python/tuple', data)
- def represent_name(self, data):
- # type: (Any) -> Any
+ def represent_name(self, data: Any) -> ScalarNode:
try:
- name = _F(
- '{modname!s}.{qualname!s}', modname=data.__module__, qualname=data.__qualname__
- )
+ name = f'{data.__module__!s}.{data.__qualname__!s}'
except AttributeError:
# ToDo: check if this can be reached in Py3
- name = _F('{modname!s}.{name!s}', modname=data.__module__, name=data.__name__)
+ name = f'{data.__module__!s}.{data.__name__!s}'
return self.represent_scalar('tag:yaml.org,2002:python/name:' + name, "")
- def represent_module(self, data):
- # type: (Any) -> Any
+ def represent_module(self, data: Any) -> ScalarNode:
return self.represent_scalar('tag:yaml.org,2002:python/module:' + data.__name__, "")
- def represent_object(self, data):
- # type: (Any) -> Any
+ def represent_object(self, data: Any) -> Union[SequenceNode, MappingNode]:
# We use __reduce__ API to save the data. data.__reduce__ returns
# a tuple of length 2-5:
# (function, args, state, listitems, dictitems)
@@ -441,14 +417,14 @@ class Representer(SafeRepresenter):
# !!python/object/apply node.
cls = type(data)
- if cls in copyreg.dispatch_table: # type: ignore
- reduce = copyreg.dispatch_table[cls](data) # type: ignore
+ if cls in copyreg.dispatch_table:
+ reduce: Any = copyreg.dispatch_table[cls](data)
elif hasattr(data, '__reduce_ex__'):
reduce = data.__reduce_ex__(2)
elif hasattr(data, '__reduce__'):
reduce = data.__reduce__()
else:
- raise RepresenterError(_F('cannot represent object: {data!r}', data=data))
+ raise RepresenterError(f'cannot represent object: {data!r}')
reduce = (list(reduce) + [None] * 5)[:5]
function, args, state, listitems, dictitems = reduce
args = list(args)
@@ -467,14 +443,10 @@ class Representer(SafeRepresenter):
tag = 'tag:yaml.org,2002:python/object/apply:'
newobj = False
try:
- function_name = _F(
- '{fun!s}.{qualname!s}', fun=function.__module__, qualname=function.__qualname__
- )
+ function_name = f'{function.__module__!s}.{function.__qualname__!s}'
except AttributeError:
# ToDo: check if this can be reached in Py3
- function_name = _F(
- '{fun!s}.{name!s}', fun=function.__module__, name=function.__name__
- )
+ function_name = f'{function.__module__!s}.{function.__name__!s}'
if not args and not listitems and not dictitems and isinstance(state, dict) and newobj:
return self.represent_mapping(
'tag:yaml.org,2002:python/object:' + function_name, state
@@ -514,8 +486,9 @@ class RoundTripRepresenter(SafeRepresenter):
# need to add type here and write out the .comment
# in serializer and emitter
- def __init__(self, default_style=None, default_flow_style=None, dumper=None):
- # type: (Any, Any, Any) -> None
+ def __init__(
+ self, default_style: Any = None, default_flow_style: Any = None, dumper: Any = None
+ ) -> None:
if not hasattr(dumper, 'typ') and default_flow_style is None:
default_flow_style = False
SafeRepresenter.__init__(
@@ -525,8 +498,7 @@ class RoundTripRepresenter(SafeRepresenter):
dumper=dumper,
)
- def ignore_aliases(self, data):
- # type: (Any) -> bool
+ def ignore_aliases(self, data: Any) -> bool:
try:
if data.anchor is not None and data.anchor.value is not None:
return False
@@ -534,15 +506,13 @@ class RoundTripRepresenter(SafeRepresenter):
pass
return SafeRepresenter.ignore_aliases(self, data)
- def represent_none(self, data):
- # type: (Any) -> Any
+ def represent_none(self, data: Any) -> ScalarNode:
if len(self.represented_objects) == 0 and not self.serializer.use_explicit_start:
# this will be open ended (although it is not yet)
return self.represent_scalar('tag:yaml.org,2002:null', 'null')
return self.represent_scalar('tag:yaml.org,2002:null', "")
- def represent_literal_scalarstring(self, data):
- # type: (Any) -> Any
+ def represent_literal_scalarstring(self, data: Any) -> ScalarNode:
tag = None
style = '|'
anchor = data.yaml_anchor(any=True)
@@ -551,8 +521,7 @@ class RoundTripRepresenter(SafeRepresenter):
represent_preserved_scalarstring = represent_literal_scalarstring
- def represent_folded_scalarstring(self, data):
- # type: (Any) -> Any
+ def represent_folded_scalarstring(self, data: Any) -> ScalarNode:
tag = None
style = '>'
anchor = data.yaml_anchor(any=True)
@@ -566,32 +535,30 @@ class RoundTripRepresenter(SafeRepresenter):
tag = 'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data, style=style, anchor=anchor)
- def represent_single_quoted_scalarstring(self, data):
- # type: (Any) -> Any
+ def represent_single_quoted_scalarstring(self, data: Any) -> ScalarNode:
tag = None
style = "'"
anchor = data.yaml_anchor(any=True)
tag = 'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data, style=style, anchor=anchor)
- def represent_double_quoted_scalarstring(self, data):
- # type: (Any) -> Any
+ def represent_double_quoted_scalarstring(self, data: Any) -> ScalarNode:
tag = None
style = '"'
anchor = data.yaml_anchor(any=True)
tag = 'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data, style=style, anchor=anchor)
- def represent_plain_scalarstring(self, data):
- # type: (Any) -> Any
+ def represent_plain_scalarstring(self, data: Any) -> ScalarNode:
tag = None
style = ''
anchor = data.yaml_anchor(any=True)
tag = 'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data, style=style, anchor=anchor)
- def insert_underscore(self, prefix, s, underscore, anchor=None):
- # type: (Any, Any, Any, Any) -> Any
+ def insert_underscore(
+ self, prefix: Any, s: Any, underscore: Any, anchor: Any = None
+ ) -> ScalarNode:
if underscore is None:
return self.represent_scalar('tag:yaml.org,2002:int', prefix + s, anchor=anchor)
if underscore[0]:
@@ -607,30 +574,27 @@ class RoundTripRepresenter(SafeRepresenter):
s += '_'
return self.represent_scalar('tag:yaml.org,2002:int', prefix + s, anchor=anchor)
- def represent_scalar_int(self, data):
- # type: (Any) -> Any
+ def represent_scalar_int(self, data: Any) -> ScalarNode:
if data._width is not None:
- s = '{:0{}d}'.format(data, data._width)
+ s = f'{data:0{data._width}d}'
else:
s = format(data, 'd')
anchor = data.yaml_anchor(any=True)
return self.insert_underscore("", s, data._underscore, anchor=anchor)
- def represent_binary_int(self, data):
- # type: (Any) -> Any
+ def represent_binary_int(self, data: Any) -> ScalarNode:
if data._width is not None:
# cannot use '{:#0{}b}', that strips the zeros
- s = '{:0{}b}'.format(data, data._width)
+ s = f'{data:0{data._width}b}'
else:
s = format(data, 'b')
anchor = data.yaml_anchor(any=True)
return self.insert_underscore('0b', s, data._underscore, anchor=anchor)
- def represent_octal_int(self, data):
- # type: (Any) -> Any
+ def represent_octal_int(self, data: Any) -> ScalarNode:
if data._width is not None:
# cannot use '{:#0{}o}', that strips the zeros
- s = '{:0{}o}'.format(data, data._width)
+ s = f'{data:0{data._width}o}'
else:
s = format(data, 'o')
anchor = data.yaml_anchor(any=True)
@@ -639,28 +603,25 @@ class RoundTripRepresenter(SafeRepresenter):
prefix = '0'
return self.insert_underscore(prefix, s, data._underscore, anchor=anchor)
- def represent_hex_int(self, data):
- # type: (Any) -> Any
+ def represent_hex_int(self, data: Any) -> ScalarNode:
if data._width is not None:
# cannot use '{:#0{}x}', that strips the zeros
- s = '{:0{}x}'.format(data, data._width)
+ s = f'{data:0{data._width}x}'
else:
s = format(data, 'x')
anchor = data.yaml_anchor(any=True)
return self.insert_underscore('0x', s, data._underscore, anchor=anchor)
- def represent_hex_caps_int(self, data):
- # type: (Any) -> Any
+ def represent_hex_caps_int(self, data: Any) -> ScalarNode:
if data._width is not None:
# cannot use '{:#0{}X}', that strips the zeros
- s = '{:0{}X}'.format(data, data._width)
+ s = f'{data:0{data._width}X}'
else:
s = format(data, 'X')
anchor = data.yaml_anchor(any=True)
return self.insert_underscore('0x', s, data._underscore, anchor=anchor)
- def represent_scalar_float(self, data):
- # type: (Any) -> Any
+ def represent_scalar_float(self, data: Any) -> ScalarNode:
""" this is way more complicated """
value = None
anchor = data.yaml_anchor(any=True)
@@ -674,27 +635,26 @@ class RoundTripRepresenter(SafeRepresenter):
return self.represent_scalar('tag:yaml.org,2002:float', value, anchor=anchor)
if data._exp is None and data._prec > 0 and data._prec == data._width - 1:
# no exponent, but trailing dot
- value = '{}{:d}.'.format(data._m_sign if data._m_sign else "", abs(int(data)))
+ value = f'{data._m_sign if data._m_sign else ""}{abs(int(data)):d}.'
elif data._exp is None:
# no exponent, "normal" dot
prec = data._prec
ms = data._m_sign if data._m_sign else ""
- # -1 for the dot
- value = '{}{:0{}.{}f}'.format(
- ms, abs(data), data._width - len(ms), data._width - prec - 1
- )
- if prec == 0 or (prec == 1 and ms != ""):
- value = value.replace('0.', '.')
+ if prec < 0:
+ value = f'{ms}{abs(int(data)):0{data._width - len(ms)}d}'
+ else:
+ # -1 for the dot
+ value = f'{ms}{abs(data):0{data._width - len(ms)}.{data._width - prec - 1}f}'
+ if prec == 0 or (prec == 1 and ms != ""):
+ value = value.replace('0.', '.')
while len(value) < data._width:
value += '0'
else:
# exponent
- m, es = '{:{}.{}e}'.format(
- # data, data._width, data._width - data._prec + (1 if data._m_sign else 0)
- data,
- data._width,
- data._width + (1 if data._m_sign else 0),
- ).split('e')
+ (
+ m,
+ es,
+ ) = f'{data:{data._width}.{data._width + (1 if data._m_sign else 0)}e}'.split('e')
w = data._width if data._prec > 0 else (data._width + 1)
if data < 0:
w += 1
@@ -714,10 +674,10 @@ class RoundTripRepresenter(SafeRepresenter):
while (len(m1) + len(m2) - (1 if data._m_sign else 0)) < data._width:
m2 += '0'
e -= 1
- value = m1 + m2 + data._exp + '{:{}0{}d}'.format(e, esgn, data._e_width)
+ value = m1 + m2 + data._exp + f'{e:{esgn}0{data._e_width}d}'
elif data._prec == 0: # mantissa with trailing dot
e -= len(m2)
- value = m1 + m2 + '.' + data._exp + '{:{}0{}d}'.format(e, esgn, data._e_width)
+ value = m1 + m2 + '.' + data._exp + f'{e:{esgn}0{data._e_width}d}'
else:
if data._m_lead0 > 0:
m2 = '0' * (data._m_lead0 - 1) + m1 + m2
@@ -728,15 +688,16 @@ class RoundTripRepresenter(SafeRepresenter):
m1 += m2[0]
m2 = m2[1:]
e -= 1
- value = m1 + '.' + m2 + data._exp + '{:{}0{}d}'.format(e, esgn, data._e_width)
+ value = m1 + '.' + m2 + data._exp + f'{e:{esgn}0{data._e_width}d}'
if value is None:
value = repr(data).lower()
return self.represent_scalar('tag:yaml.org,2002:float', value, anchor=anchor)
- def represent_sequence(self, tag, sequence, flow_style=None):
- # type: (Any, Any, Any) -> Any
- value = [] # type: List[Any]
+ def represent_sequence(
+ self, tag: Any, sequence: Any, flow_style: Any = None
+ ) -> SequenceNode:
+ value: List[Any] = []
# if the flow_style is None, the flow style tacked on to the object
# explicitly will be taken. If that is None as well the default flow
# style rules
@@ -789,8 +750,7 @@ class RoundTripRepresenter(SafeRepresenter):
node.flow_style = best_style
return node
- def merge_comments(self, node, comments):
- # type: (Any, Any) -> Any
+ def merge_comments(self, node: Any, comments: Any) -> Any:
if comments is None:
assert hasattr(node, 'comment')
return node
@@ -805,8 +765,7 @@ class RoundTripRepresenter(SafeRepresenter):
node.comment = comments
return node
- def represent_key(self, data):
- # type: (Any) -> Any
+ def represent_key(self, data: Any) -> Any:
if isinstance(data, CommentedKeySeq):
self.alias_key = None
return self.represent_sequence('tag:yaml.org,2002:seq', data, flow_style=True)
@@ -815,9 +774,8 @@ class RoundTripRepresenter(SafeRepresenter):
return self.represent_mapping('tag:yaml.org,2002:map', data, flow_style=True)
return SafeRepresenter.represent_key(self, data)
- def represent_mapping(self, tag, mapping, flow_style=None):
- # type: (Any, Any, Any) -> Any
- value = [] # type: List[Any]
+ def represent_mapping(self, tag: Any, mapping: Any, flow_style: Any = None) -> MappingNode:
+ value: List[Any] = []
try:
flow_style = mapping.fa.flow_style(flow_style)
except AttributeError:
@@ -903,9 +861,8 @@ class RoundTripRepresenter(SafeRepresenter):
value.insert(merge_pos, (ScalarNode('tag:yaml.org,2002:merge', '<<'), arg))
return node
- def represent_omap(self, tag, omap, flow_style=None):
- # type: (Any, Any, Any) -> Any
- value = [] # type: List[Any]
+ def represent_omap(self, tag: Any, omap: Any, flow_style: Any = None) -> SequenceNode:
+ value: List[Any] = []
try:
flow_style = omap.fa.flow_style(flow_style)
except AttributeError:
@@ -967,12 +924,11 @@ class RoundTripRepresenter(SafeRepresenter):
node.flow_style = best_style
return node
- def represent_set(self, setting):
- # type: (Any) -> Any
+ def represent_set(self, setting: Any) -> MappingNode:
flow_style = False
tag = 'tag:yaml.org,2002:set'
# return self.represent_mapping(tag, value)
- value = [] # type: List[Any]
+ value: List[Any] = []
flow_style = setting.fa.flow_style(flow_style)
try:
anchor = setting.yaml_anchor()
@@ -1020,8 +976,7 @@ class RoundTripRepresenter(SafeRepresenter):
best_style = best_style
return node
- def represent_dict(self, data):
- # type: (Any) -> Any
+ def represent_dict(self, data: Any) -> MappingNode:
"""write out tag if saved on loading"""
try:
t = data.tag.value
@@ -1036,8 +991,7 @@ class RoundTripRepresenter(SafeRepresenter):
tag = 'tag:yaml.org,2002:map'
return self.represent_mapping(tag, data)
- def represent_list(self, data):
- # type: (Any) -> Any
+ def represent_list(self, data: Any) -> SequenceNode:
try:
t = data.tag.value
except AttributeError:
@@ -1051,8 +1005,7 @@ class RoundTripRepresenter(SafeRepresenter):
tag = 'tag:yaml.org,2002:seq'
return self.represent_sequence(tag, data)
- def represent_datetime(self, data):
- # type: (Any) -> Any
+ def represent_datetime(self, data: Any) -> ScalarNode:
inter = 'T' if data._yaml['t'] else ' '
_yaml = data._yaml
if _yaml['delta']:
@@ -1064,8 +1017,7 @@ class RoundTripRepresenter(SafeRepresenter):
value += _yaml['tz']
return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
- def represent_tagged_scalar(self, data):
- # type: (Any) -> Any
+ def represent_tagged_scalar(self, data: Any) -> ScalarNode:
try:
tag = data.tag.value
except AttributeError:
@@ -1076,16 +1028,16 @@ class RoundTripRepresenter(SafeRepresenter):
anchor = None
return self.represent_scalar(tag, data.value, style=data.style, anchor=anchor)
- def represent_scalar_bool(self, data):
- # type: (Any) -> Any
+ def represent_scalar_bool(self, data: Any) -> ScalarNode:
try:
anchor = data.yaml_anchor()
except AttributeError:
anchor = None
return SafeRepresenter.represent_bool(self, data, anchor=anchor)
- def represent_yaml_object(self, tag, data, cls, flow_style=None):
- # type: (Any, Any, Any, Optional[Any]) -> Any
+ def represent_yaml_object(
+ self, tag: Any, data: Any, cls: Any, flow_style: Optional[Any] = None
+ ) -> MappingNode:
if hasattr(data, '__getstate__'):
state = data.__getstate__()
else:
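
Aside (not in the patch): the represent_*_int conversions above, e.g. '{:0{}d}'.format(data, data._width) becoming f'{data:0{data._width}d}', depend on nested replacement fields inside an f-string format spec. A standalone check with illustrative values:

    value, width = 255, 6
    assert f'{value:0{width}x}' == '{:0{}x}'.format(value, width) == '0000ff'
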
diff --git a/resolver.py b/resolver.py
index 969b9a9..e7ed6d9 100644
--- a/resolver.py
+++ b/resolver.py
@@ -2,11 +2,10 @@
import re
-if False: # MYPY
- from typing import Any, Dict, List, Union, Text, Optional # NOQA
- from ruamel.yaml.compat import VersionType # NOQA
+from typing import Any, Dict, List, Union, Text, Optional # NOQA
+from ruamel.yaml.compat import VersionType # NOQA
-from ruamel.yaml.compat import _DEFAULT_YAML_VERSION, _F # NOQA
+from ruamel.yaml.compat import _DEFAULT_YAML_VERSION # NOQA
from ruamel.yaml.error import * # NOQA
from ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode # NOQA
from ruamel.yaml.util import RegExp # NOQA
@@ -107,21 +106,19 @@ class BaseResolver:
DEFAULT_SEQUENCE_TAG = 'tag:yaml.org,2002:seq'
DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map'
- yaml_implicit_resolvers = {} # type: Dict[Any, Any]
- yaml_path_resolvers = {} # type: Dict[Any, Any]
+ yaml_implicit_resolvers: Dict[Any, Any] = {}
+ yaml_path_resolvers: Dict[Any, Any] = {}
- def __init__(self, loadumper=None):
- # type: (Any, Any) -> None
+ def __init__(self: Any, loadumper: Any = None) -> None:
self.loadumper = loadumper
if self.loadumper is not None and getattr(self.loadumper, '_resolver', None) is None:
self.loadumper._resolver = self.loadumper
- self._loader_version = None # type: Any
- self.resolver_exact_paths = [] # type: List[Any]
- self.resolver_prefix_paths = [] # type: List[Any]
+ self._loader_version: Any = None
+ self.resolver_exact_paths: List[Any] = []
+ self.resolver_prefix_paths: List[Any] = []
@property
- def parser(self):
- # type: () -> Any
+ def parser(self) -> Any:
if self.loadumper is not None:
if hasattr(self.loadumper, 'typ'):
return self.loadumper.parser
@@ -129,8 +126,7 @@ class BaseResolver:
return None
@classmethod
- def add_implicit_resolver_base(cls, tag, regexp, first):
- # type: (Any, Any, Any) -> None
+ def add_implicit_resolver_base(cls, tag: Any, regexp: Any, first: Any) -> None:
if 'yaml_implicit_resolvers' not in cls.__dict__:
# deepcopy doesn't work here
cls.yaml_implicit_resolvers = dict(
@@ -142,8 +138,7 @@ class BaseResolver:
cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
@classmethod
- def add_implicit_resolver(cls, tag, regexp, first):
- # type: (Any, Any, Any) -> None
+ def add_implicit_resolver(cls, tag: Any, regexp: Any, first: Any) -> None:
if 'yaml_implicit_resolvers' not in cls.__dict__:
# deepcopy doesn't work here
cls.yaml_implicit_resolvers = dict(
@@ -159,8 +154,7 @@ class BaseResolver:
# def add_implicit_resolver(cls, tag, regexp, first):
@classmethod
- def add_path_resolver(cls, tag, path, kind=None):
- # type: (Any, Any, Any) -> None
+ def add_path_resolver(cls, tag: Any, path: Any, kind: Any = None) -> None:
# Note: `add_path_resolver` is experimental. The API could be changed.
# `new_path` is a pattern that is matched against the path from the
# root to the node that is being considered. `node_path` elements are
@@ -175,7 +169,7 @@ class BaseResolver:
# against a sequence value with the index equal to `index_check`.
if 'yaml_path_resolvers' not in cls.__dict__:
cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
- new_path = [] # type: List[Any]
+ new_path: List[Any] = []
for element in path:
if isinstance(element, (list, tuple)):
if len(element) == 2:
@@ -184,9 +178,7 @@ class BaseResolver:
node_check = element[0]
index_check = True
else:
- raise ResolverError(
- _F('Invalid path element: {element!s}', element=element)
- )
+ raise ResolverError(f'Invalid path element: {element!s}')
else:
node_check = None
index_check = element
@@ -201,13 +193,9 @@ class BaseResolver:
and not isinstance(node_check, str)
and node_check is not None
):
- raise ResolverError(
- _F('Invalid node checker: {node_check!s}', node_check=node_check)
- )
+ raise ResolverError(f'Invalid node checker: {node_check!s}')
if not isinstance(index_check, (str, int)) and index_check is not None:
- raise ResolverError(
- _F('Invalid index checker: {index_check!s}', index_check=index_check)
- )
+ raise ResolverError(f'Invalid index checker: {index_check!s}')
new_path.append((node_check, index_check))
if kind is str:
kind = ScalarNode
@@ -216,11 +204,10 @@ class BaseResolver:
elif kind is dict:
kind = MappingNode
elif kind not in [ScalarNode, SequenceNode, MappingNode] and kind is not None:
- raise ResolverError(_F('Invalid node kind: {kind!s}', kind=kind))
+ raise ResolverError(f'Invalid node kind: {kind!s}')
cls.yaml_path_resolvers[tuple(new_path), kind] = tag
- def descend_resolver(self, current_node, current_index):
- # type: (Any, Any) -> None
+ def descend_resolver(self, current_node: Any, current_index: Any) -> None:
if not self.yaml_path_resolvers:
return
exact_paths = {}
@@ -242,15 +229,15 @@ class BaseResolver:
self.resolver_exact_paths.append(exact_paths)
self.resolver_prefix_paths.append(prefix_paths)
- def ascend_resolver(self):
- # type: () -> None
+ def ascend_resolver(self) -> None:
if not self.yaml_path_resolvers:
return
self.resolver_exact_paths.pop()
self.resolver_prefix_paths.pop()
- def check_resolver_prefix(self, depth, path, kind, current_node, current_index):
- # type: (int, Any, Any, Any, Any) -> bool
+ def check_resolver_prefix(
+ self, depth: int, path: Any, kind: Any, current_node: Any, current_index: Any
+ ) -> bool:
node_check, index_check = path[depth - 1]
if isinstance(node_check, str):
if current_node.tag != node_check:
@@ -272,8 +259,7 @@ class BaseResolver:
return False
return True
- def resolve(self, kind, value, implicit):
- # type: (Any, Any, Any) -> Any
+ def resolve(self, kind: Any, value: Any, implicit: Any) -> Any:
if kind is ScalarNode and implicit[0]:
if value == "":
resolvers = self.yaml_implicit_resolvers.get("", [])
@@ -298,8 +284,7 @@ class BaseResolver:
return self.DEFAULT_MAPPING_TAG
@property
- def processing_version(self):
- # type: () -> Any
+ def processing_version(self) -> Any:
return None
@@ -320,24 +305,25 @@ class VersionedResolver(BaseResolver):
and Yes/No/On/Off booleans.
"""
- def __init__(self, version=None, loader=None, loadumper=None):
- # type: (Optional[VersionType], Any, Any) -> None
+ def __init__(
+ self, version: Optional[VersionType] = None, loader: Any = None, loadumper: Any = None
+ ) -> None:
if loader is None and loadumper is not None:
loader = loadumper
BaseResolver.__init__(self, loader)
self._loader_version = self.get_loader_version(version)
- self._version_implicit_resolver = {} # type: Dict[Any, Any]
+ self._version_implicit_resolver: Dict[Any, Any] = {}
- def add_version_implicit_resolver(self, version, tag, regexp, first):
- # type: (VersionType, Any, Any, Any) -> None
+ def add_version_implicit_resolver(
+ self, version: VersionType, tag: Any, regexp: Any, first: Any
+ ) -> None:
if first is None:
first = [None]
impl_resolver = self._version_implicit_resolver.setdefault(version, {})
for ch in first:
impl_resolver.setdefault(ch, []).append((tag, regexp))
- def get_loader_version(self, version):
- # type: (Optional[VersionType]) -> Any
+ def get_loader_version(self, version: Optional[VersionType]) -> Any:
if version is None or isinstance(version, tuple):
return version
if isinstance(version, list):
@@ -346,8 +332,7 @@ class VersionedResolver(BaseResolver):
return tuple(map(int, version.split('.')))
@property
- def versioned_resolver(self):
- # type: () -> Any
+ def versioned_resolver(self) -> Any:
"""
select the resolver based on the version we are parsing
"""
@@ -360,8 +345,7 @@ class VersionedResolver(BaseResolver):
self.add_version_implicit_resolver(version, x[1], x[2], x[3])
return self._version_implicit_resolver[version]
- def resolve(self, kind, value, implicit):
- # type: (Any, Any, Any) -> Any
+ def resolve(self, kind: Any, value: Any, implicit: Any) -> Any:
if kind is ScalarNode and implicit[0]:
if value == "":
resolvers = self.versioned_resolver.get("", [])
@@ -386,8 +370,7 @@ class VersionedResolver(BaseResolver):
return self.DEFAULT_MAPPING_TAG
@property
- def processing_version(self):
- # type: () -> Any
+ def processing_version(self) -> Any:
try:
version = self.loadumper._scanner.yaml_version
except AttributeError:
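
Aside (not in the patch): several resolver and representer classmethods above guard class-level registries with a `'...' not in cls.__dict__` test before mutating them. A small sketch of that copy-on-first-write pattern, with hypothetical names:

    from typing import Any, Dict

    class Base:
        registry: Dict[Any, Any] = {}

        @classmethod
        def register(cls, key: Any, value: Any) -> None:
            # give each subclass its own copy so registrations do not
            # leak into the base class or sibling subclasses
            if 'registry' not in cls.__dict__:
                cls.registry = dict(cls.registry)
            cls.registry[key] = value

    class Child(Base):
        pass

    Child.register('x', 1)
    assert 'x' in Child.registry and 'x' not in Base.registry
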
diff --git a/scalarbool.py b/scalarbool.py
index 60242b4..083d3cb 100644
--- a/scalarbool.py
+++ b/scalarbool.py
@@ -11,15 +11,13 @@ You can use these in an if statement, but not when testing equivalence
from ruamel.yaml.anchor import Anchor
-if False: # MYPY
- from typing import Text, Any, Dict, List # NOQA
+from typing import Text, Any, Dict, List # NOQA
__all__ = ['ScalarBoolean']
class ScalarBoolean(int):
- def __new__(cls, *args, **kw):
- # type: (Any, Any, Any) -> Any
+ def __new__(cls: Any, *args: Any, **kw: Any) -> Any:
anchor = kw.pop('anchor', None)
b = int.__new__(cls, *args, **kw)
if anchor is not None:
@@ -27,21 +25,18 @@ class ScalarBoolean(int):
return b
@property
- def anchor(self):
- # type: () -> Any
+ def anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
setattr(self, Anchor.attrib, Anchor())
return getattr(self, Anchor.attrib)
- def yaml_anchor(self, any=False):
- # type: (bool) -> Any
+ def yaml_anchor(self, any: bool = False) -> Any:
if not hasattr(self, Anchor.attrib):
return None
if any or self.anchor.always_dump:
return self.anchor
return None
- def yaml_set_anchor(self, value, always_dump=False):
- # type: (Any, bool) -> None
+ def yaml_set_anchor(self, value: Any, always_dump: bool = False) -> None:
self.anchor.value = value
self.anchor.always_dump = always_dump
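
Aside (not in the patch): ScalarBoolean derives from int because bool itself cannot be subclassed, and the extra keyword argument (the anchor) is consumed in __new__ before int sees it. A stripped-down sketch of that construction, with a hypothetical attribute instead of the Anchor machinery:

    from typing import Any

    class FlaggedInt(int):
        def __new__(cls: Any, *args: Any, **kw: Any) -> Any:
            label = kw.pop('label', None)   # consume before int.__new__
            obj = int.__new__(cls, *args, **kw)
            obj._label = label
            return obj

    v = FlaggedInt(1, label='yes')
    assert v == 1 and v._label == 'yes'
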
diff --git a/scalarfloat.py b/scalarfloat.py
index b9f8bdf..d3fe12e 100644
--- a/scalarfloat.py
+++ b/scalarfloat.py
@@ -3,15 +3,13 @@
import sys
from ruamel.yaml.anchor import Anchor
-if False: # MYPY
- from typing import Text, Any, Dict, List # NOQA
+from typing import Text, Any, Dict, List # NOQA
__all__ = ['ScalarFloat', 'ExponentialFloat', 'ExponentialCapsFloat']
class ScalarFloat(float):
- def __new__(cls, *args, **kw):
- # type: (Any, Any, Any) -> Any
+ def __new__(cls: Any, *args: Any, **kw: Any) -> Any:
width = kw.pop('width', None)
prec = kw.pop('prec', None)
m_sign = kw.pop('m_sign', None)
@@ -34,24 +32,21 @@ class ScalarFloat(float):
v.yaml_set_anchor(anchor, always_dump=True)
return v
- def __iadd__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __iadd__(self, a: Any) -> Any: # type: ignore
return float(self) + a
x = type(self)(self + a)
x._width = self._width
x._underscore = self._underscore[:] if self._underscore is not None else None # NOQA
return x
- def __ifloordiv__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __ifloordiv__(self, a: Any) -> Any: # type: ignore
return float(self) // a
x = type(self)(self // a)
x._width = self._width
x._underscore = self._underscore[:] if self._underscore is not None else None # NOQA
return x
- def __imul__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __imul__(self, a: Any) -> Any: # type: ignore
return float(self) * a
x = type(self)(self * a)
x._width = self._width
@@ -59,16 +54,14 @@ class ScalarFloat(float):
x._prec = self._prec # check for others
return x
- def __ipow__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __ipow__(self, a: Any) -> Any: # type: ignore
return float(self) ** a
x = type(self)(self ** a)
x._width = self._width
x._underscore = self._underscore[:] if self._underscore is not None else None # NOQA
return x
- def __isub__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __isub__(self, a: Any) -> Any: # type: ignore
return float(self) - a
x = type(self)(self - a)
x._width = self._width
@@ -76,49 +69,35 @@ class ScalarFloat(float):
return x
@property
- def anchor(self):
- # type: () -> Any
+ def anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
setattr(self, Anchor.attrib, Anchor())
return getattr(self, Anchor.attrib)
- def yaml_anchor(self, any=False):
- # type: (bool) -> Any
+ def yaml_anchor(self, any: bool = False) -> Any:
if not hasattr(self, Anchor.attrib):
return None
if any or self.anchor.always_dump:
return self.anchor
return None
- def yaml_set_anchor(self, value, always_dump=False):
- # type: (Any, bool) -> None
+ def yaml_set_anchor(self, value: Any, always_dump: bool = False) -> None:
self.anchor.value = value
self.anchor.always_dump = always_dump
- def dump(self, out=sys.stdout):
- # type: (Any) -> Any
+ def dump(self, out: Any = sys.stdout) -> None:
out.write(
- 'ScalarFloat({}| w:{}, p:{}, s:{}, lz:{}, _:{}|{}, w:{}, s:{})\n'.format(
- self,
- self._width, # type: ignore
- self._prec, # type: ignore
- self._m_sign, # type: ignore
- self._m_lead0, # type: ignore
- self._underscore, # type: ignore
- self._exp, # type: ignore
- self._e_width, # type: ignore
- self._e_sign, # type: ignore
- )
+ f'ScalarFloat({self}| w:{self._width}, p:{self._prec}, ' # type: ignore
+ f's:{self._m_sign}, lz:{self._m_lead0}, _:{self._underscore}|{self._exp}'
+ f', w:{self._e_width}, s:{self._e_sign})\n'
)
class ExponentialFloat(ScalarFloat):
- def __new__(cls, value, width=None, underscore=None):
- # type: (Any, Any, Any) -> Any
+ def __new__(cls, value: Any, width: Any = None, underscore: Any = None) -> Any:
return ScalarFloat.__new__(cls, value, width=width, underscore=underscore)
class ExponentialCapsFloat(ScalarFloat):
- def __new__(cls, value, width=None, underscore=None):
- # type: (Any, Any, Any) -> Any
+ def __new__(cls, value: Any, width: Any = None, underscore: Any = None) -> Any:
return ScalarFloat.__new__(cls, value, width=width, underscore=underscore)
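
Aside (not in the patch): the rewritten ScalarFloat.dump spreads one message over several adjacent f-string literals; Python concatenates adjacent string literals at compile time, so the output is unchanged. Illustrative values:

    width, prec = 10, 3
    out = (
        f'ScalarFloat-like(w:{width}, '
        f'p:{prec})'
    )
    assert out == 'ScalarFloat-like(w:10, p:3)'
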
diff --git a/scalarint.py b/scalarint.py
index 1572b0f..3a2603d 100644
--- a/scalarint.py
+++ b/scalarint.py
@@ -2,15 +2,13 @@
from ruamel.yaml.anchor import Anchor
-if False: # MYPY
- from typing import Text, Any, Dict, List # NOQA
+from typing import Text, Any, Dict, List # NOQA
__all__ = ['ScalarInt', 'BinaryInt', 'OctalInt', 'HexInt', 'HexCapsInt', 'DecimalInt']
class ScalarInt(int):
- def __new__(cls, *args, **kw):
- # type: (Any, Any, Any) -> Any
+ def __new__(cls: Any, *args: Any, **kw: Any) -> Any:
width = kw.pop('width', None)
underscore = kw.pop('underscore', None)
anchor = kw.pop('anchor', None)
@@ -21,8 +19,7 @@ class ScalarInt(int):
v.yaml_set_anchor(anchor, always_dump=True)
return v
- def __iadd__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __iadd__(self, a: Any) -> Any: # type: ignore
x = type(self)(self + a)
x._width = self._width # type: ignore
x._underscore = ( # type: ignore
@@ -30,8 +27,7 @@ class ScalarInt(int):
) # NOQA
return x
- def __ifloordiv__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __ifloordiv__(self, a: Any) -> Any: # type: ignore
x = type(self)(self // a)
x._width = self._width # type: ignore
x._underscore = ( # type: ignore
@@ -39,8 +35,7 @@ class ScalarInt(int):
) # NOQA
return x
- def __imul__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __imul__(self, a: Any) -> Any: # type: ignore
x = type(self)(self * a)
x._width = self._width # type: ignore
x._underscore = ( # type: ignore
@@ -48,8 +43,7 @@ class ScalarInt(int):
) # NOQA
return x
- def __ipow__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __ipow__(self, a: Any) -> Any: # type: ignore
x = type(self)(self ** a)
x._width = self._width # type: ignore
x._underscore = ( # type: ignore
@@ -57,8 +51,7 @@ class ScalarInt(int):
) # NOQA
return x
- def __isub__(self, a): # type: ignore
- # type: (Any) -> Any
+ def __isub__(self, a: Any) -> Any: # type: ignore
x = type(self)(self - a)
x._width = self._width # type: ignore
x._underscore = ( # type: ignore
@@ -67,35 +60,34 @@ class ScalarInt(int):
return x
@property
- def anchor(self):
- # type: () -> Any
+ def anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
setattr(self, Anchor.attrib, Anchor())
return getattr(self, Anchor.attrib)
- def yaml_anchor(self, any=False):
- # type: (bool) -> Any
+ def yaml_anchor(self, any: bool = False) -> Any:
if not hasattr(self, Anchor.attrib):
return None
if any or self.anchor.always_dump:
return self.anchor
return None
- def yaml_set_anchor(self, value, always_dump=False):
- # type: (Any, bool) -> None
+ def yaml_set_anchor(self, value: Any, always_dump: bool = False) -> None:
self.anchor.value = value
self.anchor.always_dump = always_dump
class BinaryInt(ScalarInt):
- def __new__(cls, value, width=None, underscore=None, anchor=None):
- # type: (Any, Any, Any, Any) -> Any
+ def __new__(
+ cls, value: Any, width: Any = None, underscore: Any = None, anchor: Any = None
+ ) -> Any:
return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
class OctalInt(ScalarInt):
- def __new__(cls, value, width=None, underscore=None, anchor=None):
- # type: (Any, Any, Any, Any) -> Any
+ def __new__(
+ cls, value: Any, width: Any = None, underscore: Any = None, anchor: Any = None
+ ) -> Any:
return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
@@ -106,22 +98,25 @@ class OctalInt(ScalarInt):
class HexInt(ScalarInt):
"""uses lower case (a-f)"""
- def __new__(cls, value, width=None, underscore=None, anchor=None):
- # type: (Any, Any, Any, Any) -> Any
+ def __new__(
+ cls, value: Any, width: Any = None, underscore: Any = None, anchor: Any = None
+ ) -> Any:
return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
class HexCapsInt(ScalarInt):
"""uses upper case (A-F)"""
- def __new__(cls, value, width=None, underscore=None, anchor=None):
- # type: (Any, Any, Any, Any) -> Any
+ def __new__(
+ cls, value: Any, width: Any = None, underscore: Any = None, anchor: Any = None
+ ) -> Any:
return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
class DecimalInt(ScalarInt):
"""needed if anchor"""
- def __new__(cls, value, width=None, underscore=None, anchor=None):
- # type: (Any, Any, Any, Any) -> Any
+ def __new__(
+ cls, value: Any, width: Any = None, underscore: Any = None, anchor: Any = None
+ ) -> Any:
return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
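
Aside (not in the patch): the ScalarInt in-place operators above return a fresh instance of the subclass and copy the formatting metadata across. A minimal sketch of that idea with a hypothetical width attribute:

    from typing import Any

    class WidthInt(int):
        def __new__(cls: Any, *args: Any, **kw: Any) -> Any:
            width = kw.pop('width', None)
            v = int.__new__(cls, *args, **kw)
            v._width = width
            return v

        def __iadd__(self, a: Any) -> Any:
            x = type(self)(int(self) + a)
            x._width = self._width   # keep the display width across +=
            return x

    n = WidthInt(5, width=4)
    n += 3
    assert n == 8 and isinstance(n, WidthInt) and n._width == 4
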
diff --git a/scalarstring.py b/scalarstring.py
index 7538ab7..30f4fde 100644
--- a/scalarstring.py
+++ b/scalarstring.py
@@ -2,8 +2,8 @@
from ruamel.yaml.anchor import Anchor
-if False: # MYPY
- from typing import Text, Any, Dict, List # NOQA
+from typing import Text, Any, Dict, List # NOQA
+from ruamel.yaml.compat import SupportsIndex
__all__ = [
'ScalarString',
@@ -21,35 +21,30 @@ __all__ = [
class ScalarString(str):
__slots__ = Anchor.attrib
- def __new__(cls, *args, **kw):
- # type: (Any, Any) -> Any
+ def __new__(cls, *args: Any, **kw: Any) -> Any:
anchor = kw.pop('anchor', None)
ret_val = str.__new__(cls, *args, **kw)
if anchor is not None:
ret_val.yaml_set_anchor(anchor, always_dump=True)
return ret_val
- def replace(self, old, new, maxreplace=-1):
- # type: (Any, Any, int) -> Any
+ def replace(self, old: Any, new: Any, maxreplace: SupportsIndex = -1) -> Any:
return type(self)((str.replace(self, old, new, maxreplace)))
@property
- def anchor(self):
- # type: () -> Any
+ def anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
setattr(self, Anchor.attrib, Anchor())
return getattr(self, Anchor.attrib)
- def yaml_anchor(self, any=False):
- # type: (bool) -> Any
+ def yaml_anchor(self, any: bool = False) -> Any:
if not hasattr(self, Anchor.attrib):
return None
if any or self.anchor.always_dump:
return self.anchor
return None
- def yaml_set_anchor(self, value, always_dump=False):
- # type: (Any, bool) -> None
+ def yaml_set_anchor(self, value: Any, always_dump: bool = False) -> None:
self.anchor.value = value
self.anchor.always_dump = always_dump
@@ -59,8 +54,7 @@ class LiteralScalarString(ScalarString):
style = '|'
- def __new__(cls, value, anchor=None):
- # type: (Text, Any) -> Any
+ def __new__(cls, value: Text, anchor: Any = None) -> Any:
return ScalarString.__new__(cls, value, anchor=anchor)
@@ -72,8 +66,7 @@ class FoldedScalarString(ScalarString):
style = '>'
- def __new__(cls, value, anchor=None):
- # type: (Text, Any) -> Any
+ def __new__(cls, value: Text, anchor: Any = None) -> Any:
return ScalarString.__new__(cls, value, anchor=anchor)
@@ -82,8 +75,7 @@ class SingleQuotedScalarString(ScalarString):
style = "'"
- def __new__(cls, value, anchor=None):
- # type: (Text, Any) -> Any
+ def __new__(cls, value: Text, anchor: Any = None) -> Any:
return ScalarString.__new__(cls, value, anchor=anchor)
@@ -92,8 +84,7 @@ class DoubleQuotedScalarString(ScalarString):
style = '"'
- def __new__(cls, value, anchor=None):
- # type: (Text, Any) -> Any
+ def __new__(cls, value: Text, anchor: Any = None) -> Any:
return ScalarString.__new__(cls, value, anchor=anchor)
@@ -102,18 +93,15 @@ class PlainScalarString(ScalarString):
style = ''
- def __new__(cls, value, anchor=None):
- # type: (Text, Any) -> Any
+ def __new__(cls, value: Text, anchor: Any = None) -> Any:
return ScalarString.__new__(cls, value, anchor=anchor)
-def preserve_literal(s):
- # type: (Text) -> Text
+def preserve_literal(s: Text) -> Text:
return LiteralScalarString(s.replace('\r\n', '\n').replace('\r', '\n'))
-def walk_tree(base, map=None):
- # type: (Any, Any) -> None
+def walk_tree(base: Any, map: Any = None) -> None:
"""
the routine here walks over a simple yaml tree (recursing in
dict values and list items) and converts strings that
@@ -133,7 +121,7 @@ def walk_tree(base, map=None):
if isinstance(base, MutableMapping):
for k in base:
- v = base[k] # type: Text
+ v: Text = base[k]
if isinstance(v, str):
for ch in map:
if ch in v:
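For context (not part of the patch): walk_tree, annotated above, converts plain strings that contain newlines into LiteralScalarString instances before dumping, using preserve_literal unless a custom map is given. A short usage sketch:

import sys
from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import walk_tree

data = {'msg': 'first line\nsecond line\n'}
walk_tree(data)                 # 'msg' becomes a LiteralScalarString
YAML().dump(data, sys.stdout)
# msg: |
#   first line
#   second line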
diff --git a/scanner.py b/scanner.py
index 61cae63..09fd2ad 100644
--- a/scanner.py
+++ b/scanner.py
@@ -31,11 +31,10 @@
import inspect
from ruamel.yaml.error import MarkedYAMLError, CommentMark # NOQA
from ruamel.yaml.tokens import * # NOQA
-from ruamel.yaml.compat import _F, check_anchorname_char, nprint, nprintf # NOQA
+from ruamel.yaml.compat import check_anchorname_char, nprint, nprintf # NOQA
-if False: # MYPY
- from typing import Any, Dict, Optional, List, Union, Text # NOQA
- from ruamel.yaml.compat import VersionType # NOQA
+from typing import Any, Dict, Optional, List, Union, Text # NOQA
+from ruamel.yaml.compat import VersionType # NOQA
__all__ = ['Scanner', 'RoundTripScanner', 'ScannerError']
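A note on the pattern, with a sketch that is not part of the patch: the scanner changes below are almost entirely mechanical. The "if False:  # MYPY" import guard becomes a plain typing import, "# type:" comments become inline annotations, and _F(...) calls become f-strings. A tiny equivalence check; the _F defined here is a hypothetical stand-in, assuming the removed ruamel.yaml.compat._F helper simply delegated to str.format:

def _F(fmt: str, **kw: object) -> str:
    # stand-in for the removed ruamel.yaml.compat._F helper (assumption)
    return fmt.format(**kw)

ch = '@'
old = _F('found character {ch!r} that cannot start any token', ch=ch)
new = f'found character {ch!r} that cannot start any token'
assert old == new
print(new)   # found character '@' that cannot start any token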
@@ -45,8 +44,7 @@ _THE_END_SPACE_TAB = ' \n\0\t\r\x85\u2028\u2029'
_SPACE_TAB = ' \t'
-def xprintf(*args, **kw):
- # type: (Any, Any) -> Any
+def xprintf(*args: Any, **kw: Any) -> Any:
return nprintf(*args, **kw)
pass
@@ -58,8 +56,9 @@ class ScannerError(MarkedYAMLError):
class SimpleKey:
# See below simple keys treatment.
- def __init__(self, token_number, required, index, line, column, mark):
- # type: (Any, Any, int, int, int, Any) -> None
+ def __init__(
+ self, token_number: Any, required: Any, index: int, line: int, column: int, mark: Any
+ ) -> None:
self.token_number = token_number
self.required = required
self.index = index
@@ -69,8 +68,7 @@ class SimpleKey:
class Scanner:
- def __init__(self, loader=None):
- # type: (Any) -> None
+ def __init__(self, loader: Any = None) -> None:
"""Initialize the scanner."""
# It is assumed that Scanner and Reader will have a common descendant.
# Reader do the dirty work of checking for BOM and converting the
@@ -86,24 +84,22 @@ class Scanner:
self.loader._scanner = self
self.reset_scanner()
self.first_time = False
- self.yaml_version = None # type: Any
+ self.yaml_version: Any = None
@property
- def flow_level(self):
- # type: () -> int
+ def flow_level(self) -> int:
return len(self.flow_context)
- def reset_scanner(self):
- # type: () -> None
+ def reset_scanner(self) -> None:
         # Have we reached the end of the stream?
self.done = False
# flow_context is an expanding/shrinking list consisting of '{' and '['
# for each unclosed flow context. If empty list that means block context
- self.flow_context = [] # type: List[Text]
+ self.flow_context: List[Text] = []
# List of processed tokens that are not yet emitted.
- self.tokens = [] # type: List[Any]
+ self.tokens: List[Any] = []
# Add the STREAM-START token.
self.fetch_stream_start()
@@ -115,7 +111,7 @@ class Scanner:
self.indent = -1
# Past indentation levels.
- self.indents = [] # type: List[int]
+ self.indents: List[int] = []
# Variables related to simple keys treatment.
@@ -145,11 +141,10 @@ class Scanner:
# (token_number, required, index, line, column, mark)
# A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow),
# '[', or '{' tokens.
- self.possible_simple_keys = {} # type: Dict[Any, Any]
+ self.possible_simple_keys: Dict[Any, Any] = {}
@property
- def reader(self):
- # type: () -> Any
+ def reader(self) -> Any:
try:
return self._scanner_reader # type: ignore
except AttributeError:
@@ -160,16 +155,14 @@ class Scanner:
return self._scanner_reader
@property
- def scanner_processing_version(self): # prefix until un-composited
- # type: () -> Any
+ def scanner_processing_version(self) -> Any: # prefix until un-composited
if hasattr(self.loader, 'typ'):
return self.loader.resolver.processing_version
return self.loader.processing_version
# Public methods.
- def check_token(self, *choices):
- # type: (Any) -> bool
+ def check_token(self, *choices: Any) -> bool:
# Check if the next token is one of the given types.
while self.need_more_tokens():
self.fetch_more_tokens()
@@ -181,16 +174,14 @@ class Scanner:
return True
return False
- def peek_token(self):
- # type: () -> Any
+ def peek_token(self) -> Any:
         # Return the next token, but do not delete it from the queue.
while self.need_more_tokens():
self.fetch_more_tokens()
if len(self.tokens) > 0:
return self.tokens[0]
- def get_token(self):
- # type: () -> Any
+ def get_token(self) -> Any:
# Return the next token.
while self.need_more_tokens():
self.fetch_more_tokens()
@@ -200,8 +191,7 @@ class Scanner:
# Private methods.
- def need_more_tokens(self):
- # type: () -> bool
+ def need_more_tokens(self) -> bool:
if self.done:
return False
if len(self.tokens) == 0:
@@ -213,12 +203,10 @@ class Scanner:
return True
return False
- def fetch_comment(self, comment):
- # type: (Any) -> None
+ def fetch_comment(self, comment: Any) -> None:
raise NotImplementedError
- def fetch_more_tokens(self):
- # type: () -> Any
+ def fetch_more_tokens(self) -> Any:
# Eat whitespaces and comments until we reach the next token.
comment = self.scan_to_next_token()
if comment is not None: # never happens for base scanner
@@ -323,14 +311,13 @@ class Scanner:
raise ScannerError(
'while scanning for the next token',
None,
- _F('found character {ch!r} that cannot start any token', ch=ch),
+ f'found character {ch!r} that cannot start any token',
self.reader.get_mark(),
)
# Simple keys treatment.
- def next_possible_simple_key(self):
- # type: () -> Any
+ def next_possible_simple_key(self) -> Any:
# Return the number of the nearest possible simple key. Actually we
# don't need to loop through the whole dictionary. We may replace it
# with the following code:
@@ -345,8 +332,7 @@ class Scanner:
min_token_number = key.token_number
return min_token_number
- def stale_possible_simple_keys(self):
- # type: () -> None
+ def stale_possible_simple_keys(self) -> None:
# Remove entries that are no longer possible simple keys. According to
# the YAML specification, simple keys
# - should be limited to a single line,
@@ -365,8 +351,7 @@ class Scanner:
)
del self.possible_simple_keys[level]
- def save_possible_simple_key(self):
- # type: () -> None
+ def save_possible_simple_key(self) -> None:
# The next token may start a simple key. We check if it's possible
# and save its position. This function is called for
# ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'.
@@ -389,8 +374,7 @@ class Scanner:
)
self.possible_simple_keys[self.flow_level] = key
- def remove_possible_simple_key(self):
- # type: () -> None
+ def remove_possible_simple_key(self) -> None:
# Remove the saved possible key position at the current flow level.
if self.flow_level in self.possible_simple_keys:
key = self.possible_simple_keys[self.flow_level]
@@ -407,8 +391,7 @@ class Scanner:
# Indentation functions.
- def unwind_indent(self, column):
- # type: (Any) -> None
+ def unwind_indent(self, column: Any) -> None:
# In flow context, tokens should respect indentation.
# Actually the condition should be `self.indent >= column` according to
# the spec. But this condition will prohibit intuitively correct
@@ -432,8 +415,7 @@ class Scanner:
self.indent = self.indents.pop()
self.tokens.append(BlockEndToken(mark, mark))
- def add_indent(self, column):
- # type: (int) -> bool
+ def add_indent(self, column: int) -> bool:
# Check if we need to increase indentation.
if self.indent < column:
self.indents.append(self.indent)
@@ -443,8 +425,7 @@ class Scanner:
# Fetchers.
- def fetch_stream_start(self):
- # type: () -> None
+ def fetch_stream_start(self) -> None:
# We always add STREAM-START as the first token and STREAM-END as the
# last token.
# Read the token.
@@ -452,8 +433,7 @@ class Scanner:
# Add STREAM-START.
self.tokens.append(StreamStartToken(mark, mark, encoding=self.reader.encoding))
- def fetch_stream_end(self):
- # type: () -> None
+ def fetch_stream_end(self) -> None:
         # Set the current indentation to -1.
self.unwind_indent(-1)
# Reset simple keys.
@@ -467,8 +447,7 @@ class Scanner:
         # The stream is finished.
self.done = True
- def fetch_directive(self):
- # type: () -> None
+ def fetch_directive(self) -> None:
         # Set the current indentation to -1.
self.unwind_indent(-1)
@@ -479,16 +458,13 @@ class Scanner:
# Scan and add DIRECTIVE.
self.tokens.append(self.scan_directive())
- def fetch_document_start(self):
- # type: () -> None
+ def fetch_document_start(self) -> None:
self.fetch_document_indicator(DocumentStartToken)
- def fetch_document_end(self):
- # type: () -> None
+ def fetch_document_end(self) -> None:
self.fetch_document_indicator(DocumentEndToken)
- def fetch_document_indicator(self, TokenClass):
- # type: (Any) -> None
+ def fetch_document_indicator(self, TokenClass: Any) -> None:
         # Set the current indentation to -1.
self.unwind_indent(-1)
@@ -503,16 +479,13 @@ class Scanner:
end_mark = self.reader.get_mark()
self.tokens.append(TokenClass(start_mark, end_mark))
- def fetch_flow_sequence_start(self):
- # type: () -> None
+ def fetch_flow_sequence_start(self) -> None:
self.fetch_flow_collection_start(FlowSequenceStartToken, to_push='[')
- def fetch_flow_mapping_start(self):
- # type: () -> None
+ def fetch_flow_mapping_start(self) -> None:
self.fetch_flow_collection_start(FlowMappingStartToken, to_push='{')
- def fetch_flow_collection_start(self, TokenClass, to_push):
- # type: (Any, Text) -> None
+ def fetch_flow_collection_start(self, TokenClass: Any, to_push: Text) -> None:
# '[' and '{' may start a simple key.
self.save_possible_simple_key()
# Increase the flow level.
@@ -525,16 +498,13 @@ class Scanner:
end_mark = self.reader.get_mark()
self.tokens.append(TokenClass(start_mark, end_mark))
- def fetch_flow_sequence_end(self):
- # type: () -> None
+ def fetch_flow_sequence_end(self) -> None:
self.fetch_flow_collection_end(FlowSequenceEndToken)
- def fetch_flow_mapping_end(self):
- # type: () -> None
+ def fetch_flow_mapping_end(self) -> None:
self.fetch_flow_collection_end(FlowMappingEndToken)
- def fetch_flow_collection_end(self, TokenClass):
- # type: (Any) -> None
+ def fetch_flow_collection_end(self, TokenClass: Any) -> None:
# Reset possible simple key on the current level.
self.remove_possible_simple_key()
# Decrease the flow level.
@@ -552,8 +522,7 @@ class Scanner:
end_mark = self.reader.get_mark()
self.tokens.append(TokenClass(start_mark, end_mark))
- def fetch_flow_entry(self):
- # type: () -> None
+ def fetch_flow_entry(self) -> None:
# Simple keys are allowed after ','.
self.allow_simple_key = True
# Reset possible simple key on the current level.
@@ -564,8 +533,7 @@ class Scanner:
end_mark = self.reader.get_mark()
self.tokens.append(FlowEntryToken(start_mark, end_mark))
- def fetch_block_entry(self):
- # type: () -> None
+ def fetch_block_entry(self) -> None:
# Block context needs additional checks.
if not self.flow_level:
# Are we allowed to start a new entry?
@@ -592,8 +560,7 @@ class Scanner:
end_mark = self.reader.get_mark()
self.tokens.append(BlockEntryToken(start_mark, end_mark))
- def fetch_key(self):
- # type: () -> None
+ def fetch_key(self) -> None:
# Block context needs additional checks.
if not self.flow_level:
@@ -620,8 +587,7 @@ class Scanner:
end_mark = self.reader.get_mark()
self.tokens.append(KeyToken(start_mark, end_mark))
- def fetch_value(self):
- # type: () -> None
+ def fetch_value(self) -> None:
# Do we determine a simple key?
if self.flow_level in self.possible_simple_keys:
# Add KEY.
@@ -681,8 +647,7 @@ class Scanner:
end_mark = self.reader.get_mark()
self.tokens.append(ValueToken(start_mark, end_mark))
- def fetch_alias(self):
- # type: () -> None
+ def fetch_alias(self) -> None:
# ALIAS could be a simple key.
self.save_possible_simple_key()
# No simple keys after ALIAS.
@@ -690,8 +655,7 @@ class Scanner:
# Scan and add ALIAS.
self.tokens.append(self.scan_anchor(AliasToken))
- def fetch_anchor(self):
- # type: () -> None
+ def fetch_anchor(self) -> None:
# ANCHOR could start a simple key.
self.save_possible_simple_key()
# No simple keys after ANCHOR.
@@ -699,8 +663,7 @@ class Scanner:
# Scan and add ANCHOR.
self.tokens.append(self.scan_anchor(AnchorToken))
- def fetch_tag(self):
- # type: () -> None
+ def fetch_tag(self) -> None:
# TAG could start a simple key.
self.save_possible_simple_key()
# No simple keys after TAG.
@@ -708,16 +671,13 @@ class Scanner:
# Scan and add TAG.
self.tokens.append(self.scan_tag())
- def fetch_literal(self):
- # type: () -> None
+ def fetch_literal(self) -> None:
self.fetch_block_scalar(style='|')
- def fetch_folded(self):
- # type: () -> None
+ def fetch_folded(self) -> None:
self.fetch_block_scalar(style='>')
- def fetch_block_scalar(self, style):
- # type: (Any) -> None
+ def fetch_block_scalar(self, style: Any) -> None:
# A simple key may follow a block scalar.
self.allow_simple_key = True
# Reset possible simple key on the current level.
@@ -725,16 +685,13 @@ class Scanner:
# Scan and add SCALAR.
self.tokens.append(self.scan_block_scalar(style))
- def fetch_single(self):
- # type: () -> None
+ def fetch_single(self) -> None:
self.fetch_flow_scalar(style="'")
- def fetch_double(self):
- # type: () -> None
+ def fetch_double(self) -> None:
self.fetch_flow_scalar(style='"')
- def fetch_flow_scalar(self, style):
- # type: (Any) -> None
+ def fetch_flow_scalar(self, style: Any) -> None:
# A flow scalar could be a simple key.
self.save_possible_simple_key()
# No simple keys after flow scalars.
@@ -742,8 +699,7 @@ class Scanner:
# Scan and add SCALAR.
self.tokens.append(self.scan_flow_scalar(style))
- def fetch_plain(self):
- # type: () -> None
+ def fetch_plain(self) -> None:
# A plain scalar could be a simple key.
self.save_possible_simple_key()
# No simple keys after plain scalars. But note that `scan_plain` will
@@ -755,45 +711,39 @@ class Scanner:
# Checkers.
- def check_directive(self):
- # type: () -> Any
+ def check_directive(self) -> Any:
# DIRECTIVE: ^ '%' ...
# The '%' indicator is already checked.
if self.reader.column == 0:
return True
return None
- def check_document_start(self):
- # type: () -> Any
+ def check_document_start(self) -> Any:
# DOCUMENT-START: ^ '---' (' '|'\n')
if self.reader.column == 0:
if self.reader.prefix(3) == '---' and self.reader.peek(3) in _THE_END_SPACE_TAB:
return True
return None
- def check_document_end(self):
- # type: () -> Any
+ def check_document_end(self) -> Any:
# DOCUMENT-END: ^ '...' (' '|'\n')
if self.reader.column == 0:
if self.reader.prefix(3) == '...' and self.reader.peek(3) in _THE_END_SPACE_TAB:
return True
return None
- def check_block_entry(self):
- # type: () -> Any
+ def check_block_entry(self) -> Any:
# BLOCK-ENTRY: '-' (' '|'\n')
return self.reader.peek(1) in _THE_END_SPACE_TAB
- def check_key(self):
- # type: () -> Any
+ def check_key(self) -> Any:
# KEY(flow context): '?'
if bool(self.flow_level):
return True
# KEY(block context): '?' (' '|'\n')
return self.reader.peek(1) in _THE_END_SPACE_TAB
- def check_value(self):
- # type: () -> Any
+ def check_value(self) -> Any:
# VALUE(flow context): ':'
if self.scanner_processing_version == (1, 1):
if bool(self.flow_level):
@@ -811,8 +761,7 @@ class Scanner:
# VALUE(block context): ':' (' '|'\n')
return self.reader.peek(1) in _THE_END_SPACE_TAB
- def check_plain(self):
- # type: () -> Any
+ def check_plain(self) -> Any:
# A plain scalar may start with any non-space character except:
# '-', '?', ':', ',', '[', ']', '{', '}',
# '#', '&', '*', '!', '|', '>', '\'', '\"',
@@ -848,8 +797,7 @@ class Scanner:
# Scanners.
- def scan_to_next_token(self):
- # type: () -> Any
+ def scan_to_next_token(self) -> Any:
# We ignore spaces, line breaks and comments.
# If we find a line break in the block context, we set the flag
# `allow_simple_key` on.
@@ -887,8 +835,7 @@ class Scanner:
found = True
return None
- def scan_directive(self):
- # type: () -> Any
+ def scan_directive(self) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
@@ -909,8 +856,7 @@ class Scanner:
self.scan_directive_ignored_line(start_mark)
return DirectiveToken(name, value, start_mark, end_mark)
- def scan_directive_name(self, start_mark):
- # type: (Any) -> Any
+ def scan_directive_name(self, start_mark: Any) -> Any:
# See the specification for details.
length = 0
srp = self.reader.peek
@@ -922,7 +868,7 @@ class Scanner:
raise ScannerError(
'while scanning a directive',
start_mark,
- _F('expected alphabetic or numeric character, but found {ch!r}', ch=ch),
+ f'expected alphabetic or numeric character, but found {ch!r}',
self.reader.get_mark(),
)
value = self.reader.prefix(length)
@@ -932,13 +878,12 @@ class Scanner:
raise ScannerError(
'while scanning a directive',
start_mark,
- _F('expected alphabetic or numeric character, but found {ch!r}', ch=ch),
+ f'expected alphabetic or numeric character, but found {ch!r}',
self.reader.get_mark(),
)
return value
- def scan_yaml_directive_value(self, start_mark):
- # type: (Any) -> Any
+ def scan_yaml_directive_value(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
@@ -949,7 +894,7 @@ class Scanner:
raise ScannerError(
'while scanning a directive',
start_mark,
- _F("expected a digit or '.', but found {srp_call!r}", srp_call=srp()),
+ f"expected a digit or '.', but found {srp()!r}",
self.reader.get_mark(),
)
srf()
@@ -958,14 +903,13 @@ class Scanner:
raise ScannerError(
'while scanning a directive',
start_mark,
- _F("expected a digit or '.', but found {srp_call!r}", srp_call=srp()),
+ f"expected a digit or '.', but found {srp()!r}",
self.reader.get_mark(),
)
self.yaml_version = (major, minor)
return self.yaml_version
- def scan_yaml_directive_number(self, start_mark):
- # type: (Any) -> Any
+ def scan_yaml_directive_number(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
@@ -974,7 +918,7 @@ class Scanner:
raise ScannerError(
'while scanning a directive',
start_mark,
- _F('expected a digit, but found {ch!r}', ch=ch),
+ f'expected a digit, but found {ch!r}',
self.reader.get_mark(),
)
length = 0
@@ -984,8 +928,7 @@ class Scanner:
srf(length)
return value
- def scan_tag_directive_value(self, start_mark):
- # type: (Any) -> Any
+ def scan_tag_directive_value(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
@@ -997,8 +940,7 @@ class Scanner:
prefix = self.scan_tag_directive_prefix(start_mark)
return (handle, prefix)
- def scan_tag_directive_handle(self, start_mark):
- # type: (Any) -> Any
+ def scan_tag_directive_handle(self, start_mark: Any) -> Any:
# See the specification for details.
value = self.scan_tag_handle('directive', start_mark)
ch = self.reader.peek()
@@ -1006,13 +948,12 @@ class Scanner:
raise ScannerError(
'while scanning a directive',
start_mark,
- _F("expected ' ', but found {ch!r}", ch=ch),
+ f"expected ' ', but found {ch!r}",
self.reader.get_mark(),
)
return value
- def scan_tag_directive_prefix(self, start_mark):
- # type: (Any) -> Any
+ def scan_tag_directive_prefix(self, start_mark: Any) -> Any:
# See the specification for details.
value = self.scan_tag_uri('directive', start_mark)
ch = self.reader.peek()
@@ -1020,13 +961,12 @@ class Scanner:
raise ScannerError(
'while scanning a directive',
start_mark,
- _F("expected ' ', but found {ch!r}", ch=ch),
+ f"expected ' ', but found {ch!r}",
self.reader.get_mark(),
)
return value
- def scan_directive_ignored_line(self, start_mark):
- # type: (Any) -> None
+ def scan_directive_ignored_line(self, start_mark: Any) -> None:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
@@ -1040,13 +980,12 @@ class Scanner:
raise ScannerError(
'while scanning a directive',
start_mark,
- _F('expected a comment or a line break, but found {ch!r}', ch=ch),
+ f'expected a comment or a line break, but found {ch!r}',
self.reader.get_mark(),
)
self.scan_line_break()
- def scan_anchor(self, TokenClass):
- # type: (Any) -> Any
+ def scan_anchor(self, TokenClass: Any) -> Any:
# The specification does not restrict characters for anchors and
# aliases. This may lead to problems, for instance, the document:
# [ *alias, value ]
@@ -1072,9 +1011,9 @@ class Scanner:
ch = srp(length)
if not length:
raise ScannerError(
- _F('while scanning an {name!s}', name=name),
+ f'while scanning an {name!s}',
start_mark,
- _F('expected alphabetic or numeric character, but found {ch!r}', ch=ch),
+ f'expected alphabetic or numeric character, but found {ch!r}',
self.reader.get_mark(),
)
value = self.reader.prefix(length)
@@ -1084,16 +1023,15 @@ class Scanner:
# assert ch1 == ch
if ch not in '\0 \t\r\n\x85\u2028\u2029?:,[]{}%@`':
raise ScannerError(
- _F('while scanning an {name!s}', name=name),
+ f'while scanning an {name!s}',
start_mark,
- _F('expected alphabetic or numeric character, but found {ch!r}', ch=ch),
+ f'expected alphabetic or numeric character, but found {ch!r}',
self.reader.get_mark(),
)
end_mark = self.reader.get_mark()
return TokenClass(value, start_mark, end_mark)
- def scan_tag(self):
- # type: () -> Any
+ def scan_tag(self) -> Any:
# See the specification for details.
srp = self.reader.peek
start_mark = self.reader.get_mark()
@@ -1106,7 +1044,7 @@ class Scanner:
raise ScannerError(
'while parsing a tag',
start_mark,
- _F("expected '>', but found {srp_call!r}", srp_call=srp()),
+                f"expected '>', but found {srp()!r}",
self.reader.get_mark(),
)
self.reader.forward()
@@ -1135,15 +1073,14 @@ class Scanner:
raise ScannerError(
'while scanning a tag',
start_mark,
- _F("expected ' ', but found {ch!r}", ch=ch),
+ f"expected ' ', but found {ch!r}",
self.reader.get_mark(),
)
value = (handle, suffix)
end_mark = self.reader.get_mark()
return TagToken(value, start_mark, end_mark)
- def scan_block_scalar(self, style, rt=False):
- # type: (Any, Optional[bool]) -> Any
+ def scan_block_scalar(self, style: Any, rt: Optional[bool] = False) -> Any:
# See the specification for details.
srp = self.reader.peek
if style == '>':
@@ -1151,7 +1088,7 @@ class Scanner:
else:
folded = False
- chunks = [] # type: List[Any]
+ chunks: List[Any] = []
start_mark = self.reader.get_mark()
# Scan the header.
@@ -1227,7 +1164,7 @@ class Scanner:
# Process trailing line breaks. The 'chomping' setting determines
# whether they are included in the value.
- trailing = [] # type: List[Any]
+ trailing: List[Any] = []
if chomping in [None, True]:
chunks.append(line_break)
if chomping is True:
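Background sketch, not part of the patch: the chomping setting handled above is the block scalar indicator from the YAML spec; keep (+) retains all trailing line breaks, clip (the default) keeps exactly one, and strip (-) keeps none. A small sketch using the safe loader:

from ruamel.yaml import YAML

doc = 'keep: |+\n  x\n\n\nclip: |\n  x\n\n\nstrip: |-\n  x\n\n\nlast: 0\n'
data = YAML(typ='safe').load(doc)
assert data['keep'] == 'x\n\n\n'    # all trailing breaks kept
assert data['clip'] == 'x\n'        # clipped to a single break
assert data['strip'] == 'x'         # no trailing break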
@@ -1266,8 +1203,7 @@ class Scanner:
token.add_post_comment(comment)
return token
- def scan_block_scalar_indicators(self, start_mark):
- # type: (Any) -> Any
+ def scan_block_scalar_indicators(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
chomping = None
@@ -1312,13 +1248,12 @@ class Scanner:
raise ScannerError(
'while scanning a block scalar',
start_mark,
- _F('expected chomping or indentation indicators, but found {ch!r}', ch=ch),
+ f'expected chomping or indentation indicators, but found {ch!r}',
self.reader.get_mark(),
)
return chomping, increment
- def scan_block_scalar_ignored_line(self, start_mark):
- # type: (Any) -> Any
+ def scan_block_scalar_ignored_line(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
@@ -1337,14 +1272,13 @@ class Scanner:
raise ScannerError(
'while scanning a block scalar',
start_mark,
- _F('expected a comment or a line break, but found {ch!r}', ch=ch),
+ f'expected a comment or a line break, but found {ch!r}',
self.reader.get_mark(),
)
self.scan_line_break()
return comment
- def scan_block_scalar_indentation(self):
- # type: () -> Any
+ def scan_block_scalar_indentation(self) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
@@ -1361,8 +1295,7 @@ class Scanner:
max_indent = self.reader.column
return chunks, max_indent, end_mark
- def scan_block_scalar_breaks(self, indent):
- # type: (int) -> Any
+ def scan_block_scalar_breaks(self, indent: int) -> Any:
# See the specification for details.
chunks = []
srp = self.reader.peek
@@ -1377,8 +1310,7 @@ class Scanner:
srf()
return chunks, end_mark
- def scan_flow_scalar(self, style):
- # type: (Any) -> Any
+ def scan_flow_scalar(self, style: Any) -> Any:
# See the specification for details.
         # Note that we loosen indentation rules for quoted scalars. Quoted
         # scalars don't need to adhere to indentation because " and ' clearly
@@ -1390,7 +1322,7 @@ class Scanner:
else:
double = False
srp = self.reader.peek
- chunks = [] # type: List[Any]
+ chunks: List[Any] = []
start_mark = self.reader.get_mark()
quote = srp()
self.reader.forward()
@@ -1425,10 +1357,9 @@ class Scanner:
ESCAPE_CODES = {'x': 2, 'u': 4, 'U': 8}
- def scan_flow_scalar_non_spaces(self, double, start_mark):
- # type: (Any, Any) -> Any
+ def scan_flow_scalar_non_spaces(self, double: Any, start_mark: Any) -> Any:
# See the specification for details.
- chunks = [] # type: List[Any]
+ chunks: List[Any] = []
srp = self.reader.peek
srf = self.reader.forward
while True:
@@ -1459,12 +1390,8 @@ class Scanner:
raise ScannerError(
'while scanning a double-quoted scalar',
start_mark,
- _F(
- 'expected escape sequence of {length:d} hexdecimal '
- 'numbers, but found {srp_call!r}',
- length=length,
- srp_call=srp(k),
- ),
+ f'expected escape sequence of {length:d} '
+                    f'hexadecimal numbers, but found {srp(k)!r}',
self.reader.get_mark(),
)
code = int(self.reader.prefix(length), 16)
@@ -1477,14 +1404,13 @@ class Scanner:
raise ScannerError(
'while scanning a double-quoted scalar',
start_mark,
- _F('found unknown escape character {ch!r}', ch=ch),
+ f'found unknown escape character {ch!r}',
self.reader.get_mark(),
)
else:
return chunks
- def scan_flow_scalar_spaces(self, double, start_mark):
- # type: (Any, Any) -> Any
+ def scan_flow_scalar_spaces(self, double: Any, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
chunks = []
@@ -1513,10 +1439,9 @@ class Scanner:
chunks.append(whitespaces)
return chunks
- def scan_flow_scalar_breaks(self, double, start_mark):
- # type: (Any, Any) -> Any
+ def scan_flow_scalar_breaks(self, double: Any, start_mark: Any) -> Any:
# See the specification for details.
- chunks = [] # type: List[Any]
+ chunks: List[Any] = []
srp = self.reader.peek
srf = self.reader.forward
while True:
@@ -1537,8 +1462,7 @@ class Scanner:
else:
return chunks
- def scan_plain(self):
- # type: () -> Any
+ def scan_plain(self) -> Any:
# See the specification for details.
# We add an additional restriction for the flow context:
# plain scalars in the flow context cannot contain ',', ': ' and '?'.
@@ -1546,7 +1470,7 @@ class Scanner:
         # Indentation rules are loosened for the flow context.
srp = self.reader.peek
srf = self.reader.forward
- chunks = [] # type: List[Any]
+ chunks: List[Any] = []
start_mark = self.reader.get_mark()
end_mark = start_mark
indent = self.indent + 1
@@ -1554,7 +1478,7 @@ class Scanner:
# document separators at the beginning of the line.
# if indent == 0:
# indent = 1
- spaces = [] # type: List[Any]
+ spaces: List[Any] = []
while True:
length = 0
if srp() == '#':
@@ -1626,8 +1550,7 @@ class Scanner:
return token
- def scan_plain_spaces(self, indent, start_mark):
- # type: (Any, Any) -> Any
+ def scan_plain_spaces(self, indent: Any, start_mark: Any) -> Any:
# See the specification for details.
# The specification is really confusing about tabs in plain scalars.
# We just forbid them completely. Do not use tabs in YAML!
@@ -1664,8 +1587,7 @@ class Scanner:
chunks.append(whitespaces)
return chunks
- def scan_tag_handle(self, name, start_mark):
- # type: (Any, Any) -> Any
+ def scan_tag_handle(self, name: Any, start_mark: Any) -> Any:
# See the specification for details.
         # For some strange reason, the specification does not allow '_' in
# tag handles. I have allowed it anyway.
@@ -1673,9 +1595,9 @@ class Scanner:
ch = srp()
if ch != '!':
raise ScannerError(
- _F('while scanning an {name!s}', name=name),
+ f'while scanning an {name!s}',
start_mark,
- _F("expected '!', but found {ch!r}", ch=ch),
+ f"expected '!', but found {ch!r}",
self.reader.get_mark(),
)
length = 1
@@ -1687,9 +1609,9 @@ class Scanner:
if ch != '!':
self.reader.forward(length)
raise ScannerError(
- _F('while scanning an {name!s}', name=name),
+ f'while scanning an {name!s}',
start_mark,
- _F("expected '!', but found {ch!r}", ch=ch),
+                f"expected '!', but found {ch!r}",
self.reader.get_mark(),
)
length += 1
@@ -1697,8 +1619,7 @@ class Scanner:
self.reader.forward(length)
return value
- def scan_tag_uri(self, name, start_mark):
- # type: (Any, Any) -> Any
+ def scan_tag_uri(self, name: Any, start_mark: Any) -> Any:
# See the specification for details.
# Note: we do not check if URI is well-formed.
srp = self.reader.peek
@@ -1726,32 +1647,28 @@ class Scanner:
length = 0
if not chunks:
raise ScannerError(
- _F('while parsing an {name!s}', name=name),
+ f'while parsing an {name!s}',
start_mark,
- _F('expected URI, but found {ch!r}', ch=ch),
+ f'expected URI, but found {ch!r}',
self.reader.get_mark(),
)
return "".join(chunks)
- def scan_uri_escapes(self, name, start_mark):
- # type: (Any, Any) -> Any
+ def scan_uri_escapes(self, name: Any, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
- code_bytes = [] # type: List[Any]
+ code_bytes: List[Any] = []
mark = self.reader.get_mark()
while srp() == '%':
srf()
for k in range(2):
if srp(k) not in '0123456789ABCDEFabcdef':
raise ScannerError(
- _F('while scanning an {name!s}', name=name),
+ f'while scanning an {name!s}',
start_mark,
- _F(
- 'expected URI escape sequence of 2 hexdecimal numbers,'
- ' but found {srp_call!r}',
- srp_call=srp(k),
- ),
+                        f'expected URI escape sequence of 2 hexadecimal numbers, '
+ f'but found {srp(k)!r}',
self.reader.get_mark(),
)
code_bytes.append(int(self.reader.prefix(2), 16))
@@ -1759,13 +1676,10 @@ class Scanner:
try:
value = bytes(code_bytes).decode('utf-8')
except UnicodeDecodeError as exc:
- raise ScannerError(
- _F('while scanning an {name!s}', name=name), start_mark, str(exc), mark
- )
+ raise ScannerError(f'while scanning an {name!s}', start_mark, str(exc), mark)
return value
- def scan_line_break(self):
- # type: () -> Any
+ def scan_line_break(self) -> Any:
# Transforms:
# '\r\n' : '\n'
# '\r' : '\n'
@@ -1788,8 +1702,7 @@ class Scanner:
class RoundTripScanner(Scanner):
- def check_token(self, *choices):
- # type: (Any) -> bool
+ def check_token(self, *choices: Any) -> bool:
# Check if the next token is one of the given types.
while self.need_more_tokens():
self.fetch_more_tokens()
@@ -1802,8 +1715,7 @@ class RoundTripScanner(Scanner):
return True
return False
- def peek_token(self):
- # type: () -> Any
+ def peek_token(self) -> Any:
         # Return the next token, but do not delete it from the queue.
while self.need_more_tokens():
self.fetch_more_tokens()
@@ -1812,10 +1724,9 @@ class RoundTripScanner(Scanner):
return self.tokens[0]
return None
- def _gather_comments(self):
- # type: () -> Any
+ def _gather_comments(self) -> Any:
"""combine multiple comment lines and assign to next non-comment-token"""
- comments = [] # type: List[Any]
+ comments: List[Any] = []
if not self.tokens:
return comments
if isinstance(self.tokens[0], CommentToken):
@@ -1837,8 +1748,7 @@ class RoundTripScanner(Scanner):
if not self.done and len(self.tokens) < 2:
self.fetch_more_tokens()
- def get_token(self):
- # type: () -> Any
+ def get_token(self) -> Any:
# Return the next token.
while self.need_more_tokens():
self.fetch_more_tokens()
@@ -1891,8 +1801,7 @@ class RoundTripScanner(Scanner):
return self.tokens.pop(0)
return None
- def fetch_comment(self, comment):
- # type: (Any) -> None
+ def fetch_comment(self, comment: Any) -> None:
value, start_mark, end_mark = comment
while value and value[-1] == ' ':
# empty line within indented key context
@@ -1902,8 +1811,7 @@ class RoundTripScanner(Scanner):
# scanner
- def scan_to_next_token(self):
- # type: () -> Any
+ def scan_to_next_token(self) -> Any:
# We ignore spaces, line breaks and comments.
# If we find a line break in the block context, we set the flag
# `allow_simple_key` on.
@@ -1946,7 +1854,7 @@ class RoundTripScanner(Scanner):
break
comment += ch
srf()
- # gather any blank lines following the comment too
+ # gather any blank lines following the comment
ch = self.scan_line_break()
while len(ch) > 0:
comment += ch
@@ -1975,8 +1883,7 @@ class RoundTripScanner(Scanner):
found = True
return None
- def scan_line_break(self, empty_line=False):
- # type: (bool) -> Text
+ def scan_line_break(self, empty_line: bool = False) -> Text:
# Transforms:
# '\r\n' : '\n'
# '\r' : '\n'
@@ -1985,7 +1892,7 @@ class RoundTripScanner(Scanner):
# '\u2028' : '\u2028'
# '\u2029 : '\u2029'
# default : ''
- ch = self.reader.peek() # type: Text
+ ch: Text = self.reader.peek()
if ch in '\r\n\x85':
if self.reader.prefix(2) == '\r\n':
self.reader.forward(2)
@@ -2000,8 +1907,7 @@ class RoundTripScanner(Scanner):
return ch
return ""
- def scan_block_scalar(self, style, rt=True):
- # type: (Any, Optional[bool]) -> Any
+ def scan_block_scalar(self, style: Any, rt: Optional[bool] = True) -> Any:
return Scanner.scan_block_scalar(self, style, rt=rt)
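Illustration only, not part of the patch: RoundTripScanner is the piece that gathers comments and blank lines and attaches them to tokens, which is what lets the default YAML() instance preserve them across a load/edit/dump cycle. A minimal sketch:

import sys
from ruamel.yaml import YAML

src = """\
# top comment
a: 1  # eol comment

b: 2
"""
yaml = YAML()            # typ='rt', which uses RoundTripScanner
data = yaml.load(src)
data['b'] = 3
yaml.dump(data, sys.stdout)
# the top comment, the eol comment and the blank line all reappear in the output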
@@ -2016,8 +1922,7 @@ KEYCMNT = 0 # 1
class CommentBase:
__slots__ = ('value', 'line', 'column', 'used', 'function', 'fline', 'ufun', 'uline')
- def __init__(self, value, line, column):
- # type: (Any, Any, Any) -> None
+ def __init__(self, value: Any, line: Any, column: Any) -> None:
self.value = value
self.line = line
self.column = column
@@ -2028,73 +1933,57 @@ class CommentBase:
self.ufun = None
self.uline = None
- def set_used(self, v='+'):
- # type: (Any) -> None
+ def set_used(self, v: Any = '+') -> None:
self.used = v
info = inspect.getframeinfo(inspect.stack()[1][0])
self.ufun = info.function # type: ignore
self.uline = info.lineno # type: ignore
- def set_assigned(self):
- # type: () -> None
+ def set_assigned(self) -> None:
self.used = '|'
- def __str__(self):
- # type: () -> str
- return _F('{value}', value=self.value) # type: ignore
-
- def __repr__(self):
- # type: () -> str
- return _F('{value!r}', value=self.value) # type: ignore
-
- def info(self):
- # type: () -> str
- return _F( # type: ignore
- '{name}{used} {line:2}:{column:<2} "{value:40s} {function}:{fline} {ufun}:{uline}',
- name=self.name, # type: ignore
- line=self.line,
- column=self.column,
- value=self.value + '"',
- used=self.used,
- function=self.function,
- fline=self.fline,
- ufun=self.ufun,
- uline=self.uline,
+ def __str__(self) -> str:
+ return f'{self.value}'
+
+ def __repr__(self) -> str:
+ return f'{self.value!r}'
+
+ def info(self) -> str:
+ xv = self.value + '"'
+ name = self.name # type: ignore
+ return (
+ f'{name}{self.used} {self.line:2}:{self.column:<2} "{xv:40s} '
+ f'{self.function}:{self.fline} {self.ufun}:{self.uline}'
)
class EOLComment(CommentBase):
name = 'EOLC'
- def __init__(self, value, line, column):
- # type: (Any, Any, Any) -> None
+ def __init__(self, value: Any, line: Any, column: Any) -> None:
super().__init__(value, line, column)
class FullLineComment(CommentBase):
name = 'FULL'
- def __init__(self, value, line, column):
- # type: (Any, Any, Any) -> None
+ def __init__(self, value: Any, line: Any, column: Any) -> None:
super().__init__(value, line, column)
class BlankLineComment(CommentBase):
name = 'BLNK'
- def __init__(self, value, line, column):
- # type: (Any, Any, Any) -> None
+ def __init__(self, value: Any, line: Any, column: Any) -> None:
super().__init__(value, line, column)
class ScannedComments:
- def __init__(self):
- # type: (Any) -> None
+ def __init__(self: Any) -> None:
self.comments = {} # type: ignore
self.unused = [] # type: ignore
- def add_eol_comment(self, comment, column, line):
- # type: (Any, Any, Any) -> Any
+ def add_eol_comment(self, comment: Any, column: Any, line: Any) -> Any:
# info = inspect.getframeinfo(inspect.stack()[1][0])
if comment.count('\n') == 1:
assert comment[-1] == '\n'
@@ -2104,8 +1993,7 @@ class ScannedComments:
self.unused.append(line)
return retval
- def add_blank_line(self, comment, column, line):
- # type: (Any, Any, Any) -> Any
+ def add_blank_line(self, comment: Any, column: Any, line: Any) -> Any:
# info = inspect.getframeinfo(inspect.stack()[1][0])
assert comment.count('\n') == 1 and comment[-1] == '\n'
assert line not in self.comments
@@ -2113,8 +2001,7 @@ class ScannedComments:
self.unused.append(line)
return retval
- def add_full_line_comment(self, comment, column, line):
- # type: (Any, Any, Any) -> Any
+ def add_full_line_comment(self, comment: Any, column: Any, line: Any) -> Any:
# info = inspect.getframeinfo(inspect.stack()[1][0])
assert comment.count('\n') == 1 and comment[-1] == '\n'
# if comment.startswith('# C12'):
@@ -2124,30 +2011,21 @@ class ScannedComments:
self.unused.append(line)
return retval
- def __getitem__(self, idx):
- # type: (Any) -> Any
+ def __getitem__(self, idx: Any) -> Any:
return self.comments[idx]
- def __str__(self):
- # type: () -> Any
+ def __str__(self) -> Any:
return (
'ParsedComments:\n '
- + '\n '.join(
- (
- _F('{lineno:2} {x}', lineno=lineno, x=x.info())
- for lineno, x in self.comments.items()
- )
- )
+ + '\n '.join((f'{lineno:2} {x.info()}' for lineno, x in self.comments.items()))
+ '\n'
)
- def last(self):
- # type: () -> str
+ def last(self) -> str:
lineno, x = list(self.comments.items())[-1]
- return _F('{lineno:2} {x}\n', lineno=lineno, x=x.info()) # type: ignore
+ return f'{lineno:2} {x.info()}\n'
- def any_unprocessed(self):
- # type: () -> bool
+ def any_unprocessed(self) -> bool:
# ToDo: might want to differentiate based on lineno
return len(self.unused) > 0
# for lno, comment in reversed(self.comments.items()):
@@ -2155,8 +2033,7 @@ class ScannedComments:
# return True
# return False
- def unprocessed(self, use=False):
- # type: (Any) -> Any
+ def unprocessed(self, use: Any = False) -> Any:
while len(self.unused) > 0:
first = self.unused.pop(0) if use else self.unused[0]
info = inspect.getframeinfo(inspect.stack()[1][0])
@@ -2165,8 +2042,7 @@ class ScannedComments:
if use:
self.comments[first].set_used()
- def assign_pre(self, token):
- # type: (Any) -> Any
+ def assign_pre(self, token: Any) -> Any:
token_line = token.start_mark.line
info = inspect.getframeinfo(inspect.stack()[1][0])
xprintf('assign_pre', token_line, self.unused, info.function, info.lineno)
@@ -2179,8 +2055,7 @@ class ScannedComments:
token.add_comment_pre(first)
return gobbled
- def assign_eol(self, tokens):
- # type: (Any) -> Any
+ def assign_eol(self, tokens: Any) -> Any:
try:
comment_line = self.unused[0]
except IndexError:
@@ -2235,8 +2110,7 @@ class ScannedComments:
sys.exit(0)
- def assign_post(self, token):
- # type: (Any) -> Any
+ def assign_post(self, token: Any) -> Any:
token_line = token.start_mark.line
info = inspect.getframeinfo(inspect.stack()[1][0])
xprintf('assign_post', token_line, self.unused, info.function, info.lineno)
@@ -2249,28 +2123,21 @@ class ScannedComments:
token.add_comment_post(first)
return gobbled
- def str_unprocessed(self):
- # type: () -> Any
+ def str_unprocessed(self) -> Any:
return ''.join(
- (
- _F(' {ind:2} {x}\n', ind=ind, x=x.info())
- for ind, x in self.comments.items()
- if x.used == ' '
- )
+ (f' {ind:2} {x.info()}\n' for ind, x in self.comments.items() if x.used == ' ')
)
class RoundTripScannerSC(Scanner): # RoundTripScanner Split Comments
- def __init__(self, *arg, **kw):
- # type: (Any, Any) -> None
+ def __init__(self, *arg: Any, **kw: Any) -> None:
super().__init__(*arg, **kw)
assert self.loader is not None
         # comments is initialised in .need_more_tokens and persists on
# self.loader.parsed_comments
self.comments = None
- def get_token(self):
- # type: () -> Any
+ def get_token(self) -> Any:
# Return the next token.
while self.need_more_tokens():
self.fetch_more_tokens()
@@ -2282,8 +2149,7 @@ class RoundTripScannerSC(Scanner): # RoundTripScanner Split Comments
self.tokens_taken += 1
return self.tokens.pop(0)
- def need_more_tokens(self):
- # type: () -> bool
+ def need_more_tokens(self) -> bool:
if self.comments is None:
self.loader.parsed_comments = self.comments = ScannedComments() # type: ignore
if self.done:
@@ -2309,8 +2175,7 @@ class RoundTripScannerSC(Scanner): # RoundTripScanner Split Comments
self.comments.assign_eol(self.tokens) # type: ignore
return False
- def scan_to_next_token(self):
- # type: () -> None
+ def scan_to_next_token(self) -> None:
srp = self.reader.peek
srf = self.reader.forward
if self.reader.index == 0 and srp() == '\uFEFF':
@@ -2373,8 +2238,7 @@ class RoundTripScannerSC(Scanner): # RoundTripScanner Split Comments
found = True
return None
- def scan_empty_or_full_line_comments(self):
- # type: () -> None
+ def scan_empty_or_full_line_comments(self) -> None:
blmark = self.reader.get_mark()
assert blmark.column == 0
blanks = ""
@@ -2413,8 +2277,7 @@ class RoundTripScannerSC(Scanner): # RoundTripScanner Split Comments
self.reader.forward()
ch = self.reader.peek()
- def scan_block_scalar_ignored_line(self, start_mark):
- # type: (Any) -> Any
+ def scan_block_scalar_ignored_line(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
@@ -2435,7 +2298,7 @@ class RoundTripScannerSC(Scanner): # RoundTripScanner Split Comments
raise ScannerError(
'while scanning a block scalar',
start_mark,
- _F('expected a comment or a line break, but found {ch!r}', ch=ch),
+ f'expected a comment or a line break, but found {ch!r}',
self.reader.get_mark(),
)
if comment is not None:
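A sketch, not part of the patch: the many ScannerError messages reformatted above reach callers as ordinary ruamel.yaml exceptions. This triggers the 'found unknown escape character' branch of scan_flow_scalar_non_spaces:

from ruamel.yaml import YAML
from ruamel.yaml.error import YAMLError

try:
    YAML().load('a: "\\q"\n')   # \q is not a valid double-quoted escape
except YAMLError as exc:         # ScannerError subclasses MarkedYAMLError/YAMLError
    print(type(exc).__name__)    # expected: ScannerError
    print(exc)                   # message plus marks pointing at the offending character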
diff --git a/serializer.py b/serializer.py
index cc2329d..0034240 100644
--- a/serializer.py
+++ b/serializer.py
@@ -18,9 +18,8 @@ from ruamel.yaml.events import (
)
from ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode
-if False: # MYPY
- from typing import Any, Dict, Union, Text, Optional # NOQA
- from ruamel.yaml.compat import VersionType # NOQA
+from typing import Any, Dict, Union, Text, Optional # NOQA
+from ruamel.yaml.compat import VersionType # NOQA
__all__ = ['Serializer', 'SerializerError']
@@ -32,19 +31,19 @@ class SerializerError(YAMLError):
class Serializer:
# 'id' and 3+ numbers, but not 000
- ANCHOR_TEMPLATE = 'id%03d'
+ ANCHOR_TEMPLATE = 'id{:03d}'
ANCHOR_RE = RegExp('id(?!000$)\\d{3,}')
def __init__(
self,
- encoding=None,
- explicit_start=None,
- explicit_end=None,
- version=None,
- tags=None,
- dumper=None,
- ):
- # type: (Any, Optional[bool], Optional[bool], Optional[VersionType], Any, Any) -> None # NOQA
+ encoding: Any = None,
+ explicit_start: Optional[bool] = None,
+ explicit_end: Optional[bool] = None,
+ version: Optional[VersionType] = None,
+ tags: Any = None,
+ dumper: Any = None,
+ ) -> None:
+ # NOQA
self.dumper = dumper
if self.dumper is not None:
self.dumper._serializer = self
@@ -56,28 +55,25 @@ class Serializer:
else:
self.use_version = version # type: ignore
self.use_tags = tags
- self.serialized_nodes = {} # type: Dict[Any, Any]
- self.anchors = {} # type: Dict[Any, Any]
+ self.serialized_nodes: Dict[Any, Any] = {}
+ self.anchors: Dict[Any, Any] = {}
self.last_anchor_id = 0
- self.closed = None # type: Optional[bool]
+ self.closed: Optional[bool] = None
self._templated_id = None
@property
- def emitter(self):
- # type: () -> Any
+ def emitter(self) -> Any:
if hasattr(self.dumper, 'typ'):
return self.dumper.emitter
return self.dumper._emitter
@property
- def resolver(self):
- # type: () -> Any
+ def resolver(self) -> Any:
if hasattr(self.dumper, 'typ'):
self.dumper.resolver
return self.dumper._resolver
- def open(self):
- # type: () -> None
+ def open(self) -> None:
if self.closed is None:
self.emitter.emit(StreamStartEvent(encoding=self.use_encoding))
self.closed = False
@@ -86,8 +82,7 @@ class Serializer:
else:
raise SerializerError('serializer is already opened')
- def close(self):
- # type: () -> None
+ def close(self) -> None:
if self.closed is None:
raise SerializerError('serializer is not opened')
elif not self.closed:
@@ -97,8 +92,7 @@ class Serializer:
# def __del__(self):
# self.close()
- def serialize(self, node):
- # type: (Any) -> None
+ def serialize(self, node: Any) -> None:
if dbg(DBG_NODE):
nprint('Serializing nodes')
node.dump()
@@ -118,8 +112,7 @@ class Serializer:
self.anchors = {}
self.last_anchor_id = 0
- def anchor_node(self, node):
- # type: (Any) -> None
+ def anchor_node(self, node: Any) -> None:
if node in self.anchors:
if self.anchors[node] is None:
self.anchors[node] = self.generate_anchor(node)
@@ -139,19 +132,17 @@ class Serializer:
self.anchor_node(key)
self.anchor_node(value)
- def generate_anchor(self, node):
- # type: (Any) -> Any
+ def generate_anchor(self, node: Any) -> Any:
try:
anchor = node.anchor.value
except: # NOQA
anchor = None
if anchor is None:
self.last_anchor_id += 1
- return self.ANCHOR_TEMPLATE % self.last_anchor_id
+ return self.ANCHOR_TEMPLATE.format(self.last_anchor_id)
return anchor
- def serialize_node(self, node, parent, index):
- # type: (Any, Any, Any) -> None
+ def serialize_node(self, node: Any, parent: Any, index: Any) -> None:
alias = self.anchors[node]
if node in self.serialized_nodes:
node_style = getattr(node, 'style', None)
@@ -236,6 +227,5 @@ class Serializer:
self.resolver.ascend_resolver()
-def templated_id(s):
- # type: (Text) -> Any
+def templated_id(s: Text) -> Any:
return Serializer.ANCHOR_RE.match(s)
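Quick check, not part of the patch: the ANCHOR_TEMPLATE change swaps '%'-style formatting for str.format; the generated anchor names are unchanged and still satisfy ANCHOR_RE. Using re.compile as a stand-in for ruamel's RegExp wrapper (an assumption):

import re

assert 'id%03d' % 7 == 'id{:03d}'.format(7) == 'id007'

anchor_re = re.compile('id(?!000$)\\d{3,}')   # same pattern as ANCHOR_RE above
assert anchor_re.match('id007')
assert not anchor_re.match('id000')           # id000 is deliberately excluded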
diff --git a/setup.py b/setup.py
index 70bdff0..50f4d1e 100644
--- a/setup.py
+++ b/setup.py
@@ -1,15 +1,13 @@
# # header
# coding: utf-8
-# dd: 20200903
-
-from __future__ import print_function, absolute_import, division, unicode_literals
+# dd: 20230418
# # __init__.py parser
import sys
import os
import datetime
-import traceback
+from textwrap import dedent
sys.path = [path for path in sys.path if path not in [os.getcwd(), ""]]
import platform # NOQA
@@ -20,13 +18,13 @@ from setuptools import setup, Extension, Distribution # NOQA
from setuptools.command import install_lib # NOQA
from setuptools.command.sdist import sdist as _sdist # NOQA
-try:
- from setuptools.namespaces import Installer as NameSpaceInstaller # NOQA
-except ImportError:
- msg = ('You should use the latest setuptools. The namespaces.py file that this setup.py'
- ' uses was added in setuptools 28.7.0 (Oct 2016)')
- print(msg)
- sys.exit()
+# try:
+# from setuptools.namespaces import Installer as NameSpaceInstaller # NOQA
+# except ImportError:
+# msg = ('You should use the latest setuptools. The namespaces.py file that this setup.py'
+# ' uses was added in setuptools 28.7.0 (Oct 2016)')
+# print(msg)
+# sys.exit()
if __name__ != '__main__':
raise NotImplementedError('should never include setup.py')
@@ -278,8 +276,7 @@ class MyInstallLib(install_lib.install_lib):
class MySdist(_sdist):
def initialize_options(self):
_sdist.initialize_options(self)
- # see pep 527, new uploads should be tar.gz or .zip
- # fmt = getattr(self, 'tarfmt', None)
+        # failed experiment, see PEP 527, new uploads should be tar.gz or .zip
# because of unicode_literals
# self.formats = fmt if fmt else [b'bztar'] if sys.version_info < (3, ) else ['bztar']
dist_base = os.environ.get('PYDISTBASE')
@@ -317,8 +314,8 @@ class NameSpacePackager(object):
self._split = None
self.depth = self.full_package_name.count('.')
self.nested = self._pkg_data.get('nested', False)
- if self.nested:
- NameSpaceInstaller.install_namespaces = lambda x: None
+ # if self.nested:
+ # NameSpaceInstaller.install_namespaces = lambda x: None
self.command = None
self.python_version()
self._pkg = [None, None] # required and pre-installable packages
@@ -387,9 +384,6 @@ class NameSpacePackager(object):
return self._split
@property
- def namespace_packages(self):
- return self.split[: self.depth]
-
def namespace_directories(self, depth=None):
"""return list of directories where the namespace should be created /
can be found
@@ -410,23 +404,11 @@ class NameSpacePackager(object):
}
if 'extra_packages' in self._pkg_data:
return d
- if len(self.split) > 1: # only if package namespace
- d[self.split[0]] = self.namespace_directories(1)[0]
+ # if len(self.split) > 1: # only if package namespace
+ # d[self.split[0]] = self.namespace_directories(1)[0]
+ # print('d', d, os.getcwd())
return d
- def create_dirs(self):
- """create the directories necessary for namespace packaging"""
- directories = self.namespace_directories(self.depth)
- if not directories:
- return
- if not os.path.exists(directories[0]):
- for d in directories:
- os.mkdir(d)
- with open(os.path.join(d, '__init__.py'), 'w') as fp:
- fp.write(
- 'import pkg_resources\n' 'pkg_resources.declare_namespace(__name__)\n'
- )
-
def python_version(self):
supported = self._pkg_data.get('supported')
if supported is None:
@@ -718,7 +700,8 @@ class NameSpacePackager(object):
@property
def packages(self):
- s = self.split
+ # s = self.split
+ s = [self._pkg_data['full_package_name']]
# fixed this in package_data, the keys there must be non-unicode for py27
# if sys.version_info < (3, 0):
# s = [x.encode('utf-8') for x in self.split]
@@ -754,7 +737,7 @@ class NameSpacePackager(object):
except ValueError:
pass
self._ext_modules = []
- no_test_compile = False
+ no_test_compile = True
if '--restructuredtext' in sys.argv:
no_test_compile = True
elif 'sdist' in sys.argv:
@@ -768,77 +751,7 @@ class NameSpacePackager(object):
)
self._ext_modules.append(ext)
return self._ext_modules
-
- print('sys.argv', sys.argv)
- import tempfile
- import shutil
- from textwrap import dedent
-
- import distutils.sysconfig
- import distutils.ccompiler
- from distutils.errors import CompileError, LinkError
-
- for target in self._pkg_data.get('ext_modules', []): # list of dicts
- ext = Extension(
- self.pn(target['name']),
- sources=[self.pn(x) for x in target['src']],
- libraries=[self.pn(x) for x in target.get('lib')],
- )
- # debug('test1 in target', 'test' in target, target)
- if 'test' not in target: # no test, just hope it works
- self._ext_modules.append(ext)
- continue
- if sys.version_info[:2] == (3, 4) and platform.system() == 'Windows':
- # this is giving problems on appveyor, so skip
- if 'FORCE_C_BUILD_TEST' not in os.environ:
- self._ext_modules.append(ext)
- continue
- # write a temporary .c file to compile
- c_code = dedent(target['test'])
- try:
- tmp_dir = tempfile.mkdtemp(prefix='tmp_ruamel_')
- bin_file_name = 'test' + self.pn(target['name'])
- file_name = os.path.join(tmp_dir, bin_file_name + '.c')
- print('test compiling', file_name, '->', bin_file_name, end=' ')
- with open(file_name, 'w') as fp: # write source
- fp.write(c_code)
- # and try to compile it
- compiler = distutils.ccompiler.new_compiler()
- assert isinstance(compiler, distutils.ccompiler.CCompiler)
- # do any platform specific initialisations
- distutils.sysconfig.customize_compiler(compiler)
- # make sure you can reach header files because compile does change dir
- compiler.add_include_dir(os.getcwd())
- if sys.version_info < (3,):
- tmp_dir = tmp_dir.encode('utf-8')
- # used to be a different directory, not necessary
- compile_out_dir = tmp_dir
- try:
- compiler.link_executable(
- compiler.compile([file_name], output_dir=compile_out_dir),
- bin_file_name,
- output_dir=tmp_dir,
- libraries=ext.libraries,
- )
- except CompileError:
- debug('compile error:', file_name)
- print('compile error:', file_name)
- raise
- except LinkError:
- debug('link error', file_name)
- print('link error', file_name)
- raise
- print('OK')
- self._ext_modules.append(ext)
- except Exception as e: # NOQA
- debug('Exception:', e)
- print('Exception:', e)
- sys.exit(1)
- if sys.version_info[:2] == (3, 4) and platform.system() == 'Windows':
- traceback.print_exc()
- finally:
- shutil.rmtree(tmp_dir)
- return self._ext_modules
+ # this used to use distutils
@property
def test_suite(self):
@@ -854,10 +767,6 @@ class NameSpacePackager(object):
if os.path.exists(file_name): # add it if not in there?
return False
with open(file_name, 'w') as fp:
- if os.path.exists('LICENSE'):
- fp.write('[metadata]\nlicense_file = LICENSE\n')
- else:
- print('\n\n>>>>>> LICENSE file not found <<<<<\n\n')
if self._pkg_data.get('universal'):
fp.write('[bdist_wheel]\nuniversal = 1\n')
try:
@@ -869,25 +778,72 @@ class NameSpacePackager(object):
return True
-# # call setup
+class TmpFiles:
+ def __init__(self, pkg_data, py_project=True, keep=False):
+ self._rm_after = []
+ self._pkg_data = pkg_data
+ self._py_project = py_project
+ self._bdist_wheel = 'bdist_wheel' in sys.argv
+ self._keep = keep
+
+ def __enter__(self):
+ self.bdist_wheel()
+ return
+ self.py_project()
+
+ def bdist_wheel(self):
+ """pyproject doesn't allow for universal, so use setup.cfg if necessary
+ """
+ file_name = 'setup.cfg'
+ if not self._bdist_wheel or os.path.exists(file_name):
+ return
+ if self._pkg_data.get('universal'):
+ self._rm_after.append(file_name)
+ with open(file_name, 'w') as fp:
+ fp.write('[bdist_wheel]\nuniversal = 1\n')
+
+ def py_project(self):
+ """
+ to prevent pip from complaining, or is it too late to create it from setup.py
+ """
+ file_name = 'pyproject.toml'
+ if not self._py_project or os.path.exists(file_name):
+ return
+ self._rm_after.append(file_name)
+ with open(file_name, 'w') as fp:
+ fp.write(dedent("""\
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ # test
+ build-backend = "setuptools.build_meta"
+ """))
+
+ def __exit__(self, typ, value, traceback):
+ if self._keep:
+ return
+ for p in self._rm_after:
+ if not os.path.exists(p):
+ print('file {} already removed'.format(p))
+ else:
+ os.unlink(p)
+
+
+# call setup
def main():
dump_kw = '--dump-kw'
if dump_kw in sys.argv:
import wheel
- import distutils
import setuptools
+ import pip
print('python: ', sys.version)
+ print('pip: ', pip.__version__)
print('setuptools:', setuptools.__version__)
- print('distutils: ', distutils.__version__)
print('wheel: ', wheel.__version__)
nsp = NameSpacePackager(pkg_data)
nsp.check()
- nsp.create_dirs()
+ # nsp.create_dirs()
MySdist.nsp = nsp
- if pkg_data.get('tarfmt'):
- MySdist.tarfmt = pkg_data.get('tarfmt')
-
cmdclass = dict(install_lib=MyInstallLib, sdist=MySdist)
if _bdist_wheel_available:
MyBdistWheel.nsp = nsp
@@ -895,7 +851,6 @@ def main():
kw = dict(
name=nsp.full_package_name,
- namespace_packages=nsp.namespace_packages,
version=version_str,
packages=nsp.packages,
python_requires=nsp.python_requires,
@@ -914,12 +869,13 @@ def main():
package_data=nsp.package_data,
ext_modules=nsp.ext_modules,
test_suite=nsp.test_suite,
+ zip_safe=False,
)
if '--version' not in sys.argv and ('--verbose' in sys.argv or dump_kw in sys.argv):
for k in sorted(kw):
v = kw[k]
- print(' "{0}": "{1}",'.format(k, v))
+ print(' "{0}": {1},'.format(k, repr(v)))
# if '--record' in sys.argv:
# return
if dump_kw in sys.argv:
@@ -931,31 +887,33 @@ def main():
except Exception:
pass
- if nsp.wheel(kw, setup):
- return
- for x in ['-c', 'egg_info', '--egg-base', 'pip-egg-info']:
- if x not in sys.argv:
- break
- else:
- # we're doing a tox setup install any starred package by searching up the source tree
- # until you match your/package/name for your.package.name
- for p in nsp.install_pre:
- import subprocess
-
- # search other source
- setup_path = os.path.join(*p.split('.') + ['setup.py'])
- try_dir = os.path.dirname(sys.executable)
- while len(try_dir) > 1:
- full_path_setup_py = os.path.join(try_dir, setup_path)
- if os.path.exists(full_path_setup_py):
- pip = sys.executable.replace('python', 'pip')
- cmd = [pip, 'install', os.path.dirname(full_path_setup_py)]
- # with open('/var/tmp/notice', 'a') as fp:
- # print('installing', cmd, file=fp)
- subprocess.check_output(cmd)
- break
- try_dir = os.path.dirname(try_dir)
- setup(**kw)
+ # if nsp.wheel(kw, setup):
+ # return
+ with TmpFiles(pkg_data, keep=True):
+ for x in ['-c', 'egg_info', '--egg-base', 'pip-egg-info']:
+ if x not in sys.argv:
+ break
+ else:
+            # we're doing a tox setup; install any starred package by searching up the
+            # source tree for the your/package/name path matching your.package.name
+ for p in nsp.install_pre:
+ import subprocess
+
+ # search other source
+ setup_path = os.path.join(*p.split('.') + ['setup.py'])
+ try_dir = os.path.dirname(sys.executable)
+ while len(try_dir) > 1:
+ full_path_setup_py = os.path.join(try_dir, setup_path)
+ if os.path.exists(full_path_setup_py):
+ pip = sys.executable.replace('python', 'pip')
+ cmd = [pip, 'install', os.path.dirname(full_path_setup_py)]
+ # with open('/var/tmp/notice', 'a') as fp:
+ # print('installing', cmd, file=fp)
+ subprocess.check_output(cmd)
+ break
+ try_dir = os.path.dirname(try_dir)
+ setup(**kw)
+ print('done')
main()
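
For reference, the TmpFiles context manager introduced above is what main() now wraps
the call to setup() in (there with keep=True). A minimal sketch of its behaviour, using
a hypothetical pkg_data dict::

    pkg_data = {'universal': 1}              # hypothetical minimal pkg_data

    with TmpFiles(pkg_data, py_project=True, keep=False):
        # __enter__ runs bdist_wheel(): when 'bdist_wheel' is in sys.argv and
        # pkg_data requests 'universal', a temporary setup.cfg is written;
        # py_project() (a temporary pyproject.toml) is skipped by the early return
        pass                                 # setup(**kw) would run here
    # __exit__ unlinks every file registered in _rm_after, unless keep=True was given
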
diff --git a/timestamp.py b/timestamp.py
index 58eef04..4ab695f 100644
--- a/timestamp.py
+++ b/timestamp.py
@@ -6,38 +6,33 @@ import copy
# ToDo: at least on PY3 you could probably attach the tzinfo correctly to the object
# a more complete datetime might be used by safe loading as well
-if False: # MYPY
- from typing import Any, Dict, Optional, List # NOQA
+from typing import Any, Dict, Optional, List # NOQA
class TimeStamp(datetime.datetime):
- def __init__(self, *args, **kw):
- # type: (Any, Any) -> None
- self._yaml = dict(t=False, tz=None, delta=0) # type: Dict[Any, Any]
+ def __init__(self, *args: Any, **kw: Any) -> None:
+ self._yaml: Dict[Any, Any] = dict(t=False, tz=None, delta=0)
- def __new__(cls, *args, **kw): # datetime is immutable
- # type: (Any, Any) -> Any
+ def __new__(cls, *args: Any, **kw: Any) -> Any: # datetime is immutable
return datetime.datetime.__new__(cls, *args, **kw)
- def __deepcopy__(self, memo):
- # type: (Any) -> Any
+ def __deepcopy__(self, memo: Any) -> Any:
ts = TimeStamp(self.year, self.month, self.day, self.hour, self.minute, self.second)
ts._yaml = copy.deepcopy(self._yaml)
return ts
def replace(
self,
- year=None,
- month=None,
- day=None,
- hour=None,
- minute=None,
- second=None,
- microsecond=None,
- tzinfo=True,
- fold=None,
- ):
- # type: (Any, Any, Any, Any, Any, Any, Any, Any, Any) -> Any
+ year: Any = None,
+ month: Any = None,
+ day: Any = None,
+ hour: Any = None,
+ minute: Any = None,
+ second: Any = None,
+ microsecond: Any = None,
+ tzinfo: Any = True,
+ fold: Any = None,
+ ) -> Any:
if year is None:
year = self.year
if month is None:
diff --git a/tokens.py b/tokens.py
index bc302ba..0cf37f2 100644
--- a/tokens.py
+++ b/tokens.py
@@ -1,10 +1,9 @@
# coding: utf-8
-from ruamel.yaml.compat import _F, nprintf # NOQA
+from ruamel.yaml.compat import nprintf # NOQA
-if False: # MYPY
- from typing import Text, Any, Dict, Optional, List # NOQA
- from .error import StreamMark # NOQA
+from typing import Text, Any, Dict, Optional, List # NOQA
+from .error import StreamMark # NOQA
SHOW_LINES = True
@@ -12,23 +11,19 @@ SHOW_LINES = True
class Token:
__slots__ = 'start_mark', 'end_mark', '_comment'
- def __init__(self, start_mark, end_mark):
- # type: (StreamMark, StreamMark) -> None
+ def __init__(self, start_mark: StreamMark, end_mark: StreamMark) -> None:
self.start_mark = start_mark
self.end_mark = end_mark
- def __repr__(self):
- # type: () -> Any
+ def __repr__(self) -> Any:
# attributes = [key for key in self.__slots__ if not key.endswith('_mark') and
# hasattr('self', key)]
attributes = [key for key in self.__slots__ if not key.endswith('_mark')]
attributes.sort()
# arguments = ', '.join(
- # [_F('{key!s}={gattr!r})', key=key, gattr=getattr(self, key)) for key in attributes]
+ # [f'{key!s}={getattr(self, key)!r})' for key in attributes]
# )
- arguments = [
- _F('{key!s}={gattr!r}', key=key, gattr=getattr(self, key)) for key in attributes
- ]
+ arguments = [f'{key!s}={getattr(self, key)!r}' for key in attributes]
if SHOW_LINES:
try:
arguments.append('line: ' + str(self.start_mark.line))
@@ -38,16 +33,14 @@ class Token:
arguments.append('comment: ' + str(self._comment))
except: # NOQA
pass
- return '{}({})'.format(self.__class__.__name__, ', '.join(arguments))
+ return f'{self.__class__.__name__}({", ".join(arguments)})'
@property
- def column(self):
- # type: () -> int
+ def column(self) -> int:
return self.start_mark.column
@column.setter
- def column(self, pos):
- # type: (Any) -> None
+ def column(self, pos: Any) -> None:
self.start_mark.column = pos
# old style ( <= 0.17) is a TWO element list with first being the EOL
@@ -61,8 +54,7 @@ class Token:
# new style routines add one comment at a time
# going to be deprecated in favour of add_comment_eol/post
- def add_post_comment(self, comment):
- # type: (Any) -> None
+ def add_post_comment(self, comment: Any) -> None:
if not hasattr(self, '_comment'):
self._comment = [None, None]
else:
@@ -73,8 +65,7 @@ class Token:
self._comment[0] = comment
# going to be deprecated in favour of add_comment_pre
- def add_pre_comments(self, comments):
- # type: (Any) -> None
+ def add_pre_comments(self, comments: Any) -> None:
if not hasattr(self, '_comment'):
self._comment = [None, None]
else:
@@ -84,8 +75,7 @@ class Token:
return
# new style
- def add_comment_pre(self, comment):
- # type: (Any) -> None
+ def add_comment_pre(self, comment: Any) -> None:
if not hasattr(self, '_comment'):
self._comment = [[], None, None] # type: ignore
else:
@@ -94,8 +84,7 @@ class Token:
self._comment[0] = [] # type: ignore
self._comment[0].append(comment) # type: ignore
- def add_comment_eol(self, comment, comment_type):
- # type: (Any, Any) -> None
+ def add_comment_eol(self, comment: Any, comment_type: Any) -> None:
if not hasattr(self, '_comment'):
self._comment = [None, None, None]
else:
@@ -107,8 +96,7 @@ class Token:
# nprintf('commy', self.comment, comment_type)
self._comment[1][comment_type] = comment # type: ignore
- def add_comment_post(self, comment):
- # type: (Any) -> None
+ def add_comment_post(self, comment: Any) -> None:
if not hasattr(self, '_comment'):
self._comment = [None, None, []] # type: ignore
else:
@@ -117,17 +105,14 @@ class Token:
self._comment[2] = [] # type: ignore
self._comment[2].append(comment) # type: ignore
- # def get_comment(self):
- # # type: () -> Any
+ # def get_comment(self) -> Any:
# return getattr(self, '_comment', None)
@property
- def comment(self):
- # type: () -> Any
+ def comment(self) -> Any:
return getattr(self, '_comment', None)
- def move_old_comment(self, target, empty=False):
- # type: (Any, bool) -> Any
+ def move_old_comment(self, target: Any, empty: bool = False) -> Any:
"""move a comment from this token to target (normally next token)
used to combine e.g. comments before a BlockEntryToken to the
ScalarToken that follows it
@@ -149,15 +134,14 @@ class Token:
# nprint('mco2:', self, target, target.comment, empty)
return self
if c[0] and tc[0] or c[1] and tc[1]:
- raise NotImplementedError(_F('overlap in comment {c!r} {tc!r}', c=c, tc=tc))
+ raise NotImplementedError(f'overlap in comment {c!r} {tc!r}')
if c[0]:
tc[0] = c[0]
if c[1]:
tc[1] = c[1]
return self
- def split_old_comment(self):
- # type: () -> Any
+ def split_old_comment(self) -> Any:
""" split the post part of a comment, and return it
as comment to be added. Delete second part if [None, None]
abc: # this goes to sequence
@@ -172,8 +156,7 @@ class Token:
delattr(self, '_comment')
return ret_val
- def move_new_comment(self, target, empty=False):
- # type: (Any, bool) -> Any
+ def move_new_comment(self, target: Any, empty: bool = False) -> Any:
"""move a comment from this token to target (normally next token)
used to combine e.g. comments before a BlockEntryToken to the
ScalarToken that follows it
@@ -197,7 +180,7 @@ class Token:
# if self and target have both pre, eol or post comments, something seems wrong
for idx in range(3):
if c[idx] is not None and tc[idx] is not None:
- raise NotImplementedError(_F('overlap in comment {c!r} {tc!r}', c=c, tc=tc))
+ raise NotImplementedError(f'overlap in comment {c!r} {tc!r}')
# move the comment parts
for idx in range(3):
if c[idx]:
@@ -213,8 +196,7 @@ class DirectiveToken(Token):
__slots__ = 'name', 'value'
id = '<directive>'
- def __init__(self, name, value, start_mark, end_mark):
- # type: (Any, Any, Any, Any) -> None
+ def __init__(self, name: Any, value: Any, start_mark: Any, end_mark: Any) -> None:
Token.__init__(self, start_mark, end_mark)
self.name = name
self.value = value
@@ -234,8 +216,9 @@ class StreamStartToken(Token):
__slots__ = ('encoding',)
id = '<stream start>'
- def __init__(self, start_mark=None, end_mark=None, encoding=None):
- # type: (Any, Any, Any) -> None
+ def __init__(
+ self, start_mark: Any = None, end_mark: Any = None, encoding: Any = None
+ ) -> None:
Token.__init__(self, start_mark, end_mark)
self.encoding = encoding
@@ -284,9 +267,8 @@ class KeyToken(Token):
__slots__ = ()
id = '?'
- # def x__repr__(self):
- # return 'KeyToken({})'.format(
- # self.start_mark.buffer[self.start_mark.index:].split(None, 1)[0])
+# def x__repr__(self):
+# return f'KeyToken({self.start_mark.buffer[self.start_mark.index:].split(None, 1)[0]})'
class ValueToken(Token):
@@ -308,8 +290,7 @@ class AliasToken(Token):
__slots__ = ('value',)
id = '<alias>'
- def __init__(self, value, start_mark, end_mark):
- # type: (Any, Any, Any) -> None
+ def __init__(self, value: Any, start_mark: Any, end_mark: Any) -> None:
Token.__init__(self, start_mark, end_mark)
self.value = value
@@ -318,8 +299,7 @@ class AnchorToken(Token):
__slots__ = ('value',)
id = '<anchor>'
- def __init__(self, value, start_mark, end_mark):
- # type: (Any, Any, Any) -> None
+ def __init__(self, value: Any, start_mark: Any, end_mark: Any) -> None:
Token.__init__(self, start_mark, end_mark)
self.value = value
@@ -328,8 +308,7 @@ class TagToken(Token):
__slots__ = ('value',)
id = '<tag>'
- def __init__(self, value, start_mark, end_mark):
- # type: (Any, Any, Any) -> None
+ def __init__(self, value: Any, start_mark: Any, end_mark: Any) -> None:
Token.__init__(self, start_mark, end_mark)
self.value = value
@@ -338,8 +317,9 @@ class ScalarToken(Token):
__slots__ = 'value', 'plain', 'style'
id = '<scalar>'
- def __init__(self, value, plain, start_mark, end_mark, style=None):
- # type: (Any, Any, Any, Any, Any) -> None
+ def __init__(
+ self, value: Any, plain: Any, start_mark: Any, end_mark: Any, style: Any = None
+ ) -> None:
Token.__init__(self, start_mark, end_mark)
self.value = value
self.plain = plain
@@ -347,11 +327,12 @@ class ScalarToken(Token):
class CommentToken(Token):
- __slots__ = '_value', 'pre_done'
+ __slots__ = '_value', '_column', 'pre_done'
id = '<comment>'
- def __init__(self, value, start_mark=None, end_mark=None, column=None):
- # type: (Any, Any, Any, Any) -> None
+ def __init__(
+ self, value: Any, start_mark: Any = None, end_mark: Any = None, column: Any = None
+ ) -> None:
if start_mark is None:
assert column is not None
self._column = column
@@ -359,25 +340,21 @@ class CommentToken(Token):
self._value = value
@property
- def value(self):
- # type: () -> str
+ def value(self) -> str:
if isinstance(self._value, str):
return self._value
return "".join(self._value)
@value.setter
- def value(self, val):
- # type: (Any) -> None
+ def value(self, val: Any) -> None:
self._value = val
- def reset(self):
- # type: () -> None
+ def reset(self) -> None:
if hasattr(self, 'pre_done'):
delattr(self, 'pre_done')
- def __repr__(self):
- # type: () -> Any
- v = '{!r}'.format(self.value)
+ def __repr__(self) -> Any:
+ v = f'{self.value!r}'
if SHOW_LINES:
try:
v += ', line: ' + str(self.start_mark.line)
@@ -387,10 +364,9 @@ class CommentToken(Token):
v += ', col: ' + str(self.start_mark.column)
except: # NOQA
pass
- return 'CommentToken({})'.format(v)
+ return f'CommentToken({v})'
- def __eq__(self, other):
- # type: (Any) -> bool
+ def __eq__(self, other: Any) -> bool:
if self.start_mark != other.start_mark:
return False
if self.end_mark != other.end_mark:
@@ -399,6 +375,5 @@ class CommentToken(Token):
return False
return True
- def __ne__(self, other):
- # type: (Any) -> bool
+ def __ne__(self, other: Any) -> bool:
return not self.__eq__(other)
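
A CommentToken can be built without stream marks as long as a column is given, and its
value property joins a list of string fragments transparently; the f-string based
__repr__ simply leaves out line/col information when no marks are available. A quick
sketch with made-up comment text::

    from ruamel.yaml.tokens import CommentToken

    ct = CommentToken(['# part one', ' # part two\n'], column=0)  # no marks, so column is required
    print(ct.value)        # the property joins the fragments into one string
    ct.value = '# replaced\n'
    print(repr(ct))        # CommentToken('# replaced\n'), without line/col
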
diff --git a/util.py b/util.py
index 9ff51bd..39d71b4 100644
--- a/util.py
+++ b/util.py
@@ -9,9 +9,8 @@ from functools import partial
import re
-if False: # MYPY
- from typing import Any, Dict, Optional, List, Text # NOQA
- from .compat import StreamTextType # NOQA
+from typing import Any, Dict, Optional, List, Text, Callable, Union # NOQA
+from .compat import StreamTextType # NOQA
class LazyEval:
@@ -25,25 +24,21 @@ class LazyEval:
return value (or, prior to evaluation, func and arguments), in its closure.
"""
- def __init__(self, func, *args, **kwargs):
- # type: (Any, Any, Any) -> None
- def lazy_self():
- # type: () -> Any
+ def __init__(self, func: Callable[..., Any], *args: Any, **kwargs: Any) -> None:
+ def lazy_self() -> Any:
return_value = func(*args, **kwargs)
object.__setattr__(self, 'lazy_self', lambda: return_value)
return return_value
object.__setattr__(self, 'lazy_self', lazy_self)
- def __getattribute__(self, name):
- # type: (Any) -> Any
+ def __getattribute__(self, name: str) -> Any:
lazy_self = object.__getattribute__(self, 'lazy_self')
if name == 'lazy_self':
return lazy_self
return getattr(lazy_self(), name)
- def __setattr__(self, name, value):
- # type: (Any, Any) -> None
+ def __setattr__(self, name: str, value: Any) -> None:
setattr(self.lazy_self(), name, value)
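
LazyEval postpones func(*args, **kwargs) until the first attribute access and then
memoizes the result by rebinding lazy_self in its closure; util.py applies it to
re.compile through its RegExp helper, which is why timestamp_regexp below is only
compiled on first use. The same pattern in isolation::

    import re
    from functools import partial
    from ruamel.yaml.util import LazyEval

    RegExp = partial(LazyEval, re.compile)   # mirrors the module's own helper
    pattern = RegExp(r'\d+')                 # nothing compiled yet
    print(pattern.match('123').group())      # first attribute access compiles and caches
    print(pattern.pattern)                   # later accesses reuse the stored result
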
@@ -65,9 +60,19 @@ timestamp_regexp = RegExp(
def create_timestamp(
- year, month, day, t, hour, minute, second, fraction, tz, tz_sign, tz_hour, tz_minute
-):
- # type: (Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) -> Any
+ year: Any,
+ month: Any,
+ day: Any,
+ t: Any,
+ hour: Any,
+ minute: Any,
+ second: Any,
+ fraction: Any,
+ tz: Any,
+ tz_sign: Any,
+ tz_hour: Any,
+ tz_minute: Any,
+) -> Union[datetime.datetime, datetime.date]:
# create a timestamp from match against timestamp_regexp
MAX_FRAC = 999999
year = int(year)
@@ -122,8 +127,7 @@ def create_timestamp(
# if you use this in your code, I suggest adding a test to your test suite
# that checks this routine's output against a known piece of your YAML
# before upgrades to this code break your round-tripped YAML
-def load_yaml_guess_indent(stream, **kw):
- # type: (StreamTextType, Any) -> Any
+def load_yaml_guess_indent(stream: StreamTextType, **kw: Any) -> Any:
"""guess the indent and block sequence indent of yaml stream/string
returns round_trip_loaded stream, indent level, block sequence indent
@@ -134,15 +138,14 @@ def load_yaml_guess_indent(stream, **kw):
from .main import YAML
# load a YAML document, guess the indentation, if you use TABs you are on your own
- def leading_spaces(line):
- # type: (Any) -> int
+ def leading_spaces(line: Any) -> int:
idx = 0
while idx < len(line) and line[idx] == ' ':
idx += 1
return idx
if isinstance(stream, str):
- yaml_str = stream # type: Any
+ yaml_str: Any = stream
elif isinstance(stream, bytes):
# most likely, but the Reader checks BOM for this
yaml_str = stream.decode('utf-8')
@@ -183,11 +186,10 @@ def load_yaml_guess_indent(stream, **kw):
if indent is None and map_indent is not None:
indent = map_indent
yaml = YAML()
- return yaml.load(yaml_str, **kw), indent, block_seq_indent # type: ignore
+ return yaml.load(yaml_str, **kw), indent, block_seq_indent
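
load_yaml_guess_indent keeps its documented return shape of round-trip-loaded data,
mapping indent and block sequence indent; only the annotations changed. A short sketch
with made-up input::

    from ruamel.yaml.util import load_yaml_guess_indent

    data, indent, block_seq_indent = load_yaml_guess_indent('abc:\n  - d\n  - e\n')
    print(data)                       # round-trip loaded mapping
    print(indent, block_seq_indent)   # guessed values, usable as YAML() indent settings
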
-def configobj_walker(cfg):
- # type: (Any) -> Any
+def configobj_walker(cfg: Any) -> Any:
"""
walks over a ConfigObj (INI file with comments) generating
corresponding YAML output (including comments
@@ -206,8 +208,7 @@ def configobj_walker(cfg):
yield c
-def _walk_section(s, level=0):
- # type: (Any, int) -> Any
+def _walk_section(s: Any, level: int = 0) -> Any:
from configobj import Section
assert isinstance(s, Section)
@@ -221,7 +222,7 @@ def _walk_section(s, level=0):
x = '|\n' + i + x.strip().replace('\n', '\n' + i)
elif ':' in x:
x = "'" + x.replace("'", "''") + "'"
- line = '{0}{1}: {2}'.format(indent, name, x)
+ line = f'{indent}{name}: {x}'
c = s.inline_comments[name]
if c:
line += ' ' + c
@@ -229,7 +230,7 @@ def _walk_section(s, level=0):
for name in s.sections:
for c in s.comments[name]:
yield indent + c.strip()
- line = '{0}{1}:'.format(indent, name)
+ line = f'{indent}{name}:'
c = s.inline_comments[name]
if c:
line += ' ' + c