summaryrefslogtreecommitdiff
path: root/tests
diff options
context:
space:
mode:
Diffstat (limited to 'tests')
-rw-r--r--tests/data/construct-binary-py2.code (renamed from tests/data/construct-binary.code)0
-rw-r--r--tests/data/construct-binary-py2.data (renamed from tests/data/construct-binary.data)0
-rw-r--r--tests/data/construct-binary-py3.code7
-rw-r--r--tests/data/construct-binary-py3.data12
-rw-r--r--tests/data/construct-python-bytes-py3.code1
-rw-r--r--tests/data/construct-python-bytes-py3.data1
-rw-r--r--tests/data/construct-python-long-short-py2.code (renamed from tests/data/construct-python-long-short.code)0
-rw-r--r--tests/data/construct-python-long-short-py2.data (renamed from tests/data/construct-python-long-short.data)0
-rw-r--r--tests/data/construct-python-long-short-py3.code1
-rw-r--r--tests/data/construct-python-long-short-py3.data1
-rw-r--r--tests/data/construct-python-name-module.code2
-rw-r--r--tests/data/construct-python-name-module.data2
-rw-r--r--tests/data/construct-python-str-utf8-py2.code (renamed from tests/data/construct-python-str-utf8.code)0
-rw-r--r--tests/data/construct-python-str-utf8-py2.data (renamed from tests/data/construct-python-str-utf8.data)0
-rw-r--r--tests/data/construct-python-str-utf8-py3.code1
-rw-r--r--tests/data/construct-python-str-utf8-py3.data1
-rw-r--r--tests/data/construct-python-unicode-ascii-py2.code (renamed from tests/data/construct-python-unicode-ascii.code)0
-rw-r--r--tests/data/construct-python-unicode-ascii-py2.data (renamed from tests/data/construct-python-unicode-ascii.data)0
-rw-r--r--tests/data/construct-python-unicode-ascii-py3.code1
-rw-r--r--tests/data/construct-python-unicode-ascii-py3.data1
-rw-r--r--tests/data/construct-python-unicode-utf8-py2.code (renamed from tests/data/construct-python-unicode-utf8.code)0
-rw-r--r--tests/data/construct-python-unicode-utf8-py2.data (renamed from tests/data/construct-python-unicode-utf8.data)0
-rw-r--r--tests/data/construct-python-unicode-utf8-py3.code1
-rw-r--r--tests/data/construct-python-unicode-utf8-py3.data1
-rw-r--r--tests/data/construct-str-utf8-py2.code (renamed from tests/data/construct-str-utf8.code)0
-rw-r--r--tests/data/construct-str-utf8-py2.data (renamed from tests/data/construct-str-utf8.data)0
-rw-r--r--tests/data/construct-str-utf8-py3.code1
-rw-r--r--tests/data/construct-str-utf8-py3.data1
-rw-r--r--tests/data/emitting-unacceptable-unicode-character-bug-py2.code (renamed from tests/data/emitting-unacceptable-unicode-character-bug.code)0
-rw-r--r--tests/data/emitting-unacceptable-unicode-character-bug-py2.data (renamed from tests/data/emitting-unacceptable-unicode-character-bug.data)0
-rw-r--r--tests/data/emitting-unacceptable-unicode-character-bug-py2.skip-ext (renamed from tests/data/emitting-unacceptable-unicode-character-bug.skip-ext)0
-rw-r--r--tests/data/emitting-unacceptable-unicode-character-bug-py3.code1
-rw-r--r--tests/data/emitting-unacceptable-unicode-character-bug-py3.data1
-rw-r--r--tests/data/emitting-unacceptable-unicode-character-bug-py3.skip-ext0
-rw-r--r--tests/data/serializer-is-already-opened.dumper-error2
-rw-r--r--tests/data/serializer-is-closed-1.dumper-error2
-rw-r--r--tests/data/serializer-is-closed-2.dumper-error2
-rw-r--r--tests/data/serializer-is-not-opened-1.dumper-error2
-rw-r--r--tests/data/serializer-is-not-opened-2.dumper-error2
-rw-r--r--tests/lib/canonical.py (renamed from tests/canonical.py)0
-rw-r--r--tests/lib/test_all.py (renamed from tests/test_all.py)0
-rw-r--r--tests/lib/test_appliance.py (renamed from tests/test_appliance.py)2
-rw-r--r--tests/lib/test_build.py (renamed from tests/test_build.py)0
-rw-r--r--tests/lib/test_build_ext.py (renamed from tests/test_build_ext.py)0
-rw-r--r--tests/lib/test_canonical.py (renamed from tests/test_canonical.py)0
-rw-r--r--tests/lib/test_constructor.py (renamed from tests/test_constructor.py)0
-rw-r--r--tests/lib/test_emitter.py (renamed from tests/test_emitter.py)0
-rw-r--r--tests/lib/test_errors.py (renamed from tests/test_errors.py)3
-rw-r--r--tests/lib/test_mark.py (renamed from tests/test_mark.py)0
-rw-r--r--tests/lib/test_reader.py (renamed from tests/test_reader.py)0
-rw-r--r--tests/lib/test_recursive.py (renamed from tests/test_recursive.py)0
-rw-r--r--tests/lib/test_representer.py (renamed from tests/test_representer.py)0
-rw-r--r--tests/lib/test_resolver.py (renamed from tests/test_resolver.py)0
-rw-r--r--tests/lib/test_structure.py (renamed from tests/test_structure.py)0
-rw-r--r--tests/lib/test_tokens.py (renamed from tests/test_tokens.py)0
-rw-r--r--tests/lib/test_yaml.py (renamed from tests/test_yaml.py)0
-rw-r--r--tests/lib/test_yaml_ext.py (renamed from tests/test_yaml_ext.py)0
-rw-r--r--tests/lib3/canonical.py358
-rw-r--r--tests/lib3/test_all.py15
-rw-r--r--tests/lib3/test_appliance.py145
-rw-r--r--tests/lib3/test_build.py10
-rw-r--r--tests/lib3/test_build_ext.py11
-rw-r--r--tests/lib3/test_canonical.py40
-rw-r--r--tests/lib3/test_constructor.py258
-rw-r--r--tests/lib3/test_emitter.py100
-rw-r--r--tests/lib3/test_errors.py67
-rw-r--r--tests/lib3/test_mark.py32
-rw-r--r--tests/lib3/test_reader.py34
-rw-r--r--tests/lib3/test_recursive.py51
-rw-r--r--tests/lib3/test_representer.py42
-rw-r--r--tests/lib3/test_resolver.py92
-rw-r--r--tests/lib3/test_structure.py187
-rw-r--r--tests/lib3/test_tokens.py77
-rw-r--r--tests/lib3/test_yaml.py17
-rw-r--r--tests/lib3/test_yaml_ext.py273
75 files changed, 1853 insertions, 8 deletions
diff --git a/tests/data/construct-binary.code b/tests/data/construct-binary-py2.code
index 67ac0d5..67ac0d5 100644
--- a/tests/data/construct-binary.code
+++ b/tests/data/construct-binary-py2.code
diff --git a/tests/data/construct-binary.data b/tests/data/construct-binary-py2.data
index dcdb16f..dcdb16f 100644
--- a/tests/data/construct-binary.data
+++ b/tests/data/construct-binary-py2.data
diff --git a/tests/data/construct-binary-py3.code b/tests/data/construct-binary-py3.code
new file mode 100644
index 0000000..30bfc3f
--- /dev/null
+++ b/tests/data/construct-binary-py3.code
@@ -0,0 +1,7 @@
+{
+ "canonical":
+ b"GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;",
+ "generic":
+ b"GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;",
+ "description": "The binary value above is a tiny arrow encoded as a gif image.",
+}
diff --git a/tests/data/construct-binary-py3.data b/tests/data/construct-binary-py3.data
new file mode 100644
index 0000000..dcdb16f
--- /dev/null
+++ b/tests/data/construct-binary-py3.data
@@ -0,0 +1,12 @@
+canonical: !!binary "\
+ R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\
+ OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\
+ +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\
+ AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs="
+generic: !!binary |
+ R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5
+ OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+
+ +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC
+ AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=
+description:
+ The binary value above is a tiny arrow encoded as a gif image.
diff --git a/tests/data/construct-python-bytes-py3.code b/tests/data/construct-python-bytes-py3.code
new file mode 100644
index 0000000..b9051d8
--- /dev/null
+++ b/tests/data/construct-python-bytes-py3.code
@@ -0,0 +1 @@
+b'some binary data'
diff --git a/tests/data/construct-python-bytes-py3.data b/tests/data/construct-python-bytes-py3.data
new file mode 100644
index 0000000..9528725
--- /dev/null
+++ b/tests/data/construct-python-bytes-py3.data
@@ -0,0 +1 @@
+--- !!python/bytes 'c29tZSBiaW5hcnkgZGF0YQ=='
diff --git a/tests/data/construct-python-long-short.code b/tests/data/construct-python-long-short-py2.code
index fafc3f1..fafc3f1 100644
--- a/tests/data/construct-python-long-short.code
+++ b/tests/data/construct-python-long-short-py2.code
diff --git a/tests/data/construct-python-long-short.data b/tests/data/construct-python-long-short-py2.data
index 4bd5dc2..4bd5dc2 100644
--- a/tests/data/construct-python-long-short.data
+++ b/tests/data/construct-python-long-short-py2.data
diff --git a/tests/data/construct-python-long-short-py3.code b/tests/data/construct-python-long-short-py3.code
new file mode 100644
index 0000000..190a180
--- /dev/null
+++ b/tests/data/construct-python-long-short-py3.code
@@ -0,0 +1 @@
+123
diff --git a/tests/data/construct-python-long-short-py3.data b/tests/data/construct-python-long-short-py3.data
new file mode 100644
index 0000000..4bd5dc2
--- /dev/null
+++ b/tests/data/construct-python-long-short-py3.data
@@ -0,0 +1 @@
+!!python/long 123
diff --git a/tests/data/construct-python-name-module.code b/tests/data/construct-python-name-module.code
index 8f93503..6f39148 100644
--- a/tests/data/construct-python-name-module.code
+++ b/tests/data/construct-python-name-module.code
@@ -1 +1 @@
-[file, yaml.Loader, yaml.dump, abs, yaml.tokens]
+[str, yaml.Loader, yaml.dump, abs, yaml.tokens]
diff --git a/tests/data/construct-python-name-module.data b/tests/data/construct-python-name-module.data
index c8f8036..f0c9712 100644
--- a/tests/data/construct-python-name-module.data
+++ b/tests/data/construct-python-name-module.data
@@ -1,4 +1,4 @@
-- !!python/name:file
+- !!python/name:str
- !!python/name:yaml.Loader
- !!python/name:yaml.dump
- !!python/name:abs
diff --git a/tests/data/construct-python-str-utf8.code b/tests/data/construct-python-str-utf8-py2.code
index 47b28ab..47b28ab 100644
--- a/tests/data/construct-python-str-utf8.code
+++ b/tests/data/construct-python-str-utf8-py2.code
diff --git a/tests/data/construct-python-str-utf8.data b/tests/data/construct-python-str-utf8-py2.data
index 9ef2c72..9ef2c72 100644
--- a/tests/data/construct-python-str-utf8.data
+++ b/tests/data/construct-python-str-utf8-py2.data
diff --git a/tests/data/construct-python-str-utf8-py3.code b/tests/data/construct-python-str-utf8-py3.code
new file mode 100644
index 0000000..9f66032
--- /dev/null
+++ b/tests/data/construct-python-str-utf8-py3.code
@@ -0,0 +1 @@
+'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'
diff --git a/tests/data/construct-python-str-utf8-py3.data b/tests/data/construct-python-str-utf8-py3.data
new file mode 100644
index 0000000..9ef2c72
--- /dev/null
+++ b/tests/data/construct-python-str-utf8-py3.data
@@ -0,0 +1 @@
+--- !!python/str "Это уникодная строка"
diff --git a/tests/data/construct-python-unicode-ascii.code b/tests/data/construct-python-unicode-ascii-py2.code
index d4cd82c..d4cd82c 100644
--- a/tests/data/construct-python-unicode-ascii.code
+++ b/tests/data/construct-python-unicode-ascii-py2.code
diff --git a/tests/data/construct-python-unicode-ascii.data b/tests/data/construct-python-unicode-ascii-py2.data
index 3a0647b..3a0647b 100644
--- a/tests/data/construct-python-unicode-ascii.data
+++ b/tests/data/construct-python-unicode-ascii-py2.data
diff --git a/tests/data/construct-python-unicode-ascii-py3.code b/tests/data/construct-python-unicode-ascii-py3.code
new file mode 100644
index 0000000..d9d62f6
--- /dev/null
+++ b/tests/data/construct-python-unicode-ascii-py3.code
@@ -0,0 +1 @@
+"ascii string"
diff --git a/tests/data/construct-python-unicode-ascii-py3.data b/tests/data/construct-python-unicode-ascii-py3.data
new file mode 100644
index 0000000..3a0647b
--- /dev/null
+++ b/tests/data/construct-python-unicode-ascii-py3.data
@@ -0,0 +1 @@
+--- !!python/unicode "ascii string"
diff --git a/tests/data/construct-python-unicode-utf8.code b/tests/data/construct-python-unicode-utf8-py2.code
index 2793ac7..2793ac7 100644
--- a/tests/data/construct-python-unicode-utf8.code
+++ b/tests/data/construct-python-unicode-utf8-py2.code
diff --git a/tests/data/construct-python-unicode-utf8.data b/tests/data/construct-python-unicode-utf8-py2.data
index 5a980ea..5a980ea 100644
--- a/tests/data/construct-python-unicode-utf8.data
+++ b/tests/data/construct-python-unicode-utf8-py2.data
diff --git a/tests/data/construct-python-unicode-utf8-py3.code b/tests/data/construct-python-unicode-utf8-py3.code
new file mode 100644
index 0000000..9f66032
--- /dev/null
+++ b/tests/data/construct-python-unicode-utf8-py3.code
@@ -0,0 +1 @@
+'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'
diff --git a/tests/data/construct-python-unicode-utf8-py3.data b/tests/data/construct-python-unicode-utf8-py3.data
new file mode 100644
index 0000000..5a980ea
--- /dev/null
+++ b/tests/data/construct-python-unicode-utf8-py3.data
@@ -0,0 +1 @@
+--- !!python/unicode "Это уникодная строка"
diff --git a/tests/data/construct-str-utf8.code b/tests/data/construct-str-utf8-py2.code
index 2793ac7..2793ac7 100644
--- a/tests/data/construct-str-utf8.code
+++ b/tests/data/construct-str-utf8-py2.code
diff --git a/tests/data/construct-str-utf8.data b/tests/data/construct-str-utf8-py2.data
index e355f18..e355f18 100644
--- a/tests/data/construct-str-utf8.data
+++ b/tests/data/construct-str-utf8-py2.data
diff --git a/tests/data/construct-str-utf8-py3.code b/tests/data/construct-str-utf8-py3.code
new file mode 100644
index 0000000..9f66032
--- /dev/null
+++ b/tests/data/construct-str-utf8-py3.code
@@ -0,0 +1 @@
+'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'
diff --git a/tests/data/construct-str-utf8-py3.data b/tests/data/construct-str-utf8-py3.data
new file mode 100644
index 0000000..e355f18
--- /dev/null
+++ b/tests/data/construct-str-utf8-py3.data
@@ -0,0 +1 @@
+--- !!str "Это уникодная строка"
diff --git a/tests/data/emitting-unacceptable-unicode-character-bug.code b/tests/data/emitting-unacceptable-unicode-character-bug-py2.code
index 4b92854..4b92854 100644
--- a/tests/data/emitting-unacceptable-unicode-character-bug.code
+++ b/tests/data/emitting-unacceptable-unicode-character-bug-py2.code
diff --git a/tests/data/emitting-unacceptable-unicode-character-bug.data b/tests/data/emitting-unacceptable-unicode-character-bug-py2.data
index 2a5df00..2a5df00 100644
--- a/tests/data/emitting-unacceptable-unicode-character-bug.data
+++ b/tests/data/emitting-unacceptable-unicode-character-bug-py2.data
diff --git a/tests/data/emitting-unacceptable-unicode-character-bug.skip-ext b/tests/data/emitting-unacceptable-unicode-character-bug-py2.skip-ext
index e69de29..e69de29 100644
--- a/tests/data/emitting-unacceptable-unicode-character-bug.skip-ext
+++ b/tests/data/emitting-unacceptable-unicode-character-bug-py2.skip-ext
diff --git a/tests/data/emitting-unacceptable-unicode-character-bug-py3.code b/tests/data/emitting-unacceptable-unicode-character-bug-py3.code
new file mode 100644
index 0000000..2a5df00
--- /dev/null
+++ b/tests/data/emitting-unacceptable-unicode-character-bug-py3.code
@@ -0,0 +1 @@
+"\udd00"
diff --git a/tests/data/emitting-unacceptable-unicode-character-bug-py3.data b/tests/data/emitting-unacceptable-unicode-character-bug-py3.data
new file mode 100644
index 0000000..2a5df00
--- /dev/null
+++ b/tests/data/emitting-unacceptable-unicode-character-bug-py3.data
@@ -0,0 +1 @@
+"\udd00"
diff --git a/tests/data/emitting-unacceptable-unicode-character-bug-py3.skip-ext b/tests/data/emitting-unacceptable-unicode-character-bug-py3.skip-ext
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/data/emitting-unacceptable-unicode-character-bug-py3.skip-ext
diff --git a/tests/data/serializer-is-already-opened.dumper-error b/tests/data/serializer-is-already-opened.dumper-error
index 5ac2e4b..9a23525 100644
--- a/tests/data/serializer-is-already-opened.dumper-error
+++ b/tests/data/serializer-is-already-opened.dumper-error
@@ -1,3 +1,3 @@
-dumper = yaml.Dumper(StringIO.StringIO())
+dumper = yaml.Dumper(StringIO())
dumper.open()
dumper.open()
diff --git a/tests/data/serializer-is-closed-1.dumper-error b/tests/data/serializer-is-closed-1.dumper-error
index b6d5c7a..8e7e600 100644
--- a/tests/data/serializer-is-closed-1.dumper-error
+++ b/tests/data/serializer-is-closed-1.dumper-error
@@ -1,4 +1,4 @@
-dumper = yaml.Dumper(StringIO.StringIO())
+dumper = yaml.Dumper(StringIO())
dumper.open()
dumper.close()
dumper.open()
diff --git a/tests/data/serializer-is-closed-2.dumper-error b/tests/data/serializer-is-closed-2.dumper-error
index ff57b4d..89aef7e 100644
--- a/tests/data/serializer-is-closed-2.dumper-error
+++ b/tests/data/serializer-is-closed-2.dumper-error
@@ -1,4 +1,4 @@
-dumper = yaml.Dumper(StringIO.StringIO())
+dumper = yaml.Dumper(StringIO())
dumper.open()
dumper.close()
dumper.serialize(yaml.ScalarNode(tag='!foo', value='bar'))
diff --git a/tests/data/serializer-is-not-opened-1.dumper-error b/tests/data/serializer-is-not-opened-1.dumper-error
index 8dc6c1e..8f22e73 100644
--- a/tests/data/serializer-is-not-opened-1.dumper-error
+++ b/tests/data/serializer-is-not-opened-1.dumper-error
@@ -1,2 +1,2 @@
-dumper = yaml.Dumper(StringIO.StringIO())
+dumper = yaml.Dumper(StringIO())
dumper.close()
diff --git a/tests/data/serializer-is-not-opened-2.dumper-error b/tests/data/serializer-is-not-opened-2.dumper-error
index 6922b22..ebd9df1 100644
--- a/tests/data/serializer-is-not-opened-2.dumper-error
+++ b/tests/data/serializer-is-not-opened-2.dumper-error
@@ -1,2 +1,2 @@
-dumper = yaml.Dumper(StringIO.StringIO())
+dumper = yaml.Dumper(StringIO())
dumper.serialize(yaml.ScalarNode(tag='!foo', value='bar'))
diff --git a/tests/canonical.py b/tests/lib/canonical.py
index 41d111a..41d111a 100644
--- a/tests/canonical.py
+++ b/tests/lib/canonical.py
diff --git a/tests/test_all.py b/tests/lib/test_all.py
index fec4ae4..fec4ae4 100644
--- a/tests/test_all.py
+++ b/tests/lib/test_all.py
diff --git a/tests/test_appliance.py b/tests/lib/test_appliance.py
index 49783ef..d50d5a2 100644
--- a/tests/test_appliance.py
+++ b/tests/lib/test_appliance.py
@@ -23,6 +23,8 @@ def find_test_filenames(directory):
for filename in os.listdir(directory):
if os.path.isfile(os.path.join(directory, filename)):
base, ext = os.path.splitext(filename)
+ if base.endswith('-py3'):
+ continue
filenames.setdefault(base, []).append(ext)
filenames = filenames.items()
filenames.sort()
diff --git a/tests/test_build.py b/tests/lib/test_build.py
index 901e8ed..901e8ed 100644
--- a/tests/test_build.py
+++ b/tests/lib/test_build.py
diff --git a/tests/test_build_ext.py b/tests/lib/test_build_ext.py
index ff195d5..ff195d5 100644
--- a/tests/test_build_ext.py
+++ b/tests/lib/test_build_ext.py
diff --git a/tests/test_canonical.py b/tests/lib/test_canonical.py
index a851ef2..a851ef2 100644
--- a/tests/test_canonical.py
+++ b/tests/lib/test_canonical.py
diff --git a/tests/test_constructor.py b/tests/lib/test_constructor.py
index 0aacf17..0aacf17 100644
--- a/tests/test_constructor.py
+++ b/tests/lib/test_constructor.py
diff --git a/tests/test_emitter.py b/tests/lib/test_emitter.py
index 61fd941..61fd941 100644
--- a/tests/test_emitter.py
+++ b/tests/lib/test_emitter.py
diff --git a/tests/test_errors.py b/tests/lib/test_errors.py
index 5fd7c46..7dc9388 100644
--- a/tests/test_errors.py
+++ b/tests/lib/test_errors.py
@@ -50,7 +50,8 @@ test_emitter_error.unittest = ['.emitter-error']
def test_dumper_error(error_filename, verbose=False):
code = open(error_filename, 'rb').read()
try:
- import yaml, StringIO
+ import yaml
+ from StringIO import StringIO
exec code
except yaml.YAMLError, exc:
if verbose:
diff --git a/tests/test_mark.py b/tests/lib/test_mark.py
index f30a121..f30a121 100644
--- a/tests/test_mark.py
+++ b/tests/lib/test_mark.py
diff --git a/tests/test_reader.py b/tests/lib/test_reader.py
index 3576ae6..3576ae6 100644
--- a/tests/test_reader.py
+++ b/tests/lib/test_reader.py
diff --git a/tests/test_recursive.py b/tests/lib/test_recursive.py
index 6707fd4..6707fd4 100644
--- a/tests/test_recursive.py
+++ b/tests/lib/test_recursive.py
diff --git a/tests/test_representer.py b/tests/lib/test_representer.py
index f814705..f814705 100644
--- a/tests/test_representer.py
+++ b/tests/lib/test_representer.py
diff --git a/tests/test_resolver.py b/tests/lib/test_resolver.py
index 5566750..5566750 100644
--- a/tests/test_resolver.py
+++ b/tests/lib/test_resolver.py
diff --git a/tests/test_structure.py b/tests/lib/test_structure.py
index 61bcb80..61bcb80 100644
--- a/tests/test_structure.py
+++ b/tests/lib/test_structure.py
diff --git a/tests/test_tokens.py b/tests/lib/test_tokens.py
index 9613fa0..9613fa0 100644
--- a/tests/test_tokens.py
+++ b/tests/lib/test_tokens.py
diff --git a/tests/test_yaml.py b/tests/lib/test_yaml.py
index d195e1a..d195e1a 100644
--- a/tests/test_yaml.py
+++ b/tests/lib/test_yaml.py
diff --git a/tests/test_yaml_ext.py b/tests/lib/test_yaml_ext.py
index e18becf..e18becf 100644
--- a/tests/test_yaml_ext.py
+++ b/tests/lib/test_yaml_ext.py
diff --git a/tests/lib3/canonical.py b/tests/lib3/canonical.py
new file mode 100644
index 0000000..e04477b
--- /dev/null
+++ b/tests/lib3/canonical.py
@@ -0,0 +1,358 @@
+
+import yaml, yaml.composer, yaml.constructor, yaml.resolver
+
+class CanonicalError(yaml.YAMLError):
+ pass
+
+class CanonicalScanner:
+
+ def __init__(self, data):
+ if isinstance(data, bytes):
+ try:
+ data = data.decode('utf-8')
+ except UnicodeDecodeError:
+ raise CanonicalError("utf-8 stream is expected")
+ self.data = data+'\0'
+ self.index = 0
+ self.tokens = []
+ self.scanned = False
+
+ def check_token(self, *choices):
+ if not self.scanned:
+ self.scan()
+ if self.tokens:
+ if not choices:
+ return True
+ for choice in choices:
+ if isinstance(self.tokens[0], choice):
+ return True
+ return False
+
+ def peek_token(self):
+ if not self.scanned:
+ self.scan()
+ if self.tokens:
+ return self.tokens[0]
+
+ def get_token(self, choice=None):
+ if not self.scanned:
+ self.scan()
+ token = self.tokens.pop(0)
+ if choice and not isinstance(token, choice):
+ raise CanonicalError("unexpected token "+repr(token))
+ return token
+
+ def get_token_value(self):
+ token = self.get_token()
+ return token.value
+
+ def scan(self):
+ self.tokens.append(yaml.StreamStartToken(None, None))
+ while True:
+ self.find_token()
+ ch = self.data[self.index]
+ if ch == '\0':
+ self.tokens.append(yaml.StreamEndToken(None, None))
+ break
+ elif ch == '%':
+ self.tokens.append(self.scan_directive())
+ elif ch == '-' and self.data[self.index:self.index+3] == '---':
+ self.index += 3
+ self.tokens.append(yaml.DocumentStartToken(None, None))
+ elif ch == '[':
+ self.index += 1
+ self.tokens.append(yaml.FlowSequenceStartToken(None, None))
+ elif ch == '{':
+ self.index += 1
+ self.tokens.append(yaml.FlowMappingStartToken(None, None))
+ elif ch == ']':
+ self.index += 1
+ self.tokens.append(yaml.FlowSequenceEndToken(None, None))
+ elif ch == '}':
+ self.index += 1
+ self.tokens.append(yaml.FlowMappingEndToken(None, None))
+ elif ch == '?':
+ self.index += 1
+ self.tokens.append(yaml.KeyToken(None, None))
+ elif ch == ':':
+ self.index += 1
+ self.tokens.append(yaml.ValueToken(None, None))
+ elif ch == ',':
+ self.index += 1
+ self.tokens.append(yaml.FlowEntryToken(None, None))
+ elif ch == '*' or ch == '&':
+ self.tokens.append(self.scan_alias())
+ elif ch == '!':
+ self.tokens.append(self.scan_tag())
+ elif ch == '"':
+ self.tokens.append(self.scan_scalar())
+ else:
+ raise CanonicalError("invalid token")
+ self.scanned = True
+
+ DIRECTIVE = '%YAML 1.1'
+
+ def scan_directive(self):
+ if self.data[self.index:self.index+len(self.DIRECTIVE)] == self.DIRECTIVE and \
+ self.data[self.index+len(self.DIRECTIVE)] in ' \n\0':
+ self.index += len(self.DIRECTIVE)
+ return yaml.DirectiveToken('YAML', (1, 1), None, None)
+ else:
+ raise CanonicalError("invalid directive")
+
+ def scan_alias(self):
+ if self.data[self.index] == '*':
+ TokenClass = yaml.AliasToken
+ else:
+ TokenClass = yaml.AnchorToken
+ self.index += 1
+ start = self.index
+ while self.data[self.index] not in ', \n\0':
+ self.index += 1
+ value = self.data[start:self.index]
+ return TokenClass(value, None, None)
+
+ def scan_tag(self):
+ self.index += 1
+ start = self.index
+ while self.data[self.index] not in ' \n\0':
+ self.index += 1
+ value = self.data[start:self.index]
+ if not value:
+ value = '!'
+ elif value[0] == '!':
+ value = 'tag:yaml.org,2002:'+value[1:]
+ elif value[0] == '<' and value[-1] == '>':
+ value = value[1:-1]
+ else:
+ value = '!'+value
+ return yaml.TagToken(value, None, None)
+
+ QUOTE_CODES = {
+ 'x': 2,
+ 'u': 4,
+ 'U': 8,
+ }
+
+ QUOTE_REPLACES = {
+ '\\': '\\',
+ '\"': '\"',
+ ' ': ' ',
+ 'a': '\x07',
+ 'b': '\x08',
+ 'e': '\x1B',
+ 'f': '\x0C',
+ 'n': '\x0A',
+ 'r': '\x0D',
+ 't': '\x09',
+ 'v': '\x0B',
+ 'N': '\u0085',
+ 'L': '\u2028',
+ 'P': '\u2029',
+ '_': '_',
+ '0': '\x00',
+ }
+
+ def scan_scalar(self):
+ self.index += 1
+ chunks = []
+ start = self.index
+ ignore_spaces = False
+ while self.data[self.index] != '"':
+ if self.data[self.index] == '\\':
+ ignore_spaces = False
+ chunks.append(self.data[start:self.index])
+ self.index += 1
+ ch = self.data[self.index]
+ self.index += 1
+ if ch == '\n':
+ ignore_spaces = True
+ elif ch in self.QUOTE_CODES:
+ length = self.QUOTE_CODES[ch]
+ code = int(self.data[self.index:self.index+length], 16)
+ chunks.append(chr(code))
+ self.index += length
+ else:
+ if ch not in self.QUOTE_REPLACES:
+ raise CanonicalError("invalid escape code")
+ chunks.append(self.QUOTE_REPLACES[ch])
+ start = self.index
+ elif self.data[self.index] == '\n':
+ chunks.append(self.data[start:self.index])
+ chunks.append(' ')
+ self.index += 1
+ start = self.index
+ ignore_spaces = True
+ elif ignore_spaces and self.data[self.index] == ' ':
+ self.index += 1
+ start = self.index
+ else:
+ ignore_spaces = False
+ self.index += 1
+ chunks.append(self.data[start:self.index])
+ self.index += 1
+ return yaml.ScalarToken(''.join(chunks), False, None, None)
+
+ def find_token(self):
+ found = False
+ while not found:
+ while self.data[self.index] in ' \t':
+ self.index += 1
+ if self.data[self.index] == '#':
+ while self.data[self.index] != '\n':
+ self.index += 1
+ if self.data[self.index] == '\n':
+ self.index += 1
+ else:
+ found = True
+
+class CanonicalParser:
+
+ def __init__(self):
+ self.events = []
+ self.parsed = False
+
+ # stream: STREAM-START document* STREAM-END
+ def parse_stream(self):
+ self.get_token(yaml.StreamStartToken)
+ self.events.append(yaml.StreamStartEvent(None, None))
+ while not self.check_token(yaml.StreamEndToken):
+ if self.check_token(yaml.DirectiveToken, yaml.DocumentStartToken):
+ self.parse_document()
+ else:
+ raise CanonicalError("document is expected, got "+repr(self.tokens[0]))
+ self.get_token(yaml.StreamEndToken)
+ self.events.append(yaml.StreamEndEvent(None, None))
+
+ # document: DIRECTIVE? DOCUMENT-START node
+ def parse_document(self):
+ node = None
+ if self.check_token(yaml.DirectiveToken):
+ self.get_token(yaml.DirectiveToken)
+ self.get_token(yaml.DocumentStartToken)
+ self.events.append(yaml.DocumentStartEvent(None, None))
+ self.parse_node()
+ self.events.append(yaml.DocumentEndEvent(None, None))
+
+ # node: ALIAS | ANCHOR? TAG? (SCALAR|sequence|mapping)
+ def parse_node(self):
+ if self.check_token(yaml.AliasToken):
+ self.events.append(yaml.AliasEvent(self.get_token_value(), None, None))
+ else:
+ anchor = None
+ if self.check_token(yaml.AnchorToken):
+ anchor = self.get_token_value()
+ tag = None
+ if self.check_token(yaml.TagToken):
+ tag = self.get_token_value()
+ if self.check_token(yaml.ScalarToken):
+ self.events.append(yaml.ScalarEvent(anchor, tag, (False, False), self.get_token_value(), None, None))
+ elif self.check_token(yaml.FlowSequenceStartToken):
+ self.events.append(yaml.SequenceStartEvent(anchor, tag, None, None))
+ self.parse_sequence()
+ elif self.check_token(yaml.FlowMappingStartToken):
+ self.events.append(yaml.MappingStartEvent(anchor, tag, None, None))
+ self.parse_mapping()
+ else:
+ raise CanonicalError("SCALAR, '[', or '{' is expected, got "+repr(self.tokens[0]))
+
+ # sequence: SEQUENCE-START (node (ENTRY node)*)? ENTRY? SEQUENCE-END
+ def parse_sequence(self):
+ self.get_token(yaml.FlowSequenceStartToken)
+ if not self.check_token(yaml.FlowSequenceEndToken):
+ self.parse_node()
+ while not self.check_token(yaml.FlowSequenceEndToken):
+ self.get_token(yaml.FlowEntryToken)
+ if not self.check_token(yaml.FlowSequenceEndToken):
+ self.parse_node()
+ self.get_token(yaml.FlowSequenceEndToken)
+ self.events.append(yaml.SequenceEndEvent(None, None))
+
+ # mapping: MAPPING-START (map_entry (ENTRY map_entry)*)? ENTRY? MAPPING-END
+ def parse_mapping(self):
+ self.get_token(yaml.FlowMappingStartToken)
+ if not self.check_token(yaml.FlowMappingEndToken):
+ self.parse_map_entry()
+ while not self.check_token(yaml.FlowMappingEndToken):
+ self.get_token(yaml.FlowEntryToken)
+ if not self.check_token(yaml.FlowMappingEndToken):
+ self.parse_map_entry()
+ self.get_token(yaml.FlowMappingEndToken)
+ self.events.append(yaml.MappingEndEvent(None, None))
+
+ # map_entry: KEY node VALUE node
+ def parse_map_entry(self):
+ self.get_token(yaml.KeyToken)
+ self.parse_node()
+ self.get_token(yaml.ValueToken)
+ self.parse_node()
+
+ def parse(self):
+ self.parse_stream()
+ self.parsed = True
+
+ def get_event(self):
+ if not self.parsed:
+ self.parse()
+ return self.events.pop(0)
+
+ def check_event(self, *choices):
+ if not self.parsed:
+ self.parse()
+ if self.events:
+ if not choices:
+ return True
+ for choice in choices:
+ if isinstance(self.events[0], choice):
+ return True
+ return False
+
+ def peek_event(self):
+ if not self.parsed:
+ self.parse()
+ return self.events[0]
+
+class CanonicalLoader(CanonicalScanner, CanonicalParser,
+ yaml.composer.Composer, yaml.constructor.Constructor, yaml.resolver.Resolver):
+
+ def __init__(self, stream):
+ if hasattr(stream, 'read'):
+ stream = stream.read()
+ CanonicalScanner.__init__(self, stream)
+ CanonicalParser.__init__(self)
+ yaml.composer.Composer.__init__(self)
+ yaml.constructor.Constructor.__init__(self)
+ yaml.resolver.Resolver.__init__(self)
+
+yaml.CanonicalLoader = CanonicalLoader
+
+def canonical_scan(stream):
+ return yaml.scan(stream, Loader=CanonicalLoader)
+
+yaml.canonical_scan = canonical_scan
+
+def canonical_parse(stream):
+ return yaml.parse(stream, Loader=CanonicalLoader)
+
+yaml.canonical_parse = canonical_parse
+
+def canonical_compose(stream):
+ return yaml.compose(stream, Loader=CanonicalLoader)
+
+yaml.canonical_compose = canonical_compose
+
+def canonical_compose_all(stream):
+ return yaml.compose_all(stream, Loader=CanonicalLoader)
+
+yaml.canonical_compose_all = canonical_compose_all
+
+def canonical_load(stream):
+ return yaml.load(stream, Loader=CanonicalLoader)
+
+yaml.canonical_load = canonical_load
+
+def canonical_load_all(stream):
+ return yaml.load_all(stream, Loader=CanonicalLoader)
+
+yaml.canonical_load_all = canonical_load_all
+
diff --git a/tests/lib3/test_all.py b/tests/lib3/test_all.py
new file mode 100644
index 0000000..fec4ae4
--- /dev/null
+++ b/tests/lib3/test_all.py
@@ -0,0 +1,15 @@
+
+import sys, yaml, test_appliance
+
def main(args=None):
    """Run the pure-Python test suite, plus the libyaml extension suite
    when the extension was built."""
    import test_yaml
    suites = [test_yaml]
    if yaml.__with_libyaml__:
        import test_yaml_ext
        suites.append(test_yaml_ext)
    test_appliance.run(suites, args)

if __name__ == '__main__':
    main()
+
diff --git a/tests/lib3/test_appliance.py b/tests/lib3/test_appliance.py
new file mode 100644
index 0000000..81ff00b
--- /dev/null
+++ b/tests/lib3/test_appliance.py
@@ -0,0 +1,145 @@
+
+import sys, os, os.path, types, traceback, pprint
+
DATA = 'tests/data'

def find_test_functions(collections):
    """Collect, in sorted-name order, every function in *collections*
    (modules or dicts) that carries a 'unittest' attribute."""
    if not isinstance(collections, list):
        collections = [collections]
    functions = []
    for collection in collections:
        mapping = collection if isinstance(collection, dict) else vars(collection)
        for key in sorted(mapping):
            candidate = mapping[key]
            if isinstance(candidate, types.FunctionType) and hasattr(candidate, 'unittest'):
                functions.append(candidate)
    return functions
+
def find_test_filenames(directory):
    """Group the regular files in *directory* by base name.

    Returns a sorted list of (base, [extensions]) pairs; base names ending
    in '-py2' are Python-2-only fixtures and are skipped.
    """
    by_base = {}
    for entry in os.listdir(directory):
        if not os.path.isfile(os.path.join(directory, entry)):
            continue
        base, ext = os.path.splitext(entry)
        if base.endswith('-py2'):
            continue
        by_base.setdefault(base, []).append(ext)
    return sorted(by_base.items())
+
def parse_arguments(args):
    """Split command-line arguments into (functions, filenames, verbose).

    '-v'/'--verbose' (or the YAML_TEST_VERBOSE environment variable) turn on
    verbose output; the first remaining argument names a test function and
    the rest are data file base names.  YAML_TEST_FUNCTIONS and
    YAML_TEST_FILENAMES extend the respective lists.
    """
    if args is None:
        args = sys.argv[1:]
    verbose = False
    if '-v' in args:
        verbose = True
        args.remove('-v')
    if '--verbose' in args:
        verbose = True
        # Bug fix: the flag was previously left in args and was then
        # misinterpreted below as the test-function name.
        args.remove('--verbose')
    if 'YAML_TEST_VERBOSE' in os.environ:
        verbose = True
    include_functions = []
    if args:
        include_functions.append(args.pop(0))
    if 'YAML_TEST_FUNCTIONS' in os.environ:
        include_functions.extend(os.environ['YAML_TEST_FUNCTIONS'].split())
    include_filenames = []
    include_filenames.extend(args)
    if 'YAML_TEST_FILENAMES' in os.environ:
        include_filenames.extend(os.environ['YAML_TEST_FILENAMES'].split())
    return include_functions, include_filenames, verbose
+
def execute(function, filenames, verbose):
    """Run one test function against *filenames*.

    Returns a (name, filenames, kind, exc_info) tuple where kind is
    'SUCCESS', 'FAILURE' (AssertionError) or 'ERROR' (anything else).
    """
    name = function.__name__
    if verbose:
        sys.stdout.write('='*75+'\n')
        sys.stdout.write('%s(%s)...\n' % (name, ', '.join(filenames)))
    kind = 'SUCCESS'
    info = None
    try:
        function(verbose=verbose, *filenames)
    except Exception as error:
        info = sys.exc_info()
        kind = 'FAILURE' if isinstance(error, AssertionError) else 'ERROR'
        if verbose:
            traceback.print_exc(limit=1, file=sys.stdout)
        else:
            # Progress indicator: first letter of the outcome.
            sys.stdout.write(kind[0])
            sys.stdout.flush()
    else:
        if not verbose:
            sys.stdout.write('.')
            sys.stdout.flush()
    return (name, filenames, kind, info)
+
def display(results, verbose):
    """Print a summary of the results, with tracebacks and the offending
    data files for every failure or error."""
    if results and not verbose:
        sys.stdout.write('\n')
    total = len(results)
    failures = 0
    errors = 0
    for name, filenames, kind, info in results:
        if kind == 'SUCCESS':
            continue
        if kind == 'FAILURE':
            failures += 1
        if kind == 'ERROR':
            errors += 1
        sys.stdout.write('='*75+'\n')
        sys.stdout.write('%s(%s): %s\n' % (name, ', '.join(filenames), kind))
        if kind == 'ERROR':
            traceback.print_exception(file=sys.stdout, *info)
        else:
            # Assertion failures: print the traceback, then pretty-print the
            # assertion arguments separately since they may be large values.
            sys.stdout.write('Traceback (most recent call last):\n')
            traceback.print_tb(info[2], file=sys.stdout)
            sys.stdout.write('%s: see below\n' % info[0].__name__)
            sys.stdout.write('~'*75+'\n')
            for arg in info[1].args:
                pprint.pprint(arg, stream=sys.stdout)
        for filename in filenames:
            sys.stdout.write('-'*75+'\n')
            sys.stdout.write('%s:\n' % filename)
            # errors='replace': some fixture files intentionally contain
            # bytes that are not valid in the default encoding.
            data = open(filename, 'r', errors='replace').read()
            sys.stdout.write(data)
            if data and data[-1] != '\n':
                sys.stdout.write('\n')
    sys.stdout.write('='*75+'\n')
    sys.stdout.write('TESTS: %s\n' % total)
    if failures:
        sys.stdout.write('FAILURES: %s\n' % failures)
    if errors:
        sys.stdout.write('ERRORS: %s\n' % errors)

def run(collections, args=None):
    """Discover the test functions in *collections*, run each against the
    matching data files under DATA, and display a summary."""
    test_functions = find_test_functions(collections)
    test_filenames = find_test_filenames(DATA)
    include_functions, include_filenames, verbose = parse_arguments(args)
    results = []
    for function in test_functions:
        if include_functions and function.__name__ not in include_functions:
            continue
        if function.unittest:
            for base, exts in test_filenames:
                if include_filenames and base not in include_filenames:
                    continue
                filenames = []
                for ext in function.unittest:
                    if ext not in exts:
                        # Required extension missing for this base name:
                        # the for/else below only executes on a full match.
                        break
                    filenames.append(os.path.join(DATA, base+ext))
                else:
                    skip_exts = getattr(function, 'skip', [])
                    for skip_ext in skip_exts:
                        if skip_ext in exts:
                            break
                    else:
                        result = execute(function, filenames, verbose)
                        results.append(result)
        else:
            # A false/empty .unittest attribute means "run once, no data files".
            result = execute(function, [], verbose)
            results.append(result)
    display(results, verbose=verbose)
+
diff --git a/tests/lib3/test_build.py b/tests/lib3/test_build.py
new file mode 100644
index 0000000..901e8ed
--- /dev/null
+++ b/tests/lib3/test_build.py
@@ -0,0 +1,10 @@
+
if __name__ == '__main__':
    import sys, os, distutils.util
    # Prefer the freshly-built package under build/lib* over any installed
    # copy, so the suite tests the code that was just built.
    build_lib = 'build/lib'
    build_lib_ext = os.path.join('build', 'lib.%s-%s' % (distutils.util.get_platform(), sys.version[0:3]))
    sys.path.insert(0, build_lib)
    sys.path.insert(0, build_lib_ext)
    import test_yaml, test_appliance
    test_appliance.run(test_yaml)
+
diff --git a/tests/lib3/test_build_ext.py b/tests/lib3/test_build_ext.py
new file mode 100644
index 0000000..ff195d5
--- /dev/null
+++ b/tests/lib3/test_build_ext.py
@@ -0,0 +1,11 @@
+
+
if __name__ == '__main__':
    import sys, os, distutils.util
    # Prefer the freshly-built package (including the C extension under
    # build/lib.<platform>-<ver>) over any installed copy.
    build_lib = 'build/lib'
    build_lib_ext = os.path.join('build', 'lib.%s-%s' % (distutils.util.get_platform(), sys.version[0:3]))
    sys.path.insert(0, build_lib)
    sys.path.insert(0, build_lib_ext)
    import test_yaml_ext, test_appliance
    test_appliance.run(test_yaml_ext)
+
diff --git a/tests/lib3/test_canonical.py b/tests/lib3/test_canonical.py
new file mode 100644
index 0000000..a3b1153
--- /dev/null
+++ b/tests/lib3/test_canonical.py
@@ -0,0 +1,40 @@
+
+import yaml, canonical
+
def test_canonical_scanner(canonical_filename, verbose=False):
    """Scan a .canonical file with the canonical scanner; expect tokens."""
    data = open(canonical_filename, 'rb').read()
    tokens = list(yaml.canonical_scan(data))
    assert tokens, tokens
    if verbose:
        for token in tokens:
            print(token)

test_canonical_scanner.unittest = ['.canonical']

def test_canonical_parser(canonical_filename, verbose=False):
    """Parse a .canonical file with the canonical parser; expect events."""
    data = open(canonical_filename, 'rb').read()
    events = list(yaml.canonical_parse(data))
    assert events, events
    if verbose:
        for event in events:
            print(event)

test_canonical_parser.unittest = ['.canonical']

def test_canonical_error(data_filename, canonical_filename, verbose=False):
    """Loading the non-canonical .data file with the canonical loader must
    raise a YAMLError (skipped for .empty fixtures)."""
    data = open(data_filename, 'rb').read()
    try:
        output = list(yaml.canonical_load_all(data))
    except yaml.YAMLError as exc:
        if verbose:
            print(exc)
    else:
        raise AssertionError("expected an exception")

test_canonical_error.unittest = ['.data', '.canonical']
test_canonical_error.skip = ['.empty']

if __name__ == '__main__':
    import test_appliance
    test_appliance.run(globals())
+
diff --git a/tests/lib3/test_constructor.py b/tests/lib3/test_constructor.py
new file mode 100644
index 0000000..b4b0884
--- /dev/null
+++ b/tests/lib3/test_constructor.py
@@ -0,0 +1,258 @@
+
+import yaml
+import pprint
+
+import datetime
+import yaml.tokens
+
def execute(code):
    """Execute *code* (trusted Python source from a .code fixture) at module
    scope and return the 'value' global it is expected to assign."""
    global value
    # Bug fix: a bare exec(code) inside a function writes assignments into
    # the function frame's throwaway locals dict, so 'value' was never
    # visible here.  Run the code in the module namespace instead.
    exec(code, globals())
    return value
+
def _make_objects():
    """Create the loader/dumper classes and the sample native types used by
    the constructor/representer tests, publishing them as module globals."""
    global MyLoader, MyDumper, MyTestClass1, MyTestClass2, MyTestClass3, YAMLObject1, YAMLObject2, \
            AnObject, AnInstance, AState, ACustomState, InitArgs, InitArgsWithState, \
            NewArgs, NewArgsWithState, Reduce, ReduceWithState, MyInt, MyList, MyDict, \
            FixedOffset, execute

    class MyLoader(yaml.Loader):
        pass
    class MyDumper(yaml.Dumper):
        pass

    class MyTestClass1:
        def __init__(self, x, y=0, z=0):
            self.x = x
            self.y = y
            self.z = z
        def __eq__(self, other):
            if isinstance(other, MyTestClass1):
                # Bug fix: the old code built the 3-tuple
                # (self.__class__, self.__dict__ == other.__class__, other.__dict__),
                # which is always truthy; compare (class, attrs) pairs instead.
                return (self.__class__, self.__dict__) == (other.__class__, other.__dict__)
            else:
                return False

    def construct1(constructor, node):
        mapping = constructor.construct_mapping(node)
        return MyTestClass1(**mapping)
    def represent1(representer, native):
        return representer.represent_mapping("!tag1", native.__dict__)

    yaml.add_constructor("!tag1", construct1, Loader=MyLoader)
    yaml.add_representer(MyTestClass1, represent1, Dumper=MyDumper)

    class MyTestClass2(MyTestClass1, yaml.YAMLObject):
        yaml_loader = MyLoader
        yaml_dumper = MyDumper
        yaml_tag = "!tag2"
        def from_yaml(cls, constructor, node):
            x = constructor.construct_yaml_int(node)
            return cls(x=x)
        from_yaml = classmethod(from_yaml)
        def to_yaml(cls, representer, native):
            return representer.represent_scalar(cls.yaml_tag, str(native.x))
        to_yaml = classmethod(to_yaml)

    class MyTestClass3(MyTestClass2):
        yaml_tag = "!tag3"
        def from_yaml(cls, constructor, node):
            mapping = constructor.construct_mapping(node)
            if '=' in mapping:
                x = mapping['=']
                del mapping['=']
                mapping['x'] = x
            return cls(**mapping)
        from_yaml = classmethod(from_yaml)
        def to_yaml(cls, representer, native):
            return representer.represent_mapping(cls.yaml_tag, native.__dict__)
        to_yaml = classmethod(to_yaml)

    class YAMLObject1(yaml.YAMLObject):
        yaml_loader = MyLoader
        yaml_dumper = MyDumper
        yaml_tag = '!foo'
        def __init__(self, my_parameter=None, my_another_parameter=None):
            self.my_parameter = my_parameter
            self.my_another_parameter = my_another_parameter
        def __eq__(self, other):
            if isinstance(other, YAMLObject1):
                # Bug fix: compare (class, attrs) pairs instead of building
                # an always-truthy tuple (see MyTestClass1.__eq__).
                return (self.__class__, self.__dict__) == (other.__class__, other.__dict__)
            else:
                return False

    class YAMLObject2(yaml.YAMLObject):
        yaml_loader = MyLoader
        yaml_dumper = MyDumper
        yaml_tag = '!bar'
        def __init__(self, foo=1, bar=2, baz=3):
            self.foo = foo
            self.bar = bar
            self.baz = baz
        def __getstate__(self):
            return {1: self.foo, 2: self.bar, 3: self.baz}
        def __setstate__(self, state):
            self.foo = state[1]
            self.bar = state[2]
            self.baz = state[3]
        def __eq__(self, other):
            if isinstance(other, YAMLObject2):
                # Bug fix: compare (class, attrs) pairs instead of building
                # an always-truthy tuple (see MyTestClass1.__eq__).
                return (self.__class__, self.__dict__) == (other.__class__, other.__dict__)
            else:
                return False

    class AnObject:
        def __new__(cls, foo=None, bar=None, baz=None):
            self = object.__new__(cls)
            self.foo = foo
            self.bar = bar
            self.baz = baz
            return self
        # NOTE(review): __cmp__/cmp are Python 2 leftovers; never invoked on
        # Python 3 (equality goes through __eq__ below).
        def __cmp__(self, other):
            return cmp((type(self), self.foo, self.bar, self.baz),
                    (type(other), other.foo, other.bar, other.baz))
        def __eq__(self, other):
            return type(self) is type(other) and \
                    (self.foo, self.bar, self.baz) == (other.foo, other.bar, other.baz)

    class AnInstance:
        def __init__(self, foo=None, bar=None, baz=None):
            self.foo = foo
            self.bar = bar
            self.baz = baz
        # NOTE(review): Python 2 leftover, never invoked on Python 3.
        def __cmp__(self, other):
            return cmp((type(self), self.foo, self.bar, self.baz),
                    (type(other), other.foo, other.bar, other.baz))
        def __eq__(self, other):
            return type(self) is type(other) and \
                    (self.foo, self.bar, self.baz) == (other.foo, other.bar, other.baz)

    class AState(AnInstance):
        def __getstate__(self):
            return {
                '_foo': self.foo,
                '_bar': self.bar,
                '_baz': self.baz,
            }
        def __setstate__(self, state):
            self.foo = state['_foo']
            self.bar = state['_bar']
            self.baz = state['_baz']

    class ACustomState(AnInstance):
        def __getstate__(self):
            return (self.foo, self.bar, self.baz)
        def __setstate__(self, state):
            self.foo, self.bar, self.baz = state

    class NewArgs(AnObject):
        def __getnewargs__(self):
            return (self.foo, self.bar, self.baz)
        def __getstate__(self):
            return {}

    class NewArgsWithState(AnObject):
        def __getnewargs__(self):
            return (self.foo, self.bar)
        def __getstate__(self):
            return self.baz
        def __setstate__(self, state):
            self.baz = state

    InitArgs = NewArgs

    InitArgsWithState = NewArgsWithState

    class Reduce(AnObject):
        def __reduce__(self):
            return self.__class__, (self.foo, self.bar, self.baz)

    class ReduceWithState(AnObject):
        def __reduce__(self):
            return self.__class__, (self.foo, self.bar), self.baz
        def __setstate__(self, state):
            self.baz = state

    class MyInt(int):
        def __eq__(self, other):
            return type(self) is type(other) and int(self) == int(other)

    class MyList(list):
        def __init__(self, n=1):
            self.extend([None]*n)
        def __eq__(self, other):
            return type(self) is type(other) and list(self) == list(other)

    class MyDict(dict):
        def __init__(self, n=1):
            for k in range(n):
                self[k] = None
        def __eq__(self, other):
            return type(self) is type(other) and dict(self) == dict(other)

    class FixedOffset(datetime.tzinfo):
        def __init__(self, offset, name):
            self.__offset = datetime.timedelta(minutes=offset)
            self.__name = name
        def utcoffset(self, dt):
            return self.__offset
        def tzname(self, dt):
            return self.__name
        def dst(self, dt):
            return datetime.timedelta(0)
+
def _load_code(expression):
    # The .code fixture files are trusted Python expressions maintained in
    # this repository, so eval() is acceptable here.
    return eval(expression)
+
+def _serialize_value(data):
+ if isinstance(data, list):
+ return '[%s]' % ', '.join(map(_serialize_value, data))
+ elif isinstance(data, dict):
+ items = []
+ for key, value in data.items():
+ key = _serialize_value(key)
+ value = _serialize_value(value)
+ items.append("%s: %s" % (key, value))
+ items.sort()
+ return '{%s}' % ', '.join(items)
+ elif isinstance(data, datetime.datetime):
+ return repr(data.utctimetuple())
+ elif isinstance(data, float) and data != data:
+ return '?'
+ else:
+ return str(data)
+
def test_constructor_types(data_filename, code_filename, verbose=False):
    """Load the .data file with MyLoader and compare against the native
    value obtained by evaluating the matching .code file."""
    _make_objects()
    native1 = None
    native2 = None
    try:
        native1 = list(yaml.load_all(open(data_filename, 'rb'), Loader=MyLoader))
        if len(native1) == 1:
            native1 = native1[0]
        native2 = _load_code(open(code_filename, 'rb').read())
        try:
            if native1 == native2:
                return
        except TypeError:
            # Values that cannot be compared directly: fall through to the
            # serialized-form comparison below.
            pass
        if verbose:
            print("SERIALIZED NATIVE1:")
            print(_serialize_value(native1))
            print("SERIALIZED NATIVE2:")
            print(_serialize_value(native2))
        assert _serialize_value(native1) == _serialize_value(native2), (native1, native2)
    finally:
        if verbose:
            print("NATIVE1:")
            pprint.pprint(native1)
            print("NATIVE2:")
            pprint.pprint(native2)

test_constructor_types.unittest = ['.data', '.code']

if __name__ == '__main__':
    import sys, test_constructor
    # Alias this module as test_constructor so objects created here are
    # picklable/resolvable even when the module runs as __main__.
    sys.modules['test_constructor'] = sys.modules['__main__']
    import test_appliance
    test_appliance.run(globals())
+
diff --git a/tests/lib3/test_emitter.py b/tests/lib3/test_emitter.py
new file mode 100644
index 0000000..90d1652
--- /dev/null
+++ b/tests/lib3/test_emitter.py
@@ -0,0 +1,100 @@
+
+import yaml
+
def _compare_events(events1, events2):
    """Assert that two event streams are structurally equivalent."""
    assert len(events1) == len(events2), (events1, events2)
    for event1, event2 in zip(events1, events2):
        assert event1.__class__ == event2.__class__, (event1, event2)
        if isinstance(event1, yaml.NodeEvent):
            assert event1.anchor == event2.anchor, (event1, event2)
        if isinstance(event1, yaml.CollectionStartEvent):
            assert event1.tag == event2.tag, (event1, event2)
        if isinstance(event1, yaml.ScalarEvent):
            # Tags are only comparable when neither side resolved implicitly.
            if True not in event1.implicit+event2.implicit:
                assert event1.tag == event2.tag, (event1, event2)
            assert event1.value == event2.value, (event1, event2)

def test_emitter_on_data(data_filename, canonical_filename, verbose=False):
    """Parse the .data file, re-emit it, and check the round trip."""
    events = list(yaml.parse(open(data_filename, 'rb')))
    output = yaml.emit(events)
    if verbose:
        print("OUTPUT:")
        print(output)
    new_events = list(yaml.parse(output))
    _compare_events(events, new_events)

test_emitter_on_data.unittest = ['.data', '.canonical']

def test_emitter_on_canonical(canonical_filename, verbose=False):
    """Round-trip the .canonical file in both canonical and plain output."""
    events = list(yaml.parse(open(canonical_filename, 'rb')))
    for canonical in [False, True]:
        output = yaml.emit(events, canonical=canonical)
        if verbose:
            print("OUTPUT (canonical=%s):" % canonical)
            print(output)
        new_events = list(yaml.parse(output))
        _compare_events(events, new_events)

test_emitter_on_canonical.unittest = ['.canonical']

def test_emitter_styles(data_filename, canonical_filename, verbose=False):
    """Force every scalar/collection style and check each round trip."""
    for filename in [data_filename, canonical_filename]:
        events = list(yaml.parse(open(filename, 'rb')))
        for flow_style in [False, True]:
            for style in ['|', '>', '"', '\'', '']:
                styled_events = []
                for event in events:
                    if isinstance(event, yaml.ScalarEvent):
                        event = yaml.ScalarEvent(event.anchor, event.tag,
                                event.implicit, event.value, style=style)
                    elif isinstance(event, yaml.SequenceStartEvent):
                        event = yaml.SequenceStartEvent(event.anchor, event.tag,
                                event.implicit, flow_style=flow_style)
                    elif isinstance(event, yaml.MappingStartEvent):
                        event = yaml.MappingStartEvent(event.anchor, event.tag,
                                event.implicit, flow_style=flow_style)
                    styled_events.append(event)
                output = yaml.emit(styled_events)
                if verbose:
                    print("OUTPUT (filename=%r, flow_style=%r, style=%r)" % (filename, flow_style, style))
                    print(output)
                new_events = list(yaml.parse(output))
                _compare_events(events, new_events)

test_emitter_styles.unittest = ['.data', '.canonical']

class EventsLoader(yaml.Loader):
    """Loader that constructs yaml Event objects from .events fixture files;
    each node's tag names the event class (e.g. !Scalar -> ScalarEvent)."""

    def construct_event(self, node):
        if isinstance(node, yaml.ScalarNode):
            mapping = {}
        else:
            mapping = self.construct_mapping(node)
        class_name = str(node.tag[1:])+'Event'
        # Fill in the constructor arguments the fixture is allowed to omit.
        if class_name in ['AliasEvent', 'ScalarEvent', 'SequenceStartEvent', 'MappingStartEvent']:
            mapping.setdefault('anchor', None)
        if class_name in ['ScalarEvent', 'SequenceStartEvent', 'MappingStartEvent']:
            mapping.setdefault('tag', None)
        if class_name in ['SequenceStartEvent', 'MappingStartEvent']:
            mapping.setdefault('implicit', True)
        if class_name == 'ScalarEvent':
            mapping.setdefault('implicit', (False, True))
            mapping.setdefault('value', '')
        value = getattr(yaml, class_name)(**mapping)
        return value

# Every tag is handled by construct_event (None = fallback constructor).
EventsLoader.add_constructor(None, EventsLoader.construct_event)
+
def test_emitter_events(events_filename, verbose=False):
    """Emit a hand-written event list (.events fixture) and check the round
    trip through parse."""
    events = list(yaml.load(open(events_filename, 'rb'), Loader=EventsLoader))
    output = yaml.emit(events)
    if verbose:
        print("OUTPUT:")
        print(output)
    new_events = list(yaml.parse(output))
    _compare_events(events, new_events)

# Bug fix: register with the test appliance; without this attribute the
# function was silently skipped by test_appliance.find_test_functions
# (every other test in this file sets .unittest).
test_emitter_events.unittest = ['.events']
+
# Allow running this test module directly.
if __name__ == '__main__':
    import test_appliance
    test_appliance.run(globals())
+
diff --git a/tests/lib3/test_errors.py b/tests/lib3/test_errors.py
new file mode 100644
index 0000000..a3f86af
--- /dev/null
+++ b/tests/lib3/test_errors.py
@@ -0,0 +1,67 @@
+
+import yaml, test_emitter
+
def test_loader_error(error_filename, verbose=False):
    """Loading a .loader-error fixture (as a stream) must raise YAMLError."""
    try:
        list(yaml.load_all(open(error_filename, 'rb')))
    except yaml.YAMLError as exc:
        if verbose:
            print("%s:" % exc.__class__.__name__, exc)
    else:
        raise AssertionError("expected an exception")

test_loader_error.unittest = ['.loader-error']

def test_loader_error_string(error_filename, verbose=False):
    """Same as test_loader_error, but loading from a bytes string."""
    try:
        list(yaml.load_all(open(error_filename, 'rb').read()))
    except yaml.YAMLError as exc:
        if verbose:
            print("%s:" % exc.__class__.__name__, exc)
    else:
        raise AssertionError("expected an exception")

test_loader_error_string.unittest = ['.loader-error']

def test_loader_error_single(error_filename, verbose=False):
    """Single-document load of a .single-loader-error fixture must fail."""
    try:
        yaml.load(open(error_filename, 'rb').read())
    except yaml.YAMLError as exc:
        if verbose:
            print("%s:" % exc.__class__.__name__, exc)
    else:
        raise AssertionError("expected an exception")

test_loader_error_single.unittest = ['.single-loader-error']

def test_emitter_error(error_filename, verbose=False):
    """Emitting the event stream described by a .emitter-error fixture must
    raise YAMLError."""
    events = list(yaml.load(open(error_filename, 'rb'),
            Loader=test_emitter.EventsLoader))
    try:
        yaml.emit(events)
    except yaml.YAMLError as exc:
        if verbose:
            print("%s:" % exc.__class__.__name__, exc)
    else:
        raise AssertionError("expected an exception")

test_emitter_error.unittest = ['.emitter-error']

def test_dumper_error(error_filename, verbose=False):
    """Executing a .dumper-error fixture (trusted repository code) must
    raise YAMLError."""
    code = open(error_filename, 'rb').read()
    try:
        # The fixtures expect 'yaml' and 'StringIO' to be in scope.
        import yaml
        from io import StringIO
        exec(code)
    except yaml.YAMLError as exc:
        if verbose:
            print("%s:" % exc.__class__.__name__, exc)
    else:
        raise AssertionError("expected an exception")

test_dumper_error.unittest = ['.dumper-error']

if __name__ == '__main__':
    import test_appliance
    test_appliance.run(globals())
+
diff --git a/tests/lib3/test_mark.py b/tests/lib3/test_mark.py
new file mode 100644
index 0000000..09eea2e
--- /dev/null
+++ b/tests/lib3/test_mark.py
@@ -0,0 +1,32 @@
+
+import yaml
+
def test_marks(marks_filename, verbose=False):
    """Check Mark.get_snippet(): for every '---'-separated input the second
    snippet line (the pointer) must point at the '*' character."""
    inputs = open(marks_filename, 'r').read().split('---\n')[1:]
    for input in inputs:
        # Locate the '*' and track its (line, column) position by hand.
        index = 0
        line = 0
        column = 0
        while input[index] != '*':
            if input[index] == '\n':
                line += 1
                column = 0
            else:
                column += 1
            index += 1
        mark = yaml.Mark(marks_filename, index, line, column, input, index)
        snippet = mark.get_snippet(indent=2, max_length=79)
        if verbose:
            print(snippet)
        assert isinstance(snippet, str), type(snippet)
        assert snippet.count('\n') == 1, snippet.count('\n')
        data, pointer = snippet.split('\n')
        assert len(data) < 82, len(data)
        # The caret line is all spaces plus '^'; its length locates the '*'.
        assert data[len(pointer)-1] == '*', data[len(pointer)-1]

test_marks.unittest = ['.marks']

if __name__ == '__main__':
    import test_appliance
    test_appliance.run(globals())
+
diff --git a/tests/lib3/test_reader.py b/tests/lib3/test_reader.py
new file mode 100644
index 0000000..c07b346
--- /dev/null
+++ b/tests/lib3/test_reader.py
@@ -0,0 +1,34 @@
+
+import yaml.reader
+
def _run_reader(data, verbose):
    """Drain a Reader over *data*; a ReaderError is the expected outcome."""
    try:
        stream = yaml.reader.Reader(data)
        while stream.peek() != '\0':
            stream.forward()
    except yaml.reader.ReaderError as exc:
        if verbose:
            print(exc)
    else:
        raise AssertionError("expected an exception")

def test_stream_error(error_filename, verbose=False):
    """Feed a .stream-error fixture to the Reader as a binary stream, raw
    bytes, decoded text and a text stream; each must raise ReaderError."""
    _run_reader(open(error_filename, 'rb'), verbose)
    _run_reader(open(error_filename, 'rb').read(), verbose)
    for encoding in ['utf-8', 'utf-16-le', 'utf-16-be']:
        try:
            data = open(error_filename, 'rb').read().decode(encoding)
            break
        except UnicodeDecodeError:
            pass
    else:
        # Undecodable in every candidate encoding: nothing more to test.
        return
    _run_reader(data, verbose)
    _run_reader(open(error_filename, encoding=encoding), verbose)

test_stream_error.unittest = ['.stream-error']

if __name__ == '__main__':
    import test_appliance
    test_appliance.run(globals())
+
diff --git a/tests/lib3/test_recursive.py b/tests/lib3/test_recursive.py
new file mode 100644
index 0000000..321a75f
--- /dev/null
+++ b/tests/lib3/test_recursive.py
@@ -0,0 +1,51 @@
+
+import yaml
+
class AnInstance:
    """Sample object used to build self-referential structures."""

    def __init__(self, foo, bar):
        self.foo = foo
        self.bar = bar

    def __repr__(self):
        # repr() of a recursive structure can hit the recursion limit;
        # fall back to a placeholder form in that case.
        try:
            return "%s(foo=%r, bar=%r)" % (self.__class__.__name__,
                    self.foo, self.bar)
        except RuntimeError:
            return "%s(foo=..., bar=...)" % self.__class__.__name__

class AnInstanceWithState(AnInstance):
    """Variant that serializes through __getstate__/__setstate__."""

    def __getstate__(self):
        return {'attributes': [self.foo, self.bar]}

    def __setstate__(self, state):
        self.foo, self.bar = state['attributes']

def test_recursive(recursive_filename, verbose=False):
    """Dump a recursive value built by the .recursive fixture, reload it,
    and check the dump is stable across the round trip."""
    context = globals().copy()
    exec(open(recursive_filename, 'rb').read(), context)
    value1 = context['value']
    output1 = None
    value2 = None
    output2 = None
    try:
        output1 = yaml.dump(value1)
        value2 = yaml.load(output1)
        output2 = yaml.dump(value2)
        assert output1 == output2, (output1, output2)
    finally:
        if verbose:
            print("VALUE1:", value1)
            print("VALUE2:", value2)
            print("OUTPUT1:")
            print(output1)
            print("OUTPUT2:")
            print(output2)

test_recursive.unittest = ['.recursive']

if __name__ == '__main__':
    import test_appliance
    test_appliance.run(globals())
+
diff --git a/tests/lib3/test_representer.py b/tests/lib3/test_representer.py
new file mode 100644
index 0000000..c619d13
--- /dev/null
+++ b/tests/lib3/test_representer.py
@@ -0,0 +1,42 @@
+
+import yaml
+import test_constructor
+import pprint
+
def test_representer_types(code_filename, verbose=False):
    """Dump the native value from a .code fixture and reload it; the round
    trip must preserve the value (checked both with and without unicode)."""
    test_constructor._make_objects()
    for allow_unicode in [False, True]:
        native1 = test_constructor._load_code(open(code_filename, 'rb').read())
        native2 = None
        try:
            output = yaml.dump(native1, Dumper=test_constructor.MyDumper,
                    allow_unicode=allow_unicode)
            native2 = yaml.load(output, Loader=test_constructor.MyLoader)
            try:
                if native1 == native2:
                    continue
            except TypeError:
                # Not directly comparable: use the serialized form instead.
                pass
            value1 = test_constructor._serialize_value(native1)
            value2 = test_constructor._serialize_value(native2)
            if verbose:
                print("SERIALIZED NATIVE1:")
                print(value1)
                print("SERIALIZED NATIVE2:")
                print(value2)
            assert value1 == value2, (native1, native2)
        finally:
            if verbose:
                print("NATIVE1:")
                pprint.pprint(native1)
                print("NATIVE2:")
                pprint.pprint(native2)
                print("OUTPUT:")
                print(output)

test_representer_types.unittest = ['.code']

if __name__ == '__main__':
    import test_appliance
    test_appliance.run(globals())
+
diff --git a/tests/lib3/test_resolver.py b/tests/lib3/test_resolver.py
new file mode 100644
index 0000000..f059dab
--- /dev/null
+++ b/tests/lib3/test_resolver.py
@@ -0,0 +1,92 @@
+
+import yaml
+import pprint
+
def test_implicit_resolver(data_filename, detect_filename, verbose=False):
    """Every scalar in the .data sequence must resolve to the tag recorded
    in the matching .detect file."""
    correct_tag = None
    node = None
    try:
        correct_tag = open(detect_filename, 'r').read().strip()
        node = yaml.compose(open(data_filename, 'rb'))
        assert isinstance(node, yaml.SequenceNode), node
        for scalar in node.value:
            assert isinstance(scalar, yaml.ScalarNode), scalar
            assert scalar.tag == correct_tag, (scalar.tag, correct_tag)
    finally:
        if verbose:
            print("CORRECT TAG:", correct_tag)
            if hasattr(node, 'value'):
                print("CHILDREN:")
                pprint.pprint(node.value)

test_implicit_resolver.unittest = ['.data', '.detect']

def _make_path_loader_and_dumper():
    """Create MyLoader/MyDumper with a set of path resolvers installed,
    publishing them as module globals."""
    global MyLoader, MyDumper

    class MyLoader(yaml.Loader):
        pass
    class MyDumper(yaml.Dumper):
        pass

    yaml.add_path_resolver('!root', [],
            Loader=MyLoader, Dumper=MyDumper)
    yaml.add_path_resolver('!root/scalar', [], str,
            Loader=MyLoader, Dumper=MyDumper)
    yaml.add_path_resolver('!root/key11/key12/*', ['key11', 'key12'],
            Loader=MyLoader, Dumper=MyDumper)
    yaml.add_path_resolver('!root/key21/1/*', ['key21', 1],
            Loader=MyLoader, Dumper=MyDumper)
    yaml.add_path_resolver('!root/key31/*/*/key14/map', ['key31', None, None, 'key14'], dict,
            Loader=MyLoader, Dumper=MyDumper)

    return MyLoader, MyDumper

def _convert_node(node):
    """Convert a node tree to nested (tag, value) tuples for comparison."""
    if isinstance(node, yaml.ScalarNode):
        return (node.tag, node.value)
    elif isinstance(node, yaml.SequenceNode):
        value = []
        for item in node.value:
            value.append(_convert_node(item))
        return (node.tag, value)
    elif isinstance(node, yaml.MappingNode):
        value = []
        for key, item in node.value:
            value.append((_convert_node(key), _convert_node(item)))
        return (node.tag, value)

def test_path_resolver_loader(data_filename, path_filename, verbose=False):
    """Composing .data with the path resolvers must match the explicitly
    tagged .path file composed with the default loader."""
    _make_path_loader_and_dumper()
    nodes1 = list(yaml.compose_all(open(data_filename, 'rb').read(), Loader=MyLoader))
    nodes2 = list(yaml.compose_all(open(path_filename, 'rb').read()))
    try:
        for node1, node2 in zip(nodes1, nodes2):
            data1 = _convert_node(node1)
            data2 = _convert_node(node2)
            assert data1 == data2, (data1, data2)
    finally:
        if verbose:
            print(yaml.serialize_all(nodes1))

test_path_resolver_loader.unittest = ['.data', '.path']

def test_path_resolver_dumper(data_filename, path_filename, verbose=False):
    """Serializing either file with the path-resolver dumper must produce
    output equivalent to the .data file."""
    _make_path_loader_and_dumper()
    for filename in [data_filename, path_filename]:
        output = yaml.serialize_all(yaml.compose_all(open(filename, 'rb')), Dumper=MyDumper)
        if verbose:
            print(output)
        nodes1 = yaml.compose_all(output)
        nodes2 = yaml.compose_all(open(data_filename, 'rb'))
        for node1, node2 in zip(nodes1, nodes2):
            data1 = _convert_node(node1)
            data2 = _convert_node(node2)
            assert data1 == data2, (data1, data2)

test_path_resolver_dumper.unittest = ['.data', '.path']

if __name__ == '__main__':
    import test_appliance
    test_appliance.run(globals())
+
diff --git a/tests/lib3/test_structure.py b/tests/lib3/test_structure.py
new file mode 100644
index 0000000..4d9c4ea
--- /dev/null
+++ b/tests/lib3/test_structure.py
@@ -0,0 +1,187 @@
+
+import yaml, canonical
+import pprint
+
def _convert_structure(loader):
    """Consume one node's worth of events and reduce it to the simple
    structure notation used by the .structure fixtures (True/None for
    scalars, lists/pairs for collections, '*' for aliases)."""
    if loader.check_event(yaml.ScalarEvent):
        event = loader.get_event()
        if event.tag or event.anchor or event.value:
            return True
        else:
            return None
    elif loader.check_event(yaml.SequenceStartEvent):
        loader.get_event()
        sequence = []
        while not loader.check_event(yaml.SequenceEndEvent):
            sequence.append(_convert_structure(loader))
        loader.get_event()
        return sequence
    elif loader.check_event(yaml.MappingStartEvent):
        loader.get_event()
        mapping = []
        while not loader.check_event(yaml.MappingEndEvent):
            key = _convert_structure(loader)
            value = _convert_structure(loader)
            mapping.append((key, value))
        loader.get_event()
        return mapping
    elif loader.check_event(yaml.AliasEvent):
        loader.get_event()
        return '*'
    else:
        loader.get_event()
        return '?'

def test_structure(data_filename, structure_filename, verbose=False):
    """The event structure of the .data file must match the Python literal
    recorded in the .structure file."""
    nodes1 = []
    nodes2 = eval(open(structure_filename, 'r').read())
    try:
        loader = yaml.Loader(open(data_filename, 'rb'))
        while loader.check_event():
            # Stream/document boundary events carry no structure.
            if loader.check_event(yaml.StreamStartEvent, yaml.StreamEndEvent,
                    yaml.DocumentStartEvent, yaml.DocumentEndEvent):
                loader.get_event()
                continue
            nodes1.append(_convert_structure(loader))
        if len(nodes1) == 1:
            nodes1 = nodes1[0]
        assert nodes1 == nodes2, (nodes1, nodes2)
    finally:
        if verbose:
            print("NODES1:")
            pprint.pprint(nodes1)
            print("NODES2:")
            pprint.pprint(nodes2)

test_structure.unittest = ['.data', '.structure']

def _compare_events(events1, events2, full=False):
    """Assert two event streams are equivalent; with full=True, anchors and
    implicitly resolved tags must match exactly too."""
    assert len(events1) == len(events2), (len(events1), len(events2))
    for event1, event2 in zip(events1, events2):
        assert event1.__class__ == event2.__class__, (event1, event2)
        if isinstance(event1, yaml.AliasEvent) and full:
            assert event1.anchor == event2.anchor, (event1, event2)
        if isinstance(event1, (yaml.ScalarEvent, yaml.CollectionStartEvent)):
            if (event1.tag not in [None, '!'] and event2.tag not in [None, '!']) or full:
                assert event1.tag == event2.tag, (event1, event2)
        if isinstance(event1, yaml.ScalarEvent):
            assert event1.value == event2.value, (event1, event2)

def test_parser(data_filename, canonical_filename, verbose=False):
    """Parsing the .data file must match the canonical parse of .canonical."""
    events1 = None
    events2 = None
    try:
        events1 = list(yaml.parse(open(data_filename, 'rb')))
        events2 = list(yaml.canonical_parse(open(canonical_filename, 'rb')))
        _compare_events(events1, events2)
    finally:
        if verbose:
            print("EVENTS1:")
            pprint.pprint(events1)
            print("EVENTS2:")
            pprint.pprint(events2)

test_parser.unittest = ['.data', '.canonical']

def test_parser_on_canonical(canonical_filename, verbose=False):
    """The regular and canonical parsers must agree exactly on .canonical."""
    events1 = None
    events2 = None
    try:
        events1 = list(yaml.parse(open(canonical_filename, 'rb')))
        events2 = list(yaml.canonical_parse(open(canonical_filename, 'rb')))
        _compare_events(events1, events2, full=True)
    finally:
        if verbose:
            print("EVENTS1:")
            pprint.pprint(events1)
            print("EVENTS2:")
            pprint.pprint(events2)

test_parser_on_canonical.unittest = ['.canonical']

def _compare_nodes(node1, node2):
    """Recursively assert two node trees are equivalent."""
    assert node1.__class__ == node2.__class__, (node1, node2)
    assert node1.tag == node2.tag, (node1, node2)
    if isinstance(node1, yaml.ScalarNode):
        assert node1.value == node2.value, (node1, node2)
    else:
        assert len(node1.value) == len(node2.value), (node1, node2)
        for item1, item2 in zip(node1.value, node2.value):
            # Mapping items are (key, value) pairs; wrap sequence items so
            # both cases share the same comparison loop.
            if not isinstance(item1, tuple):
                item1 = (item1,)
                item2 = (item2,)
            for subnode1, subnode2 in zip(item1, item2):
                _compare_nodes(subnode1, subnode2)

def test_composer(data_filename, canonical_filename, verbose=False):
    """Composing .data must yield the same node trees as the canonical
    composition of .canonical."""
    nodes1 = None
    nodes2 = None
    try:
        nodes1 = list(yaml.compose_all(open(data_filename, 'rb')))
        nodes2 = list(yaml.canonical_compose_all(open(canonical_filename, 'rb')))
        assert len(nodes1) == len(nodes2), (len(nodes1), len(nodes2))
        for node1, node2 in zip(nodes1, nodes2):
            _compare_nodes(node1, node2)
    finally:
        if verbose:
            print("NODES1:")
            pprint.pprint(nodes1)
            print("NODES2:")
            pprint.pprint(nodes2)

test_composer.unittest = ['.data', '.canonical']

def _make_loader():
    """Create MyLoader, which builds hashable/sortable structures (tuples
    and sorted pair lists) so documents can be compared with ==."""
    global MyLoader

    class MyLoader(yaml.Loader):
        def construct_sequence(self, node):
            return tuple(yaml.Loader.construct_sequence(self, node))
        def construct_mapping(self, node):
            pairs = self.construct_pairs(node)
            pairs.sort()
            return pairs
        def construct_undefined(self, node):
            return self.construct_scalar(node)

    MyLoader.add_constructor('tag:yaml.org,2002:map', MyLoader.construct_mapping)
    MyLoader.add_constructor(None, MyLoader.construct_undefined)

def _make_canonical_loader():
    """Same as _make_loader, built on the canonical loader."""
    global MyCanonicalLoader

    class MyCanonicalLoader(yaml.CanonicalLoader):
        def construct_sequence(self, node):
            return tuple(yaml.CanonicalLoader.construct_sequence(self, node))
        def construct_mapping(self, node):
            pairs = self.construct_pairs(node)
            pairs.sort()
            return pairs
        def construct_undefined(self, node):
            return self.construct_scalar(node)

    MyCanonicalLoader.add_constructor('tag:yaml.org,2002:map', MyCanonicalLoader.construct_mapping)
    MyCanonicalLoader.add_constructor(None, MyCanonicalLoader.construct_undefined)

def test_constructor(data_filename, canonical_filename, verbose=False):
    """Loading .data and .canonical through the comparable loaders must
    yield equal native values."""
    _make_loader()
    _make_canonical_loader()
    native1 = None
    native2 = None
    try:
        native1 = list(yaml.load_all(open(data_filename, 'rb'), Loader=MyLoader))
        native2 = list(yaml.load_all(open(canonical_filename, 'rb'), Loader=MyCanonicalLoader))
        assert native1 == native2, (native1, native2)
    finally:
        if verbose:
            print("NATIVE1:")
            pprint.pprint(native1)
            print("NATIVE2:")
            pprint.pprint(native2)

test_constructor.unittest = ['.data', '.canonical']

if __name__ == '__main__':
    import test_appliance
    test_appliance.run(globals())
+
diff --git a/tests/lib3/test_tokens.py b/tests/lib3/test_tokens.py
new file mode 100644
index 0000000..828945a
--- /dev/null
+++ b/tests/lib3/test_tokens.py
@@ -0,0 +1,77 @@
+
+import yaml
+import pprint
+
+# Tokens mnemonic:
+# directive: %
+# document_start: ---
+# document_end: ...
+# alias: *
+# anchor: &
+# tag: !
+# scalar: _
+# block_sequence_start: [[
+# block_mapping_start: {{
+# block_end: ]}
+# flow_sequence_start: [
+# flow_sequence_end: ]
+# flow_mapping_start: {
+# flow_mapping_end: }
+# entry: ,
+# key: ?
+# value: :
+
# Map each token class to the mnemonic used in the expected '.tokens'
# files (see the legend above).  Note that block and flow entry tokens
# intentionally share the ',' mnemonic.
_replaces = {
    yaml.DirectiveToken: '%',
    yaml.DocumentStartToken: '---',
    yaml.DocumentEndToken: '...',
    yaml.AliasToken: '*',
    yaml.AnchorToken: '&',
    yaml.TagToken: '!',
    yaml.ScalarToken: '_',
    yaml.BlockSequenceStartToken: '[[',
    yaml.BlockMappingStartToken: '{{',
    yaml.BlockEndToken: ']}',
    yaml.FlowSequenceStartToken: '[',
    yaml.FlowSequenceEndToken: ']',
    yaml.FlowMappingStartToken: '{',
    yaml.FlowMappingEndToken: '}',
    yaml.BlockEntryToken: ',',
    yaml.FlowEntryToken: ',',
    yaml.KeyToken: '?',
    yaml.ValueToken: ':',
}
+
def test_tokens(data_filename, tokens_filename, verbose=False):
    """Scan the data file and compare the mnemonic token stream against the
    whitespace-separated expectations in the '.tokens' file."""
    tokens1 = []
    # 'with' blocks close the handles deterministically; the original
    # version leaked both file objects.
    with open(tokens_filename, 'r') as file:
        tokens2 = file.read().split()
    try:
        with open(data_filename, 'rb') as file:
            for token in yaml.scan(file):
                # Stream start/end tokens have no mnemonic and are skipped.
                if not isinstance(token, (yaml.StreamStartToken, yaml.StreamEndToken)):
                    tokens1.append(_replaces[token.__class__])
    finally:
        if verbose:
            print("TOKENS1:", ' '.join(tokens1))
            print("TOKENS2:", ' '.join(tokens2))
    assert len(tokens1) == len(tokens2), (tokens1, tokens2)
    for token1, token2 in zip(tokens1, tokens2):
        assert token1 == token2, (token1, token2)

test_tokens.unittest = ['.data', '.tokens']
+
def test_scanner(data_filename, canonical_filename, verbose=False):
    """Smoke-test the scanner on both the data file and its canonical form;
    any scanner error propagates out of the loop."""
    for filename in [data_filename, canonical_filename]:
        tokens = []
        try:
            # 'with' closes the handle; the original version leaked it.
            with open(filename, 'rb') as file:
                for token in yaml.scan(file):
                    tokens.append(token.__class__.__name__)
        finally:
            if verbose:
                pprint.pprint(tokens)

test_scanner.unittest = ['.data', '.canonical']
+
if __name__ == '__main__':
    # Allow running this test module directly through the shared driver.
    import test_appliance
    test_appliance.run(globals())
+
diff --git a/tests/lib3/test_yaml.py b/tests/lib3/test_yaml.py
new file mode 100644
index 0000000..d195e1a
--- /dev/null
+++ b/tests/lib3/test_yaml.py
@@ -0,0 +1,17 @@
+
+from test_mark import *
+from test_reader import *
+from test_canonical import *
+from test_tokens import *
+from test_structure import *
+from test_errors import *
+from test_resolver import *
+from test_constructor import *
+from test_emitter import *
+from test_representer import *
+from test_recursive import *
+
if __name__ == '__main__':
    # Run the combined suite (all star-imported test modules) directly.
    import test_appliance
    test_appliance.run(globals())
+
diff --git a/tests/lib3/test_yaml_ext.py b/tests/lib3/test_yaml_ext.py
new file mode 100644
index 0000000..57c02c6
--- /dev/null
+++ b/tests/lib3/test_yaml_ext.py
@@ -0,0 +1,273 @@
+
+import _yaml, yaml
+import types, pprint
+
# Keep references to the pure-Python implementations under Py* names so
# that _tear_down() can restore them after _set_up() swaps in the C
# (libyaml) classes.
yaml.PyBaseLoader = yaml.BaseLoader
yaml.PySafeLoader = yaml.SafeLoader
yaml.PyLoader = yaml.Loader
yaml.PyBaseDumper = yaml.BaseDumper
yaml.PySafeDumper = yaml.SafeDumper
yaml.PyDumper = yaml.Dumper
+
# Loader-side wrappers around the public yaml entry points that default to
# the C (libyaml) classes.  The old_* names preserve the original functions
# so _tear_down() can restore them after _set_up() installs the new_* ones.
old_scan = yaml.scan
def new_scan(stream, Loader=yaml.CLoader):
    return old_scan(stream, Loader)

old_parse = yaml.parse
def new_parse(stream, Loader=yaml.CLoader):
    return old_parse(stream, Loader)

old_compose = yaml.compose
def new_compose(stream, Loader=yaml.CLoader):
    return old_compose(stream, Loader)

old_compose_all = yaml.compose_all
def new_compose_all(stream, Loader=yaml.CLoader):
    return old_compose_all(stream, Loader)

old_load = yaml.load
def new_load(stream, Loader=yaml.CLoader):
    return old_load(stream, Loader)

old_load_all = yaml.load_all
def new_load_all(stream, Loader=yaml.CLoader):
    return old_load_all(stream, Loader)

old_safe_load = yaml.safe_load
def new_safe_load(stream):
    return old_load(stream, yaml.CSafeLoader)

old_safe_load_all = yaml.safe_load_all
def new_safe_load_all(stream):
    return old_load_all(stream, yaml.CSafeLoader)
+
# Dumper-side wrappers around the public yaml entry points that default to
# the C (libyaml) classes.  The old_* names preserve the original functions
# so _tear_down() can restore them after _set_up() installs the new_* ones.
old_emit = yaml.emit
def new_emit(events, stream=None, Dumper=yaml.CDumper, **kwds):
    return old_emit(events, stream, Dumper, **kwds)

old_serialize = yaml.serialize
def new_serialize(node, stream=None, Dumper=yaml.CDumper, **kwds):
    # stream now defaults to None for consistency with new_serialize_all()
    # and the other wrappers; previously it was a required argument.
    return old_serialize(node, stream, Dumper, **kwds)

old_serialize_all = yaml.serialize_all
def new_serialize_all(nodes, stream=None, Dumper=yaml.CDumper, **kwds):
    return old_serialize_all(nodes, stream, Dumper, **kwds)

old_dump = yaml.dump
def new_dump(data, stream=None, Dumper=yaml.CDumper, **kwds):
    return old_dump(data, stream, Dumper, **kwds)

old_dump_all = yaml.dump_all
def new_dump_all(documents, stream=None, Dumper=yaml.CDumper, **kwds):
    return old_dump_all(documents, stream, Dumper, **kwds)

old_safe_dump = yaml.safe_dump
def new_safe_dump(data, stream=None, **kwds):
    return old_dump(data, stream, yaml.CSafeDumper, **kwds)

old_safe_dump_all = yaml.safe_dump_all
def new_safe_dump_all(documents, stream=None, **kwds):
    return old_dump_all(documents, stream, yaml.CSafeDumper, **kwds)
+
def _set_up():
    """Install the C (libyaml) classes and the new_* wrapper entry points
    as the module-level defaults of the yaml package."""
    yaml.BaseLoader = yaml.CBaseLoader
    yaml.SafeLoader = yaml.CSafeLoader
    yaml.Loader = yaml.CLoader
    yaml.BaseDumper = yaml.CBaseDumper
    yaml.SafeDumper = yaml.CSafeDumper
    yaml.Dumper = yaml.CDumper
    yaml.scan = new_scan
    yaml.parse = new_parse
    yaml.compose = new_compose
    yaml.compose_all = new_compose_all
    yaml.load = new_load
    yaml.load_all = new_load_all
    yaml.safe_load = new_safe_load
    yaml.safe_load_all = new_safe_load_all
    yaml.emit = new_emit
    yaml.serialize = new_serialize
    yaml.serialize_all = new_serialize_all
    yaml.dump = new_dump
    yaml.dump_all = new_dump_all
    yaml.safe_dump = new_safe_dump
    yaml.safe_dump_all = new_safe_dump_all
+
def _tear_down():
    """Restore the pure-Python classes and original entry points saved
    under the Py*/old_* names, undoing _set_up()."""
    yaml.BaseLoader = yaml.PyBaseLoader
    yaml.SafeLoader = yaml.PySafeLoader
    yaml.Loader = yaml.PyLoader
    yaml.BaseDumper = yaml.PyBaseDumper
    yaml.SafeDumper = yaml.PySafeDumper
    yaml.Dumper = yaml.PyDumper
    yaml.scan = old_scan
    yaml.parse = old_parse
    yaml.compose = old_compose
    yaml.compose_all = old_compose_all
    yaml.load = old_load
    yaml.load_all = old_load_all
    yaml.safe_load = old_safe_load
    yaml.safe_load_all = old_safe_load_all
    yaml.emit = old_emit
    yaml.serialize = old_serialize
    yaml.serialize_all = old_serialize_all
    yaml.dump = old_dump
    yaml.dump_all = old_dump_all
    yaml.safe_dump = old_safe_dump
    yaml.safe_dump_all = old_safe_dump_all
+
def test_c_version(verbose=False):
    """Check that the _yaml extension's version tuple and version string
    describe the same release."""
    version = _yaml.get_version()
    version_string = _yaml.get_version_string()
    if verbose:
        print(version)
        print(version_string)
    assert ("%s.%s.%s" % version) == version_string, \
            (version, version_string)
+
def _compare_scanners(py_data, c_data, verbose):
    """Scan the same input with the pure-Python and the C scanner and
    assert the two token streams agree in class, value and positions."""
    py_tokens = list(yaml.scan(py_data, Loader=yaml.PyLoader))
    c_tokens = []
    try:
        for token in yaml.scan(c_data, Loader=yaml.CLoader):
            c_tokens.append(token)
        assert len(py_tokens) == len(c_tokens), (len(py_tokens), len(c_tokens))
        for py_token, c_token in zip(py_tokens, c_tokens):
            assert py_token.__class__ == c_token.__class__, (py_token, c_token)
            if hasattr(py_token, 'value'):
                assert py_token.value == c_token.value, (py_token, c_token)
            if isinstance(py_token, yaml.StreamEndToken):
                # Positions are not compared for the stream-end token.
                continue
            # Compare the start and end marks component by component.
            for py_mark, c_mark in [(py_token.start_mark, c_token.start_mark),
                                    (py_token.end_mark, c_token.end_mark)]:
                py_position = (py_mark.index, py_mark.line, py_mark.column)
                c_position = (c_mark.index, c_mark.line, c_mark.column)
                assert py_position == c_position, (py_position, c_position)
    finally:
        if verbose:
            print("PY_TOKENS:")
            pprint.pprint(py_tokens)
            print("C_TOKENS:")
            pprint.pprint(c_tokens)
+
def test_c_scanner(data_filename, canonical_filename, verbose=False):
    """Run the Python/C scanner comparison on both test files, once with
    open file objects and once with in-memory byte strings."""
    for filename in [data_filename, canonical_filename]:
        # 'with' blocks close the handles; the original leaked all four.
        with open(filename, 'rb') as file1, open(filename, 'rb') as file2:
            _compare_scanners(file1, file2, verbose)
        with open(filename, 'rb') as file:
            data = file.read()
        _compare_scanners(data, data, verbose)

test_c_scanner.unittest = ['.data', '.canonical']
test_c_scanner.skip = ['.skip-ext']
+
def _compare_parsers(py_data, c_data, verbose):
    """Parse the same input with the pure-Python and the C parser and
    assert the two event streams agree on all compared attributes."""
    compared_attributes = ['__class__', 'anchor', 'tag', 'implicit',
                           'value', 'explicit', 'version', 'tags']
    py_events = list(yaml.parse(py_data, Loader=yaml.PyLoader))
    c_events = []
    try:
        for event in yaml.parse(c_data, Loader=yaml.CLoader):
            c_events.append(event)
        assert len(py_events) == len(c_events), (len(py_events), len(c_events))
        for py_event, c_event in zip(py_events, c_events):
            for attribute in compared_attributes:
                # Missing attributes compare as None on either side.
                py_value = getattr(py_event, attribute, None)
                c_value = getattr(c_event, attribute, None)
                assert py_value == c_value, (py_event, c_event, attribute)
    finally:
        if verbose:
            print("PY_EVENTS:")
            pprint.pprint(py_events)
            print("C_EVENTS:")
            pprint.pprint(c_events)
+
def test_c_parser(data_filename, canonical_filename, verbose=False):
    """Run the Python/C parser comparison on both test files, once with
    open file objects and once with in-memory byte strings."""
    for filename in [data_filename, canonical_filename]:
        # 'with' blocks close the handles; the original leaked all four.
        with open(filename, 'rb') as file1, open(filename, 'rb') as file2:
            _compare_parsers(file1, file2, verbose)
        with open(filename, 'rb') as file:
            data = file.read()
        _compare_parsers(data, data, verbose)

test_c_parser.unittest = ['.data', '.canonical']
test_c_parser.skip = ['.skip-ext']
+
def _compare_emitters(data, verbose):
    """Round-trip `data` through the C emitter: parse it with the Python
    parser, emit the events with the C dumper, re-parse the output with
    both parsers, and assert all three event streams agree."""
    events = list(yaml.parse(data, Loader=yaml.PyLoader))
    c_data = yaml.emit(events, Dumper=yaml.CDumper)
    if verbose:
        print(c_data)
    py_events = list(yaml.parse(c_data, Loader=yaml.PyLoader))
    c_events = list(yaml.parse(c_data, Loader=yaml.CLoader))
    try:
        assert len(events) == len(py_events), (len(events), len(py_events))
        assert len(events) == len(c_events), (len(events), len(c_events))
        for event, py_event, c_event in zip(events, py_events, c_events):
            for attribute in ['__class__', 'anchor', 'tag', 'implicit',
                    'value', 'explicit', 'version', 'tags']:
                # Missing attributes compare as None on every side.
                value = getattr(event, attribute, None)
                py_value = getattr(py_event, attribute, None)
                c_value = getattr(c_event, attribute, None)
                # A tag of None and of '!' are treated as interchangeable
                # across the round trip.
                if attribute == 'tag' and value in [None, '!'] \
                        and py_value in [None, '!'] and c_value in [None, '!']:
                    continue
                # An 'explicit' flag set on either re-parsed side is
                # tolerated even if the original event lacked it.
                if attribute == 'explicit' and (py_value or c_value):
                    continue
                assert value == py_value, (event, py_event, attribute)
                assert value == c_value, (event, c_event, attribute)
    finally:
        if verbose:
            print("EVENTS:")
            pprint.pprint(events)
            print("PY_EVENTS:")
            pprint.pprint(py_events)
            print("C_EVENTS:")
            pprint.pprint(c_events)
+
def test_c_emitter(data_filename, canonical_filename, verbose=False):
    """Run the emitter round-trip comparison on both test files."""
    for filename in [data_filename, canonical_filename]:
        # Read via 'with' so the handle is closed; the original leaked it.
        with open(filename, 'rb') as file:
            _compare_emitters(file.read(), verbose)

test_c_emitter.unittest = ['.data', '.canonical']
test_c_emitter.skip = ['.skip-ext']
+
def wrap_ext_function(function):
    """Return an '_ext' variant of `function` that runs with the C-based
    classes installed (_set_up) and restores the originals afterwards
    (_tear_down), copying over the unittest/skip metadata."""
    def wrapper(*args, **kwds):
        _set_up()
        try:
            function(*args, **kwds)
        finally:
            _tear_down()
    # Propagate the test-driver metadata; '.skip-ext' marks data files
    # that must not be run against the extension.
    wrapper.unittest = function.unittest
    wrapper.skip = getattr(function, 'skip', []) + ['.skip-ext']
    wrapper.__name__ = '%s_ext' % function.__name__
    return wrapper
+
def wrap_ext(collections):
    """Register an '_ext' variant of every unittest function found in
    `collections` (a module/dict or a list of them) in this module's
    globals.

    Fixes two Python 3 bugs in the original: dict views have no .sort()
    method (use sorted() instead), and the wrappers were looked up via
    'function.unittest_name', an attribute that is never set anywhere --
    wrap_ext_function stores the new name in __name__.
    """
    functions = []
    if not isinstance(collections, list):
        collections = [collections]
    for collection in collections:
        if not isinstance(collection, dict):
            collection = vars(collection)
        # sorted() accepts a dict key view; .sort() (the Python 2 idiom
        # used before) raises AttributeError in Python 3.
        for key in sorted(collection):
            value = collection[key]
            if isinstance(value, types.FunctionType) and hasattr(value, 'unittest'):
                functions.append(wrap_ext_function(value))
    for function in functions:
        # wrap_ext_function put the '<name>_ext' name in __name__.
        assert function.__name__ not in globals()
        globals()[function.__name__] = function
+
+import test_tokens, test_structure, test_errors, test_resolver, test_constructor, \
+ test_emitter, test_representer, test_recursive
# Generate '_ext' variants of every unittest function in the imported test
# modules, so the whole suite also runs against the C extension.
wrap_ext([test_tokens, test_structure, test_errors, test_resolver, test_constructor,
    test_emitter, test_representer, test_recursive])

if __name__ == '__main__':
    # Allow running this test module directly through the shared driver.
    import test_appliance
    test_appliance.run(globals())
+