summaryrefslogtreecommitdiff
path: root/tests/lib/test_tokens.py
diff options
context:
space:
mode:
author: xi <xi@18f92427-320e-0410-9341-c67f048884a3> 2008-12-29 19:05:11 +0000
committer: xi <xi@18f92427-320e-0410-9341-c67f048884a3> 2008-12-29 19:05:11 +0000
commit: 3b234aa5d5f355d0c94a735ace48c313fb6fa6f8 (patch)
tree: 49dc07f8c83bbaa783f5b3be9bfa6108fa9e849e /tests/lib/test_tokens.py
parent: acc558ffdc125d1f78a81a9e4b4267c4fb4bd9a8 (diff)
download: pyyaml-3b234aa5d5f355d0c94a735ace48c313fb6fa6f8.tar.gz
Share data files between Py2 and Py3 test suites.
git-svn-id: http://svn.pyyaml.org/pyyaml/trunk@330 18f92427-320e-0410-9341-c67f048884a3
Diffstat (limited to 'tests/lib/test_tokens.py')
-rw-r--r--tests/lib/test_tokens.py77
1 files changed, 77 insertions, 0 deletions
diff --git a/tests/lib/test_tokens.py b/tests/lib/test_tokens.py
new file mode 100644
index 0000000..9613fa0
--- /dev/null
+++ b/tests/lib/test_tokens.py
@@ -0,0 +1,77 @@
+
+import yaml
+import pprint
+
+# Tokens mnemonic:
+# directive: %
+# document_start: ---
+# document_end: ...
+# alias: *
+# anchor: &
+# tag: !
+# scalar: _
+# block_sequence_start: [[
+# block_mapping_start: {{
+# block_end: ]}
+# flow_sequence_start: [
+# flow_sequence_end: ]
+# flow_mapping_start: {
+# flow_mapping_end: }
+# entry: ,
+# key: ?
+# value: :
+
# Symbol table used by test_tokens: each scanner token class is mapped to
# the short mnemonic listed in the comment block above.  StreamStartToken
# and StreamEndToken have no entry because the caller skips them.
_replaces = dict([
    (yaml.DirectiveToken, '%'),
    (yaml.DocumentStartToken, '---'),
    (yaml.DocumentEndToken, '...'),
    (yaml.AliasToken, '*'),
    (yaml.AnchorToken, '&'),
    (yaml.TagToken, '!'),
    (yaml.ScalarToken, '_'),
    (yaml.BlockSequenceStartToken, '[['),
    (yaml.BlockMappingStartToken, '{{'),
    (yaml.BlockEndToken, ']}'),
    (yaml.FlowSequenceStartToken, '['),
    (yaml.FlowSequenceEndToken, ']'),
    (yaml.FlowMappingStartToken, '{'),
    (yaml.FlowMappingEndToken, '}'),
    (yaml.BlockEntryToken, ','),
    (yaml.FlowEntryToken, ','),
    (yaml.KeyToken, '?'),
    (yaml.ValueToken, ':'),
])
+
+def test_tokens(data_filename, tokens_filename, verbose=False):
+ tokens1 = []
+ tokens2 = open(tokens_filename, 'rb').read().split()
+ try:
+ for token in yaml.scan(open(data_filename, 'rb')):
+ if not isinstance(token, (yaml.StreamStartToken, yaml.StreamEndToken)):
+ tokens1.append(_replaces[token.__class__])
+ finally:
+ if verbose:
+ print "TOKENS1:", ' '.join(tokens1)
+ print "TOKENS2:", ' '.join(tokens2)
+ assert len(tokens1) == len(tokens2), (tokens1, tokens2)
+ for token1, token2 in zip(tokens1, tokens2):
+ assert token1 == token2, (token1, token2)
+
+test_tokens.unittest = ['.data', '.tokens']
+
def test_scanner(data_filename, canonical_filename, verbose=False):
    # Smoke-test the scanner: both the data file and its canonical form must
    # scan to completion without raising.  In verbose mode the token class
    # names collected so far are printed even if scanning fails.
    #
    # Fix over the original: the file object returned by open() was never
    # closed; it is now closed explicitly via try/finally (py2.3-compatible).
    for filename in [data_filename, canonical_filename]:
        tokens = []
        stream = open(filename, 'rb')
        try:
            try:
                for token in yaml.scan(stream):
                    tokens.append(token.__class__.__name__)
            finally:
                if verbose:
                    pprint.pprint(tokens)
        finally:
            stream.close()

# Tells the test harness which fixture-file extensions this test consumes.
test_scanner.unittest = ['.data', '.canonical']
+
if __name__ == '__main__':
    # Standalone entry point: run every test_* function defined in this
    # module through the project's test harness (test_appliance lives in
    # the same test directory, so it is imported lazily here).
    import test_appliance
    test_appliance.run(globals())