import test_appliance
from yaml.reader import *
from yaml.tokens import *
from yaml.scanner import *
class TestTokens(test_appliance.TestAppliance):
# Tokens mnemonic:
# directive: %
# document_start: ---
# document_end: ...
# alias: *
# anchor: &
# tag: !
# scalar _
# block_sequence_start: [[
# block_mapping_start: {{
# block_end: ]}
# flow_sequence_start: [
# flow_sequence_end: ]
# flow_mapping_start: {
# flow_mapping_end: }
# entry: ,
# key: ?
# value: :
replaces = {
YAMLDirectiveToken: '%',
TagDirectiveToken: '%',
ReservedDirectiveToken: '%',
DocumentStartToken: '---',
DocumentEndToken: '...',
AliasToken: '*',
AnchorToken: '&',
TagToken: '!',
ScalarToken: '_',
BlockSequenceStartToken: '[[',
BlockMappingStartToken: '{{',
BlockEndToken: ']}',
FlowSequenceStartToken: '[',
FlowSequenceEndToken: ']',
FlowMappingStartToken: '{',
FlowMappingEndToken: '}',
EntryToken: ',',
KeyToken: '?',
ValueToken: ':',
}
def _testTokens(self, test_name, data_filename, tokens_filename):
tokens1 = None
tokens2 = file(tokens_filename, 'rb').read().split()
try:
scanner = Scanner(Reader(file(data_filename, 'rb')))
tokens1 = []
while not isinstance(scanner.peek_token(), StreamEndToken):
tokens1.append(scanner.get_token())
tokens1 = [self.replaces[t.__class__] for t in tokens1]
self.failUnlessEqual(tokens1, tokens2)
except:
print
print "DATA:"
print file(data_filename, 'rb').read()
print "TOKENS1:", tokens1
print "TOKENS2:", tokens2
raise
# Generate one test method per '.data'/'.tokens' fixture pair discovered
# by the test appliance.
TestTokens.add_tests('testTokens', '.data', '.tokens')
class TestScanner(test_appliance.TestAppliance):
def _testScanner(self, test_name, data_filename, canonical_filename):
for filename in [canonical_filename, data_filename]:
tokens = None
try:
scanner = Scanner(Reader(file(filename, 'rb')))
tokens = []
while not isinstance(scanner.peek_token(), StreamEndToken):
tokens.append(scanner.get_token().__class__.__name__)
except:
print
print "DATA:"
print file(data_filename, 'rb').read()
print "TOKENS:", tokens
raise
# Generate one test method per '.data'/'.canonical' fixture pair discovered
# by the test appliance.
TestScanner.add_tests('testScanner', '.data', '.canonical')