summaryrefslogtreecommitdiff
path: root/_test/lib/test_resolver.py
blob: 0a04e7a6354ff88f43c31e1871e3398c8b2dab20 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
from __future__ import absolute_import
from __future__ import print_function

import ruamel.yaml as yaml
import pprint
from ruamel.yaml.compat import PY3


def test_implicit_resolver(data_filename, detect_filename, verbose=False):
    """Check that every scalar in the .data file resolves to the tag
    recorded in the matching .detect file.

    The .detect file holds the expected tag; the .data file must compose
    to a sequence of scalars all carrying exactly that tag.
    """
    expected_tag = None
    composed = None
    try:
        # .detect files are small text files; open as text on Python 3.
        mode = 'r' if PY3 else 'rb'
        with open(detect_filename, mode) as detect_fp:
            expected_tag = detect_fp.read().strip()
        with open(data_filename, 'rb') as data_fp:
            composed = yaml.compose(data_fp)
        assert isinstance(composed, yaml.SequenceNode), composed
        for child in composed.value:
            assert isinstance(child, yaml.ScalarNode), child
            assert child.tag == expected_tag, (child.tag, expected_tag)
    finally:
        if verbose:
            print('CORRECT TAG:', expected_tag)
            if hasattr(composed, 'value'):
                print('CHILDREN:')
                pprint.pprint(composed.value)


test_implicit_resolver.unittest = ['.data', '.detect']


def _make_path_loader_and_dumper():
    """Create module-global MyLoader/MyDumper subclasses and register the
    path resolvers exercised by the path-resolver tests on them.

    The classes are installed as module globals because the test functions
    below reference them by name after calling this helper.
    """
    global MyLoader, MyDumper

    class MyLoader(yaml.Loader):
        pass

    class MyDumper(yaml.Dumper):
        pass

    # (tag, path, node kind) triples; a kind of None means "any node kind",
    # which is also add_path_resolver's default.
    resolver_specs = [
        (u'!root', [], None),
        (u'!root/scalar', [], str),
        (u'!root/key11/key12/*', ['key11', 'key12'], None),
        (u'!root/key21/1/*', ['key21', 1], None),
        (u'!root/key31/*/*/key14/map', ['key31', None, None, 'key14'], dict),
    ]
    for tag, path, kind in resolver_specs:
        yaml.add_path_resolver(tag, path, kind, Loader=MyLoader, Dumper=MyDumper)

    return MyLoader, MyDumper


def _convert_node(node):
    """Recursively convert a composed node tree to comparable (tag, value)
    tuples so two node graphs can be compared with ``==``.

    Scalars become ``(tag, text)``; sequences become ``(tag, [child, ...])``;
    mappings become ``(tag, [(key, value), ...])``. Any other input falls
    through and yields None, matching the original behavior.
    """
    if isinstance(node, yaml.ScalarNode):
        return (node.tag, node.value)
    if isinstance(node, yaml.SequenceNode):
        return (node.tag, [_convert_node(child) for child in node.value])
    if isinstance(node, yaml.MappingNode):
        converted_pairs = [
            (_convert_node(key), _convert_node(value)) for key, value in node.value
        ]
        return (node.tag, converted_pairs)


def test_path_resolver_loader(data_filename, path_filename, verbose=False):
    """Composing the .data file with MyLoader (path resolvers active) must
    produce the same node graph as composing the .path file with the
    default loader."""
    _make_path_loader_and_dumper()
    with open(data_filename, 'rb') as data_fp:
        custom_nodes = list(yaml.compose_all(data_fp.read(), Loader=MyLoader))
    with open(path_filename, 'rb') as path_fp:
        reference_nodes = list(yaml.compose_all(path_fp.read()))
    try:
        for custom, reference in zip(custom_nodes, reference_nodes):
            converted_custom = _convert_node(custom)
            converted_reference = _convert_node(reference)
            assert converted_custom == converted_reference, (
                converted_custom,
                converted_reference,
            )
    finally:
        if verbose:
            print(yaml.serialize_all(custom_nodes))


test_path_resolver_loader.unittest = ['.data', '.path']


def test_path_resolver_dumper(data_filename, path_filename, verbose=False):
    """Serializing either input file with MyDumper (path resolvers active)
    must round-trip: re-composing the serialized output has to yield the
    same node graph as composing the .data file with the default loader.
    """
    _make_path_loader_and_dumper()
    for filename in [data_filename, path_filename]:
        with open(filename, 'rb') as fp0:
            # serialize_all consumes the compose_all generator here, while
            # the file is still open.
            output = yaml.serialize_all(yaml.compose_all(fp0), Dumper=MyDumper)
        if verbose:
            print(output)
        nodes1 = yaml.compose_all(output)
        with open(data_filename, 'rb') as fp0:
            # BUG FIX: compose_all returns a lazy generator; the original
            # code iterated it only after this `with` closed the file, which
            # raises ValueError ("I/O operation on closed file").
            # Materialize the nodes while the file is still open.
            nodes2 = list(yaml.compose_all(fp0))
        for node1, node2 in zip(nodes1, nodes2):
            data1 = _convert_node(node1)
            data2 = _convert_node(node2)
            assert data1 == data2, (data1, data2)


test_path_resolver_dumper.unittest = ['.data', '.path']

if __name__ == '__main__':
    # Allow running this module directly: test_appliance (the project's
    # test harness, located alongside this file) discovers and runs the
    # test_* functions defined above via their `.unittest` attributes.
    import test_appliance

    test_appliance.run(globals())