path: root/json/bin/jsonschema_suite
#! /usr/bin/env python3
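"""
Utilities for the JSON Schema Test Suite.

``check`` runs the sanity checks below, ``flatten`` dumps one draft's test
cases as a single JSON array, ``remotes`` and ``dump_remotes`` export the
remote ref schemas, and ``serve`` serves those schemas over HTTP for the
remote ref tests.
"""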
from pathlib import Path
import argparse
import json
import os
import random
import shutil
import sys
import textwrap
import unittest
import warnings

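# jsonschema is an optional dependency: without it the schema-validation
# sanity checks are skipped, but the other commands still work.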
try:
    import jsonschema.validators
except ImportError:
    jsonschema = None
    VALIDATORS = {}
else:
    VALIDATORS = {
        "draft3": jsonschema.validators.Draft3Validator,
        "draft4": jsonschema.validators.Draft4Validator,
        "draft6": jsonschema.validators.Draft6Validator,
        "draft7": jsonschema.validators.Draft7Validator,
        "draft2019-09": jsonschema.validators.Draft201909Validator,
        "draft2020-12": jsonschema.validators.Draft202012Validator,
        "latest": jsonschema.validators.Draft202012Validator,
    }


ROOT_DIR = Path(__file__).parent.parent
SUITE_ROOT_DIR = ROOT_DIR / "tests"
REMOTES_DIR = ROOT_DIR / "remotes"

TESTSUITE_SCHEMA = json.loads((ROOT_DIR / "test-schema.json").read_text())


def files(paths):
    """
    Each test file in the provided paths, as an array of test cases.
    """
    for path in paths:
        yield path, json.loads(path.read_text())


def cases(paths):
    """
    Each test case within each file in the provided paths.
    """
    for _, test_file in files(paths):
        yield from test_file


def tests(paths):
    """
    Each individual test within all cases within the provided paths.
    """
    for case in cases(paths):
        for test in case["tests"]:
            test["schema"] = case["schema"]
            yield test


def collect(root_dir):
    """
    All of the test file paths within the given root directory, recursively.
    """
    return root_dir.glob("**/*.json")


class SanityTests(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        print(f"Looking for tests in {SUITE_ROOT_DIR}")
        print(f"Looking for remotes in {REMOTES_DIR}")

        cls.test_files = list(collect(SUITE_ROOT_DIR))
        assert cls.test_files, "Didn't find the test files!"
        print(f"Found {len(cls.test_files)} test files")

        cls.remote_files = list(collect(REMOTES_DIR))
        assert cls.remote_files, "Didn't find the remote files!"
        print(f"Found {len(cls.remote_files)} remote files")

    def assertUnique(self, iterable):
        """
        Assert that the elements of an iterable are unique.
        """

        seen, duplicated = set(), set()
        for each in iterable:
            if each in seen:
                duplicated.add(each)
            seen.add(each)
        self.assertFalse(duplicated, "Elements are not unique.")

    def assertFollowsDescriptionStyle(self, description):
        """
        Instead of saying "test that X frobs" or "X should frob", say "X frobs".

        See e.g. https://jml.io/pages/test-docstrings.html

        This check isn't comprehensive (it doesn't catch every bit of extra
        verbiage), but it catches what it can.
        """

        message = (
            "In descriptions, don't say 'Test that X frobs' or 'X should "
            "frob' or 'X should be valid'. Just say 'X frobs' or 'X is "
            "valid'. It's shorter, and the test suite is entirely about "
            "what *should* be already. "
            "See https://jml.io/pages/test-docstrings.html for help."
        )
        self.assertNotRegex(description, r"\bshould\b", message)
        self.assertNotRegex(description, r"(?i)\btest(s)? that\b", message)

    def test_all_test_files_are_valid_json(self):
        """
        All test files contain valid JSON.
        """
        for path in self.test_files:
            with self.subTest(path=path):
                try:
                    json.loads(path.read_text())
                except ValueError as error:
                    self.fail(f"{path} contains invalid JSON ({error})")

    def test_all_remote_files_are_valid_json(self):
        """
        All remote files contain valid JSON.
        """
        for path in self.remote_files:
            with self.subTest(path=path):
                try:
                    json.loads(path.read_text())
                except ValueError as error:
                    self.fail(f"{path} contains invalid JSON ({error})")

    def test_all_case_descriptions_have_reasonable_length(self):
        """
        All cases have descriptions of reasonable length.
        """
        for case in cases(self.test_files):
            with self.subTest(description=case["description"]):
                self.assertLess(
                    len(case["description"]),
                    150,
                    "Description is too long (keep it to less than 150 chars)."
                )

    def test_all_test_descriptions_have_reasonable_length(self):
        """
        All tests have descriptions of reasonable length.
        """
        for count, test in enumerate(tests(self.test_files), 1):
            with self.subTest(description=test["description"]):
                self.assertLess(
                    len(test["description"]),
                    70,
                    "Description is too long (keep it to less than 70 chars)."
                )
        print(f"Found {count} tests.")

    def test_all_case_descriptions_are_unique(self):
        """
        All cases have unique descriptions in their files.
        """
        for path, test_cases in files(self.test_files):
            with self.subTest(path=path):
                self.assertUnique(case["description"] for case in test_cases)

    def test_all_test_descriptions_are_unique(self):
        """
        All test cases have unique test descriptions in their tests.
        """
        for count, case in enumerate(cases(self.test_files), 1):
            with self.subTest(description=case["description"]):
                self.assertUnique(
                    test["description"] for test in case["tests"]
                )
        print(f"Found {count} test cases.")

    def test_case_descriptions_do_not_use_modal_verbs(self):
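        """
        Case descriptions avoid "should" and "test that" phrasing.
        """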
        for case in cases(self.test_files):
            with self.subTest(description=case["description"]):
                self.assertFollowsDescriptionStyle(case["description"])

    def test_test_descriptions_do_not_use_modal_verbs(self):
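        """
        Test descriptions avoid "should" and "test that" phrasing.
        """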
        for test in tests(self.test_files):
            with self.subTest(description=test["description"]):
                self.assertFollowsDescriptionStyle(test["description"])

    @unittest.skipIf(jsonschema is None, "Validation library not present!")
    def test_all_schemas_are_valid(self):
        """
        All schemas are valid under their metaschemas.
        """
        for version in SUITE_ROOT_DIR.iterdir():
            if not version.is_dir():
                continue

            Validator = VALIDATORS.get(version.name)
            if Validator is not None:
                test_files = collect(version)
                for case in cases(test_files):
                    with self.subTest(case=case):
                        try:
                            Validator.check_schema(case["schema"])
                        except jsonschema.SchemaError:
                            self.fail(
                                "Found an invalid schema. "
                                "See the traceback for details on why."
                            )
            else:
                warnings.warn(f"No schema validator for {version.name}")

    @unittest.skipIf(jsonschema is None, "Validation library not present!")
    def test_suites_are_valid(self):
        """
        All test files are valid under test-schema.json.
        """
        Validator = jsonschema.validators.validator_for(TESTSUITE_SCHEMA)
        validator = Validator(TESTSUITE_SCHEMA)
        for path, test_cases in files(self.test_files):
            with self.subTest(path=path):
                try:
                    validator.validate(test_cases)
                except jsonschema.ValidationError as error:
                    self.fail(str(error))


def main(arguments):
    if arguments.command == "check":
        suite = unittest.TestLoader().loadTestsFromTestCase(SanityTests)
        result = unittest.TextTestRunner().run(suite)
        sys.exit(not result.wasSuccessful())
    elif arguments.command == "flatten":
        selected_cases = list(cases(collect(arguments.version)))

        if arguments.randomize:
            random.shuffle(selected_cases)

        json.dump(selected_cases, sys.stdout, indent=4, sort_keys=True)
    elif arguments.command == "remotes":
        remotes = {}
        for path in collect(REMOTES_DIR):
            relative_path = os.path.relpath(path, REMOTES_DIR)
            remotes[relative_path] = json.loads(path.read_text())
        json.dump(remotes, sys.stdout, indent=4, sort_keys=True)
    elif arguments.command == "dump_remotes":
        if arguments.update:
            shutil.rmtree(arguments.out_dir, ignore_errors=True)

        try:
            shutil.copytree(REMOTES_DIR, arguments.out_dir)
        except FileExistsError:
            print(f"{arguments.out_dir} already exists. Aborting.")
            sys.exit(1)
    elif arguments.command == "serve":
        try:
            import flask
        except ImportError:
            print(textwrap.dedent("""
                The Flask library is required to serve the remote schemas.

                You can install it by running `pip install Flask`.

                Alternatively, see the `jsonschema_suite remotes` or
                `jsonschema_suite dump_remotes` commands to create static files
                that can be served with your own web server.
            """.strip("\n")))
            sys.exit(1)

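        # Serve everything under REMOTES_DIR at http://localhost:1234/.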
        app = flask.Flask(__name__)

        @app.route("/<path:path>")
        def serve_path(path):
            return flask.send_from_directory(REMOTES_DIR, path)

        app.run(port=1234)


parser = argparse.ArgumentParser(
    description="JSON Schema Test Suite utilities",
)
subparsers = parser.add_subparsers(
    help="utility commands", dest="command", metavar="COMMAND"
)
subparsers.required = True

check = subparsers.add_parser("check", help="Sanity check the test suite.")

flatten = subparsers.add_parser(
    "flatten",
    help="Output a flattened file containing a selected version's test cases."
)
flatten.add_argument(
    "--randomize",
    action="store_true",
    help="Randomize the order of the outputted cases.",
)
flatten.add_argument(
    "version", help="The directory containing the version to output",
)

remotes = subparsers.add_parser(
    "remotes",
    help="Output the expected URLs and their associated schemas for remote "
         "ref tests as a JSON object."
)

dump_remotes = subparsers.add_parser(
    "dump_remotes", help="Dump the remote ref schemas into a file tree",
)
dump_remotes.add_argument(
    "--update",
    action="store_true",
    help="Update the remotes in an existing directory.",
)
dump_remotes.add_argument(
    "--out-dir",
    default=REMOTES_DIR,
    type=os.path.abspath,
    help="The output directory to create as the root of the file tree",
)

serve = subparsers.add_parser(
    "serve",
    help="Start a webserver to serve schemas used by remote ref tests."
)

if __name__ == "__main__":
    main(parser.parse_args())