path: root/buildscripts/bypass_compile_and_fetch_binaries.py
#!/usr/bin/env python3
"""Bypass compile and fetch binaries."""

from collections import namedtuple
import json
import logging
import os
import sys
import tarfile
from tempfile import TemporaryDirectory
import urllib.error
import urllib.parse
import urllib.request
from typing import Any, Dict, List

import click

from evergreen.api import RetryingEvergreenApi, EvergreenApi, Build, Task
from git.repo import Repo
import requests
import structlog
from structlog.stdlib import LoggerFactory
import yaml

# Get relative imports to work when the package is not installed on the PYTHONPATH.
if __name__ == "__main__" and __package__ is None:
    sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

# pylint: disable=wrong-import-position
from buildscripts.ciconfig.evergreen import parse_evergreen_file
# pylint: enable=wrong-import-position

structlog.configure(logger_factory=LoggerFactory())
LOGGER = structlog.get_logger(__name__)

EVG_CONFIG_FILE = ".evergreen.yml"

_IS_WINDOWS = (sys.platform == "win32" or sys.platform == "cygwin")

# If the changed files all fall under the "files" or "directories" entries of
# BYPASS_WHITELIST, then compile is bypassed, unless a file is also listed in
# BYPASS_EXTRA_CHECKS_REQUIRED, in which case extra checks decide. Any other file change
# leads to a full compile.
BYPASS_WHITELIST = {
    "files": {
        "etc/evergreen.yml",
    },
    "directories": {
        "buildscripts/",
        "jstests/",
        "pytests/",
    },
}  # yapf: disable

# These files and directories are exceptions to the whitelisted directories in
# BYPASS_WHITELIST. Changes to any of them disable compile bypass. Add files that you
# know should specifically force compilation.
BYPASS_BLACKLIST = {
    "files": {
        "buildscripts/errorcodes.py",
        "buildscripts/make_archive.py",
        "buildscripts/moduleconfig.py",
        "buildscripts/msitrim.py",
        "buildscripts/packager_enterprise.py",
        "buildscripts/packager.py",
        "buildscripts/scons.py",
        "buildscripts/utils.py",
    },
    "directories": {
        "buildscripts/idl/",
        "src/",
    }
}  # yapf: disable
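
# Illustrative classification under the lists above (file names hypothetical): a patch that
# only touches "jstests/core/basic.js" stays inside BYPASS_WHITELIST["directories"] and can
# bypass compile, while a patch touching "buildscripts/idl/gen.py" or anything under "src/"
# matches BYPASS_BLACKLIST and forces a full compile.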

# Changes to files in BYPASS_EXTRA_CHECKS_REQUIRED may or may not allow compile bypass,
# depending on the change. If a file is added to this set, the _check_file_for_bypass()
# function should be updated to perform the extra checks on that file.
BYPASS_EXTRA_CHECKS_REQUIRED = {
    "etc/evergreen.yml",
}  # yapf: disable

# Expansions in etc/evergreen.yml that must not be changed in order to bypass compile.
EXPANSIONS_TO_CHECK = {
    "compile_flags",
}  # yapf: disable

# Related to SERVER-21492: without running SCons, the jstests/libs/key1, key2, and
# keyForRollover files are not chmod'ed to 0600, so fix their permissions here since we
# bypass SCons.
ARTIFACTS_NEEDING_PERMISSIONS = {
    os.path.join("jstests", "libs", "key1"): 0o600,
    os.path.join("jstests", "libs", "key2"): 0o600,
    os.path.join("jstests", "libs", "keyForRollover"): 0o600,
}

ARTIFACT_ENTRIES_MAP = {
    "mongo_binaries": "Binaries",
    "mongo_debugsymbols": "mongo-debugsymbols.tgz",
    "mh_archive": "MH Binaries",
    "mh_debugsymbols": "MH Debuginfo",
}

TargetBuild = namedtuple("TargetBuild", [
    "project",
    "revision",
    "build_variant",
])
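# For example (illustrative values only):
#   TargetBuild(project="mongodb-mongo-master", revision="<base-revision-sha>",
#               build_variant="enterprise-rhel-62-64-bit")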


def executable_name(pathname, destdir=""):
    """Return the executable name."""
    # Ensure that executable files on Windows have a ".exe" extension.
    if _IS_WINDOWS and os.path.splitext(pathname)[1] != ".exe":
        pathname = "{}.exe".format(pathname)

    if destdir:
        return os.path.join(destdir, "bin", pathname)

    return pathname


def archive_name(archive):
    """Return the archive name."""
    # Ensure the right archive extension is used for Windows.
    if _IS_WINDOWS:
        return "{}.zip".format(archive)
    return "{}.tgz".format(archive)


def requests_get_json(url):
    """Return the JSON response."""
    response = requests.get(url)
    response.raise_for_status()

    try:
        return response.json()
    except ValueError:
        LOGGER.warning("Invalid JSON object returned with response", response=response.text)
        raise


def write_out_bypass_compile_expansions(patch_file, **expansions):
    """Write out the macro expansions to given file."""
    with open(patch_file, "w") as out_file:
        LOGGER.info("Saving compile bypass expansions", patch_file=patch_file,
                    expansions=expansions)
        yaml.safe_dump(expansions, out_file, default_flow_style=False)


def write_out_artifacts(json_file, artifacts):
    """Write out the JSON file with URLs of artifacts to given file."""
    with open(json_file, "w") as out_file:
        LOGGER.info("Generating artifacts.json from pre-existing artifacts", json=json.dumps(
            artifacts, indent=4))
        json.dump(artifacts, out_file)


def _create_bypass_path(prefix, build_id, name):
    """
    Create the path for the bypass expansions.

    :param prefix: Prefix of the path.
    :param build_id: Build-Id to use.
    :param name: Name of file.
    :return: Path to use for bypass expansion.
    """
    return archive_name(f"{prefix}/{name}-{build_id}")
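
# For example (hypothetical values): _create_bypass_path("mongodb-mongo-master/variant/rev",
# "abc123", "binaries") returns "mongodb-mongo-master/variant/rev/binaries-abc123.tgz"
# (".zip" on Windows).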


def _artifact_to_bypass_path(project: str, artifact_url: str) -> str:
    """
    Get the unique part of the path for the given artifact url.

    :param project: Evergreen project being run in.
    :param artifact_url: Full URL of the artifact.
    :return: Unique part of URL containing path to artifact.
    """
    start_idx = artifact_url.find(project)
    return artifact_url[start_idx:]
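
# For example (hypothetical URL): with project "mongodb-mongo-master" and artifact_url
# "https://s3.amazonaws.com/mciuploads/mongodb-mongo-master/variant/abc/binaries-xyz.tgz",
# the returned bypass path is "mongodb-mongo-master/variant/abc/binaries-xyz.tgz".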


def generate_bypass_expansions(target: TargetBuild, artifacts_list: List) -> Dict[str, Any]:
    """
    Create a dictionary of the generated bypass expansions.

    :param target: Build being targeted.
    :param artifacts_list: List of artifacts being bypassed.
    :returns: Dictionary of expansions to update.
    """
    # Convert the artifacts list to a dictionary for easy lookup.
    artifacts_dict = {artifact["name"].strip(): artifact["link"] for artifact in artifacts_list}

    bypass_expansions = {
        key: _artifact_to_bypass_path(target.project, artifacts_dict[value])
        for key, value in ARTIFACT_ENTRIES_MAP.items()
    }
    bypass_expansions["bypass_compile"] = True
    return bypass_expansions
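
# Illustrative result (artifact names and links hypothetical): given an artifacts_list entry
# {"name": "Binaries", "link": "https://.../mongodb-mongo-master/.../binaries-<id>.tgz"},
# the returned expansions map "mongo_binaries" to "mongodb-mongo-master/.../binaries-<id>.tgz"
# and always include "bypass_compile": True.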


def _get_original_etc_evergreen(path):
    """
    Get the etc/evergreen configuration before the changes were made.

    :param path: path to etc/evergreen.
    :return: An EvergreenProjectConfig for the previous etc/evergreen file.
    """
    repo = Repo(".")
    previous_contents = repo.git.show([f"HEAD:{path}"])
    with TemporaryDirectory() as tmpdir:
        file_path = os.path.join(tmpdir, "evergreen.yml")
        with open(file_path, "w") as fp:
            fp.write(previous_contents)
        return parse_evergreen_file(file_path)


def _check_etc_evergreen_for_bypass(path, build_variant):
    """
    Check if changes to etc/evergreen can be allowed to bypass compile.

    :param path: Path to etc/evergreen file.
    :param build_variant: Build variant to check.
    :return: True if changes can bypass compile.
    """
    variant_before = _get_original_etc_evergreen(path).get_variant(build_variant)
    variant_after = parse_evergreen_file(path).get_variant(build_variant)

    for expansion in EXPANSIONS_TO_CHECK:
        if variant_before.expansion(expansion) != variant_after.expansion(expansion):
            return False

    return True


def _check_file_for_bypass(file, build_variant):
    """
    Check if changes to the given file can be allowed to bypass compile.

    :param file: File to check.
    :param build_variant: Build Variant to check.
    :return: True if changes can bypass compile.
    """
    if file == "etc/evergreen.yml":
        return _check_etc_evergreen_for_bypass(file, build_variant)

    return True


def _file_in_group(filename, group):
    """
    Determine whether the given filename matches the given group of files or directories.

    :param filename: Filename to check.
    :param group: Dictionary with "files" and "directories" entries to check against.
    :return: True if filename is in the group's files or under one of its directories.
    """
    if "files" not in group:
        raise TypeError("No list of files to check.")
    if filename in group["files"]:
        return True

    if "directories" not in group:
        raise TypeError("No list of directories to check.")
    if any(filename.startswith(directory) for directory in group["directories"]):
        return True

    return False


def should_bypass_compile(patch_file, build_variant):
    """
    Determine whether the compile stage should be bypassed based on the modified patch files.

    We use lists of files and directories to more precisely control which modified patch files will
    lead to compile bypass.

    :param patch_file: Path to a file listing all files modified in the patch build.
    :param build_variant: Build variant where compile is running.
    :returns: True if compile should be bypassed.
    """
    with open(patch_file, "r") as pch:
        for filename in pch:
            filename = filename.rstrip()
            # Skip directories that show up in 'git diff HEAD --name-only'.
            if os.path.isdir(filename):
                continue

            log = LOGGER.bind(filename=filename)
            if _file_in_group(filename, BYPASS_BLACKLIST):
                log.warning("Compile bypass disabled due to blacklisted file")
                return False

            if not _file_in_group(filename, BYPASS_WHITELIST):
                log.warning("Compile bypass disabled due to non-whitelisted file")
                return False

            if filename in BYPASS_EXTRA_CHECKS_REQUIRED:
                if not _check_file_for_bypass(filename, build_variant):
                    log.warning("Compile bypass disabled due to extra checks for file.")
                    return False

    return True
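
# Illustrative patch_file contents (one path per line, as produced by
# "git diff HEAD --name-only"; file names hypothetical):
#   jstests/core/basic.js
#   buildscripts/resmokelib/run.py
# Both paths fall under BYPASS_WHITELIST directories and match no blacklist entry, so
# compile would be bypassed.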


def find_build_for_previous_compile_task(evg_api: EvergreenApi, target: TargetBuild) -> Build:
    """
    Find the build of the base revision for the targeted build variant.

    :param evg_api: Evergreen API client.
    :param target: Build being targeted.
    :return: Build of the base revision.
    """
    project_prefix = target.project.replace("-", "_")
    version_of_base_revision = "{}_{}".format(project_prefix, target.revision)
    version = evg_api.version_by_id(version_of_base_revision)
    build = version.build_by_variant(target.build_variant)
    return build
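
# For example (illustrative): project "mongodb-mongo-master" with revision "abc123" looks up
# the version id "mongodb_mongo_master_abc123", which is how this script expects Evergreen to
# name versions created from mainline commits.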


def find_previous_compile_task(build: Build) -> Task:
    """
    Find compile task that should be used for skip compile.

    :param build: Build containing the desired compile task.
    :return: Evergreen.py object containing data about the desired compile task.
    """
    tasks = [task for task in build.get_tasks() if task.display_name == "compile"]
    assert len(tasks) == 1
    return tasks[0]


def fetch_artifacts(build: Build, revision: str):
    """
    Fetch artifacts from a given revision.

    :param build: Build object containing the desired artifacts.
    :param revision: The revision being fetched from.
    :return: Artifacts from the revision.
    """
    LOGGER.info("Fetching artifacts", build_id=build.id, revision=revision)
    task = find_previous_compile_task(build)
    if task is None or not task.is_success():
        LOGGER.warning(
            "Could not retrieve artifacts because the compile task for base commit"
            " was not available. Default compile bypass to false.",
            task_id=task.task_id if task else None)
        raise ValueError("No artifacts were found for the current task")
    LOGGER.info("Fetching pre-existing artifacts from compile task", task_id=task.task_id)
    artifacts = []
    for artifact in task.artifacts:
        filename = os.path.basename(artifact.url)
        if filename.startswith(build.id):
            LOGGER.info("Retrieving archive", filename=filename)
            # This is the artifacts.tgz as referenced in evergreen.yml.
            try:
                urllib.request.urlretrieve(artifact.url, filename)
            except urllib.error.ContentTooShortError:
                LOGGER.warning(
                    "The artifact could not be completely downloaded. Default"
                    " compile bypass to false.", filename=filename)
                raise ValueError("No artifacts were found for the current task")
            # Need to extract certain files from the pre-existing artifacts.tgz.
            extract_files = [
                executable_name("mongobridge", destdir=os.getenv("DESTDIR")),
                executable_name("mongotmock", destdir=os.getenv("DESTDIR")),
                executable_name("wt", destdir=os.getenv("DESTDIR")),
            ]
            with tarfile.open(filename, "r:gz") as tar:
                # The repo/ directory contains files needed by the package task. May
                # need to add other files that would otherwise be generated by SCons
                # if we did not bypass compile.
                subdir = [
                    tarinfo for tarinfo in tar.getmembers()
                    if tarinfo.name.startswith("repo/") or tarinfo.name in extract_files
                ]
                LOGGER.info("Extracting the files...", filename=filename,
                            files="\n".join(tarinfo.name for tarinfo in subdir))
                tar.extractall(members=subdir)
        elif filename.startswith("mongo-src"):
            LOGGER.info("Retrieving mongo source", filename=filename)
            # This is the distsrc.[tgz|zip] as referenced in evergreen.yml.
            try:
                urllib.request.urlretrieve(artifact.url, filename)
            except urllib.error.ContentTooShortError:
                LOGGER.warning(
                    "The artifact could not be completely downloaded. Default"
                    " compile bypass to false.", filename=filename)
                raise ValueError("No artifacts were found for the current task")
            extension = os.path.splitext(filename)[1]
            distsrc_filename = "distsrc{}".format(extension)
            LOGGER.info("Renaming", filename=filename, rename=distsrc_filename)
            os.rename(filename, distsrc_filename)
        else:
            LOGGER.info("Linking base artifact to this patch build", filename=filename)
            # For other artifacts we just add their URLs to the JSON file to upload.
            files = {
                "name": artifact.name,
                "link": artifact.url,
                "visibility": "private",
            }
            # Check the link exists, else raise an exception. Compile bypass is disabled.
            requests.head(artifact.url).raise_for_status()
            artifacts.append(files)
    return artifacts


def update_artifact_permissions(permission_dict):
    """
    Update the given files with the specified permissions.

    :param permission_dict: Keys of dict should be files to update, values should be permissions.
    """
    for path, perm in permission_dict.items():
        os.chmod(path, perm)


def gather_artifacts_and_update_expansions(build: Build, target: TargetBuild, json_artifact_file,
                                           expansions_file):
    """
    Fetch the artifacts for this build and save them to be used by other tasks.

    :param build: build containing artifacts.
    :param target: Target build being bypassed.
    :param json_artifact_file: File to write json artifacts to.
    :param expansions_file: Files to write expansions to.
    """
    artifacts = fetch_artifacts(build, target.revision)
    update_artifact_permissions(ARTIFACTS_NEEDING_PERMISSIONS)
    write_out_artifacts(json_artifact_file, artifacts)

    LOGGER.info("Creating expansions files", target=target, build_id=build.id)

    expansions = generate_bypass_expansions(target, artifacts)
    write_out_bypass_compile_expansions(expansions_file, **expansions)


@click.command()
@click.option("--project", required=True, help="The evergreen project.")
@click.option("--build-variant", required=True,
              help="The build variant whose artifacts we want to use.")
@click.option("--revision", required=True, help="Base revision of the build.")
@click.option("--patch-file", required=True, help="A list of all files modified in patch build.")
@click.option("--out-file", required=True, help="File to write expansions to.")
@click.option("--json-artifact", required=True,
              help="The JSON file to write out the metadata of files to attach to task.")
def main(  # pylint: disable=too-many-arguments,too-many-locals,too-many-statements
        project, build_variant, revision, patch_file, out_file, json_artifact):
    """
    Create a file with expansions that can be used to bypass compile.

    The expansions are only written out when we determine that compile can be bypassed;
    if bypass is disabled for any reason, no expansions file is written.
    \f

    :param project: The evergreen project.
    :param build_variant: The build variant whose artifacts we want to use.
    :param revision: Base revision of the build.
    :param patch_file: Path to a file listing all files modified in the patch build.
    :param out_file: File to write expansions to.
    :param json_artifact: The JSON file to write out the metadata of files to attach to task.
    """
    logging.basicConfig(
        format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
        level=logging.DEBUG,
        stream=sys.stdout,
    )

    target = TargetBuild(project=project, build_variant=build_variant, revision=revision)

    # Determine if we should bypass compile based on modified patch files.
    if should_bypass_compile(patch_file, build_variant):
        evg_api = RetryingEvergreenApi.get_api(config_file=EVG_CONFIG_FILE)
        build = find_build_for_previous_compile_task(evg_api, target)
        if not build:
            LOGGER.warning("Could not find build id. Default compile bypass to false.",
                           revision=revision, project=project)
            return

        gather_artifacts_and_update_expansions(build, target, json_artifact, out_file)


if __name__ == "__main__":
    main()  # pylint: disable=no-value-for-parameter