summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--buildscripts/simple_report.py102
-rw-r--r--buildscripts/tests/test_simple_report.py109
-rwxr-xr-xbuildscripts/yamllinters.sh2
-rw-r--r--etc/evergreen_yml_components/definitions.yml3
-rw-r--r--etc/pip/components/core.req1
-rw-r--r--evergreen/lint_fuzzer_sanity_all.sh9
-rw-r--r--evergreen/lint_fuzzer_sanity_patch.sh7
-rw-r--r--evergreen/lint_shellscripts.sh12
-rwxr-xr-xevergreen/lint_yaml.sh8
-rwxr-xr-xevergreen/run_clang_tidy.sh10
-rwxr-xr-xevergreen/scons_lint.sh8
11 files changed, 260 insertions, 11 deletions
diff --git a/buildscripts/simple_report.py b/buildscripts/simple_report.py
new file mode 100644
index 00000000000..4364530eed8
--- /dev/null
+++ b/buildscripts/simple_report.py
@@ -0,0 +1,102 @@
+"""Given a test name, path to log file and exit code, generate/append an Evergreen report.json."""
+import json
+import pathlib
+import os
+from typing import List, Dict, Optional
+from typing_extensions import TypedDict
+import click
+
+
# Shape of a single Evergreen test result entry.  total=False because the
# producer only fills the keys it knows (timing keys are left unset here).
Result = TypedDict(
    "Result",
    {
        "status": str,
        "exit_code": int,
        "test_file": str,
        "start": int,
        "end": int,
        "elapsed": int,
        "log_raw": str,
    },
    total=False,
)

# Top-level Evergreen report: a failure count plus the list of results.
Report = TypedDict("Report", {
    "failures": int,
    "results": List[Result],
})
+
+
+def _open_and_truncate_log_lines(log_file: pathlib.Path) -> List[str]:
+ with open(log_file) as fh:
+ lines = fh.read().splitlines()
+ for i, line in enumerate(lines):
+ if line == "scons: done reading SConscript files.":
+ offset = i
+ # if possible, also shave off the current and next line
+ # as they contain:
+ # scons: done reading SConscript files.
+ # scons: Building targets ...
+ # which is superfluous.
+ if len(lines) > i + 2:
+ offset = i + 2
+ return lines[offset:]
+
+ return lines
+
+
def _clean_log_file(log_file: pathlib.Path, dedup_lines: bool) -> str:
    """Return the log as one string: scons preamble stripped, optionally de-duplicated."""
    cleaned = _open_and_truncate_log_lines(log_file)
    return os.linesep.join(_dedup_lines(cleaned) if dedup_lines else cleaned)
+
+
+def _make_report(test_name: str, log_file_contents: str, exit_code: int) -> Report:
+ status = "pass" if exit_code == 0 else "fail"
+ return Report({
+ 'failures':
+ 0 if exit_code == 0 else 1, "results": [
+ Result({
+ "status": status, "exit_code": exit_code, "test_file": test_name,
+ "log_raw": log_file_contents
+ })
+ ]
+ })
+
+
+def _try_combine_reports(out: Report):
+ try:
+ with open("report.json") as fh:
+ report = json.load(fh)
+ out["results"] += report["results"]
+ out["failures"] += report["failures"]
+ except NameError:
+ pass
+ except IOError:
+ pass
+
+
+def _dedup_lines(lines: List[str]) -> List[str]:
+ return list(set(lines))
+
+
+def _put_report(out: Report):
+ with open("report.json", "w") as fh:
+ json.dump(out, fh)
+
+
@click.command()
@click.option("--test-name", required=True, type=str)
@click.option("--log-file", required=True, type=pathlib.Path)
@click.option("--exit-code", required=True, type=int)
@click.option("--dedup-lines", is_flag=True)
def main(test_name: str, log_file: pathlib.Path, exit_code: int, dedup_lines: bool):
    """Given a test name, path to log file and exit code, generate/append an Evergreen report.json."""
    # Clean the raw log, wrap it in a one-result report, fold in whatever an
    # earlier linter in this task already wrote, then persist the union.
    contents = _clean_log_file(log_file, dedup_lines)
    combined = _make_report(test_name, contents, exit_code)
    _try_combine_reports(combined)
    _put_report(combined)


if __name__ == "__main__":
    main()  # pylint: disable=no-value-for-parameter
diff --git a/buildscripts/tests/test_simple_report.py b/buildscripts/tests/test_simple_report.py
new file mode 100644
index 00000000000..456a9d9dc47
--- /dev/null
+++ b/buildscripts/tests/test_simple_report.py
@@ -0,0 +1,109 @@
+# pylint: disable=missing-function-docstring,missing-class-docstring
+"""Simple_report test."""
+import unittest
+import random
+import textwrap
+import sys
+import os
+from unittest.mock import patch, mock_open
+
+from click.testing import CliRunner
+
+import buildscripts.simple_report
+
+
def ns(name):  # pylint: disable=invalid-name
    """Qualify *name* into the simple_report module namespace for patching."""
    return "buildscripts.simple_report." + name
+
+
def fix_newlines(string: str) -> str:
    """Rejoin *string*'s lines on os.linesep so expectations match CRLF on Windows."""
    lines = string.splitlines()
    return os.linesep.join(lines)
+
+
class TestSimpleReport(unittest.TestCase):
    """Drive simple_report's click CLI end to end with all file I/O mocked out."""

    def __init__(self, *args, **kwargs):
        super(TestSimpleReport, self).__init__(*args, **kwargs)
        # The seed is printed by test_trivial_report so a failing randomized
        # run can be reproduced exactly.
        self.seed = random.randrange(sys.maxsize)
        self.rng = random.Random(self.seed)

    def test_trivial_report(self):
        """Run test 100x with randomly generated error codes."""
        print(f"TestSimpleReport.test_trivial_report seed: {self.seed}")
        for _ in range(0, 100):
            self._test_trivial_report()  # pylint: disable=no-value-for-parameter

    @patch(ns("_try_combine_reports"))
    @patch(ns("_clean_log_file"))
    @patch(ns("_put_report"))
    def _test_trivial_report(self, mock_put_report, mock_clean_log_file, _mock_try_combine_reports):
        """Invoke the CLI once with a random exit code; pass/fail must track it."""
        # 0..254 -- presumably 255 is avoided deliberately (shell-reserved);
        # NOTE(review): confirm with the original author.
        exit_code = self.rng.randint(0, 254)
        print(f"Trying exit code: {exit_code}")
        mock_clean_log_file.return_value = "I'm a little test log, short and stdout."
        runner = CliRunner()
        result = runner.invoke(
            buildscripts.simple_report.main,
            ["--test-name", "potato", "--log-file", "test.log", "--exit-code",
             str(exit_code)])
        # The report handed to the (mocked) writer is its first positional arg.
        report = mock_put_report.call_args[0][0]
        results = mock_put_report.call_args[0][0]["results"]
        if exit_code == 0:
            self.assertEqual(results[0]["status"], "pass")
            self.assertEqual(report["failures"], 0)
        else:
            self.assertEqual(results[0]["status"], "fail")
            self.assertEqual(report["failures"], 1)
        # The CLI itself must exit 0 regardless of the test's exit code.
        self.assertEqual(result.exit_code, 0)

    @patch(ns("_try_combine_reports"))
    @patch(ns("_put_report"))
    def test_truncate_scons(self, mock_put_report, _mock_try_combine_reports):
        """A log containing the scons marker is truncated to the interesting tail."""
        exit_code = 0
        data = fix_newlines(
            textwrap.dedent("""\
TO BE TRUNCATED
TO BE TRUNCATED
TO BE TRUNCATED
TO BE TRUNCATED
scons: done reading SConscript files.
scons: Building targets ...
interesting part"""))

        # mock_open feeds the canned log to simple_report's open() call.
        with patch("builtins.open", mock_open(read_data=data)) as _mock_file:
            runner = CliRunner()
            result = runner.invoke(
                buildscripts.simple_report.main,
                ["--test-name", "potato", "--log-file", "test.log", "--exit-code",
                 str(exit_code)])
            report = mock_put_report.call_args[0][0]
            results = mock_put_report.call_args[0][0]["results"]
            self.assertEqual(results[0]["status"], "pass")
            # Everything before (and including) the scons preamble is dropped.
            self.assertEqual(results[0]["log_raw"], "interesting part")
            self.assertEqual(report["failures"], 0)
            self.assertEqual(result.exit_code, 0)

    @patch(ns("_try_combine_reports"))
    @patch(ns("_put_report"))
    def test_non_scons_log(self, mock_put_report, _mock_try_combine_reports):
        """A log without the scons marker is passed through untouched."""
        exit_code = 0
        data = fix_newlines(
            textwrap.dedent("""\
*NOT* TO BE TRUNCATED
*NOT* TO BE TRUNCATED
*NOT* TO BE TRUNCATED
*NOT* TO BE TRUNCATED
interesting part"""))

        with patch("builtins.open", mock_open(read_data=data)) as _mock_file:
            runner = CliRunner()
            result = runner.invoke(
                buildscripts.simple_report.main,
                ["--test-name", "potato", "--log-file", "test.log", "--exit-code",
                 str(exit_code)])
            report = mock_put_report.call_args[0][0]
            results = mock_put_report.call_args[0][0]["results"]
            self.assertEqual(results[0]["status"], "pass")
            # No marker line, so the whole log survives verbatim.
            self.assertEqual(results[0]["log_raw"], data)
            self.assertEqual(report["failures"], 0)
            self.assertEqual(result.exit_code, 0)
diff --git a/buildscripts/yamllinters.sh b/buildscripts/yamllinters.sh
index 0867b61f299..8358ac2b9c7 100755
--- a/buildscripts/yamllinters.sh
+++ b/buildscripts/yamllinters.sh
@@ -3,7 +3,7 @@ set -o errexit
BASEDIR=$(dirname "$0")
cd "$BASEDIR/../"
-find buildscripts etc jstests -name '*.y*ml' -exec yamllint -c etc/yamllint_config.yml {} +
+yamllint -c etc/yamllint_config.yml buildscripts etc jstests
# TODO: SERVER-64923 re-enable YAML linters.
#evergreen evaluate ${evergreen_config_file_path} > etc/evaluated_evergreen.yml
diff --git a/etc/evergreen_yml_components/definitions.yml b/etc/evergreen_yml_components/definitions.yml
index 4ce66ac060d..1f71a030001 100644
--- a/etc/evergreen_yml_components/definitions.yml
+++ b/etc/evergreen_yml_components/definitions.yml
@@ -3004,6 +3004,8 @@ tasks:
- *f_expansions_write
- *kill_processes
- *cleanup_environment
+ - func: "set up venv"
+ - func: "upload pip requirements"
- command: subprocess.exec
type: test
params:
@@ -7413,6 +7415,7 @@ task_groups:
- func: "generate compile expansions"
teardown_group:
- func: "f_expansions_write"
+ - func: "attach report"
- func: "umount shared scons directory"
- func: "cleanup environment"
setup_task:
diff --git a/etc/pip/components/core.req b/etc/pip/components/core.req
index d73d60dcd9b..0c1692e10fd 100644
--- a/etc/pip/components/core.req
+++ b/etc/pip/components/core.req
@@ -4,3 +4,4 @@ pymongo >= 3.9, < 4.0
PyYAML >= 3.0.0, <= 6.0.0
types-PyYAML ~= 6.0.5
requests >= 2.0.0, <= 2.26.0
+typing-extensions >= 3.7.4
diff --git a/evergreen/lint_fuzzer_sanity_all.sh b/evergreen/lint_fuzzer_sanity_all.sh
index 16ae53b923c..97272296cc3 100644
--- a/evergreen/lint_fuzzer_sanity_all.sh
+++ b/evergreen/lint_fuzzer_sanity_all.sh
@@ -3,10 +3,15 @@ DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
cd src
-set -eo pipefail
+set -o pipefail
set -o verbose
add_nodejs_to_path
# Run parse-jsfiles on 50 files at a time with 32 processes in parallel.
-find "$PWD/jstests" "$PWD/src/mongo/db/modules/enterprise" -name "*.js" -print | xargs -P 32 -L 50 npm run --prefix jstestfuzz parse-jsfiles --
+find "$PWD/jstests" "$PWD/src/mongo/db/modules/enterprise" -name "*.js" -print | xargs -P 32 -L 50 npm run --prefix jstestfuzz parse-jsfiles -- | tee lint_fuzzer_sanity.log
+exit_code=$?
+
+activate_venv
+$python ./buildscripts/simple_report.py --test-name lint_fuzzer_sanity_all --log-file lint_fuzzer_sanity.log --exit-code $exit_code
+exit $exit_code
diff --git a/evergreen/lint_fuzzer_sanity_patch.sh b/evergreen/lint_fuzzer_sanity_patch.sh
index 11196aff1ea..70d58d9bfe5 100644
--- a/evergreen/lint_fuzzer_sanity_patch.sh
+++ b/evergreen/lint_fuzzer_sanity_patch.sh
@@ -3,9 +3,10 @@ DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
cd src
-set -eo pipefail
+set -o pipefail
set -o verbose
+activate_venv
add_nodejs_to_path
mkdir -p jstestfuzzinput jstestfuzzoutput
@@ -28,5 +29,7 @@ if [[ "$(ls -A $indir)" ]]; then
npm run --prefix jstestfuzz jstestfuzz -- --jsTestsDir $indir --out $outdir --numSourceFiles $num_files --numGeneratedFiles 50
# Run parse-jsfiles on 50 files at a time with 32 processes in parallel.
- ls -1 -d $outdir/* | xargs -P 32 -L 50 npm run --prefix jstestfuzz parse-jsfiles --
+ ls -1 -d $outdir/* | xargs -P 32 -L 50 npm run --prefix jstestfuzz parse-jsfiles -- | tee lint_fuzzer_sanity.log
+ exit_code=$?
+ $python ./buildscripts/simple_report.py --test-name lint_fuzzer_sanity_patch --log-file lint_fuzzer_sanity.log --exit-code $exit_code
fi
diff --git a/evergreen/lint_shellscripts.sh b/evergreen/lint_shellscripts.sh
index 23372cf8cdc..615f288c308 100644
--- a/evergreen/lint_shellscripts.sh
+++ b/evergreen/lint_shellscripts.sh
@@ -1,4 +1,14 @@
+DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
+. "$DIR/prelude.sh"
+
+set -o pipefail
+
cd src
PATH="/opt/shfmt/v3.2.4/bin:$PATH"
-./buildscripts/shellscripts-linters.sh
+./buildscripts/shellscripts-linters.sh | tee shellscripts.log
+exit_code=$?
+
+activate_venv
+$python ./buildscripts/simple_report.py --test-name shfmt --log-file shellscripts.log --exit-code $exit_code
+exit $exit_code
diff --git a/evergreen/lint_yaml.sh b/evergreen/lint_yaml.sh
index 73356b0a778..0610ce1db6c 100755
--- a/evergreen/lint_yaml.sh
+++ b/evergreen/lint_yaml.sh
@@ -1,7 +1,13 @@
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
. "$DIR/prelude.sh"
+set -o pipefail
+
cd src
activate_venv
-./buildscripts/yamllinters.sh
+./buildscripts/yamllinters.sh | tee yamllinters.log
+exit_code=$?
+
+$python ./buildscripts/simple_report.py --test-name yamllinters --log-file yamllinters.log --exit-code $exit_code
+exit $exit_code
diff --git a/evergreen/run_clang_tidy.sh b/evergreen/run_clang_tidy.sh
index 241385476b5..b1de18e730f 100755
--- a/evergreen/run_clang_tidy.sh
+++ b/evergreen/run_clang_tidy.sh
@@ -1,8 +1,14 @@
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
. "$DIR/prelude.sh"
-set -o errexit
+set -o pipefail
set -o verbose
cd src
-source buildscripts/clang_tidy.sh ${clang_tidy_toolchain}
+bash buildscripts/clang_tidy.sh ${clang_tidy_toolchain} | tee clang-tidy.log
+exit_code=$?
+
+activate_venv
+$python ./buildscripts/simple_report.py --test-name clang_tidy --log-file clang-tidy.log --exit-code $exit_code --dedup-lines
+echo $?
+exit $exit_code
diff --git a/evergreen/scons_lint.sh b/evergreen/scons_lint.sh
index 1cdf6ad9bf9..633ea98b11e 100755
--- a/evergreen/scons_lint.sh
+++ b/evergreen/scons_lint.sh
@@ -3,7 +3,7 @@ DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
cd src
-set -o errexit
+set -o pipefail
set -o verbose
activate_venv
@@ -18,4 +18,8 @@ export MYPY="$(
)"
echo "Found mypy executable at '$MYPY'"
export extra_flags=""
-eval ${compile_env} python3 ./buildscripts/scons.py ${compile_flags} $extra_flags --stack-size=1024 GITDIFFFLAGS="${revision}" REVISION="${revision}" ENTERPRISE_REV="${enterprise_rev}" ${targets}
+eval ${compile_env} python3 ./buildscripts/scons.py ${compile_flags} $extra_flags --stack-size=1024 GITDIFFFLAGS="${revision}" REVISION="${revision}" ENTERPRISE_REV="${enterprise_rev}" ${targets} | tee scons-lint.log
+exit_code=$?
+
+$python ./buildscripts/simple_report.py --test-name "${targets}" --log-file scons-lint.log --exit-code $exit_code
+exit $exit_code