summary refs log tree commit diff
diff options
context:
space:
mode:
authorBen Gamari <ben@smart-cactus.org>2021-03-19 11:00:05 -0400
committerBen Gamari <ben@smart-cactus.org>2021-03-19 19:58:52 -0400
commitd03d876185da2db9b397c58a383c0eac1892cafc (patch)
treef84725dcacb763322556fa6a392d3cb40b054f58
parent0a9866854384875e88406e495aba6594ac47799d (diff)
downloadhaskell-d03d876185da2db9b397c58a383c0eac1892cafc.tar.gz
gitlab-ci: Ignore performance improvements in marge jobs
Currently we have far too many merge failures due to cumulative performance improvements. Avoid this by accepting metric decreases in marge-bot jobs. Fixes #19562.
-rw-r--r--.gitlab-ci.yml4
-rwxr-xr-x.gitlab/ci.sh14
-rw-r--r--testsuite/driver/testlib.py25
3 files changed, 29 insertions, 14 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 4af4247481..5fa73c896b 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -496,7 +496,7 @@ validate-x86_64-darwin:
TEST_ENV: "x86_64-darwin"
BUILD_FLAVOUR: "validate"
# Due to #19025
- RUNTEST_ARGS: "--ignore-perf-failures=all"
+ IGNORE_PERF_FAILURES: "all"
after_script:
- cp -Rf $HOME/.cabal cabal-cache
- .gitlab/ci.sh clean
@@ -523,7 +523,7 @@ validate-x86_64-darwin:
CONFIGURE_ARGS: --with-intree-gmp
TEST_ENV: "x86_64-darwin-hadrian"
# Due to #19025
- RUNTEST_ARGS: "--ignore-perf-failures=all"
+ IGNORE_PERF_FAILURES: "all"
BIN_DIST_NAME: "ghc-x86_64-apple-darwin"
BUILD_FLAVOUR: "validate"
script:
diff --git a/.gitlab/ci.sh b/.gitlab/ci.sh
index 8cebcd1d71..df43491c5b 100755
--- a/.gitlab/ci.sh
+++ b/.gitlab/ci.sh
@@ -46,6 +46,9 @@ Environment variables affecting both build systems:
VERBOSE Set to non-empty for verbose build output
RUNTEST_ARGS Arguments passed to runtest.py
MSYSTEM (Windows-only) Which platform to build for (MINGW64 or MINGW32).
+ IGNORE_PERF_FAILURES
+ Whether to ignore perf failures (one of "increases",
+ "decreases", or "all")
Environment variables determining build configuration of Make system:
@@ -562,6 +565,17 @@ if [ -n "$CROSS_TARGET" ]; then
target_triple="$CROSS_TARGET"
fi
+# Ignore performance improvements in @marge-bot batches.
+# See #19562.
+if [ "$GITLAB_CI_BRANCH" == "wip/marge_bot_batch_merge_job" ]; then
+ if [ -z "$IGNORE_PERF_FAILURES" ]; then
+ IGNORE_PERF_FAILURES="decreases"
+ fi
+fi
+if [ -n "$IGNORE_PERF_FAILURES" ]; then
+ RUNTEST_ARGS="--ignore-perf-failures=$IGNORE_PERF_FAILURES"
+fi
+
set_toolchain_paths
case $1 in
diff --git a/testsuite/driver/testlib.py b/testsuite/driver/testlib.py
index 4ccc790e37..b5d2744e58 100644
--- a/testsuite/driver/testlib.py
+++ b/testsuite/driver/testlib.py
@@ -1511,7 +1511,7 @@ def check_stats(name: TestName,
field_match = re.search('\\("' + stat_file_metric + '", "([0-9]+)"\\)', stats_file_contents)
if field_match is None:
print('Failed to find metric: ', stat_file_metric)
- metric_result = failBecause('no such stats metric')
+ result = failBecause('no such stats metric')
else:
val = field_match.group(1)
assert val is not None
@@ -1528,24 +1528,25 @@ def check_stats(name: TestName,
perf_change = MetricChange.NewMetric
else:
tolerance_dev = baseline_and_dev.deviation
- (change, metric_result) = Perf.check_stats_change(
+ (perf_change, metric_result) = Perf.check_stats_change(
perf_stat,
baseline,
tolerance_dev,
config.allowed_perf_changes,
config.verbose >= 4)
+
t.metrics.append(PerfMetric(change=perf_change, stat=perf_stat, baseline=baseline))
- # If any metric fails then the test fails.
- # Note, the remaining metrics are still run so that
- # a complete list of changes can be presented to the user.
- if metric_result.passFail == 'fail':
- if config.ignore_perf_increases and change == MetricChange.Increase:
- pass
- elif config.ignore_perf_decreases and change == MetricChange.Decrease:
- pass
- else:
- result = metric_result
+ # If any metric fails then the test fails.
+ # Note, the remaining metrics are still run so that
+ # a complete list of changes can be presented to the user.
+ if metric_result.passFail == 'fail':
+ if config.ignore_perf_increases and perf_change == MetricChange.Increase:
+ metric_result = passed()
+ elif config.ignore_perf_decreases and perf_change == MetricChange.Decrease:
+ metric_result = passed()
+
+ result = metric_result
return result