From a9a9d32b210f3fa7dee8d12bbd2748e152207ea6 Mon Sep 17 00:00:00 2001
From: "James E. Blair"
Date: Thu, 21 Jul 2022 14:05:57 -0700
Subject: Fix duplicate setResult calls in deduplicated builds

We call item.setResult after a build is complete so that the queue
item can do any internal processing necessary (for example, prepare
data structures for child jobs, or move the build to the retry_builds
list).

In the case of deduplicated builds, we should do that for every queue
item the build participates in since each item may have a different
job graph.

We were not correctly identifying other builds of deduplicated jobs
and so in the case of a dependency cycle we would call setResult on
jobs of the same name in that cycle regardless of whether they were
deduplicated.  This corrects the issue and adds a test to detect that
case.

Change-Id: I4c47beb2709a77c21c11c97f1d1a8f743d4bf5eb
---
 tests/base.py                               |  6 +++
 tests/fixtures/layouts/job-dedup-false.yaml |  1 +
 tests/unit/test_circular_dependencies.py    | 67 +++++++++++++++++++++++++++
 3 files changed, 74 insertions(+)
(limited to 'tests')

diff --git a/tests/base.py b/tests/base.py
index 5a85ea0d7..1ea699a51 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -3091,6 +3091,8 @@ class FakeBuild(object):
         self.paused = False
         self.aborted = False
         self.requeue = False
+        self.should_fail = False
+        self.should_retry = False
         self.created = time.time()
         self.changes = None
         items = self.parameters['zuul']['items']
@@ -3162,6 +3164,8 @@ class FakeBuild(object):
         return result
 
     def shouldFail(self):
+        if self.should_fail:
+            return True
         changes = self.executor_server.fail_tests.get(self.name, [])
         for change in changes:
             if self.hasChanges(change):
@@ -3169,6 +3173,8 @@ class FakeBuild(object):
         return False
 
     def shouldRetry(self):
+        if self.should_retry:
+            return True
         entries = self.executor_server.retry_tests.get(self.name, [])
         for entry in entries:
             if self.hasChanges(entry['change']):
diff --git a/tests/fixtures/layouts/job-dedup-false.yaml b/tests/fixtures/layouts/job-dedup-false.yaml
index 2c0e6ee2e..9254f0b41 100644
--- a/tests/fixtures/layouts/job-dedup-false.yaml
+++ b/tests/fixtures/layouts/job-dedup-false.yaml
@@ -39,6 +39,7 @@
 - job:
     name: common-job
     deduplicate: false
+    pre-run: playbooks/pre.yaml
     required-projects:
       - org/project1
       - org/project2
diff --git a/tests/unit/test_circular_dependencies.py b/tests/unit/test_circular_dependencies.py
index d70293f17..202248d09 100644
--- a/tests/unit/test_circular_dependencies.py
+++ b/tests/unit/test_circular_dependencies.py
@@ -1703,6 +1703,73 @@ class TestGerritCircularDependencies(ZuulTestCase):
         ], ordered=False)
         self.assertEqual(len(self.fake_nodepool.history), 3)
 
+    @simple_layout('layouts/job-dedup-false.yaml')
+    def test_job_deduplication_false_failed_job(self):
+        # Test that if we are *not* deduplicating jobs, we don't
+        # duplicate the result on two different builds.
+        # The way we check that is to retry the common-job between two
+        # items, but only once, and only on one item.  The other item
+        # should be unaffected.
+        self.executor_server.hold_jobs_in_build = True
+        A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+        B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
+
+        # A <-> B
+        A.data["commitMessage"] = "{}\n\nDepends-On: {}\n".format(
+            A.subject, B.data["url"]
+        )
+        B.data["commitMessage"] = "{}\n\nDepends-On: {}\n".format(
+            B.subject, A.data["url"]
+        )
+
+        A.addApproval('Code-Review', 2)
+        B.addApproval('Code-Review', 2)
+
+        self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+        self.fake_gerrit.addEvent(B.addApproval('Approved', 1))
+
+        # If we don't make sure these jobs finish first, then one of
+        # the items may complete before the other and cause Zuul to
+        # abort the project*-job on the other item (with a "bundle
+        # failed to merge" error).
+        self.waitUntilSettled()
+        for build in self.builds:
+            if build.name == 'common-job' and build.project == 'org/project1':
+                break
+        else:
+            raise Exception("Unable to find build")
+        build.should_retry = True
+
+        # Store a reference to the queue items so we can inspect their
+        # internal attributes later to double check the retry build
+        # count is correct.
+        tenant = self.scheds.first.sched.abide.tenants.get('tenant-one')
+        pipeline = tenant.layout.pipelines['gate']
+        items = pipeline.getAllItems()
+        self.assertEqual(len(items), 2)
+
+        self.executor_server.release('project1-job')
+        self.executor_server.release('project2-job')
+        self.waitUntilSettled()
+        self.executor_server.hold_jobs_in_build = False
+        self.executor_server.release()
+        self.waitUntilSettled()
+
+        self.assertEqual(A.data['status'], 'MERGED')
+        self.assertEqual(B.data['status'], 'MERGED')
+        self.assertHistory([
+            dict(name="project2-job", result="SUCCESS", changes="2,1 1,1"),
+            dict(name="project1-job", result="SUCCESS", changes="2,1 1,1"),
+            dict(name="common-job", result=None, changes="2,1 1,1"),
+            dict(name="common-job", result="SUCCESS", changes="2,1 1,1"),
+            dict(name="common-job", result="SUCCESS", changes="2,1 1,1"),
+        ], ordered=False)
+        self.assertEqual(len(self.fake_nodepool.history), 5)
+        self.assertEqual(items[0].change.project.name, 'org/project2')
+        self.assertEqual(len(items[0].current_build_set.retry_builds), 0)
+        self.assertEqual(items[1].change.project.name, 'org/project1')
+        self.assertEqual(len(items[1].current_build_set.retry_builds), 1)
+
     @simple_layout('layouts/job-dedup-noop.yaml')
     def test_job_deduplication_noop(self):
         # Test that we don't deduplicate noop (there's no good reason
-- 
cgit v1.2.1
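
Note (not part of the patch): the diff above is limited to 'tests', so the scheduler-side fix the commit message describes is not shown.  The sketch below is a hypothetical illustration of that behavior, assuming names such as report_completed_build(), getBuilds(), and the uuid comparison; it is not the patch's actual implementation.

# Illustrative sketch only.  When a build completes, the result should
# be propagated to every queue item that shares that exact build (a
# deduplicated build is the same build, with the same uuid, referenced
# from several items in a dependency cycle).  Matching on job name
# alone would also pick up non-deduplicated jobs of the same name,
# which is the bug the new test exercises.
def report_completed_build(completed_build, queue_items):
    for item in queue_items:
        for build in item.current_build_set.getBuilds():
            if build.uuid == completed_build.uuid:
                # Each item gets its own setResult call so it can
                # update its own job graph (child jobs, retry_builds).
                item.setResult(build)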