diff options
-rw-r--r-- | releasenotes/notes/implied-branch-exact-match-74cd3f227a2f6361.yaml | 10 | ||||
-rw-r--r-- | releasenotes/notes/project-regex-update-dcc183d923a6acdd.yaml | 8 | ||||
-rw-r--r-- | tests/fixtures/config/two-tenant/exclude-all.yaml | 20 | ||||
-rw-r--r-- | tests/unit/test_connection.py | 109 | ||||
-rw-r--r-- | tests/unit/test_scheduler.py | 52 | ||||
-rw-r--r-- | tests/unit/test_v3.py | 23 | ||||
-rw-r--r-- | zuul/change_matcher.py | 16 | ||||
-rw-r--r-- | zuul/configloader.py | 24 | ||||
-rw-r--r-- | zuul/driver/sql/sqlreporter.py | 176 | ||||
-rw-r--r-- | zuul/manager/__init__.py | 6 | ||||
-rw-r--r-- | zuul/model.py | 39 | ||||
-rw-r--r-- | zuul/scheduler.py | 6 | ||||
-rw-r--r-- | zuul/zk/event_queues.py | 2 |
13 files changed, 379 insertions, 112 deletions
diff --git a/releasenotes/notes/implied-branch-exact-match-74cd3f227a2f6361.yaml b/releasenotes/notes/implied-branch-exact-match-74cd3f227a2f6361.yaml new file mode 100644 index 000000000..caca449fb --- /dev/null +++ b/releasenotes/notes/implied-branch-exact-match-74cd3f227a2f6361.yaml @@ -0,0 +1,10 @@ +--- +fixes: + - | + Projects and jobs on branches whose names have special characters + in regular expressions could fail to match changes as intended. + Implied branch matchers automatically generated from branch names + are now treated as requiring exact matches. Any user-specified + branch matcher (including in :attr:`job.branches` and + :attr:`pragma.implied-branches`) is still treated as a regular + expression. diff --git a/releasenotes/notes/project-regex-update-dcc183d923a6acdd.yaml b/releasenotes/notes/project-regex-update-dcc183d923a6acdd.yaml new file mode 100644 index 000000000..87c0e3fb1 --- /dev/null +++ b/releasenotes/notes/project-regex-update-dcc183d923a6acdd.yaml @@ -0,0 +1,8 @@ +--- +fixes: + - | + Project name regex handling has been updated to return all possible + matches. Previously if there were collisions with short names it was an + error. The point of the regex system is to simplify configuration and + apply configs to all projects that match. Collisions don't impact this + behavior so we don't need to raise an error in these cases. 
diff --git a/tests/fixtures/config/two-tenant/exclude-all.yaml b/tests/fixtures/config/two-tenant/exclude-all.yaml new file mode 100644 index 000000000..60938c366 --- /dev/null +++ b/tests/fixtures/config/two-tenant/exclude-all.yaml @@ -0,0 +1,20 @@ +- tenant: + name: tenant-one + source: + gerrit: + config-projects: + - common-config + untrusted-projects: + - org/project1 + - org/project2: + include: [] + +- tenant: + name: tenant-two + source: + gerrit: + config-projects: + - common-config + untrusted-projects: + - org/project1 + - org/project2 diff --git a/tests/unit/test_connection.py b/tests/unit/test_connection.py index d2956fa03..18db91789 100644 --- a/tests/unit/test_connection.py +++ b/tests/unit/test_connection.py @@ -345,6 +345,115 @@ class TestSQLConnectionMysql(ZuulTestCase): check_results() + def test_sql_intermittent_failure(self): + # Test that if we fail to create the buildset at the start of + # a build, we still create it at the end. + self.executor_server.hold_jobs_in_build = True + + A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A') + self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1)) + self.waitUntilSettled() + + # Delete the buildset + with self.scheds.first.connections.getSqlConnection().\ + engine.connect() as conn: + + result = conn.execute(sa.text( + f"delete from {self.expected_table_prefix}zuul_build;")) + result = conn.execute(sa.text( + f"delete from {self.expected_table_prefix}zuul_buildset;")) + result = conn.execute(sa.text("commit;")) + + self.executor_server.hold_jobs_in_build = False + self.executor_server.release() + self.waitUntilSettled() + + # Check the results + tenant = self.scheds.first.sched.abide.tenants.get("tenant-one") + pipeline = tenant.layout.pipelines['check'] + reporter = self.scheds.first.connections.getSqlReporter( + pipeline) + + with self.scheds.first.connections.getSqlConnection().\ + engine.connect() as conn: + + result = conn.execute( + 
sa.sql.select([reporter.connection.zuul_buildset_table]) + ) + + buildsets = result.fetchall() + self.assertEqual(1, len(buildsets)) + buildset0 = buildsets[0] + + buildset0_builds = conn.execute( + sa.sql.select( + [reporter.connection.zuul_build_table] + ).where( + reporter.connection.zuul_build_table.c.buildset_id == + buildset0['id'] + ) + ).fetchall() + + self.assertEqual(len(buildset0_builds), 5) + + def test_sql_retry(self): + # Exercise the SQL retry code + reporter = self.scheds.first.sched.sql + reporter.test_buildset_retries = 0 + reporter.test_build_retries = 0 + reporter.retry_delay = 0 + + orig_createBuildset = reporter._createBuildset + orig_createBuild = reporter._createBuild + + def _createBuildset(*args, **kw): + ret = orig_createBuildset(*args, **kw) + if reporter.test_buildset_retries == 0: + reporter.test_buildset_retries += 1 + raise sa.exc.DBAPIError(None, None, None) + return ret + + def _createBuild(*args, **kw): + ret = orig_createBuild(*args, **kw) + if reporter.test_build_retries == 0: + reporter.test_build_retries += 1 + raise sa.exc.DBAPIError(None, None, None) + return ret + + reporter._createBuildset = _createBuildset + reporter._createBuild = _createBuild + + A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A') + self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1)) + self.waitUntilSettled() + + # Check the results + + self.assertEqual(reporter.test_buildset_retries, 1) + self.assertEqual(reporter.test_build_retries, 1) + + with self.scheds.first.connections.getSqlConnection().\ + engine.connect() as conn: + + result = conn.execute( + sa.sql.select([reporter.connection.zuul_buildset_table]) + ) + + buildsets = result.fetchall() + self.assertEqual(1, len(buildsets)) + buildset0 = buildsets[0] + + buildset0_builds = conn.execute( + sa.sql.select( + [reporter.connection.zuul_build_table] + ).where( + reporter.connection.zuul_build_table.c.buildset_id == + buildset0['id'] + ) + ).fetchall() + + 
self.assertEqual(len(buildset0_builds), 5) + class TestSQLConnectionPostgres(TestSQLConnectionMysql): config_file = 'zuul-sql-driver-postgres.conf' diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py index c596028de..3cb2507c4 100644 --- a/tests/unit/test_scheduler.py +++ b/tests/unit/test_scheduler.py @@ -8375,6 +8375,58 @@ class TestPipelineSupersedes(ZuulTestCase): ], ordered=False) +class TestSchedulerExcludeAll(ZuulTestCase): + tenant_config_file = 'config/two-tenant/exclude-all.yaml' + + def test_skip_reconfig_exclude_all(self): + """Test that we don't trigger a reconfiguration for a tenant + when the changed project excludes all config.""" + config = textwrap.dedent( + """ + - job: + name: project2-test + parent: test + + - project: + check: + jobs: + - project2-test + """) + file_dict = {'zuul.yaml': config} + A = self.fake_gerrit.addFakeChange('org/project2', 'master', 'A', + files=file_dict) + self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1)) + self.waitUntilSettled() + self.assertHistory([ + dict(name='project2-test', result='SUCCESS', changes='1,1'), + ]) + + sched = self.scheds.first.sched + tenant_one_layout_state = sched.local_layout_state["tenant-one"] + tenant_two_layout_state = sched.local_layout_state["tenant-two"] + + A.setMerged() + self.fake_gerrit.addEvent(A.getChangeMergedEvent()) + self.waitUntilSettled() + + # We don't expect a reconfiguration for tenant-one as it excludes + # all config of org/project2. + self.assertEqual(sched.local_layout_state["tenant-one"], + tenant_one_layout_state) + # As tenant-two includes the config from org/project2, the merge of + # change A should have triggered a reconfig. 
+ self.assertGreater(sched.local_layout_state["tenant-two"], + tenant_two_layout_state) + + B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B') + self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1)) + self.waitUntilSettled() + self.assertHistory([ + dict(name='project2-test', result='SUCCESS', changes='1,1'), + dict(name='project2-test', result='SUCCESS', changes='2,1'), + ]) + + class TestReportBuildPage(ZuulTestCase): tenant_config_file = 'config/build-page/main.yaml' diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py index 279c1d1ca..5e21cd57f 100644 --- a/tests/unit/test_v3.py +++ b/tests/unit/test_v3.py @@ -867,6 +867,29 @@ class TestBranchMismatch(ZuulTestCase): dict(name='project-test2', result='SUCCESS', changes='1,1'), ], ordered=False) + def test_implied_branch_matcher_regex(self): + # Test that branch names that look like regexes aren't treated + # as such for implied branch matchers. + + # Make sure the parent job repo is branched, so it gets + # implied branch matchers. + + # The '+' in the branch name would cause the change not to + # match if it is treated as a regex. 
+ self.create_branch('org/project1', 'feature/foo-0.1.12+bar') + self.fake_gerrit.addEvent( + self.fake_gerrit.getFakeBranchCreatedEvent( + 'org/project1', 'feature/foo-0.1.12+bar')) + + A = self.fake_gerrit.addFakeChange( + 'org/project1', 'feature/foo-0.1.12+bar', 'A') + self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1)) + self.waitUntilSettled() + + self.assertHistory([ + dict(name='project-test1', result='SUCCESS', changes='1,1'), + ], ordered=False) + class TestBranchRef(ZuulTestCase): tenant_config_file = 'config/branch-ref/main.yaml' diff --git a/zuul/change_matcher.py b/zuul/change_matcher.py index 7d045894d..139c7b6a9 100644 --- a/zuul/change_matcher.py +++ b/zuul/change_matcher.py @@ -58,13 +58,13 @@ class ProjectMatcher(AbstractChangeMatcher): class BranchMatcher(AbstractChangeMatcher): - fullmatch = False + exactmatch = False def matches(self, change): if hasattr(change, 'branch'): # an implied branch matcher must do a fullmatch to work correctly - if self.fullmatch: - if self.regex.fullmatch(change.branch): + if self.exactmatch: + if self._regex == change.branch: return True else: if self.regex.match(change.branch): @@ -74,13 +74,17 @@ class BranchMatcher(AbstractChangeMatcher): return True if hasattr(change, 'containing_branches'): for branch in change.containing_branches: - if self.regex.fullmatch(branch): - return True + if self.exactmatch: + if self._regex == branch: + return True + else: + if self.regex.fullmatch(branch): + return True return False class ImpliedBranchMatcher(BranchMatcher): - fullmatch = True + exactmatch = True class FileMatcher(AbstractChangeMatcher): diff --git a/zuul/configloader.py b/zuul/configloader.py index 425e52f36..29e7a2ede 100644 --- a/zuul/configloader.py +++ b/zuul/configloader.py @@ -23,6 +23,7 @@ import subprocess import voluptuous as vs +from zuul import change_matcher from zuul import model from zuul.lib import yamlutil as yaml import zuul.manager.dependent @@ -447,7 +448,14 @@ class PragmaParser(object): 
branches = conf.get('implied-branches') if branches is not None: - source_context.implied_branches = as_list(branches) + # This is a BranchMatcher (not an ImpliedBranchMatcher) + # because as user input, we allow/expect this to be + # regular expressions. Only truly implicit branch names + # (automatically generated from source file branches) are + # ImpliedBranchMatchers. + source_context.implied_branches = [ + change_matcher.BranchMatcher(x) + for x in as_list(branches)] class NodeSetParser(object): @@ -910,14 +918,13 @@ class JobParser(object): job.allowed_projects = frozenset(allowed) branches = None - implied = False if 'branches' in conf: - branches = as_list(conf['branches']) + branches = [change_matcher.BranchMatcher(x) + for x in as_list(conf['branches'])] elif not project_pipeline: branches = self.pcontext.getImpliedBranches(job.source_context) - implied = True if branches: - job.setBranchMatcher(branches, implied=implied) + job.setBranchMatcher(branches) if 'files' in conf: job.setFileMatcher(as_list(conf['files'])) if 'irrelevant-files' in conf: @@ -1117,7 +1124,8 @@ class ProjectParser(object): project_config.setImpliedBranchMatchers([]) else: project_config.setImpliedBranchMatchers( - [source_context.branch]) + [change_matcher.ImpliedBranchMatcher( + source_context.branch)]) # Add templates for name in conf.get('templates', []): @@ -1464,7 +1472,7 @@ class ParseContext(object): if source_context.implied_branch_matchers is True: if source_context.implied_branches is not None: return source_context.implied_branches - return [source_context.branch] + return [change_matcher.ImpliedBranchMatcher(source_context.branch)] elif source_context.implied_branch_matchers is False: return None @@ -1482,7 +1490,7 @@ class ParseContext(object): if source_context.implied_branches is not None: return source_context.implied_branches - return [source_context.branch] + return [change_matcher.ImpliedBranchMatcher(source_context.branch)] class TenantParser(object): diff 
--git a/zuul/driver/sql/sqlreporter.py b/zuul/driver/sql/sqlreporter.py index ea0f35945..edce622f8 100644 --- a/zuul/driver/sql/sqlreporter.py +++ b/zuul/driver/sql/sqlreporter.py @@ -18,6 +18,8 @@ import logging import time import voluptuous as v +import sqlalchemy.exc + from zuul.lib.result_data import get_artifacts_from_result_data from zuul.reporter import BaseReporter @@ -27,6 +29,8 @@ class SQLReporter(BaseReporter): name = 'sql' log = logging.getLogger("zuul.SQLReporter") + retry_count = 3 + retry_delay = 5 def _getBuildData(self, item, job, build): (result, _) = item.formatJobResult(job, build) @@ -41,10 +45,7 @@ class SQLReporter(BaseReporter): tz=datetime.timezone.utc) return result, build.log_url, start, end - def reportBuildsetStart(self, buildset): - """Create the initial buildset entry in the db""" - if not buildset.uuid: - return + def _createBuildset(self, db, buildset): event_id = None event_timestamp = None item = buildset.item @@ -52,25 +53,39 @@ class SQLReporter(BaseReporter): event_id = getattr(item.event, "zuul_event_id", None) event_timestamp = datetime.datetime.fromtimestamp( item.event.timestamp, tz=datetime.timezone.utc) + db_buildset = db.createBuildSet( + uuid=buildset.uuid, + tenant=item.pipeline.tenant.name, + pipeline=item.pipeline.name, + project=item.change.project.name, + change=getattr(item.change, 'number', None), + patchset=getattr(item.change, 'patchset', None), + ref=getattr(item.change, 'ref', ''), + oldrev=getattr(item.change, 'oldrev', ''), + newrev=getattr(item.change, 'newrev', ''), + branch=getattr(item.change, 'branch', ''), + zuul_ref=buildset.ref, + ref_url=item.change.url, + event_id=event_id, + event_timestamp=event_timestamp, + ) + return db_buildset - with self.connection.getSession() as db: - db_buildset = db.createBuildSet( - uuid=buildset.uuid, - tenant=item.pipeline.tenant.name, - pipeline=item.pipeline.name, - project=item.change.project.name, - change=getattr(item.change, 'number', None), - 
patchset=getattr(item.change, 'patchset', None), - ref=getattr(item.change, 'ref', ''), - oldrev=getattr(item.change, 'oldrev', ''), - newrev=getattr(item.change, 'newrev', ''), - branch=getattr(item.change, 'branch', ''), - zuul_ref=buildset.ref, - ref_url=item.change.url, - event_id=event_id, - event_timestamp=event_timestamp, - ) - return db_buildset + def reportBuildsetStart(self, buildset): + """Create the initial buildset entry in the db""" + if not buildset.uuid: + return + + for retry_count in range(self.retry_count): + try: + with self.connection.getSession() as db: + return self._createBuildset(db, buildset) + except sqlalchemy.exc.DBAPIError: + if retry_count < self.retry_count - 1: + self.log.error("Unable to create buildset, will retry") + time.sleep(self.retry_delay) + else: + self.log.exception("Unable to create buildset") def reportBuildsetEnd(self, buildset, action, final, result=None): if not buildset.uuid: @@ -80,55 +95,79 @@ class SQLReporter(BaseReporter): buildset.item, with_jobs=False, action=action) else: message = None - with self.connection.getSession() as db: - db_buildset = db.getBuildset( - tenant=buildset.item.pipeline.tenant.name, uuid=buildset.uuid) - if db_buildset: - db_buildset.result = buildset.result or result - db_buildset.message = message - end_time = db_buildset.first_build_start_time - for build in db_buildset.builds: - if (build.end_time and end_time - and build.end_time > end_time): - end_time = build.end_time - db_buildset.last_build_end_time = end_time - elif buildset.builds: - self.log.error("Unable to find buildset " - f"{buildset.uuid} in DB") + for retry_count in range(self.retry_count): + try: + with self.connection.getSession() as db: + db_buildset = db.getBuildset( + tenant=buildset.item.pipeline.tenant.name, + uuid=buildset.uuid) + if not db_buildset: + db_buildset = self._createBuildset(db, buildset) + db_buildset.result = buildset.result or result + db_buildset.message = message + end_time = 
db_buildset.first_build_start_time + for build in db_buildset.builds: + if (build.end_time and end_time + and build.end_time > end_time): + end_time = build.end_time + db_buildset.last_build_end_time = end_time + return + except sqlalchemy.exc.DBAPIError: + if retry_count < self.retry_count - 1: + self.log.error("Unable to update buildset, will retry") + time.sleep(self.retry_delay) + else: + self.log.exception("Unable to update buildset") def reportBuildStart(self, build): - with self.connection.getSession() as db: - db_build = self._createBuild(db, build) - return db_build + for retry_count in range(self.retry_count): + try: + with self.connection.getSession() as db: + db_build = self._createBuild(db, build) + return db_build + except sqlalchemy.exc.DBAPIError: + if retry_count < self.retry_count - 1: + self.log.error("Unable to create build, will retry") + time.sleep(self.retry_delay) + else: + self.log.exception("Unable to create build") def reportBuildEnd(self, build, tenant, final): - with self.connection.getSession() as db: - db_build = db.getBuild(tenant=tenant, uuid=build.uuid) - if not db_build: - db_build = self._createBuild(db, build) - - end_time = build.end_time or time.time() - end = datetime.datetime.fromtimestamp( - end_time, tz=datetime.timezone.utc) - - db_build.result = build.result - db_build.end_time = end - db_build.log_url = build.log_url - db_build.error_detail = build.error_detail - db_build.final = final - db_build.held = build.held - - for provides in build.job.provides: - db_build.createProvides(name=provides) - - for artifact in get_artifacts_from_result_data( - build.result_data, - logger=self.log): - if 'metadata' in artifact: - artifact['metadata'] = json.dumps(artifact['metadata']) - db_build.createArtifact(**artifact) - - return db_build + for retry_count in range(self.retry_count): + try: + with self.connection.getSession() as db: + db_build = db.getBuild(tenant=tenant, uuid=build.uuid) + if not db_build: + db_build = 
self._createBuild(db, build) + + end_time = build.end_time or time.time() + end = datetime.datetime.fromtimestamp( + end_time, tz=datetime.timezone.utc) + + db_build.result = build.result + db_build.end_time = end + db_build.log_url = build.log_url + db_build.error_detail = build.error_detail + db_build.final = final + db_build.held = build.held + + for provides in build.job.provides: + db_build.createProvides(name=provides) + + for artifact in get_artifacts_from_result_data( + build.result_data, + logger=self.log): + if 'metadata' in artifact: + artifact['metadata'] = json.dumps( + artifact['metadata']) + db_build.createArtifact(**artifact) + return db_build + except sqlalchemy.exc.DBAPIError: + if retry_count < self.retry_count - 1: + self.log.error("Unable to update build, will retry") + time.sleep(self.retry_delay) + else: + self.log.exception("Unable to update build") def _createBuild(self, db, build): start_time = build.start_time or time.time() @@ -137,6 +176,9 @@ class SQLReporter(BaseReporter): buildset = build.build_set db_buildset = db.getBuildset( tenant=buildset.item.pipeline.tenant.name, uuid=buildset.uuid) + if not db_buildset: + self.log.warning("Creating missing buildset %s", buildset.uuid) + db_buildset = self._createBuildset(db, buildset) if db_buildset.first_build_start_time is None: db_buildset.first_build_start_time = start diff --git a/zuul/manager/__init__.py b/zuul/manager/__init__.py index 84897b3df..418274859 100644 --- a/zuul/manager/__init__.py +++ b/zuul/manager/__init__.py @@ -1583,9 +1583,9 @@ class PipelineManager(metaclass=ABCMeta): request_id, cached=True) if not node_request: continue - if node_request.fulfilled: - # If the node request is already fulfilled, there is no - # need to update the relative priority. + if node_request.state != model.STATE_REQUESTED: + # If the node request was locked and accepted by a + # provider, we can no longer update the relative priority. 
continue if node_request.relative_priority != priority: self.sched.nodepool.reviseRequest( diff --git a/zuul/model.py b/zuul/model.py index 0f632264f..0bd5cb88b 100644 --- a/zuul/model.py +++ b/zuul/model.py @@ -2695,16 +2695,9 @@ class Job(ConfigObject): # Return the raw branch list that match this job return self._branches - def setBranchMatcher(self, branches, implied=False): + def setBranchMatcher(self, matchers): # Set the branch matcher to match any of the supplied branches - self._branches = branches - matchers = [] - if implied: - matcher_class = change_matcher.ImpliedBranchMatcher - else: - matcher_class = change_matcher.BranchMatcher - for branch in branches: - matchers.append(matcher_class(branch)) + self._branches = [x._regex for x in matchers] self.branch_matcher = change_matcher.MatchAny(matchers) def setFileMatcher(self, files): @@ -6376,16 +6369,13 @@ class ProjectConfig(ConfigObject): r.queue_name = self.queue_name return r - def setImpliedBranchMatchers(self, branches): - if len(branches) == 0: + def setImpliedBranchMatchers(self, matchers): + if len(matchers) == 0: self.branch_matcher = None - elif len(branches) > 1: - matchers = [change_matcher.ImpliedBranchMatcher(branch) - for branch in branches] + elif len(matchers) > 1: self.branch_matcher = change_matcher.MatchAny(matchers) else: - self.branch_matcher = change_matcher.ImpliedBranchMatcher( - branches[0]) + self.branch_matcher = matchers[0] def changeMatches(self, change): if self.branch_matcher and not self.branch_matcher.matches(change): @@ -7506,8 +7496,7 @@ class Tenant(object): :arg str regex: The regex to match :returns: A list of tuples (trusted, project) describing the found - projects. Raises an exception if the same project name is found - several times across multiple hostnames. + projects. 
""" matcher = re2.compile(regex) @@ -7515,18 +7504,12 @@ class Tenant(object): result = [] for name, hostname_dict in self.projects.items(): - if matcher.fullmatch(name): - # validate that this match is unambiguous - values = list(hostname_dict.values()) - if len(values) > 1: - raise Exception("Project name '%s' is ambiguous, " - "please fully qualify the project " - "with a hostname. Valid hostnames " - "are %s." % (name, hostname_dict.keys())) - projects.append(values[0]) + projects.extend(hostname_dict.values()) else: - # try to match canonical project names + # It is possible for the regex to match specific connection + # prefixes. Check these more specific names if we didn't add + # all of the possible canonical names already. for project in hostname_dict.values(): if matcher.fullmatch(project.canonical_name): projects.append(project) diff --git a/zuul/scheduler.py b/zuul/scheduler.py index ea8717222..0654c3d5d 100644 --- a/zuul/scheduler.py +++ b/zuul/scheduler.py @@ -2214,6 +2214,12 @@ class Scheduler(threading.Thread): reconfigure_tenant = False + # If all config classes are excluded for this project we don't need + # to trigger a reconfiguration. + tpc = tenant.project_configs.get(project.canonical_name) + if tpc and not tpc.load_classes: + reconfigure_tenant = False + # But if the event is that branch protection status has # changed, do reconfigure. if (event.isBranchProtectionChanged()): diff --git a/zuul/zk/event_queues.py b/zuul/zk/event_queues.py index 06127b041..0e5cba987 100644 --- a/zuul/zk/event_queues.py +++ b/zuul/zk/event_queues.py @@ -47,6 +47,8 @@ MANAGEMENT_EVENT_TYPE_MAP = { "PromoteEvent": model.PromoteEvent, "ReconfigureEvent": model.ReconfigureEvent, "TenantReconfigureEvent": model.TenantReconfigureEvent, + "PipelinePostConfigEvent": model.PipelinePostConfigEvent, + "PipelineSemaphoreReleaseEvent": model.PipelineSemaphoreReleaseEvent, } # /zuul/events/tenant TENANT_ROOT |