Diffstat (limited to 'zuul')
-rw-r--r--  zuul/ansible/base/callback/zuul_stream.py |   5
-rwxr-xr-x  zuul/cmd/client.py                         |   6
-rw-r--r--  zuul/configloader.py                       | 112
-rw-r--r--  zuul/driver/gerrit/gerritconnection.py     |  50
-rw-r--r--  zuul/driver/gerrit/gerritmodel.py          | 385
-rw-r--r--  zuul/driver/gerrit/gerritsource.py         |  39
-rw-r--r--  zuul/driver/gerrit/gerrittrigger.py        |   7
-rw-r--r--  zuul/driver/github/githubconnection.py     |  16
-rw-r--r--  zuul/driver/github/githubmodel.py          | 441
-rw-r--r--  zuul/driver/github/githubreporter.py       |  13
-rw-r--r--  zuul/driver/github/githubsource.py         |  28
-rw-r--r--  zuul/driver/github/githubtrigger.py        |   7
-rw-r--r--  zuul/driver/mqtt/mqttconnection.py         |  15
-rw-r--r--  zuul/driver/sql/sqlconnection.py           |  39
-rw-r--r--  zuul/executor/server.py                    |   4
-rw-r--r--  zuul/merger/merger.py                      |   6
-rw-r--r--  zuul/model.py                              |  60
-rw-r--r--  zuul/zk/job_request_queue.py               |   2
18 files changed, 742 insertions(+), 493 deletions(-)
diff --git a/zuul/ansible/base/callback/zuul_stream.py b/zuul/ansible/base/callback/zuul_stream.py
index b5c14691b..3f886c797 100644
--- a/zuul/ansible/base/callback/zuul_stream.py
+++ b/zuul/ansible/base/callback/zuul_stream.py
@@ -44,6 +44,7 @@ import time
from ansible.plugins.callback import default
from ansible.module_utils._text import to_text
+from ansible.module_utils.parsing.convert_bool import boolean
from zuul.ansible import paths
from zuul.ansible import logconfig
@@ -333,6 +334,10 @@ class CallbackModule(default.CallbackModule):
if (ip in ('localhost', '127.0.0.1')):
# Don't try to stream from localhost
continue
+ if boolean(play_vars[host].get(
+ 'zuul_console_disabled', False)):
+ # The user has told us not to even try
+ continue
if play_vars[host].get('ansible_connection') in ('winrm',):
# The winrm connections don't support streaming for now
continue
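
The new check lets a user set zuul_console_disabled on a host to opt out of log streaming, with Ansible's boolean() helper interpreting the usual truthy spellings. A rough standalone sketch of the behaviour; interpret_bool and the play_vars data below are invented stand-ins, not Zuul or Ansible code:

# Illustrative stand-in for Ansible's boolean() helper; the accepted
# truthy spellings below cover the common inventory values.
def interpret_bool(value):
    if isinstance(value, bool):
        return value
    return str(value).strip().lower() in ('y', 'yes', 'on', '1', 'true', 't')

# Hypothetical per-host vars as the callback would see them.
play_vars = {
    'node1': {'zuul_console_disabled': 'yes'},
    'node2': {},
}

for host, hostvars in play_vars.items():
    if interpret_bool(hostvars.get('zuul_console_disabled', False)):
        print("%s: skipping log streaming" % host)  # user opted out
    else:
        print("%s: streaming console log" % host)
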
diff --git a/zuul/cmd/client.py b/zuul/cmd/client.py
index 62e51ac3f..6fa20c7c4 100755
--- a/zuul/cmd/client.py
+++ b/zuul/cmd/client.py
@@ -540,6 +540,10 @@ class Client(zuul.cmd.ZuulApp):
cmd_prune_database.add_argument(
'--older-than',
help='relative time (e.g., "24h" or "180d")')
+ cmd_prune_database.add_argument(
+ '--batch-size',
+ default=10000,
+ help='transaction batch size')
cmd_prune_database.set_defaults(func=self.prune_database)
return parser
@@ -1049,7 +1053,7 @@ class Client(zuul.cmd.ZuulApp):
cutoff = parse_cutoff(now, args.before, args.older_than)
self.configure_connections(source_only=False, require_sql=True)
connection = self.connections.getSqlConnection()
- connection.deleteBuildsets(cutoff)
+ connection.deleteBuildsets(cutoff, args.batch_size)
sys.exit(0)
diff --git a/zuul/configloader.py b/zuul/configloader.py
index fe22fe0f8..4f472cb4e 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -2151,21 +2151,26 @@ class TenantParser(object):
for future in futures:
future.result()
- try:
- self._processCatJobs(abide, tenant, loading_errors, jobs,
- min_ltimes)
- except Exception:
- self.log.exception("Error processing cat jobs, canceling")
- for job in jobs:
+ for i, job in enumerate(jobs, start=1):
+ try:
try:
- self.log.debug("Canceling cat job %s", job)
+ self._processCatJob(abide, tenant, loading_errors, job,
+ min_ltimes)
+ except TimeoutError:
self.merger.cancel(job)
- except Exception:
- self.log.exception("Unable to cancel job %s", job)
- if not ignore_cat_exception:
- raise
- if not ignore_cat_exception:
- raise
+ raise
+ except Exception:
+ self.log.exception("Error processing cat job")
+ if not ignore_cat_exception:
+ # Cancel remaining jobs
+ for cancel_job in jobs[i:]:
+ self.log.debug("Canceling cat job %s", cancel_job)
+ try:
+ self.merger.cancel(cancel_job)
+ except Exception:
+ self.log.exception(
+ "Unable to cancel job %s", cancel_job)
+ raise
def _cacheTenantYAMLBranch(self, abide, tenant, loading_errors, min_ltimes,
tpc, project, branch, jobs):
@@ -2234,49 +2239,48 @@ class TenantParser(object):
job.source_context = source_context
jobs.append(job)
- def _processCatJobs(self, abide, tenant, loading_errors, jobs, min_ltimes):
+ def _processCatJob(self, abide, tenant, loading_errors, job, min_ltimes):
# Called at the end of _cacheTenantYAML after all cat jobs
# have been submitted
- for job in jobs:
- self.log.debug("Waiting for cat job %s" % (job,))
- res = job.wait(self.merger.git_timeout)
- if not res:
- # We timed out
- raise Exception("Cat job %s timed out; consider setting "
- "merger.git_timeout in zuul.conf" % (job,))
- if not job.updated:
- raise Exception("Cat job %s failed" % (job,))
- self.log.debug("Cat job %s got files %s" %
- (job, job.files.keys()))
-
- self._updateUnparsedBranchCache(abide, tenant, job.source_context,
- job.files, loading_errors,
- job.ltime, min_ltimes)
-
- # Save all config files in Zookeeper (not just for the current tpc)
- files_cache = self.unparsed_config_cache.getFilesCache(
- job.source_context.project_canonical_name,
- job.source_context.branch)
- with self.unparsed_config_cache.writeLock(
- job.source_context.project_canonical_name):
- # Prevent files cache ltime from going backward
- if files_cache.ltime >= job.ltime:
- self.log.info(
- "Discarding job %s result since the files cache was "
- "updated in the meantime", job)
- continue
- # Since the cat job returns all required config files
- # for ALL tenants the project is a part of, we can
- # clear the whole cache and then populate it with the
- # updated content.
- files_cache.clear()
- for fn, content in job.files.items():
- # Cache file in Zookeeper
- if content is not None:
- files_cache[fn] = content
- files_cache.setValidFor(job.extra_config_files,
- job.extra_config_dirs,
- job.ltime)
+ self.log.debug("Waiting for cat job %s" % (job,))
+ res = job.wait(self.merger.git_timeout)
+ if not res:
+ # We timed out
+ raise TimeoutError(f"Cat job {job} timed out; consider setting "
+ "merger.git_timeout in zuul.conf")
+ if not job.updated:
+ raise Exception("Cat job %s failed" % (job,))
+ self.log.debug("Cat job %s got files %s" %
+ (job, job.files.keys()))
+
+ self._updateUnparsedBranchCache(abide, tenant, job.source_context,
+ job.files, loading_errors,
+ job.ltime, min_ltimes)
+
+ # Save all config files in Zookeeper (not just for the current tpc)
+ files_cache = self.unparsed_config_cache.getFilesCache(
+ job.source_context.project_canonical_name,
+ job.source_context.branch)
+ with self.unparsed_config_cache.writeLock(
+ job.source_context.project_canonical_name):
+ # Prevent files cache ltime from going backward
+ if files_cache.ltime >= job.ltime:
+ self.log.info(
+ "Discarding job %s result since the files cache was "
+ "updated in the meantime", job)
+ return
+ # Since the cat job returns all required config files
+ # for ALL tenants the project is a part of, we can
+ # clear the whole cache and then populate it with the
+ # updated content.
+ files_cache.clear()
+ for fn, content in job.files.items():
+ # Cache file in Zookeeper
+ if content is not None:
+ files_cache[fn] = content
+ files_cache.setValidFor(job.extra_config_files,
+ job.extra_config_dirs,
+ job.ltime)
def _updateUnparsedBranchCache(self, abide, tenant, source_context, files,
loading_errors, ltime, min_ltimes):
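
The reworked loop above handles each cat job as it is waited on and, if a fatal error occurs, cancels only the jobs that have not been processed yet before re-raising. A minimal sketch of that pattern, with process_job and cancel_job as invented stand-ins for the merger calls:

def process_all(jobs, process_job, cancel_job, ignore_errors=False):
    # Handle jobs in submission order; on a fatal error, cancel the
    # jobs we have not reached yet and re-raise.
    for i, job in enumerate(jobs, start=1):
        try:
            process_job(job)
        except Exception:
            if ignore_errors:
                continue
            for pending in jobs[i:]:
                cancel_job(pending)
            raise
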
diff --git a/zuul/driver/gerrit/gerritconnection.py b/zuul/driver/gerrit/gerritconnection.py
index f871671aa..990b7b235 100644
--- a/zuul/driver/gerrit/gerritconnection.py
+++ b/zuul/driver/gerrit/gerritconnection.py
@@ -1182,9 +1182,34 @@ class GerritConnection(ZKChangeCacheMixin, ZKBranchCacheMixin, BaseConnection):
return True
if change.wip:
return False
- if change.missing_labels <= set(allow_needs):
- return True
- return False
+ if change.missing_labels > set(allow_needs):
+ self.log.debug("Unable to merge due to "
+ "missing labels: %s", change.missing_labels)
+ return False
+ for sr in change.submit_requirements:
+ if sr.get('status') == 'UNSATISFIED':
+ # Otherwise, we don't care and should skip.
+
+ # We're going to look at each unsatisfied submit
+ # requirement, and if one of the involved labels is an
+ # "allow_needs" label, we will assume that Zuul may be
+ # able to take an action which can cause the
+ # requirement to be satisfied, and we will ignore it.
+ # Otherwise, it is likely a requirement that Zuul can
+ # not alter in which case the requirement should stand
+ # and block merging.
+ result = sr.get("submittability_expression_result", {})
+ expression = result.get("expression", '')
+ expr_contains_allow = False
+ for allow in allow_needs:
+ if f'label:{allow}' in expression:
+ expr_contains_allow = True
+ break
+ if not expr_contains_allow:
+ self.log.debug("Unable to merge due to "
+ "submit requirement: %s", sr)
+ return False
+ return True
def getProjectOpenChanges(self, project: Project) -> List[GerritChange]:
# This is a best-effort function in case Gerrit is unable to return
@@ -1443,13 +1468,22 @@ class GerritConnection(ZKChangeCacheMixin, ZKBranchCacheMixin, BaseConnection):
return data
def queryChangeHTTP(self, number, event=None):
- data = self.get('changes/%s?o=DETAILED_ACCOUNTS&o=CURRENT_REVISION&'
- 'o=CURRENT_COMMIT&o=CURRENT_FILES&o=LABELS&'
- 'o=DETAILED_LABELS' % (number,))
+ query = ('changes/%s?o=DETAILED_ACCOUNTS&o=CURRENT_REVISION&'
+ 'o=CURRENT_COMMIT&o=CURRENT_FILES&o=LABELS&'
+ 'o=DETAILED_LABELS' % (number,))
+ if self.version >= (3, 5, 0):
+ query += '&o=SUBMIT_REQUIREMENTS'
+ data = self.get(query)
related = self.get('changes/%s/revisions/%s/related' % (
number, data['current_revision']))
- files = self.get('changes/%s/revisions/%s/files?parent=1' % (
- number, data['current_revision']))
+
+ files_query = 'changes/%s/revisions/%s/files' % (
+ number, data['current_revision'])
+
+ if data['revisions'][data['current_revision']]['commit']['parents']:
+ files_query += '?parent=1'
+
+ files = self.get(files_query)
return data, related, files
def queryChange(self, number, event=None):
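
A rough, self-contained sketch of the submit-requirement check added above; the requirement dict mirrors the shape Gerrit 3.5+ returns for o=SUBMIT_REQUIREMENTS, but the values here are invented for illustration:

def blocked_by_submit_requirements(submit_requirements, allow_needs):
    # An UNSATISFIED requirement only blocks merging if none of the
    # labels Zuul itself may still supply (allow_needs) appear in its
    # submittability expression.
    for sr in submit_requirements:
        if sr.get('status') != 'UNSATISFIED':
            continue
        result = sr.get('submittability_expression_result', {})
        expression = result.get('expression', '')
        if not any('label:%s' % allow in expression for allow in allow_needs):
            return True
    return False

# Invented example data:
reqs = [{'status': 'UNSATISFIED',
         'submittability_expression_result': {
             'expression': 'label:Verified=MAX AND -label:Verified=MIN'}}]
print(blocked_by_submit_requirements(reqs, {'Verified'}))  # False: Zuul can still vote
print(blocked_by_submit_requirements(reqs, {'Approved'}))  # True: requirement blocks merging
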
diff --git a/zuul/driver/gerrit/gerritmodel.py b/zuul/driver/gerrit/gerritmodel.py
index 0ac3e7f9d..7b57ec934 100644
--- a/zuul/driver/gerrit/gerritmodel.py
+++ b/zuul/driver/gerrit/gerritmodel.py
@@ -19,8 +19,8 @@ import urllib.parse
import dateutil.parser
from zuul.model import EventFilter, RefFilter
-from zuul.model import Change, TriggerEvent
-from zuul.driver.util import time_to_seconds
+from zuul.model import Change, TriggerEvent, FalseWithReason
+from zuul.driver.util import time_to_seconds, to_list
from zuul import exceptions
EMPTY_GIT_REF = '0' * 40 # git sha of all zeros, used during creates/deletes
@@ -34,6 +34,7 @@ class GerritChange(Change):
self.wip = None
self.approvals = []
self.missing_labels = set()
+ self.submit_requirements = []
self.commit = None
self.zuul_query_ltime = None
@@ -52,6 +53,7 @@ class GerritChange(Change):
"wip": self.wip,
"approvals": self.approvals,
"missing_labels": list(self.missing_labels),
+ "submit_requirements": self.submit_requirements,
"commit": self.commit,
"zuul_query_ltime": self.zuul_query_ltime,
})
@@ -64,6 +66,7 @@ class GerritChange(Change):
self.wip = data["wip"]
self.approvals = data["approvals"]
self.missing_labels = set(data["missing_labels"])
+ self.submit_requirements = data.get("submit_requirements", [])
self.commit = data.get("commit")
self.zuul_query_ltime = data.get("zuul_query_ltime")
@@ -189,6 +192,7 @@ class GerritChange(Change):
if 'approved' in label_data:
continue
self.missing_labels.add(label_name)
+ self.submit_requirements = data.get('submit_requirements', [])
self.open = data['status'] == 'NEW'
self.status = data['status']
self.wip = data.get('work_in_progress', False)
@@ -243,110 +247,32 @@ class GerritTriggerEvent(TriggerEvent):
return 'change-abandoned' == self.type
-class GerritApprovalFilter(object):
- def __init__(self, required_approvals=[], reject_approvals=[]):
- self._required_approvals = copy.deepcopy(required_approvals)
- self.required_approvals = self._tidy_approvals(
- self._required_approvals)
- self._reject_approvals = copy.deepcopy(reject_approvals)
- self.reject_approvals = self._tidy_approvals(self._reject_approvals)
-
- def _tidy_approvals(self, approvals):
- for a in approvals:
- for k, v in a.items():
- if k == 'username':
- a['username'] = re.compile(v)
- elif k == 'email':
- a['email'] = re.compile(v)
- elif k == 'newer-than':
- a[k] = time_to_seconds(v)
- elif k == 'older-than':
- a[k] = time_to_seconds(v)
- return approvals
-
- def _match_approval_required_approval(self, rapproval, approval):
- # Check if the required approval and approval match
- if 'description' not in approval:
- return False
- now = time.time()
- by = approval.get('by', {})
- for k, v in rapproval.items():
- if k == 'username':
- if (not v.search(by.get('username', ''))):
- return False
- elif k == 'email':
- if (not v.search(by.get('email', ''))):
- return False
- elif k == 'newer-than':
- t = now - v
- if (approval['grantedOn'] < t):
- return False
- elif k == 'older-than':
- t = now - v
- if (approval['grantedOn'] >= t):
- return False
- else:
- if not isinstance(v, list):
- v = [v]
- if (approval['description'] != k or
- int(approval['value']) not in v):
- return False
- return True
-
- def matchesApprovals(self, change):
- if self.required_approvals or self.reject_approvals:
- if not hasattr(change, 'number'):
- # Not a change, no reviews
- return False
- if self.required_approvals and not change.approvals:
- # A change with no approvals can not match
- return False
-
- # TODO(jhesketh): If we wanted to optimise this slightly we could
- # analyse both the REQUIRE and REJECT filters by looping over the
- # approvals on the change and keeping track of what we have checked
- # rather than needing to loop on the change approvals twice
- return (self.matchesRequiredApprovals(change) and
- self.matchesNoRejectApprovals(change))
-
- def matchesRequiredApprovals(self, change):
- # Check if any approvals match the requirements
- for rapproval in self.required_approvals:
- matches_rapproval = False
- for approval in change.approvals:
- if self._match_approval_required_approval(rapproval, approval):
- # We have a matching approval so this requirement is
- # fulfilled
- matches_rapproval = True
- break
- if not matches_rapproval:
- return False
- return True
-
- def matchesNoRejectApprovals(self, change):
- # Check to make sure no approvals match a reject criteria
- for rapproval in self.reject_approvals:
- for approval in change.approvals:
- if self._match_approval_required_approval(rapproval, approval):
- # A reject approval has been matched, so we reject
- # immediately
- return False
- # To get here no rejects can have been matched so we should be good to
- # queue
- return True
-
-
-class GerritEventFilter(EventFilter, GerritApprovalFilter):
+class GerritEventFilter(EventFilter):
def __init__(self, connection_name, trigger, types=[], branches=[],
refs=[], event_approvals={}, comments=[], emails=[],
usernames=[], required_approvals=[], reject_approvals=[],
- uuid=None, scheme=None, ignore_deletes=True):
+ uuid=None, scheme=None, ignore_deletes=True,
+ require=None, reject=None):
EventFilter.__init__(self, connection_name, trigger)
- GerritApprovalFilter.__init__(self,
- required_approvals=required_approvals,
- reject_approvals=reject_approvals)
+ # TODO: Backwards compat, remove after 9.x:
+ if required_approvals and require is None:
+ require = {'approval': required_approvals}
+ if reject_approvals and reject is None:
+ reject = {'approval': reject_approvals}
+
+ if require:
+ self.require_filter = GerritRefFilter.requiresFromConfig(
+ connection_name, require)
+ else:
+ self.require_filter = None
+
+ if reject:
+ self.reject_filter = GerritRefFilter.rejectFromConfig(
+ connection_name, reject)
+ else:
+ self.reject_filter = None
self._types = types
self._branches = branches
@@ -384,18 +310,16 @@ class GerritEventFilter(EventFilter, GerritApprovalFilter):
if self.event_approvals:
ret += ' event_approvals: %s' % ', '.join(
['%s:%s' % a for a in self.event_approvals.items()])
- if self.required_approvals:
- ret += ' required_approvals: %s' % ', '.join(
- ['%s' % a for a in self._required_approvals])
- if self.reject_approvals:
- ret += ' reject_approvals: %s' % ', '.join(
- ['%s' % a for a in self._reject_approvals])
if self._comments:
ret += ' comments: %s' % ', '.join(self._comments)
if self._emails:
ret += ' emails: %s' % ', '.join(self._emails)
if self._usernames:
ret += ' usernames: %s' % ', '.join(self._usernames)
+ if self.require_filter:
+ ret += ' require: %s' % repr(self.require_filter)
+ if self.reject_filter:
+ ret += ' reject: %s' % repr(self.reject_filter)
ret += '>'
return ret
@@ -410,7 +334,8 @@ class GerritEventFilter(EventFilter, GerritApprovalFilter):
if etype.match(event.type):
matches_type = True
if self.types and not matches_type:
- return False
+ return FalseWithReason("Types %s do not match %s" % (
+ self.types, event.type))
if event.type == 'pending-check':
if self.uuid and event.uuid != self.uuid:
@@ -424,7 +349,8 @@ class GerritEventFilter(EventFilter, GerritApprovalFilter):
if branch.match(event.branch):
matches_branch = True
if self.branches and not matches_branch:
- return False
+ return FalseWithReason("Branches %s do not match %s" % (
+ self.branches, event.branch))
# refs are ORed
matches_ref = False
@@ -433,11 +359,12 @@ class GerritEventFilter(EventFilter, GerritApprovalFilter):
if ref.match(event.ref):
matches_ref = True
if self.refs and not matches_ref:
- return False
+ return FalseWithReason(
+ "Refs %s do not match %s" % (self.refs, event.ref))
if self.ignore_deletes and event.newrev == EMPTY_GIT_REF:
# If the updated ref has an empty git sha (all 0s),
# then the ref is being deleted
- return False
+ return FalseWithReason("Ref deletion events are ignored")
# comments are ORed
matches_comment_re = False
@@ -451,7 +378,8 @@ class GerritEventFilter(EventFilter, GerritApprovalFilter):
comment_re.search(comment)):
matches_comment_re = True
if self.comments and not matches_comment_re:
- return False
+ return FalseWithReason("Comments %s do not match %s" % (
+ self.comments, event.patchsetcomments))
# We better have an account provided by Gerrit to do
# email filtering.
@@ -464,7 +392,8 @@ class GerritEventFilter(EventFilter, GerritApprovalFilter):
email_re.search(account_email)):
matches_email_re = True
if self.emails and not matches_email_re:
- return False
+                return FalseWithReason("Emails %s do not match %s" % (
+                    self.emails, account_email))
# usernames are ORed
account_username = event.account.get('username')
@@ -474,7 +403,8 @@ class GerritEventFilter(EventFilter, GerritApprovalFilter):
username_re.search(account_username)):
matches_username_re = True
if self.usernames and not matches_username_re:
- return False
+                return FalseWithReason("Usernames %s do not match %s" % (
+                    self.usernames, account_username))
# approvals are ANDed
for category, value in self.event_approvals.items():
@@ -484,29 +414,73 @@ class GerritEventFilter(EventFilter, GerritApprovalFilter):
int(eapp['value']) == int(value)):
matches_approval = True
if not matches_approval:
- return False
+ return FalseWithReason("Approvals %s do not match %s" % (
+ self.event_approvals, event.approvals))
- # required approvals are ANDed (reject approvals are ORed)
- if not self.matchesApprovals(change):
- return False
+ if self.require_filter:
+ require_filter_result = self.require_filter.matches(change)
+ if not require_filter_result:
+ return require_filter_result
+
+ if self.reject_filter:
+ reject_filter_result = self.reject_filter.matches(change)
+ if not reject_filter_result:
+ return reject_filter_result
return True
-class GerritRefFilter(RefFilter, GerritApprovalFilter):
- def __init__(self, connection_name, open=None, current_patchset=None,
- wip=None, statuses=[], required_approvals=[],
- reject_approvals=[]):
+class GerritRefFilter(RefFilter):
+ def __init__(self, connection_name,
+ open=None, reject_open=None,
+ current_patchset=None, reject_current_patchset=None,
+ wip=None, reject_wip=None,
+ statuses=[], reject_statuses=[],
+ required_approvals=[], reject_approvals=[]):
RefFilter.__init__(self, connection_name)
- GerritApprovalFilter.__init__(self,
- required_approvals=required_approvals,
- reject_approvals=reject_approvals)
-
- self.open = open
- self.wip = wip
- self.current_patchset = current_patchset
+ self._required_approvals = copy.deepcopy(required_approvals)
+ self.required_approvals = self._tidy_approvals(
+ self._required_approvals)
+ self._reject_approvals = copy.deepcopy(reject_approvals)
+ self.reject_approvals = self._tidy_approvals(self._reject_approvals)
self.statuses = statuses
+ self.reject_statuses = reject_statuses
+
+ if reject_open is not None:
+ self.open = not reject_open
+ else:
+ self.open = open
+ if reject_wip is not None:
+ self.wip = not reject_wip
+ else:
+ self.wip = wip
+ if reject_current_patchset is not None:
+ self.current_patchset = not reject_current_patchset
+ else:
+ self.current_patchset = current_patchset
+
+ @classmethod
+ def requiresFromConfig(cls, connection_name, config):
+ return cls(
+ connection_name=connection_name,
+ open=config.get('open'),
+ current_patchset=config.get('current-patchset'),
+ wip=config.get('wip'),
+ statuses=to_list(config.get('status')),
+ required_approvals=to_list(config.get('approval')),
+ )
+
+ @classmethod
+ def rejectFromConfig(cls, connection_name, config):
+ return cls(
+ connection_name=connection_name,
+ reject_open=config.get('open'),
+ reject_current_patchset=config.get('current-patchset'),
+ reject_wip=config.get('wip'),
+ reject_statuses=to_list(config.get('status')),
+ reject_approvals=to_list(config.get('approval')),
+ )
def __repr__(self):
ret = '<GerritRefFilter'
@@ -514,10 +488,14 @@ class GerritRefFilter(RefFilter, GerritApprovalFilter):
ret += ' connection_name: %s' % self.connection_name
if self.open is not None:
ret += ' open: %s' % self.open
+ if self.wip is not None:
+ ret += ' wip: %s' % self.wip
if self.current_patchset is not None:
ret += ' current-patchset: %s' % self.current_patchset
if self.statuses:
ret += ' statuses: %s' % ', '.join(self.statuses)
+ if self.reject_statuses:
+ ret += ' reject-statuses: %s' % ', '.join(self.reject_statuses)
if self.required_approvals:
ret += (' required-approvals: %s' %
str(self.required_approvals))
@@ -529,37 +507,138 @@ class GerritRefFilter(RefFilter, GerritApprovalFilter):
return ret
def matches(self, change):
+ if self.open is not None:
+ # if a "change" has no number, it's not a change, but a push
+ # and cannot possibly pass this test.
+ if hasattr(change, 'number'):
+ if self.open != change.open:
+ return FalseWithReason(
+ "Change does not match open requirement")
+ else:
+ return FalseWithReason("Ref is not a Change")
- filters = [
- {
- "required": self.open,
- "value": change.open
- },
- {
- "required": self.current_patchset,
- "value": change.is_current_patchset
- },
- {
- "required": self.wip,
- "value": change.wip
- },
- ]
- configured = filter(lambda x: x["required"] is not None, filters)
-
- # if a "change" has no number, it's not a change, but a push
- # and cannot possibly pass this test.
- if hasattr(change, 'number'):
- if any(map(lambda x: x["required"] != x["value"], configured)):
- return False
- elif configured:
- return False
+ if self.current_patchset is not None:
+ # if a "change" has no number, it's not a change, but a push
+ # and cannot possibly pass this test.
+ if hasattr(change, 'number'):
+ if self.current_patchset != change.is_current_patchset:
+ return FalseWithReason(
+ "Change does not match current patchset requirement")
+ else:
+ return FalseWithReason("Ref is not a Change")
+
+ if self.wip is not None:
+ # if a "change" has no number, it's not a change, but a push
+ # and cannot possibly pass this test.
+ if hasattr(change, 'number'):
+ if self.wip != change.wip:
+ return FalseWithReason(
+ "Change does not match WIP requirement")
+ else:
+ return FalseWithReason("Ref is not a Change")
if self.statuses:
if change.status not in self.statuses:
- return False
+ return FalseWithReason(
+ "Required statuses %s do not match %s" % (
+ self.statuses, change.status))
+ if self.reject_statuses:
+ if change.status in self.reject_statuses:
+ return FalseWithReason(
+ "Reject statuses %s match %s" % (
+ self.reject_statuses, change.status))
# required approvals are ANDed (reject approvals are ORed)
- if not self.matchesApprovals(change):
+ matches_approvals_result = self.matchesApprovals(change)
+ if not matches_approvals_result:
+ return matches_approvals_result
+
+ return True
+
+ def _tidy_approvals(self, approvals):
+ for a in approvals:
+ for k, v in a.items():
+ if k == 'username':
+ a['username'] = re.compile(v)
+ elif k == 'email':
+ a['email'] = re.compile(v)
+ elif k == 'newer-than':
+ a[k] = time_to_seconds(v)
+ elif k == 'older-than':
+ a[k] = time_to_seconds(v)
+ return approvals
+
+ def _match_approval_required_approval(self, rapproval, approval):
+ # Check if the required approval and approval match
+ if 'description' not in approval:
return False
+ now = time.time()
+ by = approval.get('by', {})
+ for k, v in rapproval.items():
+ if k == 'username':
+ if (not v.search(by.get('username', ''))):
+ return False
+ elif k == 'email':
+ if (not v.search(by.get('email', ''))):
+ return False
+ elif k == 'newer-than':
+ t = now - v
+ if (approval['grantedOn'] < t):
+ return False
+ elif k == 'older-than':
+ t = now - v
+ if (approval['grantedOn'] >= t):
+ return False
+ else:
+ if not isinstance(v, list):
+ v = [v]
+ if (approval['description'] != k or
+ int(approval['value']) not in v):
+ return False
+ return True
+ def matchesApprovals(self, change):
+ if self.required_approvals or self.reject_approvals:
+ if not hasattr(change, 'number'):
+ # Not a change, no reviews
+ return FalseWithReason("Ref is not a Change")
+ if self.required_approvals and not change.approvals:
+ # A change with no approvals can not match
+                return FalseWithReason("Approvals %s do not match %s" % (
+                    self.required_approvals, change.approvals))
+
+ # TODO(jhesketh): If we wanted to optimise this slightly we could
+ # analyse both the REQUIRE and REJECT filters by looping over the
+ # approvals on the change and keeping track of what we have checked
+ # rather than needing to loop on the change approvals twice
+ return (self.matchesRequiredApprovals(change) and
+ self.matchesNoRejectApprovals(change))
+
+ def matchesRequiredApprovals(self, change):
+ # Check if any approvals match the requirements
+ for rapproval in self.required_approvals:
+ matches_rapproval = False
+ for approval in change.approvals:
+ if self._match_approval_required_approval(rapproval, approval):
+ # We have a matching approval so this requirement is
+ # fulfilled
+ matches_rapproval = True
+ break
+ if not matches_rapproval:
+ return FalseWithReason(
+ "Required approvals %s do not match %s" % (
+ self.required_approvals, change.approvals))
+ return True
+
+ def matchesNoRejectApprovals(self, change):
+ # Check to make sure no approvals match a reject criteria
+ for rapproval in self.reject_approvals:
+ for approval in change.approvals:
+ if self._match_approval_required_approval(rapproval, approval):
+ # A reject approval has been matched, so we reject
+ # immediately
+ return FalseWithReason("Reject approvals %s match %s" % (
+ self.reject_approvals, change.approvals))
+ # To get here no rejects can have been matched so we should be good to
+ # queue
return True
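
Throughout this refactor, bare False returns become FalseWithReason so a caller can log why a filter did not match while still treating the result as falsy. A minimal sketch of that idea; the class below is an illustrative stand-in, not Zuul's implementation in zuul.model:

class FalseWithReason:
    """Falsy result that remembers why a check failed (illustrative stand-in)."""

    def __init__(self, reason):
        self.reason = reason

    def __bool__(self):
        return False

    def __str__(self):
        return self.reason


def matches_status(change_status, required_statuses):
    if required_statuses and change_status not in required_statuses:
        return FalseWithReason("Required statuses %s do not match %s" % (
            required_statuses, change_status))
    return True


result = matches_status('ABANDONED', ['NEW'])
if not result:
    print("Filter did not match:", result)  # prints the reason, not just False
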
diff --git a/zuul/driver/gerrit/gerritsource.py b/zuul/driver/gerrit/gerritsource.py
index f42e93254..4e7a32b83 100644
--- a/zuul/driver/gerrit/gerritsource.py
+++ b/zuul/driver/gerrit/gerritsource.py
@@ -20,7 +20,7 @@ from urllib.parse import urlparse
from zuul.source import BaseSource
from zuul.model import Project
from zuul.driver.gerrit.gerritmodel import GerritRefFilter
-from zuul.driver.util import scalar_or_list, to_list
+from zuul.driver.util import scalar_or_list
from zuul.lib.dependson import find_dependency_headers
from zuul.zk.change_cache import ChangeKey
@@ -164,11 +164,14 @@ class GerritSource(BaseSource):
change = self.connection._getChange(change_key)
changes[change_key] = change
- for change in changes.values():
- for git_key in change.git_needs_changes:
- if git_key in changes:
+ # Convert to list here because the recursive call can mutate
+ # the set.
+ for change in list(changes.values()):
+ for git_change_ref in change.git_needs_changes:
+ change_key = ChangeKey.fromReference(git_change_ref)
+ if change_key in changes:
continue
- git_change = self.getChange(git_key)
+ git_change = self.getChange(change_key)
if not git_change.topic or git_change.topic == topic:
continue
self.getChangesByTopic(git_change.topic, changes)
@@ -206,21 +209,15 @@ class GerritSource(BaseSource):
return self.connection._getGitwebUrl(project, sha)
def getRequireFilters(self, config):
- f = GerritRefFilter(
- connection_name=self.connection.connection_name,
- open=config.get('open'),
- current_patchset=config.get('current-patchset'),
- wip=config.get('wip'),
- statuses=to_list(config.get('status')),
- required_approvals=to_list(config.get('approval')),
- )
+ f = GerritRefFilter.requiresFromConfig(
+ self.connection.connection_name,
+ config)
return [f]
def getRejectFilters(self, config):
- f = GerritRefFilter(
- connection_name=self.connection.connection_name,
- reject_approvals=to_list(config.get('approval')),
- )
+ f = GerritRefFilter.rejectFromConfig(
+ self.connection.connection_name,
+ config)
return [f]
def getRefForChange(self, change):
@@ -244,11 +241,13 @@ def getRequireSchema():
'current-patchset': bool,
'wip': bool,
'status': scalar_or_list(str)}
-
return require
def getRejectSchema():
- reject = {'approval': scalar_or_list(approval)}
-
+ reject = {'approval': scalar_or_list(approval),
+ 'open': bool,
+ 'current-patchset': bool,
+ 'wip': bool,
+ 'status': scalar_or_list(str)}
return reject
diff --git a/zuul/driver/gerrit/gerrittrigger.py b/zuul/driver/gerrit/gerrittrigger.py
index dc8a7db68..dff5dc32c 100644
--- a/zuul/driver/gerrit/gerrittrigger.py
+++ b/zuul/driver/gerrit/gerrittrigger.py
@@ -16,6 +16,7 @@ import logging
import voluptuous as v
from zuul.trigger import BaseTrigger
from zuul.driver.gerrit.gerritmodel import GerritEventFilter
+from zuul.driver.gerrit import gerritsource
from zuul.driver.util import scalar_or_list, to_list
@@ -59,7 +60,9 @@ class GerritTrigger(BaseTrigger):
),
uuid=trigger.get('uuid'),
scheme=trigger.get('scheme'),
- ignore_deletes=ignore_deletes
+ ignore_deletes=ignore_deletes,
+ require=trigger.get('require'),
+ reject=trigger.get('reject'),
)
efilters.append(f)
@@ -101,6 +104,8 @@ def getSchema():
'approval': scalar_or_list(variable_dict),
'require-approval': scalar_or_list(approval),
'reject-approval': scalar_or_list(approval),
+ 'require': gerritsource.getRequireSchema(),
+ 'reject': gerritsource.getRejectSchema(),
}
return gerrit_trigger
diff --git a/zuul/driver/github/githubconnection.py b/zuul/driver/github/githubconnection.py
index cffbd6769..a1353cb4d 100644
--- a/zuul/driver/github/githubconnection.py
+++ b/zuul/driver/github/githubconnection.py
@@ -81,6 +81,10 @@ ANNOTATION_LEVELS = {
"warning": "warning",
"error": "failure",
}
+# The maximum size for the 'message' field is 64 KB. Since it's unclear
+# from the Github docs if the unit is KiB or KB we'll use KB to be on
+# the safe side.
+ANNOTATION_MAX_MESSAGE_SIZE = 64 * 1000
EventTuple = collections.namedtuple(
"EventTuple", [
@@ -403,7 +407,8 @@ class GithubEventProcessor(object):
# Returns empty on unhandled events
return
- self.log.debug("Handling %s event", self.event_type)
+ self.log.debug("Handling %s event with installation id %s",
+ self.event_type, installation_id)
events = []
try:
events = method()
@@ -679,6 +684,11 @@ class GithubEventProcessor(object):
branch, project_name)
events.append(
self._branch_protection_rule_to_event(project_name, branch))
+
+ for event in events:
+ # Make sure every event has a branch cache ltime
+ self.connection.clearConnectionCacheOnBranchEvent(event)
+
return events
def _branch_protection_rule_to_event(self, project_name, branch):
@@ -2432,7 +2442,9 @@ class GithubConnection(ZKChangeCacheMixin, ZKBranchCacheMixin, BaseConnection):
raw_annotation = {
"path": fn,
"annotation_level": annotation_level,
- "message": comment["message"],
+ "message": comment["message"].encode(
+ "utf8")[:ANNOTATION_MAX_MESSAGE_SIZE].decode(
+ "utf8", "ignore"),
"start_line": start_line,
"end_line": end_line,
"start_column": start_column,
diff --git a/zuul/driver/github/githubmodel.py b/zuul/driver/github/githubmodel.py
index 30610cf4e..71238d070 100644
--- a/zuul/driver/github/githubmodel.py
+++ b/zuul/driver/github/githubmodel.py
@@ -21,7 +21,7 @@ import time
from zuul.model import Change, TriggerEvent, EventFilter, RefFilter
from zuul.model import FalseWithReason
-from zuul.driver.util import time_to_seconds
+from zuul.driver.util import time_to_seconds, to_list
EMPTY_GIT_REF = '0' * 40 # git sha of all zeros, used during creates/deletes
@@ -170,149 +170,31 @@ class GithubTriggerEvent(TriggerEvent):
return ' '.join(r)
-class GithubCommonFilter(object):
- def __init__(self, required_reviews=[], required_statuses=[],
- reject_reviews=[], reject_statuses=[]):
- self._required_reviews = copy.deepcopy(required_reviews)
- self._reject_reviews = copy.deepcopy(reject_reviews)
- self.required_reviews = self._tidy_reviews(self._required_reviews)
- self.reject_reviews = self._tidy_reviews(self._reject_reviews)
- self.required_statuses = required_statuses
- self.reject_statuses = reject_statuses
-
- def _tidy_reviews(self, reviews):
- for r in reviews:
- for k, v in r.items():
- if k == 'username':
- r['username'] = re.compile(v)
- elif k == 'email':
- r['email'] = re.compile(v)
- elif k == 'newer-than':
- r[k] = time_to_seconds(v)
- elif k == 'older-than':
- r[k] = time_to_seconds(v)
- return reviews
-
- def _match_review_required_review(self, rreview, review):
- # Check if the required review and review match
- now = time.time()
- by = review.get('by', {})
- for k, v in rreview.items():
- if k == 'username':
- if (not v.search(by.get('username', ''))):
- return False
- elif k == 'email':
- if (not v.search(by.get('email', ''))):
- return False
- elif k == 'newer-than':
- t = now - v
- if (review['grantedOn'] < t):
- return False
- elif k == 'older-than':
- t = now - v
- if (review['grantedOn'] >= t):
- return False
- elif k == 'type':
- if review['type'] != v:
- return False
- elif k == 'permission':
- # If permission is read, we've matched. You must have read
- # to provide a review.
- if v != 'read':
- # Admins have implicit write.
- if v == 'write':
- if review['permission'] not in ('write', 'admin'):
- return False
- elif v == 'admin':
- if review['permission'] != 'admin':
- return False
- return True
-
- def matchesReviews(self, change):
- if self.required_reviews or self.reject_reviews:
- if not hasattr(change, 'number'):
- # not a PR, no reviews
- return FalseWithReason("Change is not a PR")
- if self.required_reviews and not change.reviews:
- # No reviews means no matching of required bits
- # having reject reviews but no reviews on the change is okay
- return FalseWithReason("Reviews %s does not match %s" % (
- self.required_reviews, change.reviews))
-
- return (self.matchesRequiredReviews(change) and
- self.matchesNoRejectReviews(change))
-
- def matchesRequiredReviews(self, change):
- for rreview in self.required_reviews:
- matches_review = False
- for review in change.reviews:
- if self._match_review_required_review(rreview, review):
- # Consider matched if any review matches
- matches_review = True
- break
- if not matches_review:
- return FalseWithReason(
- "Required reviews %s does not match %s" % (
- self.required_reviews, change.reviews))
- return True
-
- def matchesNoRejectReviews(self, change):
- for rreview in self.reject_reviews:
- for review in change.reviews:
- if self._match_review_required_review(rreview, review):
- # A review matched, we can reject right away
- return FalseWithReason("Reject reviews %s matches %s" % (
- self.reject_reviews, change.reviews))
- return True
-
- def matchesStatuses(self, change):
- if self.required_statuses or self.reject_statuses:
- if not hasattr(change, 'number'):
- # not a PR, no status
- return FalseWithReason("Can't match statuses without PR")
- if self.required_statuses and not change.status:
- return FalseWithReason(
- "Required statuses %s does not match %s" % (
- self.required_statuses, change.status))
- required_statuses_results = self.matchesRequiredStatuses(change)
- if not required_statuses_results:
- return required_statuses_results
- return self.matchesNoRejectStatuses(change)
-
- def matchesRequiredStatuses(self, change):
- # statuses are ORed
- # A PR head can have multiple statuses on it. If the change
- # statuses and the filter statuses are a null intersection, there
- # are no matches and we return false
- if self.required_statuses:
- for required_status in self.required_statuses:
- for status in change.status:
- if re2.fullmatch(required_status, status):
- return True
- return FalseWithReason("RequiredStatuses %s does not match %s" % (
- self.required_statuses, change.status))
- return True
-
- def matchesNoRejectStatuses(self, change):
- # statuses are ANDed
- # If any of the rejected statusses are present, we return false
- for rstatus in self.reject_statuses:
- for status in change.status:
- if re2.fullmatch(rstatus, status):
- return FalseWithReason("NoRejectStatuses %s matches %s" % (
- self.reject_statuses, change.status))
- return True
+class GithubEventFilter(EventFilter):
+ def __init__(self, connection_name, trigger, types=[],
+ branches=[], refs=[], comments=[], actions=[],
+ labels=[], unlabels=[], states=[], statuses=[],
+ required_statuses=[], check_runs=[],
+ ignore_deletes=True,
+ require=None, reject=None):
+ EventFilter.__init__(self, connection_name, trigger)
-class GithubEventFilter(EventFilter, GithubCommonFilter):
- def __init__(self, connection_name, trigger, types=[], branches=[],
- refs=[], comments=[], actions=[], labels=[], unlabels=[],
- states=[], statuses=[], required_statuses=[],
- check_runs=[], ignore_deletes=True):
+ # TODO: Backwards compat, remove after 9.x:
+ if required_statuses and require is None:
+ require = {'status': required_statuses}
- EventFilter.__init__(self, connection_name, trigger)
+ if require:
+ self.require_filter = GithubRefFilter.requiresFromConfig(
+ connection_name, require)
+ else:
+ self.require_filter = None
- GithubCommonFilter.__init__(self, required_statuses=required_statuses)
+ if reject:
+ self.reject_filter = GithubRefFilter.rejectFromConfig(
+ connection_name, reject)
+ else:
+ self.reject_filter = None
self._types = types
self._branches = branches
@@ -327,7 +209,6 @@ class GithubEventFilter(EventFilter, GithubCommonFilter):
self.unlabels = unlabels
self.states = states
self.statuses = statuses
- self.required_statuses = required_statuses
self.check_runs = check_runs
self.ignore_deletes = ignore_deletes
@@ -356,8 +237,10 @@ class GithubEventFilter(EventFilter, GithubCommonFilter):
ret += ' states: %s' % ', '.join(self.states)
if self.statuses:
ret += ' statuses: %s' % ', '.join(self.statuses)
- if self.required_statuses:
- ret += ' required_statuses: %s' % ', '.join(self.required_statuses)
+ if self.require_filter:
+ ret += ' require: %s' % repr(self.require_filter)
+ if self.reject_filter:
+ ret += ' reject: %s' % repr(self.reject_filter)
ret += '>'
return ret
@@ -372,7 +255,7 @@ class GithubEventFilter(EventFilter, GithubCommonFilter):
if etype.match(event.type):
matches_type = True
if self.types and not matches_type:
- return FalseWithReason("Types %s doesn't match %s" % (
+ return FalseWithReason("Types %s do not match %s" % (
self.types, event.type))
# branches are ORed
@@ -381,7 +264,7 @@ class GithubEventFilter(EventFilter, GithubCommonFilter):
if branch.match(event.branch):
matches_branch = True
if self.branches and not matches_branch:
- return FalseWithReason("Branches %s doesn't match %s" % (
+ return FalseWithReason("Branches %s do not match %s" % (
self.branches, event.branch))
# refs are ORed
@@ -392,11 +275,11 @@ class GithubEventFilter(EventFilter, GithubCommonFilter):
matches_ref = True
if self.refs and not matches_ref:
return FalseWithReason(
- "Refs %s doesn't match %s" % (self.refs, event.ref))
+ "Refs %s do not match %s" % (self.refs, event.ref))
if self.ignore_deletes and event.newrev == EMPTY_GIT_REF:
# If the updated ref has an empty git sha (all 0s),
# then the ref is being deleted
- return FalseWithReason("Ref deletion are ignored")
+ return FalseWithReason("Ref deletion events are ignored")
# comments are ORed
matches_comment_re = False
@@ -405,7 +288,7 @@ class GithubEventFilter(EventFilter, GithubCommonFilter):
comment_re.search(event.comment)):
matches_comment_re = True
if self.comments and not matches_comment_re:
- return FalseWithReason("Comments %s doesn't match %s" % (
+ return FalseWithReason("Comments %s do not match %s" % (
self.comments, event.comment))
# actions are ORed
@@ -414,7 +297,7 @@ class GithubEventFilter(EventFilter, GithubCommonFilter):
if (event.action == action):
matches_action = True
if self.actions and not matches_action:
- return FalseWithReason("Actions %s doesn't match %s" % (
+ return FalseWithReason("Actions %s do not match %s" % (
self.actions, event.action))
# check_runs are ORed
@@ -425,22 +308,22 @@ class GithubEventFilter(EventFilter, GithubCommonFilter):
check_run_found = True
break
if not check_run_found:
- return FalseWithReason("Check_runs %s doesn't match %s" % (
+ return FalseWithReason("Check runs %s do not match %s" % (
self.check_runs, event.check_run))
# labels are ORed
if self.labels and event.label not in self.labels:
- return FalseWithReason("Labels %s doesn't match %s" % (
+ return FalseWithReason("Labels %s do not match %s" % (
self.labels, event.label))
# unlabels are ORed
if self.unlabels and event.unlabel not in self.unlabels:
- return FalseWithReason("Unlabels %s doesn't match %s" % (
+ return FalseWithReason("Unlabels %s do not match %s" % (
self.unlabels, event.unlabel))
# states are ORed
if self.states and event.state not in self.states:
- return FalseWithReason("States %s doesn't match %s" % (
+ return FalseWithReason("States %s do not match %s" % (
self.states, event.state))
# statuses are ORed
@@ -451,26 +334,40 @@ class GithubEventFilter(EventFilter, GithubCommonFilter):
status_found = True
break
if not status_found:
- return FalseWithReason("Statuses %s doesn't match %s" % (
+ return FalseWithReason("Statuses %s do not match %s" % (
self.statuses, event.status))
- return self.matchesStatuses(change)
+ if self.require_filter:
+ require_filter_result = self.require_filter.matches(change)
+ if not require_filter_result:
+ return require_filter_result
+
+ if self.reject_filter:
+ reject_filter_result = self.reject_filter.matches(change)
+ if not reject_filter_result:
+ return reject_filter_result
+
+ return True
-class GithubRefFilter(RefFilter, GithubCommonFilter):
+class GithubRefFilter(RefFilter):
def __init__(self, connection_name, statuses=[],
- required_reviews=[], reject_reviews=[], open=None,
+ reviews=[], reject_reviews=[], open=None,
merged=None, current_patchset=None, draft=None,
reject_open=None, reject_merged=None,
reject_current_patchset=None, reject_draft=None,
labels=[], reject_labels=[], reject_statuses=[]):
RefFilter.__init__(self, connection_name)
- GithubCommonFilter.__init__(self, required_reviews=required_reviews,
- reject_reviews=reject_reviews,
- required_statuses=statuses,
- reject_statuses=reject_statuses)
- self.statuses = statuses
+ self._required_reviews = copy.deepcopy(reviews)
+ self._reject_reviews = copy.deepcopy(reject_reviews)
+ self.required_reviews = self._tidy_reviews(self._required_reviews)
+ self.reject_reviews = self._tidy_reviews(self._reject_reviews)
+ self.required_statuses = statuses
+ self.reject_statuses = reject_statuses
+ self.required_labels = labels
+ self.reject_labels = reject_labels
+
if reject_open is not None:
self.open = not reject_open
else:
@@ -487,23 +384,51 @@ class GithubRefFilter(RefFilter, GithubCommonFilter):
self.draft = not reject_draft
else:
self.draft = draft
- self.labels = labels
- self.reject_labels = reject_labels
+
+ @classmethod
+ def requiresFromConfig(cls, connection_name, config):
+ return cls(
+ connection_name=connection_name,
+ statuses=to_list(config.get('status')),
+ reviews=to_list(config.get('review')),
+ labels=to_list(config.get('label')),
+ open=config.get('open'),
+ merged=config.get('merged'),
+ current_patchset=config.get('current-patchset'),
+ draft=config.get('draft'),
+ )
+
+ @classmethod
+ def rejectFromConfig(cls, connection_name, config):
+ return cls(
+ connection_name=connection_name,
+ reject_statuses=to_list(config.get('status')),
+ reject_reviews=to_list(config.get('review')),
+ reject_labels=to_list(config.get('label')),
+ reject_open=config.get('open'),
+ reject_merged=config.get('merged'),
+ reject_current_patchset=config.get('current-patchset'),
+ reject_draft=config.get('draft'),
+ )
def __repr__(self):
ret = '<GithubRefFilter'
ret += ' connection_name: %s' % self.connection_name
- if self.statuses:
- ret += ' statuses: %s' % ', '.join(self.statuses)
+ if self.required_statuses:
+ ret += ' status: %s' % str(self.required_statuses)
if self.reject_statuses:
- ret += ' reject-statuses: %s' % ', '.join(self.reject_statuses)
+ ret += ' reject-status: %s' % str(self.reject_statuses)
if self.required_reviews:
- ret += (' required-reviews: %s' %
+ ret += (' reviews: %s' %
str(self.required_reviews))
if self.reject_reviews:
ret += (' reject-reviews: %s' %
str(self.reject_reviews))
+ if self.required_labels:
+ ret += ' labels: %s' % str(self.required_labels)
+ if self.reject_labels:
+ ret += ' reject-labels: %s' % str(self.reject_labels)
if self.open is not None:
ret += ' open: %s' % self.open
if self.merged is not None:
@@ -512,20 +437,175 @@ class GithubRefFilter(RefFilter, GithubCommonFilter):
ret += ' current-patchset: %s' % self.current_patchset
if self.draft is not None:
ret += ' draft: %s' % self.draft
- if self.labels:
- ret += ' labels: %s' % self.labels
- if self.reject_labels:
- ret += ' reject-labels: %s' % self.reject_labels
ret += '>'
return ret
+ def _tidy_reviews(self, reviews):
+ for r in reviews:
+ for k, v in r.items():
+ if k == 'username':
+ r['username'] = re.compile(v)
+ elif k == 'email':
+ r['email'] = re.compile(v)
+ elif k == 'newer-than':
+ r[k] = time_to_seconds(v)
+ elif k == 'older-than':
+ r[k] = time_to_seconds(v)
+ return reviews
+
+ def _match_review_required_review(self, rreview, review):
+ # Check if the required review and review match
+ now = time.time()
+ by = review.get('by', {})
+ for k, v in rreview.items():
+ if k == 'username':
+ if (not v.search(by.get('username', ''))):
+ return False
+ elif k == 'email':
+ if (not v.search(by.get('email', ''))):
+ return False
+ elif k == 'newer-than':
+ t = now - v
+ if (review['grantedOn'] < t):
+ return False
+ elif k == 'older-than':
+ t = now - v
+ if (review['grantedOn'] >= t):
+ return False
+ elif k == 'type':
+ if review['type'] != v:
+ return False
+ elif k == 'permission':
+ # If permission is read, we've matched. You must have read
+ # to provide a review.
+ if v != 'read':
+ # Admins have implicit write.
+ if v == 'write':
+ if review['permission'] not in ('write', 'admin'):
+ return False
+ elif v == 'admin':
+ if review['permission'] != 'admin':
+ return False
+ return True
+
+ def matchesReviews(self, change):
+ if self.required_reviews or self.reject_reviews:
+ if not hasattr(change, 'number'):
+ # not a PR, no reviews
+ return FalseWithReason("Change is not a PR")
+ if self.required_reviews and not change.reviews:
+ # No reviews means no matching of required bits
+ # having reject reviews but no reviews on the change is okay
+ return FalseWithReason("Reviews %s do not match %s" % (
+ self.required_reviews, change.reviews))
+
+ return (self.matchesRequiredReviews(change) and
+ self.matchesNoRejectReviews(change))
+
+ def matchesRequiredReviews(self, change):
+ for rreview in self.required_reviews:
+ matches_review = False
+ for review in change.reviews:
+ if self._match_review_required_review(rreview, review):
+ # Consider matched if any review matches
+ matches_review = True
+ break
+ if not matches_review:
+ return FalseWithReason(
+ "Required reviews %s do not match %s" % (
+ self.required_reviews, change.reviews))
+ return True
+
+ def matchesNoRejectReviews(self, change):
+ for rreview in self.reject_reviews:
+ for review in change.reviews:
+ if self._match_review_required_review(rreview, review):
+ # A review matched, we can reject right away
+ return FalseWithReason("Reject reviews %s match %s" % (
+ self.reject_reviews, change.reviews))
+ return True
+
+ def matchesStatuses(self, change):
+ if self.required_statuses or self.reject_statuses:
+ if not hasattr(change, 'number'):
+ # not a PR, no status
+ return FalseWithReason("Can not match statuses without PR")
+ if self.required_statuses and not change.status:
+ return FalseWithReason(
+ "Required statuses %s do not match %s" % (
+ self.required_statuses, change.status))
+ required_statuses_results = self.matchesRequiredStatuses(change)
+ if not required_statuses_results:
+ return required_statuses_results
+ return self.matchesNoRejectStatuses(change)
+
+ def matchesRequiredStatuses(self, change):
+ # statuses are ORed
+ # A PR head can have multiple statuses on it. If the change
+ # statuses and the filter statuses are a null intersection, there
+ # are no matches and we return false
+ if self.required_statuses:
+ for required_status in self.required_statuses:
+ for status in change.status:
+ if re2.fullmatch(required_status, status):
+ return True
+ return FalseWithReason("Required statuses %s do not match %s" % (
+ self.required_statuses, change.status))
+ return True
+
+ def matchesNoRejectStatuses(self, change):
+ # statuses are ANDed
+        # If any of the rejected statuses are present, we return false
+ for rstatus in self.reject_statuses:
+ for status in change.status:
+ if re2.fullmatch(rstatus, status):
+ return FalseWithReason("Reject statuses %s match %s" % (
+ self.reject_statuses, change.status))
+ return True
+
+ def matchesLabels(self, change):
+ if self.required_labels or self.reject_labels:
+ if not hasattr(change, 'number'):
+ # not a PR, no label
+ return FalseWithReason("Can not match labels without PR")
+ if self.required_labels and not change.labels:
+ # No labels means no matching of required bits
+ # having reject labels but no labels on the change is okay
+ return FalseWithReason(
+                    "Required labels %s do not match %s" % (
+ self.required_labels, change.labels))
+ return (self.matchesRequiredLabels(change) and
+ self.matchesNoRejectLabels(change))
+
+ def matchesRequiredLabels(self, change):
+ for label in self.required_labels:
+ if label not in change.labels:
+ return FalseWithReason("Labels %s do not match %s" % (
+ self.required_labels, change.labels))
+ return True
+
+ def matchesNoRejectLabels(self, change):
+ for label in self.reject_labels:
+ if label in change.labels:
+ return FalseWithReason("Reject labels %s match %s" % (
+ self.reject_labels, change.labels))
+ return True
+
def matches(self, change):
statuses_result = self.matchesStatuses(change)
if not statuses_result:
return statuses_result
+ reviews_result = self.matchesReviews(change)
+ if not reviews_result:
+ return reviews_result
+
+ labels_result = self.matchesLabels(change)
+ if not labels_result:
+ return labels_result
+
if self.open is not None:
# if a "change" has no number, it's not a change, but a push
# and cannot possibly pass this test.
@@ -566,21 +646,4 @@ class GithubRefFilter(RefFilter, GithubCommonFilter):
else:
return FalseWithReason("Change is not a PR")
- # required reviews are ANDed (reject reviews are ORed)
- reviews_result = self.matchesReviews(change)
- if not reviews_result:
- return reviews_result
-
- # required labels are ANDed
- for label in self.labels:
- if label not in change.labels:
- return FalseWithReason("Labels %s does not match %s" % (
- self.labels, change.labels))
-
- # rejected reviews are OR'd
- for label in self.reject_labels:
- if label in change.labels:
- return FalseWithReason("RejectLabels %s matches %s" % (
- self.reject_labels, change.labels))
-
return True
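
For the label handling folded into GithubRefFilter above: required labels are ANDed (every one must be present on the PR) while reject labels act as an OR (any single one blocks). A tiny standalone sketch of those semantics with made-up label sets:

def matches_labels(change_labels, required_labels, reject_labels):
    # Every required label must be present...
    for label in required_labels:
        if label not in change_labels:
            return False
    # ...and no reject label may be present.
    for label in reject_labels:
        if label in change_labels:
            return False
    return True

print(matches_labels({'approved', 'gate'}, ['approved'], ['do-not-merge']))          # True
print(matches_labels({'approved', 'do-not-merge'}, ['approved'], ['do-not-merge']))  # False
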
diff --git a/zuul/driver/github/githubreporter.py b/zuul/driver/github/githubreporter.py
index 1f44303bd..396516038 100644
--- a/zuul/driver/github/githubreporter.py
+++ b/zuul/driver/github/githubreporter.py
@@ -193,13 +193,13 @@ class GithubReporter(BaseReporter):
self.log.warning('Merge mode %s not supported by Github', mode)
raise MergeFailure('Merge mode %s not supported by Github' % mode)
- merge_mode = self.merge_modes[merge_mode]
project = item.change.project.name
pr_number = item.change.number
sha = item.change.patchset
log.debug('Reporting change %s, params %s, merging via API',
item.change, self.config)
- message = self._formatMergeMessage(item.change)
+ message = self._formatMergeMessage(item.change, merge_mode)
+ merge_mode = self.merge_modes[merge_mode]
for i in [1, 2]:
try:
@@ -319,10 +319,13 @@ class GithubReporter(BaseReporter):
self.connection.unlabelPull(project, pr_number, label,
zuul_event_id=item.event)
- def _formatMergeMessage(self, change):
+ def _formatMergeMessage(self, change, merge_mode):
message = []
- if change.title:
- message.append(change.title)
+ # For squash merges we don't need to add the title to the body
+ # as it will already be set as the commit subject.
+ if merge_mode != model.MERGER_SQUASH_MERGE:
+ if change.title:
+ message.append(change.title)
if change.body_text:
message.append(change.body_text)
merge_message = "\n\n".join(message)
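
The reordering above matters because _formatMergeMessage now receives the symbolic merge mode before it is translated into GitHub's API string, and squash merges skip the title since GitHub already uses it as the commit subject. A rough sketch of that formatting decision (the constants below are invented stand-ins for zuul.model's merge-mode values):

MERGER_MERGE = 1
MERGER_SQUASH_MERGE = 2  # invented stand-ins for zuul.model constants

def format_merge_message(title, body, merge_mode):
    message = []
    # For squash merges GitHub already uses the PR title as the commit
    # subject, so repeating it in the body would duplicate it.
    if merge_mode != MERGER_SQUASH_MERGE and title:
        message.append(title)
    if body:
        message.append(body)
    return "\n\n".join(message)

print(repr(format_merge_message("Fix bug", "Details.", MERGER_SQUASH_MERGE)))  # 'Details.'
print(repr(format_merge_message("Fix bug", "Details.", MERGER_MERGE)))         # 'Fix bug\n\nDetails.'
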
diff --git a/zuul/driver/github/githubsource.py b/zuul/driver/github/githubsource.py
index 0a94a1730..01901dfd4 100644
--- a/zuul/driver/github/githubsource.py
+++ b/zuul/driver/github/githubsource.py
@@ -21,7 +21,7 @@ import voluptuous as v
from zuul.source import BaseSource
from zuul.model import Project
from zuul.driver.github.githubmodel import GithubRefFilter
-from zuul.driver.util import scalar_or_list, to_list
+from zuul.driver.util import scalar_or_list
from zuul.zk.change_cache import ChangeKey
@@ -165,29 +165,15 @@ class GithubSource(BaseSource):
return time.strptime(timestamp, '%Y-%m-%dT%H:%M:%SZ')
def getRequireFilters(self, config):
- f = GithubRefFilter(
- connection_name=self.connection.connection_name,
- statuses=to_list(config.get('status')),
- required_reviews=to_list(config.get('review')),
- open=config.get('open'),
- merged=config.get('merged'),
- current_patchset=config.get('current-patchset'),
- draft=config.get('draft'),
- labels=to_list(config.get('label')),
- )
+ f = GithubRefFilter.requiresFromConfig(
+ self.connection.connection_name,
+ config)
return [f]
def getRejectFilters(self, config):
- f = GithubRefFilter(
- connection_name=self.connection.connection_name,
- reject_reviews=to_list(config.get('review')),
- reject_labels=to_list(config.get('label')),
- reject_statuses=to_list(config.get('status')),
- reject_open=config.get('open'),
- reject_merged=config.get('merged'),
- reject_current_patchset=config.get('current-patchset'),
- reject_draft=config.get('draft'),
- )
+ f = GithubRefFilter.rejectFromConfig(
+ self.connection.connection_name,
+ config)
return [f]
def getRefForChange(self, change):
diff --git a/zuul/driver/github/githubtrigger.py b/zuul/driver/github/githubtrigger.py
index 76d8f574e..5072fda43 100644
--- a/zuul/driver/github/githubtrigger.py
+++ b/zuul/driver/github/githubtrigger.py
@@ -16,6 +16,7 @@ import logging
import voluptuous as v
from zuul.trigger import BaseTrigger
from zuul.driver.github.githubmodel import GithubEventFilter
+from zuul.driver.github import githubsource
from zuul.driver.util import scalar_or_list, to_list
@@ -50,7 +51,9 @@ class GithubTrigger(BaseTrigger):
unlabels=to_list(trigger.get('unlabel')),
states=to_list(trigger.get('state')),
statuses=to_list(trigger.get('status')),
- required_statuses=to_list(trigger.get('require-status'))
+ required_statuses=to_list(trigger.get('require-status')),
+ require=trigger.get('require'),
+ reject=trigger.get('reject'),
)
efilters.append(f)
@@ -75,6 +78,8 @@ def getSchema():
'unlabel': scalar_or_list(str),
'state': scalar_or_list(str),
'require-status': scalar_or_list(str),
+ 'require': githubsource.getRequireSchema(),
+ 'reject': githubsource.getRejectSchema(),
'status': scalar_or_list(str),
'check': scalar_or_list(str),
}
diff --git a/zuul/driver/mqtt/mqttconnection.py b/zuul/driver/mqtt/mqttconnection.py
index 7f221282f..4a028ba23 100644
--- a/zuul/driver/mqtt/mqttconnection.py
+++ b/zuul/driver/mqtt/mqttconnection.py
@@ -64,6 +64,12 @@ class MQTTConnection(BaseConnection):
def onLoad(self, zk_client, component_registry):
self.log.debug("Starting MQTT Connection")
+
+ # If the connection was not loaded by a scheduler, but by e.g.
+ # zuul-web, we want to stop here.
+ if not self.sched:
+ return
+
try:
self.client.connect(
self.connection_config.get('server', 'localhost'),
@@ -76,10 +82,11 @@ class MQTTConnection(BaseConnection):
self.client.loop_start()
def onStop(self):
- self.log.debug("Stopping MQTT Connection")
- self.client.loop_stop()
- self.client.disconnect()
- self.connected = False
+ if self.connected:
+ self.log.debug("Stopping MQTT Connection")
+ self.client.loop_stop()
+ self.client.disconnect()
+ self.connected = False
def publish(self, topic, message, qos, zuul_event_id):
log = get_annotated_logger(self.log, zuul_event_id)
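The MQTT changes make onLoad a no-op when the connection is loaded outside the scheduler, and make onStop safe when nothing was ever connected. A simplified sketch of that guard pattern; DummyClient stands in for the real MQTT client so the example runs without a broker.

# Guarded start/stop sketch of the hunks above; DummyClient replaces the
# real MQTT client purely so this snippet is self-contained.
class DummyClient:
    def connect(self, host):
        pass

    def loop_start(self):
        pass

    def loop_stop(self):
        pass

    def disconnect(self):
        pass


class MQTTConnectionSketch:
    def __init__(self, sched=None):
        self.sched = sched        # Only a scheduler should publish events.
        self.client = DummyClient()
        self.connected = False

    def onLoad(self):
        # zuul-web also loads connections but must not open an MQTT
        # session; bail out early when no scheduler is attached.
        if not self.sched:
            return
        self.client.connect('localhost')
        self.connected = True
        self.client.loop_start()

    def onStop(self):
        # Only tear down what onLoad actually set up.
        if self.connected:
            self.client.loop_stop()
            self.client.disconnect()
            self.connected = False


conn = MQTTConnectionSketch(sched=None)
conn.onLoad()
conn.onStop()   # Safe no-op: nothing was connected.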
diff --git a/zuul/driver/sql/sqlconnection.py b/zuul/driver/sql/sqlconnection.py
index 2d5c39ec3..7a4aea626 100644
--- a/zuul/driver/sql/sqlconnection.py
+++ b/zuul/driver/sql/sqlconnection.py
@@ -247,12 +247,25 @@ class DatabaseSession(object):
except sqlalchemy.orm.exc.MultipleResultsFound:
raise Exception("Multiple buildset found with uuid %s", uuid)
- def deleteBuildsets(self, cutoff):
+ def deleteBuildsets(self, cutoff, batch_size):
"""Delete buildsets before the cutoff"""
# delete buildsets updated before the cutoff
- for buildset in self.getBuildsets(updated_max=cutoff):
- self.session().delete(buildset)
+ deleted = True
+ while deleted:
+ deleted = False
+ oldest = None
+ for buildset in self.getBuildsets(
+ updated_max=cutoff, limit=batch_size):
+ deleted = True
+ if oldest is None:
+ oldest = buildset.updated
+ else:
+ oldest = min(oldest, buildset.updated)
+ self.session().delete(buildset)
+ self.session().commit()
+ if deleted:
+ self.log.info("Deleted from %s to %s", oldest, cutoff)
class SQLConnection(BaseConnection):
@@ -409,7 +422,10 @@ class SQLConnection(BaseConnection):
final = sa.Column(sa.Boolean)
held = sa.Column(sa.Boolean)
nodeset = sa.Column(sa.String(255))
- buildset = orm.relationship(BuildSetModel, backref="builds")
+ buildset = orm.relationship(BuildSetModel,
+ backref=orm.backref(
+ "builds",
+ cascade="all, delete-orphan"))
sa.Index(self.table_prefix + 'job_name_buildset_id_idx',
job_name, buildset_id)
@@ -468,7 +484,10 @@ class SQLConnection(BaseConnection):
name = sa.Column(sa.String(255))
url = sa.Column(sa.TEXT())
meta = sa.Column('metadata', sa.TEXT())
- build = orm.relationship(BuildModel, backref="artifacts")
+ build = orm.relationship(BuildModel,
+ backref=orm.backref(
+ "artifacts",
+ cascade="all, delete-orphan"))
class ProvidesModel(Base):
__tablename__ = self.table_prefix + PROVIDES_TABLE
@@ -476,7 +495,10 @@ class SQLConnection(BaseConnection):
build_id = sa.Column(sa.Integer, sa.ForeignKey(
self.table_prefix + BUILD_TABLE + ".id"))
name = sa.Column(sa.String(255))
- build = orm.relationship(BuildModel, backref="provides")
+ build = orm.relationship(BuildModel,
+ backref=orm.backref(
+ "provides",
+ cascade="all, delete-orphan"))
class BuildEventModel(Base):
__tablename__ = self.table_prefix + BUILD_EVENTS_TABLE
@@ -486,7 +508,10 @@ class SQLConnection(BaseConnection):
event_time = sa.Column(sa.DateTime)
event_type = sa.Column(sa.String(255))
description = sa.Column(sa.TEXT())
- build = orm.relationship(BuildModel, backref="build_events")
+ build = orm.relationship(BuildModel,
+ backref=orm.backref(
+ "build_events",
+ cascade="all, delete-orphan"))
self.buildEventModel = BuildEventModel
self.zuul_build_event_table = self.buildEventModel.__table__
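deleteBuildsets now removes old rows in bounded batches and commits after each batch, so pruning a large database no longer runs in one giant transaction; the cascade settings on the backrefs ensure dependent builds, artifacts, provides and events go away with their buildset. A simplified sketch of the batching loop, using plain Python structures in place of the SQLAlchemy session and query.

# Batched-deletion sketch; get_batch/delete/commit stand in for the real
# session.query(...).limit(batch_size), session.delete() and commit().
def delete_buildsets(get_batch, delete, commit, cutoff, batch_size):
    deleted = True
    while deleted:
        deleted = False
        oldest = None
        for buildset in get_batch(cutoff, batch_size):
            deleted = True
            updated = buildset['updated']
            oldest = updated if oldest is None else min(oldest, updated)
            delete(buildset)
        # Commit per batch to keep transactions (and locks) small.
        commit()
        if deleted:
            print("Deleted batch starting at %s, cutoff %s" % (oldest, cutoff))


rows = [{'updated': i} for i in range(25)]
get_batch = lambda cutoff, n: [r for r in rows if r['updated'] < cutoff][:n]
delete_buildsets(get_batch, rows.remove, lambda: None,
                 cutoff=20, batch_size=10)
print(len(rows))  # 5 rows newer than the cutoff remain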
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index 0d2d95361..6dbf62de0 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -3633,6 +3633,10 @@ class ExecutorServer(BaseMergeServer):
log.exception('Process pool got broken')
self.resetProcessPool()
task.transient_error = True
+ except IOError:
+ log.exception('Got I/O error while updating repo %s/%s',
+ task.connection_name, task.project_name)
+ task.transient_error = True
except Exception:
log.exception('Got exception while updating repo %s/%s',
task.connection_name, task.project_name)
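The executor now treats I/O errors during a repo update like a broken process pool: the task is flagged as transient so it can be retried rather than counted as a permanent failure. A rough sketch of that classification; UpdateTask and run_update are hypothetical stand-ins, not the executor's real task handling.

# Transient-error classification sketch; names are illustrative only.
import logging

log = logging.getLogger('example.executor')


class UpdateTask:
    def __init__(self, connection_name, project_name):
        self.connection_name = connection_name
        self.project_name = project_name
        self.transient_error = False


def run_update(task, update_fn):
    try:
        update_fn()
    except IOError:
        # Disk or network hiccups are usually recoverable; mark the task
        # transient so the update is retried instead of failing for good.
        log.exception('Got I/O error while updating repo %s/%s',
                      task.connection_name, task.project_name)
        task.transient_error = True
    except Exception:
        log.exception('Got exception while updating repo %s/%s',
                      task.connection_name, task.project_name)


def failing_update():
    raise IOError('No space left on device')


task = UpdateTask('gerrit', 'demo/project')
run_update(task, failing_update)
print(task.transient_error)  # True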
diff --git a/zuul/merger/merger.py b/zuul/merger/merger.py
index 1df833bc5..845925bfa 100644
--- a/zuul/merger/merger.py
+++ b/zuul/merger/merger.py
@@ -722,9 +722,11 @@ class Repo(object):
ret = {}
repo = self.createRepoObject(zuul_event_id)
if branch:
- tree = repo.heads[branch].commit.tree
+ head = repo.heads[branch].commit
else:
- tree = repo.commit(commit).tree
+ head = repo.commit(commit)
+ log.debug("Getting files for %s at %s", self.local_path, head.hexsha)
+ tree = head.tree
for fn in files:
if fn in tree:
if tree[fn].type != 'blob':
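The merger hunk resolves the head commit once, logs it, and only then walks its tree; functionally it just adds the debug line. A trimmed GitPython-based sketch of that lookup, with the error handling of the real Repo.getFiles dropped and the repository path left as a placeholder.

# GitPython sketch of resolving a head commit before reading files from
# its tree, mirroring the hunk above; requires an existing git repo.
import git


def get_files(path, files, branch=None, commit=None):
    repo = git.Repo(path)
    head = repo.heads[branch].commit if branch else repo.commit(commit)
    print("Getting files for %s at %s" % (path, head.hexsha))
    ret = {}
    tree = head.tree
    for fn in files:
        if fn in tree and tree[fn].type == 'blob':
            ret[fn] = tree[fn].data_stream.read().decode('utf-8')
        else:
            ret[fn] = None
    return ret


# Example (placeholder path and file):
# get_files('/tmp/somerepo', ['zuul.yaml'], branch='master')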
diff --git a/zuul/model.py b/zuul/model.py
index 5be5923a5..e36f9d670 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -3007,20 +3007,30 @@ class Job(ConfigObject):
# possibility of success, which may help prevent errors in
# most cases. If we don't raise an error here, the
# possibility of later failure still remains.
- nonfinal_parents = [p for p in parents if not p.final]
- if not nonfinal_parents:
+ nonfinal_parent_found = False
+ nonintermediate_parent_found = False
+ nonprotected_parent_found = False
+ for p in parents:
+ if not p.final:
+ nonfinal_parent_found = True
+ if not p.intermediate:
+ nonintermediate_parent_found = True
+ if not p.protected:
+ nonprotected_parent_found = True
+ if (nonfinal_parent_found and
+ nonintermediate_parent_found and
+ nonprotected_parent_found):
+ break
+
+ if not nonfinal_parent_found:
raise Exception(
f'The parent of job "{self.name}", "{self.parent}" '
'is final and can not act as a parent')
- nonintermediate_parents = [
- p for p in parents if not p.intermediate]
- if not nonintermediate_parents and not self.abstract:
+ if not nonintermediate_parent_found and not self.abstract:
raise Exception(
f'The parent of job "{self.name}", "{self.parent}" '
f'is intermediate but "{self.name}" is not abstract')
- nonprotected_parents = [
- p for p in parents if not p.protected]
- if (not nonprotected_parents and
+ if (not nonprotected_parent_found and
parents[0].source_context.project_canonical_name !=
self.source_context.project_canonical_name):
raise Exception(
@@ -7787,31 +7797,33 @@ class Layout(object):
def addJob(self, job):
# We can have multiple variants of a job all with the same
# name, but these variants must all be defined in the same repo.
- prior_jobs = [j for j in self.getJobs(job.name) if
- j.source_context.project_canonical_name !=
- job.source_context.project_canonical_name]
# Unless the repo is permitted to shadow another. If so, and
# the job we are adding is from a repo that is permitted to
# shadow the one with the older jobs, skip adding this job.
job_project = job.source_context.project_canonical_name
job_tpc = self.tenant.project_configs[job_project]
skip_add = False
- for prior_job in prior_jobs[:]:
- prior_project = prior_job.source_context.project_canonical_name
- if prior_project in job_tpc.shadow_projects:
- prior_jobs.remove(prior_job)
- skip_add = True
-
+ prior_jobs = self.jobs.get(job.name, [])
if prior_jobs:
- raise Exception("Job %s in %s is not permitted to shadow "
- "job %s in %s" % (
- job,
- job.source_context.project_name,
- prior_jobs[0],
- prior_jobs[0].source_context.project_name))
+ # All jobs we've added so far should be from the same
+ # project, so pick the first one.
+ prior_job = prior_jobs[0]
+ if (prior_job.source_context.project_canonical_name !=
+ job.source_context.project_canonical_name):
+ prior_project = prior_job.source_context.project_canonical_name
+ if prior_project in job_tpc.shadow_projects:
+ skip_add = True
+ else:
+ raise Exception("Job %s in %s is not permitted to shadow "
+ "job %s in %s" % (
+ job,
+ job.source_context.project_name,
+ prior_job,
+ prior_job.source_context.project_name))
+
if skip_add:
return False
- if job.name in self.jobs:
+ if prior_jobs:
self.jobs[job.name].append(job)
else:
self.jobs[job.name] = [job]
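The first model.py hunk replaces three list comprehensions over the parent variants with a single pass that records three booleans and stops as soon as all of them are known, avoiding repeated walks over a potentially long variant list. A minimal sketch of that single-pass early-exit scan; Parent is a hypothetical stand-in for a resolved job variant.

# Single-pass scan with early exit, as in the Job parent checks above;
# Parent is a simplified stand-in, not Zuul's job variant object.
from collections import namedtuple

Parent = namedtuple('Parent', 'final intermediate protected')


def scan_parents(parents):
    nonfinal_found = False
    nonintermediate_found = False
    nonprotected_found = False
    for p in parents:
        if not p.final:
            nonfinal_found = True
        if not p.intermediate:
            nonintermediate_found = True
        if not p.protected:
            nonprotected_found = True
        if (nonfinal_found and nonintermediate_found
                and nonprotected_found):
            # Everything we need to know is known; stop early.
            break
    return nonfinal_found, nonintermediate_found, nonprotected_found


print(scan_parents([Parent(final=True, intermediate=False, protected=False),
                    Parent(final=False, intermediate=True, protected=True)]))
# -> (True, True, True)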
diff --git a/zuul/zk/job_request_queue.py b/zuul/zk/job_request_queue.py
index 175c57b90..7c85ae95e 100644
--- a/zuul/zk/job_request_queue.py
+++ b/zuul/zk/job_request_queue.py
@@ -609,7 +609,7 @@ class JobRequestQueue(ZooKeeperSimpleBase):
self.kazoo_client.delete(lock_path, recursive=True)
except Exception:
self.log.exception(
- "Unable to delete lock %s", path)
+ "Unable to delete lock %s", lock_path)
except Exception:
self.log.exception("Error cleaning up locks %s", self)