-rw-r--r--   doc/source/launchers.rst               4
-rw-r--r--   doc/source/zuul.rst                   10
-rw-r--r--   etc/status/public_html/zuul.app.js     7
-rw-r--r--   requirements.txt                       2
-rwxr-xr-x   tests/base.py                         40
-rw-r--r--   tests/fixtures/layout-no-timer.yaml   16
-rwxr-xr-x   tests/test_scheduler.py              140
-rw-r--r--   tests/test_zuultrigger.py              4
-rwxr-xr-x   tools/zuul-changes.py                 21
-rw-r--r--   zuul/launcher/gearman.py              13
-rw-r--r--   zuul/lib/cloner.py                     2
-rw-r--r--   zuul/lib/gerrit.py                     2
-rw-r--r--   zuul/lib/swift.py                      4
-rw-r--r--   zuul/merger/client.py                  2
-rw-r--r--   zuul/merger/server.py                  4
-rw-r--r--   zuul/model.py                          3
-rw-r--r--   zuul/trigger/gerrit.py                68
-rw-r--r--   zuul/webapp.py                         7
18 files changed, 277 insertions, 72 deletions
diff --git a/doc/source/launchers.rst b/doc/source/launchers.rst
index c79929129..b95354f61 100644
--- a/doc/source/launchers.rst
+++ b/doc/source/launchers.rst
@@ -78,6 +78,10 @@ comment-added events):
The target branch for the change that triggered this build.
**ZUUL_CHANGE**
The Gerrit change ID for the change that triggered this build.
+**ZUUL_CHANGES**
+ A caret-separated list of the changes on which this build depends, in
+ the form of a colon-separated list consisting of project name, target
+ branch, and revision ref.
**ZUUL_CHANGE_IDS**
All of the Gerrit change IDs that are included in this build (useful
when the DependentPipelineManager combines changes for testing).
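As a rough illustration of the new variable's shape (the value below is
hypothetical, not taken from this change), a job could split ZUUL_CHANGES
back into its project/branch/ref components like this:

    import os

    # Hypothetical example value; in a real build the launcher sets this.
    os.environ.setdefault(
        'ZUUL_CHANGES',
        'org/project:master:refs/changes/01/1/1^'
        'org/project2:master:refs/changes/02/2/1')

    for entry in os.environ['ZUUL_CHANGES'].split('^'):
        project, branch, ref = entry.split(':')
        print('%s %s %s' % (project, branch, ref))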
diff --git a/doc/source/zuul.rst b/doc/source/zuul.rst
index 9be4deb5b..6c77477ac 100644
--- a/doc/source/zuul.rst
+++ b/doc/source/zuul.rst
@@ -1092,13 +1092,11 @@ read these saved events and act on them.
If you need to abort Zuul and intend to manually requeue changes for
jobs which were running in its pipelines, prior to terminating you can
use the zuul-changes.py tool script to simplify the process. For
-example, this would give you a list of Gerrit commands to reverify or
-recheck changes for the gate and check pipelines respectively::
+example, this would give you a list of zuul-enqueue commands to requeue
+changes for the gate and check pipelines respectively::
- ./tools/zuul-changes.py --review-host=review.openstack.org \
- http://zuul.openstack.org/ gate 'reverify'
- ./tools/zuul-changes.py --review-host=review.openstack.org \
- http://zuul.openstack.org/ check 'recheck'
+ ./tools/zuul-changes.py http://zuul.openstack.org/ gate
+ ./tools/zuul-changes.py http://zuul.openstack.org/ check
If you send a SIGUSR2 to the zuul-server process, or the forked process
that runs the Gearman daemon, Zuul will dump a stack trace for each
diff --git a/etc/status/public_html/zuul.app.js b/etc/status/public_html/zuul.app.js
index 6f87a92c4..640437b00 100644
--- a/etc/status/public_html/zuul.app.js
+++ b/etc/status/public_html/zuul.app.js
@@ -39,6 +39,9 @@ function zuul_build_dom($, container) {
});
}
+/**
+ * @return The $.zuul instance
+ */
function zuul_start($) {
// Start the zuul app (expects default dom)
@@ -94,4 +97,6 @@ function zuul_start($) {
}
});
});
-}
\ No newline at end of file
+
+ return zuul;
+}
diff --git a/requirements.txt b/requirements.txt
index f5525b60f..c68299993 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -11,7 +11,7 @@ python-daemon>=2.0.4
extras
statsd>=1.0.0,<3.0
voluptuous>=0.7
-gear>=0.5.4,<1.0.0
+gear>=0.5.7,<1.0.0
apscheduler>=2.1.1,<3.0
PrettyTable>=0.6,<0.8
babel>=1.0
diff --git a/tests/base.py b/tests/base.py
index 18d5f5a84..8c96d18a5 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -378,6 +378,8 @@ class FakeChange(object):
class FakeGerrit(object):
+ log = logging.getLogger("zuul.test.FakeGerrit")
+
def __init__(self, *args, **kw):
self.event_queue = Queue.Queue()
self.fixture_dir = os.path.join(FIXTURE_DIR, 'gerrit')
@@ -394,7 +396,7 @@ class FakeGerrit(object):
return c
def addEvent(self, data):
- return self.event_queue.put(data)
+ return self.event_queue.put((time.time(), data))
def getEvent(self):
return self.event_queue.get()
@@ -418,12 +420,18 @@ class FakeGerrit(object):
return {}
def simpleQuery(self, query):
+ self.log.debug("simpleQuery: %s" % query)
self.queries.append(query)
if query.startswith('change:'):
# Query a specific changeid
changeid = query[len('change:'):]
l = [change.query() for change in self.changes.values()
if change.data['id'] == changeid]
+ elif query.startswith('message:'):
+ # Query the content of a commit message
+ msg = query[len('message:'):].strip()
+ l = [change.query() for change in self.changes.values()
+ if msg in change.data['commitMessage']]
else:
# Query all open changes
l = [change.query() for change in self.changes.values()]
@@ -471,6 +479,7 @@ class FakeGerritTrigger(zuul.trigger.gerrit.Gerrit):
def __init__(self, upstream_root, *args):
super(FakeGerritTrigger, self).__init__(*args)
self.upstream_root = upstream_root
+ self.gerrit_connector.delay = 0.0
def getGitUrl(self, project):
return os.path.join(self.upstream_root, project.name)
@@ -1107,6 +1116,12 @@ class ZuulTestCase(BaseTestCase):
while len(self.gearman_server.functions) < count:
time.sleep(0)
+ def orderedRelease(self):
+ # Run one build at a time to ensure non-race order:
+ while len(self.builds):
+ self.release(self.builds[0])
+ self.waitUntilSettled()
+
def release(self, job):
if isinstance(job, FakeBuild):
job.release()
@@ -1159,8 +1174,6 @@ class ZuulTestCase(BaseTestCase):
return True
def areAllBuildsWaiting(self):
- ret = True
-
builds = self.launcher.builds.values()
for build in builds:
client_job = None
@@ -1172,35 +1185,34 @@ class ZuulTestCase(BaseTestCase):
if not client_job:
self.log.debug("%s is not known to the gearman client" %
build)
- ret = False
- continue
+ return False
if not client_job.handle:
self.log.debug("%s has no handle" % client_job)
- ret = False
- continue
+ return False
server_job = self.gearman_server.jobs.get(client_job.handle)
if not server_job:
self.log.debug("%s is not known to the gearman server" %
client_job)
- ret = False
- continue
+ return False
if not hasattr(server_job, 'waiting'):
self.log.debug("%s is being enqueued" % server_job)
- ret = False
- continue
+ return False
if server_job.waiting:
continue
worker_job = self.worker.gearman_jobs.get(server_job.unique)
if worker_job:
+ if build.number is None:
+ self.log.debug("%s has not reported start" % worker_job)
+ return False
if worker_job.build.isWaiting():
continue
else:
self.log.debug("%s is running" % worker_job)
- ret = False
+ return False
else:
self.log.debug("%s is unassigned" % server_job)
- ret = False
- return ret
+ return False
+ return True
def waitUntilSettled(self):
self.log.debug("Waiting until settled...")
diff --git a/tests/fixtures/layout-no-timer.yaml b/tests/fixtures/layout-no-timer.yaml
index 9436821c7..ca40d13d0 100644
--- a/tests/fixtures/layout-no-timer.yaml
+++ b/tests/fixtures/layout-no-timer.yaml
@@ -1,14 +1,28 @@
pipelines:
+ - name: check
+ manager: IndependentPipelineManager
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
- name: periodic
manager: IndependentPipelineManager
# Trigger is required, set it to one that is a noop
# during tests that check the timer trigger.
trigger:
gerrit:
- - event: patchset-created
+ - event: ref-updated
projects:
- name: org/project
+ check:
+ - project-test1
periodic:
- project-bitrot-stable-old
- project-bitrot-stable-older
diff --git a/tests/test_scheduler.py b/tests/test_scheduler.py
index b44dba6c1..3b59e3e2a 100755
--- a/tests/test_scheduler.py
+++ b/tests/test_scheduler.py
@@ -1710,6 +1710,41 @@ class TestScheduler(ZuulTestCase):
self.assertEqual(A.reported, 0, "Abandoned change should not report")
self.assertEqual(B.reported, 1, "Change should report")
+ def test_abandoned_not_timer(self):
+ "Test that an abandoned change does not cancel timer jobs"
+
+ self.worker.hold_jobs_in_build = True
+
+ # Start timer trigger - also org/project
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-idle.yaml')
+ self.sched.reconfigure(self.config)
+ self.registerJobs()
+ # The pipeline triggers every second, so we should have seen
+ # several by now.
+ time.sleep(5)
+ self.waitUntilSettled()
+ # Stop queuing timer triggered jobs so that the assertions
+ # below don't race against more jobs being queued.
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-no-timer.yaml')
+ self.sched.reconfigure(self.config)
+ self.registerJobs()
+ self.assertEqual(len(self.builds), 2, "Two timer jobs")
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertEqual(len(self.builds), 3, "One change plus two timer jobs")
+
+ self.fake_gerrit.addEvent(A.getChangeAbandonedEvent())
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 2, "Two timer jobs remain")
+
+ self.worker.release()
+ self.waitUntilSettled()
+
def test_zuul_url_return(self):
"Test if ZUUL_URL is returning when zuul_url is set in zuul.conf"
self.assertTrue(self.sched.config.has_option('merger', 'zuul_url'))
@@ -2089,9 +2124,7 @@ class TestScheduler(ZuulTestCase):
self.waitUntilSettled()
# Run one build at a time to ensure non-race order:
- for x in range(6):
- self.release(self.builds[0])
- self.waitUntilSettled()
+ self.orderedRelease()
self.worker.hold_jobs_in_build = False
self.waitUntilSettled()
@@ -2120,7 +2153,10 @@ class TestScheduler(ZuulTestCase):
self.assertIn('Content-Type', headers)
self.assertEqual(headers['Content-Type'],
'application/json; charset=UTF-8')
+ self.assertIn('Access-Control-Allow-Origin', headers)
+ self.assertIn('Cache-Control', headers)
self.assertIn('Last-Modified', headers)
+ self.assertIn('Expires', headers)
data = f.read()
self.worker.hold_jobs_in_build = False
@@ -3282,6 +3318,45 @@ For CI problems and help debugging, contact ci@example.org"""
self.assertEqual(A.data['status'], 'MERGED')
self.assertEqual(A.reported, 2)
+ def test_crd_gate_reverse(self):
+ "Test reverse cross-repo dependencies"
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
+ A.addApproval('CRVW', 2)
+ B.addApproval('CRVW', 2)
+
+ # A Depends-On: B
+
+ A.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ A.subject, B.data['id'])
+
+ self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(B.data['status'], 'NEW')
+
+ self.worker.hold_jobs_in_build = True
+ A.addApproval('APRV', 1)
+ self.fake_gerrit.addEvent(B.addApproval('APRV', 1))
+ self.waitUntilSettled()
+
+ self.worker.release('.*-merge')
+ self.waitUntilSettled()
+ self.worker.release('.*-merge')
+ self.waitUntilSettled()
+ self.worker.hold_jobs_in_build = False
+ self.worker.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'MERGED')
+ self.assertEqual(B.data['status'], 'MERGED')
+ self.assertEqual(A.reported, 2)
+ self.assertEqual(B.reported, 2)
+
+ self.assertEqual(self.getJobFromHistory('project1-merge').changes,
+ '2,1 1,1')
+
def test_crd_cycle(self):
"Test cross-repo dependency cycles"
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
@@ -3348,7 +3423,7 @@ For CI problems and help debugging, contact ci@example.org"""
def test_crd_check_git_depends(self):
"Test single-repo dependencies in independent pipelines"
- self.gearman_server.hold_jobs_in_queue = True
+ self.gearman_server.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
@@ -3360,8 +3435,8 @@ For CI problems and help debugging, contact ci@example.org"""
self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
- self.gearman_server.hold_jobs_in_queue = False
- self.gearman_server.release()
+ self.orderedRelease()
+ self.gearman_server.hold_jobs_in_build = False
self.waitUntilSettled()
self.assertEqual(A.data['status'], 'NEW')
@@ -3378,7 +3453,7 @@ For CI problems and help debugging, contact ci@example.org"""
def test_crd_check_duplicate(self):
"Test duplicate check in independent pipelines"
- self.gearman_server.hold_jobs_in_queue = True
+ self.worker.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
check_pipeline = self.sched.layout.pipelines['check']
@@ -3399,12 +3474,11 @@ For CI problems and help debugging, contact ci@example.org"""
self.waitUntilSettled()
self.assertEqual(len(check_pipeline.getAllItems()), 3)
- self.gearman_server.hold_jobs_in_queue = False
- self.gearman_server.release('.*-merge')
- self.waitUntilSettled()
- self.gearman_server.release('.*-merge')
- self.waitUntilSettled()
- self.gearman_server.release()
+ # Release jobs in order to avoid races with change A jobs
+ # finishing before change B jobs.
+ self.orderedRelease()
+ self.worker.hold_jobs_in_build = False
+ self.worker.release()
self.waitUntilSettled()
self.assertEqual(A.data['status'], 'NEW')
@@ -3501,3 +3575,43 @@ For CI problems and help debugging, contact ci@example.org"""
# Each job should have tested exactly one change
for job in self.history:
self.assertEqual(len(job.changes.split()), 1)
+
+ def test_crd_check_transitive(self):
+ "Test transitive cross-repo dependencies"
+ # Specifically, if A -> B -> C, and C gets a new patchset and
+ # A gets a new patchset, ensure the test of A,2 includes B,1
+ # and C,2 (not C,1 which would indicate stale data in the
+ # cache for B).
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
+ C = self.fake_gerrit.addFakeChange('org/project3', 'master', 'C')
+
+ # A Depends-On: B
+ A.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ A.subject, B.data['id'])
+
+ # B Depends-On: C
+ B.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ B.subject, C.data['id'])
+
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertEqual(self.history[-1].changes, '3,1 2,1 1,1')
+
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertEqual(self.history[-1].changes, '3,1 2,1')
+
+ self.fake_gerrit.addEvent(C.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertEqual(self.history[-1].changes, '3,1')
+
+ C.addPatchset()
+ self.fake_gerrit.addEvent(C.getPatchsetCreatedEvent(2))
+ self.waitUntilSettled()
+ self.assertEqual(self.history[-1].changes, '3,2')
+
+ A.addPatchset()
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(2))
+ self.waitUntilSettled()
+ self.assertEqual(self.history[-1].changes, '3,2 2,1 1,2')
diff --git a/tests/test_zuultrigger.py b/tests/test_zuultrigger.py
index a26fa8605..2f0e4f052 100644
--- a/tests/test_zuultrigger.py
+++ b/tests/test_zuultrigger.py
@@ -111,8 +111,8 @@ class TestZuulTrigger(ZuulTestCase):
"merged with the current state of the repository. Please rebase "
"your change and upload a new patchset.")
- self.assertEqual(self.fake_gerrit.queries[0],
- "project:org/project status:open")
+ self.assertTrue("project:org/project status:open" in
+ self.fake_gerrit.queries)
# Reconfigure and run the test again. This is a regression
# check to make sure that we don't end up with a stale trigger
diff --git a/tools/zuul-changes.py b/tools/zuul-changes.py
index 7fc541b1e..9dbf504e7 100755
--- a/tools/zuul-changes.py
+++ b/tools/zuul-changes.py
@@ -1,5 +1,6 @@
#!/usr/bin/env python
# Copyright 2013 OpenStack Foundation
+# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@@ -13,9 +14,6 @@
# License for the specific language governing permissions and limitations
# under the License.
-# Print commands to leave gerrit comments for every change in one of
-# Zuul's pipelines.
-
import urllib2
import json
import argparse
@@ -23,9 +21,6 @@ import argparse
parser = argparse.ArgumentParser()
parser.add_argument('url', help='The URL of the running Zuul instance')
parser.add_argument('pipeline_name', help='The name of the Zuul pipeline')
-parser.add_argument('comment', help='The text of the Gerrit comment')
-parser.add_argument('--review-host', default='review',
- help='The Gerrit hostname')
options = parser.parse_args()
data = urllib2.urlopen('%s/status.json' % options.url).read()
@@ -37,7 +32,13 @@ for pipeline in data['pipelines']:
for queue in pipeline['change_queues']:
for head in queue['heads']:
for change in head:
- print 'ssh %s gerrit review %s --message \\"%s\\"' % (
- options.review_host,
- change['id'],
- options.comment)
+ if not change['live']:
+ continue
+ cid, cps = change['id'].split(',')
+ print (
+ "zuul enqueue --trigger gerrit --pipeline %s "
+ "--project %s --change %s,%s" % (
+ options.pipeline_name,
+ change['project'],
+ cid, cps)
+ )
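As a sketch of what the rewritten script now prints (the status data below
is hypothetical), running the same loop over a minimal status.json-shaped
structure yields one enqueue command per live change and skips non-live
items:

    pipeline_name = 'gate'
    heads = [[{'id': '1234,5', 'project': 'org/project', 'live': True},
              {'id': '1235,1', 'project': 'org/project', 'live': False}]]

    for head in heads:
        for change in head:
            if not change['live']:
                continue
            cid, cps = change['id'].split(',')
            print("zuul enqueue --trigger gerrit --pipeline %s "
                  "--project %s --change %s,%s" % (
                      pipeline_name, change['project'], cid, cps))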
diff --git a/zuul/launcher/gearman.py b/zuul/launcher/gearman.py
index 564a554ba..653678a9a 100644
--- a/zuul/launcher/gearman.py
+++ b/zuul/launcher/gearman.py
@@ -231,6 +231,8 @@ class Gearman(object):
s_config = {}
s_config.update((k, v.format(item=item, job=job,
change=item.change))
+ if isinstance(v, basestring)
+ else (k, v)
for k, v in s.items())
(swift_instructions['URL'],
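A minimal, self-contained illustration of the formatting fix in the hunk
above (the setting names and the stand-in job object are hypothetical):
string-valued swift settings still have .format() applied, while non-string
values such as integers now pass through untouched:

    class _StubJob(object):
        name = 'example-job'   # stand-in for a real Zuul job object

    s = {'container': 'logs_{job.name}', 'expiry': 7200}

    s_config = {}
    s_config.update((k, v.format(item=None, job=_StubJob(), change=None))
                    if isinstance(v, str)
                    else (k, v)
                    for k, v in s.items())
    # s_config now holds {'container': 'logs_example-job', 'expiry': 7200}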
@@ -402,14 +404,15 @@ class Gearman(object):
self.log.debug("Removed build %s from queue" % build)
return
+ time.sleep(1)
+
self.log.debug("Still unable to find build %s to cancel" % build)
if build.number:
self.log.debug("Build %s has just started" % build)
- else:
- self.log.error("Build %s has not started but was not"
- "found in queue; canceling anyway" % build)
- self.cancelRunningBuild(build)
- self.log.debug("Canceled possibly running build %s" % build)
+ self.log.debug("Canceled running build %s" % build)
+ self.cancelRunningBuild(build)
+ return
+ self.log.debug("Unable to cancel build %s" % build)
def onBuildCompleted(self, job, result=None):
if job.unique in self.meta_jobs:
diff --git a/zuul/lib/cloner.py b/zuul/lib/cloner.py
index 2b35e41a3..67e238a8a 100644
--- a/zuul/lib/cloner.py
+++ b/zuul/lib/cloner.py
@@ -125,7 +125,7 @@ class Cloner(object):
repo = self.cloneUpstream(project, dest)
- repo.update()
+ repo.reset()
# Ensure that we don't have stale remotes around
repo.prune()
diff --git a/zuul/lib/gerrit.py b/zuul/lib/gerrit.py
index 9aeff3df8..6c7906bd2 100644
--- a/zuul/lib/gerrit.py
+++ b/zuul/lib/gerrit.py
@@ -39,7 +39,7 @@ class GerritWatcher(threading.Thread):
data = json.loads(l)
self.log.debug("Received data from Gerrit event stream: \n%s" %
pprint.pformat(data))
- self.gerrit.addEvent(data)
+ self.gerrit.addEvent((time.time(), data))
def _listen(self, stdout, stderr):
poll = select.poll()
diff --git a/zuul/lib/swift.py b/zuul/lib/swift.py
index 9b9bea375..3c411d3ff 100644
--- a/zuul/lib/swift.py
+++ b/zuul/lib/swift.py
@@ -147,8 +147,10 @@ class Swift(object):
settings[key] = kwargs[altkey]
elif self.config.has_option('swift', 'default_' + key):
settings[key] = self.config.get('swift', 'default_' + key)
+ # TODO: these are always strings; some should be converted
+ # to ints.
- expires = int(time() + settings['expiry'])
+ expires = int(time() + int(settings['expiry']))
redirect = ''
url = os.path.join(self.storage_url, settings['container'],
diff --git a/zuul/merger/client.py b/zuul/merger/client.py
index 8d8f7eebd..950c385d3 100644
--- a/zuul/merger/client.py
+++ b/zuul/merger/client.py
@@ -84,10 +84,10 @@ class MergeClient(object):
def submitJob(self, name, data, build_set,
precedence=zuul.model.PRECEDENCE_NORMAL):
uuid = str(uuid4().hex)
- self.log.debug("Submitting job %s with data %s" % (name, data))
job = gear.Job(name,
json.dumps(data),
unique=uuid)
+ self.log.debug("Submitting job %s with data %s" % (job, data))
self.build_sets[uuid] = build_set
self.gearman.submitJob(job, precedence=precedence,
timeout=300)
diff --git a/zuul/merger/server.py b/zuul/merger/server.py
index 0d105f677..1a0232280 100644
--- a/zuul/merger/server.py
+++ b/zuul/merger/server.py
@@ -90,10 +90,10 @@ class MergeServer(object):
job = self.worker.getJob()
try:
if job.name == 'merger:merge':
- self.log.debug("Got merge job.")
+ self.log.debug("Got merge job: %s" % job.unique)
self.merge(job)
elif job.name == 'merger:update':
- self.log.debug("Got update job.")
+ self.log.debug("Got update job: %s" % job.unique)
self.update(job)
else:
self.log.error("Unable to handle job %s" % job.name)
diff --git a/zuul/model.py b/zuul/model.py
index 8dc28dfbe..4d402ff9c 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -965,7 +965,8 @@ class NullChange(Changeish):
return None
def equals(self, other):
- if (self.project == other.project):
+ if (self.project == other.project
+ and other._id() is None):
return True
return False
diff --git a/zuul/trigger/gerrit.py b/zuul/trigger/gerrit.py
index c5fdf9af5..c28401c56 100644
--- a/zuul/trigger/gerrit.py
+++ b/zuul/trigger/gerrit.py
@@ -26,6 +26,7 @@ class GerritEventConnector(threading.Thread):
"""Move events from Gerrit to the scheduler."""
log = logging.getLogger("zuul.GerritEventConnector")
+ delay = 5.0
def __init__(self, gerrit, sched, trigger):
super(GerritEventConnector, self).__init__()
@@ -37,12 +38,20 @@ class GerritEventConnector(threading.Thread):
def stop(self):
self._stopped = True
- self.gerrit.addEvent(None)
+ self.gerrit.addEvent((None, None))
def _handleEvent(self):
- data = self.gerrit.getEvent()
+ ts, data = self.gerrit.getEvent()
if self._stopped:
return
+ # Gerrit can produce inconsistent data immediately after an
+ # event, so ensure that we do not deliver the event to Zuul
+ # until at least a certain amount of time has passed. Note
+ # that if we receive several events in succession, we will
+ # only need to delay for the first event. In essence, Zuul
+ # should always be a constant number of seconds behind Gerrit.
+ now = time.time()
+ time.sleep(max((ts + self.delay) - now, 0.0))
event = TriggerEvent()
event.type = data.get('type')
event.trigger_name = self.trigger.name
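A small worked sketch of the delay arithmetic above (timestamps
hypothetical): when several events are queued in the same second, only the
first one pays the full delay, because by the time the later events are
dequeued their timestamps are already about `delay` seconds in the past:

    import time

    delay = 5.0
    timestamps = [time.time()] * 3   # a burst of events queued together

    for ts in timestamps:
        now = time.time()
        wait = max((ts + delay) - now, 0.0)
        time.sleep(wait)   # roughly 5s for the first event, ~0s after that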
@@ -362,6 +371,27 @@ class Gerrit(object):
records.extend(self.gerrit.simpleQuery(query))
return records
+ def _getNeededByFromCommit(self, change_id):
+ records = []
+ seen = set()
+ query = 'message:%s' % change_id
+ self.log.debug("Running query %s to find changes needed-by" %
+ (query,))
+ results = self.gerrit.simpleQuery(query)
+ for result in results:
+ for match in self.depends_on_re.findall(
+ result['commitMessage']):
+ if match != change_id:
+ continue
+ key = (result['number'], result['currentPatchSet']['number'])
+ if key in seen:
+ continue
+ self.log.debug("Found change %s,%s needs %s from commit" %
+ (key[0], key[1], change_id))
+ seen.add(key)
+ records.append(result)
+ return records
+
def updateChange(self, change, history=None):
self.log.info("Updating information for %s,%s" %
(change.number, change.patchset))
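The new needed-by lookup is essentially "search Gerrit commit messages for
this change's id, then keep only results whose Depends-On footer really
names it, de-duplicated by (number, patchset)". A standalone sketch of that
filter is below; the Depends-On pattern is a hypothetical stand-in, since
the real depends_on_re is defined elsewhere in this module:

    import re

    # Hypothetical pattern; assumes Gerrit-style Change-Ids.
    depends_on_re = re.compile(r'^Depends-On: (I[0-9a-f]{40})\s*$',
                               re.MULTILINE | re.IGNORECASE)

    def needed_by(results, change_id):
        seen = set()
        needed = []
        for result in results:
            for match in depends_on_re.findall(result['commitMessage']):
                if match != change_id:
                    continue
                key = (result['number'], result['currentPatchSet']['number'])
                if key not in seen:
                    seen.add(key)
                    needed.append(result)
        return needed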
@@ -378,18 +408,19 @@ class Gerrit(object):
change.branch = data['branch']
change.url = data['url']
max_ps = 0
- change.files = []
+ files = []
for ps in data['patchSets']:
if ps['number'] == change.patchset:
change.refspec = ps['ref']
for f in ps.get('files', []):
- change.files.append(f['file'])
+ files.append(f['file'])
if int(ps['number']) > int(max_ps):
max_ps = ps['number']
if max_ps == change.patchset:
change.is_current_patchset = True
else:
change.is_current_patchset = False
+ change.files = files
change.is_merged = self._isMerged(change)
change.approvals = data['currentPatchSet'].get('approvals', [])
@@ -408,7 +439,7 @@ class Gerrit(object):
history = history[:]
history.append(change.number)
- change.needs_changes = []
+ needs_changes = []
if 'dependsOn' in data:
parts = data['dependsOn'][0]['ref'].split('/')
dep_num, dep_ps = parts[3], parts[4]
@@ -418,8 +449,8 @@ class Gerrit(object):
self.log.debug("Getting git-dependent change %s,%s" %
(dep_num, dep_ps))
dep = self._getChange(dep_num, dep_ps, history=history)
- if (not dep.is_merged) and dep not in change.needs_changes:
- change.needs_changes.append(dep)
+ if (not dep.is_merged) and dep not in needs_changes:
+ needs_changes.append(dep)
for record in self._getDependsOnFromCommit(data['commitMessage']):
dep_num = record['number']
@@ -430,17 +461,32 @@ class Gerrit(object):
self.log.debug("Getting commit-dependent change %s,%s" %
(dep_num, dep_ps))
dep = self._getChange(dep_num, dep_ps, history=history)
- if (not dep.is_merged) and dep not in change.needs_changes:
- change.needs_changes.append(dep)
+ if (not dep.is_merged) and dep not in needs_changes:
+ needs_changes.append(dep)
+ change.needs_changes = needs_changes
- change.needed_by_changes = []
+ needed_by_changes = []
if 'neededBy' in data:
for needed in data['neededBy']:
parts = needed['ref'].split('/')
dep_num, dep_ps = parts[3], parts[4]
dep = self._getChange(dep_num, dep_ps)
if (not dep.is_merged) and dep.is_current_patchset:
- change.needed_by_changes.append(dep)
+ needed_by_changes.append(dep)
+
+ for record in self._getNeededByFromCommit(data['id']):
+ dep_num = record['number']
+ dep_ps = record['currentPatchSet']['number']
+ self.log.debug("Getting commit-needed change %s,%s" %
+ (dep_num, dep_ps))
+ # Because a commit needed-by may be a cross-repo
+ # dependency, cause that change to refresh so that it will
+ # reference the latest patchset of its Depends-On (this
+ # change).
+ dep = self._getChange(dep_num, dep_ps, refresh=True)
+ if (not dep.is_merged) and dep.is_current_patchset:
+ needed_by_changes.append(dep)
+ change.needed_by_changes = needed_by_changes
return change
diff --git a/zuul/webapp.py b/zuul/webapp.py
index e289398ac..44c333bf9 100644
--- a/zuul/webapp.py
+++ b/zuul/webapp.py
@@ -121,5 +121,10 @@ class WebApp(threading.Thread):
raise webob.exc.HTTPNotFound()
response.headers['Access-Control-Allow-Origin'] = '*'
+
+ response.cache_control.public = True
+ response.cache_control.max_age = self.cache_expiry
response.last_modified = self.cache_time
- return response
+ response.expires = self.cache_time + self.cache_expiry
+
+ return response.conditional_response_app
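With these headers in place and the conditional response wrapper returned,
a status poller can revalidate with If-Modified-Since and receive a 304
instead of a full body. A rough client-side sketch, using the same urllib2
style as tools/zuul-changes.py (the URL is hypothetical):

    import urllib2

    url = 'http://zuul.example.org/status.json'   # hypothetical host

    first = urllib2.urlopen(url)
    body = first.read()
    last_modified = first.headers['Last-Modified']

    request = urllib2.Request(url,
                              headers={'If-Modified-Since': last_modified})
    try:
        urllib2.urlopen(request)
    except urllib2.HTTPError as e:
        if e.code == 304:
            pass   # nothing changed; keep using the cached body
        else:
            raise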