summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--doc/source/zuul.rst62
-rw-r--r--etc/status/.gitignore2
-rwxr-xr-xetc/status/fetch-dependencies.sh4
-rw-r--r--etc/status/public_html/index.html3
-rw-r--r--etc/status/public_html/jquery.zuul.js17
-rw-r--r--requirements.txt5
-rwxr-xr-xtests/base.py56
-rw-r--r--tests/fixtures/layout-ignore-dependencies.yaml28
-rw-r--r--tests/fixtures/layout-skip-if.yaml29
-rw-r--r--tests/fixtures/layout-zuultrigger-merged.yaml1
-rw-r--r--tests/fixtures/layouts/good_layout.yaml11
-rw-r--r--tests/test_change_matcher.py154
-rw-r--r--tests/test_cloner.py177
-rw-r--r--tests/test_model.py64
-rwxr-xr-xtests/test_scheduler.py720
-rw-r--r--tests/test_zuultrigger.py22
-rwxr-xr-xtools/zuul-changes.py21
-rw-r--r--tox.ini7
-rw-r--r--zuul/change_matcher.py132
-rw-r--r--zuul/cmd/client.py3
-rwxr-xr-xzuul/cmd/cloner.py6
-rwxr-xr-xzuul/cmd/server.py5
-rw-r--r--zuul/launcher/gearman.py2
-rw-r--r--zuul/layoutvalidator.py17
-rw-r--r--zuul/lib/cloner.py7
-rw-r--r--zuul/lib/gerrit.py2
-rw-r--r--zuul/lib/swift.py7
-rw-r--r--zuul/merger/merger.py2
-rw-r--r--zuul/model.py103
-rw-r--r--zuul/scheduler.py498
-rw-r--r--zuul/trigger/gerrit.py75
-rw-r--r--zuul/trigger/zuultrigger.py13
32 files changed, 1919 insertions, 336 deletions
diff --git a/doc/source/zuul.rst b/doc/source/zuul.rst
index 6e9fb05a9..9be4deb5b 100644
--- a/doc/source/zuul.rst
+++ b/doc/source/zuul.rst
@@ -569,6 +569,15 @@ explanation of each of the parameters::
well. To suppress this behavior (and allow jobs to continue
running), set this to ``false``. Default: ``true``.
+**ignore-dependencies**
+ In any kind of pipeline (dependent or independent), Zuul will
+ attempt to enqueue all dependencies ahead of the current change so
+ that they are tested together (independent pipelines report the
+ results of each change regardless of the results of changes ahead).
+ To ignore dependencies completely in an independent pipeline, set
+ this to ``true``. This option is ignored by dependent pipelines.
+ The default is: ``false``.
+
**success**
Describes where Zuul should report to if all the jobs complete
successfully.
@@ -800,6 +809,47 @@ each job as it builds a list from the project specification.
file patterns listed here. This field is treated as a regular
expression and multiple expressions may be listed.
+**skip-if (optional)**
+
+ This job should not be run if all the patterns specified by the
+ optional fields listed below match on their targets. When multiple
+ sets of parameters are provided, this job will be skipped if any set
+ matches. For example: ::
+
+ jobs:
+ - name: check-tempest-dsvm-neutron
+ skip-if:
+ - project: ^openstack/neutron$
+ branch: ^stable/juno$
+ all-files-match-any:
+ - ^neutron/tests/.*$
+ - ^tools/.*$
+ - all-files-match-any:
+ - ^doc/.*$
+ - ^.*\.rst$
+
+ With this configuration, the job would be skipped for a neutron
+ patchset for the stable/juno branch provided that every file in the
+ change matched at least one of the specified file regexes. The job
+ will also be skipped for any patchset that modified only the doc
+ tree or rst files.
+
+ *project* (optional)
+ The regular expression to match against the project of the change.
+
+ *branch* (optional)
+ The regular expression to match against the branch or ref of the
+ change.
+
+ *all-files-match-any* (optional)
+ A list of regular expressions intended to match the files involved
+ in the change. This parameter will be considered matching a
+ change only if all files in a change match at least one of these
+ expressions.
+
+ The pattern for '/COMMIT_MSG' is always matched on and does not
+ have to be included.
+
**voting (optional)**
Boolean value (``true`` or ``false``) that indicates whatever
a job is voting or not. Default: ``true``.
@@ -1046,13 +1096,15 @@ example, this would give you a list of Gerrit commands to reverify or
recheck changes for the gate and check pipelines respectively::
./tools/zuul-changes.py --review-host=review.openstack.org \
- http://zuul.openstack.org/ gate 'reverify no bug'
+ http://zuul.openstack.org/ gate 'reverify'
./tools/zuul-changes.py --review-host=review.openstack.org \
- http://zuul.openstack.org/ check 'recheck no bug'
+ http://zuul.openstack.org/ check 'recheck'
-If you send a SIGUSR2 to the zuul-server process, Zuul will dump a stack
-trace for each running thread into its debug log. This is useful for
-tracking down deadlock or otherwise slow threads.
+If you send a SIGUSR2 to the zuul-server process, or the forked process
+that runs the Gearman daemon, Zuul will dump a stack trace for each
+running thread into its debug log. It is written under the log bucket
+``zuul.stack_dump``. This is useful for tracking down deadlock or
+otherwise slow threads.
When `yappi <https://code.google.com/p/yappi/>`_ (Yet Another Python
Profiler) is available, additional functions' and threads' stats are
diff --git a/etc/status/.gitignore b/etc/status/.gitignore
index 8b94cad18..1ecdbed42 100644
--- a/etc/status/.gitignore
+++ b/etc/status/.gitignore
@@ -1,4 +1,4 @@
public_html/jquery.min.js
-public_html/jquery-visibility.min.js
+public_html/jquery-visibility.js
public_html/bootstrap
public_html/jquery.graphite.js
diff --git a/etc/status/fetch-dependencies.sh b/etc/status/fetch-dependencies.sh
index 4868310ad..b31d0de5f 100755
--- a/etc/status/fetch-dependencies.sh
+++ b/etc/status/fetch-dependencies.sh
@@ -3,10 +3,10 @@ BASE_DIR=$(cd $(dirname $0); pwd)
echo "Destination: $BASE_DIR/public_html"
echo "Fetching jquery.min.js..."
-curl --silent http://code.jquery.com/jquery.min.js > $BASE_DIR/public_html/jquery.min.js
+curl -L --silent http://code.jquery.com/jquery.min.js > $BASE_DIR/public_html/jquery.min.js
echo "Fetching jquery-visibility.min.js..."
-curl --silent https://raw.githubusercontent.com/mathiasbynens/jquery-visibility/master/jquery-visibility.js > $BASE_DIR/public_html/jquery-visibility.min.js
+curl -L --silent https://raw.githubusercontent.com/mathiasbynens/jquery-visibility/master/jquery-visibility.js > $BASE_DIR/public_html/jquery-visibility.js
echo "Fetching jquery.graphite.js..."
curl -L --silent https://github.com/prestontimmons/graphitejs/archive/master.zip > jquery-graphite.zip
diff --git a/etc/status/public_html/index.html b/etc/status/public_html/index.html
index d77470bb7..3bd7a12fc 100644
--- a/etc/status/public_html/index.html
+++ b/etc/status/public_html/index.html
@@ -20,7 +20,6 @@ under the License.
<head>
<title>Zuul Status</title>
<link rel="stylesheet" href="bootstrap/css/bootstrap.min.css">
- <link rel="stylesheet" href="bootstrap/css/bootstrap-responsive.min.css">
<link rel="stylesheet" href="styles/zuul.css" />
</head>
<body>
@@ -28,7 +27,7 @@ under the License.
<div id="zuul_container"></div>
<script src="jquery.min.js"></script>
- <script src="jquery-visibility.min.js"></script>
+ <script src="jquery-visibility.js"></script>
<script src="jquery.graphite.js"></script>
<script src="jquery.zuul.js"></script>
<script src="zuul.app.js"></script>
diff --git a/etc/status/public_html/jquery.zuul.js b/etc/status/public_html/jquery.zuul.js
index 5d155aff6..5e442058e 100644
--- a/etc/status/public_html/jquery.zuul.js
+++ b/etc/status/public_html/jquery.zuul.js
@@ -316,9 +316,11 @@
var $enqueue_time = $('<small />').addClass('time')
.attr('title', 'Elapsed Time').html(enqueue_time);
- var $right = $('<div />')
- .addClass('col-xs-4 text-right')
- .append($remaining_time, $('<br />'), $enqueue_time);
+ var $right = $('<div />');
+ if (change.live === true) {
+ $right.addClass('col-xs-4 text-right')
+ .append($remaining_time, $('<br />'), $enqueue_time);
+ }
var $header = $('<div />')
.addClass('row')
@@ -370,6 +372,11 @@
icon_title = 'Waiting until closer to head of queue to' +
' start jobs';
}
+ else if (change.live !== true) {
+ // Grey icon
+ icon_name = 'grey.png';
+ icon_title = 'Dependent change independently tested';
+ }
else if (change.failing_reasons &&
change.failing_reasons.length > 0) {
var reason = change.failing_reasons.join(', ');
@@ -835,7 +842,9 @@
});
$.each(change_queue.heads, function(head_i, head) {
$.each(head, function(change_i, change) {
- count += 1;
+ if (change.live === true) {
+ count += 1;
+ }
var idx = tree.indexOf(change.id);
if (idx > -1) {
change._tree_index = idx;
diff --git a/requirements.txt b/requirements.txt
index 81257abe3..f5525b60f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,10 +5,9 @@ PyYAML>=3.1.0
Paste
WebOb>=1.2.3,<1.3
paramiko>=1.8.0
-GitPython>=0.3.2.1
-lockfile>=0.8
+GitPython>=0.3.3
ordereddict
-python-daemon<2.0
+python-daemon>=2.0.4
extras
statsd>=1.0.0,<3.0
voluptuous>=0.7
diff --git a/tests/base.py b/tests/base.py
index 6045bed9d..18d5f5a84 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -262,14 +262,16 @@ class FakeChange(object):
granted_on=None):
if not granted_on:
granted_on = time.time()
- approval = {'description': self.categories[category][0],
- 'type': category,
- 'value': str(value),
- 'by': {
- 'username': username,
- 'email': username + '@example.com',
- },
- 'grantedOn': int(granted_on)}
+ approval = {
+ 'description': self.categories[category][0],
+ 'type': category,
+ 'value': str(value),
+ 'by': {
+ 'username': username,
+ 'email': username + '@example.com',
+ },
+ 'grantedOn': int(granted_on)
+ }
for i, x in enumerate(self.patchsets[-1]['approvals'][:]):
if x['by']['username'] == username and x['type'] == category:
del self.patchsets[-1]['approvals'][i]
@@ -359,7 +361,7 @@ class FakeChange(object):
def setMerged(self):
if (self.depends_on_change and
- self.depends_on_change.data['status'] != 'MERGED'):
+ self.depends_on_change.data['status'] != 'MERGED'):
return
if self.fail_merge:
return
@@ -416,11 +418,15 @@ class FakeGerrit(object):
return {}
def simpleQuery(self, query):
- # This is currently only used to return all open changes for a
- # project
self.queries.append(query)
- l = [change.query() for change in self.changes.values()]
- l.append({"type":"stats","rowCount":1,"runTimeMilliseconds":3})
+ if query.startswith('change:'):
+ # Query a specific changeid
+ changeid = query[len('change:'):]
+ l = [change.query() for change in self.changes.values()
+ if change.data['id'] == changeid]
+ else:
+ # Query all open changes
+ l = [change.query() for change in self.changes.values()]
return l
def startWatching(self, *args, **kw):
@@ -805,11 +811,11 @@ class FakeSwiftClientConnection(swiftclient.client.Connection):
return endpoint, ''
-class ZuulTestCase(testtools.TestCase):
+class BaseTestCase(testtools.TestCase):
log = logging.getLogger("zuul.test")
def setUp(self):
- super(ZuulTestCase, self).setUp()
+ super(BaseTestCase, self).setUp()
test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
try:
test_timeout = int(test_timeout)
@@ -833,9 +839,16 @@ class ZuulTestCase(testtools.TestCase):
level=logging.DEBUG,
format='%(asctime)s %(name)-32s '
'%(levelname)-8s %(message)s'))
+
+
+class ZuulTestCase(BaseTestCase):
+
+ def setUp(self):
+ super(ZuulTestCase, self).setUp()
if USE_TEMPDIR:
tmp_root = self.useFixture(fixtures.TempDir(
- rootdir=os.environ.get("ZUUL_TEST_ROOT"))).path
+ rootdir=os.environ.get("ZUUL_TEST_ROOT"))
+ ).path
else:
tmp_root = os.environ.get("ZUUL_TEST_ROOT")
self.test_root = os.path.join(tmp_root, "zuul-test")
@@ -934,7 +947,8 @@ class ZuulTestCase(testtools.TestCase):
self.sched.registerTrigger(self.gerrit)
self.timer = zuul.trigger.timer.Timer(self.config, self.sched)
self.sched.registerTrigger(self.timer)
- self.zuultrigger = zuul.trigger.zuultrigger.ZuulTrigger(self.config, self.sched)
+ self.zuultrigger = zuul.trigger.zuultrigger.ZuulTrigger(self.config,
+ self.sched)
self.sched.registerTrigger(self.zuultrigger)
self.sched.registerReporter(
@@ -972,6 +986,10 @@ class ZuulTestCase(testtools.TestCase):
repos.append(obj)
self.assertEqual(len(repos), 0)
self.assertEmptyQueues()
+ for pipeline in self.sched.layout.pipelines.values():
+ if isinstance(pipeline.manager,
+ zuul.scheduler.IndependentPipelineManager):
+ self.assertEqual(len(pipeline.queues), 0)
def shutdown(self):
self.log.debug("Shutting down after tests")
@@ -1205,10 +1223,10 @@ class ZuulTestCase(testtools.TestCase):
self.sched.trigger_event_queue.join()
self.sched.result_event_queue.join()
self.sched.run_handler_lock.acquire()
- if (self.sched.trigger_event_queue.empty() and
+ if (not self.merge_client.build_sets and
+ self.sched.trigger_event_queue.empty() and
self.sched.result_event_queue.empty() and
self.fake_gerrit.event_queue.empty() and
- not self.merge_client.build_sets and
self.haveAllBuildsReported() and
self.areAllBuildsWaiting()):
self.sched.run_handler_lock.release()
diff --git a/tests/fixtures/layout-ignore-dependencies.yaml b/tests/fixtures/layout-ignore-dependencies.yaml
new file mode 100644
index 000000000..5c0257cce
--- /dev/null
+++ b/tests/fixtures/layout-ignore-dependencies.yaml
@@ -0,0 +1,28 @@
+pipelines:
+ - name: check
+ manager: IndependentPipelineManager
+ ignore-dependencies: true
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+projects:
+ - name: org/project1
+ check:
+ - project1-merge:
+ - project1-test1
+ - project1-test2
+ - project1-project2-integration
+
+ - name: org/project2
+ check:
+ - project2-merge:
+ - project2-test1
+ - project2-test2
+ - project1-project2-integration
diff --git a/tests/fixtures/layout-skip-if.yaml b/tests/fixtures/layout-skip-if.yaml
new file mode 100644
index 000000000..0cfb445cd
--- /dev/null
+++ b/tests/fixtures/layout-skip-if.yaml
@@ -0,0 +1,29 @@
+pipelines:
+ - name: check
+ manager: IndependentPipelineManager
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+
+jobs:
+ # Defining a metajob will validate that the skip-if attribute of the
+ # metajob is correctly copied to the job.
+ - name: ^.*skip-if$
+ skip-if:
+ - project: ^org/project$
+ branch: ^master$
+ all-files-match-any:
+ - ^README$
+ - name: project-test-skip-if
+
+projects:
+ - name: org/project
+ check:
+ - project-test-skip-if
diff --git a/tests/fixtures/layout-zuultrigger-merged.yaml b/tests/fixtures/layout-zuultrigger-merged.yaml
index 657700dfe..bb06ddef4 100644
--- a/tests/fixtures/layout-zuultrigger-merged.yaml
+++ b/tests/fixtures/layout-zuultrigger-merged.yaml
@@ -36,6 +36,7 @@ pipelines:
- name: merge-check
manager: IndependentPipelineManager
source: gerrit
+ ignore-dependencies: true
trigger:
zuul:
- event: project-change-merged
diff --git a/tests/fixtures/layouts/good_layout.yaml b/tests/fixtures/layouts/good_layout.yaml
index 4bd5e70fe..fc2effd66 100644
--- a/tests/fixtures/layouts/good_layout.yaml
+++ b/tests/fixtures/layouts/good_layout.yaml
@@ -43,6 +43,17 @@ pipelines:
verified: -2
workinprogress: true
+ - name: merge-check
+ manager: IndependentPipelineManager
+ source: gerrit
+ ignore-dependencies: true
+ trigger:
+ zuul:
+ - event: project-change-merged
+ merge-failure:
+ gerrit:
+ verified: -1
+
jobs:
- name: ^.*-merge$
failure-message: Unable to merge change
diff --git a/tests/test_change_matcher.py b/tests/test_change_matcher.py
new file mode 100644
index 000000000..1f4ab93d6
--- /dev/null
+++ b/tests/test_change_matcher.py
@@ -0,0 +1,154 @@
+# Copyright 2015 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from zuul import change_matcher as cm
+from zuul import model
+
+from tests.base import BaseTestCase
+
+
+class BaseTestMatcher(BaseTestCase):
+
+ project = 'project'
+
+ def setUp(self):
+ super(BaseTestMatcher, self).setUp()
+ self.change = model.Change(self.project)
+
+
+class TestAbstractChangeMatcher(BaseTestMatcher):
+
+ def test_str(self):
+ matcher = cm.ProjectMatcher(self.project)
+ self.assertEqual(str(matcher), '{ProjectMatcher:project}')
+
+ def test_repr(self):
+ matcher = cm.ProjectMatcher(self.project)
+ self.assertEqual(repr(matcher), '<ProjectMatcher project>')
+
+
+class TestProjectMatcher(BaseTestMatcher):
+
+ def test_matches_returns_true(self):
+ matcher = cm.ProjectMatcher(self.project)
+ self.assertTrue(matcher.matches(self.change))
+
+ def test_matches_returns_false(self):
+ matcher = cm.ProjectMatcher('not_project')
+ self.assertFalse(matcher.matches(self.change))
+
+
+class TestBranchMatcher(BaseTestMatcher):
+
+ def setUp(self):
+ super(TestBranchMatcher, self).setUp()
+ self.matcher = cm.BranchMatcher('foo')
+
+ def test_matches_returns_true_on_matching_branch(self):
+ self.change.branch = 'foo'
+ self.assertTrue(self.matcher.matches(self.change))
+
+ def test_matches_returns_true_on_matching_ref(self):
+ self.change.branch = 'bar'
+ self.change.ref = 'foo'
+ self.assertTrue(self.matcher.matches(self.change))
+
+ def test_matches_returns_false_for_no_match(self):
+ self.change.branch = 'bar'
+ self.change.ref = 'baz'
+ self.assertFalse(self.matcher.matches(self.change))
+
+ def test_matches_returns_false_for_missing_attrs(self):
+ delattr(self.change, 'branch')
+ # ref is by default not an attribute
+ self.assertFalse(self.matcher.matches(self.change))
+
+
+class TestFileMatcher(BaseTestMatcher):
+
+ def setUp(self):
+ super(TestFileMatcher, self).setUp()
+ self.matcher = cm.FileMatcher('filename')
+
+ def test_matches_returns_true(self):
+ self.change.files = ['filename']
+ self.assertTrue(self.matcher.matches(self.change))
+
+ def test_matches_returns_false_when_no_files(self):
+ self.assertFalse(self.matcher.matches(self.change))
+
+ def test_matches_returns_false_when_files_attr_missing(self):
+ delattr(self.change, 'files')
+ self.assertFalse(self.matcher.matches(self.change))
+
+
+class TestAbstractMatcherCollection(BaseTestMatcher):
+
+ def test_str(self):
+ matcher = cm.MatchAll([cm.FileMatcher('foo')])
+ self.assertEqual(str(matcher), '{MatchAll:{FileMatcher:foo}}')
+
+ def test_repr(self):
+ matcher = cm.MatchAll([])
+ self.assertEqual(repr(matcher), '<MatchAll>')
+
+
+class TestMatchAllFiles(BaseTestMatcher):
+
+ def setUp(self):
+ super(TestMatchAllFiles, self).setUp()
+ self.matcher = cm.MatchAllFiles([cm.FileMatcher('^docs/.*$')])
+
+ def _test_matches(self, expected, files=None):
+ if files is not None:
+ self.change.files = files
+ self.assertEqual(expected, self.matcher.matches(self.change))
+
+ def test_matches_returns_false_when_files_attr_missing(self):
+ delattr(self.change, 'files')
+ self._test_matches(False)
+
+ def test_matches_returns_false_when_no_files(self):
+ self._test_matches(False)
+
+ def test_matches_returns_false_when_not_all_files_match(self):
+ self._test_matches(False, files=['docs/foo', 'foo/bar'])
+
+ def test_matches_returns_true_when_commit_message_matches(self):
+ self._test_matches(True, files=['/COMMIT_MSG'])
+
+ def test_matches_returns_true_when_all_files_match(self):
+ self._test_matches(True, files=['docs/foo'])
+
+
+class TestMatchAll(BaseTestMatcher):
+
+ def test_matches_returns_true(self):
+ matcher = cm.MatchAll([cm.ProjectMatcher(self.project)])
+ self.assertTrue(matcher.matches(self.change))
+
+ def test_matches_returns_false_for_missing_matcher(self):
+ matcher = cm.MatchAll([cm.ProjectMatcher('not_project')])
+ self.assertFalse(matcher.matches(self.change))
+
+
+class TestMatchAny(BaseTestMatcher):
+
+ def test_matches_returns_true(self):
+ matcher = cm.MatchAny([cm.ProjectMatcher(self.project)])
+ self.assertTrue(matcher.matches(self.change))
+
+ def test_matches_returns_false(self):
+ matcher = cm.MatchAny([cm.ProjectMatcher('not_project')])
+ self.assertFalse(matcher.matches(self.change))
diff --git a/tests/test_cloner.py b/tests/test_cloner.py
index ab2683d81..137c1570e 100644
--- a/tests/test_cloner.py
+++ b/tests/test_cloner.py
@@ -18,13 +18,13 @@
import logging
import os
import shutil
+import time
import git
import zuul.lib.cloner
from tests.base import ZuulTestCase
-from tests.base import FIXTURE_DIR
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(name)-32s '
@@ -80,11 +80,10 @@ class TestCloner(ZuulTestCase):
B.setMerged()
upstream = self.getUpstreamRepos(projects)
- states = [
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit('master')),
- },
- ]
+ states = [{
+ 'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
+ 'org/project2': str(upstream['org/project2'].commit('master')),
+ }]
for number, build in enumerate(self.builds):
self.log.debug("Build parameters: %s", build.parameters)
@@ -96,7 +95,7 @@ class TestCloner(ZuulTestCase):
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.git_root,
cache_dir=cache_root,
- )
+ )
cloner.execute()
work = self.getWorkspaceRepos(projects)
state = states[number]
@@ -109,9 +108,11 @@ class TestCloner(ZuulTestCase):
work = self.getWorkspaceRepos(projects)
upstream_repo_path = os.path.join(self.upstream_root, 'org/project1')
- self.assertEquals(work['org/project1'].remotes.origin.url,
- upstream_repo_path,
- 'workspace repo origin should be upstream, not cache')
+ self.assertEquals(
+ work['org/project1'].remotes.origin.url,
+ upstream_repo_path,
+ 'workspace repo origin should be upstream, not cache'
+ )
self.worker.hold_jobs_in_build = False
self.worker.release()
@@ -140,7 +141,7 @@ class TestCloner(ZuulTestCase):
{'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
'org/project2': self.builds[1].parameters['ZUUL_COMMIT'],
},
- ]
+ ]
for number, build in enumerate(self.builds):
self.log.debug("Build parameters: %s", build.parameters)
@@ -151,7 +152,7 @@ class TestCloner(ZuulTestCase):
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.git_root,
- )
+ )
cloner.execute()
work = self.getWorkspaceRepos(projects)
state = states[number]
@@ -176,7 +177,8 @@ class TestCloner(ZuulTestCase):
self.create_branch('org/project2', 'stable/havana')
self.create_branch('org/project4', 'stable/havana')
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
- B = self.fake_gerrit.addFakeChange('org/project2', 'stable/havana', 'B')
+ B = self.fake_gerrit.addFakeChange('org/project2', 'stable/havana',
+ 'B')
C = self.fake_gerrit.addFakeChange('org/project3', 'master', 'C')
A.addApproval('CRVW', 2)
B.addApproval('CRVW', 2)
@@ -209,7 +211,7 @@ class TestCloner(ZuulTestCase):
'org/project4': str(upstream['org/project4'].
commit('master')),
},
- ]
+ ]
for number, build in enumerate(self.builds):
self.log.debug("Build parameters: %s", build.parameters)
@@ -220,7 +222,7 @@ class TestCloner(ZuulTestCase):
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.git_root,
- )
+ )
cloner.execute()
work = self.getWorkspaceRepos(projects)
state = states[number]
@@ -248,9 +250,11 @@ class TestCloner(ZuulTestCase):
self.create_branch('org/project5', 'stable/havana')
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
- C = self.fake_gerrit.addFakeChange('org/project3', 'stable/havana', 'C')
+ C = self.fake_gerrit.addFakeChange('org/project3', 'stable/havana',
+ 'C')
D = self.fake_gerrit.addFakeChange('org/project3', 'master', 'D')
- E = self.fake_gerrit.addFakeChange('org/project4', 'stable/havana', 'E')
+ E = self.fake_gerrit.addFakeChange('org/project4', 'stable/havana',
+ 'E')
A.addApproval('CRVW', 2)
B.addApproval('CRVW', 2)
C.addApproval('CRVW', 2)
@@ -270,46 +274,61 @@ class TestCloner(ZuulTestCase):
upstream = self.getUpstreamRepos(projects)
states = [
{'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit('stable/havana')),
- 'org/project3': str(upstream['org/project3'].commit('stable/havana')),
- 'org/project4': str(upstream['org/project4'].commit('stable/havana')),
- 'org/project5': str(upstream['org/project5'].commit('stable/havana')),
+ 'org/project2': str(upstream['org/project2'].commit(
+ 'stable/havana')),
+ 'org/project3': str(upstream['org/project3'].commit(
+ 'stable/havana')),
+ 'org/project4': str(upstream['org/project4'].commit(
+ 'stable/havana')),
+ 'org/project5': str(upstream['org/project5'].commit(
+ 'stable/havana')),
'org/project6': str(upstream['org/project6'].commit('master')),
},
{'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit('stable/havana')),
- 'org/project3': str(upstream['org/project3'].commit('stable/havana')),
- 'org/project4': str(upstream['org/project4'].commit('stable/havana')),
- 'org/project5': str(upstream['org/project5'].commit('stable/havana')),
+ 'org/project2': str(upstream['org/project2'].commit(
+ 'stable/havana')),
+ 'org/project3': str(upstream['org/project3'].commit(
+ 'stable/havana')),
+ 'org/project4': str(upstream['org/project4'].commit(
+ 'stable/havana')),
+ 'org/project5': str(upstream['org/project5'].commit(
+ 'stable/havana')),
'org/project6': str(upstream['org/project6'].commit('master')),
},
{'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit('stable/havana')),
+ 'org/project2': str(upstream['org/project2'].commit(
+ 'stable/havana')),
'org/project3': self.builds[2].parameters['ZUUL_COMMIT'],
- 'org/project4': str(upstream['org/project4'].commit('stable/havana')),
+ 'org/project4': str(upstream['org/project4'].commit(
+ 'stable/havana')),
- 'org/project5': str(upstream['org/project5'].commit('stable/havana')),
+ 'org/project5': str(upstream['org/project5'].commit(
+ 'stable/havana')),
'org/project6': str(upstream['org/project6'].commit('master')),
},
{'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit('stable/havana')),
+ 'org/project2': str(upstream['org/project2'].commit(
+ 'stable/havana')),
'org/project3': self.builds[2].parameters['ZUUL_COMMIT'],
- 'org/project4': str(upstream['org/project4'].commit('stable/havana')),
- 'org/project5': str(upstream['org/project5'].commit('stable/havana')),
+ 'org/project4': str(upstream['org/project4'].commit(
+ 'stable/havana')),
+ 'org/project5': str(upstream['org/project5'].commit(
+ 'stable/havana')),
'org/project6': str(upstream['org/project6'].commit('master')),
},
{'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit('stable/havana')),
+ 'org/project2': str(upstream['org/project2'].commit(
+ 'stable/havana')),
'org/project3': self.builds[2].parameters['ZUUL_COMMIT'],
'org/project4': self.builds[4].parameters['ZUUL_COMMIT'],
- 'org/project5': str(upstream['org/project5'].commit('stable/havana')),
+ 'org/project5': str(upstream['org/project5'].commit(
+ 'stable/havana')),
'org/project6': str(upstream['org/project6'].commit('master')),
},
- ]
+ ]
for number, build in enumerate(self.builds):
self.log.debug("Build parameters: %s", build.parameters)
- change_number = int(build.parameters['ZUUL_CHANGE'])
cloner = zuul.lib.cloner.Cloner(
git_base_url=self.upstream_root,
projects=projects,
@@ -317,8 +336,8 @@ class TestCloner(ZuulTestCase):
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.git_root,
- branch='stable/havana', # Old branch for upgrade
- )
+ branch='stable/havana', # Old branch for upgrade
+ )
cloner.execute()
work = self.getWorkspaceRepos(projects)
state = states[number]
@@ -368,11 +387,10 @@ class TestCloner(ZuulTestCase):
'org/project5': str(upstream['org/project5'].commit('master')),
'org/project6': str(upstream['org/project6'].commit('master')),
},
- ]
+ ]
for number, build in enumerate(self.builds):
self.log.debug("Build parameters: %s", build.parameters)
- change_number = int(build.parameters['ZUUL_CHANGE'])
cloner = zuul.lib.cloner.Cloner(
git_base_url=self.upstream_root,
projects=projects,
@@ -380,8 +398,8 @@ class TestCloner(ZuulTestCase):
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.git_root,
- branch='master', # New branch for upgrade
- )
+ branch='master', # New branch for upgrade
+ )
cloner.execute()
work = self.getWorkspaceRepos(projects)
state = states[number]
@@ -409,7 +427,8 @@ class TestCloner(ZuulTestCase):
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
C = self.fake_gerrit.addFakeChange('org/project2', 'master', 'C')
- D = self.fake_gerrit.addFakeChange('org/project3', 'stable/havana', 'D')
+ D = self.fake_gerrit.addFakeChange('org/project3', 'stable/havana',
+ 'D')
A.addApproval('CRVW', 2)
B.addApproval('CRVW', 2)
C.addApproval('CRVW', 2)
@@ -451,13 +470,13 @@ class TestCloner(ZuulTestCase):
'org/project3': self.builds[3].parameters['ZUUL_COMMIT'],
'org/project4': str(upstream['org/project4'].commit('master')),
'org/project5': str(upstream['org/project5'].commit('master')),
- 'org/project6': str(upstream['org/project6'].commit('stable/havana')),
+ 'org/project6': str(upstream['org/project6'].commit(
+ 'stable/havana')),
},
- ]
+ ]
for number, build in enumerate(self.builds):
self.log.debug("Build parameters: %s", build.parameters)
- change_number = int(build.parameters['ZUUL_CHANGE'])
cloner = zuul.lib.cloner.Cloner(
git_base_url=self.upstream_root,
projects=projects,
@@ -466,7 +485,72 @@ class TestCloner(ZuulTestCase):
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.git_root,
project_branches={'org/project4': 'master'},
- )
+ )
+ cloner.execute()
+ work = self.getWorkspaceRepos(projects)
+ state = states[number]
+
+ for project in projects:
+ self.assertEquals(state[project],
+ str(work[project].commit('HEAD')),
+ 'Project %s commit for build %s should '
+ 'be correct' % (project, number))
+ shutil.rmtree(self.workspace_root)
+
+ self.worker.hold_jobs_in_build = False
+ self.worker.release()
+ self.waitUntilSettled()
+
+ def test_periodic(self):
+ self.worker.hold_jobs_in_build = True
+ self.create_branch('org/project', 'stable/havana')
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-timer.yaml')
+ self.sched.reconfigure(self.config)
+ self.registerJobs()
+
+ # The pipeline triggers every second, so we should have seen
+ # several by now.
+ time.sleep(5)
+ self.waitUntilSettled()
+
+ builds = self.builds[:]
+
+ self.worker.hold_jobs_in_build = False
+ # Stop queuing timer triggered jobs so that the assertions
+ # below don't race against more jobs being queued.
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-no-timer.yaml')
+ self.sched.reconfigure(self.config)
+ self.registerJobs()
+ self.worker.release()
+ self.waitUntilSettled()
+
+ projects = ['org/project']
+
+ self.assertEquals(2, len(builds), "Two builds are running")
+
+ upstream = self.getUpstreamRepos(projects)
+ states = [
+ {'org/project':
+ str(upstream['org/project'].commit('stable/havana')),
+ },
+ {'org/project':
+ str(upstream['org/project'].commit('stable/havana')),
+ },
+ ]
+
+ for number, build in enumerate(builds):
+ self.log.debug("Build parameters: %s", build.parameters)
+ cloner = zuul.lib.cloner.Cloner(
+ git_base_url=self.upstream_root,
+ projects=projects,
+ workspace=self.workspace_root,
+ zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
+ zuul_ref=build.parameters.get('ZUUL_REF', None),
+ zuul_url=self.git_root,
+ branch='stable/havana',
+ )
cloner.execute()
work = self.getWorkspaceRepos(projects)
state = states[number]
@@ -476,6 +560,7 @@ class TestCloner(ZuulTestCase):
str(work[project].commit('HEAD')),
'Project %s commit for build %s should '
'be correct' % (project, number))
+
shutil.rmtree(self.workspace_root)
self.worker.hold_jobs_in_build = False
diff --git a/tests/test_model.py b/tests/test_model.py
new file mode 100644
index 000000000..271161869
--- /dev/null
+++ b/tests/test_model.py
@@ -0,0 +1,64 @@
+# Copyright 2015 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from zuul import change_matcher as cm
+from zuul import model
+
+from tests.base import BaseTestCase
+
+
+class TestJob(BaseTestCase):
+
+ @property
+ def job(self):
+ job = model.Job('job')
+ job.skip_if_matcher = cm.MatchAll([
+ cm.ProjectMatcher('^project$'),
+ cm.MatchAllFiles([cm.FileMatcher('^docs/.*$')]),
+ ])
+ return job
+
+ def test_change_matches_returns_false_for_matched_skip_if(self):
+ change = model.Change('project')
+ change.files = ['docs/foo']
+ self.assertFalse(self.job.changeMatches(change))
+
+ def test_change_matches_returns_true_for_unmatched_skip_if(self):
+ change = model.Change('project')
+ change.files = ['foo']
+ self.assertTrue(self.job.changeMatches(change))
+
+ def test_copy_retains_skip_if(self):
+ job = model.Job('job')
+ job.copy(self.job)
+ self.assertTrue(job.skip_if_matcher)
+
+ def _assert_job_booleans_are_not_none(self, job):
+ self.assertIsNotNone(job.voting)
+ self.assertIsNotNone(job.hold_following_changes)
+
+ def test_job_sets_defaults_for_boolean_attributes(self):
+ job = model.Job('job')
+ self._assert_job_booleans_are_not_none(job)
+
+ def test_metajob_does_not_set_defaults_for_boolean_attributes(self):
+ job = model.Job('^job')
+ self.assertIsNone(job.voting)
+ self.assertIsNone(job.hold_following_changes)
+
+ def test_metajob_copy_does_not_set_undefined_boolean_attributes(self):
+ job = model.Job('job')
+ metajob = model.Job('^job')
+ job.copy(metajob)
+ self._assert_job_booleans_are_not_none(job)
diff --git a/tests/test_scheduler.py b/tests/test_scheduler.py
index a7548c132..b44dba6c1 100755
--- a/tests/test_scheduler.py
+++ b/tests/test_scheduler.py
@@ -22,22 +22,62 @@ import shutil
import time
import urllib
import urllib2
+import yaml
import git
import testtools
+import zuul.change_matcher
import zuul.scheduler
import zuul.rpcclient
import zuul.reporter.gerrit
import zuul.reporter.smtp
-from tests.base import ZuulTestCase, repack_repo
+from tests.base import (
+ BaseTestCase,
+ ZuulTestCase,
+ repack_repo,
+)
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(name)-32s '
'%(levelname)-8s %(message)s')
+class TestSchedulerConfigParsing(BaseTestCase):
+
+ def test_parse_skip_if(self):
+ job_yaml = """
+jobs:
+ - name: job_name
+ skip-if:
+ - project: ^project_name$
+ branch: ^stable/icehouse$
+ all-files-match-any:
+ - ^filename$
+ - project: ^project2_name$
+ all-files-match-any:
+ - ^filename2$
+ """.strip()
+ data = yaml.load(job_yaml)
+ config_job = data.get('jobs')[0]
+ sched = zuul.scheduler.Scheduler()
+ cm = zuul.change_matcher
+ expected = cm.MatchAny([
+ cm.MatchAll([
+ cm.ProjectMatcher('^project_name$'),
+ cm.BranchMatcher('^stable/icehouse$'),
+ cm.MatchAllFiles([cm.FileMatcher('^filename$')]),
+ ]),
+ cm.MatchAll([
+ cm.ProjectMatcher('^project2_name$'),
+ cm.MatchAllFiles([cm.FileMatcher('^filename2$')]),
+ ]),
+ ])
+ matcher = sched._parseSkipIf(config_job)
+ self.assertEqual(expected, matcher)
+
+
class TestScheduler(ZuulTestCase):
def test_jobs_launched(self):
@@ -601,6 +641,91 @@ class TestScheduler(ZuulTestCase):
self.assertEqual(B.reported, 2)
self.assertEqual(C.reported, 2)
+ def test_needed_changes_enqueue(self):
+ "Test that a needed change is enqueued ahead"
+ # A Given a git tree like this, if we enqueue
+ # / \ change C, we should walk up and down the tree
+ # B G and enqueue changes in the order ABCDEFG.
+ # /|\ This is also the order that you would get if
+ # *C E F you enqueued changes in the order ABCDEFG, so
+ # / the ordering is stable across re-enqueue events.
+ # D
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+ C = self.fake_gerrit.addFakeChange('org/project', 'master', 'C')
+ D = self.fake_gerrit.addFakeChange('org/project', 'master', 'D')
+ E = self.fake_gerrit.addFakeChange('org/project', 'master', 'E')
+ F = self.fake_gerrit.addFakeChange('org/project', 'master', 'F')
+ G = self.fake_gerrit.addFakeChange('org/project', 'master', 'G')
+ B.setDependsOn(A, 1)
+ C.setDependsOn(B, 1)
+ D.setDependsOn(C, 1)
+ E.setDependsOn(B, 1)
+ F.setDependsOn(B, 1)
+ G.setDependsOn(A, 1)
+
+ A.addApproval('CRVW', 2)
+ B.addApproval('CRVW', 2)
+ C.addApproval('CRVW', 2)
+ D.addApproval('CRVW', 2)
+ E.addApproval('CRVW', 2)
+ F.addApproval('CRVW', 2)
+ G.addApproval('CRVW', 2)
+ self.fake_gerrit.addEvent(C.addApproval('APRV', 1))
+
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(B.data['status'], 'NEW')
+ self.assertEqual(C.data['status'], 'NEW')
+ self.assertEqual(D.data['status'], 'NEW')
+ self.assertEqual(E.data['status'], 'NEW')
+ self.assertEqual(F.data['status'], 'NEW')
+ self.assertEqual(G.data['status'], 'NEW')
+
+ # We're about to add approvals to changes without adding the
+ # triggering events to Zuul, so that we can be sure that it is
+ # enqueuing the changes based on dependencies, not because of
+ # triggering events. Since it will have the changes cached
+ # already (without approvals), we need to clear the cache
+ # first.
+ source = self.sched.layout.pipelines['gate'].source
+ source.maintainCache([])
+
+ self.worker.hold_jobs_in_build = True
+ A.addApproval('APRV', 1)
+ B.addApproval('APRV', 1)
+ D.addApproval('APRV', 1)
+ E.addApproval('APRV', 1)
+ F.addApproval('APRV', 1)
+ G.addApproval('APRV', 1)
+ self.fake_gerrit.addEvent(C.addApproval('APRV', 1))
+
+ for x in range(8):
+ self.worker.release('.*-merge')
+ self.waitUntilSettled()
+ self.worker.hold_jobs_in_build = False
+ self.worker.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'MERGED')
+ self.assertEqual(B.data['status'], 'MERGED')
+ self.assertEqual(C.data['status'], 'MERGED')
+ self.assertEqual(D.data['status'], 'MERGED')
+ self.assertEqual(E.data['status'], 'MERGED')
+ self.assertEqual(F.data['status'], 'MERGED')
+ self.assertEqual(G.data['status'], 'MERGED')
+ self.assertEqual(A.reported, 2)
+ self.assertEqual(B.reported, 2)
+ self.assertEqual(C.reported, 2)
+ self.assertEqual(D.reported, 2)
+ self.assertEqual(E.reported, 2)
+ self.assertEqual(F.reported, 2)
+ self.assertEqual(G.reported, 2)
+ self.assertEqual(self.history[6].changes,
+ '1,1 2,1 3,1 4,1 5,1 6,1 7,1')
+
def test_trigger_cache(self):
"Test that the trigger cache operates correctly"
self.worker.hold_jobs_in_build = True
@@ -1412,35 +1537,178 @@ class TestScheduler(ZuulTestCase):
self.assertEqual(D.reported, 2)
self.assertEqual(len(self.history), 9) # 3 each for A, B, D.
- def test_abandoned_change_dequeues(self):
- "Test that an abandoned change is dequeued"
+ def test_new_patchset_check(self):
+ "Test a new patchset in check"
self.worker.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+ check_pipeline = self.sched.layout.pipelines['check']
+
+ # Add two git-dependent changes
+ B.setDependsOn(A, 1)
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
+
+ # A live item, and a non-live/live pair
+ items = check_pipeline.getAllItems()
+ self.assertEqual(len(items), 3)
+
+ self.assertEqual(items[0].change.number, '1')
+ self.assertEqual(items[0].change.patchset, '1')
+ self.assertFalse(items[0].live)
+
+ self.assertEqual(items[1].change.number, '2')
+ self.assertEqual(items[1].change.patchset, '1')
+ self.assertTrue(items[1].live)
+
+ self.assertEqual(items[2].change.number, '1')
+ self.assertEqual(items[2].change.patchset, '1')
+ self.assertTrue(items[2].live)
+
+ # Add a new patchset to A
+ A.addPatchset()
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(2))
+ self.waitUntilSettled()
+
+ # The live copy of A,1 should be gone, but the non-live and B
+ # should continue, and we should have a new A,2
+ items = check_pipeline.getAllItems()
+ self.assertEqual(len(items), 3)
+
+ self.assertEqual(items[0].change.number, '1')
+ self.assertEqual(items[0].change.patchset, '1')
+ self.assertFalse(items[0].live)
+
+ self.assertEqual(items[1].change.number, '2')
+ self.assertEqual(items[1].change.patchset, '1')
+ self.assertTrue(items[1].live)
+
+ self.assertEqual(items[2].change.number, '1')
+ self.assertEqual(items[2].change.patchset, '2')
+ self.assertTrue(items[2].live)
+
+ # Add a new patchset to B
+ B.addPatchset()
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(2))
+ self.waitUntilSettled()
+
+ # The live copy of B,1 should be gone, and its non-live copy of A,1
+ # but we should have a new B,2 (still based on A,1)
+ items = check_pipeline.getAllItems()
+ self.assertEqual(len(items), 3)
+
+ self.assertEqual(items[0].change.number, '1')
+ self.assertEqual(items[0].change.patchset, '2')
+ self.assertTrue(items[0].live)
+
+ self.assertEqual(items[1].change.number, '1')
+ self.assertEqual(items[1].change.patchset, '1')
+ self.assertFalse(items[1].live)
+
+ self.assertEqual(items[2].change.number, '2')
+ self.assertEqual(items[2].change.patchset, '2')
+ self.assertTrue(items[2].live)
+
+ self.builds[0].release()
+ self.waitUntilSettled()
+ self.builds[0].release()
+ self.waitUntilSettled()
+ self.worker.hold_jobs_in_build = False
+ self.worker.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(A.reported, 1)
+ self.assertEqual(B.reported, 1)
+ self.assertEqual(self.history[0].result, 'ABORTED')
+ self.assertEqual(self.history[0].changes, '1,1')
+ self.assertEqual(self.history[1].result, 'ABORTED')
+ self.assertEqual(self.history[1].changes, '1,1 2,1')
+ self.assertEqual(self.history[2].result, 'SUCCESS')
+ self.assertEqual(self.history[2].changes, '1,2')
+ self.assertEqual(self.history[3].result, 'SUCCESS')
+ self.assertEqual(self.history[3].changes, '1,1 2,2')
+
+ def test_abandoned_gate(self):
+ "Test that an abandoned change is dequeued from gate"
+
+ self.worker.hold_jobs_in_build = True
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ A.addApproval('CRVW', 2)
+ self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
+ self.waitUntilSettled()
self.assertEqual(len(self.builds), 1, "One job being built (on hold)")
self.assertEqual(self.builds[0].name, 'project-merge')
self.fake_gerrit.addEvent(A.getChangeAbandonedEvent())
self.waitUntilSettled()
- # For debugging purposes...
- #for pipeline in self.sched.layout.pipelines.values():
- # for queue in pipeline.queues:
- # self.log.info("pipepline %s queue %s contents %s" % (
- # pipeline.name, queue.name, queue.queue))
-
self.worker.release('.*-merge')
self.waitUntilSettled()
self.assertEqual(len(self.builds), 0, "No job running")
- self.assertEmptyQueues()
self.assertEqual(len(self.history), 1, "Only one build in history")
self.assertEqual(self.history[0].result, 'ABORTED',
+ "Build should have been aborted")
+ self.assertEqual(A.reported, 1,
+ "Abandoned gate change should report only start")
+
+ def test_abandoned_check(self):
+ "Test that an abandoned change is dequeued from check"
+
+ self.worker.hold_jobs_in_build = True
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+ check_pipeline = self.sched.layout.pipelines['check']
+
+ # Add two git-dependent changes
+ B.setDependsOn(A, 1)
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ # A live item, and a non-live/live pair
+ items = check_pipeline.getAllItems()
+ self.assertEqual(len(items), 3)
+
+ self.assertEqual(items[0].change.number, '1')
+ self.assertFalse(items[0].live)
+
+ self.assertEqual(items[1].change.number, '2')
+ self.assertTrue(items[1].live)
+
+ self.assertEqual(items[2].change.number, '1')
+ self.assertTrue(items[2].live)
+
+ # Abandon A
+ self.fake_gerrit.addEvent(A.getChangeAbandonedEvent())
+ self.waitUntilSettled()
+
+ # The live copy of A should be gone, but the non-live and B
+ # should continue
+ items = check_pipeline.getAllItems()
+ self.assertEqual(len(items), 2)
+
+ self.assertEqual(items[0].change.number, '1')
+ self.assertFalse(items[0].live)
+
+ self.assertEqual(items[1].change.number, '2')
+ self.assertTrue(items[1].live)
+
+ self.worker.hold_jobs_in_build = False
+ self.worker.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.history), 4)
+ self.assertEqual(self.history[0].result, 'ABORTED',
'Build should have been aborted')
self.assertEqual(A.reported, 0, "Abandoned change should not report")
+ self.assertEqual(B.reported, 1, "Change should report")
def test_zuul_url_return(self):
"Test if ZUUL_URL is returning when zuul_url is set in zuul.conf"
@@ -1737,6 +2005,33 @@ class TestScheduler(ZuulTestCase):
self.assertEqual(B.data['status'], 'MERGED')
self.assertEqual(B.reported, 2)
+ def _test_skip_if_jobs(self, branch, should_skip):
+ "Test that jobs with a skip-if filter run only when appropriate"
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-skip-if.yaml')
+ self.sched.reconfigure(self.config)
+ self.registerJobs()
+
+ change = self.fake_gerrit.addFakeChange('org/project',
+ branch,
+ 'test skip-if')
+ self.fake_gerrit.addEvent(change.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ tested_change_ids = [x.changes[0] for x in self.history
+ if x.name == 'project-test-skip-if']
+
+ if should_skip:
+ self.assertEqual([], tested_change_ids)
+ else:
+ self.assertIn(change.data['number'], tested_change_ids)
+
+ def test_skip_if_match_skips_job(self):
+ self._test_skip_if_jobs(branch='master', should_skip=True)
+
+ def test_skip_if_no_match_runs_job(self):
+ self._test_skip_if_jobs(branch='mp', should_skip=False)
+
def test_test_config(self):
"Test that we can test the config"
sched = zuul.scheduler.Scheduler()
@@ -1836,7 +2131,7 @@ class TestScheduler(ZuulTestCase):
status_jobs = set()
for p in data['pipelines']:
for q in p['change_queues']:
- if q['dependent']:
+ if p['name'] in ['gate', 'conflict']:
self.assertEqual(q['window'], 20)
else:
self.assertEqual(q['window'], 0)
@@ -2777,7 +3072,6 @@ For CI problems and help debugging, contact ci@example.org"""
self.assertEqual('project-merge', job['name'])
self.assertEqual('gate', job['pipeline'])
self.assertEqual(False, job['retry'])
- self.assertEqual(13, len(job['parameters']))
self.assertEqual('https://server/job/project-merge/0/',
job['url'])
self.assertEqual(7, len(job['worker']))
@@ -2805,3 +3099,405 @@ For CI problems and help debugging, contact ci@example.org"""
self.getJobFromHistory('experimental-project-test').result,
'SUCCESS')
self.assertEqual(A.reported, 1)
+
+ def test_crd_gate(self):
+ "Test cross-repo dependencies"
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
+ A.addApproval('CRVW', 2)
+ B.addApproval('CRVW', 2)
+
+ AM2 = self.fake_gerrit.addFakeChange('org/project1', 'master', 'AM2')
+ AM1 = self.fake_gerrit.addFakeChange('org/project1', 'master', 'AM1')
+ AM2.setMerged()
+ AM1.setMerged()
+
+ BM2 = self.fake_gerrit.addFakeChange('org/project2', 'master', 'BM2')
+ BM1 = self.fake_gerrit.addFakeChange('org/project2', 'master', 'BM1')
+ BM2.setMerged()
+ BM1.setMerged()
+
+ # A -> AM1 -> AM2
+ # B -> BM1 -> BM2
+ # A Depends-On: B
+ # M2 is here to make sure it is never queried. If it is, it
+ # means zuul is walking down the entire history of merged
+ # changes.
+
+ B.setDependsOn(BM1, 1)
+ BM1.setDependsOn(BM2, 1)
+
+ A.setDependsOn(AM1, 1)
+ AM1.setDependsOn(AM2, 1)
+
+ A.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ A.subject, B.data['id'])
+
+ self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(B.data['status'], 'NEW')
+
+ source = self.sched.layout.pipelines['gate'].source
+ source.maintainCache([])
+
+ self.worker.hold_jobs_in_build = True
+ B.addApproval('APRV', 1)
+ self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
+ self.waitUntilSettled()
+
+ self.worker.release('.*-merge')
+ self.waitUntilSettled()
+ self.worker.release('.*-merge')
+ self.waitUntilSettled()
+ self.worker.hold_jobs_in_build = False
+ self.worker.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(AM2.queried, 0)
+ self.assertEqual(BM2.queried, 0)
+ self.assertEqual(A.data['status'], 'MERGED')
+ self.assertEqual(B.data['status'], 'MERGED')
+ self.assertEqual(A.reported, 2)
+ self.assertEqual(B.reported, 2)
+
+ self.assertEqual(self.getJobFromHistory('project1-merge').changes,
+ '2,1 1,1')
+
+ def test_crd_branch(self):
+ "Test cross-repo dependencies in multiple branches"
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
+ C = self.fake_gerrit.addFakeChange('org/project2', 'mp', 'C')
+ C.data['id'] = B.data['id']
+ A.addApproval('CRVW', 2)
+ B.addApproval('CRVW', 2)
+ C.addApproval('CRVW', 2)
+
+ # A Depends-On: B+C
+ A.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ A.subject, B.data['id'])
+
+ self.worker.hold_jobs_in_build = True
+ B.addApproval('APRV', 1)
+ C.addApproval('APRV', 1)
+ self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
+ self.waitUntilSettled()
+
+ self.worker.release('.*-merge')
+ self.waitUntilSettled()
+ self.worker.release('.*-merge')
+ self.waitUntilSettled()
+ self.worker.release('.*-merge')
+ self.waitUntilSettled()
+ self.worker.hold_jobs_in_build = False
+ self.worker.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'MERGED')
+ self.assertEqual(B.data['status'], 'MERGED')
+ self.assertEqual(C.data['status'], 'MERGED')
+ self.assertEqual(A.reported, 2)
+ self.assertEqual(B.reported, 2)
+ self.assertEqual(C.reported, 2)
+
+ self.assertEqual(self.getJobFromHistory('project1-merge').changes,
+ '2,1 3,1 1,1')
+
+ def test_crd_multiline(self):
+ "Test multiple depends-on lines in commit"
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
+ C = self.fake_gerrit.addFakeChange('org/project2', 'master', 'C')
+ A.addApproval('CRVW', 2)
+ B.addApproval('CRVW', 2)
+ C.addApproval('CRVW', 2)
+
+ # A Depends-On: B+C
+ A.data['commitMessage'] = '%s\n\nDepends-On: %s\nDepends-On: %s\n' % (
+ A.subject, B.data['id'], C.data['id'])
+
+ self.worker.hold_jobs_in_build = True
+ B.addApproval('APRV', 1)
+ C.addApproval('APRV', 1)
+ self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
+ self.waitUntilSettled()
+
+ self.worker.release('.*-merge')
+ self.waitUntilSettled()
+ self.worker.release('.*-merge')
+ self.waitUntilSettled()
+ self.worker.release('.*-merge')
+ self.waitUntilSettled()
+ self.worker.hold_jobs_in_build = False
+ self.worker.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'MERGED')
+ self.assertEqual(B.data['status'], 'MERGED')
+ self.assertEqual(C.data['status'], 'MERGED')
+ self.assertEqual(A.reported, 2)
+ self.assertEqual(B.reported, 2)
+ self.assertEqual(C.reported, 2)
+
+ self.assertEqual(self.getJobFromHistory('project1-merge').changes,
+ '2,1 3,1 1,1')
+
+ def test_crd_unshared_gate(self):
+ "Test cross-repo dependencies in unshared gate queues"
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+ A.addApproval('CRVW', 2)
+ B.addApproval('CRVW', 2)
+
+ # A Depends-On: B
+ A.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ A.subject, B.data['id'])
+
+ # A and B do not share a queue, make sure that A is unable to
+ # enqueue B (and therefore, A is unable to be enqueued).
+ B.addApproval('APRV', 1)
+ self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(B.data['status'], 'NEW')
+ self.assertEqual(A.reported, 0)
+ self.assertEqual(B.reported, 0)
+ self.assertEqual(len(self.history), 0)
+
+ # Enqueue and merge B alone.
+ self.fake_gerrit.addEvent(B.addApproval('APRV', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(B.data['status'], 'MERGED')
+ self.assertEqual(B.reported, 2)
+
+ # Now that B is merged, A should be able to be enqueued and
+ # merged.
+ self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'MERGED')
+ self.assertEqual(A.reported, 2)
+
+ def test_crd_cycle(self):
+ "Test cross-repo dependency cycles"
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
+ A.addApproval('CRVW', 2)
+ B.addApproval('CRVW', 2)
+
+ # A -> B -> A (via commit-depends)
+
+ A.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ A.subject, B.data['id'])
+ B.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ B.subject, A.data['id'])
+
+ self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.reported, 0)
+ self.assertEqual(B.reported, 0)
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(B.data['status'], 'NEW')
+
+ def test_crd_check(self):
+ "Test cross-repo dependencies in independent pipelines"
+
+ self.gearman_server.hold_jobs_in_queue = True
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
+
+ # A Depends-On: B
+ A.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ A.subject, B.data['id'])
+
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ queue = self.gearman_server.getQueue()
+ ref = self.getParameter(queue[-1], 'ZUUL_REF')
+ self.gearman_server.hold_jobs_in_queue = False
+ self.gearman_server.release()
+ self.waitUntilSettled()
+
+ path = os.path.join(self.git_root, "org/project1")
+ repo = git.Repo(path)
+ repo_messages = [c.message.strip() for c in repo.iter_commits(ref)]
+ repo_messages.reverse()
+ correct_messages = ['initial commit', 'A-1']
+ self.assertEqual(repo_messages, correct_messages)
+
+ path = os.path.join(self.git_root, "org/project2")
+ repo = git.Repo(path)
+ repo_messages = [c.message.strip() for c in repo.iter_commits(ref)]
+ repo_messages.reverse()
+ correct_messages = ['initial commit', 'B-1']
+ self.assertEqual(repo_messages, correct_messages)
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(B.data['status'], 'NEW')
+ self.assertEqual(A.reported, 1)
+ self.assertEqual(B.reported, 0)
+
+ self.assertEqual(self.history[0].changes, '2,1 1,1')
+ self.assertEqual(len(self.sched.layout.pipelines['check'].queues), 0)
+
+ def test_crd_check_git_depends(self):
+ "Test single-repo dependencies in independent pipelines"
+ self.gearman_server.hold_jobs_in_queue = True
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
+
+ # Add two git-dependent changes and make sure they both report
+ # success.
+ B.setDependsOn(A, 1)
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ self.gearman_server.hold_jobs_in_queue = False
+ self.gearman_server.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(B.data['status'], 'NEW')
+ self.assertEqual(A.reported, 1)
+ self.assertEqual(B.reported, 1)
+
+ self.assertEqual(self.history[0].changes, '1,1')
+ self.assertEqual(self.history[-1].changes, '1,1 2,1')
+ self.assertEqual(len(self.sched.layout.pipelines['check'].queues), 0)
+
+ self.assertIn('Build succeeded', A.messages[0])
+ self.assertIn('Build succeeded', B.messages[0])
+
+ def test_crd_check_duplicate(self):
+ "Test duplicate check in independent pipelines"
+ self.gearman_server.hold_jobs_in_queue = True
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
+ check_pipeline = self.sched.layout.pipelines['check']
+
+ # Add two git-dependent changes...
+ B.setDependsOn(A, 1)
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertEqual(len(check_pipeline.getAllItems()), 2)
+
+ # ...make sure the live one is not duplicated...
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertEqual(len(check_pipeline.getAllItems()), 2)
+
+ # ...but the non-live one is able to be.
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertEqual(len(check_pipeline.getAllItems()), 3)
+
+ self.gearman_server.hold_jobs_in_queue = False
+ self.gearman_server.release('.*-merge')
+ self.waitUntilSettled()
+ self.gearman_server.release('.*-merge')
+ self.waitUntilSettled()
+ self.gearman_server.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(B.data['status'], 'NEW')
+ self.assertEqual(A.reported, 1)
+ self.assertEqual(B.reported, 1)
+
+ self.assertEqual(self.history[0].changes, '1,1 2,1')
+ self.assertEqual(self.history[1].changes, '1,1')
+ self.assertEqual(len(self.sched.layout.pipelines['check'].queues), 0)
+
+ self.assertIn('Build succeeded', A.messages[0])
+ self.assertIn('Build succeeded', B.messages[0])
+
+ def test_crd_check_reconfiguration(self):
+ "Test cross-repo dependencies re-enqueued in independent pipelines"
+
+ self.gearman_server.hold_jobs_in_queue = True
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
+
+ # A Depends-On: B
+ A.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ A.subject, B.data['id'])
+
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ self.sched.reconfigure(self.config)
+
+ # Make sure the items still share a change queue, and the
+ # first one is not live.
+ self.assertEqual(len(self.sched.layout.pipelines['check'].queues), 1)
+ queue = self.sched.layout.pipelines['check'].queues[0]
+ first_item = queue.queue[0]
+ for item in queue.queue:
+ self.assertEqual(item.queue, first_item.queue)
+ self.assertFalse(first_item.live)
+ self.assertTrue(queue.queue[1].live)
+
+ self.gearman_server.hold_jobs_in_queue = False
+ self.gearman_server.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(B.data['status'], 'NEW')
+ self.assertEqual(A.reported, 1)
+ self.assertEqual(B.reported, 0)
+
+ self.assertEqual(self.history[0].changes, '2,1 1,1')
+ self.assertEqual(len(self.sched.layout.pipelines['check'].queues), 0)
+
+ def test_crd_check_ignore_dependencies(self):
+ "Test cross-repo dependencies can be ignored"
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-ignore-dependencies.yaml')
+ self.sched.reconfigure(self.config)
+ self.registerJobs()
+
+ self.gearman_server.hold_jobs_in_queue = True
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
+ C = self.fake_gerrit.addFakeChange('org/project2', 'master', 'C')
+
+ # A Depends-On: B
+ A.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ A.subject, B.data['id'])
+ # C git-depends on B
+ C.setDependsOn(B, 1)
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.fake_gerrit.addEvent(C.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ # Make sure none of the items share a change queue, and all
+ # are live.
+ check_pipeline = self.sched.layout.pipelines['check']
+ self.assertEqual(len(check_pipeline.queues), 3)
+ self.assertEqual(len(check_pipeline.getAllItems()), 3)
+ for item in check_pipeline.getAllItems():
+ self.assertTrue(item.live)
+
+ self.gearman_server.hold_jobs_in_queue = False
+ self.gearman_server.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(B.data['status'], 'NEW')
+ self.assertEqual(C.data['status'], 'NEW')
+ self.assertEqual(A.reported, 1)
+ self.assertEqual(B.reported, 1)
+ self.assertEqual(C.reported, 1)
+
+ # Each job should have tested exactly one change
+ for job in self.history:
+ self.assertEqual(len(job.changes.split()), 1)
diff --git a/tests/test_zuultrigger.py b/tests/test_zuultrigger.py
index 9a90a982e..a26fa8605 100644
--- a/tests/test_zuultrigger.py
+++ b/tests/test_zuultrigger.py
@@ -15,7 +15,6 @@
# under the License.
import logging
-import time
from tests.base import ZuulTestCase
@@ -46,9 +45,9 @@ class TestZuulTrigger(ZuulTestCase):
A.addApproval('CRVW', 2)
B1.addApproval('CRVW', 2)
B2.addApproval('CRVW', 2)
- A.addApproval('VRFY', 1) # required by gate
- B1.addApproval('VRFY', -1) # should go to check
- B2.addApproval('VRFY', 1) # should go to gate
+ A.addApproval('VRFY', 1) # required by gate
+ B1.addApproval('VRFY', -1) # should go to check
+ B2.addApproval('VRFY', 1) # should go to gate
B1.addApproval('APRV', 1)
B2.addApproval('APRV', 1)
B1.setDependsOn(A, 1)
@@ -66,7 +65,7 @@ class TestZuulTrigger(ZuulTestCase):
for job in self.history:
if job.changes == '1,1':
self.assertEqual(job.name, 'project-gate')
- elif job.changes == '2,1':
+ elif job.changes == '1,1 2,1':
self.assertEqual(job.name, 'project-check')
elif job.changes == '1,1 3,1':
self.assertEqual(job.name, 'project-gate')
@@ -106,11 +105,14 @@ class TestZuulTrigger(ZuulTestCase):
self.assertEqual(C.reported, 0)
self.assertEqual(D.reported, 0)
self.assertEqual(E.reported, 0)
- self.assertEqual(B.messages[0],
+ self.assertEqual(
+ B.messages[0],
"Merge Failed.\n\nThis change was unable to be automatically "
"merged with the current state of the repository. Please rebase "
"your change and upload a new patchset.")
- self.assertEqual(self.fake_gerrit.queries[0], "project:org/project status:open")
+
+ self.assertEqual(self.fake_gerrit.queries[0],
+ "project:org/project status:open")
# Reconfigure and run the test again. This is a regression
# check to make sure that we don't end up with a stale trigger
@@ -129,8 +131,10 @@ class TestZuulTrigger(ZuulTestCase):
self.assertEqual(C.reported, 0)
self.assertEqual(D.reported, 2)
self.assertEqual(E.reported, 1)
- self.assertEqual(E.messages[0],
+ self.assertEqual(
+ E.messages[0],
"Merge Failed.\n\nThis change was unable to be automatically "
"merged with the current state of the repository. Please rebase "
"your change and upload a new patchset.")
- self.assertEqual(self.fake_gerrit.queries[1], "project:org/project status:open")
+ self.assertEqual(self.fake_gerrit.queries[1],
+ "project:org/project status:open")
diff --git a/tools/zuul-changes.py b/tools/zuul-changes.py
index 7fc541b1e..9dbf504e7 100755
--- a/tools/zuul-changes.py
+++ b/tools/zuul-changes.py
@@ -1,5 +1,6 @@
#!/usr/bin/env python
# Copyright 2013 OpenStack Foundation
+# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@@ -13,9 +14,6 @@
# License for the specific language governing permissions and limitations
# under the License.
-# Print commands to leave gerrit comments for every change in one of
-# Zuul's pipelines.
-
import urllib2
import json
import argparse
@@ -23,9 +21,6 @@ import argparse
parser = argparse.ArgumentParser()
parser.add_argument('url', help='The URL of the running Zuul instance')
parser.add_argument('pipeline_name', help='The name of the Zuul pipeline')
-parser.add_argument('comment', help='The text of the Gerrit comment')
-parser.add_argument('--review-host', default='review',
- help='The Gerrit hostname')
options = parser.parse_args()
data = urllib2.urlopen('%s/status.json' % options.url).read()
@@ -37,7 +32,13 @@ for pipeline in data['pipelines']:
for queue in pipeline['change_queues']:
for head in queue['heads']:
for change in head:
- print 'ssh %s gerrit review %s --message \\"%s\\"' % (
- options.review_host,
- change['id'],
- options.comment)
+ if not change['live']:
+ continue
+ cid, cps = change['id'].split(',')
+ print (
+ "zuul enqueue --trigger gerrit --pipeline %s "
+ "--project %s --change %s,%s" % (
+ options.pipeline_name,
+ change['project'],
+ cid, cps)
+ )
diff --git a/tox.ini b/tox.ini
index 6e459699d..d716bb715 100644
--- a/tox.ini
+++ b/tox.ini
@@ -20,7 +20,7 @@ commands =
downloadcache = ~/cache/pip
[testenv:pep8]
-commands = flake8
+commands = flake8 {posargs}
[testenv:cover]
commands =
@@ -36,7 +36,8 @@ commands = {posargs}
commands = zuul-server -c etc/zuul.conf-sample -t -l {posargs}
[flake8]
-ignore = E125,H
-select = H231
+# These are ignored intentionally in openstack-infra projects;
+# please don't submit patches that solely correct them or enable them.
+ignore = E125,E129,H
show-source = True
exclude = .venv,.tox,dist,doc,build,*.egg
diff --git a/zuul/change_matcher.py b/zuul/change_matcher.py
new file mode 100644
index 000000000..ed380f0ae
--- /dev/null
+++ b/zuul/change_matcher.py
@@ -0,0 +1,132 @@
+# Copyright 2015 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+This module defines classes used in matching changes based on job
+configuration.
+"""
+
+import re
+
+
+class AbstractChangeMatcher(object):
+
+ def __init__(self, regex):
+ self._regex = regex
+ self.regex = re.compile(regex)
+
+ def matches(self, change):
+ """Return a boolean indication of whether change matches
+ implementation-specific criteria.
+ """
+ raise NotImplementedError()
+
+ def copy(self):
+ return self.__class__(self._regex)
+
+ def __eq__(self, other):
+ return str(self) == str(other)
+
+ def __str__(self):
+ return '{%s:%s}' % (self.__class__.__name__, self._regex)
+
+ def __repr__(self):
+ return '<%s %s>' % (self.__class__.__name__, self._regex)
+
+
+class ProjectMatcher(AbstractChangeMatcher):
+
+ def matches(self, change):
+ return self.regex.match(str(change.project))
+
+
+class BranchMatcher(AbstractChangeMatcher):
+
+ def matches(self, change):
+ return (
+ (hasattr(change, 'branch') and self.regex.match(change.branch)) or
+ (hasattr(change, 'ref') and self.regex.match(change.ref))
+ )
+
+
+class FileMatcher(AbstractChangeMatcher):
+
+ def matches(self, change):
+ if not hasattr(change, 'files'):
+ return False
+ for file_ in change.files:
+ if self.regex.match(file_):
+ return True
+ return False
+
+
+class AbstractMatcherCollection(AbstractChangeMatcher):
+
+ def __init__(self, matchers):
+ self.matchers = matchers
+
+ def __eq__(self, other):
+ return str(self) == str(other)
+
+ def __str__(self):
+ return '{%s:%s}' % (self.__class__.__name__,
+ ','.join([str(x) for x in self.matchers]))
+
+ def __repr__(self):
+ return '<%s>' % self.__class__.__name__
+
+ def copy(self):
+ return self.__class__(self.matchers[:])
+
+
+class MatchAllFiles(AbstractMatcherCollection):
+
+ commit_regex = re.compile('^/COMMIT_MSG$')
+
+ @property
+ def regexes(self):
+ for matcher in self.matchers:
+ yield matcher.regex
+ yield self.commit_regex
+
+ def matches(self, change):
+ if not (hasattr(change, 'files') and change.files):
+ return False
+ for file_ in change.files:
+ matched_file = False
+ for regex in self.regexes:
+ if regex.match(file_):
+ matched_file = True
+ break
+ if not matched_file:
+ return False
+ return True
+
+
+class MatchAll(AbstractMatcherCollection):
+
+ def matches(self, change):
+ for matcher in self.matchers:
+ if not matcher.matches(change):
+ return False
+ return True
+
+
+class MatchAny(AbstractMatcherCollection):
+
+ def matches(self, change):
+ for matcher in self.matchers:
+ if matcher.matches(change):
+ return True
+ return False
diff --git a/zuul/cmd/client.py b/zuul/cmd/client.py
index 766a4ef83..bc2c152d8 100644
--- a/zuul/cmd/client.py
+++ b/zuul/cmd/client.py
@@ -232,9 +232,6 @@ class Client(zuul.cmd.ZuulApp):
'number': {
'title': 'Number'
},
- 'parameters': {
- 'title': 'Parameters'
- },
'worker.name': {
'title': 'Worker'
},
diff --git a/zuul/cmd/cloner.py b/zuul/cmd/cloner.py
index 892216103..d0bb96694 100755
--- a/zuul/cmd/cloner.py
+++ b/zuul/cmd/cloner.py
@@ -65,20 +65,20 @@ class Cloner(zuul.cmd.ZuulApp):
project_env = parser.add_argument_group(
'project tuning'
- )
+ )
project_env.add_argument(
'--branch',
help=('branch to checkout instead of Zuul selected branch, '
'for example to specify an alternate branch to test '
'client library compatibility.')
- )
+ )
project_env.add_argument(
'--project-branch', nargs=1, action='append',
metavar='PROJECT=BRANCH',
help=('project-specific branch to checkout which takes precedence '
'over --branch if it is provided; may be specified multiple '
'times.')
- )
+ )
zuul_env = parser.add_argument_group(
'zuul environnement',
diff --git a/zuul/cmd/server.py b/zuul/cmd/server.py
index 25dab6f42..832eae412 100755
--- a/zuul/cmd/server.py
+++ b/zuul/cmd/server.py
@@ -150,6 +150,7 @@ class Server(zuul.cmd.ZuulApp):
import zuul.webapp
import zuul.rpclistener
+ signal.signal(signal.SIGUSR2, zuul.cmd.stack_dump_handler)
if (self.config.has_option('gearman_server', 'start') and
self.config.getboolean('gearman_server', 'start')):
self.start_gear_server()
@@ -165,7 +166,8 @@ class Server(zuul.cmd.ZuulApp):
merger = zuul.merger.client.MergeClient(self.config, self.sched)
gerrit = zuul.trigger.gerrit.Gerrit(self.config, self.sched)
timer = zuul.trigger.timer.Timer(self.config, self.sched)
- zuultrigger = zuul.trigger.zuultrigger.ZuulTrigger(self.config, self.sched)
+ zuultrigger = zuul.trigger.zuultrigger.ZuulTrigger(self.config,
+ self.sched)
if self.config.has_option('zuul', 'status_expiry'):
cache_expiry = self.config.getint('zuul', 'status_expiry')
else:
@@ -203,7 +205,6 @@ class Server(zuul.cmd.ZuulApp):
signal.signal(signal.SIGHUP, self.reconfigure_handler)
signal.signal(signal.SIGUSR1, self.exit_handler)
- signal.signal(signal.SIGUSR2, zuul.cmd.stack_dump_handler)
signal.signal(signal.SIGTERM, self.term_handler)
while True:
try:
diff --git a/zuul/launcher/gearman.py b/zuul/launcher/gearman.py
index 564a554ba..915151e7b 100644
--- a/zuul/launcher/gearman.py
+++ b/zuul/launcher/gearman.py
@@ -231,6 +231,8 @@ class Gearman(object):
s_config = {}
s_config.update((k, v.format(item=item, job=job,
change=item.change))
+ if isinstance(v, basestring)
+ else (k, v)
for k, v in s.items())
(swift_instructions['URL'],
diff --git a/zuul/layoutvalidator.py b/zuul/layoutvalidator.py
index 5a32c769e..88d10e25b 100644
--- a/zuul/layoutvalidator.py
+++ b/zuul/layoutvalidator.py
@@ -20,6 +20,7 @@ import string
from zuul.trigger import gerrit
+
# Several forms accept either a single item or a list, this makes
# specifying that in the schema easy (and explicit).
def toList(x):
@@ -105,6 +106,7 @@ class LayoutSchema(object):
'merge-failure-message': str,
'footer-message': str,
'dequeue-on-new-patchset': bool,
+ 'ignore-dependencies': bool,
'trigger': trigger,
'success': report_actions,
'failure': report_actions,
@@ -133,6 +135,11 @@ class LayoutSchema(object):
'logserver-prefix': str,
}
+ skip_if = {'project': str,
+ 'branch': str,
+ 'all-files-match-any': toList(str),
+ }
+
job = {v.Required('name'): str,
'queue-name': str,
'failure-message': str,
@@ -145,6 +152,7 @@ class LayoutSchema(object):
'branch': toList(str),
'files': toList(str),
'swift': toList(swift),
+ 'skip-if': toList(skip_if),
}
jobs = [job]
@@ -152,11 +160,11 @@ class LayoutSchema(object):
def validateJob(self, value, path=[]):
if isinstance(value, list):
- for (i, v) in enumerate(value):
- self.validateJob(v, path + [i])
+ for (i, val) in enumerate(value):
+ self.validateJob(val, path + [i])
elif isinstance(value, dict):
- for k, v in value.items():
- self.validateJob(v, path + [k])
+ for k, val in value.items():
+ self.validateJob(val, path + [k])
else:
self.job_name.schema(value)
@@ -278,4 +286,3 @@ class LayoutValidator(object):
for pipeline in data['pipelines']:
if 'gerrit' in pipeline['trigger']:
gerrit.validate_trigger(pipeline['trigger'])
-
diff --git a/zuul/lib/cloner.py b/zuul/lib/cloner.py
index 1507fbd46..67e238a8a 100644
--- a/zuul/lib/cloner.py
+++ b/zuul/lib/cloner.py
@@ -39,8 +39,8 @@ class Cloner(object):
self.cache_dir = cache_dir
self.projects = projects
self.workspace = workspace
- self.zuul_branch = zuul_branch
- self.zuul_ref = zuul_ref
+ self.zuul_branch = zuul_branch or ''
+ self.zuul_ref = zuul_ref or ''
self.zuul_url = zuul_url
self.project_branches = project_branches or {}
@@ -80,8 +80,7 @@ class Cloner(object):
new_repo = git.Repo.clone_from(git_cache, dest)
self.log.info("Updating origin remote in repo %s to %s",
project, git_upstream)
- origin = new_repo.remotes.origin.config_writer.set(
- 'url', git_upstream)
+ new_repo.remotes.origin.config_writer.set('url', git_upstream)
else:
self.log.info("Creating repo %s from upstream %s",
project, git_upstream)
diff --git a/zuul/lib/gerrit.py b/zuul/lib/gerrit.py
index 5aad953f3..9aeff3df8 100644
--- a/zuul/lib/gerrit.py
+++ b/zuul/lib/gerrit.py
@@ -120,7 +120,7 @@ class Gerrit(object):
if v is True:
cmd += ' --%s' % k
else:
- cmd += ' --%s %s' % (k, v)
+ cmd += ' --label %s=%s' % (k, v)
cmd += ' %s' % change
out, err = self._ssh(cmd)
return err
diff --git a/zuul/lib/swift.py b/zuul/lib/swift.py
index 5781a4d82..3c411d3ff 100644
--- a/zuul/lib/swift.py
+++ b/zuul/lib/swift.py
@@ -45,7 +45,8 @@ class Swift(object):
try:
if self.config.has_section('swift'):
if (not self.config.has_option('swift', 'Send-Temp-Url-Key')
- or self.config.getboolean('swift', 'Send-Temp-Url-Key')):
+ or self.config.getboolean('swift',
+ 'Send-Temp-Url-Key')):
self.connect()
# Tell swift of our key
@@ -146,8 +147,10 @@ class Swift(object):
settings[key] = kwargs[altkey]
elif self.config.has_option('swift', 'default_' + key):
settings[key] = self.config.get('swift', 'default_' + key)
+ # TODO: these are always strings; some should be converted
+ # to ints.
- expires = int(time() + settings['expiry'])
+ expires = int(time() + int(settings['expiry']))
redirect = ''
url = os.path.join(self.storage_url, settings['container'],
diff --git a/zuul/merger/merger.py b/zuul/merger/merger.py
index 922c67ef3..8774f109e 100644
--- a/zuul/merger/merger.py
+++ b/zuul/merger/merger.py
@@ -105,7 +105,7 @@ class Repo(object):
def getCommitFromRef(self, refname):
repo = self.createRepoObject()
- if not refname in repo.refs:
+ if refname not in repo.refs:
return None
ref = repo.refs[refname]
return ref.commit
diff --git a/zuul/model.py b/zuul/model.py
index 67ce8be96..8dc28dfbe 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -73,6 +73,7 @@ class Pipeline(object):
self.success_message = None
self.footer_message = None
self.dequeue_on_new_patchset = True
+ self.ignore_dependencies = False
self.job_trees = {} # project -> JobTree
self.manager = None
self.queues = []
@@ -111,15 +112,20 @@ class Pipeline(object):
return queue
return None
+ def removeQueue(self, queue):
+ self.queues.remove(queue)
+
def getJobTree(self, project):
tree = self.job_trees.get(project)
return tree
- def getJobs(self, changeish):
- tree = self.getJobTree(changeish.project)
+ def getJobs(self, item):
+ if not item.live:
+ return []
+ tree = self.getJobTree(item.change.project)
if not tree:
return []
- return changeish.filterJobs(tree.getJobs())
+ return item.change.filterJobs(tree.getJobs())
def _findJobsToRun(self, job_trees, item):
torun = []
@@ -148,27 +154,29 @@ class Pipeline(object):
return torun
def findJobsToRun(self, item):
+ if not item.live:
+ return []
tree = self.getJobTree(item.change.project)
if not tree:
return []
return self._findJobsToRun(tree.job_trees, item)
def haveAllJobsStarted(self, item):
- for job in self.getJobs(item.change):
+ for job in self.getJobs(item):
build = item.current_build_set.getBuild(job.name)
if not build or not build.start_time:
return False
return True
def areAllJobsComplete(self, item):
- for job in self.getJobs(item.change):
+ for job in self.getJobs(item):
build = item.current_build_set.getBuild(job.name)
if not build or not build.result:
return False
return True
def didAllJobsSucceed(self, item):
- for job in self.getJobs(item.change):
+ for job in self.getJobs(item):
if not job.voting:
continue
build = item.current_build_set.getBuild(job.name)
@@ -184,7 +192,7 @@ class Pipeline(object):
return True
def didAnyJobFail(self, item):
- for job in self.getJobs(item.change):
+ for job in self.getJobs(item):
if not job.voting:
continue
build = item.current_build_set.getBuild(job.name)
@@ -193,7 +201,9 @@ class Pipeline(object):
return False
def isHoldingFollowingChanges(self, item):
- for job in self.getJobs(item.change):
+ if not item.live:
+ return False
+ for job in self.getJobs(item):
if not job.hold_following_changes:
continue
build = item.current_build_set.getBuild(job.name)
@@ -256,7 +266,6 @@ class Pipeline(object):
j_queues.append(j_queue)
j_queue['heads'] = []
j_queue['window'] = queue.window
- j_queue['dependent'] = queue.dependent
j_changes = []
for e in queue.queue:
@@ -266,8 +275,8 @@ class Pipeline(object):
j_changes = []
j_changes.append(e.formatJSON())
if (len(j_changes) > 1 and
- (j_changes[-2]['remaining_time'] is not None) and
- (j_changes[-1]['remaining_time'] is not None)):
+ (j_changes[-2]['remaining_time'] is not None) and
+ (j_changes[-1]['remaining_time'] is not None)):
j_changes[-1]['remaining_time'] = max(
j_changes[-2]['remaining_time'],
j_changes[-1]['remaining_time'])
@@ -303,8 +312,8 @@ class ChangeQueue(object):
different projects; this is one of them. For instance, there may
a queue shared by interrelated projects foo and bar, and a second
queue for independent project baz. Pipelines have one or more
- PipelineQueues."""
- def __init__(self, pipeline, dependent=True, window=0, window_floor=1,
+ ChangeQueues."""
+ def __init__(self, pipeline, window=0, window_floor=1,
window_increase_type='linear', window_increase_factor=1,
window_decrease_type='exponential', window_decrease_factor=2):
self.pipeline = pipeline
@@ -314,7 +323,6 @@ class ChangeQueue(object):
self.projects = []
self._jobs = set()
self.queue = []
- self.dependent = dependent
self.window = window
self.window_floor = window_floor
self.window_increase_type = window_increase_type
@@ -340,7 +348,7 @@ class ChangeQueue(object):
for job in self._jobs:
if job.queue_name:
if (self.assigned_name and
- job.queue_name != self.assigned_name):
+ job.queue_name != self.assigned_name):
raise Exception("More than one name assigned to "
"change queue: %s != %s" %
(self.assigned_name, job.queue_name))
@@ -348,14 +356,15 @@ class ChangeQueue(object):
self.name = self.assigned_name or self.generated_name
def enqueueChange(self, change):
- item = QueueItem(self.pipeline, change)
+ item = QueueItem(self, change)
self.enqueueItem(item)
item.enqueue_time = time.time()
return item
def enqueueItem(self, item):
item.pipeline = self.pipeline
- if self.dependent and self.queue:
+ item.queue = self
+ if self.queue:
item.item_ahead = self.queue[-1]
item.item_ahead.items_behind.append(item)
self.queue.append(item)
@@ -374,8 +383,6 @@ class ChangeQueue(object):
item.dequeue_time = time.time()
def moveItem(self, item, item_ahead):
- if not self.dependent:
- return False
if item.item_ahead == item_ahead:
return False
# Remove from current location
@@ -399,20 +406,20 @@ class ChangeQueue(object):
# TODO merge semantics
def isActionable(self, item):
- if self.dependent and self.window:
+ if self.window:
return item in self.queue[:self.window]
else:
return True
def increaseWindowSize(self):
- if self.dependent:
+ if self.window:
if self.window_increase_type == 'linear':
self.window += self.window_increase_factor
elif self.window_increase_type == 'exponential':
self.window *= self.window_increase_factor
def decreaseWindowSize(self):
- if self.dependent:
+ if self.window:
if self.window_decrease_type == 'linear':
self.window = max(
self.window_floor,
@@ -445,12 +452,19 @@ class Job(object):
self.failure_pattern = None
self.success_pattern = None
self.parameter_function = None
- self.hold_following_changes = False
- self.voting = True
+ # A metajob should only supply values for attributes that have
+ # been explicitly provided, so avoid setting boolean defaults.
+ if self.is_metajob:
+ self.hold_following_changes = None
+ self.voting = None
+ else:
+ self.hold_following_changes = False
+ self.voting = True
self.branches = []
self._branches = []
self.files = []
self._files = []
+ self.skip_if_matcher = None
self.swift = {}
def __str__(self):
@@ -459,6 +473,10 @@ class Job(object):
def __repr__(self):
return '<Job %s>' % (self.name)
+ @property
+ def is_metajob(self):
+ return self.name.startswith('^')
+
def copy(self, other):
if other.failure_message:
self.failure_message = other.failure_message
@@ -476,10 +494,15 @@ class Job(object):
if other.files:
self.files = other.files[:]
self._files = other._files[:]
+ if other.skip_if_matcher:
+ self.skip_if_matcher = other.skip_if_matcher.copy()
if other.swift:
self.swift.update(other.swift)
- self.hold_following_changes = other.hold_following_changes
- self.voting = other.voting
+ # Only non-None values should be copied for boolean attributes.
+ if other.hold_following_changes is not None:
+ self.hold_following_changes = other.hold_following_changes
+ if other.voting is not None:
+ self.voting = other.voting
def changeMatches(self, change):
matches_branch = False
@@ -500,6 +523,9 @@ class Job(object):
if self.files and not matches_file:
return False
+ if self.skip_if_matcher and self.skip_if_matcher.matches(change):
+ return False
+
return True
@@ -650,8 +676,9 @@ class BuildSet(object):
class QueueItem(object):
"""A changish inside of a Pipeline queue"""
- def __init__(self, pipeline, change):
- self.pipeline = pipeline
+ def __init__(self, queue, change):
+ self.pipeline = queue.pipeline
+ self.queue = queue
self.change = change # a changeish
self.build_sets = []
self.dequeued_needing_change = False
@@ -662,7 +689,8 @@ class QueueItem(object):
self.enqueue_time = None
self.dequeue_time = None
self.reported = False
- self.active = False
+ self.active = False # Whether an item is within an active window
+ self.live = True # Whether an item is intended to be processed at all
def __repr__(self):
if self.pipeline:
@@ -694,6 +722,7 @@ class QueueItem(object):
changeish = self.change
ret = {}
ret['active'] = self.active
+ ret['live'] = self.live
if hasattr(changeish, 'url') and changeish.url is not None:
ret['url'] = changeish.url
else:
@@ -714,7 +743,7 @@ class QueueItem(object):
else:
ret['owner'] = None
max_remaining = 0
- for job in self.pipeline.getJobs(changeish):
+ for job in self.pipeline.getJobs(self):
now = time.time()
build = self.current_build_set.getBuild(job.name)
elapsed = None
@@ -764,7 +793,6 @@ class QueueItem(object):
'canceled': build.canceled if build else None,
'retry': build.retry if build else None,
'number': build.number if build else None,
- 'parameters': build.parameters if build else None,
'worker': worker
})
@@ -790,7 +818,7 @@ class QueueItem(object):
changeish.project.name,
changeish._id(),
self.item_ahead)
- for job in self.pipeline.getJobs(changeish):
+ for job in self.pipeline.getJobs(self):
build = self.current_build_set.getBuild(job.name)
if build:
result = build.result
@@ -852,7 +880,7 @@ class Change(Changeish):
self.refspec = None
self.files = []
- self.needs_change = None
+ self.needs_changes = []
self.needed_by_changes = []
self.is_current_patchset = True
self.can_merge = False
@@ -885,8 +913,8 @@ class Change(Changeish):
def getRelatedChanges(self):
related = set()
- if self.needs_change:
- related.add(self.needs_change)
+ for c in self.needs_changes:
+ related.add(c)
for c in self.needed_by_changes:
related.add(c)
related.update(c.getRelatedChanges())
@@ -1150,7 +1178,7 @@ class EventFilter(BaseFilter):
matches_email_re = False
for email_re in self.emails:
if (account_email is not None and
- email_re.search(account_email)):
+ email_re.search(account_email)):
matches_email_re = True
if self.emails and not matches_email_re:
return False
@@ -1253,8 +1281,7 @@ class Layout(object):
if name in self.jobs:
return self.jobs[name]
job = Job(name)
- if name.startswith('^'):
- # This is a meta-job
+ if job.is_metajob:
regex = re.compile(name)
self.metajobs.append((regex, job))
else:
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index 25f3ae0df..131ad62c3 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -1,4 +1,4 @@
-# Copyright 2012-2014 Hewlett-Packard Development Company, L.P.
+# Copyright 2012-2015 Hewlett-Packard Development Company, L.P.
# Copyright 2013 OpenStack Foundation
# Copyright 2013 Antoine "hashar" Musso
# Copyright 2013 Wikimedia Foundation Inc.
@@ -31,6 +31,7 @@ import layoutvalidator
import model
from model import ActionReporter, Pipeline, Project, ChangeQueue
from model import EventFilter, ChangeishFilter
+from zuul import change_matcher
from zuul import version as zuul_version
statsd = extras.try_import('statsd.statsd')
@@ -166,6 +167,14 @@ class MergeCompletedEvent(ResultEvent):
self.commit = commit
+def toList(item):
+ if not item:
+ return []
+ if isinstance(item, list):
+ return item
+ return [item]
+
+
class Scheduler(threading.Thread):
log = logging.getLogger("zuul.Scheduler")
@@ -199,17 +208,38 @@ class Scheduler(threading.Thread):
def testConfig(self, config_path):
return self._parseConfig(config_path)
+ def _parseSkipIf(self, config_job):
+ cm = change_matcher
+ skip_matchers = []
+
+ for config_skip in config_job.get('skip-if', []):
+ nested_matchers = []
+
+ project_regex = config_skip.get('project')
+ if project_regex:
+ nested_matchers.append(cm.ProjectMatcher(project_regex))
+
+ branch_regex = config_skip.get('branch')
+ if branch_regex:
+ nested_matchers.append(cm.BranchMatcher(branch_regex))
+
+ file_regexes = toList(config_skip.get('all-files-match-any'))
+ if file_regexes:
+ file_matchers = [cm.FileMatcher(x) for x in file_regexes]
+ all_files_matcher = cm.MatchAllFiles(file_matchers)
+ nested_matchers.append(all_files_matcher)
+
+ # All patterns need to match a given skip-if predicate
+ skip_matchers.append(cm.MatchAll(nested_matchers))
+
+ if skip_matchers:
+ # Any skip-if predicate can be matched to trigger a skip
+ return cm.MatchAny(skip_matchers)
+
def _parseConfig(self, config_path):
layout = model.Layout()
project_templates = {}
- def toList(item):
- if not item:
- return []
- if isinstance(item, list):
- return item
- return [item]
-
if config_path:
config_path = os.path.expanduser(config_path)
if not os.path.exists(config_path):
@@ -235,7 +265,8 @@ class Scheduler(threading.Thread):
pipeline = Pipeline(conf_pipeline['name'])
pipeline.description = conf_pipeline.get('description')
# TODO(jeblair): remove backwards compatibility:
- pipeline.source = self.triggers[conf_pipeline.get('source', 'gerrit')]
+ pipeline.source = self.triggers[conf_pipeline.get('source',
+ 'gerrit')]
precedence = model.PRECEDENCE_MAP[conf_pipeline.get('precedence')]
pipeline.precedence = precedence
pipeline.failure_message = conf_pipeline.get('failure-message',
@@ -250,6 +281,8 @@ class Scheduler(threading.Thread):
pipeline.footer_message = conf_pipeline.get('footer-message', "")
pipeline.dequeue_on_new_patchset = conf_pipeline.get(
'dequeue-on-new-patchset', True)
+ pipeline.ignore_dependencies = conf_pipeline.get(
+ 'ignore-dependencies', False)
action_reporters = {}
for action in ['start', 'success', 'failure', 'merge-failure']:
@@ -314,16 +347,19 @@ class Scheduler(threading.Thread):
usernames = toList(trigger.get('username'))
if not usernames:
usernames = toList(trigger.get('username_filter'))
- f = EventFilter(trigger=self.triggers['gerrit'],
- types=toList(trigger['event']),
- branches=toList(trigger.get('branch')),
- refs=toList(trigger.get('ref')),
- event_approvals=approvals,
- comments=comments,
- emails=emails,
- usernames=usernames,
- required_approvals=
- toList(trigger.get('require-approval')))
+ f = EventFilter(
+ trigger=self.triggers['gerrit'],
+ types=toList(trigger['event']),
+ branches=toList(trigger.get('branch')),
+ refs=toList(trigger.get('ref')),
+ event_approvals=approvals,
+ comments=comments,
+ emails=emails,
+ usernames=usernames,
+ required_approvals=toList(
+ trigger.get('require-approval')
+ )
+ )
manager.event_filters.append(f)
if 'timer' in conf_pipeline['trigger']:
for trigger in toList(conf_pipeline['trigger']['timer']):
@@ -333,11 +369,14 @@ class Scheduler(threading.Thread):
manager.event_filters.append(f)
if 'zuul' in conf_pipeline['trigger']:
for trigger in toList(conf_pipeline['trigger']['zuul']):
- f = EventFilter(trigger=self.triggers['zuul'],
- types=toList(trigger['event']),
- pipelines=toList(trigger.get('pipeline')),
- required_approvals=
- toList(trigger.get('require-approval')))
+ f = EventFilter(
+ trigger=self.triggers['zuul'],
+ types=toList(trigger['event']),
+ pipelines=toList(trigger.get('pipeline')),
+ required_approvals=toList(
+ trigger.get('require-approval')
+ )
+ )
manager.event_filters.append(f)
for project_template in data.get('project-templates', []):
@@ -388,6 +427,9 @@ class Scheduler(threading.Thread):
if files:
job._files = files
job.files = [re.compile(x) for x in files]
+ skip_if_matcher = self._parseSkipIf(config_job)
+ if skip_if_matcher:
+ job.skip_if_matcher = skip_if_matcher
swift = toList(config_job.get('swift'))
if swift:
for s in swift:
@@ -631,11 +673,15 @@ class Scheduler(threading.Thread):
self.log.debug("Re-enqueueing changes for pipeline %s" % name)
items_to_remove = []
builds_to_remove = []
+ last_head = None
for shared_queue in old_pipeline.queues:
for item in shared_queue.queue:
+ if not item.item_ahead:
+ last_head = item
item.item_ahead = None
item.items_behind = []
item.pipeline = None
+ item.queue = None
project = layout.projects.get(item.change.project.name)
if not project:
self.log.warning("Unable to find project for "
@@ -651,7 +697,8 @@ class Scheduler(threading.Thread):
build.job = job
else:
builds_to_remove.append(build)
- if not new_pipeline.manager.reEnqueueItem(item):
+ if not new_pipeline.manager.reEnqueueItem(item,
+ last_head):
items_to_remove.append(item)
for item in items_to_remove:
for build in item.current_build_set.getBuilds():
@@ -692,7 +739,7 @@ class Scheduler(threading.Thread):
break
for item in shared_queue.queue:
if (item.change.number == change_ids[0][0] and
- item.change.patchset == change_ids[0][1]):
+ item.change.patchset == change_ids[0][1]):
change_queue = shared_queue
break
if not change_queue:
@@ -702,7 +749,7 @@ class Scheduler(threading.Thread):
found = False
for item in change_queue.queue:
if (item.change.number == number and
- item.change.patchset == patchset):
+ item.change.patchset == patchset):
found = True
items_to_enqueue.append(item)
break
@@ -965,6 +1012,8 @@ class BasePipelineManager(object):
efilters += str(b)
for f in tree.job._files:
efilters += str(f)
+ if tree.job.skip_if_matcher:
+ efilters += str(tree.job.skip_if_matcher)
if efilters:
efilters = ' ' + efilters
hold = ''
@@ -1017,9 +1066,17 @@ class BasePipelineManager(object):
return True
return False
- def isChangeAlreadyInQueue(self, change):
- for c in self.pipeline.getChangesInQueue():
- if change.equals(c):
+ def isChangeAlreadyInPipeline(self, change):
+ # Checks live items in the pipeline
+ for item in self.pipeline.getAllItems():
+ if item.live and change.equals(item.change):
+ return True
+ return False
+
+ def isChangeAlreadyInQueue(self, change, change_queue):
+ # Checks any item in the specified change queue
+ for item in change_queue.queue:
+ if change.equals(item.change):
return True
return False
@@ -1057,16 +1114,18 @@ class BasePipelineManager(object):
def isChangeReadyToBeEnqueued(self, change):
return True
- def enqueueChangesAhead(self, change, quiet, ignore_requirements):
+ def enqueueChangesAhead(self, change, quiet, ignore_requirements,
+ change_queue):
return True
- def enqueueChangesBehind(self, change, quiet, ignore_requirements):
+ def enqueueChangesBehind(self, change, quiet, ignore_requirements,
+ change_queue):
return True
- def checkForChangesNeededBy(self, change):
+ def checkForChangesNeededBy(self, change, change_queue):
return True
- def getFailingDependentItem(self, item):
+ def getFailingDependentItems(self, item):
return None
def getDependentItems(self, item):
@@ -1087,42 +1146,54 @@ class BasePipelineManager(object):
return None
def findOldVersionOfChangeAlreadyInQueue(self, change):
- for c in self.pipeline.getChangesInQueue():
- if change.isUpdateOf(c):
- return c
+ for item in self.pipeline.getAllItems():
+ if not item.live:
+ continue
+ if change.isUpdateOf(item.change):
+ return item
return None
def removeOldVersionsOfChange(self, change):
if not self.pipeline.dequeue_on_new_patchset:
return
- old_change = self.findOldVersionOfChangeAlreadyInQueue(change)
- if old_change:
+ old_item = self.findOldVersionOfChangeAlreadyInQueue(change)
+ if old_item:
self.log.debug("Change %s is a new version of %s, removing %s" %
- (change, old_change, old_change))
- self.removeChange(old_change)
+ (change, old_item.change, old_item))
+ self.removeItem(old_item)
def removeAbandonedChange(self, change):
self.log.debug("Change %s abandoned, removing." % change)
- self.removeChange(change)
-
- def reEnqueueItem(self, item):
- change_queue = self.pipeline.getQueue(item.change.project)
- if change_queue:
- self.log.debug("Re-enqueing change %s in queue %s" %
- (item.change, change_queue))
- change_queue.enqueueItem(item)
- self.reportStats(item)
- return True
- else:
- self.log.error("Unable to find change queue for project %s" %
- item.change.project)
- return False
+ for item in self.pipeline.getAllItems():
+ if not item.live:
+ continue
+ if item.change.equals(change):
+ self.removeItem(item)
+
+ def reEnqueueItem(self, item, last_head):
+ with self.getChangeQueue(item.change, last_head.queue) as change_queue:
+ if change_queue:
+ self.log.debug("Re-enqueing change %s in queue %s" %
+ (item.change, change_queue))
+ change_queue.enqueueItem(item)
+ self.reportStats(item)
+ return True
+ else:
+ self.log.error("Unable to find change queue for project %s" %
+ item.change.project)
+ return False
def addChange(self, change, quiet=False, enqueue_time=None,
- ignore_requirements=False):
+ ignore_requirements=False, live=True,
+ change_queue=None):
self.log.debug("Considering adding change %s" % change)
- if self.isChangeAlreadyInQueue(change):
- self.log.debug("Change %s is already in queue, ignoring" % change)
+
+ # If we are adding a live change, check if it's a live item
+ # anywhere in the pipeline. Otherwise, we will perform the
+ # duplicate check below on the specific change_queue.
+ if live and self.isChangeAlreadyInPipeline(change):
+ self.log.debug("Change %s is already in pipeline, "
+ "ignoring" % change)
return True
if not self.isChangeReadyToBeEnqueued(change):
@@ -1137,16 +1208,24 @@ class BasePipelineManager(object):
"requirement %s" % (change, f))
return False
- if not self.enqueueChangesAhead(change, quiet, ignore_requirements):
- self.log.debug("Failed to enqueue changes ahead of %s" % change)
- return False
+ with self.getChangeQueue(change, change_queue) as change_queue:
+ if not change_queue:
+ self.log.debug("Unable to find change queue for "
+ "change %s in project %s" %
+ (change, change.project))
+ return False
- if self.isChangeAlreadyInQueue(change):
- self.log.debug("Change %s is already in queue, ignoring" % change)
- return True
+ if not self.enqueueChangesAhead(change, quiet, ignore_requirements,
+ change_queue):
+ self.log.debug("Failed to enqueue changes "
+ "ahead of %s" % change)
+ return False
+
+ if self.isChangeAlreadyInQueue(change, change_queue):
+ self.log.debug("Change %s is already in queue, "
+ "ignoring" % change)
+ return True
- change_queue = self.pipeline.getQueue(change.project)
- if change_queue:
self.log.debug("Adding change %s to queue %s" %
(change, change_queue))
if not quiet:
@@ -1155,29 +1234,26 @@ class BasePipelineManager(object):
item = change_queue.enqueueChange(change)
if enqueue_time:
item.enqueue_time = enqueue_time
+ item.live = live
self.reportStats(item)
- self.enqueueChangesBehind(change, quiet, ignore_requirements)
- self.sched.triggers['zuul'].onChangeEnqueued(item.change, self.pipeline)
- else:
- self.log.error("Unable to find change queue for project %s" %
- change.project)
- return False
+ self.enqueueChangesBehind(change, quiet, ignore_requirements,
+ change_queue)
+ self.sched.triggers['zuul'].onChangeEnqueued(item.change,
+ self.pipeline)
+ return True
def dequeueItem(self, item):
self.log.debug("Removing change %s from queue" % item.change)
- change_queue = self.pipeline.getQueue(item.change.project)
- change_queue.dequeueItem(item)
+ item.queue.dequeueItem(item)
- def removeChange(self, change):
- # Remove a change from the queue, probably because it has been
+ def removeItem(self, item):
+ # Remove an item from the queue, probably because it has been
# superseded by another change.
- for item in self.pipeline.getAllItems():
- if item.change == change:
- self.log.debug("Canceling builds behind change: %s "
- "because it is being removed." % item.change)
- self.cancelJobs(item)
- self.dequeueItem(item)
- self.reportStats(item)
+ self.log.debug("Canceling builds behind change: %s "
+ "because it is being removed." % item.change)
+ self.cancelJobs(item)
+ self.dequeueItem(item)
+ self.reportStats(item)
def _makeMergerItem(self, item):
# Create a dictionary with all info about the item needed by
@@ -1277,32 +1353,34 @@ class BasePipelineManager(object):
def _processOneItem(self, item, nnfi, ready_ahead):
changed = False
item_ahead = item.item_ahead
- change_queue = self.pipeline.getQueue(item.change.project)
+ if item_ahead and (not item_ahead.live):
+ item_ahead = None
+ change_queue = item.queue
failing_reasons = [] # Reasons this item is failing
- if self.checkForChangesNeededBy(item.change) is not True:
+ if self.checkForChangesNeededBy(item.change, change_queue) is not True:
# It's not okay to enqueue this change, we should remove it.
self.log.info("Dequeuing change %s because "
"it can no longer merge" % item.change)
self.cancelJobs(item)
self.dequeueItem(item)
self.pipeline.setDequeuedNeedingChange(item)
- try:
- self.reportItem(item)
- except MergeFailure:
- pass
+ if item.live:
+ try:
+ self.reportItem(item)
+ except MergeFailure:
+ pass
return (True, nnfi, ready_ahead)
- dep_item = self.getFailingDependentItem(item)
+ dep_items = self.getFailingDependentItems(item)
actionable = change_queue.isActionable(item)
item.active = actionable
ready = False
- if dep_item:
+ if dep_items:
failing_reasons.append('a needed change is failing')
self.cancelJobs(item, prime=False)
else:
item_ahead_merged = False
- if ((item_ahead and item_ahead.change.is_merged) or
- not change_queue.dependent):
+ if (item_ahead and item_ahead.change.is_merged):
item_ahead_merged = True
if (item_ahead != nnfi and not item_ahead_merged):
# Our current base is different than what we expected,
@@ -1325,7 +1403,12 @@ class BasePipelineManager(object):
changed = True
if self.pipeline.didAnyJobFail(item):
failing_reasons.append("at least one job failed")
- if (not item_ahead) and self.pipeline.areAllJobsComplete(item):
+ if (not item.live) and (not item.items_behind):
+ failing_reasons.append("is a non-live item with no items behind")
+ self.dequeueItem(item)
+ changed = True
+ if ((not item_ahead) and self.pipeline.areAllJobsComplete(item)
+ and item.live):
try:
self.reportItem(item)
except MergeFailure:
@@ -1337,7 +1420,7 @@ class BasePipelineManager(object):
self.cancelJobs(item_behind)
self.dequeueItem(item)
changed = True
- elif not failing_reasons:
+ elif not failing_reasons and item.live:
nnfi = item
item.current_build_set.failing_reasons = failing_reasons
if failing_reasons:
@@ -1421,7 +1504,7 @@ class BasePipelineManager(object):
item.change.branch)
self.log.info("Reported change %s status: all-succeeded: %s, "
"merged: %s" % (item.change, succeeded, merged))
- change_queue = self.pipeline.getQueue(item.change.project)
+ change_queue = item.queue
if not (succeeded and merged):
self.log.debug("Reported change %s failed tests or failed "
"to merge" % (item.change))
@@ -1491,7 +1574,7 @@ class BasePipelineManager(object):
else:
url_pattern = None
- for job in self.pipeline.getJobs(item.change):
+ for job in self.pipeline.getJobs(item):
build = item.current_build_set.getBuild(job.name)
result = build.result
pattern = url_pattern
@@ -1671,6 +1754,18 @@ class BasePipelineManager(object):
self.log.exception("Exception reporting pipeline stats")
+class DynamicChangeQueueContextManager(object):
+ def __init__(self, change_queue):
+ self.change_queue = change_queue
+
+ def __enter__(self):
+ return self.change_queue
+
+ def __exit__(self, etype, value, tb):
+ if self.change_queue and not self.change_queue.queue:
+ self.change_queue.pipeline.removeQueue(self.change_queue.queue)
+
+
class IndependentPipelineManager(BasePipelineManager):
log = logging.getLogger("zuul.IndependentPipelineManager")
changes_merge = False
@@ -1678,11 +1773,86 @@ class IndependentPipelineManager(BasePipelineManager):
def _postConfig(self, layout):
super(IndependentPipelineManager, self)._postConfig(layout)
- change_queue = ChangeQueue(self.pipeline, dependent=False)
- for project in self.pipeline.getProjects():
- change_queue.addProject(project)
-
+ def getChangeQueue(self, change, existing=None):
+ # creates a new change queue for every change
+ if existing:
+ return DynamicChangeQueueContextManager(existing)
+ if change.project not in self.pipeline.getProjects():
+ return DynamicChangeQueueContextManager(None)
+ change_queue = ChangeQueue(self.pipeline)
+ change_queue.addProject(change.project)
self.pipeline.addQueue(change_queue)
+ return DynamicChangeQueueContextManager(change_queue)
+
+ def enqueueChangesAhead(self, change, quiet, ignore_requirements,
+ change_queue):
+ ret = self.checkForChangesNeededBy(change, change_queue)
+ if ret in [True, False]:
+ return ret
+ self.log.debug(" Changes %s must be merged ahead of %s" %
+ (ret, change))
+ for needed_change in ret:
+ # This differs from the dependent pipeline by enqueuing
+ # changes ahead as "not live", that is, not intended to
+ # have jobs run. Also, pipeline requirements are always
+ # ignored (which is safe because the changes are not
+ # live).
+ r = self.addChange(needed_change, quiet=True,
+ ignore_requirements=True,
+ live=False, change_queue=change_queue)
+ if not r:
+ return False
+ return True
+
+ def checkForChangesNeededBy(self, change, change_queue):
+ if self.pipeline.ignore_dependencies:
+ return True
+ self.log.debug("Checking for changes needed by %s:" % change)
+        # Return true if okay to proceed enqueuing this change,
+ # false if the change should not be enqueued.
+ if not hasattr(change, 'needs_changes'):
+ self.log.debug(" Changeish does not support dependencies")
+ return True
+ if not change.needs_changes:
+ self.log.debug(" No changes needed")
+ return True
+ changes_needed = []
+ for needed_change in change.needs_changes:
+ self.log.debug(" Change %s needs change %s:" % (
+ change, needed_change))
+ if needed_change.is_merged:
+ self.log.debug(" Needed change is merged")
+ continue
+ if self.isChangeAlreadyInQueue(needed_change, change_queue):
+ self.log.debug(" Needed change is already ahead in the queue")
+ continue
+ self.log.debug(" Change %s is needed" % needed_change)
+ if needed_change not in changes_needed:
+ changes_needed.append(needed_change)
+ continue
+ # This differs from the dependent pipeline check in not
+ # verifying that the dependent change is mergable.
+ if changes_needed:
+ return changes_needed
+ return True
+
+ def dequeueItem(self, item):
+ super(IndependentPipelineManager, self).dequeueItem(item)
+ # An independent pipeline manager dynamically removes empty
+ # queues
+ if not item.queue.queue:
+ self.pipeline.removeQueue(item.queue)
+
+
+class StaticChangeQueueContextManager(object):
+ def __init__(self, change_queue):
+ self.change_queue = change_queue
+
+ def __enter__(self):
+ return self.change_queue
+
+ def __exit__(self, etype, value, tb):
+ pass
class DependentPipelineManager(BasePipelineManager):
@@ -1744,6 +1914,12 @@ class DependentPipelineManager(BasePipelineManager):
new_change_queues.append(a)
return new_change_queues
+ def getChangeQueue(self, change, existing=None):
+ if existing:
+ return StaticChangeQueueContextManager(existing)
+ return StaticChangeQueueContextManager(
+ self.pipeline.getQueue(change.project))
+
def isChangeReadyToBeEnqueued(self, change):
if not self.pipeline.source.canMerge(change,
self.getSubmitAllowNeeds()):
@@ -1751,71 +1927,111 @@ class DependentPipelineManager(BasePipelineManager):
return False
return True
- def enqueueChangesBehind(self, change, quiet, ignore_requirements):
+ def enqueueChangesBehind(self, change, quiet, ignore_requirements,
+ change_queue):
to_enqueue = []
self.log.debug("Checking for changes needing %s:" % change)
if not hasattr(change, 'needed_by_changes'):
self.log.debug(" Changeish does not support dependencies")
return
- for needs in change.needed_by_changes:
- if self.pipeline.source.canMerge(needs,
+ for other_change in change.needed_by_changes:
+ with self.getChangeQueue(other_change) as other_change_queue:
+ if other_change_queue != change_queue:
+ self.log.debug(" Change %s in project %s can not be "
+ "enqueued in the target queue %s" %
+ (other_change, other_change.project,
+ change_queue))
+ continue
+ if self.pipeline.source.canMerge(other_change,
self.getSubmitAllowNeeds()):
self.log.debug(" Change %s needs %s and is ready to merge" %
- (needs, change))
- to_enqueue.append(needs)
+ (other_change, change))
+ to_enqueue.append(other_change)
+
if not to_enqueue:
self.log.debug(" No changes need %s" % change)
for other_change in to_enqueue:
self.addChange(other_change, quiet=quiet,
- ignore_requirements=ignore_requirements)
+ ignore_requirements=ignore_requirements,
+ change_queue=change_queue)
- def enqueueChangesAhead(self, change, quiet, ignore_requirements):
- ret = self.checkForChangesNeededBy(change)
+ def enqueueChangesAhead(self, change, quiet, ignore_requirements,
+ change_queue):
+ ret = self.checkForChangesNeededBy(change, change_queue)
if ret in [True, False]:
return ret
- self.log.debug(" Change %s must be merged ahead of %s" %
+ self.log.debug(" Changes %s must be merged ahead of %s" %
(ret, change))
- return self.addChange(ret, quiet=quiet,
- ignore_requirements=ignore_requirements)
+ for needed_change in ret:
+ r = self.addChange(needed_change, quiet=quiet,
+ ignore_requirements=ignore_requirements,
+ change_queue=change_queue)
+ if not r:
+ return False
+ return True
- def checkForChangesNeededBy(self, change):
+ def checkForChangesNeededBy(self, change, change_queue):
self.log.debug("Checking for changes needed by %s:" % change)
# Return true if okay to proceed enqueing this change,
# false if the change should not be enqueued.
- if not hasattr(change, 'needs_change'):
+ if not hasattr(change, 'needs_changes'):
self.log.debug(" Changeish does not support dependencies")
return True
- if not change.needs_change:
+ if not change.needs_changes:
self.log.debug(" No changes needed")
return True
- if change.needs_change.is_merged:
- self.log.debug(" Needed change is merged")
- return True
- if not change.needs_change.is_current_patchset:
- self.log.debug(" Needed change is not the current patchset")
- return False
- if self.isChangeAlreadyInQueue(change.needs_change):
- self.log.debug(" Needed change is already ahead in the queue")
- return True
- if self.pipeline.source.canMerge(change.needs_change,
- self.getSubmitAllowNeeds()):
- self.log.debug(" Change %s is needed" %
- change.needs_change)
- return change.needs_change
- # The needed change can't be merged.
- self.log.debug(" Change %s is needed but can not be merged" %
- change.needs_change)
- return False
+ changes_needed = []
+ # Ignore supplied change_queue
+ with self.getChangeQueue(change) as change_queue:
+ for needed_change in change.needs_changes:
+ self.log.debug(" Change %s needs change %s:" % (
+ change, needed_change))
+ if needed_change.is_merged:
+ self.log.debug(" Needed change is merged")
+ continue
+ with self.getChangeQueue(needed_change) as needed_change_queue:
+ if needed_change_queue != change_queue:
+ self.log.debug(" Change %s in project %s does not "
+ "share a change queue with %s "
+ "in project %s" %
+ (needed_change, needed_change.project,
+ change, change.project))
+ return False
+ if not needed_change.is_current_patchset:
+ self.log.debug(" Needed change is not the "
+ "current patchset")
+ return False
+ if self.isChangeAlreadyInQueue(needed_change, change_queue):
+ self.log.debug(" Needed change is already ahead "
+ "in the queue")
+ continue
+ if self.pipeline.source.canMerge(needed_change,
+ self.getSubmitAllowNeeds()):
+ self.log.debug(" Change %s is needed" % needed_change)
+ if needed_change not in changes_needed:
+ changes_needed.append(needed_change)
+ continue
+ # The needed change can't be merged.
+ self.log.debug(" Change %s is needed but can not be merged" %
+ needed_change)
+ return False
+ if changes_needed:
+ return changes_needed
+ return True
- def getFailingDependentItem(self, item):
- if not hasattr(item.change, 'needs_change'):
- return None
- if not item.change.needs_change:
+ def getFailingDependentItems(self, item):
+ if not hasattr(item.change, 'needs_changes'):
return None
- needs_item = self.getItemForChange(item.change.needs_change)
- if not needs_item:
+ if not item.change.needs_changes:
return None
- if needs_item.current_build_set.failing_reasons:
- return needs_item
+ failing_items = set()
+ for needed_change in item.change.needs_changes:
+ needed_item = self.getItemForChange(needed_change)
+ if not needed_item:
+ continue
+ if needed_item.current_build_set.failing_reasons:
+ failing_items.add(needed_item)
+ if failing_items:
+ return failing_items
return None
diff --git a/zuul/trigger/gerrit.py b/zuul/trigger/gerrit.py
index 0c0a37654..c5fdf9af5 100644
--- a/zuul/trigger/gerrit.py
+++ b/zuul/trigger/gerrit.py
@@ -13,6 +13,7 @@
# under the License.
import logging
+import re
import threading
import time
import urllib2
@@ -93,7 +94,6 @@ class GerritEventConnector(threading.Thread):
refresh=True)
self.sched.addEvent(event)
- self.gerrit.eventDone()
def run(self):
while True:
@@ -103,6 +103,8 @@ class GerritEventConnector(threading.Thread):
self._handleEvent()
except:
self.log.exception("Exception moving Gerrit event:")
+ finally:
+ self.gerrit.eventDone()
class Gerrit(object):
@@ -111,6 +113,9 @@ class Gerrit(object):
replication_timeout = 300
replication_retry_interval = 5
+ depends_on_re = re.compile(r"^Depends-On: (I[0-9a-f]{40})\s*$",
+ re.MULTILINE | re.IGNORECASE)
+
def __init__(self, config, sched):
self._change_cache = {}
self.sched = sched
@@ -250,7 +255,7 @@ class Gerrit(object):
data = change._data
if not data:
return False
- if not 'submitRecords' in data:
+ if 'submitRecords' not in data:
return False
try:
for sr in data['submitRecords']:
@@ -304,7 +309,7 @@ class Gerrit(object):
change = NullChange(project)
return change
- def _getChange(self, number, patchset, refresh=False):
+ def _getChange(self, number, patchset, refresh=False, history=None):
key = '%s,%s' % (number, patchset)
change = None
if key in self._change_cache:
@@ -318,7 +323,7 @@ class Gerrit(object):
key = '%s,%s' % (change.number, change.patchset)
self._change_cache[key] = change
try:
- self.updateChange(change)
+ self.updateChange(change, history)
except Exception:
del self._change_cache[key]
raise
@@ -328,18 +333,36 @@ class Gerrit(object):
# This is a best-effort function in case Gerrit is unable to return
# a particular change. It happens.
query = "project:%s status:open" % (project.name,)
- self.log.debug("Running query %s to get project open changes" % (query,))
+ self.log.debug("Running query %s to get project open changes" %
+ (query,))
data = self.gerrit.simpleQuery(query)
changes = []
for record in data:
try:
- changes.append(self._getChange(record['number'],
- record['currentPatchSet']['number']))
+ changes.append(
+ self._getChange(record['number'],
+ record['currentPatchSet']['number']))
except Exception:
- self.log.exception("Unable to query change %s" % (record.get('number'),))
+ self.log.exception("Unable to query change %s" %
+ (record.get('number'),))
return changes
- def updateChange(self, change):
+ def _getDependsOnFromCommit(self, message):
+ records = []
+ seen = set()
+ for match in self.depends_on_re.findall(message):
+ if match in seen:
+ self.log.debug("Ignoring duplicate Depends-On: %s" %
+ (match,))
+ continue
+ seen.add(match)
+ query = "change:%s" % (match,)
+ self.log.debug("Running query %s to find needed changes" %
+ (query,))
+ records.extend(self.gerrit.simpleQuery(query))
+ return records
+
+ def updateChange(self, change, history=None):
self.log.info("Updating information for %s,%s" %
(change.number, change.patchset))
data = self.gerrit.query(change.number)
@@ -379,13 +402,36 @@ class Gerrit(object):
# for dependencies.
return change
- change.needs_change = None
+ if history is None:
+ history = []
+ else:
+ history = history[:]
+ history.append(change.number)
+
+ change.needs_changes = []
if 'dependsOn' in data:
parts = data['dependsOn'][0]['ref'].split('/')
dep_num, dep_ps = parts[3], parts[4]
- dep = self._getChange(dep_num, dep_ps)
- if not dep.is_merged:
- change.needs_change = dep
+ if dep_num in history:
+ raise Exception("Dependency cycle detected: %s in %s" % (
+ dep_num, history))
+ self.log.debug("Getting git-dependent change %s,%s" %
+ (dep_num, dep_ps))
+ dep = self._getChange(dep_num, dep_ps, history=history)
+ if (not dep.is_merged) and dep not in change.needs_changes:
+ change.needs_changes.append(dep)
+
+ for record in self._getDependsOnFromCommit(data['commitMessage']):
+ dep_num = record['number']
+ dep_ps = record['currentPatchSet']['number']
+ if dep_num in history:
+ raise Exception("Dependency cycle detected: %s in %s" % (
+ dep_num, history))
+ self.log.debug("Getting commit-dependent change %s,%s" %
+ (dep_num, dep_ps))
+ dep = self._getChange(dep_num, dep_ps, history=history)
+ if (not dep.is_merged) and dep not in change.needs_changes:
+ change.needs_changes.append(dep)
change.needed_by_changes = []
if 'neededBy' in data:
@@ -393,7 +439,7 @@ class Gerrit(object):
parts = needed['ref'].split('/')
dep_num, dep_ps = parts[3], parts[4]
dep = self._getChange(dep_num, dep_ps)
- if not dep.is_merged and dep.is_current_patchset:
+ if (not dep.is_merged) and dep.is_current_patchset:
change.needed_by_changes.append(dep)
return change
@@ -423,4 +469,3 @@ def validate_trigger(trigger_data):
raise voluptuous.Invalid(
"The event %s does not include ref information, Zuul cannot "
"use ref filter 'ref: %s'" % (event['event'], event['ref']))
-
diff --git a/zuul/trigger/zuultrigger.py b/zuul/trigger/zuultrigger.py
index 27098ab81..4418d6f8e 100644
--- a/zuul/trigger/zuultrigger.py
+++ b/zuul/trigger/zuultrigger.py
@@ -47,8 +47,9 @@ class ZuulTrigger(object):
try:
self._createProjectChangeMergedEvents(change)
except Exception:
- self.log.exception("Unable to create project-change-merged events for %s" %
- (change,))
+ self.log.exception(
+ "Unable to create project-change-merged events for "
+ "%s" % (change,))
def onChangeEnqueued(self, change, pipeline):
# Called each time a change is enqueued in a pipeline
@@ -56,11 +57,13 @@ class ZuulTrigger(object):
try:
self._createParentChangeEnqueuedEvents(change, pipeline)
except Exception:
- self.log.exception("Unable to create parent-change-enqueued events for %s in %s" %
- (change, pipeline))
+ self.log.exception(
+ "Unable to create parent-change-enqueued events for "
+ "%s in %s" % (change, pipeline))
def _createProjectChangeMergedEvents(self, change):
- changes = self.sched.triggers['gerrit'].getProjectOpenChanges(change.project)
+ changes = self.sched.triggers['gerrit'].getProjectOpenChanges(
+ change.project)
for open_change in changes:
self._createProjectChangeMergedEvent(open_change)