-rw-r--r--  doc/source/discussion/components.rst | 25
-rw-r--r--  doc/source/howtos/nodepool_install.rst | 3
-rw-r--r--  doc/source/howtos/ubuntu_setup.rst | 54
-rw-r--r--  doc/source/howtos/zuul-from-scratch.rst | 1
-rw-r--r--  doc/source/howtos/zuul_install.rst | 3
-rw-r--r--  doc/source/reference/drivers/github.rst | 4
-rw-r--r--  doc/source/reference/drivers/pagure.rst | 6
-rw-r--r--  doc/source/reference/pipeline_def.rst | 13
-rw-r--r--  releasenotes/notes/ansible-callbacks-c3bfce1a5cae6b15.yaml | 5
-rw-r--r--  releasenotes/notes/dequeue-reporting-620f364309587304.yaml | 6
-rw-r--r--  tests/base.py | 57
-rw-r--r--  tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback.yaml | 4
-rw-r--r--  tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback_plugins/test_callback.py | 35
-rw-r--r--  tests/fixtures/config/ansible-callbacks/git/common-config/zuul.yaml | 21
-rw-r--r--  tests/fixtures/config/ansible-callbacks/main.yaml | 6
-rw-r--r--  tests/fixtures/config/broken-multi-tenant/git/common-config/playbooks/job.yaml | 2
-rw-r--r--  tests/fixtures/config/broken-multi-tenant/git/common-config/zuul.yaml | 23
-rw-r--r--  tests/fixtures/config/broken-multi-tenant/git/org_project1/README | 1
-rw-r--r--  tests/fixtures/config/broken-multi-tenant/git/org_project2/README | 1
-rw-r--r--  tests/fixtures/config/broken-multi-tenant/git/org_project2/zuul.yaml | 3
-rw-r--r--  tests/fixtures/config/broken-multi-tenant/git/org_project3/README | 1
-rw-r--r--  tests/fixtures/config/broken-multi-tenant/git/org_project3/zuul.yaml | 2
-rw-r--r--  tests/fixtures/config/broken-multi-tenant/main.yaml | 19
-rw-r--r--  tests/fixtures/layouts/dequeue-reporting.yaml | 85
-rw-r--r--  tests/fixtures/layouts/reporting-github.yaml | 3
-rw-r--r--  tests/fixtures/layouts/requirements-pagure.yaml | 20
-rw-r--r--  tests/fixtures/zuul-executor-ansible-callback.conf | 48
-rw-r--r--  tests/unit/test_executor.py | 45
-rw-r--r--  tests/unit/test_github_driver.py | 55
-rw-r--r--  tests/unit/test_pagure_driver.py | 28
-rw-r--r--  tests/unit/test_reporting.py | 137
-rw-r--r--  tests/unit/test_v3.py | 74
-rwxr-xr-x  tools/pip.sh | 10
-rw-r--r--  zuul/configloader.py | 7
-rw-r--r--  zuul/driver/github/githubreporter.py | 15
-rw-r--r--  zuul/driver/pagure/pagureconnection.py | 46
-rw-r--r--  zuul/executor/server.py | 25
-rw-r--r--  zuul/lib/gearworker.py | 6
-rw-r--r--  zuul/manager/__init__.py | 35
-rw-r--r--  zuul/model.py | 5
-rw-r--r--  zuul/reporter/__init__.py | 9
41 files changed, 894 insertions, 54 deletions
diff --git a/doc/source/discussion/components.rst b/doc/source/discussion/components.rst
index ba36c89c6..7cf715313 100644
--- a/doc/source/discussion/components.rst
+++ b/doc/source/discussion/components.rst
@@ -845,6 +845,31 @@ The following sections of ``zuul.conf`` are used by the executor:
Value to pass to `git config user.name
<https://git-scm.com/book/en/v2/Getting-Started-First-Time-Git-Setup>`_.
+.. attr:: ansible_callback "<name>"
+
+ Whitelists the ansible callback ``<name>``. Any attributes found in this section
+ will be added to the ``callback_<name>`` section in ansible.cfg.
+
+ An example of configuring the builtin mail callback: the following
+ configuration in zuul.conf
+
+ .. code-block:: ini
+
+ [ansible_callback "mail"]
+ to = user@example.org
+ sender = zuul@example.org
+
+ would generate the following in ansible.cfg:
+
+ .. code-block:: ini
+
+ [defaults]
+ callback_whitelist = mail
+
+ [callback_mail]
+ to = user@example.org
+ sender = zuul@example.org
+
Operation
~~~~~~~~~
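As a rough standalone illustration of the mapping described above (file names are
placeholders; the real implementation is the change to zuul/executor/server.py further
down in this diff), the translation from zuul.conf sections to ansible.cfg could be
sketched like this:

.. code-block:: python

   import configparser
   import re

   # Collect every [ansible_callback "<name>"] section from zuul.conf,
   # using the same regex the executor change below introduces.
   zuul_conf = configparser.ConfigParser()
   zuul_conf.read('zuul.conf')

   callbacks = {}
   for section in zuul_conf.sections():
       match = re.match(r'^ansible_callback ([\'\"]?)(.*)(\1)$', section, re.I)
       if match:
           callbacks[match.group(2)] = dict(zuul_conf.items(section))

   # Emit the whitelist plus one [callback_<name>] section per callback.
   with open('ansible.cfg', 'w') as f:
       f.write('[defaults]\n')
       f.write('callback_whitelist = %s\n' % ', '.join(callbacks))
       for name, options in callbacks.items():
           f.write('\n[callback_%s]\n' % name)
           for key, value in options.items():
               f.write('%s = %s\n' % (key, value))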
diff --git a/doc/source/howtos/nodepool_install.rst b/doc/source/howtos/nodepool_install.rst
index 7a4c6dd02..015ebbde1 100644
--- a/doc/source/howtos/nodepool_install.rst
+++ b/doc/source/howtos/nodepool_install.rst
@@ -39,6 +39,9 @@ are required.
# For openSUSE:
sudo zypper install -y $(bindep -b compile)
+ # For Ubuntu:
+ sudo apt-get install -y $(bindep -b compile)
+
# All:
sudo pip3 install .
popd
diff --git a/doc/source/howtos/ubuntu_setup.rst b/doc/source/howtos/ubuntu_setup.rst
new file mode 100644
index 000000000..bd9b68378
--- /dev/null
+++ b/doc/source/howtos/ubuntu_setup.rst
@@ -0,0 +1,54 @@
+:orphan:
+
+Ubuntu
+======
+
+We're going to be using Ubuntu on a cloud server for this installation.
+
+Prerequisites
+-------------
+
+- Port 9000 must be open and accessible from the Internet so that
+ GitHub can communicate with the Zuul web service.
+
+Login to your environment
+-------------------------
+
+Since we'll be using a cloud image for Ubuntu, our login user will
+be ``ubuntu``, which will also be the staging user for installation of
+Zuul and Nodepool.
+
+To get started, ssh to your machine as the ``ubuntu`` user.
+
+.. code-block:: shell
+
+ ssh ubuntu@<ip_address>
+
+Environment Setup
+-----------------
+
+First, make sure the system packages are up to date, and then install
+some packages which will be required later. Most of Zuul's binary
+dependencies are handled by the bindep program, but a few additional
+dependencies are needed to install bindep, and for other commands
+which we will use in these instructions.
+
+.. code-block:: shell
+
+ sudo apt-get update
+ sudo apt-get install python3-pip git
+
+ # Install bindep; the --user flag installs it only into the user
+ # profile, not globally.
+ pip3 install --user bindep
+
+Install Zookeeper
+-----------------
+
+Nodepool uses Zookeeper to keep track of information about the
+resources it manages, and it's also how Zuul makes requests to
+Nodepool for nodes.
+
+.. code-block:: console
+
+ sudo apt-get install -y zookeeper zookeeperd
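Not part of this change, but a quick way to confirm the ZooKeeper service is answering
before continuing is to list its root znodes with the kazoo client (the library Nodepool
itself uses); the host and port below are the Ubuntu package defaults:

.. code-block:: python

   from kazoo.client import KazooClient

   # Connect to the ZooKeeper instance started by the zookeeperd package.
   client = KazooClient(hosts='127.0.0.1:2181')
   client.start(timeout=10)

   # Listing the root znodes is enough to prove the service responds.
   print(client.get_children('/'))

   client.stop()
   client.close()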
diff --git a/doc/source/howtos/zuul-from-scratch.rst b/doc/source/howtos/zuul-from-scratch.rst
index 7e77e94ff..2c0965c6f 100644
--- a/doc/source/howtos/zuul-from-scratch.rst
+++ b/doc/source/howtos/zuul-from-scratch.rst
@@ -18,6 +18,7 @@ Follow the instructions below, depending on your server type.
* :doc:`fedora27_setup`
* :doc:`centos7_setup`
* :doc:`opensuse_leap15_setup`
+ * :doc:`ubuntu_setup`
Installation
------------
diff --git a/doc/source/howtos/zuul_install.rst b/doc/source/howtos/zuul_install.rst
index 09ded9d15..8f61651ac 100644
--- a/doc/source/howtos/zuul_install.rst
+++ b/doc/source/howtos/zuul_install.rst
@@ -41,6 +41,9 @@ required.
# For openSUSE:
$ zypper install -y $(bindep -b compile)
+ # For Ubuntu:
+ $ apt-get install -y $(bindep -b compile)
+
# All:
$ tools/install-js-tools.sh
diff --git a/doc/source/reference/drivers/github.rst b/doc/source/reference/drivers/github.rst
index c2c085eca..df649d3eb 100644
--- a/doc/source/reference/drivers/github.rst
+++ b/doc/source/reference/drivers/github.rst
@@ -366,8 +366,8 @@ itself. Status name, description, and context is taken from the pipeline.
.. attr:: check
If the reporter should utilize github's checks API to set the commit
- status, this must be set to ``in_progress``, ``success`` or ``failure``
- (depending on which status the reporter should report).
+ status, this must be set to ``in_progress``, ``success``, ``failure``
+ or ``cancelled`` (depending on which status the reporter should report).
.. attr:: comment
:default: true
diff --git a/doc/source/reference/drivers/pagure.rst b/doc/source/reference/drivers/pagure.rst
index 13a7ec272..7845ae79a 100644
--- a/doc/source/reference/drivers/pagure.rst
+++ b/doc/source/reference/drivers/pagure.rst
@@ -82,6 +82,12 @@ The supported options in ``zuul.conf`` connections are:
Path to the Pagure Git repositories. Used to clone.
+ .. attr:: app_name
+ :default: Zuul
+
+ Display name that will appear as the application name in front
+ of each CI status flag.
+
.. attr:: source_whitelist
:default: ''
diff --git a/doc/source/reference/pipeline_def.rst b/doc/source/reference/pipeline_def.rst
index e2c2d9587..da13570f7 100644
--- a/doc/source/reference/pipeline_def.rst
+++ b/doc/source/reference/pipeline_def.rst
@@ -219,6 +219,13 @@ success, the pipeline reports back to Gerrit with ``Verified`` vote of
The introductory text in reports when an item is dequeued
without running any jobs. Empty by default.
+ .. attr:: dequeue-message
+ :default: Build canceled.
+
+ The introductory text in reports when an item is dequeued.
+ The dequeue message only applies if the item was dequeued without
+ a result.
+
.. attr:: footer-message
Supplies additional information after test results. Useful for
@@ -354,6 +361,12 @@ success, the pipeline reports back to Gerrit with ``Verified`` vote of
These reporters describe what Zuul should do when a pipeline is
disabled. See ``disable-after-consecutive-failures``.
+ .. attr:: dequeue
+
+ These reporters describe what Zuul should do if an item is
+ dequeued. The dequeue reporters will only apply if the item
+ was dequeued without a result.
+
The following options can be used to alter Zuul's behavior to
mitigate situations in which jobs are failing frequently (perhaps
due to a problem with an external dependency, or unusually high
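The dequeue behaviour documented above (dequeue reporters only fire when an item leaves
the pipeline without a result) is implemented in zuul/manager/__init__.py later in this
change; as an illustrative sketch only (the class and function below are made up for the
example and are not Zuul API):

.. code-block:: python

   class Item:
       """Toy stand-in for a queue item."""
       def __init__(self, result=None, live=True):
           self.result = result   # e.g. None, 'SUCCESS' or 'FAILURE'
           self.live = live

   def should_report_dequeue(item):
       # Only live items that never got a result trigger the dequeue
       # reporters; items that already reported success/failure do not.
       return item.live and item.result is None

   assert should_report_dequeue(Item())                      # canceled mid-run
   assert not should_report_dequeue(Item(result='SUCCESS'))  # already reported
   assert not should_report_dequeue(Item(live=False))        # non-live item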
diff --git a/releasenotes/notes/ansible-callbacks-c3bfce1a5cae6b15.yaml b/releasenotes/notes/ansible-callbacks-c3bfce1a5cae6b15.yaml
new file mode 100644
index 000000000..9d1c3b8da
--- /dev/null
+++ b/releasenotes/notes/ansible-callbacks-c3bfce1a5cae6b15.yaml
@@ -0,0 +1,5 @@
+---
+features:
+ - |
+ Zuul now supports whitelisting and configuring ansible callbacks with
+ :attr:`ansible_callback "<name>"`.
diff --git a/releasenotes/notes/dequeue-reporting-620f364309587304.yaml b/releasenotes/notes/dequeue-reporting-620f364309587304.yaml
new file mode 100644
index 000000000..9cd28ad52
--- /dev/null
+++ b/releasenotes/notes/dequeue-reporting-620f364309587304.yaml
@@ -0,0 +1,6 @@
+---
+features:
+ - |
+ Pipelines now provide a :attr:`pipeline.dequeue` reporter action so that
+ reporters may run whenever an item is dequeued. The dequeue reporters will
+ only apply if the item wasn't a success or failure.
diff --git a/tests/base.py b/tests/base.py
index 4a4e536a3..a697be384 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -1235,19 +1235,33 @@ class FakePagurePullRequest(object):
return self._getPullRequestEvent(
'pull-request.tag.added', pull_data_field='pull_request')
- def getPullRequestStatusSetEvent(self, status):
+ def getPullRequestStatusSetEvent(self, status, username="zuul"):
self.addFlag(
- status, "https://url", "Build %s" % status)
+ status, "https://url", "Build %s" % status, username)
return self._getPullRequestEvent('pull-request.flag.added')
- def addFlag(self, status, url, comment, username="Pingou"):
+ def insertFlag(self, flag):
+ to_pop = None
+ for i, _flag in enumerate(self.flags):
+ if _flag['uid'] == flag['uid']:
+ to_pop = i
+ if to_pop is not None:
+ self.flags.pop(to_pop)
+ self.flags.insert(0, flag)
+
+ def addFlag(self, status, url, comment, username="zuul"):
+ flag_uid = "%s-%s-%s" % (username, self.number, self.project)
flag = {
- "username": username,
+ "username": "Zuul CI",
+ "user": {
+ "name": username
+ },
+ "uid": flag_uid[:32],
"comment": comment,
"status": status,
"url": url
}
- self.flags.insert(0, flag)
+ self.insertFlag(flag)
self._updateTimeStamp()
def editInitialComment(self, initial_comment):
@@ -1405,13 +1419,18 @@ class FakePagureAPIClient(pagureconnection.PagureAPIClient):
pr.is_merged = True
return {}, 200, "", "POST"
+ match = re.match(r'.+/api/0/-/whoami$', url)
+ if match:
+ return {"username": "zuul"}, 200, "", "POST"
+
if not params:
return self.gen_error("POST")
match = re.match(r'.+/api/0/(.+)/pull-request/(\d+)/flag$', url)
if match:
pr = self._get_pr(match)
- pr.flags.insert(0, params)
+ params['user'] = {"name": "zuul"}
+ pr.insertFlag(params)
match = re.match(r'.+/api/0/(.+)/pull-request/(\d+)/comment$', url)
if match:
@@ -1438,9 +1457,12 @@ class FakePagureConnection(pagureconnection.PagureConnection):
self.cloneurl = self.upstream_root
def get_project_api_client(self, project):
- return FakePagureAPIClient(
+ client = FakePagureAPIClient(
self.baseurl, None, project,
pull_requests_db=self.pull_requests)
+ if not self.username:
+ self.set_my_username(client)
+ return client
def get_project_webhook_token(self, project):
return 'fake_webhook_token-%s' % project
@@ -2663,8 +2685,11 @@ class RecordingExecutorServer(zuul.executor.server.ExecutorServer):
"""
builds = self.running_builds[:]
- self.log.debug("Releasing build %s (%s)" % (regex,
- len(self.running_builds)))
+ if len(builds) == 0:
+ self.log.debug('No running builds to release')
+ return
+
+ self.log.debug("Releasing build %s (%s)" % (regex, len(builds)))
for build in builds:
if not regex or re.match(regex, build.name):
self.log.debug("Releasing build %s" %
@@ -2674,7 +2699,7 @@ class RecordingExecutorServer(zuul.executor.server.ExecutorServer):
self.log.debug("Not releasing build %s" %
(build.parameters['zuul']['build']))
self.log.debug("Done releasing builds %s (%s)" %
- (regex, len(self.running_builds)))
+ (regex, len(builds)))
def executeJob(self, job):
build = FakeBuild(self, job)
@@ -4376,7 +4401,9 @@ class ZuulTestCase(BaseTestCase):
def waitUntilSettled(self, msg="", matcher=None) -> None:
self.log.debug("Waiting until settled... (%s)", msg)
start = time.time()
+ i = 0
while True:
+ i = i + 1
if time.time() - start > self.wait_timeout:
self.log.error("Timeout waiting for Zuul to settle")
self.log.error("Queue status:")
@@ -4393,9 +4420,10 @@ class ZuulTestCase(BaseTestCase):
self.log.error("[Sched: %s] Merge client jobs: %s" %
(app.sched, app.sched.merger.jobs,))
raise Exception("Timeout waiting for Zuul to settle")
- # Make sure no new events show up while we're checking
+ # Make sure no new events show up while we're checking
self.executor_server.lock.acquire()
+
# have all build states propogated to zuul?
if self.__haveAllBuildsReported(matcher):
# Join ensures that the queue is empty _and_ events have been
@@ -4416,7 +4444,8 @@ class ZuulTestCase(BaseTestCase):
self.scheds.execute(
lambda app: app.sched.run_handler_lock.release())
self.executor_server.lock.release()
- self.log.debug("...settled. (%s)", msg)
+ self.log.debug("...settled after %.3f s / %s loops (%s)",
+ time.time() - start, i, msg)
self.logState()
return
self.scheds.execute(
@@ -4665,6 +4694,10 @@ class ZuulTestCase(BaseTestCase):
completed.
"""
+ if not self.history:
+ self.log.debug("Build history: no builds ran")
+ return
+
self.log.debug("Build history:")
for build in self.history:
self.log.debug(build)
diff --git a/tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback.yaml b/tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback.yaml
new file mode 100644
index 000000000..50bbbbfc5
--- /dev/null
+++ b/tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback.yaml
@@ -0,0 +1,4 @@
+- hosts: localhost
+ gather_facts: smart
+ tasks:
+ - command: echo test
diff --git a/tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback_plugins/test_callback.py b/tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback_plugins/test_callback.py
new file mode 100644
index 000000000..39ff7cd49
--- /dev/null
+++ b/tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback_plugins/test_callback.py
@@ -0,0 +1,35 @@
+from ansible.plugins.callback import CallbackBase
+
+import os
+
+DOCUMENTATION = '''
+ options:
+ file_name:
+ description: ""
+ ini:
+ - section: callback_test_callback
+ key: file_name
+ required: True
+ type: string
+'''
+
+
+class CallbackModule(CallbackBase):
+ CALLBACK_VERSION = 1.0
+ CALLBACK_NEEDS_WHITELIST = True
+
+ def __init__(self):
+ super(CallbackModule, self).__init__()
+
+ def set_options(self, task_keys=None, var_options=None, direct=None):
+ super(CallbackModule, self).set_options(task_keys=task_keys,
+ var_options=var_options,
+ direct=direct)
+
+ self.file_name = self.get_option('file_name')
+
+ def v2_on_any(self, *args, **kwargs):
+ path = os.path.join(os.path.dirname(__file__), self.file_name)
+ self._display.display("Touching file: {}".format(path))
+ with open(path, 'w'):
+ pass
diff --git a/tests/fixtures/config/ansible-callbacks/git/common-config/zuul.yaml b/tests/fixtures/config/ansible-callbacks/git/common-config/zuul.yaml
new file mode 100644
index 000000000..4acf6efb8
--- /dev/null
+++ b/tests/fixtures/config/ansible-callbacks/git/common-config/zuul.yaml
@@ -0,0 +1,21 @@
+- pipeline:
+ name: promote
+ manager: supercedent
+ post-review: true
+ trigger:
+ gerrit:
+ - event: change-merged
+
+- job:
+ name: callback-test
+ parent: null
+ run: playbooks/callback.yaml
+ nodeset:
+ nodes:
+ - name: ubuntu-xenial
+ label: ubuntu-xenial
+
+- project:
+ promote:
+ jobs:
+ - callback-test
diff --git a/tests/fixtures/config/ansible-callbacks/main.yaml b/tests/fixtures/config/ansible-callbacks/main.yaml
new file mode 100644
index 000000000..9d01f542f
--- /dev/null
+++ b/tests/fixtures/config/ansible-callbacks/main.yaml
@@ -0,0 +1,6 @@
+- tenant:
+ name: tenant-one
+ source:
+ gerrit:
+ config-projects:
+ - common-config
diff --git a/tests/fixtures/config/broken-multi-tenant/git/common-config/playbooks/job.yaml b/tests/fixtures/config/broken-multi-tenant/git/common-config/playbooks/job.yaml
new file mode 100644
index 000000000..f679dceae
--- /dev/null
+++ b/tests/fixtures/config/broken-multi-tenant/git/common-config/playbooks/job.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/broken-multi-tenant/git/common-config/zuul.yaml b/tests/fixtures/config/broken-multi-tenant/git/common-config/zuul.yaml
new file mode 100644
index 000000000..406a64248
--- /dev/null
+++ b/tests/fixtures/config/broken-multi-tenant/git/common-config/zuul.yaml
@@ -0,0 +1,23 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ Verified: 1
+ failure:
+ gerrit:
+ Verified: -1
+
+- job:
+ name: base
+ parent: null
+ run: playbooks/job.yaml
+
+- project:
+ name: ^.*
+ check:
+ jobs:
+ - base
diff --git a/tests/fixtures/config/broken-multi-tenant/git/org_project1/README b/tests/fixtures/config/broken-multi-tenant/git/org_project1/README
new file mode 100644
index 000000000..9daeafb98
--- /dev/null
+++ b/tests/fixtures/config/broken-multi-tenant/git/org_project1/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/broken-multi-tenant/git/org_project2/README b/tests/fixtures/config/broken-multi-tenant/git/org_project2/README
new file mode 100644
index 000000000..9daeafb98
--- /dev/null
+++ b/tests/fixtures/config/broken-multi-tenant/git/org_project2/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/broken-multi-tenant/git/org_project2/zuul.yaml b/tests/fixtures/config/broken-multi-tenant/git/org_project2/zuul.yaml
new file mode 100644
index 000000000..40f68b640
--- /dev/null
+++ b/tests/fixtures/config/broken-multi-tenant/git/org_project2/zuul.yaml
@@ -0,0 +1,3 @@
+- job:
+ name: child-job
+ parent: parent-job
diff --git a/tests/fixtures/config/broken-multi-tenant/git/org_project3/README b/tests/fixtures/config/broken-multi-tenant/git/org_project3/README
new file mode 100644
index 000000000..9daeafb98
--- /dev/null
+++ b/tests/fixtures/config/broken-multi-tenant/git/org_project3/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/broken-multi-tenant/git/org_project3/zuul.yaml b/tests/fixtures/config/broken-multi-tenant/git/org_project3/zuul.yaml
new file mode 100644
index 000000000..beef1faa0
--- /dev/null
+++ b/tests/fixtures/config/broken-multi-tenant/git/org_project3/zuul.yaml
@@ -0,0 +1,2 @@
+- job:
+ name: parent-job
diff --git a/tests/fixtures/config/broken-multi-tenant/main.yaml b/tests/fixtures/config/broken-multi-tenant/main.yaml
new file mode 100644
index 000000000..053056e9f
--- /dev/null
+++ b/tests/fixtures/config/broken-multi-tenant/main.yaml
@@ -0,0 +1,19 @@
+- tenant:
+ name: tenant-one
+ source:
+ gerrit:
+ config-projects:
+ - common-config
+ untrusted-projects:
+ - org/project2
+ - org/project3
+
+- tenant:
+ name: tenant-two
+ source:
+ gerrit:
+ config-projects:
+ - common-config
+ untrusted-projects:
+ - org/project1
+ - org/project2
diff --git a/tests/fixtures/layouts/dequeue-reporting.yaml b/tests/fixtures/layouts/dequeue-reporting.yaml
new file mode 100644
index 000000000..52afe9a5a
--- /dev/null
+++ b/tests/fixtures/layouts/dequeue-reporting.yaml
@@ -0,0 +1,85 @@
+- pipeline:
+ name: check
+ manager: independent
+ failure-message: Build failed (check)
+ success-message: Build succeeded (check)
+ dequeue-message: Build canceled (check)
+ start-message: Build started (check)
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ Verified: 1
+ failure:
+ gerrit:
+ Verified: -1
+ start:
+ gerrit:
+ Verified: 0
+ dequeue:
+ gerrit:
+ Verified: 0
+
+- pipeline:
+ name: gate
+ manager: dependent
+ supercedes: check
+ failure-message: Build failed (gate)
+ success-message: Build succeeded (gate)
+ dequeue-message: Build canceled (gate)
+ start-message: Build started (gate)
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - Approved: 1
+ success:
+ gerrit:
+ Verified: 2
+ submit: true
+ failure:
+ gerrit:
+ Verified: -2
+ start:
+ gerrit:
+ Verified: 0
+ dequeue:
+ gerrit:
+ Verified: 0
+ precedence: high
+
+- job:
+ name: base
+ parent: null
+ run: playbooks/base.yaml
+
+- job:
+ name: project-test1
+ run: playbooks/project-test1.yaml
+
+- job:
+ name: project-test2
+ run: playbooks/project-test2.yaml
+
+- job:
+ name: project-merge
+ hold-following-changes: true
+ run: playbooks/project-merge.yaml
+
+- project:
+ name: org/project
+ check:
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies: project-merge
+ - project-test2:
+ dependencies: project-merge
+ gate:
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies: project-merge
+ - project-test2:
+ dependencies: project-merge
diff --git a/tests/fixtures/layouts/reporting-github.yaml b/tests/fixtures/layouts/reporting-github.yaml
index c35207af1..a9cbf8fcb 100644
--- a/tests/fixtures/layouts/reporting-github.yaml
+++ b/tests/fixtures/layouts/reporting-github.yaml
@@ -96,6 +96,9 @@
failure:
github:
check: failure
+ dequeue:
+ github:
+ check: cancelled
- pipeline:
name: gate
diff --git a/tests/fixtures/layouts/requirements-pagure.yaml b/tests/fixtures/layouts/requirements-pagure.yaml
index 998fa7fdb..4a95d8f08 100644
--- a/tests/fixtures/layouts/requirements-pagure.yaml
+++ b/tests/fixtures/layouts/requirements-pagure.yaml
@@ -67,6 +67,18 @@
status: 'success'
- pipeline:
+ name: require-flag
+ manager: independent
+ require:
+ pagure:
+ status: success
+ trigger:
+ pagure:
+ - event: pg_pull_request
+ action: status
+ status: success
+
+- pipeline:
name: require-trigger-pg-closed-merged
precedence: high
manager: independent
@@ -125,4 +137,10 @@
name: org/project6
require-trigger-pg-closed-merged:
jobs:
- - project-test
\ No newline at end of file
+ - project-test
+
+- project:
+ name: org/project7
+ require-flag:
+ jobs:
+ - project-test
diff --git a/tests/fixtures/zuul-executor-ansible-callback.conf b/tests/fixtures/zuul-executor-ansible-callback.conf
new file mode 100644
index 000000000..cf4592f83
--- /dev/null
+++ b/tests/fixtures/zuul-executor-ansible-callback.conf
@@ -0,0 +1,48 @@
+# Checks to make sure no key is configured in the
+# [defaults] section of ansible.cfg; setting the
+# same key twice would cause an error.
+
+# An equal sign in the section name will not be treated as a configuration field by ansible
+[ansible_callback "nocows = True"]
+[ansible_callback "nocows = False"]
+# \n will not be treated as a newline character
+[ansible_callback "\nnocows = True"]
+[ansible_callback "\nnocows = False"]
+# A single '%' sign would cause an error if interpolation syntax is enabled
+[ansible_callback "ansible_interpolation"]
+test_field = test-%%-value
+
+[ansible_callback "test_callback"]
+file_name = callback-success
+
+[gearman]
+server=127.0.0.1
+
+[statsd]
+# note, use 127.0.0.1 rather than localhost to avoid getting ipv6
+# see: https://github.com/jsocol/pystatsd/issues/61
+server=127.0.0.1
+
+[scheduler]
+tenant_config=main.yaml
+
+[merger]
+git_dir=/tmp/zuul-test/merger-git
+git_user_email=zuul@example.com
+git_user_name=zuul
+
+[executor]
+git_dir=/tmp/zuul-test/executor-git
+
+[connection gerrit]
+driver=gerrit
+server=review.example.com
+user=jenkins
+sshkey=fake_id_rsa_path
+
+[connection smtp]
+driver=smtp
+server=localhost
+port=25
+default_from=zuul@example.com
+default_to=you@example.com
diff --git a/tests/unit/test_executor.py b/tests/unit/test_executor.py
index c0fbc5546..f16892035 100644
--- a/tests/unit/test_executor.py
+++ b/tests/unit/test_executor.py
@@ -15,6 +15,7 @@
import json
import logging
+import configparser
import multiprocessing
import os
import time
@@ -816,6 +817,50 @@ class TestExecutorFacts(AnsibleZuulTestCase):
self.assertEqual(18, len(date_time))
+class TestAnsibleCallbackConfigs(AnsibleZuulTestCase):
+
+ config_file = 'zuul-executor-ansible-callback.conf'
+ tenant_config_file = 'config/ansible-callbacks/main.yaml'
+
+ def test_ansible_callback_config(self):
+ self.executor_server.keep_jobdir = True
+ A = self.fake_gerrit.addFakeChange('common-config', 'master', 'A')
+ self.fake_gerrit.addEvent(A.getChangeMergedEvent())
+ self.waitUntilSettled()
+
+ callbacks = [
+ 'callback_test_callback',
+ 'callback_nocows = True',
+ 'callback_nocows = False',
+ 'callback_\\nnocows = True',
+ 'callback_\\nnocows = False',
+ 'callback_ansible_interpolation'
+ ]
+
+ p = os.path.join(self.getJobFromHistory('callback-test').jobdir.root,
+ 'ansible/playbook_0/ansible.cfg')
+ self.assertEqual(self.getJobFromHistory('callback-test').result,
+ 'SUCCESS')
+
+ c = configparser.ConfigParser(interpolation=None)
+ c.read(p)
+ for callback in callbacks:
+ self.assertIn(callback, c.sections())
+ self.assertIn('test_field', c['callback_ansible_interpolation'])
+ self.assertIn('test-%-value',
+ c['callback_ansible_interpolation']['test_field'])
+
+ self.assertIn('file_name', c['callback_test_callback'])
+ self.assertEqual('callback-success',
+ c['callback_test_callback']['file_name'])
+ callback_result_file = os.path.join(
+ self.getJobFromHistory('callback-test').jobdir.root,
+ 'trusted/project_0/review.example.com/',
+ 'common-config/playbooks/callback_plugins/',
+ c['callback_test_callback']['file_name'])
+ self.assertTrue(os.path.isfile(callback_result_file))
+
+
class TestExecutorEnvironment(AnsibleZuulTestCase):
tenant_config_file = 'config/zuul-environment-filter/main.yaml'
diff --git a/tests/unit/test_github_driver.py b/tests/unit/test_github_driver.py
index 0558c9994..39c33440a 100644
--- a/tests/unit/test_github_driver.py
+++ b/tests/unit/test_github_driver.py
@@ -1685,6 +1685,61 @@ class TestGithubAppDriver(ZuulGithubAppTestCase):
self.assertTrue(A.is_merged)
@simple_layout("layouts/reporting-github.yaml", driver="github")
+ def test_reporting_checks_api_dequeue(self):
+ "Test that a dequeued change will be reported back to the check run"
+ project = "org/project3"
+ github = self.fake_github.getGithubClient(None)
+
+ client = zuul.rpcclient.RPCClient(
+ "127.0.0.1", self.gearman_server.port
+ )
+ self.addCleanup(client.shutdown)
+
+ self.executor_server.hold_jobs_in_build = True
+ A = self.fake_github.openFakePullRequest(project, "master", "A")
+ self.fake_github.emitEvent(A.getPullRequestOpenedEvent())
+ self.waitUntilSettled()
+
+ # We should have a pending check for the head sha
+ self.assertIn(
+ A.head_sha, github.repo_from_project(project)._commits.keys())
+ check_runs = self.fake_github.getCommitChecks(project, A.head_sha)
+
+ self.assertEqual(1, len(check_runs))
+ check_run = check_runs[0]
+
+ self.assertEqual("tenant-one/checks-api-reporting", check_run["name"])
+ self.assertEqual("in_progress", check_run["status"])
+ self.assertThat(
+ check_run["output"]["summary"],
+ MatchesRegex(r'.*Starting checks-api-reporting jobs.*', re.DOTALL)
+ )
+
+ # Use the client to dequeue the pending change
+ client.dequeue(
+ tenant="tenant-one",
+ pipeline="checks-api-reporting",
+ project="org/project3",
+ change="{},{}".format(A.number, A.head_sha),
+ ref=None,
+ )
+ self.waitUntilSettled()
+
+ # We should now have a cancelled check run for the head sha
+ check_runs = self.fake_github.getCommitChecks(project, A.head_sha)
+ self.assertEqual(1, len(check_runs))
+ check_run = check_runs[0]
+
+ self.assertEqual("tenant-one/checks-api-reporting", check_run["name"])
+ self.assertEqual("completed", check_run["status"])
+ self.assertEqual("cancelled", check_run["conclusion"])
+ self.assertThat(
+ check_run["output"]["summary"],
+ MatchesRegex(r'.*Build canceled.*', re.DOTALL)
+ )
+ self.assertIsNotNone(check_run["completed_at"])
+
+ @simple_layout("layouts/reporting-github.yaml", driver="github")
def test_update_non_existing_check_run(self):
project = "org/project3"
github = self.fake_github.getGithubClient(None)
diff --git a/tests/unit/test_pagure_driver.py b/tests/unit/test_pagure_driver.py
index 04daee45a..0393a13d0 100644
--- a/tests/unit/test_pagure_driver.py
+++ b/tests/unit/test_pagure_driver.py
@@ -61,9 +61,8 @@ class TestPagureDriver(ZuulTestCase):
self.assertThat(
A.comments[1]['comment'],
MatchesRegex(r'.*\[project-test2 \]\(.*\).*', re.DOTALL))
- self.assertEqual(2, len(A.flags))
+ self.assertEqual(1, len(A.flags))
self.assertEqual('success', A.flags[0]['status'])
- self.assertEqual('pending', A.flags[1]['status'])
@simple_layout('layouts/basic-pagure.yaml', driver='pagure')
def test_pull_request_updated(self):
@@ -493,6 +492,31 @@ class TestPagureDriver(ZuulTestCase):
self.assertEqual(1, len(self.history))
@simple_layout('layouts/requirements-pagure.yaml', driver='pagure')
+ def test_flag_require(self):
+
+ A = self.fake_pagure.openFakePullRequest(
+ 'org/project7', 'master', 'A')
+
+ # CI status from other CIs must not be handled
+ self.fake_pagure.emitEvent(
+ A.getPullRequestStatusSetEvent("success", username="notzuul"))
+ self.waitUntilSettled()
+ self.assertEqual(0, len(self.history))
+ self.assertEqual(1, len(A.flags))
+
+ self.fake_pagure.emitEvent(
+ A.getPullRequestStatusSetEvent("failure"))
+ self.waitUntilSettled()
+ self.assertEqual(0, len(self.history))
+ self.assertEqual(2, len(A.flags))
+
+ self.fake_pagure.emitEvent(
+ A.getPullRequestStatusSetEvent("success"))
+ self.waitUntilSettled()
+ self.assertEqual(1, len(self.history))
+ self.assertEqual(2, len(A.flags))
+
+ @simple_layout('layouts/requirements-pagure.yaml', driver='pagure')
def test_pull_request_closed(self):
A = self.fake_pagure.openFakePullRequest(
diff --git a/tests/unit/test_reporting.py b/tests/unit/test_reporting.py
new file mode 100644
index 000000000..dab041244
--- /dev/null
+++ b/tests/unit/test_reporting.py
@@ -0,0 +1,137 @@
+# Copyright 2020 BMW Group
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import zuul.rpcclient
+
+from tests.base import ZuulTestCase, simple_layout
+
+
+class TestReporting(ZuulTestCase):
+ tenant_config_file = "config/single-tenant/main.yaml"
+
+ @simple_layout("layouts/dequeue-reporting.yaml")
+ def test_dequeue_reporting(self):
+ """Check that explicitly dequeued items are reported as dequeued"""
+
+ # We use the rpcclient to explicitly dequeue the item
+ client = zuul.rpcclient.RPCClient(
+ "127.0.0.1", self.gearman_server.port
+ )
+ self.addCleanup(client.shutdown)
+
+ self.executor_server.hold_jobs_in_build = True
+ A = self.fake_gerrit.addFakeChange("org/project", "master", "A")
+ A.addApproval("Code-Review", 2)
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ client.dequeue(
+ tenant="tenant-one",
+ pipeline="check",
+ project="org/project",
+ change="1,1",
+ ref=None,
+ )
+ self.waitUntilSettled()
+
+ tenant = self.scheds.first.sched.abide.tenants.get('tenant-one')
+ check_pipeline = tenant.layout.pipelines['check']
+
+ # A should have been reported two times: start, cancel
+ self.assertEqual(2, A.reported)
+ self.assertEqual(2, len(A.messages))
+ self.assertIn("Build started (check)", A.messages[0])
+ self.assertIn("Build canceled (check)", A.messages[1])
+ # There shouldn't be any successful items
+ self.assertEqual(len(check_pipeline.getAllItems()), 0)
+ # But one canceled
+ self.assertEqual(self.countJobResults(self.history, "ABORTED"), 1)
+
+ @simple_layout("layouts/dequeue-reporting.yaml")
+ def test_dequeue_reporting_gate_reset(self):
+ """Check that a gate reset is not reported as dequeued"""
+
+ A = self.fake_gerrit.addFakeChange("org/project", "master", "A")
+ B = self.fake_gerrit.addFakeChange("org/project", "master", "B")
+ A.addApproval("Code-Review", 2)
+ B.addApproval("Code-Review", 2)
+
+ self.executor_server.failJob("project-test1", A)
+
+ self.fake_gerrit.addEvent(A.addApproval("Approved", 1))
+ self.fake_gerrit.addEvent(B.addApproval("Approved", 1))
+ self.waitUntilSettled()
+
+ # None of the items should be reported as dequeued, only success or
+ # failure
+ self.assertEqual(A.data["status"], "NEW")
+ self.assertEqual(B.data["status"], "MERGED")
+ self.assertEqual(A.reported, 2)
+ self.assertEqual(B.reported, 2)
+
+ self.assertIn("Build started (gate)", A.messages[0])
+ self.assertIn("Build failed (gate)", A.messages[1])
+ self.assertIn("Build started (gate)", B.messages[0])
+ self.assertIn("Build succeeded (gate)", B.messages[1])
+
+ @simple_layout("layouts/dequeue-reporting.yaml")
+ def test_dequeue_reporting_supercedes(self):
+ """Test that a superseded change is reported as dequeued"""
+
+ self.executor_server.hold_jobs_in_build = True
+
+ A = self.fake_gerrit.addFakeChange("org/project", "master", "A")
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ A.addApproval("Code-Review", 2)
+ self.fake_gerrit.addEvent(A.addApproval("Approved", 1))
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(4, A.reported)
+
+ self.assertIn("Build started (check)", A.messages[0])
+ self.assertIn("Build canceled (check)", A.messages[1])
+ self.assertIn("Build started (gate)", A.messages[2])
+ self.assertIn("Build succeeded (gate)", A.messages[3])
+
+ @simple_layout("layouts/dequeue-reporting.yaml")
+ def test_dequeue_reporting_new_patchset(self):
+ "Test that a change superseded by a new patchset is reported as dequeued"
+
+ self.executor_server.hold_jobs_in_build = True
+
+ A = self.fake_gerrit.addFakeChange("org/project", "master", "A")
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ self.assertEqual(1, len(self.builds))
+
+ A.addPatchset()
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(2))
+ self.waitUntilSettled()
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(4, A.reported)
+
+ self.assertIn("Build started (check)", A.messages[0])
+ self.assertIn("Build canceled (check)", A.messages[1])
+ self.assertIn("Build started (check)", A.messages[2])
+ self.assertIn("Build succeeded (check)", A.messages[3])
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index 7753b9633..a80675544 100644
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -3358,6 +3358,80 @@ class TestBrokenConfig(ZuulTestCase):
"A should have failed the check pipeline")
+class TestBrokenMultiTenantConfig(ZuulTestCase):
+ # Test we can deal with a broken multi-tenant config
+
+ tenant_config_file = 'config/broken-multi-tenant/main.yaml'
+
+ def test_loading_errors(self):
+ # This regression test came about when we discovered the following:
+
+ # * We cache configuration objects if they load without error
+ # in their first tenant; that means that they can show up as
+ # errors in later tenants, but as long as those other
+ # tenants aren't proposing changes to that repo (which is
+ # unlikely in this situation; this usually arises if the
+ # tenant just wants to use some foreign jobs), users won't
+ # be blocked by the error.
+ #
+ # * If a merge job for a dynamic config change arrives out of
+ # order, we will build the new configuration and if there
+ # are errors, we will compare it to the previous
+ # configuration to determine if they are relevant, but that
+ # caused an error since the previous layout had not been
+ # calculated yet. It's pretty hard to end up with
+ # irrelevant errors except by virtue of the first point
+ # above, which is why this test relies on a second tenant.
+
+ # This test has two tenants. The first loads project2 and
+ # project3 without errors, and all config objects are cached.
+ # The second tenant loads only project1 and project2.
+ # Project2 references a job that is defined in project3, so
+ # the tenant loads with an error, but proceeds.
+
+ # Don't run any merge jobs, so we can run them out of order.
+ self.gearman_server.hold_merge_jobs_in_queue = True
+
+ # Create a first change which modifies the config (and
+ # therefore will require a merge job).
+ in_repo_conf = textwrap.dedent(
+ """
+ - job: {'name': 'foo'}
+ """)
+ file_dict = {'.zuul.yaml': in_repo_conf}
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A',
+ files=file_dict)
+
+ # Create a second change which also modifies the config.
+ B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B',
+ files=file_dict)
+ B.setDependsOn(A, 1)
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ # There should be a merge job for each change.
+ self.assertEqual(len(self.scheds.first.sched.merger.jobs), 2)
+
+ jobs = [job for job in self.gearman_server.getQueue()
+ if job.name.startswith(b'merger:')]
+ # Release the second merge job.
+ jobs[-1].waiting = False
+ self.gearman_server.wakeConnections()
+ self.waitUntilSettled()
+
+ # At this point we should still be waiting on the first
+ # change's merge job.
+ self.assertHistory([])
+
+ # Proceed.
+ self.gearman_server.hold_merge_jobs_in_queue = False
+ self.gearman_server.release()
+ self.waitUntilSettled()
+ self.assertHistory([
+ dict(name='base', result='SUCCESS', changes='1,1 2,1'),
+ ])
+
+
class TestProjectKeys(ZuulTestCase):
# Test that we can generate project keys
diff --git a/tools/pip.sh b/tools/pip.sh
index 469d119eb..7db784690 100755
--- a/tools/pip.sh
+++ b/tools/pip.sh
@@ -23,7 +23,15 @@ then
pip install nodeenv
# Initialize nodeenv and tell it to re-use the currently active virtualenv
# TODO(jeblair): remove node version pin. upath 1.0.4 objects to node >9.
- nodeenv --python-virtualenv -n 10.16.0
+ attempts=0
+ until nodeenv --python-virtualenv -n 10.16.0; do
+ ((attempts++))
+ if [[ $attempts > 2 ]]
+ then
+ echo "Failed creating nodeenv"
+ exit 1
+ fi
+ done
# Use -g because inside of the virtualenv '-g' means 'install into the'
# virtualenv - as opposed to installing into the local node_modules.
# Avoid writing a package-lock.json file since we don't use it.
diff --git a/zuul/configloader.py b/zuul/configloader.py
index f985804c6..b999bd0c4 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -1151,6 +1151,7 @@ class PipelineParser(object):
'merge-failure': 'merge_failure_actions',
'no-jobs': 'no_jobs_actions',
'disabled': 'disabled_actions',
+ 'dequeue': 'dequeue_actions',
}
def __init__(self, pcontext):
@@ -1200,6 +1201,7 @@ class PipelineParser(object):
'merge-failure-message': str,
'no-jobs-message': str,
'footer-message': str,
+ 'dequeue-message': str,
'dequeue-on-new-patchset': bool,
'ignore-dependencies': bool,
'post-review': bool,
@@ -1218,7 +1220,7 @@ class PipelineParser(object):
pipeline['reject'] = self.getDriverSchema('reject')
pipeline['trigger'] = vs.Required(self.getDriverSchema('trigger'))
for action in ['enqueue', 'start', 'success', 'failure',
- 'merge-failure', 'no-jobs', 'disabled']:
+ 'merge-failure', 'no-jobs', 'disabled', 'dequeue']:
pipeline[action] = self.getDriverSchema('reporter')
return vs.Schema(pipeline)
@@ -1247,6 +1249,9 @@ class PipelineParser(object):
"Starting {pipeline.name} jobs.")
pipeline.enqueue_message = conf.get('enqueue-message', "")
pipeline.no_jobs_message = conf.get('no-jobs-message', "")
+ pipeline.dequeue_message = conf.get(
+ "dequeue-message", "Build canceled."
+ )
pipeline.dequeue_on_new_patchset = conf.get(
'dequeue-on-new-patchset', True)
pipeline.ignore_dependencies = conf.get(
diff --git a/zuul/driver/github/githubreporter.py b/zuul/driver/github/githubreporter.py
index 2c9ccec34..a5a509623 100644
--- a/zuul/driver/github/githubreporter.py
+++ b/zuul/driver/github/githubreporter.py
@@ -213,11 +213,16 @@ class GithubReporter(BaseReporter):
pr_number = item.change.number
sha = item.change.patchset
- # Check if the buildset is finished or not. In case it's finished, we
- # must provide additional parameters when updating the check_run via
- # the Github API later on.
- completed = item.current_build_set.result is not None
status = self._check
+ # We declare an item as completed if it either has a result
+ # (success|failure) or a dequeue reporter is called (cancelled in case
+ # of the Github checks API). In the latter case, the item might or might
+ # not have a result, but we still must set a conclusion on the check
+ # run. Thus, we cannot rely on the buildset's result alone, but must also
+ # check the state the reporter is going to report.
+ completed = (
+ item.current_build_set.result is not None or status == "cancelled"
+ )
log.debug(
"Updating check for change %s, params %s, context %s, message: %s",
@@ -313,6 +318,6 @@ def getSchema():
'unlabel': scalar_or_list(str),
'review': v.Any('approve', 'request-changes', 'comment'),
'review-body': str,
- 'check': v.Any("in_progress", "success", "failure"),
+ 'check': v.Any("in_progress", "success", "failure", "cancelled"),
})
return github_reporter
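The completion rule introduced above can be shown in isolation; a minimal sketch (the
helper name and dict layout are illustrative, not the reporter's real API):

.. code-block:: python

   from datetime import datetime, timezone

   def check_run_params(buildset_result, reported_status):
       # A check run is completed once the buildset has a result, or when
       # the dequeue reporter reports 'cancelled' for an unfinished item.
       completed = (buildset_result is not None
                    or reported_status == 'cancelled')
       params = {'status': 'completed' if completed else reported_status}
       if completed:
           # Completed check runs need a conclusion and may carry a
           # completion timestamp.
           params['conclusion'] = reported_status
           params['completed_at'] = datetime.now(timezone.utc).isoformat()
       return params

   print(check_run_params(None, 'in_progress'))   # still running
   print(check_run_params(None, 'cancelled'))     # dequeued without a result
   print(check_run_params('SUCCESS', 'success'))  # finished normally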
diff --git a/zuul/driver/pagure/pagureconnection.py b/zuul/driver/pagure/pagureconnection.py
index 5df701230..becc803c8 100644
--- a/zuul/driver/pagure/pagureconnection.py
+++ b/zuul/driver/pagure/pagureconnection.py
@@ -205,6 +205,7 @@ class PagureEventConnector(threading.Thread):
'pull-request.closed': self._event_pull_request_closed,
'pull-request.new': self._event_pull_request,
'pull-request.flag.added': self._event_flag_added,
+ 'pull-request.flag.updated': self._event_flag_added,
'git.receive': self._event_ref_updated,
'git.branch.creation': self._event_ref_created,
'git.branch.deletion': self._event_ref_deleted,
@@ -437,6 +438,12 @@ class PagureAPIClient():
ret.status_code, ret.text))
return ret.json(), ret.status_code, ret.url, 'POST'
+ def whoami(self):
+ path = '-/whoami'
+ resp = self.post(self.base_url + path)
+ self._manage_error(*resp)
+ return resp[0]['username']
+
def get_project_branches(self):
path = '%s/git/branches' % self.project
resp = self.get(self.base_url + path)
@@ -455,23 +462,29 @@ class PagureAPIClient():
self._manage_error(*resp)
return resp[0]
- def get_pr_flags(self, number, last=False):
+ def get_pr_flags(self, number, owner, last=False):
path = '%s/pull-request/%s/flag' % (self.project, number)
resp = self.get(self.base_url + path)
self._manage_error(*resp)
data = resp[0]
+ owned_flags = [
+ flag for flag in data['flags']
+ if flag['user']['name'] == owner]
if last:
- if data['flags']:
- return data['flags'][0]
+ if owned_flags:
+ return owned_flags[0]
else:
return {}
else:
- return data['flags']
+ return owned_flags
- def set_pr_flag(self, number, status, url, description):
+ def set_pr_flag(
+ self, number, status, url, description, app_name, username):
+ flag_uid = "%s-%s-%s" % (username, number, self.project)
params = {
- "username": "Zuul",
+ "username": app_name,
"comment": "Jobs result is %s" % status,
+ "uid": flag_uid[:32],
"status": status,
"url": url}
path = '%s/pull-request/%s/flag' % (self.project, number)
@@ -504,7 +517,6 @@ class PagureAPIClient():
class PagureConnection(BaseConnection):
driver_name = 'pagure'
log = logging.getLogger("zuul.PagureConnection")
- payload_path = 'payload'
def __init__(self, driver, connection_name, connection_config):
super(PagureConnection, self).__init__(
@@ -517,6 +529,9 @@ class PagureConnection(BaseConnection):
'canonical_hostname', self.server)
self.git_ssh_key = self.connection_config.get('sshkey')
self.api_token = self.connection_config.get('api_token')
+ self.app_name = self.connection_config.get(
+ 'app_name', 'Zuul')
+ self.username = None
self.baseurl = self.connection_config.get(
'baseurl', 'https://%s' % self.server).rstrip('/')
self.cloneurl = self.connection_config.get(
@@ -561,9 +576,17 @@ class PagureConnection(BaseConnection):
def eventDone(self):
self.event_queue.task_done()
+ def set_my_username(self, client):
+ self.log.debug("Fetching my username ...")
+ self.username = client.whoami()
+ self.log.debug("My username is %s" % self.username)
+
def get_project_api_client(self, project):
self.log.debug("Building project %s api_client" % project)
- return PagureAPIClient(self.baseurl, self.api_token, project)
+ client = PagureAPIClient(self.baseurl, self.api_token, project)
+ if not self.username:
+ self.set_my_username(client)
+ return client
def get_project_webhook_token(self, project, force_refresh=False):
token = self.webhook_tokens.get(project)
@@ -695,7 +718,7 @@ class PagureConnection(BaseConnection):
def _hasRequiredStatusChecks(self, change):
pagure = self.get_project_api_client(change.project.name)
- flag = pagure.get_pr_flags(change.number, last=True)
+ flag = pagure.get_pr_flags(change.number, self.username, last=True)
return True if flag.get('status', '') == 'success' else False
def canMerge(self, change, allow_needs, event=None):
@@ -803,14 +826,15 @@ class PagureConnection(BaseConnection):
def setCommitStatus(self, project, number, state, url='',
description='', context=''):
pagure = self.get_project_api_client(project)
- pagure.set_pr_flag(number, state, url, description)
+ pagure.set_pr_flag(
+ number, state, url, description, self.app_name, self.username)
self.log.info("Set pull-request CI flag status : %s" % description)
# Wait for 1 second as flag timestamp is by second
time.sleep(1)
def getCommitStatus(self, project, number):
pagure = self.get_project_api_client(project)
- flag = pagure.get_pr_flags(number, last=True)
+ flag = pagure.get_pr_flags(number, self.username, last=True)
self.log.info(
"Got pull-request CI status for PR %s on %s status: %s" % (
number, project, flag.get('status')))
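The effect of the new owner filtering is easy to demonstrate on its own; a small sketch
(the flag dicts mimic the shape used in this change, the function name is illustrative):

.. code-block:: python

   def owned_flags(flags, owner):
       # Keep only the CI flags set by the given account, mirroring the
       # filtering added to get_pr_flags().
       return [flag for flag in flags if flag['user']['name'] == owner]

   flags = [
       {'user': {'name': 'zuul'}, 'status': 'success'},
       {'user': {'name': 'other-ci'}, 'status': 'failure'},
   ]

   # Only Zuul's own flag is considered when evaluating requirements, so a
   # 'failure' posted by another CI no longer masks Zuul's 'success'.
   print(owned_flags(flags, 'zuul'))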
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index 27cbcf98d..5cb66f759 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -861,6 +861,7 @@ class AnsibleJob(object):
self.callback_dir = os.path.join(plugin_dir, 'callback')
self.lookup_dir = os.path.join(plugin_dir, 'lookup')
self.filter_dir = os.path.join(plugin_dir, 'filter')
+ self.ansible_callbacks = self.executor_server.ansible_callbacks
def run(self):
self.running = True
@@ -2076,6 +2077,11 @@ class AnsibleJob(object):
# and reduces CPU load of the ansible process.
config.write('internal_poll_interval = 0.01\n')
+ if self.ansible_callbacks:
+ config.write('callback_whitelist =\n')
+ for callback in self.ansible_callbacks.keys():
+ config.write(' %s,\n' % callback)
+
config.write('[ssh_connection]\n')
# NOTE(pabelanger): Try up to 3 times to run a task on a host, this
# helps to mitigate UNREACHABLE host errors with SSH.
@@ -2095,6 +2101,12 @@ class AnsibleJob(object):
"-o UserKnownHostsFile=%s" % self.jobdir.known_hosts
config.write('ssh_args = %s\n' % ssh_args)
+ if self.ansible_callbacks:
+ for cb_name, cb_config in self.ansible_callbacks.items():
+ config.write("[callback_%s]\n" % cb_name)
+ for k, n in cb_config.items():
+ config.write("%s = %s\n" % (k, n))
+
def _ansibleTimeout(self, msg):
self.log.warning(msg)
self.abortRunningProc()
@@ -2551,6 +2563,17 @@ class ExecutorServer(BaseMergeServer):
'ansible_setup_timeout', 60))
self.zone = get_default(self.config, 'executor', 'zone')
+ self.ansible_callbacks = {}
+ for section_name in self.config.sections():
+ cb_match = re.match(r'^ansible_callback ([\'\"]?)(.*)(\1)$',
+ section_name, re.I)
+ if not cb_match:
+ continue
+ cb_name = cb_match.group(2)
+ self.ansible_callbacks[cb_name] = dict(
+ self.config.items(section_name)
+ )
+
# TODO(tobiash): Take cgroups into account
self.update_workers = multiprocessing.cpu_count()
self.update_threads = []
@@ -2679,7 +2702,7 @@ class ExecutorServer(BaseMergeServer):
self.command_thread.daemon = True
self.command_thread.start()
- self.log.debug("Starting workers")
+ self.log.debug("Starting %s update workers" % self.update_workers)
for i in range(self.update_workers):
update_thread = threading.Thread(target=self._updateLoop,
name='update')
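The section-name regex above is deliberately permissive; a short check of how it behaves
against the section names from the new test fixture (the regex is copied verbatim from
this change, the surrounding script is illustrative):

.. code-block:: python

   import re

   pattern = re.compile(r'^ansible_callback ([\'\"]?)(.*)(\1)$', re.I)

   sections = [
       'ansible_callback "test_callback"',
       'ansible_callback "nocows = True"',
       'ansible_callback "\\nnocows = False"',
       'merger',
   ]
   for section in sections:
       m = pattern.match(section)
       # Unusual names simply become unusual [callback_<name>] sections;
       # non-matching sections are ignored.
       print(repr(section), '->', m.group(2) if m else None)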
diff --git a/zuul/lib/gearworker.py b/zuul/lib/gearworker.py
index 1bffbd4de..1e1433692 100644
--- a/zuul/lib/gearworker.py
+++ b/zuul/lib/gearworker.py
@@ -51,18 +51,18 @@ class ZuulGearWorker:
self.ssl_cert, self.ssl_ca,
keepalive=True, tcp_keepidle=60,
tcp_keepintvl=30, tcp_keepcnt=5)
- self.log.debug('Waiting for server')
+ self.log.debug('Waiting for gearman')
self.gearman.waitForServer()
self.register()
self.thread.start()
def register(self):
- self.log.debug('Registering jobs')
+ self.log.debug('Registering %s jobs' % len(self.jobs))
for job in self.jobs:
self.gearman.registerFunction(job)
def unregister(self):
- self.log.debug('Unregistering jobs')
+ self.log.debug('Unregistering all jobs (%s)' % len(self.jobs))
for job in self.jobs:
self.gearman.unRegisterFunction(job)
diff --git a/zuul/manager/__init__.py b/zuul/manager/__init__.py
index 469d2c1b3..fdce0c7b7 100644
--- a/zuul/manager/__init__.py
+++ b/zuul/manager/__init__.py
@@ -169,6 +169,19 @@ class PipelineManager(metaclass=ABCMeta):
self.log.error("Reporting item start %s received: %s" %
(item, ret))
+ def reportDequeue(self, item):
+ if not self.pipeline._disabled:
+ self.log.info(
+ "Reporting dequeue, action %s item %s",
+ self.pipeline.dequeue_actions,
+ item,
+ )
+ ret = self.sendReport(self.pipeline.dequeue_actions, item)
+ if ret:
+ self.log.error(
+ "Reporting item dequeue %s received: %s", item, ret
+ )
+
def sendReport(self, action_reporters, item, message=None):
"""Sends the built message off to configured reporters.
@@ -371,6 +384,12 @@ class PipelineManager(metaclass=ABCMeta):
log = get_annotated_logger(self.log, item.event)
log.debug("Removing change %s from queue", item.change)
item.queue.dequeueItem(item)
+ # In case an item is dequeued that doesn't have a result yet
+ # (success/failed/...), we report it as dequeued.
+ # Without this check, all items with a valid result would be reported
+ # twice.
+ if not item.current_build_set.result and item.live:
+ self.reportDequeue(item)
def removeItem(self, item):
log = get_annotated_logger(self.log, item.event)
@@ -563,19 +582,6 @@ class PipelineManager(metaclass=ABCMeta):
zuul_event_id=None)
untrusted_errors = len(untrusted_layout.loading_errors) > 0
- # TODO (jeblair): remove this section of extra verbose
- # debug logging when we have resolved the loading_errors
- # bug.
- log.debug("Dynamic layout: trusted errors: %s layout: %s",
- trusted_errors, trusted_layout)
- if trusted_layout:
- for err in trusted_layout.loading_errors.errors[:10]:
- log.debug(err.error)
- log.debug("Dynamic layout: untrusted errors: %s layout: %s",
- untrusted_errors, untrusted_layout)
- if untrusted_layout:
- for err in untrusted_layout.loading_errors.errors[:10]:
- log.debug(err.error)
# Configuration state handling switchboard. Intentionally verbose
# and repetetive to be exceptionally clear that we handle all
# possible cases correctly. Note we never return trusted_layout
@@ -666,6 +672,9 @@ class PipelineManager(metaclass=ABCMeta):
def getLayout(self, item):
if item.item_ahead:
fallback_layout = item.item_ahead.layout
+ if fallback_layout is None:
+ # We're probably waiting on a merge job for the item ahead.
+ return None
else:
fallback_layout = item.pipeline.tenant.layout
if not item.change.updatesConfig(item.pipeline.tenant):
diff --git a/zuul/model.py b/zuul/model.py
index a5dd8827d..a1e3d3dc4 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -261,6 +261,7 @@ class Pipeline(object):
self.footer_message = None
self.enqueue_message = None
self.start_message = None
+ self.dequeue_message = None
self.post_review = False
self.dequeue_on_new_patchset = True
self.ignore_dependencies = False
@@ -276,6 +277,7 @@ class Pipeline(object):
self.merge_failure_actions = []
self.no_jobs_actions = []
self.disabled_actions = []
+ self.dequeue_actions = []
self.disable_at = None
self._consecutive_failures = 0
self._disabled = False
@@ -295,7 +297,8 @@ class Pipeline(object):
self.failure_actions +
self.merge_failure_actions +
self.no_jobs_actions +
- self.disabled_actions
+ self.disabled_actions +
+ self.dequeue_actions
)
def __repr__(self):
diff --git a/zuul/reporter/__init__.py b/zuul/reporter/__init__.py
index ac5f371d6..da3c606b4 100644
--- a/zuul/reporter/__init__.py
+++ b/zuul/reporter/__init__.py
@@ -123,7 +123,8 @@ class BaseReporter(object, metaclass=abc.ABCMeta):
'failure': self._formatItemReportFailure,
'merge-failure': self._formatItemReportMergeFailure,
'no-jobs': self._formatItemReportNoJobs,
- 'disabled': self._formatItemReportDisabled
+ 'disabled': self._formatItemReportDisabled,
+ 'dequeue': self._formatItemReportDequeue,
}
return format_methods[self._action]
@@ -208,6 +209,12 @@ class BaseReporter(object, metaclass=abc.ABCMeta):
else:
return self._formatItemReport(item)
+ def _formatItemReportDequeue(self, item, with_jobs=True):
+ msg = item.pipeline.dequeue_message
+ if with_jobs:
+ msg += '\n\n' + self._formatItemReportJobs(item)
+ return msg
+
def _getItemReportJobsFields(self, item):
# Extract the report elements from an item
config = self.connection.sched.config
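For completeness, the body produced by the new dequeue formatter is just the pipeline's
dequeue-message followed by the job results; an illustrative standalone version (the
function signature and the job lines are made up for the example):

.. code-block:: python

   def format_dequeue_report(dequeue_message, job_lines, with_jobs=True):
       # Mirror the shape of _formatItemReportDequeue(): message, blank
       # line, then the per-job results.
       msg = dequeue_message
       if with_jobs and job_lines:
           msg += '\n\n' + '\n'.join(job_lines)
       return msg

   print(format_dequeue_report('Build canceled.',
                               ['- project-test1 : ABORTED',
                                '- project-test2 : ABORTED']))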