diff options
91 files changed, 1096 insertions, 210 deletions
diff --git a/.zuul.yaml b/.zuul.yaml index 72684c460..9d9000756 100644 --- a/.zuul.yaml +++ b/.zuul.yaml @@ -55,6 +55,12 @@ zuul_ansible_version: 5 - job: + name: zuul-stream-functional-6 + parent: zuul-stream-functional + vars: + zuul_ansible_version: 6 + +- job: name: zuul-tox description: | Zuul unit tests with ZooKeeper running @@ -320,6 +326,7 @@ - zuul-stream-functional-2.8 - zuul-stream-functional-2.9 - zuul-stream-functional-5 + - zuul-stream-functional-6 - zuul-tox-remote - zuul-quick-start: requires: nodepool-container-image @@ -350,6 +357,7 @@ - zuul-stream-functional-2.8 - zuul-stream-functional-2.9 - zuul-stream-functional-5 + - zuul-stream-functional-6 - zuul-tox-remote - zuul-quick-start: requires: nodepool-container-image diff --git a/doc/source/config/pipeline.rst b/doc/source/config/pipeline.rst index f1c294775..f4d7cce69 100644 --- a/doc/source/config/pipeline.rst +++ b/doc/source/config/pipeline.rst @@ -332,9 +332,16 @@ success, the pipeline reports back to Gerrit with ``Verified`` vote of .. attr:: merge-conflict These reporters describe what Zuul should do if it is unable to - merge in the patchset. If no merge-conflict reporters are listed - then the ``failure`` reporters will be used to notify of - unsuccessful merges. + merge the patchset into the current state of the target + branch. If no merge-conflict reporters are listed then the + ``failure`` reporters will be used. + + .. attr:: config-error + + These reporters describe what Zuul should do if it encounters a + configuration error while trying to enqueue the item. If no + config-error reporters are listed then the ``failure`` reporters + will be used. .. attr:: enqueue diff --git a/doc/source/developer/ansible.rst b/doc/source/developer/ansible.rst index 415c47df7..c3135debe 100644 --- a/doc/source/developer/ansible.rst +++ b/doc/source/developer/ansible.rst @@ -4,19 +4,11 @@ Ansible Integration Zuul contains Ansible modules and plugins to control the execution of Ansible Job content. 
-Build Log Support ------------------ +Zuul provides realtime build log streaming to end users so that users +can watch long-running jobs in progress. -Zuul provides realtime build log streaming to end users so that users can -watch long-running jobs in progress. As jobs may be written that execute a -shell script that could run for a long time, additional effort is expended -to stream stdout and stderr of shell tasks as they happen rather than waiting -for the command to finish. - -Zuul contains a modified version of the :ansible:module:`command` -that starts a log streaming daemon on the build node. - -.. automodule:: zuul.ansible.base.library.command +Streaming job output +-------------------- All jobs run with the :py:mod:`zuul.ansible.base.callback.zuul_stream` callback plugin enabled, which writes the build log to a file so that the @@ -35,10 +27,55 @@ exposes that log stream over a websocket connection as part of In addition to real-time streaming, Zuul also installs another callback module, :py:mod:`zuul.ansible.base.callback.zuul_json.CallbackModule` that collects all of the information about a given run into a json file which is written to the -work dir so that it can be published along with build logs. Since the streaming -log is by necessity a single text stream, choices have to be made for -readability about what data is shown and what is not shown. The json log file -is intended to allow for a richer more interactive set of data to be displayed -to the user. +work dir so that it can be published along with build logs. .. autoclass:: zuul.ansible.base.callback.zuul_json.CallbackModule + +Since the streaming log is by necessity a single text stream, choices +have to be made for readability about what data is shown and what is +not shown. The json log file is intended to allow for a richer more +interactive set of data to be displayed to the user. + +.. 
_zuul_console_streaming: + +Capturing live command output +----------------------------- + +As jobs may execute long-running shell scripts or other commands, +additional effort is expended to stream ``stdout`` and ``stderr`` of +shell tasks as they happen rather than waiting for the command to +finish. + +The global job configuration should run the ``zuul_console`` task as a +very early prerequisite step. + +.. automodule:: zuul.ansible.base.library.zuul_console + +This will start a daemon that listens on TCP port 19885 on the testing +node. This daemon can be queried to stream back the output of shell +tasks as described below. + +Zuul contains a modified version of Ansible's +:ansible:module:`command` module that overrides the default +implementation. + +.. automodule:: zuul.ansible.base.library.command + +This library will capture the output of the running +command and write it to a temporary file on the host the command is +running on. These files are named in the format +``/tmp/console-<uuid>-<task_id>-<host>.log`` + +The ``zuul_stream`` callback mentioned above will send a request to +the remote ``zuul_console`` daemon, providing the uuid and task id of +the task it is currently processing. The ``zuul_console`` daemon will +then read the logfile from disk and stream the data back as it +appears, which ``zuul_stream`` will then present as described above. + +The ``zuul_stream`` callback will indicate to the ``zuul_console`` +daemon when it has finished reading the task, which prompts the remote +side to remove the temporary streaming output files. In some cases, +aborting the Ansible process may not give the ``zuul_stream`` callback +the chance to send this notice, leaking the temporary files. If nodes +are ephemeral this makes little difference, but these files may be +visible on static nodes. 
diff --git a/doc/source/drivers/timer.rst b/doc/source/drivers/timer.rst index 1d7931c5e..57de1573d 100644 --- a/doc/source/drivers/timer.rst +++ b/doc/source/drivers/timer.rst @@ -18,6 +18,9 @@ will enqueue an event into its pipeline for every project and branch defined in the configuration. Any job associated with the pipeline will run in response to that event. +Zuul implements the timer using `apscheduler`_. Please check the +`apscheduler documentation`_ for more information about the syntax. + .. attr:: pipeline.trigger.timer The timer trigger supports the following attributes: @@ -27,9 +30,17 @@ will run in response to that event. The time specification in cron syntax. Only the 5 part syntax is supported, not the symbolic names. Example: ``0 0 * * *`` - runs at midnight. The first weekday is Monday. An optional 6th - part specifies seconds. The optional 7th part specifies a - jitter in seconds. This delays the trigger randomly, limited by + runs at midnight. + An optional 6th part specifies seconds. The optional 7th part specifies + a jitter in seconds. This delays the trigger randomly, limited by the specified value. Example ``0 0 * * * * 60`` runs at midnight or randomly up to 60 seconds later. The jitter is applied individually to each project-branch combination. + + .. warning:: + Be aware the day-of-week value differs from cron. + The first weekday is Monday (0), and the last is Sunday (6). + + +.. _apscheduler: https://apscheduler.readthedocs.io/ +.. _apscheduler documentation: https://apscheduler.readthedocs.io/en/3.x/modules/triggers/cron.html#module-apscheduler.triggers.cron diff --git a/doc/source/howtos/nodepool_static.rst b/doc/source/howtos/nodepool_static.rst index ff2d35d6a..c10672e7b 100644 --- a/doc/source/howtos/nodepool_static.rst +++ b/doc/source/howtos/nodepool_static.rst @@ -15,9 +15,9 @@ the following requirements: * Must be reachable by Zuul executors and have SSH access enabled. * Must have a user that Zuul can use for SSH. 
-* Must have Python 2 installed for Ansible. -* Must be reachable by Zuul executors over TCP port 19885 (console log - streaming). +* Must have an Ansible supported Python installed +* Must be reachable by Zuul executors over TCP port 19885 for console + log streaming. See :ref:`nodepool_console_streaming` When setting up your nodepool.yaml file, you will need the host keys for each node for the ``host-key`` value. This can be obtained with @@ -40,7 +40,7 @@ nodes. Place this file in ``/etc/nodepool/nodepool.yaml``: - host: localhost labels: - - name: ubuntu-xenial + - name: ubuntu-jammy providers: - name: static-vms @@ -49,14 +49,34 @@ nodes. Place this file in ``/etc/nodepool/nodepool.yaml``: - name: main nodes: - name: 192.168.1.10 - labels: ubuntu-xenial + labels: ubuntu-jammy host-key: "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGXqY02bdYqg1BcIf2x08zs60rS6XhlBSQ4qE47o5gb" username: zuul - name: 192.168.1.11 - labels: ubuntu-xenial + labels: ubuntu-jammy host-key: "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGXqY02bdYqg1BcIf2x08zs60rS6XhlBSQ5sE47o5gc" username: zuul EOF" Make sure that ``username``, ``host-key``, IP addresses and label names are customized for your environment. + +.. _nodepool_console_streaming: + +Log streaming +------------- + +The log streaming service enables Zuul to show the live status of +long-running ``shell`` or ``command`` tasks. The server side is setup +by the ``zuul_console:`` task built-in to Zuul's Ansible installation. +The executor requires the ability to communicate with the job nodes on +port 19885 for this to work. + +The log streaming service may leave files on the static node in the +format ``/tmp/console-<uuid>-<task_id>-<host>.log`` if jobs are +interrupted. These may be safely removed after a short period of +inactivity with a command such as + +.. 
code-block:: shell + + find /tmp -maxdepth 1 -name 'console-*-*-<host>.log' -mtime +2 -delete diff --git a/doc/source/installation.rst b/doc/source/installation.rst index a9a526f13..17665ca76 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -10,11 +10,15 @@ Nodepool ~~~~~~~~ In order to run all but the simplest jobs, Zuul uses a companion -program, Nodepool, to supply the nodes (whether dynamic cloud -instances or static hardware) used by jobs. Before starting Zuul, -ensure you have Nodepool installed and any images you require built. -Zuul only makes one requirement of these nodes: that it be able to log -in given a username and ssh private key. +program `Nodepool <https://opendev.org/zuul/nodepool>`__ to supply the +nodes (whether dynamic cloud instances or static hardware) used by +jobs. Before starting Zuul, ensure you have Nodepool installed and +any images you require built. + +Zuul must be able to log into the nodes provisioned by Nodepool with a +given username and SSH private key. Executors should also be able to +talk to nodes on TCP port 19885 for log streaming; see +:ref:`nodepool_console_streaming`. ZooKeeper ~~~~~~~~~ diff --git a/doc/source/job-content.rst b/doc/source/job-content.rst index 9b1059502..75044cf1c 100644 --- a/doc/source/job-content.rst +++ b/doc/source/job-content.rst @@ -332,6 +332,11 @@ of item. connectivity issues then previous attempts may have been cancelled, and this value will be greater than 1. + .. var:: ansible_version + + The version of the Ansible community package release used for executing + the job. + .. var:: project The item's project. This is a data structure with the following diff --git a/doc/source/tutorials/keycloak.rst b/doc/source/tutorials/keycloak.rst index 5242a4f05..896f35479 100644 --- a/doc/source/tutorials/keycloak.rst +++ b/doc/source/tutorials/keycloak.rst @@ -46,14 +46,14 @@ that we can update Zuul's configuration to add authentication. .. 
code-block:: shell cd zuul/doc/source/examples - sudo -E docker-compose-compose -p zuul-tutorial down + sudo -E docker-compose -p zuul-tutorial stop Restart the containers with a new Zuul configuration. .. code-block:: shell cd zuul/doc/source/examples - ZUUL_TUTORIAL_CONFIG="./keycloak/etc_zuul/" sudo -E docker-compose-compose -p zuul-tutorial up -d + ZUUL_TUTORIAL_CONFIG="./keycloak/etc_zuul/" sudo -E docker-compose -p zuul-tutorial up -d This tells docker-compose to use these Zuul `config files <https://opendev.org/zuul/zuul/src/branch/master/doc/source/examples/keycloak>`_. @@ -67,7 +67,7 @@ with this command: .. code-block:: shell cd zuul/doc/source/examples/keycloak - sudo -E docker-compose-compose -p zuul-tutorial-keycloak up -d + sudo -E docker-compose -p zuul-tutorial-keycloak up -d Once Keycloak is running, you can visit the web interface at http://localhost:8082/ diff --git a/playbooks/tutorial/admin.yaml b/playbooks/tutorial/admin.yaml index 9b36069e7..92d2b6d1f 100644 --- a/playbooks/tutorial/admin.yaml +++ b/playbooks/tutorial/admin.yaml @@ -2,13 +2,13 @@ - name: Run docker-compose down when: not local shell: - cmd: docker-compose -p zuul-tutorial down + cmd: docker-compose -p zuul-tutorial stop chdir: src/opendev.org/zuul/zuul/doc/source/examples - name: Run docker-compose down when: local shell: - cmd: docker-compose -p zuul-tutorial down + cmd: docker-compose -p zuul-tutorial stop chdir: ../../doc/source/examples # Restart with the new config @@ -55,3 +55,24 @@ until: result.status == 200 and result.json["zuul_version"] is defined changed_when: false +- name: Verify Keycloak authentication is available + uri: + url: http://localhost:9000/api/tenant/example-tenant/info + method: GET + return_content: true + status_code: 200 + body_format: json + register: result + failed_when: result.json["info"]["capabilities"]["auth"]["realms"]["zuul-demo"]["authority"] != "http://keycloak:8082/realms/zuul-demo" + changed_when: false + +- name: Verify that old 
builds are available + uri: + url: "http://localhost:9000/api/tenant/example-tenant/builds" + method: GET + return_content: true + status_code: 200 + body_format: json + register: result + failed_when: "result.json | length < 4" + changed_when: false diff --git a/playbooks/zuul-stream/2.7-container.yaml b/playbooks/zuul-stream/2.7-container.yaml new file mode 100644 index 000000000..76998a01d --- /dev/null +++ b/playbooks/zuul-stream/2.7-container.yaml @@ -0,0 +1,21 @@ +- name: Install docker + include_role: + name: ensure-docker + +- name: Build 2.7 container environment + shell: | + pushd {{ ansible_user_dir }}/src/opendev.org/zuul/zuul/playbooks/zuul-stream/fixtures/ + cat ~/.ssh/id_rsa.pub > authorized_keys + docker build -f Dockerfile.py27 -t zuul_python27 . + args: + executable: /bin/bash + +- name: Run 2.7 container + shell: | + docker run -d -p 2022:22 -p 19887:19887 zuul_python27 + docker ps + +- name: Accept host keys + shell: | + ssh-keyscan -p 2022 localhost >> ~/.ssh/known_hosts + ssh-keyscan -p 2022 127.0.0.2 >> ~/.ssh/known_hosts diff --git a/playbooks/zuul-stream/create-inventory.yaml b/playbooks/zuul-stream/create-inventory.yaml new file mode 100644 index 000000000..c2be02749 --- /dev/null +++ b/playbooks/zuul-stream/create-inventory.yaml @@ -0,0 +1,38 @@ +- name: Copy inventory + copy: + src: "{{ zuul.executor.log_root }}/zuul-info/inventory.yaml" + dest: "{{ ansible_user_dir }}/inventory.yaml" + +- name: Slurp inventory + slurp: + path: "{{ ansible_user_dir }}/inventory.yaml" + register: _inventory_yaml + +- name: Extract inventory + set_fact: + _new_inventory: "{{ _inventory_yaml['content'] | b64decode | from_yaml }}" + +- name: Setup new facts + set_fact: + _docker_inventory: + all: + children: + node: + hosts: + node3: null + hosts: + node3: + ansible_connection: ssh + ansible_host: 127.0.0.2 + ansible_port: 2022 + ansible_user: root + ansible_python_interpreter: /usr/local/bin/python2.7 + +- name: Merge all facts + set_fact: + _new_inventory: 
'{{ _new_inventory | combine(_docker_inventory, recursive=True) }}' + +- name: Write out inventory + copy: + content: '{{ _new_inventory | to_nice_yaml }}' + dest: '{{ ansible_user_dir }}/inventory.yaml' diff --git a/playbooks/zuul-stream/fixtures/Dockerfile.py27 b/playbooks/zuul-stream/fixtures/Dockerfile.py27 new file mode 100644 index 000000000..a30157b18 --- /dev/null +++ b/playbooks/zuul-stream/fixtures/Dockerfile.py27 @@ -0,0 +1,24 @@ +FROM python:2.7.18-buster AS buster-2.7-ssh + +ENV DEBIAN_FRONTEND noninteractive + +RUN apt-get update \ + && apt-get install -y dumb-init openssh-server \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +RUN mkdir /var/run/sshd && chmod 0755 /var/run/sshd + +# This may or not be required to allow logins by preventing pam_loginuid +# trying to write out audit level things that may not work in a container +RUN sed -ri 's/session(\s+)required(\s+)pam_loginuid.so/session\1optional\2pam_loginuid.so/' /etc/pam.d/sshd + +RUN ssh-keygen -A -v + +RUN ssh-keygen -t ed25519 -f /root/.ssh/id_ed25519 + +COPY authorized_keys /root/.ssh/authorized_keys +RUN chmod 0600 /root/.ssh/authorized_keys + +ENTRYPOINT ["/usr/bin/dumb-init", "--"] +CMD ["/usr/sbin/sshd", "-D", "-o", "ListenAddress=0.0.0.0" ] diff --git a/playbooks/zuul-stream/functional.yaml b/playbooks/zuul-stream/functional.yaml index 7ae4704a9..63e13e3f5 100644 --- a/playbooks/zuul-stream/functional.yaml +++ b/playbooks/zuul-stream/functional.yaml @@ -31,11 +31,6 @@ mv job-output.txt job-output-success-19887.txt mv job-output.json job-output-success-19887.json - - name: Check protocol version - assert: - that: - - "'[node1] Reports streaming version: 1' in _success_output.stdout" - # Streamer puts out a line like # [node1] Starting to log 916b2084-4bbb-80e5-248e-000000000016-1-node1 for task TASK: Print binary data # One of the tasks in job-output shows find: results; @@ -53,10 +48,13 @@ # NOTE(ianw) 2022-07 : we deliberatly have this second step to run # against the 
console setup by the infrastructure executor in the # job pre playbooks as a backwards compatability sanity check. + # The py27 container job (node3) is not running an existing + # console streamer, so that will not output anything -- limit this + # out. - name: Run ansible that should succeed against extant console command: > /usr/lib/zuul/ansible/{{ zuul_ansible_version }}/bin/ansible-playbook - -e "new_console=false" + -e "new_console=false" --limit="node1,node2" src/opendev.org/zuul/zuul/playbooks/zuul-stream/fixtures/test-stream.yaml environment: ZUUL_JOB_LOG_CONFIG: "{{ ansible_user_dir}}/logging.json" @@ -77,8 +75,12 @@ - name: Validate outputs include_tasks: validate.yaml loop: - - job-output-success-19887.txt - - job-output-success-19885.txt + - { node: 'node1', filename: 'job-output-success-19887.txt' } + - { node: 'node2', filename: 'job-output-success-19887.txt' } + - { node: 'node1', filename: 'job-output-success-19885.txt' } + - { node: 'node2', filename: 'job-output-success-19885.txt' } + # node3 only listen on 19887 + - { node: 'node3', filename: 'job-output-success-19887.txt' } # failure case @@ -103,8 +105,10 @@ shell: | egrep "^.+\| node1 \| Exception: Test module failure exception fail-task" job-output-failure.txt egrep "^.+\| node2 \| Exception: Test module failure exception fail-task" job-output-failure.txt + egrep "^.+\| node3 \| Exception: Test module failure exception fail-task" job-output-failure.txt - name: Validate output - failure item loop with exception shell: | egrep "^.+\| node1 \| Exception: Test module failure exception fail-loop" job-output-failure.txt egrep "^.+\| node2 \| Exception: Test module failure exception fail-loop" job-output-failure.txt + egrep "^.+\| node3 \| Exception: Test module failure exception fail-loop" job-output-failure.txt diff --git a/playbooks/zuul-stream/pre.yaml b/playbooks/zuul-stream/pre.yaml index 23fae3549..9753fab85 100644 --- a/playbooks/zuul-stream/pre.yaml +++ b/playbooks/zuul-stream/pre.yaml @@ 
-9,6 +9,12 @@ post_tasks: + - name: Setup 2.7 container environment + include_tasks: 2.7-container.yaml + + - name: Setup inventory + include_tasks: create-inventory.yaml + - name: Install pip shell: |+ python3 -m pip install --upgrade pip setuptools wheel @@ -36,11 +42,6 @@ # venvs) and the installation fails due to conflicts. SETUPTOOLS_USE_DISTUTILS: stdlib - - name: Copy inventory - copy: - src: "{{ zuul.executor.log_root }}/zuul-info/inventory.yaml" - dest: "{{ ansible_user_dir }}/inventory.yaml" - - name: Copy ansible.cfg template: src: templates/ansible.cfg.j2 diff --git a/playbooks/zuul-stream/validate.yaml b/playbooks/zuul-stream/validate.yaml index 73ccd873a..81c613406 100644 --- a/playbooks/zuul-stream/validate.yaml +++ b/playbooks/zuul-stream/validate.yaml @@ -1,38 +1,29 @@ - name: Validate output - setupvar shell: | - egrep "^.*\| node1 \|\s+\"setupvar\": {" {{ item }} - egrep "^.*\| node2 \|\s+\"setupvar\": {" {{ item }} + egrep "^.*\| {{ item.node }} \|\s+\"setupvar\": {" {{ item.filename }} - name: Validate output - shell task shell: | - egrep "^.*\| node1 \| 1: lo:" {{ item }} - egrep "^.*\| node2 \| 1: lo:" {{ item }} + egrep "^.*\| {{ item.node }} \| 1: lo:" {{ item.filename }} - name: Validate output - loop with items shell: | - egrep "^.+\| node1 \| ok: Item: item1" {{ item }} - egrep "^.+\| node1 \| ok: Item: item2" {{ item }} - egrep "^.+\| node1 \| ok: Item: item3" {{ item }} - egrep "^.+\| node2 \| ok: Item: item1" {{ item }} - egrep "^.+\| node2 \| ok: Item: item2" {{ item }} - egrep "^.+\| node2 \| ok: Item: item3" {{ item }} + egrep "^.+\| {{ item.node }} \| ok: Item: item1" {{ item.filename }} + egrep "^.+\| {{ item.node }} \| ok: Item: item2" {{ item.filename }} + egrep "^.+\| {{ item.node }} \| ok: Item: item3" {{ item.filename }} - name: Validate output - loop with complex items shell: | - egrep "^.+\| node1 \| ok: Item: Runtime" {{ item }} - egrep "^.+\| node2 \| ok: Item: Runtime" {{ item }} + egrep "^.+\| {{ item.node }} \| ok: 
Item: Runtime" {{ item.filename }} - name: Validate output - failed shell task shell: | - egrep "^.+\| node1 \| Exception: Test module failure exception task" {{ item }} - egrep "^.+\| node2 \| Exception: Test module failure exception task" {{ item }} + egrep "^.+\| {{ item.node }} \| Exception: Test module failure exception task" {{ item.filename }} - name: Validate output - item loop with exception shell: | - egrep "^.+\| node1 \| Exception: Test module failure exception loop" {{ item }} - egrep "^.+\| node2 \| Exception: Test module failure exception loop" {{ item }} + egrep "^.+\| {{ item.node }} \| Exception: Test module failure exception loop" {{ item.filename }} - name: Validate output - binary data shell: | - egrep "^.*\| node1 \| \\\\x80abc" {{ item }} - egrep "^.*\| node2 \| \\\\x80abc" {{ item }} + egrep "^.*\| {{ item.node }} \| \\\\x80abc" {{ item.filename }} diff --git a/releasenotes/notes/ansible-6-f939b4d160b41ec3.yaml b/releasenotes/notes/ansible-6-f939b4d160b41ec3.yaml new file mode 100644 index 000000000..c1bb9d534 --- /dev/null +++ b/releasenotes/notes/ansible-6-f939b4d160b41ec3.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + Ansible version 6 is now available. The default Ansible version + is still 5, but version 6 may be selected by using + :attr:`job.ansible-version`. diff --git a/releasenotes/notes/config-error-reporter-34887223d91544d1.yaml b/releasenotes/notes/config-error-reporter-34887223d91544d1.yaml new file mode 100644 index 000000000..51690f2fa --- /dev/null +++ b/releasenotes/notes/config-error-reporter-34887223d91544d1.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + A new :attr:`pipeline.config-error` pipeline reporter is available + for customizing reporter actions related to Zuul configuration + errors. 
diff --git a/releasenotes/notes/deprecate-ansible-2-4c22db35d3c6c765.yaml b/releasenotes/notes/deprecate-ansible-2-4c22db35d3c6c765.yaml new file mode 100644 index 000000000..09a0a128c --- /dev/null +++ b/releasenotes/notes/deprecate-ansible-2-4c22db35d3c6c765.yaml @@ -0,0 +1,5 @@ +--- +upgrade: + - | + Ansible versions 2.8 and 2.9 are now deprecated in Zuul since they + are both unmaintained. Ansible 5 is now the default version in Zuul. @@ -1,12 +1,12 @@ [metadata] name = zuul summary = A Project Gating System -description-file = +description_file = README.rst author = Zuul Team -author-email = zuul-discuss@lists.zuul-ci.org -home-page = https://zuul-ci.org/ -python-requires = >=3.8 +author_email = zuul-discuss@lists.zuul-ci.org +home_page = https://zuul-ci.org/ +python_requires = >=3.8 classifier = Intended Audience :: Information Technology Intended Audience :: System Administrators diff --git a/tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback.yaml b/tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback.yaml index 50bbbbfc5..13ddac988 100644 --- a/tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback.yaml +++ b/tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback.yaml @@ -1,4 +1,8 @@ - hosts: localhost - gather_facts: smart + gather_facts: false tasks: - command: echo test + + - name: Echo ansible version. 
+ debug: + msg: Ansible version={{ ansible_version.major }}.{{ ansible_version.minor }} diff --git a/tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback_plugins/test_callback.py b/tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback_plugins/test_callback.py index 39ff7cd49..2597370bc 100644 --- a/tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback_plugins/test_callback.py +++ b/tests/fixtures/config/ansible-callbacks/git/common-config/playbooks/callback_plugins/test_callback.py @@ -15,17 +15,20 @@ DOCUMENTATION = ''' class CallbackModule(CallbackBase): - CALLBACK_VERSION = 1.0 + """ + test callback + """ + CALLBACK_VERSION = 2.0 CALLBACK_NEEDS_WHITELIST = True + # aggregate means we can be loaded and not be the stdout plugin + CALLBACK_TYPE = 'aggregate' + CALLBACK_NAME = 'test_callback' def __init__(self): super(CallbackModule, self).__init__() - def set_options(self, task_keys=None, var_options=None, direct=None): - super(CallbackModule, self).set_options(task_keys=task_keys, - var_options=var_options, - direct=direct) - + def set_options(self, *args, **kw): + super(CallbackModule, self).set_options(*args, **kw) self.file_name = self.get_option('file_name') def v2_on_any(self, *args, **kwargs): diff --git a/tests/fixtures/config/ansible-callbacks/main.yaml b/tests/fixtures/config/ansible-callbacks/main.yaml index 9d01f542f..1e5247e4a 100644 --- a/tests/fixtures/config/ansible-callbacks/main.yaml +++ b/tests/fixtures/config/ansible-callbacks/main.yaml @@ -1,5 +1,6 @@ - tenant: name: tenant-one + default-ansible-version: SETME source: gerrit: config-projects: diff --git a/tests/fixtures/config/ansible-callbacks/main28.yaml b/tests/fixtures/config/ansible-callbacks/main28.yaml new file mode 100644 index 000000000..371710b4f --- /dev/null +++ b/tests/fixtures/config/ansible-callbacks/main28.yaml @@ -0,0 +1,7 @@ +- tenant: + name: tenant-one + default-ansible-version: '2.8' + source: + gerrit: + 
config-projects: + - common-config diff --git a/tests/fixtures/config/ansible-callbacks/main29.yaml b/tests/fixtures/config/ansible-callbacks/main29.yaml new file mode 100644 index 000000000..b127139a9 --- /dev/null +++ b/tests/fixtures/config/ansible-callbacks/main29.yaml @@ -0,0 +1,7 @@ +- tenant: + name: tenant-one + default-ansible-version: '2.9' + source: + gerrit: + config-projects: + - common-config diff --git a/tests/fixtures/config/ansible-callbacks/main5.yaml b/tests/fixtures/config/ansible-callbacks/main5.yaml new file mode 100644 index 000000000..5efc12339 --- /dev/null +++ b/tests/fixtures/config/ansible-callbacks/main5.yaml @@ -0,0 +1,7 @@ +- tenant: + name: tenant-one + default-ansible-version: '5' + source: + gerrit: + config-projects: + - common-config diff --git a/tests/fixtures/config/ansible-callbacks/main6.yaml b/tests/fixtures/config/ansible-callbacks/main6.yaml new file mode 100644 index 000000000..2467362bb --- /dev/null +++ b/tests/fixtures/config/ansible-callbacks/main6.yaml @@ -0,0 +1,7 @@ +- tenant: + name: tenant-one + default-ansible-version: '6' + source: + gerrit: + config-projects: + - common-config diff --git a/tests/fixtures/config/ansible-versions/git/common-config/zuul.yaml b/tests/fixtures/config/ansible-versions/git/common-config/zuul.yaml index 91c8d6bca..e1bac5e01 100644 --- a/tests/fixtures/config/ansible-versions/git/common-config/zuul.yaml +++ b/tests/fixtures/config/ansible-versions/git/common-config/zuul.yaml @@ -25,7 +25,7 @@ parent: ansible-version vars: test_ansible_version_major: 2 - test_ansible_version_minor: 9 + test_ansible_version_minor: 12 # This job is used by a test case specifying a different ansible version in # zuul.conf @@ -60,6 +60,14 @@ test_ansible_version_major: 2 test_ansible_version_minor: 12 +- job: + name: ansible-6 + parent: ansible-version + ansible-version: 6 + vars: + test_ansible_version_major: 2 + test_ansible_version_minor: 13 + - project: name: common-config check: @@ -68,6 +76,7 @@ - 
ansible-28 - ansible-29 - ansible-5 + - ansible-6 - project: name: org/project @@ -77,3 +86,4 @@ - ansible-28 - ansible-29 - ansible-5 + - ansible-6 diff --git a/tests/fixtures/config/ansible/git/org_ansible/playbooks/hello-ansible.yaml b/tests/fixtures/config/ansible/git/org_ansible/playbooks/hello-ansible.yaml index 17ddc1661..d0458c710 100644 --- a/tests/fixtures/config/ansible/git/org_ansible/playbooks/hello-ansible.yaml +++ b/tests/fixtures/config/ansible/git/org_ansible/playbooks/hello-ansible.yaml @@ -3,3 +3,7 @@ - name: hello debug: msg: hello ansible + + - name: Echo ansible version. + debug: + msg: Ansible version={{ ansible_version.major }}.{{ ansible_version.minor }} diff --git a/tests/fixtures/config/ansible/main.yaml b/tests/fixtures/config/ansible/main.yaml index 94e7aa78c..473bb5ef8 100644 --- a/tests/fixtures/config/ansible/main.yaml +++ b/tests/fixtures/config/ansible/main.yaml @@ -1,5 +1,6 @@ - tenant: name: tenant-one + default-ansible-version: SETME source: gerrit: config-projects: diff --git a/tests/fixtures/config/ansible/main28.yaml b/tests/fixtures/config/ansible/main28.yaml new file mode 100644 index 000000000..f2add49c7 --- /dev/null +++ b/tests/fixtures/config/ansible/main28.yaml @@ -0,0 +1,11 @@ +- tenant: + name: tenant-one + default-ansible-version: '2.8' + source: + gerrit: + config-projects: + - common-config + untrusted-projects: + - org/project + - bare-role + - org/ansible diff --git a/tests/fixtures/config/ansible/main29.yaml b/tests/fixtures/config/ansible/main29.yaml new file mode 100644 index 000000000..758292950 --- /dev/null +++ b/tests/fixtures/config/ansible/main29.yaml @@ -0,0 +1,11 @@ +- tenant: + name: tenant-one + default-ansible-version: '2.9' + source: + gerrit: + config-projects: + - common-config + untrusted-projects: + - org/project + - bare-role + - org/ansible diff --git a/tests/fixtures/config/ansible/main5.yaml b/tests/fixtures/config/ansible/main5.yaml new file mode 100644 index 000000000..b2364e80b --- 
/dev/null +++ b/tests/fixtures/config/ansible/main5.yaml @@ -0,0 +1,11 @@ +- tenant: + name: tenant-one + default-ansible-version: '5' + source: + gerrit: + config-projects: + - common-config + untrusted-projects: + - org/project + - bare-role + - org/ansible diff --git a/tests/fixtures/config/ansible/main6.yaml b/tests/fixtures/config/ansible/main6.yaml new file mode 100644 index 000000000..7db6af6da --- /dev/null +++ b/tests/fixtures/config/ansible/main6.yaml @@ -0,0 +1,11 @@ +- tenant: + name: tenant-one + default-ansible-version: '6' + source: + gerrit: + config-projects: + - common-config + untrusted-projects: + - org/project + - bare-role + - org/ansible diff --git a/tests/fixtures/config/executor-facts/git/org_project/playbooks/datetime-fact.yaml b/tests/fixtures/config/executor-facts/git/org_project/playbooks/datetime-fact.yaml index 300dfa5f0..53819aa00 100644 --- a/tests/fixtures/config/executor-facts/git/org_project/playbooks/datetime-fact.yaml +++ b/tests/fixtures/config/executor-facts/git/org_project/playbooks/datetime-fact.yaml @@ -1,5 +1,5 @@ - hosts: localhost - gather_facts: smart + gather_facts: no tasks: - debug: var: date_time @@ -9,3 +9,6 @@ var: ansible_date_time - assert: that: ansible_date_time is not defined + - name: Echo ansible version + debug: + msg: Ansible version={{ ansible_version.major }}.{{ ansible_version.minor }} diff --git a/tests/fixtures/config/executor-facts/main.yaml b/tests/fixtures/config/executor-facts/main.yaml index 208e274b1..37c9dd4fc 100644 --- a/tests/fixtures/config/executor-facts/main.yaml +++ b/tests/fixtures/config/executor-facts/main.yaml @@ -1,5 +1,6 @@ - tenant: name: tenant-one + default-ansible-version: SETME source: gerrit: config-projects: diff --git a/tests/fixtures/config/executor-facts/main28.yaml b/tests/fixtures/config/executor-facts/main28.yaml new file mode 100644 index 000000000..686899bf8 --- /dev/null +++ b/tests/fixtures/config/executor-facts/main28.yaml @@ -0,0 +1,9 @@ +- tenant: + name: 
tenant-one + default-ansible-version: '2.8' + source: + gerrit: + config-projects: + - common-config + untrusted-projects: + - org/project diff --git a/tests/fixtures/config/executor-facts/main29.yaml b/tests/fixtures/config/executor-facts/main29.yaml new file mode 100644 index 000000000..df934ff22 --- /dev/null +++ b/tests/fixtures/config/executor-facts/main29.yaml @@ -0,0 +1,9 @@ +- tenant: + name: tenant-one + default-ansible-version: '2.9' + source: + gerrit: + config-projects: + - common-config + untrusted-projects: + - org/project diff --git a/tests/fixtures/config/executor-facts/main5.yaml b/tests/fixtures/config/executor-facts/main5.yaml new file mode 100644 index 000000000..55d9d10c0 --- /dev/null +++ b/tests/fixtures/config/executor-facts/main5.yaml @@ -0,0 +1,9 @@ +- tenant: + name: tenant-one + default-ansible-version: '5' + source: + gerrit: + config-projects: + - common-config + untrusted-projects: + - org/project diff --git a/tests/fixtures/config/executor-facts/main6.yaml b/tests/fixtures/config/executor-facts/main6.yaml new file mode 100644 index 000000000..792f13402 --- /dev/null +++ b/tests/fixtures/config/executor-facts/main6.yaml @@ -0,0 +1,9 @@ +- tenant: + name: tenant-one + default-ansible-version: '6' + source: + gerrit: + config-projects: + - common-config + untrusted-projects: + - org/project diff --git a/tests/fixtures/config/inventory/git/common-config/zuul.yaml b/tests/fixtures/config/inventory/git/common-config/zuul.yaml index 6b5fe67d0..ca687139d 100644 --- a/tests/fixtures/config/inventory/git/common-config/zuul.yaml +++ b/tests/fixtures/config/inventory/git/common-config/zuul.yaml @@ -140,3 +140,12 @@ label: ubuntu-xenial ansible-version: '5' run: playbooks/ansible-version.yaml + +- job: + name: ansible-version6-inventory + nodeset: + nodes: + - name: ubuntu-xenial + label: ubuntu-xenial + ansible-version: '6' + run: playbooks/ansible-version.yaml diff --git a/tests/fixtures/config/inventory/git/org_project/.zuul.yaml 
b/tests/fixtures/config/inventory/git/org_project/.zuul.yaml index 1028a0ff2..69ec3127e 100644 --- a/tests/fixtures/config/inventory/git/org_project/.zuul.yaml +++ b/tests/fixtures/config/inventory/git/org_project/.zuul.yaml @@ -10,3 +10,4 @@ - ansible-version28-inventory - ansible-version29-inventory - ansible-version5-inventory + - ansible-version6-inventory diff --git a/tests/fixtures/config/remote-zuul-stream/git/org_project/playbooks/command.yaml b/tests/fixtures/config/remote-zuul-stream/git/org_project/playbooks/command.yaml index d737a1a9b..539db80b7 100644 --- a/tests/fixtures/config/remote-zuul-stream/git/org_project/playbooks/command.yaml +++ b/tests/fixtures/config/remote-zuul-stream/git/org_project/playbooks/command.yaml @@ -114,3 +114,17 @@ - name: Command Not Found command: command-not-found failed_when: false + +- hosts: compute1 + tasks: + + - name: Debug raw variable in msg + debug: + msg: '{{ ansible_version }}' + + - name: Debug raw variable in a loop + debug: + msg: '{{ ansible_version }}' + loop: + - 1 + - 2 diff --git a/tests/fixtures/config/remote-zuul-stream/git/org_project/playbooks/library/broken_module_exception.py b/tests/fixtures/config/remote-zuul-stream/git/org_project/playbooks/library/broken_module_exception.py index 6cfa0a3d0..7933f15a8 100755 --- a/tests/fixtures/config/remote-zuul-stream/git/org_project/playbooks/library/broken_module_exception.py +++ b/tests/fixtures/config/remote-zuul-stream/git/org_project/playbooks/library/broken_module_exception.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 def main(): diff --git a/tests/fixtures/config/remote-zuul-stream/git/org_project/playbooks/library/broken_module_no_result.py b/tests/fixtures/config/remote-zuul-stream/git/org_project/playbooks/library/broken_module_no_result.py index 065509d0d..0bdced5eb 100755 --- a/tests/fixtures/config/remote-zuul-stream/git/org_project/playbooks/library/broken_module_no_result.py +++ 
b/tests/fixtures/config/remote-zuul-stream/git/org_project/playbooks/library/broken_module_no_result.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 def main(): diff --git a/tests/fixtures/layouts/freeze-job-failure.yaml b/tests/fixtures/layouts/freeze-job-failure.yaml new file mode 100644 index 000000000..ae3f48324 --- /dev/null +++ b/tests/fixtures/layouts/freeze-job-failure.yaml @@ -0,0 +1,32 @@ +- pipeline: + name: check + manager: independent + trigger: + gerrit: + - event: patchset-created + success: + gerrit: + Verified: 1 + failure: + gerrit: + Verified: -1 + +- job: + name: base + parent: null + run: playbooks/base.yaml + +- job: + name: project-test1 + run: playbooks/project-test1.yaml + +- job: + name: project-test2 + run: playbooks/project-test2.yaml + +- project: + name: org/project + check: + jobs: + - project-test2: + dependencies: project-test1 diff --git a/tests/fixtures/layouts/special-characters-job.yaml b/tests/fixtures/layouts/special-characters-job.yaml new file mode 100644 index 000000000..20308c6d6 --- /dev/null +++ b/tests/fixtures/layouts/special-characters-job.yaml @@ -0,0 +1,2 @@ +- job: + name: "a@b/c" diff --git a/tests/fixtures/layouts/timer-freeze-job-failure.yaml b/tests/fixtures/layouts/timer-freeze-job-failure.yaml new file mode 100644 index 000000000..2e6d709bb --- /dev/null +++ b/tests/fixtures/layouts/timer-freeze-job-failure.yaml @@ -0,0 +1,26 @@ +- pipeline: + name: periodic + manager: independent + trigger: + timer: + - time: '* * * * * */1' + +- job: + name: base + parent: null + run: playbooks/base.yaml + +- job: + name: project-test1 + run: playbooks/project-test1.yaml + +- job: + name: project-test2 + run: playbooks/project-test2.yaml + +- project: + name: org/project + periodic: + jobs: + - project-test2: + dependencies: project-test1 diff --git a/tests/remote/test_remote_action_modules.py b/tests/remote/test_remote_action_modules.py index bbe6db0a0..30e430b74 100644 --- 
a/tests/remote/test_remote_action_modules.py +++ b/tests/remote/test_remote_action_modules.py @@ -109,3 +109,11 @@ class TestActionModules5(AnsibleZuulTestCase, FunctionalActionModulesMixIn): def setUp(self): super().setUp() self._setUp() + + +class TestActionModules6(AnsibleZuulTestCase, FunctionalActionModulesMixIn): + ansible_version = '6' + + def setUp(self): + super().setUp() + self._setUp() diff --git a/tests/remote/test_remote_zuul_json.py b/tests/remote/test_remote_zuul_json.py index 120235ec9..e4510e7d1 100644 --- a/tests/remote/test_remote_zuul_json.py +++ b/tests/remote/test_remote_zuul_json.py @@ -166,3 +166,11 @@ class TestZuulJSON5(AnsibleZuulTestCase, FunctionalZuulJSONMixIn): def setUp(self): super().setUp() self._setUp() + + +class TestZuulJSON6(AnsibleZuulTestCase, FunctionalZuulJSONMixIn): + ansible_version = '6' + + def setUp(self): + super().setUp() + self._setUp() diff --git a/tests/remote/test_remote_zuul_stream.py b/tests/remote/test_remote_zuul_stream.py index 1c705127e..b84c4b0d8 100644 --- a/tests/remote/test_remote_zuul_stream.py +++ b/tests/remote/test_remote_zuul_stream.py @@ -12,6 +12,8 @@ # License for the specific language governing permissions and limitations # under the License. +import io +import logging import os import re import textwrap @@ -31,6 +33,12 @@ class FunctionalZuulStreamMixIn: self.executor_server.log_console_port = self.log_console_port self.wait_timeout = 180 self.fake_nodepool.remote_ansible = True + # This catches the Ansible output; rather than the callback + # output captured in the job log. For example if the callback + # fails, there will be an error output in this stream. 
+ self.logger = logging.getLogger('zuul.AnsibleJob') + self.console_output = io.StringIO() + self.logger.addHandler(logging.StreamHandler(self.console_output)) ansible_remote = os.environ.get('ZUUL_REMOTE_IPV4') self.assertIsNotNone(ansible_remote) @@ -92,14 +100,20 @@ class FunctionalZuulStreamMixIn: with open(path) as f: return f.read() - def assertLogLine(self, line, log): - pattern = (r'^\d\d\d\d-\d\d-\d\d \d\d:\d\d\:\d\d\.\d\d\d\d\d\d \| %s$' - % line) + def _assertLogLine(self, line, log, full_match=True): + pattern = (r'^\d\d\d\d-\d\d-\d\d \d\d:\d\d\:\d\d\.\d\d\d\d\d\d \| %s%s' + % (line, '$' if full_match else '')) log_re = re.compile(pattern, re.MULTILINE) m = log_re.search(log) if m is None: raise Exception("'%s' not found in log" % (line,)) + def assertLogLineStartsWith(self, line, log): + self._assertLogLine(line, log, full_match=False) + + def assertLogLine(self, line, log): + self._assertLogLine(line, log, full_match=True) + def _getLogTime(self, line, log): pattern = (r'^(\d\d\d\d-\d\d-\d\d \d\d:\d\d\:\d\d\.\d\d\d\d\d\d)' r' \| %s\n' @@ -120,7 +134,21 @@ class FunctionalZuulStreamMixIn: build = self.history[-1] self.assertEqual(build.result, 'SUCCESS') + console_output = self.console_output.getvalue() + # This should be generic enough to match any callback + # plugin failures, which look something like + # + # [WARNING]: Failure using method (v2_runner_on_ok) in \ + # callback plugin + # (<ansible.plugins.callback.zuul_stream.CallbackModule object at' + # 0x7f89f72a20b0>): 'dict' object has no attribute 'startswith'" + # Callback Exception: + # ... 
+ # + self.assertNotIn('[WARNING]: Failure using method', console_output) + text = self._get_job_output(build) + self.assertLogLine( r'RUN START: \[untrusted : review.example.com/org/project/' r'playbooks/command.yaml@master\]', text) @@ -186,6 +214,20 @@ class FunctionalZuulStreamMixIn: self.assertLess((time2 - time1) / timedelta(milliseconds=1), 9000) + # This is from the debug: msg='{{ ansible_version }}' + # testing raw variable output. To make it version + # agnostic, match just the start of + # compute1 | ok: {'string': '2.9.27'... + + # NOTE(ianw) 2022-08-24 : I don't know why the callback + # for debug: msg= doesn't put the hostname first like + # other output. Undetermined if bug or feature. + self.assertLogLineStartsWith( + r"""\{'string': '\d.""", text) + # ... handling loops is a different path, and that does + self.assertLogLineStartsWith( + r"""compute1 \| ok: \{'string': '\d.""", text) + def test_module_exception(self): job = self._run_job('module_failure_exception') with self.jobLog(job): @@ -238,3 +280,12 @@ class TestZuulStream5(AnsibleZuulTestCase, FunctionalZuulStreamMixIn): def setUp(self): super().setUp() self._setUp() + + +class TestZuulStream6(AnsibleZuulTestCase, FunctionalZuulStreamMixIn): + ansible_version = '6' + ansible_core_version = '2.13' + + def setUp(self): + super().setUp() + self._setUp() diff --git a/tests/unit/test_circular_dependencies.py b/tests/unit/test_circular_dependencies.py index 2223008d5..292941c13 100644 --- a/tests/unit/test_circular_dependencies.py +++ b/tests/unit/test_circular_dependencies.py @@ -13,6 +13,7 @@ # You should have received a copy of the GNU General Public License # along with this software. If not, see <http://www.gnu.org/licenses/>. 
+import re import textwrap from zuul.model import PromoteEvent @@ -599,8 +600,23 @@ class TestGerritCircularDependencies(ZuulTestCase): self.assertIn("depends on a change that failed to merge", A.messages[-1]) + self.assertTrue(re.search(r'Change http://localhost:\d+/2 is needed', + A.messages[-1])) + self.assertFalse(re.search('Change .*? can not be merged', + A.messages[-1])) + self.assertIn("bundle that failed.", B.messages[-1]) + self.assertFalse(re.search('Change http://localhost:.*? is needed', + B.messages[-1])) + self.assertFalse(re.search('Change .*? can not be merged', + B.messages[-1])) + self.assertIn("bundle that failed.", C.messages[-1]) + self.assertFalse(re.search('Change http://localhost:.*? is needed', + C.messages[-1])) + self.assertFalse(re.search('Change .*? can not be merged', + C.messages[-1])) + self.assertEqual(A.data["status"], "NEW") self.assertEqual(B.data["status"], "NEW") self.assertEqual(C.data["status"], "NEW") @@ -2441,3 +2457,21 @@ class TestGithubCircularDependencies(ZuulTestCase): self.assertEqual(len(B.comments), 2) self.assertFalse(A.is_merged) self.assertFalse(B.is_merged) + + self.assertIn("part of a bundle that can not merge", + A.comments[-1]) + self.assertTrue( + re.search("Change https://github.com/gh/project/pull/1 " + "can not be merged", + A.comments[-1])) + self.assertFalse(re.search('Change .*? is needed', + A.comments[-1])) + + self.assertIn("part of a bundle that can not merge", + B.comments[-1]) + self.assertTrue( + re.search("Change https://github.com/gh/project/pull/1 " + "can not be merged", + B.comments[-1])) + self.assertFalse(re.search('Change .*? 
is needed', + B.comments[-1])) diff --git a/tests/unit/test_executor.py b/tests/unit/test_executor.py index d18cf4448..27ca75531 100644 --- a/tests/unit/test_executor.py +++ b/tests/unit/test_executor.py @@ -838,8 +838,10 @@ class TestLineMapping(AnsibleZuulTestCase): ) -class TestExecutorFacts(AnsibleZuulTestCase): +class ExecutorFactsMixin: + # These should be overridden in child classes. tenant_config_file = 'config/executor-facts/main.yaml' + ansible_major_minor = 'X.Y' def _get_file(self, build, path): p = os.path.join(build.jobdir.root, path) @@ -861,12 +863,39 @@ class TestExecutorFacts(AnsibleZuulTestCase): date_time = \ j[0]['plays'][0]['tasks'][0]['hosts']['localhost']['date_time'] self.assertEqual(18, len(date_time)) + build = self.getJobFromHistory('datetime-fact', result='SUCCESS') + with open(build.jobdir.job_output_file) as f: + output = f.read() + self.assertIn(f'Ansible version={self.ansible_major_minor}', + output) -class TestAnsibleCallbackConfigs(AnsibleZuulTestCase): +class TestExecutorFacts28(AnsibleZuulTestCase, ExecutorFactsMixin): + tenant_config_file = 'config/executor-facts/main28.yaml' + ansible_major_minor = '2.8' + +class TestExecutorFacts29(AnsibleZuulTestCase, ExecutorFactsMixin): + tenant_config_file = 'config/executor-facts/main29.yaml' + ansible_major_minor = '2.9' + + +class TestExecutorFacts5(AnsibleZuulTestCase, ExecutorFactsMixin): + tenant_config_file = 'config/executor-facts/main5.yaml' + ansible_major_minor = '2.12' + + +class TestExecutorFacts6(AnsibleZuulTestCase, ExecutorFactsMixin): + tenant_config_file = 'config/executor-facts/main6.yaml' + ansible_major_minor = '2.13' + + +class AnsibleCallbackConfigsMixin: config_file = 'zuul-executor-ansible-callback.conf' + + # These should be overridden in child classes. 
tenant_config_file = 'config/ansible-callbacks/main.yaml' + ansible_major_minor = 'X.Y' def test_ansible_callback_config(self): self.executor_server.keep_jobdir = True @@ -905,6 +934,39 @@ class TestAnsibleCallbackConfigs(AnsibleZuulTestCase): 'common-config/playbooks/callback_plugins/', c['callback_test_callback']['file_name']) self.assertTrue(os.path.isfile(callback_result_file)) + build = self.getJobFromHistory('callback-test', result='SUCCESS') + with open(build.jobdir.job_output_file) as f: + output = f.read() + self.assertIn(f'Ansible version={self.ansible_major_minor}', + output) + + +class TestAnsibleCallbackConfigs28(AnsibleZuulTestCase, + AnsibleCallbackConfigsMixin): + config_file = 'zuul-executor-ansible-callback.conf' + tenant_config_file = 'config/ansible-callbacks/main28.yaml' + ansible_major_minor = '2.8' + + +class TestAnsibleCallbackConfigs29(AnsibleZuulTestCase, + AnsibleCallbackConfigsMixin): + config_file = 'zuul-executor-ansible-callback.conf' + tenant_config_file = 'config/ansible-callbacks/main29.yaml' + ansible_major_minor = '2.9' + + +class TestAnsibleCallbackConfigs5(AnsibleZuulTestCase, + AnsibleCallbackConfigsMixin): + config_file = 'zuul-executor-ansible-callback.conf' + tenant_config_file = 'config/ansible-callbacks/main5.yaml' + ansible_major_minor = '2.12' + + +class TestAnsibleCallbackConfigs6(AnsibleZuulTestCase, + AnsibleCallbackConfigsMixin): + config_file = 'zuul-executor-ansible-callback.conf' + tenant_config_file = 'config/ansible-callbacks/main6.yaml' + ansible_major_minor = '2.13' class TestExecutorEnvironment(AnsibleZuulTestCase): diff --git a/tests/unit/test_gerrit.py b/tests/unit/test_gerrit.py index aa2bb1758..47545b9be 100644 --- a/tests/unit/test_gerrit.py +++ b/tests/unit/test_gerrit.py @@ -13,6 +13,7 @@ # under the License. 
import os +import threading import textwrap from unittest import mock @@ -868,3 +869,58 @@ class TestGerritFake(ZuulTestCase): # The Gerrit connection method filters out the queried change ret = self.fake_gerrit._getSubmittedTogether(C1, None) self.assertEqual(ret, [(4, 1)]) + + +class TestGerritConnection(ZuulTestCase): + config_file = 'zuul-gerrit-web.conf' + tenant_config_file = 'config/single-tenant/main.yaml' + + def test_zuul_query_ltime(self): + # Add a lock around the event queue iterator so that we can + # ensure that multiple events arrive before the first is + # processed. + lock = threading.Lock() + + orig_iterEvents = self.fake_gerrit.gerrit_event_connector.\ + event_queue._iterEvents + + def _iterEvents(*args, **kw): + with lock: + return orig_iterEvents(*args, **kw) + + self.patch(self.fake_gerrit.gerrit_event_connector.event_queue, + '_iterEvents', _iterEvents) + + A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A') + B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B') + B.setDependsOn(A, 1) + # Hold the connection queue processing so these events get + # processed together + with lock: + self.fake_gerrit.addEvent(A.addApproval('Code-Review', 2)) + self.fake_gerrit.addEvent(B.addApproval('Approved', 1)) + self.fake_gerrit.addEvent(B.addApproval('Code-Review', 2)) + self.waitUntilSettled() + self.assertHistory([]) + # One query for each change in the above cluster of events. 
+ self.assertEqual(A.queried, 1) + self.assertEqual(B.queried, 1) + self.fake_gerrit.addEvent(A.addApproval('Approved', 1)) + self.waitUntilSettled() + self.assertHistory([ + dict(name="project-merge", result="SUCCESS", changes="1,1"), + dict(name="project-test1", result="SUCCESS", changes="1,1"), + dict(name="project-test2", result="SUCCESS", changes="1,1"), + dict(name="project-merge", result="SUCCESS", changes="1,1 2,1"), + dict(name="project-test1", result="SUCCESS", changes="1,1 2,1"), + dict(name="project-test2", result="SUCCESS", changes="1,1 2,1"), + ], ordered=False) + # One query due to the event on change A, followed by a query + # to verify the merge. + self.assertEqual(A.queried, 3) + # No query for change B necessary since our cache is up to + # date with respect for the triggering event. One query to + # verify the merge. + self.assertEqual(B.queried, 2) + self.assertEqual(A.data['status'], 'MERGED') + self.assertEqual(B.data['status'], 'MERGED') diff --git a/tests/unit/test_inventory.py b/tests/unit/test_inventory.py index 8f5cca9ac..83a62a0e7 100644 --- a/tests/unit/test_inventory.py +++ b/tests/unit/test_inventory.py @@ -104,6 +104,7 @@ class TestInventoryGithub(TestInventoryBase): z_vars = inventory['all']['vars']['zuul'] self.assertIn('executor', z_vars) self.assertIn('src_root', z_vars['executor']) + self.assertIn('ansible_version', z_vars) self.assertIn('job', z_vars) self.assertIn('event_id', z_vars) self.assertEqual(z_vars['job'], 'single-inventory') @@ -137,6 +138,7 @@ class TestInventoryPythonPath(TestInventoryBase): z_vars = inventory['all']['vars']['zuul'] self.assertIn('executor', z_vars) self.assertIn('src_root', z_vars['executor']) + self.assertIn('ansible_version', z_vars) self.assertIn('job', z_vars) self.assertEqual(z_vars['job'], 'single-inventory') self.assertEqual(z_vars['message'], 'QQ==') @@ -167,6 +169,7 @@ class TestInventoryShellType(TestInventoryBase): z_vars = inventory['all']['vars']['zuul'] self.assertIn('executor', 
z_vars) self.assertIn('src_root', z_vars['executor']) + self.assertIn('ansible_version', z_vars) self.assertIn('job', z_vars) self.assertEqual(z_vars['job'], 'single-inventory') self.assertEqual(z_vars['message'], 'QQ==') @@ -195,6 +198,7 @@ class TestInventoryAutoPython(TestInventoryBase): self.assertIn('executor', z_vars) self.assertIn('src_root', z_vars['executor']) self.assertIn('job', z_vars) + self.assertEqual(z_vars['ansible_version'], '2.8') self.assertEqual(z_vars['job'], 'ansible-version28-inventory') self.assertEqual(z_vars['message'], 'QQ==') @@ -219,6 +223,7 @@ class TestInventoryAutoPython(TestInventoryBase): self.assertIn('executor', z_vars) self.assertIn('src_root', z_vars['executor']) self.assertIn('job', z_vars) + self.assertEqual(z_vars['ansible_version'], '2.9') self.assertEqual(z_vars['job'], 'ansible-version29-inventory') self.assertEqual(z_vars['message'], 'QQ==') @@ -243,12 +248,37 @@ class TestInventoryAutoPython(TestInventoryBase): self.assertIn('executor', z_vars) self.assertIn('src_root', z_vars['executor']) self.assertIn('job', z_vars) + self.assertEqual(z_vars['ansible_version'], '5') self.assertEqual(z_vars['job'], 'ansible-version5-inventory') self.assertEqual(z_vars['message'], 'QQ==') self.executor_server.release() self.waitUntilSettled() + def test_auto_python_ansible6_inventory(self): + inventory = self._get_build_inventory('ansible-version6-inventory') + + all_nodes = ('ubuntu-xenial',) + self.assertIn('all', inventory) + self.assertIn('hosts', inventory['all']) + self.assertIn('vars', inventory['all']) + for node_name in all_nodes: + self.assertIn(node_name, inventory['all']['hosts']) + node_vars = inventory['all']['hosts'][node_name] + self.assertEqual( + 'auto', node_vars['ansible_python_interpreter']) + + self.assertIn('zuul', inventory['all']['vars']) + z_vars = inventory['all']['vars']['zuul'] + self.assertIn('executor', z_vars) + self.assertIn('src_root', z_vars['executor']) + self.assertIn('job', z_vars) + 
self.assertEqual(z_vars['job'], 'ansible-version6-inventory') + self.assertEqual(z_vars['message'], 'QQ==') + + self.executor_server.release() + self.waitUntilSettled() + class TestInventory(TestInventoryBase): diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py index 66c508fea..3445e9dc6 100644 --- a/tests/unit/test_scheduler.py +++ b/tests/unit/test_scheduler.py @@ -5343,6 +5343,11 @@ For CI problems and help debugging, contact ci@example.org""" self.assertIn('Error merging gerrit/org/project', B.messages[0]) self.assertNotIn('logs.example.com', B.messages[0]) self.assertNotIn('SKIPPED', B.messages[0]) + buildsets = list( + self.scheds.first.connections.connections[ + 'database'].getBuildsets()) + self.assertEqual(buildsets[0].result, 'MERGE_CONFLICT') + self.assertIn('This change or one of', buildsets[0].message) def test_submit_failure(self): A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A') @@ -5357,6 +5362,44 @@ For CI problems and help debugging, contact ci@example.org""" 'database'].getBuildsets()) self.assertEqual(buildsets[0].result, 'MERGE_FAILURE') + @simple_layout('layouts/timer-freeze-job-failure.yaml') + def test_periodic_freeze_job_failure(self): + self.waitUntilSettled() + + for x in iterate_timeout(30, 'buildset complete'): + buildsets = list( + self.scheds.first.connections.connections[ + 'database'].getBuildsets()) + if buildsets: + break + # Stop queuing timer triggered jobs so that the assertions + # below don't race against more jobs being queued. + self.commitConfigUpdate('org/common-config', 'layouts/no-timer.yaml') + self.scheds.execute(lambda app: app.sched.reconfigure(app.config)) + self.waitUntilSettled() + # If APScheduler is in mid-event when we remove the job, we + # can end up with one more event firing, so give it an extra + # second to settle. 
+ time.sleep(3) + self.waitUntilSettled() + + self.assertEqual(buildsets[0].result, 'CONFIG_ERROR') + self.assertIn('Job project-test2 depends on project-test1 ' + 'which was not run', buildsets[0].message) + + @simple_layout('layouts/freeze-job-failure.yaml') + def test_freeze_job_failure(self): + A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A') + self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1)) + self.waitUntilSettled() + + buildsets = list( + self.scheds.first.connections.connections[ + 'database'].getBuildsets()) + self.assertEqual(buildsets[0].result, 'CONFIG_ERROR') + self.assertIn('Job project-test2 depends on project-test1 ' + 'which was not run', buildsets[0].message) + @simple_layout('layouts/nonvoting-pipeline.yaml') def test_nonvoting_pipeline(self): "Test that a nonvoting pipeline (experimental) can still report" diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py index a89bb3007..d35b23285 100644 --- a/tests/unit/test_v3.py +++ b/tests/unit/test_v3.py @@ -3733,9 +3733,9 @@ class TestInRepoJoin(ZuulTestCase): class FunctionalAnsibleMixIn(object): # A temporary class to hold new tests while others are disabled + # These should be overridden in child classes. tenant_config_file = 'config/ansible/main.yaml' - # This should be overriden in child classes. - ansible_version = '2.9' + ansible_major_minor = 'X.Y' def test_playbook(self): # This test runs a bit long and needs extra time. 
@@ -3826,6 +3826,7 @@ class FunctionalAnsibleMixIn(object): self.assertEqual(build_bubblewrap.result, 'SUCCESS') def test_repo_ansible(self): + self.executor_server.keep_jobdir = True A = self.fake_gerrit.addFakeChange('org/ansible', 'master', 'A') self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1)) self.waitUntilSettled() @@ -3835,18 +3836,31 @@ class FunctionalAnsibleMixIn(object): self.assertHistory([ dict(name='hello-ansible', result='SUCCESS', changes='1,1'), ]) + build = self.getJobFromHistory('hello-ansible', result='SUCCESS') + with open(build.jobdir.job_output_file) as f: + output = f.read() + self.assertIn(f'Ansible version={self.ansible_major_minor}', + output) class TestAnsible28(AnsibleZuulTestCase, FunctionalAnsibleMixIn): - ansible_version = '2.8' + tenant_config_file = 'config/ansible/main28.yaml' + ansible_major_minor = '2.8' class TestAnsible29(AnsibleZuulTestCase, FunctionalAnsibleMixIn): - ansible_version = '2.9' + tenant_config_file = 'config/ansible/main29.yaml' + ansible_major_minor = '2.9' class TestAnsible5(AnsibleZuulTestCase, FunctionalAnsibleMixIn): - ansible_version = '5' + tenant_config_file = 'config/ansible/main5.yaml' + ansible_major_minor = '2.12' + + +class TestAnsible6(AnsibleZuulTestCase, FunctionalAnsibleMixIn): + tenant_config_file = 'config/ansible/main6.yaml' + ansible_major_minor = '2.13' class TestPrePlaybooks(AnsibleZuulTestCase): @@ -7942,6 +7956,7 @@ class TestAnsibleVersion(AnsibleZuulTestCase): dict(name='ansible-28', result='SUCCESS', changes='1,1'), dict(name='ansible-29', result='SUCCESS', changes='1,1'), dict(name='ansible-5', result='SUCCESS', changes='1,1'), + dict(name='ansible-6', result='SUCCESS', changes='1,1'), ], ordered=False) @@ -7963,6 +7978,7 @@ class TestDefaultAnsibleVersion(AnsibleZuulTestCase): dict(name='ansible-28', result='SUCCESS', changes='1,1'), dict(name='ansible-29', result='SUCCESS', changes='1,1'), dict(name='ansible-5', result='SUCCESS', changes='1,1'), + dict(name='ansible-6', 
result='SUCCESS', changes='1,1'), ], ordered=False) diff --git a/tests/unit/test_web.py b/tests/unit/test_web.py index 53546c1de..b15c01a69 100644 --- a/tests/unit/test_web.py +++ b/tests/unit/test_web.py @@ -1005,6 +1005,11 @@ class TestWeb(BaseTestWeb): job = self.get_url("api/tenant/tenant-one/job/noop").json() self.assertEqual("noop", job[0]["name"]) + @simple_layout('layouts/special-characters-job.yaml') + def test_web_job_special_characters(self): + job = self.get_url("api/tenant/tenant-one/job/a%40b%2Fc").json() + self.assertEqual("a@b/c", job[0]["name"]) + def test_freeze_jobs(self): # Test can get a list of the jobs for a given project+pipeline+branch. resp = self.get_url( @@ -1061,7 +1066,7 @@ class TestWeb(BaseTestWeb): job_params = { 'job': 'project-test1', - 'ansible_version': '2.9', + 'ansible_version': '5', 'timeout': None, 'post_timeout': None, 'items': [], @@ -1159,7 +1164,7 @@ class TestWeb(BaseTestWeb): "noop") job_params = { - 'ansible_version': '2.9', + 'ansible_version': '5', 'branch': 'master', 'extra_vars': {}, 'group_vars': {}, diff --git a/tools/test-setup-docker.sh b/tools/test-setup-docker.sh index 81f4b2977..a0fcf9f5a 100755 --- a/tools/test-setup-docker.sh +++ b/tools/test-setup-docker.sh @@ -8,7 +8,11 @@ # environment. set -xeu -ROOTCMD=${ROOTCMD:-} + +# Default ROOTCMD to the 'env' command, otherwise variable assignments will be +# interpreted as command when no ROOTCMD is given. The reason for that is +# Bash's simple command expansion. +ROOTCMD=${ROOTCMD:-env} cd $(dirname $0) SCRIPT_DIR="$(pwd)" diff --git a/web/src/containers/build/Console.jsx b/web/src/containers/build/Console.jsx index 0b365df62..194e314ee 100644 --- a/web/src/containers/build/Console.jsx +++ b/web/src/containers/build/Console.jsx @@ -50,6 +50,19 @@ class TaskOutput extends React.Component { renderResults(value) { const interesting_results = [] + + // This was written to assume "value" is an array of + // key/value mappings to output. 
This seems to be a + // good assumption for the most part, but "package:" for + // whatever reason outputs a result that is just an array of + // strings with what packages were installed. So, if we + // see an array of strings as the value, we just swizzle + // that into a key/value so it displays usefully. + const isAllStrings = value.every(i => typeof i === 'string') + if (isAllStrings) { + value = [ {output: [...value]} ] + } + value.forEach((result, idx) => { const keys = Object.entries(result).filter( ([key, value]) => shouldIncludeKey( diff --git a/web/src/containers/jobs/Jobs.jsx b/web/src/containers/jobs/Jobs.jsx index d7ab4bc69..71395f1d1 100644 --- a/web/src/containers/jobs/Jobs.jsx +++ b/web/src/containers/jobs/Jobs.jsx @@ -62,7 +62,7 @@ class JobsList extends React.Component { const createNode = (job, extra) => ({ text: ( <React.Fragment> - <Link to={linkPrefix + job.name}>{job.name}</Link> + <Link to={linkPrefix + encodeURIComponent(job.name)}>{job.name}</Link> {extra && (<span> ({extra})</span>)} {job.description && ( <span style={{marginLeft: '10px'}}>{job.description}</span> diff --git a/zuul/ansible/6/action/__init__.py b/zuul/ansible/6/action/__init__.py new file mode 120000 index 000000000..4048e7ac1 --- /dev/null +++ b/zuul/ansible/6/action/__init__.py @@ -0,0 +1 @@ +../../base/action/__init__.py
\ No newline at end of file diff --git a/zuul/ansible/6/action/command.py b/zuul/ansible/6/action/command.py new file mode 120000 index 000000000..56c6b636f --- /dev/null +++ b/zuul/ansible/6/action/command.py @@ -0,0 +1 @@ +../../base/action/command.py
\ No newline at end of file diff --git a/zuul/ansible/6/action/command.pyi b/zuul/ansible/6/action/command.pyi new file mode 120000 index 000000000..a003281ca --- /dev/null +++ b/zuul/ansible/6/action/command.pyi @@ -0,0 +1 @@ +../../base/action/command.pyi
\ No newline at end of file diff --git a/zuul/ansible/6/action/zuul_return.py b/zuul/ansible/6/action/zuul_return.py new file mode 120000 index 000000000..83c2fc619 --- /dev/null +++ b/zuul/ansible/6/action/zuul_return.py @@ -0,0 +1 @@ +../../base/action/zuul_return.py
\ No newline at end of file diff --git a/zuul/ansible/6/callback/__init__.py b/zuul/ansible/6/callback/__init__.py new file mode 120000 index 000000000..00b974388 --- /dev/null +++ b/zuul/ansible/6/callback/__init__.py @@ -0,0 +1 @@ +../../base/callback/__init__.py
\ No newline at end of file diff --git a/zuul/ansible/6/callback/zuul_json.py b/zuul/ansible/6/callback/zuul_json.py new file mode 120000 index 000000000..b0a07779b --- /dev/null +++ b/zuul/ansible/6/callback/zuul_json.py @@ -0,0 +1 @@ +../../base/callback/zuul_json.py
\ No newline at end of file diff --git a/zuul/ansible/6/callback/zuul_stream.py b/zuul/ansible/6/callback/zuul_stream.py new file mode 120000 index 000000000..f75561bf4 --- /dev/null +++ b/zuul/ansible/6/callback/zuul_stream.py @@ -0,0 +1 @@ +../../base/callback/zuul_stream.py
\ No newline at end of file diff --git a/zuul/ansible/6/callback/zuul_unreachable.py b/zuul/ansible/6/callback/zuul_unreachable.py new file mode 120000 index 000000000..205baca6f --- /dev/null +++ b/zuul/ansible/6/callback/zuul_unreachable.py @@ -0,0 +1 @@ +../../base/callback/zuul_unreachable.py
\ No newline at end of file diff --git a/zuul/ansible/6/filter/__init__.py b/zuul/ansible/6/filter/__init__.py new file mode 120000 index 000000000..f80a4da61 --- /dev/null +++ b/zuul/ansible/6/filter/__init__.py @@ -0,0 +1 @@ +../../base/filter/__init__.py
\ No newline at end of file diff --git a/zuul/ansible/6/filter/zuul_filters.py b/zuul/ansible/6/filter/zuul_filters.py new file mode 120000 index 000000000..d406e5fe6 --- /dev/null +++ b/zuul/ansible/6/filter/zuul_filters.py @@ -0,0 +1 @@ +../../base/filter/zuul_filters.py
\ No newline at end of file diff --git a/zuul/ansible/6/library/__init__.py b/zuul/ansible/6/library/__init__.py new file mode 120000 index 000000000..0b68ce0f4 --- /dev/null +++ b/zuul/ansible/6/library/__init__.py @@ -0,0 +1 @@ +../../base/library/__init__.py
\ No newline at end of file diff --git a/zuul/ansible/6/library/command.py b/zuul/ansible/6/library/command.py new file mode 120000 index 000000000..9c7633169 --- /dev/null +++ b/zuul/ansible/6/library/command.py @@ -0,0 +1 @@ +../../base/library/command.py
\ No newline at end of file diff --git a/zuul/ansible/6/library/zuul_console.py b/zuul/ansible/6/library/zuul_console.py new file mode 120000 index 000000000..7c905e0f9 --- /dev/null +++ b/zuul/ansible/6/library/zuul_console.py @@ -0,0 +1 @@ +../../base/library/zuul_console.py
\ No newline at end of file diff --git a/zuul/ansible/6/logconfig.py b/zuul/ansible/6/logconfig.py new file mode 120000 index 000000000..767cb2e81 --- /dev/null +++ b/zuul/ansible/6/logconfig.py @@ -0,0 +1 @@ +../logconfig.py
\ No newline at end of file diff --git a/zuul/ansible/6/paths.py b/zuul/ansible/6/paths.py new file mode 120000 index 000000000..dbdb1858e --- /dev/null +++ b/zuul/ansible/6/paths.py @@ -0,0 +1 @@ +../paths.py
\ No newline at end of file diff --git a/zuul/ansible/base/callback/zuul_stream.py b/zuul/ansible/base/callback/zuul_stream.py index f31983ed6..b5c14691b 100644 --- a/zuul/ansible/base/callback/zuul_stream.py +++ b/zuul/ansible/base/callback/zuul_stream.py @@ -43,13 +43,18 @@ import threading import time from ansible.plugins.callback import default +from ansible.module_utils._text import to_text from zuul.ansible import paths from zuul.ansible import logconfig -LOG_STREAM_PORT = int(os.environ.get("ZUUL_CONSOLE_PORT", 19885)) LOG_STREAM_VERSION = 0 +# This is intended to be only used for testing where we change the +# port so we can run another instance that doesn't conflict with one +# setup by the test environment +LOG_STREAM_PORT = int(os.environ.get("ZUUL_CONSOLE_PORT", 19885)) + def zuul_filter_result(result): """Remove keys from shell/command output. @@ -121,6 +126,21 @@ class CallbackModule(default.CallbackModule): self._logger = logging.getLogger('zuul.executor.ansible') def _log(self, msg, ts=None, job=True, executor=False, debug=False): + # With the default "linear" strategy (and likely others), + # Ansible will send the on_task_start callback, and then fork + # a worker process to execute that task. Since we spawn a + # thread in the on_task_start callback, we can end up emitting + # a log message in this method while Ansible is forking. If a + # forked process inherits a Python file object (i.e., stdout) + # that is locked by a thread that doesn't exist in the fork + # (i.e., this one), it can deadlock when trying to flush the + # file object. To minimize the chances of that happening, we + # should avoid using _display outside the main thread. + # Therefore: + + # Do not set executor=True from any calls from a thread + # spawned in this callback. 
+ msg = msg.rstrip() if job: now = ts or datetime.datetime.now() @@ -143,10 +163,6 @@ class CallbackModule(default.CallbackModule): s.settimeout(None) return s except socket.timeout: - self._log( - "Timeout exception waiting for the logger. " - "Please check connectivity to [%s:%s]" - % (ip, port), executor=True) self._log_streamline( "localhost", "Timeout exception waiting for the logger. " @@ -155,16 +171,12 @@ class CallbackModule(default.CallbackModule): return None except Exception: if logger_retries % 10 == 0: - self._log("[%s] Waiting on logger" % host, - executor=True, debug=True) + self._log("[%s] Waiting on logger" % host) logger_retries += 1 time.sleep(0.1) continue def _read_log(self, host, ip, port, log_id, task_name, hosts): - self._log("[%s] Starting to log %s for task %s" - % (host, log_id, task_name), job=False, executor=True) - s = self._read_log_connect(host, ip, port) if s is None: # Can't connect; _read_log_connect() already logged an @@ -188,9 +200,6 @@ class CallbackModule(default.CallbackModule): return else: self._zuul_console_version = int(buff) - self._log('[%s] Reports streaming version: %d' % - (host, self._zuul_console_version), - job=False, executor=True) if self._zuul_console_version >= 1: msg = 's:%s\n' % log_id @@ -315,13 +324,13 @@ class CallbackModule(default.CallbackModule): hosts = self._get_task_hosts(task) for host, inventory_hostname in hosts: port = LOG_STREAM_PORT - if host in ('localhost', '127.0.0.1'): + if (host in ('localhost', '127.0.0.1')): # Don't try to stream from localhost continue ip = play_vars[host].get( 'ansible_host', play_vars[host].get( 'ansible_inventory_host')) - if ip in ('localhost', '127.0.0.1'): + if (ip in ('localhost', '127.0.0.1')): # Don't try to stream from localhost continue if play_vars[host].get('ansible_connection') in ('winrm',): @@ -349,6 +358,9 @@ class CallbackModule(default.CallbackModule): log_id = "%s-%s-%s" % ( self._task._uuid, count, log_host) + self._log("[%s] Starting to log %s 
for task %s" + % (host, log_id, task_name), + job=False, executor=True) streamer = threading.Thread( target=self._read_log, args=( host, ip, port, log_id, task_name, hosts)) @@ -369,7 +381,7 @@ class CallbackModule(default.CallbackModule): streamer.join(30) if streamer.is_alive(): msg = "[Zuul] Log Stream did not terminate" - self._log(msg, job=True, executor=True) + self._log(msg) self._streamers_stop = False def _process_result_for_localhost(self, result, is_task=True): @@ -492,8 +504,7 @@ class CallbackModule(default.CallbackModule): if result._task.loop and 'results' in result_dict: # items have their own events pass - - elif result_dict.get('msg', '').startswith('MODULE FAILURE'): + elif to_text(result_dict.get('msg', '')).startswith('MODULE FAILURE'): self._log_module_failure(result, result_dict) elif result._task.action == 'debug': # this is a debug statement, handle it special @@ -512,7 +523,7 @@ class CallbackModule(default.CallbackModule): # user provided. Note that msg may be a multi line block quote # so we handle that here as well. if keyname == 'msg': - msg_lines = result_dict['msg'].rstrip().split('\n') + msg_lines = to_text(result_dict['msg']).rstrip().split('\n') for msg_line in msg_lines: self._log(msg=msg_line) else: @@ -535,10 +546,18 @@ class CallbackModule(default.CallbackModule): elif result_dict.get('msg') == 'All items completed': self._log_message(result, result_dict['msg']) else: - self._log_message( - result, - "Runtime: {delta}".format( - **result_dict)) + if 'delta' in result_dict: + self._log_message( + result, + "Runtime: {delta}".format( + **result_dict)) + else: + # NOTE(ianw) 2022-08-24 : *Fairly* sure that you only + # fall into here when the call actually fails (and has + # no start/end time), but it is ignored by + # failed_when matching. 
+ self._log_message(result, msg='ERROR (ignored)', + result_dict=result_dict) def v2_runner_item_on_ok(self, result): result_dict = dict(result._result) @@ -554,7 +573,7 @@ class CallbackModule(default.CallbackModule): # changes. loop_var = result_dict.get('ansible_loop_var', 'item') - if result_dict.get('msg', '').startswith('MODULE FAILURE'): + if to_text(result_dict.get('msg', '')).startswith('MODULE FAILURE'): self._log_module_failure(result, result_dict) elif result._task.action not in ('command', 'shell', 'win_command', 'win_shell'): @@ -597,7 +616,7 @@ class CallbackModule(default.CallbackModule): # changes. loop_var = result_dict.get('ansible_loop_var', 'item') - if result_dict.get('msg', '').startswith('MODULE FAILURE'): + if to_text(result_dict.get('msg', '')).startswith('MODULE FAILURE'): self._log_module_failure(result, result_dict) elif result._task.action not in ('command', 'shell', 'win_command', 'win_shell'): @@ -730,7 +749,13 @@ class CallbackModule(default.CallbackModule): msg = result_dict['msg'] result_dict = None if msg: - msg_lines = msg.rstrip().split('\n') + # ensure msg is a string; e.g. + # + # debug: + # msg: '{{ var }}' + # + # may not be! 
+ msg_lines = to_text(msg).rstrip().split('\n') if len(msg_lines) > 1: self._log("{host} | {status}:".format( host=hostname, status=status)) diff --git a/zuul/configloader.py b/zuul/configloader.py index eb468518f..365967d56 100644 --- a/zuul/configloader.py +++ b/zuul/configloader.py @@ -1176,6 +1176,7 @@ class PipelineParser(object): 'success': 'success_actions', 'failure': 'failure_actions', 'merge-conflict': 'merge_conflict_actions', + 'config-error': 'config_error_actions', 'no-jobs': 'no_jobs_actions', 'disabled': 'disabled_actions', 'dequeue': 'dequeue_actions', @@ -1250,7 +1251,7 @@ class PipelineParser(object): pipeline['trigger'] = vs.Required(self.getDriverSchema('trigger')) for action in ['enqueue', 'start', 'success', 'failure', 'merge-conflict', 'merge-failure', 'no-jobs', - 'disabled', 'dequeue']: + 'disabled', 'dequeue', 'config-error']: pipeline[action] = self.getDriverSchema('reporter') return vs.Schema(pipeline) @@ -1318,6 +1319,10 @@ class PipelineParser(object): if not pipeline.merge_conflict_actions: pipeline.merge_conflict_actions = pipeline.failure_actions + # If config-error actions aren't explicit, use the failure actions + if not pipeline.config_error_actions: + pipeline.config_error_actions = pipeline.failure_actions + pipeline.disable_at = conf.get( 'disable-after-consecutive-failures', None) diff --git a/zuul/driver/gerrit/gerritconnection.py b/zuul/driver/gerrit/gerritconnection.py index 1ec334915..2213293f8 100644 --- a/zuul/driver/gerrit/gerritconnection.py +++ b/zuul/driver/gerrit/gerritconnection.py @@ -103,10 +103,12 @@ class GerritChangeData(object): SSH = 1 HTTP = 2 - def __init__(self, fmt, data, related=None, files=None): + def __init__(self, fmt, data, related=None, files=None, + zuul_query_ltime=None): self.format = fmt self.data = data self.files = files + self.zuul_query_ltime = zuul_query_ltime if fmt == self.SSH: self.parseSSH(data) @@ -329,19 +331,20 @@ class GerritEventConnector(threading.Thread): 
self.connection.clearConnectionCacheOnBranchEvent(event) - self._getChange(event) + self._getChange(event, connection_event.zuul_event_ltime) self.connection.logEvent(event) self.connection.sched.addTriggerEvent( self.connection.driver_name, event ) - def _getChange(self, event): + def _getChange(self, event, connection_event_ltime): # Grab the change if we are managing the project or if it exists in the # cache as it may be a dependency if event.change_number: refresh = True change_key = self.connection.source.getChangeKey(event) - if self.connection._change_cache.get(change_key) is None: + change = self.connection._change_cache.get(change_key) + if change is None: refresh = False for tenant in self.connection.sched.abide.tenants.values(): # TODO(fungi): it would be better to have some simple means @@ -353,6 +356,13 @@ class GerritEventConnector(threading.Thread): event.project_name))): refresh = True break + else: + # We have a cache entry for this change. Get the + # query ltime for the cache entry; if it's after the + # event ltime, we don't need to refresh. + if (change.zuul_query_ltime and + change.zuul_query_ltime > connection_event_ltime): + refresh = False if refresh: # Call _getChange for the side effect of updating the @@ -1418,15 +1428,20 @@ class GerritConnection(ZKChangeCacheMixin, ZKBranchCacheMixin, BaseConnection): def queryChange(self, number, event=None): for attempt in range(3): + # Get a query ltime -- any events before this point should be + # included in our change data. 
+ zuul_query_ltime = self.sched.zk_client.getCurrentLtime() try: if self.session: data, related, files = self.queryChangeHTTP( number, event=event) return GerritChangeData(GerritChangeData.HTTP, - data, related, files) + data, related, files, + zuul_query_ltime=zuul_query_ltime) else: data = self.queryChangeSSH(number, event=event) - return GerritChangeData(GerritChangeData.SSH, data) + return GerritChangeData(GerritChangeData.SSH, data, + zuul_query_ltime=zuul_query_ltime) except Exception: if attempt >= 3: raise diff --git a/zuul/driver/gerrit/gerritmodel.py b/zuul/driver/gerrit/gerritmodel.py index f0ec32f77..0ac3e7f9d 100644 --- a/zuul/driver/gerrit/gerritmodel.py +++ b/zuul/driver/gerrit/gerritmodel.py @@ -35,8 +35,10 @@ class GerritChange(Change): self.approvals = [] self.missing_labels = set() self.commit = None + self.zuul_query_ltime = None def update(self, data, connection): + self.zuul_query_ltime = data.zuul_query_ltime if data.format == data.SSH: self.updateFromSSH(data.data, connection) else: @@ -51,6 +53,7 @@ class GerritChange(Change): "approvals": self.approvals, "missing_labels": list(self.missing_labels), "commit": self.commit, + "zuul_query_ltime": self.zuul_query_ltime, }) return d @@ -62,6 +65,7 @@ class GerritChange(Change): self.approvals = data["approvals"] self.missing_labels = set(data["missing_labels"]) self.commit = data.get("commit") + self.zuul_query_ltime = data.get("zuul_query_ltime") def updateFromSSH(self, data, connection): if self.patchset is None: diff --git a/zuul/driver/gerrit/gerritreporter.py b/zuul/driver/gerrit/gerritreporter.py index b99133dce..c38a9484a 100644 --- a/zuul/driver/gerrit/gerritreporter.py +++ b/zuul/driver/gerrit/gerritreporter.py @@ -36,6 +36,9 @@ class GerritReporter(BaseReporter): self._checks_api = action.pop('checks-api', None) self._labels = action + def __repr__(self): + return f"<GerritReporter: {self._action}>" + def report(self, item, phase1=True, phase2=True): """Send a message to gerrit.""" log 
= get_annotated_logger(self.log, item.event) diff --git a/zuul/driver/sql/sqlreporter.py b/zuul/driver/sql/sqlreporter.py index cf75a7495..d16f50fcb 100644 --- a/zuul/driver/sql/sqlreporter.py +++ b/zuul/driver/sql/sqlreporter.py @@ -176,6 +176,8 @@ class SQLReporter(BaseReporter): start = datetime.datetime.fromtimestamp(start_time, tz=datetime.timezone.utc) buildset = build.build_set + if not buildset: + return db_buildset = db.getBuildset( tenant=buildset.item.pipeline.tenant.name, uuid=buildset.uuid) if not db_buildset: diff --git a/zuul/executor/server.py b/zuul/executor/server.py index e00612e9e..89f93b8c5 100644 --- a/zuul/executor/server.py +++ b/zuul/executor/server.py @@ -1837,6 +1837,11 @@ class AnsibleJob(object): if not self.jobdir.cleanup_playbooks: return + if not self.frozen_hostvars: + # Job failed before we could load the frozen hostvars. + # This means we can't run any cleanup playbooks. + return + # TODO: make this configurable cleanup_timeout = 300 @@ -2443,6 +2448,7 @@ class AnsibleJob(object): work_root=self.jobdir.work_root, result_data_file=self.jobdir.result_data_file, inventory_file=self.jobdir.inventory) + zuul_vars['ansible_version'] = self.ansible_version # Add playbook_context info zuul_vars['playbook_context'] = dict( diff --git a/zuul/lib/ansible-config.conf b/zuul/lib/ansible-config.conf index 5bc3bd325..35a9f59fe 100644 --- a/zuul/lib/ansible-config.conf +++ b/zuul/lib/ansible-config.conf @@ -1,6 +1,6 @@ # This file describes the currently supported ansible versions [common] -default_version = 2.9 +default_version = 5 # OpenStackSDK 0.99.0 coincides with CORS header problems in some providers requirements = openstacksdk<0.99 openshift jmespath google-cloud-storage pywinrm boto3 azure-storage-blob ibm-cos-sdk netaddr passlib @@ -8,11 +8,16 @@ requirements = openstacksdk<0.99 openshift jmespath google-cloud-storage pywinrm # Ansible 2.8.16 breaks the k8s connection plugin # Jinja 3.1.1 is incompatible with 2.8 requirements = 
ansible>=2.8,<2.9,!=2.8.16 Jinja2<3.1.0 +deprecated = true [2.9] # Ansible 2.9.14 breaks the k8s connection plugin # https://github.com/ansible/ansible/issues/72171 requirements = ansible>=2.9,<2.10,!=2.9.14 +deprecated = true [5] requirements = ansible>=5.0,<6.0 + +[6] +requirements = ansible>=6.0,<7.0 diff --git a/zuul/manager/__init__.py b/zuul/manager/__init__.py index a66f5ad22..637d31d9f 100644 --- a/zuul/manager/__init__.py +++ b/zuul/manager/__init__.py @@ -322,9 +322,10 @@ class PipelineManager(metaclass=ABCMeta): (item, ret)) def reportNormalBuildsetEnd(self, build_set, action, final, result=None): - # Report a buildset end, but only if there are jobs - if (build_set.job_graph and - len(build_set.job_graph.jobs) > 0): + # Report a buildset end if there are jobs or errors + if ((build_set.job_graph and len(build_set.job_graph.jobs) > 0) or + build_set.config_errors or + build_set.unable_to_merge): self.sql.reportBuildsetEnd(build_set, action, final, result) @@ -379,9 +380,16 @@ class PipelineManager(metaclass=ABCMeta): dependency_graph=None): return True - def checkForChangesNeededBy(self, change, change_queue, event, + def getMissingNeededChanges(self, change, change_queue, event, dependency_graph=None): - return True + """Check that all needed changes are ahead in the queue. + + Return a list of any that are missing. If it is not possible + to correct the missing changes, "abort" will be true. 
+ + :returns: (abort, needed_changes) + """ + return False, [] def getFailingDependentItems(self, item, nnfi): return None @@ -690,9 +698,11 @@ class PipelineManager(metaclass=ABCMeta): return queue_config.dependencies_by_topic - def canMergeCycle(self, bundle): - """Check if the cycle still fulfills the pipeline's ready criteria.""" - return True + def getNonMergeableCycleChanges(self, bundle): + + """Return changes in the cycle that do not fulfill + the pipeline's ready criteria.""" + return [] def updateBundle(self, item, change_queue, cycle): if not cycle: @@ -1474,9 +1484,9 @@ class PipelineManager(metaclass=ABCMeta): item.change, item.event) else: meets_reqs = True - needs_met = self.checkForChangesNeededBy(item.change, change_queue, - item.event) is True - if not (meets_reqs and needs_met): + abort, needs_changes = self.getMissingNeededChanges( + item.change, change_queue, item.event) + if not (meets_reqs and not needs_changes): # It's not okay to enqueue this change, we should remove it. log.info("Dequeuing change %s because " "it can no longer merge" % item.change) @@ -1486,7 +1496,12 @@ class PipelineManager(metaclass=ABCMeta): elif not meets_reqs: item.setDequeuedMissingRequirements() else: - item.setDequeuedNeedingChange() + clist = ', '.join([c.url for c in needs_changes]) + if len(needs_changes) > 1: + msg = f'Changes {clist} are needed.' + else: + msg = f'Change {clist} is needed.' + item.setDequeuedNeedingChange(msg) if item.live: try: self.reportItem(item) @@ -1558,17 +1573,22 @@ class PipelineManager(metaclass=ABCMeta): ) # Before starting to merge the cycle items, make sure they # can still be merged, to reduce the chance of a partial merge. 
- if ( - can_report - and not item.bundle.started_reporting - and not self.canMergeCycle(item.bundle) - ): - item.bundle.cannot_merge = True - failing_reasons.append("cycle can not be merged") - log.debug( - "Dequeuing item %s because cycle can no longer merge", - item - ) + if can_report and not item.bundle.started_reporting: + non_mergeable_cycle_changes = self.getNonMergeableCycleChanges( + item.bundle) + if non_mergeable_cycle_changes: + clist = ', '.join([ + c.url for c in non_mergeable_cycle_changes]) + if len(non_mergeable_cycle_changes) > 1: + msg = f'Changes {clist} can not be merged.' + else: + msg = f'Change {clist} can not be merged.' + item.bundle.cannot_merge = msg + failing_reasons.append("cycle can not be merged") + log.debug( + "Dequeuing item %s because cycle can no longer merge", + item + ) item.bundle.started_reporting = can_report if can_report: @@ -2013,9 +2033,8 @@ class PipelineManager(metaclass=ABCMeta): item.setReportedResult('NO_JOBS') elif item.getConfigErrors(): log.debug("Invalid config for change %s", item.change) - # TODOv3(jeblair): consider a new reporter action for this - action = 'merge-conflict' - actions = self.pipeline.merge_conflict_actions + action = 'config-error' + actions = self.pipeline.config_error_actions item.setReportedResult('CONFIG_ERROR') elif item.didMergerFail(): log.debug("Merge conflict") diff --git a/zuul/manager/dependent.py b/zuul/manager/dependent.py index d4d4f05dd..db1bf48b1 100644 --- a/zuul/manager/dependent.py +++ b/zuul/manager/dependent.py @@ -56,8 +56,10 @@ class DependentPipelineManager(SharedQueuePipelineManager): return False return True - def canMergeCycle(self, bundle): - """Check if the cycle still fulfills the pipeline's ready criteria.""" + def getNonMergeableCycleChanges(self, bundle): + """Return changes in the cycle that do not fulfill + the pipeline's ready criteria.""" + changes = [] for item in bundle.items: source = item.change.project.source if not source.canMerge( @@ -68,8 +70,8 @@ 
class DependentPipelineManager(SharedQueuePipelineManager): ): log = get_annotated_logger(self.log, item.event) log.debug("Change %s can no longer be merged", item.change) - return False - return True + changes.append(item.change) + return changes def enqueueChangesBehind(self, change, event, quiet, ignore_requirements, change_queue, history=None, @@ -149,13 +151,17 @@ class DependentPipelineManager(SharedQueuePipelineManager): # Don't enqueue dependencies ahead of a non-change ref. return True - ret = self.checkForChangesNeededBy(change, change_queue, event, - dependency_graph=dependency_graph, - warnings=warnings) - if ret in [True, False]: - return ret - log.debug(" Changes %s must be merged ahead of %s", ret, change) - for needed_change in ret: + abort, needed_changes = self.getMissingNeededChanges( + change, change_queue, event, + dependency_graph=dependency_graph, + warnings=warnings) + if abort: + return False + if not needed_changes: + return True + log.debug(" Changes %s must be merged ahead of %s", + needed_changes, change) + for needed_change in needed_changes: # If the change is already in the history, but the change also has # a git level dependency, we need to enqueue it before the current # change. 
@@ -169,7 +175,7 @@ class DependentPipelineManager(SharedQueuePipelineManager): return False return True - def checkForChangesNeededBy(self, change, change_queue, event, + def getMissingNeededChanges(self, change, change_queue, event, dependency_graph=None, warnings=None): log = get_annotated_logger(self.log, event) @@ -178,11 +184,12 @@ class DependentPipelineManager(SharedQueuePipelineManager): log.debug("Checking for changes needed by %s:" % change) if not hasattr(change, 'needs_changes'): log.debug(" %s does not support dependencies", type(change)) - return True + return False, [] if not change.needs_changes: log.debug(" No changes needed") - return True + return False, [] changes_needed = [] + abort = False # Ignore supplied change_queue with self.getChangeQueue(change, event) as change_queue: for needed_change in self.resolveChangeReferences( @@ -212,10 +219,12 @@ class DependentPipelineManager(SharedQueuePipelineManager): log.debug(" " + msg) if warnings is not None: warnings.append(msg) - return False + changes_needed.append(needed_change) + abort = True if not needed_change.is_current_patchset: log.debug(" Needed change is not the current patchset") - return False + changes_needed.append(needed_change) + abort = True if self.isChangeAlreadyInQueue(needed_change, change_queue): log.debug(" Needed change is already ahead in the queue") continue @@ -229,10 +238,9 @@ class DependentPipelineManager(SharedQueuePipelineManager): # The needed change can't be merged. 
log.debug(" Change %s is needed but can not be merged", needed_change) - return False - if changes_needed: - return changes_needed - return True + changes_needed.append(needed_change) + abort = True + return abort, changes_needed def getFailingDependentItems(self, item, nnfi): if not hasattr(item.change, 'needs_changes'): diff --git a/zuul/manager/independent.py b/zuul/manager/independent.py index 7cd14ffd5..b70e9184b 100644 --- a/zuul/manager/independent.py +++ b/zuul/manager/independent.py @@ -49,12 +49,16 @@ class IndependentPipelineManager(PipelineManager): # Don't enqueue dependencies ahead of a non-change ref. return True - ret = self.checkForChangesNeededBy(change, change_queue, event, - dependency_graph=dependency_graph) - if ret in [True, False]: - return ret - log.debug(" Changes %s must be merged ahead of %s" % (ret, change)) - for needed_change in ret: + abort, needed_changes = self.getMissingNeededChanges( + change, change_queue, event, + dependency_graph=dependency_graph) + if abort: + return False + if not needed_changes: + return True + log.debug(" Changes %s must be merged ahead of %s" % ( + needed_changes, change)) + for needed_change in needed_changes: # This differs from the dependent pipeline by enqueuing # changes ahead as "not live", that is, not intended to # have jobs run. Also, pipeline requirements are always @@ -69,22 +73,23 @@ class IndependentPipelineManager(PipelineManager): return False return True - def checkForChangesNeededBy(self, change, change_queue, event, + def getMissingNeededChanges(self, change, change_queue, event, dependency_graph=None): log = get_annotated_logger(self.log, event) if self.pipeline.ignore_dependencies: - return True + return False, [] log.debug("Checking for changes needed by %s:" % change) # Return true if okay to proceed enqueing this change, # false if the change should not be enqueued. 
if not hasattr(change, 'needs_changes'): log.debug(" %s does not support dependencies" % type(change)) - return True + return False, [] if not change.needs_changes: log.debug(" No changes needed") - return True + return False, [] changes_needed = [] + abort = False for needed_change in self.resolveChangeReferences( change.needs_changes): log.debug(" Change %s needs change %s:" % ( @@ -108,9 +113,7 @@ class IndependentPipelineManager(PipelineManager): continue # This differs from the dependent pipeline check in not # verifying that the dependent change is mergable. - if changes_needed: - return changes_needed - return True + return abort, changes_needed def dequeueItem(self, item): super(IndependentPipelineManager, self).dequeueItem(item) diff --git a/zuul/merger/merger.py b/zuul/merger/merger.py index c3e20e21b..34b495fcc 100644 --- a/zuul/merger/merger.py +++ b/zuul/merger/merger.py @@ -733,10 +733,8 @@ class Repo(object): return log = get_annotated_logger(self.log, zuul_event_id) log.debug("Set remote url to %s", redact_url(url)) + self._git_set_remote_url(self.createRepoObject(zuul_event_id), url) self.remote_url = url - self._git_set_remote_url( - self.createRepoObject(zuul_event_id), - self.remote_url) def mapLine(self, commit, filename, lineno, zuul_event_id=None): repo = self.createRepoObject(zuul_event_id) @@ -1244,11 +1242,13 @@ class Merger(object): item['connection'], item['project'], repo_state, item['ref'], item['newrev']) item = items[-1] - repo = self.getRepo(item['connection'], item['project']) # A list of branch names the last item appears in. 
item_in_branches = [] if item.get('newrev'): - item_in_branches = repo.contains(item['newrev']) + lock = repo_locks.getRepoLock(item['connection'], item['project']) + with lock: + repo = self.getRepo(item['connection'], item['project']) + item_in_branches = repo.contains(item['newrev']) return (True, repo_state, item_in_branches) def getFiles(self, connection_name, project_name, branch, files, dirs=[]): diff --git a/zuul/model.py b/zuul/model.py index 963332826..0d889f557 100644 --- a/zuul/model.py +++ b/zuul/model.py @@ -4237,7 +4237,7 @@ class QueueItem(zkobject.ZKObject): pipeline=None, queue=None, change=None, # a ref - dequeued_needing_change=False, + dequeued_needing_change=None, dequeued_missing_requirements=False, current_build_set=None, item_ahead=None, @@ -4405,11 +4405,14 @@ class QueueItem(zkobject.ZKObject): self.current_build_set.updateAttributes( self.pipeline.manager.current_context, result=result) - def warning(self, msg): + def warning(self, msgs): with self.current_build_set.activeContext( self.pipeline.manager.current_context): - self.current_build_set.warning_messages.append(msg) - self.log.info(msg) + if not isinstance(msgs, list): + msgs = [msgs] + for msg in msgs: + self.current_build_set.warning_messages.append(msg) + self.log.info(msg) def freezeJobGraph(self, layout, context, skip_file_matcher, @@ -4583,7 +4586,7 @@ class QueueItem(zkobject.ZKObject): def cannotMergeBundle(self): if self.bundle: - return self.bundle.cannot_merge + return bool(self.bundle.cannot_merge) return False def didMergerFail(self): @@ -4595,7 +4598,7 @@ class QueueItem(zkobject.ZKObject): return [] def wasDequeuedNeedingChange(self): - return self.dequeued_needing_change + return bool(self.dequeued_needing_change) def wasDequeuedMissingRequirements(self): return self.dequeued_missing_requirements @@ -5118,10 +5121,10 @@ class QueueItem(zkobject.ZKObject): self.setResult(fakebuild) return fakebuild - def setDequeuedNeedingChange(self): + def 
setDequeuedNeedingChange(self, msg): self.updateAttributes( self.pipeline.manager.current_context, - dequeued_needing_change=True) + dequeued_needing_change=msg) self._setAllJobsSkipped() def setDequeuedMissingRequirements(self): @@ -5494,7 +5497,7 @@ class Bundle: self.items = [] self.started_reporting = False self.failed_reporting = False - self.cannot_merge = False + self.cannot_merge = None def __repr__(self): return '<Bundle 0x{:x} {}'.format(id(self), self.items) diff --git a/zuul/reporter/__init__.py b/zuul/reporter/__init__.py index 5723316a3..5af48abc6 100644 --- a/zuul/reporter/__init__.py +++ b/zuul/reporter/__init__.py @@ -135,6 +135,7 @@ class BaseReporter(object, metaclass=abc.ABCMeta): 'failure': self._formatItemReportFailure, 'merge-conflict': self._formatItemReportMergeConflict, 'merge-failure': self._formatItemReportMergeFailure, + 'config-error': self._formatItemReportConfigError, 'no-jobs': self._formatItemReportNoJobs, 'disabled': self._formatItemReportDisabled, 'dequeue': self._formatItemReportDequeue, @@ -191,9 +192,13 @@ class BaseReporter(object, metaclass=abc.ABCMeta): def _formatItemReportFailure(self, item, with_jobs=True): if item.cannotMergeBundle(): - msg = 'This change is part of a bundle that failed to merge.\n' + msg = 'This change is part of a bundle that can not merge.\n' + if isinstance(item.bundle.cannot_merge, str): + msg += '\n' + item.bundle.cannot_merge + '\n' elif item.dequeued_needing_change: msg = 'This change depends on a change that failed to merge.\n' + if isinstance(item.dequeued_needing_change, str): + msg += '\n' + item.dequeued_needing_change + '\n' elif item.dequeued_missing_requirements: msg = ('This change is unable to merge ' 'due to a missing merge requirement.\n') @@ -222,6 +227,13 @@ class BaseReporter(object, metaclass=abc.ABCMeta): def _formatItemReportMergeFailure(self, item, with_jobs=True): return 'This change was not merged by the code review system.\n' + def _formatItemReportConfigError(self, item, 
with_jobs=True): + if item.getConfigErrors(): + msg = str(item.getConfigErrors()[0].error) + else: + msg = "Unknown configuration error" + return msg + def _formatItemReportNoJobs(self, item, with_jobs=True): status_url = get_default(self.connection.sched.config, 'web', 'status_url', '') @@ -250,8 +262,8 @@ class BaseReporter(object, metaclass=abc.ABCMeta): def _formatItemReportOtherBundleItems(self, item): related_changes = item.pipeline.manager.resolveChangeReferences( item.change.needs_changes) - return "Related changes:\n{}".format("\n".join( - c.url for c in related_changes if c is not item.change)) + return "Related changes:\n{}\n".format("\n".join( + f' - {c.url}' for c in related_changes if c is not item.change)) def _getItemReportJobsFields(self, item): # Extract the report elements from an item diff --git a/zuul/web/__init__.py b/zuul/web/__init__.py index f06dd0b52..644b82bec 100755 --- a/zuul/web/__init__.py +++ b/zuul/web/__init__.py @@ -32,6 +32,7 @@ import ssl import threading import uuid import prometheus_client +import urllib.parse import zuul.executor.common from zuul import exceptions @@ -1170,6 +1171,7 @@ class ZuulWebAPI(object): @cherrypy.tools.json_out( content_type='application/json; charset=utf-8', handler=json_handler) def job(self, tenant_name, job_name): + job_name = urllib.parse.unquote_plus(job_name) tenant = self._getTenantOrRaise(tenant_name) job_variants = tenant.layout.jobs.get(job_name) result = [] diff --git a/zuul/zk/event_queues.py b/zuul/zk/event_queues.py index 52ffd582e..ad7529791 100644 --- a/zuul/zk/event_queues.py +++ b/zuul/zk/event_queues.py @@ -909,7 +909,7 @@ class ConnectionEventQueue(ZooKeeperEventQueue): self._put({'event_data': data}) def __iter__(self): - for data, ack_ref, _ in self._iterEvents(): + for data, ack_ref, zstat in self._iterEvents(): if not data: self.log.warning("Malformed event found: %s", data) self._remove(ack_ref.path) @@ -918,6 +918,7 @@ class ConnectionEventQueue(ZooKeeperEventQueue): event = 
model.ConnectionEvent.fromDict( data.get('event_data', data)) event.ack_ref = ack_ref + event.zuul_event_ltime = zstat.creation_transaction_id yield event |