summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorDaniel Smith <daniel.smith@qt.io>2021-08-30 12:47:13 +0200
committerDaniel Smith <Daniel.Smith@qt.io>2022-02-21 07:58:56 +0000
commite527e1011fbf121225ee7bd1107c239998147e34 (patch)
tree48ea22f5ae52620990aae86db7dd0dd1d14e4323
parent78e1cd38b6aa391989e4905ea128ab9ceea90d3b (diff)
downloadqtrepotools-e527e1011fbf121225ee7bd1107c239998147e34.tar.gz
Say hello to the new Dependency Update Utility
This replaces the script/bot at qt/qtqa/src/qtmoduleupdater with a more extensible utility. While this utility can be plugged into an automation host like Jenkins to act as a bot, it may also be used as a standalone tool to bring any repo's dependencies up-to-date with qt/qt5.git or dependency repo HEADs. See the included README.md for further description of capabilities, use cases and examples. Task-number: QTQAINFRA-4594 Change-Id: I6e21f90e410044561bfa4d0dfa72cd09687de321 Reviewed-by: Oswald Buddenhagen <oswald.buddenhagen@gmx.de> Reviewed-by: Volker Hilsheimer <volker.hilsheimer@qt.io>
-rw-r--r--util/dependency_updater/Pipfile18
-rw-r--r--util/dependency_updater/Pipfile.lock162
-rw-r--r--util/dependency_updater/README.md196
-rw-r--r--util/dependency_updater/config.yaml.template6
-rw-r--r--util/dependency_updater/main.py350
-rw-r--r--util/dependency_updater/tools/__init__.py5
-rw-r--r--util/dependency_updater/tools/config.py117
-rw-r--r--util/dependency_updater/tools/datasources/README.md17
-rw-r--r--util/dependency_updater/tools/datasources/__init__.py3
-rw-r--r--util/dependency_updater/tools/datasources/datasources.py57
-rw-r--r--util/dependency_updater/tools/datasources/gerrit_client.py111
-rw-r--r--util/dependency_updater/tools/dependency_resolver.py250
-rw-r--r--util/dependency_updater/tools/namespace.py52
-rw-r--r--util/dependency_updater/tools/proposal.py73
-rw-r--r--util/dependency_updater/tools/repo.py130
-rw-r--r--util/dependency_updater/tools/state.py190
-rw-r--r--util/dependency_updater/tools/teams_connector.py190
-rw-r--r--util/dependency_updater/tools/toolbox.py1313
18 files changed, 3240 insertions, 0 deletions
diff --git a/util/dependency_updater/Pipfile b/util/dependency_updater/Pipfile
new file mode 100644
index 0000000..e5040d8
--- /dev/null
+++ b/util/dependency_updater/Pipfile
@@ -0,0 +1,18 @@
+[[source]]
+url = "https://pypi.org/simple"
+verify_ssl = true
+name = "pypi"
+
+[packages]
+urllib3 = "*"
+gitpython = "*"
+url-normalize = "*"
+pyyaml = "*"
+packaging = "*"
+pymsteams = "*"
+python-gerrit-api = {git = "https://github.com/DanSoQt/python-gerrit-api"}
+
+[dev-packages]
+
+[requires]
+python_version = "3.9"
diff --git a/util/dependency_updater/Pipfile.lock b/util/dependency_updater/Pipfile.lock
new file mode 100644
index 0000000..7419ce6
--- /dev/null
+++ b/util/dependency_updater/Pipfile.lock
@@ -0,0 +1,162 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "e8c9231254ce682a3f4f5db56a40c50a2a01c95aa0c1a12af28f4543b10cca00"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.9"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "certifi": {
+ "hashes": [
+ "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee",
+ "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"
+ ],
+ "version": "==2021.5.30"
+ },
+ "charset-normalizer": {
+ "hashes": [
+ "sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b",
+ "sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"
+ ],
+ "markers": "python_version >= '3'",
+ "version": "==2.0.4"
+ },
+ "gitdb": {
+ "hashes": [
+ "sha256:6c4cc71933456991da20917998acbe6cf4fb41eeaab7d6d67fbc05ecd4c865b0",
+ "sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005"
+ ],
+ "markers": "python_version >= '3.4'",
+ "version": "==4.0.7"
+ },
+ "gitpython": {
+ "hashes": [
+ "sha256:b838a895977b45ab6f0cc926a9045c8d1c44e2b653c1fcc39fe91f42c6e8f05b",
+ "sha256:fce760879cd2aebd2991b3542876dc5c4a909b30c9d69dfc488e504a8db37ee8"
+ ],
+ "index": "pypi",
+ "version": "==3.1.18"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a",
+ "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"
+ ],
+ "markers": "python_version >= '3'",
+ "version": "==3.2"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7",
+ "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"
+ ],
+ "index": "pypi",
+ "version": "==21.0"
+ },
+ "pymsteams": {
+ "hashes": [
+ "sha256:170529a3909d84b517e8b0770beba382482fbd663f78c2f4db820bec2a5e4052"
+ ],
+ "index": "pypi",
+ "version": "==0.1.15"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
+ "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
+ ],
+ "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
+ "version": "==2.4.7"
+ },
+ "python-gerrit-api": {
+ "git": "https://github.com/DanSoQt/python-gerrit-api",
+ "ref": "de8832a71f99481e0d049c7f5fa11b34858765d5"
+ },
+ "pyyaml": {
+ "hashes": [
+ "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf",
+ "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696",
+ "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393",
+ "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77",
+ "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922",
+ "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5",
+ "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8",
+ "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10",
+ "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc",
+ "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018",
+ "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e",
+ "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253",
+ "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347",
+ "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183",
+ "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541",
+ "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb",
+ "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185",
+ "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc",
+ "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db",
+ "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa",
+ "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46",
+ "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122",
+ "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b",
+ "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63",
+ "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df",
+ "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc",
+ "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247",
+ "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6",
+ "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"
+ ],
+ "index": "pypi",
+ "version": "==5.4.1"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24",
+ "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"
+ ],
+ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
+ "version": "==2.26.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
+ "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
+ ],
+ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
+ "version": "==1.16.0"
+ },
+ "smmap": {
+ "hashes": [
+ "sha256:7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182",
+ "sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2"
+ ],
+ "markers": "python_version >= '3.5'",
+ "version": "==4.0.0"
+ },
+ "url-normalize": {
+ "hashes": [
+ "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2",
+ "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"
+ ],
+ "index": "pypi",
+ "version": "==1.4.3"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4",
+ "sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"
+ ],
+ "index": "pypi",
+ "version": "==1.26.6"
+ }
+ },
+ "develop": {}
+}
diff --git a/util/dependency_updater/README.md b/util/dependency_updater/README.md
new file mode 100644
index 0000000..70c8a73
--- /dev/null
+++ b/util/dependency_updater/README.md
@@ -0,0 +1,196 @@
+# Dependency Update Utility
+
+### What is this for?
+
+*This utility replaces the script at qt/qtqa/src/qtmoduleupdater*
+
+Every submodule of qt6 includes a "dependencies.yaml" file which
+specifies a revision of the other modules it directly depends on.
+If checked out together, the given module should be guaranteed to
+build and pass tests. In this model, a "dependency tree" forms,
+where "leaf" modules depend on "trunk" or "root" modules.
+
+In order to update a leaf module's dependencies, the tree must be
+walked down to the root-most module it depends on which itself
+has no dependencies. Then, starting from the root (often qtbase),
+each module's dependencies.yaml file is updated with the latest SHA.
+When a trunk module passes CI after being updated with new root SHAs,
+leaf modules can be updated with the latest trunk SHAs.
+
+### How does it work?
+This script/bot takes a list of repositories, either explicitly or
+gathered from qt5.git's .gitmodules file, and discovers the
+dependency tree by cross-checking every module's dependencies.yaml file.
+During this process, it collects the latest SHAs for each module, and
+also checks for modules which need to be updated due to inconsistent
+SHAs across modules for the same dependency. **Further, if a dependency
+is discovered that was not explicitly passed to the script, it is
+added to internal memory and will be updated as well if necessary.**
+
+The script provides a number of modes of operation. See the script's
+usage manual below.
+
+In typical usage, the script will need to be run multiple times to
+complete a round, as it creates change requests on
+codereview.qt-project.org which must be integrated. When a tracked
+change is integrated, the script should be re-run to progress the
+update round. When all required module changes are successfully
+integrated, if run with options to do so, the script will perform
+an update to qt5.git and/or yocto/meta-qt6 with the updated submodule
+SHAs collected during the round. When a round completes, the state
+is cleared and a new round will begin when the script is run again.
+
+Note: It is safe to run the script on a timer/trigger. Since the script
+keeps track of actions it is taking, it should never duplicate work
+if a dependency update is in-progress. If there is no work to be done,
+such a message will be printed and the script will exit.
+
+### Usage
+A number of python modules are required. Usage of a Python virtual
+environment such as pipenv is strongly suggested.
+
+```
+pipenv install
+pipenv run python3 main.py [args]
+```
+
+See the script's --help output for a full list of options. The
+example scenarios below cover some of the most commonly used situations.
+
+Note that most scenarios require the utility to be run multiple times
+to complete an update "round" since multiple modules in the dependency
+tree often need to be updated separately, and in a specific order.
+
+1. Clean and clear the utility's state and forget previous runs for a branch.
+ 1. `python3 main.py --branch dev --reset`
+ 2. This action clears state for the specified branch. You should always
+ clear the working state before starting a fresh round/operation.
+ 3. Note that simply deleting the local __state/state.bin_ file will
+ not necessarily clear the state. This utility also stores state
+ data in codereview personal branches if ssh credentials are configured
+ in ~/.ssh/config. Always run the utility with `--reset` to clear state.
+
+2. Run the utility to perform a one-time sync of a module with qt/qt5.git
+ submodule SHAs.
+ 1. `python3 main.py --branch dev --noState --repos
+ qt-extensions/qthttpserver`
+ 2. This creates a new change for `qt-extensions/qthttpserver` in
+ codereview which needs to go through the normal review process.
+ The dependencies.yaml file is updated to the latest SHAs in qt5.git
+ 3. This scenario does not usually require more than a single run of
+ the tool. As such, `--noState` can be used to avoid the need to
+ reset the utility before running. This also prevents interference to
+ any ongoing rounds since no state data is written.
+
+3. Run the utility to simulate an action
+ 1. `python3 main.py --sim --branch dev --repos qt-extensions/qthttpserver`
+ 2. Performing a dry-run prints to the console which actions the script
+ would take. No actions are actually performed on gerrit, and no
+ changes will be created. Further, using --sim will never update the
+ local persistent state of the tool.
+
+4. Perform an update to one or more repos using the latest available
+ SHAs from each dependency's own repo.
+ 1. `python3 main.py --head --branch dev --repos qt-extensions/qthttpserver`
+ 2. This assembles a full dependency map for the given repo(s) and updates
+ each one, starting from the most base module.
+ 3. Performing an update with `--head` should be used with a clean state.
+ 4. When the first updates are merged, run the utility again with the same
+ arguments to continue the "round".
+ 5. Continue to run the utility repeatedly to progress the round until
+ the target module is updated with new SHAs for its dependencies.
+
+5. Perform an update for one or more modules, then update qt/qt5.git and
+ yocto/meta-qt6 with the new module SHA(s).
+ 1. `python3 main.py --head --branch dev --qt5Update --yoctoUpdate --repos
+ qt-extensions/qthttpserver`
+ 2. Performs as (4) above, but when the target module(s) have been updated,
+ a further run of the utility will update qt/qt5.git and/or
+ yocto/meta-qt6.git with the merged SHAs of the target modules. Only
+ modules which already exist in the super-repos will be updated.
+
+6. Rewind an ongoing round to a specific dependency to pull in additional
+ changes and continue the update round.
+ 1. `python3 main.py --head --branch dev --rewind qt/qtdeclarative --repos
+ qt-extensions/qthttpserver`
+ 2. In this example, if the round has already successfully merged an update
+ for _qt/qtdeclarative_ but _qt/qtwebsockets_ is broken until a further
+ fix can be picked up in _qt/qtdeclarative_, using
+ `--rewind qt/qtdeclarative` will clear the state for modules which
+ depend on _qt/qtdeclarative_ and pull the new head of _qt/qtdeclarative_,
+ then continue from there.
+
+7. Remove a dependency from a module
+ 1. `python3 main.py --head --branch dev --dropDependency
+ qt/qtsvg:qt-extensions/qthttpserver --repos qt-extensions/qthttpserver`
+ 2. If a repo no longer needs a dependency, it can be removed in this way.
+ 3. Combine `--dropDependency` with `--qt5Update` to remove a dependency
+ from a module and ensure that qt/qt5.git's .gitmodules reference file
+ also gets updated appropriately.
+ 4. _**Note:**_ This action is destructive! See the `--help` output for
+ important information and detailed usage instructions
+
+8. Update all current modules in qt/qt5.git
+ 1. `python3 main.py --default-repos --branch dev --qt5Update --yoctoUpdate`
+ 2. This collects a list of all modules in qt5 marked as 'essential',
+ 'addon', 'deprecated', 'ignore', or 'preview' in the _.gitmodules_
+ file of _qt/qt5.git_ and updates the dependency tree to the latest
+ branch HEAD of each module.
+ 3. When finished, qt/qt5.git and yocto/meta-qt6 are updated with the
+ SHAs of all modules updated in the round
+
+9. Include in a round additional repos/modules which should be considered
+ "non-blocking" by the utility.
+ 1. `python3 main.py --default-repos --branch dev --qt5Update --yoctoUpdate
+ --nonBlockingRepos qt-extensions/qthttpserver`
+    2. This will perform an update round as requested, but if a non-blocking
+       module update fails, it will be ignored for the rest of the round,
+       allowing all other repos to continue normally.
+    3. If any repos specified by `--repos` or gathered automatically by
+       `--default-repos` require a repo specified as non-blocking, that
+       non-blocking repo will be converted to a blocking-status since
+       failure in it would lead to the failure of a normally blocking
+       module.
+
+10. Auto-approve and stage module updates
+ 1. `python3 main.py --stage --default-repos --branch dev --qt5Update
+ --yoctoUpdate`
+ 2. If the user running the tool has provided codereview credentials which
+ have Approver and QtStage rights access, the utility will self-approve
+ created changes and automatically stage them.
+ 3. **_Note:_** Use of this option is generally discouraged outside of
+ automation.
+
+
+### Further Notes
+- The script may take some time to run, depending on internet speed,
+ responsiveness of Gerrit, and how many repositories it is updating.
+
+- Unless run with `--noState`, a run of the script will attempt to use
+ saved information about repositories and their dependencies/SHAs.
+ If you experience trouble when trying to start a round, try running
+ the script with --reset to clear data for the target branch before
+ continuing.
+
+- Use of `--noState` is intended to allow the user to perform atomic
+ operations without the need to reset or save state, potentially
+ contaminating an ongoing round for a given branch. Generally,
+ this would be used when syncing a repo's dependencies.yaml
+ with qt5.git's current submodule SHAs, as this usually does not
+ require more than one run of the script.
+
+- `--sweepChanges` will sweep changes which have the script's GERRIT_USERNAME
+ as a reviewer on the change. In practice, this option is usually
+ reserved for the Qt Submodule Update bot, but can be enabled ***if you
+ know what you are doing.***
+
+- Repo prefixes are fuzzy. The script defaults to "qt/", but in theory
+ any prefix can be used. If you set the prefix to "playground/", the
+ script will prefer dependencies which exist there. If a dependency
+ cannot be found in the preferred namespace, a fuzzy-match search is
+ performed and the best match is attempted. The user will be notified
+ of any fuzzy-match repo selections which are made.
+
+- Running the utility at-will during an ongoing round is a safe operation.
+ The utility will simply update its internal state of ongoing changes
+ in codereview and exit if no further action can be taken.
diff --git a/util/dependency_updater/config.yaml.template b/util/dependency_updater/config.yaml.template
new file mode 100644
index 0000000..81cd9a9
--- /dev/null
+++ b/util/dependency_updater/config.yaml.template
@@ -0,0 +1,6 @@
+GERRIT_HOST: codereview.qt-project.org
+GERRIT_STATE_PATH: playground/tqtc-personal-projects
+GERRIT_USERNAME: ''
+GERRIT_PASSWORD: ''
+MS_TEAMS_NOTIFY_URL: ''
+REPOS: []
diff --git a/util/dependency_updater/main.py b/util/dependency_updater/main.py
new file mode 100644
index 0000000..eaefd60
--- /dev/null
+++ b/util/dependency_updater/main.py
@@ -0,0 +1,350 @@
+############################################################################
+##
+## Copyright (C) 2021 The Qt Company Ltd.
+## Contact: https://www.qt.io/licensing/
+##
+## This file is part of the qtqa module of the Qt Toolkit.
+##
+## $QT_BEGIN_LICENSE:LGPL$
+## Commercial License Usage
+## Licensees holding valid commercial Qt licenses may use this file in
+## accordance with the commercial license agreement provided with the
+## Software or, alternatively, in accordance with the terms contained in
+## a written agreement between you and The Qt Company. For licensing terms
+## and conditions see https://www.qt.io/terms-conditions. For further
+## information use the contact form at https://www.qt.io/contact-us.
+##
+## GNU Lesser General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU Lesser
+## General Public License version 3 as published by the Free Software
+## Foundation and appearing in the file LICENSE.LGPL3 included in the
+## packaging of this file. Please review the following information to
+## ensure the GNU Lesser General Public License version 3 requirements
+## will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+##
+## GNU General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU
+## General Public License version 2.0 or (at your option) the GNU General
+## Public license version 3 or any later version approved by the KDE Free
+## Qt Foundation. The licenses are as published by the Free Software
+## Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+## included in the packaging of this file. Please review the following
+## information to ensure the GNU General Public License requirements will
+## be met: https://www.gnu.org/licenses/gpl-2.0.html and
+## https://www.gnu.org/licenses/gpl-3.0.html.
+##
+## $QT_END_LICENSE$
+##
+############################################################################
+
+import argparse
+import os
+import sys
+
+import yaml
+
+from tools import Namespace, config as Config, state, toolbox, dependency_resolver, repo as Repo
+
+
+def parse_args(print_help: bool = False) -> Namespace:
+    """Build and parse the command-line arguments for the dependency updater.
+
+    Args:
+        print_help: When True, dump the argparse help text to the console
+            before parsing; used by main() to show usage guidance when the
+            round is paused in a failed state.
+
+    Returns:
+        The parsed argument namespace from argparse.parse_args().
+        NOTE(review): the annotation says the 'tools' Namespace (imported at
+        the top of the file), but parse_args() actually yields an
+        argparse.Namespace — confirm the two are interchangeable for callers.
+    """
+    parser = argparse.ArgumentParser(description="Execute a round of dependency updates by branch.", formatter_class=argparse.RawTextHelpFormatter)
+    parser.add_argument('--sim', dest='simulate', action='store_true',
+                        help="Simulate a run of the tool, but don't send any alerts or save\n"
+                             "the final state.")
+    parser.add_argument('--reset', dest='reset', action='store_true',
+                        help="Forget current update state of [branch], then exit. Requires branch.")
+    parser.add_argument('--pauseOnFail', dest='pause_on_finish_fail', action='store_true',
+                        help="If the round finished with failures in blocking repos, do not reset\n"
+                             "the round. Hold the current state until rewound or reset.")
+    parser.add_argument('--retryFailed', dest='retry_failed', action='store_true',
+                        help="Retries failed updates on top of the branch HEAD for a given module.\n"
+                             "Generally only used when a round has failed via use of --pauseOnFail\n"
+                             "or when non-blocking modules fail to merge.")
+    parser.add_argument('--reset-stage-count', dest='reset_stage_count', action='store_true',
+                        help="Reset all in-progress and retrying repos' stage attempt counters.\n"
+                             "Useful intervention in an ongoing round which is about to fail.")
+    parser.add_argument('-b', '--branch', dest='branch', type=str, default="dev",
+                        help="Branch to update against.")
+    parser.add_argument('--noState', dest='no_state', action='store_true',
+                        help="Perform this update isolated from any saved state for this branch.\n"
+                             "Do not save state when completed. Enable this switch to perform\n"
+                             "one-off updates of repos independent of a normal round. Will not\n"
+                             "interfere with ongoing rounds.",
+                        default=False)
+    parser.add_argument('--default-repos', dest='update_default_repos', action='store_true',
+                        help="Update all modules in qt5 marked as 'essential',\n"
+                             " 'addon', 'deprecated', 'ignore', or 'preview'",
+                        default=False)
+    parser.add_argument('-p', '--prefix', dest='repo_prefix', default="qt/",
+                        help="Prefer repos with this prefix when choosing the sha to use in\n"
+                             "the update. Intended for use against 'qt/tqtc-' repos.")
+    parser.add_argument('--head', dest='use_head', action='store_true',
+                        help="Use latest HEAD for all dependencies instead of the latest qt5\n"
+                             "dependency map as the starting point.\n"
+                             "Implied by --default-repos.")
+    parser.add_argument('-c', '--sweepChanges', dest='sweep_changes', action='store_true',
+                        help="Search gerrit for changes with the Submodule Update Bot\n"
+                             "(or the current user) added as a reviewer on the change.\n"
+                             "Sweep those changes in with this submodule update round.")
+    parser.add_argument('--rewind', dest='rewind_module',
+                        help="Rewind the round to the specified module and recalculate\n"
+                             "dependencies. Useful to pull in a fix required by leaf modules\n"
+                             "without restarting the round.")
+    parser.add_argument('--dropDependency', dest="drop_dependency",
+                        help="IMPORTANT: This action is destructive!\n"
+                             " FORMAT: dependency[:repo,]\n"
+                             " Specify the dependency to drop. If it should be selectively\n"
+                             " dropped, follow the dependency with a colon ':', and a\n"
+                             " comma-separated list of repos to drop the dependency from.\n"
+                             " If a list of repos to drop the dependency from is not supplied,\n"
+                             " the dependency will be dropped from ALL repos being processed.")
+    parser.add_argument('-s', '--stage', dest='stage', action='store_true',
+                        help="Automatically stage proposed updates if able to self-approve.")
+    parser.add_argument('-q', '--qt5Update', dest='update_supermodule', action='store_true',
+                        help="Perform an update to the qt5/qt6 supermodule when all\n"
+                             "updates have succeeded")
+    parser.add_argument('--yoctoUpdate', dest='update_yocto_meta', action='store_true',
+                        help="Update the yocto/meta-qt6 repo with the shas from this round.")
+    parser.add_argument('-r', '--repos', dest="repos", nargs='*',
+                        help="List of repos to update.\n")
+    parser.add_argument('-n', '--nonBlockingRepos', dest="non_blocking_repos", nargs='*',
+                        help="List of non-blocking repos to update. These will be included in the\n"
+                             "round but will not cause a failure if they fail to integrate unless\n"
+                             "another blocking module depends on it.")
+    if print_help:
+        parser.print_help()
+    args = parser.parse_args()
+
+    # Warn up front so simulation runs are unmistakable in the console log.
+    if args.simulate:
+        print("INFO: Running in simulation mode. No alerts will be sent,"
+              " and state will not be saved!")
+    return args
+
+
+def clear() -> None:
+    """Clear the console screen using the OS built-in methods."""
+    # Windows consoles ship 'cls'; POSIX shells (Linux/macOS) use 'clear'.
+    if sys.platform == "win32":
+        os.system('cls')
+    else:
+        os.system('clear')
+
+
+def main():
+ # Initial setup
+ config = Config._load_config("config.yaml", parse_args())
+ config.datasources.load_datasources(config)
+ config.state_repo = state.check_create_local_repo(config)
+ if config.args.reset:
+ state.clear_state(config)
+ exit()
+ if config.args.update_default_repos:
+ config.args.use_head = True
+ if config.args.rewind_module:
+ config.rewind_module = toolbox.search_for_repo(config, config.args.rewind_module)
+ # Load the state cache
+ config.state_data = state.load_updates_state(config)
+ # Check to see if we should abort as finished-failed
+ if config.state_data.get("pause_on_finish_fail"):
+ if not any([config.args.retry_failed, config.args.rewind_module]):
+ print(
+ "Round is in Failed_finish state and this round was run in Paused On Finish Fail Mode.\n"
+ "To move the round forward, run the script with one of the following --reset,"
+ " --rewind, or --retry_failed")
+ parse_args(print_help=True)
+ exit()
+ # Continue the round and try again.
+ del config.state_data["pause_on_finish_fail"]
+ if config.args.retry_failed:
+ for module in [r for r in config.state_data.values()
+ if r.progress == Repo.PROGRESS.DONE_FAILED_BLOCKING]:
+ toolbox.reset_module_properties(config, module)
+ report_new_round = False
+ if not config.state_data and config.args.update_default_repos:
+ # State data is always empty if the round is fresh.
+ report_new_round = True
+
+ # Collect the list of qt5 modules for our reference.
+ config.qt5_default = toolbox.get_qt5_submodules(config, ["essential", "addon", "deprecated",
+ "preview"])
+
+ # Collect Repo objects for everything in the cache or list of qt5 modules, as necessary.
+ repos = toolbox.get_repos(config)
+ if repos.get(f"{config.args.repo_prefix}qtbase") and report_new_round:
+ qtbase = repos[f"{config.args.repo_prefix}qtbase"]
+ config.teams_connector.send_teams_webhook_basic(
+ repo=qtbase,
+ text=f"INFO: New round started on {qtbase.branch} with"
+ f" {qtbase.id}@{qtbase.original_ref}")
+ # Update the working state with any newly added repos passed to the script.
+ config.state_data = state.update_state_data(config.state_data, repos)
+
+ # Update the progress of all repos in the state since the last run of the tool.
+ for repo in config.state_data.values():
+ repo.progress, repo.proposal.merged_ref, repo.proposal.gerrit_status = \
+ toolbox.get_check_progress(config, repo)
+
+ # Collect necessary data if dropping a dependency from a repo.
+ if config.args.drop_dependency:
+ split = config.args.drop_dependency.split(":")
+ config.drop_dependency = toolbox.search_for_repo(config, split[0])
+ if len(split) > 1:
+ config.drop_dependency_from = \
+ [toolbox.search_for_repo(config, r) for r in split[1].split(",")]
+ else:
+ config.drop_dependency_from = repos
+
+ # Discover dependencies and add any missing repos to the list. We might need to update them too.
+ config.state_data = dependency_resolver.discover_repo_dependencies(config)
+
+ # Mark non-blocking repos as blocking if a blocking repo depends on it.
+ config.state_data = dependency_resolver.cross_check_non_blocking_repos(config)
+
+ # Undo any work done in modules which depend on rewind_module, if set.
+ if config.args.rewind_module:
+ # Set the module to rewind to so that we generate new proposals for
+ # any modules which depend directly or indirectly on it.
+ if config.state_data[config.rewind_module.id].progress < Repo.PROGRESS.DONE:
+ # Rewinding to a module which hasn't merged yet will break the round!
+ print(f"Unable to rewind to a not-yet-updated module. {config.rewind_module.id}"
+ f" is in state: {config.state_data[config.rewind_module.id].progress.name}."
+ f"\nHint: Try rewinding to one if its dependencies:"
+ f" {config.state_data[config.rewind_module.id].dep_list}")
+ else:
+ config.state_data[config.rewind_module.id].proposal.change_id = ""
+ new_sha = toolbox.get_head(config, config.state_data[config.rewind_module.id], True)
+ print(f"\nRewinding round to {config.rewind_module.id} @ {new_sha}\n")
+ config.state_data[config.rewind_module.id].original_ref = new_sha
+ config.state_data[config.rewind_module.id].proposal.merged_ref = new_sha
+ config.state_data[config.rewind_module.id].progress = Repo.PROGRESS.DONE_NO_UPDATE
+ config.teams_connector.send_teams_webhook_basic(
+ repo=config.rewind_module,
+ text=f"INFO: Rewinding '{config.args.branch}' to {new_sha}."
+ f" Modules depending on {config.rewind_module.id} have been reset.")
+ if config.args.update_supermodule and config.state_data.get("qt/qt5") \
+ and not config.rewind_module.id == "yocto/meta-qt6":
+ del config.state_data["qt/qt5"]
+ if config.args.update_yocto_meta and config.state_data.get("yocto/meta-qt6") \
+ and not config.rewind_module.id == "qt/qt5":
+ del config.state_data["yocto/meta-qt6"]
+
+ # bump the progress of repos that have had updates pushed and merged.
+ for repo in config.state_data.values():
+ repo.progress, repo.proposal.merged_ref, repo.proposal.gerrit_status = \
+ toolbox.get_check_progress(config, repo)
+
+ # Retry any modules which are ready but have failed to merge in CI.
+ for repo in [r for r in config.state_data.values() if r.progress == Repo.PROGRESS.RETRY]:
+ if config.args.reset_stage_count:
+ repo = toolbox.reset_stage_count(repo)
+ if repo.retry_count < 3:
+ if repo.retry_count == 1:
+ # Send a warning message if the update has failed to merge twice.
+ print(f"Collecting log snippet for failed integration in {repo.id}...")
+ failed_tests_snip = toolbox.parse_failed_integration_log(config, repo)
+ print(failed_tests_snip)
+ config.teams_connector.send_teams_webhook_module_failed(repo,
+ text_override=f"Dependency update on *{repo.id}*"
+ f" is failing in **{repo.branch}**. Two automatic retries left.",
+ test_failures=failed_tests_snip,
+ pause_links=True)
+ if config.args.stage:
+ repo = toolbox.retry_update(config, repo)
+ else:
+ print(
+ f"WARN: Unable to re-stage {repo.id} because automatic staging is not enabled.\n"
+ f"You must stage {repo.proposal.change_id} manually!")
+ elif repo.is_non_blocking:
+ print(f"Dependency Update to non-blocking repo {repo.id} failed.")
+ repo.progress = Repo.PROGRESS.DONE_FAILED_NON_BLOCKING
+ else:
+ # Clear state and reset, or allow broken updates to fail so others can be updated.
+ # state.clear_state(config) # Hard-disabled reset for now due to long turn-around-time of bugfixes.
+ print(f"Dependency Update to {repo.id} failed.")
+ repo.progress = Repo.PROGRESS.DONE_FAILED_BLOCKING
+ config.teams_connector.send_teams_webhook_module_failed(repo,
+ test_failures=toolbox.parse_failed_integration_log(
+ config, repo))
+
+ # Check and see if we're ready to push a supermodule update if all the blocking repos
+ # Have finished updating successfully.
+ config.state_data = toolbox.do_try_supermodule_updates(config)
+
+ # Finally, we're ready to start resolving dependencies for modules which are PROGRESS.READY
+ for repo in [r for r in config.state_data.values() if r.progress < Repo.PROGRESS.IN_PROGRESS]:
+ print(f"Checking inconsistencies in: {repo.id}")
+ repo.proposal.inconsistent_set = \
+ dependency_resolver.discover_dep_inconsistencies(config, repo)
+ print(f"{repo.id}: {repo.proposal.inconsistent_set}")
+
+ # Generate current_state for later comparison to comprehend if the script took any action
+ current_state, formatted_state = toolbox.state_printer(config)
+ print("\n-=-=-=-=-=-State before pushing updates-=-=-=-=-=-")
+ print(formatted_state)
+ print("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=\n")
+
+ # Create new dependencies.yaml proposals for all PROGRESS.READY modules.
+ config.state_data = dependency_resolver.recursive_prepare_updates(config)
+
+ for repo in [r for r in config.state_data.values() if r.progress == Repo.PROGRESS.READY]:
+ print(f"Proposed update to {repo.id}:")
+ print("-----------------------------")
+ print(yaml.dump(repo.proposal.proposed_yaml))
+ print("-----------------------------")
+ print()
+
+ # Do the actual gerrit pushes and staging of changes.
+ if not config.args.simulate:
+ for repo in [r for r in config.state_data.values() if
+ r.progress == Repo.PROGRESS.READY and not r.is_supermodule]:
+ repo.proposal.change_id, repo.proposal.change_number \
+ = toolbox.search_existing_change(config, repo, "Update dependencies")
+ repo.proposal = toolbox.push_submodule_update(config, repo)
+ if repo.proposal.change_id:
+ repo.progress = Repo.PROGRESS.IN_PROGRESS
+ elif repo.proposal.merged_ref:
+ repo.progress = Repo.PROGRESS.DONE_NO_UPDATE
+ for repo in [r for r in config.state_data.values() if
+ r.progress == Repo.PROGRESS.IN_PROGRESS]:
+ if config.args.stage and toolbox.stage_update(config, repo):
+ repo.stage_count += 1
+
+ # Check a second time if we need to do a supermodule update, as the above step may
+ # have resulted in a bunch of repos considered PROGRESS.DONE_NO_UPDATE
+ config.state_data = toolbox.do_try_supermodule_updates(config)
+
+ final_state, formatted_state = toolbox.state_printer(config)
+ if final_state != current_state:
+ print("\n-=-=-=-=-=-State after pushing updates-=-=-=-=-=-")
+ print(formatted_state)
+ print("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=\n")
+ else:
+ print("No updates pushed this round. Nothing else to do this run.")
+
+ # Determine how to exit
+ clear_state = False
+ if not any([r.progress for r in config.state_data.values() if
+ r.progress < Repo.PROGRESS.DONE.value]):
+ if config.args.simulate:
+ print("INFO: Done with this round, but not clearing state because --sim was used.")
+ elif config.args.pause_on_finish_fail and not config.state_data.get("pause_on_finish_fail"):
+ print(
+ "Done with this round: Running in Pause On Finish Fail mode. Not resetting state.")
+ config.state_data["pause_on_finish_fail"] = Repo.Repo(id="pause_on_finish_fail",
+ prefix="",
+ progress=Repo.PROGRESS.IGNORE_IS_META)
+ config.teams_connector.send_teams_webhook_finish_failed(
+ text=f"Update round on {config.args.branch} failed with errors."
+ f" Pausing the round until rewind/reset.", config=config, reset_links=True)
+ else:
+ # Everything was successful! Hooray! The round finished.
+ print("Done with this round! Clearing state.")
+ clear_state = True
+ config.teams_connector.send_teams_webhook_basic(
+ text=f"INFO: Reset/Finished update round on '{config.args.branch}'")
+
+ # Dump the state to disk and save to codereview if available.
+ state.save_updates_state(config, clear_state)
+
+
# Script entry point: run one full dependency-update round.
if __name__ == '__main__':
    main()
diff --git a/util/dependency_updater/tools/__init__.py b/util/dependency_updater/tools/__init__.py
new file mode 100644
index 0000000..d5fb2a8
--- /dev/null
+++ b/util/dependency_updater/tools/__init__.py
@@ -0,0 +1,5 @@
+__all__ = ['config', 'namespace', 'teams_connector']
+
+from .config import Config
+from .namespace import Namespace
+from .teams_connector import TeamsConnector
diff --git a/util/dependency_updater/tools/config.py b/util/dependency_updater/tools/config.py
new file mode 100644
index 0000000..93b0cb5
--- /dev/null
+++ b/util/dependency_updater/tools/config.py
@@ -0,0 +1,117 @@
+############################################################################
+##
+## Copyright (C) 2021 The Qt Company Ltd.
+## Contact: https://www.qt.io/licensing/
+##
+## This file is part of the qtqa module of the Qt Toolkit.
+##
+## $QT_BEGIN_LICENSE:LGPL$
+## Commercial License Usage
+## Licensees holding valid commercial Qt licenses may use this file in
+## accordance with the commercial license agreement provided with the
+## Software or, alternatively, in accordance with the terms contained in
+## a written agreement between you and The Qt Company. For licensing terms
+## and conditions see https://www.qt.io/terms-conditions. For further
+## information use the contact form at https://www.qt.io/contact-us.
+##
+## GNU Lesser General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU Lesser
+## General Public License version 3 as published by the Free Software
+## Foundation and appearing in the file LICENSE.LGPL3 included in the
+## packaging of this file. Please review the following information to
+## ensure the GNU Lesser General Public License version 3 requirements
+## will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+##
+## GNU General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU
+## General Public License version 2.0 or (at your option) the GNU General
+## Public license version 3 or any later version approved by the KDE Free
+## Qt Foundation. The licenses are as published by the Free Software
+## Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+## included in the packaging of this file. Please review the following
+## information to ensure the GNU General Public License requirements will
+## be met: https://www.gnu.org/licenses/gpl-2.0.html and
+## https://www.gnu.org/licenses/gpl-3.0.html.
+##
+## $QT_END_LICENSE$
+##
+############################################################################
+
+import json
+import os
+from pathlib import Path
+from shutil import copyfile
+
+import urllib3 as urllib
+import yaml
+from url_normalize import url_normalize
+
+from .datasources.datasources import Datasources
+from .namespace import Namespace
+from .repo import Repo
+from .teams_connector import TeamsConnector
+
+
class Config(Namespace):
    """Configuration object. Also contains datasources for use."""
    # Parsed command-line arguments for this run (set by _load_config).
    args: Namespace
    # Root directory of the utility (parent of the tools/ package).
    cwd: os.PathLike
    # NOTE(review): the class-level defaults below (Datasources(), {}, []) are
    # shared by all instances; harmless while one Config exists per process,
    # but mutations through one instance would be visible through any other.
    datasources: Datasources = Datasources()
    # MS Teams notifier (set by _load_config).
    teams_connector: TeamsConnector
    # Gerrit/Teams settings, loaded from config.yaml or same-named env vars.
    GERRIT_HOST: str
    GERRIT_STATE_PATH: str
    GERRIT_USERNAME: str
    GERRIT_PASSWORD: str
    MS_TEAMS_NOTIFY_URL: str
    # Repo used to persist round state.
    state_repo: Repo
    # Current round state, keyed by repo id (e.g. "qt/qtbase").
    state_data: dict[str, Repo] = {}
    # Personal gerrit ref where state is saved; derived from ~/.ssh/config.
    _state_ref: str = None
    # Default qt5 modules, keyed by repo id.
    qt5_default: dict[str, Repo] = {}
    suppress_warn: bool = False
    REPOS: list[str]
    NON_BLOCKING_REPOS: list[str] = []
    # Module the round should be rewound to, if requested via args.
    rewind_module: Repo = None
    drop_dependency: Repo = None
    drop_dependency_from: list[Repo] = None
+
+
def _load_config(file, args):
    """Load configuration from disk or environment.

    Reads config.yaml (creating it from config.yaml.template when missing),
    applies environment-variable overrides for any configured key, then
    finishes runtime setup: URL normalization, the Teams connector, and the
    personal gerrit state ref derived from the user's ~/.ssh/config.

    Returns a populated Config object.
    """
    cwd = Path(__file__).parent.parent
    file = cwd.joinpath(file)
    c = dict()
    if file.exists():
        with open(file) as config_file:
            c = yaml.load(config_file, Loader=yaml.SafeLoader)
    else:
        try:
            copyfile(file.parent / (file.name + ".template"), file)
            print("Config file not found, so we created 'config.yaml' from the template.")
            with open(file) as config_file:
                # Bug fix: this call previously omitted the Loader argument,
                # which is deprecated and a TypeError on PyYAML >= 6. Use the
                # same SafeLoader as the branch above.
                c = yaml.load(config_file, Loader=yaml.SafeLoader)
        except FileNotFoundError:
            print("ERROR: Unable to load config because config.yaml, or config.yaml.template\n"
                  "was not found on disk. Please pull/checkout config.yaml.template from\n"
                  "the repo again.")

    # An empty yaml document parses to None; normalize so iteration is safe.
    c = c or dict()
    for key in c.keys():
        if os.environ.get(key):
            print(f'Overriding config option {key} with environment variable.')
            c[key] = os.environ[key]
    config = Config(**c)
    config.cwd = cwd
    config.args = args
    config.GERRIT_HOST = url_normalize(config.GERRIT_HOST)
    config.teams_connector = TeamsConnector(config)
    ssh_file = Path(os.path.expanduser('~'), ".ssh", "config")
    if ssh_file.exists():
        with open(ssh_file) as ssh_config:
            contents = ssh_config.read()
        gerrit_base_url = urllib.util.parse_url(config.GERRIT_HOST).host
        host_loc = contents.find(gerrit_base_url)
        # Bug fix: guard against the gerrit host or a "User" line being absent
        # from the ssh config. find() returns -1 in that case, which previously
        # produced garbage slices or an IndexError from split()[1].
        if host_loc != -1:
            user_loc = contents.find("User", host_loc)
            if user_loc != -1:
                user_line = contents[user_loc:contents.find("\n", user_loc)]
                fields = user_line.split(" ")
                user_name = fields[1] if len(fields) > 1 else ""
                if user_name:
                    config._state_ref = f"refs/personal/{user_name}" \
                                        f"/submodule_updater"
    return config
diff --git a/util/dependency_updater/tools/datasources/README.md b/util/dependency_updater/tools/datasources/README.md
new file mode 100644
index 0000000..f23c19b
--- /dev/null
+++ b/util/dependency_updater/tools/datasources/README.md
@@ -0,0 +1,17 @@
+# Datasources
+
+Credentials can be pre-configured via the root level
+`config.yaml` or via environment variables of the same name. If credentials are not supplied or are
+incorrect, the user will be prompted at runtime to re-enter valid credentials for the datasource.
+
+**Datasources are available on the Config object as `config.datasources`.**
+
+### Gerrit Code Review Client
+
+Accessible via the config object @ `config.datasources.gerrit_client`
+
+**Authentication is mandatory to create changes, auto-approve, or stage changes.** The tool defaults
+to simulating updates if credentials are not supplied.
+
+Provides a sanitized JSON response from gerrit queries, based on
+the [Gerrit API reference](https://gerrit-review.googlesource.com/Documentation/rest-api.html)
diff --git a/util/dependency_updater/tools/datasources/__init__.py b/util/dependency_updater/tools/datasources/__init__.py
new file mode 100644
index 0000000..c2daaf6
--- /dev/null
+++ b/util/dependency_updater/tools/datasources/__init__.py
@@ -0,0 +1,3 @@
+__all__ = ['gerrit_client']
+
+from .gerrit_client import create_gerrit_client
diff --git a/util/dependency_updater/tools/datasources/datasources.py b/util/dependency_updater/tools/datasources/datasources.py
new file mode 100644
index 0000000..33ecfdd
--- /dev/null
+++ b/util/dependency_updater/tools/datasources/datasources.py
@@ -0,0 +1,57 @@
+############################################################################
+##
+## Copyright (C) 2021 The Qt Company Ltd.
+## Contact: https://www.qt.io/licensing/
+##
+## This file is part of the qtqa module of the Qt Toolkit.
+##
+## $QT_BEGIN_LICENSE:LGPL$
+## Commercial License Usage
+## Licensees holding valid commercial Qt licenses may use this file in
+## accordance with the commercial license agreement provided with the
+## Software or, alternatively, in accordance with the terms contained in
+## a written agreement between you and The Qt Company. For licensing terms
+## and conditions see https://www.qt.io/terms-conditions. For further
+## information use the contact form at https://www.qt.io/contact-us.
+##
+## GNU Lesser General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU Lesser
+## General Public License version 3 as published by the Free Software
+## Foundation and appearing in the file LICENSE.LGPL3 included in the
+## packaging of this file. Please review the following information to
+## ensure the GNU Lesser General Public License version 3 requirements
+## will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+##
+## GNU General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU
+## General Public License version 2.0 or (at your option) the GNU General
+## Public license version 3 or any later version approved by the KDE Free
+## Qt Foundation. The licenses are as published by the Free Software
+## Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+## included in the packaging of this file. Please review the following
+## information to ensure the GNU General Public License requirements will
+## be met: https://www.gnu.org/licenses/gpl-2.0.html and
+## https://www.gnu.org/licenses/gpl-3.0.html.
+##
+## $QT_END_LICENSE$
+##
+############################################################################
+
+import sys
+
+from gerrit import GerritClient
+
+from tools.namespace import Namespace
+
+
class Datasources(Namespace):
    # Gerrit REST client; populated by load_datasources().
    gerrit_client: GerritClient = None

    def load_datasources(self, config):
        """Instantiate every known datasource and attach it by attribute name.

        Any class attribute ending in '_client' is treated as a datasource:
        the module tools.datasources.<name> must expose a create_<name>(config)
        factory, whose result is stored on this instance.
        """
        print("Discovering and configuring datasources...")
        client_names = [attr for attr in Datasources.__dict__ if attr.endswith("_client")]
        for client_name in client_names:
            module = sys.modules["tools.datasources." + client_name]
            factory = getattr(module, "create_" + client_name)
            setattr(self, client_name, factory(config))
        print("Done loading datasources!")
diff --git a/util/dependency_updater/tools/datasources/gerrit_client.py b/util/dependency_updater/tools/datasources/gerrit_client.py
new file mode 100644
index 0000000..9280db5
--- /dev/null
+++ b/util/dependency_updater/tools/datasources/gerrit_client.py
@@ -0,0 +1,111 @@
+############################################################################
+##
+## Copyright (C) 2021 The Qt Company Ltd.
+## Contact: https://www.qt.io/licensing/
+##
+## This file is part of the qtqa module of the Qt Toolkit.
+##
+## $QT_BEGIN_LICENSE:LGPL$
+## Commercial License Usage
+## Licensees holding valid commercial Qt licenses may use this file in
+## accordance with the commercial license agreement provided with the
+## Software or, alternatively, in accordance with the terms contained in
+## a written agreement between you and The Qt Company. For licensing terms
+## and conditions see https://www.qt.io/terms-conditions. For further
+## information use the contact form at https://www.qt.io/contact-us.
+##
+## GNU Lesser General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU Lesser
+## General Public License version 3 as published by the Free Software
+## Foundation and appearing in the file LICENSE.LGPL3 included in the
+## packaging of this file. Please review the following information to
+## ensure the GNU Lesser General Public License version 3 requirements
+## will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+##
+## GNU General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU
+## General Public License version 2.0 or (at your option) the GNU General
+## Public license version 3 or any later version approved by the KDE Free
+## Qt Foundation. The licenses are as published by the Free Software
+## Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+## included in the packaging of this file. Please review the following
+## information to ensure the GNU General Public License requirements will
+## be met: https://www.gnu.org/licenses/gpl-2.0.html and
+## https://www.gnu.org/licenses/gpl-3.0.html.
+##
+## $QT_END_LICENSE$
+##
+############################################################################
+import os
+import signal
+from getpass import getpass
+
+import gerrit.utils.exceptions
+from gerrit import GerritClient
+
+
def test_gerrit_auth(client: GerritClient) -> bool:
    """Probe gerrit with an authenticated request; return True on success."""
    authenticated = True
    try:
        # Fetching a well-known project's HEAD exercises the credentials.
        client.projects.get("qt/qtbase").HEAD
    except gerrit.utils.exceptions.ClientError as error:
        print(error, error.args)
        authenticated = False
    else:
        print("Gerrit auth OK...")
    return authenticated
+
+
def validate_gerrit_config(config, client):
    """Verify gerrit authentication, prompting for new credentials on failure.

    Loops until authentication succeeds. If the user cancels a prompt (Ctrl-C)
    or it times out, credentials are cleared and an unauthenticated client is
    returned via create_gerrit_client().
    """
    def input_with_maybe_timeout(func: [input, getpass], message: str) -> str:
        """Try to get input with timeout on Unix-based, or wait indefinitely on Windows."""

        def interrupted(signum, frame):
            """called when read times out"""
            raise TimeoutError

        if os.name == 'posix':
            signal.signal(signal.SIGALRM, interrupted)
            signal.alarm(15)
        try:
            return func(message)
        finally:
            # Cancel any pending alarm regardless of how the prompt ended.
            if os.name == 'posix':
                signal.alarm(0)

    while not test_gerrit_auth(client):
        print("Bad Gerrit user or password.\n"
              f"Authenticated access to {config.GERRIT_HOST} is recommended for operation.")
        print(f"\nConfigured username '{config.GERRIT_USERNAME}'")
        try:
            # Bug fix: also catch TimeoutError here and below — the SIGALRM
            # handler raises it after 15s, and it previously went uncaught.
            u = input_with_maybe_timeout(input,
                                         "Press Return to accept or re-enter your username now: ")
        except (KeyboardInterrupt, TimeoutError):
            print("\n\nGerrit Username input cancelled. Proceeding without gerrit authentication!")
            config.GERRIT_USERNAME = ""
            config.GERRIT_PASSWORD = ""
            return create_gerrit_client(config)
        if u:
            config.GERRIT_USERNAME = u
        try:
            p = input_with_maybe_timeout(getpass, "Please re-enter your password: ")
        except (KeyboardInterrupt, TimeoutError):
            # Bug fix: this branch used to fall through to 'if not p:' with
            # 'p' unbound, raising UnboundLocalError. It now returns like the
            # username branch (and names the Password prompt, not Username).
            print("\n\nGerrit Password input cancelled. Proceeding without gerrit authentication!")
            config.GERRIT_USERNAME = ""
            config.GERRIT_PASSWORD = ""
            return create_gerrit_client(config)
        if not p:
            config.GERRIT_USERNAME = ""
            config.GERRIT_PASSWORD = ""
            return create_gerrit_client(config)
        # Bug fix: store the new password and rebuild the client so the next
        # test_gerrit_auth() call uses the re-entered credentials; previously
        # the loop re-tested the stale client and prompted forever.
        config.GERRIT_PASSWORD = p
        client = GerritClient(base_url=config.GERRIT_HOST, username=config.GERRIT_USERNAME,
                              password=config.GERRIT_PASSWORD)
    return client
+
+
def create_gerrit_client(config):
    """Create a Gerrit client instance.
    Will prompt for credentials if not configured."""
    raw_client = GerritClient(base_url=config.GERRIT_HOST,
                              username=config.GERRIT_USERNAME,
                              password=config.GERRIT_PASSWORD)
    # Validation may replace the client, e.g. after re-entered credentials.
    return validate_gerrit_config(config, raw_client)
diff --git a/util/dependency_updater/tools/dependency_resolver.py b/util/dependency_updater/tools/dependency_resolver.py
new file mode 100644
index 0000000..f7463dd
--- /dev/null
+++ b/util/dependency_updater/tools/dependency_resolver.py
@@ -0,0 +1,250 @@
+############################################################################
+##
+## Copyright (C) 2020 The Qt Company Ltd.
+## Contact: https://www.qt.io/licensing/
+##
+## This file is part of the utils of the Qt Toolkit.
+##
+## $QT_BEGIN_LICENSE:LGPL$
+## Commercial License Usage
+## Licensees holding valid commercial Qt licenses may use this file in
+## accordance with the commercial license agreement provided with the
+## Software or, alternatively, in accordance with the terms contained in
+## a written agreement between you and The Qt Company. For licensing terms
+## and conditions see https://www.qt.io/terms-conditions. For further
+## information use the contact form at https://www.qt.io/contact-us.
+##
+## GNU Lesser General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU Lesser
+## General Public License version 3 as published by the Free Software
+## Foundation and appearing in the file LICENSE.LGPL3 included in the
+## packaging of this file. Please review the following information to
+## ensure the GNU Lesser General Public License version 3 requirements
+## will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+##
+## GNU General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU
+## General Public License version 2.0 or (at your option) the GNU General
+## Public license version 3 or any later version approved by the KDE Free
+## Qt Foundation. The licenses are as published by the Free Software
+## Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+## included in the packaging of this file. Please review the following
+## information to ensure the GNU General Public License requirements will
+## be met: https://www.gnu.org/licenses/gpl-2.0.html and
+## https://www.gnu.org/licenses/gpl-3.0.html.
+##
+## $QT_END_LICENSE$
+##
+############################################################################
+import copy
+import json
+
+from tools import toolbox
+from .config import Config
+from .proposal import Proposal
+from .repo import Repo, PROGRESS
+
+
def recursive_prepare_updates(config) -> dict[str, Repo]:
    """Prepare updates for every repo in the READY state, bumping repos with
    no dependencies straight to DONE_NO_UPDATE. Repeats itself until a full
    pass makes no further state changes."""
    needs_another_pass = False
    for repo in config.state_data.values():
        config.state_data[repo.id], triggered = prepare_update(config, repo)
        needs_another_pass = needs_another_pass or triggered
    for repo in config.state_data.values():
        # A READY repo with nothing to depend on cannot receive an update.
        if repo.progress == PROGRESS.READY and not repo.dep_list and not repo.is_supermodule:
            needs_another_pass = True
            repo.progress = PROGRESS.DONE_NO_UPDATE
            repo.proposal.merged_ref = repo.original_ref
            repo.proposal.proposed_yaml = repo.deps_yaml
    if needs_another_pass:
        config.state_data = recursive_prepare_updates(config)
    return config.state_data
+
+
def prepare_update(config: Config, repo: Repo) -> tuple[Repo, bool]:
    """Bump the progress of a repo whose dependencies are met, then attach a
    proposal if it does not already have one.

    Returns the updated repo and whether the caller should run another pass."""
    repo.progress, progress_changed = determine_ready(config, repo)
    # Supermodules are handled separately and never get a proposal here.
    if repo.is_supermodule:
        return repo, False
    repo.proposal = retrieve_or_generate_proposal(config, repo)
    if progress_changed and repo.progress >= PROGRESS.DONE:
        return repo, True
    if not repo.proposal and repo.progress < PROGRESS.WAIT_DEPENDENCY:
        print(f"moving {repo.id} to DONE_NO_UPDATE")
        repo.progress = PROGRESS.DONE_NO_UPDATE
        repo.proposal.merged_ref = repo.original_ref
        repo.proposal.proposed_yaml = repo.deps_yaml
        return repo, True
    return repo, False
+
+
def retrieve_or_generate_proposal(config: Config, repo) -> Proposal:
    """Return the existing proposal when the repo's state forbids regeneration;
    otherwise build a fresh proposal pinned to the latest dependency heads."""
    frozen_states = (PROGRESS.DONE_FAILED_NON_BLOCKING, PROGRESS.DONE_FAILED_BLOCKING,
                     PROGRESS.DONE, PROGRESS.DONE_NO_UPDATE, PROGRESS.WAIT_DEPENDENCY,
                     PROGRESS.WAIT_INCONSISTENT, PROGRESS.IN_PROGRESS)
    if repo.progress in frozen_states:
        return repo.proposal
    # Failed updates are only regenerated when --retryFailed was passed.
    if repo.progress == PROGRESS.RETRY and not config.args.retry_failed:
        return repo.proposal
    print(f"Creating new proposal for {repo.id}: {repo.progress.name}")
    proposal = copy.deepcopy(repo.deps_yaml)
    for dep in repo.deps_yaml.get("dependencies"):
        _, dep_name = toolbox.strip_prefix(dep)
        # Resolve the short dependency name to its fully-qualified repo id.
        full_name = [n for n in repo.dep_list if dep_name in n][-1]
        proposal["dependencies"][dep]["ref"] = toolbox.get_head(config, full_name)
    if proposal == repo.deps_yaml:
        print(f"{repo.id} dependencies are already up-to-date")
    else:
        repo.proposal.proposed_yaml = proposal
    return repo.proposal
+
+
def check_subtree(config, source: Repo, source_ref: str, target: Repo) -> tuple[
    str, tuple[str, str]]:
    """Compare a sha between two repos' dependencies.yaml references for the same dependency.
    Recurse for each dependency which is not the same as the source.

    :returns: the id of a target repo which has a mismatching sha to the source_ref,
        or an empty tuple when no mismatch exists in this subtree."""
    # Once the target is past DONE, its round outcome lives in the proposed
    # yaml rather than the on-disk dependencies.yaml.
    deps = target.deps_yaml.get(
        "dependencies") if target.progress < PROGRESS.DONE else target.proposal.proposed_yaml.get(
        "dependencies")
    for dependency in deps.keys():
        if source.name in dependency:
            if not source_ref == deps[dependency]["ref"]:
                # A ref equal to the already-merged update is not a mismatch.
                if source.proposal.merged_ref == deps[dependency]["ref"]:
                    continue
                else:
                    print(f"source {source.name}:{source_ref[:10]} is not the same as {source.name}"
                          f" in {target.name}->{dependency}:{deps[dependency]['ref']}")  # Verbose!
                    return source.id, (target.id, deps[dependency]["ref"])
        else:
            clean_name = toolbox.strip_prefix(dependency)[1]
            new_target = config.state_data[toolbox.search_for_repo(config, clean_name).id]
            # Bug fix: previously this returned the first recursion's result
            # unconditionally, so any dependency listed after a non-matching
            # one was never examined. Keep scanning siblings when the subtree
            # reports no mismatch.
            mismatch = check_subtree(config, source, source_ref, new_target)
            if mismatch:
                return mismatch
    return tuple()
+
+
def discover_dep_inconsistencies(config: Config, repo: Repo) \
        -> dict[str, set[str]]:
    """Traverse the dependency tree of a repo, collecting mismatching shas
    among dependency references. The result identifies the lowest-level
    module that must be updated in order to begin or continue a round."""
    deps = repo.deps_yaml.get("dependencies")
    if not deps:
        return {}
    mismatches = dict()
    for top_dep in deps.keys():
        top_dep_repo = toolbox.search_for_repo(config, top_dep)
        top_dep_ref = deps[top_dep]["ref"]
        # Cross-check every dependency's subtree against this one's ref.
        for other_dep in deps.keys():
            other_dep_repo = toolbox.search_for_repo(config, other_dep)
            found = check_subtree(config, top_dep_repo, top_dep_ref, other_dep_repo)
            if found:
                mismatches.setdefault(found[0], set()).add(found[1])
    return mismatches
+
+
def discover_repo_dependencies(config: Config, repos_override: list[Repo] = None) -> dict[
    str, Repo]:
    """Traverse the dependency tree for a repo and add any repos found
    to the list of repos to update if it was not already specified
    or found to be part of qt5 default modules.
    Compile the list of dependencies for a given repo, direct and indirect.

    Mutates and returns config.state_data. When repos_override is given,
    only those repos are examined (used for the recursive calls below)."""
    # Iterate deep copies (unless overridden) so the repos being mutated here
    # are written back into state_data explicitly via update() below.
    for repo in repos_override or copy.deepcopy(list(config.state_data.values())):
        dep_list = set()
        if repo.progress >= PROGRESS.IN_PROGRESS and not config.rewind_module:
            # If a module is done, or had no update, we don't care about its dependencies.
            # This means that if we're discovering against qt5.git submodule shas,
            # we won't traverse the tree at all since we don't care about what a
            # direct dependency requires.
            continue
        # Lazily fetch the dependencies.yaml if this repo has not been seen yet.
        if not repo.deps_yaml:
            repo.deps_yaml, repo.branch = toolbox.get_dependencies_yaml(config, repo)
        for dep in repo.deps_yaml.get('dependencies'):
            # Recurse through the tree until no more dependencies are found.
            relative_prefix, bare_dep = toolbox.strip_prefix(dep)
            key, dep_repo = toolbox.get_repos(config, [bare_dep], None).popitem()
            dep_list.add(dep_repo.id)
            config.state_data.update({key: dep_repo})
            # Retrieve the complete list of dependencies for this dependency.
            sub_deps = discover_repo_dependencies(config, [dep_repo])[dep_repo.id].dep_list
            # Add these dependencies to the master list for the repo we were first looking at.
            dep_list.update(sub_deps)
        repo.dep_list = list(dep_list)
        # Update this repo in our master list of repos.
        config.state_data.update({repo.id: repo})
        # Cross-check that we didn't miss anything
        config.state_data = discover_missing_dependencies(config, repo)
        if config.rewind_module and config.rewind_module.id in config.state_data[repo.id].dep_list:
            # If the module depends on the module we needed to rewind
            # to, either directly or indirectly, reset the state
            # and treat it as though it hasn't been updated.
            config.state_data[repo.id] = \
                toolbox.reset_module_properties(config, config.state_data[repo.id])
    return config.state_data
+
+
def cross_check_non_blocking_repos(config: Config) -> dict[str, Repo]:
    """Examine dependencies of all blocking repos and promote any non-blocking
    repo to blocking status when a blocking repo depends on it."""
    for repo in config.state_data.values():
        if repo.is_non_blocking:
            continue
        for dep in repo.dep_list or []:
            dep_repo = config.state_data.get(dep)
            if dep_repo and dep_repo.is_non_blocking:
                dep_repo.is_non_blocking = False
    return config.state_data
+
+
def discover_missing_dependencies(config: Config, repo: Repo) -> dict[str, Repo]:
    """Ensure every entry of repo.dep_list exists in state_data; create any
    missing repos with minimal data and re-resolve dependencies for them."""
    for dep in repo.dep_list:
        if dep in config.state_data:
            continue
        # Initialize the missing repo with the minimum data needed.
        new_repo = toolbox.get_repos(config, repos_override=[dep],
                                     non_blocking_override=None)[dep]
        config.state_data[new_repo.id] = new_repo
        config.state_data = discover_repo_dependencies(config)
    return config.state_data
+
+
def determine_ready(config: Config, repo: Repo) -> tuple[PROGRESS, bool]:
    """Check whether a repo is waiting on another, or whether all dependency
    conflicts have been resolved and/or updated.

    Returns the (possibly worsened) progress state and whether it changed."""
    # Repos already in flight (or beyond) keep their current state untouched.
    if repo.progress >= PROGRESS.IN_PROGRESS:
        return repo.progress, False
    worst = PROGRESS.READY
    if repo.proposal.inconsistent_set:
        worst = max(worst, PROGRESS.WAIT_INCONSISTENT)
    for dependency in repo.dep_list:
        dep_repo = config.state_data.get(dependency)
        if dep_repo is None:
            continue
        if dep_repo.progress < PROGRESS.DONE:
            worst = max(worst, PROGRESS.WAIT_DEPENDENCY)
        elif dep_repo.progress == PROGRESS.DONE_FAILED_NON_BLOCKING:
            print(f"WARN: {repo.id} dependency {dependency} is a non-blocking module which"
                  f" failed. Marking {repo.id} as failed.")
            worst = max(worst, PROGRESS.DONE_FAILED_NON_BLOCKING)
        elif dep_repo.progress == PROGRESS.DONE_FAILED_BLOCKING:
            print(f"WARN: {repo.id} dependency {dependency} is a blocking module which"
                  f" failed. Marking {repo.id} as failed-blocking.")
            worst = max(worst, PROGRESS.DONE_FAILED_BLOCKING)
    return worst, repo.progress != worst
diff --git a/util/dependency_updater/tools/namespace.py b/util/dependency_updater/tools/namespace.py
new file mode 100644
index 0000000..7e4edda
--- /dev/null
+++ b/util/dependency_updater/tools/namespace.py
@@ -0,0 +1,52 @@
+############################################################################
+##
+## Copyright (C) 2021 The Qt Company Ltd.
+## Contact: https://www.qt.io/licensing/
+##
+## This file is part of the qtqa module of the Qt Toolkit.
+##
+## $QT_BEGIN_LICENSE:LGPL$
+## Commercial License Usage
+## Licensees holding valid commercial Qt licenses may use this file in
+## accordance with the commercial license agreement provided with the
+## Software or, alternatively, in accordance with the terms contained in
+## a written agreement between you and The Qt Company. For licensing terms
+## and conditions see https://www.qt.io/terms-conditions. For further
+## information use the contact form at https://www.qt.io/contact-us.
+##
+## GNU Lesser General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU Lesser
+## General Public License version 3 as published by the Free Software
+## Foundation and appearing in the file LICENSE.LGPL3 included in the
+## packaging of this file. Please review the following information to
+## ensure the GNU Lesser General Public License version 3 requirements
+## will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+##
+## GNU General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU
+## General Public License version 2.0 or (at your option) the GNU General
+## Public license version 3 or any later version approved by the KDE Free
+## Qt Foundation. The licenses are as published by the Free Software
+## Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+## included in the packaging of this file. Please review the following
+## information to ensure the GNU General Public License requirements will
+## be met: https://www.gnu.org/licenses/gpl-2.0.html and
+## https://www.gnu.org/licenses/gpl-3.0.html.
+##
+## $QT_END_LICENSE$
+##
+############################################################################
+
class Namespace(object):
    """Inheriting this class enables property-style 'object.attr'
    access to member attributes instead of relying on dict-style
    'object[attr]' and '.get(attr)' accessors.
    """

    def __init__(self, **kwargs):
        # Every keyword argument becomes an instance attribute.
        self.__dict__.update(kwargs)

    @property
    def as_map(self):
        """Plain-dict view of the attributes, for dumping back to JSON."""
        return self.__dict__

    def __repr__(self):
        return str(self.as_map)
diff --git a/util/dependency_updater/tools/proposal.py b/util/dependency_updater/tools/proposal.py
new file mode 100644
index 0000000..b12fe4f
--- /dev/null
+++ b/util/dependency_updater/tools/proposal.py
@@ -0,0 +1,73 @@
+############################################################################
+##
+## Copyright (C) 2020 The Qt Company Ltd.
+## Contact: https://www.qt.io/licensing/
+##
+## This file is part of the utils of the Qt Toolkit.
+##
+## $QT_BEGIN_LICENSE:LGPL$
+## Commercial License Usage
+## Licensees holding valid commercial Qt licenses may use this file in
+## accordance with the commercial license agreement provided with the
+## Software or, alternatively, in accordance with the terms contained in
+## a written agreement between you and The Qt Company. For licensing terms
+## and conditions see https://www.qt.io/terms-conditions. For further
+## information use the contact form at https://www.qt.io/contact-us.
+##
+## GNU Lesser General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU Lesser
+## General Public License version 3 as published by the Free Software
+## Foundation and appearing in the file LICENSE.LGPL3 included in the
+## packaging of this file. Please review the following information to
+## ensure the GNU Lesser General Public License version 3 requirements
+## will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+##
+## GNU General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU
+## General Public License version 2.0 or (at your option) the GNU General
+## Public license version 3 or any later version approved by the KDE Free
+## Qt Foundation. The licenses are as published by the Free Software
+## Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+## included in the packaging of this file. Please review the following
+## information to ensure the GNU General Public License requirements will
+## be met: https://www.gnu.org/licenses/gpl-2.0.html and
+## https://www.gnu.org/licenses/gpl-3.0.html.
+##
+## $QT_END_LICENSE$
+##
+############################################################################
+
class Proposal:
    """A proposed dependencies.yaml update and its associated gerrit change."""
    proposed_yaml: dict
    change_id: str
    change_number: str
    gerrit_status: str = ""
    merged_ref: str = ""
    inconsistent_set: dict

    def __init__(self, proposed_yaml: dict = None,
                 change_id: str = None, change_number: str = None, inconsistent_set: dict = None):
        self.proposed_yaml = proposed_yaml
        self.change_id = change_id
        self.change_number = change_number
        self.inconsistent_set = inconsistent_set

    def __setattr__(self, key, value):
        # Normalize change_number to a string whenever an int is assigned.
        # Note: `type(value) is int` deliberately excludes bool.
        if key == "change_number" and type(value) is int:
            value = str(value)
        self.__dict__[key] = value

    def __str__(self):
        return f"Proposal(change_id='{self.change_id}'," \
               f" change_number={self.change_number}" \
               f" gerrit_status='{self.gerrit_status}'" \
               f" inconsistent_set={self.inconsistent_set}," \
               f" merged_ref={self.merged_ref}," \
               f" proposed yaml={self.proposed_yaml})"

    def __bool__(self):
        # A proposal is truthy once it carries any payload.
        return bool(self.proposed_yaml or self.change_id or self.inconsistent_set)
diff --git a/util/dependency_updater/tools/repo.py b/util/dependency_updater/tools/repo.py
new file mode 100644
index 0000000..9a4b9d4
--- /dev/null
+++ b/util/dependency_updater/tools/repo.py
@@ -0,0 +1,130 @@
+############################################################################
+##
+## Copyright (C) 2020 The Qt Company Ltd.
+## Contact: https://www.qt.io/licensing/
+##
+## This file is part of the utils of the Qt Toolkit.
+##
+## $QT_BEGIN_LICENSE:LGPL$
+## Commercial License Usage
+## Licensees holding valid commercial Qt licenses may use this file in
+## accordance with the commercial license agreement provided with the
+## Software or, alternatively, in accordance with the terms contained in
+## a written agreement between you and The Qt Company. For licensing terms
+## and conditions see https://www.qt.io/terms-conditions. For further
+## information use the contact form at https://www.qt.io/contact-us.
+##
+## GNU Lesser General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU Lesser
+## General Public License version 3 as published by the Free Software
+## Foundation and appearing in the file LICENSE.LGPL3 included in the
+## packaging of this file. Please review the following information to
+## ensure the GNU Lesser General Public License version 3 requirements
+## will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+##
+## GNU General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU
+## General Public License version 2.0 or (at your option) the GNU General
+## Public license version 3 or any later version approved by the KDE Free
+## Qt Foundation. The licenses are as published by the Free Software
+## Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+## included in the packaging of this file. Please review the following
+## information to ensure the GNU General Public License requirements will
+## be met: https://www.gnu.org/licenses/gpl-2.0.html and
+## https://www.gnu.org/licenses/gpl-3.0.html.
+##
+## $QT_END_LICENSE$
+##
+############################################################################
+import re
+from enum import IntEnum
+from urllib.parse import unquote
+
+import yaml
+
+from .namespace import Namespace
+from .proposal import Proposal
+
+
class PROGRESS(IntEnum):
    """Lifecycle states of a repo's dependency update.

    Values are ordered so callers can compare, e.g. Repo.merge() treats
    any state >= DONE as terminal and refuses blind merges.
    """
    ERROR = 0  # Unrecoverable failure
    UNSPECIFIED = 1  # Initial state; not yet examined
    READY = 2  # Ready for an update to be created/staged
    WAIT_DEPENDENCY = 3  # Blocked waiting on a dependency's update
    WAIT_INCONSISTENT = 4  # Blocked: dependency set is inconsistent (see Proposal.inconsistent_set)
    IN_PROGRESS = 5  # Update submitted and underway
    RETRY = 6  # A previous attempt failed; will be retried
    DONE = 7  # Terminal: update completed
    DONE_NO_UPDATE = 8  # Terminal: no update was necessary (used for pinned default repos)
    DONE_FAILED_NON_BLOCKING = 9  # Terminal failure which does not block the round
    DONE_FAILED_BLOCKING = 10  # Terminal failure which blocks the round
    IGNORE_IS_META = 11  # Meta/supermodule repo; excluded from updating
+
+
class Repo(Namespace):
    """Base information about a repository/submodule"""
    id: str = ""  # Fully qualified id such as qt/qtbase or qt/tqtc-qtbase
    prefix: str = ""  # Bare prefix such as qt/ or qt/tqtc-
    name: str = ""  # Bare name such as qtbase
    original_ref: str = ""  # Ref to associate with this repo. This value should never be changed.
    branch: str = ""  # Branch where dependencies.yaml was found. May differ from the specified branch.
    # Parsed dependencies.yaml. Class-level fallback only; each instance gets
    # its own dict in __init__ (see note there).
    deps_yaml: dict = dict()
    dep_list: list[str]
    proposal: Proposal = Proposal()
    to_stage: list[str]
    progress: PROGRESS = PROGRESS.UNSPECIFIED
    stage_count: int = 0
    retry_count: int = 0
    is_supermodule: bool = False  # Bypasses dependency calculation
    # Does not stop the round from continuing unless a blocking module depends on it.
    is_non_blocking: bool = False

    def __init__(self, id: str, prefix: str,
                 proposal: Proposal = None,
                 to_stage: list[str] = None, **kwargs):
        super().__init__(**kwargs)
        self.to_stage = list()
        self.dep_list = list()
        # Fix: give every instance its own deps_yaml unless one arrived via
        # kwargs. Previously all instances shared the single class-level dict,
        # so mutating one repo's deps_yaml leaked into every other repo.
        if "deps_yaml" not in self.__dict__:
            self.deps_yaml = dict()
        self.id = unquote(id)
        self.prefix = prefix
        self.name = id.removeprefix(prefix)
        self.proposal = proposal or Proposal()
        if to_stage is not None:
            self.to_stage = to_stage
        if proposal and proposal.change_id not in self.to_stage:
            self.to_stage.append(proposal.change_id)

    def __str__(self):
        return f"Repo(id='{self.id}', name='{self.name}'," \
               f" ref='{self.original_ref}', branch='{self.branch}'," \
               f" progress={self.progress}," \
               f" stage_count={self.stage_count}," \
               f" retry_count={self.retry_count}," \
               f" proposal={str(self.proposal)})"

    def __repr__(self):
        return self.id

    def __eq__(self, other: "str | Repo"):
        """Compare against a fully qualified id, a bare name, a relative
        dependencies.yaml path, or another Repo (by id)."""
        if type(other) == str:
            r = r'((?:.*/){1,}(?:(?!(.*-){2,})|(?:[^-]*-)))'
            re_other_prefix = re.findall(r, other)
            if len(re_other_prefix):
                other_prefix: str = re_other_prefix.pop()[0]
                # Strip relative prefixes from dependency.yaml inputs
                if other_prefix.startswith('../'):
                    return other.removeprefix(other_prefix) == self.name
                else:
                    return other == self.id
            else:
                return other == self.name
        return self.id == other.id

    def merge(self, other: "Repo"):
        """Overlay truthy attributes of `other` onto self.
        Repos in a terminal (>= DONE) state are never blindly merged."""
        if self.progress >= PROGRESS.DONE:
            # Anything marked as done should only ever be updated
            # with specific intention, not blindly merged.
            return
        for prop, val in vars(other).items():
            if val:
                self.__setattr__(prop, val)
diff --git a/util/dependency_updater/tools/state.py b/util/dependency_updater/tools/state.py
new file mode 100644
index 0000000..47d3d15
--- /dev/null
+++ b/util/dependency_updater/tools/state.py
@@ -0,0 +1,190 @@
+############################################################################
+##
+## Copyright (C) 2021 The Qt Company Ltd.
+## Contact: https://www.qt.io/licensing/
+##
+## This file is part of the qtqa module of the Qt Toolkit.
+##
+## $QT_BEGIN_LICENSE:LGPL$
+## Commercial License Usage
+## Licensees holding valid commercial Qt licenses may use this file in
+## accordance with the commercial license agreement provided with the
+## Software or, alternatively, in accordance with the terms contained in
+## a written agreement between you and The Qt Company. For licensing terms
+## and conditions see https://www.qt.io/terms-conditions. For further
+## information use the contact form at https://www.qt.io/contact-us.
+##
+## GNU Lesser General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU Lesser
+## General Public License version 3 as published by the Free Software
+## Foundation and appearing in the file LICENSE.LGPL3 included in the
+## packaging of this file. Please review the following information to
+## ensure the GNU Lesser General Public License version 3 requirements
+## will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+##
+## GNU General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU
+## General Public License version 2.0 or (at your option) the GNU General
+## Public license version 3 or any later version approved by the KDE Free
+## Qt Foundation. The licenses are as published by the Free Software
+## Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+## included in the packaging of this file. Please review the following
+## information to ensure the GNU General Public License requirements will
+## be met: https://www.gnu.org/licenses/gpl-2.0.html and
+## https://www.gnu.org/licenses/gpl-3.0.html.
+##
+## $QT_END_LICENSE$
+##
+############################################################################
+
+import os
+import pickle
+import shutil
+from pathlib import Path
+from time import sleep
+
+import git.exc
+from git import Git
+from git import Repo as GitRepo, exc
+
+from .repo import Repo
+
+
def fetch_and_checkout(config, repo):
    """Try to fetch the remote ref in the personal gerrit branch for
    the running user."""
    client = Git(repo.working_tree_dir)
    try:
        client.fetch(['origin', config._state_ref])
        client.checkout('FETCH_HEAD')
    except git.exc.GitCommandError as error:
        # A missing personal ref just means no state was saved yet.
        if "couldn't find remote ref refs/personal" not in error.stderr:
            print(error)
    del client
+
+
def check_create_local_repo(config) -> GitRepo:
    """Create a local repo for saving state and push it to
    the user's personal ref. Checkout any existing version
    on the user's personal remote, or create a new commit"""
    path = Path(config.cwd, "_state")
    if not path.exists():
        os.mkdir(path)
    try:
        # Re-use an existing checkout; attach the gerrit remote if missing.
        repo = GitRepo(path)
        if "origin" not in [r.name for r in repo.remotes] and config._state_ref:
            # GERRIT_HOST[8:] presumably strips a leading "https://" (8 chars)
            # to build the ssh remote URL — TODO confirm host format.
            repo.create_remote('origin',
                               f"ssh://{config.GERRIT_HOST[8:]}/{config.GERRIT_STATE_PATH}")
    except exc.InvalidGitRepositoryError:
        # Directory exists but is not a git repo yet: initialize it fresh.
        repo = GitRepo.init(path)
        if config._state_ref:
            repo.create_remote('origin',
                               f"ssh://{config.GERRIT_HOST[8:]}/{config.GERRIT_STATE_PATH}")
    fetch_and_checkout(config, repo)
    state_path = Path(repo.working_tree_dir, "state.bin")
    if not state_path.exists():
        # First run: seed the repo with an empty pickled state and push it
        # (force-push, since the personal ref is owned by this tool).
        with open(state_path, 'wb') as state_file:
            pickle.dump({}, state_file)
        repo.index.add('state.bin')
        repo.index.commit("Empty state")
        if config._state_ref:
            repo.remotes.origin.push(['-f', f"HEAD:{config._state_ref}"])
    if not config._state_ref:
        print("\nWARN: Unable to create git remote for state!\n"
              "WARN: State will only be saved locally to _state/state.bin.\n"
              "INFO: Please configure an ssh user in ~/.ssh/config for your gerrit host\n"
              "INFO: as set by 'GERRIT_HOST' in config.yaml in order to save state in gerrit.\n")
    return repo
+
+
def load_updates_state(config) -> dict[str, Repo]:
    """Load previous state and apply retention policy if not simulating a run."""
    if config.args.no_state:
        print("Running in no-state mode! No state loaded, and progress will not be saved on exit!")
        return {}
    print("\nLoading saved update data from codereview...")
    if config._state_ref:
        fetch_and_checkout(config, config.state_repo)
    state_path = Path(config.state_repo.working_tree_dir, "state.bin")
    if not state_path.exists():
        # Seed an empty pickled state so the unconditional read below succeeds.
        with open(state_path, 'wb') as state_file:
            pickle.dump(dict(), state_file)
    with open(state_path, mode='rb') as state_file:
        state_data = pickle.load(state_file)
    print("Done loading state data!")
    # Only the state for the branch currently being updated is returned.
    return state_data.get(config.args.branch) or {}
+
+
def update_state_data(old_state: dict[str, "Repo"], new_data: dict[str, "Repo"]) -> dict[str, "Repo"]:
    """Merge two update set dicts and return the result.

    Entries present in both are combined via Repo.merge(); entries only in
    new_data are added as-is. Returns a new dict: the original implementation
    aliased and mutated the caller's old_state as a side effect.
    """
    updated = dict(old_state)  # shallow copy so the caller's dict is untouched
    for key, repo in new_data.items():
        if updated.get(key):
            updated[key].merge(repo)
        else:
            updated[key] = repo
    return updated
+
+
def save_updates_state(config, _clear_state: bool = False) -> None:
    """Save updates to the state file.

    No-op (with a notice) in no-state mode and in simulate mode; otherwise
    writes this branch's state into state.bin, commits it, and force-pushes
    to the personal state ref when one is configured.
    """
    if config.args.simulate:
        if config.args.no_state:
            print("Running in no-state mode. Not saving state!")
        return
    if _clear_state:
        clear_state(config)
        return
    if config.args.no_state:
        # Fix: previously this guard was only reachable when simulating, so a
        # real run with --no-state still persisted state, contradicting the
        # "progress will not be saved on exit" promise in load_updates_state.
        print("Running in no-state mode. Not saving state!")
        return
    print("Saving update state data to codereview...")
    state_path = Path(config.state_repo.working_tree_dir, "state.bin")
    data: dict[str, dict[str, Repo]] = {}
    with open(state_path, 'rb') as state_file:
        data = pickle.load(state_file)
    data[config.args.branch] = config.state_data
    with open(state_path, 'wb') as state_file:
        pickle.dump(data, state_file)
    config.state_repo.index.add("state.bin")
    config.state_repo.index.commit("Update state")
    if config._state_ref:
        config.state_repo.remotes.origin.push(['-f', f"HEAD:{config._state_ref}"])
+
+
def clear_state(config) -> None:
    """Clear state data. All branches are wiped if not specified!"""
    print("Clearing state and resetting updates...")
    if config.args.branch:
        # Branch-scoped reset: empty this branch's state and persist it,
        # leaving other branches' state intact.
        config.state_data = {}
        save_updates_state(config)
        print(f"Clearing branch state for {config.args.branch}")
        return

    if config._state_ref:
        try:
            # Pushing an empty source to the ref deletes it on the remote.
            config.state_repo.remotes.origin.push(['-f', f":{config._state_ref}"])
            print("Cleared remote state on codereview...")
        except git.exc.GitCommandError:
            print(
                "WARN: Failed to push an empty commit, probably because the state is already clear.")
        del config.state_repo  # Need to tear down the instance of PyGit to close the file handle.
        sleep(5)  # workaround for sometimes slow closing of git handles.
    else:
        print("\nWARN: No state remote ref set! Only deleting local state.bin file.\n"
              "WARN: Run this script again with --reset after configuring an ssh user\n"
              "WARN: in ~/.ssh/config for your gerrit host as set by 'GERRIT_HOST' in config.yaml.\n"
              "WARN: If a remote state exists next time this script is run, it will likely\n"
              "WARN: cause unexpected behavior!")
    # _unlink_file handles files rmtree could not remove on its own.
    shutil.rmtree(Path(config.cwd, "_state"), onerror=_unlink_file)
    print("Deleted local state files.")
+
+
+def _unlink_file(function, path, excinfo):
+ """In the case that shutil.rmtree fails on a file."""
+ os.unlink(path)
diff --git a/util/dependency_updater/tools/teams_connector.py b/util/dependency_updater/tools/teams_connector.py
new file mode 100644
index 0000000..28f1372
--- /dev/null
+++ b/util/dependency_updater/tools/teams_connector.py
@@ -0,0 +1,190 @@
+############################################################################
+##
+## Copyright (C) 2021 The Qt Company Ltd.
+## Contact: https://www.qt.io/licensing/
+##
+## This file is part of the qtqa module of the Qt Toolkit.
+##
+## $QT_BEGIN_LICENSE:LGPL$
+## Commercial License Usage
+## Licensees holding valid commercial Qt licenses may use this file in
+## accordance with the commercial license agreement provided with the
+## Software or, alternatively, in accordance with the terms contained in
+## a written agreement between you and The Qt Company. For licensing terms
+## and conditions see https://www.qt.io/terms-conditions. For further
+## information use the contact form at https://www.qt.io/contact-us.
+##
+## GNU Lesser General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU Lesser
+## General Public License version 3 as published by the Free Software
+## Foundation and appearing in the file LICENSE.LGPL3 included in the
+## packaging of this file. Please review the following information to
+## ensure the GNU Lesser General Public License version 3 requirements
+## will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+##
+## GNU General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU
+## General Public License version 2.0 or (at your option) the GNU General
+## Public license version 3 or any later version approved by the KDE Free
+## Qt Foundation. The licenses are as published by the Free Software
+## Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+## included in the packaging of this file. Please review the following
+## information to ensure the GNU General Public License requirements will
+## be met: https://www.gnu.org/licenses/gpl-2.0.html and
+## https://www.gnu.org/licenses/gpl-3.0.html.
+##
+## $QT_END_LICENSE$
+##
+############################################################################
+import json
+
+import pymsteams as msteams
+import yaml
+from gerrit.changes import change as GerritChange
+from .repo import Repo, PROGRESS
+from typing import Union
+
+
def gerrit_link_maker(config, change: Union[GerritChange.GerritChange, Repo]) -> tuple[str, str]:
    """Build a user-clickable (label, url) pair for a change on gerrit.

    Accepts either a fetched gerrit change or a Repo (whose current
    proposal's change is then fetched). Returns ("", "") when the input
    matches neither.
    """
    repo = ""
    _change = None
    # Fix: the original compared type(change) against the imported *module*
    # GerritChange, which can never be true; test against the class instead.
    if isinstance(change, GerritChange.GerritChange):
        repo = change.project
        _change = change
    elif isinstance(change, Repo):
        repo = change.id
        _change = config.datasources.gerrit_client.changes.get(change.proposal.change_id)
    if not repo:
        return "", ""
    subject = _change.subject
    mini_sha = _change.get_revision("current").get_commit().commit[:10]
    url = f"{config.GERRIT_HOST}c/{repo}/+/{_change._number}"
    return f"({mini_sha}) {subject[:70]}{'...' if len(subject) > 70 else ''}", url
+
+
class TeamsConnector:
    """Sends notification cards to an MS Teams channel via an incoming webhook.

    All send_* methods print and return early when running with --simulate,
    and silently do nothing when no webhook URL is configured.
    """

    def __init__(self, config):
        self.config = config
        self.endpoint = config.MS_TEAMS_NOTIFY_URL
        if self.endpoint:
            print("MS Teams connector initialized")
        else:
            print("MS Teams connector disabled: No webhook URL provided.")

    def _link_creator_failed_stage(self, card: msteams.connectorcard,
                                   repo) -> msteams.connectorcard:
        """Attach a link button for each change staged alongside the update.

        Fix: the original static version called gerrit_link_maker(repo, change_id),
        passing the repo where the config belongs and a bare change-id string as
        the change, which always produced empty ("", "") links. Fetch each change
        and build the link directly instead."""
        gerrit = self.config.datasources.gerrit_client
        for change_id in repo.to_stage:
            _change = gerrit.changes.get(change_id)
            subject = _change.subject
            mini_sha = _change.get_revision("current").get_commit().commit[:10]
            url = f"{self.config.GERRIT_HOST}c/{repo.id}/+/{_change._number}"
            card.addLinkButton(
                f"({mini_sha}) {subject[:70]}{'...' if len(subject) > 70 else ''}", url)
        return card

    def _card_formatter_failed_stage(self, card: msteams.connectorcard,
                                     repo) -> msteams.connectorcard:
        """Format a warning card for a failed staging attempt."""
        card.color('#CB9E5A')
        card.title(f"{repo.id}: Dependency update warning")
        if len(repo.to_stage) == 1:
            card.text(f"Staging of dependency update in {repo.id} -> {repo.branch} failed.")
        else:
            card.text(f"Co-staging changes with the dependency update in {repo.id} -> {repo.branch}"
                      f" failed.\nChanges:")
        self._link_creator_failed_stage(card, repo)
        return card

    def send_teams_webhook_failed_stage(self, repo):
        """Notify the channel that staging failed for `repo`."""
        if self.config.args.simulate:
            print(f"SIM: send Staging Failed Teams webhook for {repo.id}")
            return
        if self.endpoint:
            message_card = msteams.connectorcard(self.endpoint)
            message_card = self._card_formatter_failed_stage(message_card, repo)
            message_card.send()
            print(message_card.last_http_status.status_code, message_card.last_http_status.reason,
                  message_card.last_http_status.text)
            if message_card.last_http_status.status_code != 200:
                print(
                    f"WARN: Unable to send alert to webhook for {repo.id}")
                return False
            return True

    def send_teams_webhook_module_failed(self, repo, text_override: str = None,
                                         test_failures: str = None, pause_links: bool = False):
        """Notify the channel that a module's update failed, optionally with
        the build/test log and pause/resume action buttons."""
        if self.config.args.simulate:
            print(f"SIM: send Teams webhook for {repo.id} with text:"
                  + (text_override or f"Dependency update on *{repo.id}* failed in **{repo.branch}**")
                  # Fix: test_failures may be None; the original concatenated it
                  # unconditionally and raised TypeError.
                  + (('\n' + test_failures) if test_failures else ''))
            return
        if self.endpoint:
            message_card = msteams.connectorcard(self.endpoint)
            message_card.color('#FF0000')
            message_card.text(text_override or f"Dependency update on *{repo.id}* failed in **{repo.branch}**")
            message_card.addSection(msteams.cardsection().linkButton(*gerrit_link_maker(self.config, repo)))
            if pause_links:
                pause_section = msteams.cardsection()
                pause = msteams.potentialaction(f"Pause Updates on '{repo.branch}' (This failure can be fixed)", "HttpPOST")
                pause.payload["target"] = "https://qt-cherry-pick-bot.herokuapp.com/pause-submodule-updates"
                pause.payload["body"] = yaml.dump({"branch": repo.branch})
                # NOTE(review): calls connectorcard.addPotentialAction unbound on a
                # cardsection — appears to rely on matching payload shape; confirm
                # against the pymsteams API.
                msteams.connectorcard.addPotentialAction(pause_section, pause)
                resume = msteams.potentialaction(f"Resume Updates on '{repo.branch}'", "HttpPOST")
                resume.payload["target"] = "https://qt-cherry-pick-bot.herokuapp.com/resume-submodule-updates"
                resume.payload["body"] = yaml.dump({"branch": repo.branch})
                msteams.connectorcard.addPotentialAction(pause_section, resume)
                message_card.addSection(pause_section)
            if test_failures:
                message_card.addSection(
                    msteams.cardsection().text('```\nBuild/Test Log:\n' + test_failures))
            message_card.send()
            if message_card.last_http_status.status_code != 200:
                print(
                    f"WARN: Unable to send alert to webhook for {repo.id}")
                return False
            return True

    def send_teams_webhook_finish_failed(self, text: str, config, reset_links=False):
        """Notify the channel that the round finished with failures, listing
        the blocking-failed modules and optional reset/retry buttons."""
        if self.config.args.simulate:
            print(f"SIM: send Teams webhook for Round Finished Failed with text: {text}")
            return True
        if self.endpoint:
            message_card = msteams.connectorcard(self.endpoint)
            message_card.text(text)
            if reset_links:
                reset_section = msteams.cardsection()
                reset = msteams.potentialaction(
                    "Reset round (New qtbase)", "HttpPOST")
                reset.payload[
                    "target"] = "https://qt-cherry-pick-bot.herokuapp.com/reset-submodule-updates"
                reset.payload["body"] = yaml.dump({"branch": config.args.branch})
                msteams.connectorcard.addPotentialAction(reset_section, reset)
                retry = msteams.potentialaction(
                    f"Retry current failed modules on '{config.args.branch}'", "HttpPOST")
                retry.payload[
                    "target"] = "https://qt-cherry-pick-bot.herokuapp.com/retry-submodule-updates"
                retry.payload["body"] = yaml.dump({"branch": config.args.branch})
                msteams.connectorcard.addPotentialAction(reset_section, retry)
                message_card.addSection(reset_section)
            failed_section = msteams.cardsection()
            failed_modules_text = "\n".join([r.id for r in config.state_data.values()
                                             if r.progress == PROGRESS.DONE_FAILED_BLOCKING])
            failed_section.text(f"```\nFailed Modules on {config.args.branch}:\n{failed_modules_text}")
            message_card.addSection(failed_section)
            message_card.send()
            if message_card.last_http_status.status_code != 200:
                print(
                    f"WARN: Unable to send alert to webhook for Round Failed Finished on {config.args.branch}")
                return False
            return True

    def send_teams_webhook_basic(self, text: str, repo: Repo = None, reset_links=False):
        """Send a plain text card, optionally linking the repo's current proposal."""
        # Fix: repo defaults to None but the original dereferenced repo.id
        # unconditionally in its log messages.
        repo_label = repo.id if repo else "<no repo>"
        if self.config.args.simulate:
            print(f"SIM: send Teams webhook for {repo_label} with text: {text}")
            return True
        if self.endpoint:
            message_card = msteams.connectorcard(self.endpoint)
            message_card.text(text)
            if repo and repo.proposal.change_id:
                message_card.addLinkButton(*gerrit_link_maker(self.config, repo))
            message_card.send()
            if message_card.last_http_status.status_code != 200:
                print(
                    f"WARN: Unable to send alert to webhook for {repo_label}")
                return False
            return True
diff --git a/util/dependency_updater/tools/toolbox.py b/util/dependency_updater/tools/toolbox.py
new file mode 100644
index 0000000..6fc70d4
--- /dev/null
+++ b/util/dependency_updater/tools/toolbox.py
@@ -0,0 +1,1313 @@
+############################################################################
+##
+## Copyright (C) 2020 The Qt Company Ltd.
+## Contact: https://www.qt.io/licensing/
+##
+## This file is part of the utils of the Qt Toolkit.
+##
+## $QT_BEGIN_LICENSE:LGPL$
+## Commercial License Usage
+## Licensees holding valid commercial Qt licenses may use this file in
+## accordance with the commercial license agreement provided with the
+## Software or, alternatively, in accordance with the terms contained in
+## a written agreement between you and The Qt Company. For licensing terms
+## and conditions see https://www.qt.io/terms-conditions. For further
+## information use the contact form at https://www.qt.io/contact-us.
+##
+## GNU Lesser General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU Lesser
+## General Public License version 3 as published by the Free Software
+## Foundation and appearing in the file LICENSE.LGPL3 included in the
+## packaging of this file. Please review the following information to
+## ensure the GNU Lesser General Public License version 3 requirements
+## will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+##
+## GNU General Public License Usage
+## Alternatively, this file may be used under the terms of the GNU
+## General Public License version 2.0 or (at your option) the GNU General
+## Public license version 3 or any later version approved by the KDE Free
+## Qt Foundation. The licenses are as published by the Free Software
+## Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+## included in the packaging of this file. Please review the following
+## information to ensure the GNU General Public License requirements will
+## be met: https://www.gnu.org/licenses/gpl-2.0.html and
+## https://www.gnu.org/licenses/gpl-3.0.html.
+##
+## $QT_END_LICENSE$
+##
+############################################################################
+
+import base64
+import copy
+import difflib
+import json
+import re
+import time
+import urllib
+from typing import Union
+from urllib.parse import unquote
+
+import requests
+import yaml
+from gerrit.changes import change as GerritChange, edit as GerritChangeEdit
+from gerrit.utils import exceptions as GerritExceptions
+
+from .config import Config
+from .proposal import Proposal
+from .repo import Repo, PROGRESS
+
+
def strip_prefix(repo: Union[str, "Repo"]) -> tuple[str, str]:
    """Separate the prefix (if exists) and repo name of an input.

    Matches namespace and project prefixes up to the last forward slash or the
    first hyphen thereafter if the final section contains more than one hyphen.
    qt-labs/tqtc-demo-moviedb matches on "qt-labs/tqtc-"
    playground/qt-creator/plugin-scripting matches on "playground/qt-creator/"
    """
    # Fix: reduce Repo objects to their id string *before* running the regex.
    # The original called re.findall() first, which raised TypeError for
    # Repo inputs since findall requires a string.
    if not isinstance(repo, str):
        repo = repo.id
    r = r'((?:.*/){1,}(?:(?!(.*-){2,})|(?:[^-]*-)))'
    raw_result = re.findall(r, repo)
    if raw_result:
        raw_prefix = raw_result.pop()[0]
        return raw_prefix, repo.removeprefix(raw_prefix)
    return "", repo
+
+
+def make_full_id(repo: Repo, change_id_override: str = "") -> str:
+ """Create gerrit change IDs from smaller bits if necessary
+ Format: repo~branch~change_id"""
+ retstr = ""
+ if (repo.id in change_id_override or urllib.parse.quote_plus(repo.id) in change_id_override)\
+ and (repo.branch in change_id_override or urllib.parse.quote_plus(repo.branch) in change_id_override):
+ # Already formatted!
+ retstr = change_id_override
+ elif change_id_override:
+ retstr = f"{repo.id}~{repo.branch}~{change_id_override}"
+ elif repo.proposal:
+ retstr = f"{repo.id}~{repo.branch}~{repo.proposal.change_id}"
+ return retstr
+
+
def gerrit_link_maker(config: Config, change: Union[GerritChange.GerritChange, Repo],
                      change_override: str = "") -> tuple[str, str]:
    """Make user-clickable links to changes on gerrit.

    Accepts a fetched gerrit change, an explicit change id via
    change_override, or a Repo whose current proposal is linked.
    Returns ("", "") when no change can be resolved.
    """
    repo = ""
    _change = None
    # Fix: the original compared type(change) against the imported *module*
    # GerritChange, which can never be true; test against the class instead.
    if isinstance(change, GerritChange.GerritChange):
        repo = change.project
        _change = change
    elif change_override:
        repo = change.id
        _change = config.datasources.gerrit_client.changes.get(change_override)
    elif isinstance(change, Repo):
        repo = change.id
        _change = config.datasources.gerrit_client.changes.get(change.proposal.change_id)
    if not repo:
        return "", ""
    subject = _change.subject
    mini_sha = _change.get_revision("current").get_commit().commit[:10]
    url = f"{config.GERRIT_HOST}c/{repo}/+/{_change._number}"
    return f"({mini_sha}) {subject[:70]}{'...' if len(subject) > 70 else ''}", url
+
+
def get_repos(config: Config, repos_override: list[str] = None,
              non_blocking_override: Union[list[str], tuple, None] = ()) -> dict[str, Repo]:
    """Create a dict of initialized Repo objects. If repos_override is not specified,
    repos from the application's config/arguments are initialized alongside qt5 submodules

    :argument repos_override: If set, returns a dict of Repo which only includes these repos.
    :argument non_blocking_override: Repos to mark non-blocking. The empty default
        means "fall back to configured non-blocking repos"; passing None disables
        non-blocking repos entirely.
    """
    base_repos = repos_override or config.args.repos or config.REPOS
    # Fix: default is now an immutable () instead of the mutable [] default
    # argument; the empty-vs-None sentinel semantics are unchanged.
    non_blocking_repos = non_blocking_override
    if not non_blocking_repos and non_blocking_repos is not None:
        non_blocking_repos = config.args.non_blocking_repos or config.NON_BLOCKING_REPOS
    other_repos = []
    if not repos_override or non_blocking_override:
        # Only look at the repos in the override if it is set.
        if config.args.update_default_repos or (base_repos and not config.args.use_head):
            other_repos = config.qt5_default.keys()
    repo_list = list(set(base_repos).union(set(other_repos)))

    assert repo_list, \
        ("ERROR: Must supply at least one repo to update!\n"
         "Set repos to update via positional arguments, "
         "config.yaml, or environment variable REPOS")
    # Gather all the repo names into Repo() objects.
    # NOTE(review): search_for_repo can return None on no match — confirm
    # callers never pass unknown names here.
    repos = [search_for_repo(config, r) for r in repo_list]
    if non_blocking_repos:
        for repo in non_blocking_repos:
            r = search_for_repo(config, repo)
            r.is_non_blocking = True
            repos.append(r)
    retdict = dict()
    for repo in repos:
        if repo.id in config.state_data.keys():
            # Use the one we've got in the state if it's there.
            retdict[repo.id] = config.state_data[repo.id]
            # override previous blocking switch. It will be disabled again if anything depends on it
            retdict[repo.id].is_non_blocking = repo.is_non_blocking
        else:
            # Initialize the new repo
            repo.deps_yaml, repo.branch = get_dependencies_yaml(config, repo)
            repo.original_ref = get_head(config, repo)
            retdict[repo.id] = repo
    if not config.args.update_default_repos and not config.args.use_head:
        # Default repos that were pulled in but not explicitly requested are
        # pinned at their current state and marked done.
        for repo in retdict.keys():
            if repo in config.qt5_default.keys() and repo not in base_repos:
                retdict[repo].progress = PROGRESS.DONE_NO_UPDATE
                retdict[repo].proposal.merged_ref = retdict[repo].original_ref
                retdict[repo].proposal.proposed_yaml = retdict[repo].deps_yaml
    return retdict
+
+
def get_state_repo(config, repo: Union[str, "Repo"]) -> Union[None, "Repo"]:
    """Locate a repo in the state file by string name or Repo.id.

    Returns None when not tracked in the state.
    """
    if isinstance(repo, str):
        return config.state_data.get(repo)
    # Fix: the original tested `type(Repo) == Repo` — the type of the *class*,
    # which is always False — so Repo-typed lookups silently returned None.
    if isinstance(repo, Repo):
        return config.state_data.get(repo.id)
    return None
+
+
def search_for_repo(config, repo: Union[str, Repo]) -> Union[None, Repo]:
    """Search gerrit for a repository.
    :returns: bare repo initialized, or the repo from the state file if it exists."""
    gerrit = config.datasources.gerrit_client
    raw_prefix, raw_name = strip_prefix(repo)
    # Already tracked in the state file: reuse that instance.
    if raw_prefix + raw_name in config.state_data.keys():
        return config.state_data[raw_prefix + raw_name]
    # Query gerrit for projects containing the bare name.
    # NOTE(review): precedence parses this as ('.*/.*' + raw_name) if raw_name
    # else repo.name — the fallback assumes repo is a Repo; verify for bare
    # string inputs with an empty name.
    search_response = gerrit.projects.regex('.*/.*' + raw_name if raw_name else repo.name)
    repo_names = [unquote(value.id) for value in search_response]
    # Prefer any hit under the configured default prefix (e.g. "qt/").
    for name in repo_names:
        if name.startswith(config.args.repo_prefix):
            ret_repo = get_state_repo(config, name)
            return ret_repo if ret_repo else Repo(name, config.args.repo_prefix)
    # If we didn't find a default [prefix] repo name, check to see if the original prefix and
    # name were in the search results, otherwise return the best guess with the prefix it has.
    if raw_prefix + raw_name in repo_names:  # Exact match
        ret_repo = get_state_repo(config, repo)
        return ret_repo if ret_repo else Repo(repo, raw_prefix)
    try:
        # First fuzzy-match attempt: against the fully qualified project ids.
        guess_id = str(difflib.get_close_matches(raw_name, repo_names, n=1).pop())
    except IndexError:
        try:
            # Second attempt: match against bare names with prefixes stripped,
            # then reattach the matching prefix.
            prefixes, names = zip(*[strip_prefix(n) for n in repo_names])
            guess_name = str(difflib.get_close_matches(raw_name, names, n=1).pop())
            guess_id = prefixes[names.index(guess_name)] + guess_name
            ret_repo = get_state_repo(config, guess_id)
            return ret_repo if ret_repo else Repo(guess_id, prefixes[names.index(guess_name)])
        except IndexError:
            # No plausible match at all.
            if not config.suppress_warn:
                print(f"WARN: No close match found for {raw_prefix + raw_name} among {repo_names}")
            return None
    guess_prefix, guess_name = strip_prefix(guess_id)
    print(f"INFO: Guessing fuzzy match {guess_id} for {repo}")
    ret_repo = get_state_repo(config, guess_id)
    return ret_repo if ret_repo else Repo(guess_id, guess_prefix)
+
+
def parse_gitmodules(config: Config, repo: Union[Repo, str], branch: str = "", ref: str = "") -> dict[
    str, dict[str, str]]:
    """Retrieve .gitmodules and parse it into a dict.

    :param branch branch: exclusive with ref. Pull from branch head.
    :param ref ref: exclusive with branch. Pull from given ref.

    schema:
    {
        submodule_name: {
            key: value,
        }
    }
    """
    repo_id = repo.id if type(repo) == Repo else repo
    # Normalize to a fully qualified ref name for the gerrit branches API.
    # NOTE(review): with the default branch="" this yields 'refs/heads/' —
    # callers are expected to pass either branch or ref; confirm.
    branch = branch if branch.startswith("refs/heads/") else 'refs/heads/' + branch
    gerrit = config.datasources.gerrit_client
    retdict = dict()
    try:
        if ref:
            gitmodules = gerrit.projects.get(repo_id).get_commit(ref) \
                .get_file_content(".gitmodules")
        else:
            gitmodules = gerrit.projects.get(repo_id).branches.get(branch) \
                .get_file_content('.gitmodules')
    except GerritExceptions.NotFoundError:
        # Missing file is not an error: leaf repos simply have no submodules.
        print(f"WARN: {repo_id} does not contain .gitmodules! "
              f"It probably doesn't have any submodules.")
        return retdict
    # Gerrit returns file content base64-encoded.
    raw_response = bytes.decode(base64.b64decode(gitmodules), "utf-8")
    # Each submodule section looks like:
    #   [submodule "<name>"]
    #       key = value
    for module_text in raw_response.split('[submodule '):
        if not module_text:
            continue
        split_module = module_text.split('\n')
        item = split_module.pop(0)  # was '[submodule "<name>"]' line before splitting
        assert item.startswith('"') and item.endswith('"]'), module_text
        item = item[1:-2]  # module name; followed by key = value lines, then an empty line
        data = dict(line.strip().split(' = ') for line in split_module if line)
        retdict[item] = data
    return retdict
+
+
def get_qt5_submodules(config: Config, types: list[str]) -> dict[str, Repo]:
    """Collect the list of submodules in qt5.git for a given branch.

    :param types: submodule 'status' values to include, e.g. ["essential", "addon"].
    :returns: mapping of repo id -> Repo, preferring instances from the state file."""
    if not types:
        print("WARN: No types passed to get_qt5_submodules!")
        return dict()
    gitmodules = parse_gitmodules(config, config.args.repo_prefix + 'qt5', config.args.branch)
    retdict = dict()
    for item, data in gitmodules.items():
        if data.get('status') not in types:
            continue
        # NOTE(review): this skips *every* submodule whenever qt5 is in
        # drop_dependency_from; presumably it should only skip the module
        # being dropped — confirm intent.
        if config.drop_dependency and f"{config.args.repo_prefix}qt5" in config.drop_dependency_from:
            continue  # Ignore this repo. We'll be dropping it.
        submodule_repo = search_for_repo(config, item)
        if submodule_repo is None:
            # Bug fix: search_for_repo may return None; this previously crashed
            # with AttributeError when a submodule could not be resolved.
            print(f"WARN: Could not locate a repo for submodule {item}. Skipping it.")
            continue
        if submodule_repo.id in config.state_data.keys():
            # Use the stateful instance so progress/proposal data is kept.
            retdict[submodule_repo.id] = config.state_data[submodule_repo.id]
        else:
            retdict[submodule_repo.id] = submodule_repo
    return retdict
+
+
def get_head(config: Config, repo: Union[Repo, str], pull_head: bool = False) -> str:
    """Fetch the branch head of a repo from codereview, or return the
    saved ref from state if the repo progress is >= PROGRESS.DONE.
    Override state refs and pull remote branch HEAD with pull_head=True

    :param repo: Repo, or a string name which is first resolved via search_for_repo.
    :param pull_head: when True, bypass state and qt5 pins and read the live branch head.
    :returns: a sha (or the pinned-submodule file content for qt5-default repos),
        or "" when no candidate branch exists."""
    gerrit = config.datasources.gerrit_client
    if type(repo) == str:
        repo = search_for_repo(config, repo)
    # Prefer the ref recorded in state for repos that already finished this round.
    if not pull_head and repo.id in config.state_data.keys() \
            and config.state_data[repo.id].progress >= PROGRESS.DONE:
        if config.state_data[repo.id].proposal.merged_ref:
            return config.state_data[repo.id].proposal.merged_ref
        else:
            return config.state_data[repo.id].original_ref
    if repo.id in config.qt5_default.keys() and not config.args.use_head:
        # qt5-default modules: read the sha pinned in qt5.git's submodule
        # file for this repo (the file named after the submodule).
        r = gerrit.projects.get(config.args.repo_prefix + 'qt5').branches.get(
            'refs/heads/' + config.args.branch).get_file_content(repo.name)
        return bytes.decode(base64.b64decode(r), "utf-8")
    else:
        # Otherwise probe the requested branch first, then fall back to dev/master.
        branches = [config.args.branch, "dev", "master"]
        branch_head = None
        for branch in branches:
            try:
                branch_head = gerrit.projects.get(repo.id).branches.get("refs/heads/" + branch)
                if branch != config.args.branch and not config.suppress_warn:
                    print(f"INFO: Using {branch} instead of {config.args.branch} "
                          f"as the reference for {repo}")
                break
            except GerritExceptions.UnknownBranch:
                continue
        if not branch_head:
            if not config.suppress_warn:
                print(f"Exhausted branch options for {repo}! Tried {branches}")
            return ""
        else:
            return branch_head.revision
+
+
def get_top_integration_sha(config, repo: Repo) -> str:
    """Use a Repo's change ID to fetch the gerrit comments
    to look for the top-most change integrated at the same time.
    Use the sha of that change as the merged-sha for the repo.
    This ensures that dependencies are correct in leaf modules
    which may expect all the co-staged changes to be available.

    :returns: the top integrated sha, or "" on failure. On failure, automatic
        staging is also disabled for the remainder of the run."""
    if not repo.proposal.change_id:
        return ""
    gerrit = config.datasources.gerrit_client
    change = gerrit.changes.get(repo.proposal.change_id)
    integration_id = _find_integration_id(change.messages.list())
    if integration_id:
        # Query the public COIN instance first, then fall back to the internal
        # one: the task may not have replicated to testresults yet.
        for coin_host in ["https://testresults.qt.io/coin", "http://coin/coin"]:
            r = requests.get(f"{coin_host}/api/integration/{repo.id}/tasks/{integration_id}")
            if r.status_code == 200:
                sha = json.loads(r.text)[4]["1"]["rec"]["6"]["str"]
                print(f"Found integration sha {sha} from Integration ID: {integration_id}")
                return sha
    print(f"ERROR: Failed to retrieve integration sha from testresults/coin for integration ID"
          f" {integration_id}.\n"
          f"\tRepo: {repo.id}, submodule update change ID: {repo.proposal.change_id}\n"
          f"\t{gerrit_link_maker(config, change)}"
          f"DISABLING AUTOMATIC STAGING AND CONTINUING...")
    config.args.stage = False
    config.teams_connector.send_teams_webhook_basic(
        f"Error in updating {repo.id}."
        f" Could not retrieve merged sha for {repo.proposal.change_id}!", repo=repo)
    return ""


def _find_integration_id(messages) -> str:
    """Extract the COIN integration ID from a change's 'Continuous
    Integration: Passed' gerrit comment.

    :param messages: gerrit change messages, oldest first.
    :returns: the integration ID, or "" if no passing-integration comment exists."""
    for message in messages:
        # Look for the message from COIN
        if "Continuous Integration: Passed" in message.message:
            for line in message.message.splitlines():
                # Here begins the list of changes integrated together.
                if line.strip().startswith("Details: "):
                    url = line.strip().split(" ")[1]  # Grab the COIN URL from the line.
                    return url.split("/")[-1]  # Get just the integration ID
            return ""
    return ""
+
+
def strip_agent_from_test_log(text: str):
    """Drop the COIN agent prefix ("agent...go:<line>: ") from each log line.

    Makes displaying logs more friendly."""
    agent_prefix = re.compile(r'agent.+go:[\d]+:\s')
    return agent_prefix.sub("", text)
+
+
def parse_failed_integration_log(config, repo: Repo = None, log_url: str = "") -> str:
    """Use a Repo's change ID to fetch the gerrit comments
    to look for the most recent integration failure.
    Retrieve the log and parse it to snip out failed test cases.

    :param repo: optional; when set, the log URL is discovered from the change's
        COIN comments (most recent first).
    :param log_url: optional; a known log URL to fetch directly.
    :returns: a human-readable failure summary, or "" if the integration passed,
        no log could be found, or the download failed."""
    if not log_url and not (repo and repo.proposal.change_id):
        return ""
    if repo:
        gerrit = config.datasources.gerrit_client
        change = gerrit.changes.get(repo.proposal.change_id)
        messages = change.messages.list()
        for message in reversed(messages):
            # Look for the message from COIN from the bottom up:
            if "Continuous Integration: Passed" in message.message:
                # Return if the integration passed. We don't need to parse the log.
                return ""
            elif "Continuous Integration: Failed" in message.message:
                m_lines = message.message.splitlines()
                start = False
                for line in m_lines:
                    # Locate the build log
                    if line.strip().startswith("Build log:"):
                        start = True
                        # URL on the same line: take it and stop scanning.
                        if line.removeprefix("Build log:").strip().startswith("http"):
                            log_url = line.removeprefix("Build log:").strip()
                            break
                        else:
                            continue
                    # "Details: " marks the end of the wrapped URL lines.
                    if line.strip().startswith("Details: "):
                        break
                    # Between "Build log:" and "Details:" the URL may be wrapped
                    # over several lines; concatenate the pieces.
                    if start:
                        log_url += line.strip()
                break

    if not log_url:
        # No integrations yet?
        return ""
    r = requests.get(log_url)
    if r.status_code == 200:
        log_text = r.content.decode("utf-8")
        if repo:
            print(f"Found integration failure log for {repo.proposal.change_id}")
    else:
        if repo:
            print(f"Error retrieving build log for {repo.proposal.change_id}")
        return ""
    ret_str = ""
    # Build failures take precedence: if compilation broke, test results are moot.
    build_failure = parse_log_build_failures(log_text)
    if build_failure:
        return build_failure

    test_failures = parse_log_test_failures(log_text)

    if test_failures:
        ret_str += f"Total failed test cases: {len(test_failures)}\n"
        for fail_case in test_failures.keys():
            ret_str += fail_case + "\n"
            ret_str += '\n'.join(test_failures[fail_case]) + '\n'
            # ret_str += "\n"
    ret_str = strip_agent_from_test_log(ret_str)
    return ret_str
+
+
def parse_log_test_failures(logtext: str) -> dict[str, list[str]]:
    """Parse out some basic failure cases from a COIN log.
    This is only basic parsing. Many fail reasons are not caught by this parsing.

    :param logtext: full text of a COIN build/test log.
    :returns: mapping of test executable name -> list of failed-case snippets."""
    ret_dict = {}
    # Normal failures: a totals line such as "2 failed,". The lookbehind
    # excludes the "0 failed," summaries printed by passing tests.
    pattern = re.compile(r'[0-9]+(?<!0) failed,')
    if pattern.search(logtext):  # Find match(es) with the regex pattern above.
        # Iterate through the matches for failed tests and strip out the individual cases.
        for match in pattern.finditer(logtext):
            # Search backwards for the test banner; +27 skips past the
            # "********* Start testing of " prefix itself.
            # (Removed a dead `tstnameindex = 0` store that was immediately overwritten.)
            tstnameindex = logtext.rfind("********* Start testing of ", 0,
                                         match.span()[0]) + 27  # Search for the test case.

            tstname = logtext[tstnameindex:
                              logtext.find("*********", tstnameindex) - 1]
            ret_dict[tstname] = []

            fail_count = int(re.match(r'[0-9]*', match.group(0)).group(0))
            if fail_count > 5:
                ret_dict[tstname].append(f"Too many fail cases ({fail_count}). See full log for details.")
                break

            # Save a snip of the log with the failed test(s)
            logsnip = logtext[tstnameindex: logtext.find("********* Finished testing of ", tstnameindex)]

            for failcasematch in re.finditer('FAIL! : ', logsnip):  # Find the actual fail case
                # Look for the end of the case. We know coin always prints the file location.
                # Grab up the newline when we find that.
                failcasestring = logsnip[failcasematch.span()[0]:
                                         logsnip.find('\n', logsnip.find("Loc: ", failcasematch.span()[0]))] + "\n"
                ret_dict[tstname].append(failcasestring)

    # Crashes
    crash_index = logtext.rfind("ERROR: Uncontrolled test CRASH!")
    if crash_index > 0:
        tstnameindex = logtext.find("CMake Error at ", crash_index) + 15  # Search for test case
        re_tstname = re.compile(r"Test\s+#[0-9]+:\s(tst_.+)\s\.+\*\*\*Failed")
        try:
            tstname = re_tstname.search(logtext, tstnameindex).groups()[0]
        except AttributeError:
            # Failed to parse the end of the crash, ignore it and let a human read the log.
            return ret_dict
        ret_dict[tstname] = []

        # Save a snip of the log with the failed test(s)
        logsnip = logtext[
            crash_index: logtext.find("\n", logtext.find("***Failed ", tstnameindex))]

        ret_dict[tstname].append(logsnip)

    return ret_dict
+
+
def parse_log_build_failures(logtext: str) -> str:
    """Parse the most basic of build errors.

    Locates a compiler summary line such as "1 error generated." and returns
    the log from the preceding "FAILED: " build-step line up to that summary.

    :param logtext: full text of a COIN build log.
    :returns: the failing build step snippet, or "" when no compile error is found."""
    # Normal failures
    err_pos = logtext.find(" error generated.")
    if err_pos > 0:
        # Bug fix: search for the last "FAILED: " *before* the error summary.
        # rfind() was previously called with err_pos as the search *start*,
        # which looked after the summary and almost always returned -1.
        start_pos = logtext.rfind("FAILED: ", 0, err_pos)
        if start_pos < 0:
            # No build-step marker precedes the summary; nothing useful to snip.
            return ""
        return logtext[start_pos:err_pos]
    return ""
+
+
def get_dependencies_yaml(config, repo: Repo, fetch_head: bool = False) -> tuple[yaml, str]:
    """Fetches the dependencies.yaml file of a repo from gerrit,
    or returns the saved dependencies.yaml from state if the repo
    progress is >= PROGRESS.DONE

    :param fetch_head: when True, always read from the live branch head,
        ignoring state data and qt5 pins.
    :returns: (parsed dependencies dict, branch the file was found on)"""
    gerrit = config.datasources.gerrit_client
    found_branch = config.args.branch
    r = None
    if repo.id in config.qt5_default.keys() and not config.args.use_head and not fetch_head:
        if repo.id in config.state_data.keys():
            print(f"Using state data for {repo.id}")
            return config.state_data[repo.id].deps_yaml, config.state_data[repo.id].branch
        # qt5-default modules read the file at the sha pinned in qt5.git.
        qt5_repo_sha = get_head(config, repo)
        try:
            r = gerrit.projects.get(repo.id).get_commit(qt5_repo_sha).get_file_content(
                'dependencies.yaml')
        except GerritExceptions.NotFoundError:
            pass

    if not r:
        if repo.id in config.state_data.keys() and not fetch_head and \
                config.state_data[repo.id].progress >= PROGRESS.DONE:
            print(f"Using state deps.yaml from merged repo {repo.id}")
            # Bug fix: these lookups previously read `config.state_repo`,
            # which does not exist; state is kept in config.state_data.
            if config.state_data[repo.id].proposal:
                return config.state_data[repo.id].proposal.proposed_yaml, config.state_data[
                    repo.id].branch
            else:
                return config.state_data[repo.id].deps_yaml, config.state_data[repo.id].branch
        # Probe the requested branch first, then fall back to dev/master.
        branches = [config.args.branch, "dev", "master"]
        for branch in branches:
            try:
                r = gerrit.projects.get(repo.id).branches.get(
                    'refs/heads/' + branch).get_file_content('dependencies.yaml')
                found_branch = branch
                if not branch == config.args.branch:
                    if not config.suppress_warn:
                        print(f"INFO: Found dependencies.yaml in {repo.id} on branch {branch}"
                              f" instead of {config.args.branch}")
                break
            except (GerritExceptions.UnknownBranch, GerritExceptions.NotFoundError):
                continue

    if not r:
        print(f"WARN: {repo.id} doesn't seem to have a dependencies.yaml file!\n")
        # "Disabling automatic staging, as this may cause unintended behavior.")  # TODO: Determine if this needs to disable smartly.
        # config.args.stage = False
        return {"dependencies": {}}, found_branch

    d = dict()
    try:
        d = yaml.load(bytes.decode(base64.b64decode(r), "utf-8"), Loader=yaml.FullLoader)
        if config.drop_dependency and repo in config.drop_dependency_from:
            # Strip the to-be-dropped dependency out of the parsed file.
            drop = [k for k in d.get("dependencies").keys() if repo.name in k]
            if drop:
                del d["dependencies"][drop.pop()]
    except yaml.YAMLError as e:
        if not config.suppress_warn:
            print(f"ERROR: Failed to load dependencies yaml file.\nYAML Exception: {e}")
    return d, found_branch
+
+
def get_check_progress(config: Config, repo: Repo) -> (PROGRESS, str, str):
    """Determine the progress status of a submodule update

    :returns: progress: PROGRESS, merged_ref: str, gerrit_change_status: str[NEW, MERGED, STAGED, INTEGRATING, ABANDONED]"""
    proposal = repo.proposal
    if repo.progress >= PROGRESS.DONE:
        return repo.progress, proposal.merged_ref, "MERGED"
    if proposal.proposed_yaml and not proposal.change_id:
        # A proposal exists locally but hasn't been pushed to gerrit yet.
        if proposal.inconsistent_set:
            return PROGRESS.WAIT_INCONSISTENT, "", ""
        return PROGRESS.READY, "", ""
    if proposal.change_id:
        remote_status = config.datasources.gerrit_client.changes.get(proposal.change_id).status
        if remote_status == "NEW" and repo.progress == PROGRESS.IN_PROGRESS \
                and repo.stage_count > 0:
            # The change was staged before but is NEW again: integration failed.
            return PROGRESS.RETRY, "", remote_status
        if remote_status in ("STAGED", "INTEGRATING"):
            return PROGRESS.IN_PROGRESS, "", remote_status
        if remote_status == "MERGED":
            return PROGRESS.DONE, get_top_integration_sha(config, repo), remote_status
        if remote_status == "ABANDONED":
            return PROGRESS.ERROR, "", remote_status
    return repo.progress, "", proposal.gerrit_status
+
+
def retry_update(config: Config, repo: Repo) -> Repo:
    """Restage changes from a failed integration attempt and increment
    the retry counter."""
    repo.retry_count = repo.retry_count + 1
    staged_ok = stage_update(config, repo)
    if staged_ok:
        repo.stage_count += 1
    return repo
+
+
def post_gerrit_comment(config: Config, change_id: str, message: str) -> None:
    """Post a simple comment to a gerrit change.

    In simulation mode the comment is only printed, never posted.

    :param change_id: gerrit change to comment on.
    :param message: comment text."""
    if config.args.simulate:
        print(f'SIM: Post gerrit comment on {change_id} with message: "{message}"')
        # Bug fix: previously fell through and posted the comment even in
        # simulate mode; other mutators (e.g. stage_change) return early here.
        return
    change = config.datasources.gerrit_client.changes.get(change_id)
    try:
        change.get_revision("current").set_review({"message": message})
    except GerritExceptions.ConflictError as e:
        print(f"WARN: Failed to post comment on {change_id}: {e}")
+
+
def stage_update(config: Config, repo: Repo) -> bool:
    """Perform a 'safe stage' on the update by attempting to stage all
    updates together, but cancel the attempt and unstage if a conflict
    is generated during staging.

    :returns: True when every change staged cleanly, False if the update was
        skipped (already staged/merged) or any change failed to stage."""
    if repo.proposal.gerrit_status in ["STAGED", "INTEGRATING", "MERGED"]:
        print(f"{repo.id} update is already {repo.proposal.gerrit_status}. Skipping.")
        return False
    # The update's own change is always part of the staging set.
    if repo.proposal.change_id not in repo.to_stage:
        repo.to_stage.append(repo.proposal.change_id)
    # Optionally sweep in other approved changes the bot reviews on this repo/branch.
    if config.args.sweep_changes:
        repo.to_stage = list(set(repo.to_stage).union(gather_costaging_changes(config, repo)))
    print(f"Preparing to stage changes for {repo.id}: {repo.to_stage}")
    error = False
    # Create a list of links for each change staged.
    gerrit_link_self = " ".join(gerrit_link_maker(config, repo))
    costaging_changes_links = "\n" + "\n".join(
        [" ".join(gerrit_link_maker(config, repo, change_override=make_full_id(repo, change_id))) for change_id in repo.to_stage if change_id != repo.proposal.change_id])
    for change_id in repo.to_stage:
        # Pick the comment to post alongside staging: the update change lists
        # its co-staged companions; companions link back to the update change.
        if repo.proposal.change_id == change_id:
            if len(repo.to_stage) > 1:
                message = f"Staging this update with other changes:\n{costaging_changes_links}"
            else:
                message = ""
        else:
            message = "Staging this change automatically with the dependency update for this" \
                      f" module:\n" \
                      f"{gerrit_link_self}"
        if stage_change(config, change_id, message):
            print(f"{repo.id}: Staged "
                  f"{'submodule update' if repo.proposal.change_id == change_id else 'related change'}"
                  f" {change_id}")
        else:
            # Leave a breadcrumb on the change that failed, then flag the error
            # but keep trying the remaining changes.
            if repo.proposal.change_id == change_id:
                post_gerrit_comment(config, change_id,
                                    f"Failed to stage this dependency update automatically.\n"
                                    f"{'Co-staged with ' + costaging_changes_links if len(repo.to_stage) > 1 else ''}")
            else:
                post_gerrit_comment(config, change_id, "Failed to stage this change automatically"
                                                       " with the dependency update for this repo."
                                                       " It probably created a merge conflict."
                                                       " Please review.\n"
                                                       f"See: {gerrit_link_self}.")
            error = True
    if error:
        print(f"failed to stage {repo.id}: {repo.to_stage}\n")
        config.teams_connector.send_teams_webhook_failed_stage(repo)
        # for change_id in repo.to_stage:
        #     unstage_change(config, change_id)
        # print(f"Changes to be staged together for {repo.id} have now been unstaged:\n"
        #       ', '.join(repo.to_stage))
        return False
    print()
    return True
+
+
def stage_change(config: Config, change_id: str, comment: str = "") -> bool:
    """Stage a change in gerrit. Requires the QtStage permission.

    :returns: True on success (or in simulation mode), False otherwise."""
    if config.args.simulate:
        print(f"SIM: Simulated successful staging of {change_id}")
        return True
    gerrit_change = config.datasources.gerrit_client.changes.get(change_id)
    # Sleep for one second to give gerrit a second to cool off. If the change was just
    # created, sometimes gerrit can be slow to release the lock, resulting in a 409 response code.
    time.sleep(1)
    try:
        gerrit_change.stage()
        if comment:
            post_gerrit_comment(config, change_id, comment)
        return True
    except GerritExceptions.NotAllowedError:
        print(f"WARN: Unable to stage {change_id} automatically.\n"
              f"Either you do not have permissions to stage this change, or the branch is closed.")
    except GerritExceptions.ConflictError:
        print(f"ERROR: Unable to stage {change_id} automatically.\n"
              "The change contains conflicts and cannot be staged. Please verify that no other\n"
              "changes currently staged conflict with this update.")
    return False
+
+
def unstage_change(config: Config, change_id: str) -> bool:
    """Unstage a change from gerrit

    :returns: True on success, False if gerrit refused with a conflict."""
    try:
        config.datasources.gerrit_client.changes.get(change_id).unstage()
        return True
    except GerritExceptions.ConflictError:
        return False
+
+
def gather_costaging_changes(config: Config, repo: Repo) -> list[str]:
    """Gather changes where the bot is tagged as reviewer"""
    gerrit = config.datasources.gerrit_client
    changes = gerrit.changes.search(f'q=reviewer:{config.GERRIT_USERNAME}'
                                    f'+status:open'
                                    f'+label:"Code-Review=2"'
                                    f'+label:"Sanity-Review=1"'
                                    f'+branch:{repo.branch}'
                                    f'+repo:{repo.id}')
    # Collect fully scoped change IDs, since plain change ids can exist on
    # multiple branches. Skip anything already queued for staging.
    return [change.id for change in changes if change.change_id not in repo.to_stage]
+
+
def search_existing_change(config: Config, repo: Repo, message: str) -> tuple[str, str]:
    """Try to re-use open changes the bot created where possible
    instead of spamming new changes

    :returns: (change_id, change_number) of a matching bot-owned change,
        or ("", "") when none exists."""
    query = (f'q=message:"{message}"'
             f'+owner:{config.GERRIT_USERNAME}'
             f'+(status:open+OR+status:staged+OR+status:integrating)'
             f'+branch:{repo.branch}'
             f'+repo:{repo.id}')
    changes = config.datasources.gerrit_client.changes.search(query)
    if not changes:
        return "", ""
    found = changes.pop()
    return found.change_id, found._number
+
+
def push_submodule_update(config: Config, repo: Repo, retry: bool = False) -> Proposal:
    """Push the submodule update to codereview

    :param retry: internal flag set when re-entering after abandoning a stale change.
    :returns: the repo's proposal, updated with change id/number or merged_ref."""
    deps_yaml_file = yaml.dump(repo.proposal.proposed_yaml)

    print()

    # If the branch head already carries the proposed yaml, there is nothing to push.
    current_head_deps, _ = get_dependencies_yaml(config, repo, fetch_head=True)
    if current_head_deps == repo.proposal.proposed_yaml:
        repo.proposal.merged_ref = get_head(config, repo, pull_head=True)
        repo.proposal.change_id = ""
        repo.proposal.change_number = ""
        print(f"Branch head for {repo.id} is already up-to-date! Not pushing an update!")
        return repo.proposal

    change, edit = acquire_change_edit(config, repo,
                                       f"Update dependencies on '{repo.branch}' in {repo.id}")
    if not edit:
        # This can occur if the round was reset or rewound while a patch was still
        # integrating. Instead of creating a new change, locate it, compare our yaml file
        # with the integrating one.
        current_patch_deps = yaml.load(bytes.decode(base64.b64decode(
            change.get_revision("current").get_commit().get_file_content("dependencies.yaml")),
            'utf-8'), Loader=yaml.Loader)
        if current_patch_deps == repo.proposal.proposed_yaml:
            repo.proposal.gerrit_status = change.status
            print(f"Currently {change.status} change in {repo.id} is already up-to-date!")
        else:
            # If the found change's file doesn't match our proposal, then our proposal is newer.
            # We must abort the update for this repo and wait until the currently integrating
            # change merges or fails to integrate.
            repo.proposal.change_id = ""
            repo.proposal.change_number = ""
            print(current_patch_deps, "\n", deps_yaml_file)
            print(f"WARN: Found a currently {change.status} change which doesn't match "
                  f"the proposed update! Waiting until {repo.id} -> {change.change_id} "
                  f"merges or fails.")
        return repo.proposal
    try:
        # Rebase the (possibly reused) change onto the current branch head.
        change.rebase({"base": ""})
        print(f"Rebased change {change.change_id}")
    except GerritExceptions.ConflictError:
        # Rebase conflicts are only fatal if we are not already on HEAD.
        if not change.get_revision("current").get_commit().parents[0]["commit"]\
                == get_head(config, repo, True):
            print("WARN: Failed to rebase change due to conflicts."
                  " Abandoning and recreating the change.")
            # Failed to rebase because of conflicts
            edit.delete()
            post_gerrit_comment(config, change.change_id, "Abandoning this change because"
                                                          "it cannot be rebased without conflicts.")
            change.abandon()
            repo.proposal.change_id = ""
            repo.proposal.change_number = ""
            # Recurse once with a clean slate; the fresh call creates a new change.
            repo.proposal = push_submodule_update(config, repo)
            return repo.proposal
        else:
            # Already on HEAD. OK to move on.
            pass
    try:
        edit.put_change_file_content("dependencies.yaml", deps_yaml_file)
        # Read the edit back to verify the file content actually landed.
        file_content_edit = bytes.decode(base64.b64decode(edit.get_change_file_content("dependencies.yaml")))
        print(f"Push file succeeded? {deps_yaml_file == file_content_edit}\n{file_content_edit if deps_yaml_file != file_content_edit else ''}")
        time.sleep(1)
        edit.publish({
            "notify": "NONE"
        })
        print(f"Published edit as new patchset on {change.change_id}")
    except GerritExceptions.ConflictError:
        # A conflict error at this point just means that no
        # changes were made. So just catch the exception and
        # move on.
        change.abandon()
        repo.proposal.change_id = ""
        repo.proposal.change_number = ""
        if not retry:
            print("Retrying update with a fresh change...")
            repo.proposal = push_submodule_update(config, repo, retry=True)
        return repo.proposal
    approve_change_id(change, repo.id)
    return repo.proposal
+
+
def do_try_supermodule_updates(config: Config) -> dict[str, Repo]:
    """Push supermodule updates if needed"""
    def _still_pending(r):
        # A blocking repo holds the supermodules back while it is unfinished,
        # or finished in a failed-blocking state. The supermodules themselves
        # are excluded from the check.
        return (r.id not in ["qt/qt5", "yocto/meta-qt6"]
                and (r.progress < PROGRESS.DONE
                     or r.progress == PROGRESS.DONE_FAILED_BLOCKING))

    pending = [r for r in config.state_data.values()
               if not r.is_non_blocking and _still_pending(r)]
    if not pending:
        if config.args.update_supermodule:
            supermodule = push_supermodule_update(config)
            config.state_data[supermodule.id] = supermodule
        if config.args.update_yocto_meta:
            yocto = push_yocto_update(config)
            config.state_data[yocto.id] = yocto
    return config.state_data
+
+
def push_supermodule_update(config: Config, retry: bool = False) -> Repo:
    """Push the meta-update with all the new shas to the supermodule repo

    :param retry: internal flag set when re-entering after abandoning a stale change.
    :returns: the qt5 Repo with its progress and proposal updated."""
    gerrit = config.datasources.gerrit_client
    qt5_name = config.args.repo_prefix + "qt5"
    qt5_repo = search_for_repo(config, qt5_name)
    if qt5_repo.progress >= PROGRESS.IN_PROGRESS:
        return qt5_repo
    qt5_repo.is_supermodule = True
    qt5_repo.branch = config.args.branch
    qt5_repo.proposal.change_id, qt5_repo.proposal.change_number \
        = search_existing_change(config, qt5_repo, "Update Submodules")
    gitmodules_orig = bytes.decode(base64.b64decode(gerrit.projects.get(qt5_name).branches.get(
        f"refs/heads/{config.args.branch}").get_file_content(".gitmodules")), 'utf-8')
    gitmodules_updated = copy.deepcopy(gitmodules_orig)

    qt5_modules = get_qt5_submodules(config, ['essential', 'addon', 'deprecated', 'ignore', 'preview'])
    qt5_repo.dep_list = list(qt5_modules.keys())
    if config.args.simulate:
        print(f"{qt5_repo.id} submodule update proposal:")
        print("\n".join([f"{r.id}: {r.proposal.merged_ref}" for r in qt5_modules.values()]))
        print()
        return qt5_repo

    # Bug fix: the closing quote belongs around the branch name, as in the
    # analogous submodule-update subject.
    change, edit = acquire_change_edit(config, qt5_repo,
                                       f"Update submodules on '{config.args.branch}' in {qt5_name}")
    if not edit:
        # A change already exists (e.g. the round was reset while it was
        # integrating). Compare its pinned shas against our proposal.
        diff: bool = False
        for repo in qt5_modules.values():
            submodule_patch_ref = bytes.decode(base64.b64decode(
                change.get_revision("current").get_commit().get_file_content(repo.name)), 'utf-8')
            if repo.proposal and submodule_patch_ref != repo.proposal.merged_ref:
                diff = True
        if diff:
            print(f"WARN: Found a currently {change.status} change which doesn't match "
                  f"the proposed update! Waiting until {qt5_repo.id} -> {change.change_id} "
                  f"merges or fails.")
            qt5_repo.proposal.change_id = ""
            qt5_repo.proposal.change_number = ""
        else:
            qt5_repo.proposal.gerrit_status = change.status
            print(f"Currently {change.status} change in {qt5_repo.id} is already up-to-date!")
        return qt5_repo

    # Pin each updated module's merged sha into the change edit.
    for repo in qt5_modules.values():
        try:
            if repo.proposal.merged_ref:
                edit.put_change_file_content(repo.name, repo.proposal.merged_ref)
            else:
                continue  # The module didn't get updated this round.
            time.sleep(0.5)
        except GerritExceptions.ConflictError:
            # A conflict error at this point just means that no
            # changes were made. So just catch the exception and
            # move on. This would usually occur if the change is
            # reused some shas are already up-to-date.
            print(f"Submodule sha for {repo.id} is already up-to-date: {repo.proposal.merged_ref}")
            continue

    if config.drop_dependency:
        # Edit the .gitmodules file to remove references to the
        # module to drop. Remove it entirely if necessary, or just
        # from the depends/recommends list of other modules.
        if qt5_repo.id in config.drop_dependency_from:
            module_entry = snip_gitmodules(config.drop_dependency.name, gitmodules_orig)
            # Bug fix: str.replace() returns a new string; the result was
            # previously discarded, and it targeted the orig copy instead of
            # the updated one.
            gitmodules_updated = gitmodules_updated.replace(module_entry, "")
        for repo in config.drop_dependency_from:
            module_entry = snip_gitmodules(repo.name, gitmodules_orig)
            module_entry_lines = module_entry.splitlines()
            # Keep the whole original line around: it is the replace() needle.
            # Bug fix: the old code popped it out of the filtered list, leaving
            # an empty needle for replace(), which would corrupt the file.
            depends_candidates = [line for line in module_entry_lines if "depends" in line]
            depends_line = depends_candidates.pop() if len(depends_candidates) else ""
            depends = depends_line.split(" ") if depends_line else []
            recommends_candidates = [line for line in module_entry_lines if "recommends" in line]
            recommends_line = recommends_candidates.pop() if len(recommends_candidates) else ""
            recommends = recommends_line.split(" ") if recommends_line else []

            # Bug fix: remove the *dropped* module's name (the old code deleted
            # repo.name, and mutated `depends` in the recommends branch), and
            # assign the results of str.replace().
            if config.drop_dependency.name in depends:
                depends.remove(config.drop_dependency.name)
                gitmodules_updated = gitmodules_updated.replace(depends_line, " ".join(depends))
            if config.drop_dependency.name in recommends:
                recommends.remove(config.drop_dependency.name)
                gitmodules_updated = gitmodules_updated.replace(recommends_line, " ".join(recommends))
    try:
        # Rebase the (possibly reused) change onto the current branch head.
        change.rebase({"base": ""})
        print(f"Rebased change {change.change_id}")
    except GerritExceptions.ConflictError:
        # Rebase conflicts are only fatal if we are not already on HEAD.
        if not change.get_revision("current").get_commit().parents[0]["commit"]\
                == get_head(config, qt5_repo, True):
            print("WARN: Failed to rebase change due to conflicts."
                  " Abandoning and recreating the change.")
            # Failed to rebase because of conflicts
            edit.delete()
            post_gerrit_comment(config, change.change_id, "Abandoning this change because"
                                                          "it cannot be rebased without conflicts.")
            time.sleep(1)
            change.abandon()
            config.state_data[qt5_repo.id] = reset_module_properties(config, qt5_repo)
            qt5_repo = push_supermodule_update(config)
            return qt5_repo
        else:
            # Already on HEAD. OK to move on.
            pass
    if not gitmodules_orig == gitmodules_updated:
        try:
            edit.put_change_file_content(".gitmodules", gitmodules_updated)
        except GerritExceptions.ConflictError:
            print("WARN: Trying to push new .gitmodules, but the patchset is already up-to-date.")
    try:
        time.sleep(1)
        edit.publish({
            "notify": "NONE"
        })
        print(f"Published edit as new patchset on {change.change_id}")
        qt5_repo.progress = PROGRESS.IN_PROGRESS
        approve_change_id(change, qt5_repo.id)
        config.teams_connector.send_teams_webhook_basic(text=f"Updating {qt5_repo.id} with a consistent"
                                                             f" set of submodules in **{qt5_repo.branch}**", repo=qt5_repo)
    except GerritExceptions.ConflictError:
        # Publishing an edit with no effective changes raises a conflict.
        print(f"No changes made to {qt5_repo.id}, possible that the current patchset is up-to-date")
        edit.delete()
        diff: bool = False
        for repo in qt5_modules.values():
            submodule_patch_ref = bytes.decode(base64.b64decode(
                change.get_revision("current").get_commit().get_file_content(repo.name)), 'utf-8')
            if repo.proposal and submodule_patch_ref == repo.proposal.merged_ref\
                    and not submodule_patch_ref == repo.original_ref:
                diff = True
        if not diff:
            # The current patchset is the same as HEAD. Don't stage empty changes!
            # TODO: Figure out a way to make rebased changes accept updated shas!
            change.abandon()
            if not retry:
                print("Retrying update with a fresh change...")
                qt5_repo = push_supermodule_update(config, retry=True)
                return qt5_repo
            else:
                # Still returned that everything is up-to-date even on a fresh change.
                # Odd, but probably true at this point.
                qt5_repo.progress = PROGRESS.DONE_NO_UPDATE
        else:
            # Seems we actually succeeded in publishing the change.
            qt5_repo.progress = PROGRESS.IN_PROGRESS
            approve_change_id(change, qt5_repo.id)

    return qt5_repo
+
+
def search_pinned_submodule(config: Config, module: Repo, submodule: Union[str, Repo]) -> Union[str, None]:
    """Fetch the gitmodules for a repo and retrieve pinned submodule
    sha for the given submodule.

    :param module: parent repo whose .gitmodules is inspected (at its merged
        ref if available, else its original ref).
    :param submodule: submodule name (str) or Repo to look for.
    :returns: the pinned sha, or None when the submodule is not referenced
        (falls off the end of the loop)."""
    gerrit = config.datasources.gerrit_client
    module_ref = module.proposal.merged_ref or module.original_ref
    gitmodules = parse_gitmodules(config, repo=module,
                                  ref=module_ref)
    submodule_name = submodule.name if type(submodule) == Repo else submodule
    # Match either by section name or by the configured clone url.
    for key, data in gitmodules.items():
        if submodule_name in key or submodule_name in data.get("url"):
            print(
                f"Found submodule {submodule_name} in {[d for d in [key, 'url: ' + data.get('url')] if submodule_name in d]}")
            # Fetch the pinned submodule ref
            r = gerrit.projects.get(module.id).get_commit(module_ref).get_file_content(
                data.get("path"))
            return bytes.decode(base64.b64decode(r), "utf-8")
+
+
def push_yocto_update(config: Config, retry: bool = False) -> Repo:
    """Push the meta-update with all the applicable shas to the yocto/meta-qt6 repo

    Rewrites the SRCREV_* lines of recipes-qt/qt6/qt6-git.inc with the
    refs chosen for each module (or pinned submodule sha) and publishes
    the result as a Gerrit change edit.

    :param config: Shared Config with datasources, args, and state_data.
    :param retry: Internal flag; True when this call is the one retry made
                  after abandoning an apparently up-to-date change.
    :return: The yocto/meta-qt6 Repo with progress/proposal updated.
    """
    gerrit = config.datasources.gerrit_client
    yocto_repo = search_for_repo(config, "yocto/meta-qt6")
    filename = "recipes-qt/qt6/qt6-git.inc"
    # Already being handled (or finished); nothing to do.
    if yocto_repo.progress >= PROGRESS.IN_PROGRESS:
        return yocto_repo
    yocto_repo.is_supermodule = True
    yocto_repo.branch = config.args.branch
    # Reuse an open update change if one already exists on the branch.
    yocto_repo.proposal.change_id, yocto_repo.proposal.change_number \
        = search_existing_change(config, yocto_repo, "Update submodule refs")

    # Pull the current recipe file from branch HEAD (base64-encoded by Gerrit).
    r = gerrit.projects.get(yocto_repo.id).branches.get(f"refs/heads/{yocto_repo.branch}") \
        .get_file_content(filename)
    old_file = bytes.decode(base64.b64decode(r), "utf-8")
    file_lines = old_file.splitlines()
    # The trial-and error nature of finding submodules can be a bit noisy, so suppress warnings.
    config.suppress_warn = True
    print("Preparing yocto/meta-qt6 update:")
    for i, line in enumerate(file_lines):
        # Only SRCREV assignment lines carry pinned shas; skip everything else.
        if not line.startswith("SRCREV_"):
            continue
        SRCREV, sha = line.split(" = ")
        print("OLD: ", line)
        # e.g. "SRCREV_qtdeclarative" -> "qtdeclarative",
        # "SRCREV_qtquick3d-assimp" -> "qtquick3d-assimp" (maybe a submodule).
        repo_name_maybe_submodule = SRCREV.split("_")[1]
        module_name = ""
        pinned_submodule_sha = ""
        if "-" in repo_name_maybe_submodule:
            # A dash may mean "<parent>-<submodule>"; try to resolve the
            # parent from an earlier SRCREV line in the same file.
            split = repo_name_maybe_submodule.split("-")
            maybe_parent = "-".join(split[:-1])
            parent_lines = [l for l in file_lines[:i] if l.startswith(f"SRCREV_{maybe_parent}")]
            if parent_lines:
                parent_line = parent_lines[-1].split(" = ")
                module_name = parent_line[0].split("_").pop()
                if "-" in module_name:
                    # Parent itself looks dashed; keep only the last segment.
                    module_name = module_name.split("-").pop()
                module_repo = search_for_repo(config, module_name)
                # The parent's sha in the recipe is quoted; strip the quotes.
                module_repo.original_ref = parent_line[1].strip('"')
                submodule_name = split[-1]
                submodule_repo = search_for_repo(config, submodule_name)
                if submodule_repo:
                    pinned_submodule_sha = search_pinned_submodule(config, module_repo,
                                                                   submodule_repo)
                if not pinned_submodule_sha:
                    # Fall back to matching by the raw name from the SRCREV key.
                    print(f"Couldn't find a submodule named {submodule_repo.id}"
                          f' in {module_repo.id}. Trying raw submodule name: "{submodule_name}"')
                    pinned_submodule_sha = search_pinned_submodule(config, module_repo,
                                                                   submodule_name)
                if pinned_submodule_sha:
                    print(f"Found {submodule_name} as a submodule"
                          f" to {module_name}@{module_repo.original_ref}")
            if not pinned_submodule_sha:
                # Not a submodule after all; treat the dashed name as a module.
                print(f"Couldn't figure out {repo_name_maybe_submodule} as a submodule.\n"
                      f"Trying {repo_name_maybe_submodule} as a regular module instead.")
                module_name = repo_name_maybe_submodule
                submodule_name = ""
        else:
            module_name = repo_name_maybe_submodule
        module_repo = search_for_repo(config, module_name)
        if not module_repo.original_ref:
            module_repo.original_ref = get_head(config, module_repo)
        # Write the new (quoted) sha back into the recipe line: either the
        # pinned submodule sha or the module's merged/original ref.
        if pinned_submodule_sha:
            file_lines[i] = line.replace(sha, f'"{pinned_submodule_sha}"')
        else:
            file_lines[i] = line.replace(sha,
                                         f'"{module_repo.proposal.merged_ref or module_repo.original_ref}"')
        print("NEW: ", file_lines[i])

    config.suppress_warn = False

    # Honor a requested dependency drop targeted at (or not scoped away from) this repo.
    if config.drop_dependency and (
            "yocto/meta-qt6" in config.drop_dependency_from or not config.drop_dependency_from):
        print(f"Deleting {config.drop_dependency} as a dependency from yocto/meta-qt6.")
        file_lines = [line for line in file_lines if config.drop_dependency.name not in line]

    new_file = "\n".join(file_lines) + "\n"
    print()
    if old_file == new_file:
        print("yocto/meta-qt6 is up-to-date. No changes necessary.")
        yocto_repo.progress = PROGRESS.DONE_NO_UPDATE
        return yocto_repo
    print("yocto/meta-qt6 proposed update:")
    print(new_file)

    # Dry-run mode: show the proposal but push nothing.
    if config.args.simulate:
        return yocto_repo

    change, edit = acquire_change_edit(config, yocto_repo,
                                       f"Update submodule refs on '{yocto_repo.branch}' in {yocto_repo.id}")
    if not edit:
        # Could not open an edit (change is STAGED/INTEGRATING or similar).
        # Compare the in-flight patchset's file with our proposal.
        current_patch_file = bytes.decode(base64.b64decode(
            change.get_revision("current").get_commit().get_file_content(filename)),
            'utf-8')
        if current_patch_file == new_file:
            yocto_repo.proposal.gerrit_status = change.status
            print(f"Currently {change.status} change in {yocto_repo.id} is already up-to-date!")
        else:
            # Stale in-flight change: forget it and wait for its outcome.
            yocto_repo.proposal.change_id = ""
            yocto_repo.proposal.change_number = ""
            print(f"WARN: Found a currently {change.status} change which doesn't match "
                  f"the proposed update! Waiting until {yocto_repo.id} -> {change.change_id} "
                  f"merges or fails.")
        return yocto_repo
    print()
    # Try to rebase the change, or if not
    try:
        change.get_revision("current").rebase({"base": ""})
        print(f"Rebased change {change.change_id}")
    except GerritExceptions.ConflictError:
        # ConflictError also fires when the change is already on HEAD;
        # only a genuine conflict (parent != branch head) needs recovery.
        if not change.get_revision("current").get_commit().parents[0]["commit"] == get_head(config, yocto_repo, True):
            print("WARN: Failed to rebase change due to conflicts."
                  " Abandoning and recreating the change.")
            # Failed to rebase because of conflicts
            edit.delete()
            post_gerrit_comment(config, change.change_id, "Abandoning this change because"
                                "it cannot be rebased without conflicts.")
            change.abandon()
            config.state_data[yocto_repo.id] = reset_module_properties(config, yocto_repo)
            # Start over with a clean slate for this repo.
            yocto_repo = push_yocto_update(config)
            return yocto_repo
        else:
            # Already on HEAD. OK to move on.
            pass

    try:
        edit.put_change_file_content(filename, new_file)
        print(f"Pushed (unknown)")
    except GerritExceptions.ConflictError:
        # A conflict error at this point just means that no
        # changes were made. So just catch the exception and
        # move on.
        print(f"WARN: No changes made to (unknown).")
    try:
        # Brief pause so Gerrit registers the edit before publishing it.
        time.sleep(1)
        edit.publish({
            "notify": "NONE"
        })
        print(f"Published edit as new patchset on {change.change_id}")
    except GerritExceptions.ConflictError:
        # Publishing an empty edit conflicts: the patchset already matches.
        print(
            f"No changes made to {yocto_repo.id}, possible that the current patchset is up-to-date")
        edit.delete()
        change.abandon()
        if not retry:
            # One retry on a brand-new change in case the old one was stuck.
            print("Retrying update with a fresh change...")
            yocto_repo = push_yocto_update(config, retry=True)
            return yocto_repo
        else:
            # Still returned that everything is up-to-date even on a fresh change.
            # Odd, but probably true at this point.
            yocto_repo.progress = PROGRESS.DONE_NO_UPDATE
            return yocto_repo
    yocto_repo.progress = PROGRESS.IN_PROGRESS
    config.teams_connector.send_teams_webhook_basic(
        text=f"Updating {yocto_repo.id} with a consistent"
             f" set of submodules in **{yocto_repo.branch}**", repo=yocto_repo)
    approve_change_id(change, yocto_repo.id)

    return yocto_repo
+
+
def acquire_change_edit(config: Config, repo: Repo, subject: str) -> tuple[
    GerritChange, GerritChangeEdit]:
    """Create a new codereview change if necessary and acquire an edit
    on the change

    :param config: Shared Config with the gerrit client attached.
    :param repo: Repo whose proposal tracks the change being edited.
    :param subject: Commit subject used when a new change must be created.
    :return: (change, edit); edit is None when the change is in a state
             (e.g. STAGED/INTEGRATING) that does not permit edits.
    """
    gerrit = config.datasources.gerrit_client
    if repo.proposal.change_id:
        # Reuse the change already recorded on the proposal.
        change = gerrit.changes.get(repo.proposal.change_id)
        if change.status in ["STAGED", "INTEGRATING"]:
            # Gerrit forbids edits while a change is in the CI pipeline.
            print(f"Change is in state: {change.status}. Cannot create an edit.")
            return change, None
    else:
        change = gerrit.changes.create({
            "project": repo.id,
            "subject": subject,
            "branch": repo.branch,
            "status": "NEW"
        })
        print(f"Created new change for {repo.id}: {change.change_id}")
        repo.proposal.change_id = change.change_id
        repo.proposal.change_number = change._number
        repo.proposal.gerrit_status = "NEW"
    try:
        change.create_empty_edit()
    except GerritExceptions.ConflictError:
        # An edit already exists on this change; try to discard it and retry.
        print(f"WARN: {repo.id} change {change.change_id} may already have edit."
              f" Attempting to clear it!")
        edit = change.get_edit()
        if edit:
            edit.delete()
        else:
            # Conflict but no retrievable edit: the change state blocks edits.
            print(f"Change is in state: {change.status}")
            # Some issue creating the edit!
            return change, None
        # Second attempt after clearing the stale edit.
        change.create_empty_edit()
    edit = change.get_edit()
    return change, edit
+
+
def approve_change_id(change: GerritChange, repo_name: str) -> bool:
    """Give a +2 to a change. It's fine to self-approve a submodule update.

    Marks the change ready for review, applies Code-Review +2 and
    Sanity-Review +1 to the current revision. Returns False when the
    executing account lacks self-approval rights in the project.
    """
    review_payload = {
        "message": "Auto-approving submodule update.",
        "ready": "true",
        "labels": {
            "Code-Review": 2,
            "Sanity-Review": 1
        }
    }
    try:
        change.get_revision("current").set_review(review_payload)
    except GerritExceptions.NotAllowedError:
        # No self-approval rights here; a human has to approve and stage.
        print(f"WARN: You do not have self-approval rights to auto-approve in {repo_name}\n"
              f"You must have change ID {change.change_id} approved and"
              f" manually stage it.")
        return False
    return True
+
+
def snip_gitmodules(repo_name: str, gitmodules: str) -> str:
    """Get the snippet of gitmodules for a repo.

    Returns the section of the .gitmodules text containing the first
    occurrence of repo_name: from the '[' opening that section up to, but
    not including, the character preceding the next '[' header (or the
    file's final character when no further header follows). Yields an
    empty string when repo_name does not occur at all.
    """
    location = gitmodules.find(repo_name)
    if location < 0:
        # Not found: an (-1, -1) slice is the empty string.
        return gitmodules[-1:-1]
    section_start = gitmodules.rfind('[', 0, location - 1)
    section_end = gitmodules.find('[', section_start + 1) - 1
    if section_end < 0:
        # No later section header; stop just short of the end of the text.
        section_end = len(gitmodules) - 1
    return gitmodules[section_start:section_end]
+
+
def reset_module_properties(config: Config, repo: Repo) -> Repo:
    """Restore a module to its default state and refresh its head.

    Clears the proposal, staging bookkeeping, and progress, then re-reads
    the branch head so the module can be processed again from scratch.
    """
    print(f"Resetting module state for {repo.id}")
    # Wipe all update bookkeeping back to defaults.
    repo.proposal = Proposal()
    repo.to_stage = []
    repo.stage_count = 0
    repo.retry_count = 0
    repo.progress = PROGRESS.UNSPECIFIED
    # Pick up the branch head fresh from gerrit.
    repo.original_ref = get_head(config, repo, True)
    return repo
+
+
def reset_stage_count(repo: Repo) -> Repo:
    """Zero the recorded staging and retry attempt counters on the repo."""
    repo.stage_count = repo.retry_count = 0
    return repo
+
+
def state_printer(config: Config) -> tuple[dict[PROGRESS, int], str]:
    """Assembles a pretty-print string of the current state of updates.

    :param config: Shared Config whose state_data maps repo ids to Repo
                   objects carrying a PROGRESS value and proposal info.
    :return: A tuple of (counts per progress bucket, printable report).
    """
    ret_str = ""

    def _print(*values, end="\n"):
        # Concatenate fragments and append `end` once, like print() would,
        # but return the text instead of writing to stdout.
        buffer = ""
        for value in values:
            buffer += value
        buffer += end
        return buffer

    # NOTE(review): buckets are keyed by PROGRESS members but incremented
    # via `.value` — this relies on PROGRESS acting like an IntEnum, where
    # members hash/compare equal to their integer values. Confirm upstream.
    total_state = {state: 0 for state in PROGRESS}
    repos = list()

    msg = "\nThe following repos are ready to be updated:"
    for repo in config.state_data:
        if config.state_data[repo].progress == PROGRESS.READY:
            total_state[PROGRESS.READY.value] += 1
            repos.append(repo)
    if repos:
        ret_str += _print(f"{msg}\n\t", "\n\t".join(repos))
    repos.clear()

    msg = "\nThe following repos are in-progress:"
    for repo in config.state_data:
        if config.state_data[repo].progress in [PROGRESS.IN_PROGRESS, PROGRESS.RETRY]:
            total_state[PROGRESS.IN_PROGRESS.value] += 1
            repos.append(repo)
    if repos:
        ret_str += _print(msg)
        for repo in repos:
            # Include a direct gerrit link for anything currently in flight.
            ret_str += _print(
                f"\t{repo} - Change ID: {config.state_data[repo].proposal.change_id}\n"
                f"\t {gerrit_link_maker(config, config.state_data[repo])[1]}")
    repos.clear()

    msg = "\nThe following repos are waiting on dependencies to be updated:"
    for repo in config.state_data:
        if config.state_data[repo].progress in [PROGRESS.WAIT_DEPENDENCY,
                                                PROGRESS.WAIT_INCONSISTENT]:
            total_state[PROGRESS.WAIT_DEPENDENCY.value] += 1
            repos.append(repo)
    if repos:
        ret_str += _print(msg)
        for repo in repos:
            ret_str += _print("\t", repo, " depends on: ",
                              ", ".join(
                                  list(config.state_data[repo].deps_yaml["dependencies"].keys())))
    repos.clear()

    msg = "\nThe following repos have been updated and merged:"
    for repo in config.state_data:
        if config.state_data[repo].progress == PROGRESS.DONE:
            total_state[PROGRESS.DONE.value] += 1
            repos.append(repo)
    if repos:
        ret_str += _print(msg)
        for repo in repos:
            ret_str += _print(f"\t{repo} - Change ID: {config.state_data[repo].proposal.change_id}")
    repos.clear()

    msg = "\nThe following repos did not require an update:"
    for repo in config.state_data:
        if config.state_data[repo].progress == PROGRESS.DONE_NO_UPDATE:
            total_state[PROGRESS.DONE_NO_UPDATE.value] += 1
            repos.append(repo)
    if repos:
        ret_str += _print(msg)
        for repo in repos:
            ret_str += _print(f"\t{repo}")
    repos.clear()

    msg = "\nThe following repos failed to update:"
    for repo in config.state_data:
        # Anything at or past DONE_FAILED_NON_BLOCKING counts as a failure.
        if config.state_data[repo].progress >= PROGRESS.DONE_FAILED_NON_BLOCKING:
            total_state[PROGRESS.DONE_FAILED_NON_BLOCKING.value] += 1
            repos.append(repo)
    if repos:
        ret_str += _print(msg)
        for repo in repos:
            ret_str += _print(f"\t{repo}")

    return total_state, ret_str