summaryrefslogtreecommitdiff
path: root/tests
diff options
context:
space:
mode:
Diffstat (limited to 'tests')
-rwxr-xr-xtests/make_playbooks.py3
-rw-r--r--tests/unit/test_connection.py192
-rw-r--r--tests/unit/test_gerrit.py45
-rw-r--r--tests/unit/test_git_driver.py3
-rw-r--r--tests/unit/test_github_driver.py43
-rw-r--r--tests/unit/test_merger_repo.py58
-rw-r--r--tests/unit/test_model_upgrade.py27
-rw-r--r--tests/unit/test_reporting.py4
-rw-r--r--tests/unit/test_scheduler.py48
-rw-r--r--tests/unit/test_v3.py37
-rw-r--r--tests/unit/test_zk.py84
11 files changed, 428 insertions, 116 deletions
diff --git a/tests/make_playbooks.py b/tests/make_playbooks.py
index 93c37bc81..cb7a98096 100755
--- a/tests/make_playbooks.py
+++ b/tests/make_playbooks.py
@@ -40,7 +40,8 @@ def handle_repo(path):
config_path = os.path.join(path, fn)
break
try:
- config = yaml.safe_load(open(config_path))
+ with open(config_path) as f:
+ config = yaml.safe_load(f)
except Exception:
print(" Has yaml errors")
return
diff --git a/tests/unit/test_connection.py b/tests/unit/test_connection.py
index c30e1743d..26a99215e 100644
--- a/tests/unit/test_connection.py
+++ b/tests/unit/test_connection.py
@@ -65,7 +65,7 @@ class TestSQLConnectionMysql(ZuulTestCase):
def _sql_tables_created(self, connection_name):
connection = self.scheds.first.connections.connections[connection_name]
- insp = sa.engine.reflection.Inspector(connection.engine)
+ insp = sa.inspect(connection.engine)
table_prefix = connection.table_prefix
self.assertEqual(self.expected_table_prefix, table_prefix)
@@ -82,7 +82,7 @@ class TestSQLConnectionMysql(ZuulTestCase):
def _sql_indexes_created(self, connection_name):
connection = self.scheds.first.connections.connections[connection_name]
- insp = sa.engine.reflection.Inspector(connection.engine)
+ insp = sa.inspect(connection.engine)
table_prefix = connection.table_prefix
self.assertEqual(self.expected_table_prefix, table_prefix)
@@ -127,7 +127,7 @@ class TestSQLConnectionMysql(ZuulTestCase):
engine.connect() as conn:
result = conn.execute(
- sa.sql.select([reporter.connection.zuul_buildset_table]))
+ sa.sql.select(reporter.connection.zuul_buildset_table))
buildsets = result.fetchall()
self.assertEqual(5, len(buildsets))
@@ -137,107 +137,107 @@ class TestSQLConnectionMysql(ZuulTestCase):
buildset3 = buildsets[3]
buildset4 = buildsets[4]
- self.assertEqual('check', buildset0['pipeline'])
- self.assertEqual('org/project', buildset0['project'])
- self.assertEqual(1, buildset0['change'])
- self.assertEqual('1', buildset0['patchset'])
- self.assertEqual('SUCCESS', buildset0['result'])
- self.assertEqual('Build succeeded.', buildset0['message'])
- self.assertEqual('tenant-one', buildset0['tenant'])
+ self.assertEqual('check', buildset0.pipeline)
+ self.assertEqual('org/project', buildset0.project)
+ self.assertEqual(1, buildset0.change)
+ self.assertEqual('1', buildset0.patchset)
+ self.assertEqual('SUCCESS', buildset0.result)
+ self.assertEqual('Build succeeded.', buildset0.message)
+ self.assertEqual('tenant-one', buildset0.tenant)
self.assertEqual(
- 'https://review.example.com/%d' % buildset0['change'],
- buildset0['ref_url'])
- self.assertNotEqual(None, buildset0['event_id'])
- self.assertNotEqual(None, buildset0['event_timestamp'])
+ 'https://review.example.com/%d' % buildset0.change,
+ buildset0.ref_url)
+ self.assertNotEqual(None, buildset0.event_id)
+ self.assertNotEqual(None, buildset0.event_timestamp)
buildset0_builds = conn.execute(
- sa.sql.select([
+ sa.sql.select(
reporter.connection.zuul_build_table
- ]).where(
+ ).where(
reporter.connection.zuul_build_table.c.buildset_id ==
- buildset0['id']
+ buildset0.id
)
).fetchall()
# Check the first result, which should be the project-merge job
self.assertEqual(
- 'project-merge', buildset0_builds[0]['job_name'])
- self.assertEqual("SUCCESS", buildset0_builds[0]['result'])
- self.assertEqual(None, buildset0_builds[0]['log_url'])
- self.assertEqual('check', buildset1['pipeline'])
- self.assertEqual('master', buildset1['branch'])
- self.assertEqual('org/project', buildset1['project'])
- self.assertEqual(2, buildset1['change'])
- self.assertEqual('1', buildset1['patchset'])
- self.assertEqual('FAILURE', buildset1['result'])
- self.assertEqual('Build failed.', buildset1['message'])
+ 'project-merge', buildset0_builds[0].job_name)
+ self.assertEqual("SUCCESS", buildset0_builds[0].result)
+ self.assertEqual(None, buildset0_builds[0].log_url)
+ self.assertEqual('check', buildset1.pipeline)
+ self.assertEqual('master', buildset1.branch)
+ self.assertEqual('org/project', buildset1.project)
+ self.assertEqual(2, buildset1.change)
+ self.assertEqual('1', buildset1.patchset)
+ self.assertEqual('FAILURE', buildset1.result)
+ self.assertEqual('Build failed.', buildset1.message)
buildset1_builds = conn.execute(
- sa.sql.select([
+ sa.sql.select(
reporter.connection.zuul_build_table
- ]).where(
+ ).where(
reporter.connection.zuul_build_table.c.buildset_id ==
- buildset1['id']
+ buildset1.id
)
).fetchall()
# Check the second result, which should be the project-test1
# job which failed
self.assertEqual(
- 'project-test1', buildset1_builds[1]['job_name'])
- self.assertEqual("FAILURE", buildset1_builds[1]['result'])
- self.assertEqual(None, buildset1_builds[1]['log_url'])
+ 'project-test1', buildset1_builds[1].job_name)
+ self.assertEqual("FAILURE", buildset1_builds[1].result)
+ self.assertEqual(None, buildset1_builds[1].log_url)
buildset2_builds = conn.execute(
- sa.sql.select([
+ sa.sql.select(
reporter.connection.zuul_build_table
- ]).where(
+ ).where(
reporter.connection.zuul_build_table.c.buildset_id ==
- buildset2['id']
+ buildset2.id
)
).fetchall()
# Check the first result, which should be the project-publish
# job
self.assertEqual('project-publish',
- buildset2_builds[0]['job_name'])
- self.assertEqual("SUCCESS", buildset2_builds[0]['result'])
+ buildset2_builds[0].job_name)
+ self.assertEqual("SUCCESS", buildset2_builds[0].result)
buildset3_builds = conn.execute(
- sa.sql.select([
+ sa.sql.select(
reporter.connection.zuul_build_table
- ]).where(
+ ).where(
reporter.connection.zuul_build_table.c.buildset_id ==
- buildset3['id']
+ buildset3.id
)
).fetchall()
self.assertEqual(
- 'project-test1', buildset3_builds[1]['job_name'])
- self.assertEqual('NODE_FAILURE', buildset3_builds[1]['result'])
- self.assertEqual(None, buildset3_builds[1]['log_url'])
- self.assertIsNotNone(buildset3_builds[1]['start_time'])
- self.assertIsNotNone(buildset3_builds[1]['end_time'])
+ 'project-test1', buildset3_builds[1].job_name)
+ self.assertEqual('NODE_FAILURE', buildset3_builds[1].result)
+ self.assertEqual(None, buildset3_builds[1].log_url)
+ self.assertIsNotNone(buildset3_builds[1].start_time)
+ self.assertIsNotNone(buildset3_builds[1].end_time)
self.assertGreaterEqual(
- buildset3_builds[1]['end_time'],
- buildset3_builds[1]['start_time'])
+ buildset3_builds[1].end_time,
+ buildset3_builds[1].start_time)
# Check the paused build result
buildset4_builds = conn.execute(
- sa.sql.select([
+ sa.sql.select(
reporter.connection.zuul_build_table
- ]).where(
+ ).where(
reporter.connection.zuul_build_table.c.buildset_id ==
- buildset4['id']
+ buildset4.id
).order_by(reporter.connection.zuul_build_table.c.id)
).fetchall()
paused_build_events = conn.execute(
- sa.sql.select([
+ sa.sql.select(
reporter.connection.zuul_build_event_table
- ]).where(
+ ).where(
reporter.connection.zuul_build_event_table.c.build_id
- == buildset4_builds[0]["id"]
+ == buildset4_builds[0].id
)
).fetchall()
@@ -245,16 +245,16 @@ class TestSQLConnectionMysql(ZuulTestCase):
pause_event = paused_build_events[0]
resume_event = paused_build_events[1]
self.assertEqual(
- pause_event["event_type"], "paused")
- self.assertIsNotNone(pause_event["event_time"])
- self.assertIsNone(pause_event["description"])
+ pause_event.event_type, "paused")
+ self.assertIsNotNone(pause_event.event_time)
+ self.assertIsNone(pause_event.description)
self.assertEqual(
- resume_event["event_type"], "resumed")
- self.assertIsNotNone(resume_event["event_time"])
- self.assertIsNone(resume_event["description"])
+ resume_event.event_type, "resumed")
+ self.assertIsNotNone(resume_event.event_time)
+ self.assertIsNone(resume_event.description)
self.assertGreater(
- resume_event["event_time"], pause_event["event_time"])
+ resume_event.event_time, pause_event.event_time)
self.executor_server.hold_jobs_in_build = True
@@ -333,51 +333,51 @@ class TestSQLConnectionMysql(ZuulTestCase):
engine.connect() as conn:
result = conn.execute(
- sa.sql.select([reporter.connection.zuul_buildset_table])
+ sa.sql.select(reporter.connection.zuul_buildset_table)
)
buildsets = result.fetchall()
self.assertEqual(1, len(buildsets))
buildset0 = buildsets[0]
- self.assertEqual('check', buildset0['pipeline'])
- self.assertEqual('org/project', buildset0['project'])
- self.assertEqual(1, buildset0['change'])
- self.assertEqual('1', buildset0['patchset'])
- self.assertEqual('SUCCESS', buildset0['result'])
- self.assertEqual('Build succeeded.', buildset0['message'])
- self.assertEqual('tenant-one', buildset0['tenant'])
+ self.assertEqual('check', buildset0.pipeline)
+ self.assertEqual('org/project', buildset0.project)
+ self.assertEqual(1, buildset0.change)
+ self.assertEqual('1', buildset0.patchset)
+ self.assertEqual('SUCCESS', buildset0.result)
+ self.assertEqual('Build succeeded.', buildset0.message)
+ self.assertEqual('tenant-one', buildset0.tenant)
self.assertEqual(
- 'https://review.example.com/%d' % buildset0['change'],
- buildset0['ref_url'])
+ 'https://review.example.com/%d' % buildset0.change,
+ buildset0.ref_url)
buildset0_builds = conn.execute(
sa.sql.select(
- [reporter.connection.zuul_build_table]
+ reporter.connection.zuul_build_table
).where(
reporter.connection.zuul_build_table.c.buildset_id ==
- buildset0['id']
+ buildset0.id
)
).fetchall()
# Check the retry results
- self.assertEqual('project-merge', buildset0_builds[0]['job_name'])
- self.assertEqual('SUCCESS', buildset0_builds[0]['result'])
- self.assertTrue(buildset0_builds[0]['final'])
-
- self.assertEqual('project-test1', buildset0_builds[1]['job_name'])
- self.assertEqual('RETRY', buildset0_builds[1]['result'])
- self.assertFalse(buildset0_builds[1]['final'])
- self.assertEqual('project-test2', buildset0_builds[2]['job_name'])
- self.assertEqual('RETRY', buildset0_builds[2]['result'])
- self.assertFalse(buildset0_builds[2]['final'])
-
- self.assertEqual('project-test1', buildset0_builds[3]['job_name'])
- self.assertEqual('SUCCESS', buildset0_builds[3]['result'])
- self.assertTrue(buildset0_builds[3]['final'])
- self.assertEqual('project-test2', buildset0_builds[4]['job_name'])
- self.assertEqual('SUCCESS', buildset0_builds[4]['result'])
- self.assertTrue(buildset0_builds[4]['final'])
+ self.assertEqual('project-merge', buildset0_builds[0].job_name)
+ self.assertEqual('SUCCESS', buildset0_builds[0].result)
+ self.assertTrue(buildset0_builds[0].final)
+
+ self.assertEqual('project-test1', buildset0_builds[1].job_name)
+ self.assertEqual('RETRY', buildset0_builds[1].result)
+ self.assertFalse(buildset0_builds[1].final)
+ self.assertEqual('project-test2', buildset0_builds[2].job_name)
+ self.assertEqual('RETRY', buildset0_builds[2].result)
+ self.assertFalse(buildset0_builds[2].final)
+
+ self.assertEqual('project-test1', buildset0_builds[3].job_name)
+ self.assertEqual('SUCCESS', buildset0_builds[3].result)
+ self.assertTrue(buildset0_builds[3].final)
+ self.assertEqual('project-test2', buildset0_builds[4].job_name)
+ self.assertEqual('SUCCESS', buildset0_builds[4].result)
+ self.assertTrue(buildset0_builds[4].final)
self.executor_server.hold_jobs_in_build = True
@@ -430,7 +430,7 @@ class TestSQLConnectionMysql(ZuulTestCase):
engine.connect() as conn:
result = conn.execute(
- sa.sql.select([reporter.connection.zuul_buildset_table])
+ sa.sql.select(reporter.connection.zuul_buildset_table)
)
buildsets = result.fetchall()
@@ -439,10 +439,10 @@ class TestSQLConnectionMysql(ZuulTestCase):
buildset0_builds = conn.execute(
sa.sql.select(
- [reporter.connection.zuul_build_table]
+ reporter.connection.zuul_build_table
).where(
reporter.connection.zuul_build_table.c.buildset_id ==
- buildset0['id']
+ buildset0.id
)
).fetchall()
@@ -488,7 +488,7 @@ class TestSQLConnectionMysql(ZuulTestCase):
engine.connect() as conn:
result = conn.execute(
- sa.sql.select([reporter.connection.zuul_buildset_table])
+ sa.sql.select(reporter.connection.zuul_buildset_table)
)
buildsets = result.fetchall()
@@ -497,10 +497,10 @@ class TestSQLConnectionMysql(ZuulTestCase):
buildset0_builds = conn.execute(
sa.sql.select(
- [reporter.connection.zuul_build_table]
+ reporter.connection.zuul_build_table
).where(
reporter.connection.zuul_build_table.c.buildset_id ==
- buildset0['id']
+ buildset0.id
)
).fetchall()
diff --git a/tests/unit/test_gerrit.py b/tests/unit/test_gerrit.py
index 2e3057af6..2a63d5ef8 100644
--- a/tests/unit/test_gerrit.py
+++ b/tests/unit/test_gerrit.py
@@ -957,3 +957,48 @@ class TestGerritConnection(ZuulTestCase):
self.assertEqual(B.queried, 2)
self.assertEqual(A.data['status'], 'MERGED')
self.assertEqual(B.data['status'], 'MERGED')
+
+
+class TestGerritUnicodeRefs(ZuulTestCase):
+ config_file = 'zuul-gerrit-web.conf'
+ tenant_config_file = 'config/single-tenant/main.yaml'
+
+ upload_pack_data = (b'014452944ee370db5c87691e62e0f9079b6281319b4e HEAD'
+ b'\x00multi_ack thin-pack side-band side-band-64k '
+ b'ofs-delta shallow deepen-since deepen-not '
+ b'deepen-relative no-progress include-tag '
+ b'multi_ack_detailed allow-tip-sha1-in-want '
+ b'allow-reachable-sha1-in-want '
+ b'symref=HEAD:refs/heads/faster filter '
+ b'object-format=sha1 agent=git/2.37.1.gl1\n'
+ b'003d5f42665d737b3fd4ec22ca0209e6191859f09fd6 '
+ b'refs/for/faster\n'
+ b'004952944ee370db5c87691e62e0f9079b6281319b4e '
+ b'refs/heads/foo/\xf0\x9f\x94\xa5\xf0\x9f\x94\xa5'
+ b'\xf0\x9f\x94\xa5\n'
+ b'003f52944ee370db5c87691e62e0f9079b6281319b4e '
+ b'refs/heads/faster\n0000').decode("utf-8")
+
+ def test_mb_unicode_refs(self):
+ gerrit_config = {
+ 'user': 'gerrit',
+ 'server': 'localhost',
+ }
+ driver = GerritDriver()
+ gerrit = GerritConnection(driver, 'review_gerrit', gerrit_config)
+
+ def _uploadPack(project):
+ return self.upload_pack_data
+
+ self.patch(gerrit, '_uploadPack', _uploadPack)
+
+ project = gerrit.source.getProject('org/project')
+ refs = gerrit.getInfoRefs(project)
+
+ self.assertEqual(refs,
+ {'refs/for/faster':
+ '5f42665d737b3fd4ec22ca0209e6191859f09fd6',
+ 'refs/heads/foo/🔥🔥🔥':
+ '52944ee370db5c87691e62e0f9079b6281319b4e',
+ 'refs/heads/faster':
+ '52944ee370db5c87691e62e0f9079b6281319b4e'})
diff --git a/tests/unit/test_git_driver.py b/tests/unit/test_git_driver.py
index 06e2ac7c8..95fca30d3 100644
--- a/tests/unit/test_git_driver.py
+++ b/tests/unit/test_git_driver.py
@@ -62,7 +62,8 @@ class TestGitDriver(ZuulTestCase):
# Update zuul.yaml to force a tenant reconfiguration
path = os.path.join(self.upstream_root, 'common-config', 'zuul.yaml')
- config = yaml.safe_load(open(path, 'r').read())
+ with open(path, 'r') as f:
+ config = yaml.safe_load(f)
change = {
'name': 'org/project',
'check': {
diff --git a/tests/unit/test_github_driver.py b/tests/unit/test_github_driver.py
index e3706440b..fb46aa7d1 100644
--- a/tests/unit/test_github_driver.py
+++ b/tests/unit/test_github_driver.py
@@ -18,8 +18,10 @@ import re
from testtools.matchers import MatchesRegex, Not, StartsWith
import urllib
import socket
+import threading
import time
import textwrap
+from concurrent.futures import ThreadPoolExecutor
from unittest import mock, skip
import git
@@ -32,10 +34,11 @@ from zuul.zk.layout import LayoutState
from zuul.lib import strings
from zuul.merger.merger import Repo
from zuul.model import MergeRequest, EnqueueEvent, DequeueEvent
+from zuul.zk.change_cache import ChangeKey
from tests.base import (AnsibleZuulTestCase, BaseTestCase,
ZuulGithubAppTestCase, ZuulTestCase,
- simple_layout, random_sha1)
+ simple_layout, random_sha1, iterate_timeout)
from tests.base import ZuulWebFixture
EMPTY_LAYOUT_STATE = LayoutState("", "", 0, None, {}, -1)
@@ -1484,6 +1487,44 @@ class TestGithubDriver(ZuulTestCase):
"rebase not supported",
str(loading_errors[0].error))
+ @simple_layout("layouts/basic-github.yaml", driver="github")
+ def test_concurrent_get_change(self):
+ """
+ Test that getting a change concurrently returns the same
+ object from the cache.
+ """
+ conn = self.scheds.first.sched.connections.connections["github"]
+
+ # Create a new change object and remove it from the cache so
+ # the concurrent call will try to create a new change object.
+ A = self.fake_github.openFakePullRequest("org/project", "master", "A")
+ change_key = ChangeKey(conn.connection_name, "org/project",
+ "PullRequest", str(A.number), str(A.head_sha))
+ change = conn.getChange(change_key, refresh=True)
+ conn._change_cache.delete(change_key)
+
+ # Acquire the update lock so the concurrent get task needs to
+        # wait for the lock to be released.
+ lock = conn._change_update_lock.setdefault(change_key,
+ threading.Lock())
+ lock.acquire()
+ try:
+ executor = ThreadPoolExecutor(max_workers=1)
+ task = executor.submit(conn.getChange, change_key, refresh=True)
+ for _ in iterate_timeout(5, "task to be running"):
+ if task.running():
+ break
+ # Add the change back so the waiting task can get the
+ # change from the cache.
+ conn._change_cache.set(change_key, change)
+ finally:
+ lock.release()
+ executor.shutdown()
+
+ other_change = task.result()
+ self.assertIsNotNone(other_change.cache_stat)
+ self.assertIs(change, other_change)
+
class TestMultiGithubDriver(ZuulTestCase):
config_file = 'zuul-multi-github.conf'
diff --git a/tests/unit/test_merger_repo.py b/tests/unit/test_merger_repo.py
index 7806db347..f907cb8b4 100644
--- a/tests/unit/test_merger_repo.py
+++ b/tests/unit/test_merger_repo.py
@@ -163,6 +163,64 @@ class TestMergerRepo(ZuulTestCase):
work_repo.reset()
work_repo.checkout("foobar")
+ def test_rebase_merge_conflict_abort(self):
+ """Test that a failed rebase is properly aborted and related
+ directories are cleaned up."""
+ parent_path = os.path.join(self.upstream_root, 'org/project1')
+ parent_repo = git.Repo(parent_path)
+ parent_repo.create_head("feature")
+
+ files = {"test.txt": "master"}
+ self.create_commit("org/project1", files=files, head="master",
+ message="Add master file")
+
+ files = {"test.txt": "feature"}
+ self.create_commit("org/project1", files=files, head="feature",
+ message="Add feature file")
+
+ work_repo = Repo(parent_path, self.workspace_root,
+ "none@example.org", "User Name", "0", "0")
+
+ item = {"ref": "refs/heads/feature"}
+ # We expect the rebase to fail because of a conflict, but the
+ # rebase will be aborted.
+ with testtools.ExpectedException(git.exc.GitCommandError):
+ work_repo.rebaseMerge(item, "master")
+
+ # Assert that the failed rebase doesn't leave any temporary
+ # directories behind.
+ self.assertFalse(
+ os.path.exists(f"{work_repo.local_path}/.git/rebase-merge"))
+ self.assertFalse(
+ os.path.exists(f"{work_repo.local_path}/.git/rebase-apply"))
+
+ def test_rebase_merge_conflict_reset_cleanup(self):
+ """Test temporary directories of a failed rebase merge are
+ removed on repo reset."""
+ parent_path = os.path.join(self.upstream_root, 'org/project1')
+ parent_repo = git.Repo(parent_path)
+ parent_repo.create_head("feature")
+
+ files = {"master.txt": "master"}
+ self.create_commit("org/project1", files=files, head="master",
+ message="Add master file")
+
+ files = {"feature.txt": "feature"}
+ self.create_commit("org/project1", files=files, head="feature",
+ message="Add feature file")
+
+ work_repo = Repo(parent_path, self.workspace_root,
+ "none@example.org", "User Name", "0", "0")
+
+ # Simulate leftovers from a failed rebase
+ os.mkdir(f"{work_repo.local_path}/.git/rebase-merge")
+ os.mkdir(f"{work_repo.local_path}/.git/rebase-apply")
+
+ # Resetting the repo should clean up any leaked directories
+ work_repo.reset()
+ item = {"ref": "refs/heads/feature"}
+ work_repo.rebaseMerge(item, "master")
+
def test_set_refs(self):
parent_path = os.path.join(self.upstream_root, 'org/project1')
remote_sha = self.create_commit('org/project1')
diff --git a/tests/unit/test_model_upgrade.py b/tests/unit/test_model_upgrade.py
index a5a49bed4..c6cdee7ea 100644
--- a/tests/unit/test_model_upgrade.py
+++ b/tests/unit/test_model_upgrade.py
@@ -293,6 +293,33 @@ class TestModelUpgrade(ZuulTestCase):
result='SUCCESS', changes='1,1'),
], ordered=False)
+ @model_version(12)
+ def test_model_12_13(self):
+ # Initially queue items will still have the full trigger event
+ # stored in Zookeeper. The trigger event will be converted to
+ # an event info object after the model API update.
+ self.executor_server.hold_jobs_in_build = True
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 1)
+
+ # Upgrade our component
+ self.model_test_component_info.model_api = 13
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ self.assertHistory([
+ dict(name='project-merge', result='SUCCESS', changes='1,1'),
+ dict(name='project-test1', result='SUCCESS', changes='1,1'),
+ dict(name='project-test2', result='SUCCESS', changes='1,1'),
+ dict(name='project1-project2-integration',
+ result='SUCCESS', changes='1,1'),
+ ], ordered=False)
+
class TestGithubModelUpgrade(ZuulTestCase):
config_file = 'zuul-github-driver.conf'
diff --git a/tests/unit/test_reporting.py b/tests/unit/test_reporting.py
index 2cf93cdcb..0c5c5fbc9 100644
--- a/tests/unit/test_reporting.py
+++ b/tests/unit/test_reporting.py
@@ -151,7 +151,7 @@ class TestReporting(ZuulTestCase):
engine.connect() as conn:
result = conn.execute(
- sa.sql.select([reporter.connection.zuul_buildset_table]))
+ sa.sql.select(reporter.connection.zuul_buildset_table))
buildsets = result.fetchall()
for x in buildsets:
@@ -180,7 +180,7 @@ class TestReporting(ZuulTestCase):
engine.connect() as conn:
result = conn.execute(
- sa.sql.select([reporter.connection.zuul_buildset_table]))
+ sa.sql.select(reporter.connection.zuul_buildset_table))
buildsets = result.fetchall()
for x in buildsets:
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index 35e632d46..172ed34dc 100644
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -54,7 +54,7 @@ from tests.base import (
from zuul.zk.change_cache import ChangeKey
from zuul.zk.event_queues import PIPELINE_NAME_ROOT
from zuul.zk.layout import LayoutState
-from zuul.zk.locks import management_queue_lock
+from zuul.zk.locks import management_queue_lock, pipeline_lock
from zuul.zk import zkobject
EMPTY_LAYOUT_STATE = LayoutState("", "", 0, None, {}, -1)
@@ -461,6 +461,7 @@ class TestScheduler(ZuulTestCase):
'zuul.mergers.online', value='1', kind='g')
self.assertReportedStat('zuul.scheduler.eventqueues.connection.gerrit',
value='0', kind='g')
+ self.assertReportedStat('zuul.scheduler.run_handler', kind='ms')
# Catch time / monotonic errors
for key in [
@@ -490,9 +491,10 @@ class TestScheduler(ZuulTestCase):
'zuul.tenant.tenant-one.pipeline.gate.write_objects',
'zuul.tenant.tenant-one.pipeline.gate.read_znodes',
'zuul.tenant.tenant-one.pipeline.gate.write_znodes',
- 'zuul.tenant.tenant-one.pipeline.gate.read_bytes',
'zuul.tenant.tenant-one.pipeline.gate.write_bytes',
]:
+ # 'zuul.tenant.tenant-one.pipeline.gate.read_bytes' is
+ # expected to be zero since it's initialized after reading
val = self.assertReportedStat(key, kind='g')
self.assertTrue(0.0 < float(val) < 60000.0)
@@ -3587,8 +3589,11 @@ class TestScheduler(ZuulTestCase):
FakeChange = namedtuple('FakeChange', ['project', 'branch'])
fake_a = FakeChange(project1, 'master')
fake_b = FakeChange(project2, 'master')
- with self.createZKContext() as ctx,\
- gate.manager.currentContext(ctx):
+ with pipeline_lock(
+ self.zk_client, tenant.name,
+ gate.name) as lock,\
+ self.createZKContext(lock) as ctx,\
+ gate.manager.currentContext(ctx):
gate.manager.getChangeQueue(fake_a, None)
gate.manager.getChangeQueue(fake_b, None)
q1 = gate.getQueue(project1.canonical_name, None)
@@ -3610,8 +3615,11 @@ class TestScheduler(ZuulTestCase):
FakeChange = namedtuple('FakeChange', ['project', 'branch'])
fake_a = FakeChange(project1, 'master')
fake_b = FakeChange(project2, 'master')
- with self.createZKContext() as ctx,\
- gate.manager.currentContext(ctx):
+ with pipeline_lock(
+ self.zk_client, tenant.name,
+ gate.name) as lock,\
+ self.createZKContext(lock) as ctx,\
+ gate.manager.currentContext(ctx):
gate.manager.getChangeQueue(fake_a, None)
gate.manager.getChangeQueue(fake_b, None)
q1 = gate.getQueue(project1.canonical_name, None)
@@ -3633,8 +3641,11 @@ class TestScheduler(ZuulTestCase):
FakeChange = namedtuple('FakeChange', ['project', 'branch'])
fake_a = FakeChange(project1, 'master')
fake_b = FakeChange(project2, 'master')
- with self.createZKContext() as ctx,\
- gate.manager.currentContext(ctx):
+ with pipeline_lock(
+ self.zk_client, tenant.name,
+ gate.name) as lock,\
+ self.createZKContext(lock) as ctx,\
+ gate.manager.currentContext(ctx):
gate.manager.getChangeQueue(fake_a, None)
gate.manager.getChangeQueue(fake_b, None)
q1 = gate.getQueue(project1.canonical_name, None)
@@ -3655,8 +3666,11 @@ class TestScheduler(ZuulTestCase):
FakeChange = namedtuple('FakeChange', ['project', 'branch'])
fake_a = FakeChange(project1, 'master')
fake_b = FakeChange(project2, 'master')
- with self.createZKContext() as ctx,\
- gate.manager.currentContext(ctx):
+ with pipeline_lock(
+ self.zk_client, tenant.name,
+ gate.name) as lock,\
+ self.createZKContext(lock) as ctx,\
+ gate.manager.currentContext(ctx):
gate.manager.getChangeQueue(fake_a, None)
gate.manager.getChangeQueue(fake_b, None)
q1 = gate.getQueue(project1.canonical_name, None)
@@ -3678,8 +3692,11 @@ class TestScheduler(ZuulTestCase):
FakeChange = namedtuple('FakeChange', ['project', 'branch'])
fake_a = FakeChange(project1, 'master')
fake_b = FakeChange(project2, 'master')
- with self.createZKContext() as ctx,\
- gate.manager.currentContext(ctx):
+ with pipeline_lock(
+ self.zk_client, tenant.name,
+ gate.name) as lock,\
+ self.createZKContext(lock) as ctx,\
+ gate.manager.currentContext(ctx):
gate.manager.getChangeQueue(fake_a, None)
gate.manager.getChangeQueue(fake_b, None)
q1 = gate.getQueue(project1.canonical_name, None)
@@ -3943,6 +3960,10 @@ class TestScheduler(ZuulTestCase):
else:
time.sleep(0)
+ self.assertGreater(new.last_reconfigured, old.last_reconfigured)
+ self.assertGreater(new.last_reconfigure_event_ltime,
+ old.last_reconfigure_event_ltime)
+
def test_tenant_reconfiguration_command_socket(self):
"Test that single-tenant reconfiguration via command socket works"
@@ -6200,7 +6221,8 @@ For CI problems and help debugging, contact ci@example.org"""
build = self.getBuildByName('check-job')
inv_path = os.path.join(build.jobdir.root, 'ansible', 'inventory.yaml')
- inventory = yaml.safe_load(open(inv_path, 'r'))
+ with open(inv_path, 'r') as f:
+ inventory = yaml.safe_load(f)
label = inventory['all']['hosts']['controller']['nodepool']['label']
self.assertEqual('slow-label', label)
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index de8b8f3ad..004ede862 100644
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -1557,6 +1557,43 @@ class TestInRepoConfig(ZuulTestCase):
'start_line': 5},
})
+ def test_dynamic_config_job_anchors(self):
+ # Test the use of anchors in job configuration. This is a
+ # regression test designed to catch a failure where we freeze
+ # the first job and in doing so, mutate the vars dict. The
+ # intended behavior is that the two jobs end up with two
+ # separate python objects for their vars dicts.
+ in_repo_conf = textwrap.dedent(
+ """
+ - job:
+ name: myvars
+ vars: &anchor
+ plugins:
+ foo: bar
+
+ - job:
+ name: project-test1
+ timeout: 999999999999
+ vars: *anchor
+
+ - project:
+ name: org/project
+ check:
+ jobs:
+ - project-test1
+ """)
+
+ file_dict = {'.zuul.yaml': in_repo_conf}
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+ files=file_dict)
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertEqual(A.reported, 1,
+ "A should report failure")
+ self.assertEqual(A.patchsets[0]['approvals'][0]['value'], "-1")
+ self.assertIn('max-job-timeout', A.messages[0])
+ self.assertHistory([])
+
def test_dynamic_config_non_existing_job_in_template(self):
"""Test that requesting a non existent job fails"""
in_repo_conf = textwrap.dedent(
diff --git a/tests/unit/test_zk.py b/tests/unit/test_zk.py
index 7e3c19dfe..b5697ee36 100644
--- a/tests/unit/test_zk.py
+++ b/tests/unit/test_zk.py
@@ -18,6 +18,7 @@ import json
import queue
import threading
import uuid
+from unittest import mock
import testtools
@@ -53,10 +54,12 @@ from tests.base import (
BaseTestCase, HoldableExecutorApi, HoldableMergerApi,
iterate_timeout
)
-from zuul.zk.zkobject import ShardedZKObject, ZKObject, ZKContext
+from zuul.zk.zkobject import (
+ ShardedZKObject, ZKObject, ZKContext
+)
from zuul.zk.locks import tenant_write_lock
-from kazoo.exceptions import ZookeeperError, OperationTimeoutError
+from kazoo.exceptions import ZookeeperError, OperationTimeoutError, NoNodeError
class ZooKeeperBaseTestCase(BaseTestCase):
@@ -2037,3 +2040,80 @@ class TestBlobStore(ZooKeeperBaseTestCase):
with testtools.ExpectedException(KeyError):
bs.get(path)
+
+
+class TestPipelineInit(ZooKeeperBaseTestCase):
+ # Test the initialize-on-refresh code paths of various pipeline objects
+
+ def test_pipeline_state_new_object(self):
+ # Test the initialize-on-refresh code path with no existing object
+ tenant = model.Tenant('tenant')
+ pipeline = model.Pipeline('gate', tenant)
+ layout = model.Layout(tenant)
+ tenant.layout = layout
+ pipeline.state = model.PipelineState.create(
+ pipeline, pipeline.state)
+ context = ZKContext(self.zk_client, None, None, self.log)
+ pipeline.state.refresh(context)
+ self.assertTrue(self.zk_client.client.exists(pipeline.state.getPath()))
+ self.assertEqual(pipeline.state.layout_uuid, layout.uuid)
+
+ def test_pipeline_state_existing_object(self):
+ # Test the initialize-on-refresh code path with a pre-existing object
+ tenant = model.Tenant('tenant')
+ pipeline = model.Pipeline('gate', tenant)
+ layout = model.Layout(tenant)
+ tenant.layout = layout
+ pipeline.manager = mock.Mock()
+ pipeline.state = model.PipelineState.create(
+ pipeline, pipeline.state)
+ pipeline.change_list = model.PipelineChangeList.create(
+ pipeline)
+ context = ZKContext(self.zk_client, None, None, self.log)
+ # We refresh the change list here purely for the side effect
+ # of creating the pipeline state object with no data (the list
+ # is a subpath of the state object).
+ pipeline.change_list.refresh(context)
+ pipeline.state.refresh(context)
+ self.assertTrue(
+ self.zk_client.client.exists(pipeline.change_list.getPath()))
+ self.assertTrue(self.zk_client.client.exists(pipeline.state.getPath()))
+ self.assertEqual(pipeline.state.layout_uuid, layout.uuid)
+
+ def test_pipeline_change_list_new_object(self):
+ # Test the initialize-on-refresh code path with no existing object
+ tenant = model.Tenant('tenant')
+ pipeline = model.Pipeline('gate', tenant)
+ layout = model.Layout(tenant)
+ tenant.layout = layout
+ pipeline.state = model.PipelineState.create(
+ pipeline, pipeline.state)
+ pipeline.change_list = model.PipelineChangeList.create(
+ pipeline)
+ context = ZKContext(self.zk_client, None, None, self.log)
+ pipeline.change_list.refresh(context)
+ self.assertTrue(
+ self.zk_client.client.exists(pipeline.change_list.getPath()))
+ pipeline.manager = mock.Mock()
+ pipeline.state.refresh(context)
+ self.assertEqual(pipeline.state.layout_uuid, layout.uuid)
+
+ def test_pipeline_change_list_new_object_without_lock(self):
+ # Test the initialize-on-refresh code path if we don't have
+ # the lock. This should fail.
+ tenant = model.Tenant('tenant')
+ pipeline = model.Pipeline('gate', tenant)
+ layout = model.Layout(tenant)
+ tenant.layout = layout
+ pipeline.state = model.PipelineState.create(
+ pipeline, pipeline.state)
+ pipeline.change_list = model.PipelineChangeList.create(
+ pipeline)
+ context = ZKContext(self.zk_client, None, None, self.log)
+ with testtools.ExpectedException(NoNodeError):
+ pipeline.change_list.refresh(context, allow_init=False)
+ self.assertIsNone(
+ self.zk_client.client.exists(pipeline.change_list.getPath()))
+ pipeline.manager = mock.Mock()
+ pipeline.state.refresh(context)
+ self.assertEqual(pipeline.state.layout_uuid, layout.uuid)