author     bst-marge-bot <marge-bot@buildstream.build>  2019-03-14 10:52:30 +0000
committer  bst-marge-bot <marge-bot@buildstream.build>  2019-03-14 10:52:30 +0000
commit     f8c60c6c9951003b306975874891e29253055c0c (patch)
tree       3a092fb662fe1309f732c2c052fb17e98cf3ed5d
parent     8998788de605fd5e1f558cae505b7bb66d254994 (diff)
parent     4a873b9f5f4e55ec5c7bd1a3614c50da3ffdf4ca (diff)
download   buildstream-f8c60c6c9951003b306975874891e29253055c0c.tar.gz
Merge branch 'phil/consolidate-repo-tests' into 'master'

Consolidate templated source tests

See merge request BuildStream/buildstream!1215
-rw-r--r--  tests/frontend/buildcheckout.py | 49
-rw-r--r--  tests/frontend/fetch.py | 75
-rw-r--r--  tests/frontend/mirror.py | 398
-rw-r--r--  tests/frontend/track.py | 369
-rw-r--r--  tests/frontend/workspace.py | 20
-rw-r--r--  tests/integration/source-determinism.py | 74
-rw-r--r--  tests/sources/generic/__init__.py | 0
-rw-r--r--  tests/sources/generic/build_checkout.py | 81
-rw-r--r--  tests/sources/generic/fetch.py | 106
-rw-r--r--  tests/sources/generic/mirror.py | 426
-rw-r--r--  tests/sources/generic/project/elements/base.bst | 5
-rw-r--r--  tests/sources/generic/project/elements/base/base-alpine.bst | 17
-rw-r--r--  tests/sources/generic/project/elements/import-bin.bst | 4
-rw-r--r--  tests/sources/generic/project/elements/import-dev.bst | 4
-rw-r--r--  tests/sources/generic/project/elements/multiple_targets/dependency/horsey.bst | 3
-rw-r--r--  tests/sources/generic/project/elements/multiple_targets/dependency/pony.bst | 1
-rw-r--r--  tests/sources/generic/project/elements/multiple_targets/dependency/zebry.bst | 3
-rw-r--r--  tests/sources/generic/project/elements/multiple_targets/order/0.bst | 7
-rw-r--r--  tests/sources/generic/project/elements/multiple_targets/order/1.bst | 4
-rw-r--r--  tests/sources/generic/project/elements/multiple_targets/order/2.bst | 4
-rw-r--r--  tests/sources/generic/project/elements/multiple_targets/order/3.bst | 6
-rw-r--r--  tests/sources/generic/project/elements/multiple_targets/order/4.bst | 2
-rw-r--r--  tests/sources/generic/project/elements/multiple_targets/order/5.bst | 2
-rw-r--r--  tests/sources/generic/project/elements/multiple_targets/order/6.bst | 4
-rw-r--r--  tests/sources/generic/project/elements/multiple_targets/order/7.bst | 4
-rw-r--r--  tests/sources/generic/project/elements/multiple_targets/order/8.bst | 4
-rw-r--r--  tests/sources/generic/project/elements/multiple_targets/order/9.bst | 4
-rw-r--r--  tests/sources/generic/project/elements/multiple_targets/order/run.bst | 2
-rw-r--r--  tests/sources/generic/project/files/bar | 0
-rwxr-xr-x  tests/sources/generic/project/files/bin-files/usr/bin/hello | 3
-rw-r--r--  tests/sources/generic/project/files/dev-files/usr/include/pony.h | 12
-rw-r--r--  tests/sources/generic/project/files/etc-files/etc/buildstream/config | 1
-rw-r--r--  tests/sources/generic/project/files/foo | 0
-rw-r--r--  tests/sources/generic/project/files/source-bundle/llamas.txt | 1
-rw-r--r--  tests/sources/generic/project/files/sub-project/elements/import-etc.bst | 4
-rw-r--r--  tests/sources/generic/project/files/sub-project/files/etc-files/etc/animal.conf | 1
-rw-r--r--  tests/sources/generic/project/files/sub-project/project.conf | 4
-rw-r--r--  tests/sources/generic/project/project.conf | 27
-rw-r--r--  tests/sources/generic/source_determinism.py | 116
-rw-r--r--  tests/sources/generic/track.py | 412
-rw-r--r--  tests/sources/generic/track_cross_junction.py (renamed from tests/frontend/track_cross_junction.py) | 23
-rw-r--r--  tests/sources/generic/workspace.py | 159
-rw-r--r--  tests/sources/project/elements/base.bst | 5
-rw-r--r--  tests/sources/project/elements/base/base-alpine.bst | 17
44 files changed, 1489 insertions, 974 deletions
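
This merge moves the tests that are parametrized over every repository kind out of tests/frontend and tests/integration and into the new tests/sources/generic suite, so the templated source tests live in one place. The sketch below illustrates the shared pattern these relocated tests follow; it is a minimal example in the style of the moved test_fetch, assuming the helpers visible in this diff (create_repo and ALL_REPO_KINDS from tests.testutils, the cli fixture from buildstream.plugintestutils) and an illustrative test name, not code taken verbatim from the branch.

import os
import pytest

# Helpers used throughout this diff: create_repo() builds a scratch repository
# of the given kind, ALL_REPO_KINDS enumerates the source plugins under test,
# and the cli fixture drives the BuildStream frontend.
from tests.testutils import create_repo, ALL_REPO_KINDS
from buildstream.plugintestutils import cli  # noqa: F401 (pytest fixture)
from buildstream import _yaml

DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'project')


@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("kind", ALL_REPO_KINDS)
def test_fetch_example(cli, tmpdir, datafiles, kind):
    # Create a repo of the parametrized kind, seeded with the shared test files
    project = str(datafiles)
    repo = create_repo(kind, str(tmpdir))
    ref = repo.create(os.path.join(project, 'files', 'bin-files'))

    # Write an import element that points at the freshly created repo
    element_name = 'fetch-test-{}.bst'.format(kind)
    element = {'kind': 'import', 'sources': [repo.source_config(ref=ref)]}
    _yaml.dump(element, os.path.join(project, 'elements', element_name))

    # Fetching should move the element from 'fetch needed' to 'buildable'
    assert cli.get_element_state(project, element_name) == 'fetch needed'
    result = cli.run(project=project, args=['source', 'fetch', element_name])
    result.assert_success()
    assert cli.get_element_state(project, element_name) == 'buildable'

Because each test body only calls repo.create() and repo.source_config(), the same test exercises every source plugin that implements the Repo interface, which is what makes the consolidation into a single generic suite possible.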
diff --git a/tests/frontend/buildcheckout.py b/tests/frontend/buildcheckout.py
index 808f7e128..a2283c94b 100644
--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -4,7 +4,7 @@ import hashlib
import pytest
import subprocess
from tests.testutils.site import IS_WINDOWS
-from tests.testutils import create_repo, ALL_REPO_KINDS, generate_junction
+from tests.testutils import create_repo, generate_junction
from buildstream.plugintestutils import cli
from buildstream import _yaml
@@ -405,53 +405,6 @@ def test_build_checkout_force_tarball(datafiles, cli):
assert os.path.join('.', 'usr', 'include', 'pony.h') in tar.getnames()
-fetch_build_checkout_combos = \
- [("strict", kind) for kind in ALL_REPO_KINDS] + \
- [("non-strict", kind) for kind in ALL_REPO_KINDS]
-
-
-@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("strict,kind", fetch_build_checkout_combos)
-def test_fetch_build_checkout(cli, tmpdir, datafiles, strict, kind):
- checkout = os.path.join(cli.directory, 'checkout')
- project = os.path.join(datafiles.dirname, datafiles.basename)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'build-test-{}.bst'.format(kind)
-
- # Create our repo object of the given source type with
- # the dev files, and then collect the initial ref.
- #
- repo = create_repo(kind, str(tmpdir))
- ref = repo.create(dev_files_path)
-
- # Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.dump(element,
- os.path.join(element_path,
- element_name))
-
- assert cli.get_element_state(project, element_name) == 'fetch needed'
- result = cli.run(project=project, args=strict_args(['build', element_name], strict))
- result.assert_success()
- assert cli.get_element_state(project, element_name) == 'cached'
-
- # Now check it out
- result = cli.run(project=project, args=strict_args([
- 'artifact', 'checkout', element_name, '--directory', checkout
- ], strict))
- result.assert_success()
-
- # Check that the pony.h include from files/dev-files exists
- filename = os.path.join(checkout, 'usr', 'include', 'pony.h')
- assert os.path.exists(filename)
-
-
@pytest.mark.datafiles(DATA_DIR)
def test_install_to_build(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
diff --git a/tests/frontend/fetch.py b/tests/frontend/fetch.py
index c6914fb3b..5f2b63791 100644
--- a/tests/frontend/fetch.py
+++ b/tests/frontend/fetch.py
@@ -14,43 +14,6 @@ TOP_DIR = os.path.dirname(os.path.realpath(__file__))
DATA_DIR = os.path.join(TOP_DIR, 'project')
-@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
-def test_fetch(cli, tmpdir, datafiles, kind):
- project = os.path.join(datafiles.dirname, datafiles.basename)
- bin_files_path = os.path.join(project, 'files', 'bin-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'fetch-test-{}.bst'.format(kind)
-
- # Create our repo object of the given source type with
- # the bin files, and then collect the initial ref.
- #
- repo = create_repo(kind, str(tmpdir))
- ref = repo.create(bin_files_path)
-
- # Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.dump(element,
- os.path.join(element_path,
- element_name))
-
- # Assert that a fetch is needed
- assert cli.get_element_state(project, element_name) == 'fetch needed'
-
- # Now try to fetch it
- result = cli.run(project=project, args=['source', 'fetch', element_name])
- result.assert_success()
-
- # Assert that we are now buildable because the source is
- # now cached.
- assert cli.get_element_state(project, element_name) == 'buildable'
-
-
@pytest.mark.datafiles(os.path.join(TOP_DIR, 'project_world'))
def test_fetch_default_targets(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
@@ -194,41 +157,3 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
# informing the user to track the junction first
result = cli.run(project=project, args=['source', 'fetch', 'junction-dep.bst'])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
-
-
-@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
-def test_fetch_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
- project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
-
- import_etc_path = os.path.join(subproject_path, 'elements', 'import-etc-repo.bst')
- etc_files_path = os.path.join(subproject_path, 'files', 'etc-files')
-
- repo = create_repo(kind, str(tmpdir.join('import-etc')))
- ref = repo.create(etc_files_path)
-
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=(ref if ref_storage == 'inline' else None))
- ]
- }
- _yaml.dump(element, import_etc_path)
-
- configure_project(project, {
- 'ref-storage': ref_storage
- })
-
- generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == 'inline'))
-
- if ref_storage == 'project.refs':
- result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
- result.assert_success()
- result = cli.run(project=project, args=['source', 'track', 'junction.bst:import-etc.bst'])
- result.assert_success()
-
- result = cli.run(project=project, args=['source', 'fetch', 'junction.bst:import-etc.bst'])
- result.assert_success()
diff --git a/tests/frontend/mirror.py b/tests/frontend/mirror.py
index aea7f49bf..55bb8a328 100644
--- a/tests/frontend/mirror.py
+++ b/tests/frontend/mirror.py
@@ -1,7 +1,7 @@
import os
import pytest
-from tests.testutils import create_repo, ALL_REPO_KINDS, generate_junction
+from tests.testutils import create_repo, generate_junction
from buildstream import _yaml
from buildstream._exceptions import ErrorDomain
@@ -81,65 +81,6 @@ def generate_project():
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
-def test_mirror_fetch(cli, tmpdir, datafiles, kind):
- bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
- dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- project_dir = os.path.join(str(tmpdir), 'project')
- os.makedirs(project_dir)
- element_dir = os.path.join(project_dir, 'elements')
-
- # Create repo objects of the upstream and mirror
- upstream_repo = create_repo(kind, upstream_repodir)
- upstream_repo.create(bin_files_path)
- mirror_repo = upstream_repo.copy(mirror_repodir)
- upstream_ref = upstream_repo.create(dev_files_path)
-
- element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref)
- ]
- }
- element_name = 'test.bst'
- element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
- upstream_map, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
- mirror_map, _ = os.path.split(full_mirror)
- os.makedirs(element_dir)
- _yaml.dump(element, element_path)
-
- project = {
- 'name': 'test',
- 'element-path': 'elements',
- 'aliases': {
- alias: upstream_map + "/"
- },
- 'mirrors': [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + "/"],
- },
- },
- ]
- }
- project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
-
- # No obvious ways of checking that the mirror has been fetched
- # But at least we can be sure it succeeds
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
- result.assert_success()
-
-
-@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
@pytest.mark.parametrize("mirror", [("no-mirror"), ("mirror"), ("unrelated-mirror")])
def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
@@ -215,62 +156,6 @@ def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
-def test_mirror_fetch_upstream_absent(cli, tmpdir, datafiles, kind):
- dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- project_dir = os.path.join(str(tmpdir), 'project')
- os.makedirs(project_dir)
- element_dir = os.path.join(project_dir, 'elements')
-
- # Create repo objects of the upstream and mirror
- upstream_repo = create_repo(kind, upstream_repodir)
- ref = upstream_repo.create(dev_files_path)
- mirror_repo = upstream_repo.copy(mirror_repodir)
-
- element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=ref)
- ]
- }
-
- element_name = 'test.bst'
- element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
- upstream_map, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
- mirror_map, _ = os.path.split(full_mirror)
- os.makedirs(element_dir)
- _yaml.dump(element, element_path)
-
- project = {
- 'name': 'test',
- 'element-path': 'elements',
- 'aliases': {
- alias: 'http://www.example.com/'
- },
- 'mirrors': [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + "/"],
- },
- },
- ]
- }
- project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
-
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
- result.assert_success()
-
-
-@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.usefixtures("datafiles")
def test_mirror_fetch_multi(cli, tmpdir):
output_file = os.path.join(str(tmpdir), "output.txt")
@@ -406,287 +291,6 @@ def test_mirror_fetch_default_cmdline_overrides_config(cli, tmpdir):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
-def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind):
- bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
- dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- project_dir = os.path.join(str(tmpdir), 'project')
- os.makedirs(project_dir)
- element_dir = os.path.join(project_dir, 'elements')
-
- # Create repo objects of the upstream and mirror
- upstream_repo = create_repo(kind, upstream_repodir)
- upstream_repo.create(bin_files_path)
- mirror_repo = upstream_repo.copy(mirror_repodir)
- upstream_ref = upstream_repo.create(dev_files_path)
-
- element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref)
- ]
- }
-
- element['sources'][0]
- element_name = 'test.bst'
- element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
- upstream_map, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
- mirror_map, _ = os.path.split(full_mirror)
- os.makedirs(element_dir)
- _yaml.dump(element, element_path)
-
- project = {
- 'name': 'test',
- 'element-path': 'elements',
- 'aliases': {
- alias: upstream_map + "/"
- },
- 'mirrors': [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + "/"],
- },
- },
- ]
- }
- project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
-
- result = cli.run(project=project_dir, args=['source', 'track', element_name])
- result.assert_success()
-
- # Tracking tries upstream first. Check the ref is from upstream.
- new_element = _yaml.load(element_path)
- source = new_element['sources'][0]
- if 'ref' in source:
- assert source['ref'] == upstream_ref
-
-
-@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
-def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
- bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
- dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- project_dir = os.path.join(str(tmpdir), 'project')
- os.makedirs(project_dir)
- element_dir = os.path.join(project_dir, 'elements')
-
- # Create repo objects of the upstream and mirror
- upstream_repo = create_repo(kind, upstream_repodir)
- upstream_ref = upstream_repo.create(bin_files_path)
- mirror_repo = upstream_repo.copy(mirror_repodir)
- mirror_ref = upstream_ref
- upstream_ref = upstream_repo.create(dev_files_path)
-
- element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref)
- ]
- }
-
- element['sources'][0]
- element_name = 'test.bst'
- element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
- upstream_map, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
- mirror_map, _ = os.path.split(full_mirror)
- os.makedirs(element_dir)
- _yaml.dump(element, element_path)
-
- project = {
- 'name': 'test',
- 'element-path': 'elements',
- 'aliases': {
- alias: 'http://www.example.com/'
- },
- 'mirrors': [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + "/"],
- },
- },
- ]
- }
- project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
-
- result = cli.run(project=project_dir, args=['source', 'track', element_name])
- result.assert_success()
-
- # Check that tracking fell back to the mirror
- new_element = _yaml.load(element_path)
- source = new_element['sources'][0]
- if 'ref' in source:
- assert source['ref'] == mirror_ref
-
-
-@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
-def test_mirror_from_includes(cli, tmpdir, datafiles, kind):
- bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- project_dir = os.path.join(str(tmpdir), 'project')
- os.makedirs(project_dir)
- element_dir = os.path.join(project_dir, 'elements')
-
- # Create repo objects of the upstream and mirror
- upstream_repo = create_repo(kind, upstream_repodir)
- upstream_ref = upstream_repo.create(bin_files_path)
- mirror_repo = upstream_repo.copy(mirror_repodir)
-
- element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref)
- ]
- }
- element_name = 'test.bst'
- element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
- upstream_map, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
- mirror_map, _ = os.path.split(full_mirror)
- os.makedirs(element_dir)
- _yaml.dump(element, element_path)
-
- config_project_dir = str(tmpdir.join('config'))
- os.makedirs(config_project_dir, exist_ok=True)
- config_project = {
- 'name': 'config'
- }
- _yaml.dump(config_project, os.path.join(config_project_dir, 'project.conf'))
- extra_mirrors = {
- 'mirrors': [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + "/"],
- }
- }
- ]
- }
- _yaml.dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
- generate_junction(str(tmpdir.join('config_repo')),
- config_project_dir,
- os.path.join(element_dir, 'config.bst'))
-
- project = {
- 'name': 'test',
- 'element-path': 'elements',
- 'aliases': {
- alias: upstream_map + "/"
- },
- '(@)': [
- 'config.bst:mirrors.yml'
- ]
- }
- project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
-
- # Now make the upstream unavailable.
- os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
- result.assert_success()
-
-
-@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
-def test_mirror_junction_from_includes(cli, tmpdir, datafiles, kind):
- bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- project_dir = os.path.join(str(tmpdir), 'project')
- os.makedirs(project_dir)
- element_dir = os.path.join(project_dir, 'elements')
-
- # Create repo objects of the upstream and mirror
- upstream_repo = create_repo(kind, upstream_repodir)
- upstream_ref = upstream_repo.create(bin_files_path)
- mirror_repo = upstream_repo.copy(mirror_repodir)
-
- element = {
- 'kind': 'junction',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref)
- ]
- }
- element_name = 'test.bst'
- element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
- upstream_map, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
- mirror_map, _ = os.path.split(full_mirror)
- os.makedirs(element_dir)
- _yaml.dump(element, element_path)
-
- config_project_dir = str(tmpdir.join('config'))
- os.makedirs(config_project_dir, exist_ok=True)
- config_project = {
- 'name': 'config'
- }
- _yaml.dump(config_project, os.path.join(config_project_dir, 'project.conf'))
- extra_mirrors = {
- 'mirrors': [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + "/"],
- }
- }
- ]
- }
- _yaml.dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
- generate_junction(str(tmpdir.join('config_repo')),
- config_project_dir,
- os.path.join(element_dir, 'config.bst'))
-
- project = {
- 'name': 'test',
- 'element-path': 'elements',
- 'aliases': {
- alias: upstream_map + "/"
- },
- '(@)': [
- 'config.bst:mirrors.yml'
- ]
- }
- project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
-
- # Now make the upstream unavailable.
- os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
- result.assert_main_error(ErrorDomain.STREAM, None)
- # Now make the upstream available again.
- os.rename('{}.bak'.format(upstream_repo.repo), upstream_repo.repo)
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
- result.assert_success()
-
-
-@pytest.mark.datafiles(DATA_DIR)
def test_mirror_git_submodule_fetch(cli, tmpdir, datafiles):
# Test that it behaves as expected with submodules, both defined in config
# and discovered when fetching.
diff --git a/tests/frontend/track.py b/tests/frontend/track.py
index 1eadac8d9..322e9ad08 100644
--- a/tests/frontend/track.py
+++ b/tests/frontend/track.py
@@ -1,7 +1,7 @@
import stat
import os
import pytest
-from tests.testutils import create_repo, ALL_REPO_KINDS, generate_junction
+from tests.testutils import create_repo, generate_junction
from buildstream.plugintestutils import cli
from buildstream._exceptions import ErrorDomain, LoadErrorReason
@@ -28,131 +28,6 @@ def generate_element(repo, element_path, dep_name=None):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
-def test_track(cli, tmpdir, datafiles, ref_storage, kind):
- project = os.path.join(datafiles.dirname, datafiles.basename)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'track-test-{}.bst'.format(kind)
-
- configure_project(project, {
- 'ref-storage': ref_storage
- })
-
- # Create our repo object of the given source type with
- # the dev files, and then collect the initial ref.
- #
- repo = create_repo(kind, str(tmpdir))
- repo.create(dev_files_path)
-
- # Generate the element
- generate_element(repo, os.path.join(element_path, element_name))
-
- # Assert that a fetch is needed
- assert cli.get_element_state(project, element_name) == 'no reference'
-
- # Now first try to track it
- result = cli.run(project=project, args=['source', 'track', element_name])
- result.assert_success()
-
- # And now fetch it: The Source has probably already cached the
- # latest ref locally, but it is not required to have cached
- # the associated content of the latest ref at track time, that
- # is the job of fetch.
- result = cli.run(project=project, args=['source', 'fetch', element_name])
- result.assert_success()
-
- # Assert that we are now buildable because the source is
- # now cached.
- assert cli.get_element_state(project, element_name) == 'buildable'
-
- # Assert there was a project.refs created, depending on the configuration
- if ref_storage == 'project.refs':
- assert os.path.exists(os.path.join(project, 'project.refs'))
- else:
- assert not os.path.exists(os.path.join(project, 'project.refs'))
-
-
-# NOTE:
-#
-# This test checks that recursive tracking works by observing
-# element states after running a recursive tracking operation.
-#
-# However, this test is ALSO valuable as it stresses the source
-# plugins in a situation where many source plugins are operating
-# at once on the same backing repository.
-#
-# Do not change this test to use a separate 'Repo' per element
-# as that would defeat the purpose of the stress test, otherwise
-# please refactor that aspect into another test.
-#
-@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("amount", [(1), (10)])
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
-def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
- project = os.path.join(datafiles.dirname, datafiles.basename)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
-
- # Try to actually launch as many fetch jobs as possible at the same time
- #
- # This stresses the Source plugins and helps to ensure that
- # they handle concurrent access to the store correctly.
- cli.configure({
- 'scheduler': {
- 'fetchers': amount,
- }
- })
-
- # Create our repo object of the given source type with
- # the dev files, and then collect the initial ref.
- #
- repo = create_repo(kind, str(tmpdir))
- repo.create(dev_files_path)
-
- # Write out our test targets
- element_names = []
- last_element_name = None
- for i in range(amount + 1):
- element_name = 'track-test-{}-{}.bst'.format(kind, i + 1)
- filename = os.path.join(element_path, element_name)
-
- element_names.append(element_name)
-
- generate_element(repo, filename, dep_name=last_element_name)
- last_element_name = element_name
-
- # Assert that a fetch is needed
- states = cli.get_element_states(project, [last_element_name])
- for element_name in element_names:
- assert states[element_name] == 'no reference'
-
- # Now first try to track it
- result = cli.run(project=project, args=[
- 'source', 'track', '--deps', 'all',
- last_element_name])
- result.assert_success()
-
- # And now fetch it: The Source has probably already cached the
- # latest ref locally, but it is not required to have cached
- # the associated content of the latest ref at track time, that
- # is the job of fetch.
- result = cli.run(project=project, args=[
- 'source', 'fetch', '--deps', 'all',
- last_element_name])
- result.assert_success()
-
- # Assert that the base is buildable and the rest are waiting
- states = cli.get_element_states(project, [last_element_name])
- for element_name in element_names:
- if element_name == element_names[0]:
- assert states[element_name] == 'buildable'
- else:
- assert states[element_name] == 'waiting'
-
-
-@pytest.mark.datafiles(DATA_DIR)
def test_track_single(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
dev_files_path = os.path.join(project, 'files', 'dev-files')
@@ -194,52 +69,6 @@ def test_track_single(cli, tmpdir, datafiles):
assert states[element_target_name] == 'waiting'
-@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
-def test_track_recurse_except(cli, tmpdir, datafiles, kind):
- project = os.path.join(datafiles.dirname, datafiles.basename)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_dep_name = 'track-test-dep-{}.bst'.format(kind)
- element_target_name = 'track-test-target-{}.bst'.format(kind)
-
- # Create our repo object of the given source type with
- # the dev files, and then collect the initial ref.
- #
- repo = create_repo(kind, str(tmpdir))
- repo.create(dev_files_path)
-
- # Write out our test targets
- generate_element(repo, os.path.join(element_path, element_dep_name))
- generate_element(repo, os.path.join(element_path, element_target_name),
- dep_name=element_dep_name)
-
- # Assert that a fetch is needed
- states = cli.get_element_states(project, [element_target_name])
- assert states[element_dep_name] == 'no reference'
- assert states[element_target_name] == 'no reference'
-
- # Now first try to track it
- result = cli.run(project=project, args=[
- 'source', 'track', '--deps', 'all', '--except', element_dep_name,
- element_target_name])
- result.assert_success()
-
- # And now fetch it: The Source has probably already cached the
- # latest ref locally, but it is not required to have cached
- # the associated content of the latest ref at track time, that
- # is the job of fetch.
- result = cli.run(project=project, args=[
- 'source', 'fetch', '--deps', 'none',
- element_target_name])
- result.assert_success()
-
- # Assert that the dependency is buildable and the target is waiting
- states = cli.get_element_states(project, [element_target_name])
- assert states[element_dep_name] == 'no reference'
- assert states[element_target_name] == 'waiting'
-
-
@pytest.mark.datafiles(os.path.join(TOP_DIR))
@pytest.mark.parametrize("ref_storage", [('inline'), ('project-refs')])
def test_track_optional(cli, tmpdir, datafiles, ref_storage):
@@ -480,202 +309,6 @@ def test_junction_element(cli, tmpdir, datafiles, ref_storage):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
-def test_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
- project = os.path.join(datafiles.dirname, datafiles.basename)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- etc_files = os.path.join(subproject_path, 'files', 'etc-files')
- repo_element_path = os.path.join(subproject_path, 'elements',
- 'import-etc-repo.bst')
-
- configure_project(project, {
- 'ref-storage': ref_storage
- })
-
- repo = create_repo(kind, str(tmpdir.join('element_repo')))
- repo.create(etc_files)
-
- generate_element(repo, repo_element_path)
-
- generate_junction(str(tmpdir.join('junction_repo')),
- subproject_path, junction_path, store_ref=False)
-
- # Track the junction itself first.
- result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
- result.assert_success()
-
- assert cli.get_element_state(project, 'junction.bst:import-etc-repo.bst') == 'no reference'
-
- # Track the cross junction element. -J is not given, it is implied.
- result = cli.run(project=project, args=['source', 'track', 'junction.bst:import-etc-repo.bst'])
-
- if ref_storage == 'inline':
- # This is not allowed to track cross junction without project.refs.
- result.assert_main_error(ErrorDomain.PIPELINE, 'untrackable-sources')
- else:
- result.assert_success()
-
- assert cli.get_element_state(project, 'junction.bst:import-etc-repo.bst') == 'buildable'
-
- assert os.path.exists(os.path.join(project, 'project.refs'))
-
-
-@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
-def test_track_include(cli, tmpdir, datafiles, ref_storage, kind):
- project = os.path.join(datafiles.dirname, datafiles.basename)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'track-test-{}.bst'.format(kind)
-
- configure_project(project, {
- 'ref-storage': ref_storage
- })
-
- # Create our repo object of the given source type with
- # the dev files, and then collect the initial ref.
- #
- repo = create_repo(kind, str(tmpdir))
- ref = repo.create(dev_files_path)
-
- # Generate the element
- element = {
- 'kind': 'import',
- '(@)': ['elements/sources.yml']
- }
- sources = {
- 'sources': [
- repo.source_config()
- ]
- }
-
- _yaml.dump(element, os.path.join(element_path, element_name))
- _yaml.dump(sources, os.path.join(element_path, 'sources.yml'))
-
- # Assert that a fetch is needed
- assert cli.get_element_state(project, element_name) == 'no reference'
-
- # Now first try to track it
- result = cli.run(project=project, args=['source', 'track', element_name])
- result.assert_success()
-
- # And now fetch it: The Source has probably already cached the
- # latest ref locally, but it is not required to have cached
- # the associated content of the latest ref at track time, that
- # is the job of fetch.
- result = cli.run(project=project, args=['source', 'fetch', element_name])
- result.assert_success()
-
- # Assert that we are now buildable because the source is
- # now cached.
- assert cli.get_element_state(project, element_name) == 'buildable'
-
- # Assert there was a project.refs created, depending on the configuration
- if ref_storage == 'project.refs':
- assert os.path.exists(os.path.join(project, 'project.refs'))
- else:
- assert not os.path.exists(os.path.join(project, 'project.refs'))
- new_sources = _yaml.load(os.path.join(element_path, 'sources.yml'))
- assert 'sources' in new_sources
- assert len(new_sources['sources']) == 1
- assert 'ref' in new_sources['sources'][0]
- assert ref == new_sources['sources'][0]['ref']
-
-
-@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
-def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind):
- project = os.path.join(datafiles.dirname, datafiles.basename)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'track-test-{}.bst'.format(kind)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- sub_element_path = os.path.join(subproject_path, 'elements')
- junction_path = os.path.join(element_path, 'junction.bst')
-
- configure_project(project, {
- 'ref-storage': ref_storage
- })
-
- # Create our repo object of the given source type with
- # the dev files, and then collect the initial ref.
- #
- repo = create_repo(kind, str(tmpdir.join('element_repo')))
- repo.create(dev_files_path)
-
- # Generate the element
- element = {
- 'kind': 'import',
- '(@)': ['junction.bst:elements/sources.yml']
- }
- sources = {
- 'sources': [
- repo.source_config()
- ]
- }
-
- _yaml.dump(element, os.path.join(element_path, element_name))
- _yaml.dump(sources, os.path.join(sub_element_path, 'sources.yml'))
-
- generate_junction(str(tmpdir.join('junction_repo')),
- subproject_path, junction_path, store_ref=True)
-
- result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
- result.assert_success()
-
- # Assert that a fetch is needed
- assert cli.get_element_state(project, element_name) == 'no reference'
-
- # Now first try to track it
- result = cli.run(project=project, args=['source', 'track', element_name])
-
- # Assert there was a project.refs created, depending on the configuration
- if ref_storage == 'inline':
- # FIXME: We should expect an error. But only a warning is emitted
- # result.assert_main_error(ErrorDomain.SOURCE, 'tracking-junction-fragment')
-
- assert 'junction.bst:elements/sources.yml: Cannot track source in a fragment from a junction' in result.stderr
- else:
- assert os.path.exists(os.path.join(project, 'project.refs'))
-
- # And now fetch it: The Source has probably already cached the
- # latest ref locally, but it is not required to have cached
- # the associated content of the latest ref at track time, that
- # is the job of fetch.
- result = cli.run(project=project, args=['source', 'fetch', element_name])
- result.assert_success()
-
- # Assert that we are now buildable because the source is
- # now cached.
- assert cli.get_element_state(project, element_name) == 'buildable'
-
-
-@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
-def test_track_junction_included(cli, tmpdir, datafiles, ref_storage, kind):
- project = os.path.join(datafiles.dirname, datafiles.basename)
- element_path = os.path.join(project, 'elements')
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(element_path, 'junction.bst')
-
- configure_project(project, {
- 'ref-storage': ref_storage,
- '(@)': ['junction.bst:test.yml']
- })
-
- generate_junction(str(tmpdir.join('junction_repo')),
- subproject_path, junction_path, store_ref=False)
-
- result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
- result.assert_success()
-
-
-@pytest.mark.datafiles(DATA_DIR)
def test_track_error_cannot_write_file(cli, tmpdir, datafiles):
if os.geteuid() == 0:
pytest.skip("This is not testable with root permissions")
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index 6760b101c..f0ce48eb9 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -46,7 +46,7 @@ DATA_DIR = os.path.join(
)
-class WorkspaceCreater():
+class WorkspaceCreator():
def __init__(self, cli, tmpdir, datafiles, project_path=None):
self.cli = cli
self.tmpdir = tmpdir
@@ -161,7 +161,7 @@ class WorkspaceCreater():
def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
project_path=None, element_attrs=None, no_checkout=False):
- workspace_object = WorkspaceCreater(cli, tmpdir, datafiles, project_path)
+ workspace_object = WorkspaceCreator(cli, tmpdir, datafiles, project_path)
workspaces = workspace_object.open_workspaces((kind, ), track, (suffix, ), workspace_dir,
element_attrs, no_checkout)
assert len(workspaces) == 1
@@ -170,12 +170,6 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("kind", repo_kinds)
-def test_open(cli, tmpdir, datafiles, kind):
- open_workspace(cli, tmpdir, datafiles, kind, False)
-
-
-@pytest.mark.datafiles(DATA_DIR)
def test_open_bzr_customize(cli, tmpdir, datafiles):
element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "bzr", False)
@@ -196,7 +190,7 @@ def test_open_bzr_customize(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_open_multi(cli, tmpdir, datafiles):
- workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
+ workspace_object = WorkspaceCreator(cli, tmpdir, datafiles)
workspaces = workspace_object.open_workspaces(repo_kinds, False)
for (elname, workspace), kind in zip(workspaces, repo_kinds):
@@ -214,7 +208,7 @@ def test_open_multi(cli, tmpdir, datafiles):
@pytest.mark.skipif(os.geteuid() == 0, reason="root may have CAP_DAC_OVERRIDE and ignore permissions")
@pytest.mark.datafiles(DATA_DIR)
def test_open_multi_unwritable(cli, tmpdir, datafiles):
- workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
+ workspace_object = WorkspaceCreator(cli, tmpdir, datafiles)
element_tuples = workspace_object.create_workspace_elements(repo_kinds, False, repo_kinds)
os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
@@ -241,7 +235,7 @@ def test_open_multi_unwritable(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_open_multi_with_directory(cli, tmpdir, datafiles):
- workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
+ workspace_object = WorkspaceCreator(cli, tmpdir, datafiles)
element_tuples = workspace_object.create_workspace_elements(repo_kinds, False, repo_kinds)
os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
@@ -260,7 +254,7 @@ def test_open_multi_with_directory(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_open_defaultlocation(cli, tmpdir, datafiles):
- workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
+ workspace_object = WorkspaceCreator(cli, tmpdir, datafiles)
((element_name, workspace_dir), ) = workspace_object.create_workspace_elements(['git'], False, ['git'])
os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
@@ -292,7 +286,7 @@ def test_open_defaultlocation(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_open_defaultlocation_exists(cli, tmpdir, datafiles):
- workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
+ workspace_object = WorkspaceCreator(cli, tmpdir, datafiles)
((element_name, workspace_dir), ) = workspace_object.create_workspace_elements(['git'], False, ['git'])
os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
diff --git a/tests/integration/source-determinism.py b/tests/integration/source-determinism.py
index 9fc153e1a..7cfca98e0 100644
--- a/tests/integration/source-determinism.py
+++ b/tests/integration/source-determinism.py
@@ -3,7 +3,7 @@ import pytest
from buildstream import _yaml
from buildstream.plugintestutils import cli_integration as cli
-from tests.testutils import create_repo, ALL_REPO_KINDS
+from tests.testutils import create_repo
from tests.testutils.site import HAVE_SANDBOX
@@ -29,78 +29,6 @@ def create_test_directory(*path, mode=0o644):
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("kind", ['local', *ALL_REPO_KINDS])
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
- project = str(datafiles)
- element_name = 'list.bst'
- element_path = os.path.join(project, 'elements', element_name)
- repodir = os.path.join(str(tmpdir), 'repo')
- sourcedir = os.path.join(project, 'source')
-
- create_test_file(sourcedir, 'a.txt', mode=0o700)
- create_test_file(sourcedir, 'b.txt', mode=0o755)
- create_test_file(sourcedir, 'c.txt', mode=0o600)
- create_test_file(sourcedir, 'd.txt', mode=0o400)
- create_test_file(sourcedir, 'e.txt', mode=0o644)
- create_test_file(sourcedir, 'f.txt', mode=0o4755)
- create_test_file(sourcedir, 'g.txt', mode=0o2755)
- create_test_file(sourcedir, 'h.txt', mode=0o1755)
- create_test_directory(sourcedir, 'dir-a', mode=0o0700)
- create_test_directory(sourcedir, 'dir-c', mode=0o0755)
- create_test_directory(sourcedir, 'dir-d', mode=0o4755)
- create_test_directory(sourcedir, 'dir-e', mode=0o2755)
- create_test_directory(sourcedir, 'dir-f', mode=0o1755)
-
- if kind == 'local':
- source = {'kind': 'local',
- 'path': 'source'}
- else:
- repo = create_repo(kind, repodir)
- ref = repo.create(sourcedir)
- source = repo.source_config(ref=ref)
- element = {
- 'kind': 'manual',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build'
- }
- ],
- 'sources': [
- source
- ],
- 'config': {
- 'install-commands': [
- 'ls -l >"%{install-root}/ls-l"'
- ]
- }
- }
- _yaml.dump(element, element_path)
-
- def get_value_for_umask(umask):
- checkoutdir = os.path.join(str(tmpdir), 'checkout-{}'.format(umask))
-
- old_umask = os.umask(umask)
-
- try:
- result = cli.run(project=project, args=['build', element_name])
- result.assert_success()
-
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkoutdir])
- result.assert_success()
-
- with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f:
- return f.read()
- finally:
- os.umask(old_umask)
- cli.remove_artifact_from_cache(project, element_name)
-
- assert get_value_for_umask(0o022) == get_value_for_umask(0o077)
-
-
-@pytest.mark.integration
-@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
def test_deterministic_source_local(cli, tmpdir, datafiles):
"""Only user rights should be considered for local source.
diff --git a/tests/sources/generic/__init__.py b/tests/sources/generic/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/tests/sources/generic/__init__.py
diff --git a/tests/sources/generic/build_checkout.py b/tests/sources/generic/build_checkout.py
new file mode 100644
index 000000000..14a2bc142
--- /dev/null
+++ b/tests/sources/generic/build_checkout.py
@@ -0,0 +1,81 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import os
+import pytest
+
+from tests.testutils import create_repo, ALL_REPO_KINDS
+
+from buildstream.plugintestutils import cli
+from buildstream import _yaml
+
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
+fetch_build_checkout_combos = \
+ [("strict", kind) for kind in ALL_REPO_KINDS] + \
+ [("non-strict", kind) for kind in ALL_REPO_KINDS]
+
+
+def strict_args(args, strict):
+ if strict != "strict":
+ return ['--no-strict', *args]
+ return args
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("strict,kind", fetch_build_checkout_combos)
+def test_fetch_build_checkout(cli, tmpdir, datafiles, strict, kind):
+ checkout = os.path.join(cli.directory, 'checkout')
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ dev_files_path = os.path.join(project, 'files', 'dev-files')
+ element_path = os.path.join(project, 'elements')
+ element_name = 'build-test-{}.bst'.format(kind)
+
+ # Create our repo object of the given source type with
+ # the dev files, and then collect the initial ref.
+ #
+ repo = create_repo(kind, str(tmpdir))
+ ref = repo.create(dev_files_path)
+
+ # Write out our test target
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ repo.source_config(ref=ref)
+ ]
+ }
+ _yaml.dump(element,
+ os.path.join(element_path,
+ element_name))
+
+ assert cli.get_element_state(project, element_name) == 'fetch needed'
+ result = cli.run(project=project, args=strict_args(['build', element_name], strict))
+ result.assert_success()
+ assert cli.get_element_state(project, element_name) == 'cached'
+
+ # Now check it out
+ result = cli.run(project=project, args=strict_args([
+ 'artifact', 'checkout', element_name, '--directory', checkout
+ ], strict))
+ result.assert_success()
+
+ # Check that the pony.h include from files/dev-files exists
+ filename = os.path.join(checkout, 'usr', 'include', 'pony.h')
+ assert os.path.exists(filename)
diff --git a/tests/sources/generic/fetch.py b/tests/sources/generic/fetch.py
new file mode 100644
index 000000000..bdb8b8970
--- /dev/null
+++ b/tests/sources/generic/fetch.py
@@ -0,0 +1,106 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import os
+import pytest
+
+from tests.testutils import create_repo, ALL_REPO_KINDS, generate_junction
+from tests.frontend import configure_project
+
+from buildstream.plugintestutils import cli
+from buildstream import _yaml
+
+
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_fetch(cli, tmpdir, datafiles, kind):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ bin_files_path = os.path.join(project, 'files', 'bin-files')
+ element_path = os.path.join(project, 'elements')
+ element_name = 'fetch-test-{}.bst'.format(kind)
+
+ # Create our repo object of the given source type with
+ # the bin files, and then collect the initial ref.
+ #
+ repo = create_repo(kind, str(tmpdir))
+ ref = repo.create(bin_files_path)
+
+ # Write out our test target
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ repo.source_config(ref=ref)
+ ]
+ }
+ _yaml.dump(element,
+ os.path.join(element_path,
+ element_name))
+
+ # Assert that a fetch is needed
+ assert cli.get_element_state(project, element_name) == 'fetch needed'
+
+ # Now try to fetch it
+ result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+ # Assert that we are now buildable because the source is
+ # now cached.
+ assert cli.get_element_state(project, element_name) == 'buildable'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_fetch_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
+ project = str(datafiles)
+ subproject_path = os.path.join(project, 'files', 'sub-project')
+ junction_path = os.path.join(project, 'elements', 'junction.bst')
+
+ import_etc_path = os.path.join(subproject_path, 'elements', 'import-etc-repo.bst')
+ etc_files_path = os.path.join(subproject_path, 'files', 'etc-files')
+
+ repo = create_repo(kind, str(tmpdir.join('import-etc')))
+ ref = repo.create(etc_files_path)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ repo.source_config(ref=(ref if ref_storage == 'inline' else None))
+ ]
+ }
+ _yaml.dump(element, import_etc_path)
+
+ configure_project(project, {
+ 'ref-storage': ref_storage
+ })
+
+ generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == 'inline'))
+
+ if ref_storage == 'project.refs':
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ result.assert_success()
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst:import-etc.bst'])
+ result.assert_success()
+
+ result = cli.run(project=project, args=['source', 'fetch', 'junction.bst:import-etc.bst'])
+ result.assert_success()
diff --git a/tests/sources/generic/mirror.py b/tests/sources/generic/mirror.py
new file mode 100644
index 000000000..63571234e
--- /dev/null
+++ b/tests/sources/generic/mirror.py
@@ -0,0 +1,426 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import os
+import pytest
+
+from tests.testutils import create_repo, ALL_REPO_KINDS, generate_junction
+
+from buildstream.plugintestutils import cli
+from buildstream import _yaml
+from buildstream._exceptions import ErrorDomain
+
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_fetch(cli, tmpdir, datafiles, kind):
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects of the upstream and mirror
+ upstream_repo = create_repo(kind, upstream_repodir)
+ upstream_repo.create(bin_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+ upstream_ref = upstream_repo.create(dev_files_path)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ upstream_repo.source_config(ref=upstream_ref)
+ ]
+ }
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ upstream_map, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: upstream_map + "/"
+ },
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ },
+ },
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+ # No obvious ways of checking that the mirror has been fetched
+ # But at least we can be sure it succeeds
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_fetch_upstream_absent(cli, tmpdir, datafiles, kind):
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects of the upstream and mirror
+ upstream_repo = create_repo(kind, upstream_repodir)
+ ref = upstream_repo.create(dev_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ upstream_repo.source_config(ref=ref)
+ ]
+ }
+
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ upstream_map, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: 'http://www.example.com/'
+ },
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ },
+ },
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_from_includes(cli, tmpdir, datafiles, kind):
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects for the upstream and the mirror
+ upstream_repo = create_repo(kind, upstream_repodir)
+ upstream_ref = upstream_repo.create(bin_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ upstream_repo.source_config(ref=upstream_ref)
+ ]
+ }
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ upstream_map, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
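+ # Set up a separate 'config' project, exposed through a junction,
+ # whose mirrors.yml is pulled into project.conf via the (@) include.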
+ config_project_dir = str(tmpdir.join('config'))
+ os.makedirs(config_project_dir, exist_ok=True)
+ config_project = {
+ 'name': 'config'
+ }
+ _yaml.dump(config_project, os.path.join(config_project_dir, 'project.conf'))
+ extra_mirrors = {
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ }
+ }
+ ]
+ }
+ _yaml.dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
+ generate_junction(str(tmpdir.join('config_repo')),
+ config_project_dir,
+ os.path.join(element_dir, 'config.bst'))
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: upstream_map + "/"
+ },
+ '(@)': [
+ 'config.bst:mirrors.yml'
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+ # Now make the upstream unavailable.
+ os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_junction_from_includes(cli, tmpdir, datafiles, kind):
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects for the upstream and the mirror
+ upstream_repo = create_repo(kind, upstream_repodir)
+ upstream_ref = upstream_repo.create(bin_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+
+ element = {
+ 'kind': 'junction',
+ 'sources': [
+ upstream_repo.source_config(ref=upstream_ref)
+ ]
+ }
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ upstream_map, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ config_project_dir = str(tmpdir.join('config'))
+ os.makedirs(config_project_dir, exist_ok=True)
+ config_project = {
+ 'name': 'config'
+ }
+ _yaml.dump(config_project, os.path.join(config_project_dir, 'project.conf'))
+ extra_mirrors = {
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ }
+ }
+ ]
+ }
+ _yaml.dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
+ generate_junction(str(tmpdir.join('config_repo')),
+ config_project_dir,
+ os.path.join(element_dir, 'config.bst'))
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: upstream_map + "/"
+ },
+ '(@)': [
+ 'config.bst:mirrors.yml'
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+ # Now make the upstream unavailable.
+ os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
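+ # The junction fetch is expected to fail here: mirror configuration
+ # that only comes in via an include is not applied when fetching the
+ # junction itself.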
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result.assert_main_error(ErrorDomain.STREAM, None)
+ # Now make the upstream available again.
+ os.rename('{}.bak'.format(upstream_repo.repo), upstream_repo.repo)
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind):
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects for the upstream and the mirror
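+ # The mirror is copied before the second commit, so the upstream's
+ # latest ref is newer than anything the mirror has.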
+ upstream_repo = create_repo(kind, upstream_repodir)
+ upstream_repo.create(bin_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+ upstream_ref = upstream_repo.create(dev_files_path)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ upstream_repo.source_config(ref=upstream_ref)
+ ]
+ }
+
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ upstream_map, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: upstream_map + "/"
+ },
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ },
+ },
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+ result = cli.run(project=project_dir, args=['source', 'track', element_name])
+ result.assert_success()
+
+ # Tracking tries the upstream first; check that the ref came from the upstream.
+ new_element = _yaml.load(element_path)
+ source = new_element['sources'][0]
+ if 'ref' in source:
+ assert source['ref'] == upstream_ref
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects for the upstream and the mirror
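+ # Record the ref the mirror was copied at before committing a newer
+ # ref upstream, so we can tell which repo tracking actually used.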
+ upstream_repo = create_repo(kind, upstream_repodir)
+ upstream_ref = upstream_repo.create(bin_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+ mirror_ref = upstream_ref
+ upstream_ref = upstream_repo.create(dev_files_path)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ upstream_repo.source_config(ref=upstream_ref)
+ ]
+ }
+
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ upstream_map, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: 'http://www.example.com/'
+ },
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ },
+ },
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+ result = cli.run(project=project_dir, args=['source', 'track', element_name])
+ result.assert_success()
+
+ # Check that tracking fell back to the mirror
+ new_element = _yaml.load(element_path)
+ source = new_element['sources'][0]
+ if 'ref' in source:
+ assert source['ref'] == mirror_ref
diff --git a/tests/sources/generic/project/elements/base.bst b/tests/sources/generic/project/elements/base.bst
new file mode 100644
index 000000000..428afa736
--- /dev/null
+++ b/tests/sources/generic/project/elements/base.bst
@@ -0,0 +1,5 @@
+# elements/base.bst
+
+kind: stack
+depends:
+ - base/base-alpine.bst
diff --git a/tests/sources/generic/project/elements/base/base-alpine.bst b/tests/sources/generic/project/elements/base/base-alpine.bst
new file mode 100644
index 000000000..c5833095d
--- /dev/null
+++ b/tests/sources/generic/project/elements/base/base-alpine.bst
@@ -0,0 +1,17 @@
+kind: import
+
+description: |
+ Alpine Linux base for tests
+
+ Generated using the `tests/integration-tests/base/generate-base.sh` script.
+
+sources:
+ - kind: tar
+ base-dir: ''
+ (?):
+ - arch == "x86-64":
+ ref: 3eb559250ba82b64a68d86d0636a6b127aa5f6d25d3601a79f79214dc9703639
+ url: "alpine:integration-tests-base.v1.x86_64.tar.xz"
+ - arch == "aarch64":
+ ref: 431fb5362032ede6f172e70a3258354a8fd71fcbdeb1edebc0e20968c792329a
+ url: "alpine:integration-tests-base.v1.aarch64.tar.xz"
diff --git a/tests/sources/generic/project/elements/import-bin.bst b/tests/sources/generic/project/elements/import-bin.bst
new file mode 100644
index 000000000..a847c0c23
--- /dev/null
+++ b/tests/sources/generic/project/elements/import-bin.bst
@@ -0,0 +1,4 @@
+kind: import
+sources:
+- kind: local
+ path: files/bin-files
diff --git a/tests/sources/generic/project/elements/import-dev.bst b/tests/sources/generic/project/elements/import-dev.bst
new file mode 100644
index 000000000..152a54667
--- /dev/null
+++ b/tests/sources/generic/project/elements/import-dev.bst
@@ -0,0 +1,4 @@
+kind: import
+sources:
+- kind: local
+ path: files/dev-files
diff --git a/tests/sources/generic/project/elements/multiple_targets/dependency/horsey.bst b/tests/sources/generic/project/elements/multiple_targets/dependency/horsey.bst
new file mode 100644
index 000000000..bd1ffae9c
--- /dev/null
+++ b/tests/sources/generic/project/elements/multiple_targets/dependency/horsey.bst
@@ -0,0 +1,3 @@
+kind: autotools
+depends:
+ - multiple_targets/dependency/pony.bst
diff --git a/tests/sources/generic/project/elements/multiple_targets/dependency/pony.bst b/tests/sources/generic/project/elements/multiple_targets/dependency/pony.bst
new file mode 100644
index 000000000..3c29b4ea1
--- /dev/null
+++ b/tests/sources/generic/project/elements/multiple_targets/dependency/pony.bst
@@ -0,0 +1 @@
+kind: autotools
diff --git a/tests/sources/generic/project/elements/multiple_targets/dependency/zebry.bst b/tests/sources/generic/project/elements/multiple_targets/dependency/zebry.bst
new file mode 100644
index 000000000..98447ab52
--- /dev/null
+++ b/tests/sources/generic/project/elements/multiple_targets/dependency/zebry.bst
@@ -0,0 +1,3 @@
+kind: autotools
+depends:
+ - multiple_targets/dependency/horsey.bst
diff --git a/tests/sources/generic/project/elements/multiple_targets/order/0.bst b/tests/sources/generic/project/elements/multiple_targets/order/0.bst
new file mode 100644
index 000000000..a99be06a0
--- /dev/null
+++ b/tests/sources/generic/project/elements/multiple_targets/order/0.bst
@@ -0,0 +1,7 @@
+kind: autotools
+description: Root node
+depends:
+ - multiple_targets/order/2.bst
+ - multiple_targets/order/3.bst
+ - filename: multiple_targets/order/run.bst
+ type: runtime
diff --git a/tests/sources/generic/project/elements/multiple_targets/order/1.bst b/tests/sources/generic/project/elements/multiple_targets/order/1.bst
new file mode 100644
index 000000000..82b507a62
--- /dev/null
+++ b/tests/sources/generic/project/elements/multiple_targets/order/1.bst
@@ -0,0 +1,4 @@
+kind: autotools
+description: Root node
+depends:
+ - multiple_targets/order/9.bst
diff --git a/tests/sources/generic/project/elements/multiple_targets/order/2.bst b/tests/sources/generic/project/elements/multiple_targets/order/2.bst
new file mode 100644
index 000000000..ee1afae20
--- /dev/null
+++ b/tests/sources/generic/project/elements/multiple_targets/order/2.bst
@@ -0,0 +1,4 @@
+kind: autotools
+description: First dependency level
+depends:
+ - multiple_targets/order/3.bst
diff --git a/tests/sources/generic/project/elements/multiple_targets/order/3.bst b/tests/sources/generic/project/elements/multiple_targets/order/3.bst
new file mode 100644
index 000000000..4c3a23dab
--- /dev/null
+++ b/tests/sources/generic/project/elements/multiple_targets/order/3.bst
@@ -0,0 +1,6 @@
+kind: autotools
+description: Second dependency level
+depends:
+ - multiple_targets/order/4.bst
+ - multiple_targets/order/5.bst
+ - multiple_targets/order/6.bst
diff --git a/tests/sources/generic/project/elements/multiple_targets/order/4.bst b/tests/sources/generic/project/elements/multiple_targets/order/4.bst
new file mode 100644
index 000000000..b663a0b52
--- /dev/null
+++ b/tests/sources/generic/project/elements/multiple_targets/order/4.bst
@@ -0,0 +1,2 @@
+kind: autotools
+description: Third level dependency
diff --git a/tests/sources/generic/project/elements/multiple_targets/order/5.bst b/tests/sources/generic/project/elements/multiple_targets/order/5.bst
new file mode 100644
index 000000000..b9efcf71b
--- /dev/null
+++ b/tests/sources/generic/project/elements/multiple_targets/order/5.bst
@@ -0,0 +1,2 @@
+kind: autotools
+description: Fifth level dependency
diff --git a/tests/sources/generic/project/elements/multiple_targets/order/6.bst b/tests/sources/generic/project/elements/multiple_targets/order/6.bst
new file mode 100644
index 000000000..6c19d04e3
--- /dev/null
+++ b/tests/sources/generic/project/elements/multiple_targets/order/6.bst
@@ -0,0 +1,4 @@
+kind: autotools
+description: Fourth level dependency
+depends:
+ - multiple_targets/order/5.bst
diff --git a/tests/sources/generic/project/elements/multiple_targets/order/7.bst b/tests/sources/generic/project/elements/multiple_targets/order/7.bst
new file mode 100644
index 000000000..6805b3e6d
--- /dev/null
+++ b/tests/sources/generic/project/elements/multiple_targets/order/7.bst
@@ -0,0 +1,4 @@
+kind: autotools
+description: Third level dependency
+depends:
+ - multiple_targets/order/6.bst
diff --git a/tests/sources/generic/project/elements/multiple_targets/order/8.bst b/tests/sources/generic/project/elements/multiple_targets/order/8.bst
new file mode 100644
index 000000000..b8d8964a0
--- /dev/null
+++ b/tests/sources/generic/project/elements/multiple_targets/order/8.bst
@@ -0,0 +1,4 @@
+kind: autotools
+description: Second level dependency
+depends:
+ - multiple_targets/order/7.bst
diff --git a/tests/sources/generic/project/elements/multiple_targets/order/9.bst b/tests/sources/generic/project/elements/multiple_targets/order/9.bst
new file mode 100644
index 000000000..cc13bf3f0
--- /dev/null
+++ b/tests/sources/generic/project/elements/multiple_targets/order/9.bst
@@ -0,0 +1,4 @@
+kind: autotools
+description: First level dependency
+depends:
+ - multiple_targets/order/8.bst
diff --git a/tests/sources/generic/project/elements/multiple_targets/order/run.bst b/tests/sources/generic/project/elements/multiple_targets/order/run.bst
new file mode 100644
index 000000000..9b3d2446c
--- /dev/null
+++ b/tests/sources/generic/project/elements/multiple_targets/order/run.bst
@@ -0,0 +1,2 @@
+kind: autotools
+description: Not a root node, yet built at the same time as root nodes
diff --git a/tests/sources/generic/project/files/bar b/tests/sources/generic/project/files/bar
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/tests/sources/generic/project/files/bar
diff --git a/tests/sources/generic/project/files/bin-files/usr/bin/hello b/tests/sources/generic/project/files/bin-files/usr/bin/hello
new file mode 100755
index 000000000..f534a4083
--- /dev/null
+++ b/tests/sources/generic/project/files/bin-files/usr/bin/hello
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+echo "Hello !"
diff --git a/tests/sources/generic/project/files/dev-files/usr/include/pony.h b/tests/sources/generic/project/files/dev-files/usr/include/pony.h
new file mode 100644
index 000000000..40bd0c2e7
--- /dev/null
+++ b/tests/sources/generic/project/files/dev-files/usr/include/pony.h
@@ -0,0 +1,12 @@
+#ifndef __PONY_H__
+#define __PONY_H__
+
+#define PONY_BEGIN "Once upon a time, there was a pony."
+#define PONY_END "And they lived happily ever after, the end."
+
+#define MAKE_PONY(story) \
+ PONY_BEGIN \
+ story \
+ PONY_END
+
+#endif /* __PONY_H__ */
diff --git a/tests/sources/generic/project/files/etc-files/etc/buildstream/config b/tests/sources/generic/project/files/etc-files/etc/buildstream/config
new file mode 100644
index 000000000..04204c7c9
--- /dev/null
+++ b/tests/sources/generic/project/files/etc-files/etc/buildstream/config
@@ -0,0 +1 @@
+config
diff --git a/tests/sources/generic/project/files/foo b/tests/sources/generic/project/files/foo
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/tests/sources/generic/project/files/foo
diff --git a/tests/sources/generic/project/files/source-bundle/llamas.txt b/tests/sources/generic/project/files/source-bundle/llamas.txt
new file mode 100644
index 000000000..f98b24871
--- /dev/null
+++ b/tests/sources/generic/project/files/source-bundle/llamas.txt
@@ -0,0 +1 @@
+llamas
diff --git a/tests/sources/generic/project/files/sub-project/elements/import-etc.bst b/tests/sources/generic/project/files/sub-project/elements/import-etc.bst
new file mode 100644
index 000000000..f0171990e
--- /dev/null
+++ b/tests/sources/generic/project/files/sub-project/elements/import-etc.bst
@@ -0,0 +1,4 @@
+kind: import
+sources:
+- kind: local
+ path: files/etc-files
diff --git a/tests/sources/generic/project/files/sub-project/files/etc-files/etc/animal.conf b/tests/sources/generic/project/files/sub-project/files/etc-files/etc/animal.conf
new file mode 100644
index 000000000..db8c36cba
--- /dev/null
+++ b/tests/sources/generic/project/files/sub-project/files/etc-files/etc/animal.conf
@@ -0,0 +1 @@
+animal=Pony
diff --git a/tests/sources/generic/project/files/sub-project/project.conf b/tests/sources/generic/project/files/sub-project/project.conf
new file mode 100644
index 000000000..bbb8414a3
--- /dev/null
+++ b/tests/sources/generic/project/files/sub-project/project.conf
@@ -0,0 +1,4 @@
+# Project config for frontend build test
+name: subtest
+
+element-path: elements
diff --git a/tests/sources/generic/project/project.conf b/tests/sources/generic/project/project.conf
new file mode 100644
index 000000000..05b68bfeb
--- /dev/null
+++ b/tests/sources/generic/project/project.conf
@@ -0,0 +1,27 @@
+# Project config for frontend build test
+name: test
+element-path: elements
+aliases:
+ alpine: https://bst-integration-test-images.ams3.cdn.digitaloceanspaces.com/
+ project_dir: file://{project_dir}
+options:
+ linux:
+ type: bool
+ description: Whether to expect a linux platform
+ default: True
+ arch:
+ type: arch
+ description: Current architecture
+ values:
+ - x86-64
+ - aarch64
+split-rules:
+ test:
+ - |
+ /tests
+ - |
+ /tests/*
+
+fatal-warnings:
+- bad-element-suffix
+- bad-characters-in-name
diff --git a/tests/sources/generic/source_determinism.py b/tests/sources/generic/source_determinism.py
new file mode 100644
index 000000000..98c4c9835
--- /dev/null
+++ b/tests/sources/generic/source_determinism.py
@@ -0,0 +1,116 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import os
+import pytest
+
+from tests.testutils import create_repo, ALL_REPO_KINDS
+from tests.testutils.site import HAVE_SANDBOX
+
+from buildstream.plugintestutils import cli
+from buildstream import _yaml
+
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
+
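+# Helpers to create files and directories with explicit permission bits,
+# so the test can verify that staged sources yield the same permissions
+# regardless of the umask in effect when they were created.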
+def create_test_file(*path, mode=0o644, content='content\n'):
+ path = os.path.join(*path)
+ os.makedirs(os.path.dirname(path), exist_ok=True)
+ with open(path, 'w') as f:
+ f.write(content)
+ os.fchmod(f.fileno(), mode)
+
+
+def create_test_directory(*path, mode=0o644):
+ create_test_file(*path, '.keep', content='')
+ path = os.path.join(*path)
+ os.chmod(path, mode)
+
+
+@pytest.mark.integration
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", ['local', *ALL_REPO_KINDS])
+@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
+ project = str(datafiles)
+ element_name = 'list.bst'
+ element_path = os.path.join(project, 'elements', element_name)
+ repodir = os.path.join(str(tmpdir), 'repo')
+ sourcedir = os.path.join(project, 'source')
+
+ create_test_file(sourcedir, 'a.txt', mode=0o700)
+ create_test_file(sourcedir, 'b.txt', mode=0o755)
+ create_test_file(sourcedir, 'c.txt', mode=0o600)
+ create_test_file(sourcedir, 'd.txt', mode=0o400)
+ create_test_file(sourcedir, 'e.txt', mode=0o644)
+ create_test_file(sourcedir, 'f.txt', mode=0o4755)
+ create_test_file(sourcedir, 'g.txt', mode=0o2755)
+ create_test_file(sourcedir, 'h.txt', mode=0o1755)
+ create_test_directory(sourcedir, 'dir-a', mode=0o0700)
+ create_test_directory(sourcedir, 'dir-c', mode=0o0755)
+ create_test_directory(sourcedir, 'dir-d', mode=0o4755)
+ create_test_directory(sourcedir, 'dir-e', mode=0o2755)
+ create_test_directory(sourcedir, 'dir-f', mode=0o1755)
+
+ if kind == 'local':
+ source = {'kind': 'local',
+ 'path': 'source'}
+ else:
+ repo = create_repo(kind, repodir)
+ ref = repo.create(sourcedir)
+ source = repo.source_config(ref=ref)
+ element = {
+ 'kind': 'manual',
+ 'depends': [
+ {
+ 'filename': 'base.bst',
+ 'type': 'build'
+ }
+ ],
+ 'sources': [
+ source
+ ],
+ 'config': {
+ 'install-commands': [
+ 'ls -l >"%{install-root}/ls-l"'
+ ]
+ }
+ }
+ _yaml.dump(element, element_path)
+
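+ # Build the element and check it out under the given umask, returning
+ # the 'ls -l' listing recorded inside the sandbox at install time.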
+ def get_value_for_umask(umask):
+ checkoutdir = os.path.join(str(tmpdir), 'checkout-{}'.format(umask))
+
+ old_umask = os.umask(umask)
+
+ try:
+ result = cli.run(project=project, args=['build', element_name])
+ result.assert_success()
+
+ result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkoutdir])
+ result.assert_success()
+
+ with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f:
+ return f.read()
+ finally:
+ os.umask(old_umask)
+ cli.remove_artifact_from_cache(project, element_name)
+
+ assert get_value_for_umask(0o022) == get_value_for_umask(0o077)
diff --git a/tests/sources/generic/track.py b/tests/sources/generic/track.py
new file mode 100644
index 000000000..9764bfe76
--- /dev/null
+++ b/tests/sources/generic/track.py
@@ -0,0 +1,412 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import os
+import pytest
+
+from tests.testutils import create_repo, ALL_REPO_KINDS, generate_junction
+from tests.frontend import configure_project
+
+from buildstream.plugintestutils import cli
+from buildstream import _yaml
+from buildstream._exceptions import ErrorDomain
+
+
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
+
+def generate_element(repo, element_path, dep_name=None):
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ repo.source_config()
+ ]
+ }
+ if dep_name:
+ element['depends'] = [dep_name]
+
+ _yaml.dump(element, element_path)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track(cli, tmpdir, datafiles, ref_storage, kind):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ dev_files_path = os.path.join(project, 'files', 'dev-files')
+ element_path = os.path.join(project, 'elements')
+ element_name = 'track-test-{}.bst'.format(kind)
+
+ configure_project(project, {
+ 'ref-storage': ref_storage
+ })
+
+ # Create our repo object of the given source type
+ # with the dev files.
+ #
+ repo = create_repo(kind, str(tmpdir))
+ repo.create(dev_files_path)
+
+ # Generate the element
+ generate_element(repo, os.path.join(element_path, element_name))
+
+ # Assert that a fetch is needed
+ assert cli.get_element_state(project, element_name) == 'no reference'
+
+ # Now first try to track it
+ result = cli.run(project=project, args=['source', 'track', element_name])
+ result.assert_success()
+
+ # And now fetch it: The Source has probably already cached the
+ # latest ref locally, but it is not required to have cached
+ # the associated content of the latest ref at track time; that
+ # is the job of fetch.
+ result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+ # Assert that we are now buildable because the source is
+ # now cached.
+ assert cli.get_element_state(project, element_name) == 'buildable'
+
+ # Assert there was a project.refs created, depending on the configuration
+ if ref_storage == 'project.refs':
+ assert os.path.exists(os.path.join(project, 'project.refs'))
+ else:
+ assert not os.path.exists(os.path.join(project, 'project.refs'))
+
+
+# NOTE:
+#
+# This test checks that recursive tracking works by observing
+# element states after running a recursive tracking operation.
+#
+# However, this test is ALSO valuable as it stresses the source
+# plugins in a situation where many source plugins are operating
+# at once on the same backing repository.
+#
+# Do not change this test to use a separate 'Repo' per element,
+# as that would defeat the purpose of the stress test; if that
+# behaviour is needed, please refactor it into another test.
+#
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("amount", [(1), (10)])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ dev_files_path = os.path.join(project, 'files', 'dev-files')
+ element_path = os.path.join(project, 'elements')
+
+ # Try to actually launch as many fetch jobs as possible at the same time
+ #
+ # This stresses the Source plugins and helps to ensure that
+ # they handle concurrent access to the store correctly.
+ cli.configure({
+ 'scheduler': {
+ 'fetchers': amount,
+ }
+ })
+
+ # Create our repo object of the given source type
+ # with the dev files.
+ #
+ repo = create_repo(kind, str(tmpdir))
+ repo.create(dev_files_path)
+
+ # Write out our test targets
+ element_names = []
+ last_element_name = None
+ for i in range(amount + 1):
+ element_name = 'track-test-{}-{}.bst'.format(kind, i + 1)
+ filename = os.path.join(element_path, element_name)
+
+ element_names.append(element_name)
+
+ generate_element(repo, filename, dep_name=last_element_name)
+ last_element_name = element_name
+
+ # Assert that a fetch is needed
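+ # (querying the last element also reports the state of its dependencies)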
+ states = cli.get_element_states(project, [last_element_name])
+ for element_name in element_names:
+ assert states[element_name] == 'no reference'
+
+ # Now first try to track it
+ result = cli.run(project=project, args=[
+ 'source', 'track', '--deps', 'all',
+ last_element_name])
+ result.assert_success()
+
+ # And now fetch it: The Source has probably already cached the
+ # latest ref locally, but it is not required to have cached
+ # the associated content of the latest ref at track time; that
+ # is the job of fetch.
+ result = cli.run(project=project, args=[
+ 'source', 'fetch', '--deps', 'all',
+ last_element_name])
+ result.assert_success()
+
+ # Assert that the base is buildable and the rest are waiting
+ states = cli.get_element_states(project, [last_element_name])
+ for element_name in element_names:
+ if element_name == element_names[0]:
+ assert states[element_name] == 'buildable'
+ else:
+ assert states[element_name] == 'waiting'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_recurse_except(cli, tmpdir, datafiles, kind):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ dev_files_path = os.path.join(project, 'files', 'dev-files')
+ element_path = os.path.join(project, 'elements')
+ element_dep_name = 'track-test-dep-{}.bst'.format(kind)
+ element_target_name = 'track-test-target-{}.bst'.format(kind)
+
+ # Create our repo object of the given source type
+ # with the dev files.
+ #
+ repo = create_repo(kind, str(tmpdir))
+ repo.create(dev_files_path)
+
+ # Write out our test targets
+ generate_element(repo, os.path.join(element_path, element_dep_name))
+ generate_element(repo, os.path.join(element_path, element_target_name),
+ dep_name=element_dep_name)
+
+ # Assert that a fetch is needed
+ states = cli.get_element_states(project, [element_target_name])
+ assert states[element_dep_name] == 'no reference'
+ assert states[element_target_name] == 'no reference'
+
+ # Now first try to track it
+ result = cli.run(project=project, args=[
+ 'source', 'track', '--deps', 'all', '--except', element_dep_name,
+ element_target_name])
+ result.assert_success()
+
+ # And now fetch it: The Source has probably already cached the
+ # latest ref locally, but it is not required to have cached
+ # the associated content of the latest ref at track time; that
+ # is the job of fetch.
+ result = cli.run(project=project, args=[
+ 'source', 'fetch', '--deps', 'none',
+ element_target_name])
+ result.assert_success()
+
+ # Assert that the dependency is buildable and the target is waiting
+ states = cli.get_element_states(project, [element_target_name])
+ assert states[element_dep_name] == 'no reference'
+ assert states[element_target_name] == 'waiting'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ subproject_path = os.path.join(project, 'files', 'sub-project')
+ junction_path = os.path.join(project, 'elements', 'junction.bst')
+ etc_files = os.path.join(subproject_path, 'files', 'etc-files')
+ repo_element_path = os.path.join(subproject_path, 'elements',
+ 'import-etc-repo.bst')
+
+ configure_project(project, {
+ 'ref-storage': ref_storage
+ })
+
+ repo = create_repo(kind, str(tmpdir.join('element_repo')))
+ repo.create(etc_files)
+
+ generate_element(repo, repo_element_path)
+
+ generate_junction(str(tmpdir.join('junction_repo')),
+ subproject_path, junction_path, store_ref=False)
+
+ # Track the junction itself first.
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ result.assert_success()
+
+ assert cli.get_element_state(project, 'junction.bst:import-etc-repo.bst') == 'no reference'
+
+ # Track the cross junction element. -J is not given, it is implied.
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst:import-etc-repo.bst'])
+
+ if ref_storage == 'inline':
+ # Tracking cross-junction elements is not allowed without project.refs.
+ result.assert_main_error(ErrorDomain.PIPELINE, 'untrackable-sources')
+ else:
+ result.assert_success()
+
+ assert cli.get_element_state(project, 'junction.bst:import-etc-repo.bst') == 'buildable'
+
+ assert os.path.exists(os.path.join(project, 'project.refs'))
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_include(cli, tmpdir, datafiles, ref_storage, kind):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ dev_files_path = os.path.join(project, 'files', 'dev-files')
+ element_path = os.path.join(project, 'elements')
+ element_name = 'track-test-{}.bst'.format(kind)
+
+ configure_project(project, {
+ 'ref-storage': ref_storage
+ })
+
+ # Create our repo object of the given source type with
+ # the dev files, and then collect the initial ref.
+ #
+ repo = create_repo(kind, str(tmpdir))
+ ref = repo.create(dev_files_path)
+
+ # Generate the element
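+ # The element's sources live in an included fragment, so tracking
+ # must write the new ref back into sources.yml rather than the element.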
+ element = {
+ 'kind': 'import',
+ '(@)': ['elements/sources.yml']
+ }
+ sources = {
+ 'sources': [
+ repo.source_config()
+ ]
+ }
+
+ _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.dump(sources, os.path.join(element_path, 'sources.yml'))
+
+ # Assert that a fetch is needed
+ assert cli.get_element_state(project, element_name) == 'no reference'
+
+ # Now first try to track it
+ result = cli.run(project=project, args=['source', 'track', element_name])
+ result.assert_success()
+
+ # And now fetch it: The Source has probably already cached the
+ # latest ref locally, but it is not required to have cached
+ # the associated content of the latest ref at track time; that
+ # is the job of fetch.
+ result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+ # Assert that we are now buildable because the source is
+ # now cached.
+ assert cli.get_element_state(project, element_name) == 'buildable'
+
+ # Assert there was a project.refs created, depending on the configuration
+ if ref_storage == 'project.refs':
+ assert os.path.exists(os.path.join(project, 'project.refs'))
+ else:
+ assert not os.path.exists(os.path.join(project, 'project.refs'))
+ new_sources = _yaml.load(os.path.join(element_path, 'sources.yml'))
+ assert 'sources' in new_sources
+ assert len(new_sources['sources']) == 1
+ assert 'ref' in new_sources['sources'][0]
+ assert ref == new_sources['sources'][0]['ref']
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ dev_files_path = os.path.join(project, 'files', 'dev-files')
+ element_path = os.path.join(project, 'elements')
+ element_name = 'track-test-{}.bst'.format(kind)
+ subproject_path = os.path.join(project, 'files', 'sub-project')
+ sub_element_path = os.path.join(subproject_path, 'elements')
+ junction_path = os.path.join(element_path, 'junction.bst')
+
+ configure_project(project, {
+ 'ref-storage': ref_storage
+ })
+
+ # Create our repo object of the given source type
+ # with the dev files.
+ #
+ repo = create_repo(kind, str(tmpdir.join('element_repo')))
+ repo.create(dev_files_path)
+
+ # Generate the element
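+ # Here the sources fragment lives in the junctioned sub-project, which
+ # inline ref-storage cannot write back to; only project.refs can.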
+ element = {
+ 'kind': 'import',
+ '(@)': ['junction.bst:elements/sources.yml']
+ }
+ sources = {
+ 'sources': [
+ repo.source_config()
+ ]
+ }
+
+ _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.dump(sources, os.path.join(sub_element_path, 'sources.yml'))
+
+ generate_junction(str(tmpdir.join('junction_repo')),
+ subproject_path, junction_path, store_ref=True)
+
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ result.assert_success()
+
+ # Assert that a fetch is needed
+ assert cli.get_element_state(project, element_name) == 'no reference'
+
+ # Now first try to track it
+ result = cli.run(project=project, args=['source', 'track', element_name])
+
+ # Depending on the configuration, expect either a warning or a project.refs file
+ if ref_storage == 'inline':
+ # FIXME: We should expect an error. But only a warning is emitted
+ # result.assert_main_error(ErrorDomain.SOURCE, 'tracking-junction-fragment')
+
+ assert 'junction.bst:elements/sources.yml: Cannot track source in a fragment from a junction' in result.stderr
+ else:
+ assert os.path.exists(os.path.join(project, 'project.refs'))
+
+ # And now fetch it: The Source has probably already cached the
+ # latest ref locally, but it is not required to have cached
+ # the associated content of the latest ref at track time; that
+ # is the job of fetch.
+ result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+ # Assert that we are now buildable because the source is
+ # now cached.
+ assert cli.get_element_state(project, element_name) == 'buildable'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_junction_included(cli, tmpdir, datafiles, ref_storage, kind):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ element_path = os.path.join(project, 'elements')
+ subproject_path = os.path.join(project, 'files', 'sub-project')
+ junction_path = os.path.join(element_path, 'junction.bst')
+
+ configure_project(project, {
+ 'ref-storage': ref_storage,
+ '(@)': ['junction.bst:test.yml']
+ })
+
+ generate_junction(str(tmpdir.join('junction_repo')),
+ subproject_path, junction_path, store_ref=False)
+
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ result.assert_success()
diff --git a/tests/frontend/track_cross_junction.py b/tests/sources/generic/track_cross_junction.py
index e01ea81d9..6a2a8001f 100644
--- a/tests/frontend/track_cross_junction.py
+++ b/tests/sources/generic/track_cross_junction.py
@@ -1,10 +1,33 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+
import os
import pytest
+
from tests.testutils import create_repo, ALL_REPO_KINDS, generate_junction
from buildstream.plugintestutils import cli
from buildstream import _yaml
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
def generate_element(repo, element_path, dep_name=None):
element = {
diff --git a/tests/sources/generic/workspace.py b/tests/sources/generic/workspace.py
new file mode 100644
index 000000000..ee84676de
--- /dev/null
+++ b/tests/sources/generic/workspace.py
@@ -0,0 +1,159 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import os
+import shutil
+import pytest
+
+from tests.testutils import create_repo, ALL_REPO_KINDS
+
+from buildstream.plugintestutils import cli
+from buildstream import _yaml
+
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
+
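+# Helper which generates a workspace test element for each requested
+# source kind and opens workspaces on them, asserting sensible element
+# states along the way.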
+class WorkspaceCreator():
+ def __init__(self, cli, tmpdir, datafiles, project_path=None):
+ self.cli = cli
+ self.tmpdir = tmpdir
+ self.datafiles = datafiles
+
+ if not project_path:
+ project_path = os.path.join(datafiles.dirname, datafiles.basename)
+ else:
+ shutil.copytree(os.path.join(datafiles.dirname, datafiles.basename), project_path)
+
+ self.project_path = project_path
+ self.bin_files_path = os.path.join(project_path, 'files', 'bin-files')
+
+ self.workspace_cmd = os.path.join(self.project_path, 'workspace_cmd')
+
+ def create_workspace_element(self, kind, track, suffix='', workspace_dir=None,
+ element_attrs=None):
+ element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
+ element_path = os.path.join(self.project_path, 'elements')
+ if not workspace_dir:
+ workspace_dir = os.path.join(self.workspace_cmd, element_name)
+ if workspace_dir.endswith('.bst'):
+ workspace_dir = workspace_dir[:-4]
+
+ # Create our repo object of the given source type with
+ # the bin files, and then collect the initial ref.
+ repo = create_repo(kind, str(self.tmpdir))
+ ref = repo.create(self.bin_files_path)
+ if track:
+ ref = None
+
+ # Write out our test target
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ repo.source_config(ref=ref)
+ ]
+ }
+ if element_attrs:
+ element = {**element, **element_attrs}
+ _yaml.dump(element,
+ os.path.join(element_path,
+ element_name))
+ return element_name, element_path, workspace_dir
+
+ def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None,
+ element_attrs=None):
+
+ element_tuples = []
+
+ if suffixs is None:
+ suffixs = ['', ] * len(kinds)
+ elif len(suffixs) != len(kinds):
+ raise ValueError("suffixs must be the same length as kinds")
+
+ for suffix, kind in zip(suffixs, kinds):
+ element_name, element_path, workspace_dir = \
+ self.create_workspace_element(kind, track, suffix, workspace_dir_usr,
+ element_attrs)
+ element_tuples.append((element_name, workspace_dir))
+
+ # Assert that the elements still need a track (no reference) or a fetch
+ states = self.cli.get_element_states(self.project_path, [
+ e for e, _ in element_tuples
+ ])
+ if track:
+ assert all(states[e] == 'no reference' for e, _ in element_tuples)
+ else:
+ assert all(states[e] == 'fetch needed' for e, _ in element_tuples)
+
+ return element_tuples
+
+ def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None,
+ element_attrs=None, no_checkout=False):
+
+ element_tuples = self.create_workspace_elements(kinds, track, suffixs, workspace_dir,
+ element_attrs)
+ os.makedirs(self.workspace_cmd, exist_ok=True)
+
+ # Now open the workspace, this should have the effect of automatically
+ # tracking & fetching the source from the repo.
+ args = ['workspace', 'open']
+ if track:
+ args.append('--track')
+ if no_checkout:
+ args.append('--no-checkout')
+ if workspace_dir is not None:
+ assert len(element_tuples) == 1, "test logic error"
+ _, workspace_dir = element_tuples[0]
+ args.extend(['--directory', workspace_dir])
+
+ args.extend([element_name for element_name, _ in element_tuples])
+ result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args)
+
+ result.assert_success()
+
+ if not no_checkout:
+ # Assert that we are now buildable because the source is now cached.
+ states = self.cli.get_element_states(self.project_path, [
+ e for e, _ in element_tuples
+ ])
+ assert all(states[e] == 'buildable' for e, _ in element_tuples)
+
+ # Check that the executable hello file is found in each workspace
+ for element_name, workspace_dir in element_tuples:
+ filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
+ assert os.path.exists(filename)
+
+ return element_tuples
+
+
+def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
+ project_path=None, element_attrs=None, no_checkout=False):
+ workspace_object = WorkspaceCreator(cli, tmpdir, datafiles, project_path)
+ workspaces = workspace_object.open_workspaces((kind, ), track, (suffix, ), workspace_dir,
+ element_attrs, no_checkout)
+ assert len(workspaces) == 1
+ element_name, workspace = workspaces[0]
+ return element_name, workspace_object.project_path, workspace
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", ALL_REPO_KINDS)
+def test_open(cli, tmpdir, datafiles, kind):
+ open_workspace(cli, tmpdir, datafiles, kind, False)
diff --git a/tests/sources/project/elements/base.bst b/tests/sources/project/elements/base.bst
new file mode 100644
index 000000000..428afa736
--- /dev/null
+++ b/tests/sources/project/elements/base.bst
@@ -0,0 +1,5 @@
+# elements/base.bst
+
+kind: stack
+depends:
+ - base/base-alpine.bst
diff --git a/tests/sources/project/elements/base/base-alpine.bst b/tests/sources/project/elements/base/base-alpine.bst
new file mode 100644
index 000000000..c5833095d
--- /dev/null
+++ b/tests/sources/project/elements/base/base-alpine.bst
@@ -0,0 +1,17 @@
+kind: import
+
+description: |
+ Alpine Linux base for tests
+
+ Generated using the `tests/integration-tests/base/generate-base.sh` script.
+
+sources:
+ - kind: tar
+ base-dir: ''
+ (?):
+ - arch == "x86-64":
+ ref: 3eb559250ba82b64a68d86d0636a6b127aa5f6d25d3601a79f79214dc9703639
+ url: "alpine:integration-tests-base.v1.x86_64.tar.xz"
+ - arch == "aarch64":
+ ref: 431fb5362032ede6f172e70a3258354a8fd71fcbdeb1edebc0e20968c792329a
+ url: "alpine:integration-tests-base.v1.aarch64.tar.xz"