author    Phillip Smyth <knownexus@gmail.com>  2017-11-30 16:25:31 +0000
committer Phillip Smyth <knownexus@gmail.com>  2017-11-30 16:25:31 +0000
commit    410377b782d6f1287a7961b426ae3d2582ce1773 (patch)
tree      8c5c7386b80500c322c1fd6db0fd8b063a0af61c
parent    58e5d38b99ed0c7cc0a537f7e12b60c9eee253c7 (diff)
parent    d58d12e337697a02d74a51fdd9414928d9858b9c (diff)
Merge branch 'master' of https://gitlab.com/BuildStream/buildstream into invoking_page_changes
-rw-r--r--  .coveragerc                                             |  1
-rw-r--r--  .gitlab-ci.yml                                          | 89
-rw-r--r--  buildstream/_artifactcache/artifactcache.py             | 55
-rw-r--r--  buildstream/_artifactcache/ostreecache.py               | 65
-rw-r--r--  buildstream/_artifactcache/pushreceive.py               | 78
-rw-r--r--  buildstream/_context.py                                 | 16
-rw-r--r--  buildstream/_frontend/main.py                           | 12
-rw-r--r--  buildstream/_loader.py                                  | 26
-rw-r--r--  buildstream/_options/optionpool.py                      | 17
-rw-r--r--  buildstream/_pipeline.py                                | 39
-rw-r--r--  buildstream/_project.py                                 |  9
-rw-r--r--  buildstream/data/userconfig.yaml                        | 13
-rw-r--r--  buildstream/plugin.py                                   | 12
-rw-r--r--  buildstream/plugins/elements/compose.py                 |  4
-rw-r--r--  buildstream/plugins/sources/_downloadablefilesource.py  | 20
-rw-r--r--  buildstream/plugins/sources/tar.py                      |  4
-rwxr-xr-x  contrib/bst-here                                        | 12
-rw-r--r--  doc/source/artifacts.rst                                | 15
-rw-r--r--  doc/source/config.rst                                   |  4
-rw-r--r--  doc/source/projectconf.rst                              | 10
-rwxr-xr-x  setup.py                                                |  4
-rw-r--r--  tests/frontend/pull.py                                  | 12
-rw-r--r--  tests/frontend/push.py                                  | 12
23 files changed, 300 insertions, 229 deletions
diff --git a/.coveragerc b/.coveragerc
index ec8cd78d6..6014b7fd0 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -6,6 +6,7 @@ include =
omit =
# Omit profiling helper module
*/buildstream/_profile.py
+ */.eggs/*
[report]
show_missing = True
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 66674e839..c4af56754 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,10 +1,11 @@
-image: buildstream/buildstream-fedora:latest
+image: buildstream/buildstream-fedora:master-26-3fd7fe1
cache:
paths:
- cache/buildstream/sources/
stages:
+ - dist
- test
- coverage
- docs
@@ -20,19 +21,64 @@ before_script:
- adduser -m buildstream
- chown -R buildstream:buildstream .
+# Create a source distribution
+#
+distcheck:
+ stage: dist
+ script:
+
+ # Generate the source distribution tarball
+ #
+ - python3 setup.py sdist
+ - tar -ztf dist/*
+ - tarball=$(cd dist && echo $(ls *))
+
+ # Create an installer script
+ - |
+ cat > dist/install.sh << EOF
+ #!/bin/sh
+ tar -zxf ${tarball}
+ cd ${tarball%.tar.gz}
+ pip3 install --no-index .
+ EOF
+
+ # Unpack the tarball into a `dist/buildstream` directory
+ - |
+ cat > dist/unpack.sh << EOF
+ #!/bin/sh
+ tar -zxf ${tarball}
+ mv ${tarball%.tar.gz} buildstream
+ EOF
+
+ # Make our helpers executable
+ - chmod +x dist/install.sh
+ - chmod +x dist/unpack.sh
+ artifacts:
+ paths:
+ - dist/
+
# Run premerge commits
#
-pytest:
+pytest_linux:
stage: test
script:
- # We run as a simple user to test for permission issues
+
+ # Unpack and get into dist/buildstream
+ - cd dist && ./unpack.sh
+ - chown -R buildstream:buildstream buildstream
+ - cd buildstream
+
+ # Run the tests from the source distribution. We run as a simple
+ # user to test for permission issues
- su buildstream -c 'python3 setup.py test --index-url invalid://uri'
- - mkdir -p coverage-pytest/
- - cp .coverage.* coverage-pytest/coverage.pytest
+ # Go back to the toplevel and collect our reports
+ - cd ../..
+ - mkdir -p coverage-pytest-linux/
+ - cp dist/buildstream/.coverage.* coverage-pytest-linux/coverage.pytest-linux
artifacts:
paths:
- - coverage-pytest/
+ - coverage-pytest-linux/
# Run integration tests
#
@@ -40,7 +86,7 @@ integration_linux:
stage: test
script:
- - pip3 install --no-index .
+ - cd dist && ./install.sh && cd ..
- cd integration-tests
# We run as a simple user to test for permission issues
@@ -56,6 +102,9 @@ integration_linux:
- coverage-linux/
- logs-linux/
+ dependencies:
+ - distcheck
+
pytest_unix:
stage: test
variables:
@@ -67,11 +116,16 @@ pytest_unix:
- dnf mark install fuse-libs
- dnf erase -y bubblewrap ostree
+ # Unpack and get into dist/buildstream
+ - cd dist && ./unpack.sh && cd buildstream
+
# Since the unix platform is required to run as root, no user change required
- python3 setup.py test --index-url invalid://uri
+ # Go back to the toplevel and collect our reports
+ - cd ../..
- mkdir -p coverage-pytest-unix
- - cp .coverage.* coverage-pytest-unix/coverage.pytest-unix
+ - cp dist/buildstream/.coverage.* coverage-pytest-unix/coverage.pytest-unix
artifacts:
paths:
- coverage-pytest-unix/
@@ -81,7 +135,7 @@ integration_unix:
variables:
BST_FORCE_BACKEND: "unix"
script:
- - pip3 install --no-index .
+ - cd dist && ./install.sh && cd ..
- cd integration-tests
# Since the unix platform is required to run as root, no user change required
@@ -97,6 +151,9 @@ integration_unix:
- coverage-unix/
- logs-unix/
+ dependencies:
+ - distcheck
+
# Collate coverage reports
#
coverage:
@@ -108,13 +165,13 @@ coverage:
- cp ../coverage-linux/coverage.linux .coverage
- cp ../coverage-unix/coverage.unix .
- coverage combine --rcfile=../.coveragerc -a ../coverage-unix/coverage.unix
- - cp ../coverage-pytest/coverage.pytest .
- - coverage combine --rcfile=../.coveragerc -a coverage.pytest
+ - cp ../coverage-pytest-linux/coverage.pytest-linux .
+ - coverage combine --rcfile=../.coveragerc -a coverage.pytest-linux
- cp ../coverage-pytest-unix/coverage.pytest-unix .
- coverage combine --rcfile=../.coveragerc -a coverage.pytest-unix
- coverage report --rcfile=../.coveragerc -m
dependencies:
- - pytest
+ - pytest_linux
- integration_linux
- pytest_unix
- integration_unix
@@ -131,11 +188,15 @@ pages:
- dnf install -y python2
- pip3 install sphinx
- pip3 install sphinx-click
- - pip3 install --user .
+ - cd dist && ./unpack.sh && cd buildstream
+ - pip3 install .
- make -C doc
- - mv doc/build/html public
+ - cd ../..
+ - mv dist/buildstream/doc/build/html public
artifacts:
paths:
- public/
only:
- master
+ dependencies:
+ - distcheck
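
A subtlety in the distcheck job above: the heredocs are unquoted, so ${tarball} is expanded when the helper scripts are written, baking the concrete tarball name into dist/install.sh and dist/unpack.sh; downstream jobs can then run them without knowing the name. A rough Python equivalent of what distcheck produces, a sketch only, using the same paths and names as the job:

    import glob
    import os
    import stat
    import subprocess

    subprocess.check_call(['python3', 'setup.py', 'sdist'])
    tarball = os.path.basename(glob.glob('dist/*.tar.gz')[0])
    name = tarball[:-len('.tar.gz')]

    # The tarball name is fixed at generation time, exactly like the
    # unquoted ${tarball} expansion in the CI heredocs.
    helpers = {
        'install.sh': "#!/bin/sh\ntar -zxf {t}\ncd {n}\npip3 install --no-index .\n",
        'unpack.sh': "#!/bin/sh\ntar -zxf {t}\nmv {n} buildstream\n",
    }
    for script, body in helpers.items():
        path = os.path.join('dist', script)
        with open(path, 'w') as f:
            f.write(body.format(t=tarball, n=name))
        # Mark the helpers executable, as `chmod +x` does in the job
        os.chmod(path, os.stat(path).st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)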
diff --git a/buildstream/_artifactcache/artifactcache.py b/buildstream/_artifactcache/artifactcache.py
index ac4f6080d..1283b37ab 100644
--- a/buildstream/_artifactcache/artifactcache.py
+++ b/buildstream/_artifactcache/artifactcache.py
@@ -40,48 +40,39 @@ class ArtifactCache():
os.makedirs(context.artifactdir, exist_ok=True)
self.extractdir = os.path.join(context.artifactdir, 'extract')
- self._pull_local = False
- self._push_local = False
+ self._local = False
project_overrides = context._get_overrides(project.name)
artifact_overrides = _yaml.node_get(project_overrides, Mapping, 'artifacts', default_value={})
- override_pull = _yaml.node_get(artifact_overrides, str, 'pull-url', default_value='') or None
- override_push = _yaml.node_get(artifact_overrides, str, 'push-url', default_value='') or None
- override_push_port = _yaml.node_get(artifact_overrides, int, 'push-port', default_value=22)
+ override_url = _yaml.node_get(artifact_overrides, str, 'url', default_value='') or None
- _yaml.node_validate(artifact_overrides, ['pull-url', 'push-url', 'push-port'])
-
- if override_pull or override_push:
- self.artifact_pull = override_pull
- self.artifact_push = override_push
- self.artifact_push_port = override_push_port
-
- elif any((project.artifact_pull, project.artifact_push)):
- self.artifact_pull = project.artifact_pull
- self.artifact_push = project.artifact_push
- self.artifact_push_port = project.artifact_push_port
+ _yaml.node_validate(artifact_overrides, ['url'])
+ if override_url:
+ self.url = override_url
+ elif project.artifact_url:
+ self.url = project.artifact_url
else:
- self.artifact_pull = context.artifact_pull
- self.artifact_push = context.artifact_push
- self.artifact_push_port = context.artifact_push_port
+ self.url = context.artifact_url
- if self.artifact_push:
- if self.artifact_push.startswith("/") or \
- self.artifact_push.startswith("file://"):
- self._push_local = True
+ if self.url:
+ if self.url.startswith('/') or self.url.startswith('file://'):
+ self._local = True
- if self.artifact_pull:
- if self.artifact_pull.startswith("/") or \
- self.artifact_pull.startswith("file://"):
- self._pull_local = True
-
- self.remote = utils.url_directory_name(self.artifact_pull)
+ self.remote = utils.url_directory_name(self.url)
else:
self.remote = None
self._offline = False
+ # initialize_remote():
+ #
+ # Initialize any remote artifact cache, if needed. This may require network
+ # access and could block for several seconds.
+ #
+ def initialize_remote(self):
+ pass
+
# contains():
#
# Check whether the artifact for the specified Element is already available
@@ -143,7 +134,7 @@ class ArtifactCache():
# Returns: True if remote repository is available, False otherwise
#
def can_fetch(self):
- return (not self._offline or self._pull_local) and \
+ return (not self._offline or self._local) and \
self.remote is not None
# can_push():
@@ -153,8 +144,8 @@ class ArtifactCache():
# Returns: True if remote repository is available, False otherwise
#
def can_push(self):
- return (not self._offline or self._push_local) and \
- self.artifact_push is not None
+ return (not self._offline or self._local) and \
+ self.url is not None
# remote_contains_key():
#
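
The change above collapses the separate pull/push/port settings into a single url with a three-level precedence: a per-project user override wins over the project's own configuration, which wins over the global user configuration. A minimal sketch of that resolution order, assuming plain string inputs rather than the real _yaml node API:

    def resolve_artifact_url(override_url, project_url, context_url):
        # Highest precedence: per-project override from the user's config
        if override_url:
            return override_url
        # Next: the URL declared in the project's project.conf
        if project_url:
            return project_url
        # Fallback: the global default from userconfig.yaml (may be None)
        return context_url

    def is_local(url):
        # Local caches need no network and remain usable while offline
        return url.startswith('/') or url.startswith('file://')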
diff --git a/buildstream/_artifactcache/ostreecache.py b/buildstream/_artifactcache/ostreecache.py
index e2e623e78..78796dc6c 100644
--- a/buildstream/_artifactcache/ostreecache.py
+++ b/buildstream/_artifactcache/ostreecache.py
@@ -29,7 +29,7 @@ from ..element import _KeyStrength
from .._ostree import OSTreeError
from . import ArtifactCache
-from .pushreceive import check_push_connection
+from .pushreceive import initialize_push_connection
from .pushreceive import push as push_artifact
from .pushreceive import PushException
@@ -69,25 +69,37 @@ class OSTreeCache(ArtifactCache):
ostreedir = os.path.join(context.artifactdir, 'ostree')
self.repo = _ostree.ensure(ostreedir, False)
- if self.artifact_pull:
- _ostree.configure_remote(self.repo, self.remote, self.artifact_pull)
+ self.push_url = None
+ self.pull_url = None
self._remote_refs = None
+ def initialize_remote(self):
+ if self.url is not None:
+ if self.url.startswith('ssh://'):
+ self.push_url = self.url
+ try:
+ # Contact the remote cache.
+ self.pull_url = initialize_push_connection(self.push_url)
+ except PushException as e:
+ raise ArtifactError("BuildStream did not connect successfully "
+ "to the shared cache: {}".format(e))
+ elif self.url.startswith('http://') or self.url.startswith('https://'):
+ self.push_url = None
+ self.pull_url = self.url
+ elif self._local:
+ self.push_url = self.url
+ self.pull_url = self.url
+ else:
+ raise ArtifactError("Unsupported URL scheme: {}".format(self.url))
+
+ _ostree.configure_remote(self.repo, self.remote, self.pull_url)
+
def can_push(self):
if self.enable_push:
- return super().can_push()
+ return (not self._offline or self._local) and self.push_url is not None
return False
- def preflight(self):
- if self.can_push() and not self.artifact_push.startswith("/"):
- try:
- check_push_connection(self.artifact_push,
- self.artifact_push_port)
- except PushException as e:
- raise ArtifactError("BuildStream will be unable to push artifacts "
- "to the shared cache: {}".format(e))
-
# contains():
#
# Check whether the artifact for the specified Element is already available
@@ -237,15 +249,13 @@ class OSTreeCache(ArtifactCache):
#
def pull(self, element, progress=None):
- if self._offline and not self._pull_local:
+ if self._offline and not self._local:
raise ArtifactError("Attempt to pull artifact while offline")
- if self.artifact_pull.startswith("/"):
- remote = "file://" + self.artifact_pull
- elif self.remote is not None:
- remote = self.remote
+ if self.pull_url.startswith("/"):
+ remote = "file://" + self.pull_url
else:
- raise ArtifactError("Attempt to pull artifact without any pull URL")
+ remote = self.remote
weak_ref = buildref(element, element._get_cache_key(strength=_KeyStrength.WEAK))
@@ -287,8 +297,8 @@ class OSTreeCache(ArtifactCache):
# Fetch list of artifacts from remote repository.
#
def fetch_remote_refs(self):
- if self.artifact_pull.startswith("/"):
- remote = "file://" + self.artifact_pull
+ if self.pull_url.startswith("/"):
+ remote = "file://" + self.pull_url
elif self.remote is not None:
remote = self.remote
else:
@@ -326,17 +336,17 @@ class OSTreeCache(ArtifactCache):
# (ArtifactError): if there was an error
def push(self, element):
- if self._offline and not self._push_local:
+ if self._offline and not self._local:
raise ArtifactError("Attempt to push artifact while offline")
- if self.artifact_push is None:
- raise ArtifactError("Attempt to push artifact without any push URL")
+ if self.push_url is None:
+ raise ArtifactError("The protocol in use does not support pushing.")
ref = buildref(element, element._get_cache_key_from_artifact())
weak_ref = buildref(element, element._get_cache_key(strength=_KeyStrength.WEAK))
- if self.artifact_push.startswith("/"):
+ if self.push_url.startswith("/"):
# local repository
- push_repo = _ostree.ensure(self.artifact_push, True)
+ push_repo = _ostree.ensure(self.push_url, True)
_ostree.fetch(push_repo, remote=self.repo.get_path().get_uri(), ref=ref)
_ostree.fetch(push_repo, remote=self.repo.get_path().get_uri(), ref=weak_ref)
@@ -360,8 +370,7 @@ class OSTreeCache(ArtifactCache):
element._output_file() as output_file:
try:
pushed = push_artifact(temp_repo.get_path().get_path(),
- self.artifact_push,
- self.artifact_push_port,
+ self.push_url,
[ref, weak_ref], output_file)
except PushException as e:
raise ArtifactError("Failed to push artifact {}: {}".format(ref, e)) from e
diff --git a/buildstream/_artifactcache/pushreceive.py b/buildstream/_artifactcache/pushreceive.py
index 9aef842a8..4df5bbe49 100644
--- a/buildstream/_artifactcache/pushreceive.py
+++ b/buildstream/_artifactcache/pushreceive.py
@@ -136,8 +136,8 @@ class PushMessageWriter(object):
self.file.flush()
def send_hello(self):
- # The 'hello' message is used to check connectivity, and is actually
- # an empty info request in order to keep the receiver code simple.
+ # The 'hello' message is used to check connectivity and discover the
+ # cache's pull URL. It's actually transmitted as an empty info request.
args = {
'mode': GLib.Variant('i', 0),
'refs': GLib.Variant('a{ss}', {})
@@ -145,7 +145,7 @@ class PushMessageWriter(object):
command = PushCommand(PushCommandType.info, args)
self.write(command)
- def send_info(self, repo, refs):
+ def send_info(self, repo, refs, pull_url=None):
cmdtype = PushCommandType.info
mode = repo.get_mode()
@@ -161,6 +161,12 @@ class PushMessageWriter(object):
'mode': GLib.Variant('i', mode),
'refs': GLib.Variant('a{ss}', ref_map)
}
+
+ # The server sends this so clients can discover the correct pull URL
+ # for this cache without requiring end-users to specify it.
+ if pull_url:
+ args['pull_url'] = GLib.Variant('s', pull_url)
+
command = PushCommand(cmdtype, args)
self.write(command)
@@ -309,7 +315,7 @@ class PushMessageReader(object):
return args
-def parse_remote_location(remotepath, remote_port):
+def parse_remote_location(remotepath):
"""Parse remote artifact cache URL that's been specified in our config."""
remote_host = remote_user = remote_repo = None
@@ -321,7 +327,7 @@ def parse_remote_location(remotepath, remote_port):
remote_host = url.hostname
remote_user = url.username
remote_repo = url.path
- remote_port = url.port
+ remote_port = url.port or 22
else:
# Scp/git style remote (user@hostname:path)
parts = remotepath.split('@', 1)
@@ -337,6 +343,8 @@ def parse_remote_location(remotepath, remote_port):
'contain a hostname and path separated '
'by ":"'.format(remotepath))
remote_host, remote_repo = parts
+ # This form doesn't make it possible to specify a non-standard port.
+ remote_port = 22
return remote_host, remote_user, remote_repo, remote_port
@@ -352,7 +360,7 @@ def ssh_commandline(remote_host, remote_user=None, remote_port=22):
class OSTreePusher(object):
- def __init__(self, repopath, remotepath, remote_port, branches=[], verbose=False,
+ def __init__(self, repopath, remotepath, branches=[], verbose=False,
debug=False, output=None):
self.repopath = repopath
self.remotepath = remotepath
@@ -361,7 +369,7 @@ class OSTreePusher(object):
self.output = output
self.remote_host, self.remote_user, self.remote_repo, self.remote_port = \
- parse_remote_location(remotepath, remote_port)
+ parse_remote_location(remotepath)
if self.repopath is None:
self.repo = OSTree.Repo.new_default()
@@ -511,9 +519,16 @@ class OSTreePusher(object):
return self.close()
+# OSTreeReceiver is on the receiving end of an OSTree push.
+#
+# Args:
+# repopath (str): On-disk location of the receiving repository.
+# pull_url (str): Redirection for clients who want to pull, not push.
+#
class OSTreeReceiver(object):
- def __init__(self, repopath):
+ def __init__(self, repopath, pull_url):
self.repopath = repopath
+ self.pull_url = pull_url
if self.repopath is None:
self.repo = OSTree.Repo.new_default()
@@ -552,7 +567,8 @@ class OSTreeReceiver(object):
remote_refs = args['refs']
# Send info back
- self.writer.send_info(self.repo, list(remote_refs.keys()))
+ self.writer.send_info(self.repo, list(remote_refs.keys()),
+ pull_url=self.pull_url)
# Wait for update or done command
cmdtype, args = self.reader.receive([PushCommandType.update,
@@ -606,20 +622,28 @@ class OSTreeReceiver(object):
return 0
-# check_push_connection()
+# initialize_push_connection()
+#
+# Test that we can connect to the remote bst-artifact-receive program, and
+# receive the pull URL for this artifact cache.
#
-# Test that we can connect to the remote bst-artifact-receive program.
# We don't want to make the user wait until the first artifact has been built
-# to discover that they actually cannot push.
+# to discover that they actually cannot push, so this should be called early.
+#
+# The SSH push protocol doesn't allow pulling artifacts. We don't want to
+# require users to specify two URLs for a single cache, so we have the push
+# protocol return the corresponding pull URL as part of the 'hello' response.
#
# Args:
# remote: The ssh remote url to push to
-# remote_port: The ssh port at the remote url
+#
+# Returns:
+# (str): The URL that should be used for pushing to this cache.
#
# Raises:
# PushException if there was an issue connecting to the remote.
-def check_push_connection(remote, remote_port):
- remote_host, remote_user, remote_repo, remote_port = parse_remote_location(remote, remote_port)
+def initialize_push_connection(remote):
+ remote_host, remote_user, remote_repo, remote_port = parse_remote_location(remote)
ssh_cmd = ssh_commandline(remote_host, remote_user, remote_port)
# We need a short timeout here because if 'remote' isn't reachable at
@@ -632,7 +656,18 @@ def check_push_connection(remote, remote_port):
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
writer = PushMessageWriter(ssh.stdin)
+ reader = PushMessageReader(ssh.stdout)
+
writer.send_hello()
+ args = reader.receive_info()
+
+ if 'pull_url' in args:
+ pull_url = args['pull_url']
+ else:
+ raise PushException(
+ "Remote cache did not tell us its pull URL. This cache probably "
+ "requires updating to a newer version of `bst-artifact-receive`.")
+
writer.send_done()
ssh.wait()
@@ -640,6 +675,8 @@ def check_push_connection(remote, remote_port):
error = ssh.stderr.read().decode('unicode-escape')
raise PushException(error)
+ return pull_url
+
# push()
#
@@ -648,7 +685,6 @@ def check_push_connection(remote, remote_port):
# Args:
# repo: The local repository path
# remote: The ssh remote url to push to
-# remote_port: The ssh port at the remote url
# branches: The refs to push
# output: The output where logging should go
#
@@ -659,12 +695,12 @@ def check_push_connection(remote, remote_port):
# Raises:
# PushException if there was an error
#
-def push(repo, remote, remote_port, branches, output):
+def push(repo, remote, branches, output):
logging.basicConfig(format='%(module)s: %(levelname)s: %(message)s',
level=logging.INFO, stream=output)
- pusher = OSTreePusher(repo, remote, remote_port, branches, True, False, output=output)
+ pusher = OSTreePusher(repo, remote, branches, True, False, output=output)
def terminate_push():
pusher.close()
@@ -691,8 +727,10 @@ def push(repo, remote, remote_port, branches, output):
@click.command(short_help="Receive pushed artifacts over ssh")
@click.option('--verbose', '-v', is_flag=True, default=False, help="Verbose mode")
@click.option('--debug', '-d', is_flag=True, default=False, help="Debug mode")
+@click.option('--pull-url', type=str, required=True,
+ help="Clients who try to pull over SSH will be redirected here")
@click.argument('repo')
-def receive_main(verbose, debug, repo):
+def receive_main(verbose, debug, pull_url, repo):
"""A BuildStream sister program for receiving artifacts send to a shared artifact cache
"""
loglevel = logging.WARNING
@@ -703,5 +741,5 @@ def receive_main(verbose, debug, repo):
logging.basicConfig(format='%(module)s: %(levelname)s: %(message)s',
level=loglevel, stream=sys.stderr)
- receiver = OSTreeReceiver(repo)
+ receiver = OSTreeReceiver(repo, pull_url)
return receiver.run()
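
The renamed initialize_push_connection() turns the old connectivity check into a small handshake: the client sends an empty info request (the 'hello'), and the receiver answers with an info message that now carries its pull_url. A rough sketch of the exchange over an already-open pair of streams, using hypothetical reader/writer objects shaped like PushMessageWriter and PushMessageReader:

    def handshake(writer, reader):
        # Client side: 'hello' is just an empty info request.
        writer.send_hello()
        args = reader.receive_info()
        # A current receiver includes its pull URL in the reply; an older
        # bst-artifact-receive will not, which we treat as an error.
        if 'pull_url' not in args:
            raise RuntimeError("Remote cache did not tell us its pull URL")
        writer.send_done()
        return args['pull_url']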
diff --git a/buildstream/_context.py b/buildstream/_context.py
index f85771bac..5fd4a428e 100644
--- a/buildstream/_context.py
+++ b/buildstream/_context.py
@@ -59,14 +59,8 @@ class Context():
# The local binary artifact cache directory
self.artifactdir = None
- # The URL from which to download prebuilt artifacts
- self.artifact_pull = None
-
- # The URL to upload built artifacts to
- self.artifact_push = None
-
- # The port number for pushing artifacts over ssh
- self.artifact_push_port = 22
+ # The URL from which to push and pull prebuilt artifacts
+ self.artifact_url = None
# The directory to store build logs
self.logdir = None
@@ -166,10 +160,8 @@ class Context():
# Load artifact share configuration
artifacts = _yaml.node_get(defaults, Mapping, 'artifacts')
- _yaml.node_validate(artifacts, ['pull-url', 'push-url', 'push-port'])
- self.artifact_pull = _yaml.node_get(artifacts, str, 'pull-url', default_value='') or None
- self.artifact_push = _yaml.node_get(artifacts, str, 'push-url', default_value='') or None
- self.artifact_push_port = _yaml.node_get(artifacts, int, 'push-port', default_value=22)
+ _yaml.node_validate(artifacts, ['url'])
+ self.artifact_url = _yaml.node_get(artifacts, str, 'url', default_value='') or None
# Load logging config
logging = _yaml.node_get(defaults, Mapping, 'logging')
diff --git a/buildstream/_frontend/main.py b/buildstream/_frontend/main.py
index e02582799..d70645887 100644
--- a/buildstream/_frontend/main.py
+++ b/buildstream/_frontend/main.py
@@ -201,7 +201,7 @@ def cli(context, **kwargs):
def build(app, elements, all, track):
"""Build elements in a pipeline"""
- app.initialize(elements, rewritable=track, inconsistent=track, fetch_remote_refs=True)
+ app.initialize(elements, rewritable=track, inconsistent=track, use_remote_cache=True)
app.print_heading()
try:
app.pipeline.build(app.scheduler, all, track)
@@ -316,7 +316,7 @@ def pull(app, elements, deps):
none: No dependencies, just the element itself
all: All dependencies
"""
- app.initialize(elements, fetch_remote_refs=True)
+ app.initialize(elements, use_remote_cache=True)
try:
to_pull = app.pipeline.deps_elements(deps)
app.pipeline.pull(app.scheduler, to_pull)
@@ -346,7 +346,7 @@ def push(app, elements, deps):
none: No dependencies, just the element itself
all: All dependencies
"""
- app.initialize(elements, fetch_remote_refs=True)
+ app.initialize(elements, use_remote_cache=True)
try:
to_push = app.pipeline.deps_elements(deps)
app.pipeline.push(app.scheduler, to_push)
@@ -425,7 +425,7 @@ def show(app, elements, deps, except_, order, format, downloadable):
bst show target.bst --format \\
$'---------- %{name} ----------\\n%{vars}'
"""
- app.initialize(elements, except_=except_, fetch_remote_refs=downloadable)
+ app.initialize(elements, except_=except_, use_remote_cache=downloadable)
try:
dependencies = app.pipeline.deps_elements(deps)
except PipelineError as e:
@@ -766,7 +766,7 @@ class App():
# Initialize the main pipeline
#
def initialize(self, elements, except_=tuple(), rewritable=False,
- inconsistent=False, fetch_remote_refs=False):
+ inconsistent=False, use_remote_cache=False):
profile_start(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, '-') for t in elements))
@@ -842,7 +842,7 @@ class App():
self.pipeline = Pipeline(self.context, self.project, elements, except_,
inconsistent=inconsistent,
rewritable=rewritable,
- fetch_remote_refs=fetch_remote_refs,
+ use_remote_cache=use_remote_cache,
load_ticker=self.load_ticker,
resolve_ticker=self.resolve_ticker,
remote_ticker=self.remote_ticker,
diff --git a/buildstream/_loader.py b/buildstream/_loader.py
index 39feeeb06..d96a1f2c6 100644
--- a/buildstream/_loader.py
+++ b/buildstream/_loader.py
@@ -58,11 +58,9 @@ class Symbol():
# A simple dependency object
#
class Dependency():
- def __init__(self, owner_name, name, filename=None,
+ def __init__(self, name,
dep_type=None, provenance=None):
- self.owner = owner_name
self.name = name
- self.filename = filename
self.dep_type = dep_type
self.provenance = provenance
@@ -73,16 +71,12 @@ class Dependency():
#
class LoadElement():
- def __init__(self, data, filename, basedir, elements):
+ def __init__(self, data, filename, elements):
- self.filename = filename
self.data = data
self.name = filename
self.elements = elements
- # These are shared with the owning Loader object
- self.basedir = basedir
-
# Ensure the root node is valid
_yaml.node_validate(self.data, [
'kind', 'depends', 'sources',
@@ -94,7 +88,7 @@ class LoadElement():
self.dep_cache = None
# Dependencies
- self.deps = extract_depends_from_node(self.name, self.data)
+ self.deps = extract_depends_from_node(self.data)
#############################################
# Routines used by the Loader #
@@ -134,7 +128,7 @@ class LoadElement():
# After extracting depends, they are removed from the data node
#
# Returns a normalized array of Dependency objects
-def extract_depends_from_node(owner, data):
+def extract_depends_from_node(data):
depends = _yaml.node_get(data, list, Symbol.DEPENDS, default_value=[])
output_deps = []
@@ -142,7 +136,7 @@ def extract_depends_from_node(owner, data):
dep_provenance = _yaml.node_get_provenance(data, key=Symbol.DEPENDS, indices=[depends.index(dep)])
if isinstance(dep, str):
- dependency = Dependency(owner, dep, filename=dep, provenance=dep_provenance)
+ dependency = Dependency(dep, provenance=dep_provenance)
elif isinstance(dep, Mapping):
_yaml.node_validate(dep, ['filename', 'type'])
@@ -158,7 +152,7 @@ def extract_depends_from_node(owner, data):
.format(provenance, dep_type))
filename = _yaml.node_get(dep, str, Symbol.FILENAME)
- dependency = Dependency(owner, filename, filename=filename,
+ dependency = Dependency(filename,
dep_type=dep_type, provenance=dep_provenance)
else:
@@ -241,7 +235,7 @@ class Loader():
# Set up a dummy element that depends on all top-level targets
# to resolve potential circular dependencies between them
DummyTarget = namedtuple('DummyTarget', ['name', 'deps'])
- dummy = DummyTarget(name='', deps=[self.elements[e] for e in self.targets])
+ dummy = DummyTarget(name='', deps=[Dependency(e) for e in self.targets])
self.elements[''] = dummy
profile_key = "_".join(t for t in self.targets)
@@ -282,13 +276,13 @@ class Loader():
data = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable)
self.options.process_node(data)
- element = LoadElement(data, filename, self.basedir, self.elements)
+ element = LoadElement(data, filename, self.elements)
self.elements[filename] = element
# Load all dependency files for the new LoadElement
for dep in element.deps:
- self.load_file(dep.filename, rewritable, ticker)
+ self.load_file(dep.name, rewritable, ticker)
return element
@@ -315,7 +309,7 @@ class Loader():
if check_elements.get(element_name) is not None:
raise LoadError(LoadErrorReason.CIRCULAR_DEPENDENCY,
"Circular dependency detected for element: {}"
- .format(element.filename))
+ .format(element.name))
# Push / Check each dependency / Pop
check_elements[element_name] = True
diff --git a/buildstream/_options/optionpool.py b/buildstream/_options/optionpool.py
index 90a404bfb..41990ce4a 100644
--- a/buildstream/_options/optionpool.py
+++ b/buildstream/_options/optionpool.py
@@ -74,17 +74,15 @@ class OptionPool():
option = opt_type(option_name, option_definition, self)
self.options[option_name] = option
- # load_values()
+ # load_yaml_values()
#
# Loads the option values specified in a key/value
- # dictionary loaded from YAML, and a list of tuples
- # collected from the command line
+ # dictionary loaded from YAML
#
# Args:
# node (dict): The loaded YAML options
- # cli_options (list): A list of (str, str) tuples
#
- def load_values(self, node, cli_options):
+ def load_yaml_values(self, node):
for option_name, _ in _yaml.node_items(node):
try:
option = self.options[option_name]
@@ -94,6 +92,15 @@ class OptionPool():
"{}: Unknown option '{}' specified".format(p, option_name))
option.load_value(node)
+ # load_cli_values()
+ #
+ # Loads the option values specified in a list of tuples
+ # collected from the command line
+ #
+ # Args:
+ # cli_options (list): A list of (str, str) tuples
+ #
+ def load_cli_values(self, cli_options):
for option_name, option_value in cli_options:
try:
option = self.options[option_name]
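
Splitting load_values() lets callers load YAML-declared values and command-line overrides at distinct points, as the _project.py hunk below does. A minimal usage sketch, assuming an OptionPool instance named options and an override_options mapping already loaded from user configuration:

    # YAML values first, then CLI values, so that command-line
    # options override anything declared in configuration files.
    options.load_yaml_values(override_options)
    options.load_cli_values([('arch', 'x86_64')])
    options.resolve()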
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index 9db1967be..c2d11c847 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -94,7 +94,7 @@ class Planner():
# current source refs will not be the effective refs.
# rewritable (bool): Whether the loaded files should be rewritable
# this is a bit more expensive due to deep copies
-# fetch_remote_refs (bool): Whether to attempt to check remote artifact server for new refs
+# use_remote_cache (bool): Whether to connect with remote artifact cache
# load_ticker (callable): A function which will be called for each loaded element
# resolve_ticker (callable): A function which will be called for each resolved element
# cache_ticker (callable): A function which will be called for each element
@@ -116,7 +116,7 @@ class Pipeline():
def __init__(self, context, project, targets, except_,
inconsistent=False,
rewritable=False,
- fetch_remote_refs=False,
+ use_remote_cache=False,
load_ticker=None,
resolve_ticker=None,
remote_ticker=None,
@@ -128,17 +128,17 @@ class Pipeline():
self.unused_workspaces = []
self._resolved_elements = {}
+ # Load selected platform
+ Platform._create_instance(context, project)
+ self.platform = Platform.get_platform()
+ self.artifacts = self.platform.artifactcache
+
loader = Loader(self.project.element_path, targets + except_,
self.project._options)
meta_elements = loader.load(rewritable, load_ticker)
if load_ticker:
load_ticker(None)
- # Load selected platform
- Platform._create_instance(context, project)
- self.platform = Platform.get_platform()
- self.artifacts = self.platform.artifactcache
-
# Create the factories after resolving the project
pluginbase = PluginBase(package='buildstream.plugins')
self.element_factory = ElementFactory(pluginbase, project._plugin_element_paths)
@@ -171,10 +171,11 @@ class Pipeline():
self.project._set_workspace(element, source, workspace)
- if fetch_remote_refs and self.artifacts.can_fetch():
+ if use_remote_cache and self.artifacts.can_fetch():
try:
if remote_ticker:
- remote_ticker(self.artifacts.artifact_pull)
+ remote_ticker(self.artifacts.url)
+ self.artifacts.initialize_remote()
self.artifacts.fetch_remote_refs()
except ArtifactError:
self.message(MessageType.WARN, "Failed to fetch remote refs")
@@ -284,24 +285,6 @@ class Pipeline():
return element
- # Internal: If a remote artifact cache is configured for pushing, check
- # that it actually works. Returns True if it works, False otherwise.
- def can_push_remote_artifact_cache(self):
- if self.artifacts.can_push():
- starttime = datetime.datetime.now()
- self.message(MessageType.START, "Checking connectivity to remote artifact cache")
- try:
- self.artifacts.preflight()
- except ArtifactError as e:
- self.message(MessageType.WARN, str(e),
- elapsed=datetime.datetime.now() - starttime)
- return False
- self.message(MessageType.SUCCESS, "Connectivity OK",
- elapsed=datetime.datetime.now() - starttime)
- return True
- else:
- return False
-
#############################################################
# Commands #
#############################################################
@@ -433,7 +416,7 @@ class Pipeline():
queues.append(pull)
queues.append(fetch)
queues.append(build)
- if self.can_push_remote_artifact_cache():
+ if self.artifacts.can_push():
push = PushQueue()
queues.append(push)
queues[0].enqueue(plan)
diff --git a/buildstream/_project.py b/buildstream/_project.py
index 46f271f5b..957bcf263 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -152,7 +152,8 @@ class Project():
# Collect option values specified in the user configuration
overrides = self._context._get_overrides(self.name)
override_options = _yaml.node_get(overrides, Mapping, 'options', default_value={})
- self._options.load_values(override_options, self._context._cli_options)
+ self._options.load_yaml_values(override_options)
+ self._options.load_cli_values(self._context._cli_options)
# We're done modifying options, now we can use them for substitutions
self._options.resolve()
@@ -171,10 +172,8 @@ class Project():
# Load artifacts pull/push configuration for this project
artifacts = _yaml.node_get(config, Mapping, 'artifacts', default_value={})
- _yaml.node_validate(artifacts, ['pull-url', 'push-url', 'push-port'])
- self.artifact_pull = _yaml.node_get(artifacts, str, 'pull-url', default_value='') or None
- self.artifact_push = _yaml.node_get(artifacts, str, 'push-url', default_value='') or None
- self.artifact_push_port = _yaml.node_get(artifacts, int, 'push-port', default_value=22)
+ _yaml.node_validate(artifacts, ['url'])
+ self.artifact_url = _yaml.node_get(artifacts, str, 'url', default_value='') or None
# Workspace configurations
self._workspaces = self._load_workspace_config()
diff --git a/buildstream/data/userconfig.yaml b/buildstream/data/userconfig.yaml
index 8d066ab52..f43989dcc 100644
--- a/buildstream/data/userconfig.yaml
+++ b/buildstream/data/userconfig.yaml
@@ -53,16 +53,9 @@ scheduler:
#
artifacts:
- # A url from which to download prebuilt artifacts
- pull-url: ''
-
- # A url to upload built artifacts to
- # (must point to the same repository as pull-url)
- push-url: ''
-
- # Specify the port number for pushing artifacts, if it's
- # not the default port 22
- push-port: 22
+ # A url from which to push and pull prebuilt artifacts.
+ # Some protocols only support pushing.
+ url: ''
#
# Logging
diff --git a/buildstream/plugin.py b/buildstream/plugin.py
index 30f4c1cb2..a793c5e5b 100644
--- a/buildstream/plugin.py
+++ b/buildstream/plugin.py
@@ -416,6 +416,18 @@ class Plugin():
"""
self.__message(MessageType.ERROR, brief, detail=detail)
+ def log(self, brief, *, detail=None):
+ """Log a message into the plugin's log file
+
+ The message will not be shown in the master log at all (so it will not
+ be displayed to the user on the console).
+
+ Args:
+ brief (str): The brief message
+ detail (str): An optional detailed message, can be multiline output
+ """
+ self.__message(MessageType.LOG, brief, detail=detail)
+
@contextmanager
def timed_activity(self, activity_name, *, detail=None, silent_nested=False):
"""Context manager for performing timed activities in plugins
diff --git a/buildstream/plugins/elements/compose.py b/buildstream/plugins/elements/compose.py
index 6acd8d5d4..29e289a30 100644
--- a/buildstream/plugins/elements/compose.py
+++ b/buildstream/plugins/elements/compose.py
@@ -125,8 +125,10 @@ class ComposeElement(Element):
if require_split:
+ seen = set()
# Calculate added modified files
for path in utils.list_relative_paths(basedir):
+ seen.add(path)
if snapshot.get(path) is None:
added_files.append(path)
elif snapshot[path] != getmtime(os.path.join(basedir, path)):
@@ -135,7 +137,7 @@ class ComposeElement(Element):
# Calculate removed files
removed_files = [
path for path in manifest
- if not os.path.lexists(os.path.join(basedir, path))
+ if path not in seen
]
self.info("Integration modified {}, added {} and removed {} files"
.format(len(modified_files), len(added_files), len(removed_files)))
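
With the seen set, "removed" is now computed against what the directory walk actually reported, rather than re-testing lexists() for every manifest entry; anything the walk no longer yields counts as removed. An approximate, self-contained sketch of the snapshot/compare pattern, with os.walk standing in for utils.list_relative_paths():

    import os

    def snapshot_mtimes(basedir):
        # Map of relative path -> mtime for everything under basedir
        snap = {}
        for root, _, files in os.walk(basedir):
            for f in files:
                full = os.path.join(root, f)
                snap[os.path.relpath(full, basedir)] = os.path.getmtime(full)
        return snap

    def compare(snapshot, basedir):
        added, modified, seen = [], [], set()
        for path, mtime in snapshot_mtimes(basedir).items():
            seen.add(path)
            if path not in snapshot:
                added.append(path)
            elif snapshot[path] != mtime:
                modified.append(path)
        # Removed: present before, not reported by the walk this time
        removed = [path for path in snapshot if path not in seen]
        return added, modified, removed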
diff --git a/buildstream/plugins/sources/_downloadablefilesource.py b/buildstream/plugins/sources/_downloadablefilesource.py
index a2ed366ad..99eab340e 100644
--- a/buildstream/plugins/sources/_downloadablefilesource.py
+++ b/buildstream/plugins/sources/_downloadablefilesource.py
@@ -3,6 +3,8 @@
import os
import urllib.request
import urllib.error
+import contextlib
+import shutil
from buildstream import Source, SourceError, Consistency
from buildstream import utils
@@ -72,14 +74,16 @@ class DownloadableFileSource(Source):
# Downloads from the url and caches it according to its sha256sum.
try:
with self.tempdir() as td:
- # Using basename because there needs to be a filename, and 'foo'
- # would be too silly.
- temp_dest = os.path.join(td, os.path.basename(self.url))
-
- local_file, _ = urllib.request.urlretrieve(self.url, temp_dest)
- if local_file != temp_dest:
- raise SourceError("Expected to download file to '{}', downloaded to '{}' instead!"
- .format(temp_dest, local_file))
+ default_name = os.path.basename(self.url)
+ request = urllib.request.Request(self.url)
+ request.add_header('Accept', '*/*')
+ with contextlib.closing(urllib.request.urlopen(request)) as response:
+ info = response.info()
+ filename = info.get_filename(default_name)
+ filename = os.path.basename(filename)
+ local_file = os.path.join(td, filename)
+ with open(local_file, 'wb') as dest:
+ shutil.copyfileobj(response, dest)
# Make sure url-specific mirror dir exists.
if not os.path.isdir(self._get_mirror_dir()):
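
The download now honors the server's suggested filename (Content-Disposition) instead of trusting the URL's basename, and streams with shutil.copyfileobj() rather than urlretrieve(). A standalone sketch of the same pattern, with a hypothetical download() helper:

    import contextlib
    import os
    import shutil
    import urllib.request

    def download(url, destdir):
        default_name = os.path.basename(url)
        request = urllib.request.Request(url)
        request.add_header('Accept', '*/*')
        with contextlib.closing(urllib.request.urlopen(request)) as response:
            # Prefer the server-provided filename (Content-Disposition),
            # but never let it escape destdir: keep only the basename.
            filename = os.path.basename(response.info().get_filename(default_name))
            local_file = os.path.join(destdir, filename)
            with open(local_file, 'wb') as dest:
                shutil.copyfileobj(response, dest)
        return local_file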
diff --git a/buildstream/plugins/sources/tar.py b/buildstream/plugins/sources/tar.py
index a93183ab0..385f96c0e 100644
--- a/buildstream/plugins/sources/tar.py
+++ b/buildstream/plugins/sources/tar.py
@@ -104,9 +104,7 @@ class TarSource(DownloadableFileSource):
# consider links which point outside of the chosen
# base directory.
#
- if member.linkname:
- if member.linkname.startswith('./'):
- member.linkname = member.linkname[2:]
+ if member.type == tarfile.LNKTYPE:
member.linkname = member.linkname[l:]
member.path = member.path[l:]
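
The old code rewrote linkname for every member that had one, which also mangled symlink targets; hard links (tarfile.LNKTYPE) store archive-relative paths and are the only ones needing the base-directory prefix stripped. A hedged sketch of the member-rewriting loop, with prefix standing in for the chosen base directory:

    import tarfile

    def strip_base_dir(tar, prefix):
        # Rewrite member paths so 'prefix/' disappears; only hard links
        # carry archive-relative link targets that need the same fix.
        l = len(prefix) + 1  # include the trailing '/'
        for member in tar.getmembers():
            if member.type == tarfile.LNKTYPE:
                member.linkname = member.linkname[l:]
            member.path = member.path[l:]
            if member.path:
                yield member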
diff --git a/contrib/bst-here b/contrib/bst-here
index 6b4e6b2f7..153d520f7 100755
--- a/contrib/bst-here
+++ b/contrib/bst-here
@@ -64,11 +64,19 @@ else
command="/usr/bin/bst $@"
fi
+# FIXME: We run with --privileged to allow bwrap to mount system
+# directories, but this is overkill. We should add the correct
+# --cap-add calls, or seccomp settings, but we are not sure
+# what those are yet.
+#
+# Old settings:
+# --cap-add SYS_ADMIN
+# --security-opt seccomp=unconfined
+#
exec docker run --rm -i${is_tty:+ -t} \
- --cap-add SYS_ADMIN \
+ --privileged \
--env PS1="$BST_HERE_PS1" \
--device /dev/fuse \
- --security-opt seccomp=unconfined \
--volume buildstream-cache:/root/.cache/buildstream \
--volume buildstream-config:/root/.config \
--volume "$PWD":/src \
diff --git a/doc/source/artifacts.rst b/doc/source/artifacts.rst
index 8ad2f600c..330de818c 100644
--- a/doc/source/artifacts.rst
+++ b/doc/source/artifacts.rst
@@ -122,8 +122,8 @@ For this you will want something like the following in your ``/etc/ssh/sshd_conf
# bst-artifact-receive program, note that the full
# command must be specified here; 'artifacts' is
# the HOME relative path to the artifact cache.
- #
- ForceCommand bst-artifact-receive --verbose artifacts
+ # The exact pull URL must also be specified.
+ ForceCommand bst-artifact-receive --pull-url https://example.com/artifacts --verbose artifacts
Summary file updates
@@ -159,15 +159,10 @@ then a user can use the following user configuration:
#
artifacts:
- # A url from which to download prebuilt artifacts
- pull-url: https://artifacts.com
-
- # A url to upload built artifacts to
- push-url: artifacts@artifacts.com:artifacts
+ url: https://artifacts.com/artifacts
- # If the artifact server uses a custom port for sshd
- # then you can specify it here
- push-port: 666
+ # Alternative form if you have push access to the cache
+ #url: ssh://artifacts@artifacts.com:22200/artifacts
Authenticating Users
diff --git a/doc/source/config.rst b/doc/source/config.rst
index 2b86156bd..2b9883f93 100644
--- a/doc/source/config.rst
+++ b/doc/source/config.rst
@@ -41,9 +41,7 @@ it can be overridden on a per project basis using the same format
projects:
project-name:
artifacts:
- pull-url: https://artifacts.com
- push-url: artifacts@artifacts.com:artifacts
- push-port: 443
+ url: https://artifacts.com/artifacts
Strict Build Plan
diff --git a/doc/source/projectconf.rst b/doc/source/projectconf.rst
index cb0a2911b..c8bfdeefc 100644
--- a/doc/source/projectconf.rst
+++ b/doc/source/projectconf.rst
@@ -74,15 +74,7 @@ with an artifact share.
artifacts:
# A url from which to download prebuilt artifacts
- pull-url: https://foo.com/artifacts
-
- # A url to upload built artifacts to
- # (must point to the same repository as pull-url)
- push-url: artifacts@foo.com:artifacts
-
- # Specify the port number for pushing artifacts, if it's
- # not the default port 22
- push-port: 10000
+ url: https://foo.com/artifacts
Plugin Paths
diff --git a/setup.py b/setup.py
index 4d06c87f7..795e8f0cd 100755
--- a/setup.py
+++ b/setup.py
@@ -177,10 +177,12 @@ setup(name='BuildStream',
tests_require=['pep8',
# Pin coverage to 4.2 for now, we're experiencing
# random crashes with 4.4.2
- 'coverage == 4.2',
+ 'coverage == 4.4.0',
'pytest-datafiles',
'pytest-env',
'pytest-pep8',
'pytest-cov',
+ # Provide option to run tests in parallel, less reliable
+ 'pytest-xdist',
'pytest >= 3.1.0'],
zip_safe=False)
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 07d664c5f..116bc9e49 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -53,14 +53,12 @@ def test_push_pull(cli, tmpdir, datafiles, user_url, project_url, override_url):
# Configure artifact share
cli.configure({
'artifacts': {
- 'pull-url': user_url,
- 'push-url': user_url,
+ 'url': user_url,
},
'projects': {
'test': {
'artifacts': {
- 'pull-url': override_url,
- 'push-url': override_url,
+ 'url': override_url,
}
}
}
@@ -71,8 +69,7 @@ def test_push_pull(cli, tmpdir, datafiles, user_url, project_url, override_url):
project_config = _yaml.load(project_conf_file)
project_config.update({
'artifacts': {
- 'pull-url': project_url,
- 'push-url': project_url,
+ 'url': project_url,
}
})
_yaml.dump(_yaml.node_sanitize(project_config), filename=project_conf_file)
@@ -137,8 +134,7 @@ def test_push_pull_all(cli, tmpdir, datafiles):
'pushers': 1
},
'artifacts': {
- 'pull-url': share.repo,
- 'push-url': share.repo,
+ 'url': share.repo,
}
})
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 89a864d16..d4ae6c6dc 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -52,14 +52,12 @@ def test_push(cli, tmpdir, datafiles, user_url, project_url, override_url):
# Configure artifact share
cli.configure({
'artifacts': {
- 'pull-url': user_url,
- 'push-url': user_url,
+ 'url': user_url,
},
'projects': {
'test': {
'artifacts': {
- 'pull-url': override_url,
- 'push-url': override_url,
+ 'url': override_url,
}
}
}
@@ -70,8 +68,7 @@ def test_push(cli, tmpdir, datafiles, user_url, project_url, override_url):
project_config = _yaml.load(project_conf_file)
project_config.update({
'artifacts': {
- 'pull-url': project_url,
- 'push-url': project_url,
+ 'url': project_url,
}
})
_yaml.dump(_yaml.node_sanitize(project_config), filename=project_conf_file)
@@ -112,8 +109,7 @@ def test_push_all(cli, tmpdir, datafiles):
'pushers': 1
},
'artifacts': {
- 'pull-url': share.repo,
- 'push-url': share.repo,
+ 'url': share.repo,
}
})