-rw-r--r-- | buildstream/_frontend/cli.py      | 10
-rw-r--r-- | buildstream/_stream.py            | 21
-rw-r--r-- | tests/frontend/source_checkout.py | 35
3 files changed, 57 insertions, 9 deletions
diff --git a/buildstream/_frontend/cli.py b/buildstream/_frontend/cli.py
index 048cce735..46d598837 100644
--- a/buildstream/_frontend/cli.py
+++ b/buildstream/_frontend/cli.py
@@ -738,11 +738,12 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
 @click.option('--tar', 'tar', default=False, is_flag=True,
               help='Create a tarball from the element\'s sources instead of a '
                    'file tree.')
-@click.argument('element', required=False,
-                type=click.Path(readable=False))
+@click.option('--include-build-scripts', 'build_scripts', is_flag=True)
+@click.argument('element', required=False, type=click.Path(readable=False))
 @click.argument('location', type=click.Path(), required=False)
 @click.pass_obj
-def source_checkout(app, element, location, force, deps, fetch_, except_, tar):
+def source_checkout(app, element, location, force, deps, fetch_, except_,
+                    tar, build_scripts):
     """Checkout sources of an element to the specified location
     """
     if not element and not location:
@@ -766,7 +767,8 @@ def source_checkout(app, element, location, force, deps, fetch_, except_, tar):
                                deps=deps,
                                fetch=fetch_,
                                except_targets=except_,
-                               tar=tar)
+                               tar=tar,
+                               include_build_scripts=build_scripts)


 ##################################################################
diff --git a/buildstream/_stream.py b/buildstream/_stream.py
index 03e189714..059b7f653 100644
--- a/buildstream/_stream.py
+++ b/buildstream/_stream.py
@@ -453,7 +453,8 @@ class Stream():
                         deps='none',
                         fetch=False,
                         except_targets=(),
-                        tar=False):
+                        tar=False,
+                        include_build_scripts=False):

         self._check_location_writable(location, force=force, tar=tar)

@@ -469,7 +470,8 @@ class Stream():

         # Stage all sources determined by scope
         try:
-            self._source_checkout(elements, location, force, deps, fetch, tar)
+            self._source_checkout(elements, location, force, deps,
+                                  fetch, tar, include_build_scripts)
         except BstError as e:
             raise StreamError("Error while writing sources"
                               ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
@@ -807,7 +809,7 @@ class Stream():
             ]

         self._write_element_sources(os.path.join(tempdir, "source"), elements)
-        self._write_build_script(tempdir, elements)
+        self._write_master_build_script(tempdir, elements)
         self._collect_sources(tempdir, tar_location,
                               target.normal_name, compression)

@@ -1197,7 +1199,8 @@ class Stream():
                          force=False,
                          deps='none',
                          fetch=False,
-                         tar=False):
+                         tar=False,
+                         include_build_scripts=False):
         location = os.path.abspath(location)
         location_parent = os.path.abspath(os.path.join(location, ".."))

@@ -1207,6 +1210,8 @@ class Stream():
         temp_source_dir = tempfile.TemporaryDirectory(dir=location_parent)
         try:
             self._write_element_sources(temp_source_dir.name, elements)
+            if include_build_scripts:
+                self._write_build_scripts(temp_source_dir.name, elements)
             if tar:
                 self._create_tarball(temp_source_dir.name, location)
             else:
@@ -1266,8 +1271,14 @@ class Stream():
         except OSError as e:
             raise StreamError("Failed to create tar archive: {}".format(e)) from e

+    # Write all the build_scripts for elements in the directory location
+    def _write_build_scripts(self, location, elements):
+        for element in elements:
+            self._write_element_script(location, element)
+        self._write_master_build_script(location, elements)
+
     # Write a master build script to the sandbox
-    def _write_build_script(self, directory, elements):
+    def _write_master_build_script(self, directory, elements):

         module_string = ""
         for element in elements:
diff --git a/tests/frontend/source_checkout.py b/tests/frontend/source_checkout.py
index 0815d9b9c..f6067498d 100644
--- a/tests/frontend/source_checkout.py
+++ b/tests/frontend/source_checkout.py
@@ -170,3 +170,38 @@ def test_source_checkout_fetch(datafiles, cli, fetch):
         assert os.path.exists(os.path.join(checkout, 'remote-import-dev', 'pony.h'))
     else:
         result.assert_main_error(ErrorDomain.PIPELINE, 'uncached-sources')
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_source_checkout_build_scripts(cli, tmpdir, datafiles):
+    project_path = os.path.join(datafiles.dirname, datafiles.basename)
+    element_name = 'source-bundle/source-bundle-hello.bst'
+    normal_name = 'source-bundle-source-bundle-hello'
+    checkout = os.path.join(str(tmpdir), 'source-checkout')
+
+    args = ['source-checkout', '--include-build-scripts', element_name, checkout]
+    result = cli.run(project=project_path, args=args)
+    result.assert_success()
+
+    # There should be a script for each element (just one in this case) and a top-level build script
+    expected_scripts = ['build.sh', 'build-' + normal_name]
+    for script in expected_scripts:
+        assert script in os.listdir(checkout)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_source_checkout_tar_buildscripts(cli, tmpdir, datafiles):
+    project_path = os.path.join(datafiles.dirname, datafiles.basename)
+    element_name = 'source-bundle/source-bundle-hello.bst'
+    normal_name = 'source-bundle-source-bundle-hello'
+    tar_file = os.path.join(str(tmpdir), 'source-checkout.tar')
+
+    args = ['source-checkout', '--include-build-scripts', '--tar', element_name, tar_file]
+    result = cli.run(project=project_path, args=args)
+    result.assert_success()
+
+    expected_scripts = ['build.sh', 'build-' + normal_name]
+
+    with tarfile.open(tar_file, 'r') as tf:
+        for script in expected_scripts:
+            assert script in tf.getnames()
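
For orientation, here is a minimal sketch (not part of the patch) of how the new flag is exercised end to end; the project directory, element name and checkout path below are hypothetical placeholders, and the expected script names simply mirror what the new tests assert:

    import os
    import subprocess

    # Hypothetical paths/names for illustration only -- substitute your own project.
    project = '/path/to/project'
    element = 'hello.bst'
    checkout = '/tmp/source-checkout'

    # Check out the element's sources together with the generated build scripts.
    subprocess.run(
        ['bst', 'source-checkout', '--include-build-scripts', element, checkout],
        cwd=project, check=True)

    # Alongside the staged sources, the checkout should now contain one
    # build-<normal-name> script per element plus a top-level build.sh,
    # as asserted by the new tests above.
    print(sorted(os.listdir(checkout)))

With `--tar` the same scripts land inside the archive instead of a directory, which is what test_source_checkout_tar_buildscripts checks.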