summary refs log tree commit diff
diff options
context:
space:
mode:
authorDaniel Firth <dan.firth@codethink.co.uk>2016-11-30 12:36:49 +0000
committerDaniel Firth <dan.firth@codethink.co.uk>2016-11-30 12:45:04 +0000
commite9a573e1baf737d65353e4bb3febcd2fdc4ffd86 (patch)
treeff9a674ca349c2919a263d8b4ee66fce925a6a2b
parentde60729f09489a1144d17623295281d764b75c7e (diff)
downloadybd-lc/backport_splitting_off.tar.gz
Backport - Turn off splitting at chunk compile time, write out chunk metafiles to the system baserock/ folder at system (lc/backport_splitting_off)
installation time. Do not perform any actual splitting.
-rw-r--r--ybd/assembly.py24
-rw-r--r--ybd/splitting.py215
2 files changed, 40 insertions, 199 deletions
diff --git a/ybd/assembly.py b/ybd/assembly.py
index 34f278a..d917253 100644
--- a/ybd/assembly.py
+++ b/ybd/assembly.py
@@ -21,13 +21,15 @@ import fcntl
import errno
import app
+import splitting
+import utils
from app import config, timer, elapsed
from app import log, log_riemann, lockfile, RetryException
from cache import cache, cache_key, get_cache, get_remote
import repos
import sandbox
import datetime
-from splitting import write_metadata, install_split_artifacts
+from fs.osfs import OSFS
def compose(dn):
@@ -134,6 +136,18 @@ def install_dependencies(dn, dependencies=None):
sandbox.list_files(dn)
+def get_includes_iterator(dn):
+ f = [app.defs.get(x)
+ for x in dn['contents']
+ if app.defs.get(x).get('build-mode', 'staging') == 'staging']
+ for y in f:
+ if y.get('kind', 'chunk') == 'chunk':
+ yield y
+ g = list(get_includes_iterator(y))
+ for z in g:
+ yield z
+
+
def build(dn):
'''Create an artifact for a single component and add it to the cache'''
@@ -149,9 +163,13 @@ def build(dn):
with timer(dn, 'artifact creation'):
if dn.get('kind', 'chunk') == 'system':
- install_split_artifacts(dn)
+ for x in get_includes_iterator(dn):
+ rules = splitting.compile_rules(x)
+ a = get_cache(x) + '.unpacked'
+ meta = os.path.join(dn['baserockdir'], x['name'] + '.meta')
+ splitting.write_chunk_metafile(x, meta)
+ utils.copy_all_files(a, dn['install'])
- write_metadata(dn)
cache(dn)
diff --git a/ybd/splitting.py b/ybd/splitting.py
index 38ead98..a2a6d9e 100644
--- a/ybd/splitting.py
+++ b/ybd/splitting.py
@@ -21,90 +21,7 @@ import os
import re
import yaml
from utils import copy_file_list
-
-
-def install_split_artifacts(dn):
- '''Create the .meta files for a split system
-
- Given a list of artifacts to split, writes new .meta files to
- the baserock dir in dn['install'] and copies the files from the
- sandbox to the dn['install']
-
- '''
-
- for content in dn['contents']:
- key = content.keys()[0]
- stratum = app.defs.get(key)
- move_required_files(dn, stratum, content[key])
-
-
-def move_required_files(dn, stratum, artifacts):
- stratum_metadata = get_metadata(stratum)
- split_stratum_metadata = {}
- if not artifacts:
- # Include all artifacts if no ones were explicitly given for an
- # included stratum on a system.
- artifacts = [p['artifact'] for p in stratum_metadata['products']]
-
- to_keep = [component
- for product in stratum_metadata['products']
- for component in product['components']
- if product['artifact'] in artifacts]
-
- split_stratum_metadata['products'] = (
- [product
- for product in stratum_metadata['products']
- if product['artifact'] in artifacts])
-
- log(dn, 'Installing %s artifacts' % stratum['name'], artifacts)
- log(dn, 'Installing components:', to_keep, verbose=True)
-
- baserockpath = os.path.join(dn['install'], 'baserock')
- if not os.path.isdir(baserockpath):
- os.mkdir(baserockpath)
- split_stratum_metafile = os.path.join(baserockpath,
- stratum['name'] + '.meta')
- with open(split_stratum_metafile, "w") as f:
- yaml.safe_dump(split_stratum_metadata, f, default_flow_style=False)
-
- for path in stratum['contents']:
- chunk = app.defs.get(path)
- if chunk.get('build-mode', 'staging') == 'bootstrap':
- continue
-
- try:
- metafile = path_to_metafile(chunk)
- with open(metafile, "r") as f:
- filelist = []
- metadata = yaml.safe_load(f)
- split_metadata = {'ref': metadata.get('ref'),
- 'repo': metadata.get('repo'),
- 'products': []}
- if config.get('artifact-version', 0) not in range(0, 1):
- metadata['cache'] = dn.get('cache')
-
- for product in metadata['products']:
- if product['artifact'] in to_keep:
- filelist += product.get('components', [])
- # handle old artifacts still containing 'files'
- filelist += product.get('files', [])
-
- split_metadata['products'].append(product)
-
- if split_metadata['products'] != []:
- split_metafile = os.path.join(baserockpath,
- os.path.basename(metafile))
- with open(split_metafile, "w") as f:
- yaml.safe_dump(split_metadata, f,
- default_flow_style=False)
- log(dn, 'Splits split_metadata is\n', split_metadata,
- verbose=True)
- log(dn, 'Splits filelist is\n', filelist, verbose=True)
- copy_file_list(dn['sandbox'], dn['install'], filelist)
- except:
- import traceback
- traceback.print_exc()
- log(dn, 'Failed to install split components', exit=True)
+from fs.osfs import OSFS
def check_overlaps(dn):
@@ -129,33 +46,8 @@ def check_overlaps(dn):
config['new-overlaps'] = []
-def get_metadata(dn):
- '''Load an individual .meta file
-
- The .meta file is expected to be in the .unpacked/baserock directory of the
- built artifact
-
- '''
- try:
- with open(path_to_metafile(dn), "r") as f:
- metadata = yaml.safe_load(f)
- log(dn, 'Loaded metadata', dn['path'], verbose=True)
- return metadata
- except:
- log(dn, 'WARNING: problem loading metadata', dn)
- return None
-
-
-def path_to_metafile(dn):
- ''' Return the path to metadata file for dn. '''
-
- return os.path.join(get_cache(dn) + '.unpacked', 'baserock',
- dn['name'] + '.meta')
-
-
def compile_rules(dn):
regexps = []
- splits = {}
split_rules = dn.get('products', [])
default_rules = app.defs.defaults.get_split_rules(dn.get('kind', 'chunk'))
for rules in split_rules, default_rules:
@@ -165,103 +57,34 @@ def compile_rules(dn):
if artifact.startswith('-'):
artifact = dn['name'] + artifact
regexps.append([artifact, regexp])
- splits[artifact] = []
- return regexps, splits
+ return regexps
-def write_metadata(dn):
- if dn.get('kind', 'chunk') == 'chunk':
- write_chunk_metafile(dn)
- elif dn.get('kind', 'chunk') == 'stratum':
- write_stratum_metafiles(dn)
- if config.get('check-overlaps', 'ignore') != 'ignore':
- check_overlaps(dn)
-
-
-def write_chunk_metafile(chunk):
- '''Writes a chunk .meta file to the baserock dir of the chunk
+def write_chunk_metafile(chunk, metafile):
+ '''Writes a .meta file to the specified filename
The split rules are used to divide up the installed files for the chunk
into artifacts in the 'products' list
'''
log(chunk['name'], 'Splitting', chunk.get('kind'))
- rules, splits = compile_rules(chunk)
-
- with chdir(chunk['install']):
- for root, dirs, files in os.walk('.', topdown=False):
- for name in files + dirs:
- path = os.path.join(root, name)[2:]
- for artifact, rule in rules:
- if rule.match(path) or rule.match(path + '/'):
- splits[artifact].append(path)
- break
-
- write_metafile(rules, splits, chunk)
-
-
-def write_stratum_metafiles(stratum):
- '''Write the .meta files for a stratum to the baserock dir
-
- The split rules are used to divide up the installed components into
- artifacts in the 'products' list in the stratum .meta file. Each artifact
- contains a list of chunk artifacts which match the stratum splitting rules
-
- '''
-
- log(stratum['name'], 'Splitting', stratum.get('kind'))
- rules, splits = compile_rules(stratum)
-
- for item in stratum['contents']:
- chunk = app.defs.get(item)
- if chunk.get('build-mode', 'staging') == 'bootstrap':
- continue
-
- metadata = get_metadata(chunk)
- split_metadata = {'ref': metadata.get('ref'),
- 'repo': metadata.get('repo'),
- 'products': []}
-
- if config.get('artifact-version', 0) not in range(0, 1):
- split_metadata['cache'] = metadata.get('cache')
-
- chunk_artifacts = app.defs.get(chunk).get('artifacts', {})
- for artifact, target in chunk_artifacts.items():
- splits[target].append(artifact)
-
- for product in metadata['products']:
- for artifact, rule in rules:
- if rule.match(product['artifact']):
- split_metadata['products'].append(product)
- splits[artifact].append(product['artifact'])
- break
-
- meta = os.path.join(stratum['baserockdir'], chunk['name'] + '.meta')
-
- with open(meta, "w") as f:
- yaml.safe_dump(split_metadata, f, default_flow_style=False)
-
- write_metafile(rules, splits, stratum)
-
-
-def write_metafile(rules, splits, dn):
- metadata = {'products': [{'artifact': a,
+ rules = compile_rules(chunk)
+ splits = dict((a[0], []) for a in rules)
+
+ inst = OSFS(get_cache(chunk) + '.unpacked')
+ for path in inst.walkfiles():
+ for artifact, rule in rules:
+ if rule.match(str(path)) or rule.match(str(path) + '/'):
+ splits[artifact].append(path)
+ break
+
+ metadata = {'cache': chunk['cache'],
+ 'repo': chunk['repo'],
+ 'ref': chunk['ref'],
+ 'products': [{'artifact': a,
'components': sorted(set(splits[a]))}
for a, r in rules]}
- if dn.get('kind', 'chunk') == 'chunk':
- metadata['repo'] = dn.get('repo')
- metadata['ref'] = dn.get('ref')
- else:
- if config.get('artifact-version', 0) not in range(0, 2):
- metadata['repo'] = config['defdir']
- metadata['ref'] = config['def-version']
-
- if config.get('artifact-version', 0) not in range(0, 1):
- metadata['cache'] = dn.get('cache')
-
- meta = os.path.join(dn['baserockdir'], dn['name'] + '.meta')
-
- with open(meta, "w") as f:
+ with open(metafile, "w") as f:
yaml.safe_dump(metadata, f, default_flow_style=False)